From 14eafb84e6e3d0107b3480c3a0dc4af62f1c516c Mon Sep 17 00:00:00 2001
From: Benjamin Hills
Date: Sat, 24 Oct 2020 21:32:12 -0600
Subject: [PATCH 01/17] Create an Uncertainty Notebook

Starting a notebook for uncertainty analysis. For now, it only goes as
far as a plot of correlation vs. offset for one subset of one profile.

---
 environment.yaml                              |   1 +
 notebooks/Uncertainty_Analysis_Tutorial.ipynb | 154 +++++++++++++++++-
 requirements.txt                              |   1 +
 setup.py                                      |   1 +
 4 files changed, 156 insertions(+), 1 deletion(-)

diff --git a/environment.yaml b/environment.yaml
index aa52196..70b1fd5 100644
--- a/environment.yaml
+++ b/environment.yaml
@@ -6,6 +6,7 @@ dependencies:
   - jupyterlab=0.35.*
   - matplotlib=3.0.*
   - ipympl
+  - ipywidgets
   - scipy=1.2.*
   - numpy
   - pandas=0.24.*
diff --git a/notebooks/Uncertainty_Analysis_Tutorial.ipynb b/notebooks/Uncertainty_Analysis_Tutorial.ipynb
index 3034949..74f41fa 100644
--- a/notebooks/Uncertainty_Analysis_Tutorial.ipynb
+++ b/notebooks/Uncertainty_Analysis_Tutorial.ipynb
@@ -5,7 +5,159 @@
    "execution_count": null,
    "metadata": {},
    "outputs": [],
-   "source": []
+   "source": [
+    "# Import the basic libraries\n",
+    "import numpy as np\n",
+    "import matplotlib.pyplot as plt\n",
+    "%matplotlib notebook"
+   ]
+  },
+  {
+   "cell_type": "markdown",
+   "metadata": {},
+   "source": [
+    "## Load the Data"
+   ]
+  },
+  {
+   "cell_type": "code",
+   "execution_count": null,
+   "metadata": {},
+   "outputs": [],
+   "source": [
+    "# Import functions for the velocity calculation; correlate all the beams from one set of repeat ground tracks, rgt = 0848\n",
+    "from IS2_velocity.correlation_processing import calculate_velocities\n",
+    "from IS2_velocity.readers import load_data_by_rgt\n",
+    "from scipy.signal import correlate\n",
+    "from astropy.time import Time\n",
+    "\n",
+    "### Data directory\n",
+    "data_dir = '../data/'\n",
+    "\n",
+    "### Select rgt for now\n",
+    "rgt = '0848'\n",
+    "\n",
+    "### Control the correlation step:\n",
+    "segment_length = 2000 # meters; width of the window we correlate at each step\n",
+    "search_width = 1000 # meters; how far in front of and behind the window to check for correlation\n",
+    "along_track_step = 100 # meters; how far to jump between consecutive velocity determinations\n",
+    "max_percent_nans = 10 # maximum % of the segment length that can be nans and still do the correlation step\n",
+    "\n",
+    "### Which product\n",
+    "product = 'ATL06'\n",
+    "if product == 'ATL06':\n",
+    "    dx = 20\n",
+    "\n",
+    "### Select filter type and required arguments; currently only this running mean is supported\n",
+    "filter_type = 'running_average'\n",
+    "running_avg_window = 100 # meters\n",
+    "\n",
+    "### Load data\n",
+    "x_atc, lats, lons, h_li_raw, h_li_raw_NoNans, h_li, h_li_diff, times, min_seg_ids, \\\n",
+    "segment_ids, cycles_this_rgt, x_ps, y_ps = \\\n",
+    "    load_data_by_rgt(rgt = rgt, path_to_data = data_dir, product = 'ATL06', \\\n",
+    "                     filter_type = filter_type, running_avg_window = running_avg_window, \\\n",
+    "                     format = 'hdf5')"
+   ]
+  },
+  {
+   "cell_type": "markdown",
+   "metadata": {},
+   "source": [
+    "## Correlation"
+   ]
+  },
+  {
+   "cell_type": "code",
+   "execution_count": null,
+   "metadata": {},
+   "outputs": [],
+   "source": [
+    "cycle1='03'\n",
+    "cycle2='04'\n",
+    "beam = 'gt1l'\n",
+    "start_ind = 3500\n",
+    "\n",
+    "x_in1 = x_atc[cycle1][beam]\n",
+    "dh1 = h_li_diff[cycle1][beam]\n",
+    "dh2 = h_li_diff[cycle2][beam]\n",
+    "\n",
+    "xi = start_ind\n",
+    "x_start = x_in1[start_ind]\n",
+    "\n",
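+    "# Windowing scheme: a stencil of segment_length meters is cut from cycle 1\n",
+    "# and compared against a window from cycle 2 that is padded by search_width\n",
+    "# meters on both ends, so the stencil can slide within it.\n",
+    "# find indices for the stencil\n",
+    "idx1 = 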
np.argmin(abs(x_in1-x_start))\n",
+    "idx2 = idx1 + int(np.round(segment_length/dx))\n",
+    "# cut out a wider chunk of data at time t2 (second cycle)\n",
+    "idx3 = idx1 - int(np.round(search_width/dx)) # offset on earlier end by # indices in search_width\n",
+    "idx4 = idx2 + int(np.round(search_width/dx)) # offset on later end by # indices in search_width\n",
+    "\n",
+    "# get the two arrays that will be correlated to one another\n",
+    "dh_stencil = dh1[idx1:idx2]\n",
+    "dh_wide = dh2[idx3:idx4]\n",
+    "\n",
+    "# correlate old with newer data\n",
+    "corr = correlate(dh_stencil, dh_wide, mode = 'valid', method = 'direct')\n",
+    "norm_val = np.sqrt(np.sum(dh_stencil**2)*np.sum(dh_wide**2)) # normalize so the correlation magnitude is bounded by 1\n",
+    "corr = corr / norm_val\n",
+    "lagvec = np.arange(-int(np.round(search_width/dx)), int(np.round(search_width/dx)) + 1, 1) # for mode = 'valid'\n",
+    "shift_vec = lagvec * dx\n",
+    "\n",
+    "t1_string = times[cycle1][beam][0].astype(str) # figure out later whether just picking the first time stamp is OK\n",
+    "t1 = Time(t1_string)\n",
+    "t2_string = times[cycle2][beam][0].astype(str) # figure out later whether just picking the first time stamp is OK\n",
+    "t2 = Time(t2_string)\n",
+    "dt = (t2 - t1).jd # difference in julian days"
+   ]
+  },
+  {
+   "cell_type": "markdown",
+   "metadata": {},
+   "source": [
+    "## Plot with Widget"
+   ]
+  },
+  {
+   "cell_type": "code",
+   "execution_count": null,
+   "metadata": {},
+   "outputs": [],
+   "source": [
+    "import ipywidgets\n",
+    "\n",
+    "x0 = x_atc[cycle1][beam][start_ind]\n",
+    "search_x = int(np.round(search_width/dx))\n",
+    "\n",
+    "plt.figure(figsize=(8,5))\n",
+    "plt.subplot(221)\n",
+    "plt.tick_params(labelbottom=False)\n",
+    "l1, = plt.plot((x_atc[cycle1][beam][idx1:idx2]-x0-search_width)/1000.,h_li[cycle1][beam][idx1:idx2])\n",
+    "plt.plot((x_atc[cycle2][beam][idx3:idx4]-x0)/1000.,h_li[cycle2][beam][idx3:idx4])\n",
+    "plt.ylabel('Elevation (m)')\n",
+    "\n",
+    "plt.subplot(223)\n",
+    "l2, = plt.plot((x_atc[cycle1][beam][idx1:idx2]-x0-search_width)/1000.,h_li_diff[cycle1][beam][idx1:idx2])\n",
+    "plt.plot((x_atc[cycle2][beam][idx3:idx4]-x0)/1000.,h_li_diff[cycle2][beam][idx3:idx4])\n",
+    "plt.ylabel('Slope (m/m)')\n",
+    "plt.xlabel('Along-Track Distance (km)')\n",
+    "\n",
+    "plt.subplot(224)\n",
+    "plt.plot(shift_vec/(dt/365.),corr,'k')\n",
+    "p1, = plt.plot(shift_vec[0]/(dt/365.),corr[0],'k.',mfc='w',mew=2,ms=10)\n",
+    "plt.ylabel('Correlation')\n",
+    "plt.xlabel('Velocity (m/yr)')\n",
+    "\n",
+    "plt.tight_layout()\n",
+    "\n",
+    "# An update function for the interactive figure\n",
+    "def update(n_step = (0,search_x*2)):\n",
+    "    l1.set_xdata((x_atc[cycle1][beam][idx1:idx2]-x0+search_width-n_step*dx)/1000.)\n",
+    "    l2.set_xdata((x_atc[cycle1][beam][idx1:idx2]-x0+search_width-n_step*dx)/1000.)\n",
+    "    p1.set_data(np.transpose([shift_vec[int(n_step)]/(dt/365.),corr[int(n_step)]]))\n",
+    "\n",
+    "# Update with the sliders\n",
+    "ipywidgets.interactive(update)"
+   ]
  }
 ],
 "metadata": {
diff --git a/requirements.txt b/requirements.txt
index 35e2b5c..b586e1f 100644
--- a/requirements.txt
+++ b/requirements.txt
@@ -6,4 +6,5 @@ astropy
 pyproj
 pandas
 ipympl
+ipywidgets
 pointCollection
diff --git a/setup.py b/setup.py
index 91ae1dc..f7c75a4 100644
--- a/setup.py
+++ b/setup.py
@@ -16,6 +16,7 @@
         'pyproj',
         'pandas',
         'ipympl',
+        'ipywidgets',
         'pointCollection @ git+https://github.com/smithB/pointCollection.git#egg=pointCollection-1.0.0.0'
     ]
 )

From 3db9f13dae2d8049713fc84d2f7f538285c47116 Mon Sep 17 00:00:00 2001
From: Benjamin Hills
Date: Mon, 26 Oct 2020 14:42:31 -0600
Subject: [PATCH 02/17] Move beam loop into function The velocity calculation was being done for all beams in a loop within the calculate function. Trying to clean this up. --- IS2_velocity/correlation_processing.py | 366 ++++++++++++------------- 1 file changed, 176 insertions(+), 190 deletions(-) diff --git a/IS2_velocity/correlation_processing.py b/IS2_velocity/correlation_processing.py index e8abb28..d4a771a 100644 --- a/IS2_velocity/correlation_processing.py +++ b/IS2_velocity/correlation_processing.py @@ -2,9 +2,10 @@ # -*- coding: utf-8 -*- import numpy as np -from scipy.signal import correlate +from scipy.signal import correlate,detrend +import glob from astropy.time import Time -import os,re,h5py,pyproj +import h5py,pyproj from IS2_velocity.extract_alongtrack import get_measures_along_track_velocity def fill_seg_ids(x_in,h_in,seg_ids,dx=20): @@ -119,18 +120,11 @@ def calculate_velocities(rgt, x_atc, h_li_raw, h_li_diff, lats, lons, segment_id :param dx: :return: """ - # takes output of load_data_by_rgt - - from scipy.signal import correlate, detrend - import glob ### Create dictionaries to put info in velocities = {} correlations = {} lags = {} - x_atcs_for_velocities = {} - latitudes = {} - longitudes = {} midpoints_x_atc = {} midpoints_lats = {} midpoints_lons = {} @@ -154,10 +148,10 @@ def calculate_velocities(rgt, x_atc, h_li_raw, h_li_diff, lats, lons, segment_id t2 = Time(t2_string) dt = (t2 - t1).jd # difference in julian days - if saving: - ### Where to save the results: - h5_file_out = write_out_path + prepend + '_rgt' + rgt + '.hdf5' + ### Where to save the results: + h5_file_out = write_out_path + prepend + '_rgt' + rgt + '.hdf5' + if saving: ### Save some metadata with h5py.File(h5_file_out, 'w') as f: f['dx'] = dx @@ -171,183 +165,180 @@ def calculate_velocities(rgt, x_atc, h_li_raw, h_li_diff, lats, lons, segment_id f['process_date'] = str(Time.now().value) f['rgt'] = rgt - ### Loop over each beam for beam in beams: + velocities,correlations,lags,midpoints_x_atc,midpoints_lats,midpoints_lons,midpoints_seg_ids,midpoints_xy = calculate_velocity_single_beam(x_atc, + cycle1,cycle2,beam,search_width,segment_length,dx, + velocities,correlations,midpoints_x_atc,midpoints_lats,midpoints_lons, + midpoints_seg_ids,midpoints_xy,rgt, + along_track_step,lags,lats,lons,segment_ids,h_li_diff,h_li_raw, + dt,saving,max_percent_nans,map_data_root,h5_file_out,times,spatial_extent) - ### Determine x1s, which are the x_atc coordinates at which each cut out window begins - # To be common between both repeats, the first point x1 needs to be the larger first value between repeats - min_x_atc_cycle1 = x_atc[cycle1][beam][0] - min_x_atc_cycle2 = x_atc[cycle2][beam][0] - - # pick out the track that starts at greater x_atc, and use that as x1s vector - if min_x_atc_cycle1 != min_x_atc_cycle2: - x1 = np.nanmax([min_x_atc_cycle1, min_x_atc_cycle2]) - cycle_n = np.arange(0, 2)[[min_x_atc_cycle1, min_x_atc_cycle2] == x1][0] - if cycle_n == 0: - cycletmp = cycle2 - elif cycle_n == 1: - cycletmp = cycle1 - # n_segments_this_track = (len(x_atc[cycletmp][beam]) - 2 * search_width/dx) / (along_track_step/dx) - - ### Generate the x1s vector, in the case that the repeat tracks don't start in the same place - x1s = x_atc[cycletmp][beam][ - int(search_width / dx) + 1:-int(segment_length / dx) - int(search_width / dx):int(along_track_step / dx)] - #### ! 
this line updated 2020 07 23 - # x1s = x_atc[cycletmp][beam][int(search_width/dx)+1::int(search_width/dx)] - # start at search_width/dx in, so the code never tries to get data outside the edges of this rgt - # add 1 bc the data are differentiated, and h_li_diff is therefore one point shorter - - elif min_x_atc_cycle1 == min_x_atc_cycle2: # doesn't matter which cycle - ### Generate the x1s vector, in the case that the repeat tracks do start in the same place - x1s = x_atc[cycle1][beam][ - int(search_width / dx) + 1:-int(segment_length / dx) - int(search_width / dx):int(along_track_step / dx)] - #### ! this line updated 2020 07 23 - # x1s = x_atc[cycle1][beam][int(search_width/dx)+1::int(search_width/dx)] - - ### Determine xend, where the x1s vector ends: smaller value for both beams, if different - max_x_atc_cycle1 = x_atc[cycle1][beam][-1] - search_width / dx - max_x_atc_cycle2 = x_atc[cycle2][beam][-1] - search_width / dx - smallest_xatc = np.min([max_x_atc_cycle1, max_x_atc_cycle2]) - ixmax = np.where(x1s >= (smallest_xatc - search_width / dx)) - if len(ixmax[0]) >= 1: - ixtmp = ixmax[0][0] - x1s = x1s[:ixtmp] - - ### Create vectors to store results in - velocities[rgt][beam] = np.empty_like(x1s) - correlations[rgt][beam] = np.empty_like(x1s) - lags[rgt][beam] = np.empty_like(x1s) - midpoints_x_atc[rgt][beam] = np.empty_like(x1s) - midpoints_lats[rgt][beam] = np.empty_like(x1s) - midpoints_lons[rgt][beam] = np.empty_like(x1s) - midpoints_seg_ids[rgt][beam] = np.empty_like(x1s) - - # midpoints_x_atc = np.empty(np.shape(x1s)) # for writing out - # midpoints_lat = np.empty(np.shape(x1s)) # for writing out - # midpoints_lon = np.empty(np.shape(x1s)) # for writing out - # midpoints_seg_ids = np.empty(np.shape(x1s)) # for writing out - - ### Entire x_atc vectors for both cycles - x_full_t1 = x_atc[cycle1][beam] - x_full_t2 = x_atc[cycle2][beam] - - ### Loop over x1s, positions along track that each window starts at - for xi, x1 in enumerate(x1s): - - ### Cut out data: small chunk of data at time t1 (first cycle) - ix_x1 = np.arange(len(x_full_t1))[x_full_t1 >= x1][0] # Index of first point that is greater than x1 - ix_x2 = ix_x1 + int( - np.round(segment_length / dx)) # ix_x1 + number of datapoints within the desired segment length - x_t1 = x_full_t1[ix_x1:ix_x2] # cut out x_atc values, first cycle - lats_t1 = lats[cycle1][beam][ix_x1:ix_x2] # cut out latitude values, first cycle - lons_t1 = lons[cycle1][beam][ix_x1:ix_x2] # cut out longitude values, first cycle - seg_ids_t1 = segment_ids[cycle1][beam][ix_x1:ix_x2] # cut out segment_ids, first cycle - h_li1 = h_li_diff[cycle1][beam][ - ix_x1 - 1:ix_x2 - 1] # cut out land ice height values, first cycle; start 1 index earlier because - # the h_li_diff data are differentiated, and therefore one sample shorter - - # Find segment midpoints; this is the position where we will assign the velocity measurement from each window - n = len(x_t1) - midpt_ix = int(np.floor(n / 2)) - midpoints_x_atc[rgt][beam][xi] = x_t1[midpt_ix] - midpoints_lats[rgt][beam][xi] = lats_t1[midpt_ix] - midpoints_lons[rgt][beam][xi] = lons_t1[midpt_ix] - midpoints_seg_ids[rgt][beam][xi] = seg_ids_t1[midpt_ix] - - ### Cut out data: wider chunk of data at time t2 (second cycle) - ix_x3 = ix_x1 - int(np.round(search_width / dx)) # extend on earlier end by number of indices in search_width - ix_x4 = ix_x2 + int(np.round(search_width / dx)) # extend on later end by number of indices in search_width - x_t2 = x_full_t2[ix_x3:ix_x4] # cut out x_atc values, second cycle - h_li2 = 
h_li_diff[cycle2][beam][ - ix_x3 - 1:ix_x4 - 1] # cut out land ice height values, second cycle; start 1 index earlier because - # the h_li_diff data are differentiated, and therefore one sample shorter - - ### Determine number of nans in each data chunk - n_nans1 = np.sum(np.isnan(h_li_raw[cycle1][beam][ix_x1:ix_x2])) - n_nans2 = np.sum(np.isnan(h_li_raw[cycle2][beam][ix_x3:ix_x4])) - - ### Only process if there are fewer than 10% nans in either data chunk: - if (n_nans1 / len(h_li1) <= max_percent_nans / 100) and (n_nans2 / len(h_li2) <= max_percent_nans / 100): - - # Detrend both chunks of data - h_li1 = detrend(h_li1, type='linear') - h_li2 = detrend(h_li2, type='linear') - - # Normalize both chunks of data, if desired - # h_li1 = h_li1 / np.nanmax(np.abs(h_li1)) - # h_li2 = h_li2 / np.nanmax(np.abs(h_li2)) - - ### Correlate the old and new data - # We made the second data vector longer than the first, so the valid method returns values - corr = correlate(h_li1, h_li2, mode='valid', method='direct') - - ### Normalize correlation function by autocorrelations - # Normalizing coefficient changes with each step along track; this section determines a changing along track normalizing coefficiant - coeff_a_val = np.sum(h_li1 ** 2) - coeff_b_val = np.zeros(len(h_li2) - len(h_li1) + 1) - for shift in range(len(h_li2) - len(h_li1) + 1): - h_li2_section = h_li2[shift:shift + len(h_li1)] - coeff_b_val[shift] = np.sum(h_li2_section ** 2) - norm_vec = np.sqrt(coeff_a_val * coeff_b_val) - corr_normed = corr / np.flip( - norm_vec) # i don't really understand why this has to flip, but otherwise it yields correlation values above 1... - # TASK: figure out why the flip is needed - - ### Create a vector of lags for the correlation function - lagvec = np.arange(- int(np.round(search_width / dx)), int(search_width / dx) + 1, 1) # for mode = 'valid' - - ### Convert lag to distance - shift_vec = lagvec * dx - - ### ID peak correlation coefficient - ix_peak = np.arange(len(corr_normed))[corr_normed == np.nanmax(corr_normed)][0] - - ### Save correlation coefficient, best lag, velocity, etc at the location of peak correlation coefficient - best_lag = lagvec[ix_peak] - best_shift = shift_vec[ix_peak] - - # ### ID peak correlation coefficient; 'raw' = no sub-sampling - # plotting = False - # best_lag, peak_corr_value = find_correlation_peak(lagvec, shift_vec, corr_normed, max_width, min_width, dx_interp, type, plotting) - # best_shift = best_lag * dx - - velocities[rgt][beam][xi] = best_shift / (dt / 365) - correlations[rgt][beam][xi] = corr_normed[ix_peak] - lags[rgt][beam][xi] = lagvec[ix_peak] + return velocities, correlations, lags, midpoints_x_atc, midpoints_xy, midpoints_lons, midpoints_lats +# ------------------------------------------------------------------------------------------- - else: - ### If there are too many nans, just save a nan - velocities[rgt][beam][xi] = np.nan - correlations[rgt][beam][xi] = np.nan - lags[rgt][beam][xi] = np.nan - - ### Output xy locations of midpoints as well - midpoints_xy[rgt][beam] = np.array(pyproj.Proj(3031)(midpoints_lons[rgt][beam], midpoints_lats[rgt][beam])) - #extract measures veloc outside of this loop - - if saving: - measures_Vx_path = glob.glob( map_data_root + '*_Vx.tif')[0] - measures_Vy_path = glob.glob( map_data_root + '*_Vy.tif')[0] - - ### Add velocities to hdf5 file for each beam - with h5py.File(h5_file_out, 'a') as f: - f[beam + '/x_atc'] = midpoints_x_atc[rgt][beam] # assign x_atc value of half way along the segment - f[beam + '/latitudes'] = 
midpoints_lats[rgt][beam] # assign x_atc value of half way along the segment - f[beam + '/longitudes'] = midpoints_lons[rgt][beam] # assign x_atc value of half way along the segment - f[beam + '/velocities'] = velocities[rgt][beam] # assign x_atc value of half way along the segment - f[beam + '/correlation_coefficients'] = correlations[rgt][beam] # assign x_atc value of half way along the segment - f[beam + '/best_lags'] = lags[rgt][beam] # assign x_atc value of half way along the segment - f[beam + '/segment_ids'] = midpoints_seg_ids[rgt][beam] - f[beam + '/first_cycle_time'] = str(Time(times[cycle1][beam][0])) - f[beam + '/second_cycle_time'] = str(Time(times[cycle2][beam][0])) - f[beam + '/Measures_v_along'] = get_measures_along_track_velocity(midpoints_xy[rgt][beam][0], midpoints_xy[rgt][beam][1], spatial_extent, - measures_Vx_path, measures_Vy_path) +def calculate_velocity_single_beam(x_atc,cycle1,cycle2,beam,search_width,segment_length,dx, + velocities,correlations,midpoints_x_atc,midpoints_lats,midpoints_lons, + midpoints_seg_ids,midpoints_xy,rgt, + along_track_step,lags,lats,lons,segment_ids,h_li_diff,h_li_raw, + dt,saving,max_percent_nans,map_data_root,h5_file_out,times,spatial_extent): + """ + """ - return velocities, correlations, lags, midpoints_x_atc, midpoints_xy, midpoints_lons, midpoints_lats + ### Determine x1s, which are the x_atc coordinates at which each cut out window begins + # To be common between both repeats, the first point x1 needs to be the larger first value between repeats + min_x_atc_cycle1 = x_atc[cycle1][beam][0] + min_x_atc_cycle2 = x_atc[cycle2][beam][0] + + # pick out the track that starts at greater x_atc, and use that as x1s vector + if min_x_atc_cycle1 != min_x_atc_cycle2: + x1 = np.nanmax([min_x_atc_cycle1, min_x_atc_cycle2]) + cycle_n = np.arange(0, 2)[[min_x_atc_cycle1, min_x_atc_cycle2] == x1][0] + if cycle_n == 0: + cycletmp = cycle2 + elif cycle_n == 1: + cycletmp = cycle1 + # n_segments_this_track = (len(x_atc[cycletmp][beam]) - 2 * search_width/dx) / (along_track_step/dx) + + ### Generate the x1s vector, in the case that the repeat tracks don't start in the same place + x1s = x_atc[cycletmp][beam][ + int(search_width / dx) + 1:-int(segment_length / dx) - int(search_width / dx):int(along_track_step / dx)] + #### ! this line updated 2020 07 23 + # x1s = x_atc[cycletmp][beam][int(search_width/dx)+1::int(search_width/dx)] + # start at search_width/dx in, so the code never tries to get data outside the edges of this rgt + # add 1 bc the data are differentiated, and h_li_diff is therefore one point shorter + + elif min_x_atc_cycle1 == min_x_atc_cycle2: # doesn't matter which cycle + ### Generate the x1s vector, in the case that the repeat tracks do start in the same place + x1s = x_atc[cycle1][beam][ + int(search_width / dx) + 1:-int(segment_length / dx) - int(search_width / dx):int(along_track_step / dx)] + #### ! 
this line updated 2020 07 23
+    # x1s = x_atc[cycle1][beam][int(search_width/dx)+1::int(search_width/dx)]
+
+    ### Determine xend, where the x1s vector ends: smaller value for both beams, if different
+    max_x_atc_cycle1 = x_atc[cycle1][beam][-1] - search_width / dx
+    max_x_atc_cycle2 = x_atc[cycle2][beam][-1] - search_width / dx
+    smallest_xatc = np.min([max_x_atc_cycle1, max_x_atc_cycle2])
+    ixmax = np.where(x1s >= (smallest_xatc - search_width / dx))
+    if len(ixmax[0]) >= 1:
+        ixtmp = ixmax[0][0]
+        x1s = x1s[:ixtmp]
+
+    ### Create vectors to store results in
+    velocities[rgt][beam] = np.empty_like(x1s)
+    correlations[rgt][beam] = np.empty_like(x1s)
+    lags[rgt][beam] = np.empty_like(x1s)
+    midpoints_x_atc[rgt][beam] = np.empty_like(x1s)
+    midpoints_lats[rgt][beam] = np.empty_like(x1s)
+    midpoints_lons[rgt][beam] = np.empty_like(x1s)
+    midpoints_seg_ids[rgt][beam] = np.empty_like(x1s)
+
+    ### Entire x_atc vector for the first cycle
+    x_full_t1 = x_atc[cycle1][beam]
+
+    ### Loop over x1s, positions along track that each window starts at
+    for xi, x1 in enumerate(x1s):
+
+        ### Cut out data: small chunk of data at time t1 (first cycle)
+        ix_x1 = np.arange(len(x_full_t1))[x_full_t1 >= x1][0] # index of first point greater than or equal to x1
+        ix_x2 = ix_x1 + int(np.round(segment_length / dx)) # ix_x1 + number of datapoints within the desired segment length
+        x_t1 = x_full_t1[ix_x1:ix_x2] # cut out x_atc values, first cycle
+        lats_t1 = lats[cycle1][beam][ix_x1:ix_x2] # cut out latitude values, first cycle
+        lons_t1 = lons[cycle1][beam][ix_x1:ix_x2] # cut out longitude values, first cycle
+        seg_ids_t1 = segment_ids[cycle1][beam][ix_x1:ix_x2] # cut out segment_ids, first cycle
+        h_li1 = h_li_diff[cycle1][beam][
+            ix_x1 - 1:ix_x2 - 1] # cut out land ice height values, first cycle; start 1 index earlier because
+            # the h_li_diff data are differentiated, and therefore one sample shorter
+
+        # Find segment midpoints; this is the position where we will assign the velocity measurement from each window
+        n = len(x_t1)
+        midpt_ix = int(np.floor(n / 2))
+        midpoints_x_atc[rgt][beam][xi] = x_t1[midpt_ix]
+        midpoints_lats[rgt][beam][xi] = lats_t1[midpt_ix]
+        midpoints_lons[rgt][beam][xi] = lons_t1[midpt_ix]
+        midpoints_seg_ids[rgt][beam][xi] = seg_ids_t1[midpt_ix]
+
+        ### Cut out data: wider chunk of data at time t2 (second cycle)
+        ix_x3 = ix_x1 - int(np.round(search_width / dx)) # extend on earlier end by number of indices in search_width
+        ix_x4 = ix_x2 + int(np.round(search_width / dx)) # extend on later end by number of indices in search_width
+        h_li2 = h_li_diff[cycle2][beam][
+            ix_x3 - 1:ix_x4 - 1] # cut out land ice height values, second cycle; start 1 index earlier because
+            # the h_li_diff data are differentiated, and therefore one sample shorter
+
+        ### Determine number of nans in each data chunk
+        n_nans1 = np.sum(np.isnan(h_li_raw[cycle1][beam][ix_x1:ix_x2]))
+        n_nans2 = np.sum(np.isnan(h_li_raw[cycle2][beam][ix_x3:ix_x4]))
+
+        ### Skip the correlation step if either data chunk has more than max_percent_nans % nans:
+        if not ((n_nans1 / len(h_li1) <= max_percent_nans / 100) and (n_nans2 / len(h_li2) <= max_percent_nans / 100)):
+            ### If there are too many nans, just save a nan
+            velocities[rgt][beam][xi] = np.nan
+            correlations[rgt][beam][xi] = np.nan
+            lags[rgt][beam][xi] = np.nan
+        else:
+            # Detrend both chunks of data
+            h_li1 = detrend(h_li1, type='linear')
+            h_li2 = detrend(h_li2, type='linear')
+
+            ### Correlate the old and new data
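+            # With mode='valid' the shorter h_li1 slides along the longer h_li2, so every
+            # output sample is computed with full overlap: element k is the dot product of
+            # h_li1 with an equal-length section of h_li2. After dividing by
+            # sqrt(sum(h_li1**2) * sum(h_li2_section**2)) below, each value is a normalized
+            # correlation with magnitude bounded by 1 (Cauchy-Schwarz).
+            # We made the second data vector longer than the first, so the valid method 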
returns values
+            corr = correlate(h_li1, h_li2, mode='valid', method='direct')
+
+            ### Normalize correlation function by autocorrelations
+            # The normalizing coefficient changes with each step along track; this section determines a changing along-track normalizing coefficient
+            coeff_a_val = np.sum(h_li1 ** 2)
+            coeff_b_val = np.zeros(len(h_li2) - len(h_li1) + 1)
+            for shift in range(len(h_li2) - len(h_li1) + 1):
+                h_li2_section = h_li2[shift:shift + len(h_li1)]
+                coeff_b_val[shift] = np.sum(h_li2_section ** 2)
+            norm_vec = np.sqrt(coeff_a_val * coeff_b_val)
+            corr_normed = corr / np.flip(norm_vec)
+            # the flip is needed because correlate(a, b) equals correlate(b, a) reversed;
+            # with the shorter array passed first, the lags come out in the opposite order
+            # to the ascending shift loop above, and without the flip values exceed 1
+
+            ### Create a vector of lags for the correlation function
+            lagvec = np.arange(- int(np.round(search_width / dx)), int(np.round(search_width / dx)) + 1, 1) # for mode = 'valid'
+
+            ### Convert lag to distance
+            shift_vec = lagvec * dx
+
+            ### ID peak correlation coefficient
+            ix_peak = np.arange(len(corr_normed))[corr_normed == np.nanmax(corr_normed)][0]
+
+            ### Save correlation coefficient, best lag, velocity, etc at the location of peak correlation coefficient
+            velocities[rgt][beam][xi] = shift_vec[ix_peak] / (dt / 365)
+            correlations[rgt][beam][xi] = corr_normed[ix_peak]
+            lags[rgt][beam][xi] = lagvec[ix_peak]
+
+    ### Output xy locations of midpoints as well
+    midpoints_xy[rgt][beam] = np.array(pyproj.Proj(3031)(midpoints_lons[rgt][beam], midpoints_lats[rgt][beam]))
+    # extract measures veloc outside of this loop
+
+    if saving:
+        measures_Vx_path = glob.glob( map_data_root + '*_Vx.tif')[0]
+        measures_Vy_path = glob.glob( map_data_root + '*_Vy.tif')[0]
+
+        ### Add velocities to hdf5 file for each beam
+        with h5py.File(h5_file_out, 'a') as f:
+            f[beam + '/x_atc'] = midpoints_x_atc[rgt][beam] # x_atc value halfway along each segment
+            f[beam + '/latitudes'] = midpoints_lats[rgt][beam] # latitude at each segment midpoint
+            f[beam + '/longitudes'] = midpoints_lons[rgt][beam] # longitude at each segment midpoint
+            f[beam + '/velocities'] = velocities[rgt][beam] # velocity assigned to each segment midpoint
+            f[beam + '/correlation_coefficients'] = correlations[rgt][beam] # peak correlation coefficient for each segment
+            f[beam + '/best_lags'] = lags[rgt][beam] # lag at the peak correlation for each segment
+            f[beam + '/segment_ids'] = midpoints_seg_ids[rgt][beam]
+            f[beam + '/first_cycle_time'] = str(Time(times[cycle1][beam][0]))
+            f[beam + '/second_cycle_time'] = str(Time(times[cycle2][beam][0]))
+            f[beam + '/Measures_v_along'] = get_measures_along_track_velocity(midpoints_xy[rgt][beam][0], midpoints_xy[rgt][beam][1], spatial_extent,
+                                                            measures_Vx_path, measures_Vy_path)
+
+    return velocities,correlations,lags,midpoints_x_atc,midpoints_lats,midpoints_lons,midpoints_seg_ids,midpoints_xy
 
-def velocity_old(x_in1, x_in2, dh1, dh2, dt, segment_length=2000, search_width=1000, along_track_step=100, dx=20, corr_threshold=.65):
+# -------------------------------------------------------------------------------------------
+
+def velocity_old(x_in, dh1, dh2, dt, segment_length=2000, search_width=1000, along_track_step=100, dx=20, corr_threshold=.65):
     r"""Calculate along-track velocity by correlating the along-track elevation between repeat acquisitions. 
Works for a single set of repeat beams @@ -382,18 +373,13 @@ def velocity_old(x_in1, x_in2, dh1, dh2, dt, segment_length=2000, search_width=1 # Generate vector of along window starting positions in the along-track vector x1s = x_in[int(search_width/dx)+1:-int(segment_length/dx) - int(search_width/dx):int(along_track_step/dx)] - # (This is the one that was not quite working correctly in our original notebooks) - - # middle point of each window, where we actually assign the velocity - # xs_middle = - # create output arrays to be filled - velocities = np.nan*np.ones_like(output_xs) - correlations = np.nan*np.ones_like(output_xs) + velocities = np.nan*np.ones_like(x1s) + correlations = np.nan*np.ones_like(x1s) # iterate over all the output locations - for xi,x_start in enumerate(output_xs): + for xi,x_start in enumerate(x1s): # find indices for the stencil idx1 = np.argmin(abs(x_in-x_start)) idx2 = idx1 + int(np.round(segment_length/dx)) From 29c45d91a5e3a301bacf243e8252eceeadbe7829 Mon Sep 17 00:00:00 2001 From: Benjamin Hills Date: Mon, 26 Oct 2020 15:10:19 -0600 Subject: [PATCH 03/17] Doing an overhaul of the data structuring There are way too many separate arrays. I am moving them all into a dictionary for now. It might be better to restructure this as a class eventually, but it might be too big a step for the group right now. --- IS2_velocity/__init__.py | 3 +- IS2_velocity/correlation_processing.py | 68 +++- IS2_velocity/readers.py | 423 ------------------------- IS2_velocity/readers_atl03.py | 209 ++++++++++++ IS2_velocity/readers_atl06.py | 123 +++++++ 5 files changed, 395 insertions(+), 431 deletions(-) delete mode 100644 IS2_velocity/readers.py create mode 100644 IS2_velocity/readers_atl03.py create mode 100644 IS2_velocity/readers_atl06.py diff --git a/IS2_velocity/__init__.py b/IS2_velocity/__init__.py index 3916844..476dc42 100644 --- a/IS2_velocity/__init__.py +++ b/IS2_velocity/__init__.py @@ -1,4 +1,5 @@ -from . import readers +from . import readers_atl06 +from . import readers_atl03 from . import correlation_processing from . import extract_alongtrack from . 
import atl03_reprocessing diff --git a/IS2_velocity/correlation_processing.py b/IS2_velocity/correlation_processing.py index d4a771a..fa4a8be 100644 --- a/IS2_velocity/correlation_processing.py +++ b/IS2_velocity/correlation_processing.py @@ -8,6 +8,67 @@ import h5py,pyproj from IS2_velocity.extract_alongtrack import get_measures_along_track_velocity + + +""" + # make a monotonically increasing x vector + # assumes dx = 20 exactly, so be carefull referencing back + ind = seg_ids - np.nanmin(seg_ids) # indices starting at zero, using the segment_id field, so any skipped segment will be kept in correct location + # TASK: Change dx to depend on data loaded, not be hard coded as 20 (as in line below) + x_full = np.arange(np.max(ind)+1) * 20 + x_atc_tmp[0] + h_full = np.zeros(np.max(ind)+1) + np.NaN + h_full[ind] = h_li_tmp + lats_full = np.zeros(np.shape(x_full)) * np.nan + lats_full[ind] = lats_tmp + lons_full = np.zeros(np.shape(x_full)) * np.nan + lons_full[ind] = lons_tmp + x_ps_full = np.zeros(np.shape(x_full)) * np.nan + x_ps_full[ind] = x_ps_tmp + y_ps_full = np.zeros(np.shape(x_full)) * np.nan + y_ps_full[ind] = y_ps_tmp + + ## save the segment id's themselves, with gaps filled in + segment_ids[cycle][beam] = np.zeros(np.max(ind)+1) + np.NaN + segment_ids[cycle][beam][ind] = seg_ids + + + x_atc[cycle][beam] = x_full + h_li_raw[cycle][beam] = h_full # preserves nan values + lons[cycle][beam] = lons_full + lats[cycle][beam] = lats_full + x_ps[cycle][beam] = x_ps_full + y_ps[cycle][beam] = y_ps_full + + ### interpolate nans in pandas + # put in dataframe for just this step; eventually rewrite to use only dataframes? + data = {'x_full': x_full, 'h_full': h_full} + df = pd.DataFrame(data, columns = ['x_full','h_full']) + #df.plot(x='x_full',y='h_full') + # linear interpolation for now + df['h_full'].interpolate(method = 'linear', inplace = True) + h_full_interp = df['h_full'].values + h_li_raw_NoNans[cycle][beam] = h_full_interp # has filled nan values + + ### Apply any filters; differentiate + dx = np.median(x_full[1:] - x_full[:-1]) + if filter_type == 'running_average': + if running_avg_window == None: + running_avg_window = 100 + + h_filt = filt(x1 = x_full, h1 = h_full_interp, dx = dx, filter_type = 'running_average', running_avg_window = running_avg_window) + h_li[cycle][beam] = h_filt + + # differentiate that section of data + h_diff = differentiate(x_full, h_filt) #(h_filt[1:] - h_filt[0:-1]) / (x_full[1:] - x_full[0:-1]) + elif filter_type == None: + h_li[cycle][beam] = h_full_interp + h_diff = differentiate(x_full, h_full_interp) # (h_full_interp[1:] - h_full_interp[0:-1]) / (x_full[1:] - x_full[0:-1]) + + h_li_diff[cycle][beam] = h_diff +""" + + + def fill_seg_ids(x_in,h_in,seg_ids,dx=20): r"""Fill the along-track vector so that there are no skipped points @@ -199,22 +260,15 @@ def calculate_velocity_single_beam(x_atc,cycle1,cycle2,beam,search_width,segment cycletmp = cycle2 elif cycle_n == 1: cycletmp = cycle1 - # n_segments_this_track = (len(x_atc[cycletmp][beam]) - 2 * search_width/dx) / (along_track_step/dx) ### Generate the x1s vector, in the case that the repeat tracks don't start in the same place x1s = x_atc[cycletmp][beam][ int(search_width / dx) + 1:-int(segment_length / dx) - int(search_width / dx):int(along_track_step / dx)] - #### ! 
this line updated 2020 07 23 - # x1s = x_atc[cycletmp][beam][int(search_width/dx)+1::int(search_width/dx)] - # start at search_width/dx in, so the code never tries to get data outside the edges of this rgt - # add 1 bc the data are differentiated, and h_li_diff is therefore one point shorter elif min_x_atc_cycle1 == min_x_atc_cycle2: # doesn't matter which cycle ### Generate the x1s vector, in the case that the repeat tracks do start in the same place x1s = x_atc[cycle1][beam][ int(search_width / dx) + 1:-int(segment_length / dx) - int(search_width / dx):int(along_track_step / dx)] - #### ! this line updated 2020 07 23 - # x1s = x_atc[cycle1][beam][int(search_width/dx)+1::int(search_width/dx)] ### Determine xend, where the x1s vector ends: smaller value for both beams, if different max_x_atc_cycle1 = x_atc[cycle1][beam][-1] - search_width / dx diff --git a/IS2_velocity/readers.py b/IS2_velocity/readers.py deleted file mode 100644 index c86eec7..0000000 --- a/IS2_velocity/readers.py +++ /dev/null @@ -1,423 +0,0 @@ -#!/usr/bin/env python -# -*- coding: utf-8 -*- - -import numpy as np -import os,re,h5py,pyproj -import glob -import pandas as pd - - - -def atl06_to_dict(filename, beam, field_dict=None, index=None, epsg=None, format = 'hdf5'): - """ - Read selected datasets from an ATL06 file - - Input arguments: - filename: ATl06 file to read - beam: a string specifying which beam is to be read (ex: gt1l, gt1r, gt2l, etc) - field_dict: A dictinary describing the fields to be read - keys give the group names to be read, - entries are lists of datasets within the groups - index: which entries in each field to read - epsg: an EPSG code specifying a projection (see www.epsg.org). Good choices are: - for Greenland, 3413 (polar stereographic projection, with Greenland along the Y axis) - for Antarctica, 3031 (polar stereographic projection, centered on the Pouth Pole) - format: what format the files are in. Default = hd5f. No other formats currently supported. - Output argument: - D6: dictionary containing ATL06 data. Each dataset in - dataset_dict has its own entry in D6. Each dataset - in D6 contains a numpy array containing the - data - """ - if format == 'hdf5': - if field_dict is None: - field_dict={None:['latitude','longitude','h_li', 'atl06_quality_summary'],\ - 'ground_track':['x_atc','y_atc'],\ - 'fit_statistics':['dh_fit_dx', 'dh_fit_dy']} - D={} - file_re=re.compile('ATL06_(?P\d+)_(?P\d\d\d\d)(?P\d\d)(?P\d\d)_(?P\d\d\d)_(?P\d\d).h5') - with h5py.File(filename,'r') as h5f: - for key in field_dict: - for ds in field_dict[key]: - if key is not None: - ds_name=beam+'/land_ice_segments/'+key+'/'+ds - else: - ds_name=beam+'/land_ice_segments/'+ds - if index is not None: - D[ds]=np.array(h5f[ds_name][index]) - else: - D[ds]=np.array(h5f[ds_name]) - if '_FillValue' in h5f[ds_name].attrs: - bad_vals=D[ds]==h5f[ds_name].attrs['_FillValue'] - D[ds]=D[ds].astype(float) - D[ds][bad_vals]=np.NaN - D['data_start_utc'] = h5f['/ancillary_data/data_start_utc'][:] - D['delta_time'] = h5f['/' + beam + '/land_ice_segments/delta_time'][:] - D['segment_id'] = h5f['/' + beam + '/land_ice_segments/segment_id'][:] - if epsg is not None: - xy=np.array(pyproj.proj.Proj(epsg)(D['longitude'], D['latitude'])) - D['x']=xy[0,:].reshape(D['latitude'].shape) - D['y']=xy[1,:].reshape(D['latitude'].shape) - temp=file_re.search(filename) - D['rgt']=int(temp['rgt']) - D['cycle']=int(temp['cycle']) - D['beam']=beam - return D - -### Some functions -# MISSING HERE: mask by data quality? 
-def load_data_by_rgt(rgt, path_to_data, product, filter_type = 'running_average', running_avg_window = None, format = 'hdf5'): - """ - rgt: repeat ground track number of desired data - filter_type: Name of desired filter type. - 'running_average' = a centered running avergae filter of smoothing_window_size will be used - None = return raw data, no filter - running_average_window: Default None - dx: desired spacing. NOT USED, removed from function (should be retrieved from data itself) - path_to_data: - product: ex., ATL06 - format, ex 'hdf5' - """ - - from IS2_velocity.correlation_processing import filt, differentiate - - # hard code these for now: - cycles = ['03','04','05','06','07'] # not doing 1 and 2, because don't overlap exactly - beams = ['gt1l','gt1r','gt2l','gt2r','gt3l','gt3r'] - - ### extract data from all available cycles - x_atc = {} - lats = {} - lons = {} - h_li_raw = {} # unsmoothed data; equally spaced x_atc, still has nans - h_li_raw_NoNans = {} # unsmoothed data; equally spaced x_atc, nans filled with noise - h_li = {} # smoothed data, equally spaced x_atc, nans filled with noise - h_li_diff = {} - times = {} - min_seg_ids = {} - segment_ids = {} - x_ps= {} - y_ps= {} - - cycles_this_rgt = [] - for cycle in cycles: # loop over all available cycles - Di = {} - x_atc[cycle] = {} - lats[cycle] = {} - lons[cycle] = {} - h_li_raw[cycle] = {} - h_li_raw_NoNans[cycle] = {} - h_li[cycle] = {} - h_li_diff[cycle] = {} - times[cycle] = {} - min_seg_ids[cycle] = {} - segment_ids[cycle] = {} - x_ps[cycle]= {} - y_ps[cycle]= {} - - if format == 'hdf5': - - filenames = glob.glob(os.path.join(path_to_data, f'*{product}_*_{rgt}{cycle}*_003*.h5')) - error_count=0 - - - for filename in filenames: # try and load any available files; hopefully is just one - try: - for beam in beams: - Di[filename]=atl06_to_dict(filename,'/'+ beam, index=None, epsg=3031, format = 'hdf5') - - - - times[cycle][beam] = Di[filename]['data_start_utc'] - - # extract h_li and x_atc, and lat/lons for that section - x_atc_tmp = Di[filename]['x_atc'] - h_li_tmp = Di[filename]['h_li']#[ixs] - lats_tmp = Di[filename]['latitude'] - lons_tmp = Di[filename]['longitude'] - x_ps_tmp = Di[filename]['x'] - y_ps_tmp= Di[filename]['y'] - - - # segment ids: - seg_ids = Di[filename]['segment_id'] - min_seg_ids[cycle][beam] = seg_ids[0] - #print(len(seg_ids), len(x_atc_tmp)) - - # make a monotonically increasing x vector - # assumes dx = 20 exactly, so be carefull referencing back - ind = seg_ids - np.nanmin(seg_ids) # indices starting at zero, using the segment_id field, so any skipped segment will be kept in correct location - # TASK: Change dx to depend on data loaded, not be hard coded as 20 (as in line below) - x_full = np.arange(np.max(ind)+1) * 20 + x_atc_tmp[0] - h_full = np.zeros(np.max(ind)+1) + np.NaN - h_full[ind] = h_li_tmp - lats_full = np.zeros(np.shape(x_full)) * np.nan - lats_full[ind] = lats_tmp - lons_full = np.zeros(np.shape(x_full)) * np.nan - lons_full[ind] = lons_tmp - x_ps_full = np.zeros(np.shape(x_full)) * np.nan - x_ps_full[ind] = x_ps_tmp - y_ps_full = np.zeros(np.shape(x_full)) * np.nan - y_ps_full[ind] = y_ps_tmp - - ## save the segment id's themselves, with gaps filled in - segment_ids[cycle][beam] = np.zeros(np.max(ind)+1) + np.NaN - segment_ids[cycle][beam][ind] = seg_ids - - - x_atc[cycle][beam] = x_full - h_li_raw[cycle][beam] = h_full # preserves nan values - lons[cycle][beam] = lons_full - lats[cycle][beam] = lats_full - x_ps[cycle][beam] = x_ps_full - y_ps[cycle][beam] = y_ps_full - - ### fill 
in nans with noise h_li datasets - # h = ma.array(h_full,mask =np.isnan(h_full)) # created a masked array, mask is where the nans are - # h_full_filled = h.mask * (np.random.randn(*h.shape)) # fill in all the nans with random noise - - ### interpolate nans in pandas - # put in dataframe for just this step; eventually rewrite to use only dataframes? - data = {'x_full': x_full, 'h_full': h_full} - df = pd.DataFrame(data, columns = ['x_full','h_full']) - #df.plot(x='x_full',y='h_full') - # linear interpolation for now - df['h_full'].interpolate(method = 'linear', inplace = True) - h_full_interp = df['h_full'].values - h_li_raw_NoNans[cycle][beam] = h_full_interp # has filled nan values - - ### Apply any filters; differentiate - dx = np.median(x_full[1:] - x_full[:-1]) - if filter_type == 'running_average': - if running_avg_window == None: - running_avg_window = 100 - - h_filt = filt(x1 = x_full, h1 = h_full_interp, dx = dx, filter_type = 'running_average', running_avg_window = running_avg_window) - h_li[cycle][beam] = h_filt - - # differentiate that section of data - h_diff = differentiate(x_full, h_filt) #(h_filt[1:] - h_filt[0:-1]) / (x_full[1:] - x_full[0:-1]) - elif filter_type == None: - h_li[cycle][beam] = h_full_interp - h_diff = differentiate(x_full, h_full_interp) # (h_full_interp[1:] - h_full_interp[0:-1]) / (x_full[1:] - x_full[0:-1]) - - h_li_diff[cycle][beam] = h_diff - - - - #print(len(x_full), len(h_full), len(lats_full), len(seg_ids), len(h_full_interp), len(h_diff)) - - - cycles_this_rgt+=[cycle] - except KeyError as e: - print(f'file {filename} encountered error {e}') - error_count += 1 - - print('Cycles available: ' + ','.join(cycles_this_rgt)) - return x_atc, lats, lons, h_li_raw, h_li_raw_NoNans, h_li, h_li_diff, \ - times, min_seg_ids, segment_ids, cycles_this_rgt, x_ps, y_ps - - - - -def read_HDF5_ATL03(FILENAME, ATTRIBUTES=True, VERBOSE=False): - """ - ### Adapted from a notebook by Tyler Sutterly 6/14/2910 ### - - #-- PURPOSE: read ICESat-2 ATL03 HDF5 data files - - """ - #-- Open the HDF5 file for reading - fileID = h5py.File(os.path.expanduser(FILENAME), 'r') - - #-- Output HDF5 file information - if VERBOSE: - print(fileID.filename) - print(list(fileID.keys())) - - #-- allocate python dictionaries for ICESat-2 ATL03 variables and attributes - IS2_atl03_mds = {} - IS2_atl03_attrs = {} if ATTRIBUTES else None - - #-- read each input beam within the file - IS2_atl03_beams = [k for k in fileID.keys() if bool(re.match('gt\d[lr]',k))] - for gtx in IS2_atl03_beams: - IS2_atl03_mds[gtx] = {} - IS2_atl03_mds[gtx]['heights'] = {} - IS2_atl03_mds[gtx]['geolocation'] = {} - # IS2_atl03_mds[gtx]['bckgrd_atlas'] = {} - IS2_atl03_mds[gtx]['geophys_corr'] = {} - #-- get each HDF5 variable - #-- ICESat-2 Measurement Group - for key,val in fileID[gtx]['heights'].items(): - IS2_atl03_mds[gtx]['heights'][key] = val[:] - #-- ICESat-2 Geolocation Group - for key,val in fileID[gtx]['geolocation'].items(): - IS2_atl03_mds[gtx]['geolocation'][key] = val[:] - # #-- ICESat-2 Background Photon Rate Group - # for key,val in fileID[gtx]['bckgrd_atlas'].items(): - # IS2_atl03_mds[gtx]['bckgrd_atlas'][key] = val[:] - #-- ICESat-2 Geophysical Corrections Group: Values for tides (ocean, - #-- solid earth, pole, load, and equilibrium), inverted barometer (IB) - #-- effects, and range corrections for tropospheric delays - for key,val in fileID[gtx]['geophys_corr'].items(): - IS2_atl03_mds[gtx]['geophys_corr'][key] = val[:] - - #-- Getting attributes of included variables - if ATTRIBUTES: - #-- 
Getting attributes of IS2_atl03_mds beam variables - IS2_atl03_attrs[gtx] = {} - IS2_atl03_attrs[gtx]['heights'] = {} - IS2_atl03_attrs[gtx]['geolocation'] = {} - # IS2_atl03_attrs[gtx]['bckgrd_atlas'] = {} - IS2_atl03_attrs[gtx]['geophys_corr'] = {} - IS2_atl03_attrs[gtx]['Atlas_impulse_response'] = {} - #-- Global Group Attributes - for att_name,att_val in fileID[gtx].attrs.items(): - IS2_atl03_attrs[gtx][att_name] = att_val - #-- ICESat-2 Measurement Group - for key,val in fileID[gtx]['heights'].items(): - IS2_atl03_attrs[gtx]['heights'][key] = {} - for att_name,att_val in val.attrs.items(): - IS2_atl03_attrs[gtx]['heights'][key][att_name]=att_val - #-- ICESat-2 Geolocation Group - for key,val in fileID[gtx]['geolocation'].items(): - IS2_atl03_attrs[gtx]['geolocation'][key] = {} - for att_name,att_val in val.attrs.items(): - IS2_atl03_attrs[gtx]['geolocation'][key][att_name]=att_val - # #-- ICESat-2 Background Photon Rate Group - # for key,val in fileID[gtx]['bckgrd_atlas'].items(): - # IS2_atl03_attrs[gtx]['bckgrd_atlas'][key] = {} - # for att_name,att_val in val.attrs.items(): - # IS2_atl03_attrs[gtx]['bckgrd_atlas'][key][att_name]=att_val - #-- ICESat-2 Geophysical Corrections Group - for key,val in fileID[gtx]['geophys_corr'].items(): - IS2_atl03_attrs[gtx]['geophys_corr'][key] = {} - for att_name,att_val in val.attrs.items(): - IS2_atl03_attrs[gtx]['geophys_corr'][key][att_name]=att_val - - #-- ICESat-2 spacecraft orientation at time - IS2_atl03_mds['orbit_info'] = {} - IS2_atl03_attrs['orbit_info'] = {} if ATTRIBUTES else None - for key,val in fileID['orbit_info'].items(): - IS2_atl03_mds['orbit_info'][key] = val[:] - #-- Getting attributes of group and included variables - if ATTRIBUTES: - #-- Global Group Attributes - for att_name,att_val in fileID['orbit_info'].attrs.items(): - IS2_atl03_attrs['orbit_info'][att_name] = att_val - #-- Variable Attributes - IS2_atl03_attrs['orbit_info'][key] = {} - for att_name,att_val in val.attrs.items(): - IS2_atl03_attrs['orbit_info'][key][att_name] = att_val - - #-- number of GPS seconds between the GPS epoch (1980-01-06T00:00:00Z UTC) - #-- and ATLAS Standard Data Product (SDP) epoch (2018-01-01:T00:00:00Z UTC) - #-- Add this value to delta time parameters to compute full gps_seconds - #-- could alternatively use the Julian day of the ATLAS SDP epoch: 2458119.5 - #-- and add leap seconds since 2018-01-01:T00:00:00Z UTC (ATLAS SDP epoch) - IS2_atl03_mds['ancillary_data'] = {} - IS2_atl03_attrs['ancillary_data'] = {} if ATTRIBUTES else None - for key in ['atlas_sdp_gps_epoch']: - #-- get each HDF5 variable - IS2_atl03_mds['ancillary_data'][key] = fileID['ancillary_data'][key][:] - #-- Getting attributes of group and included variables - if ATTRIBUTES: - #-- Variable Attributes - IS2_atl03_attrs['ancillary_data'][key] = {} - for att_name,att_val in fileID['ancillary_data'][key].attrs.items(): - IS2_atl03_attrs['ancillary_data'][key][att_name] = att_val - - #-- get ATLAS impulse response variables for the transmitter echo path (TEP) - tep1,tep2 = ('atlas_impulse_response','tep_histogram') - IS2_atl03_mds[tep1] = {} - IS2_atl03_attrs[tep1] = {} if ATTRIBUTES else None - for pce in ['pce1_spot1','pce2_spot3']: - IS2_atl03_mds[tep1][pce] = {tep2:{}} - IS2_atl03_attrs[tep1][pce] = {tep2:{}} if ATTRIBUTES else None - #-- for each TEP variable - for key,val in fileID[tep1][pce][tep2].items(): - IS2_atl03_mds[tep1][pce][tep2][key] = val[:] - #-- Getting attributes of included variables - if ATTRIBUTES: - #-- Global Group Attributes - for 
att_name,att_val in fileID[tep1][pce][tep2].attrs.items(): - IS2_atl03_attrs[tep1][pce][tep2][att_name] = att_val - #-- Variable Attributes - IS2_atl03_attrs[tep1][pce][tep2][key] = {} - for att_name,att_val in val.attrs.items(): - IS2_atl03_attrs[tep1][pce][tep2][key][att_name] = att_val - - #-- Global File Attributes - if ATTRIBUTES: - for att_name,att_val in fileID.attrs.items(): - IS2_atl03_attrs[att_name] = att_val - - #-- Closing the HDF5 file - fileID.close() - #-- Return the datasets and variables - return (IS2_atl03_mds,IS2_atl03_attrs,IS2_atl03_beams) - - - -def get_ATL03_x_atc(IS2_atl03_mds, IS2_atl03_attrs, IS2_atl03_beams): - """ - # Adapted from a notebook by Tyler Sutterly 6/14/2910 - """ - # calculate the along-track and across-track coordinates for ATL03 photons - - Segment_ID = {} - Segment_Index_begin = {} - Segment_PE_count = {} - Segment_Distance = {} - Segment_Length = {} - - #-- background photon rate - background_rate = {} - - #-- for each input beam within the file - for gtx in sorted(IS2_atl03_beams): - #-- data and attributes for beam gtx - val = IS2_atl03_mds[gtx] - val['heights']['x_atc']=np.zeros_like(val['heights']['h_ph'])+np.NaN - val['heights']['y_atc']=np.zeros_like(val['heights']['h_ph'])+np.NaN - attrs = IS2_atl03_attrs[gtx] - #-- ATL03 Segment ID - Segment_ID[gtx] = val['geolocation']['segment_id'] - n_seg = len(Segment_ID[gtx]) - #-- first photon in the segment (convert to 0-based indexing) - Segment_Index_begin[gtx] = val['geolocation']['ph_index_beg'] - 1 - #-- number of photon events in the segment - Segment_PE_count[gtx] = val['geolocation']['segment_ph_cnt'] - #-- along-track distance for each ATL03 segment - Segment_Distance[gtx] = val['geolocation']['segment_dist_x'] - #-- along-track length for each ATL03 segment - Segment_Length[gtx] = val['geolocation']['segment_length'] - #-- Transmit time of the reference photon - delta_time = val['geolocation']['delta_time'] - - #-- iterate over ATL03 segments to calculate 40m means - #-- in ATL03 1-based indexing: invalid == 0 - #-- here in 0-based indexing: invalid == -1 - segment_indices, = np.nonzero((Segment_Index_begin[gtx][:-1] >= 0) & - (Segment_Index_begin[gtx][1:] >= 0)) - for j in segment_indices: - #-- index for segment j - idx = Segment_Index_begin[gtx][j] - #-- number of photons in segment (use 2 ATL03 segments) - c1 = np.copy(Segment_PE_count[gtx][j]) - c2 = np.copy(Segment_PE_count[gtx][j+1]) - cnt = c1 + c2 - #-- time of each Photon event (PE) - segment_delta_times = val['heights']['delta_time'][idx:idx+cnt] - #-- Photon event lat/lon and elevation (WGS84) - segment_heights = val['heights']['h_ph'][idx:idx+cnt] - segment_lats = val['heights']['lat_ph'][idx:idx+cnt] - segment_lons = val['heights']['lon_ph'][idx:idx+cnt] - #-- Along-track and Across-track distances - distance_along_X = np.copy(val['heights']['dist_ph_along'][idx:idx+cnt]) - distance_along_X[:c1] += Segment_Distance[gtx][j] - distance_along_X[c1:] += Segment_Distance[gtx][j+1] - distance_along_Y = np.copy(val['heights']['dist_ph_across'][idx:idx+cnt]) - val['heights']['x_atc'][idx:idx+cnt]=distance_along_X - val['heights']['y_atc'][idx:idx+cnt]=distance_along_Y diff --git a/IS2_velocity/readers_atl03.py b/IS2_velocity/readers_atl03.py new file mode 100644 index 0000000..4d5be86 --- /dev/null +++ b/IS2_velocity/readers_atl03.py @@ -0,0 +1,209 @@ +#!/usr/bin/env python +# -*- coding: utf-8 -*- + +import numpy as np +import os,re,h5py + +# 
----------------------------------------------------------------------------------------------- + +def read_HDF5_ATL03(FILENAME, ATTRIBUTES=True, VERBOSE=False): + """ + ### Adapted from a notebook by Tyler Sutterly 6/14/2910 ### + + #-- PURPOSE: read ICESat-2 ATL03 HDF5 data files + + """ + #-- Open the HDF5 file for reading + fileID = h5py.File(os.path.expanduser(FILENAME), 'r') + + #-- Output HDF5 file information + if VERBOSE: + print(fileID.filename) + print(list(fileID.keys())) + + #-- allocate python dictionaries for ICESat-2 ATL03 variables and attributes + IS2_atl03_mds = {} + IS2_atl03_attrs = {} if ATTRIBUTES else None + + #-- read each input beam within the file + IS2_atl03_beams = [k for k in fileID.keys() if bool(re.match('gt\d[lr]',k))] + for gtx in IS2_atl03_beams: + IS2_atl03_mds[gtx] = {} + IS2_atl03_mds[gtx]['heights'] = {} + IS2_atl03_mds[gtx]['geolocation'] = {} + # IS2_atl03_mds[gtx]['bckgrd_atlas'] = {} + IS2_atl03_mds[gtx]['geophys_corr'] = {} + #-- get each HDF5 variable + #-- ICESat-2 Measurement Group + for key,val in fileID[gtx]['heights'].items(): + IS2_atl03_mds[gtx]['heights'][key] = val[:] + #-- ICESat-2 Geolocation Group + for key,val in fileID[gtx]['geolocation'].items(): + IS2_atl03_mds[gtx]['geolocation'][key] = val[:] + # #-- ICESat-2 Background Photon Rate Group + # for key,val in fileID[gtx]['bckgrd_atlas'].items(): + # IS2_atl03_mds[gtx]['bckgrd_atlas'][key] = val[:] + #-- ICESat-2 Geophysical Corrections Group: Values for tides (ocean, + #-- solid earth, pole, load, and equilibrium), inverted barometer (IB) + #-- effects, and range corrections for tropospheric delays + for key,val in fileID[gtx]['geophys_corr'].items(): + IS2_atl03_mds[gtx]['geophys_corr'][key] = val[:] + + #-- Getting attributes of included variables + if ATTRIBUTES: + #-- Getting attributes of IS2_atl03_mds beam variables + IS2_atl03_attrs[gtx] = {} + IS2_atl03_attrs[gtx]['heights'] = {} + IS2_atl03_attrs[gtx]['geolocation'] = {} + # IS2_atl03_attrs[gtx]['bckgrd_atlas'] = {} + IS2_atl03_attrs[gtx]['geophys_corr'] = {} + IS2_atl03_attrs[gtx]['Atlas_impulse_response'] = {} + #-- Global Group Attributes + for att_name,att_val in fileID[gtx].attrs.items(): + IS2_atl03_attrs[gtx][att_name] = att_val + #-- ICESat-2 Measurement Group + for key,val in fileID[gtx]['heights'].items(): + IS2_atl03_attrs[gtx]['heights'][key] = {} + for att_name,att_val in val.attrs.items(): + IS2_atl03_attrs[gtx]['heights'][key][att_name]=att_val + #-- ICESat-2 Geolocation Group + for key,val in fileID[gtx]['geolocation'].items(): + IS2_atl03_attrs[gtx]['geolocation'][key] = {} + for att_name,att_val in val.attrs.items(): + IS2_atl03_attrs[gtx]['geolocation'][key][att_name]=att_val + # #-- ICESat-2 Background Photon Rate Group + # for key,val in fileID[gtx]['bckgrd_atlas'].items(): + # IS2_atl03_attrs[gtx]['bckgrd_atlas'][key] = {} + # for att_name,att_val in val.attrs.items(): + # IS2_atl03_attrs[gtx]['bckgrd_atlas'][key][att_name]=att_val + #-- ICESat-2 Geophysical Corrections Group + for key,val in fileID[gtx]['geophys_corr'].items(): + IS2_atl03_attrs[gtx]['geophys_corr'][key] = {} + for att_name,att_val in val.attrs.items(): + IS2_atl03_attrs[gtx]['geophys_corr'][key][att_name]=att_val + + #-- ICESat-2 spacecraft orientation at time + IS2_atl03_mds['orbit_info'] = {} + IS2_atl03_attrs['orbit_info'] = {} if ATTRIBUTES else None + for key,val in fileID['orbit_info'].items(): + IS2_atl03_mds['orbit_info'][key] = val[:] + #-- Getting attributes of group and included variables + if ATTRIBUTES: + #-- Global 
Group Attributes + for att_name,att_val in fileID['orbit_info'].attrs.items(): + IS2_atl03_attrs['orbit_info'][att_name] = att_val + #-- Variable Attributes + IS2_atl03_attrs['orbit_info'][key] = {} + for att_name,att_val in val.attrs.items(): + IS2_atl03_attrs['orbit_info'][key][att_name] = att_val + + #-- number of GPS seconds between the GPS epoch (1980-01-06T00:00:00Z UTC) + #-- and ATLAS Standard Data Product (SDP) epoch (2018-01-01:T00:00:00Z UTC) + #-- Add this value to delta time parameters to compute full gps_seconds + #-- could alternatively use the Julian day of the ATLAS SDP epoch: 2458119.5 + #-- and add leap seconds since 2018-01-01:T00:00:00Z UTC (ATLAS SDP epoch) + IS2_atl03_mds['ancillary_data'] = {} + IS2_atl03_attrs['ancillary_data'] = {} if ATTRIBUTES else None + for key in ['atlas_sdp_gps_epoch']: + #-- get each HDF5 variable + IS2_atl03_mds['ancillary_data'][key] = fileID['ancillary_data'][key][:] + #-- Getting attributes of group and included variables + if ATTRIBUTES: + #-- Variable Attributes + IS2_atl03_attrs['ancillary_data'][key] = {} + for att_name,att_val in fileID['ancillary_data'][key].attrs.items(): + IS2_atl03_attrs['ancillary_data'][key][att_name] = att_val + + #-- get ATLAS impulse response variables for the transmitter echo path (TEP) + tep1,tep2 = ('atlas_impulse_response','tep_histogram') + IS2_atl03_mds[tep1] = {} + IS2_atl03_attrs[tep1] = {} if ATTRIBUTES else None + for pce in ['pce1_spot1','pce2_spot3']: + IS2_atl03_mds[tep1][pce] = {tep2:{}} + IS2_atl03_attrs[tep1][pce] = {tep2:{}} if ATTRIBUTES else None + #-- for each TEP variable + for key,val in fileID[tep1][pce][tep2].items(): + IS2_atl03_mds[tep1][pce][tep2][key] = val[:] + #-- Getting attributes of included variables + if ATTRIBUTES: + #-- Global Group Attributes + for att_name,att_val in fileID[tep1][pce][tep2].attrs.items(): + IS2_atl03_attrs[tep1][pce][tep2][att_name] = att_val + #-- Variable Attributes + IS2_atl03_attrs[tep1][pce][tep2][key] = {} + for att_name,att_val in val.attrs.items(): + IS2_atl03_attrs[tep1][pce][tep2][key][att_name] = att_val + + #-- Global File Attributes + if ATTRIBUTES: + for att_name,att_val in fileID.attrs.items(): + IS2_atl03_attrs[att_name] = att_val + + #-- Closing the HDF5 file + fileID.close() + #-- Return the datasets and variables + return (IS2_atl03_mds,IS2_atl03_attrs,IS2_atl03_beams) + +# ----------------------------------------------------------------------------------------------- + +def get_ATL03_x_atc(IS2_atl03_mds, IS2_atl03_attrs, IS2_atl03_beams): + """ + # Adapted from a notebook by Tyler Sutterly 6/14/2910 + """ + # calculate the along-track and across-track coordinates for ATL03 photons + + Segment_ID = {} + Segment_Index_begin = {} + Segment_PE_count = {} + Segment_Distance = {} + Segment_Length = {} + + #-- background photon rate + background_rate = {} + + #-- for each input beam within the file + for gtx in sorted(IS2_atl03_beams): + #-- data and attributes for beam gtx + val = IS2_atl03_mds[gtx] + val['heights']['x_atc']=np.zeros_like(val['heights']['h_ph'])+np.NaN + val['heights']['y_atc']=np.zeros_like(val['heights']['h_ph'])+np.NaN + attrs = IS2_atl03_attrs[gtx] + #-- ATL03 Segment ID + Segment_ID[gtx] = val['geolocation']['segment_id'] + n_seg = len(Segment_ID[gtx]) + #-- first photon in the segment (convert to 0-based indexing) + Segment_Index_begin[gtx] = val['geolocation']['ph_index_beg'] - 1 + #-- number of photon events in the segment + Segment_PE_count[gtx] = val['geolocation']['segment_ph_cnt'] + #-- along-track distance 
for each ATL03 segment + Segment_Distance[gtx] = val['geolocation']['segment_dist_x'] + #-- along-track length for each ATL03 segment + Segment_Length[gtx] = val['geolocation']['segment_length'] + #-- Transmit time of the reference photon + delta_time = val['geolocation']['delta_time'] + + #-- iterate over ATL03 segments to calculate 40m means + #-- in ATL03 1-based indexing: invalid == 0 + #-- here in 0-based indexing: invalid == -1 + segment_indices, = np.nonzero((Segment_Index_begin[gtx][:-1] >= 0) & + (Segment_Index_begin[gtx][1:] >= 0)) + for j in segment_indices: + #-- index for segment j + idx = Segment_Index_begin[gtx][j] + #-- number of photons in segment (use 2 ATL03 segments) + c1 = np.copy(Segment_PE_count[gtx][j]) + c2 = np.copy(Segment_PE_count[gtx][j+1]) + cnt = c1 + c2 + #-- time of each Photon event (PE) + segment_delta_times = val['heights']['delta_time'][idx:idx+cnt] + #-- Photon event lat/lon and elevation (WGS84) + segment_heights = val['heights']['h_ph'][idx:idx+cnt] + segment_lats = val['heights']['lat_ph'][idx:idx+cnt] + segment_lons = val['heights']['lon_ph'][idx:idx+cnt] + #-- Along-track and Across-track distances + distance_along_X = np.copy(val['heights']['dist_ph_along'][idx:idx+cnt]) + distance_along_X[:c1] += Segment_Distance[gtx][j] + distance_along_X[c1:] += Segment_Distance[gtx][j+1] + distance_along_Y = np.copy(val['heights']['dist_ph_across'][idx:idx+cnt]) + val['heights']['x_atc'][idx:idx+cnt]=distance_along_X + val['heights']['y_atc'][idx:idx+cnt]=distance_along_Y diff --git a/IS2_velocity/readers_atl06.py b/IS2_velocity/readers_atl06.py new file mode 100644 index 0000000..d63d2ea --- /dev/null +++ b/IS2_velocity/readers_atl06.py @@ -0,0 +1,123 @@ +#!/usr/bin/env python +# -*- coding: utf-8 -*- + +import numpy as np +import os,re,h5py,pyproj +import glob + + +def load_data_by_rgt(rgt, path_to_data, product, format = 'hdf5', + cycles = ['03','04','05','06','07'], beams = ['gt1l','gt1r','gt2l','gt2r','gt3l','gt3r']): + """ + rgt: repeat ground track number of desired data + filter_type: Name of desired filter type. + 'running_average' = a centered running avergae filter of smoothing_window_size will be used + None = return raw data, no filter + running_average_window: Default None + dx: desired spacing. 
NOT USED; no longer an argument (spacing is retrieved from the data itself)
+    path_to_data: path to the directory containing the ATL06 data files
+    product: ex., ATL06
+    format, ex 'hdf5'
+    """
+
+    variables = ['x_atc','times','min_seg_ids','h_li','latitude','longitude','x','y']
+
+    ### extract data from all available cycles
+    D_out = {}
+    for var in variables:
+        D_out[var] = {}
+
+    cycles_this_rgt = []
+    for cycle in cycles: # loop over all available cycles
+        Di = {}
+        for var in variables:
+            D_out[var][cycle] = {}
+
+        if not format == 'hdf5':
+            raise ValueError('Can only load hdf5 for now.')
+        else:
+            filenames = glob.glob(os.path.join(path_to_data, f'*{product}_*_{rgt}{cycle}*_003*.h5'))
+            error_count=0
+            for filename in filenames: # try to load any available files; hopefully there is just one
+                try:
+                    for beam in beams:
+                        Di[filename]=atl06_to_dict(filename,'/'+ beam, index=None, epsg=3031, format = 'hdf5')
+
+                        # Times
+                        D_out['times'][cycle][beam] = Di[filename]['data_start_utc']
+                        # segment ids:
+                        seg_ids = Di[filename]['segment_id']
+                        D_out['min_seg_ids'][cycle][beam] = seg_ids[0]
+
+                        # extract h_li and x_atc, and lat/lons for that section
+                        for var in variables:
+                            if var in ['times','min_seg_ids']:
+                                continue
+                            D_out[var][cycle][beam] = Di[filename][var]
+
+                    cycles_this_rgt+=[cycle]
+                except KeyError as e:
+                    print(f'file {filename} encountered error {e}')
+                    error_count += 1
+
+    print('Loaded data from cycles: ' + ','.join(cycles_this_rgt),'for rgt:',rgt)
+
+    return D_out
+
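+
+# A minimal usage sketch (rgt and directory are the tutorial's example values;
+# assumes the release-003 ATL06 files are already on disk). The returned
+# dictionary is keyed by variable, then cycle, then beam:
+#
+#     data = load_data_by_rgt('0848', '../data/', 'ATL06', format='hdf5',
+#                             cycles=['03','04'])
+#     data['h_li']['03']['gt1l']   # land-ice heights, cycle 03, beam gt1l
+#     data['x_atc']['03']['gt1l']  # matching along-track coordinates
+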
+def atl06_to_dict(filename, beam, field_dict=None, index=None, epsg=None, format = 'hdf5'):
+    """
+    Read selected datasets from an ATL06 file
+
+    Input arguments:
+        filename: ATL06 file to read
+        beam: a string specifying which beam is to be read (ex: gt1l, gt1r, gt2l, etc)
+        field_dict: A dictionary describing the fields to be read
+                keys give the group names to be read,
+                entries are lists of datasets within the groups
+        index: which entries in each field to read
+        epsg: an EPSG code specifying a projection (see www.epsg.org). Good choices are:
+            for Greenland, 3413 (polar stereographic projection, with Greenland along the Y axis)
+            for Antarctica, 3031 (polar stereographic projection, centered on the South Pole)
+        format: what format the files are in. Default = 'hdf5'. No other formats currently supported.
+    Output argument:
+        D6: dictionary containing ATL06 data. Each dataset in
+            dataset_dict has its own entry in D6. Each dataset
+            in D6 contains a numpy array containing the
+            data
+    """
+    if format == 'hdf5':
+        if field_dict is None:
+            field_dict={None:['latitude','longitude','h_li', 'atl06_quality_summary'],\
+                        'ground_track':['x_atc','y_atc'],\
+                        'fit_statistics':['dh_fit_dx', 'dh_fit_dy']}
+        D={}
+        file_re=re.compile(r'ATL06_(?P<date>\d+)_(?P<rgt>\d\d\d\d)(?P<cycle>\d\d)(?P<region>\d\d)_(?P<release>\d\d\d)_(?P<version>\d\d).h5')
+        with h5py.File(filename,'r') as h5f:
+            for key in field_dict:
+                for ds in field_dict[key]:
+                    if key is not None:
+                        ds_name=beam+'/land_ice_segments/'+key+'/'+ds
+                    else:
+                        ds_name=beam+'/land_ice_segments/'+ds
+                    if index is not None:
+                        D[ds]=np.array(h5f[ds_name][index])
+                    else:
+                        D[ds]=np.array(h5f[ds_name])
+                    if '_FillValue' in h5f[ds_name].attrs:
+                        bad_vals=D[ds]==h5f[ds_name].attrs['_FillValue']
+                        D[ds]=D[ds].astype(float)
+                        D[ds][bad_vals]=np.NaN
+            D['data_start_utc'] = h5f['/ancillary_data/data_start_utc'][:]
+            D['delta_time'] = h5f['/' + beam + '/land_ice_segments/delta_time'][:]
+            D['segment_id'] = h5f['/' + beam + '/land_ice_segments/segment_id'][:]
+            if epsg is not None:
+                xy=np.array(pyproj.proj.Proj(epsg)(D['longitude'], D['latitude']))
+                D['x']=xy[0,:].reshape(D['latitude'].shape)
+                D['y']=xy[1,:].reshape(D['latitude'].shape)
+        temp=file_re.search(filename)
+        D['rgt']=int(temp['rgt'])
+        D['cycle']=int(temp['cycle'])
+        D['beam']=beam
+    return D
+

From 68b91b7a4acb1c691979d2da4bf8db1da06afeb4 Mon Sep 17 00:00:00 2001
From: Benjamin Hills
Date: Mon, 26 Oct 2020 17:50:54 -0600
Subject: [PATCH 04/17] Massive restructuring

Separating processing functions and limiting the number of variables that
we are using.
---
 IS2_velocity/correlation_processing.py | 299 ++++---------------------
 IS2_velocity/preprocessing.py          | 121 ++++++++++
 IS2_velocity/readers_atl06.py          |   8 +-
 IS2_velocity/saving.py                 |  53 +++++
 notebooks/Introduction_Tutorial.ipynb  | 136 ++++++-----
 5 files changed, 291 insertions(+), 326 deletions(-)
 create mode 100644 IS2_velocity/preprocessing.py
 create mode 100644 IS2_velocity/saving.py

diff --git a/IS2_velocity/correlation_processing.py b/IS2_velocity/correlation_processing.py
index fa4a8be..c4c7c18 100644
--- a/IS2_velocity/correlation_processing.py
+++ b/IS2_velocity/correlation_processing.py
@@ -3,146 +3,16 @@
 
 import numpy as np
 from scipy.signal import correlate,detrend
-import glob
 from astropy.time import Time
-import h5py,pyproj
-from IS2_velocity.extract_alongtrack import get_measures_along_track_velocity
-
-
-
-"""
-    # make a monotonically increasing x vector
-    # assumes dx = 20 exactly, so be carefull referencing back
-    ind = seg_ids - np.nanmin(seg_ids) # indices starting at zero, using the segment_id field, so any skipped segment will be kept in correct location
-    # TASK: Change dx to depend on data loaded, not be hard coded as 20 (as in line below)
-    x_full = np.arange(np.max(ind)+1) * 20 + x_atc_tmp[0]
-    h_full = np.zeros(np.max(ind)+1) + np.NaN
-    h_full[ind] = h_li_tmp
-    lats_full = np.zeros(np.shape(x_full)) * np.nan
-    lats_full[ind] = lats_tmp
-    lons_full = np.zeros(np.shape(x_full)) * np.nan
-    lons_full[ind] = lons_tmp
-    x_ps_full = np.zeros(np.shape(x_full)) * np.nan
-    x_ps_full[ind] = x_ps_tmp
-    y_ps_full = np.zeros(np.shape(x_full)) * np.nan
-    y_ps_full[ind] = y_ps_tmp
-
-    ## save the segment id's themselves, with gaps filled in
-    segment_ids[cycle][beam] = np.zeros(np.max(ind)+1) + np.NaN
-    segment_ids[cycle][beam][ind] = seg_ids
-
-
-    x_atc[cycle][beam] = x_full
-    h_li_raw[cycle][beam] = h_full # preserves nan values
-    lons[cycle][beam] = lons_full
-    lats[cycle][beam] = 
lats_full - x_ps[cycle][beam] = x_ps_full - y_ps[cycle][beam] = y_ps_full - - ### interpolate nans in pandas - # put in dataframe for just this step; eventually rewrite to use only dataframes? - data = {'x_full': x_full, 'h_full': h_full} - df = pd.DataFrame(data, columns = ['x_full','h_full']) - #df.plot(x='x_full',y='h_full') - # linear interpolation for now - df['h_full'].interpolate(method = 'linear', inplace = True) - h_full_interp = df['h_full'].values - h_li_raw_NoNans[cycle][beam] = h_full_interp # has filled nan values - - ### Apply any filters; differentiate - dx = np.median(x_full[1:] - x_full[:-1]) - if filter_type == 'running_average': - if running_avg_window == None: - running_avg_window = 100 - - h_filt = filt(x1 = x_full, h1 = h_full_interp, dx = dx, filter_type = 'running_average', running_avg_window = running_avg_window) - h_li[cycle][beam] = h_filt - - # differentiate that section of data - h_diff = differentiate(x_full, h_filt) #(h_filt[1:] - h_filt[0:-1]) / (x_full[1:] - x_full[0:-1]) - elif filter_type == None: - h_li[cycle][beam] = h_full_interp - h_diff = differentiate(x_full, h_full_interp) # (h_full_interp[1:] - h_full_interp[0:-1]) / (x_full[1:] - x_full[0:-1]) - - h_li_diff[cycle][beam] = h_diff -""" - - - -def fill_seg_ids(x_in,h_in,seg_ids,dx=20): - r"""Fill the along-track vector so that there are no skipped points - Parameters - ------ - x_in : array - along-track distance - h_in : array - along-track elevation - seg_ids : array - segment ids as imported with atl06_to_dict function - dx : float; default 20 - step between points (20 m is the default for atl06) - - Output - ------ - x_full : array - filled array of along-track distance - h_full : array - filled array of alont-track elevation - """ - - # make a monotonically increasing x vector - ind = seg_ids - np.nanmin(seg_ids) # indices starting at zero, using the segment_id field, so any skipped segment will be kept in correct location - x_full = np.arange(np.max(ind)+1) * dx + x_in[0] - h_full = np.zeros(np.max(ind)+1) + np.NaN - h_full[ind] = h_in - - return x_full,h_full # ------------------------------------------------------------------------------------------- - -def filt(x1, h1, dx, filter_type = 'running_average', running_avg_window = None): - """ - - :param x1: - :param h1: Land ice height vector. Must be resampled to constant spacing - :param dx: - :param type: - :param running_avg_window: - :return: - """ - if filter_type == 'running_average': # - if not running_avg_window: # if no window size was given, run a default 100 m running average smoother - running_avg_window = 100 - dx = np.median(x1[1:] - x1[:-1]) - filt = np.ones(int(np.round(running_avg_window / dx))) - h_filt = (1/len(filt)) * np.convolve(filt, h1, mode = 'same') - - # TASK: add bandpass, highpass - else: - h_filt = h1 - return h_filt - -# ------------------------------------------------------------------------------------------- - -def differentiate(x_in, h_in): - """ - - :param x_in: - :param h_in: Land ice height vector. 
Must be resampled to constant spacing - :return: - """ - dh = np.gradient(h_in, x_in) - return dh - -# ------------------------------------------------------------------------------------------- -def time_diff(D1,D2): +def time_diff(data,cycle1,cycle2,beam='gt1l'): r"""Get the time difference between two cycles; units of days Parameters ------ - D1 : Dictionary 1 - D2 : Dictionary 2 + data : Dictionary 1 Output ------ @@ -151,8 +21,8 @@ def time_diff(D1,D2): """ # get time strings - t1_string = D1['data_start_utc'].astype(str) #figure out later if just picking the first one it ok - t2_string = D2['data_start_utc'].astype(str) #figure out later if just picking hte first one it ok + t1_string = data['times'][cycle1][beam] + t2_string = data['times'][cycle2][beam] # use astropy to get time from strings t1 = Time(t1_string) t2 = Time(t2_string) @@ -163,7 +33,7 @@ def time_diff(D1,D2): # ------------------------------------------------------------------------------------------- -def calculate_velocities(rgt, x_atc, h_li_raw, h_li_diff, lats, lons, segment_ids, times, beams, cycle1, cycle2, product, segment_length, search_width, along_track_step, max_percent_nans, dx, saving = False, write_out_path = '.', prepend = '', spatial_extent = None, map_data_root=None): +def calculate_velocities(data, cycle1, cycle2, beams, *args, **kwargs): """ :param x_atc: @@ -183,74 +53,31 @@ def calculate_velocities(rgt, x_atc, h_li_raw, h_li_diff, lats, lons, segment_id """ ### Create dictionaries to put info in - velocities = {} - correlations = {} - lags = {} - midpoints_x_atc = {} - midpoints_lats = {} - midpoints_lons = {} - midpoints_seg_ids = {} - midpoints_xy = {} - - # inside rgt loop - velocities[rgt] = {} - correlations[rgt] = {} - lags[rgt] = {} - midpoints_x_atc[rgt] = {} - midpoints_lats[rgt] = {} - midpoints_lons[rgt] = {} - midpoints_seg_ids[rgt] = {} - midpoints_xy[rgt] = {} + variables = ['velocities','correlations','lags','midpoints_x_atc','midpoints_lats','midpoints_lons', + 'midpoints_seg_ids'] + for var in variables: + data[var] = {} ### Determing Elapsed time between cycles - t1_string = times[cycle1]['gt1l'][0].astype(str) # figure out later if just picking hte first one it ok - t1 = Time(t1_string) - t2_string = times[cycle2]['gt1l'][0].astype(str) # figure out later if just picking hte first one it ok - t2 = Time(t2_string) - dt = (t2 - t1).jd # difference in julian days - - ### Where to save the results: - h5_file_out = write_out_path + prepend + '_rgt' + rgt + '.hdf5' - - if saving: - ### Save some metadata - with h5py.File(h5_file_out, 'w') as f: - f['dx'] = dx - f['product'] = product - f['segment_length'] = segment_length - f['search_width'] = search_width - f['along_track_step'] = along_track_step - f['max_percent_nans'] = max_percent_nans - # f['smoothing'] = smoothing - # f['smoothing_window_size'] = smoothing_window_size - f['process_date'] = str(Time.now().value) - f['rgt'] = rgt + dt = time_diff(data,cycle1,cycle2) ### Loop over each beam for beam in beams: - velocities,correlations,lags,midpoints_x_atc,midpoints_lats,midpoints_lons,midpoints_seg_ids,midpoints_xy = calculate_velocity_single_beam(x_atc, - cycle1,cycle2,beam,search_width,segment_length,dx, - velocities,correlations,midpoints_x_atc,midpoints_lats,midpoints_lons, - midpoints_seg_ids,midpoints_xy,rgt, - along_track_step,lags,lats,lons,segment_ids,h_li_diff,h_li_raw, - dt,saving,max_percent_nans,map_data_root,h5_file_out,times,spatial_extent) + data = 
calculate_velocity_single_beam(data,cycle1,cycle2,beam,dt,*args,**kwargs) - return velocities, correlations, lags, midpoints_x_atc, midpoints_xy, midpoints_lons, midpoints_lats + return data # ------------------------------------------------------------------------------------------- -def calculate_velocity_single_beam(x_atc,cycle1,cycle2,beam,search_width,segment_length,dx, - velocities,correlations,midpoints_x_atc,midpoints_lats,midpoints_lons, - midpoints_seg_ids,midpoints_xy,rgt, - along_track_step,lags,lats,lons,segment_ids,h_li_diff,h_li_raw, - dt,saving,max_percent_nans,map_data_root,h5_file_out,times,spatial_extent): +def calculate_velocity_single_beam(data,cycle1,cycle2,beam,dt,search_width=1000,segment_length=2000, + max_percent_nans=10,along_track_step=100, dx=20): """ """ ### Determine x1s, which are the x_atc coordinates at which each cut out window begins # To be common between both repeats, the first point x1 needs to be the larger first value between repeats - min_x_atc_cycle1 = x_atc[cycle1][beam][0] - min_x_atc_cycle2 = x_atc[cycle2][beam][0] + min_x_atc_cycle1 = data['x_atc_full'][cycle1][beam][0] + min_x_atc_cycle2 = data['x_atc_full'][cycle2][beam][0] # pick out the track that starts at greater x_atc, and use that as x1s vector if min_x_atc_cycle1 != min_x_atc_cycle2: @@ -262,17 +89,17 @@ def calculate_velocity_single_beam(x_atc,cycle1,cycle2,beam,search_width,segment cycletmp = cycle1 ### Generate the x1s vector, in the case that the repeat tracks don't start in the same place - x1s = x_atc[cycletmp][beam][ + x1s = data['x_atc_full'][cycletmp][beam][ int(search_width / dx) + 1:-int(segment_length / dx) - int(search_width / dx):int(along_track_step / dx)] elif min_x_atc_cycle1 == min_x_atc_cycle2: # doesn't matter which cycle ### Generate the x1s vector, in the case that the repeat tracks do start in the same place - x1s = x_atc[cycle1][beam][ + x1s = data['x_atc_full'][cycle1][beam][ int(search_width / dx) + 1:-int(segment_length / dx) - int(search_width / dx):int(along_track_step / dx)] ### Determine xend, where the x1s vector ends: smaller value for both beams, if different - max_x_atc_cycle1 = x_atc[cycle1][beam][-1] - search_width / dx - max_x_atc_cycle2 = x_atc[cycle2][beam][-1] - search_width / dx + max_x_atc_cycle1 = data['x_atc_full'][cycle1][beam][-1] - search_width / dx + max_x_atc_cycle2 = data['x_atc_full'][cycle2][beam][-1] - search_width / dx smallest_xatc = np.min([max_x_atc_cycle1, max_x_atc_cycle2]) ixmax = np.where(x1s >= (smallest_xatc - search_width / dx)) if len(ixmax[0]) >= 1: @@ -280,57 +107,53 @@ def calculate_velocity_single_beam(x_atc,cycle1,cycle2,beam,search_width,segment x1s = x1s[:ixtmp] ### Create vectors to store results in - velocities[rgt][beam] = np.empty_like(x1s) - correlations[rgt][beam] = np.empty_like(x1s) - lags[rgt][beam] = np.empty_like(x1s) - midpoints_x_atc[rgt][beam] = np.empty_like(x1s) - midpoints_lats[rgt][beam] = np.empty_like(x1s) - midpoints_lons[rgt][beam] = np.empty_like(x1s) - midpoints_seg_ids[rgt][beam] = np.empty_like(x1s) + variables = ['velocities','correlations','lags','midpoints_x_atc','midpoints_lats','midpoints_lons', + 'midpoints_seg_ids'] + for var in variables: + data[var][beam] = np.empty_like(x1s) ### Entire x_atc vectors for both cycles - x_full_t1 = x_atc[cycle1][beam] + x_full_t1 = data['x_atc_full'][cycle1][beam] ### Loop over x1s, positions along track that each window starts at for xi, x1 in enumerate(x1s): - ### Cut out data: small chunk of data at time t1 (first cycle) - ix_x1 = 
np.arange(len(x_full_t1))[x_full_t1 >= x1][0] # Index of first point that is greater than x1 + ix_x1 = np.arange(len(data['x_atc_full'][cycle1][beam]))[data['x_atc_full'][cycle1][beam] >= x1][0] # Index of first point that is greater than x1 ix_x2 = ix_x1 + int( np.round(segment_length / dx)) # ix_x1 + number of datapoints within the desired segment length x_t1 = x_full_t1[ix_x1:ix_x2] # cut out x_atc values, first cycle - lats_t1 = lats[cycle1][beam][ix_x1:ix_x2] # cut out latitude values, first cycle - lons_t1 = lons[cycle1][beam][ix_x1:ix_x2] # cut out longitude values, first cycle - seg_ids_t1 = segment_ids[cycle1][beam][ix_x1:ix_x2] # cut out segment_ids, first cycle - h_li1 = h_li_diff[cycle1][beam][ + lats_t1 = data['latitude'][cycle1][beam][ix_x1:ix_x2] # cut out latitude values, first cycle + lons_t1 = data['longitude'][cycle1][beam][ix_x1:ix_x2] # cut out longitude values, first cycle + seg_ids_t1 = data['segment_ids'][cycle1][beam][ix_x1:ix_x2] # cut out segment_ids, first cycle + h_li1 = data['h_li_diff'][cycle1][beam][ ix_x1 - 1:ix_x2 - 1] # cut out land ice height values, first cycle; start 1 index earlier because # the h_li_diff data are differentiated, and therefore one sample shorter - # Find segment midpoints; this is the position where we will assign the velocity measurement from each window - n = len(x_t1) - midpt_ix = int(np.floor(n / 2)) - midpoints_x_atc[rgt][beam][xi] = x_t1[midpt_ix] - midpoints_lats[rgt][beam][xi] = lats_t1[midpt_ix] - midpoints_lons[rgt][beam][xi] = lons_t1[midpt_ix] - midpoints_seg_ids[rgt][beam][xi] = seg_ids_t1[midpt_ix] - ### Cut out data: wider chunk of data at time t2 (second cycle) ix_x3 = ix_x1 - int(np.round(search_width / dx)) # extend on earlier end by number of indices in search_width ix_x4 = ix_x2 + int(np.round(search_width / dx)) # extend on later end by number of indices in search_width - h_li2 = h_li_diff[cycle2][beam][ + h_li2 = data['h_li_diff'][cycle2][beam][ ix_x3 - 1:ix_x4 - 1] # cut out land ice height values, second cycle; start 1 index earlier because # the h_li_diff data are differentiated, and therefore one sample shorter + # Find segment midpoints; this is the position where we will assign the velocity measurement from each window + n = len(x_t1) + midpt_ix = int(np.floor(n / 2)) + data['midpoints_x_atc'][beam][xi] = x_t1[midpt_ix] + data['midpoints_lats'][beam][xi] = lats_t1[midpt_ix] + data['midpoints_lons'][beam][xi] = lons_t1[midpt_ix] + data['midpoints_seg_ids'][beam][xi] = seg_ids_t1[midpt_ix] + ### Determine number of nans in each data chunk - n_nans1 = np.sum(np.isnan(h_li_raw[cycle1][beam][ix_x1:ix_x2])) - n_nans2 = np.sum(np.isnan(h_li_raw[cycle2][beam][ix_x3:ix_x4])) + n_nans1 = np.sum(np.isnan(data['h_li'][cycle1][beam][ix_x1:ix_x2])) + n_nans2 = np.sum(np.isnan(data['h_li'][cycle2][beam][ix_x3:ix_x4])) ### Only process if there are fewer than 10% nans in either data chunk: if not (n_nans1 / len(h_li1) <= max_percent_nans / 100) and (n_nans2 / len(h_li2) <= max_percent_nans / 100): ### If there are too many nans, just save a nan - velocities[rgt][beam][xi] = np.nan - correlations[rgt][beam][xi] = np.nan - lags[rgt][beam][xi] = np.nan + data['velocities'][beam][xi] = np.nan + data['correlations'][beam][xi] = np.nan + data['lags'][beam][xi] = np.nan else: # Detrend both chunks of data h_li1 = detrend(h_li1, type='linear') @@ -362,33 +185,11 @@ def calculate_velocity_single_beam(x_atc,cycle1,cycle2,beam,search_width,segment ix_peak = np.arange(len(corr_normed))[corr_normed == np.nanmax(corr_normed)][0] ### 
Save correlation coefficient, best lag, velocity, etc at the location of peak correlation coefficient - velocities[rgt][beam][xi] = shift_vec[ix_peak] / (dt / 365) - correlations[rgt][beam][xi] = corr_normed[ix_peak] - lags[rgt][beam][xi] = lagvec[ix_peak] - - ### Output xy locations of midpoints as well - midpoints_xy[rgt][beam] = np.array(pyproj.Proj(3031)(midpoints_lons[rgt][beam], midpoints_lats[rgt][beam])) - #extract measures veloc outside of this loop - - if saving: - measures_Vx_path = glob.glob( map_data_root + '*_Vx.tif')[0] - measures_Vy_path = glob.glob( map_data_root + '*_Vy.tif')[0] - - ### Add velocities to hdf5 file for each beam - with h5py.File(h5_file_out, 'a') as f: - f[beam + '/x_atc'] = midpoints_x_atc[rgt][beam] # assign x_atc value of half way along the segment - f[beam + '/latitudes'] = midpoints_lats[rgt][beam] # assign x_atc value of half way along the segment - f[beam + '/longitudes'] = midpoints_lons[rgt][beam] # assign x_atc value of half way along the segment - f[beam + '/velocities'] = velocities[rgt][beam] # assign x_atc value of half way along the segment - f[beam + '/correlation_coefficients'] = correlations[rgt][beam] # assign x_atc value of half way along the segment - f[beam + '/best_lags'] = lags[rgt][beam] # assign x_atc value of half way along the segment - f[beam + '/segment_ids'] = midpoints_seg_ids[rgt][beam] - f[beam + '/first_cycle_time'] = str(Time(times[cycle1][beam][0])) - f[beam + '/second_cycle_time'] = str(Time(times[cycle2][beam][0])) - f[beam + '/Measures_v_along'] = get_measures_along_track_velocity(midpoints_xy[rgt][beam][0], midpoints_xy[rgt][beam][1], spatial_extent, - measures_Vx_path, measures_Vy_path) - - return velocities,correlations,lags,midpoints_x_atc,midpoints_lats,midpoints_lons,midpoints_seg_ids,midpoints_xy + data['velocities'][beam][xi] = shift_vec[ix_peak] / (dt / 365) + data['correlations'][beam][xi] = corr_normed[ix_peak] + data['lags'][beam][xi] = lagvec[ix_peak] + + return data # ------------------------------------------------------------------------------------------- @@ -467,5 +268,3 @@ def velocity_old(x_in, dh1, dh2, dt, segment_length=2000, search_width=1000, alo return velocities,correlations -# ------------------------------------------------------------------------------------------- - diff --git a/IS2_velocity/preprocessing.py b/IS2_velocity/preprocessing.py new file mode 100644 index 0000000..94d61de --- /dev/null +++ b/IS2_velocity/preprocessing.py @@ -0,0 +1,121 @@ +#!/usr/bin/env python +# -*- coding: utf-8 -*- + +import pandas as pd +import numpy as np + +def fill_seg_ids(data,dx=20): + r"""Fill the along-track vector so that there are no skipped points + + Parameters + ------ + x_in : array + along-track distance + h_in : array + along-track elevation + seg_ids : array + segment ids as imported with atl06_to_dict function + dx : float; default 20 + step between points (20 m is the default for atl06) + + Output + ------ + """ + + variables = ['x_atc_full','h_full','lats_full','lons_full','x_full','y_full'] + for var in variables: + data[var] = {} + + # make a monotonically increasing x vector + for cycle in data['x_atc'].keys(): + for var in variables: + data[var][cycle] = {} + for beam in data['x_atc'][cycle].keys(): + # indices starting at zero, using the segment_id field, so any skipped segment will be kept in correct location + seg_ids = data['segment_ids'][cycle][beam] + ind = seg_ids - np.nanmin(seg_ids) + + data['x_atc_full'][cycle][beam] = np.arange(np.max(ind)+1) * dx + 
data['x_atc'][cycle][beam][0] + data['h_full'][cycle][beam] = np.zeros(np.max(ind)+1) + np.NaN + data['h_full'][cycle][beam][ind] = data['h_li'][cycle][beam] + + data['lats_full'][cycle][beam] = np.zeros(np.shape(data['x_atc_full'][cycle][beam])) * np.nan + data['lats_full'][cycle][beam][ind] = data['latitude'][cycle][beam] + data['lons_full'][cycle][beam] = np.zeros(np.shape(data['x_atc_full'][cycle][beam])) * np.nan + data['lons_full'][cycle][beam][ind] = data['longitude'][cycle][beam] + data['x_full'][cycle][beam] = np.zeros(np.shape(data['x_atc_full'][cycle][beam])) * np.nan + data['x_full'][cycle][beam][ind] = data['x'][cycle][beam] + data['y_full'][cycle][beam] = np.zeros(np.shape(data['x_atc_full'][cycle][beam])) * np.nan + data['y_full'][cycle][beam][ind] = data['y'][cycle][beam] + + ## save the segment id's themselves, with gaps filled in + data['segment_ids'][cycle][beam] = np.zeros(np.max(ind)+1) + np.NaN + data['segment_ids'][cycle][beam][ind] = seg_ids + + return data + +# ------------------------------------------------------------------------------------------- + +def interpolate_nans(data): + data['h_full_interp'] = {} + for cycle in data['h_full'].keys(): + data['h_full_interp'][cycle] = {} + for beam in data['h_full'][cycle].keys(): + ### interpolate nans in pandas + interp_hold = {'x_atc_full': data['x_atc_full'][cycle][beam], 'h_full': data['h_full'][cycle][beam]} + df = pd.DataFrame(interp_hold, columns = ['x_atc_full','h_full']) + # linear interpolation for now + df['h_full'].interpolate(method = 'linear', inplace = True) + data['h_full_interp'][cycle][beam] = df['h_full'].values + return data + +# ------------------------------------------------------------------------------------------- + +def filt(data, dx=None, filter_type = 'running_average', running_avg_window = None): + """ + + :param x1: + :param h1: Land ice height vector. Must be resampled to constant spacing + :param dx: + :param type: + :param running_avg_window: + :return: + """ + + data['h_filt'] = {} + for cycle in data['h_full_interp'].keys(): + data['h_filt'][cycle] = {} + for beam in data['h_full_interp'][cycle].keys(): + if filter_type == 'running_average': # + if not running_avg_window: # if no window size was given, run a default 100 m running average smoother + running_avg_window = 100 + if dx is None: + dx = np.nanmedian(data['x_atc_full'][cycle][beam][1:] - data['x_atc_full'][cycle][beam][:-1]) + filt = np.ones(int(np.round(running_avg_window / dx))) + data['h_filt'][cycle][beam] = (1/len(filt)) * np.convolve(filt, data['h_full_interp'][cycle][beam], mode = 'same') + + #TODO: add bandpass, highpass + else: + data['h_filt'][cycle][beam] = data['h_full_interp'][cycle][beam] + + return data + +# ------------------------------------------------------------------------------------------- + +def differentiate(data,dx=None): + """ + + :param x_in: + :param h_in: Land ice height vector. 
Must be resampled to constant spacing + :return: + """ + + data['h_li_diff'] = {} + for cycle in data['h_filt'].keys(): + data['h_li_diff'][cycle] = {} + for beam in data['h_filt'][cycle].keys(): + if dx is None: + dx = np.nanmedian(data['x_atc_full'][cycle][beam][1:] - data['x_atc_full'][cycle][beam][:-1]) + data['h_li_diff'][cycle][beam] = np.gradient(data['h_filt'][cycle][beam], dx) + return data + diff --git a/IS2_velocity/readers_atl06.py b/IS2_velocity/readers_atl06.py index d63d2ea..cb1055f 100644 --- a/IS2_velocity/readers_atl06.py +++ b/IS2_velocity/readers_atl06.py @@ -20,7 +20,7 @@ def load_data_by_rgt(rgt, path_to_data, product, format = 'hdf5', format, ex 'hdf5' """ - variables = ['x_atc','times','min_seg_ids','h_li','latitude','longitude','x','y'] + variables = ['x_atc','times','segment_ids','min_seg_ids','h_li','latitude','longitude','x','y'] ### extract data from all available cycles D_out = {} @@ -46,12 +46,12 @@ def load_data_by_rgt(rgt, path_to_data, product, format = 'hdf5', # Times D_out['times'][cycle][beam] = Di[filename]['data_start_utc'] # segment ids: - seg_ids = Di[filename]['segment_id'] - D_out['min_seg_ids'][cycle][beam] = seg_ids[0] + D_out['segment_ids'][cycle][beam] = Di[filename]['segment_id'] + D_out['min_seg_ids'][cycle][beam] = Di[filename]['segment_id'][0] # extract h_li and x_atc, and lat/lons for that section for var in variables: - if var in ['times','min_seg_ids']: + if var in ['times','min_seg_ids','segment_ids']: continue D_out[var][cycle][beam] = Di[filename][var] diff --git a/IS2_velocity/saving.py b/IS2_velocity/saving.py new file mode 100644 index 0000000..fcae6f9 --- /dev/null +++ b/IS2_velocity/saving.py @@ -0,0 +1,53 @@ +#!/usr/bin/env python +# -*- coding: utf-8 -*- + +import numpy as np +from astropy.time import Time +import h5py +import pyproj +import glob +from .extract_alongtrack import get_measures_along_track_velocity + +def save_is2_velocity(data,rgt,write_out_path,prepend,product,map_data_root, + cycle1,cycle2,dx,segment_length,search_width,along_track_step, + max_percent_nans,spatial_extent): + + ### Where to save the results: + h5_file_out = write_out_path + prepend + '_rgt' + rgt + '.hdf5' + + ### Save some metadata + with h5py.File(h5_file_out, 'w') as f: + f['dx'] = dx + f['product'] = product + f['segment_length'] = segment_length + f['search_width'] = search_width + f['along_track_step'] = along_track_step + f['max_percent_nans'] = max_percent_nans + # f['smoothing'] = smoothing + # f['smoothing_window_size'] = smoothing_window_size + f['process_date'] = str(Time.now().value) + f['rgt'] = rgt + + for beam in data['velocities'].keys(): + + ### Output xy locations of midpoints as well + midpoints_xy = np.array(pyproj.Proj(3031)(data['midpoints_lons'][beam], data['midpoints_lats'][beam])) + #extract measures veloc outside of this loop + + measures_Vx_path = glob.glob( map_data_root + '*_Vx.tif')[0] + measures_Vy_path = glob.glob( map_data_root + '*_Vy.tif')[0] + + ### Add velocities to hdf5 file for each beam + with h5py.File(h5_file_out, 'a') as f: + f[beam + '/x_atc'] = data['midpoints_x_atc'][beam] # assign x_atc value of half way along the segment + f[beam + '/latitudes'] = data['midpoints_lats'][beam] # assign x_atc value of half way along the segment + f[beam + '/longitudes'] = data['midpoints_lons'][beam] # assign x_atc value of half way along the segment + f[beam + '/velocities'] = data['velocities'][beam] # assign x_atc value of half way along the segment + f[beam + '/correlation_coefficients'] = 
data['correlations'][beam] # assign x_atc value of half way along the segment + f[beam + '/best_lags'] = data['lags'][beam] # assign x_atc value of half way along the segment + f[beam + '/segment_ids'] = data['midpoints_seg_ids'][beam] + f[beam + '/first_cycle_time'] = str(Time(data['times'][cycle1][beam][0])) + f[beam + '/second_cycle_time'] = str(Time(data['times'][cycle2][beam][0])) + f[beam + '/Measures_v_along'] = get_measures_along_track_velocity(midpoints_xy[0], midpoints_xy[1], spatial_extent, + measures_Vx_path, measures_Vy_path) + diff --git a/notebooks/Introduction_Tutorial.ipynb b/notebooks/Introduction_Tutorial.ipynb index c00362b..8cd4b92 100644 --- a/notebooks/Introduction_Tutorial.ipynb +++ b/notebooks/Introduction_Tutorial.ipynb @@ -46,50 +46,14 @@ "outputs": [], "source": [ "# As an example, import a function from the ICESat-2 surface velocity library\n", - "from IS2_velocity.correlation_processing import calculate_velocities\n", - "help(calculate_velocities)" - ] - }, - { - "cell_type": "markdown", - "metadata": {}, - "source": [ - "# Velocity calculation: Control correlations" - ] - }, - { - "cell_type": "code", - "execution_count": null, - "metadata": {}, - "outputs": [], - "source": [ - "# Import funcitons for the velocity calculation; Correlate all the beams from one set of repeat ground tracks, rgt = 0848\n", - "from IS2_velocity.correlation_processing import calculate_velocities\n", - "\n", - "### Select rgt for now\n", - "rgt = '0848'\n", - "\n", - "### Control the correlation step:\n", - "segment_length = 2000 # meters, how wide is the window we are correlating in each step\n", - "search_width = 1000 # meters, how far in front of and behind the window to check for correlation\n", - "along_track_step = 100 # meters; how much to jump between each consecutivevelocity determination\n", - "max_percent_nans = 10 # Maximum % of segment length that can be nans and still do the correlation step\n", - "\n", - "### Which product\n", - "product = 'ATL06'\n", - "if product == 'ATL06':\n", - " dx = 20\n", - "\n", - "### Select filter type and required arguments; Currently only this running mean is supported\n", - "filter_type = 'running_average'\n", - "running_avg_window = 100 # meters" + "from IS2_velocity.correlation_processing import calculate_velocities" ] }, { "cell_type": "markdown", "metadata": {}, "source": [ - "# Velocity calculation: Load Data / Import dictionaries" + "## Load data" ] }, { @@ -98,7 +62,7 @@ "metadata": {}, "outputs": [], "source": [ - "from IS2_velocity.readers import load_data_by_rgt\n", + "from IS2_velocity.readers_atl06 import load_data_by_rgt\n", "# atl06_to_dict is within the function load_data_by_rgt\n", "\n", "# path to data, relative to folder /notebooks\n", @@ -110,11 +74,7 @@ "filter_type = 'running_average'\n", "running_avg_window = 100\n", "\n", - "x_atc, lats, lons, h_li_raw, h_li_raw_NoNans, h_li, h_li_diff, times, min_seg_ids, \\\n", - "segment_ids, cycles_this_rgt, x_ps, y_ps = \\\n", - " load_data_by_rgt(rgt = rgt, path_to_data = data_dir, product = 'ATL06', \\\n", - " filter_type = filter_type, running_avg_window = running_avg_window, \\\n", - " format = 'hdf5')\n" + "data = load_data_by_rgt(rgt, data_dir, product = 'ATL06',format = 'hdf5')" ] }, { @@ -140,8 +100,8 @@ "\n", "plt.figure(figsize=(8,4))\n", "\n", - "plt.plot(x_atc[cycle1][beam]/1000.,h_li[cycle1][beam],c='indianred')\n", - "plt.plot(x_atc[cycle2][beam]/1000.,h_li[cycle2][beam],c='steelblue')\n", + 
"plt.plot(data['x_atc'][cycle1][beam]/1000.,data['h_li'][cycle1][beam],c='indianred')\n", + "plt.plot(data['x_atc'][cycle2][beam]/1000.,data['h_li'][cycle2][beam],c='steelblue')\n", "plt.ylabel('Elevation (m)')\n", "plt.xlabel('Along-Track Distance (km)')\n", "\n", @@ -152,7 +112,28 @@ "cell_type": "markdown", "metadata": {}, "source": [ - "# Velocity calculation: Calculate velocity between cycles 03 and 04" + "## Preprocessing" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "metadata": {}, + "outputs": [], + "source": [ + "from IS2_velocity.preprocessing import *\n", + "\n", + "data = fill_seg_ids(data)\n", + "data = interpolate_nans(data)\n", + "data = filt(data)\n", + "data = differentiate(data)" + ] + }, + { + "cell_type": "markdown", + "metadata": {}, + "source": [ + "## Calculate velocity between cycles 03 and 04" ] }, { @@ -168,26 +149,32 @@ "cycle2 = '04'\n", "beams = ['gt1l','gt1r','gt2l','gt2r','gt3l','gt3r']\n", "\n", - "saving = True\n", - "write_out_path = '.'\n", - "write_out_prefix = ''\n", - "\n", - "spatial_extent = np.array([-65, -86, -55, -81])\n", - "map_data_root = '/Users/grace/Dropbox/Cornell/projects/003/FIS_data/'\n", - "\n", - "velocities, correlations, lags, midpoints_x_atc, midpoints_xy, midpoints_lons, midpoints_lats = \\\n", - " calculate_velocities(rgt, x_atc, h_li_raw, h_li_diff, lats, lons, segment_ids, times, beams, cycle1, cycle2, \\\n", - " product, segment_length, search_width, along_track_step, max_percent_nans, dx, saving = True, \\\n", - " write_out_path = write_out_path, prepend = write_out_prefix,spatial_extent = spatial_extent, \\\n", - " map_data_root = map_data_root)\n", - "\n" + "data = calculate_velocities(data, cycle1, cycle2, beams)" ] }, { "cell_type": "markdown", "metadata": {}, "source": [ - "# Velocity calculation: Visualize result for one beam " + "## Save result" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "metadata": {}, + "outputs": [], + "source": [ + "save_is2_velocity(data,rgt,write_out_path,prepend,product,map_data_root,\n", + " cycle1,cycle2,dx,segment_length,search_width,along_track_step,\n", + " max_percent_nans,spatial_extent)" + ] + }, + { + "cell_type": "markdown", + "metadata": {}, + "source": [ + "## Visualize result for one beam " ] }, { @@ -198,19 +185,17 @@ }, "outputs": [], "source": [ - "\n", - "\n", "from matplotlib.gridspec import GridSpec\n", "\n", - "beam = 'gt1l'\n", - "x1 = x_atc['03'][beam]\n", - "x2 = x_atc['04'][beam]\n", - "h1 = h_li['03'][beam]\n", - "h2 = h_li['04'][beam]\n", - "dh1 = h_li_diff['03'][beam]\n", - "dh2 = h_li_diff['04'][beam]\n", - "vel_xs = midpoints_x_atc[rgt][beam]\n", - "velocs = velocities[rgt][beam]\n", + "beam = 'gt1r'\n", + "x1 = data['x_atc_full'][cycle1][beam]\n", + "x2 = data['x_atc_full'][cycle2][beam]\n", + "h1 = data['h_full'][cycle1][beam]\n", + "h2 = data['h_full'][cycle2][beam]\n", + "dh1 = data['h_li_diff'][cycle1][beam]\n", + "dh2 = data['h_li_diff'][cycle2][beam]\n", + "vel_xs = data['midpoints_x_atc'][beam]\n", + "velocs = data['velocities'][beam]\n", "\n", "plt.figure(figsize=(8,4))\n", "gs = GridSpec(2,2)\n", @@ -262,6 +247,13 @@ "\n", "plot_measures_along_track_comparison(rgt, beams, out_path, correlation_threshold, spatial_extent, plot_out_location, map_data_root, velocity_number)" ] + }, + { + "cell_type": "code", + "execution_count": null, + "metadata": {}, + "outputs": [], + "source": [] } ], "metadata": { From 2b7c9da6d3ea6313e2fb9f7e4f1d794df4354ee2 Mon Sep 17 00:00:00 2001 From: Benjamin Hills Date: Mon, 26 Oct 2020 
17:59:25 -0600 Subject: [PATCH 05/17] Changes to the introduction tutorial Probably at the stage that I can have Grace look at it. --- notebooks/Introduction_Tutorial.ipynb | 25 ++++++++++++++++--------- 1 file changed, 16 insertions(+), 9 deletions(-) diff --git a/notebooks/Introduction_Tutorial.ipynb b/notebooks/Introduction_Tutorial.ipynb index 8cd4b92..491dc45 100644 --- a/notebooks/Introduction_Tutorial.ipynb +++ b/notebooks/Introduction_Tutorial.ipynb @@ -63,7 +63,6 @@ "outputs": [], "source": [ "from IS2_velocity.readers_atl06 import load_data_by_rgt\n", - "# atl06_to_dict is within the function load_data_by_rgt\n", "\n", "# path to data, relative to folder /notebooks\n", "data_dir = '../data/'\n", @@ -165,7 +164,22 @@ "metadata": {}, "outputs": [], "source": [ - "save_is2_velocity(data,rgt,write_out_path,prepend,product,map_data_root,\n", + "from IS2_velocity.saving import *\n", + "\n", + "write_out_path = '.'\n", + "write_out_prefix = ''\n", + "product = 'ATL06'\n", + "\n", + "dx = 20\n", + "segment_length = 2000\n", + "search_width = 1000\n", + "along_track_step = 100\n", + "max_percent_nans = 10\n", + "\n", + "spatial_extent = np.array([-65, -86, -55, -81])\n", + "map_data_root = '/Users/grace/Dropbox/Cornell/projects/003/FIS_data/'\n", + "\n", + "save_is2_velocity(data,rgt,write_out_path,write_out_prefix,product,map_data_root,\n", " cycle1,cycle2,dx,segment_length,search_width,along_track_step,\n", " max_percent_nans,spatial_extent)" ] @@ -247,13 +261,6 @@ "\n", "plot_measures_along_track_comparison(rgt, beams, out_path, correlation_threshold, spatial_extent, plot_out_location, map_data_root, velocity_number)" ] - }, - { - "cell_type": "code", - "execution_count": null, - "metadata": {}, - "outputs": [], - "source": [] } ], "metadata": { From 2fe2a779e32f73980c2b7f0a9a4b409b357c7d87 Mon Sep 17 00:00:00 2001 From: Benjamin Hills Date: Tue, 10 Nov 2020 12:14:49 -0700 Subject: [PATCH 06/17] Add some function descriptions --- IS2_velocity/correlation_processing.py | 136 ++++++++----------------- IS2_velocity/preprocessing.py | 60 +++++++---- IS2_velocity/saving.py | 23 +++++ 3 files changed, 107 insertions(+), 112 deletions(-) diff --git a/IS2_velocity/correlation_processing.py b/IS2_velocity/correlation_processing.py index c4c7c18..a98930a 100644 --- a/IS2_velocity/correlation_processing.py +++ b/IS2_velocity/correlation_processing.py @@ -12,7 +12,14 @@ def time_diff(data,cycle1,cycle2,beam='gt1l'): Parameters ------ - data : Dictionary 1 + data : dict + data dictionary + cycle1 : int + first cycle to be compared + cycle2 : int + second cycle to be compared + beam : str + which beam to compare Output ------ @@ -34,22 +41,23 @@ def time_diff(data,cycle1,cycle2,beam='gt1l'): # ------------------------------------------------------------------------------------------- def calculate_velocities(data, cycle1, cycle2, beams, *args, **kwargs): - """ + r"""Calculate velocities by correlating two cycles. + Can be done over a list of beams. 

+
+    Parameters
+    ------
+    data : dict
+        data dictionary
+    cycle1 : int
+        first cycle to be compared
+    cycle2 : int
+        second cycle to be compared
+    beams : list
+        a list of which beams to compare
-
-    :param x_atc:
-    :param h_li_diff:
-    :param lats:
-    :param lons:
-    :param segment_ids:
-    :param beams:
-    :param cycle1:
-    :param cycle2:
-    :param segment_length:
-    :param search_width:
-    :param along_track_step:
-    :param max_percent_nans:
-    :param dx:
-    :return:
+
+    Output
+    ------
+    data : dict
     """
 
     ### Create dictionaries to put info in
@@ -71,7 +79,23 @@ def calculate_velocities(data, cycle1, cycle2, beams, *args, **kwargs):
 
 def calculate_velocity_single_beam(data,cycle1,cycle2,beam,dt,search_width=1000,segment_length=2000,
                                    max_percent_nans=10,along_track_step=100, dx=20):
-    """
+    r"""Calculate velocities along a single beam by correlating two cycles.
+    Called by calculate_velocities once per beam.
+
+    Parameters
+    ------
+    data : dict
+        data dictionary
+    cycle1 : int
+        first cycle to be compared
+    cycle2 : int
+        second cycle to be compared
+    beam : str
+        which beam to correlate
+
+    Output
+    ------
+    data : dict
     """
 
     ### Determine x1s, which are the x_atc coordinates at which each cut out window begins
@@ -190,81 +214,3 @@ def calculate_velocity_single_beam(data,cycle1,cycle2,beam,dt,search_width=1000,
         data['lags'][beam][xi] = lagvec[ix_peak]
 
     return data
-
-# -------------------------------------------------------------------------------------------
-
-def velocity_old(x_in, dh1, dh2, dt, segment_length=2000, search_width=1000, along_track_step=100, dx=20, corr_threshold=.65):
-    r"""Calculate along-track velocity by correlating the along-track elevation between repeat acquisitions.
-    Works for a single set of repeat beams
-
-    Parameters
-    ------
-    x_in : array
-        along-track distance
-    dh1 : array
-        surface slope at cycle 1
-    dh2 : array
-        surface slope at cycle 2
-    dt : float
-        time difference between cycles
-    output_xs : array # REMOVED THIS ONE, calculate inside the loop
-        along-track distances at which the velocities will be calculated and output
-    search_width : float
-        the maximum distance which the stencil array will be moved to look for a good correlation
-    segment_length : float
-        the length of the array to be correlated
-    dx : float
-        spacing between points in the x array
-    corr_threshold : float
-        minimum correlation to be considered an acceptable fit
-
-    Output
-    ------
-    velocities : array
-        calculated velocities corresponding to locations output_xs
-    correlations : array
-        calculated correlations for each of the velocity points
-    """
-
-    # Generate vector of along window starting positions in the along-track vector
-    x1s = x_in[int(search_width/dx)+1:-int(segment_length/dx) - int(search_width/dx):int(along_track_step/dx)]
-
-    # create output arrays to be filled
-    velocities = np.nan*np.ones_like(x1s)
-    correlations = np.nan*np.ones_like(x1s)
-
-    # iterate over all the output locations
-    for xi,x_start in enumerate(x1s):
-        # find indices for the stencil
-        idx1 = np.argmin(abs(x_in-x_start))
-        idx2 = idx1 + int(np.round(segment_length/dx))
-        # cut out a wider chunk of data at time t2 (second cycle)
-        idx3 = idx1 - int(np.round(search_width/dx)) # offset on earlier end by # indices in search_width
-        idx4 = idx2 + int(np.round(search_width/dx)) # offset on later end by # indices in search_width
-
-        # get the two arrays that will be correlated to one another
-        dh_stencil = dh1[idx1:idx2]
-        dh_wide = dh2[idx3:idx4]
-
-        # correlate old with newer data
-        corr = correlate(dh_stencil, dh_wide, mode = 'valid', method = 'direct')
-        norm_val = np.sqrt(np.sum(dh_stencil**2)*np.sum(dh_wide**2)) # normalize so values range between 0 and 1
-        corr = corr / norm_val
-        lagvec = np.arange(- int(np.round(search_width/dx)), int(search_width/dx) +1,1)# for mode = 'valid'
-        shift_vec = lagvec * dx
-
-        if all(np.isnan(corr)):
-            continue
-        else:
-            correlation_value = np.nanmax(corr)
-            if correlation_value >= corr_threshold:
-                idx_peak = np.arange(len(corr))[corr == correlation_value][0]
-                best_shift = shift_vec[idx_peak]
-                velocities[xi] = best_shift/(dt/365)
-                correlations[xi] = correlation_value
-            else:
-                velocities[xi] = np.nan
-                correlations[xi] = correlation_value
-
-    return velocities,correlations
-
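With these descriptions in place, the intended call order reads directly off
the Introduction notebook. A minimal end-to-end sketch (rgt, paths, cycles,
and beams are the tutorial's example values; assumes the ATL06 files are
already on disk):

    from IS2_velocity.readers_atl06 import load_data_by_rgt
    from IS2_velocity.preprocessing import fill_seg_ids, interpolate_nans, filt, differentiate
    from IS2_velocity.correlation_processing import calculate_velocities

    # load all cycles of one repeat ground track into the shared data dictionary
    data = load_data_by_rgt('0848', '../data/', 'ATL06', format='hdf5')

    # regularize, gap-fill, smooth, and differentiate the elevation profiles
    data = fill_seg_ids(data)
    data = interpolate_nans(data)
    data = filt(data)              # default 100 m running average
    data = differentiate(data)     # correlate slopes, not raw elevations

    # correlate cycles 03 and 04 on every beam
    data = calculate_velocities(data, '03', '04',
                                ['gt1l','gt1r','gt2l','gt2r','gt3l','gt3r'])
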
diff --git a/IS2_velocity/preprocessing.py b/IS2_velocity/preprocessing.py
index 94d61de..242a344 100644
--- a/IS2_velocity/preprocessing.py
+++ b/IS2_velocity/preprocessing.py
@@ -9,17 +9,14 @@ def fill_seg_ids(data,dx=20):
 
     Parameters
     ------
-    x_in : array
-        along-track distance
-    h_in : array
-        along-track elevation
-    seg_ids : array
-        segment ids as imported with atl06_to_dict function
+    data : dict
+        data dictionary
     dx : float; default 20
         step between points (20 m is the default for atl06)
 
     Output
     ------
+    data : dict
     """
 
     variables = ['x_atc_full','h_full','lats_full','lons_full','x_full','y_full']
@@ -57,6 +54,20 @@ def fill_seg_ids(data,dx=20):
 # -------------------------------------------------------------------------------------------
 
 def interpolate_nans(data):
+    r"""Linear interpolation between nan values
+
+    Parameters
+    ------
+    data : dict
+        data dictionary
+
+    Output
+    ------
+    data : dict
+    """
+
+    Warning("Be careful interpolating, linear interpolation could lead to misleading correlations.")
+
     data['h_full_interp'] = {}
     for cycle in data['h_full'].keys():
         data['h_full_interp'][cycle] = {}
@@ -72,14 +83,22 @@ def interpolate_nans(data):
 # -------------------------------------------------------------------------------------------
 
 def filt(data, dx=None, filter_type = 'running_average', running_avg_window = None):
-    """
+    r"""Smoothing filter to remove high-frequency noise
 
-    :param x1:
-    :param h1: Land ice height vector. Must be resampled to constant spacing
-    :param dx:
-    :param type:
-    :param running_avg_window:
-    :return:
+    Parameters
+    ------
+    data : dict
+        data dictionary
+    dx : float; default None
+        spatial step between points; if None use median
+    filter_type : str; default 'running_average'
+        which filter to apply; currently only a running average is supported
+    running_avg_window : int
+        window size of the filter, in meters (default 100)
+
+    Output
+    ------
+    data : dict
     """
 
     data['h_filt'] = {}
@@ -103,11 +122,18 @@ def filt(data, dx=None, filter_type = 'running_average', running_avg_window = No
 # -------------------------------------------------------------------------------------------
 
 def differentiate(data,dx=None):
-    """
+    r"""Slope calculation
 
-    :param x_in:
-    :param h_in: Land ice height vector. Must be resampled to constant spacing
-    :return:
+    Parameters
+    ------
+    data : dict
+        data dictionary
+    dx : float; default None
+        spatial step between points; if None use median
+
+    Output
+    ------
+    data : dict
     """
 
     data['h_li_diff'] = {}
diff --git a/IS2_velocity/saving.py b/IS2_velocity/saving.py
index fcae6f9..0fd46f2 100644
--- a/IS2_velocity/saving.py
+++ b/IS2_velocity/saving.py
@@ -11,6 +11,29 @@ def save_is2_velocity(data,rgt,write_out_path,prepend,product,map_data_root,
                       cycle1,cycle2,dx,segment_length,search_width,along_track_step,
                       max_percent_nans,spatial_extent):
 
+    """Save the dictionary as an hdf5 file.
+ + Parameters + ------ + data : dict + data dictionary + rgt + write_out_path + prepend + product + map_data_root + cycle1 + cycle2 + dx + segment_length + search_width + along_track_step + max_percent_nans + spatial_extent + + Output + ------ + """ ### Where to save the results: h5_file_out = write_out_path + prepend + '_rgt' + rgt + '.hdf5' From 22e0a8681a297bbb055f3b06267a268deaa9d567 Mon Sep 17 00:00:00 2001 From: Benjamin Hills Date: Tue, 10 Nov 2020 12:50:31 -0700 Subject: [PATCH 07/17] Update variable names to be more streamlined We were keeping track of a bunch of different variables like 'h_full', etc. I just streamlined this but added a flags field so that we can tell what processing steps have been done. --- IS2_velocity/correlation_processing.py | 16 ++--- IS2_velocity/extract_alongtrack.py | 11 +--- IS2_velocity/preprocessing.py | 82 ++++++++++++++------------ IS2_velocity/readers_atl06.py | 1 + IS2_velocity/saving.py | 2 +- 5 files changed, 56 insertions(+), 56 deletions(-) diff --git a/IS2_velocity/correlation_processing.py b/IS2_velocity/correlation_processing.py index a98930a..5f7d52e 100644 --- a/IS2_velocity/correlation_processing.py +++ b/IS2_velocity/correlation_processing.py @@ -100,8 +100,8 @@ def calculate_velocity_single_beam(data,cycle1,cycle2,beam,dt,search_width=1000, ### Determine x1s, which are the x_atc coordinates at which each cut out window begins # To be common between both repeats, the first point x1 needs to be the larger first value between repeats - min_x_atc_cycle1 = data['x_atc_full'][cycle1][beam][0] - min_x_atc_cycle2 = data['x_atc_full'][cycle2][beam][0] + min_x_atc_cycle1 = data['x_atc'][cycle1][beam][0] + min_x_atc_cycle2 = data['x_atc'][cycle2][beam][0] # pick out the track that starts at greater x_atc, and use that as x1s vector if min_x_atc_cycle1 != min_x_atc_cycle2: @@ -113,17 +113,17 @@ def calculate_velocity_single_beam(data,cycle1,cycle2,beam,dt,search_width=1000, cycletmp = cycle1 ### Generate the x1s vector, in the case that the repeat tracks don't start in the same place - x1s = data['x_atc_full'][cycletmp][beam][ + x1s = data['x_atc'][cycletmp][beam][ int(search_width / dx) + 1:-int(segment_length / dx) - int(search_width / dx):int(along_track_step / dx)] elif min_x_atc_cycle1 == min_x_atc_cycle2: # doesn't matter which cycle ### Generate the x1s vector, in the case that the repeat tracks do start in the same place - x1s = data['x_atc_full'][cycle1][beam][ + x1s = data['x_atc'][cycle1][beam][ int(search_width / dx) + 1:-int(segment_length / dx) - int(search_width / dx):int(along_track_step / dx)] ### Determine xend, where the x1s vector ends: smaller value for both beams, if different - max_x_atc_cycle1 = data['x_atc_full'][cycle1][beam][-1] - search_width / dx - max_x_atc_cycle2 = data['x_atc_full'][cycle2][beam][-1] - search_width / dx + max_x_atc_cycle1 = data['x_atc'][cycle1][beam][-1] - search_width / dx + max_x_atc_cycle2 = data['x_atc'][cycle2][beam][-1] - search_width / dx smallest_xatc = np.min([max_x_atc_cycle1, max_x_atc_cycle2]) ixmax = np.where(x1s >= (smallest_xatc - search_width / dx)) if len(ixmax[0]) >= 1: @@ -137,12 +137,12 @@ def calculate_velocity_single_beam(data,cycle1,cycle2,beam,dt,search_width=1000, data[var][beam] = np.empty_like(x1s) ### Entire x_atc vectors for both cycles - x_full_t1 = data['x_atc_full'][cycle1][beam] + x_full_t1 = data['x_atc'][cycle1][beam] ### Loop over x1s, positions along track that each window starts at for xi, x1 in enumerate(x1s): ### Cut out data: small chunk of data at time t1 
(first cycle) - ix_x1 = np.arange(len(data['x_atc_full'][cycle1][beam]))[data['x_atc_full'][cycle1][beam] >= x1][0] # Index of first point that is greater than x1 + ix_x1 = np.arange(len(data['x_atc'][cycle1][beam]))[data['x_atc'][cycle1][beam] >= x1][0] # Index of first point that is greater than x1 ix_x2 = ix_x1 + int( np.round(segment_length / dx)) # ix_x1 + number of datapoints within the desired segment length x_t1 = x_full_t1[ix_x1:ix_x2] # cut out x_atc values, first cycle diff --git a/IS2_velocity/extract_alongtrack.py b/IS2_velocity/extract_alongtrack.py index dcc1811..6d8774f 100644 --- a/IS2_velocity/extract_alongtrack.py +++ b/IS2_velocity/extract_alongtrack.py @@ -24,8 +24,6 @@ def add_surface_velocity_to_is2_dict(is2_dict, spatial_extent, path, vel_x, vel_ spatial_extent = np.array([spatial_extent]) lat=spatial_extent[[1, 3, 3, 1, 1]] lon=spatial_extent[[2, 2, 0, 0, 2]] - print(lat) - print(lon) # project the coordinates to Antarctic polar stereographic xy=np.array(pyproj.Proj(3031)(lon, lat)) # get the bounds of the projected coordinates @@ -89,9 +87,6 @@ def get_measures_along_track_velocity(x_ps_beam, y_ps_beam , spatial_extent, vel XR=[np.nanmin(xy[0,:]), np.nanmax(xy[0,:])] YR=[np.nanmin(xy[1,:]), np.nanmax(xy[1,:])] - #Measures_vx=pc.grid.data().from_geotif(os.path.join(data_root,vel_x), bounds=[XR, YR]) - #Measures_vy=pc.grid.data().from_geotif(os.path.join(data_root,vel_y), bounds=[XR, YR]) - Measures_vx=pc.grid.data().from_geotif(vel_x_path, bounds=[XR, YR]) Measures_vy=pc.grid.data().from_geotif(vel_y_path, bounds=[XR, YR]) @@ -103,16 +98,13 @@ def get_measures_along_track_velocity(x_ps_beam, y_ps_beam , spatial_extent, vel xL=abs((x_ps_beam[0])-(x_ps_beam[1])) yL=abs((y_ps_beam[0])-(y_ps_beam[1])) - #decides if is descending or ascending path + # decides if is descending or ascending path if x_ps_beam[0]-x_ps_beam[1] < 0: - theta_rad=math.atan(xL/yL) #theta_deg=theta_rad*180/math.pi v_along=vy/math.cos(theta_rad) #v_across=vx/math.cos(theta_rad) - else: - theta_rad=math.atan(xL/yL) #theta_deg=theta_rad*180/math.pi v_along=vy/math.sin(theta_rad) @@ -120,4 +112,3 @@ def get_measures_along_track_velocity(x_ps_beam, y_ps_beam , spatial_extent, vel #Vdiff=vy-v_along return v_along - diff --git a/IS2_velocity/preprocessing.py b/IS2_velocity/preprocessing.py index 242a344..d71656d 100644 --- a/IS2_velocity/preprocessing.py +++ b/IS2_velocity/preprocessing.py @@ -19,36 +19,41 @@ def fill_seg_ids(data,dx=20): data : dict """ - variables = ['x_atc_full','h_full','lats_full','lons_full','x_full','y_full'] - for var in variables: - data[var] = {} - # make a monotonically increasing x vector for cycle in data['x_atc'].keys(): - for var in variables: - data[var][cycle] = {} for beam in data['x_atc'][cycle].keys(): # indices starting at zero, using the segment_id field, so any skipped segment will be kept in correct location seg_ids = data['segment_ids'][cycle][beam] ind = seg_ids - np.nanmin(seg_ids) - data['x_atc_full'][cycle][beam] = np.arange(np.max(ind)+1) * dx + data['x_atc'][cycle][beam][0] - data['h_full'][cycle][beam] = np.zeros(np.max(ind)+1) + np.NaN - data['h_full'][cycle][beam][ind] = data['h_li'][cycle][beam] + data['x_atc'][cycle][beam] = np.arange(np.max(ind)+1) * dx + data['x_atc'][cycle][beam][0] + + h_li_hold = np.zeros(np.max(ind)+1) + np.NaN + h_li_hold[ind] = data['h_li'][cycle][beam] + data['h_li'][cycle][beam] = h_li_hold + + lats_hold = np.zeros(np.shape(data['x_atc'][cycle][beam])) * np.nan + lats_hold[ind] = data['latitude'][cycle][beam] + 
data['latitude'][cycle][beam] = lats_hold + + lons_hold = np.zeros(np.shape(data['x_atc'][cycle][beam])) * np.nan + lons_hold[ind] = data['longitude'][cycle][beam] + data['longitude'][cycle][beam] = lons_hold + + xs_hold = np.zeros(np.shape(data['x_atc'][cycle][beam])) * np.nan + xs_hold[ind] = data['x'][cycle][beam] + data['x'][cycle][beam] = xs_hold - data['lats_full'][cycle][beam] = np.zeros(np.shape(data['x_atc_full'][cycle][beam])) * np.nan - data['lats_full'][cycle][beam][ind] = data['latitude'][cycle][beam] - data['lons_full'][cycle][beam] = np.zeros(np.shape(data['x_atc_full'][cycle][beam])) * np.nan - data['lons_full'][cycle][beam][ind] = data['longitude'][cycle][beam] - data['x_full'][cycle][beam] = np.zeros(np.shape(data['x_atc_full'][cycle][beam])) * np.nan - data['x_full'][cycle][beam][ind] = data['x'][cycle][beam] - data['y_full'][cycle][beam] = np.zeros(np.shape(data['x_atc_full'][cycle][beam])) * np.nan - data['y_full'][cycle][beam][ind] = data['y'][cycle][beam] + ys_hold = np.zeros(np.shape(data['x_atc'][cycle][beam])) * np.nan + ys_hold[ind] = data['y'][cycle][beam] + data['y'][cycle][beam] = ys_hold ## save the segment id's themselves, with gaps filled in data['segment_ids'][cycle][beam] = np.zeros(np.max(ind)+1) + np.NaN data['segment_ids'][cycle][beam][ind] = seg_ids + data['flags'].append('fill_seg_ids') + return data # ------------------------------------------------------------------------------------------- @@ -68,16 +73,16 @@ def interpolate_nans(data): Warning("Be careful interpolating, linear interpolation could lead to misleading correlations.") - data['h_full_interp'] = {} - for cycle in data['h_full'].keys(): - data['h_full_interp'][cycle] = {} - for beam in data['h_full'][cycle].keys(): + for cycle in data['h_li'].keys(): + for beam in data['h_li'][cycle].keys(): ### interpolate nans in pandas - interp_hold = {'x_atc_full': data['x_atc_full'][cycle][beam], 'h_full': data['h_full'][cycle][beam]} - df = pd.DataFrame(interp_hold, columns = ['x_atc_full','h_full']) + interp_hold = {'x_atc': data['x_atc'][cycle][beam], 'h_li': data['h_li'][cycle][beam]} + df = pd.DataFrame(interp_hold, columns = ['x_atc','h_li']) # linear interpolation for now - df['h_full'].interpolate(method = 'linear', inplace = True) - data['h_full_interp'][cycle][beam] = df['h_full'].values + df['h_li'].interpolate(method = 'linear', inplace = True) + data['h_li'][cycle][beam] = df['h_li'].values + + data['flags'].append('interp_nans') return data # ------------------------------------------------------------------------------------------- @@ -101,21 +106,21 @@ def filt(data, dx=None, filter_type = 'running_average', running_avg_window = No data : dict """ - data['h_filt'] = {} - for cycle in data['h_full_interp'].keys(): - data['h_filt'][cycle] = {} - for beam in data['h_full_interp'][cycle].keys(): + for cycle in data['h_li'].keys(): + for beam in data['h_li'][cycle].keys(): if filter_type == 'running_average': # if not running_avg_window: # if no window size was given, run a default 100 m running average smoother running_avg_window = 100 if dx is None: - dx = np.nanmedian(data['x_atc_full'][cycle][beam][1:] - data['x_atc_full'][cycle][beam][:-1]) + dx = np.nanmedian(data['x_atc'][cycle][beam][1:] - data['x_atc'][cycle][beam][:-1]) filt = np.ones(int(np.round(running_avg_window / dx))) - data['h_filt'][cycle][beam] = (1/len(filt)) * np.convolve(filt, data['h_full_interp'][cycle][beam], mode = 'same') + data['h_li'][cycle][beam] = (1/len(filt)) * np.convolve(filt, data['h_li'][cycle][beam], 
mode = 'same') - #TODO: add bandpass, highpass + #TODO: add options bandpass, highpass else: - data['h_filt'][cycle][beam] = data['h_full_interp'][cycle][beam] + data['h_li'][cycle][beam] = data['h_li'][cycle][beam] + + data['flags'].append('filt_'+filter_type) return data @@ -137,11 +142,14 @@ def differentiate(data,dx=None): """ data['h_li_diff'] = {} - for cycle in data['h_filt'].keys(): + for cycle in data['h_li'].keys(): data['h_li_diff'][cycle] = {} - for beam in data['h_filt'][cycle].keys(): + for beam in data['h_li'][cycle].keys(): if dx is None: - dx = np.nanmedian(data['x_atc_full'][cycle][beam][1:] - data['x_atc_full'][cycle][beam][:-1]) - data['h_li_diff'][cycle][beam] = np.gradient(data['h_filt'][cycle][beam], dx) + dx = np.nanmedian(data['x_atc'][cycle][beam][1:] - data['x_atc'][cycle][beam][:-1]) + data['h_li_diff'][cycle][beam] = np.gradient(data['h_li'][cycle][beam], dx) + + data['flags'].append('differentiate') + return data diff --git a/IS2_velocity/readers_atl06.py b/IS2_velocity/readers_atl06.py index cb1055f..fd320cc 100644 --- a/IS2_velocity/readers_atl06.py +++ b/IS2_velocity/readers_atl06.py @@ -60,6 +60,7 @@ def load_data_by_rgt(rgt, path_to_data, product, format = 'hdf5', print(f'file {filename} encountered error {e}') error_count += 1 + D_out['flags'] = [] print('Loaded data from cycles: ' + ','.join(cycles_this_rgt),'for rgt:',rgt) return D_out diff --git a/IS2_velocity/saving.py b/IS2_velocity/saving.py index 0fd46f2..96b4cb3 100644 --- a/IS2_velocity/saving.py +++ b/IS2_velocity/saving.py @@ -55,8 +55,8 @@ def save_is2_velocity(data,rgt,write_out_path,prepend,product,map_data_root, ### Output xy locations of midpoints as well midpoints_xy = np.array(pyproj.Proj(3031)(data['midpoints_lons'][beam], data['midpoints_lats'][beam])) - #extract measures veloc outside of this loop + #extract measures veloc outside of this loop measures_Vx_path = glob.glob( map_data_root + '*_Vx.tif')[0] measures_Vy_path = glob.glob( map_data_root + '*_Vy.tif')[0] From 5431dc9da276f5e3b8faebc7d9d85049e385d89b Mon Sep 17 00:00:00 2001 From: Benjamin Hills Date: Tue, 10 Nov 2020 13:15:21 -0700 Subject: [PATCH 08/17] Small changes to the multi-beam plotting script I wanted to remove the dependency on the MOA file. 
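A quick recap before the plotting changes: the preprocessing hunks above fill pointing gaps with NaNs, linearly interpolate across them, smooth with a running average, and differentiate along track, appending a flag to data['flags'] at each step. A minimal sketch of that pipeline on a synthetic profile (variable names are illustrative, not the package API):

    import numpy as np
    import pandas as pd

    dx = 20.0                            # ATL06 along-track spacing, m
    window = 100.0                       # running-average window, m
    x = np.arange(0.0, 2000.0, dx)
    h = 0.01 * x + np.sin(x / 150.0)     # synthetic elevation profile
    h[40:45] = np.nan                    # a small data gap

    h = pd.Series(h).interpolate(method='linear').values          # as in interpolate_nans()
    kernel = np.ones(int(round(window / dx)))
    h_smooth = np.convolve(kernel, h, mode='same') / len(kernel)  # as in filt()
    dh_dx = np.gradient(h_smooth, dx)                             # as in differentiate()

Smoothing before differentiating keeps np.gradient from amplifying high-frequency noise in the raw heights, which is why the correlation downstream works on h_li_diff rather than on the heights themselves.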
--- IS2_velocity/plotting.py | 59 ++++++++++++++++++++++------------------ 1 file changed, 32 insertions(+), 27 deletions(-) diff --git a/IS2_velocity/plotting.py b/IS2_velocity/plotting.py index cf28839..5b3289c 100644 --- a/IS2_velocity/plotting.py +++ b/IS2_velocity/plotting.py @@ -10,7 +10,7 @@ import matplotlib.pyplot as plt def plot_measures_along_track_comparison(rgt, beams, out_path, correlation_threshold, spatial_extent, plot_out_location, map_data_root, - velocity_number, close=False): + velocity_number, epsg, close=False): """ :param rgt: @@ -27,33 +27,36 @@ def plot_measures_along_track_comparison(rgt, beams, out_path, correlation_thres # plot_out_location is where to save the plot # map_data_root is where the map data are stored, specifically must contain moa_2009_1km.tif for this specific code to work - file = out_path + 'rgt' + rgt + '_veloc' + str(velocity_number).zfill( - 2) + '.hdf5' # < eventually, make velocity number two digits + file = out_path + 'rgt' + rgt + '.hdf5' + # < eventually, make velocity number two digits + # '_veloc' + str(velocity_number).zfill(2) + if glob.glob(file): - ### MOA parameters - moa_datapath = map_data_root # '/srv/tutorial-data/land_ice_applications/' - # spatial_extent = np.array([-102, -76, -98, -74.5]) - # spatial_extent = np.array([-65, -86, -55, -81]) - - lat = spatial_extent[[1, 3, 3, 1, 1]] - lon = spatial_extent[[2, 2, 0, 0, 2]] - # project the coordinates to Antarctic polar stereographic - xy = np.array(pyproj.Proj(3031)(lon, lat)) - # get the bounds of the projected coordinates - XR = [np.nanmin(xy[0, :]), np.nanmax(xy[0, :])] - YR = [np.nanmin(xy[1, :]), np.nanmax(xy[1, :])] - MOA = pc.grid.data().from_geotif(os.path.join(moa_datapath, 'moa_2009_1km.tif'), bounds=[XR, YR]) - # MOA=pc.grid.data().from_geotif(os.path.join(moa_datapath, 'MOA','moa_2009_1km.tif'), bounds=[XR, YR]) - - epsg = 3031 # PS? 
- plt.close('all') fig = plt.figure(figsize=[11, 8]) grid = plt.GridSpec(6, 2, wspace=0.4, hspace=0.3) - haxMOA = fig.add_subplot(grid[0:4, 1]) - MOA.show(ax=haxMOA, cmap='gray', clim=[14000, 17000]) + + try: + ### MOA parameters + moa_datapath = map_data_root # '/srv/tutorial-data/land_ice_applications/' + + lat = spatial_extent[[1, 3, 3, 1, 1]] + lon = spatial_extent[[2, 2, 0, 0, 2]] + + # project the coordinates to Antarctic polar stereographic + xy = np.array(pyproj.Proj(epsg)(lon, lat)) + # get the bounds of the projected coordinates + XR = [np.nanmin(xy[0, :]), np.nanmax(xy[0, :])] + YR = [np.nanmin(xy[1, :]), np.nanmax(xy[1, :])] + # MOA=pc.grid.data().from_geotif(os.path.join(moa_datapath, 'MOA','moa_2009_1km.tif'), bounds=[XR, YR]) + + MOA = pc.grid.data().from_geotif(os.path.join(moa_datapath, 'moa_2009_1km.tif'), bounds=[XR, YR]) + + haxMOA = fig.add_subplot(grid[0:4, 1]) + MOA.show(ax=haxMOA, cmap='gray', clim=[14000, 17000]) + except: + pass with h5py.File(file, 'r') as f: for ib, beam in enumerate(beams): @@ -81,11 +84,13 @@ def plot_measures_along_track_comparison(rgt, beams, out_path, correlation_thres c = plt.colorbar(h0, ax=hax0) c.set_label('Correlation coefficient (0 -> 1)') - h2 = haxMOA.scatter(xy[0][ixs0], xy[1][ixs0], 0.02, 'k') - h3 = haxMOA.scatter(xy[0][ixs], xy[1][ixs], 0.15, velocs[ixs], vmin=-800, vmax=800, cmap='plasma') - - c = plt.colorbar(h3, ax=haxMOA) - c.set_label('Along-track velocity (m/yr)') + try: + h2 = haxMOA.scatter(xy[0][ixs0], xy[1][ixs0], 0.02, 'k') + h3 = haxMOA.scatter(xy[0][ixs], xy[1][ixs], 0.15, velocs[ixs], vmin=-800, vmax=800, cmap='plasma') + c = plt.colorbar(h3, ax=haxMOA) + c.set_label('Along-track velocity (m/yr)') + except: + pass outfile = plot_out_location + 'rgt' + rgt + '.' + beam + '_vs_measures_veloc' + str(velocity_number).zfill( 2) + '.png' From 6b62dc3e30721be6c725d05bc916b71671638fac Mon Sep 17 00:00:00 2001 From: Benjamin Hills Date: Tue, 10 Nov 2020 13:19:49 -0700 Subject: [PATCH 09/17] Remove this old script with an unused function in it. 
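For the record, the one idea worth keeping from the draft removed below is sub-sample refinement of a correlation peak: cut a window around the integer-lag maximum and interpolate it with a cubic spline to locate the peak between samples. A sketch on synthetic data (not a reconstruction of the deleted function):

    import numpy as np
    from scipy.interpolate import interp1d

    lags = np.arange(-50, 51)                        # integer lags, samples
    corr = np.exp(-0.5 * ((lags - 3.3) / 8.0) ** 2)  # synthetic correlation, true peak at 3.3

    ix = int(np.argmax(corr))                        # integer-lag peak (lag = 3)
    w = 5                                            # half-width of the fit window, samples
    f = interp1d(lags[ix - w:ix + w + 1], corr[ix - w:ix + w + 1], kind='cubic')
    fine = np.arange(lags[ix - w], lags[ix + w], 0.01)
    best_lag = fine[np.argmax(f(fine))]              # ~3.3, between integer samples

Most of the deleted code's bulk, and the edge-case debugging noted in its header, came from guarding the window width when the peak sits near either end of the correlation function; the sketch above ignores that.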
--- IS2_velocity/draft_functions.py | 144 -------------------------------- 1 file changed, 144 deletions(-) delete mode 100644 IS2_velocity/draft_functions.py diff --git a/IS2_velocity/draft_functions.py b/IS2_velocity/draft_functions.py deleted file mode 100644 index 8e928bc..0000000 --- a/IS2_velocity/draft_functions.py +++ /dev/null @@ -1,144 +0,0 @@ -#!/usr/bin/env python -# -*- coding: utf-8 -*- - -import numpy as np - -### UNFINISHED FUNCTION - basic structure is good -### working on debugging problems with peaks at edges for the sub-sample routine - -def find_correlation_peak(lagvec, shift_vec, corr_normed, max_width, min_width, dx_interp, fit_type, plotting): - # max_width = how many SAMPLES wide the half peak can be, max, samples - # min_width = the narrowest the half peak can be, samples - # dx_interp = interpolated sample spacing, in units of SAMPLES - - from scipy.interpolate import interp1d - - # peak of un-interpolated data - ix_peak = np.arange(len(corr_normed))[corr_normed == np.nanmax(corr_normed)][0] - ix_peak = np.where(corr_normed == np.nanmax(corr_normed))[0][0] - - if fit_type == 'raw': - best_lag = lagvec[ix_peak] - best_shift = shift_vec[ix_peak] - peak_corr_value = corr_normed[ix_peak] - - else: - ### Assume we're doing a sub-sample fit - # find troughs before and after peak - - corr_diff = corr_normed[1:] - corr_normed[:-1] # to find troughs, zero-crossings - - # cases: - # ix_peak is too close to the start (ix_peak < min_width) - # calculate right width by finding trough or max/min, use ix_peak as left width - if ix_peak < min_width: - left_width = ix_peak # go to left edge - # determine right width - if ix_peak == 0: - ix_next_trough = np.where(corr_diff[ix_peak:] > 0)[0][0] + ix_peak - else: - ix_next_trough = np.where(corr_diff[ix_peak-1:] > 0)[0][0] + ix_peak - width_next_trough = -(ix_peak - ix_next_trough) - if width_next_trough < min_width: - right_width = min_width - elif width_next_trough > max_width: - right_width = max_width - else: - right_width = width_next_trough - - # ix_peak is too close to the end (len(corr_normed) - ix_peak < min_width) - # calculate left width by finding trough or max/min, use len(corr_normed) - ix_peak as right width - elif len(corr_normed) - ix_peak < min_width: - right_width = len(corr_normed) - ix_peak -1 - #determine left width - ix_prev_trough = ix_peak - np.where(np.flip(corr_diff[:ix_peak-1]) < 0)[0][0] - width_prev_trough = ix_peak - ix_prev_trough - if width_prev_trough < min_width: - left_width = min_width - elif width_prev_trough > max_width: - left_width = max_width - else: - left_width = width_prev_trough - - # other - else: - # calculate both left and right width - ix_next_trough = np.where(corr_diff[ix_peak-1:] > 0)[0][0] + ix_peak - ix_prev_trough = ix_peak - np.where(np.flip(corr_diff[:ix_peak-1]) < 0)[0][0] - # if width is greater than min, pick smaller width, so is same on both sides - width = np.min([ix_peak - ix_prev_trough, -(ix_peak - ix_next_trough)]) - if width > max_width: - width = max_width - elif width < min_width: - width = min_width - left_width = width - right_width = width - -# # deal with edges; just go to edge -# if ix_peak + width >= len(corr_normed): -# right_width = len(corr_normed) - ix_peak -1 -# else: -# right_width = width - -# if width>= ix_peak: -# left_width = ix_peak -# else: -# left_width = width - - # cut out data and an x vector before and after the peak (for plotting) - dcut = corr_normed[ix_peak - left_width: ix_peak + right_width +1] - xvec_cut = np.arange(ix_peak - 
left_width, ix_peak + right_width + 1) - lagvec_cut = lagvec[xvec_cut] - - if fit_type == 'parabola': - # fit parabola; in these data, usually a poor fit - fit_coeffs = np.polyfit(xvec_cut, dcut, deg=2, full = True) - - # create xvec to interpolate the parabola onto and compute - interp_xvec = np.arange(ix_peak - left_width, ix_peak + right_width + 1, 0.01) - fit = np.polyval(p = fit_coeffs[0], x = interp_xvec) - - # interp lagvec - lagvec_interp = np.arange(lagvec[ix_peak - left_width], lagvec[ix_peak + right_width + 1], dx_interp) - - # compute peak - ix_peak_interp = np.where(fit == np.max(fit))[0][0] - best_lag = interp_xvec[ix_peak_interp] - peak_corr_value = fit[ix_peak_interp] - - elif fit_type == 'cubic': -# # find troughs before and after peak -# corr_diff = corr_normed[1:] - corr_normed[:-1] -# ix_next_trough = np.where(corr_diff[ix_peak:] > 0)[0][0] + ix_peak -# ix_prev_trough = ix_peak - np.where(np.flip(corr_diff[:ix_peak]) < 0)[0][0] -# width = np.min([ix_peak - ix_prev_trough, -(ix_peak - ix_next_trough)]) -# if width > max_width: -# width = max_width -# elif width < min_width: -# width = min_width - -# # cut out data and an x vector before and after the peak (for plotting) -# dcut = corr_normed[ix_peak - width: ix_peak + width +1] -# xvec_cut = np.arange(ix_peak - width, ix_peak + width + 1) -# lagvec_cut = lagvec[xvec_cut] - -# if np.sum(np.isnan(dcut)) >0: -# print(str(np.sum(np.isnan(dcut))) + ' nans') - - # cubic spline interpolation - # create xvector to interpolate onto - interp_xvec = np.arange(ix_peak - left_width, ix_peak + right_width , dx_interp) - # create interpolation function - f = interp1d(xvec_cut, dcut, kind = 'cubic') - # evaluate interpolation function on new xvector - fit = f(interp_xvec) - - # interpolate lagvec - lagvec_interp = np.arange(lagvec[ix_peak - left_width], lagvec[ix_peak + right_width], dx_interp) - - # compute peak - ix_peak_interp = np.where(fit == np.max(fit))[0][0] - best_lag = lagvec_interp[ix_peak_interp] - peak_corr_value = fit[ix_peak_interp] - - return best_lag, peak_corr_value From aff9a5049c2381217343ee9452bc330d738c6ee0 Mon Sep 17 00:00:00 2001 From: Benjamin Hills Date: Tue, 10 Nov 2020 14:21:36 -0700 Subject: [PATCH 10/17] Clean up the function for extracting Measures velocities. --- IS2_velocity/extract_alongtrack.py | 124 ++++++++++------------------- IS2_velocity/plotting.py | 18 +++-- IS2_velocity/saving.py | 16 +--- 3 files changed, 54 insertions(+), 104 deletions(-) diff --git a/IS2_velocity/extract_alongtrack.py b/IS2_velocity/extract_alongtrack.py index 6d8774f..79169a2 100644 --- a/IS2_velocity/extract_alongtrack.py +++ b/IS2_velocity/extract_alongtrack.py @@ -6,109 +6,65 @@ import pointCollection as pc except: print('Continuing withough pointCollection') -import os, pyproj - -def add_surface_velocity_to_is2_dict(is2_dict, spatial_extent, path, vel_x, vel_y ): - """ - - is2_dict: Python dictionary with ATL06 track data - spatial_extent: bounding box of the interest area in the format: - (e.g. 
[-65, -86, -55, -81] == [min_lon, min_lat, max_lon, max_lat]) - path: local path to velocity data - vel_x: tif velocity raster with x component - vel_y: tif velocity raster with y component - - """ - data_root = path - - spatial_extent = np.array([spatial_extent]) - lat=spatial_extent[[1, 3, 3, 1, 1]] - lon=spatial_extent[[2, 2, 0, 0, 2]] - # project the coordinates to Antarctic polar stereographic - xy=np.array(pyproj.Proj(3031)(lon, lat)) - # get the bounds of the projected coordinates - XR=[np.nanmin(xy[0,:]), np.nanmax(xy[0,:])] - YR=[np.nanmin(xy[1,:]), np.nanmax(xy[1,:])] - - Measures_vx=pc.grid.data().from_geotif(os.path.join(data_root,vel_x), bounds=[XR, YR]) - Measures_vy=pc.grid.data().from_geotif(os.path.join(data_root,vel_y), bounds=[XR, YR]) - - vx = Measures_vx.interp(is2_dict['x'],is2_dict['y']) - vy = Measures_vy.interp(is2_dict['x'],is2_dict['y']) - - #Solve for angle to rotate Vy to be along track and Vx to be across track - import math - xL=abs((is2_dict['x'][0])-(is2_dict['x'][1])) - yL=abs((is2_dict['y'][0])-(is2_dict['y'][1])) - - #decides if is descending or ascending path - if is2_dict['x'][0]-is2_dict['x'][1] < 0: - - theta_rad=math.atan(xL/yL) - #theta_deg=theta_rad*180/math.pi - is2_dict['v_along']=vy/math.cos(theta_rad) - is2_dict['v_across']=vx/math.cos(theta_rad) - - else: - - theta_rad=math.atan(xL/yL) - #theta_deg=theta_rad*180/math.pi - is2_dict['v_along']=vy/math.sin(theta_rad) - is2_dict['v_across']=vx/math.sin(theta_rad) - - return is2_dict - - -def get_measures_along_track_velocity(x_ps_beam, y_ps_beam , spatial_extent, vel_x_path, vel_y_path): - """ - - is2_dict: Python dictionary with ATL06 track data - spatial_extent: bounding box of the interest area in the format: - (e.g. [-65, -86, -55, -81] == [min_lon, min_lat, max_lon, max_lat]) - path: local path to velocity data - vel_x: tif velocity raster with x component - vel_y: tif velocity raster with y component - +from pyproj import Proj +proj_stere = Proj('epsg:3031') + +def get_measures(data, spatial_extent, measures_path): + r"""Add the Measures surface velocity to the local data dictionary. + + Parameters + ------ + data : dict + data dictionary + spatial_extent : list + bounding box of the interest area in the format + (e.g. 
[-65, -86, -55, -81] == [min_lon, min_lat, max_lon, max_lat]) + measures_path : str + local path to velocity data + + Output + ------ + data : dict """ #fix with if statement about type of list or array DONE - if type(spatial_extent) == type([]): - spatial_extent = np.array([spatial_extent]) - - lat=spatial_extent[[1, 3, 3, 1, 1]] lon=spatial_extent[[2, 2, 0, 0, 2]] # project the coordinates to Antarctic polar stereographic - xy=np.array(pyproj.Proj(3031)(lon, lat)) + meas_xy=np.array(proj_stere(lon, lat)) # get the bounds of the projected coordinates - XR=[np.nanmin(xy[0,:]), np.nanmax(xy[0,:])] - YR=[np.nanmin(xy[1,:]), np.nanmax(xy[1,:])] + XR=[np.nanmin(meas_xy[0,:]), np.nanmax(meas_xy[0,:])] + YR=[np.nanmin(meas_xy[1,:]), np.nanmax(meas_xy[1,:])] - Measures_vx=pc.grid.data().from_geotif(vel_x_path, bounds=[XR, YR]) - Measures_vy=pc.grid.data().from_geotif(vel_y_path, bounds=[XR, YR]) + Measures_vx=pc.grid.data().from_geotif(measures_path+'Vx.tif', bounds=[XR, YR]) + Measures_vy=pc.grid.data().from_geotif(measures_path+'Vy.tif', bounds=[XR, YR]) - vx = Measures_vx.interp(x_ps_beam,y_ps_beam) - vy = Measures_vy.interp(x_ps_beam,y_ps_beam) + beam1 = list(data['midpoints_lats'].keys())[0] + lats = data['midpoints_lats'][beam1] + lons = data['midpoints_lons'][beam1] + x,y = proj_stere(lons,lats) + + vx = Measures_vx.interp(x,y) + vy = Measures_vy.interp(x,y) #Solve for angle to rotate Vy to be along track and Vx to be across track import math - xL=abs((x_ps_beam[0])-(x_ps_beam[1])) - yL=abs((y_ps_beam[0])-(y_ps_beam[1])) + xL=abs((x[0])-(x[1])) + yL=abs((y[0])-(y[1])) - # decides if is descending or ascending path - if x_ps_beam[0]-x_ps_beam[1] < 0: + #decides if is descending or ascending path + if x[0]-x[1] < 0: theta_rad=math.atan(xL/yL) #theta_deg=theta_rad*180/math.pi - v_along=vy/math.cos(theta_rad) - #v_across=vx/math.cos(theta_rad) + data['meas_v_along']=vy/math.cos(theta_rad) + data['meas_v_across']=vx/math.cos(theta_rad) else: theta_rad=math.atan(xL/yL) #theta_deg=theta_rad*180/math.pi - v_along=vy/math.sin(theta_rad) - #v_across=vx/math.sin(theta_rad) + data['meas_v_along']=vy/math.sin(theta_rad) + data['meas_v_across']=vx/math.sin(theta_rad) - #Vdiff=vy-v_along - return v_along + return data diff --git a/IS2_velocity/plotting.py b/IS2_velocity/plotting.py index 5b3289c..e3f26c7 100644 --- a/IS2_velocity/plotting.py +++ b/IS2_velocity/plotting.py @@ -1,16 +1,18 @@ #!/usr/bin/env python # -*- coding: utf-8 -*- -import glob, pyproj, os, h5py +import glob, os, h5py import numpy as np try: import pointCollection as pc except: print('Continuing without pointCollection.') import matplotlib.pyplot as plt +from pyproj import Proj +proj_stere = Proj('epsg:3031') def plot_measures_along_track_comparison(rgt, beams, out_path, correlation_threshold, spatial_extent, plot_out_location, map_data_root, - velocity_number, epsg, close=False): + velocity_number, close=False): """ :param rgt: @@ -45,7 +47,7 @@ def plot_measures_along_track_comparison(rgt, beams, out_path, correlation_thres lon = spatial_extent[[2, 2, 0, 0, 2]] # project the coordinates to Antarctic polar stereographic - xy = np.array(pyproj.Proj(epsg)(lon, lat)) + xy = np.array(proj_stere(lon, lat)) # get the bounds of the projected coordinates XR = [np.nanmin(xy[0, :]), np.nanmax(xy[0, :])] YR = [np.nanmin(xy[1, :]), np.nanmax(xy[1, :])] @@ -59,6 +61,11 @@ def plot_measures_along_track_comparison(rgt, beams, out_path, correlation_thres pass with h5py.File(file, 'r') as f: + beam = beams[0] + lats = f[f'/{beam}/latitudes'][()] + 
lons = f[f'/{beam}/longitudes'][()] + meas_v = f[f'/{beam}/Measures_v_along'][()] + meas_xy = np.array(proj_stere(lons, lats)) for ib, beam in enumerate(beams): hax0 = fig.add_subplot(grid[ib, 0]) # 1hax1=fig.add_subplot(212) @@ -70,14 +77,13 @@ def plot_measures_along_track_comparison(rgt, beams, out_path, correlation_thres lons = f[f'/{beam}/longitudes'][()] coeffs = f[f'/{beam}/correlation_coefficients'][()] velocs = f[f'/{beam}/velocities'][()] - v_along = f[f'/{beam}/Measures_v_along'][()] - xy = np.array(pyproj.proj.Proj(3031)(lons, lats)) + xy = np.array(proj_stere(lons, lats)) ixs0 = coeffs <= correlation_threshold ixs = coeffs > correlation_threshold h0 = hax0.scatter(xy[0], velocs, 1, coeffs, vmin=0, vmax=1, cmap='viridis') - h1 = hax0.plot(xy[0], v_along, 'k-') + h1 = hax0.plot(meas_xy[0], meas_v, 'k-') # whether v_along is + or - must depend on ascending vs descending; not done correctly yet hax0.set_ylim(-800, 800) diff --git a/IS2_velocity/saving.py b/IS2_velocity/saving.py index 96b4cb3..1cbfe7c 100644 --- a/IS2_velocity/saving.py +++ b/IS2_velocity/saving.py @@ -1,12 +1,8 @@ #!/usr/bin/env python # -*- coding: utf-8 -*- -import numpy as np from astropy.time import Time import h5py -import pyproj -import glob -from .extract_alongtrack import get_measures_along_track_velocity def save_is2_velocity(data,rgt,write_out_path,prepend,product,map_data_root, cycle1,cycle2,dx,segment_length,search_width,along_track_step, @@ -53,13 +49,6 @@ def save_is2_velocity(data,rgt,write_out_path,prepend,product,map_data_root, for beam in data['velocities'].keys(): - ### Output xy locations of midpoints as well - midpoints_xy = np.array(pyproj.Proj(3031)(data['midpoints_lons'][beam], data['midpoints_lats'][beam])) - - #extract measures veloc outside of this loop - measures_Vx_path = glob.glob( map_data_root + '*_Vx.tif')[0] - measures_Vy_path = glob.glob( map_data_root + '*_Vy.tif')[0] - ### Add velocities to hdf5 file for each beam with h5py.File(h5_file_out, 'a') as f: f[beam + '/x_atc'] = data['midpoints_x_atc'][beam] # assign x_atc value of half way along the segment @@ -71,6 +60,5 @@ def save_is2_velocity(data,rgt,write_out_path,prepend,product,map_data_root, f[beam + '/segment_ids'] = data['midpoints_seg_ids'][beam] f[beam + '/first_cycle_time'] = str(Time(data['times'][cycle1][beam][0])) f[beam + '/second_cycle_time'] = str(Time(data['times'][cycle2][beam][0])) - f[beam + '/Measures_v_along'] = get_measures_along_track_velocity(midpoints_xy[0], midpoints_xy[1], spatial_extent, - measures_Vx_path, measures_Vy_path) - + f[beam + '/Measures_v_along'] = data['meas_v_along'] + f[beam + '/Measures_v_across'] = data['meas_v_across'] From 5d621d337347cbb17aef190049ede0811b2a0e7f Mon Sep 17 00:00:00 2001 From: Benjamin Hills Date: Tue, 10 Nov 2020 15:22:01 -0700 Subject: [PATCH 11/17] Change the vector rotation for Measures velocities I think that the conversion was ignoring one of the velocity components. I tested this updated conversion for all combinations of +/- vx and vy terms. 
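The new rotation below is equivalent to projecting the (vx, vy) vector onto unit vectors along and across the track, which makes it easy to sanity-check numerically. A sketch with made-up values (not the package API; it matches the patched arctan form when xL > 0):

    import numpy as np

    x = np.array([0.0, 80.0])     # two consecutive track points, polar stereographic m
    y = np.array([0.0, 60.0])
    vx, vy = 300.0, 400.0         # Measures velocity components

    tx, ty = x[1] - x[0], y[1] - y[0]
    ux, uy = tx / np.hypot(tx, ty), ty / np.hypot(tx, ty)  # unit vector along track

    v_along = vx * ux + vy * uy   # = vx*cos(theta) + vy*cos(pi/2 - theta) -> 480.0
    v_across = vx * uy - vy * ux  # = vx*sin(theta) - vy*sin(pi/2 - theta) -> -140.0

Because the unit vector is built from the signed coordinate differences directly, this form needs no separate ascending/descending branch, which is what the old branching code was trying to handle.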
--- IS2_velocity/extract_alongtrack.py | 20 ++++++-------------- IS2_velocity/plotting.py | 8 +++++--- 2 files changed, 11 insertions(+), 17 deletions(-) diff --git a/IS2_velocity/extract_alongtrack.py b/IS2_velocity/extract_alongtrack.py index 79169a2..585e378 100644 --- a/IS2_velocity/extract_alongtrack.py +++ b/IS2_velocity/extract_alongtrack.py @@ -51,20 +51,12 @@ def get_measures(data, spatial_extent, measures_path): vy = Measures_vy.interp(x,y) #Solve for angle to rotate Vy to be along track and Vx to be across track - import math - xL=abs((x[0])-(x[1])) - yL=abs((y[0])-(y[1])) + xL = x[1] - x[0] + yL = y[1] - y[0] + theta = np.arctan(yL/xL) - #decides if is descending or ascending path - if x[0]-x[1] < 0: - theta_rad=math.atan(xL/yL) - #theta_deg=theta_rad*180/math.pi - data['meas_v_along']=vy/math.cos(theta_rad) - data['meas_v_across']=vx/math.cos(theta_rad) - else: - theta_rad=math.atan(xL/yL) - #theta_deg=theta_rad*180/math.pi - data['meas_v_along']=vy/math.sin(theta_rad) - data['meas_v_across']=vx/math.sin(theta_rad) + # Do the rotation + data['meas_v_along'] = vx*np.cos(theta) + vy*np.cos(np.pi/2.-theta) + data['meas_v_across'] = vx*np.sin(theta) - vy*np.sin(np.pi/2.-theta) return data diff --git a/IS2_velocity/plotting.py b/IS2_velocity/plotting.py index e3f26c7..e09ea64 100644 --- a/IS2_velocity/plotting.py +++ b/IS2_velocity/plotting.py @@ -12,7 +12,7 @@ proj_stere = Proj('epsg:3031') def plot_measures_along_track_comparison(rgt, beams, out_path, correlation_threshold, spatial_extent, plot_out_location, map_data_root, - velocity_number, close=False): + velocity_number, flip_meas_sign=False, close=False): """ :param rgt: @@ -23,8 +23,7 @@ def plot_measures_along_track_comparison(rgt, beams, out_path, correlation_thres :param velocity_number: :param close: :return: - """ - # currently just the first velocity determination, veloc0 + """ # currently just the first velocity determination, veloc0 # out_path is where the xcorr results are stored # plot_out_location is where to save the plot # map_data_root is where the map data are stored, specifically must contain moa_2009_1km.tif for this specific code to work @@ -66,6 +65,9 @@ def plot_measures_along_track_comparison(rgt, beams, out_path, correlation_thres lons = f[f'/{beam}/longitudes'][()] meas_v = f[f'/{beam}/Measures_v_along'][()] meas_xy = np.array(proj_stere(lons, lats)) + if flip_meas_sign: + meas_v *= -1 + for ib, beam in enumerate(beams): hax0 = fig.add_subplot(grid[ib, 0]) # 1hax1=fig.add_subplot(212) From 9ddf7d802cf083739ac9700070dad138964128a2 Mon Sep 17 00:00:00 2001 From: Grace Barcheck Date: Wed, 11 Nov 2020 14:17:20 -0500 Subject: [PATCH 12/17] re-arrange correlation steps slightly; add function to plot a single correlation function; update tutorial notebook to reflect --- IS2_velocity/correlation_processing.py | 232 +- IS2_velocity/plotting.py | 144 +- notebooks/Introduction_Tutorial.ipynb | 4105 +++++++++++++++++++++++- 3 files changed, 4362 insertions(+), 119 deletions(-) diff --git a/IS2_velocity/correlation_processing.py b/IS2_velocity/correlation_processing.py index 5f7d52e..7aff778 100644 --- a/IS2_velocity/correlation_processing.py +++ b/IS2_velocity/correlation_processing.py @@ -40,7 +40,8 @@ def time_diff(data,cycle1,cycle2,beam='gt1l'): # ------------------------------------------------------------------------------------------- -def calculate_velocities(data, cycle1, cycle2, beams, *args, **kwargs): +def calculate_velocities(data, cycle1, cycle2, beams, search_width=1000, segment_length=2000, + 
max_percent_nans=10, along_track_step=100, dx=20, *args, **kwargs): r"""Calculate velocities by correlating two cycles. Can be done over a list of beams. @@ -66,18 +67,16 @@ def calculate_velocities(data, cycle1, cycle2, beams, *args, **kwargs): for var in variables: data[var] = {} - ### Determing Elapsed time between cycles - dt = time_diff(data,cycle1,cycle2) - ### Loop over each beam for beam in beams: - data = calculate_velocity_single_beam(data,cycle1,cycle2,beam,dt,*args,**kwargs) + data = calculate_velocity_single_beam(data,cycle1,cycle2,beam,search_width=search_width,segment_length=segment_length, + max_percent_nans=max_percent_nans,along_track_step=along_track_step, dx=dx) return data # ------------------------------------------------------------------------------------------- -def calculate_velocity_single_beam(data,cycle1,cycle2,beam,dt,search_width=1000,segment_length=2000, +def calculate_velocity_single_beam(data,cycle1,cycle2,beam,search_width=1000,segment_length=2000, max_percent_nans=10,along_track_step=100, dx=20): r"""Calculate velocities by correlating two cycles. Can be done over a list of beams. @@ -98,6 +97,39 @@ def calculate_velocity_single_beam(data,cycle1,cycle2,beam,dt,search_width=1000, data : dict """ + ### Determine x1s, the along-track distances at which to start each window + x1s = determine_x1s(data, cycle1, cycle2, beam, search_width=search_width, segment_length=segment_length, + along_track_step=along_track_step, dx=dx) + + ### Create vectors to store results in + variables = ['velocities','correlations','lags','midpoints_x_atc','midpoints_lats','midpoints_lons', + 'midpoints_seg_ids'] + for var in variables: + data[var][beam] = np.empty_like(x1s) + + ### Loop over x1s, positions along track that each window starts at + for x1 in x1s: + data = correlate_single_x1(x1, data, cycle1, cycle2, beam, search_width=search_width, + segment_length=segment_length, along_track_step=along_track_step, + max_percent_nans=max_percent_nans, dx=dx) + return data + +# ------------------------------------------------------------------------------------------- + +def determine_x1s(data, cycle1, cycle2, beam, search_width=1000, segment_length=2000, + along_track_step=100, dx=20): + """ + + :param data: + :param cycle1: + :param cycle2: + :param beam: + :param search_width: + :param segment_length: + :param along_track_step: + :param dx: + :return: + """ ### Determine x1s, which are the x_atc coordinates at which each cut out window begins # To be common between both repeats, the first point x1 needs to be the larger first value between repeats min_x_atc_cycle1 = data['x_atc'][cycle1][beam][0] @@ -130,87 +162,119 @@ def calculate_velocity_single_beam(data,cycle1,cycle2,beam,dt,search_width=1000, ixtmp = ixmax[0][0] x1s = x1s[:ixtmp] - ### Create vectors to store results in - variables = ['velocities','correlations','lags','midpoints_x_atc','midpoints_lats','midpoints_lons', - 'midpoints_seg_ids'] - for var in variables: - data[var][beam] = np.empty_like(x1s) + return x1s +# ------------------------------------------------------------------------------------------- + +def correlate_single_x1(x1, data, cycle1, cycle2, beam, search_width=1000, segment_length=2000, along_track_step=100, + max_percent_nans=10, dx=20, return_correlation_function = False): + """ + + :param data: + :param cycle1: + :param cycle2: + :param beam: + :param search_width: + :param segment_length: + :param max_percent_nans: + :param dx: + :return: + """ + ### Determing Elapsed time between cycles + dt = 
time_diff(data,cycle1,cycle2) + + ### Determine x1s, the along-track distances at which to start each window + x1s = determine_x1s(data, cycle1, cycle2, beam, search_width=search_width, segment_length=segment_length, + along_track_step=along_track_step, dx=dx) + + ### Determine xi, the index of the current along track distance in the x1s vector + xi = np.where(x1s == x1)[0][0] ### Entire x_atc vectors for both cycles x_full_t1 = data['x_atc'][cycle1][beam] - ### Loop over x1s, positions along track that each window starts at - for xi, x1 in enumerate(x1s): - ### Cut out data: small chunk of data at time t1 (first cycle) - ix_x1 = np.arange(len(data['x_atc'][cycle1][beam]))[data['x_atc'][cycle1][beam] >= x1][0] # Index of first point that is greater than x1 - ix_x2 = ix_x1 + int( - np.round(segment_length / dx)) # ix_x1 + number of datapoints within the desired segment length - x_t1 = x_full_t1[ix_x1:ix_x2] # cut out x_atc values, first cycle - lats_t1 = data['latitude'][cycle1][beam][ix_x1:ix_x2] # cut out latitude values, first cycle - lons_t1 = data['longitude'][cycle1][beam][ix_x1:ix_x2] # cut out longitude values, first cycle - seg_ids_t1 = data['segment_ids'][cycle1][beam][ix_x1:ix_x2] # cut out segment_ids, first cycle - h_li1 = data['h_li_diff'][cycle1][beam][ - ix_x1 - 1:ix_x2 - 1] # cut out land ice height values, first cycle; start 1 index earlier because - # the h_li_diff data are differentiated, and therefore one sample shorter - - ### Cut out data: wider chunk of data at time t2 (second cycle) - ix_x3 = ix_x1 - int(np.round(search_width / dx)) # extend on earlier end by number of indices in search_width - ix_x4 = ix_x2 + int(np.round(search_width / dx)) # extend on later end by number of indices in search_width - h_li2 = data['h_li_diff'][cycle2][beam][ - ix_x3 - 1:ix_x4 - 1] # cut out land ice height values, second cycle; start 1 index earlier because - # the h_li_diff data are differentiated, and therefore one sample shorter - - # Find segment midpoints; this is the position where we will assign the velocity measurement from each window - n = len(x_t1) - midpt_ix = int(np.floor(n / 2)) - data['midpoints_x_atc'][beam][xi] = x_t1[midpt_ix] - data['midpoints_lats'][beam][xi] = lats_t1[midpt_ix] - data['midpoints_lons'][beam][xi] = lons_t1[midpt_ix] - data['midpoints_seg_ids'][beam][xi] = seg_ids_t1[midpt_ix] - - ### Determine number of nans in each data chunk - n_nans1 = np.sum(np.isnan(data['h_li'][cycle1][beam][ix_x1:ix_x2])) - n_nans2 = np.sum(np.isnan(data['h_li'][cycle2][beam][ix_x3:ix_x4])) - - ### Only process if there are fewer than 10% nans in either data chunk: - if not (n_nans1 / len(h_li1) <= max_percent_nans / 100) and (n_nans2 / len(h_li2) <= max_percent_nans / 100): - ### If there are too many nans, just save a nan - data['velocities'][beam][xi] = np.nan - data['correlations'][beam][xi] = np.nan - data['lags'][beam][xi] = np.nan - else: - # Detrend both chunks of data - h_li1 = detrend(h_li1, type='linear') - h_li2 = detrend(h_li2, type='linear') - - ### Correlate the old and new data - # We made the second data vector longer than the first, so the valid method returns values - corr = correlate(h_li1, h_li2, mode='valid', method='direct') - - ### Normalize correlation function by autocorrelations - # Normalizing coefficient changes with each step along track; this section determines a changing along track normalizing coefficiant - coeff_a_val = np.sum(h_li1 ** 2) - coeff_b_val = np.zeros(len(h_li2) - len(h_li1) + 1) - for shift in range(len(h_li2) - len(h_li1) + 
1): - h_li2_section = h_li2[shift:shift + len(h_li1)] - coeff_b_val[shift] = np.sum(h_li2_section ** 2) - norm_vec = np.sqrt(coeff_a_val * coeff_b_val) - corr_normed = corr / np.flip( - norm_vec) # i don't really understand why this has to flip, but otherwise it yields correlation values above 1... - # TASK: figure out why the flip is needed - - ### Create a vector of lags for the correlation function - lagvec = np.arange(- int(np.round(search_width / dx)), int(search_width / dx) + 1, 1) # for mode = 'valid' - - ### Convert lag to distance - shift_vec = lagvec * dx - - ### ID peak correlation coefficient - ix_peak = np.arange(len(corr_normed))[corr_normed == np.nanmax(corr_normed)][0] - - ### Save correlation coefficient, best lag, velocity, etc at the location of peak correlation coefficient - data['velocities'][beam][xi] = shift_vec[ix_peak] / (dt / 365) - data['correlations'][beam][xi] = corr_normed[ix_peak] - data['lags'][beam][xi] = lagvec[ix_peak] + ### Cut out data: small chunk of data at time t1 (first cycle) + npts_short = int( np.round(segment_length / dx)) # how many data points in the later, shorter vector to correlate + npts_extra_long = int(np.round(search_width / dx)) # how many extra datapoints before/after in the earlier, longer vector to correlate + + ix_x1 = np.arange(len(data['x_atc'][cycle1][beam]))[data['x_atc'][cycle1][beam] >= x1][ + 0] # Index of first point that is greater than x1 + ix_x2 = ix_x1 + npts_short # ix_x1 + number of datapoints within the desired segment length + x_t1 = x_full_t1[ix_x1:ix_x2] # cut out x_atc values, first cycle + lats_t1 = data['latitude'][cycle1][beam][ix_x1:ix_x2] # cut out latitude values, first cycle + lons_t1 = data['longitude'][cycle1][beam][ix_x1:ix_x2] # cut out longitude values, first cycle + seg_ids_t1 = data['segment_ids'][cycle1][beam][ix_x1:ix_x2] # cut out segment_ids, first cycle + h_li1 = data['h_li_diff'][cycle1][beam][ + ix_x1 - 1:ix_x2 - 1] # cut out land ice height values, first cycle; start 1 index earlier because + # the h_li_diff data are differentiated, and therefore one sample shorter + + ### Cut out data: wider chunk of data at time t2 (second cycle) + ix_x3 = ix_x1 - npts_extra_long # extend on earlier end by number of indices in search_width + ix_x4 = ix_x2 + npts_extra_long # extend on later end by number of indices in search_width + h_li2 = data['h_li_diff'][cycle2][beam][ + ix_x3 - 1:ix_x4 - 1] # cut out land ice height values, second cycle; start 1 index earlier because + # the h_li_diff data are differentiated, and therefore one sample shorter + + # Find segment midpoints; this is the position where we will assign the velocity measurement from each window + n = len(x_t1) + midpt_ix = int(np.floor(n / 2)) + data['midpoints_x_atc'][beam][xi] = x_t1[midpt_ix] + data['midpoints_lats'][beam][xi] = lats_t1[midpt_ix] + data['midpoints_lons'][beam][xi] = lons_t1[midpt_ix] + data['midpoints_seg_ids'][beam][xi] = seg_ids_t1[midpt_ix] + + ### Determine number of nans in each data chunk + n_nans1 = np.sum(np.isnan(data['h_li'][cycle1][beam][ix_x1:ix_x2])) + n_nans2 = np.sum(np.isnan(data['h_li'][cycle2][beam][ix_x3:ix_x4])) + + ### Only process if there are fewer than 10% nans in either data chunk: + if not (n_nans1 / len(h_li1) <= max_percent_nans / 100) and (n_nans2 / len(h_li2) <= max_percent_nans / 100): + ### If there are too many nans, just save a nan + data['velocities'][beam][xi] = np.nan + data['correlations'][beam][xi] = np.nan + data['lags'][beam][xi] = np.nan + else: + # Detrend both chunks of data + 
h_li1 = detrend(h_li1, type='linear') + h_li2 = detrend(h_li2, type='linear') + + ### Correlate the old and new data + # We made the second data vector longer than the first, so the valid method returns values + corr = correlate(h_li1, h_li2, mode='valid', method='direct') + + ### Normalize correlation function by autocorrelations + # Normalizing coefficient changes with each step along track; this section determines a changing along track normalizing coefficiant + coeff_a_val = np.sum(h_li1 ** 2) + coeff_b_val = np.zeros(len(h_li2) - len(h_li1) + 1) + for shift in range(len(h_li2) - len(h_li1) + 1): + h_li2_section = h_li2[shift:shift + len(h_li1)] + coeff_b_val[shift] = np.sum(h_li2_section ** 2) + norm_vec = np.sqrt(coeff_a_val * coeff_b_val) + corr_normed = corr / np.flip( + norm_vec) # i don't really understand why this has to flip, but otherwise it yields correlation values above 1... + # TASK: figure out why the flip is needed + + ### Create a vector of lags for the correlation function + lagvec = np.arange(- int(np.round(search_width / dx)), int(search_width / dx) + 1, 1) # for mode = 'valid' + + ### Convert lag to distance + shift_vec = lagvec * dx + + ### ID peak correlation coefficient + ix_peak = np.arange(len(corr_normed))[corr_normed == np.nanmax(corr_normed)][0] + + ### Save correlation coefficient, best lag, velocity, etc at the location of peak correlation coefficient + data['velocities'][beam][xi] = shift_vec[ix_peak] / (dt / 365) + data['correlations'][beam][xi] = corr_normed[ix_peak] + data['lags'][beam][xi] = lagvec[ix_peak] + + if return_correlation_function: # will only work for one at a time; cannot return all at once + data['correlation_functions'] = {} + data['correlation_functions'][beam] = {} + data['correlation_functions'][beam][xi] = {} + data['correlation_functions'][beam][xi]['shift_vec'] = shift_vec + data['correlation_functions'][beam][xi]['lag_vec'] = lagvec + data['correlation_functions'][beam][xi]['correlation_function'] = corr_normed + return data + diff --git a/IS2_velocity/plotting.py b/IS2_velocity/plotting.py index 5b3289c..9f71c62 100644 --- a/IS2_velocity/plotting.py +++ b/IS2_velocity/plotting.py @@ -3,13 +3,14 @@ import glob, pyproj, os, h5py import numpy as np +from IS2_velocity.correlation_processing import determine_x1s, correlate_single_x1, time_diff try: import pointCollection as pc except: print('Continuing without pointCollection.') import matplotlib.pyplot as plt -def plot_measures_along_track_comparison(rgt, beams, out_path, correlation_threshold, spatial_extent, plot_out_location, map_data_root, +def plot_measures_along_track_comparison(rgt, beams, data_path, correlation_threshold, spatial_extent, plot_out_location, map_data_root, velocity_number, epsg, close=False): """ @@ -27,7 +28,8 @@ def plot_measures_along_track_comparison(rgt, beams, out_path, correlation_thres # plot_out_location is where to save the plot # map_data_root is where the map data are stored, specifically must contain moa_2009_1km.tif for this specific code to work - file = out_path + 'rgt' + rgt + '.hdf5' + file = glob.glob(data_path + '*_rgt' + rgt + '.hdf5')[0] + # < eventually, make velocity number two digits # '_veloc' + str(velocity_number).zfill(2) + @@ -82,14 +84,16 @@ def plot_measures_along_track_comparison(rgt, beams, out_path, correlation_thres hax0.set_ylim(-800, 800) c = plt.colorbar(h0, ax=hax0) - c.set_label('Correlation coefficient (0 -> 1)') + c.set_label('Correlation\ncoefficient\n(-1 -> 1)') try: h2 = haxMOA.scatter(xy[0][ixs0], xy[1][ixs0], 
0.02, 'k') h3 = haxMOA.scatter(xy[0][ixs], xy[1][ixs], 0.15, velocs[ixs], vmin=-800, vmax=800, cmap='plasma') - c = plt.colorbar(h3, ax=haxMOA) - c.set_label('Along-track velocity (m/yr)') + if ib == 0: + c = plt.colorbar(h3, ax=haxMOA) + c.set_label('Along-track velocity (m/yr)') except: + print('error') pass outfile = plot_out_location + 'rgt' + rgt + '.' + beam + '_vs_measures_veloc' + str(velocity_number).zfill( @@ -97,3 +101,133 @@ def plot_measures_along_track_comparison(rgt, beams, out_path, correlation_thres plt.savefig(outfile, dpi=200) if close == True: plt.close('all') + +def plot_correlation_function(x1, data, cycle1, cycle2, beam, search_width=1000, segment_length=2000, along_track_step=100, + max_percent_nans=10, dx=20): + r""" Re-calculate correlation function for a particular along-track starting place. + Useful for visualizing quality of correlation functions + + Parameters + ------ + :param data: + Dictionary containing loaded hdf5 data and correlation processing results + :param cycle1: + Name of earlier cycle to compare + :param cycle2: + Name of later cycle to compare + :param beam: + Name of specific beam to compare + :param xi: + Along-track window number + + Output + ------ + fig + """ + + ### This function reproduces the correlation processing, but only for a single value of along-track distance + + ### Determine x1s, the along-track distances at which to start each window + x1s = determine_x1s(data, cycle1, cycle2, beam, search_width=search_width, segment_length=segment_length, + along_track_step=along_track_step, dx=dx) + + ### Determine xi, the index of the current along track distance in the x1s vector + xi = np.where(x1s == x1)[0][0] + + ### Correlate for that single value of x1 + data_tmp = correlate_single_x1(x1, data, cycle1, cycle2, beam, search_width=search_width, segment_length=segment_length, + max_percent_nans=max_percent_nans, dx=dx, return_correlation_function=True) + + + ### Cut out data; code taken from correlate_single_x1s + npts_short = int( np.round(segment_length / dx)) # how many data points in the later, shorter vector to correlate + npts_extra_long = int(np.round(search_width / dx)) # how many extra datapoints before/after in the earlier, longer vector to correlate + + # x_full_t1 = data['x_atc'][cycle1][beam] + + ### Cut out data, t1 (first cycle) + ix_x1 = np.arange(len(data['x_atc'][cycle1][beam]))[data['x_atc'][cycle1][beam] >= x1][ + 0] # Index of first point that is greater than x1 + ix_x2 = ix_x1 + npts_short # ix_x1 + number of datapoints within the desired segment length + # x_t1 = x_full_t1[ix_x1:ix_x2] # cut out x_atc values, first cycle + # lats_t1 = data['latitude'][cycle1][beam][ix_x1:ix_x2] # cut out latitude values, first cycle + # lons_t1 = data['longitude'][cycle1][beam][ix_x1:ix_x2] # cut out longitude values, first cycle + # seg_ids_t1 = data['segment_ids'][cycle1][beam][ix_x1:ix_x2] # cut out segment_ids, first cycle + x_atc1 = data['x_atc'][cycle1][beam][ix_x1:ix_x2] + h_li1 = data['h_li'][cycle1][beam][ix_x1:ix_x2] + h_li_diff1 = data['h_li_diff'][cycle1][beam][ ix_x1 - 1:ix_x2 - 1] # cut out land ice height values, first cycle; start 1 index earlier because + # the h_li_diff data are differentiated, and therefore one sample shorter + + ### Cut out data: wider chunk of data at time t2 (second cycle) + ix_x3 = ix_x1 - npts_extra_long # extend on earlier end by number of indices in search_width + ix_x4 = ix_x2 + npts_extra_long # extend on later end by number of indices in search_width + x_atc2 = 
data['x_atc'][cycle1][beam][ix_x3:ix_x4] + h_li2 = data['h_li'][cycle2][beam][ix_x3:ix_x4] # cut out land ice height values, second cycle; start 1 index earlier because + h_li_diff2 = data['h_li_diff'][cycle2][beam][ ix_x3 - 1:ix_x4 - 1] # cut out land ice height values, second cycle; start 1 index earlier because + # the h_li_diff data are differentiated, and therefore one sample shorter + + veloc = data_tmp['velocities'][beam][xi] + correlation_value = data_tmp['correlations'][beam][xi] + best_lag = data_tmp['lags'][beam][xi] + + shift_vec = data['correlation_functions'][beam][xi]['shift_vec'] # units of meters + correlation_function = data['correlation_functions'][beam][xi]['correlation_function'] + + ### Create figure + fig = plt.figure(figsize=[11, 8]) + grid = plt.GridSpec(5, 1, wspace=0.4, hspace=0.3) + + ### Plot raw data for this comparison + ax0 = fig.add_subplot(grid[0, 0]) + cycle2_plot, = ax0.plot(x_atc2 - x_atc1[0], h_li2, 'r') + cycle1_plot, = ax0.plot(x_atc1 - x_atc1[0], h_li1, 'k') + ax0.legend((cycle1_plot, cycle2_plot), (cycle1, cycle2), loc='upper right') + ax0.set_ylabel('Ice Surface\nElevation (m)') + ax0.set_xlim(np.min(x_atc2 - x_atc1[0]), np.max(x_atc2 - x_atc1[0])) + ax0.text(x_atc2[3] - x_atc1[0], np.max(h_li2)-0.25, 'Raw data') + + ### Plot differentiated data for this comparison + ax1 = fig.add_subplot(grid[1, 0]) + ax1.plot(x_atc2 - x_atc1[0], h_li_diff2, 'r') + ax1.plot(x_atc1 - x_atc1[0], h_li_diff1, 'k') + ax1.set_ylabel('Differentiated\nElevation') + ax1.text(x_atc2[3] - x_atc1[0], 0.8*np.max(h_li_diff2), 'Raw data, differentiated') + ax1.set_xlim(np.min(x_atc2 - x_atc1[0]), np.max(x_atc2 - x_atc1[0])) + + + ### Plot shifted data for this comparison + ax2 = fig.add_subplot(grid[2, 0]) + ax2.plot(x_atc2 - x_atc1[0], h_li_diff2, 'r') + ax2.plot(x_atc1 - x_atc1[0] - best_lag * dx, h_li_diff1, + 'k') # best_lag * dx in case best_lag is sub-sample and can't offset by index + ax2.set_xlabel('Meters along track') + ax2.set_ylabel('Differentiated\nElevation') + ax2.text(x_atc2[3] - x_atc1[0], 0.8*np.max(h_li_diff2), 'Shifted by best lag') + ax2.set_xlim(np.min(x_atc2 - x_atc1[0]), np.max(x_atc2 - x_atc1[0])) + + ### Plot correlation function for this comparison + ax3 = fig.add_subplot(grid[4, 0]) + corr_vec_plot, = ax3.plot(shift_vec, correlation_function, 'k') + ax3.set_xlabel('Shift (meters)') + ax3.set_ylabel('Correlation coefficient\n(-1 to 1)') + ax3.set_xlim(np.min(shift_vec), np.max(shift_vec)) + + ax3.legend(([corr_vec_plot]), (['Correlation\nFunction']), loc='upper right') + + lag_ix = int(search_width / dx + best_lag) + ax3.plot(shift_vec[lag_ix], correlation_value, 'ro', markerfacecolor='none') + + dt = time_diff(data_tmp, cycle1, cycle2, beam) + + info = 'Peak correlation value of ' + str(np.round(correlation_value,3)) + '\n' \ + 'Peak correlation value at shift of ' + str(shift_vec[lag_ix]) +\ + ' m\nCorresponding Velocity = ' + str(np.round(veloc,1)) + ' m/yr' + ax3.set_title(info) + + info2 = 'x1 = ' + str(np.round(x1,3)) + '; xi = ' + str(xi) + \ + '\nCycles: ' + ','.join([cycle1, cycle2]) + \ + '\nBeam: ' + beam + fig.suptitle(info2) + + return fig + diff --git a/notebooks/Introduction_Tutorial.ipynb b/notebooks/Introduction_Tutorial.ipynb index 491dc45..5b8610d 100644 --- a/notebooks/Introduction_Tutorial.ipynb +++ b/notebooks/Introduction_Tutorial.ipynb @@ -15,7 +15,31 @@ }, { "cell_type": "code", - "execution_count": null, + "execution_count": 1, + "metadata": {}, + "outputs": [ + { + "name": "stdout", + "output_type": "stream", + "text": [ + 
"['/Users/grace/Dropbox/Cornell/projects/003/git_repo/notebooks', '/Users/grace/anaconda3/envs/icesat2020/lib/python37.zip', '/Users/grace/anaconda3/envs/icesat2020/lib/python3.7', '/Users/grace/anaconda3/envs/icesat2020/lib/python3.7/lib-dynload', '', '/Users/grace/.local/lib/python3.7/site-packages', '/Users/grace/anaconda3/envs/icesat2020/lib/python3.7/site-packages', '/Users/grace/Dropbox/Cornell/projects/003/icepyx_20200722/icepyx', '/Users/grace/anaconda3/envs/icesat2020/lib/python3.7/site-packages/IS2_velocity-0.0.1-py3.7.egg', '/Users/grace/anaconda3/envs/icesat2020/lib/python3.7/site-packages/cligj-0.7.0-py3.7.egg', '/Users/grace/anaconda3/envs/icesat2020/lib/python3.7/site-packages/appdirs-1.4.4-py3.7.egg', '/Users/grace/anaconda3/envs/icesat2020/lib/python3.7/site-packages/IPython/extensions', '/Users/grace/.ipython']\n", + "['/Users/grace/Dropbox/Cornell/projects/003/git_repo', '/Users/grace/Dropbox/Cornell/projects/003/git_repo/notebooks', '/Users/grace/anaconda3/envs/icesat2020/lib/python37.zip', '/Users/grace/anaconda3/envs/icesat2020/lib/python3.7', '/Users/grace/anaconda3/envs/icesat2020/lib/python3.7/lib-dynload', '', '/Users/grace/.local/lib/python3.7/site-packages', '/Users/grace/anaconda3/envs/icesat2020/lib/python3.7/site-packages', '/Users/grace/Dropbox/Cornell/projects/003/icepyx_20200722/icepyx', '/Users/grace/anaconda3/envs/icesat2020/lib/python3.7/site-packages/IS2_velocity-0.0.1-py3.7.egg', '/Users/grace/anaconda3/envs/icesat2020/lib/python3.7/site-packages/cligj-0.7.0-py3.7.egg', '/Users/grace/anaconda3/envs/icesat2020/lib/python3.7/site-packages/appdirs-1.4.4-py3.7.egg', '/Users/grace/anaconda3/envs/icesat2020/lib/python3.7/site-packages/IPython/extensions', '/Users/grace/.ipython']\n" + ] + } + ], + "source": [ + "# This cell: put the versions of codes I'm working on first in path\n", + "import sys\n", + "print(sys.path)\n", + "working_module_directory = '/Users/grace/Dropbox/Cornell/projects/003/git_repo'\n", + "if not sys.path[0] == working_module_directory:\n", + " sys.path.insert(0, working_module_directory)\n", + "print(sys.path)" + ] + }, + { + "cell_type": "code", + "execution_count": 2, "metadata": {}, "outputs": [], "source": [ @@ -41,7 +65,7 @@ }, { "cell_type": "code", - "execution_count": null, + "execution_count": 3, "metadata": {}, "outputs": [], "source": [ @@ -58,9 +82,17 @@ }, { "cell_type": "code", - "execution_count": null, + "execution_count": 4, "metadata": {}, - "outputs": [], + "outputs": [ + { + "name": "stdout", + "output_type": "stream", + "text": [ + "Loaded data from cycles: 03,04 for rgt: 0848\n" + ] + } + ], "source": [ "from IS2_velocity.readers_atl06 import load_data_by_rgt\n", "\n", @@ -85,11 +117,801 @@ }, { "cell_type": "code", - "execution_count": null, + "execution_count": 5, "metadata": { "scrolled": false }, - "outputs": [], + "outputs": [ + { + "data": { + "application/javascript": [ + "/* Put everything inside the global mpl namespace */\n", + "window.mpl = {};\n", + "\n", + "\n", + "mpl.get_websocket_type = function() {\n", + " if (typeof(WebSocket) !== 'undefined') {\n", + " return WebSocket;\n", + " } else if (typeof(MozWebSocket) !== 'undefined') {\n", + " return MozWebSocket;\n", + " } else {\n", + " alert('Your browser does not have WebSocket support. ' +\n", + " 'Please try Chrome, Safari or Firefox ≥ 6. 
' +\n", + " 'Firefox 4 and 5 are also supported but you ' +\n", + " 'have to enable WebSockets in about:config.');\n", + " };\n", + "}\n", + "\n", + "mpl.figure = function(figure_id, websocket, ondownload, parent_element) {\n", + " this.id = figure_id;\n", + "\n", + " this.ws = websocket;\n", + "\n", + " this.supports_binary = (this.ws.binaryType != undefined);\n", + "\n", + " if (!this.supports_binary) {\n", + " var warnings = document.getElementById(\"mpl-warnings\");\n", + " if (warnings) {\n", + " warnings.style.display = 'block';\n", + " warnings.textContent = (\n", + " \"This browser does not support binary websocket messages. \" +\n", + " \"Performance may be slow.\");\n", + " }\n", + " }\n", + "\n", + " this.imageObj = new Image();\n", + "\n", + " this.context = undefined;\n", + " this.message = undefined;\n", + " this.canvas = undefined;\n", + " this.rubberband_canvas = undefined;\n", + " this.rubberband_context = undefined;\n", + " this.format_dropdown = undefined;\n", + "\n", + " this.image_mode = 'full';\n", + "\n", + " this.root = $('
');\n", + " this._root_extra_style(this.root)\n", + " this.root.attr('style', 'display: inline-block');\n", + "\n", + " $(parent_element).append(this.root);\n", + "\n", + " this._init_header(this);\n", + " this._init_canvas(this);\n", + " this._init_toolbar(this);\n", + "\n", + " var fig = this;\n", + "\n", + " this.waiting = false;\n", + "\n", + " this.ws.onopen = function () {\n", + " fig.send_message(\"supports_binary\", {value: fig.supports_binary});\n", + " fig.send_message(\"send_image_mode\", {});\n", + " if (mpl.ratio != 1) {\n", + " fig.send_message(\"set_dpi_ratio\", {'dpi_ratio': mpl.ratio});\n", + " }\n", + " fig.send_message(\"refresh\", {});\n", + " }\n", + "\n", + " this.imageObj.onload = function() {\n", + " if (fig.image_mode == 'full') {\n", + " // Full images could contain transparency (where diff images\n", + " // almost always do), so we need to clear the canvas so that\n", + " // there is no ghosting.\n", + " fig.context.clearRect(0, 0, fig.canvas.width, fig.canvas.height);\n", + " }\n", + " fig.context.drawImage(fig.imageObj, 0, 0);\n", + " };\n", + "\n", + " this.imageObj.onunload = function() {\n", + " fig.ws.close();\n", + " }\n", + "\n", + " this.ws.onmessage = this._make_on_message_function(this);\n", + "\n", + " this.ondownload = ondownload;\n", + "}\n", + "\n", + "mpl.figure.prototype._init_header = function() {\n", + " var titlebar = $(\n", + " '
');\n", + " var titletext = $(\n", + " '
');\n", + " titlebar.append(titletext)\n", + " this.root.append(titlebar);\n", + " this.header = titletext[0];\n", + "}\n", + "\n", + "\n", + "\n", + "mpl.figure.prototype._canvas_extra_style = function(canvas_div) {\n", + "\n", + "}\n", + "\n", + "\n", + "mpl.figure.prototype._root_extra_style = function(canvas_div) {\n", + "\n", + "}\n", + "\n", + "mpl.figure.prototype._init_canvas = function() {\n", + " var fig = this;\n", + "\n", + " var canvas_div = $('
');\n",
+     <remainder of this cell's committed matplotlib nbagg output omitted: interactive-backend JavaScript boilerplate plus the usual "text/html"/"text/plain" display_data fallbacks>
+    ],
    "source": [
     "from matplotlib.gridspec import GridSpec\n",
     "\n",
     "beam = 'gt1r'\n",
-    "x1 = data['x_atc_full'][cycle1][beam]\n",
-    "x2 = data['x_atc_full'][cycle2][beam]\n",
-    "h1 = data['h_full'][cycle1][beam]\n",
-    "h2 = data['h_full'][cycle2][beam]\n",
+    "x1 = data['x_atc'][cycle1][beam]\n",
+    "x2 = data['x_atc'][cycle2][beam]\n",
+    "h1 = data['h_li'][cycle1][beam]\n",
+    "h2 = data['h_li'][cycle2][beam]\n",
     "dh1 = data['h_li_diff'][cycle1][beam]\n",
     "dh2 = data['h_li_diff'][cycle2][beam]\n",
     "vel_xs = data['midpoints_x_atc'][beam]\n",
     "velocs = data['velocities'][beam]\n",
+    "correlation_vals = data['correlations'][beam]\n",
+    "\n",
     "\n",
     "plt.figure(figsize=(8,4))\n",
     "gs = GridSpec(2,2)\n",
@@ -234,9 +1848,11 @@
     "\n",
     "# Plot the calculated velocities along track\n",
     "ax5 = plt.subplot(gs[0,1])\n",
-    "plt.plot(vel_xs/1000.-29000,velocs,'.',c='k',label='ATL06')\n",
+    "# plt.plot(vel_xs/1000.-29000,velocs,'.',c='k',label='ATL06')\n",
+    "plt.scatter(vel_xs/1000.-29000,velocs,s=2,marker='.',c=correlation_vals,label='ATL06')\n",
     "plt.ylabel('Velocity (m/yr)')\n",
     "plt.xlabel('Along-Track Distance (km)')\n",
+    "plt.colorbar(label='Correlation coefficient')\n",
     "plt.xlim(80,580)\n",
     "plt.ylim(-500,1500)\n",
     "\n",
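The change above colors each along-track velocity point by its correlation coefficient, which effectively attaches a quality flag to every estimate. As a minimal sketch of how that flag might be applied downstream of this cell (assuming `velocs` and `correlation_vals` are the arrays defined above; the 0.65 cutoff is an illustrative assumption, not a value set anywhere in this patch):

```python
import numpy as np

# Illustrative quality cut: keep only velocity estimates whose peak
# normalized correlation exceeds a hypothetical threshold of 0.65.
correlation_threshold = 0.65  # assumed value, for illustration only
velocs_arr = np.asarray(velocs, dtype=float)
corr_arr = np.asarray(correlation_vals, dtype=float)

good = corr_arr > correlation_threshold
velocs_masked = np.where(good, velocs_arr, np.nan)  # NaN out low-confidence points

print(f"{int(good.sum())} of {good.size} velocity estimates pass the cut")
```

Masking with NaN rather than dropping points keeps the arrays aligned with `vel_xs`, so the same x-axis can still be used for plotting.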
@@ -245,29 +1861,2458 @@
   },
   {
    "cell_type": "code",
-    "execution_count": null,
+    "execution_count": 10,
    "metadata": {},
-    "outputs": [],
+    "outputs": [
+     <matplotlib nbagg JavaScript/HTML figure output omitted: roughly 2,400 lines of interactive-backend boilerplate committed with this executed cell>
+    ],
-    "outputs": [
-     <a previously committed matplotlib nbagg output of the same form, removed here and likewise omitted>
-    ],
+    "outputs": [],
    "source": [
     "# Plot the landice elevation along the pass.\n",
     "\n",
@@ -938,7 +131,7 @@
   },
   {
    "cell_type": "code",
-    "execution_count": 6,
+    "execution_count": null,
    "metadata": {},
    "outputs": [],
    "source": [
@@ -959,7 +152,7 @@
   },
   {
    "cell_type": "code",
-    "execution_count": 7,
+    "execution_count": null,
    "metadata": {},
    "outputs": [],
    "source": [
@@ -982,7 +175,7 @@
   },
   {
    "cell_type": "code",
-    "execution_count": 8,
+    "execution_count": null,
    "metadata": {},
    "outputs": [],
    "source": [
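The hunks above and below reset `execution_count` to `null` and empty the committed `outputs` arrays cell by cell; doing this by hand is error-prone. A small sketch of how the same cleanup could be scripted with `nbformat` (the notebook path is the one from this series; the snippet is an illustration, not part of the patch):

```python
import nbformat

# Sketch: strip executed outputs and execution counts so that notebook
# diffs contain only source changes, not megabytes of nbagg JavaScript.
path = "notebooks/Uncertainty_Analysis_Tutorial.ipynb"
nb = nbformat.read(path, as_version=4)
for cell in nb.cells:
    if cell.cell_type == "code":
        cell.outputs = []            # drop figure/JS/stdout outputs
        cell.execution_count = None  # reset the execution counter
nbformat.write(nb, path)
```

Running something like this (or `jupyter nbconvert --clear-output`) before committing would have avoided the large output blobs that dominate this diff.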
+ "execution_count": null, "metadata": { "scrolled": false }, - "outputs": [ - { - "data": { - "application/javascript": [ - "/* Put everything inside the global mpl namespace */\n", - "window.mpl = {};\n", - "\n", - "\n", - "mpl.get_websocket_type = function() {\n", - " if (typeof(WebSocket) !== 'undefined') {\n", - " return WebSocket;\n", - " } else if (typeof(MozWebSocket) !== 'undefined') {\n", - " return MozWebSocket;\n", - " } else {\n", - " alert('Your browser does not have WebSocket support. ' +\n", - " 'Please try Chrome, Safari or Firefox ≥ 6. ' +\n", - " 'Firefox 4 and 5 are also supported but you ' +\n", - " 'have to enable WebSockets in about:config.');\n", - " };\n", - "}\n", - "\n", - "mpl.figure = function(figure_id, websocket, ondownload, parent_element) {\n", - " this.id = figure_id;\n", - "\n", - " this.ws = websocket;\n", - "\n", - " this.supports_binary = (this.ws.binaryType != undefined);\n", - "\n", - " if (!this.supports_binary) {\n", - " var warnings = document.getElementById(\"mpl-warnings\");\n", - " if (warnings) {\n", - " warnings.style.display = 'block';\n", - " warnings.textContent = (\n", - " \"This browser does not support binary websocket messages. \" +\n", - " \"Performance may be slow.\");\n", - " }\n", - " }\n", - "\n", - " this.imageObj = new Image();\n", - "\n", - " this.context = undefined;\n", - " this.message = undefined;\n", - " this.canvas = undefined;\n", - " this.rubberband_canvas = undefined;\n", - " this.rubberband_context = undefined;\n", - " this.format_dropdown = undefined;\n", - "\n", - " this.image_mode = 'full';\n", - "\n", - " this.root = $('
');\n", - " this._root_extra_style(this.root)\n", - " this.root.attr('style', 'display: inline-block');\n", - "\n", - " $(parent_element).append(this.root);\n", - "\n", - " this._init_header(this);\n", - " this._init_canvas(this);\n", - " this._init_toolbar(this);\n", - "\n", - " var fig = this;\n", - "\n", - " this.waiting = false;\n", - "\n", - " this.ws.onopen = function () {\n", - " fig.send_message(\"supports_binary\", {value: fig.supports_binary});\n", - " fig.send_message(\"send_image_mode\", {});\n", - " if (mpl.ratio != 1) {\n", - " fig.send_message(\"set_dpi_ratio\", {'dpi_ratio': mpl.ratio});\n", - " }\n", - " fig.send_message(\"refresh\", {});\n", - " }\n", - "\n", - " this.imageObj.onload = function() {\n", - " if (fig.image_mode == 'full') {\n", - " // Full images could contain transparency (where diff images\n", - " // almost always do), so we need to clear the canvas so that\n", - " // there is no ghosting.\n", - " fig.context.clearRect(0, 0, fig.canvas.width, fig.canvas.height);\n", - " }\n", - " fig.context.drawImage(fig.imageObj, 0, 0);\n", - " };\n", - "\n", - " this.imageObj.onunload = function() {\n", - " fig.ws.close();\n", - " }\n", - "\n", - " this.ws.onmessage = this._make_on_message_function(this);\n", - "\n", - " this.ondownload = ondownload;\n", - "}\n", - "\n", - "mpl.figure.prototype._init_header = function() {\n", - " var titlebar = $(\n", - " '
');\n", - " var titletext = $(\n", - " '
');\n", - " titlebar.append(titletext)\n", - " this.root.append(titlebar);\n", - " this.header = titletext[0];\n", - "}\n", - "\n", - "\n", - "\n", - "mpl.figure.prototype._canvas_extra_style = function(canvas_div) {\n", - "\n", - "}\n", - "\n", - "\n", - "mpl.figure.prototype._root_extra_style = function(canvas_div) {\n", - "\n", - "}\n", - "\n", - "mpl.figure.prototype._init_canvas = function() {\n", - " var fig = this;\n", - "\n", - " var canvas_div = $('
');\n", - "\n", - " canvas_div.attr('style', 'position: relative; clear: both; outline: 0');\n", - "\n", - " function canvas_keyboard_event(event) {\n", - " return fig.key_event(event, event['data']);\n", - " }\n", - "\n", - " canvas_div.keydown('key_press', canvas_keyboard_event);\n", - " canvas_div.keyup('key_release', canvas_keyboard_event);\n", - " this.canvas_div = canvas_div\n", - " this._canvas_extra_style(canvas_div)\n", - " this.root.append(canvas_div);\n", - "\n", - " var canvas = $('');\n", - " canvas.addClass('mpl-canvas');\n", - " canvas.attr('style', \"left: 0; top: 0; z-index: 0; outline: 0\")\n", - "\n", - " this.canvas = canvas[0];\n", - " this.context = canvas[0].getContext(\"2d\");\n", - "\n", - " var backingStore = this.context.backingStorePixelRatio ||\n", - "\tthis.context.webkitBackingStorePixelRatio ||\n", - "\tthis.context.mozBackingStorePixelRatio ||\n", - "\tthis.context.msBackingStorePixelRatio ||\n", - "\tthis.context.oBackingStorePixelRatio ||\n", - "\tthis.context.backingStorePixelRatio || 1;\n", - "\n", - " mpl.ratio = (window.devicePixelRatio || 1) / backingStore;\n", - "\n", - " var rubberband = $('');\n", - " rubberband.attr('style', \"position: absolute; left: 0; top: 0; z-index: 1;\")\n", - "\n", - " var pass_mouse_events = true;\n", - "\n", - " canvas_div.resizable({\n", - " start: function(event, ui) {\n", - " pass_mouse_events = false;\n", - " },\n", - " resize: function(event, ui) {\n", - " fig.request_resize(ui.size.width, ui.size.height);\n", - " },\n", - " stop: function(event, ui) {\n", - " pass_mouse_events = true;\n", - " fig.request_resize(ui.size.width, ui.size.height);\n", - " },\n", - " });\n", - "\n", - " function mouse_event_fn(event) {\n", - " if (pass_mouse_events)\n", - " return fig.mouse_event(event, event['data']);\n", - " }\n", - "\n", - " rubberband.mousedown('button_press', mouse_event_fn);\n", - " rubberband.mouseup('button_release', mouse_event_fn);\n", - " // Throttle sequential mouse events to 1 every 20ms.\n", - " rubberband.mousemove('motion_notify', mouse_event_fn);\n", - "\n", - " rubberband.mouseenter('figure_enter', mouse_event_fn);\n", - " rubberband.mouseleave('figure_leave', mouse_event_fn);\n", - "\n", - " canvas_div.on(\"wheel\", function (event) {\n", - " event = event.originalEvent;\n", - " event['data'] = 'scroll'\n", - " if (event.deltaY < 0) {\n", - " event.step = 1;\n", - " } else {\n", - " event.step = -1;\n", - " }\n", - " mouse_event_fn(event);\n", - " });\n", - "\n", - " canvas_div.append(canvas);\n", - " canvas_div.append(rubberband);\n", - "\n", - " this.rubberband = rubberband;\n", - " this.rubberband_canvas = rubberband[0];\n", - " this.rubberband_context = rubberband[0].getContext(\"2d\");\n", - " this.rubberband_context.strokeStyle = \"#000000\";\n", - "\n", - " this._resize_canvas = function(width, height) {\n", - " // Keep the size of the canvas, canvas container, and rubber band\n", - " // canvas in synch.\n", - " canvas_div.css('width', width)\n", - " canvas_div.css('height', height)\n", - "\n", - " canvas.attr('width', width * mpl.ratio);\n", - " canvas.attr('height', height * mpl.ratio);\n", - " canvas.attr('style', 'width: ' + width + 'px; height: ' + height + 'px;');\n", - "\n", - " rubberband.attr('width', width);\n", - " rubberband.attr('height', height);\n", - " }\n", - "\n", - " // Set the figure to an initial 600x600px, this will subsequently be updated\n", - " // upon first draw.\n", - " this._resize_canvas(600, 600);\n", - "\n", - " // Disable right mouse context menu.\n", - " 
$(this.rubberband_canvas).bind(\"contextmenu\",function(e){\n", - " return false;\n", - " });\n", - "\n", - " function set_focus () {\n", - " canvas.focus();\n", - " canvas_div.focus();\n", - " }\n", - "\n", - " window.setTimeout(set_focus, 100);\n", - "}\n", - "\n", - "mpl.figure.prototype._init_toolbar = function() {\n", - " var fig = this;\n", - "\n", - " var nav_element = $('
');\n", - " nav_element.attr('style', 'width: 100%');\n", - " this.root.append(nav_element);\n", - "\n", - " // Define a callback function for later on.\n", - " function toolbar_event(event) {\n", - " return fig.toolbar_button_onclick(event['data']);\n", - " }\n", - " function toolbar_mouse_event(event) {\n", - " return fig.toolbar_button_onmouseover(event['data']);\n", - " }\n", - "\n", - " for(var toolbar_ind in mpl.toolbar_items) {\n", - " var name = mpl.toolbar_items[toolbar_ind][0];\n", - " var tooltip = mpl.toolbar_items[toolbar_ind][1];\n", - " var image = mpl.toolbar_items[toolbar_ind][2];\n", - " var method_name = mpl.toolbar_items[toolbar_ind][3];\n", - "\n", - " if (!name) {\n", - " // put a spacer in here.\n", - " continue;\n", - " }\n", - " var button = $('');\n", - " button.click(method_name, toolbar_event);\n", - " button.mouseover(tooltip, toolbar_mouse_event);\n", - " nav_element.append(button);\n", - " }\n", - "\n", - " // Add the status bar.\n", - " var status_bar = $('');\n", - " nav_element.append(status_bar);\n", - " this.message = status_bar[0];\n", - "\n", - " // Add the close button to the window.\n", - " var buttongrp = $('
');\n", - " var button = $('');\n", - " button.click(function (evt) { fig.handle_close(fig, {}); } );\n", - " button.mouseover('Stop Interaction', toolbar_mouse_event);\n", - " buttongrp.append(button);\n", - " var titlebar = this.root.find($('.ui-dialog-titlebar'));\n", - " titlebar.prepend(buttongrp);\n", - "}\n", - "\n", - "mpl.figure.prototype._root_extra_style = function(el){\n", - " var fig = this\n", - " el.on(\"remove\", function(){\n", - "\tfig.close_ws(fig, {});\n", - " });\n", - "}\n", - "\n", - "mpl.figure.prototype._canvas_extra_style = function(el){\n", - " // this is important to make the div 'focusable\n", - " el.attr('tabindex', 0)\n", - " // reach out to IPython and tell the keyboard manager to turn it's self\n", - " // off when our div gets focus\n", - "\n", - " // location in version 3\n", - " if (IPython.notebook.keyboard_manager) {\n", - " IPython.notebook.keyboard_manager.register_events(el);\n", - " }\n", - " else {\n", - " // location in version 2\n", - " IPython.keyboard_manager.register_events(el);\n", - " }\n", - "\n", - "}\n", - "\n", - "mpl.figure.prototype._key_event_extra = function(event, name) {\n", - " var manager = IPython.notebook.keyboard_manager;\n", - " if (!manager)\n", - " manager = IPython.keyboard_manager;\n", - "\n", - " // Check for shift+enter\n", - " if (event.shiftKey && event.which == 13) {\n", - " this.canvas_div.blur();\n", - " // select the cell after this one\n", - " var index = IPython.notebook.find_cell_index(this.cell_info[0]);\n", - " IPython.notebook.select(index + 1);\n", - " }\n", - "}\n", - "\n", - "mpl.figure.prototype.handle_save = function(fig, msg) {\n", - " fig.ondownload(fig, null);\n", - "}\n", - "\n", - "\n", - "mpl.find_output_cell = function(html_output) {\n", - " // Return the cell and output element which can be found *uniquely* in the notebook.\n", - " // Note - this is a bit hacky, but it is done because the \"notebook_saving.Notebook\"\n", - " // IPython event is triggered only after the cells have been serialised, which for\n", - " // our purposes (turning an active figure into a static one), is too late.\n", - " var cells = IPython.notebook.get_cells();\n", - " var ncells = cells.length;\n", - " for (var i=0; i= 3 moved mimebundle to data attribute of output\n", - " data = data.data;\n", - " }\n", - " if (data['text/html'] == html_output) {\n", - " return [cell, data, j];\n", - " }\n", - " }\n", - " }\n", - " }\n", - "}\n", - "\n", - "// Register the function which deals with the matplotlib target/channel.\n", - "// The kernel may be null if the page has been refreshed.\n", - "if (IPython.notebook.kernel != null) {\n", - " IPython.notebook.kernel.comm_manager.register_target('matplotlib', mpl.mpl_figure_comm);\n", - "}\n" - ], - "text/plain": [ - "" - ] - }, - "metadata": {}, - "output_type": "display_data" - }, - { - "data": { - "text/html": [ - "" - ], - "text/plain": [ - "" - ] - }, - "metadata": {}, - "output_type": "display_data" - }, - { - "name": "stdout", - "output_type": "stream", - "text": [ - "{'cmap': 'gray', 'clim': [14000, 17000], 'extent': array([-887950., -356950., 183825., 561825.]), 'origin': 'lower'}\n" - ] - } - ], + "outputs": [], "source": [ "from IS2_velocity.plotting import plot_measures_along_track_comparison\n", "\n", @@ -2685,1588 +291,9 @@ }, { "cell_type": "code", - "execution_count": 11, + "execution_count": null, "metadata": {}, - "outputs": [ - { - "data": { - "application/javascript": [ - "/* Put everything inside the global mpl namespace */\n", - "window.mpl = {};\n", - "\n", - 
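For context, the cell above imports the package's comparison plot against the MEaSUREs velocity map. Based on the function signature visible in the final patch of this series, a call might look like the following; every argument value here is a hypothetical placeholder chosen for illustration, not a value taken from the notebook:

```python
from IS2_velocity.plotting import plot_measures_along_track_comparison

# All values below are illustrative placeholders.
plot_measures_along_track_comparison(
    rgt='0848',                           # placeholder repeat ground track
    beams=['gt1l', 'gt1r'],
    data_path='../data/',
    correlation_threshold=0.65,           # mask low-confidence velocity estimates
    spatial_extent=[-65, -86, -55, -81],  # hypothetical lon/lat bounding box
    plot_out_location='../plots/',
    map_data_root='../data/measures/',
    velocity_number=0,
    flip_meas_sign=False,                 # keyword introduced in PATCH 17/17
    close=False)
```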
"\n", - "mpl.get_websocket_type = function() {\n", - " if (typeof(WebSocket) !== 'undefined') {\n", - " return WebSocket;\n", - " } else if (typeof(MozWebSocket) !== 'undefined') {\n", - " return MozWebSocket;\n", - " } else {\n", - " alert('Your browser does not have WebSocket support. ' +\n", - " 'Please try Chrome, Safari or Firefox ≥ 6. ' +\n", - " 'Firefox 4 and 5 are also supported but you ' +\n", - " 'have to enable WebSockets in about:config.');\n", - " };\n", - "}\n", - "\n", - "mpl.figure = function(figure_id, websocket, ondownload, parent_element) {\n", - " this.id = figure_id;\n", - "\n", - " this.ws = websocket;\n", - "\n", - " this.supports_binary = (this.ws.binaryType != undefined);\n", - "\n", - " if (!this.supports_binary) {\n", - " var warnings = document.getElementById(\"mpl-warnings\");\n", - " if (warnings) {\n", - " warnings.style.display = 'block';\n", - " warnings.textContent = (\n", - " \"This browser does not support binary websocket messages. \" +\n", - " \"Performance may be slow.\");\n", - " }\n", - " }\n", - "\n", - " this.imageObj = new Image();\n", - "\n", - " this.context = undefined;\n", - " this.message = undefined;\n", - " this.canvas = undefined;\n", - " this.rubberband_canvas = undefined;\n", - " this.rubberband_context = undefined;\n", - " this.format_dropdown = undefined;\n", - "\n", - " this.image_mode = 'full';\n", - "\n", - " this.root = $('
');\n", - " this._root_extra_style(this.root)\n", - " this.root.attr('style', 'display: inline-block');\n", - "\n", - " $(parent_element).append(this.root);\n", - "\n", - " this._init_header(this);\n", - " this._init_canvas(this);\n", - " this._init_toolbar(this);\n", - "\n", - " var fig = this;\n", - "\n", - " this.waiting = false;\n", - "\n", - " this.ws.onopen = function () {\n", - " fig.send_message(\"supports_binary\", {value: fig.supports_binary});\n", - " fig.send_message(\"send_image_mode\", {});\n", - " if (mpl.ratio != 1) {\n", - " fig.send_message(\"set_dpi_ratio\", {'dpi_ratio': mpl.ratio});\n", - " }\n", - " fig.send_message(\"refresh\", {});\n", - " }\n", - "\n", - " this.imageObj.onload = function() {\n", - " if (fig.image_mode == 'full') {\n", - " // Full images could contain transparency (where diff images\n", - " // almost always do), so we need to clear the canvas so that\n", - " // there is no ghosting.\n", - " fig.context.clearRect(0, 0, fig.canvas.width, fig.canvas.height);\n", - " }\n", - " fig.context.drawImage(fig.imageObj, 0, 0);\n", - " };\n", - "\n", - " this.imageObj.onunload = function() {\n", - " fig.ws.close();\n", - " }\n", - "\n", - " this.ws.onmessage = this._make_on_message_function(this);\n", - "\n", - " this.ondownload = ondownload;\n", - "}\n", - "\n", - "mpl.figure.prototype._init_header = function() {\n", - " var titlebar = $(\n", - " '
');\n", - " var titletext = $(\n", - " '
');\n", - " titlebar.append(titletext)\n", - " this.root.append(titlebar);\n", - " this.header = titletext[0];\n", - "}\n", - "\n", - "\n", - "\n", - "mpl.figure.prototype._canvas_extra_style = function(canvas_div) {\n", - "\n", - "}\n", - "\n", - "\n", - "mpl.figure.prototype._root_extra_style = function(canvas_div) {\n", - "\n", - "}\n", - "\n", - "mpl.figure.prototype._init_canvas = function() {\n", - " var fig = this;\n", - "\n", - " var canvas_div = $('
');\n", - "\n", - " canvas_div.attr('style', 'position: relative; clear: both; outline: 0');\n", - "\n", - " function canvas_keyboard_event(event) {\n", - " return fig.key_event(event, event['data']);\n", - " }\n", - "\n", - " canvas_div.keydown('key_press', canvas_keyboard_event);\n", - " canvas_div.keyup('key_release', canvas_keyboard_event);\n", - " this.canvas_div = canvas_div\n", - " this._canvas_extra_style(canvas_div)\n", - " this.root.append(canvas_div);\n", - "\n", - " var canvas = $('');\n", - " canvas.addClass('mpl-canvas');\n", - " canvas.attr('style', \"left: 0; top: 0; z-index: 0; outline: 0\")\n", - "\n", - " this.canvas = canvas[0];\n", - " this.context = canvas[0].getContext(\"2d\");\n", - "\n", - " var backingStore = this.context.backingStorePixelRatio ||\n", - "\tthis.context.webkitBackingStorePixelRatio ||\n", - "\tthis.context.mozBackingStorePixelRatio ||\n", - "\tthis.context.msBackingStorePixelRatio ||\n", - "\tthis.context.oBackingStorePixelRatio ||\n", - "\tthis.context.backingStorePixelRatio || 1;\n", - "\n", - " mpl.ratio = (window.devicePixelRatio || 1) / backingStore;\n", - "\n", - " var rubberband = $('');\n", - " rubberband.attr('style', \"position: absolute; left: 0; top: 0; z-index: 1;\")\n", - "\n", - " var pass_mouse_events = true;\n", - "\n", - " canvas_div.resizable({\n", - " start: function(event, ui) {\n", - " pass_mouse_events = false;\n", - " },\n", - " resize: function(event, ui) {\n", - " fig.request_resize(ui.size.width, ui.size.height);\n", - " },\n", - " stop: function(event, ui) {\n", - " pass_mouse_events = true;\n", - " fig.request_resize(ui.size.width, ui.size.height);\n", - " },\n", - " });\n", - "\n", - " function mouse_event_fn(event) {\n", - " if (pass_mouse_events)\n", - " return fig.mouse_event(event, event['data']);\n", - " }\n", - "\n", - " rubberband.mousedown('button_press', mouse_event_fn);\n", - " rubberband.mouseup('button_release', mouse_event_fn);\n", - " // Throttle sequential mouse events to 1 every 20ms.\n", - " rubberband.mousemove('motion_notify', mouse_event_fn);\n", - "\n", - " rubberband.mouseenter('figure_enter', mouse_event_fn);\n", - " rubberband.mouseleave('figure_leave', mouse_event_fn);\n", - "\n", - " canvas_div.on(\"wheel\", function (event) {\n", - " event = event.originalEvent;\n", - " event['data'] = 'scroll'\n", - " if (event.deltaY < 0) {\n", - " event.step = 1;\n", - " } else {\n", - " event.step = -1;\n", - " }\n", - " mouse_event_fn(event);\n", - " });\n", - "\n", - " canvas_div.append(canvas);\n", - " canvas_div.append(rubberband);\n", - "\n", - " this.rubberband = rubberband;\n", - " this.rubberband_canvas = rubberband[0];\n", - " this.rubberband_context = rubberband[0].getContext(\"2d\");\n", - " this.rubberband_context.strokeStyle = \"#000000\";\n", - "\n", - " this._resize_canvas = function(width, height) {\n", - " // Keep the size of the canvas, canvas container, and rubber band\n", - " // canvas in synch.\n", - " canvas_div.css('width', width)\n", - " canvas_div.css('height', height)\n", - "\n", - " canvas.attr('width', width * mpl.ratio);\n", - " canvas.attr('height', height * mpl.ratio);\n", - " canvas.attr('style', 'width: ' + width + 'px; height: ' + height + 'px;');\n", - "\n", - " rubberband.attr('width', width);\n", - " rubberband.attr('height', height);\n", - " }\n", - "\n", - " // Set the figure to an initial 600x600px, this will subsequently be updated\n", - " // upon first draw.\n", - " this._resize_canvas(600, 600);\n", - "\n", - " // Disable right mouse context menu.\n", - " 
$(this.rubberband_canvas).bind(\"contextmenu\",function(e){\n", - " return false;\n", - " });\n", - "\n", - " function set_focus () {\n", - " canvas.focus();\n", - " canvas_div.focus();\n", - " }\n", - "\n", - " window.setTimeout(set_focus, 100);\n", - "}\n", - "\n", - "mpl.figure.prototype._init_toolbar = function() {\n", - " var fig = this;\n", - "\n", - " var nav_element = $('
');\n", - " nav_element.attr('style', 'width: 100%');\n", - " this.root.append(nav_element);\n", - "\n", - " // Define a callback function for later on.\n", - " function toolbar_event(event) {\n", - " return fig.toolbar_button_onclick(event['data']);\n", - " }\n", - " function toolbar_mouse_event(event) {\n", - " return fig.toolbar_button_onmouseover(event['data']);\n", - " }\n", - "\n", - " for(var toolbar_ind in mpl.toolbar_items) {\n", - " var name = mpl.toolbar_items[toolbar_ind][0];\n", - " var tooltip = mpl.toolbar_items[toolbar_ind][1];\n", - " var image = mpl.toolbar_items[toolbar_ind][2];\n", - " var method_name = mpl.toolbar_items[toolbar_ind][3];\n", - "\n", - " if (!name) {\n", - " // put a spacer in here.\n", - " continue;\n", - " }\n", - " var button = $('');\n", - " button.click(method_name, toolbar_event);\n", - " button.mouseover(tooltip, toolbar_mouse_event);\n", - " nav_element.append(button);\n", - " }\n", - "\n", - " // Add the status bar.\n", - " var status_bar = $('');\n", - " nav_element.append(status_bar);\n", - " this.message = status_bar[0];\n", - "\n", - " // Add the close button to the window.\n", - " var buttongrp = $('
');\n", - " var button = $('');\n", - " button.click(function (evt) { fig.handle_close(fig, {}); } );\n", - " button.mouseover('Stop Interaction', toolbar_mouse_event);\n", - " buttongrp.append(button);\n", - " var titlebar = this.root.find($('.ui-dialog-titlebar'));\n", - " titlebar.prepend(buttongrp);\n", - "}\n", - "\n", - "mpl.figure.prototype._root_extra_style = function(el){\n", - " var fig = this\n", - " el.on(\"remove\", function(){\n", - "\tfig.close_ws(fig, {});\n", - " });\n", - "}\n", - "\n", - "mpl.figure.prototype._canvas_extra_style = function(el){\n", - " // this is important to make the div 'focusable\n", - " el.attr('tabindex', 0)\n", - " // reach out to IPython and tell the keyboard manager to turn it's self\n", - " // off when our div gets focus\n", - "\n", - " // location in version 3\n", - " if (IPython.notebook.keyboard_manager) {\n", - " IPython.notebook.keyboard_manager.register_events(el);\n", - " }\n", - " else {\n", - " // location in version 2\n", - " IPython.keyboard_manager.register_events(el);\n", - " }\n", - "\n", - "}\n", - "\n", - "mpl.figure.prototype._key_event_extra = function(event, name) {\n", - " var manager = IPython.notebook.keyboard_manager;\n", - " if (!manager)\n", - " manager = IPython.keyboard_manager;\n", - "\n", - " // Check for shift+enter\n", - " if (event.shiftKey && event.which == 13) {\n", - " this.canvas_div.blur();\n", - " // select the cell after this one\n", - " var index = IPython.notebook.find_cell_index(this.cell_info[0]);\n", - " IPython.notebook.select(index + 1);\n", - " }\n", - "}\n", - "\n", - "mpl.figure.prototype.handle_save = function(fig, msg) {\n", - " fig.ondownload(fig, null);\n", - "}\n", - "\n", - "\n", - "mpl.find_output_cell = function(html_output) {\n", - " // Return the cell and output element which can be found *uniquely* in the notebook.\n", - " // Note - this is a bit hacky, but it is done because the \"notebook_saving.Notebook\"\n", - " // IPython event is triggered only after the cells have been serialised, which for\n", - " // our purposes (turning an active figure into a static one), is too late.\n", - " var cells = IPython.notebook.get_cells();\n", - " var ncells = cells.length;\n", - " for (var i=0; i= 3 moved mimebundle to data attribute of output\n", - " data = data.data;\n", - " }\n", - " if (data['text/html'] == html_output) {\n", - " return [cell, data, j];\n", - " }\n", - " }\n", - " }\n", - " }\n", - "}\n", - "\n", - "// Register the function which deals with the matplotlib target/channel.\n", - "// The kernel may be null if the page has been refreshed.\n", - "if (IPython.notebook.kernel != null) {\n", - " IPython.notebook.kernel.comm_manager.register_target('matplotlib', mpl.mpl_figure_comm);\n", - "}\n" - ], - "text/plain": [ - "" - ] - }, - "metadata": {}, - "output_type": "display_data" - }, - { - "data": { - "text/html": [ - "" - ], - "text/plain": [ - "" - ] - }, - "metadata": {}, - "output_type": "display_data" - } - ], + "outputs": [], "source": [ "from IS2_velocity.correlation_processing import determine_x1s\n", "from IS2_velocity.plotting import plot_correlation_function\n", @@ -4279,40 +306,13 @@ "plot_correlation_function(x1, data, cycle1, cycle2, beam, search_width=1000, segment_length=2000, along_track_step=100,\n", " max_percent_nans=10, dx=20)\n" ] - }, - { - "cell_type": "code", - "execution_count": 12, - "metadata": {}, - "outputs": [ - { - "data": { - "text/plain": [ - "29164538.40319396" - ] - }, - "execution_count": 12, - "metadata": {}, - "output_type": "execute_result" - } 
- ], - "source": [ - "x1\n" - ] - }, - { - "cell_type": "code", - "execution_count": null, - "metadata": {}, - "outputs": [], - "source": [] } ], "metadata": { "kernelspec": { - "display_name": "Python [conda env:icesat2020]", + "display_name": "Python 3", "language": "python", - "name": "conda-env-icesat2020-py" + "name": "python3" }, "language_info": { "codemirror_mode": { @@ -4324,7 +324,7 @@ "name": "python", "nbconvert_exporter": "python", "pygments_lexer": "ipython3", - "version": "3.7.6" + "version": "3.7.3" } }, "nbformat": 4, From dd6ea3cbd2c22ac7e867710fde7c87dd6eeb5fe0 Mon Sep 17 00:00:00 2001 From: Benjamin Hills Date: Sun, 15 Nov 2020 16:00:50 -0700 Subject: [PATCH 17/17] Update the flip_meas_sign arg in plotting function --- IS2_velocity/plotting.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/IS2_velocity/plotting.py b/IS2_velocity/plotting.py index d351f2d..d56c90c 100644 --- a/IS2_velocity/plotting.py +++ b/IS2_velocity/plotting.py @@ -13,7 +13,7 @@ proj_stere = Proj('epsg:3031') def plot_measures_along_track_comparison(rgt, beams, data_path, correlation_threshold, spatial_extent, plot_out_location, map_data_root, - velocity_number, epsg, close=False): + velocity_number, flip_meas_sign=False, close=False): """ :param rgt: