From d508d93978d421883b31f61351fffd31589e5b18 Mon Sep 17 00:00:00 2001
From: Tyler Sutterley
Date: Tue, 29 Nov 2022 11:39:30 -0800
Subject: [PATCH] refactor: comment format to try to match flake8 styling for #90 (#92)

---
 gravity_toolkit/clenshaw_summation.py | 66 +--
 gravity_toolkit/degree_amplitude.py | 14 +-
 gravity_toolkit/destripe_harmonics.py | 72 +--
 gravity_toolkit/fourier_legendre.py | 138 ++---
 gravity_toolkit/gauss_weights.py | 32 +-
 gravity_toolkit/gen_averaging_kernel.py | 52 +-
 gravity_toolkit/gen_disc_load.py | 118 ++--
 gravity_toolkit/gen_harmonics.py | 126 ++---
 gravity_toolkit/gen_point_load.py | 42 +-
 gravity_toolkit/gen_spherical_cap.py | 152 ++---
 gravity_toolkit/gen_stokes.py | 82 +--
 gravity_toolkit/geocenter.py | 494 ++++++++--------
 gravity_toolkit/grace_date.py | 136 ++---
 gravity_toolkit/grace_find_months.py | 16 +-
 gravity_toolkit/grace_input_months.py | 350 ++++++------
 gravity_toolkit/grace_months_index.py | 116 ++--
 gravity_toolkit/harmonic_gradients.py | 34 +-
 gravity_toolkit/harmonic_summation.py | 30 +-
 gravity_toolkit/harmonics.py | 616 ++++++++++----------
 gravity_toolkit/hdf5_read.py | 52 +-
 gravity_toolkit/hdf5_read_stokes.py | 68 +--
 gravity_toolkit/hdf5_stokes.py | 58 +-
 gravity_toolkit/hdf5_write.py | 36 +-
 gravity_toolkit/legendre.py | 54 +-
 gravity_toolkit/legendre_polynomials.py | 28 +-
 gravity_toolkit/mascons.py | 92 +--
 gravity_toolkit/ncdf_read.py | 44 +-
 gravity_toolkit/ncdf_read_stokes.py | 54 +-
 gravity_toolkit/ncdf_stokes.py | 66 +--
 gravity_toolkit/ncdf_write.py | 36 +-
 gravity_toolkit/ocean_stokes.py | 22 +-
 gravity_toolkit/piecewise_regress.py | 150 ++---
 gravity_toolkit/plm_colombo.py | 38 +-
 gravity_toolkit/plm_holmes.py | 44 +-
 gravity_toolkit/plm_mohlenkamp.py | 54 +-
 gravity_toolkit/read_GIA_model.py | 332 +++++------
 gravity_toolkit/read_GRACE_harmonics.py | 150 ++---
 gravity_toolkit/read_ICGEM_harmonics.py | 2 +-
 gravity_toolkit/read_SLR_C20.py | 290 +++++-----
 gravity_toolkit/read_SLR_C30.py | 140 ++---
 gravity_toolkit/read_SLR_C40.py | 44 +-
 gravity_toolkit/read_SLR_C50.py | 92 +--
 gravity_toolkit/read_SLR_CS2.py | 148 ++---
 gravity_toolkit/read_SLR_geocenter.py | 10 +-
 gravity_toolkit/read_SLR_harmonics.py | 128 ++---
 gravity_toolkit/read_gfc_harmonics.py | 50 +-
 gravity_toolkit/read_gravis_geocenter.py | 4 +-
 gravity_toolkit/read_love_numbers.py | 110 ++--
 gravity_toolkit/read_swenson_geocenter.py | 4 +-
 gravity_toolkit/read_tellus_geocenter.py | 4 +-
 gravity_toolkit/savitzky_golay.py | 24 +-
 gravity_toolkit/sea_level_equation.py | 208 +++----
 gravity_toolkit/spatial.py | 526 ++++++++---------
 gravity_toolkit/time.py | 320 +++++------
 gravity_toolkit/tools.py | 22 +-
 gravity_toolkit/tsregress.py | 128 ++---
 gravity_toolkit/tssmooth.py | 150 ++---
 gravity_toolkit/utilities.py | 568 +++++++++----------
 notebooks/GRACE-Harmonic-Plots.ipynb | 2 +-
 scripts/aod1b_geocenter.py | 126 ++---
 scripts/aod1b_oblateness.py | 124 ++--
 scripts/calc_degree_one.py | 656 +++++++++++-----------
 scripts/calc_harmonic_resolution.py | 4 +-
 scripts/calc_mascon.py | 496 ++++++++--------
 scripts/calc_sensitivity_kernel.py | 316 +++++------
 scripts/cnes_grace_sync.py | 130 ++---
 scripts/combine_harmonics.py | 158 +++---
 scripts/convert_harmonics.py | 102 ++--
 scripts/dealiasing_monthly_mean.py | 282 +++++-----
 scripts/esa_costg_swarm_sync.py | 146 ++---
 scripts/geocenter_compare_tellus.py | 98 ++--
 scripts/geocenter_monte_carlo.py | 76 +--
 scripts/geocenter_ocean_models.py | 68 +--
 scripts/geocenter_processing_centers.py | 72 +--
 scripts/gfz_icgem_costg_ftp.py | 126 ++---
 scripts/gfz_isdc_dealiasing_ftp.py | 114 ++--
 scripts/gfz_isdc_grace_ftp.py | 196 +++----
 scripts/grace_mean_harmonics.py | 170 +++---
 scripts/grace_spatial_error.py | 302 +++++-----
 scripts/grace_spatial_maps.py | 306 +++++-----
 scripts/itsg_graz_grace_sync.py | 122 ++--
 scripts/make_grace_index.py | 62 +-
 scripts/mascon_reconstruct.py | 176 +++---
 scripts/monte_carlo_degree_one.py | 584 +++++++++----------
 scripts/podaac_cumulus.py | 200 +++----
 scripts/podaac_grace_sync.py | 238 ++++----
 scripts/podaac_webdav.py | 52 +-
 scripts/regress_grace_maps.py | 236 ++++----
 scripts/run_grace_date.py | 46 +-
 scripts/run_sea_level_equation.py | 130 ++---
 scripts/scale_grace_maps.py | 412 +++++++-------
 test/test_download_and_read.py | 48 +-
 test/test_legendre.py | 12 +-
 test/test_love_numbers.py | 10 +-
 test/test_time.py | 90 +--
 95 files changed, 6762 insertions(+), 6762 deletions(-)

diff --git a/gravity_toolkit/clenshaw_summation.py b/gravity_toolkit/clenshaw_summation.py
index 37d6b87d..8b0f1eb0 100644
--- a/gravity_toolkit/clenshaw_summation.py
+++ b/gravity_toolkit/clenshaw_summation.py
@@ -132,91 +132,91 @@ def clenshaw_summation(clm, slm, lon, lat, RAD=0, UNITS=0, LMAX=0, LOVE=None,
     `doi: 10.1029/2000JB900113 <https://doi.org/10.1029/2000JB900113>`_
     """
-    #-- check if lat and lon are the same size
+    # check if lat and lon are the same size
     if (len(lat) != len(lon)):
         raise ValueError('Incompatible vector dimensions (lon, lat)')
-    #-- calculate colatitude and longitude in radians
+    # calculate colatitude and longitude in radians
     th = (90.0 - lat)*np.pi/180.0
     phi = np.squeeze(lon*np.pi/180.0)
-    #-- calculate cos and sin of colatitudes
+    # calculate cos and sin of colatitudes
     t = np.cos(th)
     u = np.sin(th)
-    #-- dimensions of theta and phi
+    # dimensions of theta and phi
     npts = len(th)
-    #-- Gaussian Smoothing
+    # Gaussian Smoothing
     if (RAD != 0):
         wl = 2.0*np.pi*gauss_weights(RAD,LMAX)
     else:
-        #-- else = 1
+        # else = 1
         wl = np.ones((LMAX+1))
-    #-- Setting units factor for output
-    #-- extract arrays of kl, hl, and ll Love Numbers
+    # Setting units factor for output
+    # extract arrays of kl, hl, and ll Love Numbers
     factors = units(lmax=LMAX).harmonic(*LOVE)
-    #-- dfactor computes the degree dependent coefficients
+    # dfactor computes the degree dependent coefficients
     if (UNITS == 0):
-        #-- 0: keep original scale
+        # 0: keep original scale
         dfactor = factors.norm
     elif (UNITS == 1):
-        #-- 1: cmH2O, centimeters water equivalent
+        # 1: cmH2O, centimeters water equivalent
        dfactor = factors.cmwe
     elif (UNITS == 2):
-        #-- 2: mmGH, mm geoid height
+        # 2: mmGH, mm geoid height
        dfactor = factors.mmGH
     elif (UNITS == 3):
-        #-- 3: mmCU, mm elastic crustal deformation
+        # 3: mmCU, mm elastic crustal deformation
        dfactor = factors.mmCU
     elif (UNITS == 4):
-        #-- 4: micGal, microGal gravity perturbations
+        # 4: micGal, microGal gravity perturbations
        dfactor = factors.microGal
     elif (UNITS == 5):
-        #-- 5: mbar, equivalent surface pressure
+        # 5: mbar, equivalent surface pressure
        dfactor = factors.mbar
     elif (UNITS == 6):
-        #-- 6: cmVCU, cm viscoelastic crustal uplift (GIA ONLY)
+        # 6: cmVCU, cm viscoelastic crustal uplift (GIA ONLY)
        dfactor = factors.cmVCU
     elif isinstance(UNITS,(list,np.ndarray)):
-        #-- custom units
+        # custom units
        dfactor = np.copy(UNITS)
     else:
        raise ValueError(f'Unknown units {UNITS}')
-    #-- calculate arrays for clenshaw summations over colatitudes
+    # calculate arrays for clenshaw summations over colatitudes
     s_m_c = np.zeros((npts,LMAX*2+2))
     for m in range(LMAX, -1, -1):
-        #-- convolve harmonics with unit factors and smoothing
+        # convolve harmonics with unit factors and smoothing
         s_m_c[:,2*m:2*m+2] = clenshaw_s_m(t, dfactor*wl, m, clm, slm, LMAX, SCALE=SCALE)
-    #-- calculate cos(phi)
+    # calculate cos(phi)
     cos_phi_2 = 2.0*np.cos(phi)
-    #-- matrix of cos/sin m*phi summation
+    # matrix of cos/sin m*phi summation
     cos_m_phi = np.zeros((npts,LMAX+2),dtype=ASTYPE)
     sin_m_phi = np.zeros((npts,LMAX+2),dtype=ASTYPE)
-    #-- initialize matrix with values at lmax+1 and lmax
+    # initialize matrix with values at lmax+1 and lmax
     cos_m_phi[:,LMAX+1] = np.cos(ASTYPE(LMAX + 1)*phi)
     sin_m_phi[:,LMAX+1] = np.sin(ASTYPE(LMAX + 1)*phi)
     cos_m_phi[:,LMAX] = np.cos(ASTYPE(LMAX)*phi)
     sin_m_phi[:,LMAX] = np.sin(ASTYPE(LMAX)*phi)
-    #-- calculate summation for order LMAX
+    # calculate summation for order LMAX
     s_m = s_m_c[:,2*LMAX]*cos_m_phi[:,LMAX] + s_m_c[:,2*LMAX+1]*sin_m_phi[:,LMAX]
-    #-- iterate to calculate complete summation
+    # iterate to calculate complete summation
     for m in range(LMAX-1, 0, -1):
         cos_m_phi[:,m] = cos_phi_2*cos_m_phi[:,m+1] - cos_m_phi[:,m+2]
         sin_m_phi[:,m] = cos_phi_2*sin_m_phi[:,m+1] - sin_m_phi[:,m+2]
-        #-- calculate summation for order m
+        # calculate summation for order m
         a_m = np.sqrt((2.0*m+3.0)/(2.0*m+2.0))
         s_m = a_m*u*s_m + s_m_c[:,2*m]*cos_m_phi[:,m] + s_m_c[:,2*m+1]*sin_m_phi[:,m]
-    #-- calculate spatial field
+    # calculate spatial field
     spatial = np.sqrt(3.0)*u*s_m + s_m_c[:,0]
-    #-- return the calculated spatial field
+    # return the calculated spatial field
     return spatial
 
-#-- PURPOSE: compute conditioned arrays for Clenshaw summation from the
-#-- fully-normalized associated Legendre's function for an order m
+# PURPOSE: compute conditioned arrays for Clenshaw summation from the
+# fully-normalized associated Legendre's function for an order m
 def clenshaw_s_m(t, f, m, clm1, slm1, lmax, ASTYPE=np.longdouble, SCALE=1e-280):
     """
     Compute conditioned arrays for Clenshaw summation from the fully-normalized
@@ -235,13 +235,13 @@ def clenshaw_s_m(t, f, m, clm1, slm1, lmax, ASTYPE=np.longdouble, SCALE=1e-280):
     -------
     s_m_c: conditioned array for clenshaw summation
     """
-    #-- allocate for output matrix
+    # allocate for output matrix
     N = len(t)
     s_m = np.zeros((N,2),dtype=ASTYPE)
-    #-- scaling to prevent overflow
+    # scaling to prevent overflow
     clm = SCALE*clm1.astype(ASTYPE)
     slm = SCALE*slm1.astype(ASTYPE)
-    #-- convert lmax and m to float
+    # convert lmax and m to float
     lm = ASTYPE(lmax)
     mm = ASTYPE(m)
     if (m == lmax):
@@ -281,5 +281,5 @@ def clenshaw_s_m(t, f, m, clm1, slm1, lmax, ASTYPE=np.longdouble, SCALE=1e-280):
         s_mm_c_pre_2 = np.copy(s_mm_c_pre_1)
         s_mm_c_pre_1 = np.copy(s_mm_c)
     s_m[:,0] = np.copy(s_mm_c)
-    #-- return s_m rescaled with scalef
+    # return s_m rescaled with scalef
     return s_m/SCALE
diff --git a/gravity_toolkit/degree_amplitude.py b/gravity_toolkit/degree_amplitude.py
index f9abf483..9ef966d5 100755
--- a/gravity_toolkit/degree_amplitude.py
+++ b/gravity_toolkit/degree_amplitude.py
@@ -49,25 +49,25 @@ def degree_amplitude(clm, slm, LMAX=None, MMAX=None):
     amp: float
         degree amplitude
     """
-    #-- add a singleton dimension to input harmonics
+    # add a singleton dimension to input harmonics
     clm = np.atleast_3d(clm)
     slm = np.atleast_3d(slm)
-    #-- check shape
+    # check shape
     LMp1,MMp1,nt = np.shape(clm)
-    #-- upper bound of spherical harmonic degrees
+    # upper bound of spherical harmonic degrees
     if LMAX is None:
         LMAX = LMp1 - 1
-    #-- upper bound of spherical harmonic orders
+    # upper bound of spherical harmonic orders
     if MMAX is None:
         MMAX = MMp1 - 1
-    #-- allocating for output array
+    # allocating for output array
     amp = np.zeros((LMAX+1,nt))
     for l in range(LMAX+1):
         m = np.arange(0,MMAX+1)
-        #-- degree amplitude of spherical harmonic degree
+        # degree amplitude of spherical harmonic degree
         amp[l,:] = np.sqrt(np.sum(clm[l,m,:]**2 + slm[l,m,:]**2,axis=0))
-    #-- return the degree amplitude with singleton dimensions removed
+    # return the degree amplitude with singleton dimensions removed
     return np.squeeze(amp)
diff --git a/gravity_toolkit/destripe_harmonics.py b/gravity_toolkit/destripe_harmonics.py
index 802b464a..4a4b62d3 100644
--- a/gravity_toolkit/destripe_harmonics.py
+++ b/gravity_toolkit/destripe_harmonics.py
@@ -95,18 +95,18 @@ def destripe_harmonics(clm1, slm1, LMIN=2, LMAX=60, MMAX=None,
     `doi: 10.1029/2005GL025285 <https://doi.org/10.1029/2005GL025285>`_
     """
-    #-- tests if spherical harmonics have been imported
+    # tests if spherical harmonics have been imported
     if (clm1.shape[0] == 1) or (slm1.shape[0] == 1):
         raise ValueError('Input harmonics need to be matrices')
-    #-- upper bound of spherical harmonic orders (default = LMAX)
+    # upper bound of spherical harmonic orders (default = LMAX)
     if MMAX is None:
         MMAX = np.copy(LMAX)
-    #-- output filtered coefficients (copy to not modify input)
+    # output filtered coefficients (copy to not modify input)
     Wclm = clm1.copy()
     Wslm = slm1.copy()
-    #-- matrix size declarations
+    # matrix size declarations
     clmeven = np.zeros((LMAX), dtype=np.float64)
     slmeven = np.zeros((LMAX), dtype=np.float64)
     clmodd = np.zeros((LMAX+1), dtype=np.float64)
@@ -114,23 +114,23 @@ def destripe_harmonics(clm1, slm1, LMIN=2, LMAX=60, MMAX=None,
     clmsm = np.zeros((LMAX+1,MMAX+1), dtype=np.float64)
     slmsm = np.zeros((LMAX+1,MMAX+1), dtype=np.float64)
 
-    #-- start of the smoothing over orders (m)
+    # start of the smoothing over orders (m)
     for m in range(int(MMAX+1)):
         smooth = np.exp(-np.float64(m)/10.0)*15.0
         if ROUND:
-            #-- round(smooth) to nearest even instead of int(smooth)
+            # round(smooth) to nearest even instead of int(smooth)
             nsmooth = np.around(smooth)
         else:
-            #-- Sean's method for finding nsmooth (use floor of smooth)
+            # Sean's method for finding nsmooth (use floor of smooth)
             nsmooth = np.int64(smooth)
         if (nsmooth < 2):
-            #-- Isabella's method of picking nsmooth sets minimum to 2
+            # Isabella's method of picking nsmooth sets minimum to 2
             nsmooth = np.int64(2)
 
         rmat = np.zeros((3,3), dtype=np.float64)
         lll = np.arange(np.float64(nsmooth)*2.+1.)-np.float64(nsmooth)
-        #-- create design matrix to have the following form:
+        # create design matrix to have the following form:
         # [ 1    ll    ll^2 ]
         # [ ll   ll^2  ll^3 ]
         # [ ll^2 ll^3  ll^4 ]
@@ -147,14 +147,14 @@ def destripe_harmonics(clm1, slm1, LMIN=2, LMAX=60, MMAX=None,
             rmat[2,1] += ill**3
             rmat[2,2] += ill**4
 
-        #-- put the even and odd l's into their own arrays
+        # put the even and odd l's into their own arrays
         ieven = -1
         iodd = -1
         leven = np.zeros((LMAX), dtype=np.int64)
         lodd = np.zeros((LMAX), dtype=np.int64)
         for l in range(int(m),int(LMAX+1)):
-            #-- check if degree is odd or even
+            # check if degree is odd or even
             if np.remainder(l,2).astype(bool):
                 iodd += 1
                 lodd[iodd] = l
@@ -166,21 +166,21 @@ def destripe_harmonics(clm1, slm1, LMIN=2, LMAX=60, MMAX=None,
                 clmeven[ieven] = clm1[l,m].copy()
                 slmeven[ieven] = slm1[l,m].copy()
 
-        #-- smooth, by fitting a quadratic polynomial to 7 points at a time
-        #-- deal with even stokes coefficients
+        # smooth, by fitting a quadratic polynomial to 7 points at a time
+        # deal with even stokes coefficients
         l1 = 0
         l2 = ieven
         if (l1 > (l2-2*nsmooth)):
             for l in range(l1,l2+1):
                 if NARROW:
-                    #-- Sean's method
-                    #-- Clm=Slm=0 if number of points is less than window size
+                    # Sean's method
+                    # Clm=Slm=0 if number of points is less than window size
                     clmsm[leven[l],m] = 0.0
                     slmsm[leven[l],m] = 0.0
                 else:
-                    #-- Isabella's method
-                    #-- Clm and Slm passed through unaltered
+                    # Isabella's method
+                    # Clm and Slm passed through unaltered
                     clmsm[leven[l],m] = clm1[leven[l],m].copy()
                     slmsm[leven[l],m] = slm1[leven[l],m].copy()
         else:
@@ -195,18 +195,18 @@ def destripe_harmonics(clm1, slm1, LMIN=2, LMAX=60, MMAX=None,
                     rhss[1] += slmeven[l+ll]*np.float64(ll)
                     rhss[2] += slmeven[l+ll]*np.float64(ll**2)
 
-                #-- fit design matrix to coefficients
-                #-- to get beta parameters
+                # fit design matrix to coefficients
+                # to get beta parameters
                 bhsc = np.linalg.lstsq(rmat,rhsc.T,rcond=-1)[0]
                 bhss = np.linalg.lstsq(rmat,rhss.T,rcond=-1)[0]
 
-                #-- all other l is assigned as bhsc
+                # all other l is assigned as bhsc
                 clmsm[leven[l],m] = bhsc[0].copy()
-                #-- all other l is assigned as bhss
+                # all other l is assigned as bhss
                 slmsm[leven[l],m] = bhss[0].copy()
 
                 if (l == (l1+nsmooth)):
-                    #-- deal with l=l1+nsmooth
+                    # deal with l=l1+nsmooth
                     for ll in range(int(-nsmooth),0):
                         clmsm[leven[l+ll],m] = bhsc[0]+bhsc[1]*np.float64(ll) + \
                             bhsc[2]*np.float64(ll**2)
                         slmsm[leven[l+ll],m] = bhss[0]+bhss[1]*np.float64(ll) + \
                             bhss[2]*np.float64(ll**2)
 
                 if (l == (l2-nsmooth)):
@@ -214,27 +214,27 @@ def destripe_harmonics(clm1, slm1, LMIN=2, LMAX=60, MMAX=None,
-                    #-- deal with l=l2-nsmnooth
+                    # deal with l=l2-nsmooth
                     for ll in range(1,int(nsmooth+1)):
                         clmsm[leven[l+ll],m] = bhsc[0]+bhsc[1]*np.float64(ll) + \
                             bhsc[2]*np.float64(ll**2)
                         slmsm[leven[l+ll],m] = bhss[0]+bhss[1]*np.float64(ll) + \
                             bhss[2]*np.float64(ll**2)
 
-        #-- deal with odd stokes coefficients
+        # deal with odd stokes coefficients
         l1 = 0
         l2 = iodd
         if (l1 > (l2-2*nsmooth)):
             for l in range(l1,l2+1):
                 if NARROW:
-                    #-- Sean's method
-                    #-- Clm=Slm=0 if number of points is less than window size
+                    # Sean's method
+                    # Clm=Slm=0 if number of points is less than window size
                     clmsm[lodd[l],m] = 0.0
                     slmsm[lodd[l],m] = 0.0
                 else:
-                    #-- Isabella's method
-                    #-- Clm and Slm passed through unaltered
+                    # Isabella's method
+                    # Clm and Slm passed through unaltered
                     clmsm[lodd[l],m] = clm1[lodd[l],m].copy()
                     slmsm[lodd[l],m] = slm1[lodd[l],m].copy()
         else:
@@ -249,18 +249,18 @@ def destripe_harmonics(clm1, slm1, LMIN=2, LMAX=60, MMAX=None,
                     rhss[1] += slmodd[l+ll]*np.float64(ll)
                     rhss[2] += slmodd[l+ll]*np.float64(ll**2)
 
-                #-- fit design matrix to coefficients
-                #-- to get beta parameters
+                # fit design matrix to coefficients
+                # to get beta parameters
                 bhsc = np.linalg.lstsq(rmat,rhsc.T,rcond=-1)[0]
                 bhss = np.linalg.lstsq(rmat,rhss.T,rcond=-1)[0]
 
-                #-- all other l is assigned as bhsc
+                # all other l is assigned as bhsc
                 clmsm[lodd[l],m] = bhsc[0].copy()
-                #-- all other l is assigned as bhss
+                # all other l is assigned as bhss
                 slmsm[lodd[l],m] = bhss[0].copy()
 
                 if (l == (l1+nsmooth)):
-                    #-- deal with l=l1+nsmooth
+                    # deal with l=l1+nsmooth
                     for ll in range(int(-nsmooth),0):
                         clmsm[lodd[l+ll],m] = bhsc[0]+bhsc[1]*np.float64(ll) + \
                             bhsc[2]*np.float64(ll**2)
                         slmsm[lodd[l+ll],m] = bhss[0]+bhss[1]*np.float64(ll) + \
                             bhss[2]*np.float64(ll**2)
 
                 if (l == (l2-nsmooth)):
@@ -268,17 +268,17 @@ def destripe_harmonics(clm1, slm1, LMIN=2, LMAX=60, MMAX=None,
-                    #-- deal with l=l2-nsmnooth
+                    # deal with l=l2-nsmooth
                     for ll in range(1,int(nsmooth+1)):
                         clmsm[lodd[l+ll],m] = bhsc[0]+bhsc[1]*np.float64(ll) + \
                             bhsc[2]*np.float64(ll**2)
                         slmsm[lodd[l+ll],m] = bhss[0]+bhss[1]*np.float64(ll) + \
                             bhss[2]*np.float64(ll**2)
 
-        #-- deal with m greater than or equal to 5
+        # deal with m greater than or equal to 5
         for l in range(int(m),int(LMAX+1)):
             if (m >= 5):
-                #-- remove smoothed clm/slm from original spherical harmonics
+                # remove smoothed clm/slm from original spherical harmonics
                 Wclm[l,m] -= clmsm[l,m]
                 Wslm[l,m] -= slmsm[l,m]
diff --git a/gravity_toolkit/fourier_legendre.py b/gravity_toolkit/fourier_legendre.py
index dd10fc72..310c7e1d 100755
--- a/gravity_toolkit/fourier_legendre.py
+++ b/gravity_toolkit/fourier_legendre.py
@@ -47,141 +47,141 @@ def fourier_legendre(lmax, mmax):
         Fourier coefficients
     """
-    #-- allocate for output fourier coefficients
+    # allocate for output fourier coefficients
     plm = np.zeros((lmax+1,lmax+1,lmax+1))
     l_even = np.arange(0,lmax+1,2)
     l_odd = np.arange(1,lmax,2)
     m_even = np.arange(0,mmax+1,2)
     m_odd = np.arange(1,mmax,2)
 
-    #-- First compute m=0, m=1 terms
-    #-- Compute m = 0, l = even terms
+    # First compute m=0, m=1 terms
+    # Compute m = 0, l = even terms
     plm[l_even,0,0] = 1.0
     p1 = (l_even*(l_even+1.0))*plm[l_even,0,0]
     plm[l_even,0,2] = p1 / (l_even*(l_even+1.0)-2.0)
-    for j in range(2,lmax,2):#-- equivalent to 2:lmax-2
+    for j in range(2,lmax,2):# equivalent to 2:lmax-2
         p1 = 2.0*(l_even*(l_even+1.0)-j**2.0)*plm[l_even,0,j]
         p2 = ((j-2.0)*(j-1.0)-l_even*(l_even+1.0))*plm[l_even,0,j-2]
         dfactor = (l_even*(l_even+1.0)-(j+2.0)*(j+1.0))
         plm[l_even,0,j+2] = (p1 + p2) / dfactor
-    #-- Special case for j = 0 fourier coefficient
+    # Special case for j = 0 fourier coefficient
     plm[l_even,0,0] = plm[l_even,0,0]/2.0
 
-    #-- Normalize overall sum to 2 for m == 0
+    # Normalize overall sum to 2 for m == 0
     norm = np.zeros((len(l_even)))
-    for j in range(0,lmax+2,2):#-- equivalent to 0:lmax
+    for j in range(0,lmax+2,2):# equivalent to 0:lmax
         ptemp = np.squeeze(plm[l_even[:, np.newaxis],0,m_even])
         dtemp = 1.0/(1.0-j-m_even) + 1.0/(1.0+j-m_even) + \
             1.0/(1.0-j+m_even) + 1.0/(1.0+j+m_even)
         norm[l_even//2] = norm[l_even//2] + plm[l_even,0,j] * \
             np.dot(ptemp, dtemp)/2.0
-    #-- normalize plms
+    # normalize plms
     norm = np.sqrt(norm/2.0)
-    for l in range(0,lmax+2,2):#-- equivalent to 0:lmax
+    for l in range(0,lmax+2,2):# equivalent to 0:lmax
         plm[l,0,:] = plm[l,0,:]/norm[l//2]
 
-    #-- Compute m = 0, l = odd terms
+    # Compute m = 0, l = odd terms
     plm[l_odd,0,1] = 1.0
     p1 = (2.0-l_odd*(l_odd+1.0))*plm[l_odd,0,1]
     plm[l_odd,0,3] = p1 / (6.0-l_odd*(l_odd+1.0))
-    for j in range(3,lmax-1,2):#-- equivalent to 3:lmax-3
+    for j in range(3,lmax-1,2):# equivalent to 3:lmax-3
         p1 = 2.0*(l_odd*(l_odd+1.0)-j**2.0)*plm[l_odd,0,j]
         p2 = ((j-2.0)*(j-1.0)-l_odd*(l_odd+1.0))*plm[l_odd,0,j-2]
         dfactor = (l_odd*(l_odd+1.0)-(j+2.0)*(j+1.0))
         plm[l_odd,0,j+2] = (p1 + p2) / dfactor
 
-    #-- Normalize overall sum to 2 for m == 0
+    # Normalize overall sum to 2 for m == 0
     norm = np.zeros((len(l_odd)))
-    for j in range(1,lmax+1,2):#-- equivalent to 1:lmax-1
+    for j in range(1,lmax+1,2):# equivalent to 1:lmax-1
         ptemp = np.squeeze(plm[l_odd[:, np.newaxis],0,m_odd])
         dtemp = 1.0/(1.0-j-m_odd) + 1.0/(1.0+j-m_odd) + \
             1.0/(1.0-j+m_odd) + 1.0/(1.0+j+m_odd)
         norm[(l_odd-1)//2] = norm[(l_odd-1)//2] + plm[l_odd,0,j] * \
             np.dot(ptemp, dtemp)/2.0
-    #-- normalize plms
+    # normalize plms
     norm = np.sqrt(norm/2.0)
-    for l in range(1,lmax+1,2):#-- equivalent to 1:lmax-1
+    for l in range(1,lmax+1,2):# equivalent to 1:lmax-1
         plm[l,0,:] = plm[l,0,:]/norm[(l-1)//2]
 
-    #-- Compute m = 1, l = even terms
+    # Compute m = 1, l = even terms
     plm[l_even,1,0] = 0.0
     plm[l_even,1,2] = 1.0
-    for j in range(2,lmax,2):#-- equivalent to 2:lmax-2
+    for j in range(2,lmax,2):# equivalent to 2:lmax-2
         p1 = 2.0*(l_even*(l_even+1)-j**2.0-2.0)*plm[l_even,1,j]
         p2 = ((j-2.0)*(j-1.0)-l_even*(l_even+1))*plm[l_even,1,j-2]
         dfactor = (l_even*(l_even+1.0)-(j+2.0)*(j+1.0))
         plm[l_even,1,j+2] = (p1 + p2) / dfactor
 
-    #-- Normalize overall sum to 4 for m == 1
-    #-- different norm than that of the cosine series
+    # Normalize overall sum to 4 for m == 1
+    # different norm than that of the cosine series
     norm = np.zeros((len(l_even)))
-    for j in range(0,lmax+2,2):#-- equivalent to 0:lmax
+    for j in range(0,lmax+2,2):# equivalent to 0:lmax
         ptemp = np.squeeze(plm[l_even[:, np.newaxis],1,m_even])
         dtemp = -1.0/(1.0-j-m_even) + 1.0/(1+j-m_even) + \
             1.0/(1.0-j+m_even) - 1.0/(1+j+m_even)
         norm[l_even//2] = norm[l_even//2] + plm[l_even,1,j] * \
             np.dot(ptemp, dtemp)/2.0
-    #-- normalize plms
+    # normalize plms
     norm = np.sqrt(norm/4.0)
-    for l in range(0,lmax+2,2):#-- equivalent to 0:lmax
+    for l in range(0,lmax+2,2):# equivalent to 0:lmax
         plm[l,1,:] = plm[l,1,:]/norm[l//2]
 
-    #-- Compute m = 1, l = odd terms
+    # Compute m = 1, l = odd terms
     plm[l_odd,1,1] = 1.0
     plm[l_odd,1,3] = 3.0*(l_odd*(l_odd+1)-2)*plm[l_odd,1,1]/(l_odd*(l_odd+1)-6)
-    for j in range(3,lmax-1,2):#-- equivalent to 3:lmax-3
+    for j in range(3,lmax-1,2):# equivalent to 3:lmax-3
         p1 = 2.0*(l_odd*(l_odd+1.0)-j**2.0-2.0)*plm[l_odd,1,j]
         p2 = ((j-2.0)*(j-1.0)-l_odd*(l_odd+1.0))*plm[l_odd,1,j-2]
         dfactor = (l_odd*(l_odd+1.0)-(j+2.0)*(j+1.0))
         plm[l_odd,1,j+2] = (p1 + p2) / dfactor
 
-    #-- Normalize overall sum to 4 for m == 1
+    # Normalize overall sum to 4 for m == 1
     norm = np.zeros((len(l_odd)))
-    for j in range(1,lmax+1,2):#-- equivalent to 1:lmax-1
+    for j in range(1,lmax+1,2):# equivalent to 1:lmax-1
         ptemp = np.squeeze(plm[l_odd[:, np.newaxis],1,m_odd])
         dtemp = -1.0/(1.0-j-m_odd) + 1.0/(1.0+j-m_odd) + \
             1.0/(1.0-j+m_odd) - 1.0/(1.0+j+m_odd)
         norm[(l_odd-1)//2] = norm[(l_odd-1)//2] + plm[l_odd,1,j] * \
             np.dot(ptemp, dtemp)/2.0
-    #-- normalize plms
+    # normalize plms
     norm = np.sqrt(norm/4.0)
-    for l in range(1,lmax+1,2):#-- equivalent to 1:lmax-1
+    for l in range(1,lmax+1,2):# equivalent to 1:lmax-1
         plm[l,1,:] = plm[l,1,:]/norm[(l-1)//2]
 
-    #-- Compute coefficients for m > 0
-    #-- m = 0 terms on rhs have different normalization
+    # Compute coefficients for m > 0
+    # m = 0 terms on rhs have different normalization
     m = 0
-    #-- m = 0, l = even terms
-    for l in range(m,lmax-1):#-- equivalent to m:lmax-2
+    # m = 0, l = even terms
+    for l in range(m,lmax-1):# equivalent to m:lmax-2
         p1 = np.sqrt((l+m+2.0)*(l+m+1.0)/(2.0*l+1.0))*plm[l,m,m_even]
         p2 = np.sqrt((l-m+1.0)*(l-m+2.0)/(2.0*l+5.0))*plm[l+2,m,m_even]
         p3 = np.sqrt((l-m)*(l-m-1.0)/(2.0*l+1.0)/2.0)*plm[l,m+2,m_even]
         dfactor = np.sqrt((l+m+4.0)*(l+m+3.0)/(2.0*l+5.0)/2.0)
         plm[l+2,m+2,m_even] = (p1 - p2 + p3) / dfactor
-    #-- m = 0, l = odd terms
-    for l in range(m+1,lmax-1):#-- equivalent to m+1:lmax-2
+    # m = 0, l = odd terms
+    for l in range(m+1,lmax-1):# equivalent to m+1:lmax-2
         p1 = np.sqrt((l+m+2.0)*(l+m+1.0)/(2.0*l+1.0))*plm[l,m,m_odd]
         p2 = np.sqrt((l-m+1.0)*(l-m+2.0)/(2.0*l+5.0))*plm[l+2,m,m_odd]
         p3 = np.sqrt((l-m)*(l-m-1.0)/(2.0*l+1.0)/2.0)*plm[l,m+2,m_odd]
         dfactor = np.sqrt((l+m+4.0)*(l+m+3.0)/(2.0*l+5.0)/2.0)
         plm[l+2,m+2,m_odd] = (p1 - p2 + p3) / dfactor
 
-    #-- m = even terms
-    for m in range(2,lmax,2):#-- equivalent to 2:lmax-2
-        #-- m = even, > 2, l = even terms
-        for l in range(m,lmax,2):#-- equivalent to m:lmax-2
+    # m = even terms
+    for m in range(2,lmax,2):# equivalent to 2:lmax-2
+        # m = even, > 2, l = even terms
+        for l in range(m,lmax,2):# equivalent to m:lmax-2
             p1 = np.sqrt((l+m+2.0)*(l+m+1.0)/(2.0*l+1.0))*plm[l,m,m_even]
             p2 = np.sqrt((l-m+1.0)*(l-m+2.0)/(2.0*l+5.0))*plm[l+2,m,m_even]
             p3 = np.sqrt((l-m)*(l-m-1.0)/(2.0*l+1.0))*plm[l,m+2,m_even]
             dfactor = np.sqrt((l+m+4.0)*(l+m+3.0)/(2.0*l+5.0))
             plm[l+2,m+2,m_even] = (p1 - p2 + p3) / dfactor
-        #-- m = even, > 2, l = odd terms
+        # m = even, > 2, l = odd terms
         for l in range(m+1,lmax-1,2):
             p1 = np.sqrt((l+m+2.0)*(l+m+1.0)/(2.0*l+1.0))*plm[l,m,m_odd]
             p2 = np.sqrt((l-m+1.0)*(l-m+2.0)/(2.0*l+5.0))*plm[l+2,m,m_odd]
@@ -189,25 +189,25 @@ def fourier_legendre(lmax, mmax):
             dfactor = np.sqrt((l+m+4.0)*(l+m+3.0)/(2.0*l+5.0))
             plm[l+2,m+2,m_odd] = (p1 - p2 + p3) / dfactor
 
-    #-- m = odd terms
-    for m in range(1,lmax-1,2):#-- equivalent to 1:lmax-3
-        #-- m = odd, > 1, l = even terms
-        for l in range(m+1,lmax-1,2):#-- equivalent to m+1,lmax-2
+    # m = odd terms
+    for m in range(1,lmax-1,2):# equivalent to 1:lmax-3
+        # m = odd, > 1, l = even terms
+        for l in range(m+1,lmax-1,2):# equivalent to m+1,lmax-2
             p1 = np.sqrt((l+m+2.0)*(l+m+1.0)/(2.0*l+1.0))*plm[l,m,m_even]
             p2 = np.sqrt((l-m+1.0)*(l-m+2.0)/(2.0*l+5.0))*plm[l+2,m,m_even]
             p3 = np.sqrt((l-m)*(l-m-1.0)/(2.0*l+1.0))*plm[l,m+2,m_even]
             dfactor = np.sqrt((l+m+4.0)*(l+m+3.0)/(2.0*l+5.0))
             plm[l+2,m+2,m_even] = (p1 - p2 + p3) / dfactor
-        #-- m = odd, > 1, l = odd terms
-        for l in range(m,lmax-1,2):#-- equivalent to m:lmax-2
+        # m = odd, > 1, l = odd terms
+        for l in range(m,lmax-1,2):# equivalent to m:lmax-2
             p1 = np.sqrt((l+m+2.0)*(l+m+1.0)/(2.0*l+1.0))*plm[l,m,m_odd]
             p2 = np.sqrt((l-m+1.0)*(l-m+2.0)/(2.0*l+5.0))*plm[l+2,m,m_odd]
             p3 = np.sqrt((l-m)*(l-m-1.0)/(2.0*l+1.0))*plm[l,m+2,m_odd]
             dfactor = np.sqrt((l+m+4.0)*(l+m+3.0)/(2.0*l+5.0))
             plm[l+2,m+2,m_odd] = (p1 - p2 + p3) / dfactor
 
-    #-- return the fourier coefficients
+    # return the fourier coefficients
     return plm
 
 def legendre_gradient(lmax, mmax):
     """
@@ -234,42 +234,42 @@ def legendre_gradient(lmax, mmax):
     vlm = np.zeros((lmax+1,lmax+1,lmax+1))
     wlm = np.zeros((lmax+1,lmax+1,lmax+1))
 
-    #-- l=0 zero by definition
+    # l=0 zero by definition
     lind = np.arange(1,lmax+1)
-    #-- m=0 special case
-    #-- terms with m=0, m=1 have different coefficients
+    # m=0 special case
+    # terms with m=0, m=1 have different coefficients
     vlm[lind,0,:] = 2.0*np.dot(np.diag(np.sqrt((lind+1)*lind/2.0)), plm[lind,1,:])
-    #-- m+1 terms
-    for l in range(2,lmax+1):#-- from 2 to lmax
-        m = np.arange(1,l)#-- from 1 to l-1
-        lplus = np.arange(l+2,2*l+1)#-- from l+2 to 2*l
-        lminus = np.arange(l-1,0,-1)#-- from l-1 to 1
+    # m+1 terms
+    for l in range(2,lmax+1):# from 2 to lmax
+        m = np.arange(1,l)# from 1 to l-1
+        lplus = np.arange(l+2,2*l+1)# from l+2 to 2*l
+        lminus = np.arange(l-1,0,-1)# from l-1 to 1
         vlm[l,m,:] = np.dot(np.diag(np.sqrt(lplus*lminus/4.0)), plm[l,m+1,:])
-    #-- m-1 terms, m-1=0 has different coefficients
+    # m-1 terms, m-1=0 has different coefficients
     vlm[lind,1,:] -= np.dot(np.diag(np.sqrt((lind+1)*lind/2.0)), plm[lind,0,:])
     for l in range(2,lmax+1):
-        m = np.arange(2,l+1)#-- from 2 to l
-        lplus = np.arange(l+2,2*l+1)#-- from l+2 to 2*l
-        lminus = np.arange(l-1,0,-1)#-- from l-1 to 1
+        m = np.arange(2,l+1)# from 2 to l
+        lplus = np.arange(l+2,2*l+1)# from l+2 to 2*l
+        lminus = np.arange(l-1,0,-1)# from l-1 to 1
         vlm[l,m,:] -= np.dot(np.diag(np.sqrt(lplus*lminus/4.0)), plm[l,m-1,:])
-    #-- normalizations
+    # normalizations
     for l in range(1,lmax+1):
         vlm[l,:,:] /= np.sqrt((l+1)*l)
 
-    #-- m+1 terms
+    # m+1 terms
     for l in range(2, lmax+1):
-        m = np.arange(1,l)#-- from 1 to l-1
+        m = np.arange(1,l)# from 1 to l-1
         dfactor = (2.0*l+1.0)/(2.0*l-1.0)
-        lminus2 = np.arange(l-2,-1,-1)#-- from l-2 to 0
-        lminus1 = np.arange(l-1,0,-1)#-- from l-1 to 1
+        lminus2 = np.arange(l-2,-1,-1)# from l-2 to 0
+        lminus1 = np.arange(l-1,0,-1)# from l-1 to 1
         wlm[l,m,:] = np.sqrt(dfactor) * \
             np.dot(np.diag(np.sqrt(lminus2*lminus1/4.0)), plm[l,m+1,:])
 
-    #-- m-1 terms, m-1=0 has different coefficients
-    #-- m=1 term
+    # m-1 terms, m-1=0 has different coefficients
+    # m=1 term
     for l in range(1, lmax+1):
         dfactor = (2.0*l+1.0)/(2.0*l-1.0)
         wlm[l,1,:] += np.sqrt(dfactor)*np.sqrt(l*(l+1)/2.0)*plm[l-1,0,:]
@@ -277,14 +277,14 @@ def legendre_gradient(lmax, mmax):
     for l in range(2,lmax+1):
         m = np.arange(2,l+1)
         dfactor = (2.0*l+1.0)/(2.0*l-1.0)
-        lplus2 = np.arange(l+2,2*l+1)#-- from l+2 to 2*l
-        lplus1 = np.arange(l+1,2*l)#-- from l+1 to (2*l-1)
+        lplus2 = np.arange(l+2,2*l+1)# from l+2 to 2*l
+        lplus1 = np.arange(l+1,2*l)# from l+1 to (2*l-1)
         wlm[l,m,:] += np.sqrt(dfactor) * \
             np.dot(np.diag(np.sqrt(lplus2*lplus1)/4.0), plm[l-1,m-1,:])
-    #-- normalizations
+    # normalizations
     for l in range(1, lmax+1):
         wlm[l,:,:] /= np.sqrt((l+1)*l)
-    #-- normalize vlm
+    # normalize vlm
     vlm[:,0,:] /= 2.0
 
     return (vlm, wlm)
diff --git a/gravity_toolkit/gauss_weights.py b/gravity_toolkit/gauss_weights.py
index e6a0b9be..471db3d3 100755
--- a/gravity_toolkit/gauss_weights.py
+++ b/gravity_toolkit/gauss_weights.py
@@ -73,36 +73,36 @@ def gauss_weights(hw, LMAX, CUTOFF=1e-10):
     the Earth's Gravity Field", NASA Grant No. NGR 36-008-161,
     OSURF Proj. No. 783210, 48 pp., (1981).
     """
-    #-- allocate for output weights
+    # allocate for output weights
     wl = np.zeros((LMAX+1))
-    #-- radius of the Earth in km
+    # radius of the Earth in km
     rad_e = 6371.0
     if (hw < CUTOFF):
-        #-- distance is smaller than cutoff
+        # distance is smaller than cutoff
         wl[:]=1.0/(2.0*np.pi)
    else:
-        #-- calculate gaussian weights using recursion
+        # calculate gaussian weights using recursion
        b = np.log(2.0)/(1.0 - np.cos(hw/rad_e))
-        #-- weight for degree 0
+        # weight for degree 0
        wl[0] = 1.0/(2.0*np.pi)
-        #-- weight for degree 1
+        # weight for degree 1
        wl[1] = wl[0]*((1.0+np.exp(-2.0*b))/(1.0-np.exp(-2.0*b))-1.0/b)
-        #-- valid flag
+        # valid flag
        valid = True
-        #-- spherical harmonic degree
+        # spherical harmonic degree
        l = 2
-        #-- while valid (within cutoff)
-        #-- and spherical harmonic degree is less than LMAX
+        # while valid (within cutoff)
+        # and spherical harmonic degree is less than LMAX
        while (valid and (l <= LMAX)):
-            #-- calculate weight with recursion
+            # calculate weight with recursion
            wl[l] = (1.0-2.0*l)/b*wl[l-1]+wl[l-2]
-            #-- weight is less than cutoff
+            # weight is less than cutoff
            if (wl[l] < CUTOFF):
-                #-- set all weights to cutoff
+                # set all weights to cutoff
                wl[l:LMAX+1] = CUTOFF
-                #-- set valid flag
+                # set valid flag
                valid = False
-            #-- add 1 to l
+            # add 1 to l
            l += 1
-    #-- return the gaussian weights
+    # return the gaussian weights
    return wl
diff --git a/gravity_toolkit/gen_averaging_kernel.py b/gravity_toolkit/gen_averaging_kernel.py
index 77efb14a..625145ec 100755
--- a/gravity_toolkit/gen_averaging_kernel.py
+++ b/gravity_toolkit/gen_averaging_kernel.py
@@ -106,64 +106,64 @@ def gen_averaging_kernel(gclm, gslm, eclm, eslm, sigma, hw,
     Geophysical Research: Solid Earth*, 107(B9), 2193, (2002).
     `doi: 10.1029/2001JB000576 <https://doi.org/10.1029/2001JB000576>`_
     """
-    #-- upper bound of spherical harmonic orders (default = LMAX)
+    # upper bound of spherical harmonic orders (default = LMAX)
     if MMAX is None:
         MMAX = np.copy(LMAX)
 
-    #-- Earth Parameters
-    #-- extract arrays of kl, hl, and ll Love Numbers
+    # Earth Parameters
+    # extract arrays of kl, hl, and ll Love Numbers
     dfactor = gravity_toolkit.units(lmax=LMAX).harmonic(*LOVE)
-    #-- average radius of the earth (km)
+    # average radius of the earth (km)
     rad_e = dfactor.rad_e/1e5
 
-    #-- allocate for gaussian function
+    # allocate for gaussian function
     gl = np.zeros((LMAX+1))
-    #-- calculate gaussian weights using recursion
+    # calculate gaussian weights using recursion
     b = np.log(2.0)/(1.0-np.cos(hw/rad_e))
-    #-- weight for degree 0
+    # weight for degree 0
     gl[0] = (1.0-np.exp(-2.0*b))/b
-    #-- weight for degree 1
+    # weight for degree 1
     gl[1] = (1.0+np.exp(-2.0*b))/b - (1.0-np.exp(-2.0*b))/b**2
-    #-- valid flag
+    # valid flag
     valid = True
-    #-- spherical harmonic degree
+    # spherical harmonic degree
     l = 2
-    #-- generate Legendre coefficients of Gaussian correlation function
+    # generate Legendre coefficients of Gaussian correlation function
     while (valid and (l <= LMAX)):
         gl[l] = (1.0 - 2.0*l)/b*gl[l-1] + gl[l-2]
-        #-- check validity
+        # check validity
         if (np.abs(gl[l]) < 1.0e-15):
             gl[l:LMAX+1] = 1.0e-15
             valid = False
-        #-- add to counter for spherical harmonic degree
+        # add to counter for spherical harmonic degree
         l += 1
 
-    #-- Convert sigma to correlation function amplitude
+    # Convert sigma to correlation function amplitude
     area = np.copy(gclm[0,0])
     temp_0 = np.zeros((LMAX+1))
-    for l in range(0,LMAX+1):#-- equivalent to 0:LMAX
-        mm = np.min([MMAX,l])#-- find min of MMAX and l
-        m = np.arange(0,mm+1)#-- create m array 0:l or 0:MMAX
+    for l in range(0,LMAX+1):# equivalent to 0:LMAX
+        mm = np.min([MMAX,l])# find min of MMAX and l
+        m = np.arange(0,mm+1)# create m array 0:l or 0:MMAX
         temp_0[l] = (gl[l]/2.0)*np.sum(gclm[l,m]**2 + gslm[l,m]**2)
 
-    #-- divide by the square of the area under the kernel
+    # divide by the square of the area under the kernel
     temp = np.sum(temp_0)/area**2
-    #-- signal variance
+    # signal variance
     sigma_0 = sigma/np.sqrt(temp)
 
-    #-- Compute averaging kernel coefficients
+    # Compute averaging kernel coefficients
     wclm = np.zeros((LMAX+1,MMAX+1))
     wslm = np.zeros((LMAX+1,MMAX+1))
-    #-- for each spherical harmonic degree
-    for l in range(0,LMAX+1):#-- equivalent to 0:lmax
+    # for each spherical harmonic degree
+    for l in range(0,LMAX+1):# equivalent to 0:lmax
         if (UNITS == 0):
-            #-- Input coefficients are fully-normalized
+            # Input coefficients are fully-normalized
             cmwe = dfactor.cmwe[l]
             ldivg = (cmwe**2)/(gl[l]*sigma_0**2)
         elif (UNITS == 1):
-            #-- Inputs coefficients are mass (cmwe)
+            # Input coefficients are mass (cmwe)
             ldivg = 1.0/(gl[l]*sigma_0**2)
-        #-- for each valid spherical harmonic order
+        # for each valid spherical harmonic order
         mm = np.min([MMAX,l])
         for m in range(0,mm+1):
             temp = 1.0 + 2.0*ldivg*eclm[l,m]**2
@@ -171,5 +171,5 @@ def gen_averaging_kernel(gclm, gslm, eclm, eslm, sigma, hw,
             wclm[l,m] = gclm[l,m]/temp
             temp = 1.0 + 2.0*ldivg*eslm[l,m]**2
             wslm[l,m] = gslm[l,m]/temp
-    #-- return kernels divided by the area under the kernel
+    # return kernels divided by the area under the kernel
     return {'clm':wclm/area, 'slm':wslm/area}
diff --git a/gravity_toolkit/gen_disc_load.py b/gravity_toolkit/gen_disc_load.py
index 6df7f4d2..4fbf034a 100644
--- a/gravity_toolkit/gen_disc_load.py
+++ b/gravity_toolkit/gen_disc_load.py
@@ -139,119 +139,119 @@ def gen_disc_load(data, lon, lat, area, LMAX=60, MMAX=None, UNITS=2,
     `doi: 10.1007/s00190-011-0522-7 <https://doi.org/10.1007/s00190-011-0522-7>`_
     """
-    #-- upper bound of spherical harmonic orders (default = LMAX)
+    # upper bound of spherical harmonic orders (default = LMAX)
     if MMAX is None:
         MMAX = np.copy(LMAX)
 
-    #-- Earth Parameters
+    # Earth Parameters
     factors = gravity_toolkit.units(lmax=LMAX)
-    rho_e = factors.rho_e#-- Average Density of the Earth [g/cm^3]
-    rad_e = factors.rad_e#-- Average Radius of the Earth [cm]
+    rho_e = factors.rho_e# Average Density of the Earth [g/cm^3]
+    rad_e = factors.rad_e# Average Radius of the Earth [cm]
 
-    #-- convert lon and lat to radians
-    phi = lon*np.pi/180.0#-- Longitude in radians
-    th = (90.0 - lat)*np.pi/180.0#-- Colatitude in radians
+    # convert lon and lat to radians
+    phi = lon*np.pi/180.0# Longitude in radians
+    th = (90.0 - lat)*np.pi/180.0# Colatitude in radians
 
-    #-- convert input area into cm^2 and then divide by area of a half sphere
-    #-- alpha will be 1 - the ratio of the input area with the half sphere
+    # convert input area into cm^2 and then divide by area of a half sphere
+    # alpha will be 1 - the ratio of the input area with the half sphere
     alpha = (1.0 - 1e10*area/(2.0*np.pi*rad_e**2))
 
-    #-- Calculate factor to convert from input units into g/cm^2
+    # Calculate factor to convert from input units into g/cm^2
     if (UNITS == 1):
-        #-- Input data is in cm water equivalent (cmH2O)
+        # Input data is in cm water equivalent (cmH2O)
         unit_conv = 1.0
     elif (UNITS == 2):
-        #-- Input data is in gigatonnes (Gt)
-        #-- 1e15 converts from Gt to grams, 1e10 converts from km^2 to cm^2
+        # Input data is in gigatonnes (Gt)
+        # 1e15 converts from Gt to grams, 1e10 converts from km^2 to cm^2
         unit_conv = 1e15/(1e10*area)
     elif (UNITS == 3):
-        #-- Input data is in kg/m^2
-        #-- 1 kg = 1000 g
-        #-- 1 m^2 = 100*100 cm^2 = 1e4 cm^2
+        # Input data is in kg/m^2
+        # 1 kg = 1000 g
+        # 1 m^2 = 100*100 cm^2 = 1e4 cm^2
         unit_conv = 0.1
     elif isinstance(UNITS,(list,np.ndarray)):
-        #-- custom units
+        # custom units
         unit_conv = np.copy(UNITS)
     else:
         raise ValueError(f'Unknown units {UNITS}')
 
-    #-- Coefficient for calculating Stokes coefficients for a disc load
-    #-- From Jacob et al (2012), Farrell (1972) and Longman (1962)
+    # Coefficient for calculating Stokes coefficients for a disc load
+    # From Jacob et al (2012), Farrell (1972) and Longman (1962)
     coeff = 3.0/(rad_e*rho_e)
 
-    #-- extract arrays of kl, hl, and ll Love Numbers
+    # extract arrays of kl, hl, and ll Love Numbers
     hl,kl,ll = LOVE
 
-    #-- calculate array of l values ranging from 0 to LMAX (harmonic degrees)
-    #-- LMAX+1 as there are LMAX+1 elements between 0 and LMAX
+    # calculate array of l values ranging from 0 to LMAX (harmonic degrees)
+    # LMAX+1 as there are LMAX+1 elements between 0 and LMAX
     l = np.arange(LMAX+1)
 
-    #-- calculate SH degree dependent factors to convert from coefficients
-    #-- of mass into normalized geoid coefficients
-    #-- NOTE: these are not the normal factors for converting to geoid due
-    #-- to the square of the denominator
-    #-- kl[l] is the Load Love Number of degree l
+    # calculate SH degree dependent factors to convert from coefficients
+    # of mass into normalized geoid coefficients
+    # NOTE: these are not the normal factors for converting to geoid due
+    # to the square of the denominator
+    # kl[l] is the Load Love Number of degree l
     dfactor = (1.0 + kl[l])/((1.0 + 2.0*l)**2)
 
-    #-- Calculating plms of the disc
-    #-- allocating for constructed array
+    # Calculating plms of the disc
+    # allocating for constructed array
     pl_alpha = np.zeros((LMAX+1))
-    #-- l=0 is a special case (P(-1) = 1, P(1) = cos(alpha))
+    # l=0 is a special case (P(-1) = 1, P(1) = cos(alpha))
     pl_alpha[0] = (1.0 - alpha)/2.0
-    #-- for all other degrees: calculate the legendre polynomials up to LMAX+1
+    # for all other degrees: calculate the legendre polynomials up to LMAX+1
     pl_matrix,_ = legendre_polynomials(LMAX+1,alpha)
-    for l in range(1, LMAX+1):#-- LMAX+1 to include LMAX
-        #-- from Longman (1962) and Jacob et al (2012)
-        #-- unnormalizing Legendre polynomials
-        #-- sqrt(2*l - 1) == sqrt(2*(l-1) + 1)
-        #-- sqrt(2*l + 3) == sqrt(2*(l+1) + 1)
+    for l in range(1, LMAX+1):# LMAX+1 to include LMAX
+        # from Longman (1962) and Jacob et al (2012)
+        # unnormalizing Legendre polynomials
+        # sqrt(2*l - 1) == sqrt(2*(l-1) + 1)
+        # sqrt(2*l + 3) == sqrt(2*(l+1) + 1)
         pl_lower = pl_matrix[l-1]/np.sqrt(2.0*l-1.0)
         pl_upper = pl_matrix[l+1]/np.sqrt(2.0*l+3.0)
         pl_alpha[l] = (pl_lower - pl_upper)/2.0
 
-    #-- Calculate Legendre Polynomials using Holmes and Featherstone relation
-    #-- this would be the plm for the center of the disc load
-    #-- used to rotate the disc load to point lat/lon
+    # Calculate Legendre Polynomials using Holmes and Featherstone relation
+    # this would be the plm for the center of the disc load
+    # used to rotate the disc load to point lat/lon
     if PLM is None:
         plmout,dplm = plm_holmes(LMAX, np.cos(th))
-        #-- truncate precomputed plms to order
+        # truncate precomputed plms to order
         plmout = np.squeeze(plmout[:,:MMAX+1,:])
     else:
-        #-- truncate precomputed plms to degree and order
+        # truncate precomputed plms to degree and order
        plmout = PLM[:LMAX+1,:MMAX+1]
 
-    #-- calculate array of m values ranging from 0 to MMAX (harmonic orders)
-    #-- MMAX+1 as there are MMAX+1 elements between 0 and MMAX
+    # calculate array of m values ranging from 0 to MMAX (harmonic orders)
+    # MMAX+1 as there are MMAX+1 elements between 0 and MMAX
     m = np.arange(MMAX+1)
 
-    #-- Multiplying by the units conversion factor (unit_conv) to
-    #-- convert from the input units into cmH2O equivalent
-    #-- Multiplying point mass data (converted to cmH2O) with sin/cos of m*phis
-    #-- data normally is 1 for a uniform 1cm water equivalent layer
-    #-- but can be a mass point if reconstructing a spherical harmonic field
-    #-- NOTE: NOT a matrix multiplication as data (and phi) is a single point
+    # Multiplying by the units conversion factor (unit_conv) to
+    # convert from the input units into cmH2O equivalent
+    # Multiplying point mass data (converted to cmH2O) with sin/cos of m*phis
+    # data normally is 1 for a uniform 1cm water equivalent layer
+    # but can be a mass point if reconstructing a spherical harmonic field
+    # NOTE: NOT a matrix multiplication as data (and phi) is a single point
     dcos = unit_conv*data*np.cos(m*phi)
     dsin = unit_conv*data*np.sin(m*phi)
 
-    #-- Multiplying by plm_alpha (F_l from Jacob 2012)
+    # Multiplying by plm_alpha (F_l from Jacob 2012)
     plm = np.zeros((LMAX+1,MMAX+1))
-    #-- Initializing preliminary spherical harmonic matrices
+    # Initializing preliminary spherical harmonic matrices
     yclm = np.zeros((LMAX+1,MMAX+1))
     yslm = np.zeros((LMAX+1,MMAX+1))
-    #-- Initializing output spherical harmonic matrices
+    # Initializing output spherical harmonic matrices
     Ylms = gravity_toolkit.harmonics(lmax=LMAX, mmax=MMAX)
     Ylms.clm = np.zeros((LMAX+1,MMAX+1))
     Ylms.slm = np.zeros((LMAX+1,MMAX+1))
-    for m in range(0,MMAX+1):#-- MMAX+1 to include MMAX
-        l = np.arange(m,LMAX+1)#-- LMAX+1 to include LMAX
-        #-- rotate disc load to be centered at lat/lon
+    for m in range(0,MMAX+1):# MMAX+1 to include MMAX
+        l = np.arange(m,LMAX+1)# LMAX+1 to include LMAX
+        # rotate disc load to be centered at lat/lon
         plm[l,m] = plmout[l,m]*pl_alpha[l]
-        #-- multiplying clm by cos(m*phi) and slm by sin(m*phi)
-        #-- to get a field of spherical harmonics
+        # multiplying clm by cos(m*phi) and slm by sin(m*phi)
+        # to get a field of spherical harmonics
         yclm[l,m] = plm[l,m]*dcos[m]
         yslm[l,m] = plm[l,m]*dsin[m]
-        #-- multiplying by coefficients to convert to geoid coefficients
+        # multiplying by coefficients to convert to geoid coefficients
         Ylms.clm[l,m] = coeff*dfactor[l]*yclm[l,m]
         Ylms.slm[l,m] = coeff*dfactor[l]*yslm[l,m]
-    #-- return the output spherical harmonics object
+    # return the output spherical harmonics object
     return Ylms
diff --git a/gravity_toolkit/gen_harmonics.py b/gravity_toolkit/gen_harmonics.py
index 161f23d2..33cf1398 100644
--- a/gravity_toolkit/gen_harmonics.py
+++ b/gravity_toolkit/gen_harmonics.py
@@ -99,26 +99,26 @@ def gen_harmonics(data, lon, lat, **kwargs):
     m: int
         spherical harmonic order to MMAX
     """
-    #-- set default keyword arguments
+    # set default keyword arguments
     kwargs.setdefault('LMAX',60)
     kwargs.setdefault('MMAX',None)
     kwargs.setdefault('PLM',0)
     kwargs.setdefault('METHOD','integration')
-    #-- upper bound of spherical harmonic orders (default = LMAX)
+    # upper bound of spherical harmonic orders (default = LMAX)
     if kwargs['MMAX'] is None:
         kwargs['MMAX'] = np.copy(kwargs['LMAX'])
 
-    #-- convert latitude and longitude to float if integers
+    # convert latitude and longitude to float if integers
     lon = lon.astype(np.float64)
     lat = lat.astype(np.float64)
-    #-- reforming data to lonXlat if input latXlon
+    # reforming data to lonXlat if input latXlon
     sz = np.shape(data)
     dinput = np.transpose(data) if (sz[0] == len(lat)) else np.copy(data)
 
-    #-- convert spatial field into spherical harmonics
+    # convert spatial field into spherical harmonics
     if (kwargs['METHOD'].lower() == 'integration'):
         Ylms = integration(dinput, lon, lat, **kwargs)
     elif (kwargs['METHOD'].lower() == 'fourier'):
         Ylms = fourier(dinput, lon, lat, **kwargs)
 
-    #-- return the output spherical harmonics object
+    # return the output spherical harmonics object
     return Ylms
 
 def integration(data, lon, lat, LMAX=60, MMAX=None, PLM=0, **kwargs):
     """
@@ -152,72 +152,72 @@ def integration(data, lon, lat, LMAX=60, MMAX=None, PLM=0, **kwargs):
         spherical harmonic order to MMAX
     """
 
-    #-- dimensions of the longitude and latitude arrays
+    # dimensions of the longitude and latitude arrays
     nlon = np.int64(len(lon))
     nlat = np.int64(len(lat))
 
-    #-- grid step
+    # grid step
     dlon = np.abs(lon[1]-lon[0])
     dlat = np.abs(lat[1]-lat[0])
-    #-- longitude degree spacing in radians
+    # longitude degree spacing in radians
     dphi = dlon*np.pi/180.0
-    #-- colatitude degree spacing in radians
+    # colatitude degree spacing in radians
     dth = dlat*np.pi/180.0
 
-    #-- reformatting longitudes to range 0:360 (if previously -180:180)
+    # reformatting longitudes to range 0:360 (if previously -180:180)
     if np.count_nonzero(lon < 0):
         lon[lon < 0] += 360.0
-    #-- calculate longitude and colatitude arrays in radians
-    phi = np.reshape(lon,(1,nlon))*np.pi/180.0#-- reshape to 1xnlon
-    th = (90.0 - np.squeeze(lat))*np.pi/180.0#-- remove singleton dimensions
+    # calculate longitude and colatitude arrays in radians
+    phi = np.reshape(lon,(1,nlon))*np.pi/180.0# reshape to 1xnlon
+    th = (90.0 - np.squeeze(lat))*np.pi/180.0# remove singleton dimensions
 
-    #-- Calculating cos/sin of phi arrays (output [m,phi])
-    #-- LMAX+1 as there are LMAX+1 elements between 0 and LMAX
+    # Calculating cos/sin of phi arrays (output [m,phi])
+    # LMAX+1 as there are LMAX+1 elements between 0 and LMAX
     m = np.arange(MMAX+1)[:, np.newaxis]
     ccos = np.cos(np.dot(m,phi))
     ssin = np.sin(np.dot(m,phi))
 
-    #-- Multiplying sin(th) with differentials of theta and phi
-    #-- to calculate the integration factor at each latitude
+    # Multiplying sin(th) with differentials of theta and phi
+    # to calculate the integration factor at each latitude
     int_fact = np.sin(th)*dphi*dth
     coeff = 1.0/(4.0*np.pi)
 
-    #-- Calculate polynomials using Holmes and Featherstone (2002) relation
+    # Calculate polynomials using Holmes and Featherstone (2002) relation
     plm = np.zeros((LMAX+1,MMAX+1,nlat))
     if (np.ndim(PLM) == 0):
         plmout,dplm = plm_holmes(LMAX, np.cos(th))
     else:
-        #-- use precomputed plms to improve computational speed
-        #-- or to use a different recursion relation for polynomials
+        # use precomputed plms to improve computational speed
+        # or to use a different recursion relation for polynomials
         plmout = PLM
 
-    #-- Multiply plms by integration factors [sin(theta)*dtheta*dphi]
-    #-- truncate plms to maximum spherical harmonic order if MMAX < LMAX
+    # Multiply plms by integration factors [sin(theta)*dtheta*dphi]
+    # truncate plms to maximum spherical harmonic order if MMAX < LMAX
     m = np.arange(MMAX+1)
     for j in range(0,nlat):
         plm[:,m,j] = plmout[:,m,j]*int_fact[j]
 
-    #-- Initializing preliminary spherical harmonic matrices
+    # Initializing preliminary spherical harmonic matrices
     yclm = np.zeros((LMAX+1,MMAX+1))
     yslm = np.zeros((LMAX+1,MMAX+1))
-    #-- Initializing output spherical harmonic matrices
+    # Initializing output spherical harmonic matrices
     Ylms = gravity_toolkit.harmonics(lmax=LMAX, mmax=MMAX)
     Ylms.clm = np.zeros((LMAX+1,MMAX+1))
     Ylms.slm = np.zeros((LMAX+1,MMAX+1))
-    #-- Multiplying gridded data with sin/cos of m#phis (output [m,theta])
-    #-- This will sum through all phis in the dot product
+    # Multiplying gridded data with sin/cos of m*phis (output [m,theta])
+    # This will sum through all phis in the dot product
     dcos = np.dot(ccos,data)
     dsin = np.dot(ssin,data)
     for l in range(0,LMAX+1):
-        mm = np.min([MMAX,l])#-- truncate to MMAX if specified (if l > MMAX)
-        m = np.arange(0,mm+1)#-- mm+1 elements between 0 and mm
-        #-- Summing product of plms and data over all latitudes
+        mm = np.min([MMAX,l])# truncate to MMAX if specified (if l > MMAX)
+        m = np.arange(0,mm+1)# mm+1 elements between 0 and mm
+        # Summing product of plms and data over all latitudes
         yclm[l,m] = np.sum(plm[l,m,:]*dcos[m,:], axis=1)
         yslm[l,m] = np.sum(plm[l,m,:]*dsin[m,:], axis=1)
-        #-- convert to output normalization (4-pi normalized harmonics)
+        # convert to output normalization (4-pi normalized harmonics)
         Ylms.clm[l,m] = coeff*yclm[l,m]
         Ylms.slm[l,m] = coeff*yslm[l,m]
 
-    #-- return the output spherical harmonics object
+    # return the output spherical harmonics object
     return Ylms
 
 def fourier(data, lon, lat, LMAX=60, MMAX=None, PLM=0, **kwargs):
     """
@@ -251,32 +251,32 @@ def fourier(data, lon, lat, LMAX=60, MMAX=None, PLM=0, **kwargs):
         spherical harmonic order to MMAX
     """
 
-    #-- dimensions of the longitude and latitude arrays
+    # dimensions of the longitude and latitude arrays
     nlon = np.int64(len(lon))
     nlat = np.int64(len(lat))
-    #-- remove singleton dimensions and convert to radians
+    # remove singleton dimensions and convert to radians
     phi = (np.squeeze(lon)*np.pi/180.0)
-    #-- Colatitude in radians
+    # Colatitude in radians
     theta = ((90.0 - np.squeeze(lat))*np.pi/180.0)
-    #-- MMAX+1 to include MMAX
+    # MMAX+1 to include MMAX
     mm = np.arange(MMAX+1)[:, np.newaxis]
 
-    #-- Calculate cos and sin coefficients of signal
+    # Calculate cos and sin coefficients of signal
     ccos = np.cos(np.dot(mm,phi[np.newaxis,:]))
     ssin = np.sin(np.dot(mm,phi[np.newaxis,:]))
     dcos = np.dot(ccos,data)
     dsin = np.dot(ssin,data)
-    #-- Normalize fourier coefficients
+    # Normalize fourier coefficients
     dcos[0,:] = dcos[0,:]/nlon
     dcos[1:MMAX+1,:] = 2.0*dcos[1:MMAX+1,:]/nlon
     dsin[0,:] = dsin[0,:]/nlon
     dsin[1:MMAX+1,:] = 2.0*dsin[1:MMAX+1,:]/nlon
 
-    #-- Calculate cos and sin coefficients of theta component
-    #-- Because the function is defined on (0,pi)
-    #-- it can be expanded in just cosine terms.
-    #-- this routine assumes that 0 and pi are not included
+    # Calculate cos and sin coefficients of theta component
+    # Because the function is defined on (0,pi)
+    # it can be expanded in just cosine terms.
+    # this routine assumes that 0 and pi are not included
     theta_cc = np.zeros((MMAX+1,MMAX+1))
     theta_sc = np.zeros((MMAX+1,MMAX+1))
     m_even = np.arange(0,MMAX+1,2)
     m_odd = np.arange(1,MMAX,2)
     n_even = len(m_even)
     n_odd = len(m_odd)
@@ -285,14 +285,14 @@ def fourier(data, lon, lat, LMAX=60, MMAX=None, PLM=0, **kwargs):
     if np.isclose([theta[0],theta[nlat-1]],[0.0,np.pi]).all():
-        #-- non-endpoints
+        # non-endpoints
         nt = np.dot(mm,theta[1:nlat-1][np.newaxis,:])
         theta_cc[m_even,:] = 2.0*np.dot(dcos[m_even,1:nlat-1],np.cos(nt).T)
         theta_sc[m_even,:] = 2.0*np.dot(dsin[m_even,1:nlat-1],np.cos(nt).T)
         theta_cc[m_odd,:] = 2.0*np.dot(dcos[m_odd,1:nlat-1],np.sin(nt).T)
         theta_sc[m_odd,:] = 2.0*np.dot(dsin[m_odd,1:nlat-1],np.sin(nt).T)
-        #-- endpoints
+        # endpoints
         theta_cc[m_even,:] += np.dot((dcos[m_even,0]*np.cos(theta[0]) +
             dcos[m_even,nlat-1]*np.cos(theta[nlat-1]))[:,np.newaxis], mm.T)
         theta_sc[m_even,:] += np.dot((dsin[m_even,0]*np.cos(theta[0]) +
@@ -311,40 +311,40 @@ def fourier(data, lon, lat, LMAX=60, MMAX=None, PLM=0, **kwargs):
     else:
         raise ValueError('Latitude coordinates incompatible')
 
-    #-- Normalize theta fourier coefficients
+    # Normalize theta fourier coefficients
     theta_cc[:,0] = theta_cc[:,0]/(2.0*nlat)
     theta_cc[:,1:MMAX+1] = theta_cc[:,1:MMAX+1]/nlat
     theta_sc[:,0] = theta_sc[:,0]/(2.0*nlat)
     theta_sc[:,1:MMAX+1] = theta_sc[:,1:MMAX+1]/nlat
-    #-- Correct normalization for the incomplete coverage of the sphere
+    # Correct normalization for the incomplete coverage of the sphere
     delphi = np.abs(phi[1]-phi[0])
     deltheta = np.abs(theta[1]-theta[0])
     norm = nlon*delphi/(2.0*np.pi)*nlat*deltheta/np.pi
     theta_cc = theta_cc*norm
     theta_sc = theta_sc*norm
 
-    #-- Calculate cos and sin coefficients of Legendre functions
-    #-- Expand m = even terms in a cosine series
-    #-- Expand m = odd terms in a sine series
-    #-- Both are stride 2
+    # Calculate cos and sin coefficients of Legendre functions
+    # Expand m = even terms in a cosine series
+    # Expand m = odd terms in a sine series
+    # Both are stride 2
     if (np.ndim(PLM) == 0):
         plm = fourier_legendre(LMAX,MMAX)
     else:
-        #-- use precomputed plms to improve computational speed
+        # use precomputed plms to improve computational speed
         plm = PLM
 
-    #-- Initializing output spherical harmonic matrices
+    # Initializing output spherical harmonic matrices
     Ylms = gravity_toolkit.harmonics(lmax=LMAX, mmax=MMAX)
     Ylms.clm = np.zeros((LMAX+1,MMAX+1))
     Ylms.slm = np.zeros((LMAX+1,MMAX+1))
 
-    #-- Sum theta fourier coefficients
-    #-- temp is the integral of cos(n theta) cos(k theta) dcos(theta)
-    #-- over the interval 0 to pi
-    #-- n and k must have like parities
+    # Sum theta fourier coefficients
+    # temp is the integral of cos(n theta) cos(k theta) dcos(theta)
+    # over the interval 0 to pi
+    # n and k must have like parities
 
-    #-- m = even terms
+    # m = even terms
     k_even = np.zeros((n_even,n_even))
     for n in range(0,MMAX+2,2):
         k_even[:,n//2] = 0.5*(1.0/(1.0-m_even-n) + 1.0/(1.0+m_even-n) +
@@ -355,7 +355,7 @@ def fourier(data, lon, lat, LMAX=60, MMAX=None, PLM=0, **kwargs):
         k_odd[:,(n-1)//2] = 0.5*(1.0/(1-m_odd-n) + 1.0/(1+m_odd-n) +
             1.0/(1-m_odd+n) + 1.0/(1+m_odd+n))
 
-    #-- calculate spherical harmonics for m == even terms
+    # calculate spherical harmonics for m == even terms
     l_even = np.arange(0,LMAX+1,2)
     l_odd = np.arange(1,LMAX,2)
     for m in range(0,MMAX+2,2):
@@ -366,7 +366,7 @@ def fourier(data, lon, lat, LMAX=60, MMAX=None, PLM=0, **kwargs):
         Ylms.clm[l_odd,m] = np.dot(theta_cc[m,m_odd[:,np.newaxis]].T,temp.T)
         Ylms.slm[l_odd,m] = np.dot(theta_sc[m,m_odd[:,np.newaxis]].T,temp.T)
 
-    #-- m = odd terms
+    # m = odd terms
     k_even = np.zeros((n_even,n_even))
     for n in range(0,MMAX+2,2):
         k_even[:,n//2] = 0.5*(-1.0/(1-m_even-n) + 1.0/(1.0+m_even-n) +
@@ -377,8 +377,8 @@ def fourier(data, lon, lat, LMAX=60, MMAX=None, PLM=0, **kwargs):
         k_odd[:,(n-1)//2] = 0.5*(-1.0/(1-m_odd-n) + 1.0/(1.0+m_odd-n) +
             1.0/(1.0-m_odd+n) - 1.0/(1.0+m_odd+n))
 
-    #-- calculate spherical harmonics for m == odd terms
-    l_even = np.arange(2,LMAX+1,2)#-- do not in include l=0
+    # calculate spherical harmonics for m == odd terms
+    l_even = np.arange(2,LMAX+1,2)# do not include l=0
     l_odd = np.arange(1,LMAX,2)
     for m in range(1,MMAX+1,2):
         temp = np.dot(plm[l_even,m,m_even[:,np.newaxis]].T,k_even)
@@ -388,12 +388,12 @@ def fourier(data, lon, lat, LMAX=60, MMAX=None, PLM=0, **kwargs):
         Ylms.clm[l_odd,m] = np.dot(theta_cc[m,m_odd[:,np.newaxis]].T,temp.T)
         Ylms.slm[l_odd,m] = np.dot(theta_sc[m,m_odd[:,np.newaxis]].T,temp.T)
 
-    #-- Divide by Plm normalization
+    # Divide by Plm normalization
     Ylms.clm[:,0] /= 2.0
     Ylms.slm[:,0] /= 2.0
     Ylms.clm[:,1:MMAX+1] /= 4.0
     Ylms.slm[:,1:MMAX+1] /= 4.0
 
-    #-- return the output spherical harmonics object
+    # return the output spherical harmonics object
     return Ylms
diff --git a/gravity_toolkit/gen_point_load.py b/gravity_toolkit/gen_point_load.py
index de6c5f26..8f7da9cd 100644
--- a/gravity_toolkit/gen_point_load.py
+++ b/gravity_toolkit/gen_point_load.py
@@ -110,53 +110,53 @@ def gen_point_load(data, lon, lat, LMAX=60, MMAX=None, UNITS=1, LOVE=None):
     `doi: 10.1029/JB078i011p01760 <https://doi.org/10.1029/JB078i011p01760>`_
     """
-    #-- upper bound of spherical harmonic orders (default == LMAX)
+    # upper bound of spherical harmonic orders (default == LMAX)
     if MMAX is None:
         MMAX = np.copy(LMAX)
 
-    #-- number of input data points
+    # number of input data points
     npts = len(data.flatten())
-    #-- convert output longitude and latitude into radians
+    # convert output longitude and latitude into radians
     phi = np.pi*lon.flatten()/180.0
     theta = np.pi*(90.0 - lat.flatten())/180.0
 
-    #-- SH Degree dependent factors to convert into fully normalized SH's
-    #-- use splat operator to extract arrays of kl, hl, and ll Love Numbers
+    # SH Degree dependent factors to convert into fully normalized SH's
+    # use splat operator to extract arrays of kl, hl, and ll Love Numbers
     factors = gravity_toolkit.units(lmax=LMAX).spatial(*LOVE)
-    #-- extract degree dependent factor for specific units
+    # extract degree dependent factor for specific units
     int_fact = np.zeros((npts))
     if (UNITS == 1):
-        #-- Default Parameter: Input in grams (g)
+        # Default Parameter: Input in grams (g)
         dfactor = factors.cmwe/(factors.rad_e**2)
         int_fact[:] = 1.0
     elif (UNITS == 2):
-        #-- Input in gigatonnes (Gt)
+        # Input in gigatonnes (Gt)
         dfactor = factors.cmwe/(factors.rad_e**2)
         int_fact[:] = 1e15
     elif isinstance(UNITS,(list,np.ndarray)):
-        #-- custom units
+        # custom units
         dfactor = np.copy(UNITS)
         int_fact[:] = 1.0
     else:
         raise ValueError(f'Unknown units {UNITS}')
 
-    #-- flattened form of data converted to units
+    # flattened form of data converted to units
     D = int_fact*data.flatten()
 
-    #-- Initializing output spherical harmonic matrices
+    # Initializing output spherical harmonic matrices
     Ylms = gravity_toolkit.harmonics(lmax=LMAX, mmax=MMAX)
     Ylms.clm = np.zeros((LMAX+1,MMAX+1))
     Ylms.slm = np.zeros((LMAX+1,MMAX+1))
-    #-- for each degree l
+    # for each degree l
     for l in range(LMAX+1):
         m1 = np.min([l,MMAX]) + 1
         SPH = spherical_harmonic_matrix(l,D,phi,theta,dfactor[l])
-        #-- truncate to spherical harmonic order and save to output
+        # truncate to spherical harmonic order and save to output
         Ylms.clm[l,:m1] = SPH.real[:m1]
         Ylms.slm[l,:m1] = SPH.imag[:m1]
-    #-- return the output spherical harmonics object
+    # return the output spherical harmonics object
     return Ylms
 
-#-- calculate spherical harmonics of degree l evaluated at (theta,phi)
+# calculate spherical harmonics of degree l evaluated at (theta,phi)
 def spherical_harmonic_matrix(l,data,phi,theta,coeff):
     """
     Calculates spherical harmonics of degree l evaluated at coordinates
@@ -179,15 +179,15 @@ def spherical_harmonic_matrix(l,data,phi,theta,coeff):
     Ylms: float
         spherical harmonic coefficients in Eulerian form
     """
-    #-- calculate normalized legendre polynomials (points, order)
+    # calculate normalized legendre polynomials (points, order)
     Pl = legendre(l, np.cos(theta), NORMALIZE=True).T
-    #-- spherical harmonic orders up to degree l
+    # spherical harmonic orders up to degree l
     m = np.arange(0,l+1)
-    #-- calculate Euler's of spherical harmonic order multiplied by azimuth phi
+    # calculate Euler's of spherical harmonic order multiplied by azimuth phi
     mphi = np.exp(1j*np.dot(np.squeeze(phi)[:,np.newaxis],m[np.newaxis,:]))
-    #-- reshape data to order
+    # reshape data to order
     D = np.kron(np.ones((1,l+1)), data[:,np.newaxis])
-    #-- calculate spherical harmonics and multiply by coefficients and data
+    # calculate spherical harmonics and multiply by coefficients and data
     Ylms = coeff*D*Pl*mphi
-    #-- calculate the sum over all points and return harmonics for degree l
+    # calculate the sum over all points and return harmonics for degree l
     return np.sum(Ylms,axis=0)
diff --git a/gravity_toolkit/gen_spherical_cap.py b/gravity_toolkit/gen_spherical_cap.py
index b597e8d4..fcb2fae4 100755
--- a/gravity_toolkit/gen_spherical_cap.py
+++ b/gravity_toolkit/gen_spherical_cap.py
@@ -160,144 +160,144 @@ def gen_spherical_cap(data, lon, lat, LMAX=60, MMAX=None,
     `doi: 10.1007/s00190-011-0522-7 <https://doi.org/10.1007/s00190-011-0522-7>`_
     """
-    #-- upper bound of spherical harmonic orders (default = LMAX)
+    # upper bound of spherical harmonic orders (default = LMAX)
     if MMAX is None:
         MMAX = np.copy(LMAX)
 
-    #-- Earth Parameters
+    # Earth Parameters
     factors = gravity_toolkit.units(lmax=LMAX)
-    rho_e = factors.rho_e#-- Average Density of the Earth [g/cm^3]
-    rad_e = factors.rad_e#-- Average Radius of the Earth [cm]
+    rho_e = factors.rho_e# Average Density of the Earth [g/cm^3]
+    rad_e = factors.rad_e# Average Radius of the Earth [cm]
 
-    #-- convert lon and lat to radians
-    phi = lon*np.pi/180.0#-- Longitude in radians
-    th = (90.0 - lat)*np.pi/180.0#-- Colatitude in radians
+    # convert lon and lat to radians
+    phi = lon*np.pi/180.0# Longitude in radians
+    th = (90.0 - lat)*np.pi/180.0# Colatitude in radians
 
-    #-- Converting input area into an equivalent spherical cap radius
-    #-- Following Jacob et al. (2012) Equation 4 and 5
-    #-- alpha is the vertical semi-angle subtending a cone at the
-    #-- center of the earth
+    # Converting input area into an equivalent spherical cap radius
+    # Following Jacob et al. (2012) Equation 4 and 5
+    # alpha is the vertical semi-angle subtending a cone at the
+    # center of the earth
     if (RAD_CAP != 0):
-        #-- if given spherical cap radius in degrees
-        #-- converting to radians
+        # if given spherical cap radius in degrees
+        # converting to radians
         alpha = RAD_CAP*np.pi/180.0
     elif (AREA != 0):
-        #-- if given spherical cap area in cm^2
-        #-- radius in centimeters
+        # if given spherical cap area in cm^2
+        # radius in centimeters
         radius_cm = np.sqrt(AREA/np.pi)
-        #-- Calculating angular radius of spherical cap
+        # Calculating angular radius of spherical cap
         alpha = (radius_cm/rad_e)
     elif (RAD_KM != 0):
-        #-- if given spherical cap radius in kilometers
-        #-- Calculating angular radius of spherical cap
+        # if given spherical cap radius in kilometers
+        # Calculating angular radius of spherical cap
         alpha = (1e5*RAD_KM)/rad_e
     else:
         raise ValueError('Input RAD_CAP, AREA or RAD_KM of spherical cap')
 
-    #-- Calculate factor to convert from input units into cmH2O equivalent
-    #-- Default input is for inputs already in cmH2O (unit_conv = 1)
+    # Calculate factor to convert from input units into cmH2O equivalent
+    # Default input is for inputs already in cmH2O (unit_conv = 1)
     if (UNITS == 1):
-        #-- Input data is in cm water equivalent (cmH2O)
+        # Input data is in cm water equivalent (cmH2O)
         unit_conv = 1.0
     elif (UNITS == 2):
-        #-- Input data is in gigatonnes (Gt)
-        #-- calculate spherical cap area from angular radius
+        # Input data is in gigatonnes (Gt)
+        # calculate spherical cap area from angular radius
         area = np.pi*(alpha*rad_e)**2
-        #-- the 1.e15 converts from gigatons/cm^2 to cm of water
-        #-- 1 g/cm^3 = 1000 kg/m^3 = density water
-        #-- 1 Gt = 1 Pg = 1.e15 g
+        # the 1.e15 converts from gigatons/cm^2 to cm of water
+        # 1 g/cm^3 = 1000 kg/m^3 = density water
+        # 1 Gt = 1 Pg = 1.e15 g
         unit_conv = 1.e15/area
     elif (UNITS == 3):
-        #-- Input data is in kg/m^2
-        #-- 1 kg = 1000 g
-        #-- 1 m^2 = 100*100 cm^2 = 1e4 cm^2
+        # Input data is in kg/m^2
+        # 1 kg = 1000 g
+        # 1 m^2 = 100*100 cm^2 = 1e4 cm^2
         unit_conv = 0.1
     elif isinstance(UNITS,(list,np.ndarray)):
-        #-- custom units
+        # custom units
         unit_conv = np.copy(UNITS)
     else:
         raise ValueError(f'Unknown units {UNITS}')
 
-    #-- Coefficient for calculating Stokes coefficients for a spherical cap
-    #-- From Jacob et al (2012), Farrell (1972) and Longman (1962)
+    # Coefficient for calculating Stokes coefficients for a spherical cap
+    # From Jacob et al (2012), Farrell (1972) and Longman (1962)
     coeff = 3.0/(rad_e*rho_e)
 
-    #-- extract arrays of kl, hl, and ll Love Numbers
+    # extract arrays of kl, hl, and ll Love Numbers
     hl,kl,ll = LOVE
 
-    #-- calculate array of l values ranging from 0 to LMAX (harmonic degrees)
-    #-- LMAX+1 as there are LMAX+1 elements between 0 and LMAX
+    # calculate array of l values ranging from 0 to LMAX (harmonic degrees)
+    # LMAX+1 as there are LMAX+1 elements between 0 and LMAX
     l = np.arange(LMAX+1)
 
-    #-- calculate SH degree dependent factors to convert from coefficients
-    #-- of mass into normalized geoid coefficients
-    #-- NOTE: these are not the normal factors for converting to geoid due
-    #-- to the square of the denominator
-    #-- kl[l] is the Load Love Number of degree l
+    # calculate SH degree dependent factors to convert from coefficients
+    # of mass into normalized geoid coefficients
+    # NOTE: these are not the normal factors for converting to geoid due
+    # to the square of the denominator
+    # kl[l] is the Load Love Number of degree l
     dfactor = (1.0 + kl[l])/((1.0 + 2.0*l)**2)
 
-    #-- Calculating plms of the spherical caps
-    #-- From Longman et al. (1962)
-    #-- pl_alpha = F(alpha) from Jacob 2011
-    #-- pl_alpha is purely zonal and depends only on the size of the cap
-    #-- allocating for constructed array
+    # Calculating plms of the spherical caps
+    # From Longman et al. (1962)
+    # pl_alpha = F(alpha) from Jacob 2011
+    # pl_alpha is purely zonal and depends only on the size of the cap
+    # allocating for constructed array
     pl_alpha = np.zeros((LMAX+1))
-    #-- l=0 is a special case (P(-1) = 1, P(1) = cos(alpha))
+    # l=0 is a special case (P(-1) = 1, P(1) = cos(alpha))
     pl_alpha[0] = (1.0 - np.cos(alpha))/2.0
-    #-- for all other degrees: calculate the legendre polynomials up to LMAX+1
+    # for all other degrees: calculate the legendre polynomials up to LMAX+1
     pl_matrix,_ = legendre_polynomials(LMAX+1,np.cos(alpha))
-    for l in range(1, LMAX+1):#-- LMAX+1 to include LMAX
-        #-- from Longman (1962) and Jacob et al (2012)
-        #-- unnormalizing Legendre polynomials
-        #-- sqrt(2*l - 1) == sqrt(2*(l-1) + 1)
-        #-- sqrt(2*l + 3) == sqrt(2*(l+1) + 1)
+    for l in range(1, LMAX+1):# LMAX+1 to include LMAX
+        # from Longman (1962) and Jacob et al (2012)
+        # unnormalizing Legendre polynomials
+        # sqrt(2*l - 1) == sqrt(2*(l-1) + 1)
+        # sqrt(2*l + 3) == sqrt(2*(l+1) + 1)
         pl_lower = pl_matrix[l-1]/np.sqrt(2.0*l-1.0)
         pl_upper = pl_matrix[l+1]/np.sqrt(2.0*l+3.0)
         pl_alpha[l] = (pl_lower - pl_upper)/2.0
 
-    #-- Calculating Legendre Polynomials
-    #-- added option to precompute plms to improve computational speed
-    #-- this would be the plm for the center of the spherical cap
-    #-- used to rotate the spherical cap to point lat/lon
+    # Calculating Legendre Polynomials
+    # added option to precompute plms to improve computational speed
+    # this would be the plm for the center of the spherical cap
+    # used to rotate the spherical cap to point lat/lon
     if PLM is None:
         plmout,dplm = plm_holmes(LMAX, np.cos(th))
-        #-- truncate precomputed plms to order
+        # truncate precomputed plms to order
         plmout = np.squeeze(plmout[:,:MMAX+1,:])
     else:
-        #-- truncate precomputed plms to degree and order
+        # truncate precomputed plms to degree and order
         plmout = PLM[:LMAX+1,:MMAX+1]
 
-    #-- calculate array of m values ranging from 0 to MMAX (harmonic orders)
-    #-- MMAX+1 as there are MMAX+1 elements between 0 and MMAX
+    # calculate array of m values ranging from 0 to MMAX (harmonic orders)
+    # MMAX+1 as there are MMAX+1 elements between 0 and MMAX
     m = np.arange(MMAX+1)
 
-    #-- Multiplying by the units conversion factor (unit_conv) to
-    #-- convert from the input units into cmH2O equivalent
-    #-- Multiplying point mass data (converted to cmH2O) with sin/cos of m*phis
-    #-- data normally is 1 for a uniform 1cm water equivalent layer
-    #-- but can be a mass point if reconstructing a spherical harmonic field
-    #-- NOTE: NOT a matrix multiplication as data (and phi) is a single point
+    # Multiplying by the units conversion factor (unit_conv) to
+    # convert from the input units into cmH2O equivalent
+    # Multiplying point mass data (converted to cmH2O) with sin/cos of m*phis
+    # data normally is 1 for a uniform 1cm water equivalent layer
+    # but can be a mass point if reconstructing a spherical harmonic field
+    # NOTE: NOT a matrix multiplication as data (and phi) is a single point
     dcos = unit_conv*data*np.cos(m*phi)
     dsin = unit_conv*data*np.sin(m*phi)
-    #--
Multiplying by plm_alpha (F_l from Jacob 2012) + # Multiplying by plm_alpha (F_l from Jacob 2012) plm = np.zeros((LMAX+1,MMAX+1)) - #-- Initializing preliminary spherical harmonic matrices + # Initializing preliminary spherical harmonic matrices yclm = np.zeros((LMAX+1,MMAX+1)) yslm = np.zeros((LMAX+1,MMAX+1)) - #-- Initializing output spherical harmonic matrices + # Initializing output spherical harmonic matrices Ylms = gravity_toolkit.harmonics(lmax=LMAX, mmax=MMAX) Ylms.clm = np.zeros((LMAX+1,MMAX+1)) Ylms.slm = np.zeros((LMAX+1,MMAX+1)) - for m in range(0,MMAX+1):#-- MMAX+1 to include MMAX - l = np.arange(m,LMAX+1)#-- LMAX+1 to include LMAX - #-- rotate spherical cap to be centered at lat/lon + for m in range(0,MMAX+1):# MMAX+1 to include MMAX + l = np.arange(m,LMAX+1)# LMAX+1 to include LMAX + # rotate spherical cap to be centered at lat/lon plm[l,m] = plmout[l,m]*pl_alpha[l] - #-- multiplying clm by cos(m*phi) and slm by sin(m*phi) - #-- to get a field of spherical harmonics + # multiplying clm by cos(m*phi) and slm by sin(m*phi) + # to get a field of spherical harmonics yclm[l,m] = plm[l,m]*dcos[m] yslm[l,m] = plm[l,m]*dsin[m] - #-- multiplying by coefficients to convert to geoid coefficients + # multiplying by coefficients to convert to geoid coefficients Ylms.clm[l,m] = coeff*dfactor[l]*yclm[l,m] Ylms.slm[l,m] = coeff*dfactor[l]*yslm[l,m] - #-- return the output spherical harmonics object + # return the output spherical harmonics object return Ylms diff --git a/gravity_toolkit/gen_stokes.py b/gravity_toolkit/gen_stokes.py index afc71824..095e1c6b 100755 --- a/gravity_toolkit/gen_stokes.py +++ b/gravity_toolkit/gen_stokes.py @@ -123,106 +123,106 @@ def gen_stokes(data, lon, lat, LMIN=0, LMAX=60, MMAX=None, UNITS=1, `doi: 10.1029/98JB02844 `_ """ - #-- converting LMIN and LMAX to integer + # converting LMIN and LMAX to integer LMIN = np.int64(LMIN) LMAX = np.int64(LMAX) - #-- upper bound of spherical harmonic orders (default = LMAX) + # upper bound of spherical harmonic orders (default = LMAX) MMAX = np.copy(LMAX) if (MMAX is None) else MMAX - #-- grid dimensions + # grid dimensions nlat = np.int64(len(lat)) - #-- grid step + # grid step dlon = np.abs(lon[1]-lon[0]) dlat = np.abs(lat[1]-lat[0]) - #-- longitude degree spacing in radians + # longitude degree spacing in radians dphi = dlon*np.pi/180.0 - #-- colatitude degree spacing in radians + # colatitude degree spacing in radians dth = dlat*np.pi/180.0 - #-- reformatting longitudes to range 0:360 (if previously -180:180) + # reformatting longitudes to range 0:360 (if previously -180:180) lon = np.squeeze(lon.copy()) if np.any(lon < 0): lon_ind, = np.nonzero(lon < 0) lon[lon_ind] += 360.0 - #-- Longitude in radians + # Longitude in radians phi = lon[np.newaxis,:]*np.pi/180.0 - #-- Colatitude in radians + # Colatitude in radians th = (90.0 - np.squeeze(lat.copy()))*np.pi/180.0 - #-- reforming data to lonXlat if input latXlon + # reforming data to lonXlat if input latXlon sz = np.shape(data) data = data.T if (sz[0] == nlat) else np.copy(data) - #-- SH Degree dependent factors to convert into fully normalized SH's - #-- use splat operator to extract arrays of kl, hl, and ll Love Numbers + # SH Degree dependent factors to convert into fully normalized SH's + # use splat operator to extract arrays of kl, hl, and ll Love Numbers factors = gravity_toolkit.units(lmax=LMAX).spatial(*LOVE) - #-- extract degree dependent factor for specific units - #-- calculate integration factors for theta and phi - #-- Multiplying sin(th) with differentials 
of theta and phi
-    #-- to calculate the integration factor at each latitude
+    # extract degree dependent factor for specific units
+    # calculate integration factors for theta and phi
+    # Multiplying sin(th) with differentials of theta and phi
+    # to calculate the integration factor at each latitude
    int_fact = np.zeros((nlat))
    if (UNITS == 1):
-        #-- Default Parameter: Input in cm w.e. (g/cm^2)
+        # Default Parameter: Input in cm w.e. (g/cm^2)
        dfactor = factors.cmwe
        int_fact[:] = np.sin(th)*dphi*dth
    elif (UNITS == 2):
-        #-- Input in gigatonnes (Gt)
+        # Input in gigatonnes (Gt)
        dfactor = factors.cmwe
-        #-- rad_e: Average Radius of the Earth [cm]
+        # rad_e: Average Radius of the Earth [cm]
        int_fact[:] = 1e15/(factors.rad_e**2)
    elif (UNITS == 3):
-        #-- Input in kg/m^2 (mm w.e.)
+        # Input in kg/m^2 (mm w.e.)
        dfactor = factors.mmwe
        int_fact[:] = np.sin(th)*dphi*dth
    elif isinstance(UNITS,(list,np.ndarray)):
-        #-- custom units
+        # custom units
        dfactor = np.copy(UNITS)
        int_fact[:] = np.sin(th)*dphi*dth
    else:
        raise ValueError(f'Unknown units {UNITS}')
-    #-- Calculating cos/sin of phi arrays
-    #-- output [m,phi]
+    # Calculating cos/sin of phi arrays
+    # output [m,phi]
    m = np.arange(MMAX+1)
    ccos = np.cos(np.dot(m[:,np.newaxis],phi))
    ssin = np.sin(np.dot(m[:,np.newaxis],phi))
-    #-- Calculating fully-normalized Legendre Polynomials
-    #-- Output is plm[l,m,th]
+    # Calculating fully-normalized Legendre Polynomials
+    # Output is plm[l,m,th]
    plm = np.zeros((LMAX+1,MMAX+1,nlat))
-    #-- added option to precompute plms to improve computational speed
+    # added option to precompute plms to improve computational speed
    if PLM is None:
-        #-- if plms are not pre-computed: calculate Legendre polynomials
+        # if plms are not pre-computed: calculate Legendre polynomials
        PLM, dPLM = plm_holmes(LMAX, np.cos(th))
-    #-- Multiplying by integration factors [sin(theta)*dtheta*dphi]
-    #-- truncate legendre polynomials to spherical harmonic order MMAX
+    # Multiplying by integration factors [sin(theta)*dtheta*dphi]
+    # truncate legendre polynomials to spherical harmonic order MMAX
    for j in range(0,nlat):
        plm[:,m,j] = PLM[:,m,j]*int_fact[j]
-    #-- Initializing preliminary spherical harmonic matrices
+    # Initializing preliminary spherical harmonic matrices
    yclm = np.zeros((LMAX+1,MMAX+1))
    yslm = np.zeros((LMAX+1,MMAX+1))
-    #-- Initializing output spherical harmonic matrices
+    # Initializing output spherical harmonic matrices
    Ylms = gravity_toolkit.harmonics(lmax=LMAX, mmax=MMAX)
    Ylms.clm = np.zeros((LMAX+1,MMAX+1))
    Ylms.slm = np.zeros((LMAX+1,MMAX+1))
-    #-- Multiplying gridded data with sin/cos of m#phis
-    #-- This will sum through all phis in the dot product
-    #-- output [m,theta]
+    # Multiplying gridded data with sin/cos of m*phi
+    # This will sum through all phis in the dot product
+    # output [m,theta]
    dcos = np.dot(ccos,data)
    dsin = np.dot(ssin,data)
-    for l in range(LMIN,LMAX+1):#-- equivalent to LMIN:LMAX
-        mm = np.min([MMAX,l])#-- truncate to MMAX if specified (if l > MMAX)
-        m = np.arange(0,mm+1)#-- mm+1 elements between 0 and mm
-        #-- Summing product of plms and data over all latitudes
-        #-- axis=1 signifies the direction of the summation
+    for l in range(LMIN,LMAX+1):# equivalent to LMIN:LMAX
+        mm = np.min([MMAX,l])# truncate to MMAX if specified (if l > MMAX)
+        m = np.arange(0,mm+1)# mm+1 elements between 0 and mm
+        # Summing product of plms and data over all latitudes
+        # axis=1 signifies the direction of the summation
        yclm[l,m] = np.sum(plm[l,m,:]*dcos[m,:], axis=1)
        yslm[l,m] = np.sum(plm[l,m,:]*dsin[m,:], axis=1)
-    #--
Multiplying by factors to convert to fully normalized coefficients + # Multiplying by factors to convert to fully normalized coefficients Ylms.clm[l,m] = dfactor[l]*yclm[l,m] Ylms.slm[l,m] = dfactor[l]*yslm[l,m] - #-- return the output spherical harmonics object + # return the output spherical harmonics object return Ylms \ No newline at end of file diff --git a/gravity_toolkit/geocenter.py b/gravity_toolkit/geocenter.py index b3db7681..c4a1046c 100644 --- a/gravity_toolkit/geocenter.py +++ b/gravity_toolkit/geocenter.py @@ -71,16 +71,16 @@ class geocenter(object): """ np.seterr(invalid='ignore') def __init__(self, **kwargs): - #-- WGS84 ellipsoid parameters - a_axis = 6378137.0#-- [m] semimajor axis of the ellipsoid - flat = 1.0/298.257223563#-- flattening of the ellipsoid - #-- Mean Earth's Radius in mm having the same volume as WGS84 ellipsoid + # WGS84 ellipsoid parameters + a_axis = 6378137.0# [m] semimajor axis of the ellipsoid + flat = 1.0/298.257223563# flattening of the ellipsoid + # Mean Earth's Radius in mm having the same volume as WGS84 ellipsoid kwargs.setdefault('radius', 1000.0*a_axis*(1.0 - flat)**(1.0/3.0)) - #-- cartesian coordinates + # cartesian coordinates kwargs.setdefault('X',None) kwargs.setdefault('Y',None) kwargs.setdefault('Z',None) - #-- set default class attributes + # set default class attributes self.C10=None self.C11=None self.S11=None @@ -90,7 +90,7 @@ def __init__(self, **kwargs): self.time=None self.month=None self.filename=None - #-- Average Radius of the Earth [mm] + # Average Radius of the Earth [mm] self.radius=copy.copy(kwargs['radius']) def case_insensitive_filename(self,filename): @@ -102,29 +102,29 @@ def case_insensitive_filename(self,filename): filename: str input filename """ - #-- check if filename is open file object + # check if filename is open file object if isinstance(filename, io.IOBase): self.filename = copy.copy(filename) else: - #-- tilde-expand input filename + # tilde-expand input filename self.filename = os.path.expanduser(filename) - #-- check if file presently exists with input case + # check if file presently exists with input case if not os.access(self.filename,os.F_OK): - #-- search for filename without case dependence + # search for filename without case dependence basename = os.path.basename(filename) directory = os.path.dirname(os.path.expanduser(filename)) f = [f for f in os.listdir(directory) if re.match(basename,f,re.I)] - #-- check that geocenter file exists + # check that geocenter file exists if not f: errmsg = f'{filename} not found in file system' raise FileNotFoundError(errmsg) self.filename = os.path.join(directory,f.pop()) - #-- print filename + # print filename logging.debug(self.filename) return self - #-- PURPOSE: read AOD1b geocenter for month and calculate the mean harmonics - #-- need to run aod1b_geocenter.py to write these monthly geocenter files + # PURPOSE: read AOD1b geocenter for month and calculate the mean harmonics + # need to run aod1b_geocenter.py to write these monthly geocenter files def from_AOD1B(self, release, calendar_year, calendar_month): """ Reads monthly non-tidal ocean and atmospheric variation geocenter files @@ -139,17 +139,17 @@ def from_AOD1B(self, release, calendar_year, calendar_month): calendar month of data """ - #-- full path to AOD geocenter for month (using glo coefficients) + # full path to AOD geocenter for month (using glo coefficients) args = (release,'glo',calendar_year,calendar_month) AOD1B_file = 'AOD1B_{0}_{1}_{2:4.0f}_{3:02.0f}.txt'.format(*args) - #-- check that 
file exists + # check that file exists if not os.access(os.path.join(self.directory,AOD1B_file), os.F_OK): errmsg = f'AOD1B File {AOD1B_file} not in File System' raise FileNotFoundError(errmsg) - #-- read AOD1b geocenter skipping over commented header text + # read AOD1b geocenter skipping over commented header text with open(os.path.join(self.directory,AOD1B_file), mode='r', encoding='utf8') as f: file_contents=[i for i in f.read().splitlines() if not re.match(r'#',i)] - #-- extract X,Y,Z from each line in the file + # extract X,Y,Z from each line in the file n_lines = len(file_contents) temp = geocenter() temp.X = np.zeros((n_lines)) @@ -157,16 +157,16 @@ def from_AOD1B(self, release, calendar_year, calendar_month): temp.Z = np.zeros((n_lines)) for i,line in enumerate(file_contents): line_contents = line.split() - #-- first column: ISO-formatted date and time + # first column: ISO-formatted date and time cal_date = time.strptime(line_contents[0],r'%Y-%m-%dT%H:%M:%S') - #-- verify that dates are within year and month + # verify that dates are within year and month assert (cal_date.tm_year == calendar_year) assert (cal_date.tm_mon == calendar_month) - #-- second-fourth columns: X, Y and Z geocenter variations + # second-fourth columns: X, Y and Z geocenter variations temp.X[i],temp.Y[i],temp.Z[i] = np.array(line_contents[1:],dtype='f') - #-- convert X,Y,Z into spherical harmonics + # convert X,Y,Z into spherical harmonics temp.from_cartesian() - #-- return the spherical harmonic coefficients + # return the spherical harmonic coefficients return temp def from_gravis(self, geocenter_file, **kwargs): @@ -192,94 +192,94 @@ def from_gravis(self, geocenter_file, **kwargs): `doi: 10.5880/GFZ.GRAVIS_06_L2B `_ """ - #-- set filename + # set filename self.case_insensitive_filename(geocenter_file) - #-- set default keyword arguments + # set default keyword arguments kwargs.setdefault('header',True) - #-- Combined GRACE/SLR geocenter solution file produced by GFZ GravIS - #-- Column 1: MJD of BEGINNING of solution data span - #-- Column 2: Year and fraction of year of BEGINNING of solution data span - #-- Column 3: Coefficient C(1,0) - #-- Column 4: Coefficient C(1,0) - mean C(1,0) (1.0E-10) - #-- Column 5: C(1,0) uncertainty (1.0E-10) - #-- Column 6: Coefficient C(1,1) - #-- Column 7: Coefficient C(1,1) - mean C(1,1) (1.0E-10) - #-- Column 8: C(1,1) uncertainty (1.0E-10) - #-- Column 9: Coefficient S(1,1) - #-- Column 10: Coefficient S(1,1) - mean S(1,1) (1.0E-10) - #-- Column 11: S(1,1) uncertainty (1.0E-10) + # Combined GRACE/SLR geocenter solution file produced by GFZ GravIS + # Column 1: MJD of BEGINNING of solution data span + # Column 2: Year and fraction of year of BEGINNING of solution data span + # Column 3: Coefficient C(1,0) + # Column 4: Coefficient C(1,0) - mean C(1,0) (1.0E-10) + # Column 5: C(1,0) uncertainty (1.0E-10) + # Column 6: Coefficient C(1,1) + # Column 7: Coefficient C(1,1) - mean C(1,1) (1.0E-10) + # Column 8: C(1,1) uncertainty (1.0E-10) + # Column 9: Coefficient S(1,1) + # Column 10: Coefficient S(1,1) - mean S(1,1) (1.0E-10) + # Column 11: S(1,1) uncertainty (1.0E-10) with open(self.filename, mode='r', encoding='utf8') as f: file_contents = f.read().splitlines() - #-- number of lines contained in the file + # number of lines contained in the file file_lines = len(file_contents) - #-- counts the number of lines in the header + # counts the number of lines in the header count = 0 - #-- Reading over header text + # Reading over header text while kwargs['header']: - #-- file 
line at count + # file line at count line = file_contents[count] - #-- find PRODUCT: within line to set HEADER flag to False when found + # find PRODUCT: within line to set HEADER flag to False when found kwargs['header'] = not bool(re.match(r'PRODUCT:+',line)) - #-- add 1 to counter + # add 1 to counter count += 1 - #-- output dictionary with spherical harmonic solutions + # output dictionary with spherical harmonic solutions dinput = {} - #-- number of months within the file + # number of months within the file n_mon = file_lines - count - #-- date and GRACE/GRACE-FO month + # date and GRACE/GRACE-FO month dinput['time'] = np.zeros((n_mon)) dinput['month'] = np.zeros((n_mon),dtype=int) - #-- monthly spherical harmonic replacement solutions + # monthly spherical harmonic replacement solutions dinput['C10'] = np.zeros((n_mon)) dinput['C11'] = np.zeros((n_mon)) dinput['S11'] = np.zeros((n_mon)) - #-- monthly spherical harmonic formal standard deviations + # monthly spherical harmonic formal standard deviations dinput['eC10'] = np.zeros((n_mon)) dinput['eC11'] = np.zeros((n_mon)) dinput['eS11'] = np.zeros((n_mon)) - #-- time count + # time count t = 0 - #-- for every other line: + # for every other line: for line in file_contents[count:]: - #-- find numerical instances in line including exponents, - #-- decimal points and negatives + # find numerical instances in line including exponents, + # decimal points and negatives line_contents = re.findall(r'[-+]?\d*\.\d*(?:[eE][-+]?\d+)?',line) count = len(line_contents) - #-- check for empty lines + # check for empty lines if (count > 0): - #-- reading decimal year for start of span + # reading decimal year for start of span dinput['time'][t] = np.float64(line_contents[1]) - #-- Spherical Harmonic data for line + # Spherical Harmonic data for line dinput['C10'][t] = np.float64(line_contents[2]) dinput['C11'][t] = np.float64(line_contents[5]) dinput['S11'][t] = np.float64(line_contents[8]) - #-- monthly spherical harmonic formal standard deviations + # monthly spherical harmonic formal standard deviations dinput['eC10'][t] = np.float64(line_contents[4])*1e-10 dinput['eC11'][t] = np.float64(line_contents[7])*1e-10 dinput['eS11'][t] = np.float64(line_contents[10])*1e-10 - #-- GRACE/GRACE-FO month of geocenter solutions + # GRACE/GRACE-FO month of geocenter solutions dinput['month'][t] = gravity_toolkit.time.calendar_to_grace( dinput['time'][t], around=np.round) - #-- add to t count + # add to t count t += 1 - #-- truncate variables if necessary + # truncate variables if necessary for key,val in dinput.items(): dinput[key] = val[:t] - #-- The 'Special Months' (Nov 2011, Dec 2011 and April 2012) with - #-- Accelerometer shutoffs make the relation between month number - #-- and date more complicated as days from other months are used - #-- For CSR and GFZ: Nov 2011 (119) is centered in Oct 2011 (118) - #-- For JPL: Dec 2011 (120) is centered in Jan 2012 (121) - #-- For all: May 2015 (161) is centered in Apr 2015 (160) - #-- For GSFC: Oct 2018 (202) is centered in Nov 2018 (203) + # The 'Special Months' (Nov 2011, Dec 2011 and April 2012) with + # Accelerometer shutoffs make the relation between month number + # and date more complicated as days from other months are used + # For CSR and GFZ: Nov 2011 (119) is centered in Oct 2011 (118) + # For JPL: Dec 2011 (120) is centered in Jan 2012 (121) + # For all: May 2015 (161) is centered in Apr 2015 (160) + # For GSFC: Oct 2018 (202) is centered in Nov 2018 (203) dinput['month'] = 
gravity_toolkit.time.adjust_months(dinput['month']) - #-- return the GFZ GravIS geocenter solutions + # return the GFZ GravIS geocenter solutions return self.from_dict(dinput) @@ -324,107 +324,107 @@ def from_SLR(self, geocenter_file, **kwargs): - ``'Z_sigma'``: Z-component uncertainty """ - #-- set filename + # set filename self.case_insensitive_filename(geocenter_file) - #-- set default keyword arguments + # set default keyword arguments kwargs.setdefault('AOD',False) kwargs.setdefault('columns',[]) kwargs.setdefault('header',0) kwargs.setdefault('release',None) - #-- copy keyword arguments to variables + # copy keyword arguments to variables COLUMNS = copy.copy(kwargs['columns']) HEADER = copy.copy(kwargs['header']) - #-- directory setup for AOD1b data starting with input degree 1 file - #-- this will verify that the input paths work + # directory setup for AOD1b data starting with input degree 1 file + # this will verify that the input paths work base_dir = os.path.join(os.path.dirname(self.filename),os.path.pardir) self.directory = os.path.abspath(os.path.join(base_dir,'AOD1B', kwargs['release'],'geocenter')) - #-- check that AOD1B directory exists + # check that AOD1B directory exists if not os.access(self.directory, os.F_OK): errmsg = f'{self.directory} not found in file system' raise FileNotFoundError(errmsg) - #-- Input geocenter file and split lines + # Input geocenter file and split lines with open(os.path.expanduser(geocenter_file), mode='r', encoding='utf8') as f: file_contents = f.read().splitlines() ndate = len(file_contents) - HEADER - #-- compile regular expression operator to find numerical instances + # compile regular expression operator to find numerical instances regex_pattern = r'[-+]?(?:(?:\d*\.\d+)|(?:\d+\.?))(?:[Ee][+-]?\d+)?' 
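
# A minimal standalone sketch of the numeric parsing used by these readers,
# assuming a hypothetical four-column record (the column layout is illustrative,
# not the actual SLR file format); replacing 'D' with 'E' converts Fortran
# double-precision exponents so the pattern and float() can parse them:
import re
pattern = r'[-+]?(?:(?:\d*\.\d+)|(?:\d+\.?))(?:[Ee][+-]?\d+)?'
record = '2002.29  -0.1234D-01  0.5678D-02  -0.9000D-03'
values = [float(s) for s in re.findall(pattern, record.replace('D','E'))]
# values == [2002.29, -0.01234, 0.005678, -0.0009]
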
rx = re.compile(regex_pattern, re.VERBOSE) - #-- initializing output data - #-- Degree 1 Stokes Coefficients + # initializing output data + # Degree 1 Stokes Coefficients self.C10 = np.zeros((ndate)) self.C11 = np.zeros((ndate)) self.S11 = np.zeros((ndate)) - #-- Degree 1 Stokes Coefficient Errors + # Degree 1 Stokes Coefficient Errors self.eC10 = np.zeros((ndate)) self.eC11 = np.zeros((ndate)) self.eS11 = np.zeros((ndate)) - #-- Date information + # Date information self.time = np.zeros((ndate)) self.month = np.zeros((ndate), dtype=np.int32) JD = np.zeros((ndate)) - #-- for each date + # for each date for t,file_line in enumerate(file_contents[HEADER:]): - #-- find numerical instances in line - #-- replacing fortran double precision exponential + # find numerical instances in line + # replacing fortran double precision exponential line_contents = rx.findall(file_line.replace('D','E')) - #-- extract date + # extract date self.time[t] = np.float64(line_contents[COLUMNS.index('time')]) - #-- extract geocenter variations + # extract geocenter variations temp = geocenter(radius=self.radius) temp.X = np.float64(line_contents[COLUMNS.index('X')]) temp.Y = np.float64(line_contents[COLUMNS.index('Y')]) temp.Z = np.float64(line_contents[COLUMNS.index('Z')]) temp.from_cartesian() - #-- copy spherical harmonics to output + # copy spherical harmonics to output self.C10[t] = np.copy(temp.C10) self.C11[t] = np.copy(temp.C11) self.S11[t] = np.copy(temp.S11) - #-- extract geocenter uncertainties + # extract geocenter uncertainties temp = geocenter(radius=self.radius) temp.X = np.float64(line_contents[COLUMNS.index('X_sigma')]) temp.Y = np.float64(line_contents[COLUMNS.index('Y_sigma')]) temp.Z = np.float64(line_contents[COLUMNS.index('Z_sigma')]) temp.from_cartesian() - #-- copy spherical harmonic uncertainties to output + # copy spherical harmonic uncertainties to output self.eC10[t] = np.copy(temp.C10) self.eC11[t] = np.copy(temp.C11) self.eS11[t] = np.copy(temp.S11) - #-- Calculation of the Julian date from calendar date + # Calculation of the Julian date from calendar date JD[t] = gravity_toolkit.time.calendar_to_julian(self.time[t]) - #-- convert the julian date into calendar dates + # convert the julian date into calendar dates YY,MM,DD,hh,mm,ss = gravity_toolkit.time.convert_julian(JD[t], FORMAT='tuple') - #-- calculate the GRACE/GRACE-FO month (Apr02 == 004) - #-- https://grace.jpl.nasa.gov/data/grace-months/ + # calculate the GRACE/GRACE-FO month (Apr02 == 004) + # https://grace.jpl.nasa.gov/data/grace-months/ self.month[t] = gravity_toolkit.time.calendar_to_grace(YY,month=MM) - #-- if removing the Atmospheric and Oceanic dealiasing + # if removing the Atmospheric and Oceanic dealiasing if kwargs['AOD']: - #-- read the AOD1B file for the month and year + # read the AOD1B file for the month and year temp = self.from_AOD1B(kwargs['release'], YY, MM) - #-- remove the monthly mean AOD + # remove the monthly mean AOD self.C10[t] -= np.mean(temp.C10) self.C11[t] -= np.mean(temp.C11) self.S11[t] -= np.mean(temp.S11) - #-- The 'Special Months' (Nov 2011, Dec 2011 and April 2012) with - #-- Accelerometer shutoffs make the relation between month number - #-- and date more complicated as days from other months are used - #-- For CSR and GFZ: Nov 2011 (119) is centered in Oct 2011 (118) - #-- For JPL: Dec 2011 (120) is centered in Jan 2012 (121) - #-- For all: May 2015 (161) is centered in Apr 2015 (160) - #-- For GSFC: Oct 2018 (202) is centered in Nov 2018 (203) + # The 'Special Months' (Nov 2011, Dec 2011 
and April 2012) with + # Accelerometer shutoffs make the relation between month number + # and date more complicated as days from other months are used + # For CSR and GFZ: Nov 2011 (119) is centered in Oct 2011 (118) + # For JPL: Dec 2011 (120) is centered in Jan 2012 (121) + # For all: May 2015 (161) is centered in Apr 2015 (160) + # For GSFC: Oct 2018 (202) is centered in Nov 2018 (203) self.month = gravity_toolkit.time.adjust_months(self.month) - #-- return the geocenter harmonics + # return the geocenter harmonics return self def from_UCI(self, geocenter_file, **kwargs): @@ -449,80 +449,80 @@ def from_UCI(self, geocenter_file, **kwargs): and ocean model outputs", *Remote Sensing*, 11(18), 2108, (2019). `doi: 10.3390/rs11182108 `_ """ - #-- set filename + # set filename self.case_insensitive_filename(geocenter_file) - #-- read geocenter file and get contents + # read geocenter file and get contents with open(os.path.expanduser(geocenter_file), mode='r', encoding='utf8') as f: file_contents = f.read().splitlines() - #-- number of lines contained in the file + # number of lines contained in the file file_lines = len(file_contents) - #-- counts the number of lines in the header + # counts the number of lines in the header HEADER = False count = 0 - #-- Reading over header text + # Reading over header text while (HEADER is False) and (count < file_lines): - #-- file line at count + # file line at count line = file_contents[count] - #--if End of YAML Header is found: set HEADER flag + #if End of YAML Header is found: set HEADER flag HEADER = bool(re.search(r"\# End of YAML header",line)) - #-- add 1 to counter + # add 1 to counter count += 1 - #-- verify HEADER flag was set + # verify HEADER flag was set if not HEADER: raise IOError(f'Data not found in file:\n\t{geocenter_file}') - #-- number of months within the file + # number of months within the file n_mon = np.int64(file_lines - count) - #-- output time variables + # output time variables DEG1 = {} DEG1['time'] = np.zeros((n_mon)) DEG1['JD'] = np.zeros((n_mon)) DEG1['month'] = np.zeros((n_mon), dtype=np.int64) - #-- parse the YAML header (specifying yaml loader) + # parse the YAML header (specifying yaml loader) DEG1.update(yaml.load('\n'.join(file_contents[:count]), Loader=yaml.BaseLoader)) - #-- compile numerical expression operator + # compile numerical expression operator regex_pattern = r'[-+]?(?:(?:\d*\.\d+)|(?:\d+\.?))(?:[Ee][+-]?\d+)?' 
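
# A minimal sketch of the Cartesian-to-Stokes relation that from_SLR (above)
# applies through from_cartesian (defined later in this file); the offsets and
# load Love number below are illustrative assumptions, not library defaults:
import numpy as np
radius = 6.371e9              # approximate mean Earth radius in mm
kl = 0.021                    # assumed degree-1 load Love number
X, Y, Z = 1.5, -0.8, 2.1      # hypothetical geocenter offsets in mm
C10 = (1.0 + kl)*Z/(radius*np.sqrt(3.0))
C11 = (1.0 + kl)*X/(radius*np.sqrt(3.0))
S11 = (1.0 + kl)*Y/(radius*np.sqrt(3.0))
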
rx = re.compile(regex_pattern, re.VERBOSE)
-        #-- get names and columns of input variables
+        # get names and columns of input variables
        variables = copy.copy(DEG1['header']['variables'])
        variables.pop('mid-epoch_time')
        variables.pop('month')
        columns = {}
-        #-- for each output data variable
+        # for each output data variable
        for key in variables:
            DEG1[key] = np.zeros((n_mon))
            comment_text, = rx.findall(variables[key]['comment'])
            columns[key] = int(comment_text) - 1
-        #-- for every other line:
+        # for every other line:
        for t, line in enumerate(file_contents[count:]):
-            #-- find numerical instances in line including integers, exponents,
-            #-- decimal points and negatives
+            # find numerical instances in line including integers, exponents,
+            # decimal points and negatives
            line_contents = rx.findall(line)
-            #-- extacting mid-date time and GRACE/GRACE-FO "month"
+            # extracting mid-date time and GRACE/GRACE-FO "month"
            DEG1['time'][t] = np.float64(line_contents[0])
            DEG1['month'][t] = np.int64(line_contents[-1])
-            #-- calculate mid-date as Julian dates
-            #-- calendar year of date
+            # calculate mid-date as Julian dates
+            # calendar year of date
            year = np.floor(DEG1['time'][t])
-            #-- check if year is a leap year
+            # check if year is a leap year
            days_per_year = np.sum(gravity_toolkit.time.calendar_days(year))
-            #-- calculation of day of the year
+            # calculation of day of the year
            day_of_the_year = days_per_year*(DEG1['time'][t] % 1)
-            #-- calculate Julian day
+            # calculate Julian day
            DEG1['JD'][t] = np.float64(367.0*year -
                np.floor(7.0*(year)/4.0) -
                np.floor(3.0*(np.floor((year - 8.0/7.0)/100.0) + 1.0)/4.0) +
                np.floor(275.0/9.0) + day_of_the_year + 1721028.5)
-            #-- extract fully-normalized degree one spherical harmonics
+            # extract fully-normalized degree one spherical harmonics
            for key,val in columns.items():
                DEG1[key][t] = np.float64(line_contents[val])
-        #-- return the geocenter harmonics
+        # return the geocenter harmonics
        return self.from_dict(DEG1)
    def from_swenson(self, geocenter_file, **kwargs):
@@ -547,33 +547,33 @@ def from_swenson(self, geocenter_file, **kwargs):
        Research*, 113(B08410), (2008).
        `doi: 10.1029/2007JB005338 `_
        """
-        #-- set filename
+        # set filename
        self.case_insensitive_filename(geocenter_file)
-        #-- set default keyword arguments
+        # set default keyword arguments
        kwargs.setdefault('header',True)
-        #-- read degree 1 file and get contents
+        # read degree 1 file and get contents
        with open(self.filename, mode='r', encoding='utf8') as f:
            file_contents = f.read().splitlines()
-        #-- number of lines contained in the file
+        # number of lines contained in the file
        file_lines = len(file_contents)
-        #-- counts the number of lines in the header
+        # counts the number of lines in the header
        count = 0
-        #-- Reading over header text
+        # Reading over header text
        while kwargs['header'] and (count < file_lines):
-            #-- file line at count
+            # file line at count
            line = file_contents[count]
-            #-- find Time within line to set HEADER flag to False when found
+            # find Time within line to set HEADER flag to False when found
            kwargs['header'] = not bool(re.search(r"Time",line))
-            #-- add 1 to counter
+            # add 1 to counter
            count += 1
-        #-- catch to see if HEADER flag was not set to false
+        # catch to see if HEADER flag was not set to false
        if kwargs['header']:
            raise IOError(f'Data lines not found in file {geocenter_file}')
-        #-- number of months within the file
+        # number of months within the file
        n_mon = np.int64(file_lines - count)
        self.C10 = np.zeros((n_mon))
        self.C11 = np.zeros((n_mon))
@@ -582,49 +582,49 @@ def from_swenson(self, geocenter_file, **kwargs):
        JD = np.zeros((n_mon))
        self.month = np.zeros((n_mon), dtype=np.int64)
-        #-- compile numerical expression operator
+        # compile numerical expression operator
        regex_pattern = r'[-+]?(?:(?:\d*\.\d+)|(?:\d+\.?))(?:[Ee][+-]?\d+)?'
        rx = re.compile(regex_pattern, re.VERBOSE)
-        #-- for every other line:
+        # for every other line:
        for t, line in enumerate(file_contents[count:]):
-            #-- find numerical instances in line including integers, exponents,
-            #-- decimal points and negatives
+            # find numerical instances in line including integers, exponents,
+            # decimal points and negatives
            line_contents = rx.findall(line)
-            #-- extacting time
+            # extracting time
            self.time[t]=np.float64(line_contents[0])
-            #-- extracting spherical harmonics and convert to cmwe
+            # extracting spherical harmonics and convert to cmwe
            self.C10[t]=0.1*np.float64(line_contents[1])
            self.C11[t]=0.1*np.float64(line_contents[2])
            self.S11[t]=0.1*np.float64(line_contents[3])
-            #-- calculate the GRACE months
+            # calculate the GRACE months
            if (len(line_contents) == 5):
-                #-- months are included as last column
+                # months are included as last column
                self.month[t] = np.int64(line_contents[4])
            else:
-                #-- months to be calculated from date
-                #-- Calculation of the Julian date from calendar date
+                # months to be calculated from date
+                # Calculation of the Julian date from calendar date
                JD[t] = gravity_toolkit.time.calendar_to_julian(self.time[t])
-                #-- convert the julian date into calendar dates (day, month, year)
+                # convert the julian date into calendar dates (day, month, year)
                cal_date = gravity_toolkit.time.convert_julian(JD[t])
-                #-- calculate the GRACE month (Apr02 == 004)
-                #-- https://grace.jpl.nasa.gov/data/grace-months/
-                #-- Notes on special months (e.g. 119, 120) below
+                # calculate the GRACE month (Apr02 == 004)
+                # https://grace.jpl.nasa.gov/data/grace-months/
+                # Notes on special months (e.g.
119, 120) below self.month[t] = gravity_toolkit.time.calendar_to_grace( cal_date['year'], month=cal_date['month']) - #-- The 'Special Months' (Nov 2011, Dec 2011 and April 2012) with - #-- Accelerometer shutoffs make the relation between month number - #-- and date more complicated as days from other months are used - #-- For CSR and GFZ: Nov 2011 (119) is centered in Oct 2011 (118) - #-- For JPL: Dec 2011 (120) is centered in Jan 2012 (121) - #-- For all: May 2015 (161) is centered in Apr 2015 (160) + # The 'Special Months' (Nov 2011, Dec 2011 and April 2012) with + # Accelerometer shutoffs make the relation between month number + # and date more complicated as days from other months are used + # For CSR and GFZ: Nov 2011 (119) is centered in Oct 2011 (118) + # For JPL: Dec 2011 (120) is centered in Jan 2012 (121) + # For all: May 2015 (161) is centered in Apr 2015 (160) self.month = gravity_toolkit.time.adjust_months(self.month) - #-- converts from cm water equivalent to fully-normalized + # converts from cm water equivalent to fully-normalized self.from_cmwe() - #-- return the geocenter harmonics + # return the geocenter harmonics return self def from_tellus(self, geocenter_file, **kwargs): @@ -668,60 +668,60 @@ def from_tellus(self, geocenter_file, **kwargs): *Journal of Geophysical Research: Solid Earth*, 121, (2016). `doi: 10.1002/2016JB013073 `_ """ - #-- set filename + # set filename self.case_insensitive_filename(geocenter_file) - #-- set default keyword arguments + # set default keyword arguments kwargs.setdefault('header',True) kwargs.setdefault('JPL',True) - #-- read degree 1 file and get contents + # read degree 1 file and get contents with open(self.filename, mode='r', encoding='utf8') as f: file_contents = f.read().splitlines() - #-- number of lines contained in the file + # number of lines contained in the file file_lines = len(file_contents) - #-- counts the number of lines in the header + # counts the number of lines in the header count = 0 - #-- Reading over header text + # Reading over header text header_flag = r"end\sof\sheader" if kwargs['JPL'] else r"'\(a6," while kwargs['header']: - #-- file line at count + # file line at count line = file_contents[count] - #-- find header_flag within line to set HEADER flag to False when found + # find header_flag within line to set HEADER flag to False when found kwargs['header'] = not bool(re.match(header_flag,line)) - #-- add 1 to counter + # add 1 to counter count += 1 - #-- number of months within the file + # number of months within the file n_mon = (file_lines - count)//2 - #-- GRACE/GRACE-FO months + # GRACE/GRACE-FO months self.month = np.zeros((n_mon),dtype=np.int64) - #-- calendar dates in year-decimal + # calendar dates in year-decimal self.time = np.zeros((n_mon)) - #-- spherical harmonic data + # spherical harmonic data self.C10 = np.zeros((n_mon)) self.C11 = np.zeros((n_mon)) self.S11 = np.zeros((n_mon)) - #-- spherical harmonic uncertainties + # spherical harmonic uncertainties self.eC10 = np.zeros((n_mon)) self.eC11 = np.zeros((n_mon)) self.eS11 = np.zeros((n_mon)) - #-- compile numerical expression operator + # compile numerical expression operator regex_pattern = r'[-+]?(?:(?:\d*\.\d+)|(?:\d+\.?))(?:[Ee][+-]?\d+)?' 
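
# A minimal sketch of the GRACE/GRACE-FO month convention cited in the comments
# above (April 2002 == month 004); this inlines the arithmetic that
# gravity_toolkit.time.calendar_to_grace wraps (stated here as an assumption)
# and ignores the 'Special Months' corrected afterwards by adjust_months:
def grace_month(year, month):
    # consecutive month count from the start of 2002
    return 12*(year - 2002) + month
assert grace_month(2002, 4) == 4      # April 2002 is GRACE month 004
assert grace_month(2011, 12) == 120   # December 2011, a 'Special Month'
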
rx = re.compile(regex_pattern, re.VERBOSE) - #-- time count + # time count t = 0 - #-- for every line of data + # for every line of data for line in file_contents[count:]: - #-- find numerical instances in line including integers, exponents, - #-- decimal points and negatives + # find numerical instances in line including integers, exponents, + # decimal points and negatives line_contents = rx.findall(line) - #-- spherical harmonic order + # spherical harmonic order m = np.int64(line_contents[2]) - #-- extract spherical harmonic data for order + # extract spherical harmonic data for order if (m == 0): self.C10[t] = np.float64(line_contents[3]) self.eC10[t] = np.float64(line_contents[5]) @@ -733,43 +733,43 @@ def from_tellus(self, geocenter_file, **kwargs): else: raise ValueError(f'Unknown harmonic order {m:d}') - #-- calendar year and month + # calendar year and month if kwargs['JPL']: - #-- start and end date of month + # start and end date of month start_date = time.strptime(line_contents[7][:8],r'%Y%m%d') end_date = time.strptime(line_contents[8][:8],r'%Y%m%d') - #-- convert date to year decimal + # convert date to year decimal ts = gravity_toolkit.time.convert_calendar_decimal(start_date.tm_year, start_date.tm_mon, day=start_date.tm_mday) te = gravity_toolkit.time.convert_calendar_decimal(end_date.tm_year, end_date.tm_mon, day=end_date.tm_mday) - #-- calculate mean time + # calculate mean time self.time[t] = np.mean([ts,te]) - #-- calculate year and month for estimating GRACE/GRACE-FO month + # calculate year and month for estimating GRACE/GRACE-FO month year = np.floor(self.time[t]) month = np.int64(12*(self.time[t] % 1) + 1) else: - #-- dates of month + # dates of month cal_date = time.strptime(line_contents[0][:6],r'%Y%m') - #-- calculate year and month for estimating GRACE/GRACE-FO month + # calculate year and month for estimating GRACE/GRACE-FO month year = cal_date.tm_year month = cal_date.tm_mon - #-- convert date to year decimal + # convert date to year decimal self.time[t], = gravity_toolkit.time.convert_calendar_decimal( cal_date.tm_year, cal_date.tm_mon) - #-- estimated GRACE/GRACE-FO month - #-- Accelerometer shutoffs complicate the month number calculation + # estimated GRACE/GRACE-FO month + # Accelerometer shutoffs complicate the month number calculation self.month[t] = gravity_toolkit.time.calendar_to_grace(year,month) - #-- will only advance in time after reading the - #-- order 1 coefficients (t+0=t) + # will only advance in time after reading the + # order 1 coefficients (t+0=t) t += m - #-- The 'Special Months' (Nov 2011, Dec 2011 and April 2012) with - #-- Accelerometer shutoffs make the relation between month number - #-- and date more complicated as days from other months are used + # The 'Special Months' (Nov 2011, Dec 2011 and April 2012) with + # Accelerometer shutoffs make the relation between month number + # and date more complicated as days from other months are used self.month = gravity_toolkit.time.adjust_months(self.month) - #-- return the geocenter harmonics + # return the geocenter harmonics return self def from_netCDF4(self, geocenter_file, **kwargs): @@ -792,28 +792,28 @@ def from_netCDF4(self, geocenter_file, **kwargs): `doi: 10.3390/rs11182108 `_ """ kwargs.setdefault('compression',None) - #-- set filename + # set filename self.case_insensitive_filename(geocenter_file) - #-- Open the netCDF4 file for reading + # Open the netCDF4 file for reading if (kwargs['compression'] == 'gzip'): - #-- read gzipped file as in-memory (diskless) netCDF4 dataset + 
# read gzipped file as in-memory (diskless) netCDF4 dataset with gzip.open(self.filename,'r') as f: fileID = netCDF4.Dataset(uuid.uuid4().hex, memory=f.read()) elif (kwargs['compression'] == 'bytes'): - #-- read as in-memory (diskless) netCDF4 dataset + # read as in-memory (diskless) netCDF4 dataset fileID = netCDF4.Dataset(uuid.uuid4().hex, memory=self.filename.read()) else: fileID = netCDF4.Dataset(self.filename, 'r') - #-- Getting the data from each netCDF4 variable + # Getting the data from each netCDF4 variable DEG1 = {} - #-- converting netCDF4 objects into numpy arrays + # converting netCDF4 objects into numpy arrays for key,val in fileID.variables.items(): DEG1[key] = val[:].copy() - #-- close the netCDF4 file + # close the netCDF4 file fileID.close() - #-- return the geocenter harmonics + # return the geocenter harmonics return self.from_dict(DEG1) def copy(self, **kwargs): @@ -825,12 +825,12 @@ def copy(self, **kwargs): fields: list default keys in geocenter object """ - #-- set default keyword arguments + # set default keyword arguments kwargs.setdefault('fields',['time','month', 'C10','C11','S11','eC10','eC11','eS11', 'X','Y','Z']) temp = geocenter() - #-- try to assign variables to self + # try to assign variables to self for key in kwargs['fields']: try: val = getattr(self, key) @@ -850,11 +850,11 @@ def from_dict(self, temp, **kwargs): fields: list default keys in dictionary """ - #-- set default keyword arguments + # set default keyword arguments kwargs.setdefault('fields',['time','month', 'C10','C11','S11','eC10','eC11','eS11', 'X','Y','Z','X_sigma','Y_sigma','Z_sigma']) - #-- assign dictionary variables to self + # assign dictionary variables to self for key in kwargs['fields']: try: setattr(self, key, temp[key].copy()) @@ -873,18 +873,18 @@ def from_harmonics(self, temp, **kwargs): fields: list default keys in harmonics object """ - #-- reassign shape and ndim attributes + # reassign shape and ndim attributes temp.update_dimensions() - #-- set default keyword arguments + # set default keyword arguments kwargs.setdefault('fields',['time','month','filename']) - #-- try to assign variables to self + # try to assign variables to self for key in kwargs['fields']: try: val = getattr(temp, key) setattr(self, key, np.copy(val)) except AttributeError: pass - #-- get spherical harmonic objects + # get spherical harmonic objects if (temp.ndim == 2): self.C10 = np.copy(temp.clm[1,0]) self.C11 = np.copy(temp.clm[1,1]) @@ -893,7 +893,7 @@ def from_harmonics(self, temp, **kwargs): self.C10 = np.copy(temp.clm[1,0,:]) self.C11 = np.copy(temp.clm[1,1,:]) self.S11 = np.copy(temp.slm[1,1,:]) - #-- return the geocenter object + # return the geocenter object return self def from_matrix(self, clm, slm): @@ -907,10 +907,10 @@ def from_matrix(self, clm, slm): slm: float sine spherical harmonics of degree 1 """ - #-- verify dimensions + # verify dimensions clm = np.atleast_3d(clm) slm = np.atleast_3d(slm) - #-- output geocenter object + # output geocenter object self.C10 = np.copy(clm[1,0,:]) self.C11 = np.copy(clm[1,1,:]) self.S11 = np.copy(slm[1,1,:]) @@ -925,13 +925,13 @@ def to_dict(self, **kwargs): fields: obj default attributes in geocenter object """ - #-- output dictionary + # output dictionary temp = {} - #-- set default keyword arguments + # set default keyword arguments kwargs.setdefault('fields',['time','month', 'C10','C11','S11','eC10','eC11','eS11', 'X','Y','Z','X_sigma','Y_sigma','Z_sigma']) - #-- assign dictionary variables to self + # assign dictionary variables to self for key 
in kwargs['fields']: try: val = getattr(self, key) @@ -939,19 +939,19 @@ def to_dict(self, **kwargs): pass else: temp[key] = copy.copy(val) - #-- return the dictionary object + # return the dictionary object return temp def to_matrix(self): """ Converts a geocenter object to spherical harmonic matrices """ - #-- verify dimensions + # verify dimensions _,nt = np.shape(np.atleast_2d(self.C10)) - #-- output spherical harmonics + # output spherical harmonics clm = np.zeros((2,2,nt)) slm = np.zeros((2,2,nt)) - #-- copy geocenter harmonics to matrices + # copy geocenter harmonics to matrices clm[1,0,:] = np.atleast_2d(self.C10) clm[1,1,:] = np.atleast_2d(self.C11) slm[1,1,:] = np.atleast_2d(self.S11) @@ -966,14 +966,14 @@ def to_cartesian(self, kl=0.0): kl: float gravitational load love number of degree 1 """ - #-- Stokes Coefficients to cartesian geocenter + # Stokes Coefficients to cartesian geocenter try: self.Z = self.C10*self.radius*np.sqrt(3.0)/(1.0 + kl) self.X = self.C11*self.radius*np.sqrt(3.0)/(1.0 + kl) self.Y = self.S11*self.radius*np.sqrt(3.0)/(1.0 + kl) except Exception as e: pass - #-- convert errors to cartesian geocenter + # convert errors to cartesian geocenter try: self.Z_sigma = self.eC10*self.radius*np.sqrt(3.0)/(1.0 + kl) self.X_sigma = self.eC11*self.radius*np.sqrt(3.0)/(1.0 + kl) @@ -991,15 +991,15 @@ def to_cmwe(self, kl=0.0): kl: float gravitational load love number of degree 1 """ - #-- Average Density of the Earth [g/cm^3] + # Average Density of the Earth [g/cm^3] rho_e = 5.517 - #-- Average Radius of the Earth [cm] + # Average Radius of the Earth [cm] rad_e = 6.371e8 - #-- convert to centimeters water equivalent + # convert to centimeters water equivalent self.C10 *= (rho_e*rad_e)/(1.0 + kl) self.C11 *= (rho_e*rad_e)/(1.0 + kl) self.S11 *= (rho_e*rad_e)/(1.0 + kl) - #-- convert errors to centimeters water equivalent + # convert errors to centimeters water equivalent try: self.eC10 *= (rho_e*rad_e)/(1.0 + kl) self.eC11 *= (rho_e*rad_e)/(1.0 + kl) @@ -1018,11 +1018,11 @@ def to_mmwe(self, kl=0.0): gravitational load love number of degree 1 """ self.to_cmwe(kl=kl) - #-- convert to millimeters water equivalent + # convert to millimeters water equivalent self.C10 *= 10.0 self.C11 *= 10.0 self.S11 *= 10.0 - #-- convert errors to millimeters water equivalent + # convert errors to millimeters water equivalent try: self.eC10 *= 10.0 self.eC11 *= 10.0 @@ -1040,11 +1040,11 @@ def from_cartesian(self, kl=0.0): kl: float gravitational load love number of degree 1 """ - #-- cartesian geocenter to Stokes Coefficients + # cartesian geocenter to Stokes Coefficients self.C10 = (1.0 + kl)*self.Z/(self.radius*np.sqrt(3.0)) self.C11 = (1.0 + kl)*self.X/(self.radius*np.sqrt(3.0)) self.S11 = (1.0 + kl)*self.Y/(self.radius*np.sqrt(3.0)) - #-- convert cartesian geocenter to stokes coefficients + # convert cartesian geocenter to stokes coefficients try: self.eC10 = (1.0 + kl)*self.Z_sigma/(self.radius*np.sqrt(3.0)) self.eC11 = (1.0 + kl)*self.X_sigma/(self.radius*np.sqrt(3.0)) @@ -1062,15 +1062,15 @@ def from_cmwe(self, kl=0.0): kl: float gravitational load love number of degree 1 """ - #-- Average Density of the Earth [g/cm^3] + # Average Density of the Earth [g/cm^3] rho_e = 5.517 - #-- Average Radius of the Earth [cm] + # Average Radius of the Earth [cm] rad_e = 6.371e8 - #-- convert from centimeters water equivalent + # convert from centimeters water equivalent self.C10 *= (1.0 + kl)/(rho_e*rad_e) self.C11 *= (1.0 + kl)/(rho_e*rad_e) self.S11 *= (1.0 + kl)/(rho_e*rad_e) - #-- convert 
errors from centimeters water equivalent
+        # convert errors from centimeters water equivalent
        try:
            self.eC10 *= (1.0 + kl)/(rho_e*rad_e)
            self.eC11 *= (1.0 + kl)/(rho_e*rad_e)
@@ -1089,11 +1089,11 @@
            gravitational load love number of degree 1
        """
        self.from_cmwe(kl=kl)
-        #-- convert from millimeters water equivalent
+        # convert from millimeters water equivalent
        self.C10 /= 10.0
        self.C11 /= 10.0
        self.S11 /= 10.0
-        #-- convert errors from centimeters water equivalent
+        # convert errors from millimeters water equivalent
        try:
            self.eC10 /= 10.0
            self.eC11 /= 10.0
@@ -1114,24 +1114,24 @@ def mean(self, apply=False, indices=Ellipsis):
            indices of input harmonics object to compute mean
        """
        temp = geocenter()
-        #-- calculate mean static field
+        # calculate mean static field
        temp.C10 = np.mean(self.C10[indices])
        temp.C11 = np.mean(self.C11[indices])
        temp.S11 = np.mean(self.S11[indices])
-        #-- calculating the time-variable gravity field by removing
-        #-- the static component of the gravitational field
+        # calculating the time-variable gravity field by removing
+        # the static component of the gravitational field
        if apply:
            self.C10 -= temp.C10
            self.C11 -= temp.C11
            self.S11 -= temp.S11
-        #-- calculate mean of temporal variables
+        # calculate mean of temporal variables
        for key in ['time','month']:
            try:
                val = getattr(self, key)
                setattr(temp, key, np.mean(val[indices]))
            except:
                continue
-        #-- return the mean field
+        # return the mean field
        return temp
    def add(self, temp):
@@ -1202,7 +1202,7 @@ def scale(self, var):
        temp = geocenter()
        temp.time = np.copy(self.time)
        temp.month = np.copy(self.month)
-        #-- multiply by a single constant or a time-variable scalar
+        # multiply by a single constant or a time-variable scalar
        temp.C10 = var*self.C10
        temp.C11 = var*self.C11
        temp.S11 = var*self.S11
diff --git a/gravity_toolkit/grace_date.py b/gravity_toolkit/grace_date.py
index b5ff41b6..08e46c8f 100644
--- a/gravity_toolkit/grace_date.py
+++ b/gravity_toolkit/grace_date.py
@@ -107,7 +107,7 @@ import numpy as np
import gravity_toolkit.time
-#-- PURPOSE: parses GRACE/GRACE-FO data files and assigns month numbers
+# PURPOSE: parses GRACE/GRACE-FO data files and assigns month numbers
def grace_date(base_dir, PROC='', DREL='', DSET='', OUTPUT=True, MODE=0o775):
    """
    Reads index file containing GRACE/GRACE-FO/Swarm data files
@@ -151,136 +151,136 @@ def grace_date(base_dir, PROC='', DREL='', DSET='', OUTPUT=True, MODE=0o775):
        dictionary of GRACE/GRACE-FO files indexed by month
    """
-    #-- Directory of exact product
+    # Directory of exact product
    grace_dir = os.path.join(base_dir, PROC, DREL, DSET)
-    #-- index file containing GRACE/GRACE-FO data filenames
+    # index file containing GRACE/GRACE-FO data filenames
    index_file = os.path.join(grace_dir, 'index.txt')
-    #-- check that index file exists
+    # check that index file exists
    if not os.access(index_file, os.F_OK):
        raise FileNotFoundError(f'{index_file} not found')
-    #-- log index file if debugging
+    # log index file if debugging
    logging.debug(f'Reading index file: {index_file}')
-    #-- read index file for GRACE/GRACE-FO filenames
+    # read index file for GRACE/GRACE-FO filenames
    with open(index_file, mode='r', encoding='utf8') as f:
        input_files = f.read().splitlines()
-    #-- number of lines in input_files
+    # number of lines in input_files
    n_files = len(input_files)
-    #-- define date variables
-    start_yr = np.zeros((n_files))#-- year start date
-    end_yr = np.zeros((n_files))#-- year end date
-    start_day = np.zeros((n_files))#-- day number start date
-
end_day = np.zeros((n_files))#-- day number end date - mid_day = np.zeros((n_files))#-- mid-month day - tot_days = np.zeros((n_files))#-- number of days since Jan 2002 - tdec = np.zeros((n_files))#-- tdec is the date in decimal form - mon = np.zeros((n_files,),dtype=np.int64)#-- GRACE/GRACE-FO month number - - #-- for each data file + # define date variables + start_yr = np.zeros((n_files))# year start date + end_yr = np.zeros((n_files))# year end date + start_day = np.zeros((n_files))# day number start date + end_day = np.zeros((n_files))# day number end date + mid_day = np.zeros((n_files))# mid-month day + tot_days = np.zeros((n_files))# number of days since Jan 2002 + tdec = np.zeros((n_files))# tdec is the date in decimal form + mon = np.zeros((n_files,),dtype=np.int64)# GRACE/GRACE-FO month number + + # for each data file for t,infile in enumerate(input_files): if PROC in ('GRAZ','Swarm',): - #-- get date lists for the start and end of fields + # get date lists for the start and end of fields start_date,end_date = gravity_toolkit.time.parse_gfc_file( infile, PROC, DSET) - #-- start and end year + # start and end year start_yr[t] = np.float64(start_date[0]) end_yr[t] = np.float64(end_date[0]) - #-- number of days in each month for the calendar year + # number of days in each month for the calendar year dpm = gravity_toolkit.time.calendar_days(start_yr[t]) - #-- start and end day of the year + # start and end day of the year start_day[t] = np.sum(dpm[:start_date[1]-1]) + start_date[2] + \ start_date[3]/24. + start_date[4]/1440. + start_date[5]/86400. end_day[t] = np.sum(dpm[:end_date[1]-1]) + end_date[2] + \ end_date[3]/24. + end_date[4]/1440. + end_date[5]/86400. else: - #-- get date lists for the start and end of fields + # get date lists for the start and end of fields start_date,end_date = gravity_toolkit.time.parse_grace_file(infile) - #-- start and end year + # start and end year start_yr[t] = np.float64(start_date[0]) end_yr[t] = np.float64(end_date[0]) - #-- start and end day of the year + # start and end day of the year start_day[t] = np.float64(start_date[1]) end_day[t] = np.float64(end_date[1]) - #-- number of days in the starting year for leap and standard years + # number of days in the starting year for leap and standard years dpy = gravity_toolkit.time.calendar_days(start_yr[t]).sum() - #-- end date taking into account measurements taken on different years + # end date taking into account measurements taken on different years end_cyclic = (end_yr[t]-start_yr[t])*dpy + end_day[t] - #-- calculate mid-month value + # calculate mid-month value mid_day[t] = np.mean([start_day[t], end_cyclic]) - #-- calculate Modified Julian Day from start_yr and mid_day + # calculate Modified Julian Day from start_yr and mid_day MJD = gravity_toolkit.time.convert_calendar_dates(start_yr[t], 1.0,mid_day[t],epoch=(1858,11,17,0,0,0)) - #-- convert from Modified Julian Days to calendar dates + # convert from Modified Julian Days to calendar dates cal_date = gravity_toolkit.time.convert_julian(MJD+2400000.5) - #-- Calculating the mid-month date in decimal form + # Calculating the mid-month date in decimal form tdec[t] = start_yr[t] + mid_day[t]/dpy - #-- Calculation of total days since start of campaign + # Calculation of total days since start of campaign count = 0 n_yrs = np.int64(start_yr[t]-2002) - #-- for each of the GRACE years up to the file year + # for each of the GRACE years up to the file year for iyr in range(n_yrs): - #-- year + # year year = 2002 + iyr - #-- add all days from prior 
years to count - #-- number of days in year i (if leap year or standard year) + # add all days from prior years to count + # number of days in year i (if leap year or standard year) count += gravity_toolkit.time.calendar_days(year).sum() - #-- calculating the total number of days since 2002 + # calculating the total number of days since 2002 tot_days[t] = np.mean([count+start_day[t], count+end_cyclic]) - #-- Calculates the month number (or 10-day number for CNES RL01,RL02) + # Calculates the month number (or 10-day number for CNES RL01,RL02) if ((PROC == 'CNES') and (DREL in ('RL01','RL02'))): mon[t] = np.round(1.0+(tot_days[t]-tot_days[0])/10.0) else: - #-- calculate the GRACE/GRACE-FO month (Apr02 == 004) - #-- https://grace.jpl.nasa.gov/data/grace-months/ - #-- Notes on special months (e.g. 119, 120) below + # calculate the GRACE/GRACE-FO month (Apr02 == 004) + # https://grace.jpl.nasa.gov/data/grace-months/ + # Notes on special months (e.g. 119, 120) below mon[t] = gravity_toolkit.time.calendar_to_grace( cal_date['year'],cal_date['month']) - #-- The 'Special Months' (Nov 2011, Dec 2011 and April 2012) with - #-- Accelerometer shutoffs make the relation between month number - #-- and date more complicated as days from other months are used - #-- For CSR and GFZ: Nov 2011 (119) is centered in Oct 2011 (118) - #-- For JPL: Dec 2011 (120) is centered in Jan 2012 (121) - #-- For all: May 2015 (161) is centered in Apr 2015 (160) + # The 'Special Months' (Nov 2011, Dec 2011 and April 2012) with + # Accelerometer shutoffs make the relation between month number + # and date more complicated as days from other months are used + # For CSR and GFZ: Nov 2011 (119) is centered in Oct 2011 (118) + # For JPL: Dec 2011 (120) is centered in Jan 2012 (121) + # For all: May 2015 (161) is centered in Apr 2015 (160) mon = gravity_toolkit.time.adjust_months(mon) - #-- Output GRACE/GRACE-FO date ascii file + # Output GRACE/GRACE-FO date ascii file if OUTPUT: date_file = f'{PROC}_{DREL}_DATES.txt' fid = open(os.path.join(grace_dir,date_file), 'w') - #-- date file header information + # date file header information args = ('Mid-date','Month','Start_Day','End_Day','Total_Days') print('{0} {1:>10} {2:>11} {3:>10} {4:>13}'.format(*args),file=fid) - #-- create python dictionary mapping input file names with GRACE months + # create python dictionary mapping input file names with GRACE months grace_files = {} - #-- for each data file + # for each data file for t, infile in enumerate(input_files): - #-- add file to python dictionary mapped to GRACE/GRACE-FO month + # add file to python dictionary mapped to GRACE/GRACE-FO month grace_files[mon[t]] = os.path.join(grace_dir,infile) - #-- print to GRACE dates ascii file (NOTE: tot_days will be rounded) + # print to GRACE dates ascii file (NOTE: tot_days will be rounded) if OUTPUT: print(('{0:13.8f} {1:03d} {2:8.0f} {3:03.0f} {4:8.0f} {5:03.0f} ' '{6:8.0f}').format(tdec[t],mon[t],start_yr[t],start_day[t], end_yr[t],end_day[t],tot_days[t]), file=fid) - #-- close date file - #-- set permissions level of output date file + # close date file + # set permissions level of output date file if OUTPUT: fid.close() os.chmod(os.path.join(grace_dir, date_file), MODE) - #-- return the python dictionary that maps GRACE months with GRACE files + # return the python dictionary that maps GRACE months with GRACE files return grace_files -#-- PURPOSE: create argument parser +# PURPOSE: create argument parser def arguments(): parser = argparse.ArgumentParser( description="""Parses dates of 
each GRACE/GRACE-FO file and @@ -288,51 +288,51 @@ def arguments(): Creates an index of dates for GRACE/GRACE-FO files. """ ) - #-- working data directory + # working data directory parser.add_argument('--directory','-D', type=lambda p: os.path.abspath(os.path.expanduser(p)), default=os.getcwd(), help='Working data directory') - #-- Data processing center or satellite mission + # Data processing center or satellite mission parser.add_argument('--center','-c', metavar='PROC', type=str, nargs='+', default=['CSR','GFZ','JPL'], help='GRACE/GRACE-FO Processing Center') - #-- GRACE/GRACE-FO data release + # GRACE/GRACE-FO data release parser.add_argument('--release','-r', metavar='DREL', type=str, nargs='+', default=['RL06'], help='GRACE/GRACE-FO Data Release') - #-- GRACE/GRACE-FO data product + # GRACE/GRACE-FO data product parser.add_argument('--product','-p', metavar='DSET', type=str.upper, nargs='+', default=['GAC','GAD','GSM'], choices=['GAA','GAB','GAC','GAD','GSM'], help='GRACE/GRACE-FO Level-2 data product') - #-- output GRACE/GRACE-FO ascii date file + # output GRACE/GRACE-FO ascii date file parser.add_argument('--output','-O', default=False, action='store_true', help='Overwrite existing data') - #-- permissions mode of the local directories and files (number in octal) + # permissions mode of the local directories and files (number in octal) parser.add_argument('--mode','-M', type=lambda x: int(x,base=8), default=0o775, help='Permissions mode of output files') - #-- return the parser + # return the parser return parser -#-- This is the main part of the program that calls the individual functions +# This is the main part of the program that calls the individual functions def main(): - #-- Read the system arguments listed after the program + # Read the system arguments listed after the program parser = arguments() args,_ = parser.parse_known_args() - #-- run GRACE/GRACE-FO date program + # run GRACE/GRACE-FO date program for pr in args.center: for rl in args.release: for ds in args.product: grace_date(args.directory, PROC=pr, DREL=rl, DSET=ds, OUTPUT=args.output, MODE=args.mode) -#-- run main program +# run main program if __name__ == '__main__': main() diff --git a/gravity_toolkit/grace_find_months.py b/gravity_toolkit/grace_find_months.py index 518ac85f..a9f5e4cb 100644 --- a/gravity_toolkit/grace_find_months.py +++ b/gravity_toolkit/grace_find_months.py @@ -103,26 +103,26 @@ def grace_find_months(base_dir, PROC, DREL, DSET='GSM'): center dates of all available months in a GRACE/GRACE-FO dataset """ - #-- Directory of exact product (using date index from GSM) + # Directory of exact product (using date index from GSM) grace_dir = os.path.join(base_dir, PROC, DREL, DSET) - #-- check that GRACE/GRACE-FO date file exists + # check that GRACE/GRACE-FO date file exists date_file = os.path.join(grace_dir, f'{PROC}_{DREL}_DATES.txt') if not os.access(date_file, os.F_OK): grace_date(base_dir, PROC=PROC, DREL=DREL, DSET=DSET, OUTPUT=True) - #-- read GRACE/GRACE-FO date ascii file from grace_date.py - #-- skip the header row and extract dates (decimal format) and months + # read GRACE/GRACE-FO date ascii file from grace_date.py + # skip the header row and extract dates (decimal format) and months date_input = np.loadtxt(date_file, skiprows=1) tdec = date_input[:,0] months = date_input[:,1].astype(np.int64) - #-- array of all possible months (or in case of CNES RL01/2: 10-day sets) + # array of all possible months (or in case of CNES RL01/2: 10-day sets) all_months = 
np.arange(1,months.max(),dtype=np.int64) - #-- missing months (values in all_months but not in months) + # missing months (values in all_months but not in months) missing = sorted(set(all_months)-set(months)) - #-- If CNES RL01/2: simply convert into numpy array - #-- else: remove months 1-3 and convert into numpy array + # If CNES RL01/2: simply convert into numpy array + # else: remove months 1-3 and convert into numpy array if ((PROC == 'CNES') & (DREL in ('RL01','RL02'))): missing = np.array(missing,dtype=np.int64) else: diff --git a/gravity_toolkit/grace_input_months.py b/gravity_toolkit/grace_input_months.py index 1d61b5cc..95f725d0 100644 --- a/gravity_toolkit/grace_input_months.py +++ b/gravity_toolkit/grace_input_months.py @@ -341,7 +341,7 @@ def grace_input_months(base_dir, PROC, DREL, DSET, LMAX, start_mon, end_mon, *Journal of Geophysical Research: Solid Earth*, 120(6), 4597--4615, (2015). `doi: 10.1002/2015JB011986 `_ """ - #-- set default keyword arguments + # set default keyword arguments kwargs.setdefault('MMAX',LMAX) kwargs.setdefault('SLR_21','') kwargs.setdefault('SLR_22','') @@ -353,22 +353,22 @@ def grace_input_months(base_dir, PROC, DREL, DSET, LMAX, start_mon, end_mon, kwargs.setdefault('ATM',False) kwargs.setdefault('POLE_TIDE',False) - #-- directory of exact GRACE/GRACE-FO product + # directory of exact GRACE/GRACE-FO product grace_dir = os.path.join(os.path.expanduser(base_dir), PROC, DREL, DSET) - #-- check that GRACE/GRACE-FO product directory exists + # check that GRACE/GRACE-FO product directory exists if not os.access(grace_dir, os.F_OK): raise FileNotFoundError(grace_dir) - #-- upper bound of spherical harmonic orders (default = LMAX) + # upper bound of spherical harmonic orders (default = LMAX) MMAX = kwargs.get('MMAX') or np.copy(LMAX) - #-- Range of months from start_mon to end_mon (end_mon+1 to include end_mon) - #-- Removing the missing months and months not to consider + # Range of months from start_mon to end_mon (end_mon+1 to include end_mon) + # Removing the missing months and months not to consider months = sorted(set(np.arange(start_mon,end_mon+1)) - set(missing)) - #-- number of months to consider in analysis + # number of months to consider in analysis n_cons = len(months) - #-- Initializing input data matrices + # Initializing input data matrices grace_Ylms = {} grace_Ylms['clm'] = np.zeros((LMAX+1,MMAX+1,n_cons)) grace_Ylms['slm'] = np.zeros((LMAX+1,MMAX+1,n_cons)) @@ -376,47 +376,47 @@ def grace_input_months(base_dir, PROC, DREL, DSET, LMAX, start_mon, end_mon, grace_Ylms['eslm'] = np.zeros((LMAX+1,MMAX+1,n_cons)) grace_Ylms['time'] = np.zeros((n_cons)) grace_Ylms['month'] = np.zeros((n_cons),dtype=np.int64) - #-- output dimensions + # output dimensions grace_Ylms['l'] = np.arange(LMAX+1) grace_Ylms['m'] = np.arange(MMAX+1) - #-- input directory for product + # input directory for product grace_Ylms['directory'] = copy.copy(grace_dir) - #-- associate GRACE/GRACE-FO files with each GRACE/GRACE-FO month + # associate GRACE/GRACE-FO files with each GRACE/GRACE-FO month grace_files=grace_date(base_dir,PROC=PROC,DREL=DREL,DSET=DSET,OUTPUT=False) - #-- importing data from GRACE/GRACE-FO files + # importing data from GRACE/GRACE-FO files for i,grace_month in enumerate(months): - #-- read spherical harmonic data products + # read spherical harmonic data products infile = grace_files[grace_month] - #-- log input file if debugging + # log input file if debugging logging.debug(f'Reading file {i:d}: {infile}') - #-- read GRACE/GRACE-FO/Swarm file + # 
read GRACE/GRACE-FO/Swarm file if PROC in ('GRAZ','Swarm'): - #-- Degree 2 zonals will be converted to a tide free state + # Degree 2 zonals will be converted to a tide free state Ylms = read_gfc_harmonics(infile, TIDE='tide_free') else: - #-- Effects of Pole tide drift will be compensated if specified + # Effects of Pole tide drift will be compensated if specified Ylms = read_GRACE_harmonics(infile, LMAX, MMAX=MMAX, POLE_TIDE=kwargs['POLE_TIDE']) - #-- truncate harmonics to degree and order + # truncate harmonics to degree and order grace_Ylms['clm'][:,:,i] = Ylms['clm'][0:LMAX+1,0:MMAX+1] grace_Ylms['slm'][:,:,i] = Ylms['slm'][0:LMAX+1,0:MMAX+1] - #-- truncate harmonic errors to degree and order + # truncate harmonic errors to degree and order grace_Ylms['eclm'][:,:,i] = Ylms['eclm'][0:LMAX+1,0:MMAX+1] grace_Ylms['eslm'][:,:,i] = Ylms['eslm'][0:LMAX+1,0:MMAX+1] - #-- copy date variables + # copy date variables grace_Ylms['time'][i] = np.copy(Ylms['time']) grace_Ylms['month'][i] = np.int64(grace_month) - #-- single accelerometer months + # single accelerometer months single_acc_months = np.copy(grace_Ylms['month'][grace_Ylms['month'] > 176]) - #-- SLR low-degree harmonic, geocenter and correction flags + # SLR low-degree harmonic, geocenter and correction flags FLAGS = [] - #-- Replacing C2,0 with SLR C2,0 - #-- Running function read_SLR_C20.py - #-- reading SLR C2,0 file for given release if specified + # Replacing C2,0 with SLR C2,0 + # Running function read_SLR_C20.py + # reading SLR C2,0 file for given release if specified if (SLR_C20 == 'CSR'): if (DREL == 'RL04'): SLR_file = os.path.join(base_dir,'TN-05_C20_SLR.txt') @@ -425,251 +425,251 @@ def grace_input_months(base_dir, PROC, DREL, DSET, LMAX, start_mon, end_mon, elif (DREL == 'RL06'): # SLR_file = os.path.join(base_dir,'TN-11_C20_SLR.txt') SLR_file = os.path.join(base_dir,'C20_RL06.txt') - #-- log SLR file if debugging + # log SLR file if debugging logging.debug(f'Reading SLR C20 file: {SLR_file}') - #-- read SLR file + # read SLR file C20_input = read_SLR_C20(SLR_file) FLAGS.append('_wCSR_C20') elif (SLR_C20 == 'GFZ'): SLR_file = os.path.join(base_dir,f'GFZ_{DREL}_C20_SLR.dat') - #-- log SLR file if debugging + # log SLR file if debugging logging.debug(f'Reading SLR C20 file: {SLR_file}') - #-- read SLR file + # read SLR file C20_input = read_SLR_C20(SLR_file) FLAGS.append('_wGFZ_C20') elif (SLR_C20 == 'GSFC'): SLR_file = os.path.join(base_dir,'TN-14_C30_C20_GSFC_SLR.txt') - #-- log SLR file if debugging + # log SLR file if debugging logging.debug(f'Reading SLR C20 file: {SLR_file}') - #-- read SLR file + # read SLR file C20_input = read_SLR_C20(SLR_file) FLAGS.append('_wGSFC_C20') - #-- Replacing C2,1/S2,1 with SLR - #-- Running function read_SLR_CS2.py + # Replacing C2,1/S2,1 with SLR + # Running function read_SLR_CS2.py if (kwargs['SLR_21'] == 'CSR'): SLR_file = os.path.join(base_dir,f'C21_S21_{DREL}.txt') - #-- log SLR file if debugging + # log SLR file if debugging logging.debug(f'Reading SLR C21/S21 file: {SLR_file}') - #-- read SLR file + # read SLR file C21_input = read_SLR_CS2(SLR_file) FLAGS.append('_wCSR_21') elif (kwargs['SLR_21'] == 'GFZ'): GravIS_file = 'GRAVIS-2B_GFZOP_GRACE+SLR_LOW_DEGREES_0002.dat' SLR_file = os.path.join(base_dir,GravIS_file) - #-- log SLR file if debugging + # log SLR file if debugging logging.debug(f'Reading SLR C21/S21 file: {SLR_file}') - #-- read SLR file + # read SLR file C21_input = read_SLR_CS2(SLR_file) FLAGS.append('_wGFZ_21') elif (kwargs['SLR_21'] == 'GSFC'): - #-- calculate 
monthly averages from 7-day arcs + # calculate monthly averages from 7-day arcs # SLR_file = os.path.join(base_dir,'GSFC_C21_S21.txt') SLR_file = os.path.join(base_dir,'gsfc_slr_5x5c61s61.txt') - #-- log SLR file if debugging + # log SLR file if debugging logging.debug(f'Reading SLR C21/S21 file: {SLR_file}') - #-- read SLR file + # read SLR file C21_input = read_SLR_CS2(SLR_file, DATE=grace_Ylms['time'], ORDER=1) FLAGS.append('_wGSFC_21') - #-- Replacing C2,2/S2,2 with SLR - #-- Running function read_SLR_CS2.py + # Replacing C2,2/S2,2 with SLR + # Running function read_SLR_CS2.py if (kwargs['SLR_22'] == 'CSR'): SLR_file = os.path.join(base_dir,f'C22_S22_{DREL}.txt') - #-- log SLR file if debugging + # log SLR file if debugging logging.debug(f'Reading SLR C22/S22 file: {SLR_file}') - #-- read SLR file + # read SLR file C22_input = read_SLR_CS2(SLR_file) FLAGS.append('_wCSR_22') elif (kwargs['SLR_22'] == 'GSFC'): SLR_file = os.path.join(base_dir,'gsfc_slr_5x5c61s61.txt') - #-- log SLR file if debugging + # log SLR file if debugging logging.debug(f'Reading SLR C22/S22 file: {SLR_file}') - #-- read SLR file + # read SLR file C22_input = read_SLR_CS2(SLR_file, DATE=grace_Ylms['time'], ORDER=2) FLAGS.append('_wGSFC_22') - #-- Replacing C3,0 with SLR C3,0 - #-- Running function read_SLR_C30.py + # Replacing C3,0 with SLR C3,0 + # Running function read_SLR_C30.py if (kwargs['SLR_C30'] == 'CSR'): SLR_file = os.path.join(base_dir,'CSR_Monthly_5x5_Gravity_Harmonics.txt') - #-- log SLR file if debugging + # log SLR file if debugging logging.debug(f'Reading SLR C30 file: {SLR_file}') - #-- read SLR file + # read SLR file C30_input = read_SLR_C30(SLR_file) FLAGS.append('_wCSR_C30') elif (kwargs['SLR_C30'] == 'LARES'): SLR_file = os.path.join(base_dir,'C30_LARES_filtered.txt') - #-- log SLR file if debugging + # log SLR file if debugging logging.debug(f'Reading SLR C30 file: {SLR_file}') - #-- read SLR file + # read SLR file C30_input = read_SLR_C30(SLR_file) FLAGS.append('_wLARES_C30') elif (kwargs['SLR_C30'] == 'GSFC'): SLR_file = os.path.join(base_dir,'TN-14_C30_C20_GSFC_SLR.txt') - #-- log SLR file if debugging + # log SLR file if debugging logging.debug(f'Reading SLR C30 file: {SLR_file}') - #-- read SLR file + # read SLR file C30_input = read_SLR_C30(SLR_file) FLAGS.append('_wGSFC_C30') elif (kwargs['SLR_C30'] == 'GFZ'): GravIS_file = 'GRAVIS-2B_GFZOP_GRACE+SLR_LOW_DEGREES_0002.dat' SLR_file = os.path.join(base_dir,GravIS_file) - #-- log SLR file if debugging + # log SLR file if debugging logging.debug(f'Reading SLR C30 file: {SLR_file}') - #-- read SLR file + # read SLR file C30_input = read_SLR_C30(SLR_file) FLAGS.append('_wGFZ_C30') - #-- Replacing C4,0 with SLR C4,0 - #-- Running function read_SLR_C40.py + # Replacing C4,0 with SLR C4,0 + # Running function read_SLR_C40.py if (kwargs['SLR_C40'] == 'CSR'): SLR_file = os.path.join(base_dir,'CSR_Monthly_5x5_Gravity_Harmonics.txt') - #-- log SLR file if debugging + # log SLR file if debugging logging.debug(f'Reading SLR C40 file: {SLR_file}') - #-- read SLR file + # read SLR file C40_input = read_SLR_C40(SLR_file) FLAGS.append('_wCSR_C40') elif (kwargs['SLR_C40'] == 'LARES'): SLR_file = os.path.join(base_dir,'C40_LARES_filtered.txt') - #-- log SLR file if debugging + # log SLR file if debugging logging.debug(f'Reading SLR C40 file: {SLR_file}') - #-- read SLR file + # read SLR file C40_input = read_SLR_C40(SLR_file) FLAGS.append('_wLARES_C40') elif (kwargs['SLR_C40'] == 'GSFC'): SLR_file = os.path.join(base_dir,'gsfc_slr_5x5c61s61.txt') - #-- log 
SLR file if debugging + # log SLR file if debugging logging.debug(f'Reading SLR C40 file: {SLR_file}') - #-- read SLR file + # read SLR file C40_input = read_SLR_C40(SLR_file, DATE=grace_Ylms['time']) FLAGS.append('_wGSFC_C40') - #-- Replacing C5,0 with SLR C5,0 - #-- Running function read_SLR_C50.py + # Replacing C5,0 with SLR C5,0 + # Running function read_SLR_C50.py if (kwargs['SLR_C50'] == 'CSR'): SLR_file = os.path.join(base_dir,'CSR_Monthly_5x5_Gravity_Harmonics.txt') - #-- log SLR file if debugging + # log SLR file if debugging logging.debug(f'Reading SLR C50 file: {SLR_file}') - #-- read SLR file + # read SLR file C50_input = read_SLR_C50(SLR_file) FLAGS.append('_wCSR_C50') elif (kwargs['SLR_C50'] == 'LARES'): SLR_file = os.path.join(base_dir,'C50_LARES_filtered.txt') - #-- log SLR file if debugging + # log SLR file if debugging logging.debug(f'Reading SLR C50 file: {SLR_file}') - #-- read SLR file + # read SLR file C50_input = read_SLR_C50(SLR_file) FLAGS.append('_wLARES_C50') elif (kwargs['SLR_C50'] == 'GSFC'): # SLR_file = os.path.join(base_dir,'GSFC_SLR_C20_C30_C50_GSM_replacement.txt') SLR_file = os.path.join(base_dir,'gsfc_slr_5x5c61s61.txt') - #-- log SLR file if debugging + # log SLR file if debugging logging.debug(f'Reading SLR C50 file: {SLR_file}') - #-- read SLR file + # read SLR file C50_input = read_SLR_C50(SLR_file, DATE=grace_Ylms['time']) FLAGS.append('_wGSFC_C50') - #-- Correcting for Degree 1 (geocenter variations) - #-- reading degree 1 file for given release if specified + # Correcting for Degree 1 (geocenter variations) + # reading degree 1 file for given release if specified if (DEG1 == 'Tellus'): - #-- Tellus (PO.DAAC) degree 1 + # Tellus (PO.DAAC) degree 1 if DREL in ('RL04','RL05'): - #-- old degree one files + # old degree one files default_geocenter = os.path.join(base_dir,'geocenter', f'deg1_coef_{DREL}.txt') JPL = False else: - #-- new TN-13 degree one files + # new TN-13 degree one files default_geocenter = os.path.join(base_dir,'geocenter', f'TN-13_GEOC_{PROC}_{DREL}.txt') JPL = True - #-- read degree one files from JPL GRACE Tellus + # read degree one files from JPL GRACE Tellus DEG1_file = kwargs.get('DEG1_FILE') or default_geocenter - #-- log geocenter file if debugging + # log geocenter file if debugging logging.debug(f'Reading Geocenter file: {DEG1_file}') DEG1_input = gravity_toolkit.geocenter().from_tellus(DEG1_file,JPL=JPL) FLAGS.append(f'_w{DEG1}_DEG1') elif (DEG1 == 'SLR'): - #-- CSR Satellite Laser Ranging (SLR) degree 1 - # #-- SLR-derived degree-1 mass variations - # #-- ftp://ftp.csr.utexas.edu/pub/slr/geocenter/ + # CSR Satellite Laser Ranging (SLR) degree 1 + # # SLR-derived degree-1 mass variations + # # ftp://ftp.csr.utexas.edu/pub/slr/geocenter/ # DEG1_file = os.path.join(base_dir,'geocenter',f'GCN_{DREL}.txt') # COLUMNS = ['time','X','Y','Z','X_sigma','Y_sigma','Z_sigma'] # DEG1_input = gravity_toolkit.geocenter().from_SLR(DEG1_file, # AOD=True, release=DREL, header=16, COLUMNS=COLUMNS) - # #-- new CF-CM file of degree-1 mass variations - # #-- https://cddis.nasa.gov/lw20/docs/2016/papers/14-Ries_paper.pdf - # #-- http://download.csr.utexas.edu/pub/slr/geocenter/GCN_L1_L2_30d_CF-CM.txt + # # new CF-CM file of degree-1 mass variations + # # https://cddis.nasa.gov/lw20/docs/2016/papers/14-Ries_paper.pdf + # # http://download.csr.utexas.edu/pub/slr/geocenter/GCN_L1_L2_30d_CF-CM.txt # DEG1_file = os.path.join(base_dir,'geocenter','GCN_L1_L2_30d_CF-CM.txt') # COLUMNS = ['time','X','Y','Z','X_sigma','Y_sigma','Z_sigma'] # DEG1_input = 
gravity_toolkit.geocenter().from_SLR(DEG1_file, # AOD=True, release=DREL, header=111, columns=COLUMNS) - #-- new file of degree-1 mass variations from Minkang Cheng - #-- http://download.csr.utexas.edu/outgoing/cheng/gct2est.220_5s + # new file of degree-1 mass variations from Minkang Cheng + # http://download.csr.utexas.edu/outgoing/cheng/gct2est.220_5s DEG1_file = os.path.join(base_dir,'geocenter','gct2est.220_5s') COLUMNS = ['MJD','time','X','Y','Z','XM','YM','ZM', 'X_sigma','Y_sigma','Z_sigma','XM_sigma','YM_sigma','ZM_sigma'] - #-- log geocenter file if debugging + # log geocenter file if debugging logging.debug(f'Reading Geocenter file: {DEG1_file}') - #-- read degree one files from CSR satellite laser ranging + # read degree one files from CSR satellite laser ranging DEG1_input = gravity_toolkit.geocenter(radius=6.378136e9).from_SLR( DEG1_file, AOD=True, release=DREL, header=15, columns=COLUMNS) FLAGS.append(f'_w{DEG1}_DEG1') elif DEG1 in ('SLF','UCI'): - #-- degree one files from Sutterley and Velicogna (2019) - #-- default: iterated and with self-attraction and loading effects + # degree one files from Sutterley and Velicogna (2019) + # default: iterated and with self-attraction and loading effects MODEL = dict(RL04='OMCT', RL05='OMCT', RL06='MPIOM') args = (PROC,DREL,MODEL[DREL],'SLF_iter') default_geocenter = os.path.join(base_dir,'geocenter', '{0}_{1}_{2}_{3}.txt'.format(*args)) - #-- read degree one files from Sutterley and Velicogna (2019) + # read degree one files from Sutterley and Velicogna (2019) DEG1_file = kwargs.get('DEG1_FILE') or default_geocenter - #-- log geocenter file if debugging + # log geocenter file if debugging logging.debug(f'Reading Geocenter file: {DEG1_file}') DEG1_input = gravity_toolkit.geocenter().from_UCI(DEG1_file) FLAGS.append(f'_w{DEG1}_DEG1') elif (DEG1 == 'Swenson'): - #-- degree 1 coefficients provided by Sean Swenson in mm w.e. + # degree 1 coefficients provided by Sean Swenson in mm w.e. default_geocenter = os.path.join(base_dir,'geocenter', f'gad_gsm.{DREL}.txt') - #-- read degree one files from Swenson et al. (2008) + # read degree one files from Swenson et al. (2008) DEG1_file = kwargs.get('DEG1_FILE') or default_geocenter - #-- log geocenter file if debugging + # log geocenter file if debugging logging.debug(f'Reading Geocenter file: {DEG1_file}') DEG1_input = gravity_toolkit.geocenter().from_swenson(DEG1_file) FLAGS.append(f'_w{DEG1}_DEG1') elif (DEG1 == 'GFZ'): - #-- degree 1 coefficients provided by GFZ GravIS - #-- http://gravis.gfz-potsdam.de/corrections + # degree 1 coefficients provided by GFZ GravIS + # http://gravis.gfz-potsdam.de/corrections default_geocenter = os.path.join(base_dir,'geocenter', 'GRAVIS-2B_GFZOP_GEOCENTER_0002.dat') - #-- read degree one files from GFZ GravIS + # read degree one files from GFZ GravIS DEG1_file = kwargs.get('DEG1_FILE') or default_geocenter - #-- log geocenter file if debugging + # log geocenter file if debugging logging.debug(f'Reading Geocenter file: {DEG1_file}') DEG1_input = gravity_toolkit.geocenter().from_gravis(DEG1_file) FLAGS.append(f'_w{DEG1}_DEG1') - #-- atmospheric flag if correcting ECMWF "jumps" (using GAE/GAF/GAG files) + # atmospheric flag if correcting ECMWF "jumps" (using GAE/GAF/GAG files) if kwargs['ATM']: FLAGS.append('_wATM') - #-- pole tide flag if correcting for pole tide drift (Wahr et al. 2015) + # pole tide flag if correcting for pole tide drift (Wahr et al. 
2015) if kwargs['POLE_TIDE']: FLAGS.append('_wPT') - #-- full output string (SLR, geocenter and correction flags) + # full output string (SLR, geocenter and correction flags) grace_Ylms['title'] = ''.join(FLAGS) - #-- Replace C20 with SLR coefficients + # Replace C20 with SLR coefficients if SLR_C20 in ('CSR','GFZ','GSFC'): - #-- verify that there are replacement C20 months for specified range + # verify that there are replacement C20 months for specified range months_test = sorted(set(months) - set(C20_input['month'])) if months_test: gm = ','.join(f'{gm:03d}' for gm in months_test) raise IOError(f'No Matching C20 Months ({gm})') - #-- replace C20 with SLR coefficients + # replace C20 with SLR coefficients for i,grace_month in enumerate(months): count = np.count_nonzero(C20_input['month'] == grace_month) if (count != 0): @@ -677,14 +677,14 @@ def grace_input_months(base_dir, PROC, DREL, DSET, LMAX, start_mon, end_mon, grace_Ylms['clm'][2,0,i] = np.copy(C20_input['data'][k]) grace_Ylms['eclm'][2,0,i] = np.copy(C20_input['error'][k]) - #-- Replace C21/S21 with SLR coefficients for single-accelerometer months + # Replace C21/S21 with SLR coefficients for single-accelerometer months if kwargs['SLR_21'] in ('CSR','GFZ','GSFC'): - #-- verify that there are replacement C21/S21 months for specified range + # verify that there are replacement C21/S21 months for specified range months_test = sorted(set(single_acc_months) - set(C21_input['month'])) if months_test: gm = ','.join(f'{gm:03d}' for gm in months_test) raise IOError(f'No Matching C21/S21 Months ({gm})') - #-- replace C21/S21 with SLR coefficients + # replace C21/S21 with SLR coefficients for i,grace_month in enumerate(months): count = np.count_nonzero(C21_input['month'] == grace_month) if (count != 0) and (grace_month > 176): @@ -694,14 +694,14 @@ def grace_input_months(base_dir, PROC, DREL, DSET, LMAX, start_mon, end_mon, grace_Ylms['eclm'][2,1,i] = np.copy(C21_input['eC2m'][k]) grace_Ylms['eslm'][2,1,i] = np.copy(C21_input['eS2m'][k]) - #-- Replace C22/S22 with SLR coefficients for single-accelerometer months + # Replace C22/S22 with SLR coefficients for single-accelerometer months if kwargs['SLR_22'] in ('CSR','GSFC'): - #-- verify that there are replacement C22/S22 months for specified range + # verify that there are replacement C22/S22 months for specified range months_test = sorted(set(single_acc_months) - set(C22_input['month'])) if months_test: gm = ','.join(f'{gm:03d}' for gm in months_test) raise IOError(f'No Matching C22/S22 Months ({gm})') - #-- replace C22/S22 with SLR coefficients + # replace C22/S22 with SLR coefficients for i,grace_month in enumerate(months): count = np.count_nonzero(C22_input['month'] == grace_month) if (count != 0) and (grace_month > 176): @@ -711,14 +711,14 @@ def grace_input_months(base_dir, PROC, DREL, DSET, LMAX, start_mon, end_mon, grace_Ylms['eclm'][2,2,i] = np.copy(C22_input['eC2m'][k]) grace_Ylms['eslm'][2,2,i] = np.copy(C22_input['eS2m'][k]) - #-- Replace C30 with SLR coefficients for single-accelerometer months + # Replace C30 with SLR coefficients for single-accelerometer months if kwargs['SLR_C30'] in ('CSR','GFZ','GSFC','LARES'): - #-- verify that there are replacement C30 months for specified range + # verify that there are replacement C30 months for specified range months_test = sorted(set(single_acc_months) - set(C30_input['month'])) if months_test: gm = ','.join(f'{gm:03d}' for gm in months_test) raise IOError(f'No Matching C30 Months ({gm})') - #-- replace C30 with SLR coefficients + # 
replace C30 with SLR coefficients for i,grace_month in enumerate(months): count = np.count_nonzero(C30_input['month'] == grace_month) if (count != 0) and (grace_month > 176): @@ -726,14 +726,14 @@ def grace_input_months(base_dir, PROC, DREL, DSET, LMAX, start_mon, end_mon, grace_Ylms['clm'][3,0,i] = np.copy(C30_input['data'][k]) grace_Ylms['eclm'][3,0,i] = np.copy(C30_input['error'][k]) - #-- Replace C40 with SLR coefficients for single-accelerometer months + # Replace C40 with SLR coefficients for single-accelerometer months if kwargs['SLR_C40'] in ('CSR','GSFC','LARES'): - #-- verify that there are replacement C40 months for specified range + # verify that there are replacement C40 months for specified range months_test = sorted(set(single_acc_months) - set(C40_input['month'])) if months_test: gm = ','.join(f'{gm:03d}' for gm in months_test) raise IOError(f'No Matching C40 Months ({gm})') - #-- replace C40 with SLR coefficients + # replace C40 with SLR coefficients for i,grace_month in enumerate(months): count = np.count_nonzero(C40_input['month'] == grace_month) if (count != 0) and (grace_month > 176): @@ -741,14 +741,14 @@ def grace_input_months(base_dir, PROC, DREL, DSET, LMAX, start_mon, end_mon, grace_Ylms['clm'][4,0,i] = np.copy(C40_input['data'][k]) grace_Ylms['eclm'][4,0,i] = np.copy(C40_input['error'][k]) - #-- Replace C50 with SLR coefficients for single-accelerometer months + # Replace C50 with SLR coefficients for single-accelerometer months if kwargs['SLR_C50'] in ('CSR','GSFC','LARES'): - #-- verify that there are replacement C50 months for specified range + # verify that there are replacement C50 months for specified range months_test = sorted(set(single_acc_months) - set(C50_input['month'])) if months_test: gm = ','.join(f'{gm:03d}' for gm in months_test) raise IOError(f'No Matching C50 Months ({gm})') - #-- replace C50 with SLR coefficients + # replace C50 with SLR coefficients for i,grace_month in enumerate(months): count = np.count_nonzero(C50_input['month'] == grace_month) if (count != 0) and (grace_month > 176): @@ -756,17 +756,17 @@ def grace_input_months(base_dir, PROC, DREL, DSET, LMAX, start_mon, end_mon, grace_Ylms['clm'][5,0,i] = np.copy(C50_input['data'][k]) grace_Ylms['eclm'][5,0,i] = np.copy(C50_input['error'][k]) - #-- Use Degree 1 coefficients - #-- Tellus: Tellus Degree 1 (PO.DAAC following Sun et al., 2016) - #-- SLR: CSR Satellite Laser Ranging (SLR) Degree 1 - GRACE AOD - #-- UCI: OMCT/MPIOM coefficients with Sea Level Fingerprint land-water mass - #-- Swenson: GRACE-derived coefficients from Sean Swenson - #-- GFZ: GRACE/GRACE-FO coefficients from GFZ GravIS + # Use Degree 1 coefficients + # Tellus: Tellus Degree 1 (PO.DAAC following Sun et al., 2016) + # SLR: CSR Satellite Laser Ranging (SLR) Degree 1 - GRACE AOD + # UCI: OMCT/MPIOM coefficients with Sea Level Fingerprint land-water mass + # Swenson: GRACE-derived coefficients from Sean Swenson + # GFZ: GRACE/GRACE-FO coefficients from GFZ GravIS if DEG1 in ('GFZ','SLR','SLF','Swenson','Tellus','UCI'): - #-- check if modeling degree 1 or if all months are available + # check if modeling degree 1 or if all months are available if kwargs['MODEL_DEG1']: - #-- least-squares modeling the degree 1 coefficients - #-- fitting annual, semi-annual, linear and quadratic terms + # least-squares modeling the degree 1 coefficients + # fitting annual, semi-annual, linear and quadratic terms C10_model = regress_model(DEG1_input.time, DEG1_input.C10, grace_Ylms['time'], ORDER=2, CYCLES=[0.5,1.0], RELATIVE=2003.3) 
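             # note: with ORDER=2 and CYCLES=[0.5,1.0], regress_model builds a
             # least-squares design matrix with the columns
             #   [1, (t-2003.3), (t-2003.3)**2,
             #    sin(4*pi*t), cos(4*pi*t), sin(2*pi*t), cos(2*pi*t)]
             # i.e. a quadratic trend plus semiannual and annual cycles; each
             # fitted component is then evaluated at the GRACE/GRACE-FO epochs
             # to fill months missing from the input degree-1 file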
C11_model = regress_model(DEG1_input.time, DEG1_input.C11,
@@ -774,50 +774,50 @@ def grace_input_months(base_dir, PROC, DREL, DSET, LMAX, start_mon, end_mon,
             S11_model = regress_model(DEG1_input.time, DEG1_input.S11,
                 grace_Ylms['time'], ORDER=2, CYCLES=[0.5,1.0], RELATIVE=2003.3)
         else:
-            #-- check that all months are available for a given geocenter
+            # check that all months are available for a given geocenter
             months_test = sorted(set(months) - set(DEG1_input.month))
             if months_test:
                 gm = ','.join(f'{gm:03d}' for gm in months_test)
                 raise IOError(f'No Matching Geocenter Months ({gm})')
-        #-- for each considered date
+        # for each considered date
         for i,grace_month in enumerate(months):
             k, = np.nonzero(DEG1_input.month == grace_month)
             count = np.count_nonzero(DEG1_input.month == grace_month)
-            #-- Degree 1 is missing for particular month
+            # Degree 1 is missing for particular month
             if (count == 0) and kwargs['MODEL_DEG1']:
-                #-- using least-squares modeled coefficients from regress_model
+                # using least-squares modeled coefficients from regress_model
                 grace_Ylms['clm'][1,0,i] = np.copy(C10_model[i])
                 grace_Ylms['clm'][1,1,i] = np.copy(C11_model[i])
                 grace_Ylms['slm'][1,1,i] = np.copy(S11_model[i])
-            else:#-- using coefficients from data file
+            else:# using coefficients from data file
                 grace_Ylms['clm'][1,0,i] = np.copy(DEG1_input.C10[k])
                 grace_Ylms['clm'][1,1,i] = np.copy(DEG1_input.C11[k])
                 grace_Ylms['slm'][1,1,i] = np.copy(DEG1_input.S11[k])

-    #-- read and add/remove the GAE and GAF atmospheric correction coefficients
+    # read and add/remove the GAE and GAF atmospheric correction coefficients
     if kwargs['ATM']:
-        #-- read ECMWF correction files from Fagiolini et al. (2015)
+        # read ECMWF correction files from Fagiolini et al. (2015)
         atm_corr = read_ecmwf_corrections(base_dir,LMAX,months,MMAX=MMAX)
-        #-- Removing GAE/GAF/GAG from RL05 GSM Products
+        # Removing GAE/GAF/GAG from RL05 GSM Products
         if (DSET == 'GSM'):
-            for m in range(0,MMAX+1):#-- MMAX+1 to include l
-                for l in range(m,LMAX+1):#-- LMAX+1 to include LMAX
+            for m in range(0,MMAX+1):# MMAX+1 to include MMAX
+                for l in range(m,LMAX+1):# LMAX+1 to include LMAX
                     grace_Ylms['clm'][l,m,:] -= atm_corr['clm'][l,m,:]
                     grace_Ylms['slm'][l,m,:] -= atm_corr['slm'][l,m,:]
-        #-- Adding GAE/GAF/GAG to RL05 Atmospheric Products (GAA,GAC)
+        # Adding GAE/GAF/GAG to RL05 Atmospheric Products (GAA,GAC)
         elif DSET in ('GAC','GAA'):
-            for m in range(0,MMAX+1):#-- MMAX+1 to include l
-                for l in range(m,LMAX+1):#-- LMAX+1 to include LMAX
+            for m in range(0,MMAX+1):# MMAX+1 to include MMAX
+                for l in range(m,LMAX+1):# LMAX+1 to include LMAX
                     grace_Ylms['clm'][l,m,:] += atm_corr['clm'][l,m,:]
                     grace_Ylms['slm'][l,m,:] += atm_corr['slm'][l,m,:]

-    #-- return the harmonic solutions with possible low-degree replacements
-    #-- return the harmonic dimensions (spectral and temporal)
-    #-- return string specifying processing and correction flags
-    #-- return directory of exact GRACE/GRACE-FO product
+    # return the harmonic solutions with possible low-degree replacements
+    # return the harmonic dimensions (spectral and temporal)
+    # return string specifying processing and correction flags
+    # return directory of exact GRACE/GRACE-FO product
     return grace_Ylms

-#-- PURPOSE: read atmospheric jump corrections from Fagiolini et al. (2015)
+# PURPOSE: read atmospheric jump corrections from Fagiolini et al. 
(2015) def read_ecmwf_corrections(base_dir, LMAX, months, MMAX=None): """ Read atmospheric jump corrections from [Fagiolini2015]_ @@ -849,50 +849,50 @@ def read_ecmwf_corrections(base_dir, LMAX, months, MMAX=None): `doi: 10.1093/gji/ggv276 `_ """ - #-- correction files + # correction files corr_file = {} corr_file['GAE'] = 'TN-08_GAE-2_2006032-2010031_0000_EIGEN_G---_0005.gz' corr_file['GAF'] = 'TN-09_GAF-2_2010032-2015131_0000_EIGEN_G---_0005.gz' corr_file['GAG'] = 'TN-10_GAG-2_2015132-2099001_0000_EIGEN_G---_0005.gz' - #-- atmospheric correction coefficients + # atmospheric correction coefficients atm_corr_clm = {} atm_corr_slm = {} - #-- number of months to consider in analysis + # number of months to consider in analysis n_cons = len(months) - #-- set maximum order if not equal to maximum degree + # set maximum order if not equal to maximum degree MMAX = LMAX if (MMAX is None) else MMAX - #-- iterate through python dictionary keys (GAE, GAF, GAG) + # iterate through python dictionary keys (GAE, GAF, GAG) for key, val in corr_file.items(): - #-- log ECMWF correction file if debugging + # log ECMWF correction file if debugging infile = os.path.join(base_dir, val) logging.debug(f'Reading ECMWF file: {infile}') - #-- allocate for clm and slm of atmospheric corrections + # allocate for clm and slm of atmospheric corrections atm_corr_clm[key] = np.zeros((LMAX+1,MMAX+1)) atm_corr_slm[key] = np.zeros((LMAX+1,MMAX+1)) - #-- GRACE correction files are compressed gz files + # GRACE correction files are compressed gz files with gzip.open(infile,'rb') as f: file_contents = f.read().decode('ISO-8859-1').splitlines() - #-- for each line in the GRACE correction file + # for each line in the GRACE correction file for line in file_contents: - #-- find if line starts with GRCOF2 + # find if line starts with GRCOF2 if bool(re.match(r'GRCOF2',line)): - #-- split the line into individual components + # split the line into individual components line_contents = line.split() - #-- degree and order for the line + # degree and order for the line l1 = np.int64(line_contents[1]) m1 = np.int64(line_contents[2]) - #-- if degree and order are below the truncation limits + # if degree and order are below the truncation limits if ((l1 <= LMAX) and (m1 <= MMAX)): atm_corr_clm[key][l1,m1] = np.float64(line_contents[3]) atm_corr_slm[key][l1,m1] = np.float64(line_contents[4]) - #-- create output atmospheric corrections to be removed/added to data + # create output atmospheric corrections to be removed/added to data atm_corr = {} atm_corr['clm'] = np.zeros((LMAX+1,LMAX+1,n_cons)) atm_corr['slm'] = np.zeros((LMAX+1,LMAX+1,n_cons)) - #-- for each considered date + # for each considered date for i,grace_month in enumerate(months): - #-- remove correction based on dates + # remove correction based on dates if (grace_month >= 50) & (grace_month <= 97): atm_corr['clm'][:,:,i] = atm_corr_clm['GAE'][:,:] atm_corr['slm'][:,:,i] = atm_corr_slm['GAE'][:,:] @@ -903,10 +903,10 @@ def read_ecmwf_corrections(base_dir, LMAX, months, MMAX=None): atm_corr['clm'][:,:,i] = atm_corr_clm['GAG'][:,:] atm_corr['slm'][:,:,i] = atm_corr_slm['GAG'][:,:] - #-- return the atmospheric corrections + # return the atmospheric corrections return atm_corr -#-- PURPOSE: calculate a regression model for extrapolating values +# PURPOSE: calculate a regression model for extrapolating values def regress_model(t_in, d_in, t_out, ORDER=2, CYCLES=None, RELATIVE=0.0): """ Calculates a regression model for extrapolating values @@ -932,30 +932,30 @@ def 
regress_model(t_in, d_in, t_out, ORDER=2, CYCLES=None, RELATIVE=0.0): output regressed value data array """ - #-- remove singleton dimensions + # remove singleton dimensions t_in = np.squeeze(t_in) d_in = np.squeeze(d_in) t_out = np.squeeze(t_out) - #-- check dimensions of output + # check dimensions of output t_out = np.atleast_1d(t_out) - #-- set relative to mean of input time + # set relative to mean of input time if not RELATIVE: RELATIVE = np.mean(t_in) - #-- create design matrix based on polynomial order and harmonics + # create design matrix based on polynomial order and harmonics DMAT = [] MMAT = [] - #-- add polynomial orders (0=constant, 1=linear, 2=quadratic) + # add polynomial orders (0=constant, 1=linear, 2=quadratic) for o in range(ORDER+1): DMAT.append((t_in-RELATIVE)**o) MMAT.append((t_out-RELATIVE)**o) - #-- add cyclical terms (0.5=semi-annual, 1=annual) + # add cyclical terms (0.5=semi-annual, 1=annual) for c in CYCLES: DMAT.append(np.sin(2.0*np.pi*t_in/np.float64(c))) DMAT.append(np.cos(2.0*np.pi*t_in/np.float64(c))) MMAT.append(np.sin(2.0*np.pi*t_out/np.float64(c))) MMAT.append(np.cos(2.0*np.pi*t_out/np.float64(c))) - #-- Calculating Least-Squares Coefficients - #-- Standard Least-Squares fitting (the [0] denotes coefficients output) + # Calculating Least-Squares Coefficients + # Standard Least-Squares fitting (the [0] denotes coefficients output) beta_mat = np.linalg.lstsq(np.transpose(DMAT), d_in, rcond=-1)[0] - #-- return modeled time-series + # return modeled time-series return np.dot(np.transpose(MMAT),beta_mat) diff --git a/gravity_toolkit/grace_months_index.py b/gravity_toolkit/grace_months_index.py index b9946ad2..37501cc2 100644 --- a/gravity_toolkit/grace_months_index.py +++ b/gravity_toolkit/grace_months_index.py @@ -86,146 +86,146 @@ def grace_months_index(base_dir, DREL=['RL06','rl06v2.0'], MODE=None): MODE: oct or NoneType, default None Permissions mode of output index file """ - #-- Output GRACE months file + # Output GRACE months file grace_months_file = 'GRACE_months.txt' fid = open(os.path.join(base_dir,grace_months_file), 'w') - #-- Initial parameters - #-- processing centers + # Initial parameters + # processing centers PROC = ['CSR', 'GFZ', 'GSFC', 'JPL'] - #-- read from GSM datasets + # read from GSM datasets DSET = 'GSM' - #-- maximum month of the datasets - #-- checks for the maximum month between processing centers + # maximum month of the datasets + # checks for the maximum month between processing centers max_mon = 0 - #-- contain the information for each dataset + # contain the information for each dataset var_info = {} - #-- Looping through data releases first (all RL04 then all RL05) - #-- for each considered data release (RL04,RL05) + # Looping through data releases first (all RL04 then all RL05) + # for each considered data release (RL04,RL05) for rl in DREL: - #-- for each processing centers (CSR, GFZ, JPL) + # for each processing centers (CSR, GFZ, JPL) for pr in PROC: - #-- Setting the data directory for processing center and release + # Setting the data directory for processing center and release grace_dir = os.path.join(base_dir, pr, rl, DSET) - #-- read GRACE date ascii file - #-- file created in read_grace.py or grace_dates.py + # read GRACE date ascii file + # file created in read_grace.py or grace_dates.py grace_date_file = f'{pr}_{rl}_DATES.txt' if os.access(os.path.join(grace_dir,grace_date_file), os.F_OK): - #-- skip the header line + # skip the header line date_input = np.loadtxt(os.path.join(grace_dir,grace_date_file), 
skiprows=1) - #-- number of months + # number of months nmon = np.shape(date_input)[0] - #-- Setting the dictionary key e.g. 'CSR_RL04' + # Setting the dictionary key e.g. 'CSR_RL04' var_name = f'{pr}_{rl}' - #-- Creating a python dictionary for each dataset with parameters: - #-- month #, start year, start day, end year, end day - #-- Purpose is to get all of the dates loaded for each dataset - #-- Adding data to dictionary for data processing and release + # Creating a python dictionary for each dataset with parameters: + # month #, start year, start day, end year, end day + # Purpose is to get all of the dates loaded for each dataset + # Adding data to dictionary for data processing and release var_info[var_name] = {} - #-- allocate for output variables + # allocate for output variables var_info[var_name]['mon'] = np.zeros((nmon),dtype=np.int64) var_info[var_name]['styr'] = np.zeros((nmon),dtype=np.int64) var_info[var_name]['stday'] = np.zeros((nmon),dtype=np.int64) var_info[var_name]['endyr'] = np.zeros((nmon),dtype=np.int64) var_info[var_name]['endday'] = np.zeros((nmon),dtype=np.int64) - #-- place output variables in dictionary + # place output variables in dictionary for i,key in enumerate(['mon','styr','stday','endyr','endday']): - #-- first column is date in decimal form (start at 1 not 0) + # first column is date in decimal form (start at 1 not 0) var_info[var_name][key] = date_input[:,i+1].astype(np.int64) - #-- Finding the maximum month measured + # Finding the maximum month measured if (var_info[var_name]['mon'].max() > max_mon): - #-- if the maximum month in this dataset is greater - #-- than the previously read datasets + # if the maximum month in this dataset is greater + # than the previously read datasets max_mon = np.int64(var_info[var_name]['mon'].max()) - #-- sort datasets alphanumerically + # sort datasets alphanumerically var_name = sorted(var_info.keys()) txt = ''.join([f'{d:^21}' for d in var_name]) - #-- printing header to file + # printing header to file print(f'{"MONTH":^11} {txt}', file=fid) - #-- for each possible month - #-- GRACE starts at month 004 (April 2002) - #-- max_mon+1 to include max_mon + # for each possible month + # GRACE starts at month 004 (April 2002) + # max_mon+1 to include max_mon for m in range(4, max_mon+1): - #-- finding the month name e.g. Apr + # finding the month name e.g. 
Apr
         calendar_year,calendar_month = grace_to_calendar(m)
         month_string = calendar.month_abbr[calendar_month]
-        #-- create list object for output string
+        # create list object for output string
         output_string = []
-        #-- for each processing center and data release
+        # for each processing center and data release
         for var in var_name:
-            #-- find if the month of data exists
-            #-- exists will be greater than 0 if there is a match
+            # find if the month of data exists
+            # exists will be greater than 0 if there is a match
             exists = np.count_nonzero(var_info[var]['mon'] == m)
             if (exists != 0):
-                #-- if there is a matching month
-                #-- indice of matching month
+                # if there is a matching month
+                # index of matching month
                 ind, = np.nonzero(var_info[var]['mon'] == m)
-                #-- start date
+                # start date
                 st_yr, = var_info[var]['styr'][ind]
                 st_day, = var_info[var]['stday'][ind]
-                #-- end date
+                # end date
                 end_yr, = var_info[var]['endyr'][ind]
                 end_day, = var_info[var]['endday'][ind]
-                #-- output string is the date range
-                #-- string format: 2002_102--2002_120
+                # output string is the date range
+                # string format: 2002_102--2002_120
                 output_string.append(f'{st_yr:4d}_{st_day:03d}--'
                     f'{end_yr:4d}_{end_day:03d}')
             else:
-                #-- if there is no matching month = missing
+                # if there is no matching month = missing
                 output_string.append(' ** missing ** ')
-        #-- create single string with output string components
-        #-- formatting the strings to be 20 characters in length
+        # create single string with output string components
+        # formatting the strings to be 20 characters in length
         data_string = ' '.join([f'{s:>20}' for s in output_string])
-        #-- printing data line to file
+        # printing data line to file
         args = (m, month_string, calendar_year, data_string)
         print('{0:03d} {1:>3}{2:4d} {3}'.format(*args), file=fid)

-    #-- close months file
+    # close months file
     fid.close()
-    #-- set the permissions level of the output file
+    # set the permissions level of the output file
     os.chmod(os.path.join(base_dir,grace_months_file), MODE)

-#-- PURPOSE: create argument parser
+# PURPOSE: create argument parser
 def arguments():
     parser = argparse.ArgumentParser(
         description="""Creates a file with the start and end days for
             each month of GRACE/GRACE-FO data
             """
     )
-    #-- command line parameters
-    #-- working data directory
+    # command line parameters
+    # working data directory
     parser.add_argument('--directory','-D',
         type=lambda p: os.path.abspath(os.path.expanduser(p)),
         default=os.getcwd(),
         help='Working data directory')
-    #-- GRACE/GRACE-FO data release
+    # GRACE/GRACE-FO data release
     parser.add_argument('--release','-r',
         metavar='DREL', type=str, nargs='+',
         default=['RL06','rl06v2.0'],
         help='GRACE/GRACE-FO Data Release')
-    #-- permissions mode of the local directories and files (number in octal)
+    # permissions mode of the local directories and files (number in octal)
     parser.add_argument('--mode','-M',
         type=lambda x: int(x,base=8), default=0o775,
         help='Permissions mode of output files')
-    #-- return the parser
+    # return the parser
     return parser

-#-- This is the main part of the program that calls the individual functions
+# This is the main part of the program that calls the individual functions
 def main():
-    #-- Read the system arguments listed after the program
+    # Read the system arguments listed after the program
     parser = arguments()
     args,_ = parser.parse_known_args()

-    #-- run GRACE/GRACE-FO months program
+    # run GRACE/GRACE-FO months program
     grace_months_index(args.directory, DREL=args.release, MODE=args.mode)

-#-- run main program
+# run 
main program if __name__ == '__main__': main() diff --git a/gravity_toolkit/harmonic_gradients.py b/gravity_toolkit/harmonic_gradients.py index 191ac87e..c6ee1423 100644 --- a/gravity_toolkit/harmonic_gradients.py +++ b/gravity_toolkit/harmonic_gradients.py @@ -68,36 +68,36 @@ def harmonic_gradients(clm1, slm1, lon, lat, zonal and meridional gradient fields """ - #-- if LMAX is not specified, will use the size of the input harmonics + # if LMAX is not specified, will use the size of the input harmonics if (LMAX == 0): LMAX = np.shape(clm1)[0]-1 - #-- upper bound of spherical harmonic orders (default = LMAX) + # upper bound of spherical harmonic orders (default = LMAX) if MMAX is None: MMAX = np.copy(LMAX) - #-- Longitude in radians + # Longitude in radians phi = (np.squeeze(lon)*np.pi/180.0)[np.newaxis,:] - #-- Colatitude in radians + # Colatitude in radians th = (90.0 - np.squeeze(lat))*np.pi/180.0 thmax = len(np.squeeze(lat)) phimax = len(np.squeeze(lon)) - #-- Truncating harmonics to degree and order LMAX - #-- removing coefficients below LMIN and above MMAX + # Truncating harmonics to degree and order LMAX + # removing coefficients below LMIN and above MMAX mm = np.arange(0,MMAX+1) clm = np.zeros((LMAX+1,MMAX+1)) slm = np.zeros((LMAX+1,MMAX+1)) clm[LMIN:LMAX+1,mm] = clm1[LMIN:LMAX+1,mm] slm[LMIN:LMAX+1,mm] = slm1[LMIN:LMAX+1,mm] - #-- spherical harmonic degree and order - ll = np.arange(0,LMAX+1)[np.newaxis, :]#-- lmax+1 to include lmax - mm = np.arange(0,MMAX+1)[:, np.newaxis]#-- mmax+1 to include mmax + # spherical harmonic degree and order + ll = np.arange(0,LMAX+1)[np.newaxis, :]# lmax+1 to include lmax + mm = np.arange(0,MMAX+1)[:, np.newaxis]# mmax+1 to include mmax - #-- generate Vlm coefficients (vlm and wlm) + # generate Vlm coefficients (vlm and wlm) vlm, wlm = legendre_gradient(LMAX, MMAX) dlm = np.zeros((LMAX+1,LMAX+1,2)) - #-- minus sign is because lat and theta change with opposite sign + # minus sign is because lat and theta change with opposite sign for l in range(0,LMAX+1): dlm[l,:,0] = -clm[l,:]*np.sqrt((l+1.0)*l) dlm[l,:,1] = -slm[l,:]*np.sqrt((l+1.0)*l) @@ -105,7 +105,7 @@ def harmonic_gradients(clm1, slm1, lon, lat, m_even = np.arange(0,MMAX+2,2) m_odd = np.arange(1,MMAX,2) - #-- Calculate fourier coefficients from legendre coefficients + # Calculate fourier coefficients from legendre coefficients d_cos = np.zeros((LMAX+1,thmax,2)) d_sin = np.zeros((LMAX+1,thmax,2)) cnk = np.cos(np.dot(th[:,np.newaxis],ll)) @@ -113,7 +113,7 @@ def harmonic_gradients(clm1, slm1, lon, lat, wtmp = np.zeros((len(m_even),LMAX+1,2)) vtmp = np.zeros((len(m_even),LMAX+1,2)) - #-- m = even terms (vlm,wlm sine series) + # m = even terms (vlm,wlm sine series) for n in range(0,LMAX+1): wtmp[:,n,0] = np.sum(wlm[:,m_even,n]*dlm[:,m_even,0],axis=0) wtmp[:,n,1] = np.sum(wlm[:,m_even,n]*dlm[:,m_even,1],axis=0) @@ -125,7 +125,7 @@ def harmonic_gradients(clm1, slm1, lon, lat, d_cos[m_even,:,1] = np.dot(vtmp[:,:,1],np.transpose(snk)) d_sin[m_even,:,1] = np.dot(-vtmp[:,:,0],np.transpose(snk)) - #-- m = odd terms (vlm,wlm cosine series) + # m = odd terms (vlm,wlm cosine series) wtmp = np.zeros((len(m_odd),LMAX+1,2)) vtmp = np.zeros((len(m_odd),LMAX+1,2)) for n in range(0,LMAX+1): @@ -139,14 +139,14 @@ def harmonic_gradients(clm1, slm1, lon, lat, d_cos[m_odd,:,1] = np.dot(vtmp[:,:,1],np.transpose(cnk)) d_sin[m_odd,:,1] = np.dot(-vtmp[:,:,0],np.transpose(cnk)) - #-- Calculating cos(m*phi) and sin(m*phi) + # Calculating cos(m*phi) and sin(m*phi) ccos = np.cos(np.dot(mm,phi)) ssin = np.sin(np.dot(mm,phi)) - 
#-- Final signal recovery from fourier coefficients + # Final signal recovery from fourier coefficients gradients = np.zeros((phimax,thmax,2)) gradients[:,:,0] = np.dot(np.transpose(ccos), d_cos[:,:,0]) + \ np.dot(np.transpose(ssin), d_sin[:,:,0]) gradients[:,:,1] = np.dot(np.transpose(ccos), d_cos[:,:,1]) + \ np.dot(np.transpose(ssin), d_sin[:,:,1]) - #-- return the gradient fields + # return the gradient fields return gradients diff --git a/gravity_toolkit/harmonic_summation.py b/gravity_toolkit/harmonic_summation.py index fa131ead..691b8fc7 100755 --- a/gravity_toolkit/harmonic_summation.py +++ b/gravity_toolkit/harmonic_summation.py @@ -65,45 +65,45 @@ def harmonic_summation(clm1, slm1, lon, lat, spatial field """ - #-- if LMAX is not specified, will use the size of the input harmonics + # if LMAX is not specified, will use the size of the input harmonics if (LMAX == 0): LMAX = np.shape(clm1)[0]-1 - #-- upper bound of spherical harmonic orders (default = LMAX) + # upper bound of spherical harmonic orders (default = LMAX) if MMAX is None: MMAX = np.copy(LMAX) - #-- Longitude in radians + # Longitude in radians phi = (np.squeeze(lon)*np.pi/180.0)[np.newaxis,:] - #-- Colatitude in radians + # Colatitude in radians th = (90.0 - np.squeeze(lat))*np.pi/180.0 thmax = len(th) - #-- Calculate fourier coefficients from legendre coefficients - d_cos = np.zeros((MMAX+1,thmax))#-- [m,th] - d_sin = np.zeros((MMAX+1,thmax))#-- [m,th] + # Calculate fourier coefficients from legendre coefficients + d_cos = np.zeros((MMAX+1,thmax))# [m,th] + d_sin = np.zeros((MMAX+1,thmax))# [m,th] if PLM is None: - #-- if plms are not pre-computed: calculate Legendre polynomials + # if plms are not pre-computed: calculate Legendre polynomials PLM, dPLM = plm_holmes(LMAX, np.cos(th)) - #-- Truncating harmonics to degree and order LMAX - #-- removing coefficients below LMIN and above MMAX + # Truncating harmonics to degree and order LMAX + # removing coefficients below LMIN and above MMAX mm = np.arange(0,MMAX+1) clm = np.zeros((LMAX+1,MMAX+1)) slm = np.zeros((LMAX+1,MMAX+1)) clm[LMIN:LMAX+1,mm] = clm1[LMIN:LMAX+1,mm] slm[LMIN:LMAX+1,mm] = slm1[LMIN:LMAX+1,mm] for k in range(0,thmax): - #-- summation over all spherical harmonic degrees + # summation over all spherical harmonic degrees d_cos[:,k] = np.sum(PLM[:,mm,k]*clm[:,mm],axis=0) d_sin[:,k] = np.sum(PLM[:,mm,k]*slm[:,mm],axis=0) - #-- Final signal recovery from fourier coefficients + # Final signal recovery from fourier coefficients m = np.arange(0,MMAX+1)[:,np.newaxis] - #-- Calculating cos(m*phi) and sin(m*phi) + # Calculating cos(m*phi) and sin(m*phi) ccos = np.cos(np.dot(m,phi)) ssin = np.sin(np.dot(m,phi)) - #-- summation of cosine and sine harmonics + # summation of cosine and sine harmonics s = np.dot(np.transpose(ccos),d_cos) + np.dot(np.transpose(ssin),d_sin) - #-- return output data + # return output data return s diff --git a/gravity_toolkit/harmonics.py b/gravity_toolkit/harmonics.py index 958d6648..b276714a 100644 --- a/gravity_toolkit/harmonics.py +++ b/gravity_toolkit/harmonics.py @@ -115,10 +115,10 @@ class harmonics(object): """ np.seterr(invalid='ignore') def __init__(self, **kwargs): - #-- set default keyword arguments + # set default keyword arguments kwargs.setdefault('lmax',None) kwargs.setdefault('mmax',None) - #-- set default class attributes + # set default class attributes self.clm=None self.slm=None self.time=None @@ -141,15 +141,15 @@ def case_insensitive_filename(self,filename): filename: str input filename """ - #-- check if filename is 
open file object + # check if filename is open file object if isinstance(filename, io.IOBase): self.filename = copy.copy(filename) else: - #-- tilde-expand input filename + # tilde-expand input filename self.filename = os.path.expanduser(filename) - #-- check if file presently exists with input case + # check if file presently exists with input case if not os.access(self.filename,os.F_OK): - #-- search for filename without case dependence + # search for filename without case dependence basename = os.path.basename(filename) directory = os.path.dirname(os.path.expanduser(filename)) f = [f for f in os.listdir(directory) if re.match(basename,f,re.I)] @@ -157,7 +157,7 @@ def case_insensitive_filename(self,filename): errmsg = f'{filename} not found in file system' raise FileNotFoundError(errmsg) self.filename = os.path.join(directory,f.pop()) - #-- print filename + # print filename logging.debug(self.filename) return self @@ -180,66 +180,66 @@ def from_ascii(self, filename, **kwargs): verbose: bool, default False print file and variable information """ - #-- set filename + # set filename self.case_insensitive_filename(filename) - #-- set default parameters + # set default parameters kwargs.setdefault('date',True) kwargs.setdefault('verbose',False) kwargs.setdefault('compression',None) - #-- open the ascii file and extract contents + # open the ascii file and extract contents logging.info(self.filename) if (kwargs['compression'] == 'gzip'): - #-- read input ascii data from gzip compressed file and split lines + # read input ascii data from gzip compressed file and split lines with gzip.open(self.filename,'r') as f: file_contents = f.read().decode('ISO-8859-1').splitlines() elif (kwargs['compression'] == 'zip'): - #-- read input ascii data from zipped file and split lines + # read input ascii data from zipped file and split lines base,_ = os.path.splitext(self.filename) with zipfile.ZipFile(self.filename) as z: file_contents = z.read(base).decode('ISO-8859-1').splitlines() elif (kwargs['compression'] == 'bytes'): - #-- read input file object and split lines + # read input file object and split lines file_contents = self.filename.read().splitlines() else: - #-- read input ascii file (.txt, .asc) and split lines + # read input ascii file (.txt, .asc) and split lines with open(self.filename, mode='r', encoding='utf8') as f: file_contents = f.read().splitlines() - #-- compile regular expression operator for extracting numerical values - #-- from input ascii files of spherical harmonics + # compile regular expression operator for extracting numerical values + # from input ascii files of spherical harmonics regex_pattern = r'[-+]?(?:(?:\d*\.\d+)|(?:\d+\.?))(?:[EeD][+-]?\d+)?' 
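        # note: the pattern above matches signed integer, decimal and
        # exponential values, including FORTRAN-style double-precision
        # exponents such as 0.12345D-07; any 'D' exponents are swapped
        # for 'E' before the coefficients are cast with np.float64 below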
rx = re.compile(regex_pattern, re.VERBOSE) - #-- find maximum degree and order of harmonics + # find maximum degree and order of harmonics self.lmax = 0 self.mmax = 0 - #-- for each line in the file + # for each line in the file for line in file_contents: l1,m1,clm1,slm1,*aux = rx.findall(line) - #-- convert line degree and order to integers + # convert line degree and order to integers l1,m1 = np.array([l1,m1],dtype=np.int64) self.lmax = np.copy(l1) if (l1 > self.lmax) else self.lmax self.mmax = np.copy(m1) if (m1 > self.mmax) else self.mmax - #-- output spherical harmonics dimensions array + # output spherical harmonics dimensions array self.l = np.arange(self.lmax+1) self.m = np.arange(self.mmax+1) - #-- output spherical harmonics data + # output spherical harmonics data self.clm = np.zeros((self.lmax+1,self.mmax+1)) self.slm = np.zeros((self.lmax+1,self.mmax+1)) - #-- if the ascii file contains date variables + # if the ascii file contains date variables if kwargs['date']: self.time = np.float64(aux[0]) self.month = np.int64(calendar_to_grace(self.time)) - #-- adjust months to fix special cases if necessary + # adjust months to fix special cases if necessary self.month = adjust_months(self.month) - #-- extract harmonics and convert to matrix - #-- for each line in the file + # extract harmonics and convert to matrix + # for each line in the file for line in file_contents: l1,m1,clm1,slm1,*aux = rx.findall(line) - #-- convert line degree and order to integers + # convert line degree and order to integers ll,mm = np.array([l1,m1],dtype=np.int64) - #-- convert fortran exponentials if applicable + # convert Fortran exponents if applicable self.clm[ll,mm] = np.float64(clm1.replace('D','E')) self.slm[ll,mm] = np.float64(slm1.replace('D','E')) - #-- assign shape and ndim attributes + # assign shape and ndim attributes self.update_dimensions() return self @@ -262,57 +262,57 @@ def from_netCDF4(self, filename, **kwargs): verbose: bool, default False print file and variable information """ - #-- set filename + # set filename self.case_insensitive_filename(filename) - #-- set default parameters + # set default parameters kwargs.setdefault('date',True) kwargs.setdefault('verbose',False) kwargs.setdefault('compression',None) - #-- Open the NetCDF4 file for reading + # Open the NetCDF4 file for reading if (kwargs['compression'] == 'gzip'): - #-- read as in-memory (diskless) netCDF4 dataset + # read as in-memory (diskless) netCDF4 dataset with gzip.open(self.filename,'r') as f: fileID = netCDF4.Dataset(uuid.uuid4().hex, memory=f.read()) elif (kwargs['compression'] == 'zip'): - #-- read zipped file and extract file into in-memory file object + # read zipped file and extract file into in-memory file object fileBasename,_ = os.path.splitext(os.path.basename(filename)) with zipfile.ZipFile(self.filename) as z: - #-- first try finding a netCDF4 file with same base filename - #-- if none found simply try searching for a netCDF4 file + # first try finding a netCDF4 file with same base filename + # if none found simply try searching for a netCDF4 file try: f,=[f for f in z.namelist() if re.match(fileBasename,f,re.I)] except: f,=[f for f in z.namelist() if re.search(r'\.nc(4)?$',f)] - #-- read bytes from zipfile as in-memory (diskless) netCDF4 dataset + # read bytes from zipfile as in-memory (diskless) netCDF4 dataset fileID = netCDF4.Dataset(uuid.uuid4().hex, memory=z.read(f)) elif (kwargs['compression'] == 'bytes'): - #-- read as in-memory (diskless) netCDF4 dataset + # read as in-memory (diskless)
netCDF4 dataset fileID = netCDF4.Dataset(uuid.uuid4().hex, memory=filename.read()) else: - #-- read netCDF4 dataset + # read netCDF4 dataset fileID = netCDF4.Dataset(self.filename, 'r') - #-- Output NetCDF file information + # Output NetCDF file information logging.info(fileID.filepath()) logging.info(list(fileID.variables.keys())) - #-- read flattened spherical harmonics + # read flattened spherical harmonics temp = harmonics() temp.filename = copy.copy(self.filename) - #-- create list of variables to retrieve + # create list of variables to retrieve fields = ['l','m','clm','slm'] - #-- retrieve date variables if specified + # retrieve date variables if specified if kwargs['date']: fields.extend(['time','month']) - #-- Getting the data from each NetCDF variable + # Getting the data from each NetCDF variable for field in fields: setattr(temp, field, fileID.variables[field][:].copy()) - #-- calculate maximum degree and order + # calculate maximum degree and order temp.lmax = np.max(temp.l) temp.mmax = np.max(temp.m) - #-- expand the spherical harmonics to dimensions + # expand the spherical harmonics to dimensions self = temp.expand(date=kwargs['date']) - #-- adjust months to fix special cases if necessary + # adjust months to fix special cases if necessary self.month = adjust_months(self.month) - #-- get attributes for the included variables + # get attributes for the included variables for key in fields: try: self.attributes[key] = [ @@ -321,16 +321,16 @@ def from_netCDF4(self, filename, **kwargs): ] except (KeyError,ValueError,AttributeError): pass - #-- Global attributes + # Global attributes for att_name in ['title','description','reference']: try: s, = [s for s in fileID.ncattrs() if re.match(att_name,s,re.I)] self.attributes[att_name] = fileID.getncattr(s) except (ValueError, KeyError, AttributeError): pass - #-- Closing the NetCDF file + # Closing the NetCDF file fileID.close() - #-- assign shape and ndim attributes + # assign shape and ndim attributes self.update_dimensions() return self @@ -353,70 +353,70 @@ def from_HDF5(self, filename, **kwargs): verbose: bool, default False print file and variable information """ - #-- set filename + # set filename self.case_insensitive_filename(filename) - #-- set default parameters + # set default parameters kwargs.setdefault('date',True) kwargs.setdefault('verbose',False) kwargs.setdefault('compression',None) - #-- Open the HDF5 file for reading + # Open the HDF5 file for reading if (kwargs['compression'] == 'gzip'): - #-- read gzip compressed file and extract into in-memory file object + # read gzip compressed file and extract into in-memory file object with gzip.open(self.filename,'r') as f: fid = io.BytesIO(f.read()) - #-- set filename of BytesIO object + # set filename of BytesIO object fid.filename = os.path.basename(filename) - #-- rewind to start of file + # rewind to start of file fid.seek(0) - #-- read as in-memory (diskless) HDF5 dataset from BytesIO object + # read as in-memory (diskless) HDF5 dataset from BytesIO object fileID = h5py.File(fid, 'r') elif (kwargs['compression'] == 'zip'): - #-- read zipped file and extract file into in-memory file object + # read zipped file and extract file into in-memory file object fileBasename,_ = os.path.splitext(os.path.basename(filename)) with zipfile.ZipFile(self.filename) as z: - #-- first try finding a HDF5 file with same base filename - #-- if none found simply try searching for a HDF5 file + # first try finding a HDF5 file with same base filename + # if none found simply try searching 
for a HDF5 file try: f,=[f for f in z.namelist() if re.match(fileBasename,f,re.I)] except: f,=[f for f in z.namelist() if re.search(r'\.H(DF)?5$',f,re.I)] - #-- read bytes from zipfile into in-memory BytesIO object + # read bytes from zipfile into in-memory BytesIO object fid = io.BytesIO(z.read(f)) - #-- set filename of BytesIO object + # set filename of BytesIO object fid.filename = os.path.basename(filename) - #-- rewind to start of file + # rewind to start of file fid.seek(0) - #-- read as in-memory (diskless) HDF5 dataset from BytesIO object + # read as in-memory (diskless) HDF5 dataset from BytesIO object fileID = h5py.File(fid, 'r') elif (kwargs['compression'] == 'bytes'): - #-- read as in-memory (diskless) HDF5 dataset + # read as in-memory (diskless) HDF5 dataset fileID = h5py.File(self.filename, 'r') else: - #-- read HDF5 dataset + # read HDF5 dataset fileID = h5py.File(self.filename, 'r') - #-- Output HDF5 file information + # Output HDF5 file information logging.info(fileID.filename) logging.info(list(fileID.keys())) - #-- read flattened spherical harmonics + # read flattened spherical harmonics temp = harmonics() temp.filename = copy.copy(self.filename) - #-- create list of variables to retrieve + # create list of variables to retrieve fields = ['l','m','clm','slm'] - #-- retrieve date variables if specified + # retrieve date variables if specified if kwargs['date']: fields.extend(['time','month']) - #-- Getting the data from each HDF5 variable + # Getting the data from each HDF5 variable for field in fields: setattr(temp, field, fileID[field][:].copy()) - #-- calculate maximum degree and order + # calculate maximum degree and order temp.lmax = np.max(temp.l) temp.mmax = np.max(temp.m) - #-- expand the spherical harmonics to dimensions + # expand the spherical harmonics to dimensions self = temp.expand(date=kwargs['date']) - #-- adjust months to fix special cases if necessary + # adjust months to fix special cases if necessary self.month = adjust_months(self.month) - #-- Getting attributes of clm/slm and included variables - #-- get attributes for the included variables + # Getting attributes of clm/slm and included variables + # get attributes for the included variables for key in fields: try: self.attributes[key] = [ @@ -425,15 +425,15 @@ def from_HDF5(self, filename, **kwargs): ] except (KeyError, AttributeError): pass - #-- Global attributes + # Global attributes for att_name in ['title','description','reference']: try: self.attributes[att_name] = fileID.attrs[att_name] except (ValueError, KeyError, AttributeError): pass - #-- Closing the HDF5 file + # Closing the HDF5 file fileID.close() - #-- assign shape and ndim attributes + # assign shape and ndim attributes self.update_dimensions() return self @@ -456,39 +456,39 @@ def from_gfc(self, filename, **kwargs): verbose: bool, default False print file and variable information """ - #-- set filename + # set filename self.case_insensitive_filename(filename) - #-- set default parameters + # set default parameters kwargs.setdefault('date',False) kwargs.setdefault('tide',None) kwargs.setdefault('verbose',False) - #-- read data from gfc file + # read data from gfc file if kwargs['date']: Ylms = read_gfc_harmonics(self.filename, TIDE=kwargs['tide']) else: Ylms = read_ICGEM_harmonics(self.filename, TIDE=kwargs['tide']) - #-- Output file information + # Output file information logging.info(self.filename) logging.info(list(Ylms.keys())) - #-- copy variables for gravity model + # copy variables for gravity model self.clm = 
Ylms['clm'].copy() self.slm = Ylms['slm'].copy() self.lmax = np.int64(Ylms['max_degree']) self.mmax = np.int64(Ylms['max_degree']) self.l = np.arange(self.lmax+1) self.m = np.arange(self.mmax+1) - #-- copy date variables + # copy date variables if kwargs['date']: self.time = Ylms['time'].copy() - #-- adjust months to fix special cases if necessary + # adjust months to fix special cases if necessary self.month = adjust_months(Ylms['month']) - #-- geophysical parameters of gravity model + # geophysical parameters of gravity model self.GM = np.float64(Ylms['earth_gravity_constant']) self.R = np.float64(Ylms['radius']) self.tide = Ylms['tide_system'] - #-- assign shape and ndim attributes + # assign shape and ndim attributes self.update_dimensions() return self @@ -507,23 +507,23 @@ def from_SHM(self, filename, **kwargs): verbose: bool, default False print file and variable information """ - #-- set filename + # set filename self.case_insensitive_filename(filename) - #-- set default keyword arguments + # set default keyword arguments kwargs.setdefault('verbose',False) - #-- read data from SHM file + # read data from SHM file Ylms = read_GRACE_harmonics(self.filename, self.lmax, **kwargs) - #-- Output file information + # Output file information logging.info(self.filename) logging.info(list(Ylms.keys())) - #-- copy variables for gravity model + # copy variables for gravity model self.clm = Ylms['clm'].copy() self.slm = Ylms['slm'].copy() self.time = Ylms['time'].copy() self.month = np.int64(calendar_to_grace(self.time)) - #-- copy header information for gravity model + # copy header information for gravity model self.header = Ylms['header'] - #-- assign shape and ndim attributes + # assign shape and ndim attributes self.update_dimensions() return self @@ -555,30 +555,30 @@ def from_GIA(self, filename, **kwargs): verbose: bool, default False print file and variable information """ - #-- set filename + # set filename self.case_insensitive_filename(filename) - #-- set default keyword arguments + # set default keyword arguments kwargs.setdefault('GIA',None) kwargs.setdefault('MMAX',None) kwargs.setdefault('verbose',False) - #-- read data from GIA file + # read data from GIA file Ylms = read_GIA_model(self.filename, GIA=kwargs['GIA'], LMAX=self.lmax, MMAX=kwargs['MMAX']) - #-- Output file information + # Output file information logging.info(self.filename) logging.info(list(Ylms.keys())) - #-- copy variables for GIA model + # copy variables for GIA model self.clm = Ylms['clm'].copy() self.slm = Ylms['slm'].copy() - #-- copy dimensions for GIA model + # copy dimensions for GIA model self.lmax = np.max(Ylms['l']) self.mmax = np.max(Ylms['m']) - #-- copy information for GIA model + # copy information for GIA model self.title = Ylms['title'] self.citation = Ylms['citation'] self.reference = Ylms['reference'] self.url = Ylms['url'] - #-- assign shape and ndim attributes + # assign shape and ndim attributes self.update_dimensions() return self @@ -601,33 +601,33 @@ def from_index(self, filename, **kwargs): sort: bool, default True sort harmonics objects by date information """ - #-- set default keyword arguments + # set default keyword arguments kwargs.setdefault('format',None) kwargs.setdefault('date',True) kwargs.setdefault('sort',True) - #-- set filename + # set filename self.case_insensitive_filename(filename) - #-- file parser for reading index files - #-- removes commented lines (can comment out files in the index) - #-- removes empty lines (if there are extra empty lines) + # file parser for reading 
index files + # removes commented lines (can comment out files in the index) + # removes empty lines (if there are extra empty lines) parser = re.compile(r'^(?!\#|\%|$)', re.VERBOSE) - #-- Read index file of input spherical harmonics + # Read index file of input spherical harmonics with open(self.filename, mode='r', encoding='utf8') as f: file_list = [l for l in f.read().splitlines() if parser.match(l)] - #-- create a list of harmonic objects + # create a list of harmonic objects h = [] - #-- for each file in the index + # for each file in the index for i,f in enumerate(file_list): if (kwargs['format'] == 'ascii'): - #-- ascii (.txt) + # ascii (.txt) h.append(harmonics().from_ascii(f, date=kwargs['date'])) elif (kwargs['format'] == 'netCDF4'): - #-- netcdf (.nc) + # netcdf (.nc) h.append(harmonics().from_netCDF4(f, date=kwargs['date'])) elif (kwargs['format'] == 'HDF5'): - #-- HDF5 (.H5) + # HDF5 (.H5) h.append(harmonics().from_HDF5(f, date=kwargs['date'])) - #-- create a single harmonic object from the list + # create a single harmonic object from the list return self.from_list(h,date=kwargs['date'],sort=kwargs['sort']) def from_list(self, object_list, **kwargs): @@ -645,51 +645,51 @@ def from_list(self, object_list, **kwargs): clear: bool, default True clear the harmonics list from memory """ - #-- set default keyword arguments + # set default keyword arguments kwargs.setdefault('date',True) kwargs.setdefault('sort',True) kwargs.setdefault('clear',False) - #-- number of harmonic objects in list + # number of harmonic objects in list n = len(object_list) - #-- indices to sort data objects if harmonics list contain dates + # indices to sort data objects if harmonics list contain dates if kwargs['date'] and kwargs['sort']: list_sort = np.argsort([d.time for d in object_list],axis=None) else: list_sort = np.arange(n) - #-- truncate to maximum degree and order + # truncate to maximum degree and order self.lmax = np.min([d.lmax for d in object_list]) self.mmax = np.min([d.mmax for d in object_list]) - #-- output degree and order + # output degree and order self.l = np.arange(self.lmax+1) self.m = np.arange(self.mmax+1) - #-- create output harmonics + # create output harmonics self.clm = np.zeros((self.lmax+1,self.mmax+1,n)) self.slm = np.zeros((self.lmax+1,self.mmax+1,n)) - #-- create list of files + # create list of files self.filename = [] - #-- output dates + # output dates if kwargs['date']: self.time = np.zeros((n)) self.month = np.zeros((n),dtype=np.int64) - #-- for each indice + # for each index for t,i in enumerate(list_sort): self.clm[:,:,t] = object_list[i].clm[:self.lmax+1,:self.mmax+1] self.slm[:,:,t] = object_list[i].slm[:self.lmax+1,:self.mmax+1] if kwargs['date']: self.time[t] = np.atleast_1d(object_list[i].time) self.month[t] = np.atleast_1d(object_list[i].month) - #-- append filename to list + # append filename to list if getattr(object_list[i], 'filename'): self.filename.append(object_list[i].filename) - #-- adjust months to fix special cases if necessary + # adjust months to fix special cases if necessary if kwargs['date']: self.month = adjust_months(self.month) - #-- assign shape and ndim attributes + # assign shape and ndim attributes self.update_dimensions() - #-- clear the input list to free memory + # clear the input list to free memory if kwargs['clear']: object_list = None - #-- return the single harmonic object + # return the single harmonic object return self def from_file(self, filename, format=None, date=True, **kwargs): @@ -715,25 +715,25 @@ def from_file(self,
filename, format=None, date=True, **kwargs): **kwargs: dict keyword arguments for input readers """ - #-- set filename + # set filename self.case_insensitive_filename(filename) - #-- set default verbosity + # set default verbosity kwargs.setdefault('verbose',False) - #-- read from file + # read from file if (format == 'ascii'): - #-- ascii (.txt) + # ascii (.txt) return harmonics().from_ascii(filename, date=date, **kwargs) elif (format == 'netCDF4'): - #-- netcdf (.nc) + # netcdf (.nc) return harmonics().from_netCDF4(filename, date=date, **kwargs) elif (format == 'HDF5'): - #-- HDF5 (.H5) + # HDF5 (.H5) return harmonics().from_HDF5(filename, date=date, **kwargs) elif (format == 'gfc'): - #-- ICGEM gravity model (.gfc) + # ICGEM gravity model (.gfc) return harmonics().from_gfc(filename, **kwargs) elif (format == 'SHM'): - #-- spherical harmonic model + # spherical harmonic model return harmonics().from_SHM(filename, self.lmax, **kwargs) def from_dict(self, d, **kwargs): @@ -745,16 +745,16 @@ def from_dict(self, d, **kwargs): d: dict dictionary object to be converted """ - #-- assign dictionary variables to self + # assign dictionary variables to self for key in ['l','m','clm','slm','time','month']: try: setattr(self, key, d[key].copy()) except (AttributeError, KeyError): pass - #-- maximum degree and order + # maximum degree and order self.lmax = np.max(d['l']) self.mmax = np.max(d['m']) - #-- assign shape and ndim attributes + # assign shape and ndim attributes self.update_dimensions() return self @@ -772,21 +772,21 @@ def to_ascii(self, filename, date=True, **kwargs): Output file and variable information """ self.filename = os.path.expanduser(filename) - #-- set default verbosity + # set default verbosity kwargs.setdefault('verbose',False) logging.info(self.filename) - #-- open the output file + # open the output file fid = open(self.filename, 'w') if date: file_format = '{0:5d} {1:5d} {2:+21.12e} {3:+21.12e} {4:10.4f}' else: file_format = '{0:5d} {1:5d} {2:+21.12e} {3:+21.12e}' - #-- write to file for each spherical harmonic degree and order + # write to file for each spherical harmonic degree and order for m in range(0, self.mmax+1): for l in range(m, self.lmax+1): args = (l, m, self.clm[l,m], self.slm[l,m], self.time) print(file_format.format(*args), file=fid) - #-- close the output file + # close the output file fid.close() def to_netCDF4(self, filename, **kwargs): @@ -820,7 +820,7 @@ def to_netCDF4(self, filename, **kwargs): verbose: bool, default False Output file and variable information """ - #-- set default keyword arguments + # set default keyword arguments kwargs.setdefault('units','Geodesy_Normalization') kwargs.setdefault('time_units','years') kwargs.setdefault('time_longname','Date_in_Decimal_Years') @@ -832,31 +832,31 @@ def to_netCDF4(self, filename, **kwargs): kwargs.setdefault('date',True) kwargs.setdefault('clobber',True) kwargs.setdefault('verbose',False) - #-- setting NetCDF clobber attribute + # setting NetCDF clobber attribute clobber = 'w' if kwargs['clobber'] else 'a' - #-- opening netCDF file for writing + # opening netCDF file for writing self.filename = os.path.expanduser(filename) fileID = netCDF4.Dataset(self.filename, clobber, format="NETCDF4") - #-- flatten harmonics + # flatten harmonics temp = self.flatten(date=kwargs['date']) - #-- Defining the netCDF dimensions + # Defining the netCDF dimensions n_harm = len(temp.l) fields = ['l','m','clm','slm'] fileID.createDimension('lm', n_harm) - #-- defining netCDF temporal dimension + # defining netCDF temporal 
dimension if kwargs['date']: n_time = len(np.atleast_1d(temp.time)) fields.extend(['time','month']) fileID.createDimension('time', n_time) - #-- convert time variables to arrays + # convert time variables to arrays temp.time = np.atleast_1d(temp.time) temp.month = np.atleast_1d(temp.month) - #-- defining the netCDF variables + # defining the netCDF variables nc = {} - #-- degree and order + # degree and order nc['l'] = fileID.createVariable('l', 'i', ('lm',)) nc['m'] = fileID.createVariable('m', 'i', ('lm',)) - #-- spherical harmonics + # spherical harmonics if (kwargs['date'] and (n_time > 1)): nc['clm'] = fileID.createVariable('clm', 'd', ('lm','time',)) nc['slm'] = fileID.createVariable('slm', 'd', ('lm','time',)) @@ -864,45 +864,45 @@ def to_netCDF4(self, filename, **kwargs): nc['clm'] = fileID.createVariable('clm', 'd', ('lm',)) nc['slm'] = fileID.createVariable('slm', 'd', ('lm',)) if kwargs['date']: - #-- time (in decimal form) + # time (in decimal form) nc['time'] = fileID.createVariable('time', 'd', ('time',)) - #-- GRACE/GRACE-FO month (or integer date) + # GRACE/GRACE-FO month (or integer date) nc['month'] = fileID.createVariable(kwargs['months_name'], 'i', ('time',)) - #-- filling netCDF variables + # filling netCDF variables for key in fields: nc[key][:] = getattr(temp,key) - #-- Defining attributes for degree and order - #-- SH degree long name + # Defining attributes for degree and order + # SH degree long name nc['l'].long_name = 'spherical_harmonic_degree' - #-- SH degree units + # SH degree units nc['l'].units = 'Wavenumber' - #-- SH order long name + # SH order long name nc['m'].long_name = 'spherical_harmonic_order' - #-- SH order units + # SH order units nc['m'].units = 'Wavenumber' - #-- Defining attributes for harmonics + # Defining attributes for harmonics nc['clm'].long_name = 'cosine_spherical_harmonics' nc['clm'].units = kwargs['units'] nc['slm'].long_name = 'sine_spherical_harmonics' nc['slm'].units = kwargs['units'] if kwargs['date']: - #-- Defining attributes for date and month + # Defining attributes for date and month nc['time'].long_name = kwargs['time_longname'] nc['time'].units = kwargs['time_units'] nc['month'].long_name = kwargs['months_longname'] nc['month'].units = kwargs['months_units'] - #-- global variables of NetCDF file + # global variables of NetCDF file if kwargs['title']: fileID.title = kwargs['title'] if kwargs['reference']: fileID.reference = kwargs['reference'] - #-- date created + # date created fileID.date_created = time.strftime('%Y-%m-%d',time.localtime()) - #-- Output netCDF structure information + # Output netCDF structure information logging.info(self.filename) logging.info(list(fileID.variables.keys())) - #-- Closing the netCDF file + # Closing the netCDF file fileID.close() def to_HDF5(self, filename, **kwargs): @@ -936,7 +936,7 @@ def to_HDF5(self, filename, **kwargs): verbose: bool, default False Output file and variable information """ - #-- set default keyword arguments + # set default keyword arguments kwargs.setdefault('units','Geodesy_Normalization') kwargs.setdefault('time_units','years') kwargs.setdefault('time_longname','Date_in_Decimal_Years') @@ -948,59 +948,59 @@ def to_HDF5(self, filename, **kwargs): kwargs.setdefault('date',True) kwargs.setdefault('clobber',True) kwargs.setdefault('verbose',False) - #-- setting HDF5 clobber attribute + # setting HDF5 clobber attribute clobber = 'w' if kwargs['clobber'] else 'w-' - #-- opening HDF5 file for writing + # opening HDF5 file for writing self.filename = 
os.path.expanduser(filename) fileID = h5py.File(self.filename, clobber) - #-- flatten harmonics + # flatten harmonics temp = self.flatten(date=kwargs['date']) fields = ['l','m','clm','slm'] - #-- defining netCDF temporal dimension + # defining HDF5 temporal variables if kwargs['date']: fields.extend(['time','month']) - #-- convert time variables to arrays + # convert time variables to arrays temp.time = np.atleast_1d(temp.time) temp.month = np.atleast_1d(temp.month) - #-- Defining the HDF5 dataset variables + # Defining the HDF5 dataset variables h5 = {} for key in fields: val = getattr(temp, key) h5[key] = fileID.create_dataset(key, val.shape, data=val, dtype=val.dtype, compression='gzip') - #-- filling HDF5 dataset attributes - #-- Defining attributes for degree and order - #-- degree long name + # filling HDF5 dataset attributes + # Defining attributes for degree and order + # degree long name h5['l'].attrs['long_name'] = 'spherical_harmonic_degree' - #-- SH degree units + # SH degree units h5['l'].attrs['units'] = 'Wavenumber' - #-- order long name + # order long name h5['m'].attrs['long_name'] = 'spherical_harmonic_order' - #-- SH order units + # SH order units h5['m'].attrs['units'] = 'Wavenumber' - #-- Defining attributes for dataset + # Defining attributes for dataset h5['clm'].attrs['long_name'] = 'cosine_spherical_harmonics' h5['clm'].attrs['units'] = kwargs['units'] h5['slm'].attrs['long_name'] = 'sine_spherical_harmonics' h5['slm'].attrs['units'] = kwargs['units'] if kwargs['date']: - #-- Defining attributes for date and month (or integer date) + # Defining attributes for date and month (or integer date) h5['time'].attrs['long_name'] = kwargs['time_longname'] h5['time'].attrs['units'] = kwargs['time_units'] h5['month'].attrs['long_name'] = kwargs['months_longname'] h5['month'].attrs['units'] = kwargs['months_units'] - #-- description of file + # description of file if kwargs['title']: fileID.attrs['description'] = kwargs['title'] - #-- reference of file + # reference of file if kwargs['reference']: fileID.attrs['reference'] = kwargs['reference'] - #-- date created + # date created fileID.attrs['date_created'] = time.strftime('%Y-%m-%d',time.localtime()) - #-- Output HDF5 structure information + # Output HDF5 structure information logging.info(self.filename) logging.info(list(fileID.keys())) - #-- Closing the HDF5 file + # Closing the HDF5 file fileID.close() def to_index(self, filename, file_list, format=None, date=True, **kwargs): @@ -1026,28 +1026,28 @@ def to_index(self, filename, file_list, format=None, date=True, **kwargs): kwargs: dict keyword arguments for output writers """ - #-- Write index file of output spherical harmonics + # Write index file of output spherical harmonics self.filename = os.path.expanduser(filename) fid = open(self.filename,'w') - #-- set default verbosity + # set default verbosity kwargs.setdefault('verbose',False) - #-- for each file to be in the index + # for each file to be in the index for i,f in enumerate(file_list): - #-- print filename to index + # print filename to index print(f.replace(os.path.expanduser('~'),'~'), file=fid) - #-- index harmonics object at i + # index harmonics object at i h = self.index(i, date=date) - #-- write to file + # write to file if (format == 'ascii'): - #-- ascii (.txt) + # ascii (.txt) h.to_ascii(f, date=date, **kwargs) elif (format == 'netCDF4'): - #-- netcdf (.nc) + # netcdf (.nc) h.to_netCDF4(f, date=date, **kwargs) elif (format == 'HDF5'): - #-- HDF5 (.H5) + # HDF5 (.H5) h.to_HDF5(f, date=date, **kwargs)
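For review context, a minimal round-trip sketch of the index writer above and its from_index counterpart; this is not part of the patch, the paths and zero-filled test object are invented, and it assumes the default output variable names round-trip cleanly:

import numpy as np
from gravity_toolkit.harmonics import harmonics
# build a small dated harmonics object: two months of zeros to degree 60
Ylms = harmonics(lmax=60, mmax=60)
Ylms.clm = np.zeros((61, 61, 2))
Ylms.slm = np.zeros((61, 61, 2))
Ylms.time = np.array([2002.2877, 2002.3699])
Ylms.month = np.array([4, 5], dtype=np.int64)
Ylms.update_dimensions()
# write one netCDF4 file per month plus an index file listing them
files = [f'/tmp/harmonics_{m:03d}.nc' for m in Ylms.month]
Ylms.to_index('/tmp/harmonics.index', files, format='netCDF4')
# rebuild the multi-month object by reading the index back
Ylms2 = harmonics().from_index('/tmp/harmonics.index', format='netCDF4')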
- #-- close the index file + # close the index file fid.close() def to_file(self, filename, format=None, date=True, **kwargs): @@ -1071,17 +1071,17 @@ def to_file(self, filename, format=None, date=True, **kwargs): kwargs: dict keyword arguments for output writers """ - #-- set default verbosity + # set default verbosity kwargs.setdefault('verbose',False) - #-- write to file + # write to file if (format == 'ascii'): - #-- ascii (.txt) + # ascii (.txt) self.to_ascii(filename, date=date, **kwargs) elif (format == 'netCDF4'): - #-- netcdf (.nc) + # netcdf (.nc) self.to_netCDF4(filename, date=date, **kwargs) elif (format == 'HDF5'): - #-- HDF5 (.H5) + # HDF5 (.H5) self.to_HDF5(filename, date=date, **kwargs) def to_dict(self): @@ -1093,27 +1093,27 @@ def to_dict(self): d: dict converted dictionary object """ - #-- assign dictionary variables from self + # assign dictionary variables from self d = {} for key in ['l','m','clm','slm','time','month']: try: d[key] = getattr(self, key) except (AttributeError, KeyError): pass - #-- return the dictionary object + # return the dictionary object return d def to_masked_array(self): """ Convert a harmonics object to a masked numpy array """ - #-- reassign shape and ndim attributes + # reassign shape and ndim attributes self.update_dimensions() - #-- verify dimensions and get shape + # verify dimensions and get shape ndim_prev = self.ndim self.expand_dims() l1,m1,nt = self.shape - #-- create single triangular matrices with harmonics + # create single triangular matrices with harmonics Ylms = np.ma.zeros((self.lmax+1,2*self.lmax+1,nt)) Ylms.mask = np.ones((self.lmax+1,2*self.lmax+1,nt),dtype=bool) for m in range(-self.mmax,self.mmax+1): @@ -1125,10 +1125,10 @@ def to_masked_array(self): else: Ylms.data[l,self.lmax+m,:] = self.clm[l,mm,:] Ylms.mask[l,self.lmax+m,:] = False - #-- reshape to previous + # reshape to previous if (self.ndim != ndim_prev): self.squeeze() - #-- return the triangular matrix + # return the triangular matrix return Ylms def update_dimensions(self): @@ -1150,7 +1150,7 @@ def add(self, temp): temp: obj harmonic object to be added """ - #-- reassign shape and ndim attributes + # reassign shape and ndim attributes self.update_dimensions() temp.update_dimensions() l1 = self.lmax+1 if (temp.lmax > self.lmax) else temp.lmax+1 @@ -1176,7 +1176,7 @@ def subtract(self, temp): temp: obj harmonic object to be subtracted """ - #-- reassign shape and ndim attributes + # reassign shape and ndim attributes self.update_dimensions() temp.update_dimensions() l1 = self.lmax+1 if (temp.lmax > self.lmax) else temp.lmax+1 @@ -1202,7 +1202,7 @@ def multiply(self, temp): temp: obj harmonic object to be multiplied """ - #-- reassign shape and ndim attributes + # reassign shape and ndim attributes self.update_dimensions() temp.update_dimensions() l1 = self.lmax+1 if (temp.lmax > self.lmax) else temp.lmax+1 @@ -1228,14 +1228,14 @@ def divide(self, temp): temp: obj harmonic object to be divided """ - #-- reassign shape and ndim attributes + # reassign shape and ndim attributes self.update_dimensions() temp.update_dimensions() l1 = self.lmax+1 if (temp.lmax > self.lmax) else temp.lmax+1 m1 = self.mmax+1 if (temp.mmax > self.mmax) else temp.mmax+1 - #-- indices for cosine spherical harmonics (including zonals) + # indices for cosine spherical harmonics (including zonals) lc,mc = np.tril_indices(l1, m=m1) - #-- indices for sine spherical harmonics (excluding zonals) + # indices for sine spherical harmonics (excluding zonals) m0 = np.nonzero(mc != 0) ls,ms = 
(lc[m0],mc[m0]) if (self.ndim == 2): @@ -1255,14 +1255,14 @@ def copy(self): Copy a harmonics object to a new harmonics object """ temp = harmonics(lmax=self.lmax, mmax=self.mmax) - #-- try to assign variables to self + # try to assign variables to self for key in ['clm','slm','time','month','shape','ndim','filename']: try: val = getattr(self, key) setattr(temp, key, np.copy(val)) except AttributeError: pass - #-- assign ndim and shape attributes + # assign ndim and shape attributes temp.update_dimensions() return temp @@ -1271,14 +1271,14 @@ def zeros_like(self): Create a harmonics object using the dimensions of another """ temp = harmonics(lmax=self.lmax, mmax=self.mmax) - #-- assign variables to self + # assign variables to self for key in ['clm','slm','time','month']: try: val = getattr(self, key) setattr(temp, key, np.zeros_like(val)) except AttributeError: pass - #-- assign ndim and shape attributes + # assign ndim and shape attributes temp.update_dimensions() return temp @@ -1286,14 +1286,14 @@ def expand_dims(self): """ Add a singleton dimension to a harmonics object if non-existent """ - #-- change time dimensions to be iterable + # change time dimensions to be iterable self.time = np.atleast_1d(self.time) self.month = np.atleast_1d(self.month) - #-- output harmonics with a third dimension + # output harmonics with a third dimension if (self.ndim == 2): self.clm = self.clm[:,:,None] self.slm = self.slm[:,:,None] - #-- reassign ndim and shape attributes + # reassign ndim and shape attributes self.update_dimensions() return self @@ -1301,12 +1301,12 @@ def squeeze(self): """ Remove singleton dimensions from a harmonics object """ - #-- squeeze singleton dimensions + # squeeze singleton dimensions self.time = np.squeeze(self.time) self.month = np.squeeze(self.month) self.clm = np.squeeze(self.clm) self.slm = np.squeeze(self.slm) - #-- reassign ndim and shape attributes + # reassign ndim and shape attributes self.update_dimensions() return self @@ -1321,18 +1321,18 @@ def flatten(self, date=True): """ n_harm = (self.lmax**2 + 3*self.lmax - (self.lmax-self.mmax)**2 - (self.lmax-self.mmax))//2 + 1 - #-- restructured degree and order + # restructured degree and order temp = harmonics(lmax=self.lmax, mmax=self.mmax) temp.l = np.zeros((n_harm,), dtype=np.int32) temp.m = np.zeros((n_harm,), dtype=np.int32) - #-- get filenames if applicable + # get filenames if applicable if getattr(self, 'filename'): temp.filename = copy.copy(self.filename) - #-- copy date variables if applicable + # copy date variables if applicable if date: temp.time = np.copy(self.time) temp.month = np.copy(self.month) - #-- restructured spherical harmonic arrays + # restructured spherical harmonic arrays if (self.clm.ndim == 2): temp.clm = np.zeros((n_harm)) temp.slm = np.zeros((n_harm)) @@ -1340,10 +1340,10 @@ def flatten(self, date=True): n = self.clm.shape[-1] temp.clm = np.zeros((n_harm,n)) temp.slm = np.zeros((n_harm,n)) - #-- create counter variable lm + # create counter variable lm lm = 0 - for m in range(0,self.mmax+1):#-- MMAX+1 to include MMAX - for l in range(m,self.lmax+1):#-- LMAX+1 to include LMAX + for m in range(0,self.mmax+1):# MMAX+1 to include MMAX + for l in range(m,self.lmax+1):# LMAX+1 to include LMAX temp.l[lm] = np.int64(l) temp.m[lm] = np.int64(m) if (self.clm.ndim == 2): @@ -1352,12 +1352,12 @@ def flatten(self, date=True): else: temp.clm[lm,:] = self.clm[l,m,:] temp.slm[lm,:] = self.slm[l,m,:] - #-- add 1 to lm counter variable + # add 1 to lm counter variable lm += 1 - #-- assign ndim and 
shape attributes + # assign ndim and shape attributes temp.ndim = temp.clm.ndim temp.shape = temp.clm.shape - #-- return the flattened arrays + # return the flattened arrays return temp def expand(self, date=True): @@ -1371,16 +1371,16 @@ def expand(self, date=True): """ n_harm = (self.lmax**2 + 3*self.lmax - (self.lmax-self.mmax)**2 - (self.lmax-self.mmax))//2 + 1 - #-- restructured degree and order + # restructured degree and order temp = harmonics(lmax=self.lmax, mmax=self.mmax) - #-- get filenames if applicable + # get filenames if applicable if getattr(self, 'filename'): temp.filename = copy.copy(self.filename) - #-- copy date variables if applicable + # copy date variables if applicable if date: temp.time = np.copy(self.time) temp.month = np.copy(self.month) - #-- restructured spherical harmonic matrices + # restructured spherical harmonic matrices if (self.clm.ndim == 1): temp.clm = np.zeros((self.lmax+1,self.mmax+1)) temp.slm = np.zeros((self.lmax+1,self.mmax+1)) @@ -1388,7 +1388,7 @@ def expand(self, date=True): n = self.clm.shape[-1] temp.clm = np.zeros((self.lmax+1,self.mmax+1,n)) temp.slm = np.zeros((self.lmax+1,self.mmax+1,n)) - #-- create counter variable lm + # create counter variable lm for lm in range(n_harm): l = self.l[lm] m = self.m[lm] @@ -1398,9 +1398,9 @@ def expand(self, date=True): else: temp.clm[l,m,:] = self.clm[lm,:] temp.slm[l,m,:] = self.slm[lm,:] - #-- assign ndim and shape attributes + # assign ndim and shape attributes temp.update_dimensions() - #-- return the expanded harmonics object + # return the expanded harmonics object return temp def index(self, indice, date=True): @@ -1414,24 +1414,24 @@ def index(self, indice, date=True): date: bool, default True harmonics objects contain date information """ - #-- output harmonics object + # output harmonics object temp = harmonics(lmax=np.copy(self.lmax),mmax=np.copy(self.mmax)) - #-- subset output harmonics + # subset output harmonics temp.clm = self.clm[:,:,indice].copy() temp.slm = self.slm[:,:,indice].copy() - #-- subset output dates + # subset output dates if date: temp.time = self.time[indice].copy() temp.month = self.month[indice].copy() - #-- subset filenames if applicable + # subset filenames if applicable if getattr(self, 'filename'): if isinstance(self.filename, list): temp.filename = self.filename[indice] elif isinstance(self.filename, str): temp.filename = copy.copy(self.filename) - #-- assign ndim and shape attributes + # assign ndim and shape attributes temp.update_dimensions() - #-- return the subsetted object + # return the subsetted object return temp def subset(self, months): @@ -1443,40 +1443,40 @@ def subset(self, months): months: int GRACE/GRACE-FO to subset """ - #-- check if months is an array or a single value + # check if months is an array or a single value months = np.atleast_1d(months) - #-- number of months + # number of months n = len(months) - #-- check that all months are available + # check that all months are available months_check = list(set(months) - set(self.month)) if months_check: m = ','.join([f'{m:03d}' for m in months_check]) raise IOError(f'GRACE/GRACE-FO months {m} not Found') - #-- indices to sort data objects + # indices to sort data objects months_list = [i for i,m in enumerate(self.month) if m in months] - #-- output harmonics object + # output harmonics object temp = harmonics(lmax=np.copy(self.lmax),mmax=np.copy(self.mmax)) - #-- create output harmonics + # create output harmonics temp.clm = np.zeros((temp.lmax+1,temp.mmax+1,n)) temp.slm = 
np.zeros((temp.lmax+1,temp.mmax+1,n)) temp.time = np.zeros((n)) temp.month = np.zeros((n),dtype=np.int64) temp.filename = [] - #-- for each indice + # for each index for t,i in enumerate(months_list): temp.clm[:,:,t] = self.clm[:,:,i].copy() temp.slm[:,:,t] = self.slm[:,:,i].copy() temp.time[t] = self.time[i].copy() temp.month[t] = self.month[i].copy() - #-- subset filenames if applicable + # subset filenames if applicable if getattr(self, 'filename'): if isinstance(self.filename, list): temp.filename.append(self.filename[i]) elif isinstance(self.filename, str): temp.filename.append(self.filename) - #-- assign ndim and shape attributes + # assign ndim and shape attributes temp.update_dimensions() - #-- remove singleton dimensions if importing a single value + # remove singleton dimensions if importing a single value return temp.squeeze() def truncate(self, lmax, lmin=0, mmax=None): @@ -1492,20 +1492,20 @@ def truncate(self, lmax, lmin=0, mmax=None): mmax: int or NoneType, default None maximum order of spherical harmonics """ - #-- output harmonics dimensions + # output harmonics dimensions lmax = np.copy(self.lmax) if (lmax is None) else lmax mmax = np.copy(lmax) if (mmax is None) else mmax - #-- copy prior harmonics object + # copy prior harmonics object temp = self.copy() - #-- set new degree and order + # set new degree and order self.lmax = np.copy(lmax) self.mmax = np.copy(mmax) if mmax else np.copy(lmax) - #-- truncation levels + # truncation levels l1 = self.lmax+1 if (temp.lmax > self.lmax) else temp.lmax+1 m1 = self.mmax+1 if (temp.mmax > self.mmax) else temp.mmax+1 - #-- create output harmonics + # create output harmonics if (temp.ndim == 3): - #-- number of months + # number of months n = temp.clm.shape[-1] self.clm = np.zeros((self.lmax+1,self.mmax+1,n)) self.slm = np.zeros((self.lmax+1,self.mmax+1,n)) @@ -1516,9 +1516,9 @@ self.slm = np.zeros((self.lmax+1,self.mmax+1)) self.clm[lmin:l1,:m1] = temp.clm[lmin:l1,:m1].copy() self.slm[lmin:l1,:m1] = temp.slm[lmin:l1,:m1].copy() - #-- reassign ndim and shape attributes + # reassign ndim and shape attributes self.update_dimensions() - #-- return the truncated or expanded harmonics object + # return the truncated or expanded harmonics object return self def mean(self, apply=False, indices=Ellipsis): @@ -1533,30 +1533,30 @@ def mean(self, apply=False, indices=Ellipsis): indices of input harmonics object to compute mean """ temp = harmonics(lmax=np.copy(self.lmax),mmax=np.copy(self.mmax)) - #-- allocate for mean field + # allocate for mean field temp.clm = np.zeros((temp.lmax+1,temp.mmax+1)) temp.slm = np.zeros((temp.lmax+1,temp.mmax+1)) - #-- Computes the mean for each spherical harmonic degree and order - for m in range(0,temp.mmax+1):#-- MMAX+1 to include l - for l in range(m,temp.lmax+1):#-- LMAX+1 to include LMAX - #-- calculate mean static field + # Computes the mean for each spherical harmonic degree and order + for m in range(0,temp.mmax+1):# MMAX+1 to include MMAX + for l in range(m,temp.lmax+1):# LMAX+1 to include LMAX + # calculate mean static field temp.clm[l,m] = np.mean(self.clm[l,m,indices]) temp.slm[l,m] = np.mean(self.slm[l,m,indices]) - #-- calculating the time-variable gravity field by removing - #-- the static component of the gravitational field + # calculating the time-variable gravity field by removing + # the static component of the gravitational field if apply: self.clm[l,m,:] -= temp.clm[l,m] self.slm[l,m,:] -= temp.slm[l,m] - #-- calculate mean of temporal variables +
# calculate mean of temporal variables for key in ['time','month']: try: val = getattr(self, key) setattr(temp, key, np.mean(val[indices])) except: continue - #-- assign ndim and shape attributes + # assign ndim and shape attributes temp.update_dimensions() - #-- return the mean field + # return the mean field return temp def scale(self, var): @@ -1568,15 +1568,15 @@ def scale(self, var): var: float scalar value to which the harmonics object will be multiplied """ - #-- reassign shape and ndim attributes + # reassign shape and ndim attributes self.update_dimensions() temp = harmonics(lmax=self.lmax, mmax=self.mmax) temp.time = np.copy(self.time) temp.month = np.copy(self.month) - #-- get filenames if applicable + # get filenames if applicable if getattr(self, 'filename'): temp.filename = copy.copy(self.filename) - #-- multiply by a single constant or a time-variable scalar + # multiply by a single constant or a time-variable scalar if (np.ndim(var) == 0): temp.clm = var*self.clm temp.slm = var*self.slm @@ -1590,7 +1590,7 @@ def scale(self, var): for i,v in enumerate(var): temp.clm[:,:,i] = v*self.clm[:,:,i] temp.slm[:,:,i] = v*self.slm[:,:,i] - #-- assign ndim and shape attributes + # assign ndim and shape attributes temp.update_dimensions() return temp @@ -1603,18 +1603,18 @@ def power(self, power): var: float power to which the harmonics object will be raised """ - #-- reassign shape and ndim attributes + # reassign shape and ndim attributes self.update_dimensions() temp = harmonics(lmax=self.lmax, mmax=self.mmax) temp.time = np.copy(self.time) temp.month = np.copy(self.month) - #-- get filenames if applicable + # get filenames if applicable if getattr(self, 'filename'): temp.filename = copy.copy(self.filename) for key in ['clm','slm']: val = getattr(self, key) setattr(temp, key, np.power(val,power)) - #-- assign ndim and shape attributes + # assign ndim and shape attributes temp.update_dimensions() return temp @@ -1629,21 +1629,21 @@ def drift(self, t, epoch=2003.3): epoch: float reference epoch for times """ - #-- reassign shape and ndim attributes + # reassign shape and ndim attributes self.update_dimensions() temp = harmonics(lmax=self.lmax, mmax=self.mmax) temp.time = np.copy(t) temp.month = np.int64(calendar_to_grace(self.time)) - #-- adjust months to fix special cases if necessary + # adjust months to fix special cases if necessary temp.month = adjust_months(temp.month) - #-- get filenames if applicable + # get filenames if applicable if getattr(self, 'filename'): temp.filename = copy.copy(self.filename) - #-- calculate drift + # calculate drift for i,ti in enumerate(t): temp.clm[:,:,i] = self.clm*(ti - epoch) temp.slm[:,:,i] = self.slm*(ti - epoch) - #-- assign ndim and shape attributes + # assign ndim and shape attributes temp.update_dimensions() return temp @@ -1656,19 +1656,19 @@ def convolve(self, var): var: float degree dependent array for convolution """ - #-- reassign shape and ndim attributes + # reassign shape and ndim attributes self.update_dimensions() - #-- check if a single field or a temporal field + # check if a single field or a temporal field if (self.ndim == 2): - for l in range(0,self.lmax+1):#-- LMAX+1 to include LMAX + for l in range(0,self.lmax+1):# LMAX+1 to include LMAX self.clm[l,:] *= var[l] self.slm[l,:] *= var[l] else: for i,t in enumerate(self.time): - for l in range(0,self.lmax+1):#-- LMAX+1 to include LMAX + for l in range(0,self.lmax+1):# LMAX+1 to include LMAX self.clm[l,:,i] *= var[l] self.slm[l,:,i] *= var[l] - #-- return the convolved field + # 
return the convolved field return self def destripe(self, **kwargs): @@ -1688,15 +1688,15 @@ def destripe(self, **kwargs): *Geophysical Research Letters*, 33(L08402), (2006). `doi: 10.1029/2005GL025285 `_ """ - #-- reassign shape and ndim attributes + # reassign shape and ndim attributes self.update_dimensions() temp = harmonics(lmax=np.copy(self.lmax),mmax=np.copy(self.mmax)) temp.time = np.copy(self.time) temp.month = np.copy(self.month) - #-- get filenames if applicable + # get filenames if applicable if getattr(self, 'filename'): temp.filename = copy.copy(self.filename) - #-- check if a single field or a temporal field + # check if a single field or a temporal field if (self.ndim == 2): Ylms = destripe_harmonics(self.clm, self.slm, LMIN=1, LMAX=self.lmax, MMAX=self.mmax, **kwargs) @@ -1711,9 +1711,9 @@ def destripe(self, **kwargs): LMIN=1, LMAX=self.lmax, MMAX=self.mmax, **kwargs) temp.clm[:,:,i] = Ylms['clm'].copy() temp.slm[:,:,i] = Ylms['slm'].copy() - #-- assign ndim and shape attributes + # assign ndim and shape attributes temp.update_dimensions() - #-- return the destriped field + # return the destriped field return temp def amplitude(self, mmax=None): @@ -1725,30 +1725,30 @@ def amplitude(self, mmax=None): mmax: int or NoneType, default None maximum order of spherical harmonics """ - #-- temporary matrix for squared harmonics + # temporary matrix for squared harmonics temp = self.power(2) - #-- truncate to order mmax + # truncate to order mmax if mmax is not None: temp.truncate(self.lmax, mmax=mmax) - #-- check if a single field or a temporal field + # check if a single field or a temporal field if (self.ndim == 2): - #-- allocate for degree amplitudes + # allocate for degree amplitudes self.amp = np.zeros((self.lmax+1)) for l in range(self.lmax+1): - #-- truncate at mmax + # truncate at mmax m = np.arange(l,temp.mmax+1) - #-- degree amplitude of spherical harmonic degree + # degree amplitude of spherical harmonic degree self.amp[l] = np.sqrt(np.sum(temp.clm[l,m] + temp.slm[l,m])) else: - #-- allocate for degree amplitudes + # allocate for degree amplitudes n = self.shape[-1] self.amp = np.zeros((self.lmax+1,n)) for l in range(self.lmax+1): - #-- truncate at mmax + # truncate at mmax m = np.arange(l,temp.mmax+1) - #-- degree amplitude of spherical harmonic degree + # degree amplitude of spherical harmonic degree var = temp.clm[l,m,:] + temp.slm[l,m,:] self.amp[l,:] = np.sqrt(np.sum(var,axis=0)) - #-- return the harmonics object with degree amplitudes + # return the harmonics object with degree amplitudes return self diff --git a/gravity_toolkit/hdf5_read.py b/gravity_toolkit/hdf5_read.py index a5385a8a..7fdc89ab 100755 --- a/gravity_toolkit/hdf5_read.py +++ b/gravity_toolkit/hdf5_read.py @@ -103,76 +103,76 @@ def hdf5_read(filename, **kwargs): time: time value of dataset attributes: HDF5 attributes """ - #-- set default keyword arguments + # set default keyword arguments kwargs.setdefault('DATE',False) kwargs.setdefault('VARNAME','z') kwargs.setdefault('LONNAME','lon') kwargs.setdefault('LATNAME','lat') kwargs.setdefault('TIMENAME','time') kwargs.setdefault('COMPRESSION',None) - #-- set deprecation warning + # set deprecation warning warnings.filterwarnings("always") warnings.warn("Deprecated. 
Please use spatial.from_HDF5", DeprecationWarning) - #-- Open the HDF5 file for reading + # Open the HDF5 file for reading if (kwargs['COMPRESSION'] == 'gzip'): - #-- read gzip compressed file and extract into in-memory file object + # read gzip compressed file and extract into in-memory file object with gzip.open(os.path.expanduser(filename),'r') as f: fid = io.BytesIO(f.read()) - #-- set filename of BytesIO object + # set filename of BytesIO object fid.filename = os.path.basename(filename) - #-- rewind to start of file + # rewind to start of file fid.seek(0) - #-- read as in-memory (diskless) HDF5 dataset from BytesIO object + # read as in-memory (diskless) HDF5 dataset from BytesIO object fileID = h5py.File(fid, 'r') elif (kwargs['COMPRESSION'] == 'zip'): - #-- read zipped file and extract file into in-memory file object + # read zipped file and extract file into in-memory file object fileBasename,_ = os.path.splitext(os.path.basename(filename)) with zipfile.ZipFile(os.path.expanduser(filename)) as z: - #-- first try finding a HDF5 file with same base filename - #-- if none found simply try searching for a HDF5 file + # first try finding a HDF5 file with same base filename + # if none found simply try searching for a HDF5 file try: f,=[f for f in z.namelist() if re.match(fileBasename,f,re.I)] except: f,=[f for f in z.namelist() if re.search(r'\.H(DF)?5$',f,re.I)] - #-- read bytes from zipfile into in-memory BytesIO object + # read bytes from zipfile into in-memory BytesIO object fid = io.BytesIO(z.read(f)) - #-- set filename of BytesIO object + # set filename of BytesIO object fid.filename = os.path.basename(filename) - #-- rewind to start of file + # rewind to start of file fid.seek(0) - #-- read as in-memory (diskless) HDF5 dataset from BytesIO object + # read as in-memory (diskless) HDF5 dataset from BytesIO object fileID = h5py.File(fid, 'r') elif (kwargs['COMPRESSION'] == 'bytes'): - #-- read as in-memory (diskless) HDF5 dataset + # read as in-memory (diskless) HDF5 dataset fileID = h5py.File(filename, 'r') else: - #-- read HDF5 dataset + # read HDF5 dataset fileID = h5py.File(os.path.expanduser(filename), 'r') - #-- allocate python dictionary for output variables + # allocate python dictionary for output variables dinput = {} dinput['attributes'] = {} - #-- Output HDF5 file information + # Output HDF5 file information logging.info(fileID.filename) logging.info(list(fileID.keys())) - #-- mapping between output keys and HDF5 variable names + # mapping between output keys and HDF5 variable names keys = ['lon','lat','data'] h5keys = [kwargs['LONNAME'],kwargs['LATNAME'],kwargs['VARNAME']] if kwargs['DATE']: keys.append('time') h5keys.append(kwargs['TIMENAME']) - #-- list of variable attributes + # list of variable attributes attributes_list = ['description','units','long_name','calendar', 'standard_name','_FillValue','missing_value'] - #-- for each variable + # for each variable for key,h5key in zip(keys,h5keys): - #-- Getting the data from each HDF5 variable + # Getting the data from each HDF5 variable dinput[key] = np.squeeze(fileID[h5key][:]) - #-- Getting attributes of included variables + # Getting attributes of included variables dinput['attributes'][key] = {} for attr in attributes_list: try: @@ -180,18 +180,18 @@ def hdf5_read(filename, **kwargs): except (KeyError, AttributeError): pass - #-- switching data array to lat/lon if lon/lat + # switching data array to lat/lon if lon/lat sz = dinput['data'].shape if (dinput['data'].ndim == 2) and (len(dinput['lon']) == sz[0]): 
dinput['data'] = dinput['data'].T - #-- Global attributes + # Global attributes for att_name in ['title','description','reference']: try: dinput['attributes'][att_name] = fileID.attrs[att_name] except (ValueError, KeyError, AttributeError): pass - #-- Closing the HDF5 file + # Closing the HDF5 file fileID.close() return dinput diff --git a/gravity_toolkit/hdf5_read_stokes.py b/gravity_toolkit/hdf5_read_stokes.py index be54bef2..17c830ce 100755 --- a/gravity_toolkit/hdf5_read_stokes.py +++ b/gravity_toolkit/hdf5_read_stokes.py @@ -95,64 +95,64 @@ def hdf5_read_stokes(filename, **kwargs): month: GRACE/GRACE-FO month attributes: HDF5 attributes for variables and file """ - #-- set default keyword arguments + # set default keyword arguments kwargs.setdefault('DATE',True) kwargs.setdefault('COMPRESSION',None) - #-- set deprecation warning + # set deprecation warning warnings.filterwarnings("always") warnings.warn("Deprecated. Please use harmonics.from_HDF5", DeprecationWarning) - #-- Open the HDF5 file for reading + # Open the HDF5 file for reading if (kwargs['COMPRESSION'] == 'gzip'): - #-- read gzip compressed file and extract into in-memory file object + # read gzip compressed file and extract into in-memory file object with gzip.open(os.path.expanduser(filename),'r') as f: fid = io.BytesIO(f.read()) - #-- set filename of BytesIO object + # set filename of BytesIO object fid.filename = os.path.basename(filename) - #-- rewind to start of file + # rewind to start of file fid.seek(0) - #-- read as in-memory (diskless) HDF5 dataset from BytesIO object + # read as in-memory (diskless) HDF5 dataset from BytesIO object fileID = h5py.File(fid, 'r') elif (kwargs['COMPRESSION'] == 'zip'): - #-- read zipped file and extract file into in-memory file object + # read zipped file and extract file into in-memory file object fileBasename,_ = os.path.splitext(os.path.basename(filename)) with zipfile.ZipFile(os.path.expanduser(filename)) as z: - #-- first try finding a HDF5 file with same base filename - #-- if none found simply try searching for a HDF5 file + # first try finding a HDF5 file with same base filename + # if none found simply try searching for a HDF5 file try: f,=[f for f in z.namelist() if re.match(fileBasename,f,re.I)] except: f,=[f for f in z.namelist() if re.search(r'\.H(DF)?5$',f,re.I)] - #-- read bytes from zipfile into in-memory BytesIO object + # read bytes from zipfile into in-memory BytesIO object fid = io.BytesIO(z.read(f)) - #-- set filename of BytesIO object + # set filename of BytesIO object fid.filename = os.path.basename(filename) - #-- rewind to start of file + # rewind to start of file fid.seek(0) - #-- read as in-memory (diskless) HDF5 dataset from BytesIO object + # read as in-memory (diskless) HDF5 dataset from BytesIO object fileID = h5py.File(fid, 'r') elif (kwargs['COMPRESSION'] == 'bytes'): - #-- read as in-memory (diskless) HDF5 dataset + # read as in-memory (diskless) HDF5 dataset fileID = h5py.File(filename, 'r') else: - #-- read HDF5 dataset + # read HDF5 dataset fileID = h5py.File(os.path.expanduser(filename), 'r') - #-- allocate python dictionary for output variables + # allocate python dictionary for output variables dinput = {} dinput['attributes'] = {} - #-- Output HDF5 file information + # Output HDF5 file information logging.info(fileID.filename) logging.info(list(fileID.keys())) - #-- output variable keys + # output variable keys h5keys = ['l','m','clm','slm'] - #-- Getting the data from each HDF5 variable - #-- converting HDF5 objects into numpy arrays + # 
Getting the data from each HDF5 variable + # converting HDF5 objects into numpy arrays ll = np.array(fileID['l'][:]) mm = np.array(fileID['m'][:]) - #-- Spherical harmonic files have date information + # Spherical harmonic files have date information if kwargs['DATE']: h5keys.extend(['time','month']) dinput['time'] = fileID['time'][:].copy() @@ -161,36 +161,36 @@ def hdf5_read_stokes(filename, **kwargs): else: n_time = 0 - #-- Restructuring input array back into matrix format + # Restructuring input array back into matrix format LMAX = np.max(ll) MMAX = np.max(mm) - #-- LMAX+1 to include LMAX (LMAX+1 elements) + # LMAX+1 to include LMAX (LMAX+1 elements) dinput['l'] = np.arange(0,LMAX+1) dinput['m'] = np.arange(0,MMAX+1) - #-- convert input clm/slm to numpy arrays + # convert input clm/slm to numpy arrays CLM = np.array(fileID['clm'][:]) SLM = np.array(fileID['slm'][:]) - #-- size of the input grids + # size of the input grids n_harm, = fileID['l'].shape - #-- import spherical harmonic data + # import spherical harmonic data if (kwargs['DATE'] and (n_time > 1)): - #-- contains multiple dates + # contains multiple dates dinput['clm'] = np.zeros((LMAX+1,MMAX+1,n_time)) dinput['slm'] = np.zeros((LMAX+1,MMAX+1,n_time)) for lm in range(n_harm): dinput['clm'][ll[lm],mm[lm],:] = CLM[lm,:] dinput['slm'][ll[lm],mm[lm],:] = SLM[lm,:] else: - #-- contains either no dates or a single date + # contains either no dates or a single date dinput['clm'] = np.zeros((LMAX+1,MMAX+1)) dinput['slm'] = np.zeros((LMAX+1,MMAX+1)) for lm in range(n_harm): dinput['clm'][ll[lm],mm[lm]] = CLM[lm] dinput['slm'][ll[lm],mm[lm]] = SLM[lm] - #-- Getting attributes of clm/slm and included variables - #-- get attributes for the included variables + # Getting attributes of clm/slm and included variables + # get attributes for the included variables for key in h5keys: try: dinput['attributes'][key] = [ @@ -199,15 +199,15 @@ def hdf5_read_stokes(filename, **kwargs): ] except (KeyError, AttributeError): pass - #-- Global attributes + # Global attributes for att_name in ['title','description','reference']: try: dinput['attributes'][att_name] = fileID.attrs[att_name] except (ValueError, KeyError, AttributeError): pass - #-- Closing the HDF5 file + # Closing the HDF5 file fileID.close() - #-- return the output variable + # return the output variable return dinput diff --git a/gravity_toolkit/hdf5_stokes.py b/gravity_toolkit/hdf5_stokes.py index c7756270..f0f84d0d 100755 --- a/gravity_toolkit/hdf5_stokes.py +++ b/gravity_toolkit/hdf5_stokes.py @@ -99,7 +99,7 @@ def hdf5_stokes(clm1, slm1, linp, minp, tinp, month, **kwargs): CLOBBER: will overwrite an existing HDF5 file DATE: harmonics have date information """ - #-- set default keyword arguments + # set default keyword arguments kwargs.setdefault('FILENAME',None) kwargs.setdefault('UNITS','Geodesy_Normalization') kwargs.setdefault('TIME_UNITS',None) @@ -111,30 +111,30 @@ def hdf5_stokes(clm1, slm1, linp, minp, tinp, month, **kwargs): kwargs.setdefault('REFERENCE',None) kwargs.setdefault('DATE',True) kwargs.setdefault('CLOBBER',True) - #-- set deprecation warning + # set deprecation warning warnings.filterwarnings("always") warnings.warn("Deprecated. 
Please use harmonics.to_HDF5", DeprecationWarning) - #-- setting HDF5 clobber attribute + # setting HDF5 clobber attribute clobber = 'w' if kwargs['CLOBBER'] else 'w-' - #-- opening HDF5 file for writing + # opening HDF5 file for writing fileID = h5py.File(kwargs['FILENAME'], clobber) - #-- Maximum spherical harmonic degree (LMAX) and order (MMAX) + # Maximum spherical harmonic degree (LMAX) and order (MMAX) LMAX = np.max(linp) MMAX = np.max(minp) - #-- Calculating the number of cos and sin harmonics up to LMAX - #-- taking into account MMAX (if MMAX == LMAX then LMAX-MMAX=0) + # Calculating the number of cos and sin harmonics up to LMAX + # taking into account MMAX (if MMAX == LMAX then LMAX-MMAX=0) n_harm = (LMAX**2 + 3*LMAX - (LMAX-MMAX)**2 - (LMAX-MMAX))//2 + 1 - #-- dictionary with output variables + # dictionary with output variables output = {} - #-- restructured degree and order + # restructured degree and order output['l'] = np.zeros((n_harm,), dtype=np.int32) output['m'] = np.zeros((n_harm,), dtype=np.int32) - #-- Restructuring output matrix to array format - #-- will reduce matrix size and insure compatibility between platforms + # Restructuring output matrix to array format + # will reduce matrix size and insure compatibility between platforms if kwargs['DATE']: n_time = len(np.atleast_1d(tinp)) output['time'] = np.copy(tinp) @@ -150,10 +150,10 @@ def hdf5_stokes(clm1, slm1, linp, minp, tinp, month, **kwargs): output['clm'] = np.zeros((n_harm)) output['slm'] = np.zeros((n_harm)) - #-- create counter variable lm + # create counter variable lm lm = 0 - for m in range(0,MMAX+1):#-- MMAX+1 to include MMAX - for l in range(m,LMAX+1):#-- LMAX+1 to include LMAX + for m in range(0,MMAX+1):# MMAX+1 to include MMAX + for l in range(m,LMAX+1):# LMAX+1 to include LMAX output['l'][lm] = np.int64(l) output['m'][lm] = np.int64(m) if (kwargs['DATE'] and (n_time > 1)): @@ -162,44 +162,44 @@ def hdf5_stokes(clm1, slm1, linp, minp, tinp, month, **kwargs): else: output['clm'][lm] = clm1[l,m] output['slm'][lm] = slm1[l,m] - #-- add 1 to lm counter variable + # add 1 to lm counter variable lm += 1 - #-- Defining the HDF5 dataset variables + # Defining the HDF5 dataset variables h5 = {} for key,val in output.items(): h5[key] = fileID.create_dataset(key, val.shape, data=val, dtype=val.dtype, compression='gzip') - #-- filling HDF5 dataset attributes - #-- Defining attributes for degree and order - h5['l'].attrs['long_name'] = 'spherical_harmonic_degree'#-- degree long name - h5['l'].attrs['units'] = 'Wavenumber'#-- SH degree units - h5['m'].attrs['long_name'] = 'spherical_harmonic_order'#-- order long name - h5['m'].attrs['units'] = 'Wavenumber'#-- SH order units - #-- Defining attributes for dataset + # filling HDF5 dataset attributes + # Defining attributes for degree and order + h5['l'].attrs['long_name'] = 'spherical_harmonic_degree'# degree long name + h5['l'].attrs['units'] = 'Wavenumber'# SH degree units + h5['m'].attrs['long_name'] = 'spherical_harmonic_order'# order long name + h5['m'].attrs['units'] = 'Wavenumber'# SH order units + # Defining attributes for dataset h5['clm'].attrs['long_name'] = 'cosine_spherical_harmonics' h5['clm'].attrs['units'] = kwargs['UNITS'] h5['slm'].attrs['long_name'] = 'sine_spherical_harmonics' h5['slm'].attrs['units'] = kwargs['UNITS'] if kwargs['DATE']: - #-- Defining attributes for date and month (or integer date) + # Defining attributes for date and month (or integer date) h5['time'].attrs['long_name'] = kwargs['TIME_LONGNAME'] h5['time'].attrs['units'] = 
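
# A quick check (mine, not from the patch) that the closed-form count
# of cos/sin harmonics used in hdf5_stokes equals the number of (m, l)
# pairs visited by the flattening loops; e.g. 1891 for LMAX = MMAX = 60.
LMAX, MMAX = 60, 60
n_harm = (LMAX**2 + 3*LMAX - (LMAX-MMAX)**2 - (LMAX-MMAX))//2 + 1
assert n_harm == sum(LMAX + 1 - m for m in range(MMAX + 1)) == 1891
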
kwargs['TIME_UNITS'] h5['month'].attrs['long_name'] = kwargs['MONTHS_LONGNAME'] h5['month'].attrs['units'] = kwargs['MONTHS_UNITS'] - #-- description of file + # description of file if kwargs['TITLE']: fileID.attrs['description'] = kwargs['TITLE'] - #-- reference of file + # reference of file if kwargs['REFERENCE']: fileID.attrs['reference'] = kwargs['REFERENCE'] - #-- date created + # date created fileID.attrs['date_created'] = time.strftime('%Y-%m-%d',time.localtime()) - #-- Output HDF5 structure information + # Output HDF5 structure information logging.info(kwargs['FILENAME']) logging.info(list(fileID.keys())) - #-- Closing the HDF5 file + # Closing the HDF5 file fileID.close() diff --git a/gravity_toolkit/hdf5_write.py b/gravity_toolkit/hdf5_write.py index 23ebe74b..a4c9b81a 100755 --- a/gravity_toolkit/hdf5_write.py +++ b/gravity_toolkit/hdf5_write.py @@ -94,7 +94,7 @@ def hdf5_write(data, lon, lat, tim, **kwargs): CLOBBER: will overwrite an existing HDF5 file DATE: data has date information """ - #-- set default keyword arguments + # set default keyword arguments kwargs.setdefault('FILENAME',None) kwargs.setdefault('VARNAME','z') kwargs.setdefault('LONNAME','lon') @@ -109,23 +109,23 @@ def hdf5_write(data, lon, lat, tim, **kwargs): kwargs.setdefault('REFERENCE',None) kwargs.setdefault('DATE',True) kwargs.setdefault('CLOBBER',True) - #-- set deprecation warning + # set deprecation warning warnings.filterwarnings("always") warnings.warn("Deprecated. Please use spatial.to_HDF5", DeprecationWarning) - #-- setting HDF5 clobber attribute + # setting HDF5 clobber attribute clobber = 'w' if kwargs['CLOBBER'] else 'w-' - #-- opening HDF5 file for writing + # opening HDF5 file for writing fileID = h5py.File(kwargs['FILENAME'], clobber) - #-- create output dictionary with key mapping + # create output dictionary with key mapping output = {} output[kwargs['LONNAME']] = np.copy(lon) output[kwargs['LATNAME']] = np.copy(lat) dimensions = [kwargs['LATNAME'],kwargs['LONNAME']] - #-- extend with date variables + # extend with date variables if kwargs['DATE']: output[kwargs['TIMENAME']] = np.array(tim,dtype='f') output[kwargs['VARNAME']] = np.atleast_3d(data) @@ -133,44 +133,44 @@ def hdf5_write(data, lon, lat, tim, **kwargs): else: output[kwargs['VARNAME']] = np.copy(data) - #-- Defining the HDF5 dataset variables + # Defining the HDF5 dataset variables h5 = {} for key,val in output.items(): h5[key] = fileID.create_dataset(key, val.shape, data=val, dtype=val.dtype, compression='gzip') - #-- add dimensions + # add dimensions for i,dim in enumerate(dimensions): h5[kwargs['VARNAME']].dims[i].label = dim h5[kwargs['VARNAME']].dims[i].attach_scale(h5[dim]) - #-- filling HDF5 dataset attributes - #-- Defining attributes for longitude and latitude + # filling HDF5 dataset attributes + # Defining attributes for longitude and latitude h5[kwargs['LONNAME']].attrs['long_name'] = 'longitude' h5[kwargs['LONNAME']].attrs['units'] = 'degrees_east' h5[kwargs['LATNAME']].attrs['long_name'] = 'latitude' h5[kwargs['LATNAME']].attrs['units'] = 'degrees_north' - #-- Defining attributes for dataset + # Defining attributes for dataset h5[kwargs['VARNAME']].attrs['long_name'] = kwargs['LONGNAME'] h5[kwargs['VARNAME']].attrs['units'] = kwargs['UNITS'] - #-- Dataset contains missing values + # Dataset contains missing values if (kwargs['FILL_VALUE'] is not None): h5[kwargs['VARNAME']].attrs['_FillValue'] = kwargs['FILL_VALUE'] - #-- Defining attributes for date + # Defining attributes for date if kwargs['DATE']: 
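
# A self-contained sketch (hypothetical filename and toy coordinates)
# of the h5py dimension-scale pattern used in hdf5_write: coordinate
# datasets are promoted to scales and attached to each axis of the
# data variable.
import h5py
import numpy as np
with h5py.File('example.h5', 'w') as f:
    f['lat'] = np.linspace(90.0, -90.0, 181)
    f['lon'] = np.arange(0.0, 360.0)
    z = f.create_dataset('z', data=np.zeros((181, 360)))
    for i, dim in enumerate(['lat', 'lon']):
        f[dim].make_scale(dim)    # mark as a dimension scale (h5py >= 2.10)
        z.dims[i].label = dim
        z.dims[i].attach_scale(f[dim])
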
h5[kwargs['TIMENAME']].attrs['long_name'] = kwargs['TIME_LONGNAME'] h5[kwargs['TIMENAME']].attrs['units'] = kwargs['TIME_UNITS'] - #-- description of file + # description of file if kwargs['TITLE']: fileID.attrs['description'] = kwargs['TITLE'] - #-- reference of file + # reference of file if kwargs['REFERENCE']: fileID.attrs['reference'] = kwargs['REFERENCE'] - #-- date created + # date created fileID.attrs['date_created'] = time.strftime('%Y-%m-%d',time.localtime()) - #-- Output HDF5 structure information + # Output HDF5 structure information logging.info(kwargs['FILENAME']) logging.info(list(fileID.keys())) - #-- Closing the HDF5 file + # Closing the HDF5 file fileID.close() diff --git a/gravity_toolkit/legendre.py b/gravity_toolkit/legendre.py index 49401edc..30f54dba 100644 --- a/gravity_toolkit/legendre.py +++ b/gravity_toolkit/legendre.py @@ -70,86 +70,86 @@ def legendre(l, x, NORMALIZE=False): .. [Jacobs1987] J. A. Jacobs, *Geomagnetism*, Volume 1, 1st Edition, 832 pp., (1987). """ - #-- verify integer + # verify integer l = np.int64(l) - #-- verify dimensions + # verify dimensions x = np.atleast_1d(x).flatten() - #-- size of the x array + # size of the x array nx = len(x) - #-- for the l = 0 case + # for the l = 0 case if (l == 0): Pl = np.ones((1,nx), dtype=np.float64) return Pl - #-- for all other degrees greater than 0 - rootl = np.sqrt(np.arange(0,2*l+1))#-- +1 to include 2*l - #-- s is sine of colatitude (cosine of latitude) so that 0 <= s <= 1 - s = np.sqrt(1.0 - x**2)#-- for x=cos(th): s=sin(th) + # for all other degrees greater than 0 + rootl = np.sqrt(np.arange(0,2*l+1))# +1 to include 2*l + # s is sine of colatitude (cosine of latitude) so that 0 <= s <= 1 + s = np.sqrt(1.0 - x**2)# for x=cos(th): s=sin(th) P = np.zeros((l+3,nx), dtype=np.float64) - #-- Find values of x,s for which there will be underflow + # Find values of x,s for which there will be underflow sn = (-s)**l tol = np.sqrt(np.finfo(np.float64).tiny) count = np.count_nonzero((s > 0) & (np.abs(sn) <= tol)) if (count > 0): ind, = np.nonzero((s > 0) & (np.abs(sn) <= tol)) - #-- Approximate solution of x*ln(x) = Pl + # Approximate solution of x*ln(x) = Pl v = 9.2 - np.log(tol)/(l*s[ind]) w = 1.0/np.log(v) m1 = 1+l*s[ind]*v*w*(1.0058+ w*(3.819 - w*12.173)) m1 = np.where(l < np.floor(m1), l, np.floor(m1)).astype(np.int64) - #-- Column-by-column recursion + # Column-by-column recursion for k,mm1 in enumerate(m1): col = ind[k] - #-- Calculate twocot for underflow case + # Calculate twocot for underflow case twocot = -2.0*x[col]/s[col] P[mm1-1:l+1,col] = 0.0 - #-- Start recursion with proper sign + # Start recursion with proper sign tstart = np.finfo(np.float64).eps P[mm1-1,col] = np.sign(np.fmod(mm1,2)-0.5)*tstart if (x[col] < 0): P[mm1-1,col] = np.sign(np.fmod(l+1,2)-0.5)*tstart - #-- Recur from m1 to m = 0, accumulating normalizing factor. + # Recur from m1 to m = 0, accumulating normalizing factor. 
sumsq = tol.copy() for m in range(mm1-2,-1,-1): P[m,col] = ((m+1)*twocot*P[m+1,col] - \ rootl[l+m+2]*rootl[l-m-1]*P[m+2,col]) / \ (rootl[l+m+1]*rootl[l-m]) sumsq += P[m,col]**2 - #-- calculate scale + # calculate scale scale = 1.0/np.sqrt(2.0*sumsq - P[0,col]**2) P[0:mm1+1,col] = scale*P[0:mm1+1,col] - #-- Find the values of x,s for which there is no underflow, and (x != +/-1) + # Find the values of x,s for which there is no underflow, and (x != +/-1) count = np.count_nonzero((x != 1) & (np.abs(sn) >= tol)) if (count > 0): nind, = np.nonzero((x != 1) & (np.abs(sn) >= tol)) - #-- Calculate twocot for normal case + # Calculate twocot for normal case twocot = -2.0*x[nind]/s[nind] - #-- Produce normalization constant for the m = l function + # Produce normalization constant for the m = l function d = np.arange(2,2*l+2,2) c = np.prod(1.0 - 1.0/d) - #-- Use sn = (-s)**l (written above) to write the m = l function + # Use sn = (-s)**l (written above) to write the m = l function P[l,nind] = np.sqrt(c)*sn[nind] P[l-1,nind] = P[l,nind]*twocot*l/rootl[-1] - #-- Recur downwards to m = 0 + # Recur downwards to m = 0 for m in range(l-2,-1,-1): P[m,nind] = (P[m+1,nind]*twocot*(m+1) - \ P[m+2,nind]*rootl[l+m+2]*rootl[l-m-1]) / \ (rootl[l+m+1]*rootl[l-m]) - #-- calculate Pl from P + # calculate Pl from P Pl = np.copy(P[0:l+1,:]) - #-- Polar argument (x == +/-1) + # Polar argument (x == +/-1) count = np.count_nonzero(s == 0) if (count > 0): s0, = np.nonzero(s == 0) Pl[0,s0] = x[s0]**l - #-- calculate Fully Normalized Associated Legendre functions + # calculate Fully Normalized Associated Legendre functions if NORMALIZE: norm = np.zeros((l+1)) norm[0] = np.sqrt(2.0*l+1) @@ -157,12 +157,12 @@ def legendre(l, x, NORMALIZE=False): norm[1:] = (-1)**m*np.sqrt(2.0*(2.0*l+1.0)) Pl *= np.kron(np.ones((1,nx)), norm[:,np.newaxis]) else: - #-- Calculate the unnormalized Legendre functions by multiplying each row - #-- by: sqrt((l+m)!/(l-m)!) == sqrt(prod(n-m+1:n+m)) - #-- following Abramowitz and Stegun + # Calculate the unnormalized Legendre functions by multiplying each row + # by: sqrt((l+m)!/(l-m)!) == sqrt(prod(n-m+1:n+m)) + # following Abramowitz and Stegun for m in range(1,l): Pl[m,:] *= np.prod(rootl[l-m+1:l+m+1]) - #-- sectoral case (l = m) should be done separately to handle 0! + # sectoral case (l = m) should be done separately to handle 0! Pl[l,:] *= np.prod(rootl[1:]) return Pl diff --git a/gravity_toolkit/legendre_polynomials.py b/gravity_toolkit/legendre_polynomials.py index c748cbd3..946f538a 100755 --- a/gravity_toolkit/legendre_polynomials.py +++ b/gravity_toolkit/legendre_polynomials.py @@ -73,40 +73,40 @@ def legendre_polynomials(lmax,x,ASTYPE=np.float64): *Physical Geodesy*, 2nd Edition, 403 pp., (2006). 
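
# A hedged spot-check (my own) of the degree-2 closed forms that the
# unnormalized output of legendre() follows, written against the
# Condon-Shortley sign convention used by scipy.special.lpmv; sign
# conventions should be confirmed before comparing other codes.
import numpy as np
from scipy.special import lpmv
x = np.linspace(-0.9, 0.9, 5)
assert np.allclose(lpmv(0, 2, x), 0.5*(3.0*x**2 - 1.0))
assert np.allclose(lpmv(1, 2, x), -3.0*x*np.sqrt(1.0 - x**2))
assert np.allclose(lpmv(2, 2, x), 3.0*(1.0 - x**2))
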
`doi: 10.1007/978-3-211-33545-1 `_ """ - #-- verify dimensions + # verify dimensions x = np.atleast_1d(x).flatten().astype(ASTYPE) - #-- size of the x array + # size of the x array nx = len(x) - #-- verify data type of spherical harmonic truncation + # verify data type of spherical harmonic truncation lmax = np.int64(lmax) - #-- output matrix of normalized legendre polynomials + # output matrix of normalized legendre polynomials pl = np.zeros((lmax+1,nx),dtype=ASTYPE) - #-- output matrix of First derivative of Legendre polynomials + # output matrix of First derivative of Legendre polynomials dpl = np.zeros((lmax+1,nx),dtype=ASTYPE) - #-- dummy matrix for the recurrence relation + # dummy matrix for the recurrence relation ptemp = np.zeros((lmax+1,nx),dtype=ASTYPE) - #-- u is sine of colatitude (cosine of latitude) so that 0 <= s <= 1 - #-- for x=cos(th): u=sin(th) + # u is sine of colatitude (cosine of latitude) so that 0 <= s <= 1 + # for x=cos(th): u=sin(th) u = np.sqrt(1.0 - x**2) - #-- update where u==0 to eps of data type to prevent invalid divisions + # update where u==0 to eps of data type to prevent invalid divisions u[u == 0] = np.finfo(u.dtype).eps - #-- Initialize the recurrence relation + # Initialize the recurrence relation ptemp[0,:] = 1.0 ptemp[1,:] = x - #-- Normalization is geodesy convention + # Normalization is geodesy convention pl[0,:] = ptemp[0,:] pl[1,:] = np.sqrt(3.0)*ptemp[1,:] for l in range(2,lmax+1): ptemp[l,:] = (((2.0*l)-1.0)/l)*x*ptemp[l-1,:] - ((l-1.0)/l)*ptemp[l-2,:] - #-- Normalization is geodesy convention + # Normalization is geodesy convention pl[l,:] = np.sqrt((2.0*l)+1.0)*ptemp[l,:] - #-- First derivative of Legendre polynomials + # First derivative of Legendre polynomials for l in range(1,lmax+1): fl = np.sqrt(((l**2.0) * (2.0*l + 1.0)) / (2.0*l - 1.0)) dpl[l,:] = (1.0/u)*(l*x*pl[l,:] - fl*pl[l-1,:]) - #-- return the legendre polynomials and their first derivative + # return the legendre polynomials and their first derivative return (pl, dpl) diff --git a/gravity_toolkit/mascons.py b/gravity_toolkit/mascons.py index c9251fdf..07983f1e 100644 --- a/gravity_toolkit/mascons.py +++ b/gravity_toolkit/mascons.py @@ -67,36 +67,36 @@ def to_gsfc(gdata, lon, lat, lon_center, lat_center, lon_span, lat_span): *Journal of Glaciology*, 59(216), (2013). 
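
# A numerical check (mine, not from the patch) of the geodesy
# normalization used in legendre_polynomials: with pl = sqrt(2l+1)*P_l,
# the integral of pl**2 over x in [-1, 1] equals 2 for every degree.
import numpy as np
from numpy.polynomial import legendre as leg
x = np.linspace(-1.0, 1.0, 10001)
for l in range(6):
    Pl = leg.legval(x, [0.0]*l + [1.0])    # ordinary Legendre P_l(x)
    norm = np.trapz((np.sqrt(2.0*l + 1.0)*Pl)**2, x)
    assert abs(norm - 2.0) < 1e-3          # trapezoidal-rule tolerance
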
`doi: 10.3189/2013JoG12J147 `_ """ - #-- number of mascons + # number of mascons nmas = len(lon_center) - #-- convert mascon centers to -180:180 + # convert mascon centers to -180:180 gt180, = np.nonzero(lon_center > 180) lon_center[gt180] -= 360.0 - #-- remove singleton dimensions + # remove singleton dimensions lat = np.squeeze(lat) lon = np.squeeze(lon) - #-- for mascons centered on 180: use 0:360 + # for mascons centered on 180: use 0:360 alon = np.copy(lon) lt0, = np.nonzero(lon < 0) alon[lt0] += 360.0 - #-- loop over each mascon bin and average gdata with the cos-lat weights - #-- for that bin + # loop over each mascon bin and average gdata with the cos-lat weights + # for that bin mascon_array = {} mascon_array['data'] = np.zeros((nmas)) mascon_array['lon_center'] = np.zeros((nmas)) mascon_array['lat_center'] = np.zeros((nmas)) for k in range(0,nmas): - #-- create latitudinal and longitudinal bounds for mascon k + # create latitudinal and longitudinal bounds for mascon k if (lat_center[k] == 90.0) | (lat_center[k] == -90.0): - #-- NH and SH polar mascons + # NH and SH polar mascons lon_bound = [0.0,360.0] lat_bound = lat_center[k] + np.array([-1.0,1.0])*lat_span[k] else: - #-- convert from mascon centers to mascon bounds + # convert from mascon centers to mascon bounds lon_bound = lon_center[k] + np.array([-0.5,0.5])*lon_span[k] lat_bound = lat_center[k] + np.array([-0.5,0.5])*lat_span[k] - #-- if mascon is centered on +/-180: use 0:360 + # if mascon is centered on +/-180: use 0:360 if ((lon_bound[0] <= 180.0) & (lon_bound[1] >= 180.0)): ilon = alon.copy() elif ((lon_bound[0] <= -180.0) & (lon_bound[1] >= -180.0)): @@ -104,17 +104,17 @@ def to_gsfc(gdata, lon, lat, lon_center, lat_center, lon_span, lat_span): ilon = alon.copy() else: ilon = lon.copy() - #-- indices for grid points within the mascon + # indices for grid points within the mascon I, = np.nonzero((lat >= lat_bound[0]) & (lat < lat_bound[1])) J, = np.nonzero((ilon >= lon_bound[0]) & (ilon < lon_bound[1])) I,J = (I[np.newaxis,:], J[:,np.newaxis]) - #-- calculate average data for mascon bin + # calculate average data for mascon bin mascon_array['data'][k] = np.mean((np.cos(lat[I]*np.pi/180.0) / np.mean(np.cos(lat[I]*np.pi/180.0)))*gdata[I,J]/len(I)) mascon_array['lat_center'][k] = lat_center[k] mascon_array['lon_center'][k] = lon_center[k] - #-- return python dictionary with the mascon array data, lon and lat + # return python dictionary with the mascon array data, lon and lat return mascon_array def to_jpl(gdata, lon, lat, lon_bound, lat_bound): @@ -153,42 +153,42 @@ def to_jpl(gdata, lon, lat, lon_bound, lat_bound): *Journal of Geophysical Research: Solid Earth*, 120(4), 2648--2671, (2015). 
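
# A toy illustration (synthetic 3x2 bin, my own values) of the
# cosine-latitude weighting used in to_gsfc: dividing by the mean
# weight makes the weights average to one, so the bin value is a
# proper area-weighted mean.
import numpy as np
lat = np.array([60.0, 61.0, 62.0])                      # rows in one bin
gdata = np.array([[1.0, 2.0], [3.0, 4.0], [5.0, 6.0]])  # (lat, lon) values
w = np.cos(np.deg2rad(lat))
binned = np.mean((w/w.mean())[:, None]*gdata)
assert np.isclose(binned, np.average(gdata.mean(axis=1), weights=w))
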
`doi: 10.1002/2014JB011547 `_ """ - #-- mascon dimensions + # mascon dimensions nmas,nvar = lat_bound.shape - #-- remove singleton dimensions + # remove singleton dimensions lat = np.squeeze(lat) lon = np.squeeze(lon) - #-- loop over each mascon bin and average gdata with the cos-lat weights - #-- for that bin + # loop over each mascon bin and average gdata with the cos-lat weights + # for that bin mascon_array = {} mascon_array['data'] = np.zeros((nmas)) mascon_array['mask'] = np.zeros((nmas),dtype=bool) mascon_array['lon'] = np.zeros((nmas)) mascon_array['lat'] = np.zeros((nmas)) for k in range(0,nmas): - #-- indices for grid points within the mascon + # indices for grid points within the mascon I, = np.nonzero((lat >= lat_bound[k,1]) & (lat < lat_bound[k,0])) J, = np.nonzero((lon >= lon_bound[k,0]) & (lon < lon_bound[k,2])) nlt = np.count_nonzero((lat >= lat_bound[k,1]) & (lat < lat_bound[k,0])) I,J = (I[np.newaxis,:], J[:,np.newaxis]) - #-- calculate average data for mascon bin + # calculate average data for mascon bin mascon_array['data'][k] = np.mean((np.cos(lat[I]*np.pi/180.0) / np.mean(np.cos(lat[I]*np.pi/180.0)))*gdata[I,J]/nlt) - #-- calculate coordinates of mascon center + # calculate coordinates of mascon center mascon_array['lat'][k] = (lat_bound[k,1]+lat_bound[k,0])/2.0 mascon_array['lon'][k] = (lon_bound[k,1]+lon_bound[k,2])/2.0 mascon_array['mask'][k] = bool(nlt == 0) - #-- Do a check at the poles to make the lat/lon equal to +/-90/0 + # Do a check at the poles to make the lat/lon equal to +/-90/0 if (np.abs(lat_bound[k,0]) == 90): mascon_array['lat'][k] = lat_bound[k,0] mascon_array['lon'][k] = 0.0 if (np.abs(lat_bound[k,1]) == 90): mascon_array['lat'][k] = lat_bound[k,1] mascon_array['lon'][k] = 0.0 - #-- replace invalid data with 0 + # replace invalid data with 0 mascon_array['data'][mascon_array['mask']] = 0.0 - #-- return python dictionary with the mascon array data, lon and lat + # return python dictionary with the mascon array data, lon and lat return mascon_array def from_gsfc(mscdata, grid_spacing, lon_center, lat_center, lon_span, lat_span, @@ -227,45 +227,45 @@ def from_gsfc(mscdata, grid_spacing, lon_center, lat_center, lon_span, lat_span, *Journal of Glaciology*, 59(216), (2013). `doi: 10.3189/2013JoG12J147 `_ """ - #-- set default keyword arguments + # set default keyword arguments kwargs.setdefault('transpose', False) - #-- raise warnings for deprecated keyword arguments + # raise warnings for deprecated keyword arguments deprecated_keywords = dict(TRANSPOSE='transpose') for old,new in deprecated_keywords.items(): if old in kwargs.keys(): warnings.warn(f"""Deprecated keyword argument {old}. 
Changed to '{new}'""", DeprecationWarning) - #-- set renamed argument to not break workflows + # set renamed argument to not break workflows kwargs[new] = copy.copy(kwargs[old]) - #-- number of mascons + # number of mascons nmas = len(lon_center) - #-- convert mascon centers to -180:180 + # convert mascon centers to -180:180 gt180, = np.nonzero(lon_center > 180) lon_center[gt180] -= 360.0 - #-- Define output latitude and longitude grids + # Define output latitude and longitude grids lon = np.arange(-180.0+grid_spacing/2.0,180.0+grid_spacing/2.0,grid_spacing) lat = np.arange(90.0-grid_spacing/2.0,-90.0-grid_spacing/2.0,-grid_spacing) nlon,nlat = (len(lon),len(lat)) - #-- for mascons centered on 180: use 0:360 + # for mascons centered on 180: use 0:360 alon = np.copy(lon) lt0, = np.nonzero(lon < 0) alon[lt0] += 360.0 - #-- loop over each mascon bin and assign value to grid points inside bin: + # loop over each mascon bin and assign value to grid points inside bin: mdata = np.zeros((nlat,nlon)) for k in range(0, nmas): - #-- create latitudinal and longitudinal bounds for mascon k + # create latitudinal and longitudinal bounds for mascon k if (lat_center[k] == 90.0) | (lat_center[k] == -90.0): - #-- NH and SH polar mascons + # NH and SH polar mascons lon_bound = [0.0,360.0] lat_bound = lat_center[k] + np.array([-1.0,1.0])*lat_span[k] else: - #-- convert from mascon centers to mascon bounds + # convert from mascon centers to mascon bounds lon_bound = lon_center[k] + np.array([-0.5,0.5])*lon_span[k] lat_bound = lat_center[k] + np.array([-0.5,0.5])*lat_span[k] - #-- if mascon is centered on +/-180: use 0:360 + # if mascon is centered on +/-180: use 0:360 if ((lon_bound[0] <= 180.0) & (lon_bound[1] >= 180.0)): ilon = alon.copy() elif ((lon_bound[0] <= -180.0) & (lon_bound[1] >= -180.0)): @@ -273,13 +273,13 @@ def from_gsfc(mscdata, grid_spacing, lon_center, lat_center, lon_span, lat_span, ilon = alon.copy() else: ilon = lon.copy() - #-- indices for grid points within the mascon + # indices for grid points within the mascon I, = np.nonzero((lat >= lat_bound[0]) & (lat < lat_bound[1])) J, = np.nonzero((ilon >= lon_bound[0]) & (ilon < lon_bound[1])) I,J = (I[np.newaxis,:], J[:,np.newaxis]) mdata[I,J] = mscdata[k] - #-- return array + # return array if kwargs['transpose']: return mdata.T else: @@ -315,28 +315,28 @@ def from_jpl(mscdata, grid_spacing, lon_bound, lat_bound, **kwargs): *Journal of Geophysical Research: Solid Earth*, 120(4), 2648--2671, (2015). `doi: 10.1002/2014JB011547 `_ """ - #-- set default keyword arguments + # set default keyword arguments kwargs.setdefault('transpose', False) - #-- raise warnings for deprecated keyword arguments + # raise warnings for deprecated keyword arguments deprecated_keywords = dict(TRANSPOSE='transpose') for old,new in deprecated_keywords.items(): if old in kwargs.keys(): warnings.warn(f"""Deprecated keyword argument {old}. 
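
# A compact sketch (toy sizes) of the open-mesh indexing used in
# from_gsfc: row indices I and column indices J are given orthogonal
# axes so a single assignment fills the whole rectangular block of
# grid cells inside a mascon.
import numpy as np
mdata = np.zeros((4, 6))
I = np.array([1, 2])       # latitude rows inside the mascon
J = np.array([0, 1, 4])    # longitude columns inside the mascon
mdata[I[np.newaxis, :], J[:, np.newaxis]] = 7.0
assert mdata.sum() == 7.0*I.size*J.size
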
Changed to '{new}'""", DeprecationWarning) - #-- set renamed argument to not break workflows + # set renamed argument to not break workflows kwargs[new] = copy.copy(kwargs[old]) - #-- mascon dimensions + # mascon dimensions nmas,nvar = lat_bound.shape - #-- Define latitude and longitude grids - #-- output lon will not include 360 - #-- output lat will not include 90 + # Define latitude and longitude grids + # output lon will not include 360 + # output lat will not include 90 lon = np.arange(grid_spacing/2.0, 360.0+grid_spacing/2.0, grid_spacing) lat = np.arange(-90.0+grid_spacing/2.0, 90.0+grid_spacing/2.0, grid_spacing) nlon,nlat = (len(lon),len(lat)) - #-- loop over each mascon bin and assign value to grid points inside bin: + # loop over each mascon bin and assign value to grid points inside bin: mdata = np.zeros((nlat,nlon)) for k in range(0, nmas): I, = np.nonzero((lat >= lat_bound[k,1]) & (lat < lat_bound[k,0])) @@ -344,7 +344,7 @@ def from_jpl(mscdata, grid_spacing, lon_bound, lat_bound, **kwargs): I,J = (I[np.newaxis,:], J[:,np.newaxis]) mdata[I,J] = mscdata[k] - #-- return array + # return array if kwargs['transpose']: return mdata.T else: diff --git a/gravity_toolkit/ncdf_read.py b/gravity_toolkit/ncdf_read.py index e11b55b5..e25973a6 100755 --- a/gravity_toolkit/ncdf_read.py +++ b/gravity_toolkit/ncdf_read.py @@ -110,78 +110,78 @@ def ncdf_read(filename, **kwargs): time: time value of dataset attributes: netCDF4 attributes """ - #-- set default keyword arguments + # set default keyword arguments kwargs.setdefault('DATE',False) kwargs.setdefault('VARNAME','z') kwargs.setdefault('LONNAME','lon') kwargs.setdefault('LATNAME','lat') kwargs.setdefault('TIMENAME','time') kwargs.setdefault('COMPRESSION',None) - #-- set deprecation warning + # set deprecation warning warnings.filterwarnings("always") warnings.warn("Deprecated. 
Please use spatial.from_netCDF4", DeprecationWarning) - #-- Open the NetCDF4 file for reading + # Open the NetCDF4 file for reading if (kwargs['COMPRESSION'] == 'gzip'): - #-- read as in-memory (diskless) netCDF4 dataset + # read as in-memory (diskless) netCDF4 dataset with gzip.open(os.path.expanduser(filename),'r') as f: fileID = netCDF4.Dataset(os.path.basename(filename),memory=f.read()) elif (kwargs['COMPRESSION'] == 'zip'): - #-- read zipped file and extract file into in-memory file object + # read zipped file and extract file into in-memory file object fileBasename,_ = os.path.splitext(os.path.basename(filename)) with zipfile.ZipFile(os.path.expanduser(filename)) as z: - #-- first try finding a netCDF4 file with same base filename - #-- if none found simply try searching for a netCDF4 file + # first try finding a netCDF4 file with same base filename + # if none found simply try searching for a netCDF4 file try: f,=[f for f in z.namelist() if re.match(fileBasename,f,re.I)] except: f,=[f for f in z.namelist() if re.search(r'\.nc(4)?$',f)] - #-- read bytes from zipfile as in-memory (diskless) netCDF4 dataset + # read bytes from zipfile as in-memory (diskless) netCDF4 dataset fileID = netCDF4.Dataset(uuid.uuid4().hex, memory=z.read(f)) elif (kwargs['COMPRESSION'] == 'bytes'): - #-- read as in-memory (diskless) netCDF4 dataset + # read as in-memory (diskless) netCDF4 dataset fileID = netCDF4.Dataset(uuid.uuid4().hex, memory=filename.read()) else: - #-- read netCDF4 dataset + # read netCDF4 dataset fileID = netCDF4.Dataset(os.path.expanduser(filename), 'r') - #-- create python dictionary for output variables + # create python dictionary for output variables dinput = {} dinput['attributes'] = {} - #-- Output NetCDF file information + # Output NetCDF file information logging.info(fileID.filepath()) logging.info(list(fileID.variables.keys())) - #-- mapping between output keys and netCDF4 variable names + # mapping between output keys and netCDF4 variable names keys = ['lon','lat','data'] nckeys = [kwargs['LONNAME'],kwargs['LATNAME'],kwargs['VARNAME']] if kwargs['DATE']: keys.append('time') nckeys.append(kwargs['TIMENAME']) - #-- list of variable attributes + # list of variable attributes attributes_list = ['description','units','long_name','calendar', 'standard_name','_FillValue','missing_value'] - #-- for each variable + # for each variable for key,nckey in zip(keys,nckeys): - #-- Getting the data from each NetCDF variable + # Getting the data from each NetCDF variable dinput[key] = np.squeeze(fileID.variables[nckey][:].data) - #-- Getting attributes of included variables + # Getting attributes of included variables dinput['attributes'][key] = {} for attr in attributes_list: - #-- try getting the attribute + # try getting the attribute try: dinput['attributes'][key][attr] = \ fileID.variables[nckey].getncattr(attr) except (KeyError,ValueError,AttributeError): pass - #-- switching data array to lat/lon if lon/lat + # switching data array to lat/lon if lon/lat sz = dinput['data'].shape if (dinput['data'].ndim == 2) and (len(dinput['lon']) == sz[0]): dinput['data'] = dinput['data'].T - #-- Global attributes + # Global attributes for att_name in ['title','description','reference']: try: ncattr, = [s for s in fileID.ncattrs() @@ -189,7 +189,7 @@ def ncdf_read(filename, **kwargs): dinput['attributes'][att_name] = fileID.getncattr(ncattr) except (ValueError, KeyError, AttributeError): pass - #-- Closing the NetCDF file + # Closing the NetCDF file fileID.close() - #-- return the output variable + # 
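
# A hedged convenience sketch (hypothetical path) of the diskless read
# used in ncdf_read: decompress into memory and pass the raw bytes to
# netCDF4 through its memory keyword (requires a netCDF4-python build
# with in-memory support).
import gzip
import uuid
import netCDF4
def open_gzipped_netcdf(path):
    with gzip.open(path, 'rb') as f:
        return netCDF4.Dataset(uuid.uuid4().hex, memory=f.read())
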
return the output variable return dinput diff --git a/gravity_toolkit/ncdf_read_stokes.py b/gravity_toolkit/ncdf_read_stokes.py index d02a37a6..90deb758 100755 --- a/gravity_toolkit/ncdf_read_stokes.py +++ b/gravity_toolkit/ncdf_read_stokes.py @@ -106,53 +106,53 @@ def ncdf_read_stokes(filename, **kwargs): month: GRACE/GRACE-FO month attributes: netCDF4 attributes for variables and file """ - #-- set default keyword arguments + # set default keyword arguments kwargs.setdefault('DATE',True) kwargs.setdefault('COMPRESSION',None) - #-- set deprecation warning + # set deprecation warning warnings.filterwarnings("always") warnings.warn("Deprecated. Please use harmonics.from_netCDF4", DeprecationWarning) - #-- Open the NetCDF4 file for reading + # Open the NetCDF4 file for reading if (kwargs['COMPRESSION'] == 'gzip'): - #-- read as in-memory (diskless) netCDF4 dataset + # read as in-memory (diskless) netCDF4 dataset with gzip.open(os.path.expanduser(filename),'r') as f: fileID = netCDF4.Dataset(os.path.basename(filename),memory=f.read()) elif (kwargs['COMPRESSION'] == 'zip'): - #-- read zipped file and extract file into in-memory file object + # read zipped file and extract file into in-memory file object fileBasename,_ = os.path.splitext(os.path.basename(filename)) with zipfile.ZipFile(os.path.expanduser(filename)) as z: - #-- first try finding a netCDF4 file with same base filename - #-- if none found simply try searching for a netCDF4 file + # first try finding a netCDF4 file with same base filename + # if none found simply try searching for a netCDF4 file try: f,=[f for f in z.namelist() if re.match(fileBasename,f,re.I)] except: f,=[f for f in z.namelist() if re.search(r'\.nc(4)?$',f)] - #-- read bytes from zipfile as in-memory (diskless) netCDF4 dataset + # read bytes from zipfile as in-memory (diskless) netCDF4 dataset fileID = netCDF4.Dataset(uuid.uuid4().hex, memory=z.read(f)) elif (kwargs['COMPRESSION'] == 'bytes'): - #-- read as in-memory (diskless) netCDF4 dataset + # read as in-memory (diskless) netCDF4 dataset fileID = netCDF4.Dataset(uuid.uuid4().hex, memory=filename.read()) else: - #-- read netCDF4 dataset + # read netCDF4 dataset fileID = netCDF4.Dataset(os.path.expanduser(filename), 'r') - #-- create python dictionary for output variables + # create python dictionary for output variables dinput = {} dinput['attributes'] = {} - #-- Output NetCDF file information + # Output NetCDF file information logging.info(fileID.filepath()) logging.info(list(fileID.variables.keys())) - #-- Getting the data from each NetCDF variable - #-- converting NetCDF objects into numpy arrays + # Getting the data from each NetCDF variable + # converting NetCDF objects into numpy arrays nckeys = ['l','m','clm','slm'] ll = fileID.variables['l'][:].copy() mm = fileID.variables['m'][:].copy() clm = fileID.variables['clm'][:].copy() slm = fileID.variables['slm'][:].copy() - #-- read date variables if specified + # read date variables if specified if kwargs['DATE']: nckeys.extend(['time','month']) dinput['time'] = fileID.variables['time'][:].copy() @@ -161,33 +161,33 @@ def ncdf_read_stokes(filename, **kwargs): else: n_time = 0 - #-- Restructuring input array back into matrix format + # Restructuring input array back into matrix format LMAX = np.max(ll) MMAX = np.max(mm) - #-- output spherical harmonic degree and order - #-- LMAX+1 to include LMAX (LMAX+1 elements) + # output spherical harmonic degree and order + # LMAX+1 to include LMAX (LMAX+1 elements) dinput['l'] = np.arange(0,LMAX+1) dinput['m'] = 
np.arange(0,MMAX+1) - #-- number of harmonics + # number of harmonics n_harm, = fileID.variables['l'].shape - #-- import spherical harmonic data + # import spherical harmonic data if (kwargs['DATE'] and (n_time > 1)): - #-- contains multiple dates + # contains multiple dates dinput['clm'] = np.zeros((LMAX+1,MMAX+1,n_time)) dinput['slm'] = np.zeros((LMAX+1,MMAX+1,n_time)) for lm in range(n_harm): dinput['clm'][ll[lm],mm[lm],:] = clm[lm,:] dinput['slm'][ll[lm],mm[lm],:] = slm[lm,:] else: - #-- contains either no dates or a single date + # contains either no dates or a single date dinput['clm'] = np.zeros((LMAX+1,MMAX+1)) dinput['slm'] = np.zeros((LMAX+1,MMAX+1)) for lm in range(n_harm): dinput['clm'][ll[lm],mm[lm]] = clm[lm] dinput['slm'][ll[lm],mm[lm]] = slm[lm] - #-- Getting attributes of clm/slm and included variables - #-- get attributes for the included variables + # Getting attributes of clm/slm and included variables + # get attributes for the included variables for key in nckeys: try: dinput['attributes'][key] = [ @@ -196,7 +196,7 @@ def ncdf_read_stokes(filename, **kwargs): ] except (KeyError,ValueError,AttributeError): pass - #-- Global attributes + # Global attributes for att_name in ['title','description','reference']: try: ncattr, = [s for s in fileID.ncattrs() @@ -205,8 +205,8 @@ def ncdf_read_stokes(filename, **kwargs): except (ValueError, KeyError, AttributeError): pass - #-- Closing the NetCDF file + # Closing the NetCDF file fileID.close() - #-- return output variable + # return output variable return dinput diff --git a/gravity_toolkit/ncdf_stokes.py b/gravity_toolkit/ncdf_stokes.py index 593378cb..1ceb0653 100755 --- a/gravity_toolkit/ncdf_stokes.py +++ b/gravity_toolkit/ncdf_stokes.py @@ -100,7 +100,7 @@ def ncdf_stokes(clm1, slm1, linp, minp, tinp, month, **kwargs): CLOBBER: will overwrite an existing netCDF4 file DATE: harmonics have date information """ - #-- set default keyword arguments + # set default keyword arguments kwargs.setdefault('FILENAME',None) kwargs.setdefault('UNITS','Geodesy_Normalization') kwargs.setdefault('TIME_UNITS',None) @@ -112,30 +112,30 @@ def ncdf_stokes(clm1, slm1, linp, minp, tinp, month, **kwargs): kwargs.setdefault('REFERENCE',None) kwargs.setdefault('DATE',True) kwargs.setdefault('CLOBBER',True) - #-- set deprecation warning + # set deprecation warning warnings.filterwarnings("always") warnings.warn("Deprecated. 
Please use harmonics.to_netCDF4", DeprecationWarning) - #-- setting NetCDF clobber attribute + # setting NetCDF clobber attribute clobber = 'w' if kwargs['CLOBBER'] else 'a' - #-- opening netCDF file for writing + # opening netCDF file for writing fileID = netCDF4.Dataset(kwargs['FILENAME'], clobber, format="NETCDF4") - #-- Maximum spherical harmonic degree (LMAX) and order (MMAX) + # Maximum spherical harmonic degree (LMAX) and order (MMAX) LMAX = np.max(linp) MMAX = np.max(minp) - #-- Calculating the number of cos and sin harmonics up to LMAX - #-- taking into account MMAX (if MMAX == LMAX then LMAX-MMAX=0) + # Calculating the number of cos and sin harmonics up to LMAX + # taking into account MMAX (if MMAX == LMAX then LMAX-MMAX=0) n_harm = (LMAX**2 + 3*LMAX - (LMAX-MMAX)**2 - (LMAX-MMAX))//2 + 1 - #-- dictionary with output variables + # dictionary with output variables output = {} - #-- restructured degree and order + # restructured degree and order output['l'] = np.zeros((n_harm,), dtype=np.int32) output['m'] = np.zeros((n_harm,), dtype=np.int32) - #-- Restructuring output matrix to array format - #-- will reduce matrix size and insure compatibility between platforms + # Restructuring output matrix to array format + # will reduce matrix size and insure compatibility between platforms if kwargs['DATE']: n_time = len(np.atleast_1d(tinp)) output['time'] = np.copy(tinp) @@ -151,10 +151,10 @@ def ncdf_stokes(clm1, slm1, linp, minp, tinp, month, **kwargs): output['clm'] = np.zeros((n_harm)) output['slm'] = np.zeros((n_harm)) - #-- create counter variable lm + # create counter variable lm lm = 0 - for m in range(0,MMAX+1):#-- MMAX+1 to include MMAX - for l in range(m,LMAX+1):#-- LMAX+1 to include LMAX + for m in range(0,MMAX+1):# MMAX+1 to include MMAX + for l in range(m,LMAX+1):# LMAX+1 to include LMAX output['l'][lm] = np.int64(l) output['m'][lm] = np.int64(m) if (kwargs['DATE'] and (n_time > 1)): @@ -163,20 +163,20 @@ def ncdf_stokes(clm1, slm1, linp, minp, tinp, month, **kwargs): else: output['clm'][lm] = clm1[l,m] output['slm'][lm] = slm1[l,m] - #-- add 1 to lm counter variable + # add 1 to lm counter variable lm += 1 - #-- Defining the netCDF dimensions + # Defining the netCDF dimensions fileID.createDimension('lm', n_harm) if kwargs['DATE']: fileID.createDimension('time', n_time) - #-- defining the netCDF variables + # defining the netCDF variables nc = {} - #-- degree and order + # degree and order nc['l'] = fileID.createVariable('l', 'i', ('lm',)) nc['m'] = fileID.createVariable('m', 'i', ('lm',)) - #-- spherical harmonics + # spherical harmonics if (kwargs['DATE'] and (n_time > 1)): nc['clm'] = fileID.createVariable('clm', 'd', ('lm','time',)) nc['slm'] = fileID.createVariable('slm', 'd', ('lm','time',)) @@ -184,43 +184,43 @@ def ncdf_stokes(clm1, slm1, linp, minp, tinp, month, **kwargs): nc['clm'] = fileID.createVariable('clm', 'd', ('lm',)) nc['slm'] = fileID.createVariable('slm', 'd', ('lm',)) if kwargs['DATE']: - #-- time (in decimal form) + # time (in decimal form) nc['time'] = fileID.createVariable('time', 'd', ('time',)) - #-- GRACE/GRACE-FO month (or integer date) + # GRACE/GRACE-FO month (or integer date) nc['month'] = fileID.createVariable(kwargs['MONTHS_NAME'], 'i', ('time',)) - #-- filling netCDF variables + # filling netCDF variables for key,val in output.items(): nc[key][:] = val.copy() - #-- Defining attributes for degree and order - nc['l'].long_name = 'spherical_harmonic_degree'#-- SH degree long name - nc['l'].units = 'Wavenumber'#-- SH degree units - 
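
# A self-contained miniature (hypothetical filename and toy values) of
# the createDimension/createVariable pattern used in ncdf_stokes for
# the flattened spherical harmonic arrays.
import numpy as np
import netCDF4
with netCDF4.Dataset('harmonics.nc', 'w', format='NETCDF4') as ds:
    ds.createDimension('lm', 6)
    ds.createDimension('time', 3)
    nc_l = ds.createVariable('l', 'i', ('lm',))
    nc_clm = ds.createVariable('clm', 'd', ('lm', 'time',))
    nc_l[:] = np.array([0, 1, 1, 2, 2, 2])
    nc_clm[:] = np.zeros((6, 3))
    nc_clm.units = 'Geodesy_Normalization'
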
nc['m'].long_name = 'spherical_harmonic_order'#-- SH order long name - nc['m'].units = 'Wavenumber'#-- SH order units - #-- Defining attributes for harmonics + # Defining attributes for degree and order + nc['l'].long_name = 'spherical_harmonic_degree'# SH degree long name + nc['l'].units = 'Wavenumber'# SH degree units + nc['m'].long_name = 'spherical_harmonic_order'# SH order long name + nc['m'].units = 'Wavenumber'# SH order units + # Defining attributes for harmonics nc['clm'].long_name = 'cosine_spherical_harmonics' nc['clm'].units = kwargs['UNITS'] nc['slm'].long_name = 'sine_spherical_harmonics' nc['slm'].units = kwargs['UNITS'] if kwargs['DATE']: - #-- Defining attributes for date and month + # Defining attributes for date and month nc['time'].long_name = kwargs['TIME_LONGNAME'] nc['time'].units = kwargs['TIME_UNITS'] nc['month'].long_name = kwargs['MONTHS_LONGNAME'] nc['month'].units = kwargs['MONTHS_UNITS'] - #-- global variables of NetCDF file + # global variables of NetCDF file if kwargs['TITLE']: fileID.title = kwargs['TITLE'] if kwargs['REFERENCE']: fileID.reference = kwargs['REFERENCE'] - #-- date created + # date created fileID.date_created = time.strftime('%Y-%m-%d',time.localtime()) - #-- Output netCDF structure information + # Output netCDF structure information logging.info(kwargs['FILENAME']) logging.info(list(fileID.variables.keys())) - #-- Closing the netCDF file + # Closing the netCDF file fileID.close() diff --git a/gravity_toolkit/ncdf_write.py b/gravity_toolkit/ncdf_write.py index 850c8d2e..74c70eaa 100755 --- a/gravity_toolkit/ncdf_write.py +++ b/gravity_toolkit/ncdf_write.py @@ -93,7 +93,7 @@ def ncdf_write(data, lon, lat, tim, **kwargs): CLOBBER: will overwrite an existing netCDF4 file DATE: data has date information """ - #-- set default keyword arguments + # set default keyword arguments kwargs.setdefault('FILENAME',None) kwargs.setdefault('VARNAME','z') kwargs.setdefault('LONNAME','lon') @@ -108,23 +108,23 @@ def ncdf_write(data, lon, lat, tim, **kwargs): kwargs.setdefault('REFERENCE',None) kwargs.setdefault('DATE',True) kwargs.setdefault('CLOBBER',True) - #-- set deprecation warning + # set deprecation warning warnings.filterwarnings("always") warnings.warn("Deprecated. 
Please use spatial.to_netCDF4", DeprecationWarning) - #-- setting NetCDF clobber attribute + # setting NetCDF clobber attribute clobber = 'w' if kwargs['CLOBBER'] else 'a' - #-- opening NetCDF file for writing - #-- Create the NetCDF file + # opening NetCDF file for writing + # Create the NetCDF file fileID = netCDF4.Dataset(kwargs['FILENAME'], clobber, format="NETCDF4") - #-- create output dictionary with key mapping + # create output dictionary with key mapping output = {} output[kwargs['LONNAME']] = np.copy(lon) output[kwargs['LATNAME']] = np.copy(lat) dimensions = [kwargs['LATNAME'],kwargs['LONNAME']] - #-- extend with date variables + # extend with date variables if kwargs['DATE']: output[kwargs['TIMENAME']] = np.atleast_1d(tim).astype('f') output[kwargs['VARNAME']] = np.atleast_3d(data) @@ -132,44 +132,44 @@ def ncdf_write(data, lon, lat, tim, **kwargs): else: output[kwargs['VARNAME']] = np.copy(data) - #-- defining the NetCDF dimensions and variables + # defining the NetCDF dimensions and variables nc = {} - #-- NetCDF dimensions + # NetCDF dimensions for i,dim in enumerate(dimensions): fileID.createDimension(dim, len(output[dim])) nc[dim] = fileID.createVariable(dim, output[dim].dtype, (dim,)) - #-- NetCDF spatial data + # NetCDF spatial data for key in [kwargs['VARNAME']]: nc[key] = fileID.createVariable(key, output[key].dtype, tuple(dimensions), fill_value=kwargs['FILL_VALUE'], zlib=True) - #-- filling NetCDF variables + # filling NetCDF variables for key,val in output.items(): nc[key][:] = val.copy() - #-- Defining attributes for longitude and latitude + # Defining attributes for longitude and latitude nc[kwargs['LONNAME']].long_name = 'longitude' nc[kwargs['LONNAME']].units = 'degrees_east' nc[kwargs['LATNAME']].long_name = 'latitude' nc[kwargs['LATNAME']].units = 'degrees_north' - #-- Defining attributes for dataset + # Defining attributes for dataset nc[kwargs['VARNAME']].long_name = kwargs['LONGNAME'] nc[kwargs['VARNAME']].units = kwargs['UNITS'] - #-- Defining attributes for date if applicable + # Defining attributes for date if applicable if kwargs['DATE']: nc[kwargs['TIMENAME']].long_name = kwargs['TIME_LONGNAME'] nc[kwargs['TIMENAME']].units = kwargs['TIME_UNITS'] - #-- global variables of NetCDF file + # global variables of NetCDF file if kwargs['TITLE']: fileID.title = kwargs['TITLE'] if kwargs['REFERENCE']: fileID.reference = kwargs['REFERENCE'] - #-- date created + # date created fileID.date_created = time.strftime('%Y-%m-%d',time.localtime()) - #-- Output NetCDF structure information + # Output NetCDF structure information logging.info(kwargs['FILENAME']) logging.info(list(fileID.variables.keys())) - #-- Closing the NetCDF file + # Closing the NetCDF file fileID.close() diff --git a/gravity_toolkit/ocean_stokes.py b/gravity_toolkit/ocean_stokes.py index 0b974c4c..4932a2a5 100644 --- a/gravity_toolkit/ocean_stokes.py +++ b/gravity_toolkit/ocean_stokes.py @@ -90,28 +90,28 @@ def ocean_stokes(LANDMASK, LMAX, MMAX=None, LOVE=None, VARNAME='LSMASK', m: int spherical harmonic order to MMAX """ - #-- maximum spherical harmonic order + # maximum spherical harmonic order MMAX = np.copy(LMAX) if MMAX is None else MMAX - #-- Read Land-Sea Mask of specified input file - #-- 0=Ocean, 1=Land, 2=Lake, 3=Small Island, 4=Ice Shelf - #-- Open the land-sea NetCDF file for reading + # Read Land-Sea Mask of specified input file + # 0=Ocean, 1=Land, 2=Lake, 3=Small Island, 4=Ice Shelf + # Open the land-sea NetCDF file for reading landsea = spatial().from_netCDF4(LANDMASK, date=False, 
        varname=VARNAME)
-    #-- create land function
+    # create land function
     nth,nphi = landsea.shape
     land_function = np.zeros((nth,nphi),dtype=np.float64)
-    #-- combine land and island levels for land function
+    # combine land and island levels for land function
     indx,indy = np.nonzero((landsea.data >= 1) & (landsea.data <= 3))
     land_function[indx,indy] = 1.0
-    #-- remove isolated points if specified
+    # remove isolated points if specified
     if SIMPLIFY:
         land_function -= find_isolated_points(land_function)
-    #-- ocean function reciprocal of land function
+    # ocean function is the complement of the land function
     ocean_function = 1.0 - land_function
-    #-- convert to spherical harmonics (1 cm w.e.)
+    # convert to spherical harmonics (1 cm w.e.)
     ocean_Ylms = gen_stokes(ocean_function.T,landsea.lon,landsea.lat,
         UNITS=1,LMIN=0,LMAX=LMAX,MMAX=MMAX,LOVE=LOVE)
-    #-- return the spherical harmonic coefficients
+    # return the spherical harmonic coefficients
     return ocean_Ylms

 def find_isolated_points(mask):
@@ -138,6 +138,6 @@ def find_isolated_points(mask):
     temp = np.roll(mask,-1,axis=0)
     temp[nth-1,:] = mask[nth-2,:]
     laplacian += mask*temp
-    #-- create mask of isolated points
+    # create mask of isolated points
     isolated = np.where(np.abs(laplacian) >= 3, 1, 0)
     return isolated
\ No newline at end of file
diff --git a/gravity_toolkit/piecewise_regress.py b/gravity_toolkit/piecewise_regress.py
index 163f45f9..85f1cb78 100755
--- a/gravity_toolkit/piecewise_regress.py
+++ b/gravity_toolkit/piecewise_regress.py
@@ -179,8 +179,8 @@ def piecewise_regress(t_in, d_in, BREAK_TIME=None, BREAKPOINT=None,
     d_in = np.squeeze(d_in)
     nmax = len(t_in)

-    #-- If indice of cutoff time entered: will calculate cutoff time
-    #-- If cutoff time entered: will find the cutoff indice
+    # If index of cutoff time entered: will calculate cutoff time
+    # If cutoff time entered: will find the cutoff index
     if BREAKPOINT is not None:
         tco = t_in[BREAKPOINT]
         nco = np.squeeze(BREAKPOINT)
@@ -188,115 +188,115 @@ def piecewise_regress(t_in, d_in, BREAK_TIME=None, BREAKPOINT=None,
         nco = np.argmin(np.abs(t_in - BREAK_TIME))
         tco = np.copy(BREAK_TIME)

-    #-- create design matrix for sharp breakpoint piecewise regression
-    #-- y = beta_0 + beta_1*t + e (for x <= alpha)
-    #-- y = beta_0 + beta_1*t + beta_2*(t-alpha) + e (for x > alpha)
+    # create design matrix for sharp breakpoint piecewise regression
+    # y = beta_0 + beta_1*t + e (for x <= alpha)
+    # y = beta_0 + beta_1*t + beta_2*(t-alpha) + e (for x > alpha)
     DMAT = []
-    #-- add polynomial orders (0=constant, 1=linear)
+    # add polynomial orders (0=constant, 1=linear)
     for o in range(2):
         DMAT.append(t_in**o)
-    #-- Linear Term 2 (change from linear term1: trend2 = beta1+beta2)
+    # Linear Term 2 (change from linear term1: trend2 = beta1+beta2)
     P_x1 = np.zeros((nmax))
     P_x1[nco:] = t_in[nco:] - tco
     DMAT.append(P_x1)
-    #-- add cyclical terms (0.5=semi-annual, 1=annual)
+    # add cyclical terms (0.5=semi-annual, 1=annual)
     for c in CYCLES:
         DMAT.append(np.sin(2.0*np.pi*t_in/np.float64(c)))
         DMAT.append(np.cos(2.0*np.pi*t_in/np.float64(c)))
-    #-- take the transpose of the design matrix
+    # take the transpose of the design matrix
     DMAT = np.transpose(DMAT)

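
# A standalone toy sketch (synthetic values, not part of the patch) of
# the sharp-breakpoint design matrix assembled above; with noise-free
# data the post-break change in trend (beta_2) is recovered exactly.
import numpy as np
t = np.arange(2002.0, 2010.0, 1.0/12.0)
nco = np.argmin(np.abs(t - 2006.0))    # breakpoint index
P_x1 = np.zeros_like(t)
P_x1[nco:] = t[nco:] - t[nco]
cols = [np.ones_like(t), t, P_x1]
for c in (0.5, 1.0):                   # semi-annual and annual terms
    cols.append(np.sin(2.0*np.pi*t/c))
    cols.append(np.cos(2.0*np.pi*t/c))
DMAT = np.column_stack(cols)
d = 3.0 + 0.1*t + 0.05*P_x1            # synthetic series, break in 2006
beta = np.linalg.lstsq(DMAT, d, rcond=-1)[0]
assert np.isclose(beta[2], 0.05)       # change in trend after the break
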
-    #-- Calculating Least-Squares Coefficients
+    # Calculating Least-Squares Coefficients
     if WEIGHT:
-        #-- Weighted Least-Squares fitting
+        # Weighted Least-Squares fitting
         if (np.ndim(DATA_ERR) == 0):
             raise ValueError('Input DATA_ERR for Weighted Least-Squares')
-        #-- check if any error values are 0 (prevent infinite weights)
+        # check if any error values are 0 (prevent infinite weights)
        if np.count_nonzero(DATA_ERR == 0.0):
-            #-- change to minimum floating point value
+            # change to minimum floating point value
             DATA_ERR[DATA_ERR == 0.0] = np.finfo(np.float64).eps
-        #--- Weight Precision
+        # Weight Precision
        wi = np.squeeze(DATA_ERR**(-2))
-        #-- If uncorrelated weights are the diagonal
+        # If uncorrelated, weights are the diagonal
        W = np.diag(wi)
-        #-- Least-Squares fitting
-        #-- Temporary Matrix: Inv(X'.W.X)
+        # Least-Squares fitting
+        # Temporary Matrix: Inv(X'.W.X)
        TM1 = np.linalg.inv(np.dot(np.transpose(DMAT),np.dot(W,DMAT)))
-        #-- Temporary Matrix: (X'.W.Y)
+        # Temporary Matrix: (X'.W.Y)
        TM2 = np.dot(np.transpose(DMAT),np.dot(W,d_in))
-        #-- Least Squares Solutions: Inv(X'.W.X).(X'.W.Y)
+        # Least Squares Solutions: Inv(X'.W.X).(X'.W.Y)
         beta_mat = np.dot(TM1,TM2)
-    else:#-- Standard Least-Squares fitting (the [0] denotes coefficients output)
+    else:# Standard Least-Squares fitting (the [0] denotes coefficients output)
         beta_mat = np.linalg.lstsq(DMAT,d_in,rcond=-1)[0]
-        #-- Weights are equal
+        # Weights are equal
         wi = 1.0

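
# A quick numerical check (synthetic data and sigmas, my own) that the
# explicit normal-equations solution above, Inv(X'.W.X).(X'.W.Y) with
# W = diag(1/sigma**2), matches ordinary least squares on the whitened
# system (X/sigma, y/sigma).
import numpy as np
rng = np.random.default_rng(1)
X = np.column_stack([np.ones(50), np.linspace(0.0, 5.0, 50)])
y = X @ np.array([1.0, 2.0]) + rng.normal(0.0, 0.1, 50)
sigma = np.full(50, 0.1)
W = np.diag(sigma**-2)
beta_w = np.linalg.inv(X.T @ W @ X) @ (X.T @ W @ y)
beta_o = np.linalg.lstsq(X/sigma[:, None], y/sigma, rcond=-1)[0]
assert np.allclose(beta_w, beta_o)
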
-    #-- Calculating trend2 = beta1 + beta2
-    #-- beta2 = change in linear term from beta1
-    beta_out = np.copy(beta_mat)#-- output beta
+    # Calculating trend2 = beta1 + beta2
+    # beta2 = change in linear term from beta1
+    beta_out = np.copy(beta_mat)# output beta
     beta_out[2] = beta_mat[1] + beta_mat[2]
-    #-- number of terms in least-squares solution
+    # number of terms in least-squares solution
     n_terms = len(beta_mat)
-    #-- modelled time-series
+    # modelled time-series
     mod = np.dot(DMAT,beta_mat)
-    #-- time-series residuals
+    # time-series residuals
     res = d_in[0:nmax] - np.dot(DMAT,beta_mat)
-    #-- Fitted Values without climate oscillations
+    # Fitted Values without climate oscillations
     simple = np.dot(DMAT[:,0:3],beta_mat[0:3])

-    #-- Error Analysis
-    #-- nu = Degrees of Freedom = number of measurements-number of parameters
+    # Error Analysis
+    # nu = Degrees of Freedom = number of measurements minus number of parameters
     nu = nmax - n_terms

-    #-- calculating R^2 values
-    #-- SStotal = sum((Y-mean(Y))**2)
+    # calculating R^2 values
+    # SStotal = sum((Y-mean(Y))**2)
     SStotal = np.dot(np.transpose(d_in[0:nmax] - np.mean(d_in[0:nmax])),
         (d_in[0:nmax] - np.mean(d_in[0:nmax])))
-    #-- SSerror = sum((Y-X*B)**2)
+    # SSerror = sum((Y-X*B)**2)
     SSerror = np.dot(np.transpose(d_in[0:nmax] - np.dot(DMAT,beta_mat)),
         (d_in[0:nmax] - np.dot(DMAT,beta_mat)))
-    #-- R**2 term = 1- SSerror/SStotal
+    # R**2 term = 1 - SSerror/SStotal
     rsquare = 1.0 - (SSerror/SStotal)
-    #-- Adjusted R**2 term: weighted by degrees of freedom
+    # Adjusted R**2 term: weighted by degrees of freedom
     rsq_adj = 1.0 - (SSerror/SStotal)*np.float64((nmax-1.0)/nu)

-    #-- Fit Criterion
-    #-- number of parameters including the intercept and the variance
+    # Fit Criterion
+    # number of parameters including the intercept and the variance
     K = np.float64(n_terms + 1)
-    #-- Log-Likelihood with weights (if unweighted, weight portions == 0)
-    #-- log(L) = -0.5*n*log(sigma^2) - 0.5*n*log(2*pi) - 0.5*n
+    # Log-Likelihood with weights (if unweighted, weight portions == 0)
+    # log(L) = -0.5*n*log(sigma^2) - 0.5*n*log(2*pi) - 0.5*n
     #log_lik = -0.5*nmax*(np.log(2.0 * np.pi) + 1.0 + np.log(np.sum((res**2)/nmax)))
     log_lik = 0.5*(np.sum(np.log(wi)) - nmax*(np.log(2.0 * np.pi) + 1.0 -
         np.log(nmax) + np.log(np.sum(wi * (res**2)))))

-    #-- Aikaike's Information Criterion
+    # Akaike's Information Criterion
     AIC = -2.0*log_lik + 2.0*K
     if AICc:
-        #-- Second-Order AIC correcting for small sample sizes (restricted)
-        #-- Burnham and Anderson (2002) advocate use of AICc where
-        #-- ratio num/K is small
-        #-- A small ratio is defined in the definition at approximately < 40
+        # Second-Order AIC correcting for small sample sizes (restricted)
+        # Burnham and Anderson (2002) advocate use of AICc where the
+        # ratio num/K is small
+        # a small ratio is typically defined as approximately < 40
         AIC += (2.0*K*(K+1.0))/(nmax - K - 1.0)
-    #-- Bayesian Information Criterion (Schwarz Criterion)
+    # Bayesian Information Criterion (Schwarz Criterion)
     BIC = -2.0*log_lik + np.log(nmax)*K

-    #--- Error Analysis
+    # Error Analysis
     if WEIGHT:
-        #-- WEIGHTED LEAST-SQUARES CASE (unequal error)
-        #-- Covariance Matrix
+        # WEIGHTED LEAST-SQUARES CASE (unequal error)
+        # Covariance Matrix
         Hinv = np.linalg.inv(np.dot(np.transpose(DMAT),np.dot(W,DMAT)))
-        #-- Normal Equations
+        # Normal Equations
         NORMEQ = np.dot(Hinv,np.transpose(np.dot(W,DMAT)))
         temp_err = np.zeros((n_terms))
-        #-- Propagating RMS errors
+        # Propagating RMS errors
         for i in range(0,n_terms):
             temp_err[i] = np.sqrt(np.sum((NORMEQ[i,:]*DATA_ERR)**2))
-        #-- Recalculating beta2 error
+        # Recalculating beta2 error
         beta_err = np.copy(temp_err)
         beta_err[2] = np.sqrt(temp_err[1]**2 + temp_err[2]**2)
-        #-- Weighted sum of squares Error
+        # Weighted sum of squares Error
         WSSE = np.dot(np.transpose(wi*(d_in[0:nmax] - np.dot(DMAT,beta_mat))),
             wi*(d_in[0:nmax] - np.dot(DMAT,beta_mat)))/np.float64(nu)
@@ -306,18 +306,18 @@ def piecewise_regress(t_in, d_in, BREAK_TIME=None, BREAKPOINT=None,
             'N':n_terms, 'DOF':nu, 'cov_mat':Hinv}

     elif ((not WEIGHT) and (DATA_ERR != 0)):
-        #-- LEAST-SQUARES CASE WITH KNOWN AND EQUAL ERROR
+        # LEAST-SQUARES CASE WITH KNOWN AND EQUAL ERROR
         P_err = DATA_ERR*np.ones((nmax))
         Hinv = np.linalg.inv(np.dot(np.transpose(DMAT),DMAT))
-        #-- Normal Equations
+        # Normal Equations
         NORMEQ = np.dot(Hinv,np.transpose(DMAT))
         temp_err = np.zeros((n_terms))
         for i in range(0,n_terms):
             temp_err[i] = np.sum((NORMEQ[i,:]*P_err)**2)
-        #-- Recalculating beta2 error
+        # Recalculating beta2 error
         beta_err = np.copy(temp_err)
         beta_err[2] = np.sqrt(temp_err[1]**2 + temp_err[2]**2)
-        #-- Mean square error
+        # Mean square error
         MSE = np.dot(np.transpose(d_in[0:nmax] - np.dot(DMAT,beta_mat)),
             (d_in[0:nmax] - np.dot(DMAT,beta_mat)))/np.float64(nu)
@@ -326,43 +326,43 @@ def piecewise_regress(t_in, d_in, BREAK_TIME=None, BREAKPOINT=None,
         'LOGLIK':log_lik, 'model':mod, 'residual':res,
         'N':n_terms, 'DOF':nu, 'cov_mat':Hinv}
     else:
-        #-- STANDARD LEAST-SQUARES CASE
-        #-- Regression with Errors with Unknown Standard Deviations
-        #-- MSE = (1/nu)*sum((Y-X*B)**2)
-        #-- Mean square error
+        # STANDARD LEAST-SQUARES CASE
+        # Regression with Errors with Unknown Standard Deviations
+        # MSE = (1/nu)*sum((Y-X*B)**2)
+        # Mean square error
         MSE = np.dot(np.transpose(d_in[0:nmax] - np.dot(DMAT,beta_mat)),
             (d_in[0:nmax] - np.dot(DMAT,beta_mat)))/np.float64(nu)
-        #-- Root mean square error
+        # Root mean square error
         RMSE = np.sqrt(MSE)
-        #-- Normalized root mean square error
+        # Normalized root mean square error
         NRMSE = RMSE/(np.max(d_in[0:nmax])-np.min(d_in[0:nmax]))
-        #-- Covariance Matrix
-        #-- Multiplying the design matrix by itself
+        # Covariance Matrix
+        # Multiplying the design matrix by itself
         Hinv = np.linalg.inv(np.dot(np.transpose(DMAT),DMAT))
-        #-- Taking the diagonal components of the cov matrix
+        # Taking the diagonal components of the cov matrix
         hdiag = np.diag(Hinv)
-        #-- set either the standard deviation or the confidence interval
+        # set either the standard deviation or the confidence interval
         if 
(STDEV != 0): - #-- Setting the standard deviation of the output error + # Setting the standard deviation of the output error alpha = 1.0 - scipy.special.erf(STDEV/np.sqrt(2.0)) elif (CONF != 0): - #-- Setting the confidence interval of the output error + # Setting the confidence interval of the output error alpha = 1.0 - CONF else: - #-- Default is 95% confidence interval + # Default is 95% confidence interval alpha = 1.0 - (0.95) - #-- Student T-Distribution with D.O.F. nu - #-- t.ppf parallels tinv in matlab + # Student T-Distribution with D.O.F. nu + # t.ppf parallels tinv in matlab tstar = scipy.stats.t.ppf(1.0-(alpha/2.0),nu) - #-- beta_err is the error for each coefficient - #-- beta_err = t(nu,1-alpha/2)*standard error + # beta_err is the error for each coefficient + # beta_err = t(nu,1-alpha/2)*standard error temp_std = np.sqrt(MSE*hdiag) temp_err = tstar*temp_std - #-- Recalculating standard error for beta2 + # Recalculating standard error for beta2 st_err = np.copy(temp_std) st_err[2] = np.sqrt(temp_std[1]**2 + temp_std[2]**2) - #-- Recalculating beta2 error + # Recalculating beta2 error beta_err = np.copy(temp_err) beta_err[2] = np.sqrt(temp_err[1]**2 + temp_err[2]**2) diff --git a/gravity_toolkit/plm_colombo.py b/gravity_toolkit/plm_colombo.py index 927f5ace..fd239744 100755 --- a/gravity_toolkit/plm_colombo.py +++ b/gravity_toolkit/plm_colombo.py @@ -86,48 +86,48 @@ def plm_colombo(LMAX, x, ASTYPE=np.float64): `doi: 10.1007/s00190-002-0216-2 `_ """ - #-- removing singleton dimensions of x + # removing singleton dimensions of x x = np.atleast_1d(x).flatten().astype(ASTYPE) - #-- length of the x array + # length of the x array jm = len(x) - #-- verify data type of spherical harmonic truncation + # verify data type of spherical harmonic truncation LMAX = np.int64(LMAX) - #-- allocating for the plm matrix and differentials + # allocating for the plm matrix and differentials plm = np.zeros((LMAX+1,LMAX+1,jm)) dplm = np.zeros((LMAX+1,LMAX+1,jm)) - #-- u is sine of colatitude (cosine of latitude) so that 0 <= s <= 1 - #-- for x=cos(th): u=sin(th) + # u is sine of colatitude (cosine of latitude) so that 0 <= s <= 1 + # for x=cos(th): u=sin(th) u = np.sqrt(1.0 - x**2) - #-- update where u==0 to eps of data type to prevent invalid divisions + # update where u==0 to eps of data type to prevent invalid divisions u[u == 0] = np.finfo(u.dtype).eps - #-- Calculating the initial polynomials for the recursion + # Calculating the initial polynomials for the recursion plm[0,0,:] = 1.0 plm[1,0,:] = np.sqrt(3.0)*x plm[1,1,:] = np.sqrt(3.0)*u - #-- calculating first derivatives for harmonics of degree 1 + # calculating first derivatives for harmonics of degree 1 dplm[1,0,:] = (1.0/u)*(x*plm[1,0,:] - np.sqrt(3)*plm[0,0,:]) dplm[1,1,:] = (x/u)*plm[1,1,:] for l in range(2, LMAX+1): - for m in range(0, l):#-- Zonal and Tesseral harmonics (non-sectorial) - #-- Computes the non-sectorial terms from previously computed - #-- sectorial terms. + for m in range(0, l):# Zonal and Tesseral harmonics (non-sectorial) + # Computes the non-sectorial terms from previously computed + # sectorial terms. 
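
# A short sketch (assumed nu and a 2-sigma request) of the interval
# scaling used in piecewise_regress above: translate STDEV into alpha
# with the error function, then scale coefficient standard errors by
# the Student's t quantile for nu degrees of freedom.
import numpy as np
import scipy.special
import scipy.stats
nu, STDEV = 20, 2.0
alpha = 1.0 - scipy.special.erf(STDEV/np.sqrt(2.0))
tstar = scipy.stats.t.ppf(1.0 - alpha/2.0, nu)
# a coefficient error bound is then tstar times its standard error
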
alm = np.sqrt(((2.0*l-1.0)*(2.0*l+1.0))/((l-m)*(l+m))) blm = np.sqrt(((2.0*l+1.0)*(l+m-1.0)*(l-m-1.0))/((l-m)*(l+m)*(2.0*l-3.0))) - #-- if (m == l-1): plm[l-2,m,:] will be 0 + # if (m == l-1): plm[l-2,m,:] will be 0 plm[l,m,:] = alm*x*plm[l-1,m,:] - blm*plm[l-2,m,:] - #-- calculate first derivatives + # calculate first derivatives flm = np.sqrt(((l**2.0 - m**2.0)*(2.0*l + 1.0))/(2.0*l - 1.0)) dplm[l,m,:] = (1.0/u)*(l*x*plm[l,m,:] - flm*plm[l-1,m,:]) - #-- Sectorial harmonics - #-- The sectorial harmonics serve as seed values for the recursion - #-- starting with P00 and P11 (outside the loop) + # Sectorial harmonics + # The sectorial harmonics serve as seed values for the recursion + # starting with P00 and P11 (outside the loop) plm[l,l,:] = u*np.sqrt((2.0*l+1.0)/(2.0*l))*np.squeeze(plm[l-1,l-1,:]) - #-- calculate first derivatives for sectorial harmonics + # calculate first derivatives for sectorial harmonics dplm[l,l,:] = np.longdouble(l)*(x/u)*plm[l,l,:] - #-- return the legendre polynomials and their first derivative + # return the legendre polynomials and their first derivative return plm, dplm diff --git a/gravity_toolkit/plm_holmes.py b/gravity_toolkit/plm_holmes.py index ce647fbe..7802ebad 100755 --- a/gravity_toolkit/plm_holmes.py +++ b/gravity_toolkit/plm_holmes.py @@ -93,26 +93,26 @@ def plm_holmes(LMAX, x, ASTYPE=np.float64): `doi: 10.1007/s00190-002-0216-2 `_ """ - #-- removing singleton dimensions of x + # removing singleton dimensions of x x = np.atleast_1d(x).flatten().astype(ASTYPE) - #-- length of the x array + # length of the x array jm = len(x) - #-- verify data type of spherical harmonic truncation + # verify data type of spherical harmonic truncation LMAX = np.int64(LMAX) - #-- scaling factor + # scaling factor scalef = 1.0e-280 - #-- allocate for multiplicative factors, and plms + # allocate for multiplicative factors, and plms f1 = np.zeros(((LMAX+1)*(LMAX+2)//2),dtype=ASTYPE) f2 = np.zeros(((LMAX+1)*(LMAX+2)//2),dtype=ASTYPE) p = np.zeros(((LMAX+1)*(LMAX+2)//2,jm),dtype=ASTYPE) plm = np.zeros((LMAX+1,LMAX+1,jm),dtype=ASTYPE) dplm = np.zeros((LMAX+1,LMAX+1,jm),dtype=ASTYPE) - #-- Precompute multiplicative factors used in recursion relationships - #-- Note that prefactors are not used for the case when m=l and m=l-1, - #-- as a different recursion is used for these two values. - k = 2#-- k = l*(l+1)/2 + m + # Precompute multiplicative factors used in recursion relationships + # Note that prefactors are not used for the case when m=l and m=l-1, + # as a different recursion is used for these two values. + k = 2# k = l*(l+1)/2 + m for l in range(2, LMAX+1): k += 1 f1[k] = np.sqrt(2.0*l-1.0)*np.sqrt(2.0*l+1.0)/np.longdouble(l) @@ -124,13 +124,13 @@ def plm_holmes(LMAX, x, ASTYPE=np.float64): (np.sqrt(2.0*l-3.0)*np.sqrt(l+m)*np.sqrt(l-m)) k += 2 - #-- u is sine of colatitude (cosine of latitude) so that 0 <= s <= 1 - #-- for x=cos(th): u=sin(th) + # u is sine of colatitude (cosine of latitude) so that 0 <= s <= 1 + # for x=cos(th): u=sin(th) u = np.sqrt(1.0 - x**2) - #-- update where u==0 to eps of data type to prevent invalid divisions + # update where u==0 to eps of data type to prevent invalid divisions u[u == 0] = np.finfo(u.dtype).eps - #-- Calculate P(l,0). These are not scaled. + # Calculate P(l,0). These are not scaled. 
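    # the zonal (m == 0) terms use the precomputed factors f1 and f2
    # in the recursion P(l,0) = f1[k]*x*P(l-1,0) - f2[k]*P(l-2,0),
    # seeded with P(0,0) = 1 and P(1,0) = sqrt(3)*x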
p[0,:] = 1.0 p[1,:] = np.sqrt(3.0)*x k = 1 @@ -138,44 +138,44 @@ def plm_holmes(LMAX, x, ASTYPE=np.float64): k += l p[k,:] = f1[k]*x*p[k-l,:] - f2[k]*p[k-2*l+1,:] - #-- Calculate P(m,m), P(m+1,m), and P(l,m) + # Calculate P(m,m), P(m+1,m), and P(l,m) pmm = np.sqrt(2.0)*scalef rescalem = 1.0/scalef kstart = 0 for m in range(1, LMAX): rescalem = rescalem * u - #-- Calculate P(m,m) + # Calculate P(m,m) kstart += m+1 pmm = pmm * np.sqrt(2*m+1)/np.sqrt(2*m) p[kstart,:] = pmm - #-- Calculate P(m+1,m) + # Calculate P(m+1,m) k = kstart+m+1 p[k,:] = x*np.sqrt(2*m+3)*pmm - #-- Calculate P(l,m) + # Calculate P(l,m) for l in range(m+2, LMAX+1): k += l p[k,:] = x*f1[k]*p[k-l,:] - f2[k]*p[k-2*l+1,:] p[k-2*l+1,:] = p[k-2*l+1,:] * rescalem - #-- rescale + # rescale p[k,:] = p[k,:] * rescalem p[k-LMAX,:] = p[k-LMAX,:] * rescalem - #-- Calculate P(LMAX,LMAX) + # Calculate P(LMAX,LMAX) rescalem = rescalem * u kstart += m+2 p[kstart,:] = pmm * np.sqrt(2*LMAX+1) / np.sqrt(2*LMAX) * rescalem - #-- reshape Legendre polynomials to output dimensions + # reshape Legendre polynomials to output dimensions for m in range(LMAX+1): for l in range(m,LMAX+1): lm = (l*(l+1))//2 + m plm[l,m,:] = p[lm,:] - #-- calculate first derivatives + # calculate first derivatives if (l == m): dplm[l,m,:] = np.longdouble(m)*(x/u)*plm[l,m,:] else: flm = np.sqrt(((l**2.0 - m**2.0)*(2.0*l + 1.0))/(2.0*l - 1.0)) dplm[l,m,:]= (1.0/u)*(l*x*plm[l,m,:] - flm*plm[l-1,m,:]) - #-- return the legendre polynomials and their first derivative + # return the legendre polynomials and their first derivative return plm, dplm diff --git a/gravity_toolkit/plm_mohlenkamp.py b/gravity_toolkit/plm_mohlenkamp.py index 8f08cf0f..0d6834da 100755 --- a/gravity_toolkit/plm_mohlenkamp.py +++ b/gravity_toolkit/plm_mohlenkamp.py @@ -88,56 +88,56 @@ def plm_mohlenkamp(LMAX, x, MMAX=None): .. |ouml| unicode:: U+00F6 .. 
LATIN SMALL LETTER O WITH DIAERESIS """ - #-- Verify LMAX as integer + # Verify LMAX as integer LMAX = np.int64(LMAX) - #-- upper bound of spherical harmonic orders (default = LMAX) + # upper bound of spherical harmonic orders (default = LMAX) if MMAX is None: MMAX = np.copy(LMAX) - #-- removing singleton dimensions of x + # removing singleton dimensions of x x = np.atleast_1d(x).flatten() - #-- length of the x array + # length of the x array sx = len(x) - #-- Initialize the output Legendre polynomials + # Initialize the output Legendre polynomials plm=np.zeros((LMAX+1,MMAX+1,sx)) - #-- Jacobi polynomial for the recurrence relation + # Jacobi polynomial for the recurrence relation jlmm=np.zeros((LMAX+1,MMAX+1,sx)) - #-- for x=cos(th): rsin= sin(th) + # for x=cos(th): rsin= sin(th) rsin=np.sqrt(1.0 - x**2) - #-- for all spherical harmonic orders of interest - for mm in range(0,MMAX+1):#-- equivalent to 0:MMAX - #-- Initialize the recurrence relation - #-- J-1,m,m Term == 0 - #-- J0,m,m Term + # for all spherical harmonic orders of interest + for mm in range(0,MMAX+1):# equivalent to 0:MMAX + # Initialize the recurrence relation + # J-1,m,m Term == 0 + # J0,m,m Term if (mm > 0): - #-- j ranges from 1 to mm for the product + # j ranges from 1 to mm for the product j = np.arange(0,mm)+1.0 jlmm[0,mm,:] = np.prod(np.sqrt(1.0 + 1.0/(2.0*j)))/np.sqrt(2.0) - else: #-- if mm == 0: jlmm = 1/sqrt(2) + else: # if mm == 0: jlmm = 1/sqrt(2) jlmm[0,mm,:] = 1.0/np.sqrt(2.0) - #-- Jk,m,m Terms - for k in range(1, LMAX+1):#-- computation for SH degrees - #-- Initialization begins at -1 - #-- this is to make the formula parallel the function written in - #-- Martin Mohlenkamp's Guide to Spherical Harmonics - #-- Jacobi General Terms - if (k == 1):#-- for degree 1 terms + # Jk,m,m Terms + for k in range(1, LMAX+1):# computation for SH degrees + # Initialization begins at -1 + # this is to make the formula parallel the function written in + # Martin Mohlenkamp's Guide to Spherical Harmonics + # Jacobi General Terms + if (k == 1):# for degree 1 terms jlmm[k,mm,:] = 2.0*x * jlmm[k-1,mm,:] * \ np.sqrt(1.0 + (mm - 0.5)/k) * \ np.sqrt(1.0 - (mm - 0.5)/(k + 2.0*mm)) - else:#-- for all other spherical harmonic degrees + else:# for all other spherical harmonic degrees jlmm[k,mm,:] = 2.0*x * jlmm[k-1,mm,:] * \ np.sqrt(1.0 + (mm - 0.5)/k) * \ np.sqrt(1.0 - (mm - 0.5)/(k + 2.0*mm)) - \ jlmm[k-2,mm,:] * np.sqrt(1.0 + 4.0/(2.0*k + 2.0*mm - 3.0)) * \ np.sqrt(1.0 - (1.0/k)) * np.sqrt(1.0 - 1.0/(k + 2.0*mm)) - #-- Normalization is geodesy convention - for l in range(mm,LMAX+1): #-- equivalent to mm:LMAX - if (mm == 0):#-- Geodesy normalization (m=0) == sqrt(2)*sin(th)^0 - #-- rsin^mm term is dropped as rsin^0 = 1 + # Normalization is geodesy convention + for l in range(mm,LMAX+1): # equivalent to mm:LMAX + if (mm == 0):# Geodesy normalization (m=0) == sqrt(2)*sin(th)^0 + # rsin^mm term is dropped as rsin^0 = 1 plm[l,mm,:] = np.sqrt(2.0)*jlmm[l-mm,mm,:] - else:#-- Geodesy normalization all others == 2*sin(th)^mm + else:# Geodesy normalization all others == 2*sin(th)^mm plm[l,mm,:] = 2.0*(rsin**mm)*jlmm[l-mm,mm,:] return plm diff --git a/gravity_toolkit/read_GIA_model.py b/gravity_toolkit/read_GIA_model.py index da4831f6..6afcb73e 100755 --- a/gravity_toolkit/read_GIA_model.py +++ b/gravity_toolkit/read_GIA_model.py @@ -244,15 +244,15 @@ def read_GIA_model(input_file, GIA=None, MMAX=None, DATAFORM=None, **kwargs): glacial isostatic adjustment", *Nature Geoscience*, 3(9), 642-646 (2010). 
`https://doi.org/10.1038/ngeo938 `_ """ - #-- default keyword arguments + # default keyword arguments kwargs.setdefault('LMAX', None) kwargs.setdefault('MODE', 0o775) - #-- allocate for output Ylms + # allocate for output Ylms gia_Ylms = {} - #-- GIA model citations and references + # GIA model citations and references if (GIA == 'IJ05-R2'): - #-- IJ05-R2: Ivins R2 GIA Models + # IJ05-R2: Ivins R2 GIA Models prefix = 'IJ05_R2' gia_Ylms['citation'] = 'Ivins_et_al._(2013)' gia_Ylms['reference'] = ('E. R. Ivins, T. S. James, J. Wahr, ' @@ -261,12 +261,12 @@ def read_GIA_model(input_file, GIA=None, MMAX=None, DATAFORM=None, **kwargs): 'GIA correction", Journal of Geophysical Research: Solid Earth, ' '118(6), 3126-3141, (2013). https://doi.org/10.1002/jgrb.50208') gia_Ylms['url'] = 'https://doi.org/10.1002/jgrb.50208' - #-- regular expression file pattern + # regular expression file pattern file_pattern = r'Stokes.R2_(.*?)_L120' - #-- default degree of truncation + # default degree of truncation LMAX = 120 if not kwargs['LMAX'] else kwargs['LMAX'] elif (GIA == 'ICE6G'): - #-- ICE6G: ICE-6G VM5 GIA Models + # ICE6G: ICE-6G VM5 GIA Models prefix = 'ICE6G' gia_Ylms['citation'] = 'Peltier_et_al._(2015)' gia_Ylms['reference'] = ('W. R. Peltier, D. F. Argus, and R. Drummond, ' @@ -275,14 +275,14 @@ def read_GIA_model(input_file, GIA=None, MMAX=None, DATAFORM=None, **kwargs): 'Solid Earth, 120(1), 450-487, (2015). ' 'https://doi.org/10.1002/2014JB011176') gia_Ylms['url'] = 'https://doi.org/10.1002/2014JB011176' - #-- regular expression file pattern for test cases + # regular expression file pattern for test cases #file_pattern = r'Stokes_G_Rot_60_I6_A_(.*?)_L90' - #-- regular expression file pattern for VM5 + # regular expression file pattern for VM5 file_pattern = r'Stokes_G_Rot_60_I6_A_(.*)' - #-- default degree of truncation + # default degree of truncation LMAX = 60 if not kwargs['LMAX'] else kwargs['LMAX'] elif (GIA == 'W12a'): - #-- W12a: Whitehouse GIA Models + # W12a: Whitehouse GIA Models prefix = 'W12a' gia_Ylms['citation'] = 'Whitehouse_et_al._(2012)' gia_Ylms['reference'] = ('P. L. Whitehouse, M. J. Bentley, G. A. Milne, ' @@ -292,14 +292,14 @@ def read_GIA_model(input_file, GIA=None, MMAX=None, DATAFORM=None, **kwargs): 'Geophysical Journal International, 190(3), 1464-1482, (2012). ' 'https://doi.org/10.1111/j.1365-246X.2012.05557.x') gia_Ylms['url'] = 'https://doi.org/10.1111/j.1365-246X.2012.05557.x' - #-- for Whitehouse W12a (BEST, LOWER, UPPER): + # for Whitehouse W12a (BEST, LOWER, UPPER): parameters = dict(B='Best', L='Lower', U='Upper') - #-- regular expression file pattern + # regular expression file pattern file_pattern = r'grate_(B|L|U).clm' - #-- default degree of truncation + # default degree of truncation LMAX = 120 if not kwargs['LMAX'] else kwargs['LMAX'] elif (GIA == 'SM09'): - #-- SM09: Simpson/Milne GIA Models + # SM09: Simpson/Milne GIA Models prefix = 'SM09_Huy2' gia_Ylms['citation'] = 'Simpson_et_al._(2009)' gia_Ylms['reference'] = ('M. J. R. Simpson, G. A. Milne, P. Huybrechts, ' @@ -309,12 +309,12 @@ def read_GIA_model(input_file, GIA=None, MMAX=None, DATAFORM=None, **kwargs): 'Quaternary Science Reviews, 28(17-18), 1631-1657, (2009). 
' 'https://doi.org/10.1016/j.quascirev.2009.03.004') gia_Ylms['url'] = 'https://doi.org/10.1016/j.quascirev.2009.03.004' - #-- regular expression file pattern + # regular expression file pattern file_pattern = r'grate_(\d+)p(\d)(\d+).clm' - #-- default degree of truncation + # default degree of truncation LMAX = 120 if not kwargs['LMAX'] else kwargs['LMAX'] elif (GIA == 'Wu10'): - #-- Wu10: Wu (2010) GIA Correction + # Wu10: Wu (2010) GIA Correction gia_Ylms['citation'] = 'Wu_et_al._(2010)' gia_Ylms['reference'] = ('X. Wu, M. B. Heflin, H. Schotman, B. L. A. ' 'Vermeersen, D. Dong, R. S. Gross, E. R. Ivins, A. W. Moore, S. E. ' @@ -322,10 +322,10 @@ def read_GIA_model(input_file, GIA=None, MMAX=None, DATAFORM=None, **kwargs): 'transport and glacial isostatic adjustment", Nature Geoscience, ' '3(9), 642-646, (2010). https://doi.org/10.1038/ngeo938') gia_Ylms['url'] = 'https://doi.org/10.1038/ngeo938' - #-- default degree of truncation + # default degree of truncation LMAX = 60 if not kwargs['LMAX'] else kwargs['LMAX'] elif (GIA == 'Caron'): - #-- Caron: Caron JPL GIA Assimilation + # Caron: Caron JPL GIA Assimilation gia_Ylms['citation'] = 'Caron_et_al._(2018)' gia_Ylms['reference'] = ('L. Caron, E. R. Ivins, E. Larour, S. Adhikari, ' 'J. Nilsson and G. Blewitt, "GIA Model Statistics for GRACE ' @@ -333,10 +333,10 @@ def read_GIA_model(input_file, GIA=None, MMAX=None, DATAFORM=None, **kwargs): 'Letters, 45(5), 2203-2212, (2018). ' 'https://doi.org/10.1002/2017GL076644') gia_Ylms['url'] = 'https://doi.org/10.1002/2017GL076644' - #-- default degree of truncation + # default degree of truncation LMAX = 89 if not kwargs['LMAX'] else kwargs['LMAX'] elif (GIA == 'ICE6G-D'): - #-- ICE6G-D: ICE-6G Version-D GIA Models + # ICE6G-D: ICE-6G Version-D GIA Models prefix = 'ICE6G-D' gia_Ylms['citation'] = 'Peltier_et_al._(2018)' gia_Ylms['reference'] = ('W. R. Peltier, D. F. Argus, and R. Drummond, ' @@ -345,12 +345,12 @@ def read_GIA_model(input_file, GIA=None, MMAX=None, DATAFORM=None, **kwargs): 'Geophysical Research: Solid Earth, 123(2), 2019-2028, (2018). ' 'https://doi.org/10.1002/2016JB013844') gia_Ylms['url'] = 'https://doi.org/10.1002/2016JB013844' - #-- regular expression file pattern for Version-D + # regular expression file pattern for Version-D file_pattern = r'(ICE-6G_)?(.*?)[_]?Stokes_trend[_]?(.*?)\.txt$' - #-- default degree of truncation + # default degree of truncation LMAX = 256 if not kwargs['LMAX'] else kwargs['LMAX'] elif (GIA == 'AW13-ICE6G'): - #-- AW13-ICE6G: Geruo A ICE-6G GIA Models + # AW13-ICE6G: Geruo A ICE-6G GIA Models prefix = 'AW13' gia_Ylms['citation'] = 'A_et_al._(2013)' gia_Ylms['reference'] = ('G. A, J. Wahr, and S. Zhong, "Computations of ' @@ -359,12 +359,12 @@ def read_GIA_model(input_file, GIA=None, MMAX=None, DATAFORM=None, **kwargs): 'Antarctica and Canada", Geophysical Journal International, ' '192(2), 557-572, (2013). https://doi.org/10.1093/gji/ggs030') gia_Ylms['url'] = 'https://doi.org/10.1093/gji/ggs030' - #-- regular expressions file pattern + # regular expressions file pattern file_pattern = r'stokes\.(ice6g)[\.\_](.*?)(\.txt)?$' - #-- default degree of truncation + # default degree of truncation LMAX = 100 if not kwargs['LMAX'] else kwargs['LMAX'] elif (GIA == 'AW13-IJ05'): - #-- AW13-IJ05: Geruo A IJ05-R2 GIA Models + # AW13-IJ05: Geruo A IJ05-R2 GIA Models prefix = 'AW13_IJ05' gia_Ylms['citation'] = 'A_et_al._(2013)' gia_Ylms['reference'] = ('G. A, J. Wahr, and S. 
Zhong, "Computations of ' @@ -373,250 +373,250 @@ def read_GIA_model(input_file, GIA=None, MMAX=None, DATAFORM=None, **kwargs): 'Antarctica and Canada", Geophysical Journal International, ' '192(2), 557-572, (2013). https://doi.org/10.1093/gji/ggs030') gia_Ylms['url'] = 'https://doi.org/10.1093/gji/ggs030' - #-- regular expressions file pattern + # regular expressions file pattern file_pattern = r'stokes\.(R2)_(.*?)(\_ANT)?$' - #-- default degree of truncation + # default degree of truncation LMAX = 256 if not kwargs['LMAX'] else kwargs['LMAX'] else: - #-- return empty GIA harmonics to degree and order + # return empty GIA harmonics to degree and order LMAX = 60 if not kwargs['LMAX'] else kwargs['LMAX'] - #-- compile numerical expression operator + # compile numerical expression operator rx = re.compile(r'[-+]?(?:(?:\d*\.\d+)|(?:\d+\.?))(?:[Ee][+-]?\d+)?') - #-- Header lines and scale factors for individual models + # Header lines and scale factors for individual models if GIA in ('IJ05-R2', 'ICE6G'): - #-- IJ05 + # IJ05 start = 0 - #-- scale factor for geodesy normalization + # scale factor for geodesy normalization scale = 1e-11 elif (GIA == 'ICE6G-D'): - #-- ICE-6G Version-D - #-- scale factor for geodesy normalization + # ICE-6G Version-D + # scale factor for geodesy normalization scale = 1.0 else: start = 0 scale = 1.0 - #-- initially read for spherical harmonic degree up to LMAX - #-- will truncate to MMAX before exiting program + # initially read for spherical harmonic degree up to LMAX + # will truncate to MMAX before exiting program gia_Ylms['clm'] = np.zeros((LMAX+1,LMAX+1)) gia_Ylms['slm'] = np.zeros((LMAX+1,LMAX+1)) - #-- output spherical harmonic degree and order + # output spherical harmonic degree and order gia_Ylms['l'],gia_Ylms['m'] = (np.arange(LMAX+1),np.arange(LMAX+1)) - #-- Reading GIA files (ICE-6G and Wu have more complex formats) + # Reading GIA files (ICE-6G and Wu have more complex formats) if GIA in ('IJ05-R2', 'W12a', 'SM09', 'AW13-ICE6G', 'AW13-IJ05'): - #-- AW13, IJ05, W12a, SM09 - #-- AW13 notes: file headers - #-- IJ05 notes: need to scale by 1e-11 for geodesy-normalization - #-- exponents are denoted with D for double + # AW13, IJ05, W12a, SM09 + # AW13 notes: file headers + # IJ05 notes: need to scale by 1e-11 for geodesy-normalization + # exponents are denoted with D for double - #-- check that GIA data file is present in file system + # check that GIA data file is present in file system input_file = os.path.expanduser(input_file) if not os.access(input_file, os.F_OK): raise FileNotFoundError(f'{input_file} not found') - #-- log GIA file if debugging + # log GIA file if debugging logging.debug(f'Reading GIA file: {input_file}') - #-- opening gia data file and read contents + # opening gia data file and read contents with open(input_file, mode='r', encoding='utf8') as f: gia_data = f.read().splitlines() - #-- number of lines in file + # number of lines in file gia_lines = len(gia_data) - #-- Skipping file header for geruo files with header + # Skipping file header for geruo files with header for ii in range(start,gia_lines): - #-- check if contents in line + # check if contents in line flag = bool(rx.search(gia_data[ii].replace('D','E'))) if flag: - #-- find numerical instances in line including exponents, - #-- decimal points and negatives - #-- Replacing Double Exponent with Standard Exponent + # find numerical instances in line including exponents, + # decimal points and negatives + # Replacing Double Exponent with Standard Exponent line = 
rx.findall(gia_data[ii].replace('D','E')) l1 = np.int64(line[0]) m1 = np.int64(line[1]) - #-- truncate to LMAX + # truncate to LMAX if (l1 <= LMAX) and (m1 <= LMAX): - #-- scaling to geodesy normalization + # scaling to geodesy normalization gia_Ylms['clm'][l1,m1] = np.float64(line[2])*scale gia_Ylms['slm'][l1,m1] = np.float64(line[3])*scale elif (GIA == 'ICE6G'): - #-- ICE-6G VM5 notes - #-- need to scale by 1e-11 for geodesy-normalization - #-- spherical harmonic degrees listed only on order 0 - #-- spherical harmonic order is not listed in file + # ICE-6G VM5 notes + # need to scale by 1e-11 for geodesy-normalization + # spherical harmonic degrees listed only on order 0 + # spherical harmonic order is not listed in file - #-- check that GIA data file is present in file system + # check that GIA data file is present in file system input_file = os.path.expanduser(input_file) if not os.access(input_file, os.F_OK): raise FileNotFoundError(f'{input_file} not found') - #-- log GIA file if debugging + # log GIA file if debugging logging.debug(f'Reading GIA file: {input_file}') - #-- opening gia data file and read contents + # opening gia data file and read contents with open(input_file, mode='r', encoding='utf8') as f: gia_data = f.read().splitlines() - #-- counter variable + # counter variable ii = 0 for l in range(0, LMAX+1): for m in range(0, l+1): if ((m % 2) == 0): - #-- reading gia line if the order is even - #-- find numerical instances in line including exponents, - #-- decimal points and negatives + # reading gia line if the order is even + # find numerical instances in line including exponents, + # decimal points and negatives line = rx.findall(gia_data[ii]) - #-- counter to next line + # counter to next line ii += 1 - #-- if m is even: clm column = 1, slm column = 2 + # if m is even: clm column = 1, slm column = 2 c = 0 - else: #-- if m is odd: clm column = 3, slm column = 4 + else: # if m is odd: clm column = 3, slm column = 4 c = 2 if ((m == 0) or (m == 1)): - #-- l is column 1 if m == 0 or 1 - #-- degree is not listed for other SHd: column 1 = clm + # l is column 1 if m == 0 or 1 + # degree is not listed for other SHd: column 1 = clm c += 1 if (len(line) > 0): - #-- no empty lines - #-- convert to float and scale + # no empty lines + # convert to float and scale gia_Ylms['clm'][l,m] = np.float64(line[0+c])*scale gia_Ylms['slm'][l,m] = np.float64(line[1+c])*scale elif (GIA == 'Wu10'): - #-- Wu (2010) notes: - #-- Need to convert from mm geoid to fully normalized - rad_e = 6.371e9#-- Average Radius of the Earth [mm] + # Wu (2010) notes: + # Need to convert from mm geoid to fully normalized + rad_e = 6.371e9# Average Radius of the Earth [mm] - #-- check that GIA data file is present in file system + # check that GIA data file is present in file system input_file = os.path.expanduser(input_file) if not os.access(input_file, os.F_OK): raise FileNotFoundError(f'{input_file} not found') - #-- log GIA file if debugging + # log GIA file if debugging logging.debug(f'Reading GIA file: {input_file}') - #-- The file starts with a header. - #-- converting to numerical array (note 64 bit floating point) + # The file starts with a header. 
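        # each value is a single coefficient in mm of geoid height and is
        # converted to a fully normalized Stokes coefficient by dividing
        # by the average radius of the Earth in mm (rad_e)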
+ # converting to numerical array (note 64 bit floating point) gia_data = np.loadtxt(input_file, skiprows=1, dtype='f8') - #-- counter variable to upwrap gia file + # counter variable to unwrap gia file ii = 0 - #-- Order of harmonics in the file: - #-- 1 0 c - #-- 1 1 c - #-- 1 1 s - #-- 2 0 c - #-- 2 1 c + # Order of harmonics in the file: + # 1 0 c + # 1 1 c + # 1 1 s + # 2 0 c + # 2 1 c for l in range(1, LMAX+1): for m in range(0, l+1): for cs in range(0, 2): - #-- unwrapping GIA file and converting to geoid - #-- Clm + # unwrapping GIA file and converting to geoid + # Clm if (cs == 0): gia_Ylms['clm'][l,m] = gia_data[ii]/rad_e ii += 1 - #-- Slm + # Slm if (m != 0) and (cs == 1): gia_Ylms['slm'][l,m] = gia_data[ii]/rad_e ii += 1 elif (GIA == 'Caron'): - #-- Caron et al. (2018) + # Caron et al. (2018) - #-- check that GIA data file is present in file system + # check that GIA data file is present in file system input_file = os.path.expanduser(input_file) if not os.access(input_file, os.F_OK): raise FileNotFoundError(f'{input_file} not found') - #-- log GIA file if debugging + # log GIA file if debugging logging.debug(f'Reading GIA file: {input_file}') - #-- The file starts with a header. - #-- converting to numerical array (note 64 bit floating point) + # The file starts with a header. + # converting to numerical array (note 64 bit floating point) dtype = {'names':('l','m','Ylms'),'formats':('i','i','f8')} gia_data=np.loadtxt(input_file, skiprows=4, dtype=dtype) - #-- Order of harmonics in the file - #-- 0 0 c - #-- 1 1 s - #-- 1 0 c - #-- 1 1 c - #-- 2 2 s - #-- 2 1 s - #-- 2 0 c - #-- 2 1 c - #-- 2 2 c + # Order of harmonics in the file + # 0 0 c + # 1 1 s + # 1 0 c + # 1 1 c + # 2 2 s + # 2 1 s + # 2 0 c + # 2 1 c + # 2 2 c for l,m,Ylm in zip(gia_data['l'],gia_data['m'],gia_data['Ylms']): - #-- unwrapping GIA file - if (m >= 0) and (l <= LMAX) and (m <= LMAX):#-- Clm + # unwrapping GIA file + if (m >= 0) and (l <= LMAX) and (m <= LMAX):# Clm gia_Ylms['clm'][l,m] = Ylm.copy() - elif (m < 0) and (l <= LMAX) and (m <= LMAX):#-- Slm + elif (m < 0) and (l <= LMAX) and (m <= LMAX):# Slm gia_Ylms['slm'][l,np.abs(m)] = Ylm.copy() - #-- Reading ICE-6G Version-D GIA files + # Reading ICE-6G Version-D GIA files elif (GIA == 'ICE6G-D'): - #-- check that GIA data file is present in file system + # check that GIA data file is present in file system input_file = os.path.expanduser(input_file) if not os.access(input_file, os.F_OK): raise FileNotFoundError(f'{input_file} not found') - #-- log GIA file if debugging + # log GIA file if debugging logging.debug(f'Reading GIA file: {input_file}') - #-- opening gia data file and read contents + # opening gia data file and read contents with open(input_file, mode='r', encoding='utf8') as f: gia_data = f.read().splitlines() - #-- number of lines in file + # number of lines in file gia_lines = len(gia_data) - #-- find header lines to skip + # find header lines to skip h1 = r'^GRACE Approximation for degrees 0 to 2' h2 = r'^GRACE Approximation\/Absolute Sea-level Values for degrees \> 2' - #-- header lines to skip + # header lines to skip header, = [(i+1) for i,l in enumerate(gia_data) if re.match(h1,l)] start, = [(i+1) for i,l in enumerate(gia_data) if re.match(h2,l)] - #-- Calculating number of cos and sin harmonics to read from header + # Calculating number of cos and sin harmonics to read from header n_harm = (2**2 + 3*2)//2 + 1 - #-- extract header for GRACE approximation + # extract header for GRACE approximation for ii in range(header,header+n_harm): - 
#-- check if contents in line + # check if contents in line flag = bool(rx.search(gia_data[ii].replace('D','E'))) if flag: - #-- find numerical instances in line including exponents, - #-- decimal points and negatives - #-- Replacing Double Exponent with Standard Exponent + # find numerical instances in line including exponents, + # decimal points and negatives + # Replacing Double Exponent with Standard Exponent line = rx.findall(gia_data[ii].replace('D','E')) l1 = np.int64(line[0]) m1 = np.int64(line[1]) - #-- truncate to LMAX + # truncate to LMAX if (l1 <= LMAX) and (m1 <= LMAX): - #-- scaling to geodesy normalization + # scaling to geodesy normalization gia_Ylms['clm'][l1,m1] = np.float64(line[2])*scale gia_Ylms['slm'][l1,m1] = np.float64(line[3])*scale - #-- Skipping rest of file header + # Skipping rest of file header for ii in range(start,gia_lines): - #-- check if contents in line + # check if contents in line flag = bool(rx.search(gia_data[ii].replace('D','E'))) if flag: - #-- find numerical instances in line including exponents, - #-- decimal points and negatives - #-- Replacing Double Exponent with Standard Exponent + # find numerical instances in line including exponents, + # decimal points and negatives + # Replacing Double Exponent with Standard Exponent line = rx.findall(gia_data[ii].replace('D','E')) l1 = np.int64(line[0]) m1 = np.int64(line[1]) - #-- truncate to LMAX + # truncate to LMAX if (l1 <= LMAX) and (m1 <= LMAX): - #-- scaling to geodesy normalization + # scaling to geodesy normalization gia_Ylms['clm'][l1,m1] = np.float64(line[2])*scale gia_Ylms['slm'][l1,m1] = np.float64(line[3])*scale # ascii: reformatted GIA in ascii format elif (GIA == 'ascii'): - #-- log GIA file if debugging + # log GIA file if debugging logging.debug(f'Reading GIA file: {input_file}') - #-- reading GIA data from reformatted (simplified) ascii files + # reading GIA data from reformatted (simplified) ascii files Ylms = gravity_toolkit.harmonics().from_ascii(input_file, date=False) Ylms.truncate(LMAX) gia_Ylms.update(Ylms.to_dict()) - #-- copy filename (without extension) for parameters + # copy filename (without extension) for parameters gia_Ylms['title'] = os.path.basename(os.path.splitext(input_file)[0]) gia_Ylms['citation'] = None gia_Ylms['reference'] = None @@ -625,89 +625,89 @@ def read_GIA_model(input_file, GIA=None, MMAX=None, DATAFORM=None, **kwargs): # netCDF4: reformatted GIA in netCDF4 format # HDF5: reformatted GIA in HDF5 format elif GIA in ('netCDF4','HDF5'): - #-- log GIA file if debugging + # log GIA file if debugging logging.debug(f'Reading GIA file: {input_file}') - #-- reading GIA data from reformatted netCDF4 and HDF5 files + # reading GIA data from reformatted netCDF4 and HDF5 files Ylms = gravity_toolkit.harmonics().from_file(input_file, format=GIA, date=False) Ylms.truncate(LMAX) gia_Ylms.update(Ylms.to_dict()) - #-- copy title and reference for model + # copy title and reference for model for att_name in ('title','citation','reference','url'): try: gia_Ylms[att_name] = Ylms.attributes[att_name] except Exception as e: gia_Ylms[att_name] = None - #-- GIA model parameter strings - #-- extract rheology from the file name + # GIA model parameter strings + # extract rheology from the file name if (GIA == 'IJ05-R2'): - #-- IJ05-R2: Ivins R2 GIA Models - #-- adding file specific earth parameters + # IJ05-R2: Ivins R2 GIA Models + # adding file specific earth parameters parameters, = re.findall(file_pattern,os.path.basename(input_file)) gia_Ylms['title'] = 
f'{prefix}_{parameters}' elif (GIA == 'ICE6G'): - #-- ICE6G: ICE-6G GIA Models - #-- adding file specific earth parameters + # ICE6G: ICE-6G GIA Models + # adding file specific earth parameters parameters, = re.findall(file_pattern,os.path.basename(input_file)) gia_Ylms['title'] = f'{prefix}_{parameters}' elif (GIA == 'W12a'): - #-- W12a: Whitehouse GIA Models - #-- for Whitehouse W12a (BEST, LOWER, UPPER): + # W12a: Whitehouse GIA Models + # for Whitehouse W12a (BEST, LOWER, UPPER): model = re.findall(file_pattern,os.path.basename(input_file)).pop() gia_Ylms['title'] = f'{prefix}_{parameters[model]}' elif (GIA == 'SM09'): - #-- SM09: Simpson/Milne GIA Models - #-- making parameters in the file similar to IJ05 - #-- split rheological parameters between lithospheric thickness, - #-- upper mantle viscosity and lower mantle viscosity + # SM09: Simpson/Milne GIA Models + # making parameters in the file similar to IJ05 + # split rheological parameters between lithospheric thickness, + # upper mantle viscosity and lower mantle viscosity LTh,UMV,LMV=re.findall(file_pattern,os.path.basename(input_file)).pop() - #-- formatting rheology parameters similar to IJ05 models + # formatting rheology parameters similar to IJ05 models gia_Ylms['title'] = f'{prefix}_{LTh}_.{UMV}_{LMV}' elif (GIA == 'Wu10'): - #-- Wu10: Wu (2010) GIA Correction + # Wu10: Wu (2010) GIA Correction gia_Ylms['title'] = 'Wu_2010' elif (GIA == 'Caron'): - #-- Caron: Caron JPL GIA Assimilation + # Caron: Caron JPL GIA Assimilation gia_Ylms['title'] = 'Caron_expt' elif (GIA == 'ICE6G-D'): - #-- ICE6G-D: ICE-6G Version-D GIA Models - #-- adding file specific earth parameters + # ICE6G-D: ICE-6G Version-D GIA Models + # adding file specific earth parameters m1,p1,p2 = re.findall(file_pattern,os.path.basename(input_file)).pop() gia_Ylms['title'] = f'{prefix}_{p1}{p2}' elif (GIA == 'AW13-ICE6G'): - #-- AW13-ICE6G: Geruo A ICE-6G GIA Models - #-- extract the ice history and case flags + # AW13-ICE6G: Geruo A ICE-6G GIA Models + # extract the ice history and case flags hist,case,sf=re.findall(file_pattern,os.path.basename(input_file)).pop() gia_Ylms['title'] = f'{prefix}_{hist}_{case}' elif (GIA == 'AW13-IJ05'): - #-- AW13-IJ05: Geruo A IJ05-R2 GIA Models - #-- adding file specific earth parameters + # AW13-IJ05: Geruo A IJ05-R2 GIA Models + # adding file specific earth parameters vrs,param,aux=re.findall(file_pattern,os.path.basename(input_file)).pop() gia_Ylms['title'] = f'{prefix}_{vrs}_{param}' - #-- output harmonics to ascii, netCDF4 or HDF5 file + # output harmonics to ascii, netCDF4 or HDF5 file if DATAFORM in ('ascii', 'netCDF4', 'HDF5'): - #-- convert dictionary to harmonics object + # convert dictionary to harmonics object Ylms = gravity_toolkit.harmonics().from_dict(gia_Ylms) - #-- output harmonics to file + # output harmonics to file suffix = dict(ascii='txt', netCDF4='nc', HDF5='H5') args = (gia_Ylms['title'], LMAX, suffix[DATAFORM]) output_file = 'stokes_{0}_L{1:d}.{2}'.format(*args) Ylms.to_file(os.path.join(os.path.dirname(input_file),output_file), format=DATAFORM, title=gia_Ylms['title'], reference=gia_Ylms['reference'], date=False) - #-- set permissions level of output file + # set permissions level of output file os.chmod(os.path.join(os.path.dirname(input_file),output_file), mode=kwargs['MODE']) - #-- truncate to MMAX if specified + # truncate to MMAX if specified if MMAX is not None: - #-- spherical harmonic variables + # spherical harmonic variables gia_Ylms['clm'] = gia_Ylms['clm'][:,:MMAX+1] gia_Ylms['slm'] = 
gia_Ylms['slm'][:,:MMAX+1] - #-- spherical harmonic order + # spherical harmonic order gia_Ylms['m'] = gia_Ylms['m'][:MMAX+1] - #-- return the harmonics and the parameters + # return the harmonics and the parameters return gia_Ylms diff --git a/gravity_toolkit/read_GRACE_harmonics.py b/gravity_toolkit/read_GRACE_harmonics.py index ceebb3ca..5d2fa8bf 100644 --- a/gravity_toolkit/read_GRACE_harmonics.py +++ b/gravity_toolkit/read_GRACE_harmonics.py @@ -71,7 +71,7 @@ import numpy as np import gravity_toolkit.time -#-- PURPOSE: read Level-2 GRACE and GRACE-FO spherical harmonic files +# PURPOSE: read Level-2 GRACE and GRACE-FO spherical harmonic files def read_GRACE_harmonics(input_file, LMAX, **kwargs): """ Extracts spherical harmonic coefficients from GRACE/GRACE-FO files @@ -118,58 +118,58 @@ def read_GRACE_harmonics(input_file, LMAX, **kwargs): *Journal of Geophysical Research: Solid Earth*, 120(6), 4597--4615, (2015). `doi: 10.1002/2015JB011986 `_ """ - #-- set default keyword arguments + # set default keyword arguments kwargs.setdefault('MMAX', None) kwargs.setdefault('POLE_TIDE', False) - #-- parse filename + # parse filename PFX,SY,SD,EY,ED,N,PRC,F1,DRL,F2,SFX = parse_file(input_file) - #-- check if file is compressed + # check if file is compressed compressed = (SFX == '.gz') - #-- extract file contents + # extract file contents file_contents = extract_file(input_file, compressed) - #-- JPL mascon solutions in spherical harmonic form + # JPL mascon solutions in spherical harmonic form if PRC in ('JPLMSC',): DSET = 'GSM' DREL = np.int64(DRL) FLAG = r'GRCOF2' - #-- Kusche et al. (2009) DDK filtered solutions - #-- https://doi.org/10.1007/s00190-009-0308-3 + # Kusche et al. (2009) DDK filtered solutions + # https://doi.org/10.1007/s00190-009-0308-3 elif PFX.startswith('kfilter_DDK'): DSET = 'GSM' DREL = np.int64(DRL) FLAG = r'gfc' - #-- COST-G unfiltered combination solutions - #-- https://doi.org/10.5880/ICGEM.COST-G.001 + # COST-G unfiltered combination solutions + # https://doi.org/10.5880/ICGEM.COST-G.001 elif PRC in ('COSTG',): DSET, = re.findall(r'GSM|GAC',PFX) DREL = np.int64(DRL) FLAG = r'gfc' - #-- Standard GRACE/GRACE-FO Level-2 solutions + # Standard GRACE/GRACE-FO Level-2 solutions else: DSET = PFX DREL = np.int64(DRL) FLAG = r'GRCOF2' - #-- output python dictionary with GRACE/GRACE-FO data and metadata + # output python dictionary with GRACE/GRACE-FO data and metadata grace_L2_input = {} - #-- extract GRACE/GRACE-FO date information from input file name + # extract GRACE/GRACE-FO date information from input file name start_yr = np.float64(SY) end_yr = np.float64(EY) start_day = np.float64(SD) end_day = np.float64(ED) - #-- calculate mid-month date taking into account if measurements are - #-- on different years + # calculate mid-month date taking into account if measurements are + # on different years dpy = gravity_toolkit.time.calendar_days(start_yr).sum() - #-- For data that crosses years (end_yr - start_yr should be at most 1) + # For data that crosses years (end_yr - start_yr should be at most 1) end_cyclic = ((end_yr - start_yr)*dpy+end_day) - #-- Calculate mid-month value + # Calculate mid-month value mid_day = np.mean([start_day, end_cyclic]) - #-- Calculating the mid-month date in decimal form + # Calculating the mid-month date in decimal form grace_L2_input['time'] = start_yr + mid_day/dpy - #-- Calculating the Julian dates of the start and end date + # Calculating the Julian dates of the start and end date grace_L2_input['start'] = 2400000.5 + \ 
gravity_toolkit.time.convert_calendar_dates(start_yr,1.0,start_day, epoch=(1858,11,17,0,0,0)) @@ -177,117 +177,117 @@ def read_GRACE_harmonics(input_file, LMAX, **kwargs): gravity_toolkit.time.convert_calendar_dates(end_yr,1.0,end_day, epoch=(1858,11,17,0,0,0)) - #-- set maximum spherical harmonic order + # set maximum spherical harmonic order MMAX = np.copy(LMAX) if (kwargs['MMAX'] is None) else np.copy(kwargs['MMAX']) - #-- output dimensions + # output dimensions grace_L2_input['l'] = np.arange(LMAX+1) grace_L2_input['m'] = np.arange(MMAX+1) - #-- Spherical harmonic coefficient matrices to be filled from data file + # Spherical harmonic coefficient matrices to be filled from data file grace_L2_input['clm'] = np.zeros((LMAX+1,MMAX+1)) grace_L2_input['slm'] = np.zeros((LMAX+1,MMAX+1)) - #-- spherical harmonic uncalibrated standard deviations + # spherical harmonic uncalibrated standard deviations grace_L2_input['eclm'] = np.zeros((LMAX+1,MMAX+1)) grace_L2_input['eslm'] = np.zeros((LMAX+1,MMAX+1)) if ((DREL == 4) and (DSET == 'GSM')): - #-- clm and slm drift rates for RL04 + # clm and slm drift rates for RL04 drift_c = np.zeros((LMAX+1,MMAX+1)) drift_s = np.zeros((LMAX+1,MMAX+1)) - #-- extract GRACE and GRACE-FO file headers - #-- replace colons in header if within quotations + # extract GRACE and GRACE-FO file headers + # replace colons in header if within quotations head = [re.sub(r'\"(.*?)\:\s(.*?)\"',r'"\1, \2"',l) for l in file_contents if not re.match(rf'{FLAG}|GRDOTA',l)] if SFX in ('.gfc',): - #-- extract parameters from header + # extract parameters from header header_parameters = ['modelname','earth_gravity_constant','radius', 'max_degree','errors','norm','tide_system'] header_regex = re.compile(r'(' + r'|'.join(header_parameters) + r')') grace_L2_input['header'] = [l for l in head if header_regex.match(l)] elif ((N == 'GRAC') and (DREL >= 6)) or (N == 'GRFO'): - #-- parse the YAML header for RL06 or GRACE-FO (specifying yaml loader) + # parse the YAML header for RL06 or GRACE-FO (specifying yaml loader) grace_L2_input.update(yaml.load('\n'.join(head),Loader=yaml.BaseLoader)) else: - #-- save lines of the GRACE file header removing empty lines + # save lines of the GRACE file header removing empty lines grace_L2_input['header'] = [l.rstrip() for l in head if l] - #-- for each line in the GRACE/GRACE-FO file + # for each line in the GRACE/GRACE-FO file for line in file_contents: - #-- find if line starts with data marker flag (e.g. GRCOF2) + # find if line starts with data marker flag (e.g. 
GRCOF2) if bool(re.match(FLAG,line)): - #-- split the line into individual components + # split the line into individual components line_contents = line.split() - #-- degree and order for the line + # degree and order for the line l1 = np.int64(line_contents[1]) m1 = np.int64(line_contents[2]) - #-- if degree and order are below the truncation limits + # if degree and order are below the truncation limits if ((l1 <= LMAX) and (m1 <= MMAX)): grace_L2_input['clm'][l1,m1] = np.float64(line_contents[3]) grace_L2_input['slm'][l1,m1] = np.float64(line_contents[4]) grace_L2_input['eclm'][l1,m1] = np.float64(line_contents[5]) grace_L2_input['eslm'][l1,m1] = np.float64(line_contents[6]) - #-- find if line starts with drift rate flag + # find if line starts with drift rate flag elif bool(re.match(r'GRDOTA',line)): - #-- split the line into individual components + # split the line into individual components line_contents = line.split() l1 = np.int64(line_contents[1]) m1 = np.int64(line_contents[2]) - #-- Reading Drift rates for low degree harmonics + # Reading Drift rates for low degree harmonics drift_c[l1,m1] = np.float64(line_contents[3]) drift_s[l1,m1] = np.float64(line_contents[4]) - #-- Adding drift rates to clm and slm for RL04 - #-- if drift rates exist at any time, will add to harmonics - #-- Will convert the secular rates into a stokes contribution - #-- Currently removes 2003.3 to get the temporal average close to 0. + # Adding drift rates to clm and slm for RL04 + # if drift rates exist at any time, will add to harmonics + # Will convert the secular rates into a stokes contribution + # Currently removes 2003.3 to get the temporal average close to 0. if ((DREL == 4) and (DSET == 'GSM')): - #-- time since 2003.3 + # time since 2003.3 dt = (grace_L2_input['time'] - 2003.3) grace_L2_input['clm'][:,:] += dt*drift_c[:,:] grace_L2_input['slm'][:,:] += dt*drift_s[:,:] - #-- Correct Pole Tide following Wahr et al. (2015) 10.1002/2015JB011986 + # Correct Pole Tide following Wahr et al. 
(2015) 10.1002/2015JB011986 if kwargs['POLE_TIDE'] and (DSET == 'GSM'): - #-- time since 2000.0 + # time since 2000.0 dt = (grace_L2_input['time']-2000.0) - #-- CSR and JPL Pole Tide Correction + # CSR and JPL Pole Tide Correction if PRC in ('UTCSR','JPLEM','JPLMSC'): - #-- values for IERS mean pole [2010] + # values for IERS mean pole [2010] if (grace_L2_input['time'] < 2010.0): a = np.array([0.055974,1.8243e-3,1.8413e-4,7.024e-6]) b = np.array([-0.346346,-1.7896e-3,1.0729e-4,0.908e-6]) elif (grace_L2_input['time'] >= 2010.0): a = np.array([0.023513,7.6141e-3,0.0,0.0]) b = np.array([-0.358891,0.6287e-3,0.0,0.0]) - #-- calculate m1 and m2 values + # calculate m1 and m2 values m1 = np.copy(a[0]) m2 = np.copy(b[0]) for x in range(1,4): m1 += a[x]*dt**x m2 += b[x]*dt**x - #-- pole tide values for CSR and JPL - #-- CSR and JPL both remove the IERS mean pole from m1 and m2 - #-- before computing their harmonic solutions + # pole tide values for CSR and JPL + # CSR and JPL both remove the IERS mean pole from m1 and m2 + # before computing their harmonic solutions C21_PT = -1.551e-9*(m1 - 0.62e-3*dt) - 0.012e-9*(m2 + 3.48e-3*dt) S21_PT = 0.021e-9*(m1 - 0.62e-3*dt) - 1.505e-9*(m2 + 3.48e-3*dt) - #-- correct GRACE/GRACE-FO spherical harmonics for pole tide + # correct GRACE/GRACE-FO spherical harmonics for pole tide grace_L2_input['clm'][2,1] -= C21_PT grace_L2_input['slm'][2,1] -= S21_PT - #-- GFZ Pole Tide Correction + # GFZ Pole Tide Correction elif PRC in ('EIGEN','GFZOP'): - #-- pole tide values for GFZ - #-- GFZ removes only a constant pole position + # pole tide values for GFZ + # GFZ removes only a constant pole position C21_PT = -1.551e-9*(-0.62e-3*dt) - 0.012e-9*(3.48e-3*dt) S21_PT = 0.021e-9*(-0.62e-3*dt) - 1.505e-9*(3.48e-3*dt) - #-- correct GRACE/GRACE-FO spherical harmonics for pole tide + # correct GRACE/GRACE-FO spherical harmonics for pole tide grace_L2_input['clm'][2,1] -= C21_PT grace_L2_input['slm'][2,1] -= S21_PT - #-- return the header data, GRACE/GRACE-FO data - #-- GRACE/GRACE-FO date (mid-month in decimal) - #-- and the start and end days as Julian dates + # return the header data, GRACE/GRACE-FO data + # GRACE/GRACE-FO date (mid-month in decimal) + # and the start and end days as Julian dates return grace_L2_input -#-- PURPOSE: extract parameters from filename +# PURPOSE: extract parameters from filename def parse_file(input_file): """ Extract parameters from filename @@ -297,25 +297,25 @@ def parse_file(input_file): input_file: str GRACE/GRACE-FO Level-2 spherical harmonic data file """ - #-- compile numerical expression operator for parameters from files - #-- UTCSR: The University of Texas at Austin Center for Space Research - #-- EIGEN: GFZ German Research Center for Geosciences (RL01-RL05) - #-- GFZOP: GFZ German Research Center for Geosciences (RL06+GRACE-FO) - #-- JPLEM: NASA Jet Propulsion Laboratory (harmonic solutions) - #-- JPLMSC: NASA Jet Propulsion Laboratory (mascon solutions) - #-- GRGS: French Centre National D'Etudes Spatiales (CNES) - #-- COSTG: International Combined Time-variable Gravity Fields + # compile numerical expression operator for parameters from files + # UTCSR: The University of Texas at Austin Center for Space Research + # EIGEN: GFZ German Research Center for Geosciences (RL01-RL05) + # GFZOP: GFZ German Research Center for Geosciences (RL06+GRACE-FO) + # JPLEM: NASA Jet Propulsion Laboratory (harmonic solutions) + # JPLMSC: NASA Jet Propulsion Laboratory (mascon solutions) + # GRGS: French Centre National D'Etudes Spatiales (CNES) + # 
COSTG: International Combined Time-variable Gravity Fields args = r'UTCSR|EIGEN|GFZOP|JPLEM|JPLMSC|GRGS|COSTG' regex_pattern = (r'(.*?)-2_(\d{{4}})(\d{{3}})-(\d{{4}})(\d{{3}})_' r'(.*?)_({0})_(.*?)_(\d+)(.*?)(\.gz|\.gfc)?$').format(args) rx = re.compile(regex_pattern, re.VERBOSE) - #-- extract parameters from input filename + # extract parameters from input filename if isinstance(input_file, io.IOBase): return rx.findall(input_file.filename).pop() else: return rx.findall(os.path.basename(input_file)).pop() -#-- PURPOSE: read input file and extract contents +# PURPOSE: read input file and extract contents def extract_file(input_file, compressed): """ Read input file and extract contents @@ -327,19 +327,19 @@ def extract_file(input_file, compressed): compressed: bool denotes if the file is compressed """ - #-- tilde expansion of input file if not byteIO object + # tilde expansion of input file if not byteIO object if not isinstance(input_file, io.IOBase): input_file = os.path.expanduser(input_file) - #-- check that data file is present in file system + # check that data file is present in file system if not os.access(input_file, os.F_OK): raise FileNotFoundError(f'{input_file} not found') - #-- check if file is uncompressed byteIO object + # check if file is uncompressed byteIO object if isinstance(input_file, io.IOBase) and not compressed: - #-- extract spherical harmonic coefficients + # extract spherical harmonic coefficients return input_file.read().decode('ISO-8859-1').splitlines() else: - #-- check if file is compressed (read with gzip if gz) + # check if file is compressed (read with gzip if gz) file_opener = gzip.open if compressed else open - #-- opening data file to extract spherical harmonic coefficients + # opening data file to extract spherical harmonic coefficients with file_opener(input_file, 'rb') as f: return f.read().decode('ISO-8859-1').splitlines() diff --git a/gravity_toolkit/read_ICGEM_harmonics.py b/gravity_toolkit/read_ICGEM_harmonics.py index b8f98d27..137be275 100644 --- a/gravity_toolkit/read_ICGEM_harmonics.py +++ b/gravity_toolkit/read_ICGEM_harmonics.py @@ -43,7 +43,7 @@ import warnings import geoid_toolkit.read_ICGEM_harmonics -#-- PURPOSE: read spherical harmonic coefficients of a gravity model +# PURPOSE: read spherical harmonic coefficients of a gravity model def read_ICGEM_harmonics(*args,**kwargs): warnings.filterwarnings("always") warnings.warn("Deprecated. 
Please use geoid toolkit instead", diff --git a/gravity_toolkit/read_SLR_C20.py b/gravity_toolkit/read_SLR_C20.py index 6abfe3af..1cdfcfd7 100644 --- a/gravity_toolkit/read_SLR_C20.py +++ b/gravity_toolkit/read_SLR_C20.py @@ -111,7 +111,7 @@ import numpy as np import gravity_toolkit.time -#-- PURPOSE: read oblateness data from Satellite Laser Ranging (SLR) +# PURPOSE: read oblateness data from Satellite Laser Ranging (SLR) def read_SLR_C20(SLR_file, AOD=True, HEADER=True): """ Reads C20 spherical harmonic coefficients from SLR measurements @@ -138,312 +138,312 @@ def read_SLR_C20(SLR_file, AOD=True, HEADER=True): date of SLR measurement """ - #-- check that SLR file exists + # check that SLR file exists if not os.access(os.path.expanduser(SLR_file), os.F_OK): raise FileNotFoundError('SLR file not found in file system') - #-- output dictionary with data variables + # output dictionary with data variables dinput = {} - #-- determine if imported file is from PO.DAAC or CSR + # determine if imported file is from PO.DAAC or CSR if bool(re.search(r'C20_RL\d+',SLR_file,re.I)): - #-- SLR C20 file from CSR - #-- Just for checking new months when TN series isn't up to date as the - #-- SLR estimates always use the full set of days in each calendar month. - #-- format of the input file (note 64 bit floating point for C20) - #-- Column 1: Approximate mid-point of monthly solution (years) - #-- Column 2: C20 from SLR (normalized) - #-- Column 3: Delta C20 relative to a mean value (1E-10) - #-- Column 4: Solution sigma (1E-10) - #-- Column 5: Mean value of Atmosphere-Ocean De-aliasing model (1E-10) - #-- Columns 6-7: Start and end dates of data used in solution + # SLR C20 file from CSR + # Just for checking new months when TN series isn't up to date as the + # SLR estimates always use the full set of days in each calendar month. + # format of the input file (note 64 bit floating point for C20) + # Column 1: Approximate mid-point of monthly solution (years) + # Column 2: C20 from SLR (normalized) + # Column 3: Delta C20 relative to a mean value (1E-10) + # Column 4: Solution sigma (1E-10) + # Column 5: Mean value of Atmosphere-Ocean De-aliasing model (1E-10) + # Columns 6-7: Start and end dates of data used in solution dtype = {} dtype['names'] = ('time','C20','delta','sigma','AOD','start','end') dtype['formats'] = ('f','f8','f','f','f','f','f') - #-- header text is commented and won't be read + # header text is commented and won't be read file_input = np.loadtxt(os.path.expanduser(SLR_file),dtype=dtype) - #-- date and GRACE/GRACE-FO month + # date and GRACE/GRACE-FO month dinput['time'] = file_input['time'] dinput['month'] = gravity_toolkit.time.calendar_to_grace(dinput['time']) - #-- monthly spherical harmonic replacement solutions + # monthly spherical harmonic replacement solutions dinput['data'] = file_input['C20'].copy() - #-- monthly spherical harmonic formal standard deviations + # monthly spherical harmonic formal standard deviations dinput['error'] = file_input['sigma']*1e-10 - #-- Background gravity model includes solid earth and ocean tides, solid - #-- earth and ocean pole tides, and the Atmosphere-Ocean De-aliasing - #-- product. The monthly mean of the AOD model has been restored. + # Background gravity model includes solid earth and ocean tides, solid + # earth and ocean pole tides, and the Atmosphere-Ocean De-aliasing + # product. The monthly mean of the AOD model has been restored. 
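        # the AOD values in Column 5 are stored in units of 1E-10 and are
        # rescaled before being removed from the normalized C20 values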
if AOD: - #-- Removing AOD product that was restored in the solution + # Removing AOD product that was restored in the solution dinput['data'] -= file_input['AOD']*1e-10 elif bool(re.search(r'GFZ_(RL\d+)_C20_SLR',SLR_file,re.I)): - #-- SLR C20 file from GFZ - #-- Column 1: MJD of BEGINNING of solution span - #-- Column 2: Year and fraction of year of BEGINNING of solution span - #-- Column 3: Replacement C(2,0) - #-- Column 4: Replacement C(2,0) - mean C(2,0) (1.0E-10) - #-- Column 5: C(2,0) formal error (1.0E-10) + # SLR C20 file from GFZ + # Column 1: MJD of BEGINNING of solution span + # Column 2: Year and fraction of year of BEGINNING of solution span + # Column 3: Replacement C(2,0) + # Column 4: Replacement C(2,0) - mean C(2,0) (1.0E-10) + # Column 5: C(2,0) formal error (1.0E-10) with open(os.path.expanduser(SLR_file), mode='r', encoding='utf8') as f: file_contents = f.read().splitlines() - #-- number of lines contained in the file + # number of lines contained in the file file_lines = len(file_contents) - #-- counts the number of lines in the header + # counts the number of lines in the header count = 0 - #-- Reading over header text + # Reading over header text while HEADER: - #-- file line at count + # file line at count line = file_contents[count] - #-- find PRODUCT: within line to set HEADER flag to False when found + # find PRODUCT: within line to set HEADER flag to False when found HEADER = not bool(re.match(r'PRODUCT:+',line)) - #-- add 1 to counter + # add 1 to counter count += 1 - #-- number of months within the file + # number of months within the file n_mon = file_lines - count - #-- date and GRACE/GRACE-FO month + # date and GRACE/GRACE-FO month dinput['time'] = np.zeros((n_mon)) dinput['month'] = np.zeros((n_mon),dtype=np.int64) - #-- monthly spherical harmonic replacement solutions + # monthly spherical harmonic replacement solutions dinput['data'] = np.zeros((n_mon)) - #-- monthly spherical harmonic formal standard deviations + # monthly spherical harmonic formal standard deviations dinput['error'] = np.zeros((n_mon)) - #-- time count + # time count t = 0 - #-- for every other line: + # for every other line: for line in file_contents[count:]: - #-- find numerical instances in line including exponents, - #-- decimal points and negatives + # find numerical instances in line including exponents, + # decimal points and negatives line_contents = re.findall(r'[-+]?\d*\.\d*(?:[eE][-+]?\d+)?',line) - #-- check if line has G* or Gm flags + # check if line has G* or Gm flags if bool(re.search(r'(G\*|Gm)',line)): - #-- reading decimal year for start of span + # reading decimal year for start of span dinput['time'][t] = np.float64(line_contents[1]) - #-- Spherical Harmonic data for line + # Spherical Harmonic data for line dinput['data'][t] = np.float64(line_contents[2]) dinput['error'][t] = np.float64(line_contents[4])*1e-10 - #-- GRACE/GRACE-FO month of SLR solutions + # GRACE/GRACE-FO month of SLR solutions dinput['month'][t] = gravity_toolkit.time.calendar_to_grace( dinput['time'][t], around=np.round) - #-- add to t count + # add to t count t += 1 - #-- truncate variables if necessary + # truncate variables if necessary for key,val in dinput.items(): dinput[key] = val[:t] elif bool(re.search(r'GRAVIS-2B_GFZOP',SLR_file,re.I)): - #-- Combined GRACE/SLR solution file produced by GFZ - #-- Column 1: MJD of BEGINNING of solution data span - #-- Column 2: Year and fraction of year of BEGINNING of solution span - #-- Column 3: Replacement C(2,0) - #-- Column 4: Replacement C(2,0) 
- mean C(2,0) (1.0E-10) - #-- Column 5: C(2,0) formal standard deviation (1.0E-12) + # Combined GRACE/SLR solution file produced by GFZ + # Column 1: MJD of BEGINNING of solution data span + # Column 2: Year and fraction of year of BEGINNING of solution span + # Column 3: Replacement C(2,0) + # Column 4: Replacement C(2,0) - mean C(2,0) (1.0E-10) + # Column 5: C(2,0) formal standard deviation (1.0E-12) with open(os.path.expanduser(SLR_file), mode='r', encoding='utf8') as f: file_contents = f.read().splitlines() - #-- number of lines contained in the file + # number of lines contained in the file file_lines = len(file_contents) - #-- counts the number of lines in the header + # counts the number of lines in the header count = 0 - #-- Reading over header text + # Reading over header text while HEADER: - #-- file line at count + # file line at count line = file_contents[count] - #-- find PRODUCT: within line to set HEADER flag to False when found + # find PRODUCT: within line to set HEADER flag to False when found HEADER = not bool(re.match(r'PRODUCT:+',line)) - #-- add 1 to counter + # add 1 to counter count += 1 - #-- number of months within the file + # number of months within the file n_mon = file_lines - count - #-- date and GRACE/GRACE-FO month + # date and GRACE/GRACE-FO month dinput['time'] = np.zeros((n_mon)) dinput['month'] = np.zeros((n_mon),dtype=int) - #-- monthly spherical harmonic replacement solutions + # monthly spherical harmonic replacement solutions dinput['data'] = np.zeros((n_mon)) - #-- monthly spherical harmonic formal standard deviations + # monthly spherical harmonic formal standard deviations dinput['error'] = np.zeros((n_mon)) - #-- time count + # time count t = 0 - #-- for every other line: + # for every other line: for line in file_contents[count:]: - #-- find numerical instances in line including exponents, - #-- decimal points and negatives + # find numerical instances in line including exponents, + # decimal points and negatives line_contents = re.findall(r'[-+]?\d*\.\d*(?:[eE][-+]?\d+)?',line) count = len(line_contents) - #-- check for empty lines + # check for empty lines if (count > 0): - #-- reading decimal year for start of span + # reading decimal year for start of span dinput['time'][t] = np.float64(line_contents[1]) - #-- Spherical Harmonic data for line + # Spherical Harmonic data for line dinput['data'][t] = np.float64(line_contents[2]) dinput['error'][t] = np.float64(line_contents[4])*1e-10 - #-- GRACE/GRACE-FO month of SLR solutions + # GRACE/GRACE-FO month of SLR solutions dinput['month'][t] = gravity_toolkit.time.calendar_to_grace( dinput['time'][t], around=np.round) - #-- add to t count + # add to t count t += 1 - #-- truncate variables if necessary + # truncate variables if necessary for key,val in dinput.items(): dinput[key] = val[:t] elif bool(re.search(r'TN-(11|14)',SLR_file,re.I)): - #-- SLR C20 RL06 file from PO.DAAC + # SLR C20 RL06 file from PO.DAAC with open(os.path.expanduser(SLR_file), mode='r', encoding='utf8') as f: file_contents = f.read().splitlines() - #-- number of lines contained in the file + # number of lines contained in the file file_lines = len(file_contents) - #-- counts the number of lines in the header + # counts the number of lines in the header count = 0 - #-- Reading over header text + # Reading over header text while HEADER: - #-- file line at count + # file line at count line = file_contents[count] - #-- find PRODUCT: within line to set HEADER flag to False when found + # find PRODUCT: within line to set HEADER 
flag to False when found HEADER = not bool(re.match(r'PRODUCT:+',line,re.IGNORECASE)) - #-- add 1 to counter + # add 1 to counter count += 1 - #-- number of months within the file + # number of months within the file n_mon = file_lines - count - #-- date and GRACE/GRACE-FO month + # date and GRACE/GRACE-FO month dinput['time'] = np.zeros((n_mon)) dinput['month'] = np.zeros((n_mon),dtype=np.int64) - #-- monthly spherical harmonic replacement solutions + # monthly spherical harmonic replacement solutions dinput['data'] = np.zeros((n_mon)) - #-- monthly spherical harmonic formal standard deviations + # monthly spherical harmonic formal standard deviations dinput['error'] = np.zeros((n_mon)) - #-- time count + # time count t = 0 - #-- for every other line: + # for every other line: for line in file_contents[count:]: - #-- find numerical instances in line including exponents, - #-- decimal points and negatives + # find numerical instances in line including exponents, + # decimal points and negatives line_contents = re.findall(r'[-+]?\d*\.\d*(?:[eE][-+]?\d+)?',line) - #-- check for empty lines as there are - #-- slight differences in RL04 TN-05_C20_SLR.txt - #-- with blanks between the PRODUCT: line and the data + # check for empty lines as there are + # slight differences in RL04 TN-05_C20_SLR.txt + # with blanks between the PRODUCT: line and the data count = len(line_contents) - #-- if count is greater than 0 + # if count is greater than 0 if (count > 0): - #-- modified julian date for line + # modified julian date for line MJD = np.float64(line_contents[0]) - #-- converting from MJD into month, day and year + # converting from MJD into month, day and year YY,MM,DD,hh,mm,ss = gravity_toolkit.time.convert_julian( MJD+2400000.5, format='tuple') - #-- converting from month, day, year into decimal year + # converting from month, day, year into decimal year dinput['time'][t] = gravity_toolkit.time.convert_calendar_decimal( YY, MM, day=DD, hour=hh) - #-- Spherical Harmonic data for line + # Spherical Harmonic data for line dinput['data'][t] = np.float64(line_contents[2]) dinput['error'][t] = np.float64(line_contents[4])*1e-10 - #-- GRACE/GRACE-FO month of SLR solutions + # GRACE/GRACE-FO month of SLR solutions dinput['month'][t] = gravity_toolkit.time.calendar_to_grace( dinput['time'][t], around=np.round) - #-- add to t count + # add to t count t += 1 - #-- truncate variables if necessary + # truncate variables if necessary for key,val in dinput.items(): dinput[key] = val[:t] else: - #-- SLR C20 file from PO.DAAC + # SLR C20 file from PO.DAAC with open(os.path.expanduser(SLR_file), mode='r', encoding='utf8') as f: file_contents = f.read().splitlines() - #-- number of lines contained in the file + # number of lines contained in the file file_lines = len(file_contents) - #-- counts the number of lines in the header + # counts the number of lines in the header count = 0 - #-- Reading over header text + # Reading over header text while HEADER: - #-- file line at count + # file line at count line = file_contents[count] - #-- find PRODUCT: within line to set HEADER flag to False when found + # find PRODUCT: within line to set HEADER flag to False when found HEADER = not bool(re.match(r'PRODUCT:+',line)) - #-- add 1 to counter + # add 1 to counter count += 1 - #-- number of months within the file + # number of months within the file n_mon = file_lines - count - #-- GRACE/GRACE-FO dates + # GRACE/GRACE-FO dates date_conv = np.zeros((n_mon)) - #-- monthly spherical harmonic replacement solutions + # monthly 
spherical harmonic replacement solutions C20_input = np.zeros((n_mon)) - #-- monthly spherical harmonic formal standard deviations + # monthly spherical harmonic formal standard deviations eC20_input = np.zeros((n_mon)) - #-- flag denoting if replacement solution + # flag denoting if replacement solution slr_flag = np.zeros((n_mon),dtype=bool) - #-- time count + # time count t = 0 - #-- for every other line: + # for every other line: for line in file_contents[count:]: - #-- find numerical instances in line including exponents, - #-- decimal points and negatives + # find numerical instances in line including exponents, + # decimal points and negatives line_contents = re.findall(r'[-+]?\d*\.\d*(?:[eE][-+]?\d+)?',line) - #-- check for empty lines as there are - #-- slight differences in RL04 TN-05_C20_SLR.txt - #-- with blanks between the PRODUCT: line and the data + # check for empty lines as there are + # slight differences in RL04 TN-05_C20_SLR.txt + # with blanks between the PRODUCT: line and the data count = len(line_contents) - #-- if count is greater than 0 + # if count is greater than 0 if (count > 0): - #-- modified julian date for line + # modified julian date for line MJD = np.float64(line_contents[0]) - #-- converting from MJD into month, day and year + # converting from MJD into month, day and year YY,MM,DD,hh,mm,ss = gravity_toolkit.time.convert_julian( MJD+2400000.5, format='tuple') - #-- converting from month, day, year into decimal year + # converting from month, day, year into decimal year date_conv[t] = gravity_toolkit.time.convert_calendar_decimal( YY, MM, day=DD, hour=hh) - #-- Spherical Harmonic data for line + # Spherical Harmonic data for line C20_input[t] = np.float64(line_contents[2]) eC20_input[t] = np.float64(line_contents[4])*1e-10 - #-- line has * flag + # line has * flag if bool(re.search(r'\*',line)): slr_flag[t] = True - #-- add to t count + # add to t count t += 1 - #-- truncate for RL04 if necessary + # truncate for RL04 if necessary date_conv = date_conv[:t] C20_input = C20_input[:t] eC20_input = eC20_input[:t] slr_flag = slr_flag[:t] - #-- GRACE/GRACE-FO month of SLR solutions + # GRACE/GRACE-FO month of SLR solutions mon = gravity_toolkit.time.calendar_to_grace(date_conv,around=np.round) - #-- number of unique months + # number of unique months dinput['month'] = np.unique(mon) n_uniq = len(dinput['month']) - #-- Removing overlapping months to use the data for - #-- months with limited GRACE accelerometer use + # Removing overlapping months to use the data for + # months with limited GRACE accelerometer use dinput['time'] = np.zeros((n_uniq)) dinput['data'] = np.zeros((n_uniq)) dinput['error'] = np.zeros((n_uniq)) - #-- New SLR datasets have * flags for the modified GRACE periods - #-- these GRACE months use half of a prior month in their solution - #-- this will find these months (marked above with slr_flag) + # New SLR datasets have * flags for the modified GRACE periods + # these GRACE months use half of a prior month in their solution + # this will find these months (marked above with slr_flag) for t in range(n_uniq): count = np.count_nonzero(mon == dinput['month'][t]) - #-- there is only one solution for the month + # there is only one solution for the month if (count == 1): i = np.nonzero(mon == dinput['month'][t]) dinput['time'][t] = date_conv[i] dinput['data'][t] = C20_input[i] dinput['error'][t] = eC20_input[i] - #-- there is a special solution for the month - #-- will the solution flagged with slr_flag + # there is a special solution for the 
month + # will use the solution flagged with slr_flag elif (count == 2): i = np.nonzero((mon == dinput['month'][t]) & slr_flag) dinput['time'][t] = date_conv[i] dinput['data'][t] = C20_input[i] dinput['error'][t] = eC20_input[i] - #-- The 'Special Months' (Nov 2011, Dec 2011 and April 2012) with - #-- Accelerometer shutoffs make the relation between month number - #-- and date more complicated as days from other months are used - #-- For CSR and GFZ: Nov 2011 (119) is centered in Oct 2011 (118) - #-- For JPL: Dec 2011 (120) is centered in Jan 2012 (121) - #-- For all: May 2015 (161) is centered in Apr 2015 (160) - #-- For GSFC: Oct 2018 (202) is centered in Nov 2018 (203) + # The 'Special Months' (Nov 2011, Dec 2011 and April 2012) with + # Accelerometer shutoffs make the relation between month number + # and date more complicated as days from other months are used + # For CSR and GFZ: Nov 2011 (119) is centered in Oct 2011 (118) + # For JPL: Dec 2011 (120) is centered in Jan 2012 (121) + # For all: May 2015 (161) is centered in Apr 2015 (160) + # For GSFC: Oct 2018 (202) is centered in Nov 2018 (203) dinput['month'] = gravity_toolkit.time.adjust_months(dinput['month']) - #-- return the SLR-derived oblateness solutions + # return the SLR-derived oblateness solutions return dinput \ No newline at end of file diff --git a/gravity_toolkit/read_SLR_C30.py b/gravity_toolkit/read_SLR_C30.py index 831ded2f..ef40c84e 100644 --- a/gravity_toolkit/read_SLR_C30.py +++ b/gravity_toolkit/read_SLR_C30.py @@ -84,7 +84,7 @@ import gravity_toolkit.time import gravity_toolkit.read_SLR_harmonics -#-- PURPOSE: read Degree 3 zonal data from Satellite Laser Ranging (SLR) +# PURPOSE: read Degree 3 zonal data from Satellite Laser Ranging (SLR) def read_SLR_C30(SLR_file, C30_MEAN=9.5717395773300e-07, HEADER=True): """ Reads C30 spherical harmonic coefficients from SLR measurements @@ -110,159 +110,159 @@ def read_SLR_C30(SLR_file, C30_MEAN=9.5717395773300e-07, HEADER=True): date of SLR measurement """ - #-- check that SLR file exists + # check that SLR file exists if not os.access(os.path.expanduser(SLR_file), os.F_OK): raise FileNotFoundError('SLR file not found in file system') - #-- output dictionary with input data + # output dictionary with input data dinput = {} if bool(re.search(r'TN-(14)',SLR_file,re.I)): - #-- SLR C30 RL06 file from PO.DAAC produced by GSFC + # SLR C30 RL06 file from PO.DAAC produced by GSFC with open(os.path.expanduser(SLR_file), mode='r', encoding='utf8') as f: file_contents = f.read().splitlines() - #-- number of lines contained in the file + # number of lines contained in the file file_lines = len(file_contents) - #-- counts the number of lines in the header + # counts the number of lines in the header count = 0 - #-- Reading over header text + # Reading over header text while HEADER: - #-- file line at count + # file line at count line = file_contents[count] - #-- find PRODUCT: within line to set HEADER flag to False when found + # find PRODUCT: within line to set HEADER flag to False when found HEADER = not bool(re.match(r'Product:+',line)) - #-- add 1 to counter + # add 1 to counter count += 1 - #-- number of months within the file + # number of months within the file n_mon = file_lines - count - #-- date and GRACE/GRACE-FO month + # date and GRACE/GRACE-FO month dinput['time'] = np.zeros((n_mon)) dinput['month'] = np.zeros((n_mon),dtype=int) - #-- monthly spherical harmonic replacement solutions + # monthly spherical harmonic replacement solutions dinput['data'] = np.zeros((n_mon)) -
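The overlapping-month selection that closes read_SLR_C20 above is the subtlest step in the reader: months with a single entry are copied through, while months with two entries keep only the solution starred in the file. A minimal, self-contained sketch of that selection, using toy arrays rather than real C20 solutions:

    import numpy as np

    # toy inputs: two solutions share GRACE month 161 (May 2015) and
    # the flagged entry is the replacement that should be kept
    mon      = np.array([159, 160, 161, 161, 162])
    data     = np.array([1.0, 2.0, 3.0, 3.5, 4.0])
    slr_flag = np.array([False, False, False, True, False])

    months = np.unique(mon)
    out = np.zeros((len(months)))
    for t, m in enumerate(months):
        if (np.count_nonzero(mon == m) == 1):
            # only one solution for the month
            i = np.nonzero(mon == m)[0]
        else:
            # overlapping months: keep the solution flagged with '*'
            i = np.nonzero((mon == m) & slr_flag)[0]
        out[t] = data[i[0]]
    print(out)    # [1.  2.  3.5 4. ]
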
#-- monthly spherical harmonic formal standard deviations + # monthly spherical harmonic formal standard deviations dinput['error'] = np.zeros((n_mon)) - #-- time count + # time count t = 0 - #-- for every other line: + # for every other line: for line in file_contents[count:]: - #-- find numerical instances in line including exponents, - #-- decimal points and negatives + # find numerical instances in line including exponents, + # decimal points and negatives line_contents = re.findall(r'[-+]?\d*\.\d*(?:[eE][-+]?\d+)?',line) count = len(line_contents) - #-- only read lines where C30 data exists (don't read NaN lines) + # only read lines where C30 data exists (don't read NaN lines) if (count > 7): - #-- modified julian date for line + # modified julian date for line MJD = np.float64(line_contents[0]) - #-- converting from MJD into month, day and year + # converting from MJD into month, day and year YY,MM,DD,hh,mm,ss = gravity_toolkit.time.convert_julian( MJD+2400000.5, format='tuple') - #-- converting from month, day, year into decimal year + # converting from month, day, year into decimal year dinput['time'][t] = gravity_toolkit.time.convert_calendar_decimal( YY, MM, day=DD, hour=hh) - #-- Spherical Harmonic data for line + # Spherical Harmonic data for line dinput['data'][t] = np.float64(line_contents[5]) dinput['error'][t] = np.float64(line_contents[7])*1e-10 - #-- GRACE/GRACE-FO month of SLR solutions + # GRACE/GRACE-FO month of SLR solutions dinput['month'][t] = gravity_toolkit.time.calendar_to_grace( dinput['time'][t], around=np.round) - #-- add to t count + # add to t count t += 1 - #-- verify that there imported C30 solutions - #-- (TN-14 data format has changed in the past) + # verify that there are imported C30 solutions + # (TN-14 data format has changed in the past) if (t == 0): raise Exception('No GSFC C30 data imported') - #-- truncate variables if necessary + # truncate variables if necessary for key,val in dinput.items(): dinput[key] = val[:t] elif bool(re.search(r'C30_LARES',SLR_file,re.I)): - #-- read LARES filtered values + # read LARES filtered values LARES_input = np.loadtxt(SLR_file,skiprows=1) dinput['time'] = LARES_input[:,0].copy() - #-- convert C30 from anomalies to absolute + # convert C30 from anomalies to absolute dinput['data'] = 1e-10*LARES_input[:,1] + C30_MEAN - #-- filtered data does not have errors + # filtered data does not have errors dinput['error'] = np.zeros_like(LARES_input[:,1]) - #-- calculate GRACE/GRACE-FO month + # calculate GRACE/GRACE-FO month dinput['month'] = gravity_toolkit.time.calendar_to_grace(dinput['time']) elif bool(re.search(r'GRAVIS-2B_GFZOP',SLR_file,re.I)): - #-- Combined GRACE/SLR solution file produced by GFZ - #-- Column 1: MJD of BEGINNING of solution data span - #-- Column 2: Year and fraction of year of BEGINNING of solution span - #-- Column 6: Replacement C(3,0) - #-- Column 7: Replacement C(3,0) - mean C(3,0) (1.0E-10) - #-- Column 8: C(3,0) formal standard deviation (1.0E-12) + # Combined GRACE/SLR solution file produced by GFZ + # Column 1: MJD of BEGINNING of solution data span + # Column 2: Year and fraction of year of BEGINNING of solution span + # Column 6: Replacement C(3,0) + # Column 7: Replacement C(3,0) - mean C(3,0) (1.0E-10) + # Column 8: C(3,0) formal standard deviation (1.0E-12) with open(os.path.expanduser(SLR_file), mode='r', encoding='utf8') as f: file_contents = f.read().splitlines() - #-- number of lines contained in the file + # number of lines contained in the file file_lines = len(file_contents) - #--
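The PRODUCT-sentinel header skip being applied here recurs in every reader touched by this patch, so a standalone sketch may be useful; the file contents below are placeholders, and only the 'PRODUCT:' sentinel mirrors the real format:

    import re

    # placeholder header plus one data line; real files are longer
    file_contents = [
        'GRACE/SLR C20 time series',
        'PRODUCT: monthly C20 replacement values',
        '55197.0 2010.0000 -4.8416945732e-04 0.4565 0.0507',
    ]
    HEADER = True
    count = 0
    while HEADER:
        line = file_contents[count]
        # HEADER flips to False on the line beginning with PRODUCT:
        HEADER = not bool(re.match(r'PRODUCT:+', line))
        count += 1
    # count now indexes the first data line
    print(count, file_contents[count])
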
counts the number of lines in the header + # counts the number of lines in the header count = 0 - #-- Reading over header text + # Reading over header text while HEADER: - #-- file line at count + # file line at count line = file_contents[count] - #-- find PRODUCT: within line to set HEADER flag to False when found + # find PRODUCT: within line to set HEADER flag to False when found HEADER = not bool(re.match(r'PRODUCT:+',line)) - #-- add 1 to counter + # add 1 to counter count += 1 - #-- number of months within the file + # number of months within the file n_mon = file_lines - count - #-- date and GRACE/GRACE-FO month + # date and GRACE/GRACE-FO month dinput['time'] = np.zeros((n_mon)) dinput['month'] = np.zeros((n_mon),dtype=int) - #-- monthly spherical harmonic replacement solutions + # monthly spherical harmonic replacement solutions dinput['data'] = np.zeros((n_mon)) - #-- monthly spherical harmonic formal standard deviations + # monthly spherical harmonic formal standard deviations dinput['error'] = np.zeros((n_mon)) - #-- time count + # time count t = 0 - #-- for every other line: + # for every other line: for line in file_contents[count:]: - #-- find numerical instances in line including exponents, - #-- decimal points and negatives + # find numerical instances in line including exponents, + # decimal points and negatives line_contents = re.findall(r'[-+]?\d*\.\d*(?:[eE][-+]?\d+)?',line) count = len(line_contents) - #-- check for empty lines + # check for empty lines if (count > 0): - #-- reading decimal year for start of span + # reading decimal year for start of span dinput['time'][t] = np.float64(line_contents[1]) - #-- Spherical Harmonic data for line + # Spherical Harmonic data for line dinput['data'][t] = np.float64(line_contents[5]) dinput['error'][t] = np.float64(line_contents[7])*1e-10 - #-- GRACE/GRACE-FO month of SLR solutions + # GRACE/GRACE-FO month of SLR solutions dinput['month'][t] = gravity_toolkit.time.calendar_to_grace( dinput['time'][t], around=np.round) - #-- add to t count + # add to t count t += 1 - #-- truncate variables if necessary + # truncate variables if necessary for key,val in dinput.items(): dinput[key] = val[:t] else: - #-- CSR 5x5 + 6,1 file from CSR and extract C3,0 coefficients + # CSR 5x5 + 6,1 file from CSR and extract C3,0 coefficients Ylms = gravity_toolkit.read_SLR_harmonics(SLR_file, HEADER=True) - #-- extract dates, C30 harmonics and errors + # extract dates, C30 harmonics and errors dinput['time'] = Ylms['time'].copy() dinput['data'] = Ylms['clm'][3,0,:].copy() dinput['error'] = Ylms['error']['clm'][3,0,:].copy() - #-- converting from MJD into month, day and year + # converting from MJD into month, day and year YY,MM,DD,hh,mm,ss = gravity_toolkit.time.convert_julian( Ylms['MJD']+2400000.5, format='tuple') - #-- calculate GRACE/GRACE-FO month + # calculate GRACE/GRACE-FO month dinput['month'] = gravity_toolkit.time.calendar_to_grace(YY,MM) - #-- The 'Special Months' (Nov 2011, Dec 2011 and April 2012) with - #-- Accelerometer shutoffs make the relation between month number - #-- and date more complicated as days from other months are used - #-- For CSR and GFZ: Nov 2011 (119) is centered in Oct 2011 (118) - #-- For JPL: Dec 2011 (120) is centered in Jan 2012 (121) - #-- For all: May 2015 (161) is centered in Apr 2015 (160) - #-- For GSFC: Oct 2018 (202) is centered in Nov 2018 (203) + # The 'Special Months' (Nov 2011, Dec 2011 and April 2012) with + # Accelerometer shutoffs make the relation between month number + # and date more 
complicated as days from other months are used + # For CSR and GFZ: Nov 2011 (119) is centered in Oct 2011 (118) + # For JPL: Dec 2011 (120) is centered in Jan 2012 (121) + # For all: May 2015 (161) is centered in Apr 2015 (160) + # For GSFC: Oct 2018 (202) is centered in Nov 2018 (203) dinput['month'] = gravity_toolkit.time.adjust_months(dinput['month']) - #-- return the SLR-derived degree 3 zonal solutions + # return the SLR-derived degree 3 zonal solutions return dinput diff --git a/gravity_toolkit/read_SLR_C40.py b/gravity_toolkit/read_SLR_C40.py index 7946edbb..80671b5e 100644 --- a/gravity_toolkit/read_SLR_C40.py +++ b/gravity_toolkit/read_SLR_C40.py @@ -51,7 +51,7 @@ import gravity_toolkit.time import gravity_toolkit.read_SLR_harmonics -#-- PURPOSE: read Degree 4 zonal data from Satellite Laser Ranging (SLR) +# PURPOSE: read Degree 4 zonal data from Satellite Laser Ranging (SLR) def read_SLR_C40(SLR_file, C40_MEAN=0.0, DATE=None, **kwargs): """ Reads C40 spherical harmonic coefficients from SLR measurements @@ -77,51 +77,51 @@ def read_SLR_C40(SLR_file, C40_MEAN=0.0, DATE=None, **kwargs): date of SLR measurement """ - #-- check that SLR file exists + # check that SLR file exists if not os.access(os.path.expanduser(SLR_file), os.F_OK): raise FileNotFoundError('SLR file not found in file system') - #-- output dictionary with input data + # output dictionary with input data dinput = {} if bool(re.search(r'gsfc_slr_5x5c61s61',SLR_file,re.I)): - #-- read 5x5 + 6,1 file from GSFC and extract coefficients + # read 5x5 + 6,1 file from GSFC and extract coefficients Ylms = gravity_toolkit.read_SLR_harmonics(SLR_file, HEADER=True) - #-- calculate 28-day moving-average solution from 7-day arcs + # calculate 28-day moving-average solution from 7-day arcs dinput.update(gravity_toolkit.convert_weekly(Ylms['time'], Ylms['clm'][4,0,:], DATE=DATE, NEIGHBORS=28)) - #-- no estimated spherical harmonic errors + # no estimated spherical harmonic errors dinput['error'] = np.zeros_like(DATE,dtype='f8') elif bool(re.search(r'C40_LARES',SLR_file,re.I)): - #-- read LARES filtered values + # read LARES filtered values LARES_input = np.loadtxt(SLR_file,skiprows=1) dinput['time'] = LARES_input[:,0].copy() - #-- convert C40 from anomalies to absolute + # convert C40 from anomalies to absolute dinput['data'] = 1e-10*LARES_input[:,1] + C40_MEAN - #-- filtered data does not have errors + # filtered data does not have errors dinput['error'] = np.zeros_like(LARES_input[:,1]) - #-- calculate GRACE/GRACE-FO month + # calculate GRACE/GRACE-FO month dinput['month'] = gravity_toolkit.time.calendar_to_grace(dinput['time']) else: - #-- read 5x5 + 6,1 file from CSR and extract C4,0 coefficients + # read 5x5 + 6,1 file from CSR and extract C4,0 coefficients Ylms = gravity_toolkit.read_SLR_harmonics(SLR_file, HEADER=True) - #-- extract dates, C40 harmonics and errors + # extract dates, C40 harmonics and errors dinput['time'] = Ylms['time'].copy() dinput['data'] = Ylms['clm'][4,0,:].copy() dinput['error'] = Ylms['error']['clm'][4,0,:].copy() - #-- converting from MJD into month, day and year + # converting from MJD into month, day and year YY,MM,DD,hh,mm,ss = gravity_toolkit.time.convert_julian( Ylms['MJD']+2400000.5, format='tuple') - #-- calculate GRACE/GRACE-FO month + # calculate GRACE/GRACE-FO month dinput['month'] = gravity_toolkit.time.calendar_to_grace(YY,MM) - #-- The 'Special Months' (Nov 2011, Dec 2011 and April 2012) with - #-- Accelerometer shutoffs make the relation between month number - #-- and date more 
complicated as days from other months are used - #-- For CSR and GFZ: Nov 2011 (119) is centered in Oct 2011 (118) - #-- For JPL: Dec 2011 (120) is centered in Jan 2012 (121) - #-- For all: May 2015 (161) is centered in Apr 2015 (160) - #-- For GSFC: Oct 2018 (202) is centered in Nov 2018 (203) + # The 'Special Months' (Nov 2011, Dec 2011 and April 2012) with + # Accelerometer shutoffs make the relation between month number + # and date more complicated as days from other months are used + # For CSR and GFZ: Nov 2011 (119) is centered in Oct 2011 (118) + # For JPL: Dec 2011 (120) is centered in Jan 2012 (121) + # For all: May 2015 (161) is centered in Apr 2015 (160) + # For GSFC: Oct 2018 (202) is centered in Nov 2018 (203) dinput['month'] = gravity_toolkit.time.adjust_months(dinput['month']) - #-- return the SLR-derived degree 4 zonal solutions + # return the SLR-derived degree 4 zonal solutions return dinput diff --git a/gravity_toolkit/read_SLR_C50.py b/gravity_toolkit/read_SLR_C50.py index 657d20b0..7c7fcd3c 100644 --- a/gravity_toolkit/read_SLR_C50.py +++ b/gravity_toolkit/read_SLR_C50.py @@ -61,7 +61,7 @@ import gravity_toolkit.time import gravity_toolkit.read_SLR_harmonics -#-- PURPOSE: read Degree 5 zonal data from Satellite Laser Ranging (SLR) +# PURPOSE: read Degree 5 zonal data from Satellite Laser Ranging (SLR) def read_SLR_C50(SLR_file, C50_MEAN=0.0, DATE=None, HEADER=True): """ Reads C50 spherical harmonic coefficients from SLR measurements @@ -89,111 +89,111 @@ def read_SLR_C50(SLR_file, C50_MEAN=0.0, DATE=None, HEADER=True): date of SLR measurement """ - #-- check that SLR file exists + # check that SLR file exists if not os.access(os.path.expanduser(SLR_file), os.F_OK): raise FileNotFoundError('SLR file not found in file system') - #-- output dictionary with input data + # output dictionary with input data dinput = {} if bool(re.search(r'GSFC_SLR_C(20)_C(30)_C(50)',SLR_file,re.I)): - #-- SLR C50 RL06 file from GSFC + # SLR C50 RL06 file from GSFC with open(os.path.expanduser(SLR_file), mode='r', encoding='utf8') as f: file_contents = f.read().splitlines() - #-- number of lines contained in the file + # number of lines contained in the file file_lines = len(file_contents) - #-- counts the number of lines in the header + # counts the number of lines in the header count = 0 - #-- Reading over header text + # Reading over header text while HEADER: - #-- file line at count + # file line at count line = file_contents[count] - #-- find PRODUCT: within line to set HEADER flag to False when found + # find PRODUCT: within line to set HEADER flag to False when found HEADER = not bool(re.match(r'Product:+',line)) - #-- add 1 to counter + # add 1 to counter count += 1 - #-- number of months within the file + # number of months within the file n_mon = file_lines - count - #-- date and GRACE/GRACE-FO month + # date and GRACE/GRACE-FO month dinput['time'] = np.zeros((n_mon)) dinput['month'] = np.zeros((n_mon),dtype=int) - #-- monthly spherical harmonic replacement solutions + # monthly spherical harmonic replacement solutions dinput['data'] = np.zeros((n_mon)) - #-- monthly spherical harmonic formal standard deviations + # monthly spherical harmonic formal standard deviations dinput['error'] = np.zeros((n_mon)) - #-- time count + # time count t = 0 - #-- for every other line: + # for every other line: for line in file_contents[count:]: - #-- find numerical instances in line including exponents, - #-- decimal points and negatives + # find numerical instances in line including exponents, + 
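The numeric regular expression shared by all of these readers is worth unpacking: it matches an optional sign, digits around a mandatory decimal point, and an optional exponent, so bare integers and text flags such as 'Gm' are skipped. A quick demonstration on an invented data line:

    import re

    pattern = r'[-+]?\d*\.\d*(?:[eE][-+]?\d+)?'
    line = '58123.0 2018.0456 -4.8416945732e-04 Gm 0.4565'
    print(re.findall(pattern, line))
    # ['58123.0', '2018.0456', '-4.8416945732e-04', '0.4565']
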
# decimal points and negatives line_contents = re.findall(r'[-+]?\d*\.\d*(?:[eE][-+]?\d+)?',line) count = len(line_contents) - #-- only read lines where C50 data exists (don't read NaN lines) + # only read lines where C50 data exists (don't read NaN lines) if (count > 7): - #-- modified julian date for line + # modified julian date for line MJD = np.float64(line_contents[0]) - #-- converting from MJD into month, day and year + # converting from MJD into month, day and year YY,MM,DD,hh,mm,ss = gravity_toolkit.time.convert_julian( MJD+2400000.5, format='tuple') - #-- converting from month, day, year into decimal year + # converting from month, day, year into decimal year dinput['time'][t] = gravity_toolkit.time.convert_calendar_decimal( YY, MM, day=DD, hour=hh) - #-- Spherical Harmonic data for line + # Spherical Harmonic data for line dinput['data'][t] = np.float64(line_contents[10]) dinput['error'][t] = np.float64(line_contents[12])*1e-10 - #-- GRACE/GRACE-FO month of SLR solutions + # GRACE/GRACE-FO month of SLR solutions dinput['month'][t] = gravity_toolkit.time.calendar_to_grace( dinput['time'][t], around=np.round) - #-- add to t count + # add to t count t += 1 - #-- verify that there imported C50 solutions + # verify that there are imported C50 solutions if (t == 0): raise Exception('No GSFC C50 data imported') - #-- truncate variables if necessary + # truncate variables if necessary for key,val in dinput.items(): dinput[key] = val[:t] elif bool(re.search(r'gsfc_slr_5x5c61s61',SLR_file,re.I)): - #-- read 5x5 + 6,1 file from GSFC and extract coefficients + # read 5x5 + 6,1 file from GSFC and extract coefficients Ylms = gravity_toolkit.read_SLR_harmonics(SLR_file, HEADER=True) - #-- calculate 28-day moving-average solution from 7-day arcs + # calculate 28-day moving-average solution from 7-day arcs dinput.update(gravity_toolkit.convert_weekly(Ylms['time'], Ylms['clm'][5,0,:], DATE=DATE, NEIGHBORS=28)) - #-- no estimated spherical harmonic errors + # no estimated spherical harmonic errors dinput['error'] = np.zeros_like(DATE,dtype='f8') elif bool(re.search(r'C50_LARES',SLR_file,re.I)): - #-- read LARES filtered values + # read LARES filtered values LARES_input = np.loadtxt(SLR_file,skiprows=1) dinput['time'] = LARES_input[:,0].copy() - #-- convert C50 from anomalies to absolute + # convert C50 from anomalies to absolute dinput['data'] = 1e-10*LARES_input[:,1] + C50_MEAN - #-- filtered data does not have errors + # filtered data does not have errors dinput['error'] = np.zeros_like(LARES_input[:,1]) - #-- calculate GRACE/GRACE-FO month + # calculate GRACE/GRACE-FO month dinput['month'] = gravity_toolkit.time.calendar_to_grace(dinput['time']) else: - #-- read 5x5 + 6,1 file from CSR and extract C5,0 coefficients + # read 5x5 + 6,1 file from CSR and extract C5,0 coefficients Ylms = gravity_toolkit.read_SLR_harmonics(SLR_file, HEADER=True) - #-- extract dates, C50 harmonics and errors + # extract dates, C50 harmonics and errors dinput['time'] = Ylms['time'].copy() dinput['data'] = Ylms['clm'][5,0,:].copy() dinput['error'] = Ylms['error']['clm'][5,0,:].copy() - #-- converting from MJD into month, day and year + # converting from MJD into month, day and year YY,MM,DD,hh,mm,ss = gravity_toolkit.time.convert_julian( Ylms['MJD']+2400000.5, format='tuple') - #-- calculate GRACE/GRACE-FO month + # calculate GRACE/GRACE-FO month dinput['month'] = gravity_toolkit.time.calendar_to_grace(YY,MM) - #-- The 'Special Months' (Nov 2011, Dec 2011 and April 2012) with - #-- Accelerometer shutoffs make the relation
between month number - #-- and date more complicated as days from other months are used - #-- For CSR and GFZ: Nov 2011 (119) is centered in Oct 2011 (118) - #-- For JPL: Dec 2011 (120) is centered in Jan 2012 (121) - #-- For all: May 2015 (161) is centered in Apr 2015 (160) - #-- For GSFC: Oct 2018 (202) is centered in Nov 2018 (203) + # The 'Special Months' (Nov 2011, Dec 2011 and April 2012) with + # Accelerometer shutoffs make the relation between month number + # and date more complicated as days from other months are used + # For CSR and GFZ: Nov 2011 (119) is centered in Oct 2011 (118) + # For JPL: Dec 2011 (120) is centered in Jan 2012 (121) + # For all: May 2015 (161) is centered in Apr 2015 (160) + # For GSFC: Oct 2018 (202) is centered in Nov 2018 (203) dinput['month'] = gravity_toolkit.time.adjust_months(dinput['month']) - #-- return the SLR-derived degree 5 zonal solutions + # return the SLR-derived degree 5 zonal solutions return dinput diff --git a/gravity_toolkit/read_SLR_CS2.py b/gravity_toolkit/read_SLR_CS2.py index 43135183..fbf631cf 100644 --- a/gravity_toolkit/read_SLR_CS2.py +++ b/gravity_toolkit/read_SLR_CS2.py @@ -81,7 +81,7 @@ import gravity_toolkit.time import gravity_toolkit.read_SLR_harmonics -#-- PURPOSE: read Degree 2,m data from Satellite Laser Ranging (SLR) +# PURPOSE: read Degree 2,m data from Satellite Laser Ranging (SLR) def read_SLR_CS2(SLR_file, ORDER=1, DATE=None, HEADER=True): """ Reads CS2,m spherical harmonic coefficients from SLR measurements @@ -113,36 +113,36 @@ def read_SLR_CS2(SLR_file, ORDER=1, DATE=None, HEADER=True): date of SLR measurement """ - #-- check that SLR file exists + # check that SLR file exists if not os.access(os.path.expanduser(SLR_file), os.F_OK): raise FileNotFoundError('SLR file not found in file system') - #-- output dictionary with input data + # output dictionary with input data dinput = {} if bool(re.search(r'GSFC_C2(\d)_S2(\d)',SLR_file,re.I)): - #-- 7-day arc SLR file produced by GSFC - #-- input variable names and types + # 7-day arc SLR file produced by GSFC + # input variable names and types dtype = {} dtype['names'] = ('time','C2','S2') dtype['formats'] = ('f','f8','f8') - #-- read SLR 2,1 file from GSFC - #-- Column 1: Approximate mid-point of 7-day solution (years) - #-- Column 2: Solution from SLR (normalized) - #-- Column 3: Solution from SLR (normalized) + # read SLR 2,1 file from GSFC + # Column 1: Approximate mid-point of 7-day solution (years) + # Column 2: Solution from SLR (normalized) + # Column 3: Solution from SLR (normalized) content = np.loadtxt(os.path.expanduser(SLR_file),dtype=dtype) - #-- duplicate time and harmonics + # duplicate time and harmonics tdec = np.repeat(content['time'],7) c2m = np.repeat(content['C2'],7) s2m = np.repeat(content['S2'],7) - #-- calculate daily dates to use in centered moving average + # calculate daily dates to use in centered moving average tdec += (np.mod(np.arange(len(tdec)),7) - 3.5)/365.25 - #-- number of dates to use in average + # number of dates to use in average n_neighbors = 28 - #-- calculate 28-day moving-average solution from 7-day arcs + # calculate 28-day moving-average solution from 7-day arcs dinput['time'] = np.zeros_like(DATE) dinput['C2m'] = np.zeros_like(DATE,dtype='f8') dinput['S2m'] = np.zeros_like(DATE,dtype='f8') - #-- no estimated spherical harmonic errors + # no estimated spherical harmonic errors dinput['eC2m'] = np.zeros_like(DATE,dtype='f8') dinput['eS2m'] = np.zeros_like(DATE,dtype='f8') for i,D in enumerate(DATE): @@ -150,24 +150,24 @@ 
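The month numbers quoted in the 'Special Months' comments imply the GRACE month convention month = 12*(year - 2002) + calendar_month; note this formula is inferred from the quoted values, not taken from gravity_toolkit.time itself. Because each special solution is centered in a neighboring calendar month, rounding its decimal year yields the neighbor's number, and adjust_months restores the intended numbering. Checking the inference against every value in the comments:

    # inferred GRACE month convention (an assumption, verified below)
    def grace_month(year, month):
        return 12*(year - 2002) + month

    assert grace_month(2011, 11) == 119   # Nov 2011
    assert grace_month(2012, 1) == 121    # Jan 2012
    assert grace_month(2015, 5) == 161    # May 2015
    assert grace_month(2018, 10) == 202   # Oct 2018
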
def read_SLR_CS2(SLR_file, ORDER=1, DATE=None, HEADER=True): dinput['time'][i] = np.mean(tdec[isort]) dinput['C2m'][i] = np.mean(c2m[isort]) dinput['S2m'][i] = np.mean(s2m[isort]) - #-- GRACE/GRACE-FO month + # GRACE/GRACE-FO month dinput['month'] = gravity_toolkit.time.calendar_to_grace(dinput['time']) elif bool(re.search(r'gsfc_slr_5x5c61s61',SLR_file,re.I)): - #-- read 5x5 + 6,1 file from GSFC and extract coefficients + # read 5x5 + 6,1 file from GSFC and extract coefficients Ylms = gravity_toolkit.read_SLR_harmonics(SLR_file, HEADER=True) - #-- duplicate time and harmonics + # duplicate time and harmonics tdec = np.repeat(Ylms['time'],7) c2m = np.repeat(Ylms['clm'][2,ORDER],7) s2m = np.repeat(Ylms['slm'][2,ORDER],7) - #-- calculate daily dates to use in centered moving average + # calculate daily dates to use in centered moving average tdec += (np.mod(np.arange(len(tdec)),7) - 3.5)/365.25 - #-- number of dates to use in average + # number of dates to use in average n_neighbors = 28 - #-- calculate 28-day moving-average solution from 7-day arcs + # calculate 28-day moving-average solution from 7-day arcs dinput['time'] = np.zeros_like(DATE) dinput['C2m'] = np.zeros_like(DATE,dtype='f8') dinput['S2m'] = np.zeros_like(DATE,dtype='f8') - #-- no estimated spherical harmonic errors + # no estimated spherical harmonic errors dinput['eC2m'] = np.zeros_like(DATE,dtype='f8') dinput['eS2m'] = np.zeros_like(DATE,dtype='f8') for i,D in enumerate(DATE): @@ -175,106 +175,106 @@ def read_SLR_CS2(SLR_file, ORDER=1, DATE=None, HEADER=True): dinput['time'][i] = np.mean(tdec[isort]) dinput['C2m'][i] = np.mean(c2m[isort]) dinput['S2m'][i] = np.mean(s2m[isort]) - #-- GRACE/GRACE-FO month + # GRACE/GRACE-FO month dinput['month'] = gravity_toolkit.time.calendar_to_grace(dinput['time']) elif bool(re.search(r'C2(\d)_S2(\d)_(RL\d{2})',SLR_file,re.I)): - #-- SLR RL06 file produced by CSR - #-- input variable names and types + # SLR RL06 file produced by CSR + # input variable names and types dtype = {} dtype['names'] = ('time','C2','S2','eC2','eS2', 'C2aod','S2aod','start','end') dtype['formats'] = ('f','f8','f8','f','f','f','f','f','f') - #-- read SLR 2,1 or 2,2 RL06 file from CSR - #-- header text is commented and won't be read - #-- Column 1: Approximate mid-point of monthly solution (years) - #-- Column 2: Solution from SLR (normalized) - #-- Column 3: Solution from SLR (normalized) - #-- Column 4: Solution sigma (1E-10) - #-- Column 5: Solution sigma (1E-10) - #-- Column 6: Mean value of Atmosphere-Ocean De-aliasing model (1E-10) - #-- Column 7: Mean value of Atmosphere-Ocean De-aliasing model (1E-10) - #-- Columns 8-9: Start and end dates of data used in solution + # read SLR 2,1 or 2,2 RL06 file from CSR + # header text is commented and won't be read + # Column 1: Approximate mid-point of monthly solution (years) + # Column 2: Solution from SLR (normalized) + # Column 3: Solution from SLR (normalized) + # Column 4: Solution sigma (1E-10) + # Column 5: Solution sigma (1E-10) + # Column 6: Mean value of Atmosphere-Ocean De-aliasing model (1E-10) + # Column 7: Mean value of Atmosphere-Ocean De-aliasing model (1E-10) + # Columns 8-9: Start and end dates of data used in solution content = np.loadtxt(os.path.expanduser(SLR_file),dtype=dtype) - #-- date and GRACE/GRACE-FO month + # date and GRACE/GRACE-FO month dinput['time'] = content['time'].copy() dinput['month'] = gravity_toolkit.time.calendar_to_grace(dinput['time']) - #-- remove the monthly mean of the AOD model + # remove the monthly mean of the AOD model 
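The centered 28-day averaging applied to the GSFC 7-day arcs above (and again in convert_weekly later in this patch) can be sketched in isolation; the weekly values here are invented:

    import numpy as np

    # invented weekly mid-points (in years) and values
    t_in = 2015.0 + np.arange(10)*7.0/365.25
    d_in = np.arange(10, dtype='f8')

    # repeat each 7-day arc to daily samples centered on its mid-point
    tdec = np.repeat(t_in, 7)
    data = np.repeat(d_in, 7)
    tdec += (np.mod(np.arange(len(tdec)), 7) - 3.5)/365.25

    # average the 28 daily samples nearest a requested date D
    D = 2015.05
    isort = np.argsort((tdec - D)**2)[:28]
    print(np.mean(tdec[isort]), np.mean(data[isort]))
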
dinput['C2m'] = content['C2'] - content['C2aod']*10**-10 dinput['S2m'] = content['S2'] - content['S2aod']*10**-10 - #-- scale SLR solution sigmas + # scale SLR solution sigmas dinput['eC2m'] = content['eC2']*10**-10 dinput['eS2m'] = content['eS2']*10**-10 elif bool(re.search(r'GRAVIS-2B_GFZOP',SLR_file,re.I)): - #-- Combined GRACE/SLR solution file produced by GFZ - #-- Column 1: MJD of BEGINNING of solution data span - #-- Column 2: Year and fraction of year of BEGINNING of solution span - #-- Column 9: Replacement C(2,1) - #-- Column 10: Replacement C(2,1) - mean C(2,1) (1.0E-10) - #-- Column 11: C(2,1) formal standard deviation (1.0E-12) - #-- Column 12: Replacement S(2,1) - #-- Column 13: Replacement S(2,1) - mean S(2,1) (1.0E-10) - #-- Column 14: S(2,1) formal standard deviation (1.0E-12) + # Combined GRACE/SLR solution file produced by GFZ + # Column 1: MJD of BEGINNING of solution data span + # Column 2: Year and fraction of year of BEGINNING of solution span + # Column 9: Replacement C(2,1) + # Column 10: Replacement C(2,1) - mean C(2,1) (1.0E-10) + # Column 11: C(2,1) formal standard deviation (1.0E-12) + # Column 12: Replacement S(2,1) + # Column 13: Replacement S(2,1) - mean S(2,1) (1.0E-10) + # Column 14: S(2,1) formal standard deviation (1.0E-12) with open(os.path.expanduser(SLR_file), mode='r', encoding='utf8') as f: file_contents = f.read().splitlines() - #-- number of lines contained in the file + # number of lines contained in the file file_lines = len(file_contents) - #-- counts the number of lines in the header + # counts the number of lines in the header count = 0 - #-- Reading over header text + # Reading over header text while HEADER: - #-- file line at count + # file line at count line = file_contents[count] - #-- find PRODUCT: within line to set HEADER flag to False when found + # find PRODUCT: within line to set HEADER flag to False when found HEADER = not bool(re.match(r'PRODUCT:+',line)) - #-- add 1 to counter + # add 1 to counter count += 1 - #-- number of months within the file + # number of months within the file n_mon = file_lines - count - #-- date and GRACE/GRACE-FO month + # date and GRACE/GRACE-FO month dinput['time'] = np.zeros((n_mon)) dinput['month'] = np.zeros((n_mon),dtype=int) - #-- monthly spherical harmonic replacement solutions + # monthly spherical harmonic replacement solutions dinput['C2m'] = np.zeros((n_mon)) dinput['S2m'] = np.zeros((n_mon)) - #-- monthly spherical harmonic formal standard deviations + # monthly spherical harmonic formal standard deviations dinput['eC2m'] = np.zeros((n_mon)) dinput['eS2m'] = np.zeros((n_mon)) - #-- time count + # time count t = 0 - #-- for every other line: + # for every other line: for line in file_contents[count:]: - #-- find numerical instances in line including exponents, - #-- decimal points and negatives + # find numerical instances in line including exponents, + # decimal points and negatives line_contents = re.findall(r'[-+]?\d*\.\d*(?:[eE][-+]?\d+)?',line) count = len(line_contents) - #-- check for empty lines + # check for empty lines if (count > 0): - #-- reading decimal year for start of span + # reading decimal year for start of span dinput['time'][t] = np.float64(line_contents[1]) - #-- Spherical Harmonic data for line + # Spherical Harmonic data for line dinput['C2m'][t] = np.float64(line_contents[8]) dinput['eC2m'][t] = np.float64(line_contents[10])*1e-10 dinput['S2m'][t] = np.float64(line_contents[11]) dinput['eS2m'][t] = np.float64(line_contents[13])*1e-10 - #-- GRACE/GRACE-FO month of SLR 
solutions + # GRACE/GRACE-FO month of SLR solutions dinput['month'][t] = gravity_toolkit.time.calendar_to_grace( dinput['time'][t], around=np.round) - #-- add to t count + # add to t count t += 1 - #-- truncate variables if necessary + # truncate variables if necessary for key,val in dinput.items(): dinput[key] = val[:t] - #-- The 'Special Months' (Nov 2011, Dec 2011 and April 2012) with - #-- Accelerometer shutoffs make the relation between month number - #-- and date more complicated as days from other months are used - #-- For CSR and GFZ: Nov 2011 (119) is centered in Oct 2011 (118) - #-- For JPL: Dec 2011 (120) is centered in Jan 2012 (121) - #-- For all: May 2015 (161) is centered in Apr 2015 (160) - #-- For GSFC: Oct 2018 (202) is centered in Nov 2018 (203) + # The 'Special Months' (Nov 2011, Dec 2011 and April 2012) with + # Accelerometer shutoffs make the relation between month number + # and date more complicated as days from other months are used + # For CSR and GFZ: Nov 2011 (119) is centered in Oct 2011 (118) + # For JPL: Dec 2011 (120) is centered in Jan 2012 (121) + # For all: May 2015 (161) is centered in Apr 2015 (160) + # For GSFC: Oct 2018 (202) is centered in Nov 2018 (203) dinput['month'] = gravity_toolkit.time.adjust_months(dinput['month']) - #-- return the SLR-derived degree 2 solutions + # return the SLR-derived degree 2 solutions return dinput diff --git a/gravity_toolkit/read_SLR_geocenter.py b/gravity_toolkit/read_SLR_geocenter.py index 6ef25b20..4f8d8e28 100644 --- a/gravity_toolkit/read_SLR_geocenter.py +++ b/gravity_toolkit/read_SLR_geocenter.py @@ -81,7 +81,7 @@ import warnings import gravity_toolkit.geocenter -#-- PURPOSE: read geocenter data from Satellite Laser Ranging (SLR) +# PURPOSE: read geocenter data from Satellite Laser Ranging (SLR) def read_SLR_geocenter(geocenter_file, RADIUS=None, HEADER=0, COLUMNS=['time', 'X', 'Y', 'Z', 'X_sigma', 'Y_sigma', 'Z_sigma']): """ @@ -131,12 +131,12 @@ def read_SLR_geocenter(geocenter_file, RADIUS=None, HEADER=0, # call renamed version to not break workflows DEG1 = gravity_toolkit.geocenter(radius=RADIUS).from_SLR(geocenter_file, AOD=False, header=HEADER, columns=COLUMNS) - #-- return the SLR-derived geocenter solutions + # return the SLR-derived geocenter solutions return DEG1.to_dict() -#-- special function for outputting AOD corrected SLR geocenter values -#-- need to run aod1b_geocenter.py to calculate the monthly geocenter dealiasing +# special function for outputting AOD corrected SLR geocenter values +# need to run aod1b_geocenter.py to calculate the monthly geocenter dealiasing def aod_corrected_SLR_geocenter(geocenter_file, DREL, RADIUS=None, HEADER=0, COLUMNS=[]): """ @@ -176,5 +176,5 @@ def aod_corrected_SLR_geocenter(geocenter_file, DREL, RADIUS=None, HEADER=0, # call renamed version to not break workflows DEG1 = gravity_toolkit.geocenter(radius=RADIUS).from_SLR(geocenter_file, AOD=True, release=DREL, header=HEADER, columns=COLUMNS) - #-- return the SLR-derived geocenter solutions + # return the SLR-derived geocenter solutions return DEG1.to_dict() diff --git a/gravity_toolkit/read_SLR_harmonics.py b/gravity_toolkit/read_SLR_harmonics.py index 9fcdcf05..e2883cea 100644 --- a/gravity_toolkit/read_SLR_harmonics.py +++ b/gravity_toolkit/read_SLR_harmonics.py @@ -73,7 +73,7 @@ import numpy as np import gravity_toolkit.time -#-- PURPOSE: wrapper function for calling individual readers +# PURPOSE: wrapper function for calling individual readers def read_SLR_harmonics(SLR_file, **kwargs): """ Wrapper function 
for reading spherical harmonic coefficients @@ -93,7 +93,7 @@ def read_SLR_harmonics(SLR_file, **kwargs): else: raise Exception(f'Unknown SLR file format {SLR_file}') -#-- PURPOSE: read monthly degree harmonic data from Satellite Laser Ranging (SLR) +# PURPOSE: read monthly degree harmonic data from Satellite Laser Ranging (SLR) def read_CSR_monthly_6x1(SLR_file, SCALE=1e-10, HEADER=True): """ Reads in monthly low degree and order spherical harmonic coefficients @@ -130,43 +130,43 @@ def read_CSR_monthly_6x1(SLR_file, SCALE=1e-10, HEADER=True): and GRACE", *Journal of Geophysical Research*, 116(B01409), (2010). `doi: 10.1029/2010JB000850 `_ """ - #-- check that SLR file exists + # check that SLR file exists if not os.access(os.path.expanduser(SLR_file), os.F_OK): raise FileNotFoundError('SLR file not found in file system') - #-- read the file and get contents + # read the file and get contents with open(os.path.expanduser(SLR_file), mode='r', encoding='utf8') as f: file_contents = f.read().splitlines() file_lines = len(file_contents) - #-- spherical harmonic degree range (5x5 with 6,1) - #-- new 5x5 fields no longer include geocenter components + # spherical harmonic degree range (5x5 with 6,1) + # new 5x5 fields no longer include geocenter components LMIN = 2 LMAX = 6 n_harm = (LMAX**2 + 3*LMAX - LMIN**2 - LMIN)//2 - 5 - #-- counts the number of lines in the header + # counts the number of lines in the header count = 0 indice = None - #-- Reading over header text + # Reading over header text while HEADER: - #-- file line at count + # file line at count line = file_contents[count] - #-- find end within line to set HEADER flag to False when found + # find end within line to set HEADER flag to False when found HEADER = not bool(re.match(r'end\sof\sheader',line)) if bool(re.match(80*r'=',line)): indice = count + 1 - #-- add 1 to counter + # add 1 to counter count += 1 - #-- verify that mean field header indice was found + # verify that mean field header indice was found if not indice: raise Exception('Mean field header not found') - #-- number of dates within the file + # number of dates within the file n_dates = (file_lines - count)//(n_harm + 1) - #-- read mean fields from the header + # read mean fields from the header mean_Ylms = {} mean_Ylm_error = {} mean_Ylms['clm'] = np.zeros((LMAX+1,LMAX+1)) @@ -174,18 +174,18 @@ def read_CSR_monthly_6x1(SLR_file, SCALE=1e-10, HEADER=True): mean_Ylm_error['clm'] = np.zeros((LMAX+1,LMAX+1)) mean_Ylm_error['slm'] = np.zeros((LMAX+1,LMAX+1)) for i in range(n_harm): - #-- split the line into individual components + # split the line into individual components line = file_contents[indice+i].split() - #-- degree and order for the line + # degree and order for the line l1 = np.int64(line[0]) m1 = np.int64(line[1]) - #-- fill mean field Ylms + # fill mean field Ylms mean_Ylms['clm'][l1,m1] = np.float64(line[2].replace('D','E')) mean_Ylms['slm'][l1,m1] = np.float64(line[3].replace('D','E')) mean_Ylm_error['clm'][l1,m1] = np.float64(line[4].replace('D','E')) mean_Ylm_error['slm'][l1,m1] = np.float64(line[5].replace('D','E')) - #-- output spherical harmonic fields + # output spherical harmonic fields Ylms = {} Ylms['error'] = {} Ylms['MJD'] = np.zeros((n_dates)) @@ -194,44 +194,44 @@ def read_CSR_monthly_6x1(SLR_file, SCALE=1e-10, HEADER=True): Ylms['slm'] = np.zeros((LMAX+1,LMAX+1,n_dates)) Ylms['error']['clm'] = np.zeros((LMAX+1,LMAX+1,n_dates)) Ylms['error']['slm'] = np.zeros((LMAX+1,LMAX+1,n_dates)) - #-- input spherical harmonic anomalies and errors + # 
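The n_harm expression above is easier to verify numerically than to read: for the 5x5 + 6,1 layout it should count full degrees 2 through 5 plus the single (6,1) line. A one-off check, assuming that reading of the file layout:

    LMIN, LMAX = 2, 6
    n_harm = (LMAX**2 + 3*LMAX - LMIN**2 - LMIN)//2 - 5
    # degrees 2..5 in full give sum(l+1) = 18 lines; (6,1) adds one
    assert n_harm == sum(l + 1 for l in range(LMIN, LMAX)) + 1 == 19
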
input spherical harmonic anomalies and errors Ylm_anomalies = {} Ylm_anomaly_error = {} Ylm_anomalies['clm'] = np.zeros((LMAX+1,LMAX+1,n_dates)) Ylm_anomalies['slm'] = np.zeros((LMAX+1,LMAX+1,n_dates)) Ylm_anomaly_error['clm'] = np.zeros((LMAX+1,LMAX+1,n_dates)) Ylm_anomaly_error['slm'] = np.zeros((LMAX+1,LMAX+1,n_dates)) - #-- for each date + # for each date for d in range(n_dates): - #-- split the date line into individual components + # split the date line into individual components line_contents = file_contents[count].split() - #-- verify arc number from iteration and file + # verify arc number from iteration and file IARC = int(line_contents[0]) assert (IARC == (d+1)) - #-- modified Julian date of the middle of the month + # modified Julian date of the middle of the month Ylms['MJD'][d] = np.mean(np.array(line_contents[5:7],dtype=np.float64)) - #-- date of the mid-point of the arc given in years + # date of the mid-point of the arc given in years YY,MM = np.array(line_contents[3:5]) Ylms['time'][d] = gravity_toolkit.time.convert_calendar_decimal(YY,MM) - #-- add 1 to counter + # add 1 to counter count += 1 - #-- read the anomaly field + # read the anomaly field for i in range(n_harm): - #-- split the line into individual components + # split the line into individual components line = file_contents[count].split() - #-- degree and order for the line + # degree and order for the line l1 = np.int64(line[0]) m1 = np.int64(line[1]) - #-- fill anomaly field Ylms and rescale to output + # fill anomaly field Ylms and rescale to output Ylm_anomalies['clm'][l1,m1,d] = np.float64(line[2])*SCALE Ylm_anomalies['slm'][l1,m1,d] = np.float64(line[3])*SCALE Ylm_anomaly_error['clm'][l1,m1,d] = np.float64(line[6])*SCALE Ylm_anomaly_error['slm'][l1,m1,d] = np.float64(line[7])*SCALE - #-- add 1 to counter + # add 1 to counter count += 1 - #-- calculate full coefficients and full errors + # calculate full coefficients and full errors Ylms['clm'][:,:,d] = Ylm_anomalies['clm'][:,:,d] + mean_Ylms['clm'][:,:] Ylms['slm'][:,:,d] = Ylm_anomalies['slm'][:,:,d] + mean_Ylms['slm'][:,:] Ylms['error']['clm'][:,:,d]=np.sqrt(Ylm_anomaly_error['clm'][:,:,d]**2 + @@ -239,10 +239,10 @@ def read_CSR_monthly_6x1(SLR_file, SCALE=1e-10, HEADER=True): Ylms['error']['slm'][:,:,d]=np.sqrt(Ylm_anomaly_error['slm'][:,:,d]**2 + mean_Ylm_error['slm'][:,:]**2) - #-- return spherical harmonic fields and date information + # return spherical harmonic fields and date information return Ylms -#-- PURPOSE: read weekly degree harmonic data from Satellite Laser Ranging (SLR) +# PURPOSE: read weekly degree harmonic data from Satellite Laser Ranging (SLR) def read_GSFC_weekly_6x1(SLR_file, SCALE=1.0, HEADER=True): """ Reads weekly 5x5 spherical harmonic coefficients with 1 coefficient from @@ -276,68 +276,68 @@ def read_GSFC_weekly_6x1(SLR_file, SCALE=1.0, HEADER=True): *Geophysical Research Letters*, 47, (2020). 
`doi: 10.1029/2019GL085488 `_ """ - #-- check that SLR file exists + # check that SLR file exists if not os.access(os.path.expanduser(SLR_file), os.F_OK): raise FileNotFoundError('SLR file not found in file system') - #-- read the file and get contents + # read the file and get contents with open(os.path.expanduser(SLR_file), mode='r', encoding='utf8') as f: file_contents = f.read().splitlines() file_lines = len(file_contents) - #-- spherical harmonic degree range (5x5 with 6,1) + # spherical harmonic degree range (5x5 with 6,1) LMIN = 2 LMAX = 6 n_harm = (LMAX**2 + 3*LMAX - LMIN**2 - LMIN)//2 - 5 - #-- counts the number of lines in the header + # counts the number of lines in the header count = 0 - #-- Reading over header text + # Reading over header text while HEADER: - #-- file line at count + # file line at count line = file_contents[count] - #-- find the final line within the header text - #-- to set HEADER flag to False when found + # find the final line within the header text + # to set HEADER flag to False when found HEADER = not bool(re.search(r'Product:',line)) - #-- add 1 to counter + # add 1 to counter count += 1 - #-- number of dates within the file + # number of dates within the file n_dates = (file_lines - count)//(n_harm + 1) - #-- output spherical harmonic fields + # output spherical harmonic fields Ylms = {} Ylms['MJD'] = np.zeros((n_dates)) Ylms['time'] = np.zeros((n_dates)) Ylms['clm'] = np.zeros((LMAX+1,LMAX+1,n_dates)) Ylms['slm'] = np.zeros((LMAX+1,LMAX+1,n_dates)) - #-- for each date + # for each date for d in range(n_dates): - #-- split the date line into individual components + # split the date line into individual components line_contents = file_contents[count].split() - #-- modified Julian date of the beginning of the week + # modified Julian date of the beginning of the week Ylms['MJD'][d] = np.float64(line_contents[0]) - #-- date of the mid-point of the arc given in years + # date of the mid-point of the arc given in years Ylms['time'][d] = np.float64(line_contents[1]) - #-- add 1 to counter + # add 1 to counter count += 1 - #-- read the spherical harmonic field + # read the spherical harmonic field for i in range(n_harm): - #-- split the line into individual components + # split the line into individual components line_contents = file_contents[count].split() - #-- degree and order for the line + # degree and order for the line l1 = np.int64(line_contents[0]) m1 = np.int64(line_contents[1]) - #-- Spherical Harmonic data rescaled to output + # Spherical Harmonic data rescaled to output Ylms['clm'][l1,m1,d] = np.float64(line_contents[2])*SCALE Ylms['slm'][l1,m1,d] = np.float64(line_contents[3])*SCALE - #-- add 1 to counter + # add 1 to counter count += 1 - #-- return spherical harmonic fields and date information + # return spherical harmonic fields and date information return Ylms -#-- PURPOSE: interpolate harmonics from 7-day to monthly +# PURPOSE: interpolate harmonics from 7-day to monthly def convert_weekly(t_in, d_in, DATE=[], NEIGHBORS=28): """ Interpolate harmonics from 7-day to 28-day @@ -362,24 +362,24 @@ def convert_weekly(t_in, d_in, DATE=[], NEIGHBORS=28): data: float monthly spherical harmonic coefficients """ - #-- duplicate time and harmonics + # duplicate time and harmonics tdec = np.repeat(t_in, 7) data = np.repeat(d_in, 7) - #-- calculate daily dates to use in centered moving average + # calculate daily dates to use in centered moving average tdec += (np.mod(np.arange(len(tdec)),7) - 3.5)/365.25 - #-- calculate moving-average solution from 
7-day arcs + # calculate moving-average solution from 7-day arcs dinput = {} dinput['time'] = np.zeros_like(DATE) dinput['data'] = np.zeros_like(DATE,dtype='f8') - #-- for each output monthly date + # for each output monthly date for i,D in enumerate(DATE): - #-- find all dates within NEIGHBORS days of mid-point + # find all dates within NEIGHBORS days of mid-point isort = np.argsort((tdec - D)**2)[:NEIGHBORS] - #-- calculate monthly mean of date and data + # calculate monthly mean of date and data dinput['time'][i] = np.mean(tdec[isort]) dinput['data'][i] = np.mean(data[isort]) - #-- GRACE/GRACE-FO month + # GRACE/GRACE-FO month dinput['month'] = gravity_toolkit.time.calendar_to_grace(dinput['time']) dinput['month'] = gravity_toolkit.time.adjust_months(dinput['month']) - #-- return the moving averages + # return the moving averages return dinput diff --git a/gravity_toolkit/read_gfc_harmonics.py b/gravity_toolkit/read_gfc_harmonics.py index 2890a8ed..8b6ce889 100644 --- a/gravity_toolkit/read_gfc_harmonics.py +++ b/gravity_toolkit/read_gfc_harmonics.py @@ -71,7 +71,7 @@ import gravity_toolkit.time from geoid_toolkit.read_ICGEM_harmonics import read_ICGEM_harmonics -#-- PURPOSE: read spherical harmonic coefficients of a gravity model +# PURPOSE: read spherical harmonic coefficients of a gravity model def read_gfc_harmonics(input_file, TIDE=None, FLAG='gfc'): """ Extract gravity model spherical harmonics from Gravity @@ -133,7 +133,7 @@ def read_gfc_harmonics(input_file, TIDE=None, FLAG='gfc'): Coefficients for Satellite Altimetry Applications", (2003). `eprint ID: 11802 `_ """ - #-- regular expression operators for ITSG data and models + # regular expression operators for ITSG data and models itsg_products = [] itsg_products.append(r'atmosphere') itsg_products.append(r'dealiasing') @@ -145,60 +145,60 @@ def read_gfc_harmonics(input_file, TIDE=None, FLAG='gfc'): itsg_products.append(r'Grace_operational') itsg_pattern = (r'(AOD1B_RL\d+|model|ITSG)[-_]({0})(_n\d+)?_' r'(\d+)-(\d+)(\.gfc)').format(r'|'.join(itsg_products)) - #-- regular expression operators for Swarm data and models + # regular expression operators for Swarm data and models swarm_data = r'(SW)_(.*?)_(EGF_SHA_2)__(.*?)_(.*?)_(.*?)(\.gfc|\.ZIP)' swarm_model = r'(GAA|GAB|GAC|GAD)_Swarm_(\d+)_(\d{2})_(\d{4})(\.gfc|\.ZIP)' - #-- extract parameters for each data center and product + # extract parameters for each data center and product if re.match(itsg_pattern, os.path.basename(input_file)): - #-- compile numerical expression operator for parameters from files - #-- GRAZ: Institute of Geodesy from GRAZ University of Technology + # compile numerical expression operator for parameters from files + # GRAZ: Institute of Geodesy from GRAZ University of Technology rx = re.compile(itsg_pattern, re.VERBOSE | re.IGNORECASE) - #-- extract parameters from input filename + # extract parameters from input filename PFX,PRD,trunc,year,month,SFX = rx.findall(input_file).pop() - #-- number of days in each month for the calendar year + # number of days in each month for the calendar year dpm = gravity_toolkit.time.calendar_days(int(year)) - #-- create start and end date lists + # create start and end date lists start_date = [int(year),int(month),1,0,0,0] end_date = [int(year),int(month),dpm[int(month)-1],23,59,59] elif re.match(swarm_data, os.path.basename(input_file)): - #-- compile numerical expression operator for parameters from files - #-- Swarm: data from Swarm satellite + # compile numerical expression operator for parameters from files + # 
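A usage sketch for convert_weekly as defined above; the weekly series is synthetic, and the import assumes the package layout used throughout this patch:

    import numpy as np
    from gravity_toolkit.read_SLR_harmonics import convert_weekly

    # synthetic weekly series: one year of 7-day arcs
    t_in = 2018.0 + np.arange(52)*7.0/365.25
    d_in = 1e-10*np.sin(2.0*np.pi*(t_in - 2018.0))
    # monthly mid-points to average to
    DATE = 2018.0 + (np.arange(12) + 0.5)/12.0
    monthly = convert_weekly(t_in, d_in, DATE=DATE, NEIGHBORS=28)
    print(monthly['time'], monthly['data'], monthly['month'])
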
Swarm: data from Swarm satellite rx = re.compile(swarm_data, re.VERBOSE | re.IGNORECASE) - #-- extract parameters from input filename + # extract parameters from input filename SAT,tmp,PROD,starttime,endtime,RL,SFX = rx.findall(input_file).pop() start_date,_ = gravity_toolkit.time.parse_date_string(starttime) end_date,_ = gravity_toolkit.time.parse_date_string(endtime) - #-- number of days in each month for the calendar year + # number of days in each month for the calendar year dpm = gravity_toolkit.time.calendar_days(start_date[0]) elif re.match(swarm_model, os.path.basename(input_file)): - #-- compile numerical expression operator for parameters from files - #-- Swarm: dealiasing products for Swarm data + # compile numerical expression operator for parameters from files + # Swarm: dealiasing products for Swarm data rx = re.compile(swarm_data, re.VERBOSE | re.IGNORECASE) - #-- extract parameters from input filename + # extract parameters from input filename PROD,trunc,month,year,SFX = rx.findall(input_file).pop() - #-- number of days in each month for the calendar year + # number of days in each month for the calendar year dpm = gravity_toolkit.time.calendar_days(int(year)) - #-- create start and end date lists + # create start and end date lists start_date = [int(year),int(month),1,0,0,0] end_date = [int(year),int(month),dpm[int(month)-1],23,59,59] - #-- python dictionary with model input and headers + # python dictionary with model input and headers ZIP = bool(re.search('ZIP',SFX,re.IGNORECASE)) model_input = read_ICGEM_harmonics(input_file, TIDE=TIDE, FLAG=FLAG, ZIP=ZIP) - #-- start and end day of the year + # start and end day of the year start_day = np.sum(dpm[:start_date[1]-1]) + start_date[2] + \ start_date[3]/24.0 + start_date[4]/1440.0 + start_date[5]/86400.0 end_day = np.sum(dpm[:end_date[1]-1]) + end_date[2] + \ end_date[3]/24.0 + end_date[4]/1440.0 + end_date[5]/86400.0 - #-- end date taking into account measurements taken on different years + # end date taking into account measurements taken on different years end_cyclic = (end_date[0]-start_date[0])*np.sum(dpm) + end_day - #-- calculate mid-month value + # calculate mid-month value mid_day = np.mean([start_day, end_cyclic]) - #-- Calculating the mid-month date in decimal form + # Calculating the mid-month date in decimal form model_input['time'] = start_date[0] + mid_day/np.sum(dpm) - #-- Calculating the Julian dates of the start and end date + # Calculating the Julian dates of the start and end date model_input['start'] = 2400000.5 + \ gravity_toolkit.time.convert_calendar_dates(start_date[0], start_date[1],start_date[2],hour=start_date[3],minute=start_date[4], @@ -208,5 +208,5 @@ def read_gfc_harmonics(input_file, TIDE=None, FLAG='gfc'): end_date[1],end_date[2],hour=end_date[3],minute=end_date[4], second=end_date[5],epoch=(1858,11,17,0,0,0)) - #-- return the spherical harmonics and parameters + # return the spherical harmonics and parameters return model_input diff --git a/gravity_toolkit/read_gravis_geocenter.py b/gravity_toolkit/read_gravis_geocenter.py index 55bf1b7d..a936122a 100644 --- a/gravity_toolkit/read_gravis_geocenter.py +++ b/gravity_toolkit/read_gravis_geocenter.py @@ -53,7 +53,7 @@ import warnings import gravity_toolkit.geocenter -#-- PURPOSE: read geocenter data from GFZ GravIS SLR/GRACE solutions +# PURPOSE: read geocenter data from GFZ GravIS SLR/GRACE solutions def read_gravis_geocenter(geocenter_file, HEADER=True): """ Reads monthly geocenter spherical harmonic data files from @@ -85,5 +85,5 @@ def 
read_gravis_geocenter(geocenter_file, HEADER=True): DeprecationWarning) # call renamed version to not break workflows DEG1 = gravity_toolkit.geocenter().from_gravis(geocenter_file,header=HEADER) - #-- return the GFZ GravIS geocenter solutions + # return the GFZ GravIS geocenter solutions return DEG1.to_dict() diff --git a/gravity_toolkit/read_love_numbers.py b/gravity_toolkit/read_love_numbers.py index 7371e96d..f6597a11 100755 --- a/gravity_toolkit/read_love_numbers.py +++ b/gravity_toolkit/read_love_numbers.py @@ -87,7 +87,7 @@ import numpy as np from gravity_toolkit.utilities import get_data_path -#-- PURPOSE: read load love numbers from PREM +# PURPOSE: read load love numbers from PREM def read_love_numbers(love_numbers_file, LMAX=None, HEADER=2, COLUMNS=['l','hl','kl','ll'], REFERENCE='CE', FORMAT='tuple'): """ @@ -167,85 +167,85 @@ def read_love_numbers(love_numbers_file, LMAX=None, HEADER=2, *Computers & Geosciences*, 49, 190--199, (2012). `doi: 10.1016/j.cageo.2012.06.022 `_ """ - #-- Input load love number data file and read contents + # Input load love number data file and read contents file_contents = extract_love_numbers(love_numbers_file) - #-- compile regular expression operator to find numerical instances + # compile regular expression operator to find numerical instances regex_pattern = r'[-+]?(?:(?:\d*\.\d+)|(?:\d+\.?))(?:[Ee][+-]?\d+)?' rx = re.compile(regex_pattern, re.VERBOSE) - #-- extract maximum spherical harmonic degree from final line in file + # extract maximum spherical harmonic degree from final line in file if LMAX is None: LMAX = np.int64(rx.findall(file_contents[-1])[COLUMNS.index('l')]) - #-- dictionary of output love numbers + # dictionary of output love numbers love = {} - #-- spherical harmonic degree + # spherical harmonic degree love['l'] = np.arange(LMAX+1) - #-- vertical displacement hl - #-- gravitational potential kl - #-- horizontal displacement ll + # vertical displacement hl + # gravitational potential kl + # horizontal displacement ll for n in ('hl','kl','ll'): love[n] = np.zeros((LMAX+1)) - #-- check if needing to interpolate between degrees + # check if needing to interpolate between degrees flag = np.ones((LMAX+1),dtype=bool) - #-- for each line in the file (skipping header lines) + # for each line in the file (skipping header lines) for file_line in file_contents[HEADER:]: - #-- find numerical instances in line - #-- replacing fortran double precision exponential + # find numerical instances in line + # replacing fortran double precision exponential love_numbers = rx.findall(file_line.replace('D','E')) - #-- spherical harmonic degree + # spherical harmonic degree l = np.int64(love_numbers[COLUMNS.index('l')]) - #-- truncate to spherical harmonic degree LMAX + # truncate to spherical harmonic degree LMAX if (l <= LMAX): - #-- convert love numbers to float - #-- vertical displacement hl - #-- gravitational potential kl - #-- horizontal displacement ll + # convert love numbers to float + # vertical displacement hl + # gravitational potential kl + # horizontal displacement ll for n in ('hl','kl','ll'): love[n][l] = np.float64(love_numbers[COLUMNS.index(n)]) - #-- set interpolation flag for degree + # set interpolation flag for degree flag[l] = False - #-- if needing to linearly interpolate love numbers + # if needing to linearly interpolate love numbers if np.any(flag): - #-- linearly interpolate each load love number following Wahr (1998) + # linearly interpolate each load love number following Wahr (1998) for n in ('hl','kl','ll'): 
love[n][flag] = np.interp(love['l'][flag], love['l'][~flag], love[n][~flag]) - #-- if needing to linearly extrapolate love numbers - #-- NOTE: use caution if extrapolating far beyond the - #-- maximum degree of the love numbers dataset + # if needing to linearly extrapolate love numbers + # NOTE: use caution if extrapolating far beyond the + # maximum degree of the love numbers dataset for lint in range(l,LMAX+1): - #-- linearly extrapolate each load love number + # linearly extrapolate each load love number for n in ('hl','kl','ll'): love[n][lint] = 2.0*love[n][lint-1] - love[n][lint-2] - #-- calculate isomorphic parameters for different reference frames - #-- From Blewitt (2003), Wahr (1998), Trupin (1992) and Farrell (1972) + # calculate isomorphic parameters for different reference frames + # From Blewitt (2003), Wahr (1998), Trupin (1992) and Farrell (1972) if (REFERENCE.upper() == 'CF'): - #-- Center of Surface Figure + # Center of Surface Figure alpha = (love['hl'][1] + 2.0*love['ll'][1])/3.0 elif (REFERENCE.upper() == 'CL'): - #-- Center of Surface Lateral Figure + # Center of Surface Lateral Figure alpha = love['ll'][1].copy() elif (REFERENCE.upper() == 'CH'): - #-- Center of Surface Height Figure + # Center of Surface Height Figure alpha = love['hl'][1].copy() elif (REFERENCE.upper() == 'CM'): - #-- Center of Mass of Earth System + # Center of Mass of Earth System alpha = 1.0 elif (REFERENCE.upper() == 'CE'): - #-- Center of Mass of Solid Earth + # Center of Mass of Solid Earth alpha = 0.0 else: raise Exception(f'Invalid Reference Frame {REFERENCE}') - #-- apply isomorphic parameters + # apply isomorphic parameters for n in ('hl','kl','ll'): love[n][1] -= alpha - #-- return love numbers in output format + # return love numbers in output format if (FORMAT == 'dict'): return love elif (FORMAT == 'tuple'): @@ -253,7 +253,7 @@ def read_love_numbers(love_numbers_file, LMAX=None, HEADER=2, elif (FORMAT == 'zip'): return zip(love['hl'], love['kl'], love['ll']) -#-- PURPOSE: read input file and extract contents +# PURPOSE: read input file and extract contents def extract_love_numbers(love_numbers_file): """ Read load love number file and extract contents @@ -263,21 +263,21 @@ def extract_love_numbers(love_numbers_file): love_numbers_file: str Elastic load Love numbers file """ - #-- check if input love numbers are a string or bytesIO object + # check if input love numbers are a string or bytesIO object if isinstance(love_numbers_file, str): - #-- tilde expansion of load love number data file + # tilde expansion of load love number data file love_numbers_file = os.path.expanduser(love_numbers_file) - #-- check that load love number data file is present in file system + # check that load love number data file is present in file system if not os.access(love_numbers_file, os.F_OK): raise FileNotFoundError(f'{love_numbers_file} not found') - #-- Input load love number data file and read contents + # Input load love number data file and read contents with open(love_numbers_file, mode='r', encoding='utf8') as f: return f.read().splitlines() elif isinstance(love_numbers_file, io.IOBase): - #-- read contents from load love number data + # read contents from load love number data return love_numbers_file.read().decode('utf8').splitlines() -#-- PURPOSE: read load love numbers for a range of spherical harmonic degrees +# PURPOSE: read load love numbers for a range of spherical harmonic degrees def load_love_numbers(LMAX, LOVE_NUMBERS=0, REFERENCE='CF', FORMAT='tuple'): """ Wrapper function for 
reading PREM load Love numbers for a @@ -339,31 +339,31 @@ def load_love_numbers(LMAX, LOVE_NUMBERS=0, REFERENCE='CF', FORMAT='tuple'): *Computers & Geosciences*, 49, 190--199, (2012). `doi: 10.1016/j.cageo.2012.06.022 `_ """ - #-- load love numbers file + # load love numbers file if (LOVE_NUMBERS == 0): - #-- PREM outputs from Han and Wahr (1995) - #-- https://doi.org/10.1111/j.1365-246X.1995.tb01819.x + # PREM outputs from Han and Wahr (1995) + # https://doi.org/10.1111/j.1365-246X.1995.tb01819.x love_numbers_file = get_data_path(['data','love_numbers']) header = 2 columns = ['l','hl','kl','ll'] elif (LOVE_NUMBERS == 1): - #-- PREM outputs from Gegout (2005) - #-- http://gemini.gsfc.nasa.gov/aplo/ + # PREM outputs from Gegout (2005) + # http://gemini.gsfc.nasa.gov/aplo/ love_numbers_file = get_data_path(['data','Load_Love2_CE.dat']) header = 3 columns = ['l','hl','ll','kl'] elif (LOVE_NUMBERS == 2): - #-- PREM outputs from Wang et al. (2012) - #-- https://doi.org/10.1016/j.cageo.2012.06.022 + # PREM outputs from Wang et al. (2012) + # https://doi.org/10.1016/j.cageo.2012.06.022 love_numbers_file = get_data_path(['data','PREM-LLNs-truncated.dat']) header = 1 columns = ['l','hl','ll','kl','nl','nk'] - #-- log load love numbers file if debugging + # log load love numbers file if debugging logging.debug(f'Reading Love numbers file: {love_numbers_file}') - #-- LMAX of load love numbers from Han and Wahr (1995) is 696. - #-- from Wahr (2007) linearly interpolating kl works - #-- however, as we are linearly extrapolating out, do not make - #-- LMAX too much larger than 696 - #-- read arrays of kl, hl, and ll Love Numbers + # LMAX of load love numbers from Han and Wahr (1995) is 696. + # from Wahr (2007) linearly interpolating kl works + # however, as we are linearly extrapolating out, do not make + # LMAX too much larger than 696 + # read arrays of kl, hl, and ll Love Numbers return read_love_numbers(love_numbers_file, LMAX=LMAX, HEADER=header, COLUMNS=columns, REFERENCE=REFERENCE, FORMAT=FORMAT) diff --git a/gravity_toolkit/read_swenson_geocenter.py b/gravity_toolkit/read_swenson_geocenter.py index ca39f62c..b05ae4f5 100755 --- a/gravity_toolkit/read_swenson_geocenter.py +++ b/gravity_toolkit/read_swenson_geocenter.py @@ -61,7 +61,7 @@ import warnings import gravity_toolkit.geocenter -#-- PURPOSE: read geocenter data from Sean Swenson +# PURPOSE: read geocenter data from Sean Swenson def read_swenson_geocenter(geocenter_file, HEADER=True): """ Reads monthly geocenter files computed by Sean Swenson using @@ -93,5 +93,5 @@ def read_swenson_geocenter(geocenter_file, HEADER=True): # call renamed version to not break workflows DEG1 = gravity_toolkit.geocenter().from_swenson(geocenter_file, header=HEADER) - #-- return the geocenter solutions from Sean Swenson + # return the geocenter solutions from Sean Swenson return DEG1.to_dict() diff --git a/gravity_toolkit/read_tellus_geocenter.py b/gravity_toolkit/read_tellus_geocenter.py index 7ffc5cf6..986911a1 100644 --- a/gravity_toolkit/read_tellus_geocenter.py +++ b/gravity_toolkit/read_tellus_geocenter.py @@ -80,7 +80,7 @@ import warnings import gravity_toolkit.geocenter -#-- PURPOSE: read geocenter data from PO.DAAC +# PURPOSE: read geocenter data from PO.DAAC def read_tellus_geocenter(geocenter_file, HEADER=True, JPL=False): """ Reads monthly geocenter files computed by JPL Tellus using @@ -120,5 +120,5 @@ def read_tellus_geocenter(geocenter_file, HEADER=True, JPL=False): # call renamed version to not break workflows DEG1 = 
gravity_toolkit.geocenter().from_tellus(geocenter_file, header=HEADER, JPL=JPL) - #-- return the JPL GRACE Tellus geocenter solutions + # return the JPL GRACE Tellus geocenter solutions return DEG1.to_dict() diff --git a/gravity_toolkit/savitzky_golay.py b/gravity_toolkit/savitzky_golay.py index 87424956..7a0eebfe 100644 --- a/gravity_toolkit/savitzky_golay.py +++ b/gravity_toolkit/savitzky_golay.py @@ -105,44 +105,44 @@ def savitzky_golay(t_in, y_in, WINDOW=None, ORDER=2, DERIV=0, B.P. Flannery. Cambridge University Press, (2007). """ - #-- verify that WINDOW is positive, odd and greater than ORDER+1 + # verify that WINDOW is positive, odd and greater than ORDER+1 if WINDOW is None: WINDOW = ORDER + -1*(ORDER % 2) + 3 if WINDOW % 2 != 1 or WINDOW < 1: raise ValueError("WINDOW size must be a positive odd number") if WINDOW < ORDER + 2: raise ValueError("WINDOW is too small for the polynomials order") - #-- remove any singleton dimensions + # remove any singleton dimensions t_in = np.squeeze(t_in) y_in = np.squeeze(y_in) nmax = len(t_in) - #-- order range + # order range order_range = np.arange(ORDER+1) - #-- filter half-window + # filter half-window half_window = (WINDOW - 1) // 2 - #-- output time-series (removing half-windows on ends) + # output time-series (removing half-windows on ends) t_out = t_in[half_window:nmax-half_window] - #-- output smoothed timeseries (or derivative) + # output smoothed timeseries (or derivative) y_out = np.zeros((nmax-2*half_window)) y_err = np.zeros((nmax-2*half_window)) for n in range(0, (nmax-(2*half_window))): yran = y_in[n + np.arange(0, 2*half_window+1)] - #-- Vandermonde matrix for the time-series + # Vandermonde matrix for the time-series b = np.mat([[(t_in[k]-t_in[n+half_window])**i for i in order_range] for k in range(n, n+2*half_window+1)]) - #-- compute the pseudoinverse of the design matrix + # compute the pseudoinverse of the design matrix m=np.linalg.pinv(b).A[DERIV]*RATE**DERIV*scipy.special.factorial(DERIV) - #-- pad the signal at the extremes with values taken from the signal + # pad the signal at the extremes with values taken from the signal firstvals = yran[0] - np.abs(yran[1:half_window+1][::-1] - yran[0]) lastvals = yran[-1] + np.abs(yran[-half_window-1:-1][::-1] - yran[-1]) yn = np.concatenate((firstvals, yran, lastvals)) - #-- compute the convolution and use middle value + # compute the convolution and use middle value y_out[n] = np.convolve(m[::-1], yn, mode='valid')[half_window] if (DATA_ERR != 0): - #-- if data error is known and of equal value + # if data error is known and of equal value P_err = DATA_ERR*np.ones((4*half_window+1)) - #-- compute the convolution and use middle value + # compute the convolution and use middle value y_err[n] = np.sqrt(np.convolve(m[::-1]**2, P_err**2, mode='valid')[half_window]) diff --git a/gravity_toolkit/sea_level_equation.py b/gravity_toolkit/sea_level_equation.py index 44c13006..00fd2f38 100644 --- a/gravity_toolkit/sea_level_equation.py +++ b/gravity_toolkit/sea_level_equation.py @@ -123,7 +123,7 @@ from gravity_toolkit.plm_holmes import plm_holmes from gravity_toolkit.units import units -#-- PURPOSE: Computes Sea Level Fingerprints including polar motion feedback +# PURPOSE: Computes Sea Level Fingerprints including polar motion feedback def sea_level_equation(loadClm, loadSlm, glon, glat, land_function, LMAX=0, LOVE=None, BODY_TIDE_LOVE=0, FLUID_LOVE=0, POLAR=True, ITERATIONS=6, PLM=None, FILL_VALUE=0, ASTYPE=np.longdouble, SCALE=1e-280, **kwargs): @@ -223,166 +223,166 @@ def 
sea_level_equation(loadClm, loadSlm, glon, glat, land_function, LMAX=0, (1985). `doi: 10.1029/JB090iB11p09363 `_ """ - #-- dimensions of land function + # dimensions of land function nphi,nth = np.shape(land_function) - #-- calculate colatitude and longitude in radians + # calculate colatitude and longitude in radians th = (90.0 - glat)*np.pi/180.0 phi = np.squeeze(glon*np.pi/180.0) - #-- calculate ocean function from land function + # calculate ocean function from land function ocean_function = 1.0 - land_function - #-- indices of the ocean function + # indices of the ocean function ii,jj = np.nonzero(ocean_function) - #-- extract arrays of kl, hl, and ll Love Numbers + # extract arrays of kl, hl, and ll Love Numbers hl,kl,ll = LOVE - #-- density of water [g/cm^3] + # density of water [g/cm^3] rho_water = 1.0 - #-- Earth Parameters + # Earth Parameters factors = units(lmax=LMAX) - #-- Average Density of the Earth [g/cm^3] + # Average Density of the Earth [g/cm^3] rho_e = factors.rho_e - #-- Average Radius of the Earth [cm] + # Average Radius of the Earth [cm] rad_e = factors.rad_e - #-- different treatments of the body tide Love numbers of degree 2 + # different treatments of the body tide Love numbers of degree 2 if isinstance(BODY_TIDE_LOVE,(list,tuple)): - #-- use custom defined values + # use custom defined values k2b,h2b = BODY_TIDE_LOVE elif (BODY_TIDE_LOVE == 0): - #-- Wahr (1981) and Wahr (1985) values from PREM + # Wahr (1981) and Wahr (1985) values from PREM k2b = 0.298 h2b = 0.604 elif (BODY_TIDE_LOVE == 1): - #-- Farrell (1972) values from Gutenberg-Bullen oceanic mantle model + # Farrell (1972) values from Gutenberg-Bullen oceanic mantle model k2b = 0.3055 h2b = 0.6149 - #-- different treatments of the fluid Love number of gravitational potential + # different treatments of the fluid Love number of gravitational potential if isinstance(FLUID_LOVE,(list,tuple)): - #-- use custom defined value + # use custom defined value klf, = FLUID_LOVE elif (FLUID_LOVE == 0): - #-- Han and Wahr (1989) fluid love number - #-- klf = 3.0*G*(C-A)/(rad_e**5*omega**2) - G = 6.6740e-11#-- gravitational constant [m^3/(kg*s^2)] - Re = 6.371e6#-- mean radius of the Earth [m] - A_moi = 8.0077e+37#-- mean equatorial moment of inertia [kg m^2] - omega = 7.292115e-5#-- mean rotation rate of the Earth [radians/s] - ef = 0.00328475#-- dynamical ellipticity (C_moi-A_moi)/A_moi - C_moi = A_moi*(1.0 + ef)#-- mean polar moment of inertia [kg m^2] + # Han and Wahr (1989) fluid love number + # klf = 3.0*G*(C-A)/(rad_e**5*omega**2) + G = 6.6740e-11# gravitational constant [m^3/(kg*s^2)] + Re = 6.371e6# mean radius of the Earth [m] + A_moi = 8.0077e+37# mean equatorial moment of inertia [kg m^2] + omega = 7.292115e-5# mean rotation rate of the Earth [radians/s] + ef = 0.00328475# dynamical ellipticity (C_moi-A_moi)/A_moi + C_moi = A_moi*(1.0 + ef)# mean polar moment of inertia [kg m^2] klf = 3.0*G*(C_moi-A_moi)*(Re**-5)*(omega**-2) klf = 0.00328475/0.00348118 if (FLUID_LOVE == 1): - #-- Munk and MacDonald (1960) secular love number with IERS and PREM values - GM = 3.98004418e14#-- geocentric gravitational constant [m^3/s^2] - Re = 6.371e6#-- mean radius of the Earth [m] - omega = 7.292115e-5#-- mean rotation rate of the Earth [radians/s] - C_moi = 0.33068#-- reduced polar moment of inertia (C/Ma^2) - H = 1.0/305.51#-- precessional constant (C_moi-A_moi)/C_moi + # Munk and MacDonald (1960) secular love number with IERS and PREM values + GM = 3.98004418e14# geocentric gravitational constant [m^3/s^2] + Re = 6.371e6# 
mean radius of the Earth [m] + omega = 7.292115e-5# mean rotation rate of the Earth [radians/s] + C_moi = 0.33068# reduced polar moment of inertia (C/Ma^2) + H = 1.0/305.51# precessional constant (C_moi-A_moi)/C_moi klf = 3.0*GM*H*C_moi/(Re**3*omega**2) elif (FLUID_LOVE == 2): - #-- Munk and MacDonald (1960) fluid love number with IERS and WGS84 values - flat = 1.0/298.257223563#-- flattening of the WGS84 ellipsoid - Re = 6.371e6#-- mean radius of the Earth [m] - omega = 7.292115e-5#-- mean rotation rate of the Earth [radians/s] - ge = 9.80665#-- standard gravity (mean gravitational acceleration) [m/s^2] + # Munk and MacDonald (1960) fluid love number with IERS and WGS84 values + flat = 1.0/298.257223563# flattening of the WGS84 ellipsoid + Re = 6.371e6# mean radius of the Earth [m] + omega = 7.292115e-5# mean rotation rate of the Earth [radians/s] + ge = 9.80665# standard gravity (mean gravitational acceleration) [m/s^2] klf = 2.0*flat*ge/(omega**2*Re) - 1.0 elif (FLUID_LOVE == 3): - #-- Fluid love number from Lambeck (1980) - #-- klf = 3.0*(C-A)*G/(omega**2*rad_e**5) = 3.0*GM*C20/(omega**2*rad_e**3) - G = 6.672e-11#-- gravitational constant [m^3/(kg*s^2)] - M = 5.974e+24#-- mass of the Earth [kg] - R = 6.378140e6#-- equatorial radius of the Earth [m] - Re = 6.3710121e6#-- mean radius of the Earth [m] - omega = 7.292115e-5#-- mean rotation rate of the Earth [radians/s] - A_moi = 0.3295*M*R**2#-- mean equatorial moment of inertia [kg m^2] - H = 0.003275#-- precessional constant (C_moi-A_moi)/C_moi - C_moi = -A_moi/(H-1.0)#-- mean polar moment of inertia [kg m^2] + # Fluid love number from Lambeck (1980) + # klf = 3.0*(C-A)*G/(omega**2*rad_e**5) = 3.0*GM*C20/(omega**2*rad_e**3) + G = 6.672e-11# gravitational constant [m^3/(kg*s^2)] + M = 5.974e+24# mass of the Earth [kg] + R = 6.378140e6# equatorial radius of the Earth [m] + Re = 6.3710121e6# mean radius of the Earth [m] + omega = 7.292115e-5# mean rotation rate of the Earth [radians/s] + A_moi = 0.3295*M*R**2# mean equatorial moment of inertia [kg m^2] + H = 0.003275# precessional constant (C_moi-A_moi)/C_moi + C_moi = -A_moi/(H-1.0)# mean polar moment of inertia [kg m^2] klf = 3.0*(C_moi-A_moi)*G*(omega**-2)*(Re**-5) klf = 0.942 - #-- calculate coefh and coefp for each degree and order - #-- see equation 11 from Tamisiea et al (2010) + # calculate coefh and coefp for each degree and order + # see equation 11 from Tamisiea et al (2010) coefh = np.zeros((LMAX+1,LMAX+1)) coefp = np.zeros((LMAX+1,LMAX+1)) for l in range(LMAX+1): - #-- coefh and coefp will be the same for all orders except for degree 2 - #-- and order 1 (if POLAR motion feedback is included) + # coefh and coefp will be the same for all orders except for degree 2 + # and order 1 (if POLAR motion feedback is included) m = np.arange(0,l+1) coefh[l,m] = 3.0*rho_water*(1.0 + kl[l] - hl[l])/rho_e/np.float64(2*l+1) coefp[l,m] = (1.0 + kl[l] - hl[l])/(kl[l] + 1.0) - #-- if degree 2 and POLAR parameter is set + # if degree 2 and POLAR parameter is set if (l == 2) and POLAR: - #-- calculate coefficient for polar motion feedback and add to coefs - #-- For small perturbations in rotation vector: driving potential - #-- will be dominated by degree two and order one polar wander - #-- effects (quadrantal geometry effects) (Kendall et al., 2005) + # calculate coefficient for polar motion feedback and add to coefs + # For small perturbations in rotation vector: driving potential + # will be dominated by degree two and order one polar wander + # effects (quadrantal geometry effects) (Kendall 
et al., 2005) coefpmf = (1.0 + k2b - h2b)*(1.0 + kl[l])/(klf - k2b) - #-- add effects of polar motion feedback to order 1 coefficients + # add effects of polar motion feedback to order 1 coefficients coefh[l,1] += 3.0*rho_water*coefpmf/rho_e/np.float64(2*l+1) coefp[l,1] += coefpmf/(kl[l] + 1.0) - #-- added option to precompute plms to improve computational speed + # added option to precompute plms to improve computational speed if PLM is None: - #-- calculate Legendre polynomials using Holmes and Featherstone relation + # calculate Legendre polynomials using Holmes and Featherstone relation PLM, dPLM = plm_holmes(LMAX, np.cos(th)) - #-- calculate sin of colatitudes + # calculate sin of colatitudes gth,gphi = np.meshgrid(th, phi) u = np.sin(gth[ii,jj]) - #-- indices of spherical harmonics for calculating eps + # indices of spherical harmonics for calculating eps l1,m1 = np.tril_indices(LMAX+1) - #-- total mass of the surface mass load [g] from harmonics + # total mass of the surface mass load [g] from harmonics tmass = 4.0*np.pi*(rad_e**3.0)*rho_e*loadClm[0,0]/3.0 - #-- convert ocean function into a series of spherical harmonics + # convert ocean function into a series of spherical harmonics ocean_Ylms = gen_harmonics(ocean_function,glon,glat,LMAX=LMAX,PLM=PLM) - #-- total area of ocean calculated by integrating the ocean function + # total area of ocean calculated by integrating the ocean function ocean_area = 4.0*np.pi*ocean_Ylms.clm[0,0] - #-- uniform distribution as initial guess of the ocean change following - #-- Mitrovica and Peltier (1991) doi:10.1029/91JB01284 - #-- sea level height change + # uniform distribution as initial guess of the ocean change following + # Mitrovica and Peltier (1991) doi:10.1029/91JB01284 + # sea level height change sea_height = -tmass/rho_water/rad_e**2/ocean_area - #-- if verbose output: print ocean area and uniform sea level height + # if verbose output: print ocean area and uniform sea level height logging.info(f'Total Ocean Area: {ocean_area:0.10g}') logging.info(f'Uniform Ocean Height: {sea_height:0.10g}') - #-- distribute sea height over ocean harmonics + # distribute sea height over ocean harmonics height_Ylms = ocean_Ylms.scale(sea_height) - #-- iterate solutions until convergence or reaching total iterations + # iterate solutions until convergence or reaching total iterations n_iter = 1 - #-- use maximum eps values from Mitrovica and Peltier (1991) - #-- Milne and Mitrovica (1998) doi:10.1046/j.1365-246X.1998.1331455.x + # use maximum eps values from Mitrovica and Peltier (1991) + # Milne and Mitrovica (1998) doi:10.1046/j.1365-246X.1998.1331455.x eps = np.inf eps_max = 1e-4 while (eps > eps_max) and (n_iter <= ITERATIONS): - #-- allocate for sea level field of iteration + # allocate for sea level field of iteration sea_level = np.zeros((nphi,nth)) - #-- calculate combined spherical harmonics for Clenshaw summation + # calculate combined spherical harmonics for Clenshaw summation clm1 = coefh*height_Ylms.clm + rad_e*coefp*loadClm slm1 = coefh*height_Ylms.slm + rad_e*coefp*loadSlm - #-- calculate clenshaw summations over colatitudes + # calculate clenshaw summations over colatitudes s_m_c = np.zeros((nth,LMAX*2+2)) for m in range(LMAX, -1, -1): s_m_c[:,2*m:2*m+2] = clenshaw_s_m(np.cos(th), m, clm1, slm1, LMAX, ASTYPE=ASTYPE, SCALE=SCALE) - #-- calculate cos(phi) + # calculate cos(phi) cos_phi_2 = 2.0*np.cos(phi) - #-- matrix of cos/sin m*phi summation + # matrix of cos/sin m*phi summation cos_m_phi = np.zeros((nphi,LMAX+2),dtype=ASTYPE) sin_m_phi 
= np.zeros((nphi,LMAX+2),dtype=ASTYPE) - #-- initialize matrix with values at lmax+1 and lmax + # initialize matrix with values at lmax+1 and lmax cos_m_phi[:,LMAX+1] = np.cos(ASTYPE(LMAX + 1)*phi) sin_m_phi[:,LMAX+1] = np.sin(ASTYPE(LMAX + 1)*phi) cos_m_phi[:,LMAX] = np.cos(ASTYPE(LMAX)*phi) sin_m_phi[:,LMAX] = np.sin(ASTYPE(LMAX)*phi) - #-- calculate summation + # calculate summation gc=np.multiply(s_m_c[np.newaxis,:,2*LMAX],cos_m_phi[:,np.newaxis,LMAX]) gs=np.multiply(s_m_c[np.newaxis,:,2*LMAX+1],sin_m_phi[:,np.newaxis,LMAX]) s_m = gc[ii,jj] + gs[ii,jj] - #-- iterate to calculate complete summation + # iterate to calculate complete summation for m in range(LMAX-1, 0, -1): cos_m_phi[:,m] = cos_phi_2*cos_m_phi[:,m+1] - cos_m_phi[:,m+2] sin_m_phi[:,m] = cos_phi_2*sin_m_phi[:,m+1] - sin_m_phi[:,m+2] @@ -390,66 +390,66 @@ def sea_level_equation(loadClm, loadSlm, glon, glat, land_function, LMAX=0, gc=np.multiply(s_m_c[np.newaxis,:,2*m],cos_m_phi[:,np.newaxis,m]) gs=np.multiply(s_m_c[np.newaxis,:,2*m+1],sin_m_phi[:,np.newaxis,m]) s_m = a_m*u*s_m + gc[ii,jj] + gs[ii,jj] - #-- calculate new sea level for iteration + # calculate new sea level for iteration gsmc,gcmp = np.meshgrid(s_m_c[:,0],cos_m_phi[:,0]) sea_level[ii,jj] = np.sqrt(3.0)*u*s_m + gsmc[ii,jj] - #-- calculate spherical harmonic field for iteration + # calculate spherical harmonic field for iteration Ylms = gen_harmonics(sea_level, glon, glat, LMAX=LMAX, PLM=PLM) - #-- total sea level height for iteration - #-- integrated total rmass will differ as sea_level is only over ocean - #-- whereas the crustal and gravitational effects are global + # total sea level height for iteration + # integrated total rmass will differ as sea_level is only over ocean + # whereas the crustal and gravitational effects are global rmass = 4.0*np.pi*Ylms.clm[0,0] - #-- mass anomaly converted to ocean height to ensure mass conservation - #-- (this is the gravitational perturbation (Delta Phi)/g) + # mass anomaly converted to ocean height to ensure mass conservation + # (this is the gravitational perturbation (Delta Phi)/g) sea_height = (-tmass/rho_water/rad_e**2 - rmass)/ocean_area - #-- if verbose output: print iteration, mass and anomaly for convergence + # if verbose output: print iteration, mass and anomaly for convergence logging.info(f'Iteration: {n_iter:d}') logging.info(f'Integrated Ocean Height: {rmass:0.10g}') logging.info(f'Difference from Initial Height: {sea_height:0.10g}') - #-- geoid component is split into two parts (Kendall 2005) - #-- this part is the spatially uniform shift in the geoid that is - #-- constrained by invoking conservation of mass of the surface load - #-- Equation 48 of Mitrovica and Peltier (1991) - #-- add difference to total sea level field to force mass conservation + # geoid component is split into two parts (Kendall 2005) + # this part is the spatially uniform shift in the geoid that is + # constrained by invoking conservation of mass of the surface load + # Equation 48 of Mitrovica and Peltier (1991) + # add difference to total sea level field to force mass conservation sea_level += sea_height*ocean_function[:,:] uniform_Ylms = ocean_Ylms.scale(sea_height) Ylms.add(uniform_Ylms) - #-- calculate eps to determine if solution is appropriately converged + # calculate eps to determine if solution is appropriately converged mod1 = np.sqrt(height_Ylms.clm**2 + height_Ylms.slm**2) mod2 = np.sqrt(Ylms.clm**2 + Ylms.slm**2) eps = np.abs(np.sum(mod2[l1,m1] - mod1[l1,m1])/np.sum(mod1[l1,m1])) - #-- save height harmonics for use 
in the next iteration + # save height harmonics for use in the next iteration height_Ylms = Ylms.copy() - #-- add 1 to n_iter + # add 1 to n_iter n_iter += 1 - #-- calculate final total mass for sanity check + # calculate final total mass for sanity check omass = 4.0*np.pi*(rad_e**2.0)*rho_water*height_Ylms.clm[0,0] - #-- if verbose output: sanity check of masses + # if verbose output: sanity check of masses logging.info('Original Total Ocean Mass: {0:0.10g}'.format(-tmass/1e15)) logging.info('Final Iterated Ocean Mass: {0:0.10g}'.format(omass/1e15)) - #-- set final invalid points to fill value if applicable + # set final invalid points to fill value if applicable if (FILL_VALUE != 0): ii,jj = np.nonzero(land_function) sea_level[ii,jj] = FILL_VALUE - #-- return the sea level spatial field + # return the sea level spatial field return sea_level -#-- PURPOSE: compute Clenshaw summation of the fully normalized associated -#-- Legendre's function for constant order m +# PURPOSE: compute Clenshaw summation of the fully normalized associated +# Legendre's function for constant order m def clenshaw_s_m(t, m, clm1, slm1, lmax, ASTYPE=np.longdouble, SCALE=1e-280): - #-- allocate for output matrix + # allocate for output matrix N = len(t) s_m = np.zeros((N,2),dtype=ASTYPE) - #-- scaling to prevent overflow + # scaling to prevent overflow clm = SCALE*clm1.astype(ASTYPE) slm = SCALE*slm1.astype(ASTYPE) - #-- convert lmax and m to float + # convert lmax and m to float lm = ASTYPE(lmax) mm = ASTYPE(m) if (m == lmax): @@ -489,5 +489,5 @@ def clenshaw_s_m(t, m, clm1, slm1, lmax, ASTYPE=np.longdouble, SCALE=1e-280): s_mm_c_pre_2 = np.copy(s_mm_c_pre_1) s_mm_c_pre_1 = np.copy(s_mm_c) s_m[:,0] = np.copy(s_mm_c) - #-- return rescaled s_m + # return rescaled s_m return s_m/SCALE diff --git a/gravity_toolkit/spatial.py b/gravity_toolkit/spatial.py index 544fc7b7..d4fdddd3 100644 --- a/gravity_toolkit/spatial.py +++ b/gravity_toolkit/spatial.py @@ -103,13 +103,13 @@ class spatial(object): """ np.seterr(invalid='ignore') def __init__(self, **kwargs): - #-- set default keyword arguments + # set default keyword arguments kwargs.setdefault('spacing',[None,None]) kwargs.setdefault('nlat',None) kwargs.setdefault('nlon',None) kwargs.setdefault('extent',[None]*4) kwargs.setdefault('fill_value',None) - #-- set default class attributes + # set default class attributes self.data=None self.mask=None self.lon=None @@ -133,15 +133,15 @@ def case_insensitive_filename(self,filename): filename: str input filename """ - #-- check if filename is open file object + # check if filename is open file object if isinstance(filename, io.IOBase): self.filename = copy.copy(filename) else: - #-- tilde-expand input filename + # tilde-expand input filename self.filename = os.path.expanduser(filename) - #-- check if file presently exists with input case + # check if file presently exists with input case if not os.access(self.filename,os.F_OK): - #-- search for filename without case dependence + # search for filename without case dependence basename = os.path.basename(filename) directory = os.path.dirname(os.path.expanduser(filename)) f = [f for f in os.listdir(directory) if re.match(basename,f,re.I)] @@ -149,7 +149,7 @@ def case_insensitive_filename(self,filename): errmsg = f'{filename} not found in file system' raise FileNotFoundError(errmsg) self.filename = os.path.join(directory,f.pop()) - #-- print filename + # print filename logging.debug(self.filename) return self @@ -176,70 +176,70 @@ def from_ascii(self, filename, date=True, **kwargs): 
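A minimal usage sketch for the two routines touched above, `load_love_numbers` and `sea_level_equation`; this is illustrative only and not part of the patch. The truncation degree, grid, and load harmonics below are hypothetical placeholders, and the call signatures follow the definitions shown in these hunks:

import numpy as np
from gravity_toolkit.read_love_numbers import load_love_numbers
from gravity_toolkit.sea_level_equation import sea_level_equation

# hypothetical truncation degree (kept well below the LMAX=696 limit
# of the Han and Wahr (1995) load Love numbers noted above)
LMAX = 240
# read arrays of hl, kl and ll load Love numbers (default FORMAT='tuple')
hl, kl, ll = load_love_numbers(LMAX, LOVE_NUMBERS=0, REFERENCE='CF')
# hypothetical 1-degree global grid, longitude-major to match the
# (nphi, nth) ordering of land_function in sea_level_equation
glon = np.arange(0.5, 360.5, 1.0)
glat = np.arange(89.5, -90.5, -1.0)
# placeholder land function (1 over land, 0 over ocean) and placeholder
# load harmonics (set loadClm[0,0] nonzero for a real net mass change)
land_function = np.zeros((len(glon), len(glat)))
loadClm = np.zeros((LMAX+1, LMAX+1))
loadSlm = np.zeros((LMAX+1, LMAX+1))
# iterate the sea level equation with polar motion feedback
sea_level = sea_level_equation(loadClm, loadSlm, glon, glat,
    land_function, LMAX=LMAX, LOVE=(hl, kl, ll), POLAR=True,
    ITERATIONS=6, FILL_VALUE=0)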
verbose: bool, default False print file and variable information """ - #-- set filename + # set filename self.case_insensitive_filename(filename) - #-- set default parameters + # set default parameters kwargs.setdefault('verbose',False) kwargs.setdefault('compression',None) kwargs.setdefault('columns',['lon','lat','data','time']) kwargs.setdefault('header',0) - #-- open the ascii file and extract contents + # open the ascii file and extract contents logging.info(self.filename) if (kwargs['compression'] == 'gzip'): - #-- read input ascii data from gzip compressed file and split lines + # read input ascii data from gzip compressed file and split lines with gzip.open(self.filename,'r') as f: file_contents = f.read().decode('ISO-8859-1').splitlines() elif (kwargs['compression'] == 'zip'): - #-- read input ascii data from zipped file and split lines + # read input ascii data from zipped file and split lines base,_ = os.path.splitext(self.filename) with zipfile.ZipFile(self.filename) as z: file_contents = z.read(base).decode('ISO-8859-1').splitlines() elif (kwargs['compression'] == 'bytes'): - #-- read input file object and split lines + # read input file object and split lines file_contents = self.filename.read().splitlines() else: - #-- read input ascii file (.txt, .asc) and split lines + # read input ascii file (.txt, .asc) and split lines with open(self.filename, mode='r', encoding='utf8') as f: file_contents = f.read().splitlines() - #-- compile regular expression operator for extracting numerical values - #-- from input ascii files of spatial data + # compile regular expression operator for extracting numerical values + # from input ascii files of spatial data regex_pattern = r'[-+]?(?:(?:\d*\.\d+)|(?:\d+\.?))(?:[EeD][+-]?\d+)?' rx = re.compile(regex_pattern, re.VERBOSE) - #-- output spatial dimensions + # output spatial dimensions if (None not in self.extent): self.lat = np.linspace(self.extent[3],self.extent[2],self.shape[0]) self.lon = np.linspace(self.extent[0],self.extent[1],self.shape[1]) else: self.lat = np.zeros((self.shape[0])) self.lon = np.zeros((self.shape[1])) - #-- output spatial data + # output spatial data self.data = np.zeros((self.shape[0],self.shape[1])) self.mask = np.zeros((self.shape[0],self.shape[1]),dtype=bool) - #-- remove time from list of column names if not date + # remove time from list of column names if not date columns = [c for c in kwargs['columns'] if (c != 'time')] - #-- extract spatial data array and convert to matrix - #-- for each line in the file + # extract spatial data array and convert to matrix + # for each line in the file header = kwargs['header'] for line in file_contents[header:]: - #-- extract columns of interest and assign to dict - #-- convert fortran exponentials if applicable + # extract columns of interest and assign to dict + # convert fortran exponentials if applicable d = {c:r.replace('D','E') for c,r in zip(columns,rx.findall(line))} - #-- convert line coordinates to integers + # convert line coordinates to integers ilon = np.int64(np.float64(d['lon'])/self.spacing[0]) ilat = np.int64((90.0-np.float64(d['lat']))//self.spacing[1]) self.data[ilat,ilon] = np.float64(d['data']) self.mask[ilat,ilon] = False self.lon[ilon] = np.float64(d['lon']) self.lat[ilat] = np.float64(d['lat']) - #-- if the ascii file contains date variables + # if the ascii file contains date variables if date: self.time = np.array(d['time'],dtype='f') self.month = calendar_to_grace(self.time) - #-- if the ascii file contains date variables + # if the ascii file 
contains date variables if date: - #-- adjust months to fix special cases if necessary + # adjust months to fix special cases if necessary self.month = adjust_months(self.month) - #-- get spacing and dimensions + # get spacing and dimensions self.update_spacing() self.update_extents() self.update_dimensions() @@ -275,9 +275,9 @@ def from_netCDF4(self, filename, **kwargs): verbose: bool, default False print file and variable information """ - #-- set filename + # set filename self.case_insensitive_filename(filename) - #-- set default parameters + # set default parameters kwargs.setdefault('date',True) kwargs.setdefault('compression',None) kwargs.setdefault('varname','z') @@ -286,59 +286,59 @@ def from_netCDF4(self, filename, **kwargs): kwargs.setdefault('timename','time') kwargs.setdefault('field_mapping',{}) kwargs.setdefault('verbose',False) - #-- Open the NetCDF4 file for reading + # Open the NetCDF4 file for reading if (kwargs['compression'] == 'gzip'): - #-- read as in-memory (diskless) netCDF4 dataset + # read as in-memory (diskless) netCDF4 dataset with gzip.open(self.filename, mode='r') as f: fileID = netCDF4.Dataset(uuid.uuid4().hex, memory=f.read()) elif (kwargs['compression'] == 'zip'): - #-- read zipped file and extract file into in-memory file object + # read zipped file and extract file into in-memory file object fileBasename,_ = os.path.splitext(os.path.basename(filename)) with zipfile.ZipFile(self.filename) as z: - #-- first try finding a netCDF4 file with same base filename - #-- if none found simply try searching for a netCDF4 file + # first try finding a netCDF4 file with same base filename + # if none found simply try searching for a netCDF4 file try: f,=[f for f in z.namelist() if re.match(fileBasename,f,re.I)] except: f,=[f for f in z.namelist() if re.search(r'\.nc(4)?$',f)] - #-- read bytes from zipfile as in-memory (diskless) netCDF4 dataset + # read bytes from zipfile as in-memory (diskless) netCDF4 dataset fileID = netCDF4.Dataset(uuid.uuid4().hex, memory=z.read(f)) elif (kwargs['compression'] == 'bytes'): - #-- read as in-memory (diskless) netCDF4 dataset + # read as in-memory (diskless) netCDF4 dataset fileID = netCDF4.Dataset(uuid.uuid4().hex, memory=filename.read()) else: - #-- read netCDF4 dataset + # read netCDF4 dataset fileID = netCDF4.Dataset(self.filename, 'r') - #-- Output NetCDF file information + # Output NetCDF file information logging.info(fileID.filepath()) logging.info(list(fileID.variables.keys())) # set automasking fileID.set_auto_mask(False) - #-- list of variable attributes + # list of variable attributes attributes_list = ['description','units','long_name','calendar', 'standard_name','_FillValue','missing_value'] - #-- mapping between output keys and netCDF4 variable names + # mapping between output keys and netCDF4 variable names if not kwargs['field_mapping']: kwargs['field_mapping']['lon'] = kwargs['lonname'] kwargs['field_mapping']['lat'] = kwargs['latname'] kwargs['field_mapping']['data'] = kwargs['varname'] if kwargs['date']: kwargs['field_mapping']['time'] = kwargs['timename'] - #-- for each variable + # for each variable for field,key in kwargs['field_mapping'].items(): - #-- Getting the data from each NetCDF variable - #-- remove singleton dimensions + # Getting the data from each NetCDF variable + # remove singleton dimensions setattr(self, field, np.squeeze(fileID.variables[key][:])) - #-- Getting attributes of included variables + # Getting attributes of included variables self.attributes[field] = {} for attr in attributes_list: - 
#-- try getting the attribute + # try getting the attribute try: self.attributes[field][attr] = \ fileID.variables[key].getncattr(attr) except (KeyError,ValueError,AttributeError): pass - #-- Global attributes + # Global attributes for att_name in ['title','description','reference']: try: ncattr, = [s for s in fileID.ncattrs() @@ -346,24 +346,24 @@ def from_netCDF4(self, filename, **kwargs): self.attributes[att_name] = fileID.getncattr(ncattr) except (ValueError, KeyError, AttributeError): pass - #-- Closing the NetCDF file + # Closing the NetCDF file fileID.close() - #-- switching data array to lat/lon if lon/lat + # switching data array to lat/lon if lon/lat sz = self.data.shape if (self.data.ndim == 2) and (len(self.lon) == sz[0]): self.data = self.data.T - #-- set fill value and mask + # set fill value and mask if '_FillValue' in self.attributes['data'].keys(): self.fill_value = self.attributes['data']['_FillValue'] self.mask = (self.data == self.fill_value) else: self.mask = np.zeros(self.data.shape, dtype=bool) - #-- set GRACE/GRACE-FO month if file has date variables + # set GRACE/GRACE-FO month if file has date variables if kwargs['date']: self.month = calendar_to_grace(self.time) - #-- adjust months to fix special cases if necessary + # adjust months to fix special cases if necessary self.month = adjust_months(self.month) - #-- get spacing and dimensions + # get spacing and dimensions self.update_spacing() self.update_extents() self.update_dimensions() @@ -399,9 +399,9 @@ def from_HDF5(self, filename, **kwargs): verbose: bool, default False print file and variable information """ - #-- set filename + # set filename self.case_insensitive_filename(filename) - #-- set default parameters + # set default parameters kwargs.setdefault('date',True) kwargs.setdefault('compression',None) kwargs.setdefault('varname','z') @@ -410,90 +410,90 @@ def from_HDF5(self, filename, **kwargs): kwargs.setdefault('timename','time') kwargs.setdefault('field_mapping',{}) kwargs.setdefault('verbose',False) - #-- Open the HDF5 file for reading + # Open the HDF5 file for reading if (kwargs['compression'] == 'gzip'): - #-- read gzip compressed file and extract into in-memory file object + # read gzip compressed file and extract into in-memory file object with gzip.open(self.filename, mode='r') as f: fid = io.BytesIO(f.read()) - #-- set filename of BytesIO object + # set filename of BytesIO object fid.filename = os.path.basename(filename) - #-- rewind to start of file + # rewind to start of file fid.seek(0) - #-- read as in-memory (diskless) HDF5 dataset from BytesIO object + # read as in-memory (diskless) HDF5 dataset from BytesIO object fileID = h5py.File(fid, 'r') elif (kwargs['compression'] == 'zip'): - #-- read zipped file and extract file into in-memory file object + # read zipped file and extract file into in-memory file object fileBasename,_ = os.path.splitext(os.path.basename(filename)) with zipfile.ZipFile(self.filename) as z: - #-- first try finding a HDF5 file with same base filename - #-- if none found simply try searching for a HDF5 file + # first try finding a HDF5 file with same base filename + # if none found simply try searching for a HDF5 file try: f,=[f for f in z.namelist() if re.match(fileBasename,f,re.I)] except: f,=[f for f in z.namelist() if re.search(r'\.H(DF)?5$',f,re.I)] - #-- read bytes from zipfile into in-memory BytesIO object + # read bytes from zipfile into in-memory BytesIO object fid = io.BytesIO(z.read(f)) - #-- set filename of BytesIO object + # set filename of BytesIO 
object fid.filename = os.path.basename(filename) - #-- rewind to start of file + # rewind to start of file fid.seek(0) - #-- read as in-memory (diskless) HDF5 dataset from BytesIO object + # read as in-memory (diskless) HDF5 dataset from BytesIO object fileID = h5py.File(fid, mode='r') elif (kwargs['compression'] == 'bytes'): - #-- read as in-memory (diskless) HDF5 dataset + # read as in-memory (diskless) HDF5 dataset fileID = h5py.File(filename, mode='r') else: - #-- read HDF5 dataset + # read HDF5 dataset fileID = h5py.File(self.filename, 'r') - #-- Output HDF5 file information + # Output HDF5 file information logging.info(fileID.filename) logging.info(list(fileID.keys())) - #-- list of variable attributes + # list of variable attributes attributes_list = ['description','units','long_name','calendar', 'standard_name','_FillValue','missing_value'] - #-- mapping between output keys and HDF5 variable names + # mapping between output keys and HDF5 variable names if not kwargs['field_mapping']: kwargs['field_mapping']['lon'] = kwargs['lonname'] kwargs['field_mapping']['lat'] = kwargs['latname'] kwargs['field_mapping']['data'] = kwargs['varname'] if kwargs['date']: kwargs['field_mapping']['time'] = kwargs['timename'] - #-- for each variable + # for each variable for field,key in kwargs['field_mapping'].items(): - #-- Getting the data from each HDF5 variable - #-- remove singleton dimensions + # Getting the data from each HDF5 variable + # remove singleton dimensions setattr(self, field, np.squeeze(fileID[key][:])) - #-- Getting attributes of included variables + # Getting attributes of included variables self.attributes[field] = {} for attr in attributes_list: try: self.attributes[field][attr] = fileID[key].attrs[attr] except (KeyError, AttributeError): pass - #-- Global attributes + # Global attributes for att_name in ['title','description','reference']: try: self.attributes[att_name] = fileID.attrs[att_name] except (ValueError, KeyError, AttributeError): pass - #-- Closing the HDF5 file + # Closing the HDF5 file fileID.close() - #-- switching data array to lat/lon if lon/lat + # switching data array to lat/lon if lon/lat sz = self.data.shape if (self.data.ndim == 2) and (len(self.lon) == sz[0]): self.data = self.data.T - #-- set fill value and mask + # set fill value and mask if '_FillValue' in self.attributes['data'].keys(): self.fill_value = self.attributes['data']['_FillValue'] self.mask = (self.data == self.fill_value) else: self.mask = np.zeros(self.data.shape, dtype=bool) - #-- set GRACE/GRACE-FO month if file has date variables + # set GRACE/GRACE-FO month if file has date variables if kwargs['date']: self.month = calendar_to_grace(self.time) - #-- adjust months to fix special cases if necessary + # adjust months to fix special cases if necessary self.month = adjust_months(self.month) - #-- get spacing and dimensions + # get spacing and dimensions self.update_spacing() self.update_extents() self.update_dimensions() @@ -521,33 +521,33 @@ def from_index(self, filename, **kwargs): **kwargs: dict keyword arguments for input readers """ - #-- set default keyword arguments + # set default keyword arguments kwargs.setdefault('format',None) kwargs.setdefault('date',True) kwargs.setdefault('sort',True) - #-- set filename + # set filename self.case_insensitive_filename(filename) - #-- file parser for reading index files - #-- removes commented lines (can comment out files in the index) - #-- removes empty lines (if there are extra empty lines) + # file parser for reading index files + # removes 
commented lines (can comment out files in the index) + # removes empty lines (if there are extra empty lines) parser = re.compile(r'^(?!\#|\%|$)', re.VERBOSE) - #-- Read index file of input spatial data + # Read index file of input spatial data with open(self.filename, mode='r', encoding='utf8') as f: file_list = [l for l in f.read().splitlines() if parser.match(l)] - #-- create a list of spatial objects + # create a list of spatial objects s = [] - #-- for each file in the index + # for each file in the index for i,f in enumerate(file_list): if (kwargs['format'] == 'ascii'): - #-- netcdf (.nc) + # netcdf (.nc) s.append(spatial().from_ascii(f, **kwargs)) elif (kwargs['format'] == 'netCDF4'): - #-- netcdf (.nc) + # netcdf (.nc) s.append(spatial().from_netCDF4(f, **kwargs)) elif (kwargs['format'] == 'HDF5'): - #-- HDF5 (.H5) + # HDF5 (.H5) s.append(spatial().from_HDF5(f, **kwargs)) - #-- create a single spatial object from the list + # create a single spatial object from the list return self.from_list(s,date=kwargs['date'],sort=kwargs['sort']) def from_list(self, object_list, **kwargs): @@ -565,57 +565,57 @@ def from_list(self, object_list, **kwargs): clear: bool, default True clear the spatial list from memory """ - #-- set default keyword arguments + # set default keyword arguments kwargs.setdefault('date',True) kwargs.setdefault('sort',True) kwargs.setdefault('clear',False) - #-- number of spatial objects in list + # number of spatial objects in list n = len(object_list) - #-- indices to sort data objects if spatial list contain dates + # indices to sort data objects if spatial list contain dates if kwargs['date'] and kwargs['sort']: list_sort = np.argsort([d.time for d in object_list],axis=None) else: list_sort = np.arange(n) - #-- extract dimensions and grid spacing + # extract dimensions and grid spacing self.spacing = object_list[0].spacing self.extent = object_list[0].extent self.shape = object_list[0].shape - #-- create output spatial grid and mask + # create output spatial grid and mask self.data = np.zeros((self.shape[0],self.shape[1],n)) self.mask = np.zeros((self.shape[0],self.shape[1],n),dtype=bool) self.fill_value = object_list[0].fill_value self.lon = object_list[0].lon.copy() self.lat = object_list[0].lat.copy() - #-- create list of files and attributes + # create list of files and attributes self.filename = [] self.attributes = [] - #-- output dates + # output dates if kwargs['date']: self.time = np.zeros((n)) self.month = np.zeros((n),dtype=np.int64) - #-- for each indice + # for each indice for t,i in enumerate(list_sort): self.data[:,:,t] = object_list[i].data[:,:].copy() self.mask[:,:,t] |= object_list[i].mask[:,:] if kwargs['date']: self.time[t] = np.atleast_1d(object_list[i].time) self.month[t] = np.atleast_1d(object_list[i].month) - #-- append filename to list + # append filename to list if getattr(object_list[i], 'filename'): self.filename.append(object_list[i].filename) - #-- append attributes to list + # append attributes to list if getattr(object_list[i], 'attributes'): self.attributes.append(object_list[i].attributes) - #-- adjust months to fix special cases if necessary + # adjust months to fix special cases if necessary if kwargs['date']: self.month = adjust_months(self.month) - #-- update the dimensions + # update the dimensions self.update_dimensions() self.update_mask() - #-- clear the input list to free memory + # clear the input list to free memory if kwargs['clear']: object_list = None - #-- return the single spatial object + # return the single spatial 
object return self def from_file(self, filename, format=None, date=True, **kwargs): @@ -639,19 +639,19 @@ def from_file(self, filename, format=None, date=True, **kwargs): **kwargs: dict keyword arguments for input readers """ - #-- set filename + # set filename self.case_insensitive_filename(filename) - #-- set default verbosity + # set default verbosity kwargs.setdefault('verbose',False) - #-- read from file + # read from file if (format == 'ascii'): - #-- ascii (.txt) + # ascii (.txt) return spatial().from_ascii(filename, date=date, **kwargs) elif (format == 'netCDF4'): - #-- netcdf (.nc) + # netcdf (.nc) return spatial().from_netCDF4(filename, date=date, **kwargs) elif (format == 'HDF5'): - #-- HDF5 (.H5) + # HDF5 (.H5) return spatial().from_HDF5(filename, date=date, **kwargs) def from_dict(self, d, **kwargs): @@ -663,15 +663,15 @@ def from_dict(self, d, **kwargs): d: dict dictionary object to be converted """ - #-- assign variables to self + # assign variables to self for key in ['lon','lat','data','error','time','month']: try: setattr(self, key, d[key].copy()) except (AttributeError, KeyError): pass - #-- create output mask for data + # create output mask for data self.mask = np.zeros_like(self.data,dtype=bool) - #-- get spacing and dimensions + # get spacing and dimensions self.update_spacing() self.update_extents() self.update_dimensions() @@ -692,21 +692,21 @@ def to_ascii(self, filename, **kwargs): Output file and variable information """ self.filename = os.path.expanduser(filename) - #-- set default verbosity and parameters + # set default verbosity and parameters kwargs.setdefault('date',True) kwargs.setdefault('verbose',False) logging.info(self.filename) - #-- open the output file + # open the output file fid = open(self.filename, 'w') if kwargs['date']: file_format = '{0:10.4f} {1:10.4f} {2:12.4f} {3:10.4f}' else: file_format = '{0:10.4f} {1:10.4f} {2:12.4f}' - #-- write to file for each valid latitude and longitude + # write to file for each valid latitude and longitude ii,jj = np.nonzero((self.data != self.fill_value) & (~self.mask)) for ln,lt,dt in zip(self.lon[jj],self.lat[ii],self.data[ii,jj]): print(file_format.format(ln,lt,dt,self.time), file=fid) - #-- close the output file + # close the output file fid.close() def to_netCDF4(self, filename, **kwargs): @@ -746,7 +746,7 @@ def to_netCDF4(self, filename, **kwargs): verbose: bool, default False Output file and variable information """ - #-- set default verbosity and parameters + # set default verbosity and parameters kwargs.setdefault('verbose',False) kwargs.setdefault('varname','z') kwargs.setdefault('lonname','lon') @@ -763,78 +763,78 @@ def to_netCDF4(self, filename, **kwargs): kwargs.setdefault('date',True) kwargs.setdefault('clobber',True) kwargs.setdefault('verbose',False) - #-- setting NetCDF clobber attribute + # setting NetCDF clobber attribute clobber = 'w' if kwargs['clobber'] else 'a' - #-- opening NetCDF file for writing + # opening NetCDF file for writing self.filename = os.path.expanduser(filename) fileID = netCDF4.Dataset(self.filename, clobber, format="NETCDF4") - #-- mapping between output keys and netCDF4 variable names + # mapping between output keys and netCDF4 variable names if not kwargs['field_mapping']: kwargs['field_mapping']['lon'] = kwargs['lonname'] kwargs['field_mapping']['lat'] = kwargs['latname'] kwargs['field_mapping']['data'] = kwargs['varname'] if kwargs['date']: kwargs['field_mapping']['time'] = kwargs['timename'] - #-- create attributes dictionary for output variables + # create 
attributes dictionary for output variables if not kwargs['attributes']: - #-- Defining attributes for longitude and latitude + # Defining attributes for longitude and latitude kwargs['attributes'][kwargs['field_mapping']['lon']] = {} kwargs['attributes'][kwargs['field_mapping']['lon']]['long_name'] = 'longitude' kwargs['attributes'][kwargs['field_mapping']['lon']]['units'] = 'degrees_east' kwargs['attributes'][kwargs['field_mapping']['lat']] = {} kwargs['attributes'][kwargs['field_mapping']['lat']]['long_name'] = 'latitude' kwargs['attributes'][kwargs['field_mapping']['lat']]['units'] = 'degrees_north' - #-- Defining attributes for dataset + # Defining attributes for dataset kwargs['attributes'][kwargs['field_mapping']['data']] = {} kwargs['attributes'][kwargs['field_mapping']['data']]['long_name'] = kwargs['longname'] kwargs['attributes'][kwargs['field_mapping']['data']]['units'] = kwargs['units'] - #-- Defining attributes for date if applicable + # Defining attributes for date if applicable if kwargs['date']: kwargs['attributes'][kwargs['field_mapping']['time']] = {} kwargs['attributes'][kwargs['field_mapping']['time']]['long_name'] = kwargs['time_longname'] kwargs['attributes'][kwargs['field_mapping']['time']]['units'] = kwargs['time_units'] - #-- netCDF4 dimension variables + # netCDF4 dimension variables dimensions = [] dimensions.append('lat') dimensions.append('lon') - #-- expand dimensions if containing date variables + # expand dimensions if containing date variables if kwargs['date']: self.expand_dims() dimensions.append('time') dims = tuple(kwargs['field_mapping'][key] for key in dimensions) - #-- defining the NetCDF dimensions and variables + # defining the NetCDF dimensions and variables nc = {} - #-- NetCDF dimensions + # NetCDF dimensions for i,field in enumerate(dimensions): temp = getattr(self,field) key = kwargs['field_mapping'][field] fileID.createDimension(key, len(temp)) nc[key] = fileID.createVariable(key, temp.dtype, (key,)) - #-- NetCDF spatial data + # NetCDF spatial data variables = set(kwargs['field_mapping'].keys()) - set(dimensions) for field in sorted(variables): temp = getattr(self,field) key = kwargs['field_mapping'][field] nc[key] = fileID.createVariable(key, temp.dtype, dims, fill_value=self.fill_value, zlib=True) - #-- filling NetCDF variables + # filling NetCDF variables for field,key in kwargs['field_mapping'].items(): nc[key][:] = getattr(self,field) - #-- filling netCDF dataset attributes + # filling netCDF dataset attributes for att_name,att_val in kwargs['attributes'][key].items(): if att_name not in ('DIMENSION_LIST','CLASS','NAME','_FillValue'): nc[key].setncattr(att_name, att_val) - #-- filling global netCDF attributes + # filling global netCDF attributes if kwargs['title']: fileID.title = kwargs['title'] if kwargs['reference']: fileID.reference = kwargs['reference'] - #-- date created + # date created fileID.date_created = time.strftime('%Y-%m-%d',time.localtime()) - #-- Output NetCDF structure information + # Output NetCDF structure information logging.info(self.filename) logging.info(list(fileID.variables.keys())) - #-- Closing the NetCDF file + # Closing the NetCDF file fileID.close() def to_HDF5(self, filename, **kwargs): @@ -874,7 +874,7 @@ def to_HDF5(self, filename, **kwargs): verbose: bool, default False Output file and variable information """ - #-- set default verbosity and parameters + # set default verbosity and parameters kwargs.setdefault('verbose',False) kwargs.setdefault('varname','z') kwargs.setdefault('lonname','lon') @@ 
-891,79 +891,79 @@ def to_HDF5(self, filename, **kwargs): kwargs.setdefault('date',True) kwargs.setdefault('clobber',True) kwargs.setdefault('verbose',False) - #-- setting NetCDF clobber attribute + # setting NetCDF clobber attribute clobber = 'w' if kwargs['clobber'] else 'w-' - #-- opening NetCDF file for writing + # opening NetCDF file for writing self.filename = os.path.expanduser(filename) fileID = h5py.File(self.filename, clobber) - #-- mapping between output keys and netCDF4 variable names + # mapping between output keys and netCDF4 variable names if not kwargs['field_mapping']: kwargs['field_mapping']['lon'] = kwargs['lonname'] kwargs['field_mapping']['lat'] = kwargs['latname'] kwargs['field_mapping']['data'] = kwargs['varname'] if kwargs['date']: kwargs['field_mapping']['time'] = kwargs['timename'] - #-- create attributes dictionary for output variables + # create attributes dictionary for output variables if not kwargs['attributes']: - #-- Defining attributes for longitude and latitude + # Defining attributes for longitude and latitude kwargs['attributes'][kwargs['field_mapping']['lon']] = {} kwargs['attributes'][kwargs['field_mapping']['lon']]['long_name'] = 'longitude' kwargs['attributes'][kwargs['field_mapping']['lon']]['units'] = 'degrees_east' kwargs['attributes'][kwargs['field_mapping']['lat']] = {} kwargs['attributes'][kwargs['field_mapping']['lat']]['long_name'] = 'latitude' kwargs['attributes'][kwargs['field_mapping']['lat']]['units'] = 'degrees_north' - #-- Defining attributes for dataset + # Defining attributes for dataset kwargs['attributes'][kwargs['field_mapping']['data']] = {} kwargs['attributes'][kwargs['field_mapping']['data']]['long_name'] = kwargs['longname'] kwargs['attributes'][kwargs['field_mapping']['data']]['units'] = kwargs['units'] - #-- Defining attributes for date if applicable + # Defining attributes for date if applicable if kwargs['date']: kwargs['attributes'][kwargs['field_mapping']['time']] = {} kwargs['attributes'][kwargs['field_mapping']['time']]['long_name'] = kwargs['time_longname'] kwargs['attributes'][kwargs['field_mapping']['time']]['units'] = kwargs['time_units'] - #-- HDF5 dimension variables + # HDF5 dimension variables dimensions = [] dimensions.append('lat') dimensions.append('lon') - #-- expand dimensions if containing date variables + # expand dimensions if containing date variables if kwargs['date']: self.expand_dims() dimensions.append('time') dims = tuple(kwargs['field_mapping'][key] for key in dimensions) - #-- Defining the HDF5 dataset variables + # Defining the HDF5 dataset variables h5 = {} for field,key in kwargs['field_mapping'].items(): temp = getattr(self,field) key = kwargs['field_mapping'][field] h5[key] = fileID.create_dataset(key, temp.shape, data=temp, dtype=temp.dtype, compression='gzip') - #-- filling HDF5 dataset attributes + # filling HDF5 dataset attributes for att_name,att_val in kwargs['attributes'][key].items(): if att_name not in ('DIMENSION_LIST','CLASS','NAME'): h5[key].attrs[att_name] = att_val - #-- add dimensions + # add dimensions variables = set(kwargs['field_mapping'].keys()) - set(dimensions) for field in sorted(variables): key = kwargs['field_mapping'][field] for i,dim in enumerate(dims): h5[key].dims[i].label = dim h5[key].dims[i].attach_scale(h5[dim]) - #-- Dataset contains missing values + # Dataset contains missing values if (self.fill_value is not None): h5[key].attrs['_FillValue'] = self.fill_value - #-- filling global HDF5 attributes - #-- description of file + # filling global HDF5 
attributes + # description of file if kwargs['title']: fileID.attrs['description'] = kwargs['title'] - #-- reference of file + # reference of file if kwargs['reference']: fileID.attrs['reference'] = kwargs['reference'] - #-- date created + # date created fileID.attrs['date_created'] = time.strftime('%Y-%m-%d',time.localtime()) - #-- Output HDF5 structure information + # Output HDF5 structure information logging.info(self.filename) logging.info(list(fileID.keys())) - #-- Closing the NetCDF file + # Closing the HDF5 file fileID.close() def to_index(self, filename, file_list, format=None, date=True, **kwargs): @@ -989,28 +989,28 @@ def to_index(self, filename, file_list, format=None, date=True, **kwargs): kwargs: dict keyword arguments for output writers """ - #-- Write index file of output spatial files + # Write index file of output spatial files self.filename = os.path.expanduser(filename) fid = open(self.filename,'w') - #-- set default verbosity + # set default verbosity kwargs.setdefault('verbose',False) - #-- for each file to be in the index + # for each file to be in the index for i,f in enumerate(file_list): - #-- print filename to index + # print filename to index print(f.replace(os.path.expanduser('~'),'~'), file=fid) - #-- index spatial object at i + # index spatial object at i s = self.index(i, date=date) - #-- write to file + # write to file if (format == 'ascii'): - #-- ascii (.txt) + # ascii (.txt) s.to_ascii(f, date=date, **kwargs) elif (format == 'netCDF4'): - #-- netcdf (.nc) + # netcdf (.nc) s.to_netCDF4(f, date=date, **kwargs) elif (format == 'HDF5'): - #-- HDF5 (.H5) + # HDF5 (.H5) s.to_HDF5(f, date=date, **kwargs) - #-- close the index file + # close the index file fid.close() def to_file(self, filename, format=None, date=True, **kwargs): @@ -1034,17 +1034,17 @@ def to_file(self, filename, format=None, date=True, **kwargs): kwargs: dict keyword arguments for output writers """ - #-- set default verbosity + # set default verbosity kwargs.setdefault('verbose',False) - #-- write to file + # write to file if (format == 'ascii'): - #-- ascii (.txt) + # ascii (.txt) self.to_ascii(filename, date=date, **kwargs) elif (format == 'netCDF4'): - #-- netcdf (.nc) + # netcdf (.nc) self.to_netCDF4(filename, date=date, **kwargs) elif (format == 'HDF5'): - #-- HDF5 (.H5) + # HDF5 (.H5) self.to_HDF5(filename, date=date, **kwargs)
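For reference, a minimal sketch of how this format dispatch might be exercised; the file names here are illustrative only, and the varname/longname/units keywords are the same ones consumed by the attribute-building blocks above:

from gravity_toolkit.spatial import spatial

# read a gridded field (illustrative path and variable name)
grid = spatial().from_netCDF4('EWT_monthly.nc', date=True, varname='z')
# to_file dispatches on format to the ascii, netCDF4 or HDF5 writers
grid.to_file('EWT_monthly.H5', format='HDF5', date=True,
    varname='z', longname='Equivalent Water Thickness', units='cm')
# equivalent direct call to the HDF5 writer
grid.to_HDF5('EWT_monthly.H5', date=True,
    varname='z', longname='Equivalent Water Thickness', units='cm')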
def to_masked_array(self): @@ -1058,7 +1058,7 @@ def update_spacing(self): """ Calculate the step size of spatial object """ - #-- calculate degree spacing + # calculate degree spacing dlat = np.abs(self.lat[1] - self.lat[0]) dlon = np.abs(self.lon[1] - self.lon[0]) self.spacing = (dlon,dlat) @@ -1096,12 +1096,12 @@ def copy(self): """ Copy a spatial object to a new spatial object """ temp = spatial(fill_value=self.fill_value) - #-- copy attributes or update attributes dictionary + # copy attributes or update attributes dictionary if isinstance(self.attributes,list): setattr(temp,'attributes',self.attributes) elif isinstance(self.attributes,dict): temp.attributes.update(self.attributes) - #-- assign variables to self + # assign variables to self var = ['lon','lat','data','mask','error','time','month','filename'] for key in var: try: @@ -1109,7 +1109,7 @@ def copy(self): setattr(temp, key, np.copy(val)) except AttributeError: pass - #-- get spacing and dimensions + # get spacing and dimensions temp.update_spacing() temp.update_extents() temp.update_dimensions() @@ -1121,7 +1121,7 @@ def zeros_like(self): """ Create a spatial object using the dimensions of another """ temp = spatial(fill_value=self.fill_value) - #-- assign variables to self + # assign variables to self temp.lon = self.lon.copy() temp.lat = self.lat.copy() var = ['data','mask','error','time','month'] @@ -1131,7 +1131,7 @@ def zeros_like(self): setattr(temp, key, np.zeros_like(val)) except AttributeError: pass - #-- get spacing and dimensions + # get spacing and dimensions temp.update_spacing() temp.update_extents() temp.update_dimensions() @@ -1142,18 +1142,18 @@ def expand_dims(self): """ Add a singleton dimension to a spatial object if non-existent """ - #-- change time dimensions to be iterable + # change time dimensions to be iterable self.time = np.atleast_1d(self.time) self.month = np.atleast_1d(self.month) - #-- output spatial with a third dimension + # output spatial with a third dimension if (np.ndim(self.data) == 2): self.data = self.data[:,:,None] - #-- try expanding mask variable + # try expanding mask variable try: self.mask = self.mask[:,:,None] except Exception as e: pass - #-- get spacing and dimensions + # get spacing and dimensions self.update_spacing() self.update_extents() self.update_dimensions() @@ -1164,16 +1164,16 @@ def squeeze(self): """ Remove singleton dimensions from a spatial object """ - #-- squeeze singleton dimensions + # squeeze singleton dimensions self.time = np.squeeze(self.time) self.month = np.squeeze(self.month) self.data = np.squeeze(self.data) - #-- try squeezing mask variable + # try squeezing mask variable try: self.mask = np.squeeze(self.mask) except Exception as e: pass - #-- get spacing and dimensions + # get spacing and dimensions self.update_spacing() self.update_extents() self.update_dimensions() @@ -1191,29 +1191,29 @@ def index(self, indice, date=True): date: bool, default True spatial objects contain date information """ - #-- output spatial object + # output spatial object temp = spatial(fill_value=self.fill_value) - #-- subset output spatial field + # subset output spatial field temp.data = self.data[:,:,indice].copy() temp.mask = self.mask[:,:,indice].copy() - #-- subset output spatial error + # subset output spatial error try: temp.error = self.error[:,:,indice].copy() except AttributeError: pass - #-- copy dimensions + # copy dimensions temp.lon = self.lon.copy() temp.lat = self.lat.copy() - #-- subset output dates + # subset output dates if date: temp.time = self.time[indice].copy() temp.month = self.month[indice].copy() - #-- subset filenames + # subset filenames try: temp.filename = self.filename[indice] if getattr(self, 'filename') else None except IndexError: pass - #-- get spacing and dimensions + # get spacing and dimensions temp.update_spacing() temp.update_extents() temp.update_dimensions() @@ -1228,36 +1228,36 @@ def subset(self, months): months: int GRACE/GRACE-FO to subset """ - #-- check if months is an array or a single value + # check if months is an array or a single value months = np.atleast_1d(months) - #-- number of months + # number of months n = len(months) - #-- check that all months are available + # check that all months are available months_check = list(set(months) - set(self.month)) if months_check: m = ','.join([f'{m:03d}' for m in months_check]) raise IOError(f'GRACE/GRACE-FO months {m} not Found') - #-- indices to sort data objects + # indices to sort data objects months_list = [i for i,m in enumerate(self.month) if m in months] - #-- output spatial object + # output spatial object temp = spatial(nlon=self.shape[0],nlat=self.shape[1], fill_value=self.fill_value) - #-- create output spatial object + # 
create output spatial object temp.data = np.zeros((temp.shape[0],temp.shape[1],n)) temp.mask = np.zeros((temp.shape[0],temp.shape[1],n)) - #-- create output spatial error + # create output spatial error try: getattr(self, 'error') temp.error = np.zeros((temp.shape[0],temp.shape[1],n)) except AttributeError: pass - #-- copy dimensions + # copy dimensions temp.lon = self.lon.copy() temp.lat = self.lat.copy() temp.time = np.zeros((n)) temp.month = np.zeros((n),dtype=np.int64) temp.filename = [] - #-- for each indice + # for each indice for t,i in enumerate(months_list): temp.data[:,:,t] = self.data[:,:,i].copy() temp.mask[:,:,t] = self.mask[:,:,i].copy() @@ -1265,13 +1265,13 @@ def subset(self, months): temp.error[:,:,t] = self.error[:,:,i].copy() except AttributeError: pass - #-- copy time dimensions + # copy time dimensions temp.time[t] = self.time[i].copy() temp.month[t] = self.month[i].copy() - #-- subset filenmaes + # subset filenames if getattr(self, 'filename'): temp.filename.append(self.filename[i]) - #-- remove singleton dimensions if importing a single value + # remove singleton dimensions if importing a single value return temp.squeeze() def offset(self, var): @@ -1284,7 +1284,7 @@ def offset(self, var): scalar value to which the spatial object will be offset """ temp = self.copy() - #-- offset by a single constant or a time-variable scalar + # offset by a single constant or a time-variable scalar if (np.ndim(var) == 0): temp.data = self.data + var elif (np.ndim(var) == 1) and (self.ndim == 2): @@ -1305,11 +1305,11 @@ def offset(self, var): elif (np.ndim(var) == 3) and (self.ndim == 3): for i,t in enumerate(self.time): temp.data[:,:,i] = self.data[:,:,i] + var[:,:,i] - #-- get spacing and dimensions + # get spacing and dimensions temp.update_spacing() temp.update_extents() temp.update_dimensions() - #-- update mask + # update mask temp.update_mask() return temp def scale(self, var): @@ -1323,7 +1323,7 @@ def scale(self, var): scalar value to which the spatial object will be multiplied """ temp = self.copy() - #-- multiply by a single constant or a time-variable scalar + # multiply by a single constant or a time-variable scalar if (np.ndim(var) == 0): temp.data = var*self.data elif (np.ndim(var) == 1) and (self.ndim == 2): @@ -1344,11 +1344,11 @@ def scale(self, var): elif (np.ndim(var) == 3) and (self.ndim == 3): for i,t in enumerate(self.time): temp.data[:,:,i] = var[:,:,i]*self.data[:,:,i] - #-- get spacing and dimensions + # get spacing and dimensions temp.update_spacing() temp.update_extents() temp.update_dimensions() - #-- update mask + # update mask temp.update_mask() return temp def kfactor(self, var): @@ -1374,39 +1374,39 @@ def kfactor(self, var): *Water Resources Research*, 48(W04531), (2012). 
`doi: 10.1029/2011WR011453 `_ """ - #-- copy to not modify original inputs + # copy to not modify original inputs temp1 = self.copy() temp2 = var.copy() - #-- expand dimensions and replace invalid values with 0 + # expand dimensions and replace invalid values with 0 temp1.expand_dims().replace_invalid(0.0) temp2.expand_dims().replace_invalid(0.0) - #-- dimensions of input spatial object + # dimensions of input spatial object nlat,nlon,nt = temp1.shape - #-- allocate for scaling factor and scaling factor error + # allocate for scaling factor and scaling factor error temp = spatial(nlat=nlat, nlon=nlon, fill_value=0.0) temp.data = np.zeros((nlat, nlon)) temp.error = np.zeros((nlat, nlon)) - #-- copy latitude and longitude variables + # copy latitude and longitude variables temp.lon = np.copy(temp1.lon) temp.lat = np.copy(temp1.lat) - #-- find valid data points and set mask + # find valid data points and set mask temp.mask = np.any(temp1.mask | temp2.mask, axis=2) indy,indx = np.nonzero(np.logical_not(temp.mask)) - #-- calculate point-based scaling factors as centroids + # calculate point-based scaling factors as centroids val1 = np.sum(temp1.data[indy,indx,:]*temp2.data[indy,indx,:],axis=1) val2 = np.sum(temp1.data[indy,indx,:]**2,axis=1) temp.data[indy,indx] = val1/val2 - #-- calculate difference between scaled and original + # calculate difference between scaled and original variance = temp1.scale(temp.data).offset(-temp2.data) - #-- calculate scaling factor errors as RMS of variance + # calculate scaling factor errors as RMS of variance temp.error = np.sqrt((variance.sum(power=2).data)/nt) - #-- get spacing and dimensions + # get spacing and dimensions temp.update_spacing() temp.update_extents() temp.update_dimensions() - #-- update mask + # update mask temp.update_mask() - #-- return the scaling factors and scaling factor errors + # return the scaling factors and scaling factor errors return temp def mean(self, apply=False, indices=Ellipsis): @@ -1420,30 +1420,30 @@ def mean(self, apply=False, indices=Ellipsis): indices: int, default Ellipsis indices of input spatial object to compute mean """ - #-- output spatial object + # output spatial object temp = spatial(nlon=self.shape[0],nlat=self.shape[1], fill_value=self.fill_value) - #-- copy dimensions + # copy dimensions temp.lon = self.lon.copy() temp.lat = self.lat.copy() - #-- create output mean spatial object + # create output mean spatial object temp.data = np.mean(self.data[:,:,indices],axis=2) temp.mask = np.any(self.mask[:,:,indices],axis=2) - #-- calculate the mean time + # calculate the mean time try: val = getattr(self, 'time') temp.time = np.mean(val[indices]) except (AttributeError,TypeError): pass - #-- calculate the spatial anomalies by removing the mean field + # calculate the spatial anomalies by removing the mean field if apply: for i,t in enumerate(self.time): self.data[:,:,i] -= temp.data[:,:] - #-- get spacing and dimensions + # get spacing and dimensions temp.update_spacing() temp.update_extents() temp.update_dimensions() - #-- update mask + # update mask temp.update_mask() return temp @@ -1456,10 +1456,10 @@ def reverse(self, axis=0): axis: int, default 0 axis to reorder """ - #-- output spatial object + # output spatial object temp = self.copy() temp.expand_dims() - #-- copy dimensions and reverse order + # copy dimensions and reverse order if (axis == 0): temp.lat = temp.lat[::-1].copy() temp.data = temp.data[::-1,:,:].copy() @@ -1468,9 +1468,9 @@ def reverse(self, axis=0): temp.lon = temp.lon[::-1].copy() temp.data 
= temp.data[:,::-1,:].copy() temp.mask = temp.mask[:,::-1,:].copy() - #-- squeeze output spatial object - #-- get spacing and dimensions - #-- update mask + # squeeze output spatial object + # get spacing and dimensions + # update mask temp.squeeze() return temp @@ -1483,16 +1483,16 @@ def transpose(self, axes=None): axis: int or NoneType, default None order of the output axes """ - #-- output spatial object + # output spatial object temp = self.copy() - #-- copy dimensions and reverse order + # copy dimensions and reverse order temp.data = np.transpose(temp.data, axes=axes) temp.mask = np.transpose(temp.mask, axes=axes) - #-- get spacing and dimensions + # get spacing and dimensions temp.update_spacing() temp.update_extents() temp.update_dimensions() - #-- update mask + # update mask temp.update_mask() return temp @@ -1505,20 +1505,20 @@ def sum(self, power=1): power: int, default 1 apply a power before calculating summation """ - #-- output spatial object + # output spatial object temp = spatial(nlon=self.shape[0],nlat=self.shape[1], fill_value=self.fill_value) - #-- copy dimensions + # copy dimensions temp.lon = self.lon.copy() temp.lat = self.lat.copy() - #-- create output summation spatial object + # create output summation spatial object temp.data = np.sum(np.power(self.data,power),axis=2) temp.mask = np.any(self.mask,axis=2) - #-- get spacing and dimensions + # get spacing and dimensions temp.update_spacing() temp.update_extents() temp.update_dimensions() - #-- update mask + # update mask temp.update_mask() return temp @@ -1533,7 +1533,7 @@ def power(self, power): """ temp = self.copy() temp.data = np.power(self.data,power) - #-- assign ndim and shape attributes + # assign ndim and shape attributes temp.update_dimensions() return temp @@ -1541,20 +1541,20 @@ def max(self): """ Compute maximum value of spatial field """ - #-- output spatial object + # output spatial object temp = spatial(nlon=self.shape[0],nlat=self.shape[1], fill_value=self.fill_value) - #-- copy dimensions + # copy dimensions temp.lon = self.lon.copy() temp.lat = self.lat.copy() - #-- create output maximum spatial object + # create output maximum spatial object temp.data = np.max(self.data,axis=2) temp.mask = np.any(self.mask,axis=2) - #-- get spacing and dimensions + # get spacing and dimensions temp.update_spacing() temp.update_extents() temp.update_dimensions() - #-- update mask + # update mask temp.update_mask() return temp @@ -1562,20 +1562,20 @@ def min(self): """ Compute minimum value of spatial field """ - #-- output spatial object + # output spatial object temp = spatial(nlon=self.shape[0],nlat=self.shape[1], fill_value=self.fill_value) - #-- copy dimensions + # copy dimensions temp.lon = self.lon.copy() temp.lat = self.lat.copy() - #-- create output minimum spatial object + # create output minimum spatial object temp.data = np.min(self.data,axis=2) temp.mask = np.any(self.mask,axis=2) - #-- get spacing and dimensions + # get spacing and dimensions temp.update_spacing() temp.update_extents() temp.update_dimensions() - #-- update mask + # update mask temp.update_mask() return temp @@ -1590,19 +1590,19 @@ def replace_invalid(self, fill_value, mask=None): mask: bool or NoneType, default None Update the current mask """ - #-- validate current mask + # validate current mask self.update_mask() - #-- update the mask if specified + # update the mask if specified if mask is not None: if (np.shape(mask) == self.shape): self.mask |= mask elif (np.ndim(mask) == 2) & (self.ndim == 3): - #-- broadcast mask over third 
dimension + # broadcast mask over third dimension temp = np.repeat(mask[:,:,np.newaxis],self.shape[2],axis=2) self.mask |= temp - #-- update the fill value + # update the fill value self.fill_value = fill_value - #-- replace invalid values with new fill value + # replace invalid values with new fill value self.data[self.mask] = self.fill_value return self diff --git a/gravity_toolkit/time.py b/gravity_toolkit/time.py index 50967424..ae9f734b 100644 --- a/gravity_toolkit/time.py +++ b/gravity_toolkit/time.py @@ -37,7 +37,7 @@ import numpy as np import dateutil.parser -#-- conversion factors between time units and seconds +# conversion factors between time units and seconds _to_sec = {'microseconds': 1e-6, 'microsecond': 1e-6, 'microsec': 1e-6, 'microsecs': 1e-6, 'milliseconds': 1e-3, 'millisecond': 1e-3, @@ -50,7 +50,7 @@ 'hours': 3600.0, 'hour': 3600.0, 'hr': 3600.0, 'hrs': 3600.0, 'h': 3600.0, 'day': 86400.0, 'days': 86400.0, 'd': 86400.0} -#-- approximate conversions for longer periods +# approximate conversions for longer periods _to_sec['mon'] = 30.0 * 86400.0 _to_sec['month'] = 30.0 * 86400.0 _to_sec['months'] = 30.0 * 86400.0 @@ -59,7 +59,7 @@ _to_sec['year'] = 365.25 * 86400.0 _to_sec['years'] = 365.25 * 86400.0 -#-- PURPOSE: parse a date string into epoch and units scale +# PURPOSE: parse a date string into epoch and units scale def parse_date_string(date_string): """ parse a date string of the form @@ -79,22 +79,22 @@ def parse_date_string(date_string): conversion_factor: float multiplication factor to convert to seconds """ - #-- try parsing the original date string as a date + # try parsing the original date string as a date try: epoch = dateutil.parser.parse(date_string) except ValueError: pass else: - #-- return the epoch (as list) + # return the epoch (as list) return (datetime_to_list(epoch),0.0) - #-- split the date string into units and epoch + # split the date string into units and epoch units, epoch = split_date_string(date_string) if units not in _to_sec.keys(): raise ValueError(f'Invalid units: {units}') - #-- return the epoch (as list) and the time unit conversion factors + # return the epoch (as list) and the time unit conversion factors return (datetime_to_list(epoch), _to_sec[units]) -#-- PURPOSE: split a date string into units and epoch +# PURPOSE: split a date string into units and epoch def split_date_string(date_string): """ split a date string into units and epoch @@ -111,7 +111,7 @@ def split_date_string(date_string): else: return (units.lower(),dateutil.parser.parse(epoch)) -#-- PURPOSE: convert a datetime object into a list +# PURPOSE: convert a datetime object into a list def datetime_to_list(date): """ convert a datetime object into a list @@ -127,7 +127,7 @@ def datetime_to_list(date): """ return [date.year,date.month,date.day,date.hour,date.minute,date.second] -#-- PURPOSE: extract parameters from filename +# PURPOSE: extract parameters from filename def parse_grace_file(granule): """ Extract dates from GRACE/GRACE-FO files @@ -137,26 +137,26 @@ def parse_grace_file(granule): granule: str GRACE/GRACE-FO Level-2 spherical harmonic data file """ - #-- verify that filename is reduced to basename + # verify that filename is reduced to basename file_basename = os.path.basename(granule) - #-- compile numerical expression operator for parameters from files - #-- UTCSR: The University of Texas at Austin Center for Space Research - #-- EIGEN: GFZ German Research Center for Geosciences (RL01-RL05) - #-- GFZOP: GFZ German Research Center for Geosciences 
(RL06+GRACE-FO) - #-- JPLEM: NASA Jet Propulsion Laboratory (harmonic solutions) - #-- JPLMSC: NASA Jet Propulsion Laboratory (mascon solutions) - #-- GRGS: French Centre National D'Etudes Spatiales (CNES) - #-- COSTG: International Combined Time-variable Gravity Fields + # compile numerical expression operator for parameters from files + # UTCSR: The University of Texas at Austin Center for Space Research + # EIGEN: GFZ German Research Center for Geosciences (RL01-RL05) + # GFZOP: GFZ German Research Center for Geosciences (RL06+GRACE-FO) + # JPLEM: NASA Jet Propulsion Laboratory (harmonic solutions) + # JPLMSC: NASA Jet Propulsion Laboratory (mascon solutions) + # GRGS: French Centre National D'Etudes Spatiales (CNES) + # COSTG: International Combined Time-variable Gravity Fields args = r'UTCSR|EIGEN|GFZOP|JPLEM|JPLMSC|GRGS|COSTG' regex_pattern = (r'(.*?)-2_(\d{{4}})(\d{{3}})-(\d{{4}})(\d{{3}})_' r'(.*?)_({0})_(.*?)_(\d+)(.*?)(\.gz|\.gfc)?$').format(args) rx = re.compile(regex_pattern, re.VERBOSE) - #-- extract parameters from input filename + # extract parameters from input filename PFX,SY,SD,EY,ED,AUX,PRC,F1,DRL,F2,SFX = rx.findall(file_basename).pop() - #-- return the start and end date lists + # return the start and end date lists return ((SY,SD),(EY,ED)) -#-- PURPOSE: extract dates from GRAZ or Swarm files with regular expressions +# PURPOSE: extract dates from GRAZ or Swarm files with regular expressions def parse_gfc_file(granule, PROC, DSET): """ Extract dates from Gravity Field Coefficient (gfc) files @@ -179,11 +179,11 @@ def parse_gfc_file(granule, PROC, DSET): - ``'GAD'``: ocean bottom pressure product - ``'GSM'``: corrected monthly static gravity field product """ - #-- verify that filename is reduced to basename + # verify that filename is reduced to basename file_basename = os.path.basename(granule) - #-- extract parameters from input filename + # extract parameters from input filename if (PROC == 'GRAZ'): - #-- regular expression operators for ITSG data and models + # regular expression operators for ITSG data and models itsg_products = [] itsg_products.append(r'atmosphere') itsg_products.append(r'dealiasing') @@ -195,38 +195,38 @@ def parse_gfc_file(granule, PROC, DSET): itsg_products.append(r'Grace_operational') regex_pattern=(r'(AOD1B_RL\d+|model|ITSG)[-_]({0})(_n\d+)?_' r'(\d+)-(\d+)(\.gfc)').format(r'|'.join(itsg_products)) - #-- compile regular expression operator for parameters from files + # compile regular expression operator for parameters from files rx = re.compile(regex_pattern, re.VERBOSE | re.IGNORECASE) - #-- extract parameters from input filename + # extract parameters from input filename PFX,PRD,trunc,year,month,SFX = rx.findall(file_basename).pop() - #-- number of days in each month for the calendar year + # number of days in each month for the calendar year dpm = calendar_days(int(year)) - #-- create start and end date lists + # create start and end date lists start_date = [int(year),int(month),1,0,0,0] end_date = [int(year),int(month),dpm[int(month)-1],23,59,59] elif (PROC == 'Swarm') and (DSET == 'GSM'): - #-- regular expression operators for Swarm data + # regular expression operators for Swarm data regex_pattern=r'(SW)_(.*?)_(EGF_SHA_2)__(.*?)_(.*?)_(.*?)(\.gfc|\.ZIP)' - #-- compile regular expression operator for parameters from files + # compile regular expression operator for parameters from files rx = re.compile(regex_pattern, re.VERBOSE | re.IGNORECASE) - #-- extract parameters from input filename + # extract parameters from input filename 
SAT,tmp,PROD,starttime,endtime,RL,SFX = rx.findall(file_basename).pop() start_date,_ = parse_date_string(starttime) end_date,_ = parse_date_string(endtime) elif (PROC == 'Swarm') and (DSET != 'GSM'): - #-- regular expression operators for Swarm models + # regular expression operators for Swarm models regex_pattern=(r'(GAA|GAB|GAC|GAD)_Swarm_(\d+)_(\d{2})_(\d{4})' r'(\.gfc|\.ZIP)') - #-- compile regular expression operator for parameters from files + # compile regular expression operator for parameters from files rx = re.compile(regex_pattern, re.VERBOSE | re.IGNORECASE) - #-- extract parameters from input filename + # extract parameters from input filename PROD,trunc,month,year,SFX = rx.findall(file_basename).pop() - #-- number of days in each month for the calendar year + # number of days in each month for the calendar year dpm = calendar_days(int(year)) - #-- create start and end date lists + # create start and end date lists start_date = [int(year),int(month),1,0,0,0] end_date = [int(year),int(month),dpm[int(month)-1],23,59,59] - #-- return the start and end date lists + # return the start and end date lists return (start_date, end_date) def reduce_by_date(granules): @@ -238,46 +238,46 @@ def reduce_by_date(granules): granules: list GRACE/GRACE-FO Level-2 spherical harmonic data files """ - #-- list of dates for all input files + # list of dates for all input files date_list = [parse_grace_file(f) for f in granules] unique_list = [] - #-- compile numerical expression operator for parameters from files - #-- UTCSR: The University of Texas at Austin Center for Space Research - #-- EIGEN: GFZ German Research Center for Geosciences (RL01-RL05) - #-- GFZOP: GFZ German Research Center for Geosciences (RL06+GRACE-FO) - #-- JPLEM: NASA Jet Propulsion Laboratory (harmonic solutions) - #-- JPLMSC: NASA Jet Propulsion Laboratory (mascon solutions) - #-- GRGS: French Centre National D'Etudes Spatiales (CNES) - #-- COSTG: International Combined Time-variable Gravity Fields + # compile numerical expression operator for parameters from files + # UTCSR: The University of Texas at Austin Center for Space Research + # EIGEN: GFZ German Research Center for Geosciences (RL01-RL05) + # GFZOP: GFZ German Research Center for Geosciences (RL06+GRACE-FO) + # JPLEM: NASA Jet Propulsion Laboratory (harmonic solutions) + # JPLMSC: NASA Jet Propulsion Laboratory (mascon solutions) + # GRGS: French Centre National D'Etudes Spatiales (CNES) + # COSTG: International Combined Time-variable Gravity Fields args = r'UTCSR|EIGEN|GFZOP|JPLEM|JPLMSC|GRGS|COSTG' regex_pattern = (r'(.*?)-2_(\d{{4}})(\d{{3}})-(\d{{4}})(\d{{3}})_(.*?)_' r'({0})_(.*?)_(\d{{2}})(\d{{2}})(.*?)(\.gz|\.gfc)?$').format(args) rx = re.compile(regex_pattern, re.VERBOSE) - #-- for each unique date + # for each unique date for d in sorted(set(date_list)): if (date_list.count(d) == 1): i = date_list.index(d) unique_list.append(granules[i]) elif (date_list.count(d) >= 2): - #-- if more than 1 file with date use newest version + # if more than 1 file with date use newest version indices = [i for i, dt in enumerate(date_list) if (dt == d)] - #-- find each version within the file + # find each version within the file versions = [] for i in indices: - #-- verify that filename is reduced to basename + # verify that filename is reduced to basename file_basename = os.path.basename(granules[i]) - #-- parse filename to get file version + # parse filename to get file version PFX,SY,SD,EY,ED,AUX,PRC,F1,DRL,VER,F2,SFX = \ rx.findall(file_basename).pop() - #-- append to 
list of file versions + # append to list of file versions versions.append(int(VER)) - #-- find file with newest version + # find file with newest version i = versions.index(max(versions)) unique_list.append(granules[indices[i]]) - #-- return the sorted list of files with unique dates + # return the sorted list of files with unique dates return unique_list -#-- PURPOSE: Adjust GRACE/GRACE-FO months to fix "Special Cases" +# PURPOSE: Adjust GRACE/GRACE-FO months to fix "Special Cases" def adjust_months(grace_month): """ Adjust estimated GRACE/GRACE-FO months to fix "Special Cases" @@ -301,45 +301,45 @@ def adjust_months(grace_month): For GSFC: Oct 2018 (202) is centered in Nov 2018 (203) """ - #-- verify dimensions + # verify dimensions grace_month = np.atleast_1d(grace_month) - #-- number of months + # number of months nmon = len(grace_month) - #-- create temporary months object + # create temporary months object m = np.zeros_like(grace_month) - #-- find unique months + # find unique months _,i,c = np.unique(grace_month,return_inverse=True,return_counts=True) - #-- simple unique months case + # simple unique months case case1, = np.nonzero(c[i] == 1) m[case1] = grace_month[case1] - #-- Special Months cases + # Special Months cases case2, = np.nonzero(c[i] == 2) - #-- for each special case month + # for each special case month for j in case2: - #-- prior month, current month, subsequent 2 months + # prior month, current month, subsequent 2 months mm1 = grace_month[j-1] mon = grace_month[j] mp1 = grace_month[j+1] if (j < (nmon-1)) else (mon + 1) mp2 = grace_month[j+2] if (j < (nmon-2)) else (mp1 + 1) - #-- determine the months which meet the criteria need to be adjusted + # determine the months which meet the criteria need to be adjusted if (mon == (mm1 + 1)): - #-- case where month is correct - #-- but subsequent month needs to be +1 + # case where month is correct + # but subsequent month needs to be +1 m[j] = np.copy(grace_month[j]) elif (mon == mm1) and (mon != m[j-1]): - #-- case where prior month needed to be -1 - #-- but current month is correct + # case where prior month needed to be -1 + # but current month is correct m[j] = np.copy(grace_month[j]) elif (mon == mm1): - #-- case where month should be +1 + # case where month should be +1 m[j] = grace_month[j] + 1 elif (mon == mp1) and ((mon == (mm1 + 2)) or (mp2 == (mp1 + 1))): - #-- case where month should be -1 + # case where month should be -1 m[j] = grace_month[j] - 1 - #-- update months and remove singleton dimensions if necessary + # update months and remove singleton dimensions if necessary return np.squeeze(m) -#-- PURPOSE: convert calendar dates to GRACE/GRACE-FO months +# PURPOSE: convert calendar dates to GRACE/GRACE-FO months def calendar_to_grace(year,month=1,around=np.floor): """ Converts calendar dates to GRACE/GRACE-FO months @@ -361,7 +361,7 @@ def calendar_to_grace(year,month=1,around=np.floor): grace_month = around(12.0*(year - 2002.0)) + month return np.array(grace_month, dtype=int) -#-- PURPOSE: convert GRACE/GRACE-FO months to calendar dates +# PURPOSE: convert GRACE/GRACE-FO months to calendar dates def grace_to_calendar(grace_month): """ Converts GRACE/GRACE-FO months to calendar dates @@ -382,7 +382,7 @@ def grace_to_calendar(grace_month): month = np.mod(grace_month-1,12) + 1 return (year, month)
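A quick numerical check of the two mappings above (standalone arithmetic, not an import of the module; month 1 corresponds to January 2002 in this numbering):

import numpy as np

# calendar_to_grace: grace_month = floor(12*(year - 2002)) + month
year, month = 2018, 12
grace_month = int(np.floor(12.0*(year - 2002.0)) + month)  # -> 204
# grace_to_calendar recovers the calendar date from the month index
yy = 2002 + (grace_month - 1)//12  # -> 2018
mm = (grace_month - 1) % 12 + 1    # -> 12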
-#-- PURPOSE: convert calendar dates to Julian days +# PURPOSE: convert calendar dates to Julian days def calendar_to_julian(year_decimal): """ Converts calendar dates to Julian days @@ -397,23 +397,23 @@ def calendar_to_julian(year_decimal): JD: float Julian Day (days since 01-01-4713 BCE at 12:00:00) """ - #-- calculate year + # calculate year year = np.floor(year_decimal) - #-- calculation of day of the year + # calculation of day of the year dpy = calendar_days(year).sum() DofY = dpy*(year_decimal % 1) - #-- Calculation of the Julian date from year and DofY + # Calculation of the Julian date from year and DofY JD = np.array(367.0*year - np.floor(7.0*year/4.0) - np.floor(3.0*(np.floor((7.0*year - 1.0)/700.0) + 1.0)/4.0) + DofY + 1721058.5, dtype=np.float64) return JD -#-- days per month in a leap and a standard year -#-- only difference is February (29 vs. 28) +# days per month in a leap and a standard year +# only difference is February (29 vs. 28) _dpm_leap = [31, 29, 31, 30, 31, 30, 31, 31, 30, 31, 30, 31] _dpm_stnd = [31, 28, 31, 30, 31, 30, 31, 31, 30, 31, 30, 31] -#-- PURPOSE: gets the number of days per month for a given year +# PURPOSE: gets the number of days per month for a given year def calendar_days(year): """ Calculates the number of days per month for a given year @@ -428,24 +428,24 @@ def calendar_days(year): dpm: float number of days for each month """ - #-- Rules in the Gregorian calendar for a year to be a leap year: - #-- divisible by 4, but not by 100 unless divisible by 400 - #-- True length of the year is about 365.2422 days - #-- Adding a leap day every four years ==> average 365.25 - #-- Subtracting a leap year every 100 years ==> average 365.24 - #-- Adding a leap year back every 400 years ==> average 365.2425 - #-- Subtracting a leap year every 4000 years ==> average 365.24225 + # Rules in the Gregorian calendar for a year to be a leap year: + # divisible by 4, but not by 100 unless divisible by 400 + # True length of the year is about 365.2422 days + # Adding a leap day every four years ==> average 365.25 + # Subtracting a leap year every 100 years ==> average 365.24 + # Adding a leap year back every 400 years ==> average 365.2425 + # Subtracting a leap year every 4000 years ==> average 365.24225 m4 = (year % 4) m100 = (year % 100) m400 = (year % 400) m4000 = (year % 4000) - #-- find indices for standard years and leap years using criteria + # find indices for standard years and leap years using criteria if ((m4 == 0) & (m100 != 0) | (m400 == 0) & (m4000 != 0)): return np.array(_dpm_leap, dtype=np.float64) elif ((m4 != 0) | (m100 == 0) & (m400 != 0) | (m4000 == 0)): return np.array(_dpm_stnd, dtype=np.float64) -#-- PURPOSE: convert times from seconds since epoch1 to time since epoch2 +# PURPOSE: convert times from seconds since epoch1 to time since epoch2 def convert_delta_time(delta_time, epoch1=None, epoch2=None, scale=1.0): """ Convert delta time from seconds since epoch1 to time since epoch2 @@ -464,11 +464,11 @@ def convert_delta_time(delta_time, epoch1=None, epoch2=None, scale=1.0): epoch1 = datetime.datetime(*epoch1) epoch2 = datetime.datetime(*epoch2) delta_time_epochs = (epoch2 - epoch1).total_seconds() - #-- subtract difference in time and rescale to output units + # subtract difference in time and rescale to output units return scale*(delta_time - delta_time_epochs) -#-- PURPOSE: calculate the delta time from calendar date -#-- http://scienceworld.wolfram.com/astronomy/JulianDate.html +# PURPOSE: calculate the delta time from calendar date +# http://scienceworld.wolfram.com/astronomy/JulianDate.html def convert_calendar_dates(year, month, day, hour=0.0, minute=0.0, second=0.0, epoch=(1992,1,1,0,0,0), scale=1.0): """ @@ -498,8 +498,8 @@ def 
convert_calendar_dates(year, month, day, hour=0.0, minute=0.0, second=0.0, delta_time: float days since epoch """ - #-- calculate date in Modified Julian Days (MJD) from calendar date - #-- MJD: days since November 17, 1858 (1858-11-17T00:00:00) + # calculate date in Modified Julian Days (MJD) from calendar date + # MJD: days since November 17, 1858 (1858-11-17T00:00:00) MJD = 367.0*year - np.floor(7.0*(year + np.floor((month+9.0)/12.0))/4.0) - \ np.floor(3.0*(np.floor((year + (month - 9.0)/7.0)/100.0) + 1.0)/4.0) + \ np.floor(275.0*month/9.0) + day + hour/24.0 + minute/1440.0 + \ @@ -507,10 +507,10 @@ def convert_calendar_dates(year, month, day, hour=0.0, minute=0.0, second=0.0, epoch1 = datetime.datetime(1858,11,17,0,0,0) epoch2 = datetime.datetime(*epoch) delta_time_epochs = (epoch2 - epoch1).total_seconds() - #-- return the date in days since epoch (or scaled to units) + # return the date in days since epoch (or scaled to units) return scale*np.array(MJD - delta_time_epochs/86400.0,dtype=np.float64) -#-- PURPOSE: Converts from calendar dates into decimal years +# PURPOSE: Converts from calendar dates into decimal years def convert_calendar_decimal(year, month, day=None, hour=None, minute=None, second=None, DofY=None): """ @@ -546,10 +546,10 @@ def convert_calendar_decimal(year, month, day=None, hour=None, minute=None, Cambridge: Cambridge University Press. """ - #-- number of dates + # number of dates n_dates = len(np.atleast_1d(year)) - #-- create arrays for calendar date variables + # create arrays for calendar date variables cal_date = {} cal_date['year'] = np.zeros((n_dates)) cal_date['month'] = np.zeros((n_dates)) @@ -557,101 +557,101 @@ def convert_calendar_decimal(year, month, day=None, hour=None, minute=None, cal_date['hour'] = np.zeros((n_dates)) cal_date['minute'] = np.zeros((n_dates)) cal_date['second'] = np.zeros((n_dates)) - #-- day of the year + # day of the year cal_date['DofY'] = np.zeros((n_dates)) - #-- remove singleton dimensions and use year and month + # remove singleton dimensions and use year and month cal_date['year'][:] = np.squeeze(year) cal_date['month'][:] = np.squeeze(month) - #-- create output date variable + # create output date variable t_date = np.zeros((n_dates)) - #-- days per month in a leap and a standard year - #-- only difference is February (29 vs. 28) + # days per month in a leap and a standard year + # only difference is February (29 vs. 
28) dpm_leap = np.array(_dpm_leap, dtype=np.float64) dpm_stnd = np.array(_dpm_stnd, dtype=np.float64) - #-- Rules in the Gregorian calendar for a year to be a leap year: - #-- divisible by 4, but not by 100 unless divisible by 400 - #-- True length of the year is about 365.2422 days - #-- Adding a leap day every four years ==> average 365.25 - #-- Subtracting a leap year every 100 years ==> average 365.24 - #-- Adding a leap year back every 400 years ==> average 365.2425 - #-- Subtracting a leap year every 4000 years ==> average 365.24225 + # Rules in the Gregorian calendar for a year to be a leap year: + # divisible by 4, but not by 100 unless divisible by 400 + # True length of the year is about 365.2422 days + # Adding a leap day every four years ==> average 365.25 + # Subtracting a leap year every 100 years ==> average 365.24 + # Adding a leap year back every 400 years ==> average 365.2425 + # Subtracting a leap year every 4000 years ==> average 365.24225 m4 = (cal_date['year'] % 4) m100 = (cal_date['year'] % 100) m400 = (cal_date['year'] % 400) m4000 = (cal_date['year'] % 4000) - #-- find indices for standard years and leap years using criteria + # find indices for standard years and leap years using criteria leap, = np.nonzero((m4 == 0) & (m100 != 0) | (m400 == 0) & (m4000 != 0)) stnd, = np.nonzero((m4 != 0) | (m100 == 0) & (m400 != 0) | (m4000 == 0)) - #-- calculate the day of the year + # calculate the day of the year if DofY is not None: - #-- if entered directly as an input - #-- remove 1 so day 1 (Jan 1st) = 0.0 in decimal format + # if entered directly as an input + # remove 1 so day 1 (Jan 1st) = 0.0 in decimal format cal_date['DofY'][:] = np.squeeze(DofY)-1 else: - #-- use calendar month and day of the month to calculate day of the year - #-- month minus 1: January = 0, February = 1, etc (indice of month) - #-- in decimal form: January = 0.0 + # use calendar month and day of the month to calculate day of the year + # month minus 1: January = 0, February = 1, etc (indice of month) + # in decimal form: January = 0.0 month_m1 = np.array(cal_date['month'],dtype=np.int64) - 1 - #-- day of month + # day of month if day is not None: - #-- remove 1 so 1st day of month = 0.0 in decimal format + # remove 1 so 1st day of month = 0.0 in decimal format cal_date['day'][:] = np.squeeze(day)-1.0 else: - #-- if not entering days as an input - #-- will use the mid-month value + # if not entering days as an input + # will use the mid-month value cal_date['day'][leap] = dpm_leap[month_m1[leap]]/2.0 cal_date['day'][stnd] = dpm_stnd[month_m1[stnd]]/2.0 - #-- create matrix with the lower half = 1 - #-- this matrix will be used in a matrix multiplication - #-- to calculate the total number of days for prior months - #-- the -1 will make the diagonal == 0 - #-- i.e. first row == all zeros and the - #-- last row == ones for all but the last element + # create matrix with the lower half = 1 + # this matrix will be used in a matrix multiplication + # to calculate the total number of days for prior months + # the -1 will make the diagonal == 0 + # i.e. 
first row == all zeros and the - #-- last row == ones for all but the last element + # create matrix with the lower half = 1 + # this matrix will be used in a matrix multiplication + # to calculate the total number of days for prior months + # the -1 will make the diagonal == 0 + # i.e. first row == all zeros and the + # last row == ones for all but the last element mon_mat=np.tri(12,12,-1) - #-- using a dot product to calculate total number of days - #-- for the months before the input date - #-- basically is sum(i*dpm) - #-- where i is 1 for all months < the month of interest - #-- and i is 0 for all months >= the month of interest - #-- month of interest is zero as the exact days will be - #-- used to calculate the date - - #-- calculate the day of the year for leap and standard - #-- use total days of all months before date - #-- and add number of days before date in month + # using a dot product to calculate total number of days + # for the months before the input date + # basically is sum(i*dpm) + # where i is 1 for all months < the month of interest + # and i is 0 for all months >= the month of interest + # month of interest is zero as the exact days will be + # used to calculate the date + + # calculate the day of the year for leap and standard + # use total days of all months before date + # and add number of days before date in month cal_date['DofY'][stnd] = cal_date['day'][stnd] + \ np.dot(mon_mat[month_m1[stnd],:],dpm_stnd) cal_date['DofY'][leap] = cal_date['day'][leap] + \ np.dot(mon_mat[month_m1[leap],:],dpm_leap)
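The triangular-matrix trick documented above is easy to verify in isolation; a standalone numpy sketch mirroring the comments, not an import of the module:

import numpy as np

dpm_stnd = np.array([31, 28, 31, 30, 31, 30, 31, 31, 30, 31, 30, 31])
# lower-triangular matrix with a zero diagonal: row m has ones only in columns < m
mon_mat = np.tri(12, 12, -1)
# dotting row m with days-per-month sums the complete months before month m
days_before = np.dot(mon_mat, dpm_stnd)
# e.g. zero-based month 2 (March): 31 + 28 = 59 days precede it
assert days_before[2] == 59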
- #-- hour of day (else is zero) + # hour of day (else is zero) if hour is not None: cal_date['hour'][:] = np.squeeze(hour) - #-- minute of hour (else is zero) + # minute of hour (else is zero) if minute is not None: cal_date['minute'][:] = np.squeeze(minute) - #-- second in minute (else is zero) + # second in minute (else is zero) if second is not None: cal_date['second'][:] = np.squeeze(second) - #-- calculate decimal date - #-- convert hours, minutes and seconds into days - #-- convert calculated fractional days into decimal fractions of the year - #-- Leap years + # calculate decimal date + # convert hours, minutes and seconds into days + # convert calculated fractional days into decimal fractions of the year + # Leap years t_date[leap] = cal_date['year'][leap] + \ (cal_date['DofY'][leap] + cal_date['hour'][leap]/24. + \ cal_date['minute'][leap]/1440. + \ cal_date['second'][leap]/86400.)/np.sum(dpm_leap) - #-- Standard years + # Standard years t_date[stnd] = cal_date['year'][stnd] + \ (cal_date['DofY'][stnd] + cal_date['hour'][stnd]/24. + \ cal_date['minute'][stnd]/1440. + \ @@ -659,7 +659,7 @@ def convert_calendar_decimal(year, month, day=None, hour=None, minute=None, return t_date -#-- PURPOSE: Converts from Julian day to calendar date and time +# PURPOSE: Converts from Julian day to calendar date and time def convert_julian(JD, **kwargs): """ Converts from Julian day to calendar date and time @@ -701,50 +701,50 @@ def convert_julian(JD, **kwargs): Calendar Dates", *Quarterly Journal of the Royal Astronomical Society*, 25(1), (1984). """ - #-- set default keyword arguments + # set default keyword arguments kwargs.setdefault('astype', None) kwargs.setdefault('format', 'dict') - #-- raise warnings for deprecated keyword arguments + # raise warnings for deprecated keyword arguments deprecated_keywords = dict(ASTYPE='astype', FORMAT='format') for old,new in deprecated_keywords.items(): if old in kwargs.keys(): warnings.warn(f"""Deprecated keyword argument {old}. Changed to '{new}'""", DeprecationWarning) - #-- set renamed argument to not break workflows + # set renamed argument to not break workflows kwargs[new] = copy.copy(kwargs[old]) - #-- convert to array if only a single value was imported + # convert to array if only a single value was imported if (np.ndim(JD) == 0): JD = np.atleast_1d(JD) single_value = True else: single_value = False - #-- verify julian day + # verify julian day JDO = np.floor(JD + 0.5) C = np.zeros_like(JD) - #-- calculate C for dates before and after the switch to Gregorian + # calculate C for dates before and after the switch to Gregorian IGREG = 2299161.0 ind1, = np.nonzero(JDO < IGREG) C[ind1] = JDO[ind1] + 1524.0 ind2, = np.nonzero(JDO >= IGREG) B = np.floor((JDO[ind2] - 1867216.25)/36524.25) C[ind2] = JDO[ind2] + B - np.floor(B/4.0) + 1525.0 - #-- calculate coefficients for date conversion + # calculate coefficients for date conversion D = np.floor((C - 122.1)/365.25) E = np.floor((365.0 * D) + np.floor(D/4.0)) F = np.floor((C - E)/30.6001) - #-- calculate day, month, year and hour + # calculate day, month, year and hour day = np.floor(C - E + 0.5) - np.floor(30.6001*F) month = F - 1.0 - 12.0*np.floor(F/14.0) year = D - 4715.0 - np.floor((7.0 + month)/10.0) hour = np.floor(24.0*(JD + 0.5 - JDO)) - #-- calculate minute and second + # calculate minute and second G = (JD + 0.5 - JDO) - hour/24.0 minute = np.floor(G*1440.0) second = (G - minute/1440.0) * 86400.0 - #-- convert all variables to output type (from float) + # convert all variables to output type (from float) if kwargs['astype'] is not None: year = year.astype(kwargs['astype']) month = month.astype(kwargs['astype']) @@ -753,7 +753,7 @@ def convert_julian(JD, **kwargs): minute = minute.astype(kwargs['astype']) second = second.astype(kwargs['astype']) - #-- if only a single value was imported initially: remove singleton dims + # if only a single value was imported initially: remove singleton dims if single_value: year = year.item(0) month = month.item(0) @@ -762,7 +762,7 @@ def convert_julian(JD, **kwargs): minute = minute.item(0) second = second.item(0) - #-- return date variables in output format + # return date variables in output format if (kwargs['format'] == 'dict'): return dict(year=year, month=month, day=day, hour=hour, minute=minute, second=second) diff --git a/gravity_toolkit/tools.py b/gravity_toolkit/tools.py index 84d74b00..71153944 100644 --- a/gravity_toolkit/tools.py +++ b/gravity_toolkit/tools.py @@ -1226,28 +1226,28 @@ def mask_oceans(xin, yin, data=None, order=0, lakes=False, """ # read in land/sea mask lsmask = get_data_path(['data',f'landsea_{resolution}.nc']) - #-- Land-Sea Mask with Antarctica from Rignot (2017) and Greenland from GEUS - #-- 0=Ocean, 1=Land, 2=Lake, 3=Small Island, 4=Ice Shelf - #-- Open the land-sea NetCDF file for reading + # Land-Sea Mask with Antarctica from Rignot (2017) and Greenland from GEUS + # 0=Ocean, 1=Land, 2=Lake, 3=Small Island, 4=Ice Shelf + # Open the land-sea NetCDF file for reading landsea = spatial().from_netCDF4(lsmask, date=False, varname='LSMASK') - #-- create land function + # create land function nth,nphi = landsea.shape land_function = np.zeros((nth,nphi),dtype=bool) - #-- extract land function from file - #-- find land values (1) + # extract land function from file + # find land values (1) land_function |= (landsea.data == 1) - #-- find lake values (2) + # find lake values (2) if lakes: land_function |= (landsea.data == 2) - #-- find small island values (3) + # find small island values (3) land_function |= (landsea.data == 3) - #-- find Greenland and Antarctic ice shelf values (4) + # find Greenland and Antarctic ice shelf values (4) if iceshelves: land_function |= (landsea.data == 4) - #-- interpolate to output grid + # interpolate to output grid mask = interp_grid(land_function.astype(np.int32), landsea.lon, landsea.lat, xin, yin, order) - #-- mask input data or return the interpolated mask + # mask input data or return the interpolated mask if data is not None: # update data mask with interpolated mask data.mask |= mask.astype(bool)
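As a toy illustration of how the integer flags above combine into a boolean land function (a hypothetical 2x3 flag grid; the real mask comes from the landsea_{resolution}.nc file):

import numpy as np

# flag convention from the comments: 0=Ocean, 1=Land, 2=Lake, 3=Small Island, 4=Ice Shelf
flags = np.array([[0, 1, 2], [3, 4, 0]])
lakes, iceshelves = False, True
land = (flags == 1) | (flags == 3)  # land and small islands always count
if lakes:
    land |= (flags == 2)
if iceshelves:
    land |= (flags == 4)
# land -> [[False, True, False], [True, True, False]]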
diff --git a/gravity_toolkit/tsregress.py b/gravity_toolkit/tsregress.py index d40808e3..91c43b84 100755 --- a/gravity_toolkit/tsregress.py +++ b/gravity_toolkit/tsregress.py @@ -186,11 +186,11 @@ def tsregress(t_in, d_in, ORDER=1, CYCLES=[0.5,1.0], DATA_ERR=0, `doi: 10.1007/b97636 `_ """ - #-- remove singleton dimensions + # remove singleton dimensions t_in = np.squeeze(t_in) d_in = np.squeeze(d_in) nmax = len(t_in) - #-- calculate epoch for calculating relative times + # calculate epoch for calculating relative times if isinstance(RELATIVE, (list, np.ndarray)): t_rel = t_in[RELATIVE].mean() elif isinstance(RELATIVE, (float, int, np.float_, np.int_)): @@ -198,100 +198,100 @@ def tsregress(t_in, d_in, ORDER=1, CYCLES=[0.5,1.0], DATA_ERR=0, elif (RELATIVE == Ellipsis): t_rel = t_in[RELATIVE].mean() - #-- create design matrix based on polynomial order and harmonics + # create design matrix based on polynomial order and harmonics DMAT = [] - #-- add polynomial orders (0=constant, 1=linear, 2=quadratic) + # add polynomial orders (0=constant, 1=linear, 2=quadratic) for o in range(ORDER+1): DMAT.append((t_in-t_rel)**o) - #-- add cyclical terms (0.5=semi-annual, 1=annual) + # add cyclical terms (0.5=semi-annual, 1=annual) for c in CYCLES: DMAT.append(np.sin(2.0*np.pi*t_in/np.float64(c))) DMAT.append(np.cos(2.0*np.pi*t_in/np.float64(c))) - #-- take the transpose of the design matrix + # take the transpose of the design matrix DMAT = np.transpose(DMAT) - #-- Calculating Least-Squares Coefficients + # Calculating Least-Squares Coefficients if WEIGHT: - #-- Weighted Least-Squares fitting + # Weighted Least-Squares fitting if (np.ndim(DATA_ERR) == 0): raise ValueError('Input DATA_ERR for Weighted Least-Squares') - #-- check if any error values are 0 (prevent infinite weights) + # check if any error values are 0 (prevent infinite weights) if np.count_nonzero(DATA_ERR == 0.0): - #-- change to minimum floating point value + # change to minimum floating point value DATA_ERR[DATA_ERR == 0.0] = np.finfo(np.float64).eps - #--- Weight Precision + # Weight Precision wi = np.squeeze(DATA_ERR**(-2)) - #-- If uncorrelated weights are the diagonal + # If uncorrelated weights are the diagonal W = np.diag(wi) - #-- Least-Squares fitting - #-- Temporary Matrix: Inv(X'.W.X) + # Least-Squares fitting + # Temporary Matrix: Inv(X'.W.X) TM1 = np.linalg.inv(np.dot(np.transpose(DMAT),np.dot(W,DMAT))) - #-- Temporary Matrix: (X'.W.Y) + # Temporary Matrix: (X'.W.Y) TM2 = np.dot(np.transpose(DMAT),np.dot(W,d_in)) - #-- Least Squares Solutions: Inv(X'.W.X).(X'.W.Y) + # Least Squares Solutions: Inv(X'.W.X).(X'.W.Y) beta_mat = np.dot(TM1,TM2) - else:#-- Standard Least-Squares fitting (the [0] denotes coefficients output) + else:  # Standard Least-Squares fitting (the [0] denotes coefficients output) beta_mat = np.linalg.lstsq(DMAT,d_in,rcond=-1)[0] - #-- Weights are equal + # Weights are equal wi = 1.0 - #-- number of terms in least-squares solution + # number of terms in least-squares solution n_terms = len(beta_mat)
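A self-contained sketch of the design-matrix construction above on synthetic data (the package call is not needed; CYCLES=[0.5,1.0] gives semi-annual and annual terms):

import numpy as np

t = 2002.0 + np.arange(120)/12.0  # decimal years, 10 years monthly
d = 1.5*(t - t.mean()) + 2.0*np.sin(2.0*np.pi*t)  # trend plus annual cycle
DMAT = [np.ones_like(t), t - t.mean()]  # ORDER=1: constant and linear terms
for c in (0.5, 1.0):  # cyclical terms
    DMAT.append(np.sin(2.0*np.pi*t/c))
    DMAT.append(np.cos(2.0*np.pi*t/c))
DMAT = np.transpose(DMAT)
# standard (unweighted) least-squares solution
beta = np.linalg.lstsq(DMAT, d, rcond=-1)[0]
# beta[1] recovers the 1.5 per year trend; beta[4] the annual sine amplitude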
- #-- modelled time-series + # modelled time-series mod = np.dot(DMAT,beta_mat) - #-- residual + # residual res = d_in[0:nmax] - np.dot(DMAT,beta_mat) - #-- Fitted Values without (and with) climate oscillations + # Fitted Values without (and with) climate oscillations simple = np.dot(DMAT[:,0:(ORDER+1)],beta_mat[0:(ORDER+1)]) season = mod - simple - #-- nu = Degrees of Freedom + # nu = Degrees of Freedom nu = nmax - n_terms - #-- calculating R^2 values - #-- SStotal = sum((Y-mean(Y))**2) + # calculating R^2 values + # SStotal = sum((Y-mean(Y))**2) SStotal = np.dot(np.transpose(d_in[0:nmax] - np.mean(d_in[0:nmax])), (d_in[0:nmax] - np.mean(d_in[0:nmax]))) - #-- SSerror = sum((Y-X*B)**2) + # SSerror = sum((Y-X*B)**2) SSerror = np.dot(np.transpose(d_in[0:nmax] - np.dot(DMAT,beta_mat)), (d_in[0:nmax] - np.dot(DMAT,beta_mat))) - #-- R**2 term = 1- SSerror/SStotal + # R**2 term = 1- SSerror/SStotal rsquare = 1.0 - (SSerror/SStotal) - #-- Adjusted R**2 term: weighted by degrees of freedom + # Adjusted R**2 term: weighted by degrees of freedom rsq_adj = 1.0 - (SSerror/SStotal)*np.float64((nmax-1.0)/nu) - #-- Fit Criterion - #-- number of parameters including the intercept and the variance + # Fit Criterion + # number of parameters including the intercept and the variance K = np.float64(n_terms + 1) - #-- Log-Likelihood with weights (if unweighted, weight portions == 0) - #-- log(L) = -0.5*n*log(sigma^2) - 0.5*n*log(2*pi) - 0.5*n + # Log-Likelihood with weights (if unweighted, weight portions == 0) + # log(L) = -0.5*n*log(sigma^2) - 0.5*n*log(2*pi) - 0.5*n #log_lik = -0.5*nmax*(np.log(2.0 * np.pi) + 1.0 + np.log(np.sum((res**2)/nmax))) log_lik = 0.5*(np.sum(np.log(wi)) - nmax*(np.log(2.0 * np.pi) + 1.0 - np.log(nmax) + np.log(np.sum(wi * (res**2))))) - #-- Aikaike's Information Criterion + # Akaike's Information Criterion AIC = -2.0*log_lik + 2.0*K if AICc: - #-- Second-Order AIC correcting for small sample sizes (restricted) - #-- Burnham and Anderson (2002) advocate use of AICc where - #-- ratio num/K is small - #-- A small ratio is defined in the definition at approximately < 40 + # Second-Order AIC correcting for small sample sizes (restricted) + # Burnham and Anderson (2002) advocate use of AICc where + # ratio num/K is small + # A small ratio is defined in the definition at approximately < 40 AIC += (2.0*K*(K+1.0))/(nmax - K - 1.0) - #-- Bayesian Information Criterion (Schwarz Criterion) + # Bayesian Information Criterion (Schwarz Criterion) BIC = -2.0*log_lik + np.log(nmax)*K - #--- Error Analysis + # Error Analysis if WEIGHT: - #-- WEIGHTED LEAST-SQUARES CASE (unequal error) - #-- Covariance Matrix + # WEIGHTED LEAST-SQUARES CASE (unequal error) + # Covariance Matrix Hinv = np.linalg.inv(np.dot(np.transpose(DMAT),np.dot(W,DMAT))) - #-- Normal Equations + # Normal Equations NORMEQ = np.dot(Hinv,np.transpose(np.dot(W,DMAT))) beta_err = np.zeros((n_terms)) - #-- Propagating RMS errors + # Propagating RMS errors for i in range(0,n_terms): beta_err[i] = np.sqrt(np.sum((NORMEQ[i,:]*DATA_ERR)**2)) - #-- Weighted sum of squares Error + # Weighted sum of squares Error WSSE = np.dot(np.transpose(wi*(d_in[0:nmax] - np.dot(DMAT,beta_mat))), wi*(d_in[0:nmax] - np.dot(DMAT,beta_mat)))/np.float64(nu) @@ -301,15 +301,15 @@ def tsregress(t_in, d_in, ORDER=1, CYCLES=[0.5,1.0], DATA_ERR=0, 'season':season, 'N':n_terms, 'DOF':nu, 'cov_mat':Hinv} elif ((not WEIGHT) and (DATA_ERR != 0)): - #-- LEAST-SQUARES CASE WITH KNOWN AND 
EQUAL ERROR + # LEAST-SQUARES CASE WITH KNOWN AND EQUAL ERROR P_err = DATA_ERR*np.ones((nmax)) Hinv = np.linalg.inv(np.dot(np.transpose(DMAT),DMAT)) - #-- Normal Equations + # Normal Equations NORMEQ = np.dot(Hinv,np.transpose(DMAT)) beta_err = np.zeros((n_terms)) for i in range(0,n_terms): beta_err[i] = np.sqrt(np.sum((NORMEQ[i,:]*P_err)**2)) - #-- Mean square error + # Mean square error MSE = np.dot(np.transpose(d_in[0:nmax] - np.dot(DMAT,beta_mat)), (d_in[0:nmax] - np.dot(DMAT,beta_mat)))/np.float64(nu) @@ -318,36 +318,36 @@ def tsregress(t_in, d_in, ORDER=1, CYCLES=[0.5,1.0], DATA_ERR=0, 'LOGLIK':log_lik, 'model':mod, 'residual':res, 'simple':simple, 'season':season,'N':n_terms, 'DOF':nu, 'cov_mat':Hinv} else: - #-- STANDARD LEAST-SQUARES CASE - #-- Regression with Errors with Unknown Standard Deviations - #-- MSE = (1/nu)*sum((Y-X*B)**2) - #-- Mean square error + # STANDARD LEAST-SQUARES CASE + # Regression with Errors with Unknown Standard Deviations + # MSE = (1/nu)*sum((Y-X*B)**2) + # Mean square error MSE = np.dot(np.transpose(d_in[0:nmax] - np.dot(DMAT,beta_mat)), (d_in[0:nmax] - np.dot(DMAT,beta_mat)))/np.float64(nu) - #-- Root mean square error + # Root mean square error RMSE = np.sqrt(MSE) - #-- Normalized root mean square error + # Normalized root mean square error NRMSE = RMSE/(np.max(d_in[0:nmax])-np.min(d_in[0:nmax])) - #-- Covariance Matrix - #-- Multiplying the design matrix by itself + # Covariance Matrix + # Multiplying the design matrix by itself Hinv = np.linalg.inv(np.dot(np.transpose(DMAT),DMAT)) - #-- Taking the diagonal components of the cov matrix + # Taking the diagonal components of the cov matrix hdiag = np.diag(Hinv) - #-- set either the standard deviation or the confidence interval + # set either the standard deviation or the confidence interval if (STDEV != 0): - #-- Setting the standard deviation of the output error + # Setting the standard deviation of the output error alpha = 1.0 - scipy.special.erf(STDEV/np.sqrt(2.0)) elif (CONF != 0): - #-- Setting the confidence interval of the output error + # Setting the confidence interval of the output error alpha = 1.0 - CONF else: - #-- Default is 95% confidence interval + # Default is 95% confidence interval alpha = 1.0 - (0.95) - #-- Student T-Distribution with D.O.F. 
nu + # t.ppf parallels tinv in matlab tstar = scipy.stats.t.ppf(1.0-(alpha/2.0),nu) - #-- beta_err is the error for each coefficient - #-- beta_err = t(nu,1-alpha/2)*standard error + # beta_err is the error for each coefficient + # beta_err = t(nu,1-alpha/2)*standard error st_err = np.sqrt(MSE*hdiag) beta_err = tstar*st_err diff --git a/gravity_toolkit/tssmooth.py b/gravity_toolkit/tssmooth.py index c2d07b5d..4d30a84e 100755 --- a/gravity_toolkit/tssmooth.py +++ b/gravity_toolkit/tssmooth.py @@ -141,59 +141,59 @@ def tssmooth(t_in, d_in, HFWTH=6, MOVING=False, DATA_ERR=0, WEIGHT=0, `doi: 10.1029/2009GL040222 `_ """ - #-- remove singleton dimensions + # remove singleton dimensions t_in = np.squeeze(t_in) d_in = np.squeeze(d_in) nmax = len(t_in) - #-- Indice with start of seasonal terms: + # Indice with start of seasonal terms: SEAS = 2 - #-- set either the standard deviation or the confidence interval + # set either the standard deviation or the confidence interval if (STDEV != 0): - #-- Setting the standard deviation of the output error + # Setting the standard deviation of the output error alpha = 1.0 - scipy.special.erf(STDEV/np.sqrt(2.0)) elif (CONF != 0): - #-- Setting the confidence interval of the output error + # Setting the confidence interval of the output error alpha = 1.0 - CONF else: - #-- Default is 95% confidence interval + # Default is 95% confidence interval alpha = 1.0 - (0.95) - #-- moving average algorithm + # moving average algorithm if MOVING: - #-- Centered moving average using the mean of each window - #-- equal to mean of Jan:Dec and Feb:Jan+1 for HFWTH 6 - #-- problematic with GRACE due to missing months within time-series - #-- output time + # Centered moving average using the mean of each window + # equal to mean of Jan:Dec and Feb:Jan+1 for HFWTH 6 + # problematic with GRACE due to missing months within time-series + # output time tout = t_in[HFWTH:nmax-HFWTH] smth = np.zeros((nmax-2*HFWTH)) for k in range(0, (nmax-(2*HFWTH))): - #-- centered moving average sum[2:i-1] + 0.5[1] + 0.5[i] + # centered moving average sum[2:i-1] + 0.5[1] + 0.5[i] smth[k] = np.sum(d_in[k+1:k+2*HFWTH]) + 0.5*(d_in[k]+d_in[k+2*HFWTH]) dsmth = smth/(2*HFWTH) return {'data':dsmth, 'time':tout}
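The centered window in the MOVING branch above is simple to sanity-check on a ramp, where the smoother should return the window centers (a standalone sketch):

import numpy as np

d = np.arange(10.0)  # linear ramp
HFWTH = 2  # half-width of 2 -> 5-point window
nmax = len(d)
smth = np.zeros(nmax - 2*HFWTH)
for k in range(nmax - 2*HFWTH):
    # interior points at full weight, the two window endpoints at half weight
    smth[k] = np.sum(d[k+1:k+2*HFWTH]) + 0.5*(d[k] + d[k+2*HFWTH])
smth /= (2*HFWTH)
# smth -> [2., 3., 4., 5., 6., 7.], the centers of each 5-point window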
models entire range of dates + # for a HFWTH of 6 (remove annual) + # will fit linear model to data for 13 months + # creates a weight array ranging from 1:HFWTH+1:-1 for linear + # or a gaussian function centered on HFWTH + # which favors the regression with the date centered + # smoothed time-series = sum(smth*weights)/sum(weights) + # output time = input time tout = np.copy(t_in) if (WEIGHT == 1): - #-- linear weights (range from 1:HFWTH+1:-1) + # linear weights (range from 1:HFWTH+1:-1) wi = np.concatenate((np.arange(1,HFWTH+2,dtype=np.float64), np.arange(HFWTH,0,-1,dtype=np.float64)),axis=0) elif (WEIGHT == 2): - #-- gaussian weights - #-- default standard deviation of 2 + # gaussian weights + # default standard deviation of 2 stdev = 2.0 - #-- gaussian function over range 2*HFWTH - #-- centered on HFWTH + # gaussian function over range 2*HFWTH + # centered on HFWTH xi=np.arange(0, 2*HFWTH+1) wi=np.exp(-(xi-HFWTH)**2/(2.0*stdev**2))/(stdev*np.sqrt(2.0*np.pi)) @@ -208,39 +208,39 @@ def tssmooth(t_in, d_in, HFWTH=6, MOVING=False, DATA_ERR=0, WEIGHT=0, weight = np.zeros((nmax)) for i in range(0, (nmax-(2*HFWTH))): ran = i + np.arange(0, 2*HFWTH+1) - P_x0 = np.ones((2*HFWTH+1))#-- Constant Term - P_x1 = t_in[ran]#-- Linear Term - #-- Annual term = 2*pi*t*harmonic + P_x0 = np.ones((2*HFWTH+1))# Constant Term + P_x1 = t_in[ran]# Linear Term + # Annual term = 2*pi*t*harmonic P_asin = np.sin(2*np.pi*t_in[ran]) P_acos = np.cos(2*np.pi*t_in[ran]) - #--Semi-Annual = 4*pi*t*harmonic + # Semi-Annual = 4*pi*t*harmonic P_ssin = np.sin(4*np.pi*t_in[ran]) P_scos = np.cos(4*np.pi*t_in[ran]) - #-- x0,x1,AS,AC,SS,SC + # x0,x1,AS,AC,SS,SC TMAT = np.array([P_x0, P_x1, P_asin, P_acos, P_ssin, P_scos]) TMAT = np.transpose(TMAT) - #--- Least-Squares fitting - #--- (the [0] denotes coefficients output)standard + # Least-Squares fitting + # (the [0] denotes coefficients output) beta_mat = np.linalg.lstsq(TMAT,d_in[ran],rcond=-1)[0] - #-- Calculating the output components - #-- add weighted smoothed time series + # Calculating the output components + # add weighted smoothed time series dsmth[ran] += wi*np.dot(TMAT[:,0:SEAS],beta_mat[0:SEAS]) - #-- seasonal component + # seasonal component dseason[ran] += wi*np.dot(TMAT[:,SEAS:],beta_mat[SEAS:]) - #-- annual component + # annual component AS,AC = beta_mat[SEAS:SEAS+2] dannual[ran] += wi*np.dot(TMAT[:,SEAS:SEAS+2],[AS,AC]) annamp[ran] += wi*np.sqrt(AS**2 + AC**2) annphase[ran] += wi*np.arctan2(AC,AS)*180.0/np.pi - #-- semi-annual component + # semi-annual component SS,SC = beta_mat[SEAS+2:SEAS+4] dsemian[ran] += wi*np.dot(TMAT[:,SEAS+2:SEAS+4],[SS,SC]) semiamp[ran] += wi*np.sqrt(SS**2 + SC**2) semiphase[ran] += wi*np.arctan2(SC,SS)*180.0/np.pi - #-- add weights + # add weights weight[ran] += wi - #-- divide weighted smoothed time-series by weights - #-- to get output smoothed time-series + # divide weighted smoothed time-series by weights + # to get output smoothed time-series dsmth /= weight dseason /= weight dannual /= weight @@ -249,16 +249,16 @@ def tssmooth(t_in, d_in, HFWTH=6, MOVING=False, DATA_ERR=0, WEIGHT=0, dsemian /= weight semiamp /= weight semiphase /= weight - #-- noise = data - smoothed - seasonal + # noise = data - smoothed - seasonal dnoise = d_in - dsmth - dseason return {'data':dsmth, 'seasonal':dseason, 'annual':dannual, 'annamp':annamp, 'annphase':annphase, 'semiann':dsemian, 'semiamp':semiamp, 'semiphase':semiphase, 'noise':dnoise, 'time':tout, 'weight':weight} else: - #-- Moving average calculated from least-squares
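# A minimal sketch (assuming HFWTH=6) of the two weight windows built above:
# a triangular ramp for WEIGHT=1 and a gaussian for WEIGHT=2, each of
# length 2*HFWTH+1 so the fit is weighted toward the window center
import numpy as np
HFWTH = 6
wi_linear = np.concatenate((np.arange(1,HFWTH+2,dtype=np.float64),
    np.arange(HFWTH,0,-1,dtype=np.float64)),axis=0)
# wi_linear is [1. 2. 3. 4. 5. 6. 7. 6. 5. 4. 3. 2. 1.]
stdev = 2.0
xi = np.arange(0, 2*HFWTH+1)
wi_gauss = np.exp(-(xi-HFWTH)**2/(2.0*stdev**2))/(stdev*np.sqrt(2.0*np.pi))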
of window - #-- and removing An/SAn signal - #-- output time + # Moving average calculated from least-squares of window + # and removing An/SAn signal + # output time tout = t_in[HFWTH:nmax-HFWTH] dsmth = np.zeros((nmax-2*HFWTH)) dtrend = np.zeros((nmax-2*HFWTH)) @@ -274,74 +274,74 @@ def tssmooth(t_in, d_in, HFWTH=6, MOVING=False, DATA_ERR=0, WEIGHT=0, dreduce = np.zeros((nmax-2*HFWTH)) for i in range(0, (nmax-(2*HFWTH))): ran = i + np.arange(0, 2*HFWTH+1) - P_x0 = np.ones((2*HFWTH+1))#-- Constant Term - P_x1 = t_in[ran]#-- Linear Term - #-- Annual term = 2*pi*t*harmonic + P_x0 = np.ones((2*HFWTH+1))# Constant Term + P_x1 = t_in[ran]# Linear Term + # Annual term = 2*pi*t*harmonic P_asin = np.sin(2*np.pi*t_in[ran]) P_acos = np.cos(2*np.pi*t_in[ran]) - #--Semi-Annual = 4*pi*t*harmonic + # Semi-Annual = 4*pi*t*harmonic P_ssin = np.sin(4*np.pi*t_in[ran]) P_scos = np.cos(4*np.pi*t_in[ran]) - #-- x0,x1,AS,AC,SS,SC + # x0,x1,AS,AC,SS,SC TMAT = np.array([P_x0, P_x1, P_asin, P_acos, P_ssin, P_scos]) TMAT = np.transpose(TMAT) - #--- Least-Squares fitting - #--- (the [0] denotes coefficients output) + # Least-Squares fitting + # (the [0] denotes coefficients output) beta_mat = np.linalg.lstsq(TMAT,d_in[ran],rcond=-1)[0] n_terms = len(beta_mat) if (DATA_ERR != 0): - #-- LEAST-SQUARES CASE WITH KNOWN AND EQUAL ERROR + # LEAST-SQUARES CASE WITH KNOWN AND EQUAL ERROR P_err = DATA_ERR*np.ones((2*HFWTH+1)) Hinv = np.linalg.inv(np.dot(np.transpose(TMAT),TMAT)) - #-- Normal Equations + # Normal Equations NORMEQ = np.dot(Hinv,np.transpose(TMAT)) beta_err = np.zeros((n_terms)) for n in range(0,n_terms): beta_err[n] = np.sqrt(np.sum((NORMEQ[n,:]*P_err)**2)) else: - #-- Error Analysis - #-- Degrees of Freedom + # Error Analysis + # Degrees of Freedom nu = (2*HFWTH+1) - n_terms - #-- Mean square error + # Mean square error MSE = np.dot(np.transpose(d_in[ran] - np.dot(TMAT,beta_mat)), (d_in[ran] - np.dot(TMAT,beta_mat)))/nu - #-- Covariance Matrix - #-- Multiplying the design matrix by itself + # Covariance Matrix + # Multiplying the design matrix by itself Hinv = np.linalg.inv(np.dot(np.transpose(TMAT),TMAT)) - #-- Taking the diagonal components of the cov matrix + # Taking the diagonal components of the cov matrix hdiag = np.diag(Hinv) - #-- STANDARD LEAST-SQUARES CASE - #-- Regression with Errors with Unknown Standard Deviations - #-- Student T-Distribution with D.O.F. nu - #-- t.ppf parallels tinv in matlab + # STANDARD LEAST-SQUARES CASE + # Regression with Errors with Unknown Standard Deviations + # Student T-Distribution with D.O.F.
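# A self-contained sketch (synthetic values) of the error analysis above:
# coefficient errors scale the diagonal of inv(T'T) by the residual MSE
# and are widened by the two-sided Student-t quantile
import numpy as np
import scipy.stats
t = np.linspace(2002.0, 2003.0, 13)
TMAT = np.transpose(np.array([np.ones_like(t), t]))
d = 1.0 + 2.0*t + 0.01*np.sin(2.0*np.pi*t)
beta_mat = np.linalg.lstsq(TMAT, d, rcond=-1)[0]
nu = len(t) - len(beta_mat)
MSE = np.sum((d - np.dot(TMAT, beta_mat))**2)/nu
hdiag = np.diag(np.linalg.inv(np.dot(np.transpose(TMAT), TMAT)))
tstar = scipy.stats.t.ppf(1.0 - 0.05/2.0, nu)  # 95% confidence
beta_err = tstar*np.sqrt(MSE*hdiag)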
nu + # t.ppf parallels tinv in matlab tstar = scipy.stats.t.ppf(1.0-(alpha/2.0),nu) - #-- beta_err is the error for each coefficient - #-- beta_err = t(nu,1-alpha/2)*standard error + # beta_err is the error for each coefficient + # beta_err = t(nu,1-alpha/2)*standard error st_err = np.sqrt(MSE*hdiag) beta_err = tstar*st_err - #-- Calculating the output components - #-- smoothed time series + # Calculating the output components + # smoothed time series dsmth[i] = np.dot(TMAT[HFWTH,0:SEAS],beta_mat[0:SEAS]) - dtrend[i] = np.copy(beta_mat[1])#-- Instantaneous data trend - derror[i] = np.copy(beta_err[1])#-- Error in trend - #-- seasonal component + dtrend[i] = np.copy(beta_mat[1])# Instantaneous data trend + derror[i] = np.copy(beta_err[1])# Error in trend + # seasonal component dseason[i] = np.dot(TMAT[HFWTH,SEAS:],beta_mat[SEAS:]) - #-- annual component + # annual component AS,AC = beta_mat[SEAS:SEAS+2] dannual[i] = np.dot(TMAT[HFWTH,SEAS:SEAS+2],[AS,AC]) annphase[i] = np.arctan2(AC,AS)*180.0/np.pi annamp[i] = np.sqrt(AS**2 + AC**2) - #-- semi-annual component + # semi-annual component SS,SC = beta_mat[SEAS+2:SEAS+4] dsemian[i] = np.dot(TMAT[HFWTH,SEAS+2:SEAS+4],[SS,SC]) semiamp[i] = np.sqrt(SS**2 + SC**2) semiphase[i] = np.arctan2(SC,SS)*180.0/np.pi - #-- noise component + # noise component dnoise[i] = d_in[i+HFWTH] - dsmth[i] - dseason[i] - #-- reduced time-series + # reduced time-series dreduce[i] = d_in[i+HFWTH] return {'data':dsmth, 'trend':dtrend, 'error':derror, diff --git a/gravity_toolkit/utilities.py b/gravity_toolkit/utilities.py index 2ea7bfcf..262ba31c 100644 --- a/gravity_toolkit/utilities.py +++ b/gravity_toolkit/utilities.py @@ -71,7 +71,7 @@ from urllib.parse import urlencode import urllib.request as urllib2 -#-- PURPOSE: get absolute path within a package from a relative path +# PURPOSE: get absolute path within a package from a relative path def get_data_path(relpath): """ Get the absolute path within a package from a relative path @@ -81,16 +81,16 @@ def get_data_path(relpath): relpath: str, relative path """ - #-- current file path + # current file path filename = inspect.getframeinfo(inspect.currentframe()).filename filepath = os.path.dirname(os.path.abspath(filename)) if isinstance(relpath,list): - #-- use *splat operator to extract from list + # use *splat operator to extract from list return os.path.join(filepath,*relpath) elif isinstance(relpath,str): return os.path.join(filepath,relpath) -#-- PURPOSE: get the hash value of a file +# PURPOSE: get the hash value of a file def get_hash(local, algorithm='MD5'): """ Get the hash value from a local file or BytesIO object @@ -105,17 +105,17 @@ def get_hash(local, algorithm='MD5'): - ``'MD5'``: Message Digest - ``'sha1'``: Secure Hash Algorithm """ - #-- check if open file object or if local file exists + # check if open file object or if local file exists if isinstance(local, io.IOBase): if (algorithm == 'MD5'): return hashlib.md5(local.getvalue()).hexdigest() elif (algorithm == 'sha1'): return hashlib.sha1(local.getvalue()).hexdigest() elif os.access(os.path.expanduser(local),os.F_OK): - #-- generate checksum hash for local file - #-- open the local_file in binary read mode + # generate checksum hash for local file + # open the local_file in binary read mode with open(os.path.expanduser(local), 'rb') as local_buffer: - #-- generate checksum hash for a given type + # generate checksum hash for a given type if (algorithm == 'MD5'): return hashlib.md5(local_buffer.read()).hexdigest() elif (algorithm == 'sha1'): @@ -123,7 
+123,7 @@ def get_hash(local, algorithm='MD5'): else: return '' -#-- PURPOSE: recursively split a url path +# PURPOSE: recursively split a url path def url_split(s): """ Recursively split a url path into a list @@ -140,7 +140,7 @@ def url_split(s): return tail, return url_split(head) + (tail,) -#-- PURPOSE: convert file lines to arguments +# PURPOSE: convert file lines to arguments def convert_arg_line_to_args(arg_line): """ Convert file lines to arguments @@ -150,13 +150,13 @@ def convert_arg_line_to_args(arg_line): arg_line: str line string containing a single argument and/or comments """ - #-- remove commented lines and after argument comments + # remove commented lines and after argument comments for arg in re.sub(r'\#(.*?)$',r'',arg_line).split(): if not arg.strip(): continue yield arg -#-- PURPOSE: returns the Unix timestamp value for a formatted date string +# PURPOSE: returns the Unix timestamp value for a formatted date string def get_unix_time(time_string, format='%Y-%m-%d %H:%M:%S'): """ Get the Unix timestamp value for a formatted date string @@ -174,7 +174,7 @@ def get_unix_time(time_string, format='%Y-%m-%d %H:%M:%S'): pass else: return calendar.timegm(parsed_time) - #-- try parsing with dateutil + # try parsing with dateutil try: parsed_time = dateutil.parser.parse(time_string.rstrip()) except (TypeError, ValueError): @@ -182,7 +182,7 @@ def get_unix_time(time_string, format='%Y-%m-%d %H:%M:%S'): else: return parsed_time.timestamp() -#-- PURPOSE: output a time string in isoformat +# PURPOSE: output a time string in isoformat def isoformat(time_string): """ Reformat a date string to ISO formatting @@ -192,7 +192,7 @@ def isoformat(time_string): time_string: str formatted time string to parse """ - #-- try parsing with dateutil + # try parsing with dateutil try: parsed_time = dateutil.parser.parse(time_string.rstrip()) except (TypeError, ValueError): @@ -200,7 +200,7 @@ def isoformat(time_string): else: return parsed_time.isoformat() -#-- PURPOSE: rounds a number to an even number less than or equal to original +# PURPOSE: rounds a number to an even number less than or equal to original def even(value): """ Rounds a number to an even number less than or equal to original @@ -212,7 +212,7 @@ def even(value): """ return 2*int(value//2) -#-- PURPOSE: rounds a number upward to its nearest integer +# PURPOSE: rounds a number upward to its nearest integer def ceil(value): """ Rounds a number upward to its nearest integer @@ -224,7 +224,7 @@ def ceil(value): """ return -int(-value//1) -#-- PURPOSE: make a copy of a file with all system information +# PURPOSE: make a copy of a file with all system information def copy(source, destination, move=False, **kwargs): """ Copy or move a file with all system information @@ -240,14 +240,14 @@ def copy(source, destination, move=False, **kwargs): """ source = os.path.abspath(os.path.expanduser(source)) destination = os.path.abspath(os.path.expanduser(destination)) - #-- log source and destination + # log source and destination logging.info(f'{source} -->\n\t{destination}') shutil.copyfile(source, destination) shutil.copystat(source, destination) if move: os.remove(source) -#-- PURPOSE: open a unique file adding a numerical instance if existing +# PURPOSE: open a unique file adding a numerical instance if existing def create_unique_file(filename): """ Open a unique file adding a numerical instance if existing @@ -257,23 +257,23 @@ def create_unique_file(filename): filename: str full path to output file """ - #-- split filename into fileBasename 
and fileExtension + # split filename into fileBasename and fileExtension fileBasename, fileExtension = os.path.splitext(filename) - #-- create counter to add to the end of the filename if existing + # create counter to add to the end of the filename if existing counter = 1 while counter: try: - #-- open file descriptor only if the file doesn't exist + # open file descriptor only if the file doesn't exist fd = os.open(filename, os.O_CREAT | os.O_EXCL | os.O_RDWR) except OSError: pass else: return os.fdopen(fd, 'w+') - #-- new filename adds counter the between fileBasename and fileExtension + # new filename adds a counter between fileBasename and fileExtension filename = f'{fileBasename}_{counter:d}{fileExtension}' counter += 1 -#-- PURPOSE: check ftp connection +# PURPOSE: check ftp connection def check_ftp_connection(HOST, username=None, password=None): """ Check internet connection with ftp host @@ -287,7 +287,7 @@ password: str or NoneType ftp password """ - #-- attempt to connect to ftp host + # attempt to connect to ftp host try: f = ftplib.FTP(HOST) f.login(username, password) @@ -299,7 +299,7 @@ else: return True -#-- PURPOSE: list a directory on a ftp host +# PURPOSE: list a directory on a ftp host def ftp_list(HOST, username=None, password=None, timeout=None, basename=False, pattern=None, sort=False): """ @@ -329,52 +329,52 @@ mtimes: list last modification times for items in the directory """ - #-- verify inputs for remote ftp host + # verify inputs for remote ftp host if isinstance(HOST, str): HOST = url_split(HOST) - #-- try to connect to ftp host + # try to connect to ftp host try: ftp = ftplib.FTP(HOST[0],timeout=timeout) except (socket.gaierror,IOError) as e: raise RuntimeError(f'Unable to connect to {HOST[0]}') else: ftp.login(username,password) - #-- list remote path + # list remote path output = ftp.nlst(posixpath.join(*HOST[1:])) - #-- get last modified date of ftp files and convert into unix time + # get last modified date of ftp files and convert into unix time mtimes = [None]*len(output) - #-- iterate over each file in the list and get the modification time + # iterate over each file in the list and get the modification time for i,f in enumerate(output): try: - #-- try sending modification time command + # try sending modification time command mdtm = ftp.sendcmd(f'MDTM {f}') except ftplib.error_perm: - #-- directories will return with an error + # directories will return with an error pass else: - #-- convert the modification time into unix time + # convert the modification time into unix time mtimes[i] = get_unix_time(mdtm[4:], format="%Y%m%d%H%M%S") - #-- reduce to basenames + # reduce to basenames if basename: output = [posixpath.basename(i) for i in output] - #-- reduce using regular expression pattern + # reduce using regular expression pattern if pattern: i = [i for i,f in enumerate(output) if re.search(pattern,f)] - #-- reduce list of listed items and last modified times + # reduce list of listed items and last modified times output = [output[indice] for indice in i] mtimes = [mtimes[indice] for indice in i] - #-- sort the list + # sort the list if sort: i = [i for i,j in sorted(enumerate(output), key=lambda i: i[1])] - #-- sort list of listed items and last modified times + # sort list of listed items and last modified times output = [output[indice] for indice in i] mtimes =
[mtimes[indice] for indice in i] - #-- close the ftp connection + # close the ftp connection ftp.close() - #-- return the list of items and last modified times + # return the list of items and last modified times return (output,mtimes) -#-- PURPOSE: download a file from a ftp host +# PURPOSE: download a file from a ftp host def from_ftp(HOST, username=None, password=None, timeout=None, local=None, hash='', chunk=8192, verbose=False, fid=sys.stdout, mode=0o775): @@ -409,59 +409,59 @@ def from_ftp(HOST, username=None, password=None, timeout=None, remote_buffer: obj BytesIO representation of file """ - #-- create logger + # create logger loglevel = logging.INFO if verbose else logging.CRITICAL logging.basicConfig(stream=fid, level=loglevel) - #-- verify inputs for remote ftp host + # verify inputs for remote ftp host if isinstance(HOST, str): HOST = url_split(HOST) - #-- try downloading from ftp + # try downloading from ftp try: - #-- try to connect to ftp host + # try to connect to ftp host ftp = ftplib.FTP(HOST[0],timeout=timeout) except (socket.gaierror,IOError) as e: raise RuntimeError(f'Unable to connect to {HOST[0]}') else: ftp.login(username,password) - #-- remote path + # remote path ftp_remote_path = posixpath.join(*HOST[1:]) - #-- copy remote file contents to bytesIO object + # copy remote file contents to bytesIO object remote_buffer = io.BytesIO() ftp.retrbinary(f'RETR {ftp_remote_path}', remote_buffer.write, blocksize=chunk) remote_buffer.seek(0) - #-- save file basename with bytesIO object + # save file basename with bytesIO object remote_buffer.filename = HOST[-1] - #-- generate checksum hash for remote file + # generate checksum hash for remote file remote_hash = hashlib.md5(remote_buffer.getvalue()).hexdigest() - #-- get last modified date of remote file and convert into unix time + # get last modified date of remote file and convert into unix time mdtm = ftp.sendcmd(f'MDTM {ftp_remote_path}') remote_mtime = get_unix_time(mdtm[4:], format="%Y%m%d%H%M%S") - #-- compare checksums + # compare checksums if local and (hash != remote_hash): - #-- convert to absolute path + # convert to absolute path local = os.path.abspath(local) - #-- create directory if non-existent + # create directory if non-existent if not os.access(os.path.dirname(local), os.F_OK): os.makedirs(os.path.dirname(local), mode) - #-- print file information + # print file information args = (posixpath.join(*HOST),local) logging.info('{0} -->\n\t{1}'.format(*args)) - #-- store bytes to file using chunked transfer encoding + # store bytes to file using chunked transfer encoding remote_buffer.seek(0) with open(os.path.expanduser(local), 'wb') as f: shutil.copyfileobj(remote_buffer, f, chunk) - #-- change the permissions mode + # change the permissions mode os.chmod(local,mode) - #-- keep remote modification time of file and local access time + # keep remote modification time of file and local access time os.utime(local, (os.stat(local).st_atime, remote_mtime)) - #-- close the ftp connection + # close the ftp connection ftp.close() - #-- return the bytesIO object + # return the bytesIO object remote_buffer.seek(0) return remote_buffer -#-- PURPOSE: check internet connection +# PURPOSE: check internet connection def check_connection(HOST): """ Check internet connection with http host @@ -471,7 +471,7 @@ def check_connection(HOST): HOST: str remote http host """ - #-- attempt to connect to http host + # attempt to connect to http host try: urllib2.urlopen(HOST,timeout=20,context=ssl.SSLContext()) except urllib2.URLError: 
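A usage sketch for the ftp helpers above; the host and path here are hypothetical placeholders, not endpoints promised by the toolkit:

import gravity_toolkit.utilities as utilities
# list a remote directory, keeping only gzipped files, sorted by name
files,mtimes = utilities.ftp_list(['ftp.example.com','pub','data'],
    username='anonymous', password='guest', basename=True,
    pattern=r'\.gz$', sort=True)
# stream the first match into a BytesIO object without saving to disk
buffer = utilities.from_ftp(['ftp.example.com','pub','data',files[0]],
    username='anonymous', password='guest', verbose=True)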
@@ -479,7 +479,7 @@ def check_connection(HOST): else: return True -#-- PURPOSE: list a directory on an Apache http Server +# PURPOSE: list a directory on an Apache http Server def http_list(HOST, timeout=None, context=ssl.SSLContext(), parser=lxml.etree.HTMLParser(), format='%Y-%m-%d %H:%M', pattern='', sort=False): @@ -510,39 +510,39 @@ def http_list(HOST, timeout=None, context=ssl.SSLContext(), collastmod: list last modification times for items in the directory """ - #-- verify inputs for remote http host + # verify inputs for remote http host if isinstance(HOST, str): HOST = url_split(HOST) - #-- try listing from http + # try listing from http try: - #-- Create and submit request. + # Create and submit request. request=urllib2.Request(posixpath.join(*HOST)) response=urllib2.urlopen(request,timeout=timeout,context=context) except (urllib2.HTTPError, urllib2.URLError) as e: raise Exception('List error from {0}'.format(posixpath.join(*HOST))) else: - #-- read and parse request for files (column names and modified times) + # read and parse request for files (column names and modified times) tree = lxml.etree.parse(response,parser) colnames = tree.xpath('//tr/td[not(@*)]//a/@href') - #-- get the Unix timestamp value for a modification time + # get the Unix timestamp value for a modification time collastmod = [get_unix_time(i,format=format) for i in tree.xpath('//tr/td[@align="right"][1]/text()')] - #-- reduce using regular expression pattern + # reduce using regular expression pattern if pattern: i = [i for i,f in enumerate(colnames) if re.search(pattern,f)] - #-- reduce list of column names and last modified times + # reduce list of column names and last modified times colnames = [colnames[indice] for indice in i] collastmod = [collastmod[indice] for indice in i] - #-- sort the list + # sort the list if sort: i = [i for i,j in sorted(enumerate(colnames), key=lambda i: i[1])] - #-- sort list of column names and last modified times + # sort list of column names and last modified times colnames = [colnames[indice] for indice in i] collastmod = [collastmod[indice] for indice in i] - #-- return the list of column names and last modified times + # return the list of column names and last modified times return (colnames,collastmod) -#-- PURPOSE: download a file from a http host +# PURPOSE: download a file from a http host def from_http(HOST, timeout=None, context=ssl.SSLContext(), local=None, hash='', chunk=16384, verbose=False, fid=sys.stdout, mode=0o775): @@ -577,45 +577,45 @@ def from_http(HOST, timeout=None, context=ssl.SSLContext(), remote_buffer: obj BytesIO representation of file """ - #-- create logger + # create logger loglevel = logging.INFO if verbose else logging.CRITICAL logging.basicConfig(stream=fid, level=loglevel) - #-- verify inputs for remote http host + # verify inputs for remote http host if isinstance(HOST, str): HOST = url_split(HOST) - #-- try downloading from http + # try downloading from http try: - #-- Create and submit request. + # Create and submit request. 
request = urllib2.Request(posixpath.join(*HOST)) response = urllib2.urlopen(request,timeout=timeout,context=context) except (urllib2.HTTPError, urllib2.URLError) as e: raise Exception('Download error from {0}'.format(posixpath.join(*HOST))) else: - #-- copy remote file contents to bytesIO object + # copy remote file contents to bytesIO object remote_buffer = io.BytesIO() shutil.copyfileobj(response, remote_buffer, chunk) remote_buffer.seek(0) - #-- save file basename with bytesIO object + # save file basename with bytesIO object remote_buffer.filename = HOST[-1] - #-- generate checksum hash for remote file + # generate checksum hash for remote file remote_hash = hashlib.md5(remote_buffer.getvalue()).hexdigest() - #-- compare checksums + # compare checksums if local and (hash != remote_hash): - #-- convert to absolute path + # convert to absolute path local = os.path.abspath(local) - #-- create directory if non-existent + # create directory if non-existent if not os.access(os.path.dirname(local), os.F_OK): os.makedirs(os.path.dirname(local), mode) - #-- print file information + # print file information args = (posixpath.join(*HOST),local) logging.info('{0} -->\n\t{1}'.format(*args)) - #-- store bytes to file using chunked transfer encoding + # store bytes to file using chunked transfer encoding remote_buffer.seek(0) with open(os.path.expanduser(local), 'wb') as f: shutil.copyfileobj(remote_buffer, f, chunk) - #-- change the permissions mode + # change the permissions mode os.chmod(local,mode) - #-- return the bytesIO object + # return the bytesIO object remote_buffer.seek(0) return remote_buffer @@ -698,7 +698,7 @@ def attempt_login(urs, context=ssl.SSLContext(), # reached end of available retries raise RuntimeError('End of Retries: Check NASA Earthdata credentials') -#-- PURPOSE: "login" to NASA Earthdata with supplied credentials +# PURPOSE: "login" to NASA Earthdata with supplied credentials def build_opener(username, password, context=ssl.SSLContext(), password_manager=False, get_ca_certs=False, redirect=False, authorization_header=True, urs='https://urs.earthdata.nasa.gov'): @@ -729,38 +729,38 @@ def build_opener(username, password, context=ssl.SSLContext(), opener: obj OpenerDirector instance """ - #-- https://docs.python.org/3/howto/urllib2.html#id5 + # https://docs.python.org/3/howto/urllib2.html#id5 handler = [] - #-- create a password manager + # create a password manager if password_manager: password_mgr = urllib2.HTTPPasswordMgrWithDefaultRealm() - #-- Add the username and password for NASA Earthdata Login system + # Add the username and password for NASA Earthdata Login system password_mgr.add_password(None,urs,username,password) handler.append(urllib2.HTTPBasicAuthHandler(password_mgr)) - #-- Create cookie jar for storing cookies. This is used to store and return - #-- the session cookie given to use by the data server (otherwise will just - #-- keep sending us back to Earthdata Login to authenticate). + # Create cookie jar for storing cookies. This is used to store and return + # the session cookie given to us by the data server (otherwise will just + # keep sending us back to Earthdata Login to authenticate).
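# A usage sketch (credentials here are placeholders): build the opener
# once and later urllib2.urlopen calls reuse the stored session cookie
# instead of re-authenticating against Earthdata Login
import gravity_toolkit.utilities as utilities
opener = utilities.build_opener('username', 'password')
# urllib2.urlopen(request) now sends the Basic authorization header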
cookie_jar = CookieJar() handler.append(urllib2.HTTPCookieProcessor(cookie_jar)) - #-- SSL context handler + # SSL context handler if get_ca_certs: context.get_ca_certs() handler.append(urllib2.HTTPSHandler(context=context)) - #-- redirect handler + # redirect handler if redirect: handler.append(urllib2.HTTPRedirectHandler()) - #-- create "opener" (OpenerDirector instance) + # create "opener" (OpenerDirector instance) opener = urllib2.build_opener(*handler) - #-- Encode username/password for request authorization headers - #-- add Authorization header to opener + # Encode username/password for request authorization headers + # add Authorization header to opener if authorization_header: b64 = base64.b64encode(f'{username}:{password}'.encode()) opener.addheaders = [("Authorization","Basic {0}".format(b64.decode()))] - #-- Now all calls to urllib2.urlopen use our opener. + # Now all calls to urllib2.urlopen use our opener. urllib2.install_opener(opener) - #-- All calls to urllib2.urlopen will now use handler - #-- Make sure not to include the protocol in with the URL, or - #-- HTTPPasswordMgrWithDefaultRealm will be confused. + # All calls to urllib2.urlopen will now use handler + # Make sure not to include the protocol in with the URL, or + # HTTPPasswordMgrWithDefaultRealm will be confused. return opener # NASA Cumulus AWS S3 credential endpoints @@ -784,7 +784,7 @@ def build_opener(username, password, context=ssl.SSLContext(), 'podaac-doc': 'podaac-ops-cumulus-docs' } -#-- PURPOSE: get AWS s3 client for PO.DAAC Cumulus +# PURPOSE: get AWS s3 client for PO.DAAC Cumulus def s3_client(HOST=_s3_endpoints['podaac'], timeout=None, region_name='us-west-2'): """ @@ -807,16 +807,16 @@ def s3_client(HOST=_s3_endpoints['podaac'], timeout=None, request = urllib2.Request(HOST) response = urllib2.urlopen(request, timeout=timeout) cumulus = json.loads(response.read()) - #-- get AWS client object + # get AWS client object client = boto3.client('s3', aws_access_key_id=cumulus['accessKeyId'], aws_secret_access_key=cumulus['secretAccessKey'], aws_session_token=cumulus['sessionToken'], region_name=region_name) - #-- return the AWS client for region + # return the AWS client for region return client -#-- PURPOSE: get a s3 bucket name from a presigned url +# PURPOSE: get a s3 bucket name from a presigned url def s3_bucket(presigned_url): """ Get a s3 bucket name from a presigned url @@ -835,7 +835,7 @@ def s3_bucket(presigned_url): bucket = re.sub(r's3:\/\/', r'', host[0], re.IGNORECASE) return bucket -#-- PURPOSE: get a s3 bucket key from a presigned url +# PURPOSE: get a s3 bucket key from a presigned url def s3_key(presigned_url): """ Get a s3 bucket key from a presigned url @@ -854,7 +854,7 @@ def s3_key(presigned_url): key = posixpath.join(*host[1:]) return key -#-- PURPOSE: check that entered NASA Earthdata credentials are valid +# PURPOSE: check that entered NASA Earthdata credentials are valid def check_credentials(HOST='https://podaac-tools.jpl.nasa.gov/drive/files'): """ Check that entered NASA Earthdata credentials are valid @@ -872,7 +872,7 @@ def check_credentials(HOST='https://podaac-tools.jpl.nasa.gov/drive/files'): else: return True -#-- PURPOSE: list a directory on JPL PO.DAAC/ECCO Drive https server +# PURPOSE: list a directory on JPL PO.DAAC/ECCO Drive https server def drive_list(HOST, username=None, password=None, build=True, timeout=None, urs='podaac-tools.jpl.nasa.gov', parser=lxml.etree.HTMLParser(), pattern='', sort=False): @@ -909,46 +909,46 @@ def drive_list(HOST, username=None, 
password=None, build=True, collastmod: list last modification times for items in the directory """ - #-- use netrc credentials + # use netrc credentials if build and not (username or password): username,_,password = netrc.netrc().authenticators(urs) - #-- build urllib2 opener and check credentials + # build urllib2 opener and check credentials if build: - #-- build urllib2 opener with credentials + # build urllib2 opener with credentials build_opener(username, password) - #-- check credentials + # check credentials check_credentials() - #-- verify inputs for remote https host + # verify inputs for remote https host if isinstance(HOST, str): HOST = url_split(HOST) - #-- try listing from https + # try listing from https try: - #-- Create and submit request. + # Create and submit request. request = urllib2.Request(posixpath.join(*HOST)) tree = lxml.etree.parse(urllib2.urlopen(request,timeout=timeout),parser) except (urllib2.HTTPError, urllib2.URLError) as e: raise Exception('List error from {0}'.format(posixpath.join(*HOST))) else: - #-- read and parse request for files (column names and modified times) + # read and parse request for files (column names and modified times) colnames = tree.xpath('//tr/td//a[@class="text-left"]/text()') - #-- get the Unix timestamp value for a modification time + # get the Unix timestamp value for a modification time collastmod = [get_unix_time(i) for i in tree.xpath('//tr/td[3]/text()')] - #-- reduce using regular expression pattern + # reduce using regular expression pattern if pattern: i = [i for i,f in enumerate(colnames) if re.search(pattern,f)] - #-- reduce list of column names and last modified times + # reduce list of column names and last modified times colnames = [colnames[indice] for indice in i] collastmod = [collastmod[indice] for indice in i] - #-- sort the list + # sort the list if sort: i = [i for i,j in sorted(enumerate(colnames), key=lambda i: i[1])] - #-- sort list of column names and last modified times + # sort list of column names and last modified times colnames = [colnames[indice] for indice in i] collastmod = [collastmod[indice] for indice in i] - #-- return the list of column names and last modified times + # return the list of column names and last modified times return (colnames,collastmod) -#-- PURPOSE: download a file from a PO.DAAC/ECCO Drive https server +# PURPOSE: download a file from a PO.DAAC/ECCO Drive https server def from_drive(HOST, username=None, password=None, build=True, timeout=None, urs='podaac-tools.jpl.nasa.gov', local=None, hash='', chunk=16384, verbose=False, fid=sys.stdout, mode=0o775): @@ -989,58 +989,58 @@ def from_drive(HOST, username=None, password=None, build=True, remote_buffer: obj BytesIO representation of file """ - #-- create logger + # create logger loglevel = logging.INFO if verbose else logging.CRITICAL logging.basicConfig(stream=fid, level=loglevel) - #-- use netrc credentials + # use netrc credentials if build and not (username or password): username,_,password = netrc.netrc().authenticators(urs) - #-- build urllib2 opener and check credentials + # build urllib2 opener and check credentials if build: - #-- build urllib2 opener with credentials + # build urllib2 opener with credentials build_opener(username, password) - #-- check credentials + # check credentials check_credentials() - #-- verify inputs for remote https host + # verify inputs for remote https host if isinstance(HOST, str): HOST = url_split(HOST) - #-- try downloading from https + # try downloading from https try: - #-- Create and 
submit request. + # Create and submit request. request = urllib2.Request(posixpath.join(*HOST)) response = urllib2.urlopen(request,timeout=timeout) except (urllib2.HTTPError, urllib2.URLError) as e: raise Exception('Download error from {0}'.format(posixpath.join(*HOST))) else: - #-- copy remote file contents to bytesIO object + # copy remote file contents to bytesIO object remote_buffer = io.BytesIO() shutil.copyfileobj(response, remote_buffer, chunk) remote_buffer.seek(0) - #-- save file basename with bytesIO object + # save file basename with bytesIO object remote_buffer.filename = HOST[-1] - #-- generate checksum hash for remote file + # generate checksum hash for remote file remote_hash = hashlib.md5(remote_buffer.getvalue()).hexdigest() - #-- compare checksums + # compare checksums if local and (hash != remote_hash): - #-- convert to absolute path + # convert to absolute path local = os.path.abspath(local) - #-- create directory if non-existent + # create directory if non-existent if not os.access(os.path.dirname(local), os.F_OK): os.makedirs(os.path.dirname(local), mode) - #-- print file information + # print file information args = (posixpath.join(*HOST),local) logging.info('{0} -->\n\t{1}'.format(*args)) - #-- store bytes to file using chunked transfer encoding + # store bytes to file using chunked transfer encoding remote_buffer.seek(0) with open(os.path.expanduser(local), 'wb') as f: shutil.copyfileobj(remote_buffer, f, chunk) - #-- change the permissions mode + # change the permissions mode os.chmod(local,mode) - #-- return the bytesIO object + # return the bytesIO object remote_buffer.seek(0) return remote_buffer -#-- PURPOSE: retrieve shortnames for GRACE/GRACE-FO products +# PURPOSE: retrieve shortnames for GRACE/GRACE-FO products def cmr_product_shortname(mission, center, release, level='L2', version='0'): """ Create a list of product shortnames for CMR queries @@ -1067,71 +1067,71 @@ def cmr_product_shortname(mission, center, release, level='L2', version='0'): cmr_shortnames: list shortnames for CMR queries """ - #-- build dictionary for GRACE/GRACE-FO shortnames + # build dictionary for GRACE/GRACE-FO shortnames cmr_shortname = {} cmr_shortname['grace'] = {} cmr_shortname['grace-fo'] = {} - #-- format of GRACE/GRACE-FO shortnames + # format of GRACE/GRACE-FO shortnames grace_l1_format = 'GRACE_{0}_GRAV_{1}_{2}' grace_l2_format = 'GRACE_{0}_{1}_GRAV_{2}_{3}' gracefo_l1_format = 'GRACEFO_{0}_{1}_GRAV_{2}_{3}' gracefo_l2_format = 'GRACEFO_{0}_{1}_MONTHLY_{2}{3}' - #-- dictionary entries for each product level + # dictionary entries for each product level cmr_shortname['grace']['L1B'] = dict(GFZ={},JPL={}) cmr_shortname['grace']['L2'] = dict(CSR={},GFZ={},JPL={}) cmr_shortname['grace-fo']['L1A'] = dict(JPL={}) cmr_shortname['grace-fo']['L1B'] = dict(JPL={}) cmr_shortname['grace-fo']['L2'] = dict(CSR={},GFZ={},JPL={}) - #-- dictionary entry for GRACE Level-1B deliasing products - #-- for each data release + # dictionary entry for GRACE Level-1B dealiasing products + # for each data release for rl in ['RL06']: shortname = grace_l1_format.format('AOD1B','GFZ',rl) cmr_shortname['grace']['L1B']['GFZ'][rl] = [shortname] - #-- dictionary entries for GRACE Level-1B ranging data products - #-- for each data release + # dictionary entries for GRACE Level-1B ranging data products + # for each data release for rl in ['RL02','RL03']: shortname = grace_l1_format.format('L1B','JPL',rl) cmr_shortname['grace']['L1B']['JPL'][rl] = [shortname] - #-- dictionary entries for GRACE Level-2 products -
#-- for each data release + # dictionary entries for GRACE Level-2 products + # for each data release for rl in ['RL06']: - #-- Center for Space Research (CSR) + # Center for Space Research (CSR) cmr_shortname['grace']['L2']['CSR'][rl] = [] - #-- German Research Centre for Geosciences (GFZ) + # German Research Centre for Geosciences (GFZ) cmr_shortname['grace']['L2']['GFZ'][rl] = [] - #-- NASA Jet Propulsion Laboratory (JPL) + # NASA Jet Propulsion Laboratory (JPL) cmr_shortname['grace']['L2']['JPL'][rl] = [] - #-- create list of product shortnames for GRACE level-2 products - #-- for each L2 data processing center + # create list of product shortnames for GRACE level-2 products + # for each L2 data processing center for c in ['CSR','GFZ','JPL']: - #-- for each level-2 product + # for each level-2 product for p in ['GAA', 'GAB', 'GAC', 'GAD', 'GSM']: - #-- skip atmospheric and oceanic dealiasing products for CSR + # skip atmospheric and oceanic dealiasing products for CSR if (c == 'CSR') and p in ('GAA', 'GAB'): continue - #-- shortname for center and product + # shortname for center and product shortname = grace_l2_format.format(p,'L2',c,rl) cmr_shortname['grace']['L2'][c][rl].append(shortname) - #-- dictionary entries for GRACE-FO Level-1 ranging data products - #-- for each data release + # dictionary entries for GRACE-FO Level-1 ranging data products + # for each data release for rl in ['RL04']: for l in ['L1A','L1B']: shortname = gracefo_l1_format.format(l,'ASCII','JPL',rl) cmr_shortname['grace-fo'][l]['JPL'][rl] = [shortname] - #-- dictionary entries for GRACE-FO Level-2 products - #-- for each data release + # dictionary entries for GRACE-FO Level-2 products + # for each data release for rl in ['RL06']: rs = re.findall(r'\d+',rl).pop().zfill(3) for c in ['CSR','GFZ','JPL']: shortname = gracefo_l2_format.format('L2',c,rs,version) cmr_shortname['grace-fo']['L2'][c][rl] = [shortname] - #-- try to retrieve the shortname for a given mission + # try to retrieve the shortname for a given mission try: cmr_shortnames = cmr_shortname[mission][level][center][release] except Exception as e: @@ -1179,10 +1179,10 @@ def cmr_readable_granules(product, level='L2', solution='BA01', version='0'): pattern = '{0}-2_???????-???????_????_?????_{1}_???{2}*'.format(*args) else: pattern = '*' - #-- return readable granules pattern + # return readable granules pattern return pattern -#-- PURPOSE: filter the CMR json response for desired data files +# PURPOSE: filter the CMR json response for desired data files def cmr_filter_json(search_results, endpoint="data"): """ Filter the CMR json response for desired data files @@ -1206,18 +1206,18 @@ def cmr_filter_json(search_results, endpoint="data"): granule_mtimes: list GRACE/GRACE-FO granule modification times """ - #-- output list of granule ids, urls and modified times + # output list of granule ids, urls and modified times granule_names = [] granule_urls = [] granule_mtimes = [] - #-- check that there are urls for request + # check that there are urls for request if ('feed' not in search_results) or ('entry' not in search_results['feed']): return (granule_names,granule_urls) - #-- descriptor links for each endpoint + # descriptor links for each endpoint rel = {} rel['data'] = "http://esipfed.org/ns/fedsearch/1.1/data#" rel['s3'] = "http://esipfed.org/ns/fedsearch/1.1/s3#" - #-- iterate over references and get cmr location + # iterate over references and get cmr location for entry in search_results['feed']['entry']: granule_names.append(entry['title']) 
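# An illustrative call (arguments assumed): for the Level-2 branch above,
# the returned CMR readable-granule glob has the form
# 'GSM-2_???????-???????_????_?????_BA01_???0*'
from gravity_toolkit.utilities import cmr_readable_granules
pattern = cmr_readable_granules('GSM', level='L2', solution='BA01', version='0')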
granule_mtimes.append(get_unix_time(entry['updated'], @@ -1226,10 +1226,10 @@ def cmr_filter_json(search_results, endpoint="data"): if (link['rel'] == rel[endpoint]): granule_urls.append(link['href']) break - #-- return the list of urls, granule ids and modified times + # return the list of urls, granule ids and modified times return (granule_names,granule_urls,granule_mtimes) -#-- PURPOSE: filter the CMR json response for desired metadata files +# PURPOSE: filter the CMR json response for desired metadata files def cmr_metadata_json(search_results, endpoint="data"): """ Filter the CMR json response for desired metadata files @@ -1250,25 +1250,25 @@ def cmr_metadata_json(search_results, endpoint="data"): collection_urls: list urls from collection of endpoint type """ - #-- output list of collection urls + # output list of collection urls collection_urls = [] - #-- check that there are urls for request + # check that there are urls for request if ('feed' not in search_results) or ('entry' not in search_results['feed']): return collection_urls - #-- descriptor links for each endpoint + # descriptor links for each endpoint rel = {} rel['documentation'] = "http://esipfed.org/ns/fedsearch/1.1/documentation#" rel['data'] = "http://esipfed.org/ns/fedsearch/1.1/data#" rel['s3'] = "http://esipfed.org/ns/fedsearch/1.1/s3#" - #-- iterate over references and get cmr location + # iterate over references and get cmr location for entry in search_results['feed']['entry']: for link in entry['links']: if (link['rel'] == rel[endpoint]): collection_urls.append(link['href']) - #-- return the list of urls + # return the list of urls return collection_urls -#-- PURPOSE: cmr queries for GRACE/GRACE-FO products +# PURPOSE: cmr queries for GRACE/GRACE-FO products def cmr(mission=None, center=None, release=None, level='L2', product=None, solution='BA01', version='0', start_date=None, end_date=None, provider='POCLOUD', endpoint='data', verbose=False, fid=sys.stdout): @@ -1319,51 +1319,51 @@ def cmr(mission=None, center=None, release=None, level='L2', product=None, granule_mtimes: list GRACE/GRACE-FO granule modification times """ - #-- create logger + # create logger loglevel = logging.INFO if verbose else logging.CRITICAL logging.basicConfig(stream=fid, level=loglevel) - #-- build urllib2 opener with SSL context - #-- https://docs.python.org/3/howto/urllib2.html#id5 + # build urllib2 opener with SSL context + # https://docs.python.org/3/howto/urllib2.html#id5 handler = [] - #-- Create cookie jar for storing cookies + # Create cookie jar for storing cookies cookie_jar = CookieJar() handler.append(urllib2.HTTPCookieProcessor(cookie_jar)) handler.append(urllib2.HTTPSHandler(context=ssl.SSLContext())) - #-- create "opener" (OpenerDirector instance) + # create "opener" (OpenerDirector instance) opener = urllib2.build_opener(*handler) - #-- build CMR query + # build CMR query cmr_query_type = 'granules' cmr_format = 'json' cmr_page_size = 2000 CMR_HOST = ['https://cmr.earthdata.nasa.gov','search', f'{cmr_query_type}.{cmr_format}'] - #-- build list of CMR query parameters + # build list of CMR query parameters CMR_KEYS = [] CMR_KEYS.append(f'?provider={provider}') CMR_KEYS.append('&sort_key[]=start_date') CMR_KEYS.append('&sort_key[]=producer_granule_id') CMR_KEYS.append('&scroll=true') CMR_KEYS.append(f'&page_size={cmr_page_size}') - #-- dictionary of product shortnames + # dictionary of product shortnames short_names = cmr_product_shortname(mission, center, release, level=level, version=version) for short_name in 
short_names: CMR_KEYS.append(f'&short_name={short_name}') - #-- append keys for start and end time - #-- verify that start and end times are in ISO format + # append keys for start and end time + # verify that start and end times are in ISO format start_date = isoformat(start_date) if start_date else '' end_date = isoformat(end_date) if end_date else '' CMR_KEYS.append(f'&temporal={start_date},{end_date}') - #-- append keys for querying specific products + # append keys for querying specific products CMR_KEYS.append("&options[readable_granule_name][pattern]=true") CMR_KEYS.append("&options[spatial][or]=true") readable_granule = cmr_readable_granules(product, level=level, solution=solution, version=version) CMR_KEYS.append(f"&readable_granule_name[]={readable_granule}") - #-- full CMR query url + # full CMR query url cmr_query_url = "".join([posixpath.join(*CMR_HOST),*CMR_KEYS]) logging.info(f'CMR request={cmr_query_url}') - #-- output list of granule names and urls + # output list of granule names and urls granule_names = [] granule_urls = [] granule_mtimes = [] @@ -1373,23 +1373,23 @@ def cmr(mission=None, center=None, release=None, level='L2', product=None, if cmr_scroll_id: req.add_header('cmr-scroll-id', cmr_scroll_id) response = opener.open(req) - #-- get scroll id for next iteration + # get scroll id for next iteration if not cmr_scroll_id: headers = {k.lower():v for k,v in dict(response.info()).items()} cmr_scroll_id = headers['cmr-scroll-id'] - #-- read the CMR search as JSON + # read the CMR search as JSON search_page = json.loads(response.read().decode('utf8')) ids,urls,mtimes = cmr_filter_json(search_page, endpoint=endpoint) if not urls: break - #-- extend lists + # extend lists granule_names.extend(ids) granule_urls.extend(urls) granule_mtimes.extend(mtimes) - #-- return the list of granule ids, urls and modification times + # return the list of granule ids, urls and modification times return (granule_names, granule_urls, granule_mtimes) -#-- PURPOSE: cmr queries for GRACE/GRACE-FO auxiliary data and documentation +# PURPOSE: cmr queries for GRACE/GRACE-FO auxiliary data and documentation def cmr_metadata(mission=None, center=None, release=None, level='L2', version='0', provider='POCLOUD', endpoint='data', pattern='', verbose=False, fid=sys.stdout): @@ -1432,50 +1432,50 @@ def cmr_metadata(mission=None, center=None, release=None, level='L2', collection_urls: list urls from collection of endpoint type """ - #-- create logger + # create logger loglevel = logging.INFO if verbose else logging.CRITICAL logging.basicConfig(stream=fid, level=loglevel) - #-- build urllib2 opener with SSL context - #-- https://docs.python.org/3/howto/urllib2.html#id5 + # build urllib2 opener with SSL context + # https://docs.python.org/3/howto/urllib2.html#id5 handler = [] - #-- Create cookie jar for storing cookies + # Create cookie jar for storing cookies cookie_jar = CookieJar() handler.append(urllib2.HTTPCookieProcessor(cookie_jar)) handler.append(urllib2.HTTPSHandler(context=ssl.SSLContext())) - #-- create "opener" (OpenerDirector instance) + # create "opener" (OpenerDirector instance) opener = urllib2.build_opener(*handler) - #-- build CMR query + # build CMR query cmr_query_type = 'collections' cmr_format = 'json' CMR_HOST = ['https://cmr.earthdata.nasa.gov','search', f'{cmr_query_type}.{cmr_format}'] - #-- build list of CMR query parameters + # build list of CMR query parameters CMR_KEYS = [] CMR_KEYS.append(f'?provider={provider}') - #-- dictionary of product shortnames + # dictionary of 
product shortnames short_names = cmr_product_shortname(mission, center, release, level=level, version=version) for short_name in short_names: CMR_KEYS.append(f'&short_name={short_name}') - #-- full CMR query url + # full CMR query url cmr_query_url = "".join([posixpath.join(*CMR_HOST),*CMR_KEYS]) logging.info(f'CMR request={cmr_query_url}') - #-- query CMR for collection metadata + # query CMR for collection metadata req = urllib2.Request(cmr_query_url) response = opener.open(req) - #-- read the CMR search as JSON + # read the CMR search as JSON search_page = json.loads(response.read().decode('utf8')) - #-- filter the JSON response for desired endpoint links + # filter the JSON response for desired endpoint links collection_urls = cmr_metadata_json(search_page, endpoint=endpoint) - #-- reduce using regular expression pattern + # reduce using regular expression pattern if pattern: i = [i for i,f in enumerate(collection_urls) if re.search(pattern,f)] - #-- reduce list of collection_urls + # reduce list of collection_urls collection_urls = [collection_urls[indice] for indice in i] - #-- return the list of collection urls + # return the list of collection urls return collection_urls -#-- PURPOSE: create and compile regular expression operator to find GRACE files +# PURPOSE: create and compile regular expression operator to find GRACE files def compile_regex_pattern(PROC, DREL, DSET, mission=None, solution=r'BA01', version=r'\d+'): """ @@ -1515,7 +1515,7 @@ def compile_regex_pattern(PROC, DREL, DSET, mission=None, version: str, default '0' GRACE/GRACE-FO Level-2 data version """ - #-- verify inputs + # verify inputs if mission and mission not in ('GRAC','GRFO'): raise ValueError(f'Unknown mission {mission}') if PROC not in ('CNES','CSR','GFZ','JPL'): @@ -1524,61 +1524,61 @@ def compile_regex_pattern(PROC, DREL, DSET, mission=None, raise ValueError(f'Unknown Level-2 product {DSET}') if isinstance(version, int): version = str(version).zfill(2) - #-- compile regular expression operator for inputs + # compile regular expression operator for inputs if ((DSET == 'GSM') and (PROC == 'CSR') and (DREL in ('RL04','RL05'))): - #-- CSR GSM: only monthly degree 60 products - #-- not the longterm degree 180, degree 96 dataset or the - #-- special order 30 datasets for the high-resonance months + # CSR GSM: only monthly degree 60 products + # not the longterm degree 180, degree 96 dataset or the + # special order 30 datasets for the high-resonance months release, = re.findall(r'\d+', DREL) args = (DSET, int(release)) pattern = r'{0}-2_\d+-\d+_\d+_UTCSR_0060_000{1:d}(\.gz)?$' elif ((DSET == 'GSM') and (PROC == 'CSR') and (DREL == 'RL06')): - #-- CSR GSM RL06: monthly products for mission and solution + # CSR GSM RL06: monthly products for mission and solution release, = re.findall(r'\d+', DREL) args = (DSET, mission, solution, release.zfill(2), version.zfill(2)) pattern = r'{0}-2_\d+-\d+_{1}_UTCSR_{2}_{3}{4}(\.gz)?$' elif ((DSET == 'GSM') and (PROC == 'GFZ') and (DREL == 'RL04')): - #-- GFZ RL04: only unconstrained solutions (not GK2 products) + # GFZ RL04: only unconstrained solutions (not GK2 products) args = (DSET,) pattern = r'{0}-2_\d+-\d+_\d+_EIGEN_G---_0004(\.gz)?$' elif ((DSET == 'GSM') and (PROC == 'GFZ') and (DREL == 'RL05')): - #-- GFZ RL05: updated RL05a products which are less constrained to - #-- the background model. Allow regularized fields + # GFZ RL05: updated RL05a products which are less constrained to + # the background model. 
Allow regularized fields args = (DSET, r'(G---|GK2-)') pattern = r'{0}-2_\d+-\d+_\d+_EIGEN_{1}_005a(\.gz)?$' elif ((DSET == 'GSM') and (PROC == 'GFZ') and (DREL == 'RL06')): - #-- GFZ GSM RL06: monthly products for mission and solution + # GFZ GSM RL06: monthly products for mission and solution release, = re.findall(r'\d+', DREL) args = (DSET, mission, solution, release.zfill(2), version.zfill(2)) pattern = r'{0}-2_\d+-\d+_{1}_GFZOP_{2}_{3}{4}(\.gz)?$' elif (PROC == 'JPL') and DREL in ('RL04','RL05'): - #-- JPL: RL04a and RL05a products (denoted by 0001) + # JPL: RL04a and RL05a products (denoted by 0001) release, = re.findall(r'\d+', DREL) args = (DSET, int(release)) pattern = r'{0}-2_\d+-\d+_\d+_JPLEM_0001_000{1:d}(\.gz)?$' elif ((DSET == 'GSM') and (PROC == 'JPL') and (DREL == 'RL06')): - #-- JPL GSM RL06: monthly products for mission and solution + # JPL GSM RL06: monthly products for mission and solution release, = re.findall(r'\d+', DREL) args = (DSET, mission, solution, release.zfill(2), version.zfill(2)) pattern = r'{0}-2_\d+-\d+_{1}_JPLEM_{2}_{3}{4}(\.gz)?$' elif (PROC == 'CNES'): - #-- CNES: use products in standard format + # CNES: use products in standard format args = (DSET,) pattern = r'{0}-2_\d+-\d+_\d+_GRGS_([a-zA-Z0-9_\-]+)(\.txt)?(\.gz)?$' elif mission is not None: - #-- deliasing products with mission listed + # dealiasing products with mission listed args = (DSET, mission) pattern = r'{0}-2_([a-zA-Z0-9_\-]+)_{1}_([a-zA-Z0-9_\-]+)(\.gz)?$' else: - #-- deliasing products: use products in standard format + # dealiasing products: use products in standard format args = (DSET,) pattern = r'{0}-2_([a-zA-Z0-9_\-]+)(\.gz)?$' - #-- return the compiled regular expression operator + # return the compiled regular expression operator return re.compile(pattern.format(*args), re.VERBOSE) -#-- PURPOSE: download geocenter files from Sutterley and Velicogna (2019) -#-- https://doi.org/10.3390/rs11182108 -#-- https://doi.org/10.6084/m9.figshare.7388540 +# PURPOSE: download geocenter files from Sutterley and Velicogna (2019) +# https://doi.org/10.3390/rs11182108 +# https://doi.org/10.6084/m9.figshare.7388540 def from_figshare(directory, article='7388540', timeout=None, context=ssl.SSLContext(), chunk=16384, verbose=False, fid=sys.stdout, pattern=r'(CSR|GFZ|JPL)_(RL\d+)_(.*?)_SLF_iter.txt$', mode=0o775): @@ -1614,30 +1614,30 @@ def from_figshare(directory, article='7388540', timeout=None, and Ocean Model Outputs", *Remote Sensing*, 11(18), 2108, (2019). `doi: 10.3390/rs11182108 `_ """ - #-- figshare host + # figshare host HOST=['https://api.figshare.com','v2','articles',article] - #-- recursively create directory if non-existent + # recursively create directory if non-existent directory = os.path.abspath(os.path.expanduser(directory)) if not os.access(os.path.join(directory,'geocenter'), os.F_OK): os.makedirs(os.path.join(directory,'geocenter'), mode) - #-- Create and submit request. + # Create and submit request.
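# A usage sketch of the compiled operator above, screening a hypothetical
# CSR RL06 GSM filename (solution BA01, version 0600)
from gravity_toolkit.utilities import compile_regex_pattern
rx = compile_regex_pattern('CSR', 'RL06', 'GSM', mission='GRAC')
print(bool(rx.match('GSM-2_2002095-2002120_GRAC_UTCSR_BA01_0600.gz')))  # True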
request = urllib2.Request(posixpath.join(*HOST)) response = urllib2.urlopen(request,timeout=timeout,context=context) resp = json.loads(response.read()) - #-- reduce list of geocenter files + # reduce list of geocenter files geocenter_files = [f for f in resp['files'] if re.match(pattern,f['name'])] for f in geocenter_files: - #-- download geocenter file + # download geocenter file original_md5 = get_hash(os.path.join(directory,'geocenter',f['name'])) from_http(url_split(f['download_url']),timeout=timeout,context=context, local=os.path.join(directory,'geocenter',f['name']), hash=original_md5,chunk=chunk,verbose=verbose,fid=fid,mode=mode) - #-- verify MD5 checksums + # verify MD5 checksums computed_md5 = get_hash(os.path.join(directory,'geocenter',f['name'])) if (computed_md5 != f['supplied_md5']): raise Exception('Checksum mismatch: {0}'.format(f['download_url'])) -#-- PURPOSE: send files to figshare using secure FTP uploader +# PURPOSE: send files to figshare using secure FTP uploader def to_figshare(files, username=None, password=None, directory=None, timeout=None, context=ssl.SSLContext(ssl.PROTOCOL_TLS), get_ca_certs=False, verbose=False, chunk=8192): @@ -1666,38 +1666,38 @@ def to_figshare(files, username=None, password=None, directory=None, chunk: int, default 8192 chunk size for transfer encoding """ - #-- SSL context handler + # SSL context handler if get_ca_certs: context.get_ca_certs() - #-- connect to figshare secure ftp host + # connect to figshare secure ftp host ftps = ftplib.FTP_TLS(host='ftps.figshare.com', user=username, passwd=password, context=context, timeout=timeout) - #-- set the verbosity level + # set the verbosity level ftps.set_debuglevel(1) if verbose else None - #-- encrypt data connections + # encrypt data connections ftps.prot_p() - #-- try to create project directory + # try to create project directory try: - #-- will only create the directory if non-existent + # will only create the directory if non-existent ftps.mkd(posixpath.join('data',directory)) except: pass - #-- upload each file + # upload each file for local_file in files: - #-- remote ftp file + # remote ftp file ftp_remote_path = posixpath.join('data',directory, os.path.basename(local_file)) - #-- open local file and send bytes + # open local file and send bytes with open(os.path.expanduser(local_file),'rb') as fp: ftps.storbinary(f'STOR {ftp_remote_path}', fp, blocksize=chunk, callback=None, rest=None) -#-- PURPOSE: download satellite laser ranging files from CSR -#-- http://download.csr.utexas.edu/pub/slr/geocenter/GCN_L1_L2_30d_CF-CM.txt -#-- http://download.csr.utexas.edu/outgoing/cheng/gct2est.220_5s +# PURPOSE: download satellite laser ranging files from CSR +# http://download.csr.utexas.edu/pub/slr/geocenter/GCN_L1_L2_30d_CF-CM.txt +# http://download.csr.utexas.edu/outgoing/cheng/gct2est.220_5s def from_csr(directory, timeout=None, context=ssl.SSLContext(), chunk=16384, verbose=False, fid=sys.stdout, mode=0o775): """ @@ -1721,31 +1721,31 @@ def from_csr(directory, timeout=None, context=ssl.SSLContext(), mode: oct, default 0o775 permissions mode of output local file """ - #-- CSR download http server + # CSR download http server HOST = 'http://download.csr.utexas.edu' - #-- recursively create directory if non-existent + # recursively create directory if non-existent directory = os.path.abspath(os.path.expanduser(directory)) if not os.access(os.path.join(directory,'geocenter'), os.F_OK): os.makedirs(os.path.join(directory,'geocenter'), mode) - #-- download SLR 5x5, figure axis and azimuthal 
dependence files + # download SLR 5x5, figure axis and azimuthal dependence files FILES = [] FILES.append([HOST,'pub','slr','degree_5', 'CSR_Monthly_5x5_Gravity_Harmonics.txt']) FILES.append([HOST,'pub','slr','degree_2','C20_RL06.txt']) FILES.append([HOST,'pub','slr','degree_2','C21_S21_RL06.txt']) FILES.append([HOST,'pub','slr','degree_2','C22_S22_RL06.txt']) - #-- for each SLR file + # for each SLR file for FILE in FILES: original_md5 = get_hash(os.path.join(directory,FILE[-1])) from_http(FILE,timeout=timeout,context=context, local=os.path.join(directory,FILE[-1]), hash=original_md5,chunk=chunk,verbose=verbose, fid=fid,mode=mode) - #-- download CF-CM SLR and updated SLR geocenter files from Minkang Cheng + # download CF-CM SLR and updated SLR geocenter files from Minkang Cheng FILES = [] FILES.append([HOST,'pub','slr','geocenter','GCN_L1_L2_30d_CF-CM.txt']) FILES.append([HOST,'outgoing','cheng','gct2est.220_5s']) - #-- for each SLR geocenter file + # for each SLR geocenter file for FILE in FILES: original_md5 = get_hash(os.path.join(directory,'geocenter',FILE[-1])) from_http(FILE,timeout=timeout,context=context, @@ -1753,9 +1753,9 @@ def from_csr(directory, timeout=None, context=ssl.SSLContext(), hash=original_md5,chunk=chunk,verbose=verbose, fid=fid,mode=mode) -#-- PURPOSE: download GravIS and satellite laser ranging files from GFZ -#-- ftp://isdcftp.gfz-potsdam.de/grace/Level-2/GFZ/RL06_SLR_C20/ -#-- ftp://isdcftp.gfz-potsdam.de/grace/GravIS/GFZ/Level-2B/aux_data/ +# PURPOSE: download GravIS and satellite laser ranging files from GFZ +# ftp://isdcftp.gfz-potsdam.de/grace/Level-2/GFZ/RL06_SLR_C20/ +# ftp://isdcftp.gfz-potsdam.de/grace/GravIS/GFZ/Level-2B/aux_data/ def from_gfz(directory, timeout=None, chunk=8192, verbose=False, fid=sys.stdout, mode=0o775): """ @@ -1777,30 +1777,30 @@ def from_gfz(directory, timeout=None, chunk=8192, verbose=False, mode: oct, default 0o775 permissions mode of output local file """ - #-- recursively create directories if non-existent + # recursively create directories if non-existent directory = os.path.abspath(os.path.expanduser(directory)) if not os.access(os.path.join(directory,'geocenter'), os.F_OK): os.makedirs(os.path.join(directory,'geocenter'), mode) - #-- SLR oblateness and combined low-degree harmonic files + # SLR oblateness and combined low-degree harmonic files FILES = [] FILES.append(['isdcftp.gfz-potsdam.de','grace','Level-2','GFZ', 'RL06_SLR_C20','GFZ_RL06_C20_SLR.dat']) FILES.append(['isdcftp.gfz-potsdam.de','grace','GravIS','GFZ', 'Level-2B','aux_data','GRAVIS-2B_GFZOP_GRACE+SLR_LOW_DEGREES_0002.dat']) - #-- get each file + # get each file for FILE in FILES: local = os.path.join(directory,FILE[-1]) from_ftp(FILE,timeout=timeout,local=local,hash=get_hash(local), chunk=chunk,verbose=verbose,fid=fid,mode=mode) - #-- GravIS geocenter file + # GravIS geocenter file FILE = ['isdcftp.gfz-potsdam.de','grace','GravIS','GFZ','Level-2B', 'aux_data','GRAVIS-2B_GFZOP_GEOCENTER_0002.dat'] local = os.path.join(directory,'geocenter',FILE[-1]) from_ftp(FILE,timeout=timeout,local=local,hash=get_hash(local), chunk=chunk,verbose=verbose,fid=fid,mode=mode) -#-- PURPOSE: download satellite laser ranging files from GSFC -#-- https://earth.gsfc.nasa.gov/geo/data/slr +# PURPOSE: download satellite laser ranging files from GSFC +# https://earth.gsfc.nasa.gov/geo/data/slr def from_gsfc(directory, host='https://earth.gsfc.nasa.gov/sites/default/files/geo/slr-weekly', timeout=None, context=ssl.SSLContext(), chunk=16384, verbose=False, @@ -1830,11 +1830,11 @@ def 
from_gsfc(directory, mode: oct, default 0o775 permissions mode of output local file """ - #-- recursively create directory if non-existent + # recursively create directory if non-existent directory = os.path.abspath(os.path.expanduser(directory)) if not os.access(directory, os.F_OK): os.makedirs(directory, mode) - #-- download GSFC SLR 5x5 file + # download GSFC SLR 5x5 file FILE = 'gsfc_slr_5x5c61s61.txt' original_md5 = get_hash(os.path.join(directory,FILE)) fileID = from_http(posixpath.join(host,FILE), @@ -1842,24 +1842,24 @@ def from_gsfc(directory, local=os.path.join(directory,FILE), hash=original_md5, chunk=chunk, verbose=verbose, fid=fid, mode=mode) - #-- create a dated copy for archival purposes + # create a dated copy for archival purposes if copy: - #-- create copy of file for archiving - #-- read file and extract data date span + # create copy of file for archiving + # read file and extract data date span file_contents = fileID.read().decode('utf-8').splitlines() data_span, = [l for l in file_contents if l.startswith('Data span:')] - #-- extract start and end of data date span + # extract start and end of data date span span_start,span_end = re.findall(r'\d+[\s+]\w{3}[\s+]\d{4}', data_span) - #-- create copy of file with date span in filename + # create copy of file with date span in filename COPY = 'GSFC_SLR_5x5c61s61_{0}_{1}.txt'.format( time.strftime('%Y%m', time.strptime(span_start, '%d %b %Y')), time.strftime('%Y%m', time.strptime(span_end, '%d %b %Y'))) shutil.copyfile(os.path.join(directory,FILE), os.path.join(directory,COPY)) - #-- copy modification times and permissions for archive file + # copy modification times and permissions for archive file shutil.copystat(os.path.join(directory,FILE), os.path.join(directory,COPY)) -#-- PURPOSE: list a directory on the GFZ ICGEM https server -#-- http://icgem.gfz-potsdam.de +# PURPOSE: list a directory on the GFZ ICGEM https server +# http://icgem.gfz-potsdam.de def icgem_list(host='http://icgem.gfz-potsdam.de/tom_longtime', timeout=None, parser=lxml.etree.HTMLParser()): """ @@ -1881,17 +1881,17 @@ def icgem_list(host='http://icgem.gfz-potsdam.de/tom_longtime', colfiles: dict Static gravity field file urls mapped by field name """ - #-- try listing from https + # try listing from https try: - #-- Create and submit request. + # Create and submit request. 
+ # Create and submit request.
request = urllib2.Request(host) tree = lxml.etree.parse(urllib2.urlopen(request,timeout=timeout),parser) except: raise Exception(f'List error from {host}') else: - #-- read and parse request for files + # read and parse request for files colfiles = tree.xpath('//td[@class="tom-cell-modelfile"]//a/@href') - #-- reduce list of files to find gfc files - #-- return the dict of model files mapped by name + # reduce list of files to find gfc files + # return the dict of model files mapped by name return {re.findall(r'(.*?).gfc',posixpath.basename(f)).pop():url_split(f) for i,f in enumerate(colfiles) if re.search(r'gfc$',f)} diff --git a/notebooks/GRACE-Harmonic-Plots.ipynb b/notebooks/GRACE-Harmonic-Plots.ipynb index 1eec4fe7..6cb8751e 100644 --- a/notebooks/GRACE-Harmonic-Plots.ipynb +++ b/notebooks/GRACE-Harmonic-Plots.ipynb @@ -441,7 +441,7 @@ "# imshow = show image (interpolation nearest for blocks)\n", "im = ax1.imshow(np.ma.zeros((LMAX+1,LMAX+1)), interpolation='nearest',\n", " cmap=cmap.value, extent=(-LMAX,LMAX,LMAX,0), animated=True)\n", - "#-- Z color limit between -1 and 1\n", + "# Z color limit between -1 and 1\n", "im.set_clim(-1.0,1.0)\n", "\n", "# add date label (year-calendar month e.g. 2002-01)\n", diff --git a/scripts/aod1b_geocenter.py b/scripts/aod1b_geocenter.py index d91005bd..979b4cfb 100644 --- a/scripts/aod1b_geocenter.py +++ b/scripts/aod1b_geocenter.py @@ -70,7 +70,7 @@ from gravity_toolkit.geocenter import geocenter import gravity_toolkit.utilities as utilities -#-- program module to read the degree 1 coefficients of the AOD1b data +# program module to read the degree 1 coefficients of the AOD1b data def aod1b_geocenter(base_dir, DREL='', DSET='', @@ -96,84 +96,84 @@ def aod1b_geocenter(base_dir, MODE: Permission mode of directories and files """ - #-- compile regular expressions operators for file dates - #-- will extract the year and month from the tar file (.tar.gz) + # compile regular expressions operators for file dates + # will extract the year and month from the tar file (.tar.gz) tx = re.compile(r'AOD1B_(\d+)-(\d+)_\d+\.(tar\.gz|tgz)$', re.VERBOSE) - #-- and the calendar day from the ascii file (.asc or gzipped .asc.gz) + # and the calendar day from the ascii file (.asc or gzipped .asc.gz) fx = re.compile(r'AOD1B_\d+-\d+-(\d+)_X_\d+.asc(.gz)?$', re.VERBOSE) - #-- compile regular expressions operator for the clm/slm headers - #-- for the specific AOD1b product + # compile regular expressions operator for the clm/slm headers + # for the specific AOD1b product hx = re.compile(rf'^DATA.*SET.*{DSET}', re.VERBOSE) - #-- compile regular expression operator to find numerical instances - #-- will extract the data from the file + # compile regular expression operator to find numerical instances + # will extract the data from the file regex_pattern = r'[-+]?(?:(?:\d*\.\d+)|(?:\d+\.?))(?:[Ee][+-]?\d+)?' 
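# Illustrative check of the numerical pattern above (sample input is
# assumed, not from the data files): re.findall(regex_pattern,
# '2 0 -0.12345E-08 1.2e+03') evaluates to
# ['2', '0', '-0.12345E-08', '1.2e+03'], i.e. signed fixed-point and
# exponential notation are both captured whole, since every group in the
# pattern is non-capturing.
# The harmonic count defined below also follows directly: the number of
# (l,m) pairs with 0 <= m <= l <= LMAX is
#   sum(l+1 for l in range(LMAX+1)) == (LMAX+1)*(LMAX+2)//2
#                                   == (LMAX**2 + 3*LMAX)//2 + 1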
rx = re.compile(regex_pattern, re.VERBOSE) - #-- output formatting string + # output formatting string fstr = '{0:4d}-{1:02d}-{2:02d}T{3:02d}:00:00 {4:12.8f} {5:12.8f} {6:12.8f}' - #-- set number of hours in a file - #-- set the ocean model for a given release + # set number of hours in a file + # set the ocean model for a given release if DREL in ('RL01','RL02','RL03','RL04','RL05'): - #-- for 00, 06, 12 and 18 + # for 00, 06, 12 and 18 n_time = 4 ATMOSPHERE = 'ECMWF' OCEAN_MODEL = 'OMCT' LMAX = 100 elif DREL in ('RL06',): - #-- for 00, 03, 06, 09, 12, 15, 18 and 21 + # for 00, 03, 06, 09, 12, 15, 18 and 21 n_time = 8 ATMOSPHERE = 'ECMWF' OCEAN_MODEL = 'MPIOM' LMAX = 180 else: raise ValueError('Invalid data release') - #-- Calculating the number of cos and sin harmonics up to LMAX + # Calculating the number of cos and sin harmonics up to LMAX n_harm = (LMAX**2 + 3*LMAX)//2 + 1 - #-- AOD1B data products + # AOD1B data products product = {} product['atm'] = f'Atmospheric loading from {ATMOSPHERE}' product['ocn'] = f'Oceanic loading from {OCEAN_MODEL}' product['glo'] = 'Global atmospheric and oceanic loading' product['oba'] = f'Ocean bottom pressure from {OCEAN_MODEL}' - #-- AOD1B directory and output geocenter directory + # AOD1B directory and output geocenter directory grace_dir = os.path.join(base_dir,'AOD1B',DREL) output_dir = os.path.join(grace_dir,'geocenter') if not os.access(output_dir, os.F_OK): os.mkdir(output_dir, MODE) - #-- finding all of the tar files in the AOD1b directory + # finding all of the tar files in the AOD1b directory input_tar_files = [tf for tf in os.listdir(grace_dir) if tx.match(tf)] - #-- for each tar file + # for each tar file for i in sorted(input_tar_files): - #-- extract the year and month from the file + # extract the year and month from the file YY,MM,SFX = tx.findall(i).pop() YY,MM = np.array([YY,MM], dtype=np.int64) - #-- output monthly geocenter file + # output monthly geocenter file FILE = f'AOD1B_{DREL}_{DSET}_{YY:4d}_{MM:02d}.txt' - #-- if output file exists: check if input tar file is newer + # if output file exists: check if input tar file is newer TEST = False OVERWRITE = ' (clobber)' - #-- check if output file exists + # check if output file exists if os.access(os.path.join(output_dir,FILE), os.F_OK): - #-- check last modification time of input and output files + # check last modification time of input and output files input_mtime = os.stat(os.path.join(grace_dir,i)).st_mtime output_mtime = os.stat(os.path.join(output_dir,FILE)).st_mtime - #-- if input tar file is newer: overwrite the output file + # if input tar file is newer: overwrite the output file if (input_mtime > output_mtime): TEST = True OVERWRITE = ' (overwrite)' else: TEST = True OVERWRITE = ' (new)' - #-- As there are so many files.. this will only read the new files - #-- or will rewrite if CLOBBER is set (if wanting something changed) + # As there are so many files.. 
this will only read the new files + # or will rewrite if CLOBBER is set (if wanting something changed) if TEST or CLOBBER: - #-- if verbose: output information about the geocenter file + # if verbose: output information about the geocenter file logging.info('{0}{1}'.format(os.path.join(output_dir,FILE),OVERWRITE)) - #-- open output monthly geocenter file + # open output monthly geocenter file f = open(os.path.join(output_dir,FILE), 'w') args = ('Geocenter time series',DREL,DSET) print('# {0} from {1} AOD1b {2} Product'.format(*args), file=f) @@ -181,43 +181,43 @@ def aod1b_geocenter(base_dir, args = ('ISO-Time','X','Y','Z') print('# {0:^15} {1:^12} {2:^12} {3:^12}'.format(*args), file=f) - #-- open the AOD1B monthly tar file + # open the AOD1B monthly tar file tar = tarfile.open(name=os.path.join(grace_dir,i), mode='r:gz') - #-- Iterate over every member within the tar file + # Iterate over every member within the tar file for member in tar.getmembers(): - #-- get calendar day from file + # get calendar day from file DD,SFX = fx.findall(member.name).pop() DD = np.int64(DD) - #-- open data file for day + # open data file for day if (SFX == '.gz'): fid = gzip.GzipFile(fileobj=tar.extractfile(member)) else: fid = tar.extractfile(member) - #-- degree 1 spherical harmonics for day and hours + # degree 1 spherical harmonics for day and hours DEG1 = geocenter() DEG1.C10 = np.zeros((n_time)) DEG1.C11 = np.zeros((n_time)) DEG1.S11 = np.zeros((n_time)) hours = np.zeros((n_time),dtype=np.int64) - #-- create counter for hour in dataset + # create counter for hour in dataset c = 0 - #-- while loop ends when dataset is read + # while loop ends when dataset is read while (c < n_time): - #-- read line + # read line file_contents = fid.readline().decode('ISO-8859-1') - #-- find file header for data product + # find file header for data product if bool(hx.search(file_contents)): - #-- extract hour from header and convert to float + # extract hour from header and convert to float HH, = re.findall(r'(\d+):\d+:\d+',file_contents) hours[c] = np.int64(HH) - #-- read each line of spherical harmonics + # read each line of spherical harmonics for k in range(0,n_harm): file_contents = fid.readline().decode('ISO-8859-1') - #-- find numerical instances in the data line + # find numerical instances in the data line line_contents = rx.findall(file_contents) - #-- spherical harmonic degree and order + # spherical harmonic degree and order l1 = np.int64(line_contents[0]) m1 = np.int64(line_contents[1]) if (l1 == 1) and (m1 == 0): @@ -225,24 +225,24 @@ def aod1b_geocenter(base_dir, elif (l1 == 1) and (m1 == 1): DEG1.C11[c] = np.float64(line_contents[2]) DEG1.S11[c] = np.float64(line_contents[3]) - #-- add 1 to hour counter + # add 1 to hour counter c += 1 - #-- close the input file for day + # close the input file for day fid.close() - #-- convert from spherical harmonics into geocenter + # convert from spherical harmonics into geocenter DEG1.to_cartesian() - #-- write to file for each hour (iterates each 6-hour block) + # write to file for each hour (iterates each 6-hour block) for h,X,Y,Z in zip(hours,DEG1.X,DEG1.Y,DEG1.Z): print(fstr.format(YY,MM,DD,h,X,Y,Z), file=f) - #-- close the tar file + # close the tar file tar.close() - #-- close the output file + # close the output file f.close() - #-- set the permissions mode of the output file + # set the permissions mode of the output file os.chmod(os.path.join(output_dir,FILE), MODE) -#-- PURPOSE: create argument parser +# PURPOSE: create argument parser def arguments(): 
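# A hypothetical invocation that this parser accepts (flags are defined
# below; directory and product values are illustrative only):
#   python aod1b_geocenter.py --directory ~/grace_data --release RL06 \
#       --product atm ocn --verbose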
parser = argparse.ArgumentParser( description="""Creates monthly files of geocenter variations @@ -251,54 +251,54 @@ def arguments(): fromfile_prefix_chars="@" ) parser.convert_arg_line_to_args = utilities.convert_arg_line_to_args - #-- command line parameters - #-- working data directory + # command line parameters + # working data directory parser.add_argument('--directory','-D', type=lambda p: os.path.abspath(os.path.expanduser(p)), default=os.getcwd(), help='Working data directory') - #-- GRACE/GRACE-FO data release + # GRACE/GRACE-FO data release parser.add_argument('--release','-r', metavar='DREL', type=str, default='', help='GRACE/GRACE-FO Data Release') - #-- GRACE/GRACE-FO level-1b dealiasing product + # GRACE/GRACE-FO level-1b dealiasing product parser.add_argument('--product','-p', metavar='DSET', type=str.lower, nargs='+', choices=['atm','ocn','glo','oba'], help='GRACE/GRACE-FO Level-1b data product') - #-- clobber will overwrite the existing data + # clobber will overwrite the existing data parser.add_argument('--clobber','-C', default=False, action='store_true', help='Overwrite existing data') - #-- verbose will output information about each output file + # verbose will output information about each output file parser.add_argument('--verbose','-V', action='count', default=0, help='Verbose output of processing run') - #-- permissions mode of the local directories and files (number in octal) + # permissions mode of the local directories and files (number in octal) parser.add_argument('--mode','-M', type=lambda x: int(x,base=8), default=0o775, help='Permissions mode of output files') - #-- return the parser + # return the parser return parser -#-- This is the main part of the program that calls the individual functions +# This is the main part of the program that calls the individual functions def main(): - #-- Read the system arguments listed after the program + # Read the system arguments listed after the program parser = arguments() args,_ = parser.parse_known_args() - #-- create logger + # create logger loglevels = [logging.CRITICAL,logging.INFO,logging.DEBUG] logging.basicConfig(level=loglevels[args.verbose]) - #-- for each entered AOD1B dataset + # for each entered AOD1B dataset for DSET in args.product: - #-- run AOD1b geocenter program with parameters + # run AOD1b geocenter program with parameters aod1b_geocenter(args.directory, DREL=args.release, DSET=DSET, CLOBBER=args.clobber, MODE=args.mode) -#-- run main program +# run main program if __name__ == '__main__': main() diff --git a/scripts/aod1b_oblateness.py b/scripts/aod1b_oblateness.py index 701d0918..bb6685d1 100644 --- a/scripts/aod1b_oblateness.py +++ b/scripts/aod1b_oblateness.py @@ -69,7 +69,7 @@ import numpy as np import gravity_toolkit.utilities as utilities -#-- program module to read the C20 coefficients of the AOD1b data +# program module to read the C20 coefficients of the AOD1b data def aod1b_oblateness(base_dir, DREL='', DSET='', @@ -96,144 +96,144 @@ def aod1b_oblateness(base_dir, VERBOSE: Output information for each output file """ - #-- compile regular expressions operators for file dates - #-- will extract the year and month from the tar file (.tar.gz) + # compile regular expressions operators for file dates + # will extract the year and month from the tar file (.tar.gz) tx = re.compile(r'AOD1B_(\d+)-(\d+)_\d+\.(tar\.gz|tgz)$', re.VERBOSE) - #-- and the calendar day from the ascii file (.asc or gzipped .asc.gz) + # and the calendar day from the ascii file (.asc or gzipped .asc.gz) fx = 
re.compile(r'AOD1B_\d+-\d+-(\d+)_X_\d+.asc(.gz)?$', re.VERBOSE) - #-- compile regular expressions operator for the clm/slm headers - #-- for the specific AOD1b product + # compile regular expressions operator for the clm/slm headers + # for the specific AOD1b product hx = re.compile(rf'^DATA.*SET.*{DSET}', re.VERBOSE) - #-- compile regular expression operator to find numerical instances - #-- will extract the data from the file + # compile regular expression operator to find numerical instances + # will extract the data from the file regex_pattern = r'[-+]?(?:(?:\d*\.\d+)|(?:\d+\.?))(?:[Ee][+-]?\d+)?' rx = re.compile(regex_pattern, re.VERBOSE) - #-- output formatting string + # output formatting string fstr = '{0:4d}-{1:02d}-{2:02d}T{3:02d}:00:00 {4:+16.8E}' - #-- set number of hours in a file - #-- set the ocean model for a given release + # set number of hours in a file + # set the ocean model for a given release if DREL in ('RL01','RL02','RL03','RL04','RL05'): - #-- for 00, 06, 12 and 18 + # for 00, 06, 12 and 18 n_time = 4 ATMOSPHERE = 'ECMWF' OCEAN_MODEL = 'OMCT' LMAX = 100 elif DREL in ('RL06',): - #-- for 00, 03, 06, 09, 12, 15, 18 and 21 + # for 00, 03, 06, 09, 12, 15, 18 and 21 n_time = 8 ATMOSPHERE = 'ECMWF' OCEAN_MODEL = 'MPIOM' LMAX = 180 else: raise ValueError('Invalid data release') - #-- Calculating the number of cos and sin harmonics up to LMAX + # Calculating the number of cos and sin harmonics up to LMAX n_harm = (LMAX**2 + 3*LMAX)//2 + 1 - #-- AOD1B data products + # AOD1B data products product = {} product['atm'] = f'Atmospheric loading from {ATMOSPHERE}' product['ocn'] = f'Oceanic loading from {OCEAN_MODEL}' product['glo'] = 'Global atmospheric and oceanic loading' product['oba'] = f'Ocean bottom pressure from {OCEAN_MODEL}' - #-- AOD1B directory and output oblateness directory + # AOD1B directory and output oblateness directory grace_dir = os.path.join(base_dir,'AOD1B',DREL) output_dir = os.path.join(grace_dir,'oblateness') if not os.access(output_dir, os.F_OK): os.mkdir(output_dir, MODE) - #-- finding all of the tar files in the AOD1b directory + # finding all of the tar files in the AOD1b directory input_tar_files = [tf for tf in os.listdir(grace_dir) if tx.match(tf)] - #-- for each tar file + # for each tar file for i in sorted(input_tar_files): - #-- extract the year and month from the file + # extract the year and month from the file YY,MM,SFX = tx.findall(i).pop() YY,MM = np.array([YY,MM], dtype=np.int64) - #-- output monthly oblateness file + # output monthly oblateness file FILE = f'AOD1B_{DREL}_{DSET}_{YY:4d}_{MM:02d}.txt' - #-- if output file exists: check if input tar file is newer + # if output file exists: check if input tar file is newer TEST = False OVERWRITE = ' (clobber)' - #-- check if output file exists + # check if output file exists if os.access(os.path.join(output_dir,FILE), os.F_OK): - #-- check last modification time of input and output files + # check last modification time of input and output files input_mtime = os.stat(os.path.join(grace_dir,i)).st_mtime output_mtime = os.stat(os.path.join(output_dir,FILE)).st_mtime - #-- if input tar file is newer: overwrite the output file + # if input tar file is newer: overwrite the output file if (input_mtime > output_mtime): TEST = True OVERWRITE = ' (overwrite)' else: TEST = True OVERWRITE = ' (new)' - #-- As there are so many files.. this will only read the new files - #-- or will rewrite if CLOBBER is set (if wanting something changed) + # As there are so many files.. 
this will only read the new files + # or will rewrite if CLOBBER is set (if wanting something changed) if TEST or CLOBBER: - #-- if verbose: output information about the oblateness file + # if verbose: output information about the oblateness file logging.info('{0}{1}'.format(os.path.join(output_dir,FILE),OVERWRITE)) - #-- open output monthly oblateness file + # open output monthly oblateness file f = open(os.path.join(output_dir,FILE), 'w') args = ('Oblateness time series',DREL,DSET) print('# {0} from {1} AOD1b {2} Product'.format(*args), file=f) print('# {0}'.format(product[DSET]), file=f) print('# {0:^15} {1:^15}'.format('ISO-Time','C20'), file=f) - #-- open the AOD1B monthly tar file + # open the AOD1B monthly tar file tar = tarfile.open(name=os.path.join(grace_dir,i), mode='r:gz') - #-- Iterate over every member within the tar file + # Iterate over every member within the tar file for member in tar.getmembers(): - #-- get calendar day from file + # get calendar day from file DD,SFX = fx.findall(member.name).pop() DD = np.int64(DD) - #-- open datafile for day + # open datafile for day if (SFX == '.gz'): fid = gzip.GzipFile(fileobj=tar.extractfile(member)) else: fid = tar.extractfile(member) - #-- C20 spherical harmonics for day and hours + # C20 spherical harmonics for day and hours C20 = np.zeros((n_time)) hours = np.zeros((n_time),dtype=np.int64) - #-- create counter for hour in dataset + # create counter for hour in dataset c = 0 - #-- while loop ends when dataset is read + # while loop ends when dataset is read while (c < n_time): - #-- read line + # read line file_contents = fid.readline().decode('ISO-8859-1') - #-- find file header for data product + # find file header for data product if bool(hx.search(file_contents)): - #-- extract hour from header and convert to float + # extract hour from header and convert to float HH, = re.findall(r'(\d+):\d+:\d+',file_contents) hours[c] = np.int64(HH) - #-- read each line of spherical harmonics + # read each line of spherical harmonics for k in range(0,n_harm): file_contents = fid.readline().decode('ISO-8859-1') - #-- find numerical instances in the data line + # find numerical instances in the data line line_contents = rx.findall(file_contents) - #-- spherical harmonic degree and order + # spherical harmonic degree and order l1 = np.int64(line_contents[0]) m1 = np.int64(line_contents[1]) if (l1 == 2) and (m1 == 0): C20[c] = np.float64(line_contents[2]) - #-- add 1 to hour counter + # add 1 to hour counter c += 1 - #-- close the input file for day + # close the input file for day fid.close() - #-- write to file for each hour + # write to file for each hour for h in range(4): print(fstr.format(YY,MM,DD,hours[h],C20[h]),file=f) - #-- close the tar file + # close the tar file tar.close() - #-- close the output file + # close the output file f.close() - #-- set the permissions mode of the output file + # set the permissions mode of the output file os.chmod(os.path.join(output_dir,FILE), MODE) -#-- PURPOSE: create argument parser +# PURPOSE: create argument parser def arguments(): parser = argparse.ArgumentParser( description="""Creates monthly files of oblateness (C20) @@ -242,55 +242,55 @@ def arguments(): fromfile_prefix_chars="@" ) parser.convert_arg_line_to_args = utilities.convert_arg_line_to_args - #-- command line parameters - #-- working data directory + # command line parameters + # working data directory parser.add_argument('--directory','-D', type=lambda p: os.path.abspath(os.path.expanduser(p)), default=os.getcwd(), help='Working 
data directory') - #-- GRACE/GRACE-FO data release + # GRACE/GRACE-FO data release parser.add_argument('--release','-r', metavar='DREL', type=str, default='', help='GRACE/GRACE-FO Data Release') - #-- GRACE/GRACE-FO level-1b dealiasing product + # GRACE/GRACE-FO level-1b dealiasing product parser.add_argument('--product','-p', metavar='DSET', type=str.lower, nargs='+', choices=['atm','ocn','glo','oba'], help='GRACE/GRACE-FO Level-1b data product') - #-- clobber will overwrite the existing data + # clobber will overwrite the existing data parser.add_argument('--clobber','-C', default=False, action='store_true', help='Overwrite existing data') - #-- verbose will output information about each output file + # verbose will output information about each output file parser.add_argument('--verbose','-V', action='count', default=0, help='Verbose output of processing run') - #-- permissions mode of the local directories and files (number in octal) + # permissions mode of the local directories and files (number in octal) parser.add_argument('--mode','-M', type=lambda x: int(x,base=8), default=0o775, help='Permissions mode of output files') - #-- return the parser + # return the parser return parser -#-- This is the main part of the program that calls the individual functions +# This is the main part of the program that calls the individual functions def main(): - #-- Read the system arguments listed after the program + # Read the system arguments listed after the program parser = arguments() args,_ = parser.parse_known_args() - #-- create logger + # create logger loglevels = [logging.CRITICAL,logging.INFO,logging.DEBUG] logging.basicConfig(level=loglevels[args.verbose]) - #-- for each entered AOD1B dataset + # for each entered AOD1B dataset for DSET in args.product: - #-- run AOD1b oblateness program with parameters + # run AOD1b oblateness program with parameters aod1b_oblateness(args.directory, DREL=args.release, DSET=DSET, CLOBBER=args.clobber, MODE=args.mode) -#-- run main program +# run main program if __name__ == '__main__': main() diff --git a/scripts/calc_degree_one.py b/scripts/calc_degree_one.py index 23c19cdf..626f1665 100755 --- a/scripts/calc_degree_one.py +++ b/scripts/calc_degree_one.py @@ -258,7 +258,7 @@ from gravity_toolkit.sea_level_equation import sea_level_equation from gravity_toolkit.time import grace_to_calendar -#-- PURPOSE: keep track of threads +# PURPOSE: keep track of threads def info(args): logging.info(os.path.basename(sys.argv[0])) logging.info(args) @@ -267,63 +267,63 @@ def info(args): logging.info(f'parent process: {os.getppid():d}') logging.info(f'process id: {os.getpid():d}') -#-- PURPOSE: import GRACE/GRACE-FO GSM files for a given months range +# PURPOSE: import GRACE/GRACE-FO GSM files for a given months range def load_grace_GSM(base_dir, PROC, DREL, START, END, MISSING, LMAX, MMAX=None, SLR_C20=None, SLR_21=None, SLR_22=None, SLR_C30=None, SLR_C40=None, SLR_C50=None, POLE_TIDE=False): - #-- GRACE/GRACE-FO dataset + # GRACE/GRACE-FO dataset DSET = 'GSM' - #-- do not import degree 1 coefficients for the GRACE GSM solution - #-- 0: No degree 1 + # do not import degree 1 coefficients for the GRACE GSM solution + # 0: No degree 1 DEG1 = 0 - #-- reading GRACE/GRACE-FO GSM solutions for input date range - #-- replacing low-degree harmonics with SLR values if specified - #-- correcting for Pole-Tide if specified - #-- atmospheric jumps will be corrected externally if specified + # reading GRACE/GRACE-FO GSM solutions for input date range + # replacing low-degree 
harmonics with SLR values if specified + # correcting for Pole-Tide if specified + # atmospheric jumps will be corrected externally if specified grace_Ylms = grace_input_months(base_dir, PROC, DREL, DSET, LMAX, START, END, MISSING, SLR_C20, DEG1, MMAX=MMAX, SLR_21=SLR_21, SLR_22=SLR_22, SLR_C30=SLR_C30, SLR_C40=SLR_C40, SLR_C50=SLR_C50, POLE_TIDE=POLE_TIDE, ATM=False, MODEL_DEG1=False) - #-- returning input variables as a harmonics object + # returning input variables as a harmonics object return harmonics().from_dict(grace_Ylms) -#-- PURPOSE: import GRACE/GRACE-FO dealiasing files for a given months range +# PURPOSE: import GRACE/GRACE-FO dealiasing files for a given months range def load_AOD(base_dir, PROC, DREL, DSET, START, END, MISSING, LMAX): - #-- do not replace low degree harmonics for AOD solutions + # do not replace low degree harmonics for AOD solutions SLR_C20 = 'N' - #-- do not replace degree 1 coefficients for the GRACE AOD solution - #-- 0: No degree 1 replacement + # do not replace degree 1 coefficients for the GRACE AOD solution + # 0: No degree 1 replacement DEG1 = 0 - #-- reading GRACE/GRACE-FO AOD solutions for input date range + # reading GRACE/GRACE-FO AOD solutions for input date range grace_Ylms = grace_input_months(base_dir, PROC, DREL, DSET, LMAX, START, END, MISSING, SLR_C20, DEG1, POLE_TIDE=False, ATM=False) - #-- returning input variables as a harmonics object + # returning input variables as a harmonics object return harmonics().from_dict(grace_Ylms) -#-- PURPOSE: model the seasonal component of an initial degree 1 model -#-- using preliminary estimates of annual and semi-annual variations from LWM -#-- as calculated in Chen et al. (1999), doi:10.1029/1998JB900019 -#-- NOTE: this is to get an accurate assessment of the land water mass for the -#-- eustatic component (not for the ocean component from GRACE) +# PURPOSE: model the seasonal component of an initial degree 1 model +# using preliminary estimates of annual and semi-annual variations from LWM +# as calculated in Chen et al. 
(1999), doi:10.1029/1998JB900019 +# NOTE: this is to get an accurate assessment of the land water mass for the +# eustatic component (not for the ocean component from GRACE) def model_seasonal_geocenter(grace_date): - #-- Annual amplitudes of (Soil Moisture + Snow) geocenter components (mm) + # Annual amplitudes of (Soil Moisture + Snow) geocenter components (mm) AAx = 1.28 AAy = 0.52 AAz = 3.30 - #-- Annual phase of (Soil Moisture + Snow) geocenter components (degrees) + # Annual phase of (Soil Moisture + Snow) geocenter components (degrees) APx = 44.0 APy = 182.0 APz = 43.0 - #-- Semi-Annual amplitudes of (Soil Moisture + Snow) geocenter components + # Semi-Annual amplitudes of (Soil Moisture + Snow) geocenter components SAAx = 0.15 SAAy = 0.56 SAAz = 0.50 - #-- Semi-Annual phase of (Soil Moisture + Snow) geocenter components + # Semi-Annual phase of (Soil Moisture + Snow) geocenter components SAPx = 331.0 SAPy = 312.0 SAPz = 75.0 - #-- calculate each geocenter component from the amplitude and phase - #-- converting the phase from degrees to radians + # calculate each geocenter component from the amplitude and phase + # converting the phase from degrees to radians X = AAx*np.sin(2.0*np.pi*grace_date + APx*np.pi/180.0) + \ SAAx*np.sin(4.0*np.pi*grace_date + SAPx*np.pi/180.0) Y = AAy*np.sin(2.0*np.pi*grace_date + APy*np.pi/180.0) + \ @@ -333,7 +333,7 @@ def model_seasonal_geocenter(grace_date): DEG1 = geocenter(X=X-X.mean(), Y=Y-Y.mean(), Z=Z-Z.mean()) return DEG1.from_cartesian() -#-- PURPOSE: calculate a geocenter time-series +# PURPOSE: calculate a geocenter time-series def calc_degree_one(base_dir, PROC, DREL, MODEL, LMAX, RAD, START=None, END=None, @@ -364,17 +364,17 @@ def calc_degree_one(base_dir, PROC, DREL, MODEL, LMAX, RAD, COPY=False, MODE=0o775): - #-- output directory + # output directory DIRECTORY = os.path.join(base_dir,'geocenter') - #-- create output directory if non-existent + # create output directory if non-existent if not os.access(DIRECTORY, os.F_OK): os.makedirs(DIRECTORY, mode=MODE) - #-- list object of output files for file logs (full path) + # list object of output files for file logs (full path) output_files = [] - #-- output flag for using sea level fingerprints + # output flag for using sea level fingerprints slf_str = '_SLF' if FINGERPRINT else '' - #-- output flag for low-degree harmonic replacements + # output flag for low-degree harmonic replacements if SLR_21 in ('CSR','GFZ','GSFC'): C21_str = f'_w{SLR_21}_21' else: @@ -384,7 +384,7 @@ def calc_degree_one(base_dir, PROC, DREL, MODEL, LMAX, RAD, else: C22_str = '' if SLR_C30 in ('GSFC',): - #-- C30 replacement now default for all solutions + # C30 replacement now default for all solutions C30_str = '' elif SLR_C30 in ('CSR','GFZ','LARES'): C30_str = f'_w{SLR_C30}_C30' @@ -398,115 +398,115 @@ def calc_degree_one(base_dir, PROC, DREL, MODEL, LMAX, RAD, C50_str = f'_w{SLR_C50}_C50' else: C50_str = '' - #-- combine satellite laser ranging flags + # combine satellite laser ranging flags slr_str = ''.join([C21_str,C22_str,C30_str,C40_str,C50_str]) - #-- read load love numbers + # read load love numbers hl,kl,ll = load_love_numbers(EXPANSION, LOVE_NUMBERS=LOVE_NUMBERS, REFERENCE='CF') - #-- set gravitational load love number to a specific value + # set gravitational load love number to a specific value if LOVE_K1: kl[1] = np.copy(LOVE_K1) - #-- maximum spherical harmonic order + # maximum spherical harmonic order if not MMAX: MMAX = np.copy(LMAX) - #-- Earth Parameters + # Earth Parameters factors = 
units(lmax=LMAX).harmonic(hl,kl,ll) - rho_e = factors.rho_e#-- Average Density of the Earth [g/cm^3] - rad_e = factors.rad_e#-- Average Radius of the Earth [cm] + rho_e = factors.rho_e# Average Density of the Earth [g/cm^3] + rad_e = factors.rad_e# Average Radius of the Earth [cm] l = factors.l - #-- Factor for converting to Mass SH + # Factor for converting to Mass SH dfactor = factors.cmwe - #-- Read Smoothed Ocean and Land Functions - #-- Open the land-sea NetCDF file for reading + # Read Smoothed Ocean and Land Functions + # Open the land-sea NetCDF file for reading landsea = spatial().from_netCDF4(LANDMASK, date=False, varname='LSMASK') - #-- degree spacing and grid dimensions - #-- will create GRACE spatial fields with same dimensions + # degree spacing and grid dimensions + # will create GRACE spatial fields with same dimensions dlon,dlat = landsea.spacing nlat,nlon = landsea.shape - #-- spatial parameters in radians + # spatial parameters in radians dphi = dlon*np.pi/180.0 dth = dlat*np.pi/180.0 - #-- longitude and colatitude in radians + # longitude and colatitude in radians phi = landsea.lon[np.newaxis,:]*np.pi/180.0 th = (90.0 - np.squeeze(landsea.lat))*np.pi/180.0 - #-- create land function + # create land function land_function = np.zeros((nlon,nlat),dtype=np.float64) - #-- extract land function from file - #-- combine land and island levels for land function + # extract land function from file + # combine land and island levels for land function indx,indy = np.nonzero((landsea.data.T >= 1) & (landsea.data.T <= 3)) land_function[indx,indy] = 1.0 - #-- calculate ocean function from land function + # calculate ocean function from land function ocean_function = 1.0 - land_function - #-- Calculating Legendre Polynomials using Holmes and Featherstone relation - #-- calculate up to degree and order of spherical harmonic expansion for SLF + # Calculating Legendre Polynomials using Holmes and Featherstone relation + # calculate up to degree and order of spherical harmonic expansion for SLF PLM, dPLM = plm_holmes(EXPANSION, np.cos(th)) - #-- calculate spherical harmonics of ocean function to degree 1 - #-- mass is equivalent to 1 cm ocean height change - #-- eustatic ratio = -land total/ocean total + # calculate spherical harmonics of ocean function to degree 1 + # mass is equivalent to 1 cm ocean height change + # eustatic ratio = -land total/ocean total ocean_Ylms = gen_stokes(ocean_function, landsea.lon, landsea.lat, UNITS=1, LMIN=0, LMAX=1, LOVE=(hl,kl,ll), PLM=PLM[:2,:2,:]) - #-- Gaussian Smoothing (Jekeli, 1981) + # Gaussian Smoothing (Jekeli, 1981) if (RAD != 0): wt = 2.0*np.pi*gauss_weights(RAD,LMAX) else: - #-- else = 1 + # else = 1 wt = np.ones((LMAX+1)) - #-- load GRACE/GRACE-FO data + # load GRACE/GRACE-FO data GSM_Ylms = load_grace_GSM(base_dir, PROC, DREL, START, END, MISSING, LMAX, MMAX=MMAX, SLR_C20=SLR_C20, SLR_21=SLR_21, SLR_22=SLR_22, SLR_C30=SLR_C30, SLR_C40=SLR_C40, SLR_C50=SLR_C50, POLE_TIDE=POLE_TIDE) GAD_Ylms = load_AOD(base_dir, PROC, DREL, 'GAD', START, END, MISSING, LMAX) GAC_Ylms = load_AOD(base_dir, PROC, DREL, 'GAC', START, END, MISSING, LMAX) - #-- use a mean file for the static field to remove + # use a mean file for the static field to remove if MEAN_FILE: - #-- read data form for input mean file (ascii, netCDF4, HDF5, gfc) + # read data form for input mean file (ascii, netCDF4, HDF5, gfc) mean_Ylms = harmonics().from_file(MEAN_FILE,format=MEANFORM,date=False) - #-- remove the input mean + # remove the input mean GSM_Ylms.subtract(mean_Ylms) else: 
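# no static mean file supplied: remove the temporal mean of the
# GSM harmonics themselves (mirrors the explicit MEAN_FILE branch above)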
GSM_Ylms.mean(apply=True) - #-- remove the mean from the GRACE/GRACE-FO dealiasing data + # remove the mean from the GRACE/GRACE-FO dealiasing data GAD_Ylms.mean(apply=True) GAC_Ylms.mean(apply=True) - #-- convert GAC to geocenter object + # convert GAC to geocenter object GAC = geocenter().from_harmonics(GAC_Ylms) - #-- filter GRACE/GRACE-FO coefficients + # filter GRACE/GRACE-FO coefficients if DESTRIPE: - #-- destriping GRACE GSM and GAD coefficients + # destriping GRACE GSM and GAD coefficients ds_str = '_FL' GSM_Ylms = GSM_Ylms.destripe() GAD_Ylms = GAD_Ylms.destripe() else: - #-- using standard GRACE GSM harmonics + # using standard GRACE GSM harmonics ds_str = '' - #-- GRACE dates + # GRACE dates tdec = GSM_Ylms.time - #-- number of months considered + # number of months considered n_files = len(GSM_Ylms.month) - #-- input GIA spherical harmonic datafiles + # input GIA spherical harmonic datafiles GIA_Ylms_rate = read_GIA_model(GIA_FILE, GIA=GIA, LMAX=LMAX, MMAX=MMAX) gia_str = '_{0}'.format(GIA_Ylms_rate['title']) if GIA else '' - #-- GIA monthly coefficients + # GIA monthly coefficients GIA_Ylms = GSM_Ylms.zeros_like() GIA_Ylms.time[:] = np.copy(GSM_Ylms.time) GIA_Ylms.month[:] = np.copy(GSM_Ylms.month) - #-- monthly GIA calculated by gia_rate*time elapsed - #-- finding change in GIA each month + # monthly GIA calculated by gia_rate*time elapsed + # finding change in GIA each month for t in range(0,n_files): GIA_Ylms.clm[:,:,t] = GIA_Ylms_rate['clm']*(GIA_Ylms.time[t]-2003.3) GIA_Ylms.slm[:,:,t] = GIA_Ylms_rate['slm']*(GIA_Ylms.time[t]-2003.3) - #-- save geocenter coefficients of monthly GIA variability + # save geocenter coefficients of monthly GIA variability gia = geocenter().from_harmonics(GIA_Ylms) - #-- GRACE GAD degree 1 + # GRACE GAD degree 1 GAD = geocenter() GAD.time = np.copy(GAD_Ylms.time) GAD.month = np.copy(GAD_Ylms.month) @@ -514,17 +514,17 @@ def calc_degree_one(base_dir, PROC, DREL, MODEL, LMAX, RAD, GAD.C11 = np.zeros((n_files)) GAD.S11 = np.zeros((n_files)) for t in range(0,n_files): - #-- converting GAD degree 1 harmonics to mass - #-- NOTE: following Swenson (2008): do not use the kl Load Love number - #-- to convert the GAD coefficients into coefficients of mass as - #-- the GAC and GAD products are computed with a Load Love number of 0 + # converting GAD degree 1 harmonics to mass + # NOTE: following Swenson (2008): do not use the kl Load Love number + # to convert the GAD coefficients into coefficients of mass as + # the GAC and GAD products are computed with a Load Love number of 0 GAD.C10[t] = rho_e*rad_e*np.squeeze(GAD_Ylms.clm[1,0,t])*(2.0 + 1.0)/3.0 GAD.C11[t] = rho_e*rad_e*np.squeeze(GAD_Ylms.clm[1,1,t])*(2.0 + 1.0)/3.0 GAD.S11[t] = rho_e*rad_e*np.squeeze(GAD_Ylms.slm[1,1,t])*(2.0 + 1.0)/3.0 - #-- removing the mean of the GAD OBP coefficients + # removing the mean of the GAD OBP coefficients GAD.mean(apply=True) - #-- read atmospheric jump corrections from Fagiolini et al. (2015) + # read atmospheric jump corrections from Fagiolini et al. 
(2015) ATM_Ylms = GSM_Ylms.zeros_like() ATM_Ylms.time[:] = np.copy(GSM_Ylms.time) ATM_Ylms.month[:] = np.copy(GSM_Ylms.month) @@ -532,77 +532,77 @@ def calc_degree_one(base_dir, PROC, DREL, MODEL, LMAX, RAD, atm_corr = read_ecmwf_corrections(base_dir,LMAX,ATM_Ylms.month) ATM_Ylms.clm[:,:,:] = np.copy(atm_corr['clm']) ATM_Ylms.slm[:,:,:] = np.copy(atm_corr['slm']) - #-- removing the mean of the atmospheric jump correction coefficients + # removing the mean of the atmospheric jump correction coefficients ATM_Ylms.mean(apply=True) - #-- truncate to degree and order LMAX/MMAX + # truncate to degree and order LMAX/MMAX ATM_Ylms = ATM_Ylms.truncate(lmax=LMAX, mmax=MMAX) - #-- save geocenter coefficients of the atmospheric jump corrections + # save geocenter coefficients of the atmospheric jump corrections atm = geocenter().from_harmonics(ATM_Ylms) - #-- read bottom pressure model if applicable - #-- ECCO_kf080i: https://ecco.jpl.nasa.gov/drive/files/NearRealTime/KalmanFilter/ - #-- ECCO_dr080i: https://ecco.jpl.nasa.gov/drive/files/NearRealTime/Smoother/ - #-- ECCO_V4r3: https://ecco.jpl.nasa.gov/drive/files/Version4/Release3/interp_monthly/ - #-- ECCO_V4r4: https://ecco.jpl.nasa.gov/drive/files/Version4/Release4/interp_monthly/ + # read bottom pressure model if applicable + # ECCO_kf080i: https://ecco.jpl.nasa.gov/drive/files/NearRealTime/KalmanFilter/ + # ECCO_dr080i: https://ecco.jpl.nasa.gov/drive/files/NearRealTime/Smoother/ + # ECCO_V4r3: https://ecco.jpl.nasa.gov/drive/files/Version4/Release3/interp_monthly/ + # ECCO_V4r4: https://ecco.jpl.nasa.gov/drive/files/Version4/Release4/interp_monthly/ if MODEL not in ('OMCT','MPIOM'): - #-- read input data files for ascii (txt), netCDF4 (nc) or HDF5 (H5) + # read input data files for ascii (txt), netCDF4 (nc) or HDF5 (H5) MODEL_INDEX = os.path.expanduser(MODEL_INDEX) OBP_Ylms = harmonics().from_index(MODEL_INDEX, format=DATAFORM) - #-- reduce to GRACE/GRACE-FO months and truncate to degree and order + # reduce to GRACE/GRACE-FO months and truncate to degree and order OBP_Ylms = OBP_Ylms.subset(GSM_Ylms.month).truncate(lmax=LMAX,mmax=MMAX) - #-- filter ocean bottom pressure coefficients + # filter ocean bottom pressure coefficients if DESTRIPE: OBP_Ylms = OBP_Ylms.destripe() - #-- removing the mean of the ecco spherical harmonic coefficients + # removing the mean of the ecco spherical harmonic coefficients OBP_Ylms.mean(apply=True) - #-- converting ecco degree 1 harmonics to coefficients of mass + # converting ecco degree 1 harmonics to coefficients of mass OBP = geocenter.from_harmonics(OBP_Ylms).scale(dfactor[1]) - #-- Calculating cos/sin of phi arrays - #-- output [m,phi] + # Calculating cos/sin of phi arrays + # output [m,phi] m = GSM_Ylms.m[:, np.newaxis] - #-- Integration factors (solid angle) + # Integration factors (solid angle) int_fact = np.sin(th)*dphi*dth - #-- Calculating cos(m*phi) and sin(m*phi) + # Calculating cos(m*phi) and sin(m*phi) ccos = np.cos(np.dot(m,phi)) ssin = np.sin(np.dot(m,phi)) - #-- Legendre polynomials for degree 1 + # Legendre polynomials for degree 1 P10 = np.squeeze(PLM[1,0,:]) P11 = np.squeeze(PLM[1,1,:]) - #-- PLM for spherical harmonic degrees 2+ up to LMAX - #-- converted into mass and smoothed if specified + # PLM for spherical harmonic degrees 2+ up to LMAX + # converted into mass and smoothed if specified plmout = np.zeros((LMAX+1,MMAX+1,nlat)) for l in range(1,LMAX+1): m = np.arange(0,np.min([l,MMAX])+1) - #-- convert to smoothed coefficients of mass - #-- Convolving plms with degree dependent factor and 
smoothing + # convert to smoothed coefficients of mass + # Convolving plms with degree dependent factor and smoothing plmout[l,m,:] = PLM[l,m,:]*dfactor[l]*wt[l] - #-- Initializing 3x3 I-Parameter matrix + # Initializing 3x3 I-Parameter matrix IMAT = np.zeros((3,3)) - #-- Calculating I-Parameter matrix by integrating over latitudes - #-- I-Parameter matrix accounts for the fact that the GRACE data only - #-- includes spherical harmonic degrees greater than or equal to 2 + # Calculating I-Parameter matrix by integrating over latitudes + # I-Parameter matrix accounts for the fact that the GRACE data only + # includes spherical harmonic degrees greater than or equal to 2 for i in range(0,nlat): - #-- C10: C10, C11, S11 (see equations 12 and 13 of Swenson et al., 2008) + # C10: C10, C11, S11 (see equations 12 and 13 of Swenson et al., 2008) IMAT[0,0] += np.sum(int_fact[i]*P10[i]*ccos[0,:]*ocean_function[:,i]*P10[i]*ccos[0,:])/(4.0*np.pi) IMAT[1,0] += np.sum(int_fact[i]*P10[i]*ccos[0,:]*ocean_function[:,i]*P11[i]*ccos[1,:])/(4.0*np.pi) IMAT[2,0] += np.sum(int_fact[i]*P10[i]*ccos[0,:]*ocean_function[:,i]*P11[i]*ssin[1,:])/(4.0*np.pi) - #-- C11: C10, C11, S11 (see equations 12 and 13 of Swenson et al., 2008) + # C11: C10, C11, S11 (see equations 12 and 13 of Swenson et al., 2008) IMAT[0,1] += np.sum(int_fact[i]*P11[i]*ccos[1,:]*ocean_function[:,i]*P10[i]*ccos[0,:])/(4.0*np.pi) IMAT[1,1] += np.sum(int_fact[i]*P11[i]*ccos[1,:]*ocean_function[:,i]*P11[i]*ccos[1,:])/(4.0*np.pi) IMAT[2,1] += np.sum(int_fact[i]*P11[i]*ccos[1,:]*ocean_function[:,i]*P11[i]*ssin[1,:])/(4.0*np.pi) - #-- S11: C10, C11, S11 (see equations 12 and 13 of Swenson et al., 2008) + # S11: C10, C11, S11 (see equations 12 and 13 of Swenson et al., 2008) IMAT[0,2] += np.sum(int_fact[i]*P11[i]*ssin[1,:]*ocean_function[:,i]*P10[i]*ccos[0,:])/(4.0*np.pi) IMAT[1,2] += np.sum(int_fact[i]*P11[i]*ssin[1,:]*ocean_function[:,i]*P11[i]*ccos[1,:])/(4.0*np.pi) IMAT[2,2] += np.sum(int_fact[i]*P11[i]*ssin[1,:]*ocean_function[:,i]*P11[i]*ssin[1,:])/(4.0*np.pi) - #-- get seasonal variations of an initial geocenter correction - #-- for use in the land water mass calculation + # get seasonal variations of an initial geocenter correction + # for use in the land water mass calculation seasonal_geocenter = model_seasonal_geocenter(tdec) - #-- iterate solutions: if not single iteration + # iterate solutions: if not single iteration n_iter = 0 eps = np.inf eps_max = 1e-6 @@ -613,33 +613,33 @@ def calc_degree_one(base_dir, PROC, DREL, MODEL, LMAX, RAD, iter_str = '' max_iter = 1 - #-- Calculating data matrices - #-- GRACE Eustatic degree 1 from land variations + # Calculating data matrices + # GRACE Eustatic degree 1 from land variations eustatic = geocenter() eustatic.C10 = np.zeros((n_files)) eustatic.C11 = np.zeros((n_files)) eustatic.S11 = np.zeros((n_files)) - #-- Allocate for G matrix parameters - #-- G matrix calculates the GRACE ocean mass variations + # Allocate for G matrix parameters + # G matrix calculates the GRACE ocean mass variations G = geocenter() G.C10 = np.zeros((n_files)) G.C11 = np.zeros((n_files)) G.S11 = np.zeros((n_files)) - #-- DMAT is the degree one matrix ((C10,C11,S11) x Time) in terms of mass + # DMAT is the degree one matrix ((C10,C11,S11) x Time) in terms of mass DMAT = np.zeros((3,n_files)) - #-- degree 1 iterations + # degree 1 iterations iteration = geocenter() iteration.C10 = np.zeros((n_files,max_iter)) iteration.C11 = np.zeros((n_files,max_iter)) iteration.S11 = np.zeros((n_files,max_iter)) - #-- calculate non-iterated 
terms for each file (G-matrix parameters) + # calculate non-iterated terms for each file (G-matrix parameters) for t in range(n_files): - #-- calculate geocenter component of ocean mass with GRACE - #-- allocate for product of grace and legendre polynomials + # calculate geocenter component of ocean mass with GRACE + # allocate for product of grace and legendre polynomials pcos = np.zeros((MMAX+1, nlat))#-[m,lat] psin = np.zeros((MMAX+1, nlat))#-[m,lat] - #-- Summing product of plms and c/slms over all SH degrees >= 2 - #-- Removing monthly GIA signal and atmospheric correction + # Summing product of plms and c/slms over all SH degrees >= 2 + # Removing monthly GIA signal and atmospheric correction Ylms = GSM_Ylms.index(t) Ylms.subtract(GIA_Ylms.index(t)) Ylms.subtract(ATM_Ylms.index(t)) @@ -647,214 +647,214 @@ def calc_degree_one(base_dir, PROC, DREL, MODEL, LMAX, RAD, l = np.arange(2,LMAX+1) pcos[:,i] = np.sum(plmout[l,:,i]*Ylms.clm[l,:], axis=0) psin[:,i] = np.sum(plmout[l,:,i]*Ylms.slm[l,:], axis=0) - #-- Multiplying by c/s(phi#m) to get surface density in cmH2Oeq (lon,lat) - #-- ccos/ssin are mXphi, pcos/psin are mXtheta: resultant matrices are phiXtheta - #-- The summation over spherical harmonic order is in this multiplication + # Multiplying by c/s(phi#m) to get surface density in cmH2Oeq (lon,lat) + # ccos/ssin are mXphi, pcos/psin are mXtheta: resultant matrices are phiXtheta + # The summation over spherical harmonic order is in this multiplication rmass = np.dot(np.transpose(ccos),pcos) + np.dot(np.transpose(ssin),psin) - #-- calculate G matrix parameters through a summation of each latitude + # calculate G matrix parameters through a summation of each latitude for i in range(0,nlat): - #-- summation of integration factors, Legendre polynomials, - #-- (convolution of order and harmonics) and the ocean mass at t + # summation of integration factors, Legendre polynomials, + # (convolution of order and harmonics) and the ocean mass at t G.C10[t] += np.sum(int_fact[i]*P10[i]*ccos[0,:]*ocean_function[:,i]*rmass[:,i])/(4.0*np.pi) G.C11[t] += np.sum(int_fact[i]*P11[i]*ccos[1,:]*ocean_function[:,i]*rmass[:,i])/(4.0*np.pi) G.S11[t] += np.sum(int_fact[i]*P11[i]*ssin[1,:]*ocean_function[:,i]*rmass[:,i])/(4.0*np.pi) - #-- calculate degree one solution for each iteration (or single if not) + # calculate degree one solution for each iteration (or single if not) while (eps > eps_max) and (n_iter < max_iter): - #-- for each file + # for each file for t in range(n_files): - #-- calculate eustatic component from GRACE (can iterate) + # calculate eustatic component from GRACE (can iterate) if (n_iter == 0): - #-- for first iteration (will be only iteration if not ITERATIVE): - #-- seasonal component of geocenter variation for land water + # for first iteration (will be only iteration if not ITERATIVE): + # seasonal component of geocenter variation for land water GSM_Ylms.clm[1,0,t] = seasonal_geocenter.C10[t] GSM_Ylms.clm[1,1,t] = seasonal_geocenter.C11[t] GSM_Ylms.slm[1,1,t] = seasonal_geocenter.S11[t] else: - #-- for all others: use previous iteration of inversion - #-- for each of the geocenter solutions (C10, C11, S11) + # for all others: use previous iteration of inversion + # for each of the geocenter solutions (C10, C11, S11) GSM_Ylms.clm[1,0,t] = iteration.C10[t,n_iter-1] GSM_Ylms.clm[1,1,t] = iteration.C11[t,n_iter-1] GSM_Ylms.slm[1,1,t] = iteration.S11[t,n_iter-1] - #-- allocate for product of grace and legendre polynomials + # allocate for product of grace and legendre polynomials pcos 
= np.zeros((MMAX+1, nlat))#-[m,lat] psin = np.zeros((MMAX+1, nlat))#-[m,lat] - #-- Summing product of plms and c/slms over all SH degrees - #-- Removing monthly GIA signal and atmospheric correction + # Summing product of plms and c/slms over all SH degrees + # Removing monthly GIA signal and atmospheric correction Ylms = GSM_Ylms.index(t) Ylms.subtract(GIA_Ylms.index(t)) Ylms.subtract(ATM_Ylms.index(t)) for i in range(0, nlat): - #-- for land water: use an initial seasonal geocenter estimate - #-- from Chen et al. (1999) then the iterative if specified + # for land water: use an initial seasonal geocenter estimate + # from Chen et al. (1999) then the iterative if specified l = np.arange(1,LMAX+1) pcos[:,i] = np.sum(plmout[l,:,i]*Ylms.clm[l,:], axis=0) psin[:,i] = np.sum(plmout[l,:,i]*Ylms.slm[l,:], axis=0) - #-- Multiplying by c/s(phi#m) to get surface density in cm w.e. (lonxlat) - #-- this will be a spatial field similar to outputs from stokes_combine.py - #-- ccos/ssin are mXphi, pcos/psin are mXtheta: resultant matrices are phiXtheta - #-- The summation over spherical harmonic order is in this multiplication + # Multiplying by c/s(phi#m) to get surface density in cm w.e. (lonxlat) + # this will be a spatial field similar to outputs from stokes_combine.py + # ccos/ssin are mXphi, pcos/psin are mXtheta: resultant matrices are phiXtheta + # The summation over spherical harmonic order is in this multiplication lmass = np.dot(np.transpose(ccos),pcos) + np.dot(np.transpose(ssin),psin) - #-- use sea level fingerprints or eustatic from GRACE land components + # use sea level fingerprints or eustatic from GRACE land components if FINGERPRINT: - #-- calculate total sea level fingerprint for eustatic component - #-- steps to calculate sea level from GRACE land-water change: - #-- 1) calculate total land mass at time t (GRACE*land function) - #-- NOTE: this is an unscaled GRACE estimate that uses the - #-- buffered land function when solving the sea-level equation. - #-- possible improvement using scaled estimate with real coastlines + # calculate total sea level fingerprint for eustatic component + # steps to calculate sea level from GRACE land-water change: + # 1) calculate total land mass at time t (GRACE*land function) + # NOTE: this is an unscaled GRACE estimate that uses the + # buffered land function when solving the sea-level equation. 
+ # possible improvement using scaled estimate with real coastlines land_Ylms = gen_stokes(lmass*land_function, landsea.lon, landsea.lat, UNITS=1, LMIN=0, LMAX=EXPANSION, PLM=PLM, LOVE=(hl,kl,ll)) - #-- 2) calculate sea level fingerprints of land mass at time t - #-- use maximum of 3 iterations for computational efficiency + # 2) calculate sea level fingerprints of land mass at time t + # use maximum of 3 iterations for computational efficiency sea_level = sea_level_equation(land_Ylms.clm, land_Ylms.slm, landsea.lon, landsea.lat, land_function, LMAX=EXPANSION, LOVE=(hl,kl,ll), BODY_TIDE_LOVE=0, FLUID_LOVE=0, ITERATIONS=3, POLAR=True, PLM=PLM, ASTYPE=np.float64, SCALE=1e-32, FILL_VALUE=0) - #-- 3) convert sea level fingerprints into spherical harmonics + # 3) convert sea level fingerprints into spherical harmonics slf_Ylms = gen_stokes(sea_level, landsea.lon, landsea.lat, UNITS=1, LMIN=0, LMAX=1, PLM=PLM[:2,:2,:], LOVE=(hl,kl,ll)) - #-- 4) convert the slf degree 1 harmonics to mass with dfactor + # 4) convert the slf degree 1 harmonics to mass with dfactor eustatic.C10[t] = slf_Ylms.clm[1,0]*dfactor[1] eustatic.C11[t] = slf_Ylms.clm[1,1]*dfactor[1] eustatic.S11[t] = slf_Ylms.slm[1,1]*dfactor[1] else: - #-- steps to calculate eustatic component from GRACE land-water change: - #-- 1) calculate total mass of 1 cm of ocean height (calculated above) - #-- 2) calculate total land mass at time t (GRACE*land function) - #-- NOTE: possible improvement using the sea-level equation to solve - #-- for the spatial pattern of sea level from the land water mass + # steps to calculate eustatic component from GRACE land-water change: + # 1) calculate total mass of 1 cm of ocean height (calculated above) + # 2) calculate total land mass at time t (GRACE*land function) + # NOTE: possible improvement using the sea-level equation to solve + # for the spatial pattern of sea level from the land water mass land_Ylms = gen_stokes(lmass*land_function, landsea.lon, landsea.lat, UNITS=1, LMIN=0, LMAX=1, PLM=PLM[:2,:2,:], LOVE=(hl,kl,ll)) - #-- 3) calculate ratio between the total land mass and the total mass - #-- of 1 cm of ocean height (negative as positive land = sea level drop) - #-- this converts the total land change to ocean height change + # 3) calculate ratio between the total land mass and the total mass + # of 1 cm of ocean height (negative as positive land = sea level drop) + # this converts the total land change to ocean height change eustatic_ratio = -land_Ylms.clm[0,0]/ocean_Ylms.clm[0,0] - #-- 4) scale degree one coefficients of ocean function with ratio - #-- and convert the eustatic degree 1 harmonics to mass with dfactor + # 4) scale degree one coefficients of ocean function with ratio + # and convert the eustatic degree 1 harmonics to mass with dfactor scale_factor = eustatic_ratio*dfactor[1] eustatic.C10[t] = ocean_Ylms.clm[1,0]*scale_factor eustatic.C11[t] = ocean_Ylms.clm[1,1]*scale_factor eustatic.S11[t] = ocean_Ylms.slm[1,1]*scale_factor - #-- eustatic coefficients of degree 1 - #-- for OMCT/MPIOM: - #-- equal to the eustatic component only as OMCT/MPIOM model is - #-- already removed from the GRACE/GRACE-FO GSM coefficients + # eustatic coefficients of degree 1 + # for OMCT/MPIOM: + # equal to the eustatic component only as OMCT/MPIOM model is + # already removed from the GRACE/GRACE-FO GSM coefficients CMAT = np.array([eustatic.C10[t],eustatic.C11[t],eustatic.S11[t]]) - #-- replacing the OBP harmonics of degree 1 + # replacing the OBP harmonics of degree 1 if MODEL not in ('OMCT','MPIOM'): - 
#-- calculate difference between ECCO and GAD as the OMCT/MPIOM - #-- model is already removed from the GRACE GSM coefficients + # calculate difference between ECCO and GAD as the OMCT/MPIOM + # model is already removed from the GRACE GSM coefficients GAD = np.array([GAD.C10[t], GAD.C11[t], GAD.S11[t]]) OBP = np.array([OBP.C10[t], OBP.C11[t], OBP.S11[t]]) - #-- effectively adding back OMCT/MPIOM and then removing ECCO + # effectively adding back OMCT/MPIOM and then removing ECCO CMAT += OBP - GAD - #-- G Matrix for time t + # G Matrix for time t GMAT = np.array([G.C10[t], G.C11[t], G.S11[t]]) - #-- calculate inversion for degree 1 solutions - #-- this is mathematically equivalent to an iterative procedure - #-- whereby the initial degree one coefficients are used to update - #-- the G Matrix until (C10, C11, S11) converge - #-- for OMCT/MPIOM: min(eustatic from land - measured ocean) - #-- for ECCO: min((OBP-GAD) + eustatic from land - measured ocean) + # calculate inversion for degree 1 solutions + # this is mathematically equivalent to an iterative procedure + # whereby the initial degree one coefficients are used to update + # the G Matrix until (C10, C11, S11) converge + # for OMCT/MPIOM: min(eustatic from land - measured ocean) + # for ECCO: min((OBP-GAD) + eustatic from land - measured ocean) DMAT[:,t] = np.dot(np.linalg.inv(IMAT), (CMAT-GMAT)) - #-- could also use pseudo-inverse in least-squares + # could also use pseudo-inverse in least-squares #DMAT[:,t] = np.linalg.lstsq(IMAT,(CMAT-GMAT),rcond=-1)[0] - #-- save geocenter for iteration and time t after restoring GIA+ATM + # save geocenter for iteration and time t after restoring GIA+ATM iteration.C10[t,n_iter] = DMAT[0,t]/dfactor[1]+gia.C10[t]+atm.C10[t] iteration.C11[t,n_iter] = DMAT[1,t]/dfactor[1]+gia.C11[t]+atm.C11[t] iteration.S11[t,n_iter] = DMAT[2,t]/dfactor[1]+gia.S11[t]+atm.S11[t] - #-- remove mean of each solution for iteration + # remove mean of each solution for iteration iteration.C10[:,n_iter] -= iteration.C10[:,n_iter].mean() iteration.C11[:,n_iter] -= iteration.C11[:,n_iter].mean() iteration.S11[:,n_iter] -= iteration.S11[:,n_iter].mean() - #-- calculate difference between original geocenter coefficients and the - #-- calculated coefficients for each of the geocenter solutions + # calculate difference between original geocenter coefficients and the + # calculated coefficients for each of the geocenter solutions sigma_C10 = np.sum((GSM_Ylms.clm[1,0,:] - iteration.C10[:,n_iter])**2) sigma_C11 = np.sum((GSM_Ylms.clm[1,1,:] - iteration.C11[:,n_iter])**2) sigma_S11 = np.sum((GSM_Ylms.slm[1,1,:] - iteration.S11[:,n_iter])**2) power = GSM_Ylms.clm[1,0,:]**2 + GSM_Ylms.clm[1,1,:]**2 + GSM_Ylms.slm[1,1,:]**2 eps = np.sqrt(sigma_C10 + sigma_C11 + sigma_S11)/np.sqrt(np.sum(power)) - #-- add 1 to n_iter counter + # add 1 to n_iter counter n_iter += 1 - #-- Convert inverted solutions into fully normalized spherical harmonics - #-- restore geocenter variation from glacial isostatic adjustment (GIA) - #-- restore atmospheric jump corrections from Fagiolini (2015) if applicable - #-- for each of the geocenter solutions (C10, C11, S11) - #-- for the iterative case this will be the final iteration + # Convert inverted solutions into fully normalized spherical harmonics + # restore geocenter variation from glacial isostatic adjustment (GIA) + # restore atmospheric jump corrections from Fagiolini (2015) if applicable + # for each of the geocenter solutions (C10, C11, S11) + # for the iterative case this will be the final iteration 
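# Aside: a minimal sketch of why the commented-out pseudo-inverse line above
# is interchangeable with the direct inversion; IMAT and resid here are
# made-up stand-ins for IMAT and (CMAT - GMAT) at a single month
import numpy as np
IMAT = np.array([[0.8, 0.1, 0.0],
                 [0.1, 0.9, 0.2],
                 [0.0, 0.2, 0.7]])
resid = np.array([1.3e-10, -4.2e-11, 7.5e-11])
d_inv = np.dot(np.linalg.inv(IMAT), resid)         # direct inversion
d_lsq = np.linalg.lstsq(IMAT, resid, rcond=-1)[0]  # pseudo-inverse
assert np.allclose(d_inv, d_lsq)  # identical for a square, full-rank system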
DEG1 = geocenter() DEG1.C10 = DMAT[0,:]/dfactor[1] + gia.C10[:] + atm.C10[:] DEG1.C11 = DMAT[1,:]/dfactor[1] + gia.C11[:] + atm.C11[:] DEG1.S11 = DMAT[2,:]/dfactor[1] + gia.S11[:] + atm.S11[:] - #-- remove mean of geocenter for each component + # remove mean of geocenter for each component DEG1.mean(apply=True) - #-- calculate geocenter variations with dealiasing restored + # calculate geocenter variations with dealiasing restored aod = DEG1.copy() aod.add(GAC) - #-- output degree 1 coefficients with and without dealiasing + # output degree 1 coefficients with and without dealiasing file_format = '{0}_{1}_{2}{3}{4}{5}{6}{7}{8}.{9}' output_format = '{0:11.4f}{1:14.6e}{2:14.6e}{3:14.6e} {4:03d}\n' - #-- public file format in fully normalized spherical harmonics - #-- before and after restoring the atmospheric and oceanic dealiasing + # public file format in fully normalized spherical harmonics + # before and after restoring the atmospheric and oceanic dealiasing for AOD in ['','_wAOD']: - #-- local version with all descriptor flags + # local version with all descriptor flags a1=(PROC,DREL,MODEL,slf_str,iter_str,slr_str,gia_str,AOD,ds_str,'txt') FILE1 = os.path.join(DIRECTORY,file_format.format(*a1)) fid1 = open(FILE1,'w') - #-- print headers for cases with and without dealiasing + # print headers for cases with and without dealiasing print_header(fid1) print_harmonic(fid1,kl[1]) print_global(fid1,PROC,DREL,MODEL.replace('_',' '),AOD,GIA_Ylms_rate, SLR_C20,SLR_21,GSM_Ylms.month) print_variables(fid1,'single precision','fully normalized') - #-- for each GRACE/GRACE-FO month + # for each GRACE/GRACE-FO month for t,mon in enumerate(GSM_Ylms.month): - #-- geocenter coefficients with and without AOD restored + # geocenter coefficients with and without AOD restored if AOD: args=(tdec[t],aod.C10[t],aod.C11[t],aod.S11[t],mon) else: args=(tdec[t],DEG1.C10[t],DEG1.C11[t],DEG1.S11[t],mon) - #-- output geocenter coefficients to file + # output geocenter coefficients to file fid1.write(output_format.format(*args)) - #-- close the output file + # close the output file fid1.close() output_files.append(FILE1) - #-- set the permissions mode of the output file + # set the permissions mode of the output file os.chmod(FILE1, MODE) - #-- create public and archival copies of data + # create public and archival copies of data if COPY: - #-- create symbolic link for public distribution without flags + # create symbolic link for public distribution without flags a2=(PROC,DREL,MODEL,slf_str,iter_str,'','',AOD,'','txt') FILE2 = os.path.join(DIRECTORY,file_format.format(*a2)) os.symlink(FILE1,FILE2) if not os.access(FILE2,os.F_OK) else None output_files.append(FILE2) - #-- create copy of file with date for archiving + # create copy of file with date for archiving today=time.strftime('_%Y-%m-%d',time.localtime()) a3=(PROC,DREL,MODEL,slf_str,iter_str,'','',AOD,today,'txt') FILE3 = os.path.join(DIRECTORY,file_format.format(*a3)) shutil.copyfile(FILE1,FILE3) - #-- copy modification times and permissions for archive file + # copy modification times and permissions for archive file shutil.copystat(FILE1,FILE3) output_files.append(FILE3) - #-- save iterations to netCDF4 file + # save iterations to netCDF4 file if ITERATIVE: - #-- output all degree 1 coefficients as a netCDF4 file + # output all degree 1 coefficients as a netCDF4 file a4=(PROC,DREL,MODEL,slf_str,iter_str,slr_str,gia_str,'',ds_str,'nc') FILE4=os.path.join(DIRECTORY,file_format.format(*a4)) fileID=netCDF4.Dataset(FILE4,'w') - #-- Defining the NetCDF4 dimensions + 
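# Aside: a minimal sketch of the netCDF4 layout opened above and filled in
# below, written to a throwaway file with made-up sizes; nothing here is the
# script's real output
import numpy as np
import netCDF4
n_files, n_iter = 12, 3
fileID = netCDF4.Dataset('example_iterations.nc', 'w')
fileID.createDimension('iteration', n_iter)
fileID.createDimension('time', n_files)
nc = {}
nc['time'] = fileID.createVariable('time', 'f8', ('time',))
nc['C10'] = fileID.createVariable('C10', 'f8', ('time','iteration',), zlib=True)
nc['time'][:] = 2002.25 + np.arange(n_files)/12.0
nc['C10'][:] = np.zeros((n_files, n_iter))
nc['time'].units = 'years'          # variable attribute
fileID.date_created = '2022-11-29'  # global attribute
fileID.close()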
# Defining the NetCDF4 dimensions fileID.createDimension('iteration', n_iter) fileID.createDimension('time', n_files) - #-- defining the NetCDF4 variables + # defining the NetCDF4 variables nc = {} nc['time'] = fileID.createVariable('time',GSM_Ylms.time.dtype,('time',)) nc['month'] = fileID.createVariable('month',GSM_Ylms.month.dtype,('time',)) @@ -864,13 +864,13 @@ def calc_degree_one(base_dir, PROC, DREL, MODEL, LMAX, RAD, ('time','iteration',), zlib=True) nc['S11'] = fileID.createVariable('S11',iteration.S11.dtype, ('time','iteration',), zlib=True) - #-- filling NetCDF4 variables + # filling NetCDF4 variables nc['time'][:] = np.copy(GSM_Ylms.time) nc['month'][:] = np.copy(GSM_Ylms.month) nc['C10'][:] = iteration.C10[:,:n_iter] nc['C11'][:] = iteration.C11[:,:n_iter] nc['S11'][:] = iteration.S11[:,:n_iter] - #-- defining the NetCDF4 attributes + # defining the NetCDF4 attributes nc['time'].units = 'years' nc['time'].long_name = 'Date_in_Decimal_Years' nc['month'].long_name = 'GRACE_month' @@ -882,45 +882,45 @@ def calc_degree_one(base_dir, PROC, DREL, MODEL, LMAX, RAD, nc['C11'].long_name = 'cosine_spherical_harmonic_of_degree_1,_order_1' nc['S11'].units = 'fully_normalized' nc['S11'].long_name = 'sine_spherical_harmonic_of_degree_1,_order_1' - #-- define global attributes + # define global attributes fileID.date_created = time.strftime('%Y-%m-%d',time.localtime()) - #-- close the output file + # close the output file fileID.close() - #-- set the permissions mode of the output file + # set the permissions mode of the output file os.chmod(FILE4, MODE) output_files.append(FILE4) - #-- create plot similar to Figure 1 of Swenson et al (2008) + # create plot similar to Figure 1 of Swenson et al (2008) if PLOT: - #-- 3 row plot (C10, C11 and S11) - #-- with ECCO OBP geocenter, OMCT/MPIOM OBP geocenter, eustatic geocenter - #-- and the G matrix geocenter components + # 3 row plot (C10, C11 and S11) + # with ECCO OBP geocenter, OMCT/MPIOM OBP geocenter, eustatic geocenter + # and the G matrix geocenter components ax = {} fig, (ax[0], ax[1], ax[2]) = plt.subplots(num=1, nrows=3, sharex=True, sharey=True, figsize=(6,9)) ii = np.nonzero((tdec >= 2003.) 
& (tdec < 2008.)) - #-- plot ocean bottom pressure for alternative models + # plot ocean bottom pressure for alternative models if MODEL not in ('OMCT','MPIOM'): OBP.mean(apply=True,indices=ii) ax[0].plot(tdec,10.*OBP.C10,color='#1ed565',lw=2) ax[1].plot(tdec,10.*OBP.C11,color='#1ed565',lw=2) ax[2].plot(tdec,10.*OBP.S11,color='#1ed565',lw=2) - #-- plot GRACE components + # plot GRACE components G.mean(apply=True,indices=ii) ax[0].plot(tdec,10.*G.C10, color='orange', lw=2) ax[1].plot(tdec,10.*G.C11, color='orange', lw=2) ax[2].plot(tdec,10.*G.S11, color='orange', lw=2) - #-- plot OMCT/MPIOM ocean bottom pressure + # plot OMCT/MPIOM ocean bottom pressure GAD.mean(apply=True,indices=ii) ax[0].plot(tdec,10.*GAD.C10, 'b', lw=2) ax[1].plot(tdec,10.*GAD.C11, 'b', lw=2) ax[2].plot(tdec,10.*GAD.S11, 'b', lw=2) - #-- plot eustatic components + # plot eustatic components eustatic.mean(apply=True,indices=ii) ax[0].plot(tdec,10.*eustatic.C10, 'r', lw=2) ax[1].plot(tdec,10.*eustatic.C11, 'r', lw=2) ax[2].plot(tdec,10.*eustatic.S11, 'r', lw=2) - #-- labels and set limits to Swenson range + # labels and set limits to Swenson range ax[0].set_ylabel('[mm]', fontsize=14) ax[1].set_ylabel('[mm]', fontsize=14) ax[2].set_ylabel('[mm]', fontsize=14) @@ -931,48 +931,48 @@ def calc_degree_one(base_dir, PROC, DREL, MODEL, LMAX, RAD, ax[2].xaxis.set_minor_locator(MultipleLocator(0.25)) ax[2].yaxis.set_ticks(np.arange(-6,8,2)) ax[2].xaxis.get_major_formatter().set_useOffset(False) - #-- add axis labels and adjust font sizes for axis ticks + # add axis labels and adjust font sizes for axis ticks fig_labels = ['C10','C11','S11'] for i in range(3): - #-- axis label + # axis label ax[i].add_artist(AnchoredText(fig_labels[i], pad=0., prop=dict(size=16,weight='bold'), frameon=False, loc=2)) - #-- axes tick adjustments + # axes tick adjustments for tick in ax[i].xaxis.get_major_ticks(): tick.label.set_fontsize(14) for tick in ax[i].yaxis.get_major_ticks(): tick.label.set_fontsize(14) - #-- adjust ticks + # adjust ticks ax[i].get_xaxis().set_tick_params(which='both', direction='in') ax[i].get_yaxis().set_tick_params(which='both', direction='in') - #-- adjust locations of subplots and save to file + # adjust locations of subplots and save to file fig.subplots_adjust(left=0.1,right=0.96,bottom=0.06,top=0.98,hspace=0.1) args = (PROC,DREL,MODEL,slf_str,iter_str,slr_str,gia_str,ds_str) FILE = 'Swenson_Figure_1_{0}_{1}_{2}{3}{4}{5}{6}{7}.pdf'.format(*args) plt.savefig(os.path.join(DIRECTORY,FILE), format='pdf', metadata={'Title':os.path.basename(sys.argv[0])}) plt.clf() - #-- set the permissions mode of the output files + # set the permissions mode of the output files os.chmod(os.path.join(DIRECTORY,FILE), MODE) output_files.append(os.path.join(DIRECTORY,FILE)) - #-- if ITERATIVE: create plot showing iteration solutions + # if ITERATIVE: create plot showing iteration solutions if PLOT and ITERATIVE: - #-- 3 row plot (C10, C11 and S11) + # 3 row plot (C10, C11 and S11) ax = {} fig, (ax[0], ax[1], ax[2]) = plt.subplots(num=1, nrows=3, sharex=True, figsize=(6,9)) - #-- show solutions for each iteration + # show solutions for each iteration cmap = copy.copy(cm.rainbow) plot_colors = iter(cmap(np.linspace(0,1,n_iter))) iteration_mmwe = iteration.scale(10.0*dfactor[1]) for j in range(n_iter): c = next(plot_colors) - #-- C10, C11 and S11 + # C10, C11 and S11 ax[0].plot(GSM_Ylms.month,iteration_mmwe.C10[:,j],c=c) ax[1].plot(GSM_Ylms.month,iteration_mmwe.C11[:,j],c=c) ax[2].plot(GSM_Ylms.month,iteration_mmwe.S11[:,j],c=c) - #-- labels and
set limits + # labels and set limits ax[0].set_ylabel('mm', fontsize=14) ax[1].set_ylabel('mm', fontsize=14) ax[2].set_ylabel('mm', fontsize=14) @@ -982,59 +982,59 @@ def calc_degree_one(base_dir, PROC, DREL, MODEL, LMAX, RAD, ax[2].set_xlim(xmin,xmax) ax[2].xaxis.set_minor_locator(MultipleLocator(5)) ax[2].xaxis.get_major_formatter().set_useOffset(False) - #-- add axis labels and adjust font sizes for axis ticks + # add axis labels and adjust font sizes for axis ticks fig_labels = ['C10','C11','S11'] for i in range(3): - #-- axis label + # axis label ax[i].add_artist(AnchoredText(fig_labels[i], pad=0., prop=dict(size=16,weight='bold'), frameon=False, loc=2)) - #-- axes tick adjustments + # axes tick adjustments for tick in ax[i].xaxis.get_major_ticks(): tick.label.set_fontsize(14) for tick in ax[i].yaxis.get_major_ticks(): tick.label.set_fontsize(14) - #-- adjust ticks + # adjust ticks ax[i].get_xaxis().set_tick_params(which='both', direction='in') ax[i].get_yaxis().set_tick_params(which='both', direction='in') - #-- adjust locations of subplots and save to file + # adjust locations of subplots and save to file fig.subplots_adjust(left=0.12,right=0.94,bottom=0.06,top=0.98,hspace=0.1) args = (PROC,DREL,MODEL,slf_str,slr_str,gia_str,ds_str) FILE = 'Geocenter_Iterative_{0}_{1}_{2}{3}{4}{5}{6}.pdf'.format(*args) plt.savefig(os.path.join(DIRECTORY,FILE), format='pdf', metadata={'Title':os.path.basename(sys.argv[0])}) plt.clf() - #-- set the permissions mode of the output files + # set the permissions mode of the output files os.chmod(os.path.join(DIRECTORY,FILE), MODE) output_files.append(os.path.join(DIRECTORY,FILE)) - #-- return the list of output files and the number of iterations + # return the list of output files and the number of iterations return (output_files, n_iter) -#-- PURPOSE: print YAML header to top of file +# PURPOSE: print YAML header to top of file def print_header(fid): - #-- print header + # print header fid.write('{0}:\n'.format('header')) - #-- data dimensions + # data dimensions fid.write(' {0}:\n'.format('dimensions')) fid.write(' {0:22}: {1:d}\n'.format('degree',1)) fid.write(' {0:22}: {1:d}\n'.format('order',1)) fid.write('\n') -#-- PURPOSE: print spherical harmonic attributes to YAML header +# PURPOSE: print spherical harmonic attributes to YAML header def print_harmonic(fid,kl): - #-- non-standard attributes + # non-standard attributes fid.write(' {0}:\n'.format('non-standard_attributes')) - #-- load love number + # load love number fid.write(' {0:22}:\n'.format('love_number')) long_name = 'Gravitational Load Love Number of Degree 1 (k1)' fid.write(' {0:20}: {1}\n'.format('long_name',long_name)) fid.write(' {0:20}: {1:0.3f}\n'.format('value',kl)) - #-- data format + # data format data_format = '(f11.4,3e14.6,i4)' fid.write(' {0:22}: {1}\n'.format('formatting_string',data_format)) fid.write('\n') -#-- PURPOSE: print global attributes to YAML header +# PURPOSE: print global attributes to YAML header def print_global(fid,PROC,DREL,MODEL,AOD,GIA,SLR,S21,month): fid.write(' {0}:\n'.format('global_attributes')) MISSION = dict(RL05='GRACE',RL06='GRACE/GRACE-FO') @@ -1053,7 +1053,7 @@ def print_global(fid,PROC,DREL,MODEL,AOD,GIA,SLR,S21,month): 'processes. 
In addition, the coefficients represent the ' 'atmospheric and oceanic processes not captured in the {0} {1} ' 'de-aliasing product.').format(MISSION[DREL],DREL)) - #-- get GIA parameters + # get GIA parameters summary.append((' Glacial Isostatic Adjustment (GIA) estimates from ' '{0} have been restored.').format(GIA['citation'])) if AOD: @@ -1107,7 +1107,7 @@ def print_global(fid,PROC,DREL,MODEL,AOD,GIA,SLR,S21,month): fid.write(' {0:22}: {1}\n'.format('product_version',PRODUCT_VERSION)) fid.write(' {0:22}:\n'.format('references')) reference = [] - #-- geocenter citations + # geocenter citations reference.append(('T. C. Sutterley, and I. Velicogna, "Improved estimates ' 'of geocenter variability from time-variable gravity and ocean model ' 'outputs", Remote Sensing, 11(18), 2108, (2019). ' @@ -1116,16 +1116,16 @@ def print_global(fid,PROC,DREL,MODEL,AOD,GIA,SLR,S21,month): 'geocenter variations from a combination of GRACE and ocean model ' 'output", Journal of Geophysical Research - Solid Earth, 113(B08410), ' '(2008). https://doi.org/10.1029/2007JB005338')) - #-- GIA citation + # GIA citation reference.append(GIA['reference']) - #-- ECMWF jump corrections citation + # ECMWF jump corrections citation if (DREL == 'RL05') and not AOD: reference.append(('E. Fagiolini, F. Flechtner, M. Horwath, H. Dobslaw, ' '''"Correction of inconsistencies in ECMWF's operational ''' '''analysis data during de-aliasing of GRACE gravity models", ''' 'Geophysical Journal International, 202(3), 2150, (2015). ' 'https://doi.org/10.1093/gji/ggv276')) - #-- SLR citation for a given solution + # SLR citation for a given solution if (SLR == 'CSR'): reference.append(('M. Cheng, B. D. Tapley, and J. C. Ries, ' '''"Deceleration in the Earth's oblateness", Journal of ''' @@ -1156,7 +1156,7 @@ def print_global(fid,PROC,DREL,MODEL,AOD,GIA,SLR,S21,month): 'GRACE/GRACE-FO Geopotential GSM Coefficients GFZ RL06 ' '(Level-2B Product)." V. 0002. GFZ Data Services, (2019). ' 'http://doi.org/10.5880/GFZ.GRAVIS_06_L2B')) - #-- print list of references + # print list of references for ref in reference: fid.write(' - {0}\n'.format(ref)) creators = 'Tyler C. 
Sutterley and Isabella Velicogna' @@ -1168,7 +1168,7 @@ def print_global(fid,PROC,DREL,MODEL,AOD,GIA,SLR,S21,month): fid.write(' {0:22}: {1}\n'.format('creator_type', 'group')) inst = 'University of Washington; University of California, Irvine' fid.write(' {0:22}: {1}\n'.format('creator_institution',inst)) - #-- date range and date created + # date range and date created calendar_year,calendar_month = grace_to_calendar(month) start_time = '{0:4.0f}-{1:02.0f}'.format(calendar_year[0],calendar_month[0]) fid.write(' {0:22}: {1}\n'.format('time_coverage_start', start_time)) @@ -1178,39 +1178,39 @@ def print_global(fid,PROC,DREL,MODEL,AOD,GIA,SLR,S21,month): fid.write(' {0:22}: {1}\n'.format('date_created', today)) fid.write('\n') -#-- PURPOSE: print variable descriptions to YAML header +# PURPOSE: print variable descriptions to YAML header def print_variables(fid,data_precision,data_units): - #-- variables + # variables fid.write(' {0}:\n'.format('variables')) - #-- time + # time fid.write(' {0:22}:\n'.format('mid-epoch_time')) long_name = 'mid-date of each measurement epoch' fid.write(' {0:20}: {1}\n'.format('long_name', long_name)) fid.write(' {0:20}: {1}\n'.format('data_type', 'single precision')) fid.write(' {0:20}: {1}\n'.format('units', 'decimal-years')) fid.write(' {0:20}: {1}\n'.format('comment', '1st column')) - #-- C10 + # C10 fid.write(' {0:22}:\n'.format('C10')) long_name = 'C10 coefficient; cosine coefficient for degree 1 and order 0' fid.write(' {0:20}: {1}\n'.format('long_name', long_name)) fid.write(' {0:20}: {1}\n'.format('data_type', data_precision)) fid.write(' {0:20}: {1}\n'.format('units', data_units)) fid.write(' {0:20}: {1}\n'.format('comment', '2nd column')) - #-- C11 + # C11 fid.write(' {0:22}:\n'.format('C11')) long_name = 'C11 coefficient; cosine coefficient for degree 1 and order 1' fid.write(' {0:20}: {1}\n'.format('long_name', long_name)) fid.write(' {0:20}: {1}\n'.format('data_type', data_precision)) fid.write(' {0:20}: {1}\n'.format('units', data_units)) fid.write(' {0:20}: {1}\n'.format('comment', '3rd column')) - #-- S11 + # S11 fid.write(' {0:22}:\n'.format('S11')) long_name = 'S11 coefficient; sine coefficient for degree 1 and order 1' fid.write(' {0:20}: {1}\n'.format('long_name', long_name)) fid.write(' {0:20}: {1}\n'.format('data_type', data_precision)) fid.write(' {0:20}: {1}\n'.format('units', data_units)) fid.write(' {0:20}: {1}\n'.format('comment', '4th column')) - #-- GRACE month + # GRACE month fid.write(' {0:22}:\n'.format('month')) long_name = 'GRACE month of each measurement epoch' fid.write(' {0:20}: {1}\n'.format('long_name', long_name)) @@ -1219,52 +1219,52 @@ def print_variables(fid,data_precision,data_units): fid.write(' {0:20}: {1}\n'.format('data_type', 'integer')) fid.write(' {0:20}: {1}\n'.format('units', 'month')) fid.write(' {0:20}: {1}\n'.format('comment', '5th column')) - #-- end of header + # end of header fid.write('\n\n# End of YAML header\n') -#-- PURPOSE: print a file log for the GRACE degree one analysis +# PURPOSE: print a file log for the GRACE degree one analysis def output_log_file(arguments,output_files,n_iter): - #-- format: calc_degree_one_run_2002-04-01_PID-70335.log + # format: calc_degree_one_run_2002-04-01_PID-70335.log args = (time.strftime('%Y-%m-%d',time.localtime()), os.getpid()) LOGFILE = 'calc_degree_one_run_{0}_PID-{1:d}.log'.format(*args) DIRECTORY = os.path.join(arguments.directory,'geocenter') - #-- create a unique log and open the log file + # create a unique log and open the log file fid = 
utilities.create_unique_file(os.path.join(DIRECTORY,LOGFILE)) logging.basicConfig(stream=fid, level=logging.INFO) - #-- print argument values sorted alphabetically + # print argument values sorted alphabetically logging.info('ARGUMENTS:') for arg, value in sorted(vars(arguments).items()): logging.info('{0}: {1}'.format(arg, value)) - #-- print number of iterations used in calculation + # print number of iterations used in calculation if arguments.iterative: logging.info('\n\nNUMBER OF ITERATIONS: {0:d}'.format(n_iter)) - #-- print output files + # print output files logging.info('\n\nOUTPUT FILES:') for f in output_files: logging.info('{0}'.format(f)) - #-- close the log file + # close the log file fid.close() -#-- PURPOSE: print a error file log for the GRACE degree one analysis +# PURPOSE: print a error file log for the GRACE degree one analysis def output_error_log_file(arguments): - #-- format: calc_degree_one_failed_run_2002-04-01_PID-70335.log + # format: calc_degree_one_failed_run_2002-04-01_PID-70335.log args = (time.strftime('%Y-%m-%d',time.localtime()), os.getpid()) LOGFILE = 'calc_degree_one_failed_run_{0}_PID-{1:d}.log'.format(*args) DIRECTORY = os.path.join(arguments.directory,'geocenter') - #-- create a unique log and open the log file + # create a unique log and open the log file fid = utilities.create_unique_file(os.path.join(DIRECTORY,LOGFILE)) logging.basicConfig(stream=fid, level=logging.INFO) - #-- print argument values sorted alphabetically + # print argument values sorted alphabetically logging.info('ARGUMENTS:') for arg, value in sorted(vars(arguments).items()): logging.info('{0}: {1}'.format(arg, value)) - #-- print traceback error + # print traceback error logging.info('\n\nTRACEBACK ERROR:') traceback.print_exc(file=fid) - #-- close the log file + # close the log file fid.close() -#-- PURPOSE: create argument parser +# PURPOSE: create argument parser def arguments(): parser = argparse.ArgumentParser( description="""Calculates degree 1 variations using GRACE/GRACE-FO @@ -1274,28 +1274,28 @@ def arguments(): fromfile_prefix_chars="@" ) parser.convert_arg_line_to_args = utilities.convert_arg_line_to_args - #-- command line parameters - #-- working data directory + # command line parameters + # working data directory parser.add_argument('--directory','-D', type=lambda p: os.path.abspath(os.path.expanduser(p)), default=os.getcwd(), help='Working data directory') - #-- GRACE/GRACE-FO data processing center + # GRACE/GRACE-FO data processing center parser.add_argument('--center','-c', metavar='PROC', type=str, required=True, help='GRACE/GRACE-FO Processing Center') - #-- GRACE/GRACE-FO data release + # GRACE/GRACE-FO data release parser.add_argument('--release','-r', metavar='DREL', type=str, default='RL06', help='GRACE/GRACE-FO Data Release') - #-- maximum spherical harmonic degree and order + # maximum spherical harmonic degree and order parser.add_argument('--lmax','-l', type=int, default=60, help='Maximum spherical harmonic degree') parser.add_argument('--mmax','-m', type=int, default=None, help='Maximum spherical harmonic order') - #-- start and end GRACE/GRACE-FO months + # start and end GRACE/GRACE-FO months parser.add_argument('--start','-S', type=int, default=4, help='Starting GRACE/GRACE-FO month') @@ -1307,25 +1307,25 @@ def arguments(): parser.add_argument('--missing','-N', metavar='MISSING', type=int, nargs='+', default=MISSING, help='Missing GRACE/GRACE-FO months') - #-- different treatments of the load Love numbers - #-- 0: Han and Wahr (1995) values from 
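# Aside: a simplified stand-in for utilities.create_unique_file as used
# above (the real function lives in gravity_toolkit.utilities and may differ
# in detail); it bumps a counter until the log filename is unused, then
# opens it as the stream for the logger
import os
import logging
import time
def create_unique_file(filename):
    stem, ext = os.path.splitext(filename)
    counter = 1
    while os.path.exists(filename):
        filename = f'{stem}_{counter:d}{ext}'
        counter += 1
    return open(filename, mode='w', encoding='utf8')
args = (time.strftime('%Y-%m-%d', time.localtime()), os.getpid())
fid = create_unique_file('example_run_{0}_PID-{1:d}.log'.format(*args))
logging.basicConfig(stream=fid, level=logging.INFO)
logging.info('ARGUMENTS:')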
PREM - #-- 1: Gegout (2005) values from PREM - #-- 2: Wang et al. (2012) values from PREM + # different treatments of the load Love numbers + # 0: Han and Wahr (1995) values from PREM + # 1: Gegout (2005) values from PREM + # 2: Wang et al. (2012) values from PREM parser.add_argument('--love','-n', type=int, default=0, choices=[0,1,2], help='Treatment of the Load Love numbers') parser.add_argument('--kl','-k', type=float, default=0.021, nargs='?', help='Degree 1 gravitational Load Love number') - #-- Gaussian smoothing radius (km) + # Gaussian smoothing radius (km) parser.add_argument('--radius','-R', type=float, default=0, help='Gaussian smoothing radius (km)') - #-- Use a decorrelation (destriping) filter + # Use a decorrelation (destriping) filter parser.add_argument('--destripe','-d', default=False, action='store_true', help='Use decorrelation (destriping) filter') - #-- GIA model type list + # GIA model type list models = {} models['IJ05-R2'] = 'Ivins R2 GIA Models' models['W12a'] = 'Whitehouse GIA Models' @@ -1339,23 +1339,23 @@ def arguments(): models['ascii'] = 'reformatted GIA in ascii format' models['netCDF4'] = 'reformatted GIA in netCDF4 format' models['HDF5'] = 'reformatted GIA in HDF5 format' - #-- GIA model type + # GIA model type parser.add_argument('--gia','-G', type=str, metavar='GIA', default='AW13-ICE6G', choices=models.keys(), help='GIA model type to read') - #-- full path to GIA file + # full path to GIA file parser.add_argument('--gia-file', type=lambda p: os.path.abspath(os.path.expanduser(p)), help='GIA file to read') - #-- use atmospheric jump corrections from Fagiolini et al. (2015) + # use atmospheric jump corrections from Fagiolini et al. (2015) parser.add_argument('--atm-correction', default=False, action='store_true', help='Apply atmospheric jump correction coefficients') - #-- correct for pole tide drift follow Wahr et al. (2015) + # correct for pole tide drift following Wahr et al.
(2015) parser.add_argument('--pole-tide', default=False, action='store_true', help='Correct for pole tide drift') - #-- replace low degree harmonics with values from Satellite Laser Ranging + # replace low degree harmonics with values from Satellite Laser Ranging parser.add_argument('--slr-c20', type=str, default=None, choices=['CSR','GFZ','GSFC'], help='Replace C20 coefficients with SLR values') @@ -1374,7 +1374,7 @@ def arguments(): parser.add_argument('--slr-c50', type=str, default=None, choices=['CSR','GSFC','LARES'], help='Replace C50 coefficients with SLR values') - #-- ocean model list + # ocean model list choices = [] choices.append('OMCT') choices.append('MPIOM') @@ -1387,78 +1387,78 @@ def arguments(): metavar='MODEL', type=str, default='MPIOM', choices=choices, help='Ocean model to use') - #-- input data format (ascii, netCDF4, HDF5) + # input data format (ascii, netCDF4, HDF5) parser.add_argument('--format','-F', type=str, default='netCDF4', choices=['ascii','netCDF4','HDF5'], help='Input data format for ocean models') - #-- index file for ocean model harmonics + # index file for ocean model harmonics parser.add_argument('--ocean-file', type=lambda p: os.path.abspath(os.path.expanduser(p)), help='Index file for ocean model harmonics') - #-- mean file to remove + # mean file to remove parser.add_argument('--mean-file', type=lambda p: os.path.abspath(os.path.expanduser(p)), help='GRACE/GRACE-FO mean file to remove from the harmonic data') - #-- input data format (ascii, netCDF4, HDF5) + # input data format (ascii, netCDF4, HDF5) parser.add_argument('--mean-format', type=str, default='netCDF4', choices=['ascii','netCDF4','HDF5','gfc'], help='Input data format for GRACE/GRACE-FO mean file') - #-- run with iterative scheme + # run with iterative scheme parser.add_argument('--iterative', default=False, action='store_true', help='Iterate degree one solutions') - #-- run with sea level fingerprints + # run with sea level fingerprints parser.add_argument('--fingerprint', default=False, action='store_true', help='Redistribute land-water flux using sea level fingerprints') parser.add_argument('--expansion','-e', type=int, default=240, help='Spherical harmonic expansion for sea level fingerprints') - #-- land-sea mask for calculating ocean mass and land water flux + # land-sea mask for calculating ocean mass and land water flux land_mask_file = utilities.get_data_path(['data','land_fcn_300km.nc']) parser.add_argument('--mask', type=lambda p: os.path.abspath(os.path.expanduser(p)), default=land_mask_file, help='Land-sea mask for calculating ocean mass and land water flux') - #-- create output plots + # create output plots parser.add_argument('--plot','-p', default=False, action='store_true', help='Create output plots for components and iterations') - #-- copy output files + # copy output files parser.add_argument('--copy','-C', default=False, action='store_true', help='Copy output files for distribution and archival') - #-- Output log file for each job in forms - #-- calc_degree_one_run_2002-04-01_PID-00000.log - #-- calc_degree_one_failed_run_2002-04-01_PID-00000.log + # Output log file for each job in forms + # calc_degree_one_run_2002-04-01_PID-00000.log + # calc_degree_one_failed_run_2002-04-01_PID-00000.log parser.add_argument('--log', default=False, action='store_true', help='Output log file for each job') - #-- print information about processing run + # print information about processing run parser.add_argument('--verbose','-V', action='count', default=0, help='Verbose output of 
processing run') - #-- permissions mode of the local directories and files (number in octal) + # permissions mode of the local directories and files (number in octal) parser.add_argument('--mode','-M', type=lambda x: int(x,base=8), default=0o775, help='Permissions mode of output files') - #-- return the parser + # return the parser return parser -#-- This is the main part of the program that calls the individual functions +# This is the main part of the program that calls the individual functions def main(): - #-- Read the system arguments listed after the program + # Read the system arguments listed after the program parser = arguments() args,_ = parser.parse_known_args() - #-- create logger + # create logger loglevels = [logging.CRITICAL,logging.INFO,logging.DEBUG] logging.basicConfig(level=loglevels[args.verbose]) - #-- try to run the analysis with listed parameters + # try to run the analysis with listed parameters try: info(args) - #-- run calc_degree_one algorithm with parameters + # run calc_degree_one algorithm with parameters output_files,n_iter = calc_degree_one( args.directory, args.center, @@ -1495,17 +1495,17 @@ def main(): COPY=args.copy, MODE=args.mode) except Exception as e: - #-- if there has been an error exception - #-- print the type, value, and stack trace of the - #-- current exception being handled + # if there has been an error exception + # print the type, value, and stack trace of the + # current exception being handled logging.critical(f'process id {os.getpid():d} failed') logging.error(traceback.format_exc()) - if args.log:#-- write failed job completion log file + if args.log:# write failed job completion log file output_error_log_file(args) else: - if args.log:#-- write successful job completion log file + if args.log:# write successful job completion log file output_log_file(args,output_files,n_iter) -#-- run main program +# run main program if __name__ == '__main__': main() diff --git a/scripts/calc_harmonic_resolution.py b/scripts/calc_harmonic_resolution.py index b227ff7f..045b209a 100755 --- a/scripts/calc_harmonic_resolution.py +++ b/scripts/calc_harmonic_resolution.py @@ -40,8 +40,8 @@ import argparse import numpy as np -#-- PURPOSE: Calculates minimum spatial resolution that can be resolved -#-- from spherical harmonics of a maximum degree +# PURPOSE: Calculates minimum spatial resolution that can be resolved +# from spherical harmonics of a maximum degree def calc_harmonic_resolution(LMAX, RADIUS=6371.0008, SPH_CAP=False): """ Calculates minimum spatial resolution that can be resolved from diff --git a/scripts/calc_mascon.py b/scripts/calc_mascon.py index a86cb880..36fc2d2f 100644 --- a/scripts/calc_mascon.py +++ b/scripts/calc_mascon.py @@ -253,7 +253,7 @@ from gravity_toolkit.ocean_stokes import ocean_stokes from gravity_toolkit.tssmooth import tssmooth -#-- PURPOSE: keep track of threads +# PURPOSE: keep track of threads def info(args): logging.info(os.path.basename(sys.argv[0])) logging.info(args) @@ -262,8 +262,8 @@ def info(args): logging.info(f'parent process: {os.getppid():d}') logging.info(f'process id: {os.getpid():d}') -#-- PURPOSE: calculate a regional time-series through a least -#-- squares mascon process +# PURPOSE: calculate a regional time-series through a least +# squares mascon process def calc_mascon(base_dir, PROC, DREL, DSET, LMAX, RAD, START=None, END=None, @@ -302,435 +302,435 @@ def calc_mascon(base_dir, PROC, DREL, DSET, LMAX, RAD, OUTPUT_DIRECTORY=None, MODE=0o775): - #-- recursively create output Directory if not currently 
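# Aside: a minimal sketch of the --mode pattern above: an octal string is
# parsed into the integer later handed to os.makedirs and os.chmod; the
# parser here is standalone, not the script's full argument parser
import argparse
parser = argparse.ArgumentParser()
parser.add_argument('--mode','-M',
    type=lambda x: int(x, base=8), default=0o775)
args = parser.parse_args(['--mode', '0775'])
assert args.mode == 0o775   # 509 in decimal
# e.g. os.makedirs('example_dir', mode=args.mode, exist_ok=True)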
existing + # recursively create output Directory if not currently existing if (not os.access(OUTPUT_DIRECTORY, os.F_OK)): os.makedirs(OUTPUT_DIRECTORY, mode=MODE, exist_ok=True) - #-- list object of output files for file logs (full path) + # list object of output files for file logs (full path) output_files = [] - #-- file information + # file information suffix = dict(ascii='txt', netCDF4='nc', HDF5='H5') - #-- file parser for reading index files - #-- removes commented lines (can comment out files in the index) - #-- removes empty lines (if there are extra empty lines) + # file parser for reading index files + # removes commented lines (can comment out files in the index) + # removes empty lines (if there are extra empty lines) parser = re.compile(r'^(?!\#|\%|$)', re.VERBOSE) - #-- read arrays of kl, hl, and ll Love Numbers + # read arrays of kl, hl, and ll Love Numbers hl,kl,ll = load_love_numbers(LMAX, LOVE_NUMBERS=LOVE_NUMBERS, REFERENCE=REFERENCE) - #-- Earth Parameters + # Earth Parameters factors = units(lmax=LMAX).harmonic(hl,kl,ll) - #-- Average Density of the Earth [g/cm^3] + # Average Density of the Earth [g/cm^3] rho_e = factors.rho_e - #-- Average Radius of the Earth [cm] + # Average Radius of the Earth [cm] rad_e = factors.rad_e - #-- for datasets not GSM: will add a label for the dataset + # for datasets not GSM: will add a label for the dataset dset_str = '' if (DSET == 'GSM') else f'_{DSET}' - #-- atmospheric ECMWF "jump" flag (if ATM) + # atmospheric ECMWF "jump" flag (if ATM) atm_str = '_wATM' if ATM else '' - #-- output string for both LMAX==MMAX and LMAX != MMAX cases + # output string for both LMAX==MMAX and LMAX != MMAX cases MMAX = np.copy(LMAX) if not MMAX else MMAX order_str = f'M{MMAX:d}' if (MMAX != LMAX) else '' - #-- Calculating the Gaussian smoothing for radius RAD + # Calculating the Gaussian smoothing for radius RAD if (RAD != 0): wt = 2.0*np.pi*gauss_weights(RAD,LMAX) gw_str = f'_r{RAD:0.0f}km' else: - #-- else = 1 + # else = 1 wt = np.ones((LMAX+1)) gw_str = '' - #-- Read Ocean function and convert to Ylms for redistribution + # Read Ocean function and convert to Ylms for redistribution if (REDISTRIBUTE_MASCONS | REDISTRIBUTE_REMOVED): - #-- read Land-Sea Mask and convert to spherical harmonics + # read Land-Sea Mask and convert to spherical harmonics ocean_Ylms = ocean_stokes(LANDMASK, LMAX, MMAX=MMAX, LOVE=(hl,kl,ll)) ocean_str = '_OCN' else: - #-- not distributing uniformly over ocean + # not distributing uniformly over ocean ocean_str = '' - #-- input GRACE/GRACE-FO spherical harmonic datafiles for date range - #-- replacing low-degree harmonics with SLR values if specified - #-- include degree 1 (geocenter) harmonics if specified - #-- correcting for Pole-Tide and Atmospheric Jumps if specified + # input GRACE/GRACE-FO spherical harmonic datafiles for date range + # replacing low-degree harmonics with SLR values if specified + # include degree 1 (geocenter) harmonics if specified + # correcting for Pole-Tide and Atmospheric Jumps if specified Ylms = grace_input_months(base_dir, PROC, DREL, DSET, LMAX, START, END, MISSING, SLR_C20, DEG1, MMAX=MMAX, SLR_21=SLR_21, SLR_22=SLR_22, SLR_C30=SLR_C30, SLR_C40=SLR_C40, SLR_C50=SLR_C50, DEG1_FILE=DEG1_FILE, MODEL_DEG1=MODEL_DEG1, ATM=ATM, POLE_TIDE=POLE_TIDE) - #-- create harmonics object from GRACE/GRACE-FO data + # create harmonics object from GRACE/GRACE-FO data GRACE_Ylms = harmonics().from_dict(Ylms) - #-- use a mean file for the static field to remove + # use a mean file for the static field to remove 
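# Aside: a minimal demonstration of the index-file parser compiled above;
# the negative lookahead keeps lines that are not comments (# or %) and not
# empty, so individual files can be commented out of an index
import re
parser = re.compile(r'^(?!\#|\%|$)', re.VERBOSE)
lines = ['# commented out', '% also a comment', '', 'mascon_0001.nc']
valid = [l for l in lines if parser.match(l)]
assert valid == ['mascon_0001.nc']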
if MEAN_FILE: - #-- read data form for input mean file (ascii, netCDF4, HDF5, gfc) + # read data form for input mean file (ascii, netCDF4, HDF5, gfc) mean_Ylms = harmonics().from_file(MEAN_FILE,format=MEANFORM,date=False) - #-- remove the input mean + # remove the input mean GRACE_Ylms.subtract(mean_Ylms) else: GRACE_Ylms.mean(apply=True) - #-- filter GRACE/GRACE-FO coefficients + # filter GRACE/GRACE-FO coefficients if DESTRIPE: - #-- destriping GRACE/GRACE-FO coefficients + # destriping GRACE/GRACE-FO coefficients ds_str = '_FL' GRACE_Ylms = GRACE_Ylms.destripe() else: - #-- using standard GRACE/GRACE-FO harmonics + # using standard GRACE/GRACE-FO harmonics ds_str = '' - #-- full path to directory for specific GRACE/GRACE-FO product + # full path to directory for specific GRACE/GRACE-FO product GRACE_Ylms.directory = Ylms['directory'] - #-- date information of GRACE/GRACE-FO coefficients + # date information of GRACE/GRACE-FO coefficients n_files = len(GRACE_Ylms.time) - #-- input GIA spherical harmonic datafiles + # input GIA spherical harmonic datafiles GIA_Ylms_rate = read_GIA_model(GIA_FILE,GIA=GIA,LMAX=LMAX,MMAX=MMAX) gia_str = '_{0}'.format(GIA_Ylms_rate['title']) if GIA else '' - #-- calculate the monthly mass change from GIA + # calculate the monthly mass change from GIA GIA_Ylms = GRACE_Ylms.zeros_like() GIA_Ylms.time[:] = np.copy(GRACE_Ylms.time) GIA_Ylms.month[:] = np.copy(GRACE_Ylms.month) - #-- monthly GIA calculated by gia_rate*time elapsed - #-- finding change in GIA each month + # monthly GIA calculated by gia_rate*time elapsed + # finding change in GIA each month for t in range(n_files): GIA_Ylms.clm[:,:,t] = GIA_Ylms_rate['clm']*(GIA_Ylms.time[t]-2003.3) GIA_Ylms.slm[:,:,t] = GIA_Ylms_rate['slm']*(GIA_Ylms.time[t]-2003.3) - #-- input spherical harmonic datafiles to be removed from the GRACE data - #-- Remove sets of Ylms from the GRACE data before returning + # input spherical harmonic datafiles to be removed from the GRACE data + # Remove sets of Ylms from the GRACE data before returning remove_Ylms = GRACE_Ylms.zeros_like() remove_Ylms.time[:] = np.copy(GRACE_Ylms.time) remove_Ylms.month[:] = np.copy(GRACE_Ylms.month) if REMOVE_FILES: - #-- extend list if a single format was entered for all files + # extend list if a single format was entered for all files if len(REMOVE_FORMAT) < len(REMOVE_FILES): REMOVE_FORMAT = REMOVE_FORMAT*len(REMOVE_FILES) - #-- for each file to be removed + # for each file to be removed for REMOVE_FILE,REMOVEFORM in zip(REMOVE_FILES,REMOVE_FORMAT): if REMOVEFORM in ('ascii','netCDF4','HDF5'): - #-- ascii (.txt) - #-- netCDF4 (.nc) - #-- HDF5 (.H5) + # ascii (.txt) + # netCDF4 (.nc) + # HDF5 (.H5) Ylms = harmonics().from_file(REMOVE_FILE, format=REMOVEFORM) elif REMOVEFORM in ('index-ascii','index-netCDF4','index-HDF5'): - #-- read from index file + # read from index file _,removeform = REMOVEFORM.split('-') - #-- index containing files in data format + # index containing files in data format Ylms = harmonics().from_index(REMOVE_FILE, format=removeform) - #-- reduce to GRACE/GRACE-FO months and truncate to degree and order + # reduce to GRACE/GRACE-FO months and truncate to degree and order Ylms = Ylms.subset(GRACE_Ylms.month).truncate(lmax=LMAX,mmax=MMAX) - #-- distribute removed Ylms uniformly over the ocean + # distribute removed Ylms uniformly over the ocean if REDISTRIBUTE_REMOVED: - #-- calculate ratio between total removed mass and - #-- a uniformly distributed cm of water over the ocean + # calculate ratio between total removed mass and 
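# Aside: a minimal sketch of the linear GIA removal above: the monthly GIA
# field is the rate scaled by the time elapsed since the 2003.3 reference
# epoch; the rate value here is a made-up stand-in for GIA_Ylms_rate
import numpy as np
time_dec = np.array([2003.3, 2004.3, 2010.8])  # decimal years
gia_rate_C20 = -1.1e-11                        # hypothetical rate per year
gia_C20 = gia_rate_C20*(time_dec - 2003.3)
assert gia_C20[0] == 0.0   # zero at the reference epoch, growing after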
+ # a uniformly distributed cm of water over the ocean ratio = Ylms.clm[0,0,:]/ocean_Ylms.clm[0,0] - #-- for each spherical harmonic - for m in range(0,MMAX+1):#-- MMAX+1 to include MMAX - for l in range(m,LMAX+1):#-- LMAX+1 to include LMAX - #-- remove the ratio*ocean Ylms from Ylms - #-- note: x -= y is equivalent to x = x - y + # for each spherical harmonic + for m in range(0,MMAX+1):# MMAX+1 to include MMAX + for l in range(m,LMAX+1):# LMAX+1 to include LMAX + # remove the ratio*ocean Ylms from Ylms + # note: x -= y is equivalent to x = x - y Ylms.clm[l,m,:] -= ratio*ocean_Ylms.clm[l,m] Ylms.slm[l,m,:] -= ratio*ocean_Ylms.slm[l,m] - #-- filter removed coefficients + # filter removed coefficients if DESTRIPE: Ylms = Ylms.destripe() - #-- add data for month t and INDEX_FILE to the total - #-- remove_clm and remove_slm matrices - #-- redistributing the mass over the ocean if specified + # add data for month t and INDEX_FILE to the total + # remove_clm and remove_slm matrices + # redistributing the mass over the ocean if specified remove_Ylms.add(Ylms) - #-- input reconstructed spherical harmonic datafiles + # input reconstructed spherical harmonic datafiles construct_Ylms = GRACE_Ylms.zeros_like() construct_Ylms.time[:] = np.copy(GRACE_Ylms.time) construct_Ylms.month[:] = np.copy(GRACE_Ylms.month) if RECONSTRUCT: - #-- input index for reconstructed spherical harmonic datafiles + # input index for reconstructed spherical harmonic datafiles with open(RECONSTRUCT_FILE, mode='r', encoding='utf8') as f: file_list = [l for l in f.read().splitlines() if parser.match(l)] - #-- for each valid file in the index (iterate over mascons) + # for each valid file in the index (iterate over mascons) for reconstruct_file in file_list: - #-- read reconstructed spherical harmonics + # read reconstructed spherical harmonics Ylms = harmonics().from_file(reconstruct_file,format=DATAFORM) - #-- truncate clm and slm matrices to LMAX/MMAX - #-- add harmonics object to total + # truncate clm and slm matrices to LMAX/MMAX + # add harmonics object to total construct_Ylms.add(Ylms.truncate(lmax=LMAX, mmax=MMAX)) - #-- filter reconstructed coefficients + # filter reconstructed coefficients if DESTRIPE: construct_Ylms = construct_Ylms.destripe() - #-- set flag for removing reconstructed coefficients + # set flag for removing reconstructed coefficients construct_str = '_LEAKAGE' else: - #-- set flag for not removing the reconstructed coefficients + # set flag for not removing the reconstructed coefficients construct_str = '' - #-- input mascon spherical harmonic datafiles + # input mascon spherical harmonic datafiles with open(MASCON_FILE, mode='r', encoding='utf8') as f: mascon_files = [l for l in f.read().splitlines() if parser.match(l)] - #-- number of mascons + # number of mascons n_mas = len(mascon_files) - #-- spatial area of the mascon + # spatial area of the mascon total_area = np.zeros((n_mas)) - #-- name of each mascon + # name of each mascon mascon_name = [] - #-- for each valid file in the index (iterate over mascons) + # for each valid file in the index (iterate over mascons) mascon_list = [] for k,fi in enumerate(mascon_files): - #-- read mascon spherical harmonics + # read mascon spherical harmonics Ylms = harmonics().from_file(os.path.expanduser(fi), format=MASCON_FORMAT, date=False) - #-- Calculating the total mass of each mascon (1 cmwe uniform) + # Calculating the total mass of each mascon (1 cmwe uniform) total_area[k] = 4.0*np.pi*(rad_e**3)*rho_e*Ylms.clm[0,0]/3.0 - #-- distribute mascon mass 
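# Aside: a minimal sketch of the ocean redistribution loop above, on made-up
# 3x3 coefficient arrays; after removing ratio*ocean from every (l,m), the
# net mass term C00 is zero, i.e. the removed mass was spread over the ocean
import numpy as np
LMAX = MMAX = 2
clm = np.arange(1.0, 10.0).reshape(3,3)   # clm[0,0] = 1.0 (total mass)
slm = np.zeros((3,3))
ocean_clm = np.full((3,3), 0.5)           # ocean_clm[0,0] = 0.5
ocean_slm = np.zeros((3,3))
ratio = clm[0,0]/ocean_clm[0,0]
for m in range(0, MMAX+1):                # MMAX+1 to include MMAX
    for l in range(m, LMAX+1):            # LMAX+1 to include LMAX
        clm[l,m] -= ratio*ocean_clm[l,m]
        slm[l,m] -= ratio*ocean_slm[l,m]
assert np.isclose(clm[0,0], 0.0)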
uniformly over the ocean + # distribute mascon mass uniformly over the ocean if REDISTRIBUTE_MASCONS: - #-- calculate ratio between total mascon mass and - #-- a uniformly distributed cm of water over the ocean + # calculate ratio between total mascon mass and + # a uniformly distributed cm of water over the ocean ratio = Ylms.clm[0,0]/ocean_Ylms.clm[0,0] - #-- for each spherical harmonic - for m in range(0,MMAX+1):#-- MMAX+1 to include MMAX - for l in range(m,LMAX+1):#-- LMAX+1 to include LMAX - #-- remove ratio*ocean Ylms from mascon Ylms - #-- note: x -= y is equivalent to x = x - y + # for each spherical harmonic + for m in range(0,MMAX+1):# MMAX+1 to include MMAX + for l in range(m,LMAX+1):# LMAX+1 to include LMAX + # remove ratio*ocean Ylms from mascon Ylms + # note: x -= y is equivalent to x = x - y Ylms.clm[l,m] -= ratio*ocean_Ylms.clm[l,m] Ylms.slm[l,m] -= ratio*ocean_Ylms.slm[l,m] - #-- truncate mascon spherical harmonics to d/o LMAX/MMAX and add to list + # truncate mascon spherical harmonics to d/o LMAX/MMAX and add to list mascon_list.append(Ylms.truncate(lmax=LMAX, mmax=MMAX)) - #-- mascon base is the file without directory or suffix + # mascon base is the file without directory or suffix mascon_base = os.path.basename(mascon_files[k]) mascon_base = os.path.splitext(mascon_base)[0] - #-- if lower case, will capitalize + # if lower case, will capitalize mascon_base = mascon_base.upper() - #-- if mascon name contains degree and order info, remove + # if mascon name contains degree and order info, remove mascon_name.append(mascon_base.replace(f'_L{LMAX:d}', '')) - #-- create single harmonics object from list + # create single harmonics object from list mascon_Ylms = harmonics().from_list(mascon_list, date=False) - #-- calculating GRACE/GRACE-FO error (Wahr et al. 2006) - #-- output GRACE error file (for both LMAX==MMAX and LMAX != MMAX cases) + # calculating GRACE/GRACE-FO error (Wahr et al. 
2006) + # output GRACE error file (for both LMAX==MMAX and LMAX != MMAX cases) args = (PROC,DREL,DSET,LMAX,order_str,ds_str,atm_str,GRACE_Ylms.month[0], GRACE_Ylms.month[-1], suffix[DATAFORM]) delta_format = '{0}_{1}_{2}_DELTA_CLM_L{3:d}{4}{5}{6}_{7:03d}-{8:03d}.{9}' DELTA_FILE = os.path.join(GRACE_Ylms.directory,delta_format.format(*args)) - #-- check full path of the GRACE directory for delta file - #-- if file was previously calculated: will read file - #-- else: will calculate the GRACE/GRACE-FO error + # check full path of the GRACE directory for delta file + # if file was previously calculated: will read file + # else: will calculate the GRACE/GRACE-FO error if not os.access(DELTA_FILE, os.F_OK): - #-- add output delta file to list object + # add output delta file to list object output_files.append(DELTA_FILE) - #-- Delta coefficients of GRACE time series (Error components) + # Delta coefficients of GRACE time series (Error components) delta_Ylms = harmonics(lmax=LMAX,mmax=MMAX) delta_Ylms.clm = np.zeros((LMAX+1,MMAX+1)) delta_Ylms.slm = np.zeros((LMAX+1,MMAX+1)) - #-- Smoothing Half-Width (CNES is a 10-day solution) - #-- All other solutions are monthly solutions (HFWTH for annual = 6) + # Smoothing Half-Width (CNES is a 10-day solution) + # All other solutions are monthly solutions (HFWTH for annual = 6) if ((PROC == 'CNES') and (DREL in ('RL01','RL02'))): HFWTH = 19 else: HFWTH = 6 - #-- Equal to the noise of the smoothed time-series - #-- for each spherical harmonic order - for m in range(0,MMAX+1):#-- MMAX+1 to include MMAX - #-- for each spherical harmonic degree - for l in range(m,LMAX+1):#-- LMAX+1 to include LMAX - #-- Delta coefficients of GRACE time series + # Equal to the noise of the smoothed time-series + # for each spherical harmonic order + for m in range(0,MMAX+1):# MMAX+1 to include MMAX + # for each spherical harmonic degree + for l in range(m,LMAX+1):# LMAX+1 to include LMAX + # Delta coefficients of GRACE time series for cs,csharm in enumerate(['clm','slm']): - #-- calculate GRACE Error (Noise of smoothed time-series) - #-- With Annual and Semi-Annual Terms + # calculate GRACE Error (Noise of smoothed time-series) + # With Annual and Semi-Annual Terms val1 = getattr(GRACE_Ylms, csharm) smth = tssmooth(GRACE_Ylms.time, val1[l,m,:], HFWTH=HFWTH) - #-- number of smoothed points + # number of smoothed points nsmth = len(smth['data']) tsmth = np.mean(smth['time']) - #-- GRACE delta Ylms - #-- variance of data-(smoothed+annual+semi) + # GRACE delta Ylms + # variance of data-(smoothed+annual+semi) val2 = getattr(delta_Ylms, csharm) val2[l,m] = np.sqrt(np.sum(smth['noise']**2)/nsmth) - #-- save GRACE/GRACE-FO delta harmonics to file + # save GRACE/GRACE-FO delta harmonics to file delta_Ylms.time = np.copy(tsmth) delta_Ylms.month = np.int64(nsmth) delta_Ylms.to_file(DELTA_FILE,format=DATAFORM) else: - #-- read GRACE/GRACE-FO delta harmonics from file + # read GRACE/GRACE-FO delta harmonics from file delta_Ylms = harmonics().from_file(DELTA_FILE,format=DATAFORM) - #-- truncate GRACE/GRACE-FO delta clm and slm to d/o LMAX/MMAX + # truncate GRACE/GRACE-FO delta clm and slm to d/o LMAX/MMAX delta_Ylms = delta_Ylms.truncate(lmax=LMAX, mmax=MMAX) tsmth = np.squeeze(delta_Ylms.time) nsmth = np.int64(delta_Ylms.month) - #-- Calculating the number of cos and sin harmonics between LMIN and LMAX - #-- taking into account MMAX (if MMAX == LMAX then LMAX-MMAX=0) + # Calculating the number of cos and sin harmonics between LMIN and LMAX + # taking into account MMAX (if MMAX == LMAX then 
LMAX-MMAX=0) n_harm=np.int64(LMAX**2 - LMIN**2 + 2*LMAX + 1 - (LMAX-MMAX)**2 - (LMAX-MMAX)) - #-- Initialing harmonics for least squares fitting - #-- mascon kernel + # Initializing harmonics for least squares fitting + # mascon kernel M_lm = np.zeros((n_harm,n_mas)) - #-- mascon kernel converted to output unit + # mascon kernel converted to output unit MA_lm = np.zeros((n_harm,n_mas)) - #-- corrected clm and slm + # corrected clm and slm Y_lm = np.zeros((n_harm,n_files)) - #-- sensitivity kernel + # sensitivity kernel A_lm = np.zeros((n_harm,n_mas)) - #-- Satellite error harmonics + # Satellite error harmonics delta_lm = np.zeros((n_harm)) - #-- Initializing output Mascon time-series + # Initializing output Mascon time-series mascon = np.zeros((n_mas,n_files)) - #-- Mascon satellite error component + # Mascon satellite error component M_delta = np.zeros((n_mas)) - #-- Initializing conversion factors - #-- factor for converting to coefficients of mass + # Initializing conversion factors + # factor for converting to coefficients of mass fact = np.zeros((n_harm)) - #-- smoothing factor + # smoothing factor wt_lm = np.zeros((n_harm)) - #-- ii is a counter variable for building the mascon column array + # ii is a counter variable for building the mascon column array ii = 0 - #-- Creating column array of clm/slm coefficients - #-- Order is [C00...C6060,S11...S6060] - #-- Calculating factor to convert geoid spherical harmonic coefficients - #-- to coefficients of mass (Wahr, 1998) + # Creating column array of clm/slm coefficients + # Order is [C00...C6060,S11...S6060] + # Calculating factor to convert geoid spherical harmonic coefficients + # to coefficients of mass (Wahr, 1998) coeff = rho_e*rad_e/3.0 - #-- Switching between Cosine and Sine Stokes + # Switching between Cosine and Sine Stokes for cs,csharm in enumerate(['clm','slm']): - #-- copy cosine and sin harmonics + # copy cosine and sin harmonics mascon_harm = getattr(mascon_Ylms, csharm) grace_harm = getattr(GRACE_Ylms, csharm) GIA_harm = getattr(GIA_Ylms, csharm) remove_harm = getattr(remove_Ylms, csharm) construct_harm = getattr(construct_Ylms, csharm) delta_harm = getattr(delta_Ylms, csharm) - #-- for each spherical harmonic degree - #-- +1 to include LMAX + # for each spherical harmonic degree + # +1 to include LMAX for l in range(LMIN,LMAX+1): - #-- for each spherical harmonic order - #-- Sine Stokes for (m=0) = 0 + # for each spherical harmonic order + # Sine Stokes for (m=0) = 0 mm = np.min([MMAX,l]) - #-- +1 to include l or MMAX (whichever is smaller) + # +1 to include l or MMAX (whichever is smaller) for m in range(cs,mm+1): - #-- Mascon Spherical Harmonics + # Mascon Spherical Harmonics M_lm[ii,:] = np.copy(mascon_harm[l,m,:]) - #-- GRACE Spherical Harmonics - #-- Correcting GRACE Harmonics for GIA and Removed Terms + # GRACE Spherical Harmonics + # Correcting GRACE Harmonics for GIA and Removed Terms Y_lm[ii,:] = grace_harm[l,m,:] - GIA_harm[l,m,:] - \ remove_harm[l,m,:] - construct_harm[l,m,:] - #-- GRACE delta spherical harmonics + # GRACE delta spherical harmonics delta_lm[ii] = np.copy(delta_harm[l,m]) - #-- degree dependent factor to convert to mass + # degree dependent factor to convert to mass fact[ii] = (2.0*l + 1.0)/(1.0 + kl[l]) - #-- degree dependent smoothing + # degree dependent smoothing wt_lm[ii] = np.copy(wt[l]) - #-- add 1 to counter + # add 1 to counter ii += 1 - #-- Converting mascon coefficients to fit method + # Converting mascon coefficients to fit method if (FIT_METHOD == 1): - #-- Fitting Sensitivity
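# Aside: a quick check of the harmonic-count formula above by direct
# enumeration, mirroring the cosine/sine column-stacking loops that follow
import numpy as np
def count_harmonics(LMIN, LMAX, MMAX):
    count = 0
    for cs in (0, 1):   # 0: cosine (clm), 1: sine (slm, no m=0 terms)
        for l in range(LMIN, LMAX+1):
            count += len(range(cs, min(MMAX, l)+1))
    return count
for LMIN, LMAX, MMAX in [(1, 60, 60), (1, 60, 30), (2, 10, 10)]:
    n_harm = LMAX**2 - LMIN**2 + 2*LMAX + 1 - (LMAX-MMAX)**2 - (LMAX-MMAX)
    assert count_harmonics(LMIN, LMAX, MMAX) == n_harm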
Kernel as mass coefficients - #-- converting M_lm to mass coefficients of the kernel + # Fitting Sensitivity Kernel as mass coefficients + # converting M_lm to mass coefficients of the kernel for i in range(n_harm): MA_lm[i,:] = M_lm[i,:]*wt_lm[i]*fact[i] fit_factor = wt_lm*fact else: - #-- Fitting Sensitivity Kernel as geoid coefficients + # Fitting Sensitivity Kernel as geoid coefficients for i in range(n_harm): MA_lm[i,:] = M_lm[i,:]*wt_lm[i] fit_factor = wt_lm*np.ones((n_harm)) - #-- Fitting the sensitivity kernel from the input kernel + # Fitting the sensitivity kernel from the input kernel for i in range(n_harm): - #-- setting kern_i equal to 1 for d/o + # setting kern_i equal to 1 for d/o kern_i = np.zeros((n_harm)) - #-- converting to mass coefficients if specified + # converting to mass coefficients if specified kern_i[i] = 1.0*fit_factor[i] - #-- spherical harmonics solution for the - #-- mascon sensitivity kernels - #-- Least Squares Solutions: Inv(X'.X).(X'.Y) + # spherical harmonics solution for the + # mascon sensitivity kernels + # Least Squares Solutions: Inv(X'.X).(X'.Y) kern_lm = np.linalg.lstsq(MA_lm,kern_i,rcond=-1)[0] for k in range(n_mas): A_lm[i,k] = kern_lm[k]*total_area[k] - #-- for each mascon + # for each mascon for k in range(n_mas): - #-- Multiply the Satellite error (noise of a smoothed time-series - #-- with annual and semi-annual components) by the sensitivity kernel - #-- Converting to Gigatonnes + # Multiply the Satellite error (noise of a smoothed time-series + # with annual and semi-annual components) by the sensitivity kernel + # Converting to Gigatonnes M_delta[k] = np.sqrt(np.sum((delta_lm*A_lm[:,k])**2))/1e15 - #-- output filename format (for both LMAX==MMAX and LMAX != MMAX cases): - #-- mascon name, GRACE dataset, GIA model, LMAX, (MMAX,) - #-- Gaussian smoothing, filter flag, remove reconstructed fields flag - #-- output GRACE error file + # output filename format (for both LMAX==MMAX and LMAX != MMAX cases): + # mascon name, GRACE dataset, GIA model, LMAX, (MMAX,) + # Gaussian smoothing, filter flag, remove reconstructed fields flag + # output GRACE error file file_out='{0}{1}{2}{3}{4}_L{5:d}{6}{7}{8}{9}.txt'.format(mascon_name[k], dset_str, gia_str.upper(), atm_str, ocean_str, LMAX, order_str, gw_str, ds_str, construct_str) - #-- Output mascon datafiles - #-- Will output each mascon time series - #-- month, date, mascon mass [Gt], satellite error [Gt], mascon area [km^2] - #-- open output mascon time-series file + # Output mascon datafiles + # Will output each mascon time series + # month, date, mascon mass [Gt], satellite error [Gt], mascon area [km^2] + # open output mascon time-series file fid = open(os.path.join(OUTPUT_DIRECTORY,file_out),'w') - #-- for each date + # for each date formatting_string = '{0:03d} {1:12.4f} {2:16.10f} {3:16.10f} {4:16.5f}' for t,mon in enumerate(GRACE_Ylms.month): - #-- Summing over all spherical harmonics for mascon k, and time t - #-- multiplies by the degree dependent factor to convert - #-- the harmonics into mass coefficients - #-- Converting mascon mass time-series from g to gigatonnes + # Summing over all spherical harmonics for mascon k, and time t + # multiplies by the degree dependent factor to convert + # the harmonics into mass coefficients + # Converting mascon mass time-series from g to gigatonnes mascon[k,t] = np.sum(A_lm[:,k]*Y_lm[:,t])/1e15 - #-- output to file + # output to file args=(mon,GRACE_Ylms.time[t],mascon[k,t],M_delta[k],total_area[k]/1e10) print(formatting_string.format(*args),
file=fid) - #-- close the output file + # close the output file fid.close() - #-- change the permissions mode + # change the permissions mode os.chmod(os.path.join(OUTPUT_DIRECTORY,file_out),MODE) - #-- add output files to list object + # add output files to list object output_files.append(os.path.join(OUTPUT_DIRECTORY,file_out)) - #-- return the list of output files + # return the list of output files return output_files -#-- PURPOSE: print a file log for the GRACE mascon analysis +# PURPOSE: print a file log for the GRACE mascon analysis def output_log_file(arguments,output_files): - #-- format: calc_mascon_run_2002-04-01_PID-70335.log + # format: calc_mascon_run_2002-04-01_PID-70335.log args = (time.strftime('%Y-%m-%d',time.localtime()), os.getpid()) LOGFILE = 'calc_mascon_run_{0}_PID-{1:d}.log'.format(*args) - #-- create a unique log and open the log file + # create a unique log and open the log file DIRECTORY = os.path.expanduser(arguments.output_directory) fid = utilities.create_unique_file(os.path.join(DIRECTORY,LOGFILE)) logging.basicConfig(stream=fid, level=logging.INFO) - #-- print argument values sorted alphabetically + # print argument values sorted alphabetically logging.info('ARGUMENTS:') for arg, value in sorted(vars(arguments).items()): logging.info('{0}: {1}'.format(arg, value)) - #-- print output files + # print output files logging.info('\n\nOUTPUT FILES:') for f in output_files: logging.info('{0}'.format(f)) - #-- close the log file + # close the log file fid.close() -#-- PURPOSE: print a error file log for the GRACE mascon analysis +# PURPOSE: print an error file log for the GRACE mascon analysis def output_error_log_file(arguments): - #-- format: calc_mascon_failed_run_2002-04-01_PID-70335.log + # format: calc_mascon_failed_run_2002-04-01_PID-70335.log args = (time.strftime('%Y-%m-%d',time.localtime()), os.getpid()) LOGFILE = 'calc_mascon_failed_run_{0}_PID-{1:d}.log'.format(*args) - #-- create a unique log and open the log file + # create a unique log and open the log file DIRECTORY = os.path.expanduser(arguments.output_directory) fid = utilities.create_unique_file(os.path.join(DIRECTORY,LOGFILE)) logging.basicConfig(stream=fid, level=logging.INFO) - #-- print argument values sorted alphabetically + # print argument values sorted alphabetically logging.info('ARGUMENTS:') for arg, value in sorted(vars(arguments).items()): logging.info('{0}: {1}'.format(arg, value)) - #-- print traceback error + # print traceback error logging.info('\n\nTRACEBACK ERROR:') traceback.print_exc(file=fid) - #-- close the log file + # close the log file fid.close() -#-- PURPOSE: create argument parser +# PURPOSE: create argument parser def arguments(): parser = argparse.ArgumentParser( description="""Calculates a time-series of regional mass anomalies @@ -740,8 +740,8 @@ def arguments(): fromfile_prefix_chars="@" ) parser.convert_arg_line_to_args = utilities.convert_arg_line_to_args - #-- command line parameters - #-- working data directory + # command line parameters + # working data directory parser.add_argument('--directory','-D', type=lambda p: os.path.abspath(os.path.expanduser(p)), default=os.getcwd(), @@ -750,30 +750,30 @@ type=lambda p: os.path.abspath(os.path.expanduser(p)), default=os.getcwd(), help='Output directory for mascon files') - #-- Data processing center or satellite mission + # Data processing center or satellite mission parser.add_argument('--center','-c', metavar='PROC', type=str, required=True, help='GRACE/GRACE-FO Processing Center') - #--
GRACE/GRACE-FO data release + # GRACE/GRACE-FO data release parser.add_argument('--release','-r', metavar='DREL', type=str, default='RL06', help='GRACE/GRACE-FO Data Release') - #-- GRACE/GRACE-FO Level-2 data product + # GRACE/GRACE-FO Level-2 data product parser.add_argument('--product','-p', metavar='DSET', type=str, default='GSM', help='GRACE/GRACE-FO Level-2 data product') - #-- minimum spherical harmonic degree + # minimum spherical harmonic degree parser.add_argument('--lmin', type=int, default=1, help='Minimum spherical harmonic degree') - #-- maximum spherical harmonic degree and order + # maximum spherical harmonic degree and order parser.add_argument('--lmax','-l', type=int, default=60, help='Maximum spherical harmonic degree') parser.add_argument('--mmax','-m', type=int, default=None, help='Maximum spherical harmonic order') - #-- start and end GRACE/GRACE-FO months + # start and end GRACE/GRACE-FO months parser.add_argument('--start','-S', type=int, default=4, help='Starting GRACE/GRACE-FO month') @@ -785,27 +785,27 @@ parser.add_argument('--missing','-N', metavar='MISSING', type=int, nargs='+', default=MISSING, help='Missing GRACE/GRACE-FO months') - #-- different treatments of the load Love numbers - #-- 0: Han and Wahr (1995) values from PREM - #-- 1: Gegout (2005) values from PREM - #-- 2: Wang et al. (2012) values from PREM + # different treatments of the load Love numbers + # 0: Han and Wahr (1995) values from PREM + # 1: Gegout (2005) values from PREM + # 2: Wang et al. (2012) values from PREM parser.add_argument('--love','-n', type=int, default=0, choices=[0,1,2], help='Treatment of the Load Love numbers') - #-- option for setting reference frame for gravitational load love number - #-- reference frame options (CF, CM, CE) + # option for setting reference frame for gravitational load love number + # reference frame options (CF, CM, CE) parser.add_argument('--reference', type=str.upper, default='CF', choices=['CF','CM','CE'], help='Reference frame for load Love numbers') - #-- Gaussian smoothing radius (km) + # Gaussian smoothing radius (km) parser.add_argument('--radius','-R', type=float, default=0, help='Gaussian smoothing radius (km)') - #-- Use a decorrelation (destriping) filter + # Use a decorrelation (destriping) filter parser.add_argument('--destripe','-d', default=False, action='store_true', help='Use decorrelation (destriping) filter') - #-- GIA model type list + # GIA model type list models = {} models['IJ05-R2'] = 'Ivins R2 GIA Models' models['W12a'] = 'Whitehouse GIA Models' @@ -819,33 +819,33 @@ models['ascii'] = 'reformatted GIA in ascii format' models['netCDF4'] = 'reformatted GIA in netCDF4 format' models['HDF5'] = 'reformatted GIA in HDF5 format' - #-- GIA model type + # GIA model type parser.add_argument('--gia','-G', type=str, metavar='GIA', choices=models.keys(), help='GIA model type to read') - #-- full path to GIA file + # full path to GIA file parser.add_argument('--gia-file', type=lambda p: os.path.abspath(os.path.expanduser(p)), help='GIA file to read') - #-- use atmospheric jump corrections from Fagiolini et al. (2015) + # use atmospheric jump corrections from Fagiolini et al. (2015) parser.add_argument('--atm-correction', default=False, action='store_true', help='Apply atmospheric jump correction coefficients') - #-- correct for pole tide drift follow Wahr et al. (2015) + # correct for pole tide drift following Wahr et al.
(2015) parser.add_argument('--pole-tide', default=False, action='store_true', help='Correct for pole tide drift') - #-- Update Degree 1 coefficients with SLR or derived values - #-- Tellus: GRACE/GRACE-FO TN-13 from PO.DAAC - #-- https://grace.jpl.nasa.gov/data/get-data/geocenter/ - #-- SLR: satellite laser ranging from CSR - #-- ftp://ftp.csr.utexas.edu/pub/slr/geocenter/ - #-- UCI: Sutterley and Velicogna, Remote Sensing (2019) - #-- https://www.mdpi.com/2072-4292/11/18/2108 - #-- Swenson: GRACE-derived coefficients from Sean Swenson - #-- https://doi.org/10.1029/2007JB005338 - #-- GFZ: GRACE/GRACE-FO coefficients from GFZ GravIS - #-- http://gravis.gfz-potsdam.de/corrections + # Update Degree 1 coefficients with SLR or derived values + # Tellus: GRACE/GRACE-FO TN-13 from PO.DAAC + # https://grace.jpl.nasa.gov/data/get-data/geocenter/ + # SLR: satellite laser ranging from CSR + # ftp://ftp.csr.utexas.edu/pub/slr/geocenter/ + # UCI: Sutterley and Velicogna, Remote Sensing (2019) + # https://www.mdpi.com/2072-4292/11/18/2108 + # Swenson: GRACE-derived coefficients from Sean Swenson + # https://doi.org/10.1029/2007JB005338 + # GFZ: GRACE/GRACE-FO coefficients from GFZ GravIS + # http://gravis.gfz-potsdam.de/corrections parser.add_argument('--geocenter', metavar='DEG1', type=str, choices=['Tellus','SLR','SLF','UCI','Swenson','GFZ'], @@ -856,7 +856,7 @@ def arguments(): parser.add_argument('--interpolate-geocenter', default=False, action='store_true', help='Least-squares model missing Degree 1 coefficients') - #-- replace low degree harmonics with values from Satellite Laser Ranging + # replace low degree harmonics with values from Satellite Laser Ranging parser.add_argument('--slr-c20', type=str, default=None, choices=['CSR','GFZ','GSFC'], help='Replace C20 coefficients with SLR values') @@ -875,19 +875,19 @@ def arguments(): parser.add_argument('--slr-c50', type=str, default=None, choices=['CSR','GSFC','LARES'], help='Replace C50 coefficients with SLR values') - #-- input data format (ascii, netCDF4, HDF5) + # input data format (ascii, netCDF4, HDF5) parser.add_argument('--format','-F', type=str, default='netCDF4', choices=['ascii','netCDF4','HDF5'], help='Input data format for auxiliary files') - #-- mean file to remove + # mean file to remove parser.add_argument('--mean-file', type=lambda p: os.path.abspath(os.path.expanduser(p)), help='GRACE/GRACE-FO mean file to remove from the harmonic data') - #-- input data format (ascii, netCDF4, HDF5) + # input data format (ascii, netCDF4, HDF5) parser.add_argument('--mean-format', type=str, default='netCDF4', choices=['ascii','netCDF4','HDF5','gfc'], help='Input data format for GRACE/GRACE-FO mean file') - #-- mascon index file and parameters + # mascon index file and parameters parser.add_argument('--mascon-file', type=lambda p: os.path.abspath(os.path.expanduser(p)), help='Index file of mascons spherical harmonics') @@ -897,12 +897,12 @@ def arguments(): parser.add_argument('--redistribute-mascons', default=False, action='store_true', help='Redistribute mascon mass over the ocean') - #-- 1: mass coefficients - #-- 2: geoid coefficients + # 1: mass coefficients + # 2: geoid coefficients parser.add_argument('--fit-method', type=int, default=1, choices=(1,2), help='Method for fitting sensitivity kernel to harmonics') - #-- monthly files to be removed from the GRACE/GRACE-FO data + # monthly files to be removed from the GRACE/GRACE-FO data parser.add_argument('--remove-file', type=lambda p: os.path.abspath(os.path.expanduser(p)), nargs='+', 
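# Illustrative sketch (hypothetical paths): the repeated argparse pattern
# above normalizes every path option through expanduser and abspath, so a
# tilde or relative path becomes absolute before use.
import argparse, os
norm = lambda p: os.path.abspath(os.path.expanduser(p))
p = argparse.ArgumentParser()
p.add_argument('--mean-file', type=norm)
args = p.parse_args(['--mean-file', '~/data/mean.nc'])
assert os.path.isabs(args.mean_file)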
help='Monthly files to be removed from the GRACE/GRACE-FO data') @@ -915,49 +915,49 @@ def arguments(): parser.add_argument('--redistribute-removed', default=False, action='store_true', help='Redistribute removed mass fields over the ocean') - #-- mascon reconstruct parameters + # mascon reconstruct parameters parser.add_argument('--remove-reconstruct', default=False, action='store_true', help='Remove reconstructed mascon time series fields') parser.add_argument('--reconstruct-file', type=lambda p: os.path.abspath(os.path.expanduser(p)), help='Reconstructed mascon time series file to be removed') - #-- land-sea mask for redistributing mascon mass and land water flux + # land-sea mask for redistributing mascon mass and land water flux lsmask = utilities.get_data_path(['data','landsea_hd.nc']) parser.add_argument('--mask', type=lambda p: os.path.abspath(os.path.expanduser(p)), default=lsmask, help='Land-sea mask for redistributing mascon mass and land water flux') - #-- Output log file for each job in forms - #-- calc_mascon_run_2002-04-01_PID-00000.log - #-- calc_mascon_failed_run_2002-04-01_PID-00000.log + # Output log file for each job in forms + # calc_mascon_run_2002-04-01_PID-00000.log + # calc_mascon_failed_run_2002-04-01_PID-00000.log parser.add_argument('--log', default=False, action='store_true', help='Output log file for each job') - #-- print information about processing run + # print information about processing run parser.add_argument('--verbose','-V', action='count', default=0, help='Verbose output of processing run') - #-- permissions mode of the local directories and files (number in octal) + # permissions mode of the local directories and files (number in octal) parser.add_argument('--mode','-M', type=lambda x: int(x,base=8), default=0o775, help='Permissions mode of output files') - #-- return the parser + # return the parser return parser -#-- This is the main part of the program that calls the individual functions +# This is the main part of the program that calls the individual functions def main(): - #-- Read the system arguments listed after the program + # Read the system arguments listed after the program parser = arguments() args,_ = parser.parse_known_args() - #-- create logger + # create logger loglevels = [logging.CRITICAL,logging.INFO,logging.DEBUG] logging.basicConfig(level=loglevels[args.verbose]) - #-- try to run the analysis with listed parameters + # try to run the analysis with listed parameters try: info(args) - #-- run calc_mascon algorithm with parameters + # run calc_mascon algorithm with parameters output_files = calc_mascon( args.directory, args.center, @@ -1002,17 +1002,17 @@ def main(): OUTPUT_DIRECTORY=args.output_directory, MODE=args.mode) except Exception as e: - #-- if there has been an error exception - #-- print the type, value, and stack trace of the - #-- current exception being handled + # if there has been an error exception + # print the type, value, and stack trace of the + # current exception being handled logging.critical(f'process id {os.getpid():d} failed') logging.error(traceback.format_exc()) - if args.log:#-- write failed job completion log file + if args.log:# write failed job completion log file output_error_log_file(args) else: - if args.log:#-- write successful job completion log file + if args.log:# write successful job completion log file output_log_file(args,output_files) -#-- run main program +# run main program if __name__ == '__main__': main() diff --git a/scripts/calc_sensitivity_kernel.py 
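# Illustrative sketch: the counted --verbose flag above indexes a list of
# logging levels, so no flag logs only CRITICAL, -V adds INFO, -VV DEBUG.
import logging
verbose = 2  # as produced by "-VV" with argparse action='count'
loglevels = [logging.CRITICAL, logging.INFO, logging.DEBUG]
logging.basicConfig(level=loglevels[verbose])
logging.debug('visible only at -VV')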
b/scripts/calc_sensitivity_kernel.py index b13be33c..b2e6af1a 100644 --- a/scripts/calc_sensitivity_kernel.py +++ b/scripts/calc_sensitivity_kernel.py @@ -150,7 +150,7 @@ from gravity_toolkit.ocean_stokes import ocean_stokes from gravity_toolkit.harmonic_summation import harmonic_summation -#-- PURPOSE: keep track of threads +# PURPOSE: keep track of threads def info(args): logging.info(os.path.basename(sys.argv[0])) logging.info(args) @@ -159,8 +159,8 @@ def info(args): logging.info(f'parent process: {os.getppid():d}') logging.info(f'process id: {os.getpid():d}') -#-- PURPOSE: calculate a regional time-series through a least -#-- squares mascon process +# PURPOSE: calculate a regional time-series through a least +# squares mascon process def calc_sensitivity_kernel(LMAX, RAD, LMIN=None, MMAX=None, @@ -178,309 +178,309 @@ def calc_sensitivity_kernel(LMAX, RAD, OUTPUT_DIRECTORY=None, MODE=0o775): - #-- file information + # file information suffix = dict(ascii='txt', netCDF4='nc', HDF5='H5')[DATAFORM] - #-- file parser for reading index files - #-- removes commented lines (can comment out files in the index) - #-- removes empty lines (if there are extra empty lines) + # file parser for reading index files + # removes commented lines (can comment out files in the index) + # removes empty lines (if there are extra empty lines) parser = re.compile(r'^(?!\#|\%|$)', re.VERBOSE) - #-- Create output Directory if not currently existing + # Create output Directory if not currently existing if (not os.access(OUTPUT_DIRECTORY,os.F_OK)): os.mkdir(OUTPUT_DIRECTORY) - #-- list object of output files for file logs (full path) + # list object of output files for file logs (full path) output_files = [] - #-- read arrays of kl, hl, and ll Love Numbers + # read arrays of kl, hl, and ll Love Numbers hl,kl,ll = load_love_numbers(LMAX, LOVE_NUMBERS=LOVE_NUMBERS, REFERENCE=REFERENCE) - #-- Earth Parameters + # Earth Parameters factors = units(lmax=LMAX).harmonic(hl,kl,ll) - #-- Average Density of the Earth [g/cm^3] + # Average Density of the Earth [g/cm^3] rho_e = factors.rho_e - #-- Average Radius of the Earth [cm] + # Average Radius of the Earth [cm] rad_e = factors.rad_e - #-- input/output string for both LMAX==MMAX and LMAX != MMAX cases + # input/output string for both LMAX==MMAX and LMAX != MMAX cases MMAX = np.copy(LMAX) if not MMAX else MMAX order_str = f'M{MMAX:d}' if (MMAX != LMAX) else '' - #-- Calculating the Gaussian smoothing for radius RAD + # Calculating the Gaussian smoothing for radius RAD if (RAD != 0): wt = 2.0*np.pi*gauss_weights(RAD,LMAX) gw_str = f'_r{RAD:0.0f}km' else: - #-- else = 1 + # else = 1 wt = np.ones((LMAX+1)) gw_str = '' - #-- Read Ocean function and convert to Ylms for redistribution + # Read Ocean function and convert to Ylms for redistribution if REDISTRIBUTE_MASCONS: - #-- read Land-Sea Mask and convert to spherical harmonics + # read Land-Sea Mask and convert to spherical harmonics ocean_Ylms = ocean_stokes(LANDMASK, LMAX, MMAX=MMAX, LOVE=(hl,kl,ll)) ocean_str = '_OCN' else: - #-- not distributing uniformly over ocean + # not distributing uniformly over ocean ocean_str = '' - #-- input mascon spherical harmonic datafiles + # input mascon spherical harmonic datafiles with open(MASCON_FILE, mode='r', encoding='utf8') as f: mascon_files = [l for l in f.read().splitlines() if parser.match(l)] - #-- number of mascons + # number of mascons n_mas = len(mascon_files) - #-- spatial area of the mascon + # spatial area of the mascon total_area = np.zeros((n_mas)) - #-- name of each 
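# Illustrative sketch: the negative-lookahead pattern above keeps index lines
# that neither start with '#' or '%' nor are empty, so index files can carry
# comments and blank lines.
import re
parser = re.compile(r'^(?!\#|\%|$)', re.VERBOSE)
lines = ['# comment', '', 'mascon_0.nc', '% matlab comment', 'mascon_1.nc']
assert [l for l in lines if parser.match(l)] == ['mascon_0.nc', 'mascon_1.nc']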
mascon + # name of each mascon mascon_name = [] - #-- for each valid file in the index (iterate over mascons) + # for each valid file in the index (iterate over mascons) mascon_list = [] for k,fi in enumerate(mascon_files): - #-- read mascon spherical harmonics + # read mascon spherical harmonics Ylms = harmonics().from_file(os.path.expanduser(fi), format=DATAFORM, date=False) - #-- Calculating the total mass of each mascon (1 cmwe uniform) + # Calculating the total mass of each mascon (1 cmwe uniform) total_area[k] = 4.0*np.pi*(rad_e**3)*rho_e*Ylms.clm[0,0]/3.0 - #-- distribute mascon mass uniformly over the ocean + # distribute mascon mass uniformly over the ocean if REDISTRIBUTE_MASCONS: - #-- calculate ratio between total mascon mass and - #-- a uniformly distributed cm of water over the ocean + # calculate ratio between total mascon mass and + # a uniformly distributed cm of water over the ocean ratio = Ylms.clm[0,0]/ocean_Ylms.clm[0,0] - #-- for each spherical harmonic - for m in range(0,MMAX+1):#-- MMAX+1 to include MMAX - for l in range(m,LMAX+1):#-- LMAX+1 to include LMAX - #-- remove ratio*ocean Ylms from mascon Ylms - #-- note: x -= y is equivalent to x = x - y + # for each spherical harmonic + for m in range(0,MMAX+1):# MMAX+1 to include MMAX + for l in range(m,LMAX+1):# LMAX+1 to include LMAX + # remove ratio*ocean Ylms from mascon Ylms + # note: x -= y is equivalent to x = x - y Ylms.clm[l,m] -= ratio*ocean_Ylms.clm[l,m] Ylms.slm[l,m] -= ratio*ocean_Ylms.slm[l,m] - #-- truncate mascon spherical harmonics to d/o LMAX/MMAX and add to list + # truncate mascon spherical harmonics to d/o LMAX/MMAX and add to list mascon_list.append(Ylms.truncate(lmax=LMAX, mmax=MMAX)) - #-- mascon base is the file without directory or suffix + # mascon base is the file without directory or suffix mascon_base = os.path.basename(mascon_files[k]) mascon_base = os.path.splitext(mascon_base)[0] - #-- if lower case, will capitalize + # if lower case, will capitalize mascon_base = mascon_base.upper() - #-- if mascon name contains degree and order info, remove + # if mascon name contains degree and order info, remove mascon_name.append(mascon_base.replace(f'_L{LMAX:d}', '')) - #-- create single harmonics object from list + # create single harmonics object from list mascon_Ylms = harmonics().from_list(mascon_list, date=False) - #-- Calculating the number of cos and sin harmonics between LMIN and LMAX - #-- taking into account MMAX (if MMAX == LMAX then LMAX-MMAX=0) + # Calculating the number of cos and sin harmonics between LMIN and LMAX + # taking into account MMAX (if MMAX == LMAX then LMAX-MMAX=0) n_harm=np.int64(LMAX**2 - LMIN**2 + 2*LMAX + 1 - (LMAX-MMAX)**2 - (LMAX-MMAX)) - #-- Initialing harmonics for least squares fitting - #-- mascon kernel + # Initializing harmonics for least squares fitting + # mascon kernel M_lm = np.zeros((n_harm,n_mas)) - #-- mascon kernel converted to output unit + # mascon kernel converted to output unit MA_lm = np.zeros((n_harm,n_mas)) - #-- sensitivity kernel + # sensitivity kernel A_lm = np.zeros((n_harm,n_mas)) - #-- Initializing conversion factors - #-- factor for converting to smoothed coefficients of mass + # Initializing conversion factors + # factor for converting to smoothed coefficients of mass fact = np.zeros((n_harm)) - #-- factor for converting back into geoid coefficients + # factor for converting back into geoid coefficients fact_inv = np.zeros((n_harm)) - #-- smoothing factor + # smoothing factor wt_lm = np.zeros((n_harm)) - #-- ii is a counter variable for
building the mascon column array + # ii is a counter variable for building the mascon column array ii = 0 - #-- Creating column array of clm/slm coefficients - #-- Order is [C00...C6060,S11...S6060] - #-- Calculating factor to convert geoid spherical harmonic coefficients - #-- to coefficients of mass (Wahr, 1998) + # Creating column array of clm/slm coefficients + # Order is [C00...C6060,S11...S6060] + # Calculating factor to convert geoid spherical harmonic coefficients + # to coefficients of mass (Wahr, 1998) coeff = rho_e*rad_e/3.0 coeff_inv = 0.75/(np.pi*rho_e*rad_e**3) - #-- Switching between Cosine and Sine Stokes + # Switching between Cosine and Sine Stokes for cs,csharm in enumerate(['clm','slm']): - #-- copy cosine and sin harmonics + # copy cosine and sin harmonics mascon_harm = getattr(mascon_Ylms, csharm) - #-- for each spherical harmonic degree - #-- +1 to include LMAX + # for each spherical harmonic degree + # +1 to include LMAX for l in range(LMIN,LMAX+1): - #-- for each spherical harmonic order - #-- Sine Stokes for (m=0) = 0 + # for each spherical harmonic order + # Sine Stokes for (m=0) = 0 mm = np.min([MMAX,l]) - #-- +1 to include l or MMAX (whichever is smaller) + # +1 to include l or MMAX (whichever is smaller) for m in range(cs,mm+1): - #-- Mascon Spherical Harmonics + # Mascon Spherical Harmonics M_lm[ii,:] = np.copy(mascon_harm[l,m,:]) - #-- degree dependent factor to convert to mass + # degree dependent factor to convert to mass fact[ii] = (2.0*l + 1.0)/(1.0 + kl[l]) - #-- degree dependent factor to convert from mass + # degree dependent factor to convert from mass fact_inv[ii] = coeff_inv*(1.0 + kl[l])/(2.0*l+1.0) - #-- degree dependent smoothing + # degree dependent smoothing wt_lm[ii] = np.copy(wt[l]) - #-- add 1 to counter + # add 1 to counter ii += 1 - #-- Converting mascon coefficients to fit method + # Converting mascon coefficients to fit method if (FIT_METHOD == 1): - #-- Fitting Sensitivity Kernel as mass coefficients - #-- converting M_lm to mass coefficients of the kernel + # Fitting Sensitivity Kernel as mass coefficients + # converting M_lm to mass coefficients of the kernel for i in range(n_harm): MA_lm[i,:] = M_lm[i,:]*wt_lm[i]*fact[i] fit_factor = wt_lm*fact inv_fit_factor = np.copy(fact_inv) else: - #-- Fitting Sensitivity Kernel as geoid coefficients + # Fitting Sensitivity Kernel as geoid coefficients for i in range(n_harm): MA_lm[:,:] = M_lm[i,:]*wt_lm[i] fit_factor = wt_lm*np.ones((n_harm)) inv_fit_factor = np.ones((n_harm)) - #-- Fitting the sensitivity kernel from the input kernel + # Fitting the sensitivity kernel from the input kernel for i in range(n_harm): - #-- setting kern_i equal to 1 for d/o + # setting kern_i equal to 1 for d/o kern_i = np.zeros((n_harm)) - #-- converting to mass coefficients if specified + # converting to mass coefficients if specified kern_i[i] = 1.0*fit_factor[i] - #-- spherical harmonics solution for the - #-- mascon sensitivity kernels - #-- Least Squares Solutions: Inv(X'.X).(X'.Y) + # spherical harmonics solution for the + # mascon sensitivity kernels + # Least Squares Solutions: Inv(X'.X).(X'.Y) kern_lm = np.linalg.lstsq(MA_lm, kern_i, rcond=-1)[0] for k in range(n_mas): A_lm[i,k] = kern_lm[k]*total_area[k] - #-- free up larger variables + # free up larger variables del M_lm, MA_lm, wt_lm, fact, fact_inv, fit_factor - #-- reshaping harmonics of sensitivity kernel to LMAX+1,MMAX+1 - #-- calculating the spatial sensitivity kernel of each mascon - #-- kernel calculated as outlined in Tiwari (2009) and Jacobs 
(2012) - #-- Initializing output sensitivity kernel (both spatial and Ylms) + # reshaping harmonics of sensitivity kernel to LMAX+1,MMAX+1 + # calculating the spatial sensitivity kernel of each mascon + # kernel calculated as outlined in Tiwari (2009) and Jacobs (2012) + # Initializing output sensitivity kernel (both spatial and Ylms) kern_Ylms = harmonics(lmax=LMAX, mmax=MMAX) kern_Ylms.clm = np.zeros((LMAX+1,MMAX+1,n_mas)) kern_Ylms.slm = np.zeros((LMAX+1,MMAX+1,n_mas)) kern_Ylms.time = np.copy(total_area) - #-- counter variable for deconstructing the mascon column arrays + # counter variable for deconstructing the mascon column arrays ii = 0 - #-- Switching between Cosine and Sine Stokes + # Switching between Cosine and Sine Stokes for cs,csharm in enumerate(['clm','slm']): - #-- for each spherical harmonic degree - #-- +1 to include LMAX + # for each spherical harmonic degree + # +1 to include LMAX for l in range(LMIN,LMAX+1): - #-- for each spherical harmonic order - #-- Sine Stokes for (m=0) = 0 + # for each spherical harmonic order + # Sine Stokes for (m=0) = 0 mm = np.min([MMAX,l]) - #-- +1 to include l or MMAX (whichever is smaller) + # +1 to include l or MMAX (whichever is smaller) for m in range(cs,mm+1): - #-- inv_fit_factor: normalize from mass harmonics + # inv_fit_factor: normalize from mass harmonics temp = getattr(kern_Ylms, csharm) temp[l,m,:] = inv_fit_factor[ii]*A_lm[ii,:] - #-- add 1 to counter + # add 1 to counter ii += 1 - #-- free up larger variables + # free up larger variables del A_lm, inv_fit_factor - #-- for each mascon + # for each mascon for k in range(n_mas): - #-- get harmonics for mascon + # get harmonics for mascon Ylms = kern_Ylms.index(k, date=False) - #-- output sensitivity kernel to file + # output sensitivity kernel to file args = (mascon_name[k],ocean_str,LMAX,order_str,gw_str,suffix) FILE1 = '{0}_SKERNEL_CLM{1}_L{2:d}{3}{4}.{5}'.format(*args) Ylms.to_file(os.path.join(OUTPUT_DIRECTORY, FILE1), format=DATAFORM, date=False) - #-- change the permissions mode + # change the permissions mode os.chmod(os.path.join(OUTPUT_DIRECTORY,FILE1),MODE) - #-- add output files to list object + # add output files to list object output_files.append(os.path.join(OUTPUT_DIRECTORY,FILE1)) - #-- if outputting spatial grids + # if outputting spatial grids if SPATIAL: - #-- Output spatial data object + # Output spatial data object grid = spatial() - #-- Output Degree Spacing + # Output Degree Spacing dlon,dlat = (DDEG[0],DDEG[0]) if (len(DDEG) == 1) else (DDEG[0],DDEG[1]) - #-- Output Degree Interval + # Output Degree Interval if (INTERVAL == 1): - #-- (-180:180,90:-90) + # (-180:180,90:-90) n_lon = np.int64((360.0/dlon)+1.0) n_lat = np.int64((180.0/dlat)+1.0) grid.lon = -180 + dlon*np.arange(0,n_lon) grid.lat = 90.0 - dlat*np.arange(0,n_lat) elif (INTERVAL == 2): - #-- (Degree spacing)/2 + # (Degree spacing)/2 grid.lon = np.arange(-180+dlon/2.0,180+dlon/2.0,dlon) grid.lat = np.arange(90.0-dlat/2.0,-90.0-dlat/2.0,-dlat) n_lon = len(grid.lon) n_lat = len(grid.lat) elif (INTERVAL == 3): - #-- non-global grid set with BOUNDS parameter + # non-global grid set with BOUNDS parameter minlon,maxlon,minlat,maxlat = BOUNDS.copy() grid.lon = np.arange(minlon+dlon/2.0,maxlon+dlon/2.0,dlon) grid.lat = np.arange(maxlat-dlat/2.0,minlat-dlat/2.0,-dlat) nlon = len(grid.lon) nlat = len(grid.lat) - #-- Computing plms for converting to spatial domain + # Computing plms for converting to spatial domain theta = (90.0-grid.lat)*np.pi/180.0 PLM, dPLM = plm_holmes(LMAX, np.cos(theta)) - #-- for 
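# Illustrative sketch (toy LMAX=2, not from the script): the deconstruction
# above mirrors the earlier packing loop, so coefficients survive a round
# trip through the [Clm...,Slm...] column array.
import numpy as np
LMIN, LMAX = 0, 2
rng = np.random.default_rng(1)
clm = np.tril(rng.standard_normal((LMAX+1, LMAX+1)))
slm = np.tril(rng.standard_normal((LMAX+1, LMAX+1)))
slm[:, 0] = 0.0  # sine Stokes coefficients are zero for m=0
col, ii = [], 0
for cs, arr in enumerate((clm, slm)):  # pack into column order
    for l in range(LMIN, LMAX+1):
        for m in range(cs, l+1):
            col.append(arr[l, m])
clm2, slm2 = np.zeros_like(clm), np.zeros_like(slm)
for cs, arr in enumerate((clm2, slm2)):  # unpack with the same loops
    for l in range(LMIN, LMAX+1):
        for m in range(cs, l+1):
            arr[l, m] = col[ii]
            ii += 1
assert np.allclose(clm, clm2) and np.allclose(slm, slm2)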
each mascon + # for each mascon for k in range(n_mas): - #-- get harmonics for mascon + # get harmonics for mascon Ylms = kern_Ylms.index(k, date=False) - #-- convert spherical harmonics to output spatial grid + # convert spherical harmonics to output spatial grid grid.data = harmonic_summation(Ylms.clm, Ylms.slm, grid.lon, grid.lat, LMAX=LMAX, MMAX=MMAX, PLM=PLM).T grid.mask = np.zeros_like(grid.data, dtype=bool) - #-- output sensitivity kernel to file + # output sensitivity kernel to file args = (mascon_name[k],ocean_str,LMAX,order_str,gw_str,suffix) FILE2 = '{0}_SKERNEL{1}_L{2:d}{3}{4}.{5}'.format(*args) grid.to_file(os.path.join(OUTPUT_DIRECTORY,FILE2), format=DATAFORM, date=False, units='unitless', longname='Sensitivity_Kernel') - #-- change the permissions mode + # change the permissions mode os.chmod(os.path.join(OUTPUT_DIRECTORY,FILE2),MODE) - #-- add output files to list object + # add output files to list object output_files.append(os.path.join(OUTPUT_DIRECTORY,FILE2)) - #-- return the list of output files + # return the list of output files return output_files -#-- PURPOSE: print a file log for the mascon sensitivity kernel analysis +# PURPOSE: print a file log for the mascon sensitivity kernel analysis def output_log_file(arguments,output_files): - #-- format: calc_skernel_run_2002-04-01_PID-70335.log + # format: calc_skernel_run_2002-04-01_PID-70335.log args = (time.strftime('%Y-%m-%d',time.localtime()), os.getpid()) LOGFILE = 'calc_skernel_run_{0}_PID-{1:d}.log'.format(*args) - #-- create a unique log and open the log file + # create a unique log and open the log file DIRECTORY = os.path.expanduser(arguments.output_directory) fid = utilities.create_unique_file(os.path.join(DIRECTORY,LOGFILE)) logging.basicConfig(stream=fid, level=logging.INFO) - #-- print argument values sorted alphabetically + # print argument values sorted alphabetically logging.info('ARGUMENTS:') for arg, value in sorted(vars(arguments).items()): logging.info('{0}: {1}'.format(arg, value)) - #-- print output files + # print output files logging.info('\n\nOUTPUT FILES:') for f in output_files: logging.info('{0}'.format(f)) - #-- close the log file + # close the log file fid.close() -#-- PURPOSE: print a error file log for the mascon sensitivity kernel analysis +# PURPOSE: print an error file log for the mascon sensitivity kernel analysis def output_error_log_file(arguments): - #-- format: calc_skernel_failed_run_2002-04-01_PID-70335.log + # format: calc_skernel_failed_run_2002-04-01_PID-70335.log args = (time.strftime('%Y-%m-%d',time.localtime()), os.getpid()) LOGFILE = 'calc_skernel_failed_run_{0}_PID-{1:d}.log'.format(*args) - #-- create a unique log and open the log file + # create a unique log and open the log file DIRECTORY = os.path.expanduser(arguments.output_directory) fid = utilities.create_unique_file(os.path.join(DIRECTORY,LOGFILE)) logging.basicConfig(stream=fid, level=logging.INFO) - #-- print argument values sorted alphabetically + # print argument values sorted alphabetically logging.info('ARGUMENTS:') for arg, value in sorted(vars(arguments).items()): logging.info('{0}: {1}'.format(arg, value)) - #-- print traceback error + # print traceback error logging.info('\n\nTRACEBACK ERROR:') traceback.print_exc(file=fid) - #-- close the log file + # close the log file fid.close() -#-- PURPOSE: create argument parser +# PURPOSE: create argument parser def arguments(): parser = argparse.ArgumentParser( description="""Calculates spatial sensitivity kernels through a @@ -489,64 +489,64 @@ def
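# Illustrative sketch: the three --interval grid conventions above at a
# 0.5-degree spacing; interval 1 registers nodes on grid lines (inclusive
# endpoints), interval 2 centers cells, interval 3 centers cells in --bounds.
import numpy as np
dlon, dlat = 0.5, 0.5
lon1 = -180 + dlon*np.arange(0, int(360.0/dlon) + 1)  # node-registered
lat1 = 90.0 - dlat*np.arange(0, int(180.0/dlat) + 1)
lon2 = np.arange(-180 + dlon/2.0, 180 + dlon/2.0, dlon)  # cell-centered
lat2 = np.arange(90.0 - dlat/2.0, -90.0 - dlat/2.0, -dlat)
assert (len(lon1), len(lat1), len(lon2), len(lat2)) == (721, 361, 720, 360)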
arguments(): fromfile_prefix_chars="@" ) parser.convert_arg_line_to_args = utilities.convert_arg_line_to_args - #-- command line parameters + # command line parameters parser.add_argument('--output-directory','-O', type=lambda p: os.path.abspath(os.path.expanduser(p)), default=os.getcwd(), help='Output directory for mascon files') - #-- minimum spherical harmonic degree + # minimum spherical harmonic degree parser.add_argument('--lmin', type=int, default=1, help='Minimum spherical harmonic degree') - #-- maximum spherical harmonic degree and order + # maximum spherical harmonic degree and order parser.add_argument('--lmax','-l', type=int, default=60, help='Maximum spherical harmonic degree') parser.add_argument('--mmax','-m', type=int, default=None, help='Maximum spherical harmonic order') - #-- different treatments of the load Love numbers - #-- 0: Han and Wahr (1995) values from PREM - #-- 1: Gegout (2005) values from PREM - #-- 2: Wang et al. (2012) values from PREM + # different treatments of the load Love numbers + # 0: Han and Wahr (1995) values from PREM + # 1: Gegout (2005) values from PREM + # 2: Wang et al. (2012) values from PREM parser.add_argument('--love','-n', type=int, default=0, choices=[0,1,2], help='Treatment of the Load Love numbers') - #-- option for setting reference frame for gravitational load love number - #-- reference frame options (CF, CM, CE) + # option for setting reference frame for gravitational load love number + # reference frame options (CF, CM, CE) parser.add_argument('--reference', type=str.upper, default='CF', choices=['CF','CM','CE'], help='Reference frame for load Love numbers') - #-- Gaussian smoothing radius (km) + # Gaussian smoothing radius (km) parser.add_argument('--radius','-R', type=float, default=0, help='Gaussian smoothing radius (km)') - #-- input data format (ascii, netCDF4, HDF5) + # input data format (ascii, netCDF4, HDF5) parser.add_argument('--format','-F', type=str, default='netCDF4', choices=['ascii','netCDF4','HDF5'], help='Input data format for auxiliary files') - #-- mascon index file and parameters + # mascon index file and parameters parser.add_argument('--mascon-file', type=lambda p: os.path.abspath(os.path.expanduser(p)), help='Index file of mascons spherical harmonics') parser.add_argument('--redistribute-mascons', default=False, action='store_true', help='Redistribute mascon mass over the ocean') - #-- 1: mass coefficients - #-- 2: geoid coefficients + # 1: mass coefficients + # 2: geoid coefficients parser.add_argument('--fit-method', type=int, default=1, choices=(1,2), help='Method for fitting sensitivity kernel to harmonics') - #-- land-sea mask for redistributing mascon mass + # land-sea mask for redistributing mascon mass lsmask = utilities.get_data_path(['data','landsea_hd.nc']) parser.add_argument('--mask', type=lambda p: os.path.abspath(os.path.expanduser(p)), default=lsmask, help='Land-sea mask for redistributing mascon mass') - #-- output spatial grid + # output spatial grid parser.add_argument('--spatial','-s', default=False, action='store_true', help='Output spatial grid file for each mascon') - #-- output grid parameters + # output grid parameters parser.add_argument('--spacing','-S', type=float, nargs='+', default=[0.5,0.5], metavar=('dlon','dlat'), help='Spatial resolution of output data') @@ -557,37 +557,37 @@ def arguments(): parser.add_argument('--bounds','-B', type=float, nargs=4, metavar=('lon_min','lon_max','lat_min','lat_max'), help='Bounding box for non-global grid') - #-- Output log file for each job 
in forms - #-- calc_skernel_run_2002-04-01_PID-00000.log - #-- calc_skernel_failed_run_2002-04-01_PID-00000.log + # Output log file for each job in forms + # calc_skernel_run_2002-04-01_PID-00000.log + # calc_skernel_failed_run_2002-04-01_PID-00000.log parser.add_argument('--log', default=False, action='store_true', help='Output log file for each job') - #-- print information about processing run + # print information about processing run parser.add_argument('--verbose','-V', action='count', default=0, help='Verbose output of processing run') - #-- permissions mode of the local directories and files (number in octal) + # permissions mode of the local directories and files (number in octal) parser.add_argument('--mode','-M', type=lambda x: int(x,base=8), default=0o775, help='Permissions mode of output files') - #-- return the parser + # return the parser return parser -#-- This is the main part of the program that calls the individual functions +# This is the main part of the program that calls the individual functions def main(): - #-- Read the system arguments listed after the program + # Read the system arguments listed after the program parser = arguments() args,_ = parser.parse_known_args() - #-- create logger + # create logger loglevels = [logging.CRITICAL,logging.INFO,logging.DEBUG] logging.basicConfig(level=loglevels[args.verbose]) - #-- try to run the analysis with listed parameters + # try to run the analysis with listed parameters try: info(args) - #-- run calc_sensitivity_kernel algorithm with parameters + # run calc_sensitivity_kernel algorithm with parameters output_files = calc_sensitivity_kernel( args.lmax, args.radius, @@ -607,17 +607,17 @@ def main(): OUTPUT_DIRECTORY=args.output_directory, MODE=args.mode) except Exception as e: - #-- if there has been an error exception - #-- print the type, value, and stack trace of the - #-- current exception being handled + # if there has been an error exception + # print the type, value, and stack trace of the + # current exception being handled logging.critical(f'process id {os.getpid():d} failed') logging.error(traceback.format_exc()) - if args.log:#-- write failed job completion log file + if args.log:# write failed job completion log file output_error_log_file(args) else: - if args.log:#-- write successful job completion log file + if args.log:# write successful job completion log file output_log_file(args,output_files) -#-- run main program +# run main program if __name__ == '__main__': main() diff --git a/scripts/cnes_grace_sync.py b/scripts/cnes_grace_sync.py index 648557bc..a5386e74 100755 --- a/scripts/cnes_grace_sync.py +++ b/scripts/cnes_grace_sync.py @@ -106,15 +106,15 @@ import posixpath import gravity_toolkit.utilities -#-- PURPOSE: sync local GRACE/GRACE-FO files with CNES server +# PURPOSE: sync local GRACE/GRACE-FO files with CNES server def cnes_grace_sync(DIRECTORY, DREL=[], TIMEOUT=None, LOG=False, CLOBBER=False, MODE=None): - #-- remote CNES/GRGS host directory + # remote CNES/GRGS host directory HOST = ['http://gravitegrace.get.obs-mip.fr','grgs.obs-mip.fr','data'] - #-- check if directory exists and recursively create if not + # check if directory exists and recursively create if not os.makedirs(DIRECTORY,MODE) if not os.path.exists(DIRECTORY) else None - #-- create dictionaries for dataset and host directory + # create dictionaries for dataset and host directory DSET = {} DSET['RL01'] = ['GSM', 'GAC'] DSET['RL02'] = ['GSM', 'GAA', 'GAB'] @@ -122,209 +122,209 @@ def cnes_grace_sync(DIRECTORY, DREL=[], 
TIMEOUT=None, LOG=False, DSET['RL04'] = ['GSM'] DSET['RL05'] = ['GSM', 'GAA', 'GAB'] - #-- remote path to tar files on CNES servers + # remote path to tar files on CNES servers REMOTE = dict(RL01={},RL02={},RL03={},RL04={},RL05={}) - #-- RL01: GSM and GAC + # RL01: GSM and GAC REMOTE['RL01']['GSM'] = ['RL01','variable','archives'] REMOTE['RL01']['GAC'] = ['RL01','variable','archives'] - #-- RL02: GSM, GAA and GAB + # RL02: GSM, GAA and GAB REMOTE['RL02']['GSM'] = ['RL02','variable','archives'] REMOTE['RL02']['GAA'] = ['RL02','variable','archives'] REMOTE['RL02']['GAB'] = ['RL02','variable','archives'] - #-- RL03: GSM, GAA and GAB + # RL03: GSM, GAA and GAB REMOTE['RL03']['GSM'] = ['RL03-v3','archives'] REMOTE['RL03']['GAA'] = ['RL03','variable','archives'] REMOTE['RL03']['GAB'] = ['RL03','variable','archives'] - #-- RL04: GSM + # RL04: GSM REMOTE['RL04']['GSM'] = ['RL04-v1','archives'] - #-- RL05: GSM, GAA, GAB for GRACE/GRACE-FO + # RL05: GSM, GAA, GAB for GRACE/GRACE-FO REMOTE['RL05']['GSM'] = ['RL05','archives'] REMOTE['RL05']['GAA'] = ['RL05','archives'] REMOTE['RL05']['GAB'] = ['RL05','archives'] - #-- tar file names for each dataset + # tar file names for each dataset TAR = dict(RL01={},RL02={},RL03={},RL04={},RL05={}) - #-- RL01: GSM and GAC + # RL01: GSM and GAC TAR['RL01']['GSM'] = ['GRGS.SH_models.GRACEFORMAT.RL01.tar.gz'] TAR['RL01']['GAC'] = ['GRGS.dealiasing.RL01.tar.gz'] - #-- RL02: GSM, GAA and GAB + # RL02: GSM, GAA and GAB TAR['RL02']['GSM'] = ['GRGS.SH_models.GRACEFORMAT.all.tar.gz'] TAR['RL02']['GAA'] = ['GRGS.dealiasing.GRACEFORMAT.all.tar.gz'] TAR['RL02']['GAB'] = ['GRGS.dealiasing.GRACEFORMAT.all.tar.gz'] - #-- RL03: GSM, GAA and GAB + # RL03: GSM, GAA and GAB TAR['RL03']['GSM'] = ['CNES-GRGS.RL03-v3.monthly.coeff.tar.gz'] TAR['RL03']['GAA'] = ['GRGS.RL03.dealiasing.monthly.tar.gz'] TAR['RL03']['GAB'] = ['GRGS.RL03.dealiasing.monthly.tar.gz'] - #-- RL04: GSM + # RL04: GSM # TAR['RL04']['GSM'] = ['CNES.RL04-v1.monthly.OLD_IERS2010_MEAN_POLE_CONVENTION.tar.gz'] TAR['RL04']['GSM'] = ['CNES.RL04-v1.monthly.NEW_IERS2010_MEAN_POLE_CONVENTION.tar.gz'] - #-- RL05: GSM, GAA and GAB + # RL05: GSM, GAA and GAB TAR['RL05']['GSM'] = ['CNES-GRGS.RL05.GRACE.monthly.tar.gz', 'CNES-GRGS.RL05.GRACE-FO.monthly.tar.gz'] TAR['RL05']['GAA'] = ['CNES-GRGS.RL05.monthly.dealiasing.tar.gz'] TAR['RL05']['GAB'] = ['CNES-GRGS.RL05.monthly.dealiasing.tar.gz'] - #-- create log file with list of synchronized files (or print to terminal) + # create log file with list of synchronized files (or print to terminal) if LOG: - #-- output to log file - #-- format: CNES_sync_2002-04-01.log + # output to log file + # format: CNES_sync_2002-04-01.log today = time.strftime('%Y-%m-%d',time.localtime()) LOGFILE = f'CNES_sync_{today}.log' fid1 = open(os.path.join(DIRECTORY,LOGFILE),'w') logging.basicConfig(stream=fid1,level=logging.INFO) logging.info(f'CNES Sync Log ({today})') else: - #-- standard output (terminal output) + # standard output (terminal output) logging.basicConfig(level=logging.INFO) - #-- DATA RELEASES (RL01, RL02, RL03, RL04) - #-- RL01 and RL02 are no longer updated as default + # DATA RELEASES (RL01, RL02, RL03, RL04) + # RL01 and RL02 are no longer updated as default for rl in DREL: - #-- datasets (GSM, GAA, GAB) + # datasets (GSM, GAA, GAB) for ds in DSET[rl]: logging.info(f'CNES/{rl}/{ds}') - #-- specific GRACE directory + # specific GRACE directory local_dir = os.path.join(DIRECTORY, 'CNES', rl, ds) - #-- check if GRACE directory exists and recursively create if not + # check if GRACE 
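# Illustrative sketch: a remote tar path is the HOST segments plus the
# per-release REMOTE segments plus the tar name, joined with posixpath so
# the separator stays '/' regardless of platform.
import posixpath
HOST = ['http://gravitegrace.get.obs-mip.fr', 'grgs.obs-mip.fr', 'data']
remote_tar_path = HOST + ['RL05', 'archives', 'CNES-GRGS.RL05.GRACE.monthly.tar.gz']
remote_file = posixpath.join(*remote_tar_path)
# -> http://gravitegrace.get.obs-mip.fr/grgs.obs-mip.fr/data/RL05/archives/CNES-GRGS.RL05.GRACE.monthly.tar.gz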
directory exists and recursively create if not + # check if GRACE directory exists and recursively create if not os.makedirs(local_dir,MODE) if not os.path.exists(local_dir) else None - #-- retrieve each tar file from CNES + # retrieve each tar file from CNES for t in TAR[rl][ds]: remote_tar_path = copy.copy(HOST) remote_tar_path.extend(REMOTE[rl][ds]) remote_tar_path.append(t) - #-- local copy of CNES data tar file + # local copy of CNES data tar file local_file = os.path.join(DIRECTORY, 'CNES', rl, t) MD5 = gravity_toolkit.utilities.get_hash(local_file) - #-- copy remote tar file to local if new or updated + # copy remote tar file to local if new or updated gravity_toolkit.utilities.from_http(remote_tar_path, local=local_file, timeout=TIMEOUT, hash=MD5, chunk=16384, verbose=True, fid=fid1, mode=MODE) - #-- Create and submit request to get modification time of file + # Create and submit request to get modification time of file remote_file = posixpath.join(*remote_tar_path) request = gravity_toolkit.utilities.urllib2.Request(remote_file) response = gravity_toolkit.utilities.urllib2.urlopen(request, timeout=TIMEOUT) - #-- change modification time to remote + # change modification time to remote time_string = response.headers['last-modified'] remote_mtime=gravity_toolkit.utilities.get_unix_time(time_string, format='%a, %d %b %Y %H:%M:%S %Z') - #-- keep remote modification time of file and local access time + # keep remote modification time of file and local access time os.utime(local_file, (os.stat(local_file).st_atime, remote_mtime)) - #-- open file with tarfile (read) + # open file with tarfile (read) tar = tarfile.open(name=local_file, mode='r:gz') - #-- copy files from the tar file into the data directory + # copy files from the tar file into the data directory member_list=[m for m in tar.getmembers() if re.search(ds,m.name)] - #-- for each member of the dataset within the tar file + # for each member of the dataset within the tar file for member in member_list: - #-- local gzipped version of the file + # local gzipped version of the file fi = os.path.basename(member.name) local_file = os.path.join(local_dir, f'{fi}.gz') gzip_copy_file(tar, member, local_file, CLOBBER, MODE) - #-- close the tar file + # close the tar file tar.close() - #-- find GRACE files and sort by date + # find GRACE files and sort by date grace_files=[fi for fi in os.listdir(local_dir) if re.search(ds,fi)] - #-- outputting GRACE filenames to index + # outputting GRACE filenames to index with open(os.path.join(local_dir,'index.txt'),'w') as fid: for fi in sorted(grace_files): print(fi, file=fid) - #-- change permissions of index file + # change permissions of index file os.chmod(os.path.join(local_dir,'index.txt'), MODE) - #-- close log file and set permissions level to MODE + # close log file and set permissions level to MODE if LOG: fid1.close() os.chmod(os.path.join(DIRECTORY,LOGFILE), MODE) -#-- PURPOSE: copy file from tar file checking if file exists locally -#-- and if the original file is newer than the local file +# PURPOSE: copy file from tar file checking if file exists locally +# and if the original file is newer than the local file def gzip_copy_file(tar, member, local_file, CLOBBER, MODE): - #-- if file exists in file system: check if remote file is newer + # if file exists in file system: check if remote file is newer TEST = False OVERWRITE = ' (clobber)' - #-- last modification time of file within tar file + # last modification time of file within tar file file1_mtime = member.mtime - #-- check if output
compressed file exists in local directory + # check if output compressed file exists in local directory if os.access(local_file, os.F_OK): - #-- check last modification time of output gzipped file + # check last modification time of output gzipped file with gzip.open(local_file, 'rb') as fileID: fileobj = fileID.fileobj fileobj.seek(4) - #-- extract little endian 4 bit unsigned integer + # extract little endian 4 byte unsigned integer file2_mtime, = struct.unpack("<I", fileobj.read(4)) if (file1_mtime > file2_mtime): TEST = True OVERWRITE = ' (overwrite)' else: TEST = True OVERWRITE = ' (new)' - #-- if file does not exist, is to be overwritten, or CLOBBERed + # if file does not exist, is to be overwritten, or CLOBBERed if TEST or CLOBBER: - #-- Printing files copied from tar file to new compressed file + # Printing files copied from tar file to new compressed file logging.info(f'{tar.name}/{member.name} --> ') logging.info(f'\t{local_file}{OVERWRITE}\n') - #-- extract file contents to new compressed file + # extract file contents to new compressed file f_in = tar.extractfile(member) with gzip.GzipFile(local_file, 'wb', 9, None, file1_mtime) as f_out: shutil.copyfileobj(f_in, f_out) f_in.close() - #-- keep remote modification time of file and local access time + # keep remote modification time of file and local access time os.utime(local_file, (os.stat(local_file).st_atime, file1_mtime)) os.chmod(local_file, MODE) -#-- PURPOSE: create argument parser +# PURPOSE: create argument parser def arguments(): parser = argparse.ArgumentParser( description="""CNES/GRGS GRACE data download program for gravity field products """ ) - #-- command line parameters - #-- working data directory + # command line parameters + # working data directory parser.add_argument('--directory','-D', type=lambda p: os.path.abspath(os.path.expanduser(p)), default=os.getcwd(), help='Working data directory') - #-- GRACE/GRACE-FO data release + # GRACE/GRACE-FO data release parser.add_argument('--release','-r', metavar='DREL', type=str, nargs='+', default=['RL05'], choices=['RL01','RL02','RL03','RL04','RL05'], help='GRACE/GRACE-FO data release') - #-- connection timeout + # connection timeout parser.add_argument('--timeout','-t', type=int, default=360, help='Timeout in seconds for blocking operations') - #-- Output log file in form - #-- CNES_sync_2002-04-01.log + # Output log file in form + # CNES_sync_2002-04-01.log parser.add_argument('--log','-l', default=False, action='store_true', help='Output log file') parser.add_argument('--clobber','-C', default=False, action='store_true', help='Overwrite existing data in transfer') - #-- permissions mode of the directories and files synced (number in octal) + # permissions mode of the directories and files synced (number in octal) parser.add_argument('--mode','-M', type=lambda x: int(x,base=8), default=0o775, help='Permission mode of directories and files synced') - #-- return the parser + # return the parser return parser -#-- This is the main part of the program that calls the individual functions +# This is the main part of the program that calls the individual functions def main(): - #-- Read the system arguments listed after the program + # Read the system arguments listed after the program parser = arguments() args,_ = parser.parse_known_args() - #-- check internet connection before attempting to run program + # check internet connection before attempting to run program HOST = 'http://gravitegrace.get.obs-mip.fr' if gravity_toolkit.utilities.check_connection(HOST): cnes_grace_sync(args.directory, DREL=args.release, TIMEOUT=args.timeout, LOG=args.log, CLOBBER=args.clobber,
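# Illustrative sketch: the seek(4)/unpack above reads the MTIME field of the
# gzip header (bytes 4-7, little-endian seconds since the epoch), the same
# field that gzip.GzipFile writes from the tar member modification time.
import gzip, io, struct
buf = io.BytesIO()
with gzip.GzipFile(fileobj=buf, mode='wb', mtime=1038700800) as f_out:
    f_out.write(b'payload')
buf.seek(4)
mtime, = struct.unpack("<I", buf.read(4))
assert mtime == 1038700800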
MODE=args.mode) -#-- run main program +# run main program if __name__ == '__main__': main() diff --git a/scripts/combine_harmonics.py b/scripts/combine_harmonics.py index 68753bcc..33dfa176 100644 --- a/scripts/combine_harmonics.py +++ b/scripts/combine_harmonics.py @@ -112,7 +112,7 @@ from gravity_toolkit.spatial import spatial from gravity_toolkit.units import units -#-- PURPOSE: keep track of threads +# PURPOSE: keep track of threads def info(args): logging.info(os.path.basename(sys.argv[0])) logging.info(args) @@ -121,7 +121,7 @@ def info(args): logging.info(f'parent process: {os.getppid():d}') logging.info(f'process id: {os.getpid():d}') -#-- PURPOSE: converts from the spherical harmonic domain into the spatial domain +# PURPOSE: converts from the spherical harmonic domain into the spatial domain def combine_harmonics(INPUT_FILE, OUTPUT_FILE, LMAX=None, MMAX=None, @@ -139,164 +139,164 @@ def combine_harmonics(INPUT_FILE, OUTPUT_FILE, DATAFORM=None, MODE=0o775): - #-- verify that output directory exists + # verify that output directory exists DIRECTORY = os.path.abspath(os.path.dirname(OUTPUT_FILE)) if not os.access(DIRECTORY, os.F_OK): os.makedirs(DIRECTORY,MODE,exist_ok=True) - #-- upper bound of spherical harmonic orders (default = LMAX) + # upper bound of spherical harmonic orders (default = LMAX) if MMAX is None: MMAX = np.copy(LMAX) - #-- read input spherical harmonic coefficients from file in DATAFORM + # read input spherical harmonic coefficients from file in DATAFORM if (DATAFORM == 'ascii'): input_Ylms = harmonics().from_ascii(INPUT_FILE) elif (DATAFORM == 'netCDF4'): - #-- read input netCDF4 file (.nc) + # read input netCDF4 file (.nc) input_Ylms = harmonics().from_netCDF4(INPUT_FILE) elif (DATAFORM == 'HDF5'): - #-- read input HDF5 file (.H5) + # read input HDF5 file (.H5) input_Ylms = harmonics().from_HDF5(INPUT_FILE) - #-- reform harmonic dimensions to be l,m,t - #-- truncate to degree and order LMAX, MMAX + # reform harmonic dimensions to be l,m,t + # truncate to degree and order LMAX, MMAX input_Ylms = input_Ylms.truncate(lmax=LMAX, mmax=MMAX).expand_dims() - #-- remove mean file from input Ylms + # remove mean file from input Ylms if MEAN_FILE and (DATAFORM == 'ascii'): mean_Ylms = harmonics().from_ascii(MEAN_FILE,date=False) input_Ylms.subtract(mean_Ylms) elif MEAN_FILE and (DATAFORM == 'netCDF4'): - #-- read input netCDF4 file (.nc) + # read input netCDF4 file (.nc) mean_Ylms = harmonics().from_netCDF4(MEAN_FILE,date=False) input_Ylms.subtract(mean_Ylms) elif MEAN_FILE and (DATAFORM == 'HDF5'): - #-- read input HDF5 file (.H5) + # read input HDF5 file (.H5) mean_Ylms = harmonics().from_HDF5(MEAN_FILE,date=False) input_Ylms.subtract(mean_Ylms) - #-- read arrays of kl, hl, and ll Love Numbers + # read arrays of kl, hl, and ll Love Numbers hl,kl,ll = load_love_numbers(LMAX, LOVE_NUMBERS=LOVE_NUMBERS, REFERENCE=REFERENCE) - #-- distribute total mass uniformly over the ocean + # distribute total mass uniformly over the ocean if REDISTRIBUTE: - #-- read Land-Sea Mask and convert to spherical harmonics + # read Land-Sea Mask and convert to spherical harmonics ocean_Ylms = ocean_stokes(LANDMASK, LMAX, MMAX=MMAX, LOVE=(hl,kl,ll)) - #-- calculate ratio between total mass and a uniformly distributed - #-- layer of water over the ocean + # calculate ratio between total mass and a uniformly distributed + # layer of water over the ocean ratio = input_Ylms.clm[0,0,:]/ocean_Ylms.clm[0,0] - #-- for each spherical harmonic - for m in range(0,MMAX+1):#-- MMAX+1 to include MMAX - for l 
in range(m,LMAX+1):#-- LMAX+1 to include LMAX - #-- remove the ratio*ocean Ylms from Ylms - #-- note: x -= y is equivalent to x = x - y + # for each spherical harmonic + for m in range(0,MMAX+1):# MMAX+1 to include MMAX + for l in range(m,LMAX+1):# LMAX+1 to include LMAX + # remove the ratio*ocean Ylms from Ylms + # note: x -= y is equivalent to x = x - y input_Ylms.clm[l,m,:] -= ratio*ocean_Ylms.clm[l,m] input_Ylms.slm[l,m,:] -= ratio*ocean_Ylms.slm[l,m] - #-- if using a decorrelation filter (Isabella's destriping Routine) + # if using a decorrelation filter (Isabella's destriping Routine) if DESTRIPE: input_Ylms = input_Ylms.destripe() - #-- Gaussian smoothing + # Gaussian smoothing if (RAD != 0): wt = 2.0*np.pi*gauss_weights(RAD,LMAX) else: wt = np.ones((LMAX+1)) - #-- Output spatial data + # Output spatial data grid = spatial() grid.time = np.copy(input_Ylms.time) grid.month = np.copy(input_Ylms.month) nt = len(input_Ylms.time) - #-- Output Degree Spacing + # Output Degree Spacing dlon,dlat = (DDEG[0],DDEG[0]) if (len(DDEG) == 1) else (DDEG[0],DDEG[1]) - #-- Output Degree Interval + # Output Degree Interval if (INTERVAL == 1): - #-- (0:360,90:-90) + # (0:360,90:-90) nlon = np.int64((360.0/dlon)+1.0) nlat = np.int64((180.0/dlat)+1.0) grid.lon = dlon*np.arange(0,nlon) grid.lat = 90.0 - dlat*np.arange(0,nlat) elif (INTERVAL == 2): - #-- (Degree spacing)/2 + # (Degree spacing)/2 grid.lon = np.arange(dlon/2.0,360+dlon/2.0,dlon) grid.lat = np.arange(90.0-dlat/2.0,-90.0-dlat/2.0,-dlat) nlon = len(grid.lon) nlat = len(grid.lat) elif (INTERVAL == 3): - #-- non-global grid set with BOUNDS parameter + # non-global grid set with BOUNDS parameter minlon,maxlon,minlat,maxlat = BOUNDS.copy() grid.lon = np.arange(minlon+dlon/2.0,maxlon+dlon/2.0,dlon) grid.lat = np.arange(maxlat-dlat/2.0,minlat-dlat/2.0,-dlat) nlon = len(grid.lon) nlat = len(grid.lat) - #-- output spatial grid + # output spatial grid grid.data = np.zeros((nlat,nlon,nt)) grid.mask = np.zeros((nlat,nlon,nt), dtype=bool) - #-- update attributes + # update attributes grid.update_spacing() grid.update_extents() grid.update_dimensions() - #-- Setting units factor for output - #-- dfactor computes the degree dependent coefficients + # Setting units factor for output + # dfactor computes the degree dependent coefficients if (UNITS == 1): - #-- 1: cmwe, centimeters water equivalent + # 1: cmwe, centimeters water equivalent dfactor = units(lmax=LMAX).harmonic(hl,kl,ll).cmwe elif (UNITS == 2): - #-- 2: mmGH, millimeters geoid height + # 2: mmGH, millimeters geoid height dfactor = units(lmax=LMAX).harmonic(hl,kl,ll).mmGH elif (UNITS == 3): - #-- 3: mmCU, millimeters elastic crustal deformation + # 3: mmCU, millimeters elastic crustal deformation dfactor = units(lmax=LMAX).harmonic(hl,kl,ll).mmCU elif (UNITS == 4): - #-- 4: micGal, microGal gravity perturbations + # 4: micGal, microGal gravity perturbations dfactor = units(lmax=LMAX).harmonic(hl,kl,ll).microGal elif (UNITS == 5): - #-- 5: mbar, millibars equivalent surface pressure + # 5: mbar, millibars equivalent surface pressure dfactor = units(lmax=LMAX).harmonic(hl,kl,ll).mbar else: raise ValueError(f'Invalid units code {UNITS:d}') - #-- Computing plms for converting to spatial domain + # Computing plms for converting to spatial domain theta = (90.0-grid.lat)*np.pi/180.0 PLM, dPLM = plm_holmes(LMAX, np.cos(theta)) - #-- converting harmonics to truncated, smoothed coefficients in output units + # converting harmonics to truncated, smoothed coefficients in output units for t in range(nt): - #-- 
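# Illustrative sketch (attribute names taken from the calls above): the UNITS
# if/elif chain maps codes to attributes of the units factors object, so a
# dictionary plus getattr expresses the same selection; Factors is a stand-in
# for units(lmax=LMAX).harmonic(hl, kl, ll).
attrs = {1: 'cmwe', 2: 'mmGH', 3: 'mmCU', 4: 'microGal', 5: 'mbar'}
class Factors:
    cmwe, mmGH, mmCU, microGal, mbar = 1.0, 2.0, 3.0, 4.0, 5.0
UNITS = 2
dfactor = getattr(Factors, attrs[UNITS])
assert dfactor == 2.0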
spherical harmonics for time t + # spherical harmonics for time t Ylms = input_Ylms.index(t) Ylms.convolve(dfactor*wt) - #-- convert spherical harmonics to output spatial grid + # convert spherical harmonics to output spatial grid grid.data[:,:,t] = harmonic_summation(Ylms.clm, Ylms.slm, grid.lon, grid.lat, LMAX=LMAX, PLM=PLM).T - #-- outputting data to file + # outputting data to file output_data(grid.squeeze(), FILENAME=OUTPUT_FILE, DATAFORM=DATAFORM, UNITS=UNITS) - #-- change output permissions level to MODE + # change output permissions level to MODE os.chmod(OUTPUT_FILE,MODE) -#-- PURPOSE: wrapper function for outputting data to file +# PURPOSE: wrapper function for outputting data to file def output_data(data, FILENAME=None, DATAFORM=None, UNITS=None): - #-- output units and units longname + # output units and units longname unit_short = ['cmwe', 'mmGH', 'mmCU', 'microGal', 'mbar'] unit_name = ['Equivalent Water Thickness', 'Geoid Height', 'Elastic Crustal Uplift', 'Gravitational Undulation', 'Equivalent Surface Pressure'] if (DATAFORM == 'ascii'): - #-- ascii (.txt) + # ascii (.txt) data.to_ascii(FILENAME) elif (DATAFORM == 'netCDF4'): - #-- netcdf (.nc) + # netcdf (.nc) data.to_netCDF4(FILENAME, units=unit_short[UNITS-1], longname=unit_name[UNITS-1]) elif (DATAFORM == 'HDF5'): - #-- HDF5 (.H5) + # HDF5 (.H5) data.to_HDF5(FILENAME, units=unit_short[UNITS-1], longname=unit_name[UNITS-1]) -#-- PURPOSE: create argument parser +# PURPOSE: create argument parser def arguments(): parser = argparse.ArgumentParser( description="""Converts a file from the spherical harmonic @@ -305,46 +305,46 @@ def arguments(): fromfile_prefix_chars="@" ) parser.convert_arg_line_to_args = utilities.convert_arg_line_to_args - #-- command line parameters - #-- input and output file + # command line parameters + # input and output file parser.add_argument('infile', type=lambda p: os.path.abspath(os.path.expanduser(p)), nargs='?', help='Input harmonic file') parser.add_argument('outfile', type=lambda p: os.path.abspath(os.path.expanduser(p)), nargs='?', help='Output spatial file') - #-- maximum spherical harmonic degree and order + # maximum spherical harmonic degree and order parser.add_argument('--lmax','-l', type=int, default=60, help='Maximum spherical harmonic degree') parser.add_argument('--mmax','-m', type=int, default=None, help='Maximum spherical harmonic order') - #-- different treatments of the load Love numbers - #-- 0: Han and Wahr (1995) values from PREM - #-- 1: Gegout (2005) values from PREM - #-- 2: Wang et al. (2012) values from PREM + # different treatments of the load Love numbers + # 0: Han and Wahr (1995) values from PREM + # 1: Gegout (2005) values from PREM + # 2: Wang et al. 
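# Illustrative sketch: the --units codes are 1-based, so output_data above
# indexes its unit_short/unit_name lists with UNITS-1.
unit_short = ['cmwe', 'mmGH', 'mmCU', 'microGal', 'mbar']
UNITS = 4
assert unit_short[UNITS-1] == 'microGal'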
(2012) values from PREM parser.add_argument('--love','-n', type=int, default=0, choices=[0,1,2], help='Treatment of the Load Love numbers') - #-- option for setting reference frame for gravitational load love number - #-- reference frame options (CF, CM, CE) + # option for setting reference frame for gravitational load love number + # reference frame options (CF, CM, CE) parser.add_argument('--reference', type=str.upper, default='CF', choices=['CF','CM','CE'], help='Reference frame for load Love numbers') - #-- Gaussian smoothing radius (km) + # Gaussian smoothing radius (km) parser.add_argument('--radius','-R', type=float, default=0, help='Gaussian smoothing radius (km)') - #-- Use a decorrelation (destriping) filter + # Use a decorrelation (destriping) filter parser.add_argument('--destripe','-d', default=False, action='store_true', help='Use a decorrelation (destriping) filter') - #-- output units + # output units parser.add_argument('--units','-U', type=int, default=1, choices=[1,2,3,4,5], help='Output units') - #-- output grid parameters + # output grid parameters parser.add_argument('--spacing','-S', type=float, nargs='+', default=[0.5,0.5], metavar=('dlon','dlat'), help='Spatial resolution of output data') @@ -355,45 +355,45 @@ def arguments(): parser.add_argument('--bounds','-B', type=float, nargs=4, metavar=('lon_min','lon_max','lat_min','lat_max'), help='Bounding box for non-global grid') - #-- redistribute total mass over the ocean + # redistribute total mass over the ocean parser.add_argument('--redistribute-mass', default=False, action='store_true', help='Redistribute total mass over the ocean') - #-- land-sea mask for redistributing over the ocean + # land-sea mask for redistributing over the ocean lsmask = utilities.get_data_path(['data','landsea_hd.nc']) parser.add_argument('--mask', type=lambda p: os.path.abspath(os.path.expanduser(p)), default=lsmask, help='Land-sea mask for redistributing over the ocean') - #-- mean file to remove + # mean file to remove parser.add_argument('--mean', type=lambda p: os.path.abspath(os.path.expanduser(p)), help='Mean file to remove from the harmonic data') - #-- input and output data format (ascii, netCDF4, HDF5) + # input and output data format (ascii, netCDF4, HDF5) parser.add_argument('--format','-F', type=str, default='netCDF4', choices=['ascii','netCDF4','HDF5'], help='Input and output data format') - #-- print information about each input and output file + # print information about each input and output file parser.add_argument('--verbose','-V', action='count', default=0, help='Verbose output of run') - #-- permissions mode of the output files (octal) + # permissions mode of the output files (octal) parser.add_argument('--mode','-M', type=lambda x: int(x,base=8), default=0o775, help='Permissions mode of output files') - #-- return the parser + # return the parser return parser -#-- This is the main part of the program that calls the individual functions +# This is the main part of the program that calls the individual functions def main(): - #-- Read the system arguments listed after the program + # Read the system arguments listed after the program parser = arguments() args,_ = parser.parse_known_args() - #-- create logger + # create logger loglevels = [logging.CRITICAL,logging.INFO,logging.DEBUG] logging.basicConfig(level=loglevels[args.verbose]) - #-- run program with parameters + # run program with parameters try: info(args) combine_harmonics(args.infile, args.outfile, @@ -413,12 +413,12 @@ def main(): DATAFORM=args.format, MODE=args.mode) except
Exception as e: - #-- if there has been an error exception - #-- print the type, value, and stack trace of the - #-- current exception being handled + # if there has been an error exception + # print the type, value, and stack trace of the + # current exception being handled logging.critical(f'process id {os.getpid():d} failed') logging.error(traceback.format_exc()) -#-- run main program +# run main program if __name__ == '__main__': main() diff --git a/scripts/convert_harmonics.py b/scripts/convert_harmonics.py index 736cba28..f63d0e0d 100644 --- a/scripts/convert_harmonics.py +++ b/scripts/convert_harmonics.py @@ -94,7 +94,7 @@ from gravity_toolkit.spatial import spatial from gravity_toolkit.time import calendar_to_grace -#-- PURPOSE: keep track of threads +# PURPOSE: keep track of threads def info(args): logging.info(os.path.basename(sys.argv[0])) logging.info(args) @@ -103,7 +103,7 @@ def info(args): logging.info(f'parent process: {os.getppid():d}') logging.info(f'process id: {os.getpid():d}') -#-- PURPOSE: converts from the spatial domain into the spherical harmonic domain +# PURPOSE: converts from the spatial domain into the spherical harmonic domain def convert_harmonics(INPUT_FILE, OUTPUT_FILE, LMAX=None, MMAX=None, @@ -117,80 +117,80 @@ def convert_harmonics(INPUT_FILE, OUTPUT_FILE, DATAFORM=None, MODE=0o775): - #-- verify that output directory exists + # verify that output directory exists DIRECTORY = os.path.abspath(os.path.dirname(OUTPUT_FILE)) if not os.access(DIRECTORY, os.F_OK): os.makedirs(DIRECTORY,MODE,exist_ok=True) - #-- Grid spacing + # Grid spacing dlon,dlat = (DDEG,DDEG) if (np.ndim(DDEG) == 0) else (DDEG[0],DDEG[1]) - #-- Grid dimensions - if (INTERVAL == 1):#-- (0:360, 90:-90) + # Grid dimensions + if (INTERVAL == 1):# (0:360, 90:-90) nlon = np.int64((360.0/dlon)+1.0) nlat = np.int64((180.0/dlat)+1.0) - elif (INTERVAL == 2):#-- degree spacing/2 + elif (INTERVAL == 2):# degree spacing/2 nlon = np.int64((360.0/dlon)) nlat = np.int64((180.0/dlat)) - #-- read spatial file in data format - #-- expand dimensions + # read spatial file in data format + # expand dimensions if (DATAFORM == 'ascii'): - #-- ascii (.txt) + # ascii (.txt) input_spatial = spatial(spacing=[dlon,dlat],nlat=nlat, nlon=nlon,fill_value=FILL_VALUE).from_ascii(INPUT_FILE, header=HEADER).expand_dims() elif (DATAFORM == 'netCDF4'): - #-- netcdf (.nc) + # netcdf (.nc) input_spatial = spatial().from_netCDF4(INPUT_FILE).expand_dims() elif (DATAFORM == 'HDF5'): - #-- HDF5 (.H5) + # HDF5 (.H5) input_spatial = spatial().from_HDF5(INPUT_FILE).expand_dims() - #-- convert missing values to zero + # convert missing values to zero input_spatial.replace_invalid(0.0) - #-- input data shape + # input data shape nlat,nlon,nt = input_spatial.shape - #-- read arrays of kl, hl, and ll Love Numbers + # read arrays of kl, hl, and ll Love Numbers LOVE = load_love_numbers(LMAX, LOVE_NUMBERS=LOVE_NUMBERS, REFERENCE=REFERENCE) - #-- upper bound of spherical harmonic orders (default = LMAX) + # upper bound of spherical harmonic orders (default = LMAX) if MMAX is None: MMAX = np.copy(LMAX) - #-- calculate associated Legendre polynomials + # calculate associated Legendre polynomials th = (90.0 - input_spatial.lat)*np.pi/180.0 PLM, dPLM = plm_holmes(LMAX, np.cos(th)) - #-- create list of harmonics objects + # create list of harmonics objects Ylms_list = [] for i,t in enumerate(input_spatial.time): - #-- convert spatial field to spherical harmonics + # convert spatial field to spherical harmonics output_Ylms = 
gen_stokes(input_spatial.data[:,:,i].T, input_spatial.lon, input_spatial.lat, UNITS=UNITS, LMIN=0, LMAX=LMAX, MMAX=MMAX, PLM=PLM, LOVE=LOVE) output_Ylms.time = np.copy(t) output_Ylms.month = calendar_to_grace(t) - #-- append to list + # append to list Ylms_list.append(output_Ylms) - #-- convert Ylms list for output spherical harmonics + # convert Ylms list for output spherical harmonics Ylms = harmonics().from_list(Ylms_list) Ylms_list = None - #-- outputting data to file + # outputting data to file if (DATAFORM == 'ascii'): - #-- ascii (.txt) + # ascii (.txt) Ylms.to_ascii(OUTPUT_FILE) elif (DATAFORM == 'netCDF4'): - #-- netCDF4 (.nc) + # netCDF4 (.nc) Ylms.to_netCDF4(OUTPUT_FILE) elif (DATAFORM == 'HDF5'): - #-- HDF5 (.H5) + # HDF5 (.H5) Ylms.to_HDF5(OUTPUT_FILE) - #-- change output permissions level to MODE + # change output permissions level to MODE os.chmod(OUTPUT_FILE,MODE) -#-- PURPOSE: create argument parser +# PURPOSE: create argument parser def arguments(): parser = argparse.ArgumentParser( description="""Converts a file from the spatial domain into the @@ -199,77 +199,77 @@ def arguments(): fromfile_prefix_chars="@" ) parser.convert_arg_line_to_args = utilities.convert_arg_line_to_args - #-- command line parameters - #-- input and output file + # command line parameters + # input and output file parser.add_argument('infile', type=lambda p: os.path.abspath(os.path.expanduser(p)), nargs='?', help='Input spatial file') parser.add_argument('outfile', type=lambda p: os.path.abspath(os.path.expanduser(p)), nargs='?', help='Output harmonic file') - #-- maximum spherical harmonic degree and order + # maximum spherical harmonic degree and order parser.add_argument('--lmax','-l', type=int, default=60, help='Maximum spherical harmonic degree') parser.add_argument('--mmax','-m', type=int, default=None, help='Maximum spherical harmonic order') - #-- different treatments of the load Love numbers - #-- 0: Han and Wahr (1995) values from PREM - #-- 1: Gegout (2005) values from PREM - #-- 2: Wang et al. (2012) values from PREM + # different treatments of the load Love numbers + # 0: Han and Wahr (1995) values from PREM + # 1: Gegout (2005) values from PREM + # 2: Wang et al. 
(2012) values from PREM parser.add_argument('--love','-n', type=int, default=0, choices=[0,1,2], help='Treatment of the Load Love numbers') - #-- option for setting reference frame for gravitational load love number - #-- reference frame options (CF, CM, CE) + # option for setting reference frame for gravitational load love number + # reference frame options (CF, CM, CE) parser.add_argument('--reference', type=str.upper, default='CF', choices=['CF','CM','CE'], help='Reference frame for load Love numbers') - #-- output units + # output units parser.add_argument('--units','-U', type=int, default=1, choices=[1,2,3], help='Output units') - #-- output grid parameters + # output grid parameters parser.add_argument('--spacing','-S', type=float, nargs='+', default=[0.5,0.5], metavar=('dlon','dlat'), help='Spatial resolution of output data') parser.add_argument('--interval','-I', type=int, default=2, choices=[1,2], help='Input grid interval (1: global, 2: centered global)') - #-- fill value for ascii + # fill value for ascii parser.add_argument('--fill-value','-f', type=float, help='Set fill_value for input spatial fields') - #-- ascii parameters + # ascii parameters parser.add_argument('--header', type=int, help='Number of header rows to skip in input ascii files') - #-- input and output data format (ascii, netCDF4, HDF5) + # input and output data format (ascii, netCDF4, HDF5) parser.add_argument('--format','-F', type=str, default='netCDF4', choices=['ascii','netCDF4','HDF5'], help='Input and output data format') - #-- print information about each input and output file + # print information about each input and output file parser.add_argument('--verbose','-V', action='count', default=0, help='Verbose output of run') - #-- permissions mode of the output files (octal) + # permissions mode of the output files (octal) parser.add_argument('--mode','-M', type=lambda x: int(x,base=8), default=0o775, help='Permissions mode of output files') - #-- return the parser + # return the parser return parser -#-- This is the main part of the program that calls the individual functions +# This is the main part of the program that calls the individual functions def main(): parser = arguments() args,_ = parser.parse_known_args() - #-- create logger + # create logger loglevels = [logging.CRITICAL,logging.INFO,logging.DEBUG] logging.basicConfig(level=loglevels[args.verbose]) - #-- run program with parameters + # run program with parameters try: info(args) convert_harmonics(args.infile, args.outfile, @@ -285,12 +285,12 @@ def main(): DATAFORM=args.format, MODE=args.mode) except Exception as e: - #-- if there has been an error exception - #-- print the type, value, and stack trace of the - #-- current exception being handled + # if there has been an error exception + # print the type, value, and stack trace of the + # current exception being handled logging.critical(f'process id {os.getpid():d} failed') logging.error(traceback.format_exc()) -#-- run main program +# run main program if __name__ == '__main__': main() diff --git a/scripts/dealiasing_monthly_mean.py b/scripts/dealiasing_monthly_mean.py index a5985da5..0c9c0db3 100755 --- a/scripts/dealiasing_monthly_mean.py +++ b/scripts/dealiasing_monthly_mean.py @@ -83,41 +83,41 @@ import gravity_toolkit.utilities as utilities from gravity_toolkit.harmonics import harmonics -#-- PURPOSE: calculate the Julian day from the year and the day of the year -#-- http://scienceworld.wolfram.com/astronomy/JulianDate.html +# PURPOSE: calculate the Julian day from the year and the day 
of the year +# http://scienceworld.wolfram.com/astronomy/JulianDate.html def calc_julian_day(YEAR, DAY_OF_YEAR): JD = 367.0*YEAR - np.floor(7.0*(YEAR + np.floor(10.0/12.0))/4.0) - \ np.floor(3.0*(np.floor((YEAR + 8.0/7.0)/100.0) + 1.0)/4.0) + \ np.floor(275.0/9.0) + np.float64(DAY_OF_YEAR) + 1721028.5 return JD -#-- PURPOSE: reads the AOD1B data and outputs a monthly mean +# PURPOSE: reads the AOD1B data and outputs a monthly mean def dealiasing_monthly_mean(base_dir, PROC=None, DREL=None, DSET=None, LMAX=None, DATAFORM=None, CLOBBER=False, MODE=0o775): - #-- output data suffix + # output data suffix suffix = dict(ascii='txt', netCDF4='nc', HDF5='H5') - #-- aod1b data products + # aod1b data products aod1b_products = dict(GAA='atm',GAB='ocn',GAC='glo',GAD='oba') - #-- compile regular expressions operator for the clm/slm headers - #-- for the specific AOD1b product + # compile regular expressions operator for the clm/slm headers + # for the specific AOD1b product hx = re.compile(r'^DATA.*SET.*{0}'.format(aod1b_products[DSET]),re.VERBOSE) - #-- compile regular expression operator to find numerical instances - #-- will extract the data from the file + # compile regular expression operator to find numerical instances + # will extract the data from the file regex_pattern = r'[-+]?(?:(?:\d*\.\d+)|(?:\d+\.?))(?:[Ee][+-]?\d+)?' rx = re.compile(regex_pattern, re.VERBOSE) - #-- set number of hours in a file - #-- set the ocean model for a given release + # set number of hours in a file + # set the ocean model for a given release if DREL in ('RL01','RL02','RL03','RL04','RL05'): - #-- for 00, 06, 12 and 18 + # for 00, 06, 12 and 18 n_time = 4 ATMOSPHERE = 'ECMWF' OCEAN_MODEL = 'OMCT' default_center = 'EIGEN' default_lmax = 100 elif DREL in ('RL06',): - #-- for 00, 03, 06, 09, 12, 15, 18 and 21 + # for 00, 03, 06, 09, 12, 15, 18 and 21 n_time = 8 ATMOSPHERE = 'ECMWF' OCEAN_MODEL = 'MPIOM' @@ -125,28 +125,28 @@ def dealiasing_monthly_mean(base_dir, PROC=None, DREL=None, DSET=None, default_lmax = 180 else: raise ValueError('Invalid data release') - #-- Maximum spherical harmonic degree (LMAX) + # Maximum spherical harmonic degree (LMAX) LMAX = default_lmax if not LMAX else LMAX - #-- Calculating the number of cos and sin harmonics up to d/o of file + # Calculating the number of cos and sin harmonics up to d/o of file n_harm = (default_lmax**2 + 3*default_lmax)//2 + 1 - #-- AOD1B data products + # AOD1B data products product = {} product['atm'] = 'Atmospheric loading from {0}'.format(ATMOSPHERE) product['ocn'] = 'Oceanic loading from {0}'.format(OCEAN_MODEL) product['glo'] = 'Global atmospheric and oceanic loading' product['oba'] = 'Ocean bottom pressure from {0}'.format(OCEAN_MODEL) - #-- GRACE AOD1B directory for data release + # GRACE AOD1B directory for data release aod1b_dir = os.path.join(base_dir,'AOD1B',DREL) - #-- GRACE data directory for data release and processing center + # GRACE data directory for data release and processing center grace_dir = os.path.join(base_dir,PROC,DREL) - #-- recursively create output directory if not currently existing + # recursively create output directory if not currently existing if not os.access(os.path.join(grace_dir,DSET),os.F_OK): os.makedirs(os.path.join(grace_dir,DSET), MODE) - #-- file formatting string if outputting to SHM format + # file formatting string if outputting to SHM format shm = '{0}-2_{1:4.0f}{2:03.0f}-{3:4.0f}{4:03.0f}_{5}_{6}_{7}_{8}00.gz' - #-- center name if outputting to SHM format + # center name if outputting to SHM format if (PROC == 
'CSR'): CENTER = 'UTCSR' elif (PROC == 'GFZ'): @@ -156,7 +156,7 @@ def dealiasing_monthly_mean(base_dir, PROC=None, DREL=None, DSET=None, else: CENTER = default_center - #-- read input DATE file from GSM data product + # read input DATE file from GSM data product grace_datefile = '{0}_{1}_DATES.txt'.format(PROC, DREL) date_input = np.loadtxt(os.path.join(grace_dir,'GSM',grace_datefile), skiprows=1) @@ -165,31 +165,31 @@ def dealiasing_monthly_mean(base_dir, PROC=None, DREL=None, DSET=None, start_day = date_input[:,3].astype(np.int64) end_yr = date_input[:,4] end_day = date_input[:,5].astype(np.int64) - #-- output date file reduced to months with complete AOD + # output date file reduced to months with complete AOD f_out = open(os.path.join(grace_dir,DSET,grace_datefile), 'w') - #-- date file header information + # date file header information args = ('Mid-date','Month','Start_Day','End_Day','Total_Days') print('{0} {1:>10} {2:>11} {3:>10} {4:>13}'.format(*args),file=f_out) - #-- for each GRACE/GRACE-FO month + # for each GRACE/GRACE-FO month for t,gm in enumerate(grace_month): - #-- check if GRACE/GRACE-FO month crosses years + # check if GRACE/GRACE-FO month crosses years if (start_yr[t] != end_yr[t]): - #-- check if start_yr is a Leap Year or Standard Year + # check if start_yr is a Leap Year or Standard Year dpy = gravity_toolkit.time.calendar_days(start_yr[t]).sum() - #-- list of Julian Days to read from both start and end year + # list of Julian Days to read from both start and end year julian_days_to_read = [] - #-- add days to read from start and end years + # add days to read from start and end years julian_days_to_read.extend([calc_julian_day(start_yr[t],D) for D in range(start_day[t],dpy+1)]) julian_days_to_read.extend([calc_julian_day(end_yr[t],D) for D in range(1,end_day[t]+1)]) else: - #-- Julian Days to read going from start_day to end_day + # Julian Days to read going from start_day to end_day julian_days_to_read = [calc_julian_day(start_yr[t],D) for D in range(start_day[t],end_day[t]+1)] - #-- output filename for GRACE/GRACE-FO month + # output filename for GRACE/GRACE-FO month if (DATAFORM == 'SHM'): MISSION = 'GRAC' if (gm <= 186) else 'GRFO' FILE = shm.format(DSET.upper(),start_yr[t],start_day[t], @@ -198,46 +198,46 @@ def dealiasing_monthly_mean(base_dir, PROC=None, DREL=None, DSET=None, args = (PROC,DREL,DSET.upper(),LMAX,gm,suffix[DATAFORM]) FILE = '{0}_{1}_{2}_CLM_L{3:d}_{4:03d}.{5}'.format(*args) - #-- calendar dates to read + # calendar dates to read JD = np.array(julian_days_to_read) Y,M,D,h,m,s = gravity_toolkit.time.convert_julian(JD, astype='i', format='tuple') - #-- find unique year and month pairs to read + # find unique year and month pairs to read rx1='|'.join(['{0:d}-{1:02d}'.format(*p) for p in set(zip(Y,M))]) rx2='|'.join(['{0:0d}-{1:02d}-{2:02d}'.format(*p) for p in set(zip(Y,M,D))]) - #-- compile regular expressions operators for finding tar files + # compile regular expressions operators for finding tar files tx = re.compile(r'AOD1B_({0})_\d+.(tar.gz|tgz)$'.format(rx1),re.VERBOSE) - #-- finding all of the tar files in the AOD1b directory + # finding all of the tar files in the AOD1b directory input_tar_files = [tf for tf in os.listdir(aod1b_dir) if tx.match(tf)] - #-- compile regular expressions operators for file dates - #-- will extract year and month and calendar day from the ascii file + # compile regular expressions operators for file dates + # will extract year and month and calendar day from the ascii file fx = 
re.compile(r'AOD1B_({0})_X_\d+.asc(.gz)?$'.format(rx2),re.VERBOSE) - #-- check the last modified times of the tar file members + # check the last modified times of the tar file members input_mtime = np.zeros_like(julian_days_to_read,dtype=np.int64) input_file_check = np.zeros_like(julian_days_to_read,dtype=bool) c = 0 - #-- for each tar file + # for each tar file for fi in sorted(input_tar_files): - #-- open the AOD1B monthly tar file + # open the AOD1B monthly tar file tar = tarfile.open(name=os.path.join(aod1b_dir,fi), mode='r:gz') - #-- for each ascii file within the tar file that matches fx + # for each ascii file within the tar file that matches fx monthly_members = [m for m in tar.getmembers() if fx.match(m.name)] for member in monthly_members: - #-- check last modification time of input tar file members + # check last modification time of input tar file members input_mtime[c] = member.mtime input_file_check[c] = True c += 1 - #-- check if all files exist + # check if all files exist COMPLETE = input_file_check.all() - #-- if output file exists: check if input tar file is newer + # if output file exists: check if input tar file is newer TEST = False OVERWRITE = 'clobber' if os.access(os.path.join(grace_dir,DSET,FILE), os.F_OK): - #-- check last modification time of input and output files + # check last modification time of input and output files output_mtime = os.stat(os.path.join(grace_dir,DSET,FILE)).st_mtime - #-- if input tar file is newer: overwrite the output file + # if input tar file is newer: overwrite the output file if (input_mtime > output_mtime).any(): TEST = True OVERWRITE = 'overwrite' @@ -245,86 +245,86 @@ def dealiasing_monthly_mean(base_dir, PROC=None, DREL=None, DSET=None, TEST = True OVERWRITE = 'new' - #-- print GRACE/GRACE-FO dates if there is a complete month of AOD + # print GRACE/GRACE-FO dates if there is a complete month of AOD if COMPLETE: - #-- print GRACE/GRACE-FO dates to file + # print GRACE/GRACE-FO dates to file print(('{0:13.8f} {1:03d} {2:8.0f} {3:03d} {4:8.0f} {5:03d} ' '{6:8.0f}').format(date_input[t,0],gm,start_yr[t],start_day[t], end_yr[t],end_day[t],date_input[t,6]),file=f_out) - #-- if there are new files, files to be rewritten or clobbered + # if there are new files, files to be rewritten or clobbered if COMPLETE and (TEST or CLOBBER): - #-- if verbose: output information about the output file + # if verbose: output information about the output file logging.info('{0} ({1})'.format(FILE,OVERWRITE)) - #-- allocate for the mean output harmonics + # allocate for the mean output harmonics Ylms = harmonics(lmax=LMAX, mmax=LMAX) nt = len(julian_days_to_read)*n_time Ylms.clm = np.zeros((LMAX+1,LMAX+1,nt)) Ylms.slm = np.zeros((LMAX+1,LMAX+1,nt)) Ylms.time = np.zeros((nt)) count = 0 - #-- for each tar file + # for each tar file for fi in sorted(input_tar_files): - #-- open the AOD1B monthly tar file + # open the AOD1B monthly tar file tar = tarfile.open(name=os.path.join(aod1b_dir,fi), mode='r:gz') - #-- for each ascii file within the tar file that matches fx + # for each ascii file within the tar file that matches fx monthly_members=[m for m in tar.getmembers() if fx.match(m.name)] for member in monthly_members: - #-- extract member name + # extract member name YMD,SFX = fx.findall(member.name).pop() - #-- open datafile for day + # open datafile for day if (SFX == '.gz'): fid = gzip.GzipFile(fileobj=tar.extractfile(member)) else: fid = tar.extractfile(member) - #-- create counters for hour in dataset + # create counters for hour in dataset hours = 
np.zeros((n_time)) c = 0 - #-- while loop ends when dataset is read + # while loop ends when dataset is read while (c < n_time): - #-- read line + # read line file_contents=fid.readline().decode('ISO-8859-1') - #-- find file header for data product + # find file header for data product if bool(hx.search(file_contents)): - #-- extract hour from header and convert to float + # extract hour from header and convert to float HH, = re.findall(r'(\d+):\d+:\d+',file_contents) hours[c] = np.int64(HH) - #-- read each line of spherical harmonics + # read each line of spherical harmonics for k in range(0,n_harm): file_contents=fid.readline().decode('ISO-8859-1') - #-- find numerical instances in the data line + # find numerical instances in the data line line_contents = rx.findall(file_contents) - #-- spherical harmonic degree and order + # spherical harmonic degree and order l1 = np.int64(line_contents[0]) m1 = np.int64(line_contents[1]) - #-- spherical harmonic data saved to output Ylms + # spherical harmonic data saved to output Ylms if (l1 <= LMAX) & (m1 <= LMAX): Ylms.clm[l1,m1,c]+=np.float64(line_contents[2]) Ylms.slm[l1,m1,c]+=np.float64(line_contents[3]) - #-- add 1 to hour counter + # add 1 to hour counter c += 1 - #-- close the input file for day + # close the input file for day fid.close() - #-- year fraction of the particular date and times + # year fraction of the particular date and times YEAR = np.repeat(Y[count//n_time], n_time).astype('f') MONTH = np.repeat(M[count//n_time], n_time).astype('f') DAY = np.repeat(D[count//n_time], n_time).astype('f') Ylms.time[count:count+n_time] = \ gravity_toolkit.time.convert_calendar_decimal(YEAR, MONTH, day=DAY, hour=hours) - #-- add to day counter + # add to day counter count += n_time - #-- calculate mean harmonics for GRACE/GRACE-FO month - #-- convert from harmonics object to dealiasing object + # calculate mean harmonics for GRACE/GRACE-FO month + # convert from harmonics object to dealiasing object mean_Ylms = dealiasing().from_harmonics(Ylms.mean()) mean_Ylms.time = np.mean(Ylms.time) mean_Ylms.month = np.int64(gm) - #-- product information + # product information mean_Ylms.center = PROC mean_Ylms.release = DREL mean_Ylms.product = DSET - #-- start and end time for month + # start and end time for month start_time = gravity_toolkit.time.convert_julian(np.min(JD)) mean_Ylms.start_time = ['{0:4.0f}'.format(start_time['year']), '{0:02.0f}'.format(start_time['month']), @@ -333,42 +333,42 @@ def dealiasing_monthly_mean(base_dir, PROC=None, DREL=None, DSET=None, mean_Ylms.end_time = ['{0:4.0f}'.format(end_time['year']), '{0:02.0f}'.format(end_time['month']), '{0:02.0f}'.format(end_time['day'])] - #-- output mean Ylms to file + # output mean Ylms to file if (DATAFORM == 'ascii'): - #-- ascii (.txt) + # ascii (.txt) mean_Ylms.to_ascii(os.path.join(grace_dir,DSET,FILE)) elif (DATAFORM == 'netCDF4'): - #-- netcdf (.nc) + # netcdf (.nc) mean_Ylms.to_netCDF4(os.path.join(grace_dir,DSET,FILE)) elif (DATAFORM == 'HDF5'): - #-- HDF5 (.H5) + # HDF5 (.H5) mean_Ylms.to_HDF5(os.path.join(grace_dir,DSET,FILE)) elif (DATAFORM == 'SHM'): mean_Ylms.to_SHM(os.path.join(grace_dir,DSET,FILE)) - #-- set the permissions mode of the output file + # set the permissions mode of the output file os.chmod(os.path.join(grace_dir,DSET,FILE), MODE) elif not COMPLETE: logging.info('File {0} not output (incomplete)'.format(FILE)) - #-- if outputting as spherical harmonic model files + # if outputting as spherical harmonic model files if (DATAFORM == 'SHM'): - #-- Create an index 
file for each GRACE product + # Create an index file for each GRACE product grace_files = [fi for fi in os.listdir(os.path.join(grace_dir,DSET)) if re.match(r'{0}-2(.*?)\.gz'.format(DSET),fi)] - #-- outputting GRACE filenames to index + # outputting GRACE filenames to index with open(os.path.join(grace_dir,DSET,'index.txt'),'w') as fid: for fi in sorted(grace_files): print(fi, file=fid) - #-- change permissions of index file + # change permissions of index file os.chmod(os.path.join(grace_dir,DSET,'index.txt'), MODE) - #-- print completion flag + # print completion flag logging.info('Complete: {0}/{1}/{2}'.format(PROC,DREL,DSET)) - #-- close the output date file + # close the output date file f_out.close() -#-- PURPOSE: additional routines for the harmonics module +# PURPOSE: additional routines for the harmonics module class dealiasing(harmonics): def __init__(self, **kwargs): super().__init__(**kwargs) @@ -383,7 +383,7 @@ def from_harmonics(self, temp): Convert a harmonics object to a new dealiasing object """ self = dealiasing(lmax=temp.lmax, mmax=temp.mmax) - #-- try to assign variables to self + # try to assign variables to self for key in ['clm','slm','time','month','shape','ndim','filename', 'center','release','product','start_time','end_time']: try: @@ -391,7 +391,7 @@ def from_harmonics(self, temp): setattr(self, key, np.copy(val)) except AttributeError: pass - #-- assign ndim and shape attributes + # assign ndim and shape attributes self.update_dimensions() return self @@ -404,60 +404,60 @@ def to_SHM(self, filename, **kwargs): keyword arguments for SHM output """ self.filename = os.path.expanduser(filename) - #-- set default verbosity + # set default verbosity kwargs.setdefault('verbose',False) logging.info(self.filename) - #-- open the output file + # open the output file fid = gzip.open(self.filename, 'wt') - #-- print the header informat + # print the header information self.print_header(fid) self.print_harmonic(fid) self.print_global(fid) self.print_variables(fid,'double precision') - #-- output file format + # output file format file_format = ('{0:6} {1:4d} {2:4d} {3:+18.12E} {4:+18.12E} ' '{5:10.4E} {6:10.4E} {7} {8} {9}') - #-- start and end time in line format + # start and end time in line format start_date = '{0}{1}{2}.0000'.format(*self.start_time) end_date = '{0}{1}{2}.0000'.format(*self.end_time) - #-- write to file for each spherical harmonic degree and order + # write to file for each spherical harmonic degree and order for m in range(0, self.mmax+1): for l in range(m, self.lmax+1): args = ('GRCOF2', l, m, self.clm[l,m], self.slm[l,m], 0, 0, start_date, end_date, 'nnnn') print(file_format.format(*args), file=fid) - #-- close the output file + # close the output file fid.close() - #-- PURPOSE: print YAML header to top of file + # PURPOSE: print YAML header to top of file def print_header(self, fid): - #-- print header + # print header fid.write('{0}:\n'.format('header')) - #-- data dimensions + # data dimensions fid.write(' {0}:\n'.format('dimensions')) fid.write(' {0:22}: {1:d}\n'.format('degree',self.lmax)) fid.write(' {0:22}: {1:d}\n'.format('order',self.lmax)) fid.write('\n') - #-- PURPOSE: print spherical harmonic attributes to YAML header + # PURPOSE: print spherical harmonic attributes to YAML header def print_harmonic(self, fid): - #-- non-standard attributes + # non-standard attributes fid.write(' {0}:\n'.format('non-standard_attributes')) - #-- product id + # product id product_id = '{0}-2'.format(self.product) fid.write(' {0:22}:
{1}\n'.format('product_id',product_id)) - #-- format id + # format id fid.write(' {0:22}:\n'.format('format_id')) short_name = 'SHM' fid.write(' {0:20}: {1}\n'.format('short_name',short_name)) long_name = 'Earth Gravity Spherical Harmonic Model Format' fid.write(' {0:20}: {1}\n'.format('long_name',long_name)) - #-- harmonic normalization + # harmonic normalization normalization = 'fully normalized' fid.write(' {0:22}: {1}\n'.format('normalization', normalization)) - #-- earth parameters - #-- gravitational constant + # earth parameters + # gravitational constant fid.write(' {0:22}:\n'.format('earth_gravity_param')) long_name = 'gravitational constant times mass of Earth' fid.write(' {0:20}: {1}\n'.format('long_name',long_name)) @@ -465,7 +465,7 @@ def print_harmonic(self, fid): fid.write(' {0:20}: {1}\n'.format('units',units)) value = '3.9860044180E+14' fid.write(' {0:20}: {1}\n'.format('value',value)) - #-- equatorial radius + # equatorial radius fid.write(' {0:22}:\n'.format('mean_equator_radius')) long_name = 'mean equator radius' fid.write(' {0:20}: {1}\n'.format('long_name',long_name)) @@ -475,10 +475,10 @@ def print_harmonic(self, fid): fid.write(' {0:20}: {1}\n'.format('value',value)) fid.write('\n') - #-- PURPOSE: print global attributes to YAML header + # PURPOSE: print global attributes to YAML header def print_global(self,fid): fid.write(' {0}:\n'.format('global_attributes')) - #-- product title + # product title if (self.month <= 186): MISSION = 'GRACE' PROJECT = 'NASA Gravity Recovery And Climate Experiment (GRACE)' @@ -494,7 +494,7 @@ def print_global(self,fid): args = (MISSION,self.product,self.center,self.release) title = '{0} Geopotential {1} Coefficients {2} {3}'.format(*args) fid.write(' {0:22}: {1}\n'.format('title',title)) - #-- product summaries + # product summaries summaries = {} summaries['GAA'] = ("Spherical harmonic coefficients that represent " "anomalous contributions of the non-tidal atmosphere to the Earth's " @@ -556,7 +556,7 @@ def print_global(self,fid): elif (self.center == 'JPL'): institution = 'NASA/JPL' else: - #-- default to GFZ + # default to GFZ institution = 'GFZ German Research Centre for Geosciences' fid.write(' {0:22}: {1}\n'.format('institution',institution)) src = 'All data from AOD1B {0}'.format(self.release) @@ -566,7 +566,7 @@ def print_global(self,fid): PRODUCT_VERSION = 'Release-{0}'.format(self.release[2:]) fid.write(' {0:22}: {1}\n'.format('product_version',PRODUCT_VERSION)) fid.write(' {0:22}:\n'.format('references')) - #-- date range and date created + # date range and date created start_date = '{0}-{1}-{2}'.format(*self.start_time) fid.write(' {0:22}: {1}\n'.format('time_coverage_start',start_date)) end_date = '{0}-{1}-{2}'.format(*self.end_time) @@ -575,67 +575,67 @@ def print_global(self,fid): fid.write(' {0:22}: {1}\n'.format('date_created', today)) fid.write('\n') - #-- PURPOSE: print variable descriptions to YAML header + # PURPOSE: print variable descriptions to YAML header def print_variables(self,fid,data_precision): - #-- variables + # variables fid.write(' {0}:\n'.format('variables')) - #-- record_key + # record_key fid.write(' {0:22}:\n'.format('record_key')) long_name = 'Earth Gravity Spherical Harmonic Model Format Type 2' fid.write(' {0:20}: {1}\n'.format('long_name', long_name)) fid.write(' {0:20}: {1}\n'.format('data_type', 'string')) fid.write(' {0:20}: {1}\n'.format('comment', '1st column')) - #-- degree_index + # degree_index fid.write(' {0:22}:\n'.format('degree_index')) long_name = 'spherical harmonic degree l' 
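Each per-column entry that print_variables writes repeats the same two-level, fixed-width YAML pattern. A minimal standalone sketch of that pattern, where write_entry is a hypothetical helper that is not part of the toolkit and the indent widths are approximate:

import sys

def write_entry(fid, name, **attrs):
    # variable name at a two-space indent, attributes at a four-space
    # indent, with keys padded to mirror the fixed-width YAML layout
    fid.write('  {0:22}:\n'.format(name))
    for key, value in attrs.items():
        fid.write('    {0:20}: {1}\n'.format(key, value))

# for example, the degree_index entry written in print_variables()
write_entry(sys.stdout, 'degree_index',
    long_name='spherical harmonic degree l',
    data_type='integer',
    comment='2nd column')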
fid.write(' {0:20}: {1}\n'.format('long_name', long_name)) fid.write(' {0:20}: {1}\n'.format('data_type', 'integer')) fid.write(' {0:20}: {1}\n'.format('comment', '2nd column')) - #-- order_index + # order_index fid.write(' {0:22}:\n'.format('order_index')) long_name = 'spherical harmonic order m' fid.write(' {0:20}: {1}\n'.format('long_name', long_name)) fid.write(' {0:20}: {1}\n'.format('data_type', 'integer')) fid.write(' {0:20}: {1}\n'.format('comment', '3rd column')) - #-- clm + # clm fid.write(' {0:22}:\n'.format('clm')) long_name = 'Clm coefficient; cosine coefficient for degree l and order m' fid.write(' {0:20}: {1}\n'.format('long_name', long_name)) fid.write(' {0:20}: {1}\n'.format('data_type', data_precision)) fid.write(' {0:20}: {1}\n'.format('comment', '4th column')) - #-- slm + # slm fid.write(' {0:22}:\n'.format('slm')) long_name = 'Slm coefficient; sine coefficient for degree l and order m' fid.write(' {0:20}: {1}\n'.format('long_name', long_name)) fid.write(' {0:20}: {1}\n'.format('data_type', data_precision)) fid.write(' {0:20}: {1}\n'.format('comment', '5th column')) - #-- clm_std_dev + # clm_std_dev fid.write(' {0:22}:\n'.format('clm_std_dev')) long_name = 'standard deviation of Clm' fid.write(' {0:20}: {1}\n'.format('long_name', long_name)) fid.write(' {0:20}: {1}\n'.format('data_type', data_precision)) fid.write(' {0:20}: {1}\n'.format('comment', '6th column')) - #-- slm_std_dev + # slm_std_dev fid.write(' {0:22}:\n'.format('slm_std_dev')) long_name = 'standard deviation of Slm' fid.write(' {0:20}: {1}\n'.format('long_name', long_name)) fid.write(' {0:20}: {1}\n'.format('data_type', data_precision)) fid.write(' {0:20}: {1}\n'.format('comment', '7th column')) - #-- epoch_begin_time + # epoch_begin_time fid.write(' {0:22}:\n'.format('epoch_begin_time')) long_name = 'epoch begin of Clm, Slm coefficients' fid.write(' {0:20}: {1}\n'.format('long_name', long_name)) fid.write(' {0:20}: {1}\n'.format('time_format', 'yyyymmdd.hhmm')) fid.write(' {0:20}: {1}\n'.format('data_type', data_precision)) fid.write(' {0:20}: {1}\n'.format('comment', '8th column')) - #-- epoch_stop_time + # epoch_stop_time fid.write(' {0:22}:\n'.format('epoch_stop_time')) long_name = 'epoch stop of Clm, Slm coefficients' fid.write(' {0:20}: {1}\n'.format('long_name', long_name)) fid.write(' {0:20}: {1}\n'.format('time_format', 'yyyymmdd.hhmm')) fid.write(' {0:20}: {1}\n'.format('data_type', data_precision)) fid.write(' {0:20}: {1}\n'.format('comment', '9th column')) - #-- solution_flags + # solution_flags fid.write(' {0:22}:\n'.format('solution_flags')) long_name = 'Coefficient adjustment and a priori flags' fid.write(' {0:20}: {1}\n'.format('long_name', long_name)) @@ -643,7 +643,7 @@ def print_variables(self,fid,data_precision): 'auxiliaryInformation')) fid.write(' {0:20}: {1}\n'.format('data_type', 'byte')) fid.write(' {0:20}:\n'.format('flag_meanings')) - #-- solution flag meanings + # solution flag meanings m = [] m.append('Clm adjusted, y for yes and n for no') m.append('Slm adjusted, y for yes and n for no') @@ -652,10 +652,10 @@ def print_variables(self,fid,data_precision): for i,meaning in enumerate(m): fid.write(' - char {0:d} = {1}\n'.format(i, meaning)) fid.write(' {0:20}: {1}\n'.format('comment', '10th column')) - #-- end of header + # end of header fid.write('\n\n# End of YAML header\n') -#-- PURPOSE: create argument parser +# PURPOSE: create argument parser def arguments(): parser = argparse.ArgumentParser( description="""Reads GRACE/GRACE-FO AOD1B datafiles for a @@ -665,61 +665,61 @@ 
def arguments(): fromfile_prefix_chars="@" ) parser.convert_arg_line_to_args = utilities.convert_arg_line_to_args - #-- command line parameters - #-- working data directory + # command line parameters + # working data directory parser.add_argument('--directory','-D', type=lambda p: os.path.abspath(os.path.expanduser(p)), default=os.getcwd(), help='Working data directory') - #-- Data processing center or satellite mission + # Data processing center or satellite mission parser.add_argument('--center','-c', metavar='PROC', type=str, required=True, help='GRACE/GRACE-FO Processing Center') - #-- GRACE/GRACE-FO data release + # GRACE/GRACE-FO data release parser.add_argument('--release','-r', metavar='DREL', type=str, default='RL06', help='GRACE/GRACE-FO Data Release') - #-- GRACE/GRACE-FO dealiasing product + # GRACE/GRACE-FO dealiasing product parser.add_argument('--product','-p', metavar='DSET', type=str.upper, nargs='+', choices=['GAA','GAB','GAC','GAD'], help='GRACE/GRACE-FO dealiasing product') - #-- maximum spherical harmonic degree and order + # maximum spherical harmonic degree and order parser.add_argument('--lmax','-l', type=int, default=180, help='Maximum spherical harmonic degree') - #-- input and output data format (ascii, netCDF4, HDF5, SHM) + # input and output data format (ascii, netCDF4, HDF5, SHM) parser.add_argument('--format','-F', type=str, default='netCDF4', choices=['ascii','netCDF4','HDF5','SHM'], help='Output data format') - #-- clobber will overwrite the existing data + # clobber will overwrite the existing data parser.add_argument('--clobber','-C', default=False, action='store_true', help='Overwrite existing data') - #-- verbose will output information about each output file + # verbose will output information about each output file parser.add_argument('--verbose','-V', action='count', default=0, help='Verbose output of run') - #-- permissions mode of the local directories and files (number in octal) + # permissions mode of the local directories and files (number in octal) parser.add_argument('--mode','-M', type=lambda x: int(x,base=8), default=0o775, help='Permissions mode of output files') - #-- return the parser + # return the parser return parser -#-- This is the main part of the program that calls the individual functions +# This is the main part of the program that calls the individual functions def main(): - #-- Read the system arguments listed after the program + # Read the system arguments listed after the program parser = arguments() args,_ = parser.parse_known_args() - #-- create logger for verbosity level + # create logger for verbosity level loglevels = [logging.CRITICAL,logging.INFO,logging.DEBUG] logging.basicConfig(level=loglevels[args.verbose]) for DSET in args.product: - #-- run monthly mean AOD1b program with parameters + # run monthly mean AOD1b program with parameters dealiasing_monthly_mean(args.directory, PROC=args.center, DREL=args.release, @@ -729,6 +729,6 @@ def main(): CLOBBER=args.clobber, MODE=args.mode) -#-- run main program +# run main program if __name__ == '__main__': main() diff --git a/scripts/esa_costg_swarm_sync.py b/scripts/esa_costg_swarm_sync.py index 586e92e2..d84cc8e0 100644 --- a/scripts/esa_costg_swarm_sync.py +++ b/scripts/esa_costg_swarm_sync.py @@ -49,57 +49,57 @@ import lxml.etree import gravity_toolkit.utilities -#-- PURPOSE: sync local Swarm files with ESA server +# PURPOSE: sync local Swarm files with ESA server def esa_costg_swarm_sync(DIRECTORY, RELEASE=None, TIMEOUT=None, LOG=False, LIST=False, CLOBBER=False, 
CHECKSUM=False, MODE=0o775): - #-- local directory for exact data product + # local directory for exact data product local_dir = os.path.join(DIRECTORY,'Swarm',RELEASE,'GSM') - #-- check if directory exists and recursively create if not + # check if directory exists and recursively create if not os.makedirs(local_dir,MODE) if not os.path.exists(local_dir) else None - #-- create log file with list of synchronized files (or print to terminal) + # create log file with list of synchronized files (or print to terminal) if LOG: - #-- output to log file - #-- format: ESA_Swarm_sync_2002-04-01.log + # output to log file + # format: ESA_Swarm_sync_2002-04-01.log today = time.strftime('%Y-%m-%d',time.localtime()) LOGFILE = f'ESA_Swarm_sync_{today}.log' logging.basicConfig(filename=os.path.join(DIRECTORY,LOGFILE), level=logging.INFO) logging.info(f'ESA Swarm Sync Log ({today})') else: - #-- standard output (terminal output) + # standard output (terminal output) logging.basicConfig(level=logging.INFO) - #-- Swarm Science Server url - #-- using the JSON api protocols to retrieve files - #-- static site is no longer available + # Swarm Science Server url + # using the JSON api protocols to retrieve files + # static site is no longer available HOST = 'https://swarm-diss.eo.esa.int' - #-- compile xml parsers for lxml + # compile xml parsers for lxml XMLparser = lxml.etree.XMLParser() - #-- create "opener" (OpenerDirector instance) + # create "opener" (OpenerDirector instance) gravity_toolkit.utilities.build_opener(None, None, authorization_header=False, urs=HOST) - #-- All calls to urllib2.urlopen will now use handler - #-- Make sure not to include the protocol in with the URL, or - #-- HTTPPasswordMgrWithDefaultRealm will be confused. + # All calls to urllib2.urlopen will now use handler + # Make sure not to include the protocol in with the URL, or + # HTTPPasswordMgrWithDefaultRealm will be confused. 
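Since the static site is no longer available, the sync routine that follows pages through the server's JSON listing API. A self-contained sketch of that paging pattern, using only the standard library; it stops when a short page is returned rather than using the maxfiles == prevmax test in the actual loop, so treat it as an illustration of the pattern rather than the toolkit's implementation:

import json
import posixpath
import urllib.parse
import urllib.request

HOST = 'https://swarm-diss.eo.esa.int'

def list_remote_files(subdir, page_size=500, timeout=360):
    # page through the ?do=list JSON API, extending the name and
    # mtime lists until the server returns a short (final) page
    names, mtimes, pos = [], [], 0
    while True:
        params = urllib.parse.urlencode({'maxfiles': page_size,
            'pos': pos, 'file': subdir})
        url = posixpath.join(HOST, f'?do=list&{params}')
        with urllib.request.urlopen(url, timeout=timeout) as response:
            table = json.loads(response.read().decode())
        names.extend(t['name'] for t in table['results'])
        mtimes.extend(t['mtime'] for t in table['results'])
        pos += len(table['results'])
        if len(table['results']) < page_size:
            break
    return names, mtimes

# e.g. names, mtimes = list_remote_files('swarm/Level2longterm/EGF')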
- #-- compile regular expression operator for files + # compile regular expression operator for files swarm_data = r'(SW)_(.*?)_(EGF_SHA_2)__(.*?)_(.*?)_(.*?)(\.gfc|\.ZIP)' R1 = re.compile(swarm_data, re.VERBOSE) - #-- create combined list of filenames and last modified times + # create combined list of filenames and last modified times colnames = [] collastmod = [] - #-- position, maximum number of files to list, flag to check if done + # position, maximum number of files to list, flag to check if done pos,maxfiles,prevmax = (0,500,500) - #-- iterate to get a compiled list of files - #-- will iterate until there are no more files to add to the lists + # iterate to get a compiled list of files + # will iterate until there are no more files to add to the lists while (maxfiles == prevmax): - #-- set previous flag to maximum + # set previous flag to maximum prevmax = maxfiles - #-- open connection with Swarm science server at remote directory - #-- to list maxfiles number of files at position + # open connection with Swarm science server at remote directory + # to list maxfiles number of files at position parameters = gravity_toolkit.utilities.urlencode({'maxfiles':prevmax, 'pos':pos,'file':posixpath.join('swarm','Level2longterm','EGF')}) url=posixpath.join(HOST,f'?do=list&{parameters}') @@ -107,69 +107,69 @@ def esa_costg_swarm_sync(DIRECTORY, RELEASE=None, TIMEOUT=None, LOG=False, response = gravity_toolkit.utilities.urllib2.urlopen(request, timeout=TIMEOUT) table = json.loads(response.read().decode()) - #-- extend lists with new files + # extend lists with new files colnames.extend([t['name'] for t in table['results']]) collastmod.extend([t['mtime'] for t in table['results']]) - #-- update maximum number of files + # update maximum number of files maxfiles = len(table['results']) - #-- update position + # update position pos += maxfiles - #-- find lines of valid files + # find lines of valid files valid_lines = [i for i,f in enumerate(colnames) if R1.match(f)] - #-- write each file to an index + # write each file to an index fid = open(os.path.join(local_dir,'index.txt'),'w') - #-- for each data and header file + # for each data and header file for i in valid_lines: - #-- remote and local versions of the file + # remote and local versions of the file parameters = gravity_toolkit.utilities.urlencode({'file': posixpath.join('swarm','Level2longterm','EGF',colnames[i])}) remote_file = posixpath.join(HOST, f'?do=download&{parameters}') local_file = os.path.join(local_dir,colnames[i]) - #-- check that file is not in file system unless overwriting + # check that file is not in file system unless overwriting http_pull_file(remote_file, collastmod[i], local_file, TIMEOUT=TIMEOUT, LIST=LIST, CLOBBER=CLOBBER, CHECKSUM=CHECKSUM, MODE=MODE) - #-- output Swarm filenames to index + # output Swarm filenames to index print(colnames[i], file=fid) - #-- change permissions of index file + # change permissions of index file os.chmod(os.path.join(local_dir,'index.txt'), MODE) - #-- close log file and set permissions level to MODE + # close log file and set permissions level to MODE if LOG: os.chmod(os.path.join(DIRECTORY,LOGFILE), MODE) -#-- PURPOSE: pull file from a remote host checking if file exists locally -#-- and if the remote file is newer than the local file +# PURPOSE: pull file from a remote host checking if file exists locally +# and if the remote file is newer than the local file def http_pull_file(remote_file, remote_mtime, local_file, TIMEOUT=120, LIST=False, CLOBBER=False, CHECKSUM=False, 
MODE=0o775): - #-- if file exists in file system: check if remote file is newer + # if file exists in file system: check if remote file is newer TEST = False OVERWRITE = ' (clobber)' - #-- check if local version of file exists + # check if local version of file exists if CHECKSUM and os.access(local_file, os.F_OK): - #-- generate checksum hash for local file - #-- open the local_file in binary read mode + # generate checksum hash for local file + # open the local_file in binary read mode local_hash = gravity_toolkit.utilities.get_hash(local_file) - #-- Create and submit request. - #-- There are a wide range of exceptions that can be thrown here - #-- including HTTPError and URLError. + # Create and submit request. + # There are a wide range of exceptions that can be thrown here + # including HTTPError and URLError. req=gravity_toolkit.utilities.urllib2.Request(remote_file) resp=gravity_toolkit.utilities.urllib2.urlopen(req,timeout=TIMEOUT) - #-- copy remote file contents to bytesIO object + # copy remote file contents to bytesIO object remote_buffer = io.BytesIO(resp.read()) remote_buffer.seek(0) - #-- generate checksum hash for remote file + # generate checksum hash for remote file remote_hash = gravity_toolkit.utilities.get_hash(remote_buffer) - #-- compare checksums + # compare checksums if (local_hash != remote_hash): TEST = True OVERWRITE = f' (checksums: {local_hash} {remote_hash})' elif os.access(local_file, os.F_OK): - #-- check last modification time of local file + # check last modification time of local file local_mtime = os.stat(local_file).st_mtime - #-- if remote file is newer: overwrite the local file + # if remote file is newer: overwrite the local file if (gravity_toolkit.utilities.even(remote_mtime) > gravity_toolkit.utilities.even(local_mtime)): TEST = True @@ -177,62 +177,62 @@ def http_pull_file(remote_file, remote_mtime, local_file, TIMEOUT=120, else: TEST = True OVERWRITE = ' (new)' - #-- if file does not exist locally, is to be overwritten, or CLOBBER is set + # if file does not exist locally, is to be overwritten, or CLOBBER is set if TEST or CLOBBER: - #-- Printing files transferred + # Printing files transferred logging.info(f'{remote_file} --> ') logging.info(f'\t{local_file}{OVERWRITE}\n') - #-- if executing copy command (not only printing the files) + # if executing copy command (not only printing the files) if not LIST: - #-- chunked transfer encoding size + # chunked transfer encoding size CHUNK = 16 * 1024 - #-- copy bytes or transfer file + # copy bytes or transfer file if CHECKSUM and os.access(local_file, os.F_OK): - #-- store bytes to file using chunked transfer encoding + # store bytes to file using chunked transfer encoding remote_buffer.seek(0) with open(local_file, 'wb') as f: shutil.copyfileobj(remote_buffer, f, CHUNK) else: - #-- Create and submit request. - #-- There are a range of exceptions that can be thrown here - #-- including HTTPError and URLError. + # Create and submit request. + # There are a range of exceptions that can be thrown here + # including HTTPError and URLError. 
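In checksum mode, the transfer decision reduces to comparing content hashes instead of modification times. A standalone sketch of that test; needs_update is hypothetical, and MD5 stands in for whatever digest gravity_toolkit.utilities.get_hash actually computes, since its implementation is not shown in this diff:

import hashlib
import os
import urllib.request

def needs_update(remote_file, local_file, timeout=120):
    # a missing local file always triggers a transfer
    if not os.access(local_file, os.F_OK):
        return True
    # hash the remote and local contents and compare (MD5 assumed)
    with urllib.request.urlopen(remote_file, timeout=timeout) as resp:
        remote_hash = hashlib.md5(resp.read()).hexdigest()
    with open(local_file, 'rb') as f:
        local_hash = hashlib.md5(f.read()).hexdigest()
    return local_hash != remote_hash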
request = gravity_toolkit.utilities.urllib2.Request(remote_file) response = gravity_toolkit.utilities.urllib2.urlopen(request, timeout=TIMEOUT) - #-- copy remote file contents to local file + # copy remote file contents to local file with open(local_file, 'wb') as f: shutil.copyfileobj(response, f, CHUNK) - #-- keep remote modification time of file and local access time + # keep remote modification time of file and local access time os.utime(local_file, (os.stat(local_file).st_atime, remote_mtime)) os.chmod(local_file, MODE) -#-- PURPOSE: create argument parser +# PURPOSE: create argument parser def arguments(): parser = argparse.ArgumentParser( description="""Syncs Swarm gravity field products from the ESA Swarm Science Server """ ) - #-- command line parameters - #-- working data directory + # command line parameters + # working data directory parser.add_argument('--directory','-D', type=lambda p: os.path.abspath(os.path.expanduser(p)), default=os.getcwd(), help='Working data directory') - #-- data release + # data release parser.add_argument('--release','-r', type=str, default='RL01', choices=['RL01'], help='Data release to sync') - #-- connection timeout + # connection timeout parser.add_argument('--timeout','-t', type=int, default=360, help='Timeout in seconds for blocking operations') - #-- Output log file in form - #-- ESA_Swarm_sync_2002-04-01.log + # Output log file in form + # ESA_Swarm_sync_2002-04-01.log parser.add_argument('--log','-l', default=False, action='store_true', help='Output log file') - #-- sync options + # sync options parser.add_argument('--list','-L', default=False, action='store_true', help='Only print files that could be transferred') @@ -242,26 +242,26 @@ def arguments(): parser.add_argument('--checksum', default=False, action='store_true', help='Compare hashes to check for overwriting existing data') - #-- permissions mode of the directories and files synced (number in octal) + # permissions mode of the directories and files synced (number in octal) parser.add_argument('--mode','-M', type=lambda x: int(x,base=8), default=0o775, help='Permission mode of directories and files synced') - #-- return the parser + # return the parser return parser -#-- This is the main part of the program that calls the individual functions +# This is the main part of the program that calls the individual functions def main(): - #-- Read the system arguments listed after the program + # Read the system arguments listed after the program parser = arguments() args = parser.parse_args() - #-- check internet connection before attempting to run program + # check internet connection before attempting to run program HOST = 'https://swarm-diss.eo.esa.int' if gravity_toolkit.utilities.check_connection(HOST): esa_costg_swarm_sync(args.directory, RELEASE=args.release, TIMEOUT=args.timeout, LOG=args.log, LIST=args.list, CLOBBER=args.clobber, CHECKSUM=args.checksum, MODE=args.mode) -#-- run main program +# run main program if __name__ == '__main__': main() diff --git a/scripts/geocenter_compare_tellus.py b/scripts/geocenter_compare_tellus.py index 1c8bf4e8..859aa57e 100644 --- a/scripts/geocenter_compare_tellus.py +++ b/scripts/geocenter_compare_tellus.py @@ -35,55 +35,55 @@ from gravity_toolkit.time import convert_calendar_decimal import gravity_toolkit.geocenter as geocenter -#-- rebuilt the matplotlib fonts and set parameters +# rebuilt the matplotlib fonts and set parameters matplotlib.font_manager._load_fontmanager() matplotlib.rcParams['font.family'] = 'sans-serif' 
matplotlib.rcParams['font.sans-serif'] = ['Helvetica'] matplotlib.rcParams['mathtext.default'] = 'regular' -#-- PURPOSE: plots the GRACE/GRACE-FO geocenter time series +# PURPOSE: plots the GRACE/GRACE-FO geocenter time series def geocenter_compare_tellus(grace_dir,DREL,START_MON,END_MON,MISSING): - #-- GRACE months + # GRACE months GAP = [187,188,189,190,191,192,193,194,195,196,197] months = sorted(set(np.arange(START_MON,END_MON+1)) - set(MISSING)) - #-- labels for each scenario + # labels for each scenario input_flags = ['','iter','SLF_iter','SLF_iter_wSLR21'] input_labels = ['Static','Iterated','Iterated SLF'] - #-- labels for Release-6 + # labels for Release-6 PROC = ['CSR','GFZ','JPL'] model_str = 'OMCT' if DREL in ('RL04','RL05') else 'MPIOM' - #-- degree one coefficient labels + # degree one coefficient labels fig_labels = ['C11','S11','C10'] axes_labels = dict(C10='c)',C11='a)',S11='b)') ylabels = dict(C10='z',C11='x',S11='y') - #-- plot colors for each dataset + # plot colors for each dataset plot_colors = {'Iterated SLF':'darkorchid','GFZ GravIS':'darkorange', 'JPL Tellus':'mediumseagreen'} - #-- plot geocenter estimates for each processing center + # plot geocenter estimates for each processing center for k,pr in enumerate(PROC): - #-- 3 row plot (C10, C11 and S11) + # 3 row plot (C10, C11 and S11) ax = {} fig,(ax[0],ax[1],ax[2])=plt.subplots(num=1,ncols=3, sharey=True,figsize=(9,4)) - #-- additionally plot GFZ with SLR replaced pole tide + # additionally plot GFZ with SLR replaced pole tide if (pr == 'GFZwPT'): fargs = ('GFZ',DREL,model_str,input_flags[3]) else: fargs = (pr,DREL,model_str,input_flags[2]) - #-- read geocenter file for processing center and model + # read geocenter file for processing center and model grace_file = '{0}_{1}_{2}_{3}.txt'.format(*fargs) DEG1 = geocenter().from_UCI(os.path.join(grace_dir,grace_file)) - #-- indices for mean months + # indices for mean months kk, = np.nonzero((DEG1.month >= START_MON) & (DEG1.month <= 176)) DEG1.mean(apply=True, indices=kk) - #-- setting Load Love Number (kl) to 0.021 to match Swenson et al. (2008) + # setting Load Love Number (kl) to 0.021 to match Swenson et al. (2008) DEG1.to_cartesian(kl=0.021) - #-- plot each coefficient + # plot each coefficient for j,key in enumerate(fig_labels): - #-- plot model outputs - #-- create a time series with nans for missing months + # plot model outputs + # create a time series with nans for missing months tdec = np.full_like(months,np.nan,dtype=np.float64) data = np.full_like(months,np.nan,dtype=np.float64) val = getattr(DEG1, ylabels[key].upper()) @@ -93,23 +93,23 @@ def geocenter_compare_tellus(grace_dir,DREL,START_MON,END_MON,MISSING): mm, = np.nonzero(DEG1.month == m) tdec[i] = DEG1.time[mm] data[i] = val[mm] - #-- plot all dates + # plot all dates ax[j].plot(tdec, data, color=plot_colors['Iterated SLF'], label='Iterated SLF') if (pr == 'GFZwPT'): grace_file = 'GRAVIS-2B_GFZOP_GEOCENTER_0002.dat' DEG1 = geocenter().from_gravis(os.path.join(grace_dir,grace_file)) - #-- indices for mean months + # indices for mean months kk, = np.nonzero((DEG1.month >= START_MON) & (DEG1.month <= 176)) DEG1.mean(apply=True, indices=kk) - #-- setting Load Love Number (kl) to 0.021 to match Swenson et al. (2008) + # setting Load Love Number (kl) to 0.021 to match Swenson et al. 
(2008) DEG1.to_cartesian(kl=0.021) - #-- plot each coefficient + # plot each coefficient for j,key in enumerate(fig_labels): - #-- plot model outputs + # plot model outputs DEG1[key] -= DEG1[key][kk].mean() - #-- create a time series with nans for missing months + # create a time series with nans for missing months tdec = np.full_like(months,np.nan,dtype=np.float64) data = np.full_like(months,np.nan,dtype=np.float64) val = getattr(DEG1, ylabels[key].upper()) @@ -119,22 +119,22 @@ def geocenter_compare_tellus(grace_dir,DREL,START_MON,END_MON,MISSING): mm, = np.nonzero(DEG1.month == m) tdec[i] = DEG1.time[mm] data[i] = val[mm] - #-- plot all dates + # plot all dates ax[j].plot(tdec, data, color=plot_colors['GFZ GravIS'], label='GFZ GravIS') - #-- Running function read_tellus_geocenter.py + # Running function read_tellus_geocenter.py grace_file = f'TN-13_GEOC_{pr}_{DREL}.txt' DEG1 = geocenter().from_tellus(os.path.join(grace_dir,grace_file),JPL=True) - #-- indices for mean months + # indices for mean months kk, = np.nonzero((DEG1.month >= START_MON) & (DEG1.month <= 176)) DEG1.mean(apply=True, indices=kk) - #-- setting Load Love Number (kl) to 0.021 to match Swenson et al. (2008) + # setting Load Love Number (kl) to 0.021 to match Swenson et al. (2008) DEG1.to_cartesian(kl=0.021) - #-- plot each coefficient + # plot each coefficient for j,key in enumerate(fig_labels): - #-- plot model outputs - #-- create a time series with nans for missing months + # plot model outputs + # create a time series with nans for missing months tdec = np.full_like(months,np.nan,dtype=np.float64) data = np.full_like(months,np.nan,dtype=np.float64) val = getattr(DEG1, ylabels[key].upper()) @@ -144,27 +144,27 @@ def geocenter_compare_tellus(grace_dir,DREL,START_MON,END_MON,MISSING): mm, = np.nonzero(DEG1.month == m) tdec[i] = DEG1.time[mm] data[i] = val[mm] - #-- plot all dates + # plot all dates ax[j].plot(tdec, data, color=plot_colors['JPL Tellus'], label='JPL Tellus') - #-- add axis labels and adjust font sizes for axis ticks + # add axis labels and adjust font sizes for axis ticks for j,key in enumerate(fig_labels): - #-- vertical line denoting the accelerometer shutoff + # vertical line denoting the accelerometer shutoff acc = convert_calendar_decimal(2016,9,day=3,hour=12,minute=12) ax[j].axvline(acc,color='0.5',ls='dashed',lw=0.5,dashes=(8,4)) - #-- vertical lines for end of the GRACE mission and start of GRACE-FO + # vertical lines for end of the GRACE mission and start of GRACE-FO jj, = np.flatnonzero(DEG1.month == 186) kk, = np.flatnonzero(DEG1.month == 198) vs = ax[j].axvspan(DEG1.time[jj],DEG1.time[kk], color='0.5',ls='dashed',alpha=0.15) vs._dashes = (4,2) - #-- axis label + # axis label ax[j].set_title(ylabels[key], style='italic', fontsize=14) ax[j].add_artist(AnchoredText(axes_labels[key], pad=0., prop=dict(size=16,weight='bold'), frameon=False, loc=2)) ax[j].set_xlabel('Time [Yr]', fontsize=14) - #-- set ticks + # set ticks xmin = 2002 + (START_MON + 1.0)//12.0 xmax = 2002 + (END_MON + 1.0)/12.0 major_ticks = np.arange(2005, xmax, 5) @@ -173,7 +173,7 @@ def geocenter_compare_tellus(grace_dir,DREL,START_MON,END_MON,MISSING): ax[j].xaxis.set_ticks(minor_ticks, minor=True) ax[j].set_xlim(xmin, xmax) ax[j].set_ylim(-9.5,8.5) - #-- axes tick adjustments + # axes tick adjustments ax[j].get_xaxis().set_tick_params(which='both', direction='in') ax[j].get_yaxis().set_tick_params(which='both', direction='in') for tick in ax[j].xaxis.get_major_ticks(): @@ -181,7 +181,7 @@ def 
geocenter_compare_tellus(grace_dir,DREL,START_MON,END_MON,MISSING): for tick in ax[j].yaxis.get_major_ticks(): tick.label.set_fontsize(14) - #-- add legend + # add legend lgd = ax[0].legend(loc=3,frameon=False) lgd.get_frame().set_alpha(1.0) for line in lgd.get_lines(): @@ -189,16 +189,16 @@ def geocenter_compare_tellus(grace_dir,DREL,START_MON,END_MON,MISSING): for i,text in enumerate(lgd.get_texts()): text.set_weight('bold') text.set_color(plot_colors[text.get_text()]) - #-- labels and set limits + # labels and set limits ax[0].set_ylabel('Geocenter Variation [mm]', fontsize=14) - #-- adjust locations of subplots + # adjust locations of subplots fig.subplots_adjust(left=0.06,right=0.98,bottom=0.12,top=0.94,wspace=0.05) - #-- save figure to file + # save figure to file OUTPUT_FIGURE = f'TN13_SV19_{pr}_{DREL}.pdf' plt.savefig(os.path.join(grace_dir,OUTPUT_FIGURE), format='pdf', dpi=300) plt.clf() -#-- PURPOSE: create argument parser +# PURPOSE: create argument parser def arguments(): parser = argparse.ArgumentParser( description="""Plots the GRACE/GRACE-FO geocenter time series for @@ -206,17 +206,17 @@ def arguments(): JPL GRACE Tellus product """ ) - #-- working data directory + # working data directory parser.add_argument('--directory','-D', type=lambda p: os.path.abspath(os.path.expanduser(p)), default=os.getcwd(), help='Working data directory') - #-- GRACE/GRACE-FO data release + # GRACE/GRACE-FO data release parser.add_argument('--release','-r', metavar='DREL', type=str, default='RL06', choices=['RL04','RL05','RL06'], help='GRACE/GRACE-FO data release') - #-- start and end GRACE/GRACE-FO months + # start and end GRACE/GRACE-FO months parser.add_argument('--start','-S', type=int, default=4, help='Starting GRACE/GRACE-FO month for time series') @@ -228,19 +228,19 @@ def arguments(): parser.add_argument('--missing','-M', metavar='MISSING', type=int, nargs='+', default=MISSING, help='Missing GRACE/GRACE-FO months in time series') - #-- return the parser + # return the parser return parser -#-- This is the main part of the program that calls the individual functions +# This is the main part of the program that calls the individual functions def main(): - #-- Read the system arguments listed after the program + # Read the system arguments listed after the program parser = arguments() args,_ = parser.parse_known_args() - #-- run program with parameters + # run program with parameters geocenter_compare_tellus(args.directory, args.release, args.start, args.end, args.missing) -#-- run main program +# run main program if __name__ == '__main__': main() diff --git a/scripts/geocenter_monte_carlo.py b/scripts/geocenter_monte_carlo.py index 5f13aae5..aef38cbf 100644 --- a/scripts/geocenter_monte_carlo.py +++ b/scripts/geocenter_monte_carlo.py @@ -33,47 +33,47 @@ from gravity_toolkit.time import convert_calendar_decimal import gravity_toolkit.geocenter as geocenter -#-- rebuilt the matplotlib fonts and set parameters +# rebuilt the matplotlib fonts and set parameters matplotlib.font_manager._load_fontmanager() matplotlib.rcParams['font.family'] = 'sans-serif' matplotlib.rcParams['font.sans-serif'] = ['Helvetica'] matplotlib.rcParams['mathtext.default'] = 'regular' -#-- PURPOSE: plots the GRACE/GRACE-FO geocenter time series +# PURPOSE: plots the GRACE/GRACE-FO geocenter time series def geocenter_monte_carlo(grace_dir,PROC,DREL,START_MON,END_MON,MISSING): - #-- GRACE months + # GRACE months GAP = [187,188,189,190,191,192,193,194,195,196,197] months = sorted(set(np.arange(START_MON,END_MON+1)) - 
set(MISSING)) nmon = len(months) - #-- labels for Release-6 + # labels for Release-6 model_str = 'OMCT' if DREL in ('RL04','RL05') else 'MPIOM' - #-- GIA and processing labels + # GIA and processing labels input_flag = 'SLF' gia_str = '_AW13_ice6g_GA' delta_str = '_monte_carlo' ds_str = '_FL' - #-- degree one coefficient labels + # degree one coefficient labels fig_labels = ['C11','S11','C10'] axes_labels = dict(C10='c)',C11='a)',S11='b)') ylabels = dict(C10='z',C11='x',S11='y') - #-- 3 row plot (C10, C11 and S11) + # 3 row plot (C10, C11 and S11) ax = {} fig,(ax[0],ax[1],ax[2])=plt.subplots(num=1,ncols=3,sharey=True,figsize=(9,4)) - #-- read geocenter file for processing center and model + # read geocenter file for processing center and model fargs = (PROC,DREL,model_str,input_flag,gia_str,delta_str,ds_str) grace_file = '{0}_{1}_{2}_{3}{4}{5}{6}.nc'.format(*fargs) DEG1 = geocenter().from_netCDF4(os.path.join(grace_dir,grace_file)) - #-- setting Load Love Number (kl) to 0.021 to match Swenson et al. (2008) + # setting Load Love Number (kl) to 0.021 to match Swenson et al. (2008) DEG1.to_cartesian(kl=0.021) - #-- number of monte carlo runs + # number of monte carlo runs _,nruns = np.shape(DEG1.C10) - #-- plot each coefficient + # plot each coefficient for j,key in enumerate(fig_labels): - #-- create a time series with nans for missing months + # create a time series with nans for missing months tdec = np.full((nmon),np.nan,dtype=np.float64) data = np.full((nmon,nruns),np.nan,dtype=np.float64) val = getattr(DEG1, ylabels[key].upper()) @@ -84,38 +84,38 @@ def geocenter_monte_carlo(grace_dir,PROC,DREL,START_MON,END_MON,MISSING): tdec[i] = DEG1.time[mm] data[i,:] = val[mm,:] - #-- show solutions for each iteration + # show solutions for each iteration plot_colors = iter(cm.rainbow(np.linspace(0,1,nruns))) - #-- mean of all monte carlo solutions + # mean of all monte carlo solutions MEAN = np.mean(data, axis=1) nvalid = np.count_nonzero(np.isfinite(MEAN)) - #-- calculate variance off of the mean + # calculate variance off of the mean variance = np.zeros((nruns)) max_var = 0.0 for k in range(nruns): color_k = next(plot_colors) - #-- plot all dates + # plot all dates ax[j].plot(tdec, data[:,k], color=color_k) - #-- variance off of the mean + # variance off of the mean variance[k] = np.nansum((data[:,k] - MEAN)**2)/nvalid if (np.nanmax(np.abs(data[:,k] - MEAN)) > max_var): max_var = np.nanmax(np.abs(data[:,k] - MEAN)) - #-- add mean solution + # add mean solution ax[j].plot(tdec, MEAN, color='k', lw=1) - #-- calculate total RMS + # calculate total RMS RMS = np.nansum(np.sqrt(variance))/nruns - #-- add axis labels and adjust font sizes for axis ticks - #-- vertical line denoting the accelerometer shutoff + # add axis labels and adjust font sizes for axis ticks + # vertical line denoting the accelerometer shutoff acc = convert_calendar_decimal(2016,9,day=3,hour=12,minute=12) ax[j].axvline(acc,color='0.5',ls='dashed',lw=0.5,dashes=(8,4)) - #-- vertical lines for end of the GRACE mission and start of GRACE-FO + # vertical lines for end of the GRACE mission and start of GRACE-FO jj, = np.flatnonzero(DEG1.month == 186) kk, = np.flatnonzero(DEG1.month == 198) vs = ax[j].axvspan(DEG1.time[jj],DEG1.time[kk], color='0.5',ls='dashed',alpha=0.15) vs._dashes = (4,2) - #-- axis label + # axis label ax[j].set_title(ylabels[key], style='italic', fontsize=14) ax[j].add_artist(AnchoredText(axes_labels[key], pad=0., prop=dict(size=16,weight='bold'), frameon=False, loc=2)) @@ -123,7 +123,7 @@ def 
geocenter_monte_carlo(grace_dir,PROC,DREL,START_MON,END_MON,MISSING): ax[j].add_artist(AnchoredText(lbl, pad=0., prop=dict(size=12), frameon=False, loc=3)) ax[j].set_xlabel('Time [Yr]', fontsize=14) - #-- set ticks + # set ticks xmin = 2002 + (START_MON + 1.0)//12.0 xmax = 2002 + (END_MON + 1.0)/12.0 major_ticks = np.arange(2005, xmax, 5) @@ -132,7 +132,7 @@ def geocenter_monte_carlo(grace_dir,PROC,DREL,START_MON,END_MON,MISSING): ax[j].xaxis.set_ticks(minor_ticks, minor=True) ax[j].set_xlim(xmin, xmax) ax[j].set_ylim(-9.5,8.5) - #-- axes tick adjustments + # axes tick adjustments ax[j].get_xaxis().set_tick_params(which='both', direction='in') ax[j].get_yaxis().set_tick_params(which='both', direction='in') for tick in ax[j].xaxis.get_major_ticks(): @@ -140,37 +140,37 @@ def geocenter_monte_carlo(grace_dir,PROC,DREL,START_MON,END_MON,MISSING): for tick in ax[j].yaxis.get_major_ticks(): tick.label.set_fontsize(14) - #-- labels and set limits + # labels and set limits ax[0].set_ylabel(f'{PROC} Geocenter Variation [mm]', fontsize=14) - #-- adjust locations of subplots + # adjust locations of subplots fig.subplots_adjust(left=0.06,right=0.98,bottom=0.12,top=0.94,wspace=0.05) - #-- save figure to file + # save figure to file OUTPUT_FIGURE = f'SV19_{PROC}_{DREL}_monte_carlo.pdf' plt.savefig(os.path.join(grace_dir,OUTPUT_FIGURE), format='pdf', dpi=300) plt.clf() -#-- PURPOSE: create argument parser +# PURPOSE: create argument parser def arguments(): parser = argparse.ArgumentParser( description="""Plots the GRACE/GRACE-FO geocenter time series for each iteration of a monte carlo solution """ ) - #-- working data directory + # working data directory parser.add_argument('--directory','-D', type=lambda p: os.path.abspath(os.path.expanduser(p)), default=os.getcwd(), help='Working data directory') - #-- GRACE/GRACE-FO data processing center + # GRACE/GRACE-FO data processing center parser.add_argument('--center','-c', metavar='PROC', type=str, required=True, help='GRACE/GRACE-FO Processing Center') - #-- GRACE/GRACE-FO data release + # GRACE/GRACE-FO data release parser.add_argument('--release','-r', metavar='DREL', type=str, default='RL06', choices=['RL04','RL05','RL06'], help='GRACE/GRACE-FO data release') - #-- start and end GRACE/GRACE-FO months + # start and end GRACE/GRACE-FO months parser.add_argument('--start','-S', type=int, default=4, help='Starting GRACE/GRACE-FO month for time series') @@ -182,19 +182,19 @@ def arguments(): parser.add_argument('--missing','-M', metavar='MISSING', type=int, nargs='+', default=MISSING, help='Missing GRACE/GRACE-FO months in time series') - #-- return the parser + # return the parser return parser -#-- This is the main part of the program that calls the individual functions +# This is the main part of the program that calls the individual functions def main(): - #-- Read the system arguments listed after the program + # Read the system arguments listed after the program parser = arguments() args,_ = parser.parse_known_args() - #-- run program with parameters + # run program with parameters geocenter_monte_carlo(args.directory, args.center, args.release, args.start, args.end, args.missing) -#-- run main program +# run main program if __name__ == '__main__': main() diff --git a/scripts/geocenter_ocean_models.py b/scripts/geocenter_ocean_models.py index 7d356f5f..3dbe6ef5 100644 --- a/scripts/geocenter_ocean_models.py +++ b/scripts/geocenter_ocean_models.py @@ -43,39 +43,39 @@ from matplotlib.offsetbox import AnchoredText import gravity_toolkit.geocenter as 
geocenter -#-- PURPOSE: plots the GRACE/GRACE-FO geocenter time series -#-- comparing results using different ocean bottom pressure estimates +# PURPOSE: plots the GRACE/GRACE-FO geocenter time series +# comparing results using different ocean bottom pressure estimates def geocenter_ocean_models(grace_dir,PROC,DREL,MODEL,START_MON,END_MON,MISSING): - #-- GRACE months + # GRACE months GAP = [187,188,189,190,191,192,193,194,195,196,197] months = sorted(set(np.arange(START_MON,END_MON+1)) - set(MISSING)) - #-- labels for each scenario + # labels for each scenario input_flags = ['','iter','SLF_iter'] input_labels = ['Static','Iterated','Iterated SLF'] - #-- degree one coefficient labels + # degree one coefficient labels fig_labels = ['C11','S11','C10'] axes_labels = dict(C10='c)',C11='a)',S11='b)') ylabels = dict(C10='z',C11='x',S11='y') - #-- list of plot colors + # list of plot colors plot_colors = ['darkorange','darkorchid','mediumseagreen','dodgerblue','0.4'] - #-- 3 row plot (C10, C11 and S11) + # 3 row plot (C10, C11 and S11) ax = {} fig,(ax[0],ax[1],ax[2])=plt.subplots(num=1,ncols=3,sharey=True,figsize=(9,4)) - #-- plot geocenter estimates for each processing center + # plot geocenter estimates for each processing center for k,mdl in enumerate(MODEL): - #-- read geocenter file for processing center and model + # read geocenter file for processing center and model grace_file = '{0}_{1}_{2}_{3}.txt'.format(PROC,DREL,mdl,input_flags[2]) DEG1 = geocenter().from_UCI(os.path.join(grace_dir,grace_file)) - #-- indices for mean months + # indices for mean months kk, = np.nonzero((DEG1.month >= START_MON) & (DEG1.month <= 176)) DEG1.mean(apply=True, indices=kk) - #-- setting Load Love Number (kl) to 0.021 to match Swenson et al. (2008) + # setting Load Love Number (kl) to 0.021 to match Swenson et al. 
(2008) DEG1.to_cartesian(kl=0.021) - #-- plot each coefficient + # plot each coefficient for j,key in enumerate(fig_labels): - #-- create a time series with nans for missing months + # create a time series with nans for missing months tdec = np.full_like(months,np.nan,dtype=np.float64) data = np.full_like(months,np.nan,dtype=np.float64) val = getattr(DEG1, ylabels[key].upper()) @@ -85,27 +85,27 @@ def geocenter_ocean_models(grace_dir,PROC,DREL,MODEL,START_MON,END_MON,MISSING): mm, = np.nonzero(DEG1.month == m) tdec[i] = DEG1.time[mm] data[i] = val[mm] - #-- plot all dates + # plot all dates label = mdl.replace('_','-') ax[j].plot(tdec, data, color=plot_colors[k], label=label) - #-- read geocenter file for processing center and model + # read geocenter file for processing center and model model_str = 'OMCT' if DREL in ('RL04','RL05') else 'MPIOM' grace_file = '{0}_{1}_{2}_{3}.txt'.format(PROC,DREL,model_str,input_flags[2]) DEG1 = geocenter().from_UCI(os.path.join(grace_dir,grace_file)) - #-- add axis labels and adjust font sizes for axis ticks + # add axis labels and adjust font sizes for axis ticks for j,key in enumerate(fig_labels): - #-- vertical lines for end of the GRACE mission and start of GRACE-FO + # vertical lines for end of the GRACE mission and start of GRACE-FO jj, = np.flatnonzero(DEG1.month == 186) kk, = np.flatnonzero(DEG1.month == 198) ax[j].axvspan(DEG1.time[jj],DEG1.time[kk], color='0.5',ls='dashed',alpha=0.15) - #-- axis label + # axis label ax[j].set_title(ylabels[key], style='italic', fontsize=14) ax[j].add_artist(AnchoredText(axes_labels[key], pad=0., prop=dict(size=16,weight='bold'), frameon=False, loc=2)) ax[j].set_xlabel('Time [Yr]', fontsize=14) - #-- set ticks + # set ticks xmin = 2002 + (START_MON + 1.0)//12.0 xmax = 2002 + (END_MON + 1.0)/12.0 major_ticks = np.arange(2005, xmax, 5) @@ -114,7 +114,7 @@ def geocenter_ocean_models(grace_dir,PROC,DREL,MODEL,START_MON,END_MON,MISSING): ax[j].xaxis.set_ticks(minor_ticks, minor=True) ax[j].set_xlim(xmin, xmax) ax[j].set_ylim(-9.5,8.5) - #-- axes tick adjustments + # axes tick adjustments ax[j].get_xaxis().set_tick_params(which='both', direction='in') ax[j].get_yaxis().set_tick_params(which='both', direction='in') for tick in ax[j].xaxis.get_major_ticks(): @@ -122,7 +122,7 @@ def geocenter_ocean_models(grace_dir,PROC,DREL,MODEL,START_MON,END_MON,MISSING): for tick in ax[j].yaxis.get_major_ticks(): tick.label.set_fontsize(14) - #-- add legend + # add legend lgd = ax[0].legend(loc=3,frameon=False) lgd.get_frame().set_alpha(1.0) for line in lgd.get_lines(): @@ -130,38 +130,38 @@ def geocenter_ocean_models(grace_dir,PROC,DREL,MODEL,START_MON,END_MON,MISSING): for i,text in enumerate(lgd.get_texts()): text.set_weight('bold') text.set_color(plot_colors[i]) - #-- labels and set limits + # labels and set limits ax[0].set_ylabel('Geocenter Variation [mm]', fontsize=14) - #-- adjust locations of subplots + # adjust locations of subplots fig.subplots_adjust(left=0.06,right=0.98,bottom=0.12,top=0.94,wspace=0.05) - #-- save figure to file + # save figure to file OUTPUT_FIGURE = f'SV19_{PROC}_{DREL}_ocean_models.pdf' plt.savefig(os.path.join(grace_dir,OUTPUT_FIGURE), format='pdf', dpi=300) plt.clf() -#-- PURPOSE: create argument parser +# PURPOSE: create argument parser def arguments(): parser = argparse.ArgumentParser( description="""Plots the GRACE/GRACE-FO geocenter time series comparing results using different ocean bottom pressure estimates """ ) - #-- working data directory + # working data directory 
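# NOTE: a minimal, self-contained sketch of the path-normalizing pattern
# that the '--directory' option below relies on, where a lambda expands
# '~' and resolves the argument to an absolute path (hedged example: the
# parser name and sample path here are hypothetical, not from the patch):
#
#     import argparse
#     import os
#     demo_parser = argparse.ArgumentParser()
#     demo_parser.add_argument('--directory', '-D',
#         type=lambda p: os.path.abspath(os.path.expanduser(p)),
#         default=os.getcwd())
#     demo_args = demo_parser.parse_args(['--directory', '~/data'])
#     # demo_args.directory is now absolute, e.g. '/home/user/data'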
parser.add_argument('--directory','-D', type=lambda p: os.path.abspath(os.path.expanduser(p)), default=os.getcwd(), help='Working data directory') - #-- GRACE/GRACE-FO processing center + # GRACE/GRACE-FO processing center parser.add_argument('--center','-c', metavar='PROC', type=str, nargs='+', default=['CSR','GFZ','JPL'], choices=['CSR','GFZ','JPL'], help='GRACE/GRACE-FO processing center') - #-- GRACE/GRACE-FO data release + # GRACE/GRACE-FO data release parser.add_argument('--release','-r', metavar='DREL', type=str, default='RL06', choices=['RL04','RL05','RL06'], help='GRACE/GRACE-FO data release') - #-- start and end GRACE/GRACE-FO months + # start and end GRACE/GRACE-FO months parser.add_argument('--start','-S', type=int, default=4, help='Starting GRACE/GRACE-FO month for time series') @@ -176,20 +176,20 @@ def arguments(): parser.add_argument('--ocean','-O', type=str, nargs='+', help='Ocean bottom pressure products to use') - #-- return the parser + # return the parser return parser -#-- This is the main part of the program that calls the individual functions +# This is the main part of the program that calls the individual functions def main(): - #-- Read the system arguments listed after the program + # Read the system arguments listed after the program parser = arguments() args,_ = parser.parse_known_args() - #-- run program with parameters + # run program with parameters for PROC in args.center: geocenter_ocean_models(args.directory, PROC, args.release, args.ocean, args.start, args.end, args.missing) -#-- run main program +# run main program if __name__ == '__main__': main() diff --git a/scripts/geocenter_processing_centers.py b/scripts/geocenter_processing_centers.py index 1a733c24..e41d8d4f 100644 --- a/scripts/geocenter_processing_centers.py +++ b/scripts/geocenter_processing_centers.py @@ -43,56 +43,56 @@ from gravity_toolkit.time import convert_calendar_decimal from gravity_toolkit.geocenter import geocenter -#-- rebuilt the matplotlib fonts and set parameters +# rebuilt the matplotlib fonts and set parameters matplotlib.font_manager._load_fontmanager() matplotlib.rcParams['font.family'] = 'sans-serif' matplotlib.rcParams['font.sans-serif'] = ['Helvetica'] matplotlib.rcParams['mathtext.default'] = 'regular' -#-- PURPOSE: plots the GRACE/GRACE-FO geocenter time series +# PURPOSE: plots the GRACE/GRACE-FO geocenter time series def geocenter_processing_centers(grace_dir,PROC,DREL,START_MON,END_MON,MISSING): - #-- GRACE months + # GRACE months GAP = [187,188,189,190,191,192,193,194,195,196,197] months = sorted(set(np.arange(START_MON,END_MON+1)) - set(MISSING)) - #-- labels for each scenario + # labels for each scenario input_flags = ['','iter','SLF_iter','SLF_iter_wSLR21','SLF_iter_wSLR21_wSLR22'] input_labels = ['Static','Iterated','Iterated SLF'] - #-- labels for Release-6 + # labels for Release-6 model_str = 'OMCT' if DREL in ('RL04','RL05') else 'MPIOM' - #-- degree one coefficient labels + # degree one coefficient labels fig_labels = ['C11','S11','C10'] axes_labels = dict(C10='c)',C11='a)',S11='b)') ylabels = dict(C10='z',C11='x',S11='y') - #-- plot colors for each dataset + # plot colors for each dataset plot_colors = dict(CSR='darkorange',GFZ='darkorchid',JPL='mediumseagreen') plot_colors['GFZwPT'] = 'dodgerblue' plot_colors['GFZ+CS21'] = 'darkorchid' plot_colors['GFZ+CS21+CS22'] = 'darkorchid' - #-- 3 row plot (C10, C11 and S11) + # 3 row plot (C10, C11 and S11) ax = {} fig,(ax[0],ax[1],ax[2])=plt.subplots(num=1,ncols=3,sharey=True,figsize=(9,4)) - #-- plot geocenter 
estimates for each processing center + # plot geocenter estimates for each processing center for k,pr in enumerate(PROC): - #-- additionally plot GFZ with SLR replaced pole tide + # additionally plot GFZ with SLR replaced pole tide if pr in ('GFZwPT','GFZ+CS21'): fargs = ('GFZ',DREL,model_str,input_flags[3]) elif (pr == 'GFZ+CS21+CS22'): fargs = ('GFZ',DREL,model_str,input_flags[4]) else: fargs = (pr,DREL,model_str,input_flags[2]) - #-- read geocenter file for processing center and model + # read geocenter file for processing center and model grace_file = '{0}_{1}_{2}_{3}.txt'.format(*fargs) DEG1 = geocenter().from_UCI(os.path.join(grace_dir,grace_file)) - #-- indices for mean months + # indices for mean months kk, = np.nonzero((DEG1.month >= START_MON) & (DEG1.month <= 176)) DEG1.mean(apply=True, indices=kk) - #-- setting Load Love Number (kl) to 0.021 to match Swenson et al. (2008) + # setting Load Love Number (kl) to 0.021 to match Swenson et al. (2008) DEG1.to_cartesian(kl=0.021) - #-- plot each coefficient + # plot each coefficient for j,key in enumerate(fig_labels): - #-- create a time series with nans for missing months + # create a time series with nans for missing months tdec = np.full_like(months,np.nan,dtype=np.float64) data = np.full_like(months,np.nan,dtype=np.float64) val = getattr(DEG1, ylabels[key].upper()) @@ -102,26 +102,26 @@ def geocenter_processing_centers(grace_dir,PROC,DREL,START_MON,END_MON,MISSING): mm, = np.nonzero(DEG1.month == m) tdec[i] = DEG1.time[mm] data[i] = val[mm] - #-- plot all dates + # plot all dates ax[j].plot(tdec, data, color=plot_colors[pr], label=pr) - #-- add axis labels and adjust font sizes for axis ticks + # add axis labels and adjust font sizes for axis ticks for j,key in enumerate(fig_labels): - #-- vertical line denoting the accelerometer shutoff + # vertical line denoting the accelerometer shutoff acc = convert_calendar_decimal(2016,9,day=3,hour=12,minute=12) ax[j].axvline(acc,color='0.5',ls='dashed',lw=0.5,dashes=(8,4)) - #-- vertical lines for end of the GRACE mission and start of GRACE-FO + # vertical lines for end of the GRACE mission and start of GRACE-FO jj, = np.flatnonzero(DEG1.month == 186) kk, = np.flatnonzero(DEG1.month == 198) vs = ax[j].axvspan(DEG1.time[jj],DEG1.time[kk], color='0.5',ls='dashed',alpha=0.15) vs._dashes = (4,2) - #-- axis label + # axis label ax[j].set_title(ylabels[key], style='italic', fontsize=14) ax[j].add_artist(AnchoredText(axes_labels[key], pad=0., prop=dict(size=16,weight='bold'), frameon=False, loc=2)) ax[j].set_xlabel('Time [Yr]', fontsize=14) - #-- set ticks + # set ticks xmin = 2002 + (START_MON + 1.0)//12.0 xmax = 2002 + (END_MON + 1.0)/12.0 major_ticks = np.arange(2005, xmax, 5) @@ -130,7 +130,7 @@ def geocenter_processing_centers(grace_dir,PROC,DREL,START_MON,END_MON,MISSING): ax[j].xaxis.set_ticks(minor_ticks, minor=True) ax[j].set_xlim(xmin, xmax) ax[j].set_ylim(-9.5,8.5) - #-- axes tick adjustments + # axes tick adjustments ax[j].get_xaxis().set_tick_params(which='both', direction='in') ax[j].get_yaxis().set_tick_params(which='both', direction='in') for tick in ax[j].xaxis.get_major_ticks(): @@ -138,7 +138,7 @@ def geocenter_processing_centers(grace_dir,PROC,DREL,START_MON,END_MON,MISSING): for tick in ax[j].yaxis.get_major_ticks(): tick.label.set_fontsize(14) - #-- add legend + # add legend lgd = ax[0].legend(loc=3,frameon=False) lgd.get_frame().set_alpha(1.0) for line in lgd.get_lines(): @@ -146,38 +146,38 @@ def geocenter_processing_centers(grace_dir,PROC,DREL,START_MON,END_MON,MISSING): 
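# NOTE: the legend styling in this hunk bolds each legend label and
# colors it to match its plotted line. A minimal standalone sketch of
# that pattern (hedged example: the dataset names and colors here are
# placeholders, not taken from the patch):
#
#     import matplotlib.pyplot as plt
#     demo_colors = {'CSR': 'darkorange', 'JPL': 'mediumseagreen'}
#     fig, ax = plt.subplots()
#     for name, color in demo_colors.items():
#         ax.plot([0, 1], [0, 1], color=color, label=name)
#     lgd = ax.legend(loc=3, frameon=False)
#     for text in lgd.get_texts():
#         text.set_weight('bold')
#         text.set_color(demo_colors[text.get_text()])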
for i,text in enumerate(lgd.get_texts()): text.set_weight('bold') text.set_color(plot_colors[text.get_text()]) - #-- labels and set limits + # labels and set limits ax[0].set_ylabel('Geocenter Variation [mm]', fontsize=14) - #-- adjust locations of subplots + # adjust locations of subplots fig.subplots_adjust(left=0.06,right=0.98,bottom=0.12,top=0.94,wspace=0.05) - #-- save figure to file + # save figure to file OUTPUT_FIGURE = f'SV19_{DREL}_centers.pdf' plt.savefig(os.path.join(grace_dir,OUTPUT_FIGURE), format='pdf', dpi=300) plt.clf() -#-- PURPOSE: create argument parser +# PURPOSE: create argument parser def arguments(): parser = argparse.ArgumentParser( description="""Plots the GRACE/GRACE-FO geocenter time series for different GRACE/GRACE-FO processing centers """ ) - #-- working data directory + # working data directory parser.add_argument('--directory','-D', type=lambda p: os.path.abspath(os.path.expanduser(p)), default=os.getcwd(), help='Working data directory') - #-- Data processing center or satellite mission + # Data processing center or satellite mission PROC = ['CSR','GFZ','GFZwPT','JPL'] parser.add_argument('--center','-c', metavar='PROC', type=str, nargs='+', default=PROC, help='GRACE/GRACE-FO Processing Center') - #-- GRACE/GRACE-FO data release + # GRACE/GRACE-FO data release parser.add_argument('--release','-r', metavar='DREL', type=str, default='RL06', choices=['RL04','RL05','RL06'], help='GRACE/GRACE-FO data release') - #-- start and end GRACE/GRACE-FO months + # start and end GRACE/GRACE-FO months parser.add_argument('--start','-S', type=int, default=4, help='Starting GRACE/GRACE-FO month for time series') @@ -189,19 +189,19 @@ def arguments(): parser.add_argument('--missing','-M', metavar='MISSING', type=int, nargs='+', default=MISSING, help='Missing GRACE/GRACE-FO months in time series') - #-- return the parser + # return the parser return parser -#-- This is the main part of the program that calls the individual functions +# This is the main part of the program that calls the individual functions def main(): - #-- Read the system arguments listed after the program + # Read the system arguments listed after the program parser = arguments() args,_ = parser.parse_known_args() - #-- run program with parameters + # run program with parameters geocenter_processing_centers(args.directory, args.center, args.release, args.start, args.end, args.missing) -#-- run main program +# run main program if __name__ == '__main__': main() diff --git a/scripts/gfz_icgem_costg_ftp.py b/scripts/gfz_icgem_costg_ftp.py index 4e95b001..2714cc84 100644 --- a/scripts/gfz_icgem_costg_ftp.py +++ b/scripts/gfz_icgem_costg_ftp.py @@ -57,67 +57,67 @@ import posixpath import gravity_toolkit.utilities -#-- PURPOSE: create and compile regular expression operator to find files +# PURPOSE: create and compile regular expression operator to find files def compile_regex_pattern(MISSION, DSET): if ((DSET == 'GSM') and (MISSION == 'Swarm')): - #-- regular expression operators for Swarm data + # regular expression operators for Swarm data regex=r'(SW)_(.*?)_(EGF_SHA_2)__(.*?)_(.*?)_(.*?)(\.gfc|\.ZIP)' elif ((DSET != 'GSM') and (MISSION == 'Swarm')): regex=r'(GAA|GAB|GAC|GAD)_Swarm_(\d+)_(\d{2})_(\d{4})(\.gfc|\.ZIP)' else: regex=rf'{DSET}-2_(.*?)\.gfc$' - #-- return the compiled regular expression operator used to find files + # return the compiled regular expression operator used to find files return re.compile(regex, re.VERBOSE) -#-- PURPOSE: sync local GRACE/GRACE-FO/Swarm files with GFZ ICGEM server +# 
PURPOSE: sync local GRACE/GRACE-FO/Swarm files with GFZ ICGEM server def gfz_icgem_costg_ftp(DIRECTORY, MISSION=[], RELEASE=None, TIMEOUT=None, LOG=False, LIST=False, CLOBBER=False, CHECKSUM=False, MODE=None): - #-- connect and login to GFZ ICGEM ftp server + # connect and login to GFZ ICGEM ftp server ftp = ftplib.FTP('icgem.gfz-potsdam.de', timeout=TIMEOUT) ftp.login() - #-- check if directory exists and recursively create if not + # check if directory exists and recursively create if not os.makedirs(DIRECTORY,MODE) if not os.path.exists(DIRECTORY) else None - #-- dealiasing datasets for each mission + # dealiasing datasets for each mission DSET = {} DSET['Grace'] = ['GAC','GSM'] DSET['Grace-FO'] = ['GSM'] DSET['Swarm'] = ['GAA','GAB','GAC','GAD','GSM'] - #-- local subdirectory for data + # local subdirectory for data LOCAL = {} LOCAL['Grace'] = 'COSTG' LOCAL['Grace-FO'] = 'COSTG' LOCAL['Swarm'] = 'Swarm' - #-- create log file with list of synchronized files (or print to terminal) + # create log file with list of synchronized files (or print to terminal) if LOG: - #-- output to log file - #-- format: GFZ_ICGEM_COST-G_sync_2002-04-01.log + # output to log file + # format: GFZ_ICGEM_COST-G_sync_2002-04-01.log today = time.strftime('%Y-%m-%d',time.localtime()) LOGFILE = f'GFZ_ICGEM_COST-G_sync_{today}.log' logging.basicConfig(filename=os.path.join(DIRECTORY,LOGFILE), level=logging.INFO) logging.info(f'GFZ ICGEM COST-G Sync Log ({today})') else: - #-- standard output (terminal output) + # standard output (terminal output) logging.basicConfig(level=logging.INFO) - #-- find files for a particular mission + # find files for a particular mission logging.info(f'{MISSION} Spherical Harmonics:') - #-- Sync gravity field dealiasing products + # Sync gravity field dealiasing products for ds in DSET[MISSION]: - #-- print string of exact data product + # print string of exact data product logging.info(f'{MISSION}/{RELEASE}/{ds}') - #-- local directory for exact data product + # local directory for exact data product local_dir = os.path.join(DIRECTORY,LOCAL[MISSION],RELEASE,ds) - #-- check if directory exists and recursively create if not + # check if directory exists and recursively create if not if not os.path.exists(local_dir): os.makedirs(local_dir,MODE) - #-- compile the regular expression operator to find files + # compile the regular expression operator to find files R1 = compile_regex_pattern(MISSION, ds) - #-- set the remote path to download files + # set the remote path to download files if ds in ('GAA','GAB','GAC','GAD') and (MISSION == 'Swarm'): remote_path = [ftp.host,'02_COST-G',MISSION,'GAX_products',ds] elif ds in ('GAA','GAB','GAC','GAD') and (MISSION != 'Swarm'): @@ -128,62 +128,62 @@ def gfz_icgem_costg_ftp(DIRECTORY, MISSION=[], RELEASE=None, TIMEOUT=None, remote_path = [ftp.host,'02_COST-G',MISSION,'unfiltered'] elif (MISSION == 'Grace-FO'): remote_path = [ftp.host,'02_COST-G',MISSION] - #-- get filenames from remote directory + # get filenames from remote directory remote_files,remote_mtimes = gravity_toolkit.utilities.ftp_list( remote_path, timeout=TIMEOUT, basename=True, pattern=R1, sort=True) - #-- download the file from the ftp server + # download the file from the ftp server for fi,remote_mtime in zip(remote_files,remote_mtimes): - #-- remote and local versions of the file + # remote and local versions of the file remote_path.append(fi) local_file = os.path.join(local_dir,fi) ftp_mirror_file(ftp, remote_path, remote_mtime, local_file, TIMEOUT=TIMEOUT, LIST=LIST, CLOBBER=CLOBBER,
CHECKSUM=CHECKSUM, MODE=MODE) - #-- remove the file from the remote path list + # remove the file from the remote path list remote_path.remove(fi) - #-- find local GRACE/GRACE-FO/Swarm files to create index + # find local GRACE/GRACE-FO/Swarm files to create index grace_files=[fi for fi in os.listdir(local_dir) if R1.match(fi)] - #-- write each file to an index + # write each file to an index with open(os.path.join(local_dir,'index.txt'),'w') as fid: - #-- output GRACE/GRACE-FO/Swarm filenames to index + # output GRACE/GRACE-FO/Swarm filenames to index for fi in sorted(grace_files): print(fi, file=fid) - #-- change permissions of index file + # change permissions of index file os.chmod(os.path.join(local_dir,'index.txt'), MODE) - #-- close the ftp connection + # close the ftp connection ftp.quit() - #-- close log file and set permissions level to MODE + # close log file and set permissions level to MODE if LOG: os.chmod(os.path.join(DIRECTORY,LOGFILE), MODE) -#-- PURPOSE: pull file from a remote host checking if file exists locally -#-- and if the remote file is newer than the local file +# PURPOSE: pull file from a remote host checking if file exists locally +# and if the remote file is newer than the local file def ftp_mirror_file(ftp,remote_path,remote_mtime,local_file, TIMEOUT=None,LIST=False,CLOBBER=False,CHECKSUM=False,MODE=0o775): - #-- if file exists in file system: check if remote file is newer + # if file exists in file system: check if remote file is newer TEST = False OVERWRITE = ' (clobber)' - #-- check if local version of file exists + # check if local version of file exists if CHECKSUM and os.access(local_file, os.F_OK): - #-- generate checksum hash for local file - #-- open the local_file in binary read mode + # generate checksum hash for local file + # open the local_file in binary read mode with open(local_file, 'rb') as local_buffer: local_hash = hashlib.md5(local_buffer.read()).hexdigest() - #-- copy remote file contents to bytesIO object + # copy remote file contents to bytesIO object remote_buffer = gravity_toolkit.utilities.from_ftp(remote_path, timeout=TIMEOUT) - #-- generate checksum hash for remote file + # generate checksum hash for remote file remote_hash = hashlib.md5(remote_buffer.getvalue()).hexdigest() - #-- compare checksums + # compare checksums if (local_hash != remote_hash): TEST = True OVERWRITE = f' (checksums: {local_hash} {remote_hash})' elif os.access(local_file, os.F_OK): - #-- check last modification time of local file + # check last modification time of local file local_mtime = os.stat(local_file).st_mtime - #-- if remote file is newer: overwrite the local file + # if remote file is newer: overwrite the local file if (gravity_toolkit.utilities.even(remote_mtime) > gravity_toolkit.utilities.even(local_mtime)): TEST = True @@ -191,63 +191,63 @@ def ftp_mirror_file(ftp,remote_path,remote_mtime,local_file, else: TEST = True OVERWRITE = ' (new)' - #-- if file does not exist locally, is to be overwritten, or CLOBBER is set + # if file does not exist locally, is to be overwritten, or CLOBBER is set if TEST or CLOBBER: - #-- Printing files transferred + # Printing files transferred remote_ftp_url = posixpath.join('ftp://',*remote_path) logging.info(f'{remote_ftp_url} -->') logging.info(f'\t{local_file}{OVERWRITE}\n') - #-- if executing copy command (not only printing the files) + # if executing copy command (not only printing the files) if not LIST: - #-- copy file from ftp server or from bytesIO object + # copy file from ftp server or from bytesIO 
object if CHECKSUM and os.access(local_file, os.F_OK): - #-- store bytes to file using chunked transfer encoding + # store bytes to file using chunked transfer encoding remote_buffer.seek(0) with open(local_file, 'wb') as f: shutil.copyfileobj(remote_buffer, f, 16 * 1024) else: - #-- path to remote file + # path to remote file remote_file = posixpath.join(*remote_path[1:]) - #-- copy remote file contents to local file + # copy remote file contents to local file with open(local_file, 'wb') as f: ftp.retrbinary(f'RETR {remote_file}', f.write) - #-- keep remote modification time of file and local access time + # keep remote modification time of file and local access time os.utime(local_file, (os.stat(local_file).st_atime, remote_mtime)) os.chmod(local_file, MODE) -#-- PURPOSE: create argument parser +# PURPOSE: create argument parser def arguments(): parser = argparse.ArgumentParser( description="""Syncs GRACE/GRACE-FO/Swarm COST-G data from the GFZ International Centre for Global Earth Models (ICGEM) """ ) - #-- command line parameters - #-- working data directory + # command line parameters + # working data directory parser.add_argument('--directory','-D', type=lambda p: os.path.abspath(os.path.expanduser(p)), default=os.getcwd(), help='Working data directory') - #-- mission (GRACE, GRACE Follow-On or Swarm) + # mission (GRACE, GRACE Follow-On or Swarm) choices = ['Grace','Grace-FO','Swarm'] parser.add_argument('--mission','-m', type=str, nargs='+', default=['Grace','Grace-FO','Swarm'], choices=choices, help='Mission to sync between GRACE, GRACE-FO and Swarm') - #-- data release + # data release parser.add_argument('--release','-r', type=str, default='RL01', choices=['RL01'], help='Data release to sync') - #-- connection timeout + # connection timeout parser.add_argument('--timeout','-t', type=int, default=360, help='Timeout in seconds for blocking operations') - #-- Output log file in form - #-- GFZ_ICGEM_COST-G_sync_2002-04-01.log + # Output log file in form + # GFZ_ICGEM_COST-G_sync_2002-04-01.log parser.add_argument('--log','-l', default=False, action='store_true', help='Output log file') - #-- sync options + # sync options parser.add_argument('--list','-L', default=False, action='store_true', help='Only print files that could be transferred') @@ -257,20 +257,20 @@ def arguments(): parser.add_argument('--clobber','-C', default=False, action='store_true', help='Overwrite existing data in transfer') - #-- permissions mode of the directories and files synced (number in octal) + # permissions mode of the directories and files synced (number in octal) parser.add_argument('--mode','-M', type=lambda x: int(x,base=8), default=0o775, help='Permission mode of directories and files synced') - #-- return the parser + # return the parser return parser -#-- This is the main part of the program that calls the individual functions +# This is the main part of the program that calls the individual functions def main(): - #-- Read the system arguments listed after the program + # Read the system arguments listed after the program parser = arguments() args,_ = parser.parse_known_args() - #-- check internet connection before attempting to run program + # check internet connection before attempting to run program HOST = 'icgem.gfz-potsdam.de' if gravity_toolkit.utilities.check_ftp_connection(HOST): for m in args.mission: @@ -281,6 +281,6 @@ def main(): else: raise RuntimeError('Check internet connection') -#-- run main program +# run main program if __name__ == '__main__': main() diff --git 
a/scripts/gfz_isdc_dealiasing_ftp.py b/scripts/gfz_isdc_dealiasing_ftp.py index fb4b80fd..60071235 100644 --- a/scripts/gfz_isdc_dealiasing_ftp.py +++ b/scripts/gfz_isdc_dealiasing_ftp.py @@ -56,111 +56,111 @@ import posixpath import gravity_toolkit.utilities -#-- PURPOSE: syncs GRACE Level-1b dealiasing products from the GFZ data server -#-- and optionally outputs as monthly tar files +# PURPOSE: syncs GRACE Level-1b dealiasing products from the GFZ data server +# and optionally outputs as monthly tar files def gfz_isdc_dealiasing_ftp(base_dir, DREL, YEAR=None, MONTHS=None, TAR=False, TIMEOUT=None, LOG=False, CLOBBER=False, MODE=None): - #-- output data directory + # output data directory grace_dir = os.path.join(base_dir,'AOD1B',DREL) os.makedirs(grace_dir) if not os.access(grace_dir,os.F_OK) else None - #-- create log file with list of synchronized files (or print to terminal) + # create log file with list of synchronized files (or print to terminal) if LOG: - #-- output to log file - #-- format: GFZ_AOD1B_sync_2002-04-01.log + # output to log file + # format: GFZ_AOD1B_sync_2002-04-01.log today = time.strftime('%Y-%m-%d',time.localtime()) LOGFILE = f'GFZ_AOD1B_sync_{today}.log' logging.basicConfig(filename=os.path.join(base_dir,LOGFILE), level=logging.INFO) logging.info(f'GFZ AOD1b Sync Log ({today})') else: - #-- standard output (terminal output) + # standard output (terminal output) logging.basicConfig(level=logging.INFO) - #-- remote HOST for DREL on GFZ data server - #-- connect and login to GFZ ftp server + # remote HOST for DREL on GFZ data server + # connect and login to GFZ ftp server ftp = ftplib.FTP('isdcftp.gfz-potsdam.de',timeout=TIMEOUT) ftp.login() - #-- compile regular expression operator for years to sync + # compile regular expression operator for years to sync if YEAR is None: regex_years = r'\d{4}' else: regex_years = r'|'.join(rf'{y:d}' for y in YEAR) - #-- compile regular expression operator for years to sync + # compile regular expression operator for years to sync R1 = re.compile(rf'({regex_years})', re.VERBOSE) - #-- suffix for each data release + # suffix for each data release SUFFIX = dict(RL04='tar.gz',RL05='tar.gz',RL06='tgz') - #-- find remote yearly directories for DREL + # find remote yearly directories for DREL YRS,_ = gravity_toolkit.utilities.ftp_list([ftp.host,'grace', 'Level-1B', 'GFZ','AOD',DREL], timeout=TIMEOUT, basename=True, pattern=R1, sort=True) - #-- for each year + # for each year for Y in YRS: - #-- for each month of interest + # for each month of interest for M in MONTHS: - #-- output tar file for year and month + # output tar file for year and month args = (Y, M, DREL.replace('RL',''), SUFFIX[DREL]) FILE = 'AOD1B_{0}-{1:02d}_{2}.{3}'.format(*args) - #-- check if output tar file exists (if TAR) + # check if output tar file exists (if TAR) TEST = not os.access(os.path.join(grace_dir,FILE), os.F_OK) - #-- compile regular expressions operators for file dates - #-- will extract year and month and calendar day from the ascii file + # compile regular expressions operators for file dates + # will extract year and month and calendar day from the ascii file regex_pattern = r'AOD1B_({0})-({1:02d})-(\d+)_X_\d+.asc.gz$' R2 = re.compile(regex_pattern.format(Y,M), re.VERBOSE) remote_files,remote_mtimes = gravity_toolkit.utilities.ftp_list( [ftp.host,'grace','Level-1B','GFZ','AOD',DREL,Y], timeout=TIMEOUT, basename=True, pattern=R2, sort=True) file_count = len(remote_files) - #-- if compressing into monthly tar files + # if compressing into monthly tar 
files if TAR and (file_count > 0) and (TEST or CLOBBER): - #-- copy each gzip file and store within monthly tar files + # copy each gzip file and store within monthly tar files tar = tarfile.open(name=os.path.join(grace_dir,FILE),mode='w:gz') for fi,remote_mtime in zip(remote_files,remote_mtimes): - #-- remote version of each input file + # remote version of each input file remote = [ftp.host,'grace','Level-1B','GFZ','AOD',DREL,Y,fi] logging.info(posixpath.join('ftp://',*remote)) - #-- retrieve bytes from remote file + # retrieve bytes from remote file remote_buffer = gravity_toolkit.utilities.from_ftp(remote, timeout=TIMEOUT) - #-- add file to tar + # add file to tar tar_info = tarfile.TarInfo(name=fi) tar_info.mtime = remote_mtime tar_info.size = remote_buffer.getbuffer().nbytes tar.addfile(tarinfo=tar_info, fileobj=remote_buffer) - #-- close tar file and set permissions level to MODE + # close tar file and set permissions level to MODE tar.close() logging.info(' --> {0}\n'.format(os.path.join(grace_dir,FILE))) os.chmod(os.path.join(grace_dir,FILE), MODE) elif (file_count > 0) and not TAR: - #-- copy each gzip file and keep as individual daily files + # copy each gzip file and keep as individual daily files for fi,remote_mtime in zip(remote_files,remote_mtimes): - #-- remote and local version of each input file + # remote and local version of each input file remote = [ftp.host,'grace','Level-1B','GFZ','AOD',DREL,Y,fi] local = os.path.join(grace_dir,fi) ftp_mirror_file(ftp,remote,remote_mtime,local, CLOBBER=CLOBBER,MODE=MODE) - #-- close the ftp connection + # close the ftp connection ftp.quit() - #-- close log file and set permissions level to MODE + # close log file and set permissions level to MODE if LOG: os.chmod(os.path.join(base_dir,LOGFILE), MODE) -#-- PURPOSE: pull file from a remote host checking if file exists locally -#-- and if the remote file is newer than the local file +# PURPOSE: pull file from a remote host checking if file exists locally +# and if the remote file is newer than the local file def ftp_mirror_file(ftp,remote_path,remote_mtime,local_file, CLOBBER=False,MODE=0o775): - #-- path to remote file + # path to remote file remote_file = posixpath.join(*remote_path[1:]) - #-- if file exists in file system: check if remote file is newer + # if file exists in file system: check if remote file is newer TEST = False OVERWRITE = ' (clobber)' - #-- check if local version of file exists + # check if local version of file exists if os.access(local_file, os.F_OK): - #-- check last modification time of local file + # check last modification time of local file local_mtime = os.stat(local_file).st_mtime - #-- if remote file is newer: overwrite the local file + # if remote file is newer: overwrite the local file if (gravity_toolkit.utilities.even(remote_mtime) > gravity_toolkit.utilities.even(local_mtime)): TEST = True @@ -168,76 +168,76 @@ def ftp_mirror_file(ftp,remote_path,remote_mtime,local_file, else: TEST = True OVERWRITE = ' (new)' - #-- if file does not exist locally, is to be overwritten, or CLOBBER is set + # if file does not exist locally, is to be overwritten, or CLOBBER is set if TEST or CLOBBER: - #-- Printing files transferred + # Printing files transferred remote_ftp_url = posixpath.join('ftp://',*remote_path) logging.info(f'{remote_ftp_url} -->') logging.info(f'\t{local_file}{OVERWRITE}\n') - #-- copy remote file contents to local file + # copy remote file contents to local file with open(local_file, 'wb') as f: ftp.retrbinary(f'RETR {remote_file}', f.write) - 
#-- keep remote modification time of file and local access time + # keep remote modification time of file and local access time os.utime(local_file, (os.stat(local_file).st_atime, remote_mtime)) os.chmod(local_file, MODE) -#-- PURPOSE: create argument parser +# PURPOSE: create argument parser def arguments(): parser = argparse.ArgumentParser( description="""Syncs GRACE Level-1b dealiasing products from the GFZ Information System and Data Center (ISDC) """ ) - #-- command line parameters - #-- working data directory + # command line parameters + # working data directory parser.add_argument('--directory','-D', type=lambda p: os.path.abspath(os.path.expanduser(p)), default=os.getcwd(), help='Working data directory') - #-- GRACE/GRACE-FO data release + # GRACE/GRACE-FO data release parser.add_argument('--release','-r', metavar='DREL', type=str, nargs='+', default=['RL06'], choices=['RL04','RL05','RL06'], help='GRACE/GRACE-FO data release') - #-- years to download + # years to download parser.add_argument('--year','-Y', type=int, nargs='+', default=range(2000,2021), help='Years of data to sync') - #-- months to download + # months to download parser.add_argument('--month','-m', type=int, nargs='+', default=range(1,13), help='Months of data to sync') - #-- output dealiasing files as monthly tar files + # output dealiasing files as monthly tar files parser.add_argument('--tar','-T', default=False, action='store_true', help='Output data as monthly tar files') - #-- connection timeout + # connection timeout parser.add_argument('--timeout','-t', type=int, default=360, help='Timeout in seconds for blocking operations') - #-- Output log file in form - #-- GFZ_AOD1B_sync_2002-04-01.log + # Output log file in form + # GFZ_AOD1B_sync_2002-04-01.log parser.add_argument('--log','-l', default=False, action='store_true', help='Output log file') - #-- sync options + # sync options parser.add_argument('--clobber','-C', default=False, action='store_true', help='Overwrite existing data in transfer') - #-- permissions mode of the directories and files synced (number in octal) + # permissions mode of the directories and files synced (number in octal) parser.add_argument('--mode','-M', type=lambda x: int(x,base=8), default=0o775, help='Permission mode of directories and files synced') - #-- return the parser + # return the parser return parser -#-- This is the main part of the program that calls the individual functions +# This is the main part of the program that calls the individual functions def main(): - #-- Read the system arguments listed after the program + # Read the system arguments listed after the program parser = arguments() args,_ = parser.parse_known_args() - #-- check internet connection before attempting to run program + # check internet connection before attempting to run program HOST = 'isdcftp.gfz-potsdam.de' if gravity_toolkit.utilities.check_ftp_connection(HOST): for DREL in args.release: @@ -246,6 +246,6 @@ def main(): TIMEOUT=args.timeout, LOG=args.log, CLOBBER=args.clobber, MODE=args.mode) -#-- run main program +# run main program if __name__ == '__main__': main() diff --git a/scripts/gfz_isdc_grace_ftp.py b/scripts/gfz_isdc_grace_ftp.py index 3828c82c..b7d9c59e 100644 --- a/scripts/gfz_isdc_grace_ftp.py +++ b/scripts/gfz_isdc_grace_ftp.py @@ -76,29 +76,29 @@ import gravity_toolkit.time import gravity_toolkit.utilities -#-- PURPOSE: sync local GRACE/GRACE-FO files with GFZ ISDC server +# PURPOSE: sync local GRACE/GRACE-FO files with GFZ ISDC server def gfz_isdc_grace_ftp(DIRECTORY, PROC=[], 
DREL=[], VERSION=[], NEWSLETTERS=False, TIMEOUT=None, LOG=False, LIST=False, CLOBBER=False, CHECKSUM=False, MODE=None): - #-- connect and login to GFZ ISDC ftp server + # connect and login to GFZ ISDC ftp server ftp = ftplib.FTP('isdcftp.gfz-potsdam.de', timeout=TIMEOUT) ftp.login() - #-- check if directory exists and recursively create if not + # check if directory exists and recursively create if not os.makedirs(DIRECTORY,MODE) if not os.path.exists(DIRECTORY) else None - #-- mission shortnames + # mission shortnames shortname = {'grace':'GRAC', 'grace-fo':'GRFO'} - #-- datasets for each processing center + # datasets for each processing center DSET = {} DSET['CSR'] = ['GAC', 'GAD', 'GSM'] DSET['GFZ'] = ['GAA', 'GAB', 'GAC', 'GAD', 'GSM'] DSET['JPL'] = ['GAA', 'GAB', 'GAC', 'GAD', 'GSM'] - #-- create log file with list of synchronized files (or print to terminal) + # create log file with list of synchronized files (or print to terminal) if LOG: - #-- output to log file - #-- format: GFZ_ISDC_sync_2002-04-01.log + # output to log file + # format: GFZ_ISDC_sync_2002-04-01.log today = time.strftime('%Y-%m-%d',time.localtime()) LOGFILE = f'GFZ_ISDC_sync_{today}.log' logging.basicConfig(filename=os.path.join(DIRECTORY,LOGFILE), @@ -107,205 +107,205 @@ def gfz_isdc_grace_ftp(DIRECTORY, PROC=[], DREL=[], VERSION=[], logging.info('CENTERS={0}'.format(','.join(PROC))) logging.info('RELEASES={0}'.format(','.join(DREL))) else: - #-- standard output (terminal output) + # standard output (terminal output) logging.basicConfig(level=logging.INFO) - #-- Degree 1 (geocenter) coefficients + # Degree 1 (geocenter) coefficients logging.info('Degree 1 Coefficients:') local_dir = os.path.join(DIRECTORY,'geocenter') - #-- check if geocenter directory exists and recursively create if not + # check if geocenter directory exists and recursively create if not os.makedirs(local_dir,MODE) if not os.path.exists(local_dir) else None - #-- TN-13 JPL degree 1 files - #-- compile regular expression operator for remote files + # TN-13 JPL degree 1 files + # compile regular expression operator for remote files R1 = re.compile(r'TN-13_GEOC_(CSR|GFZ|JPL)_(.*?).txt$', re.VERBOSE) - #-- get filenames from remote directory + # get filenames from remote directory remote_files,remote_mtimes = gravity_toolkit.utilities.ftp_list( [ftp.host,'grace-fo','DOCUMENTS','TECHNICAL_NOTES'], timeout=TIMEOUT, basename=True, pattern=R1, sort=True) - #-- for each file on the remote server + # for each file on the remote server for fi,remote_mtime in zip(remote_files,remote_mtimes): - #-- extract filename from regex object + # extract filename from regex object remote_path = [ftp.host,'grace-fo','DOCUMENTS','TECHNICAL_NOTES',fi] local_file = os.path.join(local_dir,fi) ftp_mirror_file(ftp, remote_path, remote_mtime, local_file, TIMEOUT=TIMEOUT, LIST=LIST, CLOBBER=CLOBBER, CHECKSUM=CHECKSUM, MODE=MODE) - #-- SLR C2,0 coefficients + # SLR C2,0 coefficients logging.info('C2,0 Coefficients:') local_dir = os.path.expanduser(DIRECTORY) - #-- compile regular expression operator for remote files + # compile regular expression operator for remote files R1 = re.compile(r'TN-(05|07|11)_C20_SLR_RL(.*?).txt$', re.VERBOSE) - #-- get filenames from remote directory + # get filenames from remote directory remote_files,remote_mtimes = gravity_toolkit.utilities.ftp_list( [ftp.host,'grace','DOCUMENTS','TECHNICAL_NOTES'], timeout=TIMEOUT, basename=True, pattern=R1, sort=True) - #-- for each file on the remote server + # for each file on the remote server for 
fi,remote_mtime in zip(remote_files,remote_mtimes): - #-- extract filename from regex object + # extract filename from regex object remote_path = [ftp.host,'grace','DOCUMENTS','TECHNICAL_NOTES',fi] local_file = os.path.join(local_dir,re.sub(r'(_RL.*?).txt','.txt',fi)) ftp_mirror_file(ftp, remote_path, remote_mtime, local_file, TIMEOUT=TIMEOUT, LIST=LIST, CLOBBER=CLOBBER, CHECKSUM=CHECKSUM, MODE=MODE) - #-- SLR C3,0 coefficients + # SLR C3,0 coefficients logging.info('C3,0 Coefficients:') local_dir = os.path.expanduser(DIRECTORY) - #-- compile regular expression operator for remote files + # compile regular expression operator for remote files R1 = re.compile(r'TN-(14)_C30_C20_SLR_GSFC.txt$', re.VERBOSE) - #-- get filenames from remote directory + # get filenames from remote directory remote_files,remote_mtimes = gravity_toolkit.utilities.ftp_list( [ftp.host,'grace-fo','DOCUMENTS','TECHNICAL_NOTES'], timeout=TIMEOUT, basename=True, pattern=R1, sort=True) - #-- for each file on the remote server + # for each file on the remote server for fi,remote_mtime in zip(remote_files,remote_mtimes): - #-- extract filename from regex object + # extract filename from regex object remote_path = [ftp.host,'grace-fo','DOCUMENTS','TECHNICAL_NOTES',fi] local_file = os.path.join(local_dir,re.sub(r'(SLR_GSFC)','GSFC_SLR',fi)) ftp_mirror_file(ftp, remote_path, remote_mtime, local_file, TIMEOUT=TIMEOUT, LIST=LIST, CLOBBER=CLOBBER, CHECKSUM=CHECKSUM, MODE=MODE) - #-- TN-08 GAE, TN-09 GAF and TN-10 GAG ECMWF atmosphere correction products + # TN-08 GAE, TN-09 GAF and TN-10 GAG ECMWF atmosphere correction products logging.info('TN-08 GAE, TN-09 GAF and TN-10 GAG products:') local_dir = os.path.expanduser(DIRECTORY) ECMWF_files = [] ECMWF_files.append('TN-08_GAE-2_2006032-2010031_0000_EIGEN_G---_0005.gz') ECMWF_files.append('TN-09_GAF-2_2010032-2015131_0000_EIGEN_G---_0005.gz') ECMWF_files.append('TN-10_GAG-2_2015132-2099001_0000_EIGEN_G---_0005.gz') - #-- compile regular expression operator for remote files + # compile regular expression operator for remote files R1 = re.compile(r'({0}|{1}|{2})'.format(*ECMWF_files), re.VERBOSE) - #-- get filenames from remote directory + # get filenames from remote directory remote_files,remote_mtimes = gravity_toolkit.utilities.ftp_list( [ftp.host,'grace','DOCUMENTS','TECHNICAL_NOTES'], timeout=TIMEOUT, basename=True, pattern=R1, sort=True) - #-- for each file on the remote server + # for each file on the remote server for fi,remote_mtime in zip(remote_files,remote_mtimes): - #-- extract filename from regex object + # extract filename from regex object remote_path = [ftp.host,'grace','DOCUMENTS','TECHNICAL_NOTES',fi] local_file = os.path.join(local_dir,fi) ftp_mirror_file(ftp, remote_path, remote_mtime, local_file, TIMEOUT=TIMEOUT, LIST=LIST, CLOBBER=CLOBBER, CHECKSUM=CHECKSUM, MODE=MODE) - #-- GRACE and GRACE-FO newsletters + # GRACE and GRACE-FO newsletters if NEWSLETTERS: - #-- local newsletter directory (place GRACE and GRACE-FO together) + # local newsletter directory (place GRACE and GRACE-FO together) local_dir = os.path.join(DIRECTORY,'newsletters') - #-- check if newsletters directory exists and recursively create if not + # check if newsletters directory exists and recursively create if not os.makedirs(local_dir,MODE) if not os.path.exists(local_dir) else None - #-- for each satellite mission (grace, grace-fo) + # for each satellite mission (grace, grace-fo) for i,mi in enumerate(['grace','grace-fo']): logging.info(f'{mi} Newsletters:') - #-- compile regular expression 
operator for remote files + # compile regular expression operator for remote files NAME = mi.upper().replace('-','_') R1 = re.compile(rf'{NAME}_SDS_NL_(\d+).pdf', re.VERBOSE) - #-- find years for GRACE/GRACE-FO newsletters + # find years for GRACE/GRACE-FO newsletters years,_ = gravity_toolkit.utilities.ftp_list( [ftp.host,mi,'DOCUMENTS','NEWSLETTER'], timeout=TIMEOUT, basename=True, pattern=r'\d+', sort=True) - #-- for each year of GRACE/GRACE-FO newsletters + # for each year of GRACE/GRACE-FO newsletters for Y in years: - #-- find GRACE/GRACE-FO newsletters + # find GRACE/GRACE-FO newsletters remote_files,remote_mtimes = gravity_toolkit.utilities.ftp_list( [ftp.host,mi,'DOCUMENTS','NEWSLETTER',Y], timeout=TIMEOUT, basename=True, pattern=R1, sort=True) - #-- for each file on the remote server + # for each file on the remote server for fi,remote_mtime in zip(remote_files,remote_mtimes): - #-- extract filename from regex object + # extract filename from regex object remote_path = [ftp.host,mi,'DOCUMENTS','NEWSLETTER',Y,fi] local_file = os.path.join(local_dir,fi) ftp_mirror_file(ftp, remote_path, remote_mtime, local_file, TIMEOUT=TIMEOUT, LIST=LIST, CLOBBER=CLOBBER, CHECKSUM=CHECKSUM, MODE=MODE) - #-- GRACE/GRACE-FO level-2 spherical harmonic products + # GRACE/GRACE-FO level-2 spherical harmonic products logging.info('GRACE/GRACE-FO L2 Global Spherical Harmonics:') - #-- for each processing center (CSR, GFZ, JPL) + # for each processing center (CSR, GFZ, JPL) for pr in PROC: - #-- for each data release (RL04, RL05, RL06) + # for each data release (RL04, RL05, RL06) for rl in DREL: - #-- for each level-2 product (GAC, GAD, GSM, GAA, GAB) + # for each level-2 product (GAC, GAD, GSM, GAA, GAB) for ds in DSET[pr]: - #-- local directory for exact data product + # local directory for exact data product local_dir = os.path.join(DIRECTORY, pr, rl, ds) - #-- check if directory exists and recursively create if not + # check if directory exists and recursively create if not if not os.path.exists(local_dir): os.makedirs(local_dir,MODE) - #-- list of GRACE/GRACE-FO files for index + # list of GRACE/GRACE-FO files for index grace_files = [] - #-- for each satellite mission (grace, grace-fo) + # for each satellite mission (grace, grace-fo) for i,mi in enumerate(['grace','grace-fo']): - #-- modifiers for intermediate data releases + # modifiers for intermediate data releases if (int(VERSION[i]) > 0): drel_str = f'{rl}.{VERSION[i]}' else: drel_str = copy.copy(rl) - #-- print string of exact data product + # print string of exact data product logging.info(f'{mi}/{pr}/{drel_str}/{ds}') - #-- compile the regular expression operator to find files + # compile the regular expression operator to find files R1 = re.compile(rf'({ds}-(.*?)(gz|txt|dif))') - #-- get filenames from remote directory + # get filenames from remote directory remote_files,remote_mtimes = gravity_toolkit.utilities.ftp_list( [ftp.host,mi,'Level-2',pr,drel_str], timeout=TIMEOUT, basename=True, pattern=R1, sort=True) for fi,remote_mtime in zip(remote_files,remote_mtimes): - #-- extract filename from regex object + # extract filename from regex object remote_path = [ftp.host,mi,'Level-2',pr,drel_str,fi] local_file = os.path.join(local_dir,fi) ftp_mirror_file(ftp, remote_path, remote_mtime, local_file, TIMEOUT=TIMEOUT, LIST=LIST, CLOBBER=CLOBBER, CHECKSUM=CHECKSUM, MODE=MODE) - #-- regular expression operator for data product + # regular expression operator for data product rx = gravity_toolkit.utilities.compile_regex_pattern( pr, rl, ds, 
mission=shortname[mi]) - #-- find local GRACE/GRACE-FO files to create index + # find local GRACE/GRACE-FO files to create index granules = [f for f in os.listdir(local_dir) if rx.match(f)] - #-- reduce list of GRACE/GRACE-FO files to unique dates + # reduce list of GRACE/GRACE-FO files to unique dates granules = gravity_toolkit.time.reduce_by_date(granules) - #-- extend list of GRACE/GRACE-FO files with granules + # extend list of GRACE/GRACE-FO files with granules grace_files.extend(granules) - #-- outputting GRACE/GRACE-FO filenames to index + # outputting GRACE/GRACE-FO filenames to index with open(os.path.join(local_dir,'index.txt'),'w') as fid: for fi in sorted(grace_files): print(fi, file=fid) - #-- change permissions of index file + # change permissions of index file os.chmod(os.path.join(local_dir,'index.txt'), MODE) - #-- close the ftp connection + # close the ftp connection ftp.quit() - #-- close log file and set permissions level to MODE + # close log file and set permissions level to MODE if LOG: os.chmod(os.path.join(DIRECTORY,LOGFILE), MODE) -#-- PURPOSE: pull file from a remote host checking if file exists locally -#-- and if the remote file is newer than the local file +# PURPOSE: pull file from a remote host checking if file exists locally +# and if the remote file is newer than the local file def ftp_mirror_file(ftp,remote_path,remote_mtime,local_file, TIMEOUT=None,LIST=False,CLOBBER=False,CHECKSUM=False,MODE=0o775): - #-- if file exists in file system: check if remote file is newer + # if file exists in file system: check if remote file is newer TEST = False OVERWRITE = ' (clobber)' - #-- check if local version of file exists + # check if local version of file exists if CHECKSUM and os.access(local_file, os.F_OK): - #-- generate checksum hash for local file - #-- open the local_file in binary read mode + # generate checksum hash for local file + # open the local_file in binary read mode with open(local_file, 'rb') as local_buffer: local_hash = hashlib.md5(local_buffer.read()).hexdigest() - #-- copy remote file contents to bytesIO object + # copy remote file contents to bytesIO object remote_buffer = gravity_toolkit.utilities.from_ftp(remote_path, timeout=TIMEOUT) - #-- generate checksum hash for remote file + # generate checksum hash for remote file remote_hash = hashlib.md5(remote_buffer.getvalue()).hexdigest() - #-- compare checksums + # compare checksums if (local_hash != remote_hash): TEST = True OVERWRITE = f' (checksums: {local_hash} {remote_hash})' elif os.access(local_file, os.F_OK): - #-- check last modification time of local file + # check last modification time of local file local_mtime = os.stat(local_file).st_mtime - #-- if remote file is newer: overwrite the local file + # if remote file is newer: overwrite the local file if (gravity_toolkit.utilities.even(remote_mtime) > gravity_toolkit.utilities.even(local_mtime)): TEST = True @@ -313,72 +313,72 @@ def ftp_mirror_file(ftp,remote_path,remote_mtime,local_file, else: TEST = True OVERWRITE = ' (new)' - #-- if file does not exist locally, is to be overwritten, or CLOBBER is set + # if file does not exist locally, is to be overwritten, or CLOBBER is set if TEST or CLOBBER: - #-- Printing files transferred + # Printing files transferred remote_ftp_url = posixpath.join('ftp://',*remote_path) logging.info(f'{remote_ftp_url} -->') logging.info(f'\t{local_file}{OVERWRITE}\n') - #-- if executing copy command (not only printing the files) + # if executing copy command (not only printing the files) if not LIST: - #-- 
copy file from ftp server or from bytesIO object + # copy file from ftp server or from bytesIO object if CHECKSUM and os.access(local_file, os.F_OK): - #-- store bytes to file using chunked transfer encoding + # store bytes to file using chunked transfer encoding remote_buffer.seek(0) with open(local_file, 'wb') as f: shutil.copyfileobj(remote_buffer, f, 16 * 1024) else: - #-- path to remote file + # path to remote file remote_file = posixpath.join(*remote_path[1:]) - #-- copy remote file contents to local file + # copy remote file contents to local file with open(local_file, 'wb') as f: ftp.retrbinary(f'RETR {remote_file}', f.write) - #-- keep remote modification time of file and local access time + # keep remote modification time of file and local access time os.utime(local_file, (os.stat(local_file).st_atime, remote_mtime)) os.chmod(local_file, MODE) -#-- PURPOSE: create argument parser +# PURPOSE: create argument parser def arguments(): parser = argparse.ArgumentParser( description="""Syncs GRACE/GRACE-FO data from the GFZ Information System and Data Center (ISDC) """ ) - #-- command line parameters - #-- working data directory + # command line parameters + # working data directory parser.add_argument('--directory','-D', type=lambda p: os.path.abspath(os.path.expanduser(p)), default=os.getcwd(), help='Working data directory') - #-- GRACE/GRACE-FO processing center + # GRACE/GRACE-FO processing center parser.add_argument('--center','-c', metavar='PROC', type=str, nargs='+', default=['CSR','GFZ','JPL'], choices=['CSR','GFZ','JPL'], help='GRACE/GRACE-FO processing center') - #-- GRACE/GRACE-FO data release + # GRACE/GRACE-FO data release parser.add_argument('--release','-r', metavar='DREL', type=str, nargs='+', default=['RL06'], choices=['RL04','RL05','RL06'], help='GRACE/GRACE-FO data release') - #-- GRACE/GRACE-FO data version + # GRACE/GRACE-FO data version parser.add_argument('--version','-v', metavar='VERSION', type=str, nargs=2, default=['0','1'], choices=['0','1','2','3'], help='GRACE/GRACE-FO Level-2 data version') - #-- GRACE/GRACE-FO newsletters + # GRACE/GRACE-FO newsletters parser.add_argument('--newsletters','-n', default=False, action='store_true', help='Sync GRACE/GRACE-FO Newsletters') - #-- connection timeout + # connection timeout parser.add_argument('--timeout','-t', type=int, default=360, help='Timeout in seconds for blocking operations') - #-- Output log file in form - #-- GFZ_ISDC_sync_2002-04-01.log + # Output log file in form + # GFZ_ISDC_sync_2002-04-01.log parser.add_argument('--log','-l', default=False, action='store_true', help='Output log file') - #-- sync options + # sync options parser.add_argument('--list','-L', default=False, action='store_true', help='Only print files that could be transferred') @@ -388,20 +388,20 @@ def arguments(): parser.add_argument('--clobber','-C', default=False, action='store_true', help='Overwrite existing data in transfer') - #-- permissions mode of the directories and files synced (number in octal) + # permissions mode of the directories and files synced (number in octal) parser.add_argument('--mode','-M', type=lambda x: int(x,base=8), default=0o775, help='Permission mode of directories and files synced') - #-- return the parser + # return the parser return parser -#-- This is the main part of the program that calls the individual functions +# This is the main part of the program that calls the individual functions def main(): - #-- Read the system arguments listed after the program + # Read the system arguments listed after the 
program parser = arguments() args,_ = parser.parse_known_args() - #-- check internet connection before attempting to run program + # check internet connection before attempting to run program HOST = 'isdcftp.gfz-potsdam.de' if gravity_toolkit.utilities.check_ftp_connection(HOST): gfz_isdc_grace_ftp(args.directory, PROC=args.center, @@ -412,6 +412,6 @@ def main(): else: raise RuntimeError('Check internet connection') -#-- run main program +# run main program if __name__ == '__main__': main() diff --git a/scripts/grace_mean_harmonics.py b/scripts/grace_mean_harmonics.py index 40ef6681..68b6be29 100644 --- a/scripts/grace_mean_harmonics.py +++ b/scripts/grace_mean_harmonics.py @@ -119,7 +119,7 @@ from gravity_toolkit.harmonics import harmonics import gravity_toolkit.utilities as utilities -#-- PURPOSE: keep track of threads +# PURPOSE: keep track of threads def info(args): logging.info(os.path.basename(sys.argv[0])) logging.info(args) @@ -128,8 +128,8 @@ def info(args): logging.info(f'parent process: {os.getppid():d}') logging.info(f'process id: {os.getpid():d}') -#-- PURPOSE: import GRACE/GRACE-FO files for a given months range -#-- calculate the mean of the spherical harmonics and output to file +# PURPOSE: import GRACE/GRACE-FO files for a given months range +# calculate the mean of the spherical harmonics and output to file def grace_mean_harmonics(base_dir, PROC, DREL, DSET, LMAX, START=None, END=None, @@ -151,40 +151,40 @@ def grace_mean_harmonics(base_dir, PROC, DREL, DSET, LMAX, VERBOSE=0, MODE=0o775): - #-- output string for both LMAX==MMAX and LMAX != MMAX cases + # output string for both LMAX==MMAX and LMAX != MMAX cases MMAX = np.copy(LMAX) if not MMAX else MMAX order_str = f'M{MMAX:d}' if (MMAX != LMAX) else '' - #-- data formats for output: ascii, netCDF4, HDF5, gfc + # data formats for output: ascii, netCDF4, HDF5, gfc suffix = dict(ascii='txt',netCDF4='nc',HDF5='H5',gfc='gfc')[MEANFORM] - #-- reading GRACE months for input date range - #-- replacing low-degree harmonics with SLR values if specified - #-- include degree 1 (geocenter) harmonics if specified - #-- correcting for Pole Tide Drift and Atmospheric Jumps if specified + # reading GRACE months for input date range + # replacing low-degree harmonics with SLR values if specified + # include degree 1 (geocenter) harmonics if specified + # correcting for Pole Tide Drift and Atmospheric Jumps if specified input_Ylms = grace_input_months(base_dir, PROC, DREL, DSET, LMAX, START, END, MISSING, SLR_C20, DEG1, MMAX=MMAX, SLR_21=SLR_21, SLR_22=SLR_22, SLR_C30=SLR_C30, SLR_C40=SLR_C40, SLR_C50=SLR_C50, DEG1_FILE=DEG1_FILE, MODEL_DEG1=MODEL_DEG1, ATM=ATM, POLE_TIDE=POLE_TIDE) grace_Ylms = harmonics().from_dict(input_Ylms) - #-- descriptor string for processing parameters + # descriptor string for processing parameters grace_str = input_Ylms['title'] - #-- calculate mean Ylms + # calculate mean Ylms mean_Ylms = mean().from_harmonics(grace_Ylms.mean()) mean_Ylms.time = np.mean(grace_Ylms.time) mean_Ylms.month = np.mean(grace_Ylms.month) - #-- number of months + # number of months nt = grace_Ylms.shape[-1] - #-- calculate RMS of harmonic errors + # calculate RMS of harmonic errors mean_Ylms.eclm = np.sqrt(np.sum(input_Ylms['eclm']**2,axis=2)/nt) mean_Ylms.eslm = np.sqrt(np.sum(input_Ylms['eslm']**2,axis=2)/nt) - #-- product information + # product information mean_Ylms.center = PROC mean_Ylms.release = DREL mean_Ylms.product = DSET - #-- default output filename if not entering via parameter file + # default output filename if not 
entering via parameter file if not MEAN_FILE: DIRECTORY = os.path.expanduser(input_Ylms['directory']) args = (PROC,DREL,DSET,grace_str,LMAX,order_str,START,END,suffix) @@ -192,24 +192,24 @@ def grace_mean_harmonics(base_dir, PROC, DREL, DSET, LMAX, MEAN_FILE = os.path.join(DIRECTORY,file_format.format(*args)) else: DIRECTORY = os.path.dirname(MEAN_FILE) - #-- recursively create output directory if non-existent + # recursively create output directory if non-existent if not os.access(DIRECTORY, os.F_OK): os.makedirs(DIRECTORY, MODE) - #-- output spherical harmonics for the static field + # output spherical harmonics for the static field if (MEANFORM == 'gfc'): - #-- output mean field to gfc format + # output mean field to gfc format mean_Ylms.to_gfc(MEAN_FILE, verbose=VERBOSE) else: - #-- output mean field to specified file format + # output mean field to specified file format mean_Ylms.to_file(MEAN_FILE, format=MEANFORM, verbose=VERBOSE) - #-- change the permissions mode + # change the permissions mode os.chmod(MEAN_FILE, MODE) - #-- return the output file + # return the output file return MEAN_FILE -#-- PURPOSE: additional routines for the harmonics module +# PURPOSE: additional routines for the harmonics module class mean(harmonics): def __init__(self, **kwargs): super().__init__(**kwargs) @@ -224,7 +224,7 @@ def from_harmonics(self, temp): Convert a harmonics object to a new mean object """ self = mean(lmax=temp.lmax, mmax=temp.mmax) - #-- try to assign variables to self + # try to assign variables to self for key in ['clm','slm','eclm','eslm','shape','ndim','filename', 'center','release','product']: try: @@ -232,7 +232,7 @@ def from_harmonics(self, temp): setattr(self, key, np.copy(val)) except AttributeError: pass - #-- assign ndim and shape attributes + # assign ndim and shape attributes self.update_dimensions() return self @@ -245,28 +245,28 @@ def to_gfc(self, filename, **kwargs): keyword arguments for gfc output """ self.filename = os.path.expanduser(filename) - #-- set default verbosity + # set default verbosity kwargs.setdefault('verbose',False) logging.info(self.filename) - #-- open the output file + # open the output file fid = open(self.filename, 'w') - #-- print the header informat + # print the header information self.print_header(fid) - #-- output file format + # output file format file_format = ('{0:3} {1:4d} {2:4d} {3:+18.12E} {4:+18.12E} ' '{5:11.5E} {6:11.5E}') - #-- write to file for each spherical harmonic degree and order + # write to file for each spherical harmonic degree and order for m in range(0, self.mmax+1): for l in range(m, self.lmax+1): args = ('gfc', l, m, self.clm[l,m], self.slm[l,m], self.eclm[l,m], self.eslm[l,m]) print(file_format.format(*args), file=fid) - #-- close the output file + # close the output file fid.close() - #-- PURPOSE: print gfc header to top of file + # PURPOSE: print gfc header to top of file def print_header(self, fid): - #-- print header + # print header fid.write('{0} {1}\n'.format('begin_of_head',73*'=')) fid.write('{0:30}{1}\n'.format('product_type','gravity_field')) fid.write('{0:30}{1}\n'.format('center',self.center)) @@ -282,45 +282,45 @@ def print_header(self, fid): fid.write('\n{0:7}{1:5}{2:10}{3:20}{4:15}{5:13}{6:7}\n'.format(*args)) fid.write('{0} {1}\n'.format('end_of_head',75*'=')) -#-- PURPOSE: print a file log for the GRACE/GRACE-FO mean program +# PURPOSE: print a file log for the GRACE/GRACE-FO mean program def output_log_file(arguments,output_file): - #-- format: GRACE_mean_run_2002-04-01_PID-70335.log + # format: 
GRACE_mean_run_2002-04-01_PID-70335.log args = (time.strftime('%Y-%m-%d',time.localtime()), os.getpid()) LOGFILE = 'GRACE_mean_run_{0}_PID-{1:d}.log'.format(*args) - #-- create a unique log and open the log file + # create a unique log and open the log file DIRECTORY = os.path.expanduser(arguments.directory) fid = utilities.create_unique_file(os.path.join(DIRECTORY,LOGFILE)) logging.basicConfig(stream=fid, level=logging.INFO) - #-- print argument values sorted alphabetically + # print argument values sorted alphabetically logging.info('ARGUMENTS:') for arg, value in sorted(vars(arguments).items()): logging.info('{0}: {1}'.format(arg, value)) - #-- print output files + # print output files logging.info('\n\nOUTPUT FILE:') logging.info('{0}'.format(output_file)) - #-- close the log file + # close the log file fid.close() -#-- PURPOSE: print a error file log for the GRACE/GRACE-FO mean program +# PURPOSE: print an error file log for the GRACE/GRACE-FO mean program def output_error_log_file(arguments): - #-- format: GRACE_mean_failed_run_2002-04-01_PID-70335.log + # format: GRACE_mean_failed_run_2002-04-01_PID-70335.log args = (time.strftime('%Y-%m-%d',time.localtime()), os.getpid()) LOGFILE = 'GRACE_mean_failed_run_{0}_PID-{1:d}.log'.format(*args) - #-- create a unique log and open the log file + # create a unique log and open the log file DIRECTORY = os.path.expanduser(arguments.directory) fid = utilities.create_unique_file(os.path.join(DIRECTORY,LOGFILE)) logging.basicConfig(stream=fid, level=logging.INFO) - #-- print argument values sorted alphabetically + # print argument values sorted alphabetically logging.info('ARGUMENTS:') for arg, value in sorted(vars(arguments).items()): logging.info('{0}: {1}'.format(arg, value)) - #-- print traceback error + # print traceback error logging.info('\n\nTRACEBACK ERROR:') traceback.print_exc(file=fid) - #-- close the log file + # close the log file fid.close() -#-- PURPOSE: create argument parser +# PURPOSE: create argument parser def arguments(): parser = argparse.ArgumentParser( description="""Calculates the temporal mean of the GRACE/GRACE-FO @@ -329,32 +329,32 @@ def arguments(): fromfile_prefix_chars="@" ) parser.convert_arg_line_to_args = utilities.convert_arg_line_to_args - #-- command line parameters - #-- working data directory + # command line parameters + # working data directory parser.add_argument('--directory','-D', type=lambda p: os.path.abspath(os.path.expanduser(p)), default=os.getcwd(), help='Working data directory') - #-- Data processing center or satellite mission + # Data processing center or satellite mission parser.add_argument('--center','-c', metavar='PROC', type=str, required=True, help='GRACE/GRACE-FO Processing Center') - #-- GRACE/GRACE-FO data release + # GRACE/GRACE-FO data release parser.add_argument('--release','-r', metavar='DREL', type=str, default='RL06', help='GRACE/GRACE-FO Data Release') - #-- GRACE/GRACE-FO Level-2 data product + # GRACE/GRACE-FO Level-2 data product parser.add_argument('--product','-p', metavar='DSET', type=str, default='GSM', help='GRACE/GRACE-FO Level-2 data product') - #-- maximum spherical harmonic degree and order + # maximum spherical harmonic degree and order parser.add_argument('--lmax','-l', type=int, default=60, help='Maximum spherical harmonic degree') parser.add_argument('--mmax','-m', type=int, default=None, help='Maximum spherical harmonic order') - #-- start and end GRACE/GRACE-FO months + # start and end GRACE/GRACE-FO months parser.add_argument('--start','-S', type=int, 
default=4, help='Starting GRACE/GRACE-FO month') @@ -366,25 +366,25 @@ def arguments(): parser.add_argument('--missing','-N', metavar='MISSING', type=int, nargs='+', default=MISSING, help='Missing GRACE/GRACE-FO months') - #-- use atmospheric jump corrections from Fagiolini et al. (2015) + # use atmospheric jump corrections from Fagiolini et al. (2015) parser.add_argument('--atm-correction', default=False, action='store_true', help='Apply atmospheric jump correction coefficients') - #-- correct for pole tide drift follow Wahr et al. (2015) + # correct for pole tide drift following Wahr et al. (2015) parser.add_argument('--pole-tide', default=False, action='store_true', help='Correct for pole tide drift') - #-- Update Degree 1 coefficients with SLR or derived values - #-- Tellus: GRACE/GRACE-FO TN-13 from PO.DAAC - #-- https://grace.jpl.nasa.gov/data/get-data/geocenter/ - #-- SLR: satellite laser ranging from CSR - #-- ftp://ftp.csr.utexas.edu/pub/slr/geocenter/ - #-- UCI: Sutterley and Velicogna, Remote Sensing (2019) - #-- https://www.mdpi.com/2072-4292/11/18/2108 - #-- Swenson: GRACE-derived coefficients from Sean Swenson - #-- https://doi.org/10.1029/2007JB005338 - #-- GFZ: GRACE/GRACE-FO coefficients from GFZ GravIS - #-- http://gravis.gfz-potsdam.de/corrections + # Update Degree 1 coefficients with SLR or derived values + # Tellus: GRACE/GRACE-FO TN-13 from PO.DAAC + # https://grace.jpl.nasa.gov/data/get-data/geocenter/ + # SLR: satellite laser ranging from CSR + # ftp://ftp.csr.utexas.edu/pub/slr/geocenter/ + # UCI: Sutterley and Velicogna, Remote Sensing (2019) + # https://www.mdpi.com/2072-4292/11/18/2108 + # Swenson: GRACE-derived coefficients from Sean Swenson + # https://doi.org/10.1029/2007JB005338 + # GFZ: GRACE/GRACE-FO coefficients from GFZ GravIS + # http://gravis.gfz-potsdam.de/corrections parser.add_argument('--geocenter', metavar='DEG1', type=str, choices=['Tellus','SLR','SLF','UCI','Swenson','GFZ'], @@ -395,7 +395,7 @@ def arguments(): parser.add_argument('--interpolate-geocenter', default=False, action='store_true', help='Least-squares model missing Degree 1 coefficients') - #-- replace low degree harmonics with values from Satellite Laser Ranging + # replace low degree harmonics with values from Satellite Laser Ranging parser.add_argument('--slr-c20', type=str, default=None, choices=['CSR','GFZ','GSFC'], help='Replace C20 coefficients with SLR values') @@ -414,45 +414,45 @@ def arguments(): parser.add_argument('--slr-c50', type=str, default=None, choices=['CSR','GSFC','LARES'], help='Replace C50 coefficients with SLR values') - #-- mean file to remove + # output mean file parser.add_argument('--mean-file', type=lambda p: os.path.abspath(os.path.expanduser(p)), help='Output GRACE/GRACE-FO mean file') - #-- input data format (ascii, netCDF4, HDF5, gfc) + # output data format (ascii, netCDF4, HDF5, gfc) parser.add_argument('--mean-format', type=str, default='netCDF4', choices=['ascii','netCDF4','HDF5','gfc'], help='Output data format for GRACE/GRACE-FO mean file') - #-- Output log file for each job in forms - #-- GRACE_mean_run_2002-04-01_PID-00000.log - #-- GRACE_mean_failed_run_2002-04-01_PID-00000.log + # Output log file for each job in forms + # GRACE_mean_run_2002-04-01_PID-00000.log + # GRACE_mean_failed_run_2002-04-01_PID-00000.log parser.add_argument('--log', default=False, action='store_true', help='Output log file for each job') - #-- print information about each input and output file + # print information about each input and output file 
parser.add_argument('--verbose','-V', action='count', default=0, help='Verbose output of run') - #-- permissions mode of the local directories and files (number in octal) + # permissions mode of the local directories and files (number in octal) parser.add_argument('--mode','-M', type=lambda x: int(x,base=8), default=0o775, help='Permissions mode of output files') - #-- return the parser + # return the parser return parser -#-- This is the main part of the program that calls the individual functions +# This is the main part of the program that calls the individual functions def main(): - #-- Read the system arguments listed after the program + # Read the system arguments listed after the program parser = arguments() args,_ = parser.parse_known_args() - #-- create logger + # create logger loglevels = [logging.CRITICAL,logging.INFO,logging.DEBUG] logging.basicConfig(level=loglevels[args.verbose]) - #-- try to run the analysis with listed parameters + # try to run the analysis with listed parameters try: info(args) - #-- run grace_mean_harmonics algorithm with parameters + # run grace_mean_harmonics algorithm with parameters output_file = grace_mean_harmonics( args.directory, args.center, @@ -479,17 +479,17 @@ def main(): VERBOSE=args.verbose, MODE=args.mode) except Exception as e: - #-- if there has been an error exception - #-- print the type, value, and stack trace of the - #-- current exception being handled + # if there has been an error exception + # print the type, value, and stack trace of the + # current exception being handled logging.critical(f'process id {os.getpid():d} failed') logging.error(traceback.format_exc()) - if args.log:#-- write failed job completion log file + if args.log:# write failed job completion log file output_error_log_file(args) else: - if args.log:#-- write successful job completion log file + if args.log:# write successful job completion log file output_log_file(args,output_file) -#-- run main program +# run main program if __name__ == '__main__': main() diff --git a/scripts/grace_spatial_error.py b/scripts/grace_spatial_error.py index 7440b660..82dbf036 100755 --- a/scripts/grace_spatial_error.py +++ b/scripts/grace_spatial_error.py @@ -171,7 +171,7 @@ from gravity_toolkit.tssmooth import tssmooth from gravity_toolkit.units import units -#-- PURPOSE: keep track of threads +# PURPOSE: keep track of threads def info(args): logging.info(os.path.basename(sys.argv[0])) logging.info(args) @@ -180,8 +180,8 @@ def info(args): logging.info(f'parent process: {os.getppid():d}') logging.info(f'process id: {os.getpid():d}') -#-- PURPOSE: import GRACE files for a given months range -#-- Estimates the GRACE/GRACE-FO errors applying the specified procedures +# PURPOSE: import GRACE files for a given months range +# Estimates the GRACE/GRACE-FO errors applying the specified procedures def grace_spatial_error(base_dir, PROC, DREL, DSET, LMAX, RAD, START=None, END=None, @@ -214,275 +214,275 @@ def grace_spatial_error(base_dir, PROC, DREL, DSET, LMAX, RAD, VERBOSE=0, MODE=0o775): - #-- recursively create output directory if not currently existing + # recursively create output directory if not currently existing if not os.access(OUTPUT_DIRECTORY, os.F_OK): os.makedirs(OUTPUT_DIRECTORY, mode=MODE, exist_ok=True) - #-- list object of output files for file logs (full path) + # list object of output files for file logs (full path) output_files = [] - #-- file information + # file information suffix = dict(ascii='txt', netCDF4='nc', HDF5='H5') - #-- read arrays of kl, hl, and ll Love 
Numbers + # read arrays of kl, hl, and ll Love Numbers hl,kl,ll = load_love_numbers(LMAX, LOVE_NUMBERS=LOVE_NUMBERS, REFERENCE=REFERENCE) - #-- Calculating the Gaussian smoothing for radius RAD + # Calculating the Gaussian smoothing for radius RAD if (RAD != 0): wt = 2.0*np.pi*gauss_weights(RAD,LMAX) gw_str = f'_r{RAD:0.0f}km' else: - #-- else = 1 + # else = 1 wt = np.ones((LMAX+1)) gw_str = '' - #-- flag for spherical harmonic order + # flag for spherical harmonic order MMAX = np.copy(LMAX) if not MMAX else MMAX order_str = f'M{MMAX:d}' if (MMAX != LMAX) else '' - #-- atmospheric ECMWF "jump" flag (if ATM) + # atmospheric ECMWF "jump" flag (if ATM) atm_str = '_wATM' if ATM else '' - #-- reading GRACE months for input date range - #-- replacing low-degree harmonics with SLR values if specified - #-- include degree 1 (geocenter) harmonics if specified - #-- correcting for Pole-Tide and Atmospheric Jumps if specified + # reading GRACE months for input date range + # replacing low-degree harmonics with SLR values if specified + # include degree 1 (geocenter) harmonics if specified + # correcting for Pole-Tide and Atmospheric Jumps if specified Ylms = grace_input_months(base_dir, PROC, DREL, DSET, LMAX, START, END, MISSING, SLR_C20, DEG1, MMAX=MMAX, SLR_21=SLR_21, SLR_22=SLR_22, SLR_C30=SLR_C30, SLR_C40=SLR_C40, SLR_C50=SLR_C50, DEG1_FILE=DEG1_FILE, MODEL_DEG1=MODEL_DEG1, ATM=ATM, POLE_TIDE=POLE_TIDE) - #-- convert to harmonics object and remove mean if specified + # convert to harmonics object and remove mean if specified GRACE_Ylms = harmonics().from_dict(Ylms) - #-- full path to directory for specific GRACE/GRACE-FO product + # full path to directory for specific GRACE/GRACE-FO product GRACE_Ylms.directory = Ylms['directory'] - #-- use a mean file for the static field to remove + # use a mean file for the static field to remove if MEAN_FILE: - #-- read data form for input mean file (ascii, netCDF4, HDF5, gfc) + # read data format for input mean file (ascii, netCDF4, HDF5, gfc) mean_Ylms = harmonics().from_file(MEAN_FILE,format=MEANFORM,date=False) - #-- remove the input mean + # remove the input mean GRACE_Ylms.subtract(mean_Ylms) else: GRACE_Ylms.mean(apply=True) - #-- date information of GRACE/GRACE-FO coefficients + # date information of GRACE/GRACE-FO coefficients nfiles = len(GRACE_Ylms.time) - #-- default file prefix + # default file prefix if not FILE_PREFIX: FILE_PREFIX = '{0}_{1}_{2}{3}_'.format(PROC,DREL,DSET,Ylms['title']) - #-- filter GRACE/GRACE-FO coefficients + # filter GRACE/GRACE-FO coefficients if DESTRIPE: - #-- destriping GRACE/GRACE-FO coefficients + # destriping GRACE/GRACE-FO coefficients ds_str = '_FL' GRACE_Ylms = GRACE_Ylms.destripe() else: - #-- using standard GRACE/GRACE-FO harmonics + # using standard GRACE/GRACE-FO harmonics ds_str = '' - #-- calculating GRACE error (Wahr et al 2006) - #-- output GRACE error file (for both LMAX==MMAX and LMAX != MMAX cases) + # calculating GRACE error (Wahr et al. 2006) + # output GRACE error file (for both LMAX==MMAX and LMAX != MMAX cases) args = (PROC,DREL,DSET,LMAX,order_str,ds_str,atm_str,GRACE_Ylms.month[0], GRACE_Ylms.month[-1],suffix[DATAFORM]) delta_format = '{0}_{1}_{2}_DELTA_CLM_L{3:d}{4}{5}{6}_{7:03d}-{8:03d}.{9}' DELTA_FILE = os.path.join(GRACE_Ylms.directory,delta_format.format(*args)) - #-- full path of the GRACE directory - #-- if file was previously calculated, will read file - #-- else will calculate the GRACE error + # full path of the GRACE directory + # if file was previously calculated, will read file + # 
else will calculate the GRACE error if not os.access(DELTA_FILE, os.F_OK): - #-- add output delta file to list object + # add output delta file to list object output_files.append(DELTA_FILE) - #-- Delta coefficients of GRACE time series (Error components) + # Delta coefficients of GRACE time series (Error components) delta_Ylms = harmonics(lmax=LMAX,mmax=MMAX) delta_Ylms.clm = np.zeros((LMAX+1,MMAX+1)) delta_Ylms.slm = np.zeros((LMAX+1,MMAX+1)) - #-- Smoothing Half-Width (CNES is a 10-day solution) - #-- 365/10/2 = 18.25 (next highest is 19) - #-- All other solutions are monthly solutions (HFWTH for annual = 6) + # Smoothing Half-Width (CNES is a 10-day solution) + # 365/10/2 = 18.25 (next highest is 19) + # All other solutions are monthly solutions (HFWTH for annual = 6) if ((PROC == 'CNES') and (DREL in ('RL01','RL02'))): HFWTH = 19 else: HFWTH = 6 - #-- Equal to the noise of the smoothed time-series - #-- for each spherical harmonic order - for m in range(0,MMAX+1):#-- MMAX+1 to include MMAX - #-- for each spherical harmonic degree - for l in range(m,LMAX+1):#-- LMAX+1 to include LMAX - #-- Delta coefficients of GRACE time series + # Equal to the noise of the smoothed time-series + # for each spherical harmonic order + for m in range(0,MMAX+1):# MMAX+1 to include MMAX + # for each spherical harmonic degree + for l in range(m,LMAX+1):# LMAX+1 to include LMAX + # Delta coefficients of GRACE time series for cs,csharm in enumerate(['clm','slm']): - #-- Constrained GRACE Error (Noise of smoothed time-series) - #-- With Annual and Semi-Annual Terms + # Constrained GRACE Error (Noise of smoothed time-series) + # With Annual and Semi-Annual Terms val1 = getattr(GRACE_Ylms, csharm) smth = tssmooth(GRACE_Ylms.time, val1[l,m,:], HFWTH=HFWTH) - #-- number of smoothed points + # number of smoothed points nsmth = len(smth['data']) - #-- GRACE delta Ylms - #-- variance of data-(smoothed+annual+semi) + # GRACE delta Ylms + # variance of data-(smoothed+annual+semi) val2 = getattr(delta_Ylms, csharm) val2[l,m] = np.sqrt(np.sum(smth['noise']**2)/nsmth) - #-- save GRACE DELTA to file + # save GRACE DELTA to file delta_Ylms.time = np.copy(nsmth) delta_Ylms.month = np.copy(nsmth) delta_Ylms.to_file(DELTA_FILE,format=DATAFORM) - #-- set the permissions mode of the output harmonics file + # set the permissions mode of the output harmonics file os.chmod(DELTA_FILE, MODE) - #-- append delta harmonics file to output files list + # append delta harmonics file to output files list output_files.append(DELTA_FILE) else: - #-- read GRACE DELTA spherical harmonics datafile + # read GRACE DELTA spherical harmonics datafile delta_Ylms = harmonics().from_file(DELTA_FILE,format=DATAFORM) - #-- truncate grace delta clm and slm to d/o LMAX/MMAX + # truncate grace delta clm and slm to d/o LMAX/MMAX delta_Ylms = delta_Ylms.truncate(lmax=LMAX, mmax=MMAX) nsmth = np.int64(delta_Ylms.time) - #-- Output spatial data object + # Output spatial data object delta = spatial() - #-- Output Degree Spacing + # Output Degree Spacing dlon,dlat = (DDEG[0],DDEG[0]) if (len(DDEG) == 1) else (DDEG[0],DDEG[1]) - #-- Output Degree Interval + # Output Degree Interval if (INTERVAL == 1): - #-- (-180:180,90:-90) + # (-180:180,90:-90) nlon = np.int64((360.0/dlon)+1.0) nlat = np.int64((180.0/dlat)+1.0) delta.lon = -180 + dlon*np.arange(0,nlon) delta.lat = 90.0 - dlat*np.arange(0,nlat) elif (INTERVAL == 2): - #-- (Degree spacing)/2 + # (Degree spacing)/2 delta.lon = np.arange(-180+dlon/2.0,180+dlon/2.0,dlon) delta.lat = 
np.arange(90.0-dlat/2.0,-90.0-dlat/2.0,-dlat) nlon = len(delta.lon) nlat = len(delta.lat) elif (INTERVAL == 3): - #-- non-global grid set with BOUNDS parameter + # non-global grid set with BOUNDS parameter minlon,maxlon,minlat,maxlat = BOUNDS.copy() delta.lon = np.arange(minlon+dlon/2.0,maxlon+dlon/2.0,dlon) delta.lat = np.arange(maxlat-dlat/2.0,minlat-dlat/2.0,-dlat) nlon = len(delta.lon) nlat = len(delta.lat) - #-- Earth Parameters - #-- output spatial units + # Earth Parameters + # output spatial units unit_list = ['cmwe', 'mmGH', 'mmCU', u'\u03BCGal', 'mbar'] unit_name = ['Equivalent Water Thickness', 'Geoid Height', 'Elastic Crustal Uplift', 'Gravitational Undulation', 'Equivalent Surface Pressure'] - #-- dfactor is the degree dependent coefficients - #-- for specific spherical harmonic output units + # dfactor is the degree dependent coefficients + # for specific spherical harmonic output units if (UNITS == 1): - #-- 1: cmwe, centimeters water equivalent + # 1: cmwe, centimeters water equivalent dfactor = units(lmax=LMAX).harmonic(hl,kl,ll).cmwe elif (UNITS == 2): - #-- 2: mmGH, millimeters geoid height + # 2: mmGH, millimeters geoid height dfactor = units(lmax=LMAX).harmonic(hl,kl,ll).mmGH elif (UNITS == 3): - #-- 3: mmCU, millimeters elastic crustal deformation + # 3: mmCU, millimeters elastic crustal deformation dfactor = units(lmax=LMAX).harmonic(hl,kl,ll).mmCU elif (UNITS == 4): - #-- 4: micGal, microGal gravity perturbations + # 4: micGal, microGal gravity perturbations dfactor = units(lmax=LMAX).harmonic(hl,kl,ll).microGal elif (UNITS == 5): - #-- 5: mbar, millibar equivalent surface pressure + # 5: mbar, millibar equivalent surface pressure dfactor = units(lmax=LMAX).harmonic(hl,kl,ll).mbar - #-- Computing plms for converting to spatial domain + # Computing plms for converting to spatial domain phi = delta.lon[np.newaxis,:]*np.pi/180.0 theta = (90.0-delta.lat)*np.pi/180.0 PLM, dPLM = plm_holmes(LMAX, np.cos(theta)) - #-- square of legendre polynomials truncated to order MMAX + # square of legendre polynomials truncated to order MMAX mm = np.arange(0,MMAX+1) PLM2 = PLM[:,mm,:]**2 - #-- Calculating cos(m*phi)^2 and sin(m*phi)^2 + # Calculating cos(m*phi)^2 and sin(m*phi)^2 m = delta_Ylms.m[:,np.newaxis] ccos = np.cos(np.dot(m,phi))**2 ssin = np.sin(np.dot(m,phi))**2 - #-- truncate delta harmonics to spherical harmonic range + # truncate delta harmonics to spherical harmonic range Ylms = delta_Ylms.truncate(LMAX,lmin=LMIN,mmax=MMAX) - #-- convolve delta harmonics with degree dependent factors - #-- smooth harmonics and convert to output units + # convolve delta harmonics with degree dependent factors + # smooth harmonics and convert to output units Ylms = Ylms.convolve(dfactor*wt).power(2.0).scale(1.0/nsmth) - #-- Calculate fourier coefficients - d_cos = np.zeros((MMAX+1,nlat))#-- [m,th] - d_sin = np.zeros((MMAX+1,nlat))#-- [m,th] - #-- Calculating delta spatial values + # Calculate fourier coefficients + d_cos = np.zeros((MMAX+1,nlat))# [m,th] + d_sin = np.zeros((MMAX+1,nlat))# [m,th] + # Calculating delta spatial values for k in range(0,nlat): - #-- summation over all spherical harmonic degrees + # summation over all spherical harmonic degrees d_cos[:,k] = np.sum(PLM2[:,:,k]*Ylms.clm, axis=0) d_sin[:,k] = np.sum(PLM2[:,:,k]*Ylms.slm, axis=0) - #-- Multiplying by c/s(phi#m) to get spatial maps (lon,lat) + # Multiplying by cos/sin(m*phi) to get spatial maps (lon,lat) delta.data=np.sqrt(np.dot(ccos.T,d_cos) + np.dot(ssin.T,d_sin)).T - #-- output file format + # output file format 
file_format = '{0}{1}_L{2:d}{3}{4}{5}_ERR_{6:03d}-{7:03d}.{8}' - #-- output error file to ascii, netCDF4 or HDF5 + # output error file to ascii, netCDF4 or HDF5 args = (FILE_PREFIX,unit_list[UNITS-1],LMAX,order_str,gw_str,ds_str, GRACE_Ylms.month[0],GRACE_Ylms.month[-1],suffix[DATAFORM]) FILE = os.path.join(OUTPUT_DIRECTORY,file_format.format(*args)) if (DATAFORM == 'ascii'): - #-- ascii (.txt) + # ascii (.txt) delta.to_ascii(FILE, date=False, verbose=VERBOSE) elif (DATAFORM == 'netCDF4'): - #-- netCDF4 + # netCDF4 delta.to_netCDF4(FILE, date=False, verbose=VERBOSE, units=unit_list[UNITS-1], longname=unit_name[UNITS-1], title='GRACE/GRACE-FO Spatial Error') elif (DATAFORM == 'HDF5'): - #-- HDF5 + # HDF5 delta.to_HDF5(FILE, date=False, verbose=VERBOSE, units=unit_list[UNITS-1], longname=unit_name[UNITS-1], title='GRACE/GRACE-FO Spatial Error') - #-- set the permissions mode of the output files + # set the permissions mode of the output files os.chmod(FILE, MODE) - #-- add file to list + # add file to list output_files.append(FILE) - #-- return the list of output files + # return the list of output files return output_files -#-- PURPOSE: print a file log for the GRACE analysis +# PURPOSE: print a file log for the GRACE analysis def output_log_file(arguments,output_files): - #-- format: GRACE_error_run_2002-04-01_PID-70335.log + # format: GRACE_error_run_2002-04-01_PID-70335.log args = (time.strftime('%Y-%m-%d',time.localtime()), os.getpid()) LOGFILE = 'GRACE_error_run_{0}_PID-{1:d}.log'.format(*args) - #-- create a unique log and open the log file + # create a unique log and open the log file DIRECTORY = os.path.expanduser(arguments.output_directory) fid = utilities.create_unique_file(os.path.join(DIRECTORY,LOGFILE)) logging.basicConfig(stream=fid, level=logging.INFO) - #-- print argument values sorted alphabetically + # print argument values sorted alphabetically logging.info('ARGUMENTS:') for arg, value in sorted(vars(arguments).items()): logging.info('{0}: {1}'.format(arg, value)) - #-- print output files + # print output files logging.info('\n\nOUTPUT FILES:') for f in output_files: logging.info('{0}'.format(f)) - #-- close the log file + # close the log file fid.close() -#-- PURPOSE: print a error file log for the GRACE analysis +# PURPOSE: print an error file log for the GRACE analysis def output_error_log_file(arguments): - #-- format: GRACE_error_failed_run_2002-04-01_PID-70335.log + # format: GRACE_error_failed_run_2002-04-01_PID-70335.log args = (time.strftime('%Y-%m-%d',time.localtime()), os.getpid()) LOGFILE = 'GRACE_error_failed_run_{0}_PID-{1:d}.log'.format(*args) - #-- create a unique log and open the log file + # create a unique log and open the log file DIRECTORY = os.path.expanduser(arguments.output_directory) fid = utilities.create_unique_file(os.path.join(DIRECTORY,LOGFILE)) logging.basicConfig(stream=fid, level=logging.INFO) - #-- print argument values sorted alphabetically + # print argument values sorted alphabetically logging.info('ARGUMENTS:') for arg, value in sorted(vars(arguments).items()): logging.info('{0}: {1}'.format(arg, value)) - #-- print traceback error + # print traceback error logging.info('\n\nTRACEBACK ERROR:') traceback.print_exc(file=fid) - #-- close the log file + # close the log file fid.close() -#-- PURPOSE: create argument parser +# PURPOSE: create argument parser def arguments(): parser = argparse.ArgumentParser( description="""Calculates the GRACE/GRACE-FO spatial errors @@ -491,8 +491,8 @@ def arguments(): fromfile_prefix_chars="@" ) 
parser.convert_arg_line_to_args = utilities.convert_arg_line_to_args - #-- command line parameters - #-- working data directory + # command line parameters + # working data directory parser.add_argument('--directory','-D', type=lambda p: os.path.abspath(os.path.expanduser(p)), default=os.getcwd(), @@ -504,30 +504,30 @@ def arguments(): parser.add_argument('--file-prefix','-P', type=str, help='Prefix string for input and output files') - #-- Data processing center or satellite mission + # Data processing center or satellite mission parser.add_argument('--center','-c', metavar='PROC', type=str, required=True, help='GRACE/GRACE-FO Processing Center') - #-- GRACE/GRACE-FO data release + # GRACE/GRACE-FO data release parser.add_argument('--release','-r', metavar='DREL', type=str, default='RL06', help='GRACE/GRACE-FO Data Release') - #-- GRACE/GRACE-FO Level-2 data product + # GRACE/GRACE-FO Level-2 data product parser.add_argument('--product','-p', metavar='DSET', type=str, default='GSM', help='GRACE/GRACE-FO Level-2 data product') - #-- minimum spherical harmonic degree + # minimum spherical harmonic degree parser.add_argument('--lmin', type=int, default=1, help='Minimum spherical harmonic degree') - #-- maximum spherical harmonic degree and order + # maximum spherical harmonic degree and order parser.add_argument('--lmax','-l', type=int, default=60, help='Maximum spherical harmonic degree') parser.add_argument('--mmax','-m', type=int, default=None, help='Maximum spherical harmonic order') - #-- start and end GRACE/GRACE-FO months + # start and end GRACE/GRACE-FO months parser.add_argument('--start','-S', type=int, default=4, help='Starting GRACE/GRACE-FO month') @@ -539,31 +539,31 @@ def arguments(): parser.add_argument('--missing','-N', metavar='MISSING', type=int, nargs='+', default=MISSING, help='Missing GRACE/GRACE-FO months') - #-- different treatments of the load Love numbers - #-- 0: Han and Wahr (1995) values from PREM - #-- 1: Gegout (2005) values from PREM - #-- 2: Wang et al. (2012) values from PREM + # different treatments of the load Love numbers + # 0: Han and Wahr (1995) values from PREM + # 1: Gegout (2005) values from PREM + # 2: Wang et al. 
(2012) values from PREM parser.add_argument('--love','-n', type=int, default=0, choices=[0,1,2], help='Treatment of the Load Love numbers') - #-- option for setting reference frame for gravitational load love number - #-- reference frame options (CF, CM, CE) + # option for setting reference frame for gravitational load love number + # reference frame options (CF, CM, CE) parser.add_argument('--reference', type=str.upper, default='CF', choices=['CF','CM','CE'], help='Reference frame for load Love numbers') - #-- Gaussian smoothing radius (km) + # Gaussian smoothing radius (km) parser.add_argument('--radius','-R', type=float, default=0, help='Gaussian smoothing radius (km)') - #-- Use a decorrelation (destriping) filter + # Use a decorrelation (destriping) filter parser.add_argument('--destripe','-d', default=False, action='store_true', help='Use decorrelation (destriping) filter') - #-- output units + # output units parser.add_argument('--units','-U', type=int, default=1, choices=[1,2,3,4,5], help='Output units') - #-- output grid parameters + # output grid parameters parser.add_argument('--spacing', type=float, nargs='+', default=[0.5,0.5], metavar=('dlon','dlat'), help='Spatial resolution of output data') @@ -574,25 +574,25 @@ def arguments(): parser.add_argument('--bounds', type=float, nargs=4, metavar=('lon_min','lon_max','lat_min','lat_max'), help='Bounding box for non-global grid') - #-- use atmospheric jump corrections from Fagiolini et al. (2015) + # use atmospheric jump corrections from Fagiolini et al. (2015) parser.add_argument('--atm-correction', default=False, action='store_true', help='Apply atmospheric jump correction coefficients') - #-- correct for pole tide drift follow Wahr et al. (2015) + # correct for pole tide drift following Wahr et al. 
(2015) parser.add_argument('--pole-tide', default=False, action='store_true', help='Correct for pole tide drift') - #-- Update Degree 1 coefficients with SLR or derived values - #-- Tellus: GRACE/GRACE-FO TN-13 from PO.DAAC - #-- https://grace.jpl.nasa.gov/data/get-data/geocenter/ - #-- SLR: satellite laser ranging from CSR - #-- ftp://ftp.csr.utexas.edu/pub/slr/geocenter/ - #-- UCI: Sutterley and Velicogna, Remote Sensing (2019) - #-- https://www.mdpi.com/2072-4292/11/18/2108 - #-- Swenson: GRACE-derived coefficients from Sean Swenson - #-- https://doi.org/10.1029/2007JB005338 - #-- GFZ: GRACE/GRACE-FO coefficients from GFZ GravIS - #-- http://gravis.gfz-potsdam.de/corrections + # Update Degree 1 coefficients with SLR or derived values + # Tellus: GRACE/GRACE-FO TN-13 from PO.DAAC + # https://grace.jpl.nasa.gov/data/get-data/geocenter/ + # SLR: satellite laser ranging from CSR + # ftp://ftp.csr.utexas.edu/pub/slr/geocenter/ + # UCI: Sutterley and Velicogna, Remote Sensing (2019) + # https://www.mdpi.com/2072-4292/11/18/2108 + # Swenson: GRACE-derived coefficients from Sean Swenson + # https://doi.org/10.1029/2007JB005338 + # GFZ: GRACE/GRACE-FO coefficients from GFZ GravIS + # http://gravis.gfz-potsdam.de/corrections parser.add_argument('--geocenter', metavar='DEG1', type=str, choices=['Tellus','SLR','SLF','UCI','Swenson','GFZ'], @@ -603,7 +603,7 @@ def arguments(): parser.add_argument('--interpolate-geocenter', default=False, action='store_true', help='Least-squares model missing Degree 1 coefficients') - #-- replace low degree harmonics with values from Satellite Laser Ranging + # replace low degree harmonics with values from Satellite Laser Ranging parser.add_argument('--slr-c20', type=str, default=None, choices=['CSR','GFZ','GSFC'], help='Replace C20 coefficients with SLR values') @@ -622,49 +622,49 @@ def arguments(): parser.add_argument('--slr-c50', type=str, default=None, choices=['CSR','GSFC','LARES'], help='Replace C50 coefficients with SLR values') - #-- input data format (ascii, netCDF4, HDF5) + # input data format (ascii, netCDF4, HDF5) parser.add_argument('--format','-F', type=str, default='netCDF4', choices=['ascii','netCDF4','HDF5'], help='Input/output data format') - #-- mean file to remove + # mean file to remove parser.add_argument('--mean-file', type=lambda p: os.path.abspath(os.path.expanduser(p)), help='GRACE/GRACE-FO mean file to remove from the harmonic data') - #-- input data format (ascii, netCDF4, HDF5) + # input data format (ascii, netCDF4, HDF5) parser.add_argument('--mean-format', type=str, default='netCDF4', choices=['ascii','netCDF4','HDF5','gfc'], help='Input data format for GRACE/GRACE-FO mean file') - #-- Output log file for each job in forms - #-- GRACE_error_run_2002-04-01_PID-00000.log - #-- GRACE_error_failed_run_2002-04-01_PID-00000.log + # Output log file for each job in forms + # GRACE_error_run_2002-04-01_PID-00000.log + # GRACE_error_failed_run_2002-04-01_PID-00000.log parser.add_argument('--log', default=False, action='store_true', help='Output log file for each job') - #-- print information about each input and output file + # print information about each input and output file parser.add_argument('--verbose','-V', action='count', default=0, help='Verbose output of run') - #-- permissions mode of the local directories and files (number in octal) + # permissions mode of the local directories and files (number in octal) parser.add_argument('--mode','-M', type=lambda x: int(x,base=8), default=0o775, help='Permissions mode of output files') - #-- 
return the parser + # return the parser return parser -#-- This is the main part of the program that calls the individual functions +# This is the main part of the program that calls the individual functions def main(): - #-- Read the system arguments listed after the program + # Read the system arguments listed after the program parser = arguments() args,_ = parser.parse_known_args() - #-- create logger + # create logger loglevels = [logging.CRITICAL,logging.INFO,logging.DEBUG] logging.basicConfig(level=loglevels[args.verbose]) - #-- try to run the analysis with listed parameters + # try to run the analysis with listed parameters try: info(args) - #-- run grace_spatial_error algorithm with parameters + # run grace_spatial_error algorithm with parameters output_files = grace_spatial_error( args.directory, args.center, @@ -703,17 +703,17 @@ def main(): VERBOSE=args.verbose, MODE=args.mode) except Exception as e: - #-- if there has been an error exception - #-- print the type, value, and stack trace of the - #-- current exception being handled + # if there has been an error exception + # print the type, value, and stack trace of the + # current exception being handled logging.critical(f'process id {os.getpid():d} failed') logging.error(traceback.format_exc()) - if args.log:#-- write failed job completion log file + if args.log:# write failed job completion log file output_error_log_file(args) else: - if args.log:#-- write successful job completion log file + if args.log:# write successful job completion log file output_log_file(args,output_files) -#-- run main program +# run main program if __name__ == '__main__': main() diff --git a/scripts/grace_spatial_maps.py b/scripts/grace_spatial_maps.py index a52f6e31..f36acdad 100755 --- a/scripts/grace_spatial_maps.py +++ b/scripts/grace_spatial_maps.py @@ -195,7 +195,7 @@ from gravity_toolkit.spatial import spatial from gravity_toolkit.units import units -#-- PURPOSE: keep track of threads +# PURPOSE: keep track of threads def info(args): logging.info(os.path.basename(sys.argv[0])) logging.info(args) @@ -204,8 +204,8 @@ def info(args): logging.info(f'parent process: {os.getppid():d}') logging.info(f'process id: {os.getpid():d}') -#-- PURPOSE: import GRACE/GRACE-FO files for a given months range -#-- Converts the GRACE/GRACE-FO harmonics applying the specified procedures +# PURPOSE: import GRACE/GRACE-FO files for a given months range +# Converts the GRACE/GRACE-FO harmonics applying the specified procedures def grace_spatial_maps(base_dir, PROC, DREL, DSET, LMAX, RAD, START=None, END=None, @@ -244,276 +244,276 @@ def grace_spatial_maps(base_dir, PROC, DREL, DSET, LMAX, RAD, VERBOSE=0, MODE=0o775): - #-- recursively create output directory if not currently existing + # recursively create output directory if not currently existing if not os.access(OUTPUT_DIRECTORY, os.F_OK): os.makedirs(OUTPUT_DIRECTORY, mode=MODE, exist_ok=True) - #-- list object of output files for file logs (full path) + # list object of output files for file logs (full path) output_files = [] - #-- file information + # file information suffix = dict(ascii='txt', netCDF4='nc', HDF5='H5') - #-- read arrays of kl, hl, and ll Love Numbers + # read arrays of kl, hl, and ll Love Numbers hl,kl,ll = load_love_numbers(LMAX, LOVE_NUMBERS=LOVE_NUMBERS, REFERENCE=REFERENCE) - #-- Calculating the Gaussian smoothing for radius RAD + # Calculating the Gaussian smoothing for radius RAD if (RAD != 0): wt = 2.0*np.pi*gauss_weights(RAD,LMAX) gw_str = f'_r{RAD:0.0f}km' else: - #-- else = 1 + # else 
= 1 wt = np.ones((LMAX+1)) gw_str = '' - #-- flag for spherical harmonic order + # flag for spherical harmonic order MMAX = np.copy(LMAX) if not MMAX else MMAX order_str = f'M{MMAX:d}' if (MMAX != LMAX) else '' - #-- reading GRACE months for input date range - #-- replacing low-degree harmonics with SLR values if specified - #-- include degree 1 (geocenter) harmonics if specified - #-- correcting for Pole-Tide and Atmospheric Jumps if specified + # reading GRACE months for input date range + # replacing low-degree harmonics with SLR values if specified + # include degree 1 (geocenter) harmonics if specified + # correcting for Pole-Tide and Atmospheric Jumps if specified Ylms = grace_input_months(base_dir, PROC, DREL, DSET, LMAX, START, END, MISSING, SLR_C20, DEG1, MMAX=MMAX, SLR_21=SLR_21, SLR_22=SLR_22, SLR_C30=SLR_C30, SLR_C40=SLR_C40, SLR_C50=SLR_C50, DEG1_FILE=DEG1_FILE, MODEL_DEG1=MODEL_DEG1, ATM=ATM, POLE_TIDE=POLE_TIDE) - #-- convert to harmonics object and remove mean if specified + # convert to harmonics object and remove mean if specified GRACE_Ylms = harmonics().from_dict(Ylms) GRACE_Ylms.directory = Ylms['directory'] - #-- use a mean file for the static field to remove + # use a mean file for the static field to remove if MEAN_FILE: - #-- read data form for input mean file (ascii, netCDF4, HDF5, gfc) + # read data format for input mean file (ascii, netCDF4, HDF5, gfc) mean_Ylms = harmonics().from_file(MEAN_FILE,format=MEANFORM,date=False) - #-- remove the input mean + # remove the input mean GRACE_Ylms.subtract(mean_Ylms) else: GRACE_Ylms.mean(apply=True) - #-- date information of GRACE/GRACE-FO coefficients + # date information of GRACE/GRACE-FO coefficients nfiles = len(GRACE_Ylms.time) - #-- filter GRACE/GRACE-FO coefficients + # filter GRACE/GRACE-FO coefficients if DESTRIPE: - #-- destriping GRACE/GRACE-FO coefficients + # destriping GRACE/GRACE-FO coefficients ds_str = '_FL' GRACE_Ylms = GRACE_Ylms.destripe() else: - #-- using standard GRACE/GRACE-FO harmonics + # using standard GRACE/GRACE-FO harmonics ds_str = '' - #-- input GIA spherical harmonic datafiles + # input GIA spherical harmonic datafiles GIA_Ylms_rate = read_GIA_model(GIA_FILE,GIA=GIA,LMAX=LMAX,MMAX=MMAX) gia_str = '_{0}'.format(GIA_Ylms_rate['title']) if GIA else '' - #-- calculate the monthly mass change from GIA + # calculate the monthly mass change from GIA GIA_Ylms = GRACE_Ylms.zeros_like() GIA_Ylms.time[:] = np.copy(GRACE_Ylms.time) GIA_Ylms.month[:] = np.copy(GRACE_Ylms.month) - #-- monthly GIA calculated by gia_rate*time elapsed - #-- finding change in GIA each month + # monthly GIA calculated by gia_rate*time elapsed + # finding change in GIA each month for t in range(nfiles): GIA_Ylms.clm[:,:,t] = GIA_Ylms_rate['clm']*(GIA_Ylms.time[t]-2003.3) GIA_Ylms.slm[:,:,t] = GIA_Ylms_rate['slm']*(GIA_Ylms.time[t]-2003.3) - #-- default file prefix + # default file prefix if not FILE_PREFIX: fargs = (PROC,DREL,DSET,Ylms['title'],gia_str) FILE_PREFIX = '{0}_{1}_{2}{3}{4}_'.format(*fargs) - #-- Read Ocean function and convert to Ylms for redistribution + # Read Ocean function and convert to Ylms for redistribution if REDISTRIBUTE_REMOVED: - #-- read Land-Sea Mask and convert to spherical harmonics + # read Land-Sea Mask and convert to spherical harmonics ocean_Ylms = ocean_stokes(LANDMASK,LMAX,MMAX=MMAX,LOVE=(hl,kl,ll)) ocean_str = '_OCN' else: ocean_str = '' - #-- input spherical harmonic datafiles to be removed from the GRACE data - #-- Remove sets of Ylms from the GRACE data before returning + # input spherical 
harmonic datafiles to be removed from the GRACE data + # Remove sets of Ylms from the GRACE data before returning remove_Ylms = GRACE_Ylms.zeros_like() remove_Ylms.time[:] = np.copy(GRACE_Ylms.time) remove_Ylms.month[:] = np.copy(GRACE_Ylms.month) if REMOVE_FILES: - #-- extend list if a single format was entered for all files + # extend list if a single format was entered for all files if len(REMOVE_FORMAT) < len(REMOVE_FILES): REMOVE_FORMAT = REMOVE_FORMAT*len(REMOVE_FILES) - #-- for each file to be removed + # for each file to be removed for REMOVE_FILE,REMOVEFORM in zip(REMOVE_FILES,REMOVE_FORMAT): if REMOVEFORM in ('ascii','netCDF4','HDF5'): - #-- ascii (.txt) - #-- netCDF4 (.nc) - #-- HDF5 (.H5) + # ascii (.txt) + # netCDF4 (.nc) + # HDF5 (.H5) Ylms = harmonics().from_file(REMOVE_FILE, format=REMOVEFORM) elif REMOVEFORM in ('index-ascii','index-netCDF4','index-HDF5'): - #-- read from index file + # read from index file _,removeform = REMOVEFORM.split('-') - #-- index containing files in data format + # index containing files in data format Ylms = harmonics().from_index(REMOVE_FILE, format=removeform) - #-- reduce to GRACE/GRACE-FO months and truncate to degree and order + # reduce to GRACE/GRACE-FO months and truncate to degree and order Ylms = Ylms.subset(GRACE_Ylms.month).truncate(lmax=LMAX,mmax=MMAX) - #-- distribute removed Ylms uniformly over the ocean + # distribute removed Ylms uniformly over the ocean if REDISTRIBUTE_REMOVED: - #-- calculate ratio between total removed mass and - #-- a uniformly distributed cm of water over the ocean + # calculate ratio between total removed mass and + # a uniformly distributed cm of water over the ocean ratio = Ylms.clm[0,0,:]/ocean_Ylms.clm[0,0] - #-- for each spherical harmonic - for m in range(0,MMAX+1):#-- MMAX+1 to include MMAX - for l in range(m,LMAX+1):#-- LMAX+1 to include LMAX - #-- remove the ratio*ocean Ylms from Ylms - #-- note: x -= y is equivalent to x = x - y + # for each spherical harmonic + for m in range(0,MMAX+1):# MMAX+1 to include MMAX + for l in range(m,LMAX+1):# LMAX+1 to include LMAX + # remove the ratio*ocean Ylms from Ylms + # note: x -= y is equivalent to x = x - y Ylms.clm[l,m,:] -= ratio*ocean_Ylms.clm[l,m] Ylms.slm[l,m,:] -= ratio*ocean_Ylms.slm[l,m] - #-- filter removed coefficients + # filter removed coefficients if DESTRIPE: Ylms = Ylms.destripe() - #-- add data for month t and INDEX_FILE to the total - #-- remove_clm and remove_slm matrices - #-- redistributing the mass over the ocean if specified + # add data for month t and INDEX_FILE to the total + # remove_clm and remove_slm matrices + # redistributing the mass over the ocean if specified remove_Ylms.add(Ylms) - #-- Output spatial data object + # Output spatial data object grid = spatial() - #-- Output Degree Spacing + # Output Degree Spacing dlon,dlat = (DDEG[0],DDEG[0]) if (len(DDEG) == 1) else (DDEG[0],DDEG[1]) - #-- Output Degree Interval + # Output Degree Interval if (INTERVAL == 1): - #-- (-180:180,90:-90) + # (-180:180,90:-90) nlon = np.int64((360.0/dlon)+1.0) nlat = np.int64((180.0/dlat)+1.0) grid.lon = -180 + dlon*np.arange(0,nlon) grid.lat = 90.0 - dlat*np.arange(0,nlat) elif (INTERVAL == 2): - #-- (Degree spacing)/2 + # (Degree spacing)/2 grid.lon = np.arange(-180+dlon/2.0,180+dlon/2.0,dlon) grid.lat = np.arange(90.0-dlat/2.0,-90.0-dlat/2.0,-dlat) nlon = len(grid.lon) nlat = len(grid.lat) elif (INTERVAL == 3): - #-- non-global grid set with BOUNDS parameter + # non-global grid set with BOUNDS parameter minlon,maxlon,minlat,maxlat = 
- #-- Output spatial data object + # Output spatial data object grid = spatial() - #-- Output Degree Spacing + # Output Degree Spacing dlon,dlat = (DDEG[0],DDEG[0]) if (len(DDEG) == 1) else (DDEG[0],DDEG[1]) - #-- Output Degree Interval + # Output Degree Interval if (INTERVAL == 1): - #-- (-180:180,90:-90) + # (-180:180,90:-90) nlon = np.int64((360.0/dlon)+1.0) nlat = np.int64((180.0/dlat)+1.0) grid.lon = -180 + dlon*np.arange(0,nlon) grid.lat = 90.0 - dlat*np.arange(0,nlat) elif (INTERVAL == 2): - #-- (Degree spacing)/2 + # (Degree spacing)/2 grid.lon = np.arange(-180+dlon/2.0,180+dlon/2.0,dlon) grid.lat = np.arange(90.0-dlat/2.0,-90.0-dlat/2.0,-dlat) nlon = len(grid.lon) nlat = len(grid.lat) elif (INTERVAL == 3): - #-- non-global grid set with BOUNDS parameter + # non-global grid set with BOUNDS parameter minlon,maxlon,minlat,maxlat = BOUNDS.copy() grid.lon = np.arange(minlon+dlon/2.0,maxlon+dlon/2.0,dlon) grid.lat = np.arange(maxlat-dlat/2.0,minlat-dlat/2.0,-dlat) nlon = len(grid.lon) nlat = len(grid.lat) - #-- Computing plms for converting to spatial domain + # Computing plms for converting to spatial domain theta = (90.0-grid.lat)*np.pi/180.0 PLM, dPLM = plm_holmes(LMAX, np.cos(theta)) - #-- Earth Parameters - #-- output spatial units + # Earth Parameters + # output spatial units unit_list = ['cmwe', 'mmGH', 'mmCU', u'\u03BCGal', 'mbar'] unit_name = ['Equivalent Water Thickness', 'Geoid Height', 'Elastic Crustal Uplift', 'Gravitational Undulation', 'Equivalent Surface Pressure'] - #-- Setting units factor for output - #-- dfactor computes the degree dependent coefficients + # Setting units factor for output + # dfactor computes the degree dependent coefficients if (UNITS == 1): - #-- 1: cmwe, centimeters water equivalent + # 1: cmwe, centimeters water equivalent dfactor = units(lmax=LMAX).harmonic(hl,kl,ll).cmwe elif (UNITS == 2): - #-- 2: mmGH, mm geoid height + # 2: mmGH, mm geoid height dfactor = units(lmax=LMAX).harmonic(hl,kl,ll).mmGH elif (UNITS == 3): - #-- 3: mmCU, mm elastic crustal deformation + # 3: mmCU, mm elastic crustal deformation dfactor = units(lmax=LMAX).harmonic(hl,kl,ll).mmCU elif (UNITS == 4): - #-- 4: micGal, microGal gravity perturbations + # 4: micGal, microGal gravity perturbations dfactor = units(lmax=LMAX).harmonic(hl,kl,ll).microGal elif (UNITS == 5): - #-- 5: mbar, millibars equivalent surface pressure + # 5: mbar, millibars equivalent surface pressure dfactor = units(lmax=LMAX).harmonic(hl,kl,ll).mbar else: raise ValueError(f'Invalid units code {UNITS:d}') - #-- output file format + # output file format file_format = '{0}{1}_L{2:d}{3}{4}{5}_{6:03d}.{7}'
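For reference, the file_format template above assembles each monthly output name from the file prefix, unit label, spherical harmonic truncation, and the order/smoothing/destriping flag strings. A short sketch with hypothetical values (an empty order string, a 300 km smoothing radius, and the destriping flag):

file_format = '{0}{1}_L{2:d}{3}{4}{5}_{6:03d}.{7}'
# hypothetical argument values for illustration
args = ('CSR_RL06_GSM_', 'cmwe', 60, '', '_r300km', '_FL', 104, 'nc')
print(file_format.format(*args))
# CSR_RL06_GSM_cmwe_L60_r300km_FL_104.nc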
- #-- converting harmonics to truncated, smoothed coefficients in units - #-- combining harmonics to calculate output spatial fields + # converting harmonics to truncated, smoothed coefficients in units + # combining harmonics to calculate output spatial fields for i,grace_month in enumerate(GRACE_Ylms.month): - #-- GRACE/GRACE-FO harmonics for time t + # GRACE/GRACE-FO harmonics for time t Ylms = GRACE_Ylms.index(i) - #-- Remove GIA rate for time + # Remove GIA rate for time Ylms.subtract(GIA_Ylms.index(i)) - #-- Remove monthly files to be removed + # Remove monthly files to be removed Ylms.subtract(remove_Ylms.index(i)) - #-- smooth harmonics and convert to output units + # smooth harmonics and convert to output units Ylms.convolve(dfactor*wt) - #-- convert spherical harmonics to output spatial grid + # convert spherical harmonics to output spatial grid grid.data = harmonic_summation(Ylms.clm, Ylms.slm, grid.lon, grid.lat, LMIN=LMIN, LMAX=LMAX, MMAX=MMAX, PLM=PLM).T grid.mask = np.zeros_like(grid.data, dtype=bool) - #-- copy time variables for month + # copy time variables for month grid.time = np.copy(Ylms.time) grid.month = np.copy(Ylms.month) - #-- output monthly files to ascii, netCDF4 or HDF5 + # output monthly files to ascii, netCDF4 or HDF5 args=(FILE_PREFIX,unit_list[UNITS-1],LMAX,order_str,gw_str, ds_str,grace_month,suffix[DATAFORM]) FILE=os.path.join(OUTPUT_DIRECTORY,file_format.format(*args)) if (DATAFORM == 'ascii'): - #-- ascii (.txt) + # ascii (.txt) grid.to_ascii(FILE, date=True, verbose=VERBOSE) elif (DATAFORM == 'netCDF4'): - #-- netCDF4 + # netCDF4 grid.to_netCDF4(FILE, date=True, verbose=VERBOSE, units=unit_list[UNITS-1], longname=unit_name[UNITS-1], title='GRACE/GRACE-FO Spatial Data') elif (DATAFORM == 'HDF5'): - #-- HDF5 + # HDF5 grid.to_HDF5(FILE, date=True, verbose=VERBOSE, units=unit_list[UNITS-1], longname=unit_name[UNITS-1], title='GRACE/GRACE-FO Spatial Data') - #-- set the permissions mode of the output files + # set the permissions mode of the output files os.chmod(FILE, MODE) - #-- add file to list + # add file to list output_files.append(FILE) - #-- return the list of output files + # return the list of output files return output_files -#-- PURPOSE: print a file log for the GRACE analysis +# PURPOSE: print a file log for the GRACE analysis def output_log_file(arguments,output_files): - #-- format: GRACE_processing_run_2002-04-01_PID-70335.log + # format: GRACE_processing_run_2002-04-01_PID-70335.log args = (time.strftime('%Y-%m-%d',time.localtime()), os.getpid()) LOGFILE = 'GRACE_processing_run_{0}_PID-{1:d}.log'.format(*args) - #-- create a unique log and open the log file + # create a unique log and open the log file DIRECTORY = os.path.expanduser(arguments.output_directory) fid = utilities.create_unique_file(os.path.join(DIRECTORY,LOGFILE)) logging.basicConfig(stream=fid, level=logging.INFO) - #-- print argument values sorted alphabetically + # print argument values sorted alphabetically logging.info('ARGUMENTS:') for arg, value in sorted(vars(arguments).items()): logging.info('{0}: {1}'.format(arg, value)) - #-- print output files + # print output files logging.info('\n\nOUTPUT FILES:') for f in output_files: logging.info('{0}'.format(f)) - #-- close the log file + # close the log file fid.close() -#-- PURPOSE: print a error file log for the GRACE analysis +# PURPOSE: print an error file log for the GRACE analysis def output_error_log_file(arguments): - #-- format: GRACE_processing_failed_run_2002-04-01_PID-70335.log + # format: GRACE_processing_failed_run_2002-04-01_PID-70335.log args = (time.strftime('%Y-%m-%d',time.localtime()), os.getpid()) LOGFILE = 'GRACE_processing_failed_run_{0}_PID-{1:d}.log'.format(*args) - #-- create a unique log and open the log file + # create a unique log and open the log file DIRECTORY = os.path.expanduser(arguments.output_directory) fid = utilities.create_unique_file(os.path.join(DIRECTORY,LOGFILE)) logging.basicConfig(stream=fid, level=logging.INFO) - #-- print argument values sorted alphabetically + # print argument values sorted alphabetically logging.info('ARGUMENTS:') for arg, value in sorted(vars(arguments).items()): logging.info('{0}: {1}'.format(arg, value)) - #-- print traceback error + # print traceback error logging.info('\n\nTRACEBACK ERROR:') traceback.print_exc(file=fid) - #-- close the log file + # close the log file fid.close() -#-- PURPOSE: create argument parser +# PURPOSE: create argument parser def arguments(): parser = argparse.ArgumentParser( description="""Calculates monthly spatial maps from GRACE/GRACE-FO @@ -522,8 +522,8 @@ def arguments(): fromfile_prefix_chars="@" ) parser.convert_arg_line_to_args = utilities.convert_arg_line_to_args - #-- command line parameters - #-- working data directory + # command line parameters + # working data directory parser.add_argument('--directory','-D', type=lambda p: os.path.abspath(os.path.expanduser(p)), default=os.getcwd(), @@ -535,30 +535,30 @@ def arguments(): parser.add_argument('--file-prefix','-P', type=str, help='Prefix string for input and output files') - #-- Data processing center or satellite mission + # Data processing center or satellite mission parser.add_argument('--center','-c', metavar='PROC', type=str, required=True, help='GRACE/GRACE-FO Processing Center') - #-- 
GRACE/GRACE-FO data release + # GRACE/GRACE-FO data release parser.add_argument('--release','-r', metavar='DREL', type=str, default='RL06', help='GRACE/GRACE-FO Data Release') - #-- GRACE/GRACE-FO Level-2 data product + # GRACE/GRACE-FO Level-2 data product parser.add_argument('--product','-p', metavar='DSET', type=str, default='GSM', help='GRACE/GRACE-FO Level-2 data product') - #-- minimum spherical harmonic degree + # minimum spherical harmonic degree parser.add_argument('--lmin', type=int, default=1, help='Minimum spherical harmonic degree') - #-- maximum spherical harmonic degree and order + # maximum spherical harmonic degree and order parser.add_argument('--lmax','-l', type=int, default=60, help='Maximum spherical harmonic degree') parser.add_argument('--mmax','-m', type=int, default=None, help='Maximum spherical harmonic order') - #-- start and end GRACE/GRACE-FO months + # start and end GRACE/GRACE-FO months parser.add_argument('--start','-S', type=int, default=4, help='Starting GRACE/GRACE-FO month') @@ -570,31 +570,31 @@ def arguments(): parser.add_argument('--missing','-N', metavar='MISSING', type=int, nargs='+', default=MISSING, help='Missing GRACE/GRACE-FO months') - #-- different treatments of the load Love numbers - #-- 0: Han and Wahr (1995) values from PREM - #-- 1: Gegout (2005) values from PREM - #-- 2: Wang et al. (2012) values from PREM + # different treatments of the load Love numbers + # 0: Han and Wahr (1995) values from PREM + # 1: Gegout (2005) values from PREM + # 2: Wang et al. (2012) values from PREM parser.add_argument('--love','-n', type=int, default=0, choices=[0,1,2], help='Treatment of the Load Love numbers') - #-- option for setting reference frame for gravitational load love number - #-- reference frame options (CF, CM, CE) + # option for setting reference frame for gravitational load love number + # reference frame options (CF, CM, CE) parser.add_argument('--reference', type=str.upper, default='CF', choices=['CF','CM','CE'], help='Reference frame for load Love numbers') - #-- Gaussian smoothing radius (km) + # Gaussian smoothing radius (km) parser.add_argument('--radius','-R', type=float, default=0, help='Gaussian smoothing radius (km)') - #-- Use a decorrelation (destriping) filter + # Use a decorrelation (destriping) filter parser.add_argument('--destripe','-d', default=False, action='store_true', help='Use decorrelation (destriping) filter') - #-- output units + # output units parser.add_argument('--units','-U', type=int, default=1, choices=[1,2,3,4,5], help='Output units') - #-- output grid parameters + # output grid parameters parser.add_argument('--spacing', type=float, nargs='+', default=[0.5,0.5], metavar=('dlon','dlat'), help='Spatial resolution of output data') @@ -605,7 +605,7 @@ def arguments(): parser.add_argument('--bounds', type=float, nargs=4, metavar=('lon_min','lon_max','lat_min','lat_max'), help='Bounding box for non-global grid') - #-- GIA model type list + # GIA model type list models = {} models['IJ05-R2'] = 'Ivins R2 GIA Models' models['W12a'] = 'Whitehouse GIA Models' @@ -619,33 +619,33 @@ def arguments(): models['ascii'] = 'reformatted GIA in ascii format' models['netCDF4'] = 'reformatted GIA in netCDF4 format' models['HDF5'] = 'reformatted GIA in HDF5 format' - #-- GIA model type + # GIA model type parser.add_argument('--gia','-G', type=str, metavar='GIA', choices=models.keys(), help='GIA model type to read') - #-- full path to GIA file + # full path to GIA file parser.add_argument('--gia-file', type=lambda p: 
os.path.abspath(os.path.expanduser(p)), help='GIA file to read') - #-- use atmospheric jump corrections from Fagiolini et al. (2015) + # use atmospheric jump corrections from Fagiolini et al. (2015) parser.add_argument('--atm-correction', default=False, action='store_true', help='Apply atmospheric jump correction coefficients') - #-- correct for pole tide drift follow Wahr et al. (2015) + # correct for pole tide drift following Wahr et al. (2015) parser.add_argument('--pole-tide', default=False, action='store_true', help='Correct for pole tide drift') - #-- Update Degree 1 coefficients with SLR or derived values - #-- Tellus: GRACE/GRACE-FO TN-13 from PO.DAAC - #-- https://grace.jpl.nasa.gov/data/get-data/geocenter/ - #-- SLR: satellite laser ranging from CSR - #-- ftp://ftp.csr.utexas.edu/pub/slr/geocenter/ - #-- UCI: Sutterley and Velicogna, Remote Sensing (2019) - #-- https://www.mdpi.com/2072-4292/11/18/2108 - #-- Swenson: GRACE-derived coefficients from Sean Swenson - #-- https://doi.org/10.1029/2007JB005338 - #-- GFZ: GRACE/GRACE-FO coefficients from GFZ GravIS - #-- http://gravis.gfz-potsdam.de/corrections + # Update Degree 1 coefficients with SLR or derived values + # Tellus: GRACE/GRACE-FO TN-13 from PO.DAAC + # https://grace.jpl.nasa.gov/data/get-data/geocenter/ + # SLR: satellite laser ranging from CSR + # ftp://ftp.csr.utexas.edu/pub/slr/geocenter/ + # UCI: Sutterley and Velicogna, Remote Sensing (2019) + # https://www.mdpi.com/2072-4292/11/18/2108 + # Swenson: GRACE-derived coefficients from Sean Swenson + # https://doi.org/10.1029/2007JB005338 + # GFZ: GRACE/GRACE-FO coefficients from GFZ GravIS + # http://gravis.gfz-potsdam.de/corrections parser.add_argument('--geocenter', metavar='DEG1', type=str, choices=['Tellus','SLR','SLF','UCI','Swenson','GFZ'], @@ -656,7 +656,7 @@ def arguments(): parser.add_argument('--interpolate-geocenter', default=False, action='store_true', help='Least-squares model missing Degree 1 coefficients') - #-- replace low degree harmonics with values from Satellite Laser Ranging + # replace low degree harmonics with values from Satellite Laser Ranging parser.add_argument('--slr-c20', type=str, default=None, choices=['CSR','GFZ','GSFC'], help='Replace C20 coefficients with SLR values') @@ -675,19 +675,19 @@ def arguments(): parser.add_argument('--slr-c50', type=str, default=None, choices=['CSR','GSFC','LARES'], help='Replace C50 coefficients with SLR values') - #-- input data format (ascii, netCDF4, HDF5) + # input data format (ascii, netCDF4, HDF5) parser.add_argument('--format','-F', type=str, default='netCDF4', choices=['ascii','netCDF4','HDF5'], help='Input/output data format') - #-- mean file to remove + # mean file to remove parser.add_argument('--mean-file', type=lambda p: os.path.abspath(os.path.expanduser(p)), help='GRACE/GRACE-FO mean file to remove from the harmonic data') - #-- input data format (ascii, netCDF4, HDF5) + # input data format (ascii, netCDF4, HDF5) parser.add_argument('--mean-format', type=str, default='netCDF4', choices=['ascii','netCDF4','HDF5','gfc'], help='Input data format for GRACE/GRACE-FO mean file') - #-- monthly files to be removed from the GRACE/GRACE-FO data + # monthly files to be removed from the GRACE/GRACE-FO data parser.add_argument('--remove-file', type=lambda p: os.path.abspath(os.path.expanduser(p)), nargs='+', help='Monthly files to be removed from the GRACE/GRACE-FO data') @@ -700,42 +700,42 @@ def arguments(): parser.add_argument('--redistribute-removed', default=False, action='store_true', help='Redistribute 
removed mass fields over the ocean') - #-- land-sea mask for redistributing fluxes + # land-sea mask for redistributing fluxes lsmask = utilities.get_data_path(['data','landsea_hd.nc']) parser.add_argument('--mask', type=lambda p: os.path.abspath(os.path.expanduser(p)), default=lsmask, help='Land-sea mask for redistributing land water flux') - #-- Output log file for each job in forms - #-- GRACE_processing_run_2002-04-01_PID-00000.log - #-- GRACE_processing_failed_run_2002-04-01_PID-00000.log + # Output log file for each job in forms + # GRACE_processing_run_2002-04-01_PID-00000.log + # GRACE_processing_failed_run_2002-04-01_PID-00000.log parser.add_argument('--log', default=False, action='store_true', help='Output log file for each job') - #-- print information about each input and output file + # print information about each input and output file parser.add_argument('--verbose','-V', action='count', default=0, help='Verbose output of run') - #-- permissions mode of the local directories and files (number in octal) + # permissions mode of the local directories and files (number in octal) parser.add_argument('--mode','-M', type=lambda x: int(x,base=8), default=0o775, help='Permissions mode of output files') - #-- return the parser + # return the parser return parser -#-- This is the main part of the program that calls the individual functions +# This is the main part of the program that calls the individual functions def main(): - #-- Read the system arguments listed after the program + # Read the system arguments listed after the program parser = arguments() args,_ = parser.parse_known_args() - #-- create logger + # create logger loglevels = [logging.CRITICAL,logging.INFO,logging.DEBUG] logging.basicConfig(level=loglevels[args.verbose]) - #-- try to run the analysis with listed parameters + # try to run the analysis with listed parameters try: info(args) - #-- run grace_spatial_maps algorithm with parameters + # run grace_spatial_maps algorithm with parameters output_files = grace_spatial_maps( args.directory, args.center, @@ -780,17 +780,17 @@ def main(): VERBOSE=args.verbose, MODE=args.mode) except Exception as e: - #-- if there has been an error exception - #-- print the type, value, and stack trace of the - #-- current exception being handled + # if there has been an error exception + # print the type, value, and stack trace of the + # current exception being handled logging.critical(f'process id {os.getpid():d} failed') logging.error(traceback.format_exc()) - if args.log:#-- write failed job completion log file + if args.log:# write failed job completion log file output_error_log_file(args) else: - if args.log:#-- write successful job completion log file + if args.log:# write successful job completion log file output_log_file(args,output_files) -#-- run main program +# run main program if __name__ == '__main__': main() diff --git a/scripts/itsg_graz_grace_sync.py b/scripts/itsg_graz_grace_sync.py index 99a9b871..ee98e65b 100755 --- a/scripts/itsg_graz_grace_sync.py +++ b/scripts/itsg_graz_grace_sync.py @@ -56,16 +56,16 @@ import posixpath import gravity_toolkit.utilities -#-- PURPOSE: sync local GRACE/GRACE-FO files with ITSG GRAZ server +# PURPOSE: sync local GRACE/GRACE-FO files with ITSG GRAZ server def itsg_graz_grace_sync(DIRECTORY, RELEASE=None, LMAX=None, TIMEOUT=0, LOG=False, LIST=False, MODE=0o775, CLOBBER=False): - #-- check if directory exists and recursively create if not + # check if directory exists and recursively create if not os.makedirs(DIRECTORY,MODE) if not 
os.path.exists(DIRECTORY) else None - #-- create log file with list of synchronized files (or print to terminal) + # create log file with list of synchronized files (or print to terminal) if LOG: - #-- output to log file - #-- format: ITSG_GRAZ_GRACE_sync_2002-04-01.log + # output to log file + # format: ITSG_GRAZ_GRACE_sync_2002-04-01.log today = time.strftime('%Y-%m-%d',time.localtime()) LOGFILE = f'ITSG_GRAZ_GRACE_sync_{today}.log' logging.basicConfig(filename=os.path.join(DIRECTORY,LOGFILE), @@ -74,14 +74,14 @@ def itsg_graz_grace_sync(DIRECTORY, RELEASE=None, LMAX=None, TIMEOUT=0, logging.info(f'Release: {RELEASE}') logging.info(f'LMAX: {LMAX:d}') else: - #-- standard output (terminal output) + # standard output (terminal output) logging.basicConfig(level=logging.INFO) - #-- ITSG GRAZ server + # ITSG GRAZ server HOST = ['http://ftp.tugraz.at','outgoing','ITSG','GRACE'] - #-- open connection with ITSG GRAZ server at remote directory + # open connection with ITSG GRAZ server at remote directory release_directory = f'ITSG-{RELEASE}' - #-- regular expression operators for ITSG data and models + # regular expression operators for ITSG data and models itsg_products = [] itsg_products.append(r'atmosphere') itsg_products.append(r'dealiasing') @@ -94,89 +94,89 @@ def itsg_graz_grace_sync(DIRECTORY, RELEASE=None, LMAX=None, TIMEOUT=0, itsg_pattern = (r'(AOD1B_RL\d+|model|ITSG)[-_]({0})(_n\d+)?_' r'(\d+)-(\d+)(\.gfc)').format(r'|'.join(itsg_products)) R1 = re.compile(itsg_pattern, re.VERBOSE | re.IGNORECASE) - #-- local directory for release + # local directory for release DREL = {} DREL['Grace2014'] = '2014' DREL['Grace2016'] = '2016' DREL['Grace2018'] = '2018' DREL['Grace_operational'] = '2018' - #-- local dealiasing directories for each product + # local dealiasing directories for each product DEALIASING = {} DEALIASING['atmosphere'] = 'GAA' DEALIASING['ocean'] = 'GAB' DEALIASING['dealiasing'] = 'GAC' DEALIASING['oceanBottomPressure'] = 'GAD' - #-- sync ITSG GRAZ dealiasing products + # sync ITSG GRAZ dealiasing products subdir = 'background' if (RELEASE == 'Grace2014') else 'monthly_background' REMOTE = [*HOST,release_directory,'monthly',subdir] files,mtimes = gravity_toolkit.utilities.http_list(REMOTE, timeout=TIMEOUT,pattern=R1,sort=True) - #-- for each file on the remote directory + # for each file on the remote directory for colname,remote_mtime in zip(files,mtimes): - #-- extract parameters from input filename + # extract parameters from input filename PFX,PRD,trunc,year,month,SFX = R1.findall(colname).pop() - #-- local directory for output GRAZ data + # local directory for output GRAZ data local_dir=os.path.join(DIRECTORY,'GRAZ',DREL[RELEASE],DEALIASING[PRD]) - #-- check if local directory exists and recursively create if not + # check if local directory exists and recursively create if not os.makedirs(local_dir,MODE) if not os.path.exists(local_dir) else None - #-- local and remote versions of the file + # local and remote versions of the file local_file = os.path.join(local_dir,colname) remote_file = posixpath.join(*REMOTE,colname) - #-- copy file from remote directory comparing modified dates + # copy file from remote directory comparing modified dates http_pull_file(remote_file, remote_mtime, local_file, TIMEOUT=TIMEOUT, LIST=LIST, CLOBBER=CLOBBER, MODE=MODE) - #-- sync ITSG GRAZ data for truncation + # sync ITSG GRAZ data for truncation subdir = f'monthly_n{LMAX:d}' REMOTE = [*HOST,release_directory,'monthly',subdir] files,mtimes = gravity_toolkit.utilities.http_list(REMOTE, 
timeout=TIMEOUT,pattern=R1,sort=True) - #-- local directory for output GRAZ data + # local directory for output GRAZ data local_dir = os.path.join(DIRECTORY,'GRAZ',DREL[RELEASE],'GSM') - #-- check if local directory exists and recursively create if not + # check if local directory exists and recursively create if not os.makedirs(local_dir,MODE) if not os.path.exists(local_dir) else None - #-- for each file on the remote directory + # for each file on the remote directory for colname,remote_mtime in zip(files,mtimes): - #-- local and remote versions of the file + # local and remote versions of the file local_file = os.path.join(local_dir,colname) remote_file = posixpath.join(*REMOTE,colname) - #-- copy file from remote directory comparing modified dates + # copy file from remote directory comparing modified dates http_pull_file(remote_file, remote_mtime, local_file, TIMEOUT=TIMEOUT, LIST=LIST, CLOBBER=CLOBBER, MODE=MODE) - #-- create index file for GRACE/GRACE-FO L2 Spherical Harmonic Data - #-- DATA PRODUCTS (GAC, GAD, GSM, GAA, GAB) + # create index file for GRACE/GRACE-FO L2 Spherical Harmonic Data + # DATA PRODUCTS (GAC, GAD, GSM, GAA, GAB) for ds in ['GAA','GAB','GAC','GAD','GSM']: - #-- local directory for exact data product + # local directory for exact data product local_dir = os.path.join(DIRECTORY,'GRAZ',DREL[RELEASE],ds) if not os.access(local_dir,os.F_OK): continue - #-- find local GRACE files to create index + # find local GRACE files to create index grace_files=[fi for fi in os.listdir(local_dir) if R1.match(fi)] - #-- outputting GRACE filenames to index + # outputting GRACE filenames to index with open(os.path.join(local_dir,'index.txt'),'w') as fid: for fi in sorted(grace_files): print(fi, file=fid) - #-- change permissions of index file + # change permissions of index file os.chmod(os.path.join(local_dir,'index.txt'), MODE) - #-- close log file and set permissions level to MODE + # close log file and set permissions level to MODE if LOG: os.chmod(os.path.join(DIRECTORY,LOGFILE), MODE) -#-- PURPOSE: pull file from a remote host checking if file exists locally -#-- and if the remote file is newer than the local file +# PURPOSE: pull file from a remote host checking if file exists locally +# and if the remote file is newer than the local file def http_pull_file(remote_file,remote_mtime,local_file, TIMEOUT=0,LIST=False,CLOBBER=False,MODE=0o775): - #-- if file exists in file system: check if remote file is newer + # if file exists in file system: check if remote file is newer TEST = False OVERWRITE = ' (clobber)' - #-- check if local version of file exists + # check if local version of file exists if os.access(local_file, os.F_OK): - #-- check last modification time of local file + # check last modification time of local file local_mtime = os.stat(local_file).st_mtime - #-- if remote file is newer: overwrite the local file + # if remote file is newer: overwrite the local file if (gravity_toolkit.utilities.even(remote_mtime) > gravity_toolkit.utilities.even(local_mtime)): TEST = True @@ -184,42 +184,42 @@ def http_pull_file(remote_file,remote_mtime,local_file, else: TEST = True OVERWRITE = ' (new)' - #-- if file does not exist locally, is to be overwritten, or CLOBBER is set + # if file does not exist locally, is to be overwritten, or CLOBBER is set if TEST or CLOBBER: - #-- Printing files transferred + # Printing files transferred logging.info(f'{remote_file} --> ') logging.info(f'\t{local_file}{OVERWRITE}\n') - #-- if executing copy command (not only printing the files) + # if 
executing copy command (not only printing the files) if not LIST: - #-- Create and submit request. There are a wide range of exceptions - #-- that can be thrown here, including HTTPError and URLError. + # Create and submit request. There are a wide range of exceptions + # that can be thrown here, including HTTPError and URLError. request = gravity_toolkit.utilities.urllib2.Request(remote_file) response = gravity_toolkit.utilities.urllib2.urlopen(request, timeout=TIMEOUT) - #-- chunked transfer encoding size + # chunked transfer encoding size CHUNK = 16 * 1024 - #-- copy contents to local file using chunked transfer encoding - #-- transfer should work properly with ascii and binary data formats + # copy contents to local file using chunked transfer encoding + # transfer should work properly with ascii and binary data formats with open(local_file, 'wb') as f: shutil.copyfileobj(response, f, CHUNK) - #-- keep remote modification time of file and local access time + # keep remote modification time of file and local access time os.utime(local_file, (os.stat(local_file).st_atime, remote_mtime)) os.chmod(local_file, MODE) -#-- PURPOSE: create argument parser +# PURPOSE: create argument parser def arguments(): parser = argparse.ArgumentParser( description="""Syncs GRACE/GRACE-FO and auxiliary data from the ITSG GRAZ server """ ) - #-- command line parameters - #-- working data directory + # command line parameters + # working data directory parser.add_argument('--directory','-D', type=lambda p: os.path.abspath(os.path.expanduser(p)), default=os.getcwd(), help='Working data directory') - #-- ITSG GRAZ releases + # ITSG GRAZ releases choices = ['Grace2014','Grace2016','Grace2018','Grace_operational'] parser.add_argument('--release','-r', type=str, nargs='+', metavar='DREL', @@ -228,44 +228,44 @@ def arguments(): parser.add_argument('--lmax', type=int, default=60, choices=[60,96,120], help='Maximum degree and order of GRAZ products') - #-- connection timeout + # connection timeout parser.add_argument('--timeout','-t', type=int, default=360, help='Timeout in seconds for blocking operations') - #-- Output log file in form - #-- ITSG_GRAZ_GRACE_sync_2002-04-01.log + # Output log file in form + # ITSG_GRAZ_GRACE_sync_2002-04-01.log parser.add_argument('--log','-l', default=False, action='store_true', help='Output log file') - #-- sync options + # sync options parser.add_argument('--list','-L', default=False, action='store_true', help='Only print files that could be transferred') parser.add_argument('--clobber','-C', default=False, action='store_true', help='Overwrite existing data in transfer') - #-- permissions mode of the directories and files synced (number in octal) + # permissions mode of the directories and files synced (number in octal) parser.add_argument('--mode','-M', type=lambda x: int(x,base=8), default=0o775, help='Permission mode of directories and files synced') - #-- return the parser + # return the parser return parser -#-- This is the main part of the program that calls the individual functions +# This is the main part of the program that calls the individual functions def main(): - #-- Read the system arguments listed after the program + # Read the system arguments listed after the program parser = arguments() args,_ = parser.parse_known_args() - #-- check internet connection before attempting to run program + # check internet connection before attempting to run program HOST = posixpath.join('http://ftp.tugraz.at') if gravity_toolkit.utilities.check_connection(HOST): - #-- for each 
ITSG GRAZ release + # for each ITSG GRAZ release for RELEASE in args.release: itsg_graz_grace_sync(args.directory, RELEASE=RELEASE, LMAX=args.lmax, TIMEOUT=args.timeout, LOG=args.log, LIST=args.list, CLOBBER=args.clobber, MODE=args.mode) -#-- run main program +# run main program if __name__ == '__main__': main() diff --git a/scripts/make_grace_index.py b/scripts/make_grace_index.py index 9ebd5f6e..1b0d6dd6 100644 --- a/scripts/make_grace_index.py +++ b/scripts/make_grace_index.py @@ -35,105 +35,105 @@ import argparse from gravity_toolkit.utilities import compile_regex_pattern -#-- PURPOSE: Creates index files of GRACE/GRACE-FO data +# PURPOSE: Creates index files of GRACE/GRACE-FO data def make_grace_index(DIRECTORY, PROC=[], DREL=[], DSET=[], VERSION=[], MODE=None): - #-- mission shortnames + # mission shortnames shortname = {'grace':'GRAC', 'grace-fo':'GRFO'} - #-- GRACE/GRACE-FO level-2 spherical harmonic products + # GRACE/GRACE-FO level-2 spherical harmonic products logging.info('GRACE/GRACE-FO L2 Global Spherical Harmonics:') - #-- for each processing center (CSR, GFZ, JPL) + # for each processing center (CSR, GFZ, JPL) for pr in PROC: - #-- for each data release (RL04, RL05, RL06) + # for each data release (RL04, RL05, RL06) for rl in DREL: - #-- for each level-2 product + # for each level-2 product for ds in DSET: - #-- local directory for exact data product + # local directory for exact data product local_dir = os.path.join(DIRECTORY, pr, rl, ds) - #-- check if local directory exists + # check if local directory exists if not os.access(local_dir, os.F_OK): continue - #-- list of GRACE/GRACE-FO files for index + # list of GRACE/GRACE-FO files for index grace_files = [] - #-- for each satellite mission (grace, grace-fo) + # for each satellite mission (grace, grace-fo) for i,mi in enumerate(['grace','grace-fo']): - #-- print string of exact data product + # print string of exact data product logging.info(f'{mi} {pr}/{rl}/{ds}') - #-- regular expression operator for data product + # regular expression operator for data product rx = compile_regex_pattern(pr, rl, ds, mission=shortname[mi], version=VERSION[i]) - #-- find local GRACE/GRACE-FO files to create index + # find local GRACE/GRACE-FO files to create index granules = [f for f in os.listdir(local_dir) if rx.match(f)] - #-- extend list of GRACE/GRACE-FO files + # extend list of GRACE/GRACE-FO files grace_files.extend(granules) - #-- outputting GRACE/GRACE-FO filenames to index + # outputting GRACE/GRACE-FO filenames to index with open(os.path.join(local_dir,'index.txt'),'w') as fid: for fi in sorted(grace_files): print(fi, file=fid) - #-- change permissions of index file + # change permissions of index file os.chmod(os.path.join(local_dir,'index.txt'), MODE) -#-- PURPOSE: create argument parser +# PURPOSE: create argument parser def arguments(): parser = argparse.ArgumentParser( description="""Creates index files of GRACE/GRACE-FO monthly Level-2 data """ ) - #-- command line parameters - # #-- working data directory + # command line parameters + # working data directory parser.add_argument('--directory','-D', type=lambda p: os.path.abspath(os.path.expanduser(p)), default=os.getcwd(), help='Working data directory') - #-- GRACE/GRACE-FO processing center + # GRACE/GRACE-FO processing center parser.add_argument('--center','-c', metavar='PROC', type=str, nargs='+', default=['CSR','GFZ','JPL'], choices=['CSR','GFZ','JPL'], help='GRACE/GRACE-FO processing center')
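The argument parsers in these scripts share a few conventions: user-supplied paths are tilde-expanded with a lambda type converter, permission modes are parsed as octal strings, and the processing scripts set fromfile_prefix_chars="@" (swapping in the toolkit's own convert_arg_line_to_args) so long option sets can live in a text file. A minimal standalone argparse sketch of those patterns, separate from the parsers defined here:

import argparse
import os

parser = argparse.ArgumentParser(fromfile_prefix_chars='@')
# expand '~' and relative segments in paths at parse time
parser.add_argument('--directory', '-D',
    type=lambda p: os.path.abspath(os.path.expanduser(p)),
    default=os.getcwd())
# interpret permission strings such as '775' as octal numbers
parser.add_argument('--mode', '-M',
    type=lambda x: int(x, base=8), default=0o775)
args = parser.parse_args(['--directory', '~/data', '--mode', '775'])
print(args.directory)  # absolute path, e.g. /home/user/data
print(oct(args.mode))  # 0o775 (509 in decimal)
# with argparse's default file parsing, one token per line:
with open('example_args.txt', 'w') as fid:
    fid.write('--mode\n775\n')
args = parser.parse_args(['@example_args.txt'])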
- #-- GRACE/GRACE-FO data release + # GRACE/GRACE-FO data release parser.add_argument('--release','-r', metavar='DREL', type=str, nargs='+', default=['RL06'], choices=['RL06'], help='GRACE/GRACE-FO data release') - #-- GRACE/GRACE-FO data product + # GRACE/GRACE-FO data product parser.add_argument('--product','-p', metavar='DSET', type=str.upper, nargs='+', default=['GSM'], choices=['GAA','GAB','GAC','GAD','GSM'], help='GRACE/GRACE-FO Level-2 data product') - #-- GRACE/GRACE-FO data version + # GRACE/GRACE-FO data version parser.add_argument('--version','-v', metavar='VERSION', type=str, nargs=2, default=['0','1'], choices=['0','1','2','3'], help='GRACE/GRACE-FO Level-2 data version') - #-- verbose will output information about each output file + # verbose will output information about each output file parser.add_argument('--verbose','-V', action='count', default=0, help='Verbose output of processing run') - #-- permissions mode of the directories and files synced (number in octal) + # permissions mode of the directories and files synced (number in octal) parser.add_argument('--mode','-M', type=lambda x: int(x,base=8), default=0o775, help='Permission mode of files created') - #-- return the parser + # return the parser return parser -#-- This is the main part of the program that calls the individual functions +# This is the main part of the program that calls the individual functions def main(): - #-- Read the system arguments listed after the program + # Read the system arguments listed after the program parser = arguments() args,_ = parser.parse_known_args() - #-- create logger + # create logger loglevels = [logging.CRITICAL,logging.INFO,logging.DEBUG] logging.basicConfig(level=loglevels[args.verbose]) - #-- run program with parameters + # run program with parameters make_grace_index(args.directory, PROC=args.center, DREL=args.release, DSET=args.product, VERSION=args.version, MODE=args.mode) -#-- run main program +# run main program if __name__ == '__main__': main() diff --git a/scripts/mascon_reconstruct.py b/scripts/mascon_reconstruct.py index 42e4084a..372624e1 100644 --- a/scripts/mascon_reconstruct.py +++ b/scripts/mascon_reconstruct.py @@ -125,7 +125,7 @@ from gravity_toolkit.harmonics import harmonics from gravity_toolkit.units import units -#-- PURPOSE: keep track of threads +# PURPOSE: keep track of threads def info(args): logging.info(os.path.basename(sys.argv[0])) logging.info(args) @@ -134,12 +134,12 @@ def info(args): logging.info(f'parent process: {os.getppid():d}') logging.info(f'process id: {os.getpid():d}') -#-- PURPOSE: tilde-compress a file path string +# PURPOSE: tilde-compress a file path string def tilde_compress(file_path): return file_path.replace(os.path.expanduser('~'),'~') -#-- PURPOSE: Reconstruct spherical harmonic fields from the mascon -#-- time series calculated in calc_mascon +# PURPOSE: Reconstruct spherical harmonic fields from the mascon +# time series calculated in calc_mascon def mascon_reconstruct(DSET, LMAX, RAD, START=None, END=None, @@ -158,125 +158,125 @@ def mascon_reconstruct(DSET, LMAX, RAD, OUTPUT_DIRECTORY=None, MODE=0o775): - #-- for datasets not GSM: will add a label for the dataset + # for datasets not GSM: will add a label for the dataset dset_str = '' if (DSET == 'GSM') else f'_{DSET}' - #-- atmospheric ECMWF "jump" flag (if ATM) + # atmospheric ECMWF "jump" flag (if ATM) atm_str = '_wATM' if ATM else '' - #-- Gaussian smoothing string for radius RAD + # Gaussian smoothing string for radius RAD gw_str = f'_r{RAD:0.0f}km' if (RAD != 0) else '' - #-- input GIA spherical harmonic datafiles + # input GIA 
spherical harmonic datafiles GIA_Ylms_rate = read_GIA_model(GIA_FILE,GIA=GIA,LMAX=LMAX,MMAX=MMAX) gia_str = '_{0}'.format(GIA_Ylms_rate['title']) if GIA else '' - #-- output string for both LMAX==MMAX and LMAX != MMAX cases + # output string for both LMAX==MMAX and LMAX != MMAX cases MMAX = np.copy(LMAX) if not MMAX else MMAX order_str = f'M{MMAX:d}' if (MMAX != LMAX) else '' - #-- filter grace coefficients flag + # filter grace coefficients flag ds_str = '_FL' if DESTRIPE else '' - #-- output filename suffix + # output filename suffix suffix = dict(ascii='txt', netCDF4='nc', HDF5='H5') - #-- file parser for reading index files - #-- removes commented lines (can comment out files in the index) - #-- removes empty lines (if there are extra empty lines) + # file parser for reading index files + # removes commented lines (can comment out files in the index) + # removes empty lines (if there are extra empty lines) parser = re.compile(r'^(?!\#|\%|$)', re.VERBOSE) - #-- create initial reconstruct index for calc_mascon.py + # create initial reconstruct index for calc_mascon.py fid = open(RECONSTRUCT_FILE,'w') - #-- output file format + # output file format file_format = '{0}{1}{2}{3}{4}_L{5:d}{6}{7}{8}_{9:03d}-{10:03d}.{11}' - #-- read load love numbers + # read load love numbers hl,kl,ll = load_love_numbers(LMAX, LOVE_NUMBERS=LOVE_NUMBERS, REFERENCE=REFERENCE) - #-- Earth Parameters + # Earth Parameters factors = units(lmax=LMAX).harmonic(hl,kl,ll) - #-- Average Density of the Earth [g/cm^3] + # Average Density of the Earth [g/cm^3] rho_e = factors.rho_e - #-- Average Radius of the Earth [cm] + # Average Radius of the Earth [cm] rad_e = factors.rad_e - #-- Read Ocean function and convert to Ylms for redistribution + # Read Ocean function and convert to Ylms for redistribution if REDISTRIBUTE_MASCONS: - #-- read Land-Sea Mask and convert to spherical harmonics + # read Land-Sea Mask and convert to spherical harmonics ocean_Ylms = ocean_stokes(LANDMASK,LMAX,MMAX=MMAX,LOVE=(hl,kl,ll)) ocean_str = '_OCN' else: - #-- not distributing uniformly over ocean + # not distributing uniformly over ocean ocean_str = '' - #-- input mascon spherical harmonic datafiles + # input mascon spherical harmonic datafiles with open(MASCON_FILE, mode='r', encoding='utf8') as f: mascon_files = [l for l in f.read().splitlines() if parser.match(l)] for k,fi in enumerate(mascon_files): - #-- read mascon spherical harmonics + # read mascon spherical harmonics if (DATAFORM == 'ascii'): - #-- ascii (.txt) + # ascii (.txt) Ylms=harmonics().from_ascii(os.path.expanduser(fi),date=False) elif (DATAFORM == 'netCDF4'): - #-- netcdf (.nc) + # netcdf (.nc) Ylms=harmonics().from_netCDF4(os.path.expanduser(fi),date=False) elif (DATAFORM == 'HDF5'): - #-- HDF5 (.H5) + # HDF5 (.H5) Ylms=harmonics().from_HDF5(os.path.expanduser(fi),date=False) - #-- Calculating the total mass of each mascon (1 cmwe uniform) + # Calculating the total mass of each mascon (1 cmwe uniform) total_area = 4.0*np.pi*(rad_e**3)*rho_e*Ylms.clm[0,0]/3.0 - #-- distribute mascon mass uniformly over the ocean + # distribute mascon mass uniformly over the ocean if REDISTRIBUTE_MASCONS: - #-- calculate ratio between total mascon mass and - #-- a uniformly distributed cm of water over the ocean + # calculate ratio between total mascon mass and + # a uniformly distributed cm of water over the ocean ratio = Ylms.clm[0,0]/ocean_Ylms.clm[0,0] - #-- for each spherical harmonic - for m in range(0,MMAX+1):#-- MMAX+1 to include MMAX - for l in range(m,LMAX+1):#-- LMAX+1 to include 
LMAX - #-- remove ratio*ocean Ylms from mascon Ylms - #-- note: x -= y is equivalent to x = x - y + # for each spherical harmonic + for m in range(0,MMAX+1):# MMAX+1 to include MMAX + for l in range(m,LMAX+1):# LMAX+1 to include LMAX + # remove ratio*ocean Ylms from mascon Ylms + # note: x -= y is equivalent to x = x - y Ylms.clm[l,m] -= ratio*ocean_Ylms.clm[l,m] Ylms.slm[l,m] -= ratio*ocean_Ylms.slm[l,m] - #-- truncate mascon spherical harmonics to d/o LMAX/MMAX + # truncate mascon spherical harmonics to d/o LMAX/MMAX Ylms = Ylms.truncate(lmax=LMAX, mmax=MMAX) - #-- mascon base is the file without directory or suffix + # mascon base is the file without directory or suffix mascon_base = os.path.basename(fi) mascon_base = os.path.splitext(mascon_base)[0] - #-- if lower case, will capitalize + # if lower case, will capitalize mascon_base = mascon_base.upper() - #-- if mascon name contains degree and order info, remove + # if mascon name contains degree and order info, remove mascon_name = mascon_base.replace(f'_L{LMAX:d}', '') - #-- input filename format (for both LMAX==MMAX and LMAX != MMAX cases): - #-- mascon name, GRACE dataset, GIA model, LMAX, (MMAX,) - #-- Gaussian smoothing, filter flag, remove reconstructed fields flag - #-- output GRACE error file + # input filename format (for both LMAX==MMAX and LMAX != MMAX cases): + # mascon name, GRACE dataset, GIA model, LMAX, (MMAX,) + # Gaussian smoothing, filter flag, remove reconstructed fields flag + # output GRACE error file args = (mascon_name,dset_str,gia_str.upper(),atm_str,ocean_str, LMAX,order_str,gw_str,ds_str) file_input = '{0}{1}{2}{3}{4}_L{5:d}{6}{7}{8}.txt'.format(*args) mascon_data_input=np.loadtxt(os.path.join(OUTPUT_DIRECTORY,file_input)) - #-- convert mascon time-series from Gt to cmwe + # convert mascon time-series from Gt to cmwe mascon_sigma = 1e15*mascon_data_input[:,2]/total_area - #-- mascon time-series Ylms + # mascon time-series Ylms mascon_Ylms = Ylms.scale(mascon_sigma) mascon_Ylms.time = mascon_data_input[:,1].copy() mascon_Ylms.month = mascon_data_input[:,0].astype(np.int64) - #-- output to file: no ascii option + # output to file: no ascii option args = (mascon_name,dset_str,gia_str.upper(),atm_str,ocean_str, LMAX,order_str,gw_str,ds_str,START,END,suffix[DATAFORM]) FILE = file_format.format(*args) - #-- output harmonics to file + # output harmonics to file if (DATAFORM == 'netCDF4'): - #-- netcdf (.nc) + # netcdf (.nc) mascon_Ylms.to_netCDF4(os.path.join(OUTPUT_DIRECTORY,FILE)) elif (DATAFORM == 'HDF5'): - #-- HDF5 (.H5) + # HDF5 (.H5) mascon_Ylms.to_HDF5(os.path.join(OUTPUT_DIRECTORY,FILE)) - #-- print file name to index + # print file name to index print(tilde_compress(os.path.join(OUTPUT_DIRECTORY,FILE)),file=fid) - #-- change the permissions mode + # change the permissions mode os.chmod(os.path.join(OUTPUT_DIRECTORY,FILE),MODE) - #-- close the reconstruct index + # close the reconstruct index fid.close() - #-- change the permissions mode of the index file + # change the permissions mode of the index file os.chmod(RECONSTRUCT_FILE,MODE) -#-- PURPOSE: create argument parser +# PURPOSE: create argument parser def arguments(): parser = argparse.ArgumentParser( description="""Calculates the equivalent spherical @@ -285,50 +285,50 @@ def arguments(): fromfile_prefix_chars="@" ) parser.convert_arg_line_to_args = utilities.convert_arg_line_to_args - #-- command line parameters + # command line parameters parser.add_argument('--output-directory','-O', type=lambda p: os.path.abspath(os.path.expanduser(p)), 
default=os.getcwd(), help='Output directory for mascon files') - #-- GRACE/GRACE-FO Level-2 data product + # GRACE/GRACE-FO Level-2 data product parser.add_argument('--product','-p', metavar='DSET', type=str, default='GSM', help='GRACE/GRACE-FO Level-2 data product') - #-- maximum spherical harmonic degree and order + # maximum spherical harmonic degree and order parser.add_argument('--lmax','-l', type=int, default=60, help='Maximum spherical harmonic degree') parser.add_argument('--mmax','-m', type=int, default=None, help='Maximum spherical harmonic order') - #-- start and end GRACE/GRACE-FO months + # start and end GRACE/GRACE-FO months parser.add_argument('--start','-S', type=int, default=4, help='Starting GRACE/GRACE-FO month') parser.add_argument('--end','-E', type=int, default=232, help='Ending GRACE/GRACE-FO month') - #-- different treatments of the load Love numbers - #-- 0: Han and Wahr (1995) values from PREM - #-- 1: Gegout (2005) values from PREM - #-- 2: Wang et al. (2012) values from PREM + # different treatments of the load Love numbers + # 0: Han and Wahr (1995) values from PREM + # 1: Gegout (2005) values from PREM + # 2: Wang et al. (2012) values from PREM parser.add_argument('--love','-n', type=int, default=0, choices=[0,1,2], help='Treatment of the Load Love numbers') - #-- option for setting reference frame for gravitational load love number - #-- reference frame options (CF, CM, CE) + # option for setting reference frame for gravitational load love number + # reference frame options (CF, CM, CE) parser.add_argument('--reference', type=str.upper, default='CF', choices=['CF','CM','CE'], help='Reference frame for load Love numbers') - #-- Gaussian smoothing radius (km) + # Gaussian smoothing radius (km) parser.add_argument('--radius','-R', type=float, default=0, help='Gaussian smoothing radius (km)') - #-- Use a decorrelation (destriping) filter + # Use a decorrelation (destriping) filter parser.add_argument('--destripe','-d', default=False, action='store_true', help='Use decorrelation (destriping) filter') - #-- GIA model type list + # GIA model type list models = {} models['IJ05-R2'] = 'Ivins R2 GIA Models' models['W12a'] = 'Whitehouse GIA Models' @@ -342,63 +342,63 @@ def arguments(): models['ascii'] = 'reformatted GIA in ascii format' models['netCDF4'] = 'reformatted GIA in netCDF4 format' models['HDF5'] = 'reformatted GIA in HDF5 format' - #-- GIA model type + # GIA model type parser.add_argument('--gia','-G', type=str, metavar='GIA', choices=models.keys(), help='GIA model type to read') - #-- full path to GIA file + # full path to GIA file parser.add_argument('--gia-file', type=lambda p: os.path.abspath(os.path.expanduser(p)), help='GIA file to read') - #-- use atmospheric jump corrections from Fagiolini et al. (2015) + # use atmospheric jump corrections from Fagiolini et al. 
(2015) parser.add_argument('--atm-correction', default=False, action='store_true', help='Apply atmospheric jump correction coefficients') - #-- input data format (ascii, netCDF4, HDF5) + # input data format (ascii, netCDF4, HDF5) parser.add_argument('--format','-F', type=str, default='netCDF4', choices=['ascii','netCDF4','HDF5'], help='Input data format for auxiliary files') - #-- mascon index file and parameters + # mascon index file and parameters parser.add_argument('--mascon-file', type=lambda p: os.path.abspath(os.path.expanduser(p)), help='Index file of mascons spherical harmonics') parser.add_argument('--redistribute-mascons', default=False, action='store_true', help='Redistribute mascon mass over the ocean') - #-- mascon reconstruct parameters + # mascon reconstruct parameters parser.add_argument('--reconstruct-file', type=lambda p: os.path.abspath(os.path.expanduser(p)), help='Reconstructed mascon time series file') - #-- land-sea mask for redistributing mascon mass + # land-sea mask for redistributing mascon mass lsmask = utilities.get_data_path(['data','landsea_hd.nc']) parser.add_argument('--mask', type=lambda p: os.path.abspath(os.path.expanduser(p)), default=lsmask, help='Land-sea mask for redistributing mascon mass') - #-- print information about processing run + # print information about processing run parser.add_argument('--verbose','-V', action='count', default=0, help='Verbose output of processing run') - #-- permissions mode of the local directories and files (number in octal) + # permissions mode of the local directories and files (number in octal) parser.add_argument('--mode','-M', type=lambda x: int(x,base=8), default=0o775, help='Permissions mode of output files') - #-- return the parser + # return the parser return parser -#-- This is the main part of the program that calls the individual functions +# This is the main part of the program that calls the individual functions def main(): - #-- Read the system arguments listed after the program + # Read the system arguments listed after the program parser = arguments() args,_ = parser.parse_known_args() - #-- create logger + # create logger loglevels = [logging.CRITICAL,logging.INFO,logging.DEBUG] logging.basicConfig(level=loglevels[args.verbose]) - #-- try to run the analysis with listed parameters + # try to run the analysis with listed parameters try: info(args) - #-- run mascon_reconstruct algorithm with parameters + # run mascon_reconstruct algorithm with parameters mascon_reconstruct( args.product, args.lmax, @@ -420,12 +420,12 @@ def main(): OUTPUT_DIRECTORY=args.output_directory, MODE=args.mode) except Exception as e: - #-- if there has been an error exception - #-- print the type, value, and stack trace of the - #-- current exception being handled + # if there has been an error exception + # print the type, value, and stack trace of the + # current exception being handled logging.critical(f'process id {os.getpid():d} failed') logging.error(traceback.format_exc()) -#-- run main program +# run main program if __name__ == '__main__': main() diff --git a/scripts/monte_carlo_degree_one.py b/scripts/monte_carlo_degree_one.py index 21136b1e..b6c6e3db 100644 --- a/scripts/monte_carlo_degree_one.py +++ b/scripts/monte_carlo_degree_one.py @@ -216,7 +216,7 @@ from gravity_toolkit.tssmooth import tssmooth from gravity_toolkit.time import grace_to_calendar -#-- PURPOSE: keep track of threads +# PURPOSE: keep track of threads def info(args): logging.info(os.path.basename(sys.argv[0])) logging.info(args) @@ -225,30 +225,30 
@@ def info(args): logging.info(f'parent process: {os.getppid():d}') logging.info(f'process id: {os.getpid():d}') -#-- PURPOSE: model the seasonal component of an initial degree 1 model -#-- using preliminary estimates of annual and semi-annual variations from LWM -#-- as calculated in Chen et al. (1999), doi:10.1029/1998JB900019 -#-- NOTE: this is to get an accurate assessment of the land water mass for the -#-- eustatic component (not for the ocean component from GRACE) +# PURPOSE: model the seasonal component of an initial degree 1 model +# using preliminary estimates of annual and semi-annual variations from LWM +# as calculated in Chen et al. (1999), doi:10.1029/1998JB900019 +# NOTE: this is to get an accurate assessment of the land water mass for the +# eustatic component (not for the ocean component from GRACE) def model_seasonal_geocenter(grace_date): - #-- Annual amplitudes of (Soil Moisture + Snow) geocenter components (mm) + # Annual amplitudes of (Soil Moisture + Snow) geocenter components (mm) AAx = 1.28 AAy = 0.52 AAz = 3.30 - #-- Annual phase of (Soil Moisture + Snow) geocenter components (degrees) + # Annual phase of (Soil Moisture + Snow) geocenter components (degrees) APx = 44.0 APy = 182.0 APz = 43.0 - #-- Semi-Annual amplitudes of (Soil Moisture + Snow) geocenter components + # Semi-Annual amplitudes of (Soil Moisture + Snow) geocenter components SAAx = 0.15 SAAy = 0.56 SAAz = 0.50 - #-- Semi-Annual phase of (Soil Moisture + Snow) geocenter components + # Semi-Annual phase of (Soil Moisture + Snow) geocenter components SAPx = 331.0 SAPy = 312.0 SAPz = 75.0 - #-- calculate each geocenter component from the amplitude and phase - #-- converting the phase from degrees to radians + # calculate each geocenter component from the amplitude and phase + # converting the phase from degrees to radians X = AAx*np.sin(2.0*np.pi*grace_date + APx*np.pi/180.0) + \ SAAx*np.sin(4.0*np.pi*grace_date + SAPx*np.pi/180.0) Y = AAy*np.sin(2.0*np.pi*grace_date + APy*np.pi/180.0) + \ @@ -258,7 +258,7 @@ def model_seasonal_geocenter(grace_date): DEG1 = geocenter(X=X-X.mean(), Y=Y-Y.mean(), Z=Z-Z.mean()) return DEG1.from_cartesian() -#-- PURPOSE: calculate the satellite error for a geocenter time-series +# PURPOSE: calculate the satellite error for a geocenter time-series def monte_carlo_degree_one(base_dir, PROC, DREL, LMAX, RAD, START=None, END=None, @@ -288,22 +288,22 @@ def monte_carlo_degree_one(base_dir, PROC, DREL, LMAX, RAD, PLOT=False, MODE=0o775): - #-- GRACE/GRACE-FO dataset + # GRACE/GRACE-FO dataset DSET = 'GSM' - #-- do not import degree 1 coefficients + # do not import degree 1 coefficients DEG1 = '' - #-- delta coefficients flag for monte carlo run + # delta coefficients flag for monte carlo run delta_str = '_monte_carlo' - #-- output string for both LMAX==MMAX and LMAX != MMAX cases + # output string for both LMAX==MMAX and LMAX != MMAX cases order_str = f'M{MMAX:d}' if MMAX and (MMAX != LMAX) else '' - #-- atmospheric ECMWF "jump" flag (if ATM) + # atmospheric ECMWF "jump" flag (if ATM) atm_str = '_wATM' if ATM else '' - #-- ocean model string + # ocean model string model_str = 'MPIOM' if (DREL == 'RL06') else 'OMCT' - #-- output flag for using sea level fingerprints + # output flag for using sea level fingerprints slf_str = '_SLF' if FINGERPRINT else '' - #-- output flag for low-degree harmonic replacements + # output flag for low-degree harmonic replacements if SLR_21 in ('CSR','GFZ','GSFC'): C21_str = f'_w{SLR_21}_21' else: @@ -313,7 +313,7 @@ def 
monte_carlo_degree_one(base_dir, PROC, DREL, LMAX, RAD, else: C22_str = '' if SLR_C30 in ('GSFC',): - #-- C30 replacement now default for all solutions + # C30 replacement now default for all solutions C30_str = '' elif SLR_C30 in ('CSR','GFZ','LARES'): C30_str = f'_w{SLR_C30}_C30' @@ -327,123 +327,123 @@ def monte_carlo_degree_one(base_dir, PROC, DREL, LMAX, RAD, C50_str = f'_w{SLR_C50}_C50' else: C50_str = '' - #-- combine satellite laser ranging flags + # combine satellite laser ranging flags slr_str = ''.join([C21_str,C22_str,C30_str,C40_str,C50_str]) - #-- suffix for input ascii, netcdf and HDF5 files + # suffix for input ascii, netcdf and HDF5 files suffix = dict(ascii='txt', netCDF4='nc', HDF5='H5') - #-- output directory + # output directory DIRECTORY = os.path.join(base_dir,'geocenter') - #-- list object of output files for file logs (full path) + # list object of output files for file logs (full path) output_files = [] - #-- read load love numbers + # read load love numbers hl,kl,ll = load_love_numbers(EXPANSION, LOVE_NUMBERS=LOVE_NUMBERS, REFERENCE='CF') - #-- set gravitational load love number to a specific value + # set gravitational load love number to a specific value if LOVE_K1: kl[1] = np.copy(LOVE_K1) - #-- maximum spherical harmonic order + # maximum spherical harmonic order if not MMAX: MMAX = np.copy(LMAX) - #-- Earth Parameters + # Earth Parameters factors = units(lmax=LMAX).harmonic(hl,kl,ll) - rho_e = factors.rho_e#-- Average Density of the Earth [g/cm^3] - rad_e = factors.rad_e#-- Average Radius of the Earth [cm] + rho_e = factors.rho_e# Average Density of the Earth [g/cm^3] + rad_e = factors.rad_e# Average Radius of the Earth [cm] l = factors.l - #-- Factor for converting to Mass SH + # Factor for converting to Mass SH dfactor = factors.cmwe - #-- Read Smoothed Ocean and Land Functions - #-- smoothed functions are from the read_ocean_function.py program - #-- Open the land-sea NetCDF file for reading + # Read Smoothed Ocean and Land Functions + # smoothed functions are from the read_ocean_function.py program + # Open the land-sea NetCDF file for reading landsea = spatial().from_netCDF4(LANDMASK, date=False, varname='LSMASK') - #-- degree spacing and grid dimensions - #-- will create GRACE spatial fields with same dimensions + # degree spacing and grid dimensions + # will create GRACE spatial fields with same dimensions dlon,dlat = landsea.spacing nlat,nlon = landsea.shape - #-- spatial parameters in radians + # spatial parameters in radians dphi = dlon*np.pi/180.0 dth = dlat*np.pi/180.0 - #-- longitude and colatitude in radians + # longitude and colatitude in radians phi = landsea.lon[np.newaxis,:]*np.pi/180.0 th = (90.0 - np.squeeze(landsea.lat))*np.pi/180.0 - #-- create land function + # create land function land_function = np.zeros((nlon,nlat),dtype=np.float64) - #-- extract land function from file - #-- combine land and island levels for land function + # extract land function from file + # combine land and island levels for land function indx,indy = np.nonzero((landsea.data.T >= 1) & (landsea.data.T <= 3)) land_function[indx,indy] = 1.0 - #-- calculate ocean function from land function + # calculate ocean function from land function ocean_function = 1.0 - land_function - #-- Calculating Legendre Polynomials using Holmes and Featherstone relation + # Calculating Legendre Polynomials using Holmes and Featherstone relation PLM, dPLM = plm_holmes(LMAX, np.cos(th)) - #-- calculate spherical harmonics of ocean function to degree 1 - #-- mass is equivalent to 1 cm 
ocean height change + # eustatic ratio = -land total/ocean total ocean_Ylms = gen_stokes(ocean_function, landsea.lon, landsea.lat, UNITS=1, LMIN=0, LMAX=1, LOVE=(hl,kl,ll), PLM=PLM[:2,:2,:]) - #-- Gaussian Smoothing (Jekeli, 1981) + # Gaussian Smoothing (Jekeli, 1981) if (RAD != 0): wt = 2.0*np.pi*gauss_weights(RAD,LMAX) else: - #-- else = 1 + # else = 1 wt = np.ones((LMAX+1)) - #-- reading GRACE months for input date range - #-- replacing low-degree harmonics with SLR values if specified - #-- correcting for Pole-Tide drift if specified - #-- atmospheric jumps will be corrected externally if specified + # reading GRACE months for input date range + # replacing low-degree harmonics with SLR values if specified + # correcting for Pole-Tide drift if specified + # atmospheric jumps will be corrected externally if specified Ylms = grace_input_months(base_dir, PROC, DREL, DSET, LMAX, START, END, MISSING, SLR_C20, DEG1, MMAX=MMAX, SLR_21=SLR_21, SLR_22=SLR_22, SLR_C30=SLR_C30, SLR_C40=SLR_C40, SLR_C50=SLR_C50, POLE_TIDE=POLE_TIDE, ATM=False, MODEL_DEG1=False) - #-- create harmonics object from GRACE/GRACE-FO data + # create harmonics object from GRACE/GRACE-FO data GSM_Ylms = harmonics().from_dict(Ylms) - #-- use a mean file for the static field to remove + # use a mean file for the static field to remove if MEAN_FILE: - #-- read data form for input mean file (ascii, netCDF4, HDF5, gfc) + # read data format for input mean file (ascii, netCDF4, HDF5, gfc) mean_Ylms = harmonics().from_file(MEAN_FILE,format=MEANFORM,date=False) - #-- remove the input mean + # remove the input mean GSM_Ylms.subtract(mean_Ylms) else: GSM_Ylms.mean(apply=True) - #-- filter GRACE/GRACE-FO coefficients + # filter GRACE/GRACE-FO coefficients if DESTRIPE: - #-- destriping GRACE/GRACE-FO coefficients + # destriping GRACE/GRACE-FO coefficients ds_str = '_FL' GSM_Ylms = GSM_Ylms.destripe() else: - #-- using standard GRACE/GRACE-FO harmonics + # using standard GRACE/GRACE-FO harmonics ds_str = '' - #-- full path to directory for specific GRACE/GRACE-FO product + # full path to directory for specific GRACE/GRACE-FO product GSM_Ylms.directory = Ylms['directory'] - #-- GRACE dates + # GRACE dates tdec = np.copy(GSM_Ylms.time) months = np.copy(GSM_Ylms.month) - #-- number of months considered + # number of months considered n_files = len(GSM_Ylms.month) - #-- input GIA spherical harmonic datafiles + # input GIA spherical harmonic datafiles GIA_Ylms_rate = read_GIA_model(GIA_FILE,GIA=GIA,LMAX=LMAX,MMAX=MMAX) gia_str = '_{0}'.format(GIA_Ylms_rate['title']) if GIA else '' - #-- calculate the monthly mass change from GIA + # calculate the monthly mass change from GIA GIA_Ylms = GSM_Ylms.zeros_like() GIA_Ylms.time[:] = np.copy(GSM_Ylms.time) GIA_Ylms.month[:] = np.copy(GSM_Ylms.month) - #-- monthly GIA calculated by gia_rate*time elapsed - #-- finding change in GIA each month + # monthly GIA calculated by gia_rate*time elapsed + # finding change in GIA each month for t in range(n_files): GIA_Ylms.clm[:,:,t] = GIA_Ylms_rate['clm']*(GIA_Ylms.time[t]-2003.3) GIA_Ylms.slm[:,:,t] = GIA_Ylms_rate['slm']*(GIA_Ylms.time[t]-2003.3) - #-- save geocenter coefficients of monthly GIA variability + # save geocenter coefficients of monthly GIA variability gia = geocenter().from_harmonics(GIA_Ylms) - #-- read atmospheric jump corrections from Fagiolini et al. 
(2015) + # read atmospheric jump corrections from Fagiolini et al. (2015) ATM_Ylms = GSM_Ylms.zeros_like() ATM_Ylms.time[:] = np.copy(GSM_Ylms.time) ATM_Ylms.month[:] = np.copy(GSM_Ylms.month) @@ -451,261 +451,261 @@ def monte_carlo_degree_one(base_dir, PROC, DREL, LMAX, RAD, atm_corr = read_ecmwf_corrections(base_dir,LMAX,ATM_Ylms.month) ATM_Ylms.clm[:,:,:] = np.copy(atm_corr['clm']) ATM_Ylms.slm[:,:,:] = np.copy(atm_corr['slm']) - #-- removing the mean of the atmospheric jump correction coefficients + # removing the mean of the atmospheric jump correction coefficients ATM_Ylms.mean(apply=True) - #-- truncate to degree and order LMAX/MMAX + # truncate to degree and order LMAX/MMAX ATM_Ylms = ATM_Ylms.truncate(lmax=LMAX, mmax=MMAX) - #-- save geocenter coefficients of the atmospheric jump corrections + # save geocenter coefficients of the atmospheric jump corrections atm = geocenter().from_harmonics(ATM_Ylms) - #-- input spherical harmonic datafiles to be used in monte carlo + # input spherical harmonic datafiles to be used in monte carlo error_Ylms = [] - #-- for each file to be removed + # for each file to be removed for ERROR_FILE in ERROR_FILES: - #-- file in ascii, netCDF4 or HDF5 formats + # file in ascii, netCDF4 or HDF5 formats Ylms = harmonics().from_file(ERROR_FILE, format=DATAFORM) - #-- truncate to degree and order and append to list + # truncate to degree and order and append to list error_Ylms.append(Ylms.truncate(lmax=LMAX, mmax=MMAX)) - #-- calculating GRACE/GRACE-FO error (Wahr et al. 2006) - #-- output GRACE error file (for both LMAX==MMAX and LMAX != MMAX cases) + # calculating GRACE/GRACE-FO error (Wahr et al. 2006) + # output GRACE error file (for both LMAX==MMAX and LMAX != MMAX cases) args = (PROC,DREL,DSET,LMAX,order_str,ds_str,atm_str,GSM_Ylms.month[0], GSM_Ylms.month[-1], suffix[DATAFORM]) delta_format = '{0}_{1}_{2}_DELTA_CLM_L{3:d}{4}{5}{6}_{7:03d}-{8:03d}.{9}' DELTA_FILE = os.path.join(GSM_Ylms.directory,delta_format.format(*args)) - #-- check full path of the GRACE directory for delta file - #-- if file was previously calculated: will read file - #-- else: will calculate the GRACE/GRACE-FO error + # check full path of the GRACE directory for delta file + # if file was previously calculated: will read file + # else: will calculate the GRACE/GRACE-FO error if not os.access(DELTA_FILE, os.F_OK): - #-- add output delta file to list object + # add output delta file to list object output_files.append(DELTA_FILE) - #-- Delta coefficients of GRACE time series (Error components) + # Delta coefficients of GRACE time series (Error components) delta_Ylms = harmonics(lmax=LMAX,mmax=MMAX) delta_Ylms.clm = np.zeros((LMAX+1,MMAX+1)) delta_Ylms.slm = np.zeros((LMAX+1,MMAX+1)) - #-- Smoothing Half-Width (CNES is a 10-day solution) - #-- All other solutions are monthly solutions (HFWTH for annual = 6) + # Smoothing Half-Width (CNES is a 10-day solution) + # All other solutions are monthly solutions (HFWTH for annual = 6) if ((PROC == 'CNES') and (DREL in ('RL01','RL02'))): HFWTH = 19 else: HFWTH = 6 - #-- Equal to the noise of the smoothed time-series - #-- for each spherical harmonic order - for m in range(0,MMAX+1):#-- MMAX+1 to include MMAX - #-- for each spherical harmonic degree - for l in range(m,LMAX+1):#-- LMAX+1 to include LMAX - #-- Delta coefficients of GRACE time series + # Equal to the noise of the smoothed time-series + # for each spherical harmonic order + for m in range(0,MMAX+1):  # MMAX+1 to include MMAX + # for each spherical harmonic degree + for l in
range(m,LMAX+1):  # LMAX+1 to include LMAX + # Delta coefficients of GRACE time series for cs,csharm in enumerate(['clm','slm']): - #-- calculate GRACE Error (Noise of smoothed time-series) - #-- With Annual and Semi-Annual Terms + # calculate GRACE Error (Noise of smoothed time-series) + # With Annual and Semi-Annual Terms val1 = getattr(GSM_Ylms, csharm) smth = tssmooth(tdec, val1[l,m,:], HFWTH=HFWTH) - #-- number of smoothed points + # number of smoothed points nsmth = len(smth['data']) tsmth = np.mean(smth['time']) - #-- GRACE delta Ylms - #-- variance of data-(smoothed+annual+semi) + # GRACE delta Ylms + # variance of data-(smoothed+annual+semi) val2 = getattr(delta_Ylms, csharm) val2[l,m] = np.sqrt(np.sum(smth['noise']**2)/nsmth) - #-- save GRACE/GRACE-FO delta harmonics to file + # save GRACE/GRACE-FO delta harmonics to file delta_Ylms.time = np.copy(tsmth) delta_Ylms.month = np.int64(nsmth) delta_Ylms.to_file(DELTA_FILE,format=DATAFORM) else: - #-- read GRACE/GRACE-FO delta harmonics from file + # read GRACE/GRACE-FO delta harmonics from file delta_Ylms = harmonics().from_file(DELTA_FILE,format=DATAFORM) - #-- truncate GRACE/GRACE-FO delta clm and slm to d/o LMAX/MMAX + # truncate GRACE/GRACE-FO delta clm and slm to d/o LMAX/MMAX delta_Ylms = delta_Ylms.truncate(lmax=LMAX, mmax=MMAX) tsmth = np.squeeze(delta_Ylms.time) nsmth = np.int64(delta_Ylms.month) - #-- Calculating cos/sin of phi arrays - #-- output [m,phi] + # Calculating cos/sin of phi arrays + # output [m,phi] m = GSM_Ylms.m[:, np.newaxis] - #-- Integration factors (solid angle) + # Integration factors (solid angle) int_fact = np.sin(th)*dphi*dth - #-- Calculating cos(m*phi) and sin(m*phi) + # Calculating cos(m*phi) and sin(m*phi) ccos = np.cos(np.dot(m,phi)) ssin = np.sin(np.dot(m,phi)) - #-- Legendre polynomials for degree 1 + # Legendre polynomials for degree 1 P10 = np.squeeze(PLM[1,0,:]) P11 = np.squeeze(PLM[1,1,:]) - #-- PLM for spherical harmonic degrees 2+ - #-- converted into mass and smoothed if specified + # PLM for spherical harmonic degrees 2+ + # converted into mass and smoothed if specified plmout = np.zeros((LMAX+1,MMAX+1,nlat)) for l in range(1,LMAX+1): m = np.arange(0,np.min([l,MMAX])+1) - #-- convert to smoothed coefficients of mass - #-- Convolving plms with degree dependent factor and smoothing + # convert to smoothed coefficients of mass + # Convolving plms with degree dependent factor and smoothing plmout[l,m,:] = PLM[l,m,:]*dfactor[l]*wt[l] - #-- Initializing 3x3 I-Parameter matrix + # Initializing 3x3 I-Parameter matrix IMAT = np.zeros((3,3)) - #-- Calculating I-Parameter matrix by integrating over latitudes - #-- I-Parameter matrix accounts for the fact that the GRACE data only - #-- includes spherical harmonic degrees greater than or equal to 2 + # Calculating I-Parameter matrix by integrating over latitudes + # I-Parameter matrix accounts for the fact that the GRACE data only + # includes spherical harmonic degrees greater than or equal to 2 for i in range(0,nlat): - #-- C10: C10, C11, S11 (see equations 12 and 13 of Swenson et al., 2008) + # C10: C10, C11, S11 (see equations 12 and 13 of Swenson et al., 2008) IMAT[0,0] += np.sum(int_fact[i]*P10[i]*ccos[0,:]*ocean_function[:,i]*P10[i]*ccos[0,:])/(4.0*np.pi) IMAT[1,0] += np.sum(int_fact[i]*P10[i]*ccos[0,:]*ocean_function[:,i]*P11[i]*ccos[1,:])/(4.0*np.pi) IMAT[2,0] += np.sum(int_fact[i]*P10[i]*ccos[0,:]*ocean_function[:,i]*P11[i]*ssin[1,:])/(4.0*np.pi) - #-- C11: C10, C11, S11 (see equations 12 and 13 of Swenson et al., 2008) + # C11: C10, C11, S11
(see equations 12 and 13 of Swenson et al., 2008) IMAT[0,1] += np.sum(int_fact[i]*P11[i]*ccos[1,:]*ocean_function[:,i]*P10[i]*ccos[0,:])/(4.0*np.pi) IMAT[1,1] += np.sum(int_fact[i]*P11[i]*ccos[1,:]*ocean_function[:,i]*P11[i]*ccos[1,:])/(4.0*np.pi) IMAT[2,1] += np.sum(int_fact[i]*P11[i]*ccos[1,:]*ocean_function[:,i]*P11[i]*ssin[1,:])/(4.0*np.pi) - #-- S11: C10, C11, S11 (see equations 12 and 13 of Swenson et al., 2008) + # S11: C10, C11, S11 (see equations 12 and 13 of Swenson et al., 2008) IMAT[0,2] += np.sum(int_fact[i]*P11[i]*ssin[1,:]*ocean_function[:,i]*P10[i]*ccos[0,:])/(4.0*np.pi) IMAT[1,2] += np.sum(int_fact[i]*P11[i]*ssin[1,:]*ocean_function[:,i]*P11[i]*ccos[1,:])/(4.0*np.pi) IMAT[2,2] += np.sum(int_fact[i]*P11[i]*ssin[1,:]*ocean_function[:,i]*P11[i]*ssin[1,:])/(4.0*np.pi) - #-- get seasonal variations of an initial geocenter correction - #-- for use in the land water mass calculation + # get seasonal variations of an initial geocenter correction + # for use in the land water mass calculation seasonal_geocenter = model_seasonal_geocenter(tdec) - #-- degree 1 iterations for each monte carlo run + # degree 1 iterations for each monte carlo run iteration = geocenter() iteration.C10 = np.zeros((n_files,RUNS)) iteration.C11 = np.zeros((n_files,RUNS)) iteration.S11 = np.zeros((n_files,RUNS)) - #-- for each monte carlo iteration + # for each monte carlo iteration for n_iter in range(0, RUNS): - #-- calculate non-iterated terms for each file (G-matrix parameters) + # calculate non-iterated terms for each file (G-matrix parameters) for t in range(n_files): - #-- calculate uncertainty for time t and each degree/order + # calculate uncertainty for time t and each degree/order Ylms = harmonics(lmax=LMAX, mmax=MMAX) Ylms.clm = (1.0-2.0*np.random.rand(LMAX+1,MMAX+1))*delta_Ylms.clm Ylms.slm = (1.0-2.0*np.random.rand(LMAX+1,MMAX+1))*delta_Ylms.slm - #-- add additional uncertainty terms + # add additional uncertainty terms for eYlms in error_Ylms: Ylms.clm += (1.0-2.0*np.random.rand(LMAX+1,MMAX+1))*eYlms.clm Ylms.slm += (1.0-2.0*np.random.rand(LMAX+1,MMAX+1))*eYlms.slm - #-- Removing monthly GIA signal and atmospheric correction + # Removing monthly GIA signal and atmospheric correction GRACE_Ylms = GSM_Ylms.index(t) GRACE_Ylms.subtract(GIA_Ylms.index(t)) GRACE_Ylms.subtract(ATM_Ylms.index(t)) - #-- G matrix calculates the GRACE ocean mass variations + # G matrix calculates the GRACE ocean mass variations G = geocenter() G.C10 = 0.0 G.C11 = 0.0 G.S11 = 0.0 - #-- calculate non-iterated terms (G-matrix parameters) - #-- calculate geocenter component of ocean mass with GRACE - #-- allocate for product of grace and legendre polynomials + # calculate non-iterated terms (G-matrix parameters) + # calculate geocenter component of ocean mass with GRACE + # allocate for product of grace and legendre polynomials pcos = np.zeros((MMAX+1, nlat))#-[m,lat] psin = np.zeros((MMAX+1, nlat))#-[m,lat] - #-- Summing product of plms and c/slms over all SH degrees >= 2 + # Summing product of plms and c/slms over all SH degrees >= 2 for i in range(0, nlat): l = np.arange(2,LMAX+1) pcos[:,i] = np.sum(plmout[l,:,i]*(GRACE_Ylms.clm[l,:]+Ylms.clm[l,:]), axis=0) psin[:,i] = np.sum(plmout[l,:,i]*(GRACE_Ylms.slm[l,:]+Ylms.slm[l,:]), axis=0) - #-- Multiplying by c/s(phi#m) to get surface density in cmH2Oeq (lon,lat) - #-- ccos/ssin are mXphi, pcos/psin are mXtheta: resultant matrices are phiXtheta - #-- The summation over spherical harmonic order is in this multiplication + # Multiplying by c/s(phi#m) to get surface density in 
cmH2Oeq (lon,lat) + # ccos/ssin are mXphi, pcos/psin are mXtheta: resultant matrices are phiXtheta + # The summation over spherical harmonic order is in this multiplication rmass = np.dot(np.transpose(ccos),pcos) + np.dot(np.transpose(ssin),psin) - #-- calculate G matrix parameters through a summation of each latitude + # calculate G matrix parameters through a summation of each latitude for i in range(0,nlat): - #-- summation of integration factors, Legendre polynomials, - #-- (convolution of order and harmonics) and the ocean mass at t + # summation of integration factors, Legendre polynomials, + # (convolution of order and harmonics) and the ocean mass at t G.C10 += np.sum(int_fact[i]*P10[i]*ccos[0,:]*ocean_function[:,i]*rmass[:,i])/(4.0*np.pi) G.C11 += np.sum(int_fact[i]*P11[i]*ccos[1,:]*ocean_function[:,i]*rmass[:,i])/(4.0*np.pi) G.S11 += np.sum(int_fact[i]*P11[i]*ssin[1,:]*ocean_function[:,i]*rmass[:,i])/(4.0*np.pi) - #-- seasonal component of geocenter variation for land water + # seasonal component of geocenter variation for land water GSM_Ylms.clm[1,0,t] = seasonal_geocenter.C10[t] GSM_Ylms.clm[1,1,t] = seasonal_geocenter.C11[t] GSM_Ylms.slm[1,1,t] = seasonal_geocenter.S11[t] - #-- Removing monthly GIA signal and atmospheric correction + # Removing monthly GIA signal and atmospheric correction GRACE_Ylms = GSM_Ylms.index(t) GRACE_Ylms.subtract(GIA_Ylms.index(t)) GRACE_Ylms.subtract(ATM_Ylms.index(t)) - #-- allocate for product of grace and legendre polynomials + # allocate for product of grace and legendre polynomials pcos = np.zeros((MMAX+1, nlat))#-[m,lat] psin = np.zeros((MMAX+1, nlat))#-[m,lat] - #-- Summing product of plms and c/slms over all SH degrees + # Summing product of plms and c/slms over all SH degrees for i in range(0, nlat): - #-- for land water: use an initial seasonal geocenter estimate - #-- from Chen et al. (1999) + # for land water: use an initial seasonal geocenter estimate + # from Chen et al. (1999) l = np.arange(1,LMAX+1) pcos[:,i] = np.sum(plmout[l,:,i]*(GRACE_Ylms.clm[l,:]+Ylms.clm[l,:]), axis=0) psin[:,i] = np.sum(plmout[l,:,i]*(GRACE_Ylms.slm[l,:]+Ylms.slm[l,:]), axis=0) - #-- Multiplying by c/s(phi#m) to get surface density in cm w.e. (lonxlat) - #-- this will be a spatial field similar to outputs from stokes_combine.py - #-- ccos/ssin are mXphi, pcos/psin are mXtheta: resultant matrices are phiXtheta - #-- The summation over spherical harmonic order is in this multiplication + # Multiplying by c/s(phi#m) to get surface density in cm w.e. (lonxlat) + # this will be a spatial field similar to outputs from stokes_combine.py + # ccos/ssin are mXphi, pcos/psin are mXtheta: resultant matrices are phiXtheta + # The summation over spherical harmonic order is in this multiplication lmass = np.dot(np.transpose(ccos),pcos) + np.dot(np.transpose(ssin),psin) - #-- use sea level fingerprints or eustatic from GRACE land components + # use sea level fingerprints or eustatic from GRACE land components if FINGERPRINT: - #-- calculate total sea level fingerprint for eustatic component - #-- steps to calculate sea level from GRACE land-water change: - #-- 1) calculate total land mass at time t (GRACE*land function) - #-- NOTE: this is an unscaled GRACE estimate that uses the - #-- buffered land function when solving the sea-level equation. 
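The order summation noted in the comments above is worth seeing in isolation. A minimal shape-bookkeeping sketch (the grid and truncation sizes below are hypothetical, standing in for MMAX, nlon and nlat): ccos/ssin are (m, phi) and pcos/psin are (m, theta), so both matrix products come out (phi, theta), a longitude-by-latitude surface density grid.
import numpy as np
# hypothetical truncation and grid dimensions
MMAX, nlon, nlat = 60, 720, 360
ccos = np.zeros((MMAX+1, nlon))  # cos(m*phi), one row per order m
ssin = np.zeros((MMAX+1, nlon))  # sin(m*phi)
pcos = np.zeros((MMAX+1, nlat))  # sum over degree l of plmout*clm
psin = np.zeros((MMAX+1, nlat))  # sum over degree l of plmout*slm
# the summation over spherical harmonic order happens inside these dot products
lmass = np.dot(ccos.T, pcos) + np.dot(ssin.T, psin)
assert lmass.shape == (nlon, nlat)  # longitude-by-latitude mass grid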
- #-- possible improvement using scaled estimate with real coastlines + # calculate total sea level fingerprint for eustatic component + # steps to calculate sea level from GRACE land-water change: + # 1) calculate total land mass at time t (GRACE*land function) + # NOTE: this is an unscaled GRACE estimate that uses the + # buffered land function when solving the sea-level equation. + # possible improvement using scaled estimate with real coastlines land_Ylms = gen_stokes(land_function*lmass, landsea.lon, landsea.lat, UNITS=1, LMIN=0, LMAX=EXPANSION, LOVE=(hl,kl,ll)) - #-- 2) calculate sea level fingerprints of land mass at time t - #-- use maximum of 3 iterations for computational efficiency + # 2) calculate sea level fingerprints of land mass at time t + # use maximum of 3 iterations for computational efficiency sea_level = sea_level_equation(land_Ylms.clm, land_Ylms.slm, landsea.lon, landsea.lat, land_function, LMAX=EXPANSION, LOVE=(hl,kl,ll), BODY_TIDE_LOVE=0, FLUID_LOVE=0, ITERATIONS=3, POLAR=True, FILL_VALUE=0) - #-- 3) convert sea level fingerprints into spherical harmonics + # 3) convert sea level fingerprints into spherical harmonics slf_Ylms = gen_stokes(sea_level, landsea.lon, landsea.lat, UNITS=1, LMIN=0, LMAX=1, PLM=PLM[:2,:2,:], LOVE=(hl,kl,ll)) - #-- 4) convert the slf degree 1 harmonics to mass with dfactor + # 4) convert the slf degree 1 harmonics to mass with dfactor eustatic = geocenter().from_harmonics(slf_Ylms).scale(dfactor[1]) else: - #-- steps to calculate eustatic component from GRACE land-water change: - #-- 1) calculate total mass of 1 cm of ocean height (calculated above) - #-- 2) calculate total land mass at time t (GRACE*land function) - #-- NOTE: possible improvement using the sea-level equation to solve - #-- for the spatial pattern of sea level from the land water mass + # steps to calculate eustatic component from GRACE land-water change: + # 1) calculate total mass of 1 cm of ocean height (calculated above) + # 2) calculate total land mass at time t (GRACE*land function) + # NOTE: possible improvement using the sea-level equation to solve + # for the spatial pattern of sea level from the land water mass land_Ylms = gen_stokes(lmass*land_function, landsea.lon, landsea.lat, UNITS=1, LMIN=0, LMAX=1, PLM=PLM[:2,:2,:], LOVE=(hl,kl,ll)) - #-- 3) calculate ratio between the total land mass and the total mass - #-- of 1 cm of ocean height (negative as positive land = sea level drop) - #-- this converts the total land change to ocean height change + # 3) calculate ratio between the total land mass and the total mass + # of 1 cm of ocean height (negative as positive land = sea level drop) + # this converts the total land change to ocean height change eustatic_ratio = -land_Ylms.clm[0,0]/ocean_Ylms.clm[0,0] - #-- 4) scale degree one coefficients of ocean function with ratio - #-- and convert the eustatic degree 1 harmonics to mass with dfactor + # 4) scale degree one coefficients of ocean function with ratio + # and convert the eustatic degree 1 harmonics to mass with dfactor scale_factor = eustatic_ratio*dfactor[1] eustatic = geocenter().from_harmonics(ocean_Ylms).scale(scale_factor) - #-- eustatic coefficients of degree 1 + # eustatic coefficients of degree 1 CMAT = np.array([eustatic.C10,eustatic.C11,eustatic.S11]) - #-- G Matrix for time t + # G Matrix for time t GMAT = np.array([G.C10, G.C11, G.S11]) - #-- calculate inversion for degree 1 solutions - #-- this is mathematically equivalent to an iterative procedure - #-- whereby the initial degree one 
coefficients are used to update - #-- the G Matrix until (C10, C11, S11) converge + # calculate inversion for degree 1 solutions + # this is mathematically equivalent to an iterative procedure + # whereby the initial degree one coefficients are used to update + # the G Matrix until (C10, C11, S11) converge DMAT = np.dot(np.linalg.inv(IMAT), (CMAT-GMAT)) - #-- could also use pseudo-inverse in least-squares + # could also use pseudo-inverse in least-squares #DMAT = np.linalg.lstsq(IMAT,(CMAT-GMAT),rcond=-1)[0] - #-- save geocenter for iteration and time t after restoring GIA+ATM + # save geocenter for iteration and time t after restoring GIA+ATM iteration.C10[t,n_iter] = DMAT[0]+gia.C10[t]+atm.C10[t] iteration.C11[t,n_iter] = DMAT[1]+gia.C11[t]+atm.C11[t] iteration.S11[t,n_iter] = DMAT[2]+gia.S11[t]+atm.S11[t] - #-- remove mean of each solution for iteration + # remove mean of each solution for iteration iteration.C10[:,n_iter] -= iteration.C10[:,n_iter].mean() iteration.C11[:,n_iter] -= iteration.C11[:,n_iter].mean() iteration.S11[:,n_iter] -= iteration.S11[:,n_iter].mean() - #-- calculate mean degree one time series through all iterations + # calculate mean degree one time series through all iterations MEAN = geocenter() MEAN.C10 = np.mean(iteration.C10,axis=1) MEAN.C11 = np.mean(iteration.C11,axis=1) MEAN.S11 = np.mean(iteration.S11,axis=1) - #-- calculate RMS off of mean time series + # calculate RMS off of mean time series RMS = geocenter() RMS.C10 = np.zeros((n_files)) RMS.C11 = np.zeros((n_files)) @@ -715,47 +715,47 @@ def monte_carlo_degree_one(base_dir, PROC, DREL, LMAX, RAD, RMS.C11[t] = np.sqrt(np.sum((iteration.C11[t,:]-MEAN.C11[t])**2)/RUNS) RMS.S11[t] = np.sqrt(np.sum((iteration.S11[t,:]-MEAN.S11[t])**2)/RUNS) - #-- Convert inverted solutions into fully normalized spherical harmonics - #-- for each of the geocenter solutions (C10, C11, S11) + # Convert inverted solutions into fully normalized spherical harmonics + # for each of the geocenter solutions (C10, C11, S11) DEG1 = MEAN.scale(1.0/dfactor[1]) - #-- convert estimated monte carlo errors into fully normalized harmonics + # convert estimated monte carlo errors into fully normalized harmonics ERROR = RMS.scale(1.0/dfactor[1]) - #-- output degree 1 coefficients + # output degree 1 coefficients file_format = '{0}_{1}_{2}{3}{4}{5}{6}{7}.{8}' output_format = ('{0:11.4f}{1:14.6e}{2:14.6e}{3:14.6e}' '{4:14.6e}{5:14.6e}{6:14.6e} {7:03d}\n') - #-- public file format in fully normalized spherical harmonics - #-- local version with all descriptor flags + # public file format in fully normalized spherical harmonics + # local version with all descriptor flags a1=(PROC,DREL,model_str,slf_str,'',gia_str,delta_str,ds_str,'txt') FILE1=os.path.join(DIRECTORY,file_format.format(*a1)) fid1 = open(FILE1,'w') - #-- print headers for cases with and without dealiasing + # print headers for cases with and without dealiasing print_header(fid1) print_harmonic(fid1,kl[1]) print_global(fid1,PROC,DREL,model_str.replace('_',' '),GIA_Ylms_rate, SLR_C20,SLR_21,months) print_variables(fid1,'single precision','fully normalized') - #-- for each GRACE/GRACE-FO month + # for each GRACE/GRACE-FO month for t,mon in enumerate(months): - #-- output geocenter coefficients to file + # output geocenter coefficients to file fid1.write(output_format.format(tdec[t], DEG1.C10[t],DEG1.C11[t],DEG1.S11[t], ERROR.C10[t],ERROR.C11[t],ERROR.S11[t],mon)) - #-- close the output file + # close the output file fid1.close() - #-- set the permissions mode of the output file + # set 
the permissions mode of the output file os.chmod(FILE1, MODE) output_files.append(FILE1) - #-- output all degree 1 coefficients as a netCDF4 file + # output all degree 1 coefficients as a netCDF4 file a2=(PROC,DREL,model_str,slf_str,'',gia_str,delta_str,ds_str,'nc') FILE2 = os.path.join(DIRECTORY,file_format.format(*a2)) fileID = netCDF4.Dataset(FILE2,'w',format="NETCDF4") - #-- Defining the NetCDF4 dimensions + # Defining the NetCDF4 dimensions fileID.createDimension('run', RUNS) fileID.createDimension('time', n_files) - #-- defining the NetCDF4 variables + # defining the NetCDF4 variables nc = {} nc['time'] = fileID.createVariable('time',tdec.dtype,('time',)) nc['month'] = fileID.createVariable('month',months.dtype,('time',)) @@ -765,13 +765,13 @@ def monte_carlo_degree_one(base_dir, PROC, DREL, LMAX, RAD, ('time','run',), zlib=True) nc['S11'] = fileID.createVariable('S11',iteration.S11.dtype, ('time','run',), zlib=True) - #-- filling NetCDF4 variables + # filling NetCDF4 variables nc['time'][:] = tdec[:].copy() nc['month'][:] = months[:].copy() nc['C10'][:] = iteration.C10[:,:]/dfactor[1] nc['C11'][:] = iteration.C11[:,:]/dfactor[1] nc['S11'][:] = iteration.S11[:,:]/dfactor[1] - #-- defining the NetCDF4 attributes + # defining the NetCDF4 attributes nc['time'].units = 'years' nc['time'].long_name = 'Date_in_Decimal_Years' nc['month'].long_name = 'GRACE_month' @@ -783,32 +783,32 @@ def monte_carlo_degree_one(base_dir, PROC, DREL, LMAX, RAD, nc['C11'].long_name = 'cosine_spherical_harmonic_of_degree_1,_order_1' nc['S11'].units = 'fully_normalized' nc['S11'].long_name = 'sine_spherical_harmonic_of_degree_1,_order_1' - #-- define global attributes + # define global attributes fileID.date_created = time.strftime('%Y-%m-%d',time.localtime()) - #-- close the output file + # close the output file fileID.close() - #-- set the permissions mode of the output file + # set the permissions mode of the output file os.chmod(FILE2, MODE) output_files.append(FILE2) - #-- create plot showing monte carlo iterations + # create plot showing monte carlo iterations if PLOT: - #-- 3 row plot (C10, C11 and S11) + # 3 row plot (C10, C11 and S11) ax = {} fig,(ax[0],ax[1],ax[2])=plt.subplots(nrows=3,sharex=True,figsize=(6,9)) - #-- show solutions for each iteration + # show solutions for each iteration plot_colors = iter(cm.rainbow(np.linspace(0,1,RUNS))) for j in range(n_iter): color_j = next(plot_colors) - #-- C10, C11 and S11 + # C10, C11 and S11 ax[0].plot(months,10.0*iteration.C10[:,j],color=color_j) ax[1].plot(months,10.0*iteration.C11[:,j],color=color_j) ax[2].plot(months,10.0*iteration.S11[:,j],color=color_j) - #-- mean C10, C11 and S11 + # mean C10, C11 and S11 ax[0].plot(months,10.0*MEAN.C10,color='k',lw=1.5) ax[1].plot(months,10.0*MEAN.C11,color='k',lw=1.5) ax[2].plot(months,10.0*MEAN.S11,color='k',lw=1.5) - #-- labels and set limits + # labels and set limits ax[0].set_ylabel('mm', fontsize=14) ax[1].set_ylabel('mm', fontsize=14) ax[2].set_ylabel('mm', fontsize=14) @@ -816,57 +816,57 @@ def monte_carlo_degree_one(base_dir, PROC, DREL, LMAX, RAD, ax[2].set_xlim(np.floor(months[0]/10.)*10.,np.ceil(months[-1]/10.)*10.) 
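A quick read-back check of the ensemble file written above (a minimal sketch: the filename below is hypothetical, standing in for whatever FILE2 resolved to; the variable names and (time, run) dimensions follow the createVariable calls above). Recomputing the spread about the ensemble mean should reproduce the RMS values derived earlier, since those divide the summed squared deviations by RUNS.
import netCDF4
import numpy as np
# hypothetical path to the output ensemble file
with netCDF4.Dataset('geocenter_monte_carlo.nc', 'r') as fileID:
    C10 = fileID.variables['C10'][:]  # dimensions (time, run)
# ensemble mean and RMS about the mean for each GRACE month
mean_C10 = C10.mean(axis=1)
rms_C10 = np.sqrt(((C10 - mean_C10[:, None])**2).mean(axis=1))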
ax[2].xaxis.set_minor_locator(MultipleLocator(5)) ax[2].xaxis.get_major_formatter().set_useOffset(False) - #-- add axis labels and adjust font sizes for axis ticks + # add axis labels and adjust font sizes for axis ticks for i,lbl in enumerate(['C10','C11','S11']): - #-- axis label + # axis label ax[i].add_artist(AnchoredText(lbl, pad=0.0, frameon=False, loc=2, prop=dict(size=16,weight='bold'))) - #-- axes tick adjustments + # axes tick adjustments for tick in ax[i].xaxis.get_major_ticks(): tick.label.set_fontsize(14) for tick in ax[i].yaxis.get_major_ticks(): tick.label.set_fontsize(14) - #-- adjust ticks + # adjust ticks ax[i].get_xaxis().set_tick_params(which='both', direction='in') ax[i].get_yaxis().set_tick_params(which='both', direction='in') - #-- adjust locations of subplots and save to file + # adjust locations of subplots and save to file fig.subplots_adjust(left=0.12,right=0.94,bottom=0.06,top=0.98,hspace=0.1) args = (PROC,DREL,model_str,ds_str) FILE = 'Geocenter_Monte_Carlo_{0}_{1}_{2}{3}.pdf'.format(*args) plt.savefig(os.path.join(DIRECTORY,FILE), format='pdf') plt.clf() - #-- set the permissions mode of the output files + # set the permissions mode of the output files os.chmod(os.path.join(DIRECTORY,FILE), MODE) output_files.append(os.path.join(DIRECTORY,FILE)) - #-- return the list of output files + # return the list of output files return output_files -#-- PURPOSE: print YAML header to top of file +# PURPOSE: print YAML header to top of file def print_header(fid): - #-- print header + # print header fid.write('{0}:\n'.format('header')) - #-- data dimensions + # data dimensions fid.write(' {0}:\n'.format('dimensions')) fid.write(' {0:22}: {1:d}\n'.format('degree',1)) fid.write(' {0:22}: {1:d}\n'.format('order',1)) fid.write('\n') -#-- PURPOSE: print spherical harmonic attributes to YAML header +# PURPOSE: print spherical harmonic attributes to YAML header def print_harmonic(fid,kl): - #-- non-standard attributes + # non-standard attributes fid.write(' {0}:\n'.format('non-standard_attributes')) - #-- load love number + # load love number fid.write(' {0:22}:\n'.format('love_number')) long_name = 'Gravitational Load Love Number of Degree 1 (k1)' fid.write(' {0:20}: {1}\n'.format('long_name',long_name)) fid.write(' {0:20}: {1:0.3f}\n'.format('value',kl)) - #-- data format + # data format data_format = '(f11.4,3e14.6,i4)' fid.write(' {0:22}: {1}\n'.format('formatting_string',data_format)) fid.write('\n') -#-- PURPOSE: print global attributes to YAML header +# PURPOSE: print global attributes to YAML header def print_global(fid,PROC,DREL,MODEL,GIA,SLR,S21,month): fid.write(' {0}:\n'.format('global_attributes')) MISSION = dict(RL05='GRACE',RL06='GRACE/GRACE-FO') @@ -880,7 +880,7 @@ def print_global(fid,PROC,DREL,MODEL,GIA,SLR,S21,month): 'processes. In addition, the coefficients represent the ' 'atmospheric and oceanic processes not captured in the {0} {1} ' 'de-aliasing product.').format(MISSION[DREL],DREL)) - #-- get GIA parameters + # get GIA parameters summary.append((' Glacial Isostatic Adjustment (GIA) estimates from ' '{0} have been restored.').format(GIA['citation'])) if (DREL == 'RL05'): @@ -928,7 +928,7 @@ def print_global(fid,PROC,DREL,MODEL,GIA,SLR,S21,month): fid.write(' {0:22}: {1}\n'.format('product_version',PRODUCT_VERSION)) fid.write(' {0:22}:\n'.format('references')) reference = [] - #-- geocenter citations + # geocenter citations reference.append(('T. C. Sutterley, and I. 
Velicogna, "Improved estimates ' 'of geocenter variability from time-variable gravity and ocean model ' 'outputs", Remote Sensing, 11(18), 2108, (2019). ' @@ -937,16 +937,16 @@ def print_global(fid,PROC,DREL,MODEL,GIA,SLR,S21,month): 'geocenter variations from a combination of GRACE and ocean model ' 'output", Journal of Geophysical Research - Solid Earth, 113(B08410), ' '(2008). https://doi.org/10.1029/2007JB005338')) - #-- GIA citation + # GIA citation reference.append(GIA['reference']) - #-- ECMWF jump corrections citation + # ECMWF jump corrections citation if (DREL == 'RL05'): reference.append(('E. Fagiolini, F. Flechtner, M. Horwath, H. Dobslaw, ' '''"Correction of inconsistencies in ECMWF's operational ''' '''analysis data during de-aliasing of GRACE gravity models", ''' 'Geophysical Journal International, 202(3), 2150, (2015). ' 'https://doi.org/10.1093/gji/ggv276')) - #-- SLR citation for a given solution + # SLR citation for a given solution if (SLR == 'CSR'): reference.append(('M. Cheng, B. D. Tapley, and J. C. Ries, ' '''"Deceleration in the Earth's oblateness", Journal of ''' @@ -977,7 +977,7 @@ def print_global(fid,PROC,DREL,MODEL,GIA,SLR,S21,month): 'GRACE/GRACE-FO Geopotential GSM Coefficients GFZ RL06 ' '(Level-2B Product)." V. 0002. GFZ Data Services, (2019). ' 'http://doi.org/10.5880/GFZ.GRAVIS_06_L2B')) - #-- print list of references + # print list of references for ref in reference: fid.write(' - {0}\n'.format(ref)) creators = 'Tyler C. Sutterley and Isabella Velicogna' @@ -989,7 +989,7 @@ def print_global(fid,PROC,DREL,MODEL,GIA,SLR,S21,month): fid.write(' {0:22}: {1}\n'.format('creator_type', 'group')) inst = 'University of Washington; University of California, Irvine' fid.write(' {0:22}: {1}\n'.format('creator_institution',inst)) - #-- date range and date created + # date range and date created calendar_year,calendar_month = grace_to_calendar(month) start_time = '{0:4.0f}-{1:02.0f}'.format(calendar_year[0],calendar_month[0]) fid.write(' {0:22}: {1}\n'.format('time_coverage_start', start_time)) @@ -999,60 +999,60 @@ def print_global(fid,PROC,DREL,MODEL,GIA,SLR,S21,month): fid.write(' {0:22}: {1}\n'.format('date_created', today)) fid.write('\n') -#-- PURPOSE: print variable descriptions to YAML header +# PURPOSE: print variable descriptions to YAML header def print_variables(fid,data_precision,data_units): - #-- variables + # variables fid.write(' {0}:\n'.format('variables')) - #-- time + # time fid.write(' {0:22}:\n'.format('mid-epoch_time')) long_name = 'mid-date of each measurement epoch' fid.write(' {0:20}: {1}\n'.format('long_name', long_name)) fid.write(' {0:20}: {1}\n'.format('data_type', 'single precision')) fid.write(' {0:20}: {1}\n'.format('units', 'decimal-years')) fid.write(' {0:20}: {1}\n'.format('comment', '1st column')) - #-- C10 + # C10 fid.write(' {0:22}:\n'.format('C10')) long_name = 'C10 coefficient; cosine coefficient for degree 1 and order 0' fid.write(' {0:20}: {1}\n'.format('long_name', long_name)) fid.write(' {0:20}: {1}\n'.format('data_type', data_precision)) fid.write(' {0:20}: {1}\n'.format('units', data_units)) fid.write(' {0:20}: {1}\n'.format('comment', '2nd column')) - #-- C11 + # C11 fid.write(' {0:22}:\n'.format('C11')) long_name = 'C11 coefficient; cosine coefficient for degree 1 and order 1' fid.write(' {0:20}: {1}\n'.format('long_name', long_name)) fid.write(' {0:20}: {1}\n'.format('data_type', data_precision)) fid.write(' {0:20}: {1}\n'.format('units', data_units)) fid.write(' {0:20}: {1}\n'.format('comment', '3rd column')) - #-- 
S11 + # S11 fid.write(' {0:22}:\n'.format('S11')) long_name = 'S11 coefficient; sine coefficient for degree 1 and order 1' fid.write(' {0:20}: {1}\n'.format('long_name', long_name)) fid.write(' {0:20}: {1}\n'.format('data_type', data_precision)) fid.write(' {0:20}: {1}\n'.format('units', data_units)) fid.write(' {0:20}: {1}\n'.format('comment', '4th column')) - #-- eC10 + # eC10 fid.write(' {0:22}:\n'.format('eC10')) long_name = 'eC10 uncertainty; cosine coefficient for degree 1 and order 0' fid.write(' {0:20}: {1}\n'.format('long_name', long_name)) fid.write(' {0:20}: {1}\n'.format('data_type', data_precision)) fid.write(' {0:20}: {1}\n'.format('units', data_units)) fid.write(' {0:20}: {1}\n'.format('comment', '5th column')) - #-- eC11 + # eC11 fid.write(' {0:22}:\n'.format('eC11')) long_name = 'eC11 uncertainty; cosine coefficient for degree 1 and order 1' fid.write(' {0:20}: {1}\n'.format('long_name', long_name)) fid.write(' {0:20}: {1}\n'.format('data_type', data_precision)) fid.write(' {0:20}: {1}\n'.format('units', data_units)) fid.write(' {0:20}: {1}\n'.format('comment', '6th column')) - #-- eS11 + # eS11 fid.write(' {0:22}:\n'.format('eS11')) long_name = 'eS11 uncertainty; sine coefficient for degree 1 and order 1' fid.write(' {0:20}: {1}\n'.format('long_name', long_name)) fid.write(' {0:20}: {1}\n'.format('data_type', data_precision)) fid.write(' {0:20}: {1}\n'.format('units', data_units)) fid.write(' {0:20}: {1}\n'.format('comment', '7th column')) - #-- GRACE month + # GRACE month fid.write(' {0:22}:\n'.format('month')) long_name = 'GRACE month of each measurement epoch' fid.write(' {0:20}: {1}\n'.format('long_name', long_name)) @@ -1061,51 +1061,51 @@ def print_variables(fid,data_precision,data_units): fid.write(' {0:20}: {1}\n'.format('data_type', 'integer')) fid.write(' {0:20}: {1}\n'.format('units', 'month')) fid.write(' {0:20}: {1}\n'.format('comment', '8th column')) - #-- end of header + # end of header fid.write('\n\n# End of YAML header\n') -#-- PURPOSE: print a file log for the GRACE degree one analysis +# PURPOSE: print a file log for the GRACE degree one analysis def output_log_file(arguments,output_files): - #-- format: monte_carlo_degree_one_run_2002-04-01_PID-70335.log + # format: monte_carlo_degree_one_run_2002-04-01_PID-70335.log args = (time.strftime('%Y-%m-%d',time.localtime()), os.getpid()) LOGFILE = 'monte_carlo_degree_one_run_{0}_PID-{1:d}.log'.format(*args) DIRECTORY = os.path.join(arguments.directory,'geocenter') - #-- create a unique log and open the log file + # create a unique log and open the log file fid = utilities.create_unique_file(os.path.join(DIRECTORY,LOGFILE)) logging.basicConfig(stream=fid, level=logging.INFO) - #-- print argument values sorted alphabetically + # print argument values sorted alphabetically logging.info('ARGUMENTS:') for arg, value in sorted(vars(arguments).items()): logging.info('{0}: {1}'.format(arg, value)) - #-- print number of monte carlo iterations used in calculation + # print number of monte carlo iterations used in calculation logging.info('\n\nNUMBER OF ITERATIONS: {0:d}'.format(arguments.runs)) - #-- print output files + # print output files logging.info('\n\nOUTPUT FILES:') for f in output_files: logging.info('{0}'.format(f)) - #-- close the log file + # close the log file fid.close() -#-- PURPOSE: print a error file log for the GRACE degree one analysis +# PURPOSE: print an error file log for the GRACE degree one analysis def output_error_log_file(arguments): - #-- format:
monte_carlo_degree_one_failed_run_2002-04-01_PID-70335.log + # format: monte_carlo_degree_one_failed_run_2002-04-01_PID-70335.log args = (time.strftime('%Y-%m-%d',time.localtime()), os.getpid()) LOGFILE = 'monte_carlo_degree_one_failed_run_{0}_PID-{1:d}.log'.format(*args) DIRECTORY = os.path.join(arguments.directory,'geocenter') - #-- create a unique log and open the log file + # create a unique log and open the log file fid = utilities.create_unique_file(os.path.join(DIRECTORY,LOGFILE)) logging.basicConfig(stream=fid, level=logging.INFO) - #-- print argument values sorted alphabetically + # print argument values sorted alphabetically logging.info('ARGUMENTS:') for arg, value in sorted(vars(arguments).items()): logging.info('{0}: {1}'.format(arg, value)) - #-- print traceback error + # print traceback error logging.info('\n\nTRACEBACK ERROR:') traceback.print_exc(file=fid) - #-- close the log file + # close the log file fid.close() -#-- PURPOSE: create argument parser +# PURPOSE: create argument parser def arguments(): parser = argparse.ArgumentParser( description="""Calculates degree 1 errors using GRACE/GRACE-FO @@ -1115,28 +1115,28 @@ def arguments(): fromfile_prefix_chars="@" ) parser.convert_arg_line_to_args = utilities.convert_arg_line_to_args - #-- command line parameters - #-- working data directory + # command line parameters + # working data directory parser.add_argument('--directory','-D', type=lambda p: os.path.abspath(os.path.expanduser(p)), default=os.getcwd(), help='Working data directory') - #-- GRACE/GRACE-FO data processing center + # GRACE/GRACE-FO data processing center parser.add_argument('--center','-c', metavar='PROC', type=str, required=True, help='GRACE/GRACE-FO Processing Center') - #-- GRACE/GRACE-FO data release + # GRACE/GRACE-FO data release parser.add_argument('--release','-r', metavar='DREL', type=str, default='RL06', help='GRACE/GRACE-FO Data Release') - #-- maximum spherical harmonic degree and order + # maximum spherical harmonic degree and order parser.add_argument('--lmax','-l', type=int, default=60, help='Maximum spherical harmonic degree') parser.add_argument('--mmax','-m', type=int, default=None, help='Maximum spherical harmonic order') - #-- start and end GRACE/GRACE-FO months + # start and end GRACE/GRACE-FO months parser.add_argument('--start','-S', type=int, default=4, help='Starting GRACE/GRACE-FO month') @@ -1148,29 +1148,29 @@ def arguments(): parser.add_argument('--missing','-N', metavar='MISSING', type=int, nargs='+', default=MISSING, help='Missing GRACE/GRACE-FO months') - #-- number of monte carlo iterations + # number of monte carlo iterations parser.add_argument('--runs', type=int, default=10000, help='Number of Monte Carlo iterations') - #-- different treatments of the load Love numbers - #-- 0: Han and Wahr (1995) values from PREM - #-- 1: Gegout (2005) values from PREM - #-- 2: Wang et al. (2012) values from PREM + # different treatments of the load Love numbers + # 0: Han and Wahr (1995) values from PREM + # 1: Gegout (2005) values from PREM + # 2: Wang et al. 
(2012) values from PREM parser.add_argument('--love','-n', type=int, default=0, choices=[0,1,2], help='Treatment of the Load Love numbers') parser.add_argument('--kl','-k', type=float, default=0.021, help='Degree 1 gravitational Load Love number') - #-- Gaussian smoothing radius (km) + # Gaussian smoothing radius (km) parser.add_argument('--radius','-R', type=float, default=0, help='Gaussian smoothing radius (km)') - #-- Use a decorrelation (destriping) filter + # Use a decorrelation (destriping) filter parser.add_argument('--destripe','-d', default=False, action='store_true', help='Use decorrelation (destriping) filter') - #-- GIA model type list + # GIA model type list models = {} models['IJ05-R2'] = 'Ivins R2 GIA Models' models['W12a'] = 'Whitehouse GIA Models' @@ -1184,23 +1184,23 @@ def arguments(): models['ascii'] = 'reformatted GIA in ascii format' models['netCDF4'] = 'reformatted GIA in netCDF4 format' models['HDF5'] = 'reformatted GIA in HDF5 format' - #-- GIA model type + # GIA model type parser.add_argument('--gia','-G', type=str, metavar='GIA', choices=models.keys(), help='GIA model type to read') - #-- full path to GIA file + # full path to GIA file parser.add_argument('--gia-file', type=lambda p: os.path.abspath(os.path.expanduser(p)), help='GIA file to read') - #-- use atmospheric jump corrections from Fagiolini et al. (2015) + # use atmospheric jump corrections from Fagiolini et al. (2015) parser.add_argument('--atm-correction', default=False, action='store_true', help='Apply atmospheric jump correction coefficients') - #-- correct for pole tide drift follow Wahr et al. (2015) + # correct for pole tide drift following Wahr et al. (2015) parser.add_argument('--pole-tide', default=False, action='store_true', help='Correct for pole tide drift') - #-- replace low degree harmonics with values from Satellite Laser Ranging + # replace low degree harmonics with values from Satellite Laser Ranging parser.add_argument('--slr-c20', type=str, default=None, choices=['CSR','GFZ','GSFC'], help='Replace C20 coefficients with SLR values') @@ -1219,71 +1219,71 @@ def arguments(): parser.add_argument('--slr-c50', type=str, default=None, choices=['CSR','GSFC','LARES'], help='Replace C50 coefficients with SLR values') - #-- input data format (ascii, netCDF4, HDF5) + # input data format (ascii, netCDF4, HDF5) parser.add_argument('--format','-F', type=str, default='netCDF4', choices=['ascii','netCDF4','HDF5'], help='Input/Output data format for delta harmonics file') - #-- mean file to remove + # mean file to remove parser.add_argument('--mean-file', type=lambda p: os.path.abspath(os.path.expanduser(p)), help='GRACE/GRACE-FO mean file to remove from the harmonic data') - #-- input data format (ascii, netCDF4, HDF5) + # input data format (ascii, netCDF4, HDF5) parser.add_argument('--mean-format', type=str, default='netCDF4', choices=['ascii','netCDF4','HDF5','gfc'], help='Input data format for GRACE/GRACE-FO mean file') - #-- additional error files to be used in the monte carlo run + # additional error files to be used in the monte carlo run parser.add_argument('--error-file', type=lambda p: os.path.abspath(os.path.expanduser(p)), nargs='+', default=[], help='Additional error files to use in Monte Carlo analysis') - #-- run with sea level fingerprints + # run with sea level fingerprints parser.add_argument('--fingerprint', default=False, action='store_true', help='Redistribute land-water flux using sea level fingerprints') parser.add_argument('--expansion','-e', type=int, default=240, help='Spherical
harmonic expansion for sea level fingerprints') - #-- land-sea mask for calculating ocean mass and land water flux + # land-sea mask for calculating ocean mass and land water flux land_mask_file = utilities.get_data_path(['data','land_fcn_300km.nc']) parser.add_argument('--mask', type=lambda p: os.path.abspath(os.path.expanduser(p)), default=land_mask_file, help='Land-sea mask for calculating ocean mass and land water flux') - #-- create output plots + # create output plots parser.add_argument('--plot','-p', default=False, action='store_true', help='Create output plots for Monte Carlo iterations') - #-- Output log file for each job in forms - #-- monte_carlo_degree_one_run_2002-04-01_PID-00000.log - #-- monte_carlo_degree_one_failed_run_2002-04-01_PID-00000.log + # Output log file for each job in forms + # monte_carlo_degree_one_run_2002-04-01_PID-00000.log + # monte_carlo_degree_one_failed_run_2002-04-01_PID-00000.log parser.add_argument('--log', default=False, action='store_true', help='Output log file for each job') - #-- print information about processing run + # print information about processing run parser.add_argument('--verbose','-V', action='count', default=0, help='Verbose output of processing run') - #-- permissions mode of the local directories and files (number in octal) + # permissions mode of the local directories and files (number in octal) parser.add_argument('--mode','-M', type=lambda x: int(x,base=8), default=0o775, help='Permissions mode of output files') - #-- return the parser + # return the parser return parser -#-- This is the main part of the program that calls the individual functions +# This is the main part of the program that calls the individual functions def main(): - #-- Read the system arguments listed after the program + # Read the system arguments listed after the program parser = arguments() args,_ = parser.parse_known_args() - #-- create logger + # create logger loglevels = [logging.CRITICAL,logging.INFO,logging.DEBUG] logging.basicConfig(level=loglevels[args.verbose]) - #-- try to run the analysis with listed parameters + # try to run the analysis with listed parameters try: info(args) - #-- run monte_carlo_degree_one algorithm with parameters + # run monte_carlo_degree_one algorithm with parameters output_files = monte_carlo_degree_one( args.directory, args.center, @@ -1318,17 +1318,17 @@ def main(): PLOT=args.plot, MODE=args.mode) except Exception as e: - #-- if there has been an error exception - #-- print the type, value, and stack trace of the - #-- current exception being handled + # if there has been an error exception + # print the type, value, and stack trace of the + # current exception being handled logging.critical(f'process id {os.getpid():d} failed') logging.error(traceback.format_exc()) - if args.log:#-- write failed job completion log file + if args.log:  # write failed job completion log file output_error_log_file(args) else: - if args.log:#-- write successful job completion log file + if args.log:  # write successful job completion log file output_log_file(args,output_files) -#-- run main program +# run main program if __name__ == '__main__': main() diff --git a/scripts/podaac_cumulus.py b/scripts/podaac_cumulus.py index 93feaecf..d01f618c 100644 --- a/scripts/podaac_cumulus.py +++ b/scripts/podaac_cumulus.py @@ -74,25 +74,25 @@ import gravity_toolkit.time import gravity_toolkit.utilities -#-- PURPOSE: sync local GRACE/GRACE-FO files with JPL PO.DAAC AWS S3 bucket +# PURPOSE: sync local GRACE/GRACE-FO files with JPL PO.DAAC AWS S3 bucket
def podaac_cumulus(client, DIRECTORY, PROC=[], DREL=[], VERSION=[], AOD1B=False, ENDPOINT='s3', TIMEOUT=None, GZIP=False, LOG=False, CLOBBER=False, MODE=None): - #-- check if directory exists and recursively create if not + # check if directory exists and recursively create if not os.makedirs(DIRECTORY,MODE) if not os.path.exists(DIRECTORY) else None - #-- mission shortnames + # mission shortnames shortname = {'grace':'GRAC', 'grace-fo':'GRFO'} - #-- datasets for each processing center + # datasets for each processing center DSET = {} DSET['CSR'] = ['GAC', 'GAD', 'GSM'] DSET['GFZ'] = ['GAA', 'GAB', 'GAC', 'GAD', 'GSM'] DSET['JPL'] = ['GAA', 'GAB', 'GAC', 'GAD', 'GSM'] - #-- create log file with list of synchronized files (or print to terminal) + # create log file with list of synchronized files (or print to terminal) if LOG: - #-- format: PODAAC_sync_2002-04-01.log + # format: PODAAC_sync_2002-04-01.log today = time.strftime('%Y-%m-%d', time.localtime()) LOGFILE = f'PODAAC_sync_{today}.log' logging.basicConfig(filename=os.path.join(DIRECTORY,LOGFILE), @@ -101,38 +101,38 @@ def podaac_cumulus(client, DIRECTORY, PROC=[], DREL=[], VERSION=[], logging.info('CENTERS={0}'.format(','.join(PROC))) logging.info('RELEASES={0}'.format(','.join(DREL))) else: - #-- standard output (terminal output) + # standard output (terminal output) logging.basicConfig(level=logging.INFO) - #-- Degree 1 (geocenter) coefficients + # Degree 1 (geocenter) coefficients logging.info('Degree 1 Coefficients:') - #-- SLR C2,0 and C3,0 coefficients + # SLR C2,0 and C3,0 coefficients logging.info('C2,0 and C3,0 Coefficients:') - #-- compile regular expression operator for remote files + # compile regular expression operator for remote files R1 = re.compile(r'TN-13_GEOC_(CSR|GFZ|JPL)_(.*?).txt', re.VERBOSE) R2 = re.compile(r'TN-(14)_C30_C20_GSFC_SLR.txt', re.VERBOSE) - #-- check if geocenter directory exists and recursively create if not + # check if geocenter directory exists and recursively create if not local_dir = os.path.join(DIRECTORY,'geocenter') os.makedirs(local_dir,MODE) if not os.path.exists(local_dir) else None - #-- current time stamp to use for local files + # current time stamp to use for local files mtime = time.time() - #-- for each processing center (CSR, GFZ, JPL) + # for each processing center (CSR, GFZ, JPL) for pr in PROC: - #-- for each data release (RL04, RL05, RL06) + # for each data release (RL04, RL05, RL06) for rl in DREL: - #-- for each unique version of data to sync + # for each unique version of data to sync for version in set(VERSION): - #-- query CMR for product metadata + # query CMR for product metadata urls = gravity_toolkit.utilities.cmr_metadata( mission='grace-fo', center=pr, release=rl, version=version, provider='POCLOUD', endpoint=ENDPOINT) - #-- TN-13 JPL degree 1 files + # TN-13 JPL degree 1 files url, = [url for url in urls if R1.search(url)] granule = gravity_toolkit.utilities.url_split(url)[-1] local_file = os.path.join(DIRECTORY,'geocenter',granule) - #-- access auxiliary data from endpoint + # access auxiliary data from endpoint if (ENDPOINT == 'data'): http_pull_file(url, mtime, local_file, TIMEOUT=TIMEOUT, CLOBBER=CLOBBER, MODE=MODE) @@ -143,11 +143,11 @@ def podaac_cumulus(client, DIRECTORY, PROC=[], DREL=[], VERSION=[], s3_pull_file(response, mtime, local_file, CLOBBER=CLOBBER, MODE=MODE) - #-- TN-14 SLR C2,0 and C3,0 files + # TN-14 SLR C2,0 and C3,0 files url, = [url for url in urls if R2.search(url)] granule = gravity_toolkit.utilities.url_split(url)[-1] local_file = 
os.path.join(DIRECTORY,granule) - #-- access auxiliary data from endpoint + # access auxiliary data from endpoint if (ENDPOINT == 'data'): http_pull_file(url, mtime, local_file, TIMEOUT=TIMEOUT, CLOBBER=CLOBBER, MODE=MODE) @@ -158,29 +158,29 @@ def podaac_cumulus(client, DIRECTORY, PROC=[], DREL=[], VERSION=[], s3_pull_file(response, mtime, local_file, CLOBBER=CLOBBER, MODE=MODE) - #-- GRACE/GRACE-FO AOD1B dealiasing products + # GRACE/GRACE-FO AOD1B dealiasing products if AOD1B: logging.info('GRACE L1B Dealiasing Products:') - #-- for each data release (RL04, RL05, RL06) + # for each data release (RL04, RL05, RL06) for rl in DREL: - #-- print string of exact data product + # print string of exact data product logging.info(f'GFZ/AOD1B/{rl}') - #-- local directory for exact data product + # local directory for exact data product local_dir = os.path.join(DIRECTORY,'AOD1B',rl) - #-- check if directory exists and recursively create if not + # check if directory exists and recursively create if not if not os.path.exists(local_dir): os.makedirs(local_dir,MODE) - #-- query CMR for dataset + # query CMR for dataset ids,urls,mtimes = gravity_toolkit.utilities.cmr( mission='grace', level='L1B', center='GFZ', release=rl, product='AOD1B', start_date='2002-01-01T00:00:00', provider='POCLOUD', endpoint=ENDPOINT) - #-- for each model id and url + # for each model id and url for id,url,mtime in zip(ids,urls,mtimes): - #-- retrieve GRACE/GRACE-FO files + # retrieve GRACE/GRACE-FO files granule = gravity_toolkit.utilities.url_split(url)[-1] local_file = os.path.join(local_dir,granule) - #-- access data from endpoint + # access data from endpoint if (ENDPOINT == 'data'): http_pull_file(url, mtime, local_file, TIMEOUT=TIMEOUT, CLOBBER=CLOBBER, MODE=MODE) @@ -191,40 +191,40 @@ def podaac_cumulus(client, DIRECTORY, PROC=[], DREL=[], VERSION=[], s3_pull_file(response, mtime, local_file, CLOBBER=CLOBBER, MODE=MODE) - #-- GRACE/GRACE-FO level-2 spherical harmonic products + # GRACE/GRACE-FO level-2 spherical harmonic products logging.info('GRACE/GRACE-FO L2 Global Spherical Harmonics:') - #-- for each processing center (CSR, GFZ, JPL) + # for each processing center (CSR, GFZ, JPL) for pr in PROC: - #-- for each data release (RL04, RL05, RL06) + # for each data release (RL04, RL05, RL06) for rl in DREL: - #-- for each level-2 product (GAC, GAD, GSM, GAA, GAB) + # for each level-2 product (GAC, GAD, GSM, GAA, GAB) for ds in DSET[pr]: - #-- local directory for exact data product + # local directory for exact data product local_dir = os.path.join(DIRECTORY, pr, rl, ds) - #-- check if directory exists and recursively create if not + # check if directory exists and recursively create if not if not os.path.exists(local_dir): os.makedirs(local_dir,MODE) - #-- list of GRACE/GRACE-FO files for index + # list of GRACE/GRACE-FO files for index grace_files = [] - #-- for each satellite mission (grace, grace-fo) + # for each satellite mission (grace, grace-fo) for i,mi in enumerate(['grace','grace-fo']): - #-- print string of exact data product + # print string of exact data product logging.info(f'{mi} {pr}/{rl}/{ds}') - #-- query CMR for dataset + # query CMR for dataset ids,urls,mtimes = gravity_toolkit.utilities.cmr( mission=mi, center=pr, release=rl, product=ds, version=VERSION[i], provider='POCLOUD', endpoint=ENDPOINT) - #-- regular expression operator for data product + # regular expression operator for data product rx = gravity_toolkit.utilities.compile_regex_pattern( pr, rl, ds, mission=shortname[mi]) - #-- for each 
model id and url + # for each model id and url for id,url,mtime in zip(ids,urls,mtimes): - #-- retrieve GRACE/GRACE-FO files + # retrieve GRACE/GRACE-FO files granule = gravity_toolkit.utilities.url_split(url)[-1] suffix = '.gz' if GZIP else '' local_file = os.path.join(local_dir, f'{granule}{suffix}') - #-- access data from endpoint + # access data from endpoint if (ENDPOINT == 'data'): http_pull_file(url, mtime, local_file, GZIP=GZIP, TIMEOUT=TIMEOUT, @@ -235,36 +235,36 @@ def podaac_cumulus(client, DIRECTORY, PROC=[], DREL=[], VERSION=[], response = client.get_object(Bucket=bucket, Key=key) s3_pull_file(response, mtime, local_file, GZIP=GZIP, CLOBBER=CLOBBER, MODE=MODE) - #-- find local GRACE/GRACE-FO files to create index + # find local GRACE/GRACE-FO files to create index granules = sorted([f for f in os.listdir(local_dir) if rx.match(f)]) - #-- reduce list of GRACE/GRACE-FO files to unique dates + # reduce list of GRACE/GRACE-FO files to unique dates granules = gravity_toolkit.time.reduce_by_date(granules) - #-- extend list of GRACE/GRACE-FO files with granules + # extend list of GRACE/GRACE-FO files with granules grace_files.extend(granules) - #-- outputting GRACE/GRACE-FO filenames to index + # outputting GRACE/GRACE-FO filenames to index with open(os.path.join(local_dir,'index.txt'),'w') as fid: for fi in sorted(grace_files): print(fi, file=fid) - #-- change permissions of index file + # change permissions of index file os.chmod(os.path.join(local_dir,'index.txt'), MODE) - #-- close log file and set permissions level to MODE + # close log file and set permissions level to MODE if LOG: os.chmod(os.path.join(DIRECTORY,LOGFILE), MODE) -#-- PURPOSE: pull file from a remote host checking if file exists locally -#-- and if the remote file is newer than the local file +# PURPOSE: pull file from a remote host checking if file exists locally +# and if the remote file is newer than the local file def http_pull_file(remote_file, remote_mtime, local_file, GZIP=False, TIMEOUT=120, CLOBBER=False, MODE=0o775): - #-- if file exists in file system: check if remote file is newer + # if file exists in file system: check if remote file is newer TEST = False OVERWRITE = ' (clobber)' - #-- check if local version of file exists + # check if local version of file exists if os.access(local_file, os.F_OK): - #-- check last modification time of local file + # check last modification time of local file local_mtime = os.stat(local_file).st_mtime - #-- if remote file is newer: overwrite the local file + # if remote file is newer: overwrite the local file if (gravity_toolkit.utilities.even(remote_mtime) > gravity_toolkit.utilities.even(local_mtime)): TEST = True @@ -272,42 +272,42 @@ def http_pull_file(remote_file, remote_mtime, local_file, else: TEST = True OVERWRITE = ' (new)' - #-- if file does not exist locally, is to be overwritten, or CLOBBER is set + # if file does not exist locally, is to be overwritten, or CLOBBER is set if TEST or CLOBBER: - #-- Printing files transferred + # Printing files transferred logging.info(f'{remote_file} -->') logging.info(f'\t{local_file}{OVERWRITE}\n') - #-- chunked transfer encoding size + # chunked transfer encoding size CHUNK = 16 * 1024 - #-- Create and submit request. - #-- There are a range of exceptions that can be thrown here - #-- including HTTPError and URLError. + # Create and submit request. + # There are a range of exceptions that can be thrown here + # including HTTPError and URLError. 
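The update test in http_pull_file and s3_pull_file above reduces to a three-way decision: fetch when no local copy exists, when the remote copy is newer (both timestamps passed through gravity_toolkit.utilities.even to suppress spurious sub-second differences), or when CLOBBER forces an overwrite. A standalone sketch of that decision, with plain rounding standing in for the toolkit helper:
import os

def needs_update(local_file, remote_mtime, clobber=False):
    # fetch if the caller forces an overwrite or no local copy exists
    if clobber or not os.access(local_file, os.F_OK):
        return True
    # otherwise fetch only if the remote file is newer than the local copy
    return round(remote_mtime) > round(os.stat(local_file).st_mtime)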
request = gravity_toolkit.utilities.urllib2.Request(remote_file) response = gravity_toolkit.utilities.urllib2.urlopen(request, timeout=TIMEOUT) - #-- copy remote file contents to local file + # copy remote file contents to local file if GZIP: with gzip.GzipFile(local_file, 'wb', 9, None, remote_mtime) as f: shutil.copyfileobj(response, f) else: with open(local_file, 'wb') as f: shutil.copyfileobj(response, f, CHUNK) - #-- keep remote modification time of file and local access time + # keep remote modification time of file and local access time os.utime(local_file, (os.stat(local_file).st_atime, remote_mtime)) os.chmod(local_file, MODE) -#-- PURPOSE: pull file from AWS s3 bucket checking if file exists locally -#-- and if the remote file is newer than the local file +# PURPOSE: pull file from AWS s3 bucket checking if file exists locally +# and if the remote file is newer than the local file def s3_pull_file(response, remote_mtime, local_file, GZIP=False, CLOBBER=False, MODE=0o775): - #-- if file exists in file system: check if remote file is newer + # if file exists in file system: check if remote file is newer TEST = False OVERWRITE = ' (clobber)' - #-- check if local version of file exists + # check if local version of file exists if os.access(local_file, os.F_OK): - #-- check last modification time of local file + # check last modification time of local file local_mtime = os.stat(local_file).st_mtime - #-- if remote file is newer: overwrite the local file + # if remote file is newer: overwrite the local file if (gravity_toolkit.utilities.even(remote_mtime) > gravity_toolkit.utilities.even(local_mtime)): TEST = True @@ -315,32 +315,32 @@ def s3_pull_file(response, remote_mtime, local_file, else: TEST = True OVERWRITE = ' (new)' - #-- if file does not exist locally, is to be overwritten, or CLOBBER is set + # if file does not exist locally, is to be overwritten, or CLOBBER is set if TEST or CLOBBER: - #-- Printing files transferred + # Printing files transferred logging.info(f'{local_file}{OVERWRITE}') - #-- chunked transfer encoding size + # chunked transfer encoding size CHUNK = 16 * 1024 - #-- copy remote file contents to local file + # copy remote file contents to local file if GZIP: with gzip.GzipFile(local_file, 'wb', 9, None, remote_mtime) as f: shutil.copyfileobj(response['Body'], f) else: with open(local_file, 'wb') as f: shutil.copyfileobj(response['Body'], f, CHUNK) - #-- keep remote modification time of file and local access time + # keep remote modification time of file and local access time os.utime(local_file, (os.stat(local_file).st_atime, remote_mtime)) os.chmod(local_file, MODE) -#-- PURPOSE: create argument parser +# PURPOSE: create argument parser def arguments(): parser = argparse.ArgumentParser( description="""Syncs GRACE/GRACE-FO and auxiliary data from the NASA JPL PO.DAAC Cumulus AWS bucket. 
""" ) - #-- command line parameters - #-- NASA Earthdata credentials + # command line parameters + # NASA Earthdata credentials parser.add_argument('--user','-U', type=str, default=os.environ.get('EARTHDATA_USERNAME'), help='Username for NASA Earthdata Login') @@ -351,85 +351,85 @@ def arguments(): type=lambda p: os.path.abspath(os.path.expanduser(p)), default=os.path.join(os.path.expanduser('~'),'.netrc'), help='Path to .netrc file for authentication') - #-- working data directory + # working data directory parser.add_argument('--directory','-D', type=lambda p: os.path.abspath(os.path.expanduser(p)), default=os.getcwd(), help='Working data directory') - #-- GRACE/GRACE-FO processing center + # GRACE/GRACE-FO processing center parser.add_argument('--center','-c', metavar='PROC', type=str, nargs='+', default=['CSR','GFZ','JPL'], choices=['CSR','GFZ','JPL'], help='GRACE/GRACE-FO processing center') - #-- GRACE/GRACE-FO data release + # GRACE/GRACE-FO data release parser.add_argument('--release','-r', metavar='DREL', type=str, nargs='+', default=['RL06'], choices=['RL06'], help='GRACE/GRACE-FO data release') - #-- GRACE/GRACE-FO data version + # GRACE/GRACE-FO data version parser.add_argument('--version','-v', metavar='VERSION', type=str, nargs=2, default=['0','1'], choices=['0','1','2','3'], help='GRACE/GRACE-FO Level-2 data version') - #-- GRACE/GRACE-FO dealiasing products + # GRACE/GRACE-FO dealiasing products parser.add_argument('--aod1b','-a', default=False, action='store_true', help='Sync GRACE/GRACE-FO Level-1B dealiasing products') - #-- CMR endpoint type + # CMR endpoint type parser.add_argument('--endpoint','-e', type=str, default='s3', choices=['s3','data'], help='CMR url endpoint type') - #-- connection timeout + # connection timeout parser.add_argument('--timeout','-t', type=int, default=360, help='Timeout in seconds for blocking operations') - #-- output compressed files + # output compressed files parser.add_argument('--gzip','-G', default=False, action='store_true', help='Compress output GRACE/GRACE-FO Level-2 granules') - #-- Output log file in form - #-- PODAAC_sync_2002-04-01.log + # Output log file in form + # PODAAC_sync_2002-04-01.log parser.add_argument('--log','-l', default=False, action='store_true', help='Output log file') - #-- sync options + # sync options parser.add_argument('--clobber','-C', default=False, action='store_true', help='Overwrite existing data in transfer') - #-- permissions mode of the directories and files synced (number in octal) + # permissions mode of the directories and files synced (number in octal) parser.add_argument('--mode','-M', type=lambda x: int(x,base=8), default=0o775, help='Permission mode of directories and files synced') - #-- return the parser + # return the parser return parser -#-- This is the main part of the program that calls the individual functions +# This is the main part of the program that calls the individual functions def main(): - #-- Read the system arguments listed after the program + # Read the system arguments listed after the program parser = arguments() args,_ = parser.parse_known_args() - #-- NASA Earthdata hostname + # NASA Earthdata hostname URS = 'urs.earthdata.nasa.gov' - #-- check internet connection before attempting to run program + # check internet connection before attempting to run program opener = gravity_toolkit.utilities.attempt_login(URS, username=args.user, password=args.password, netrc=args.netrc) - #-- Create and submit request to create AWS session - #-- There are a range of exceptions that 
can be thrown here - #-- including HTTPError and URLError. + # Create and submit request to create AWS session + # There are a range of exceptions that can be thrown here + # including HTTPError and URLError. HOST = 'https://archive.podaac.earthdata.nasa.gov/s3credentials' - #-- get aws s3 client object + # get aws s3 client object client = gravity_toolkit.utilities.s3_client(HOST, args.timeout) - #-- retrieve data objects from s3 client + # retrieve data objects from s3 client podaac_cumulus(client, args.directory, PROC=args.center, DREL=args.release, VERSION=args.version, AOD1B=args.aod1b, ENDPOINT=args.endpoint, TIMEOUT=args.timeout, GZIP=args.gzip, LOG=args.log, CLOBBER=args.clobber, MODE=args.mode) -#-- run main program +# run main program if __name__ == '__main__': main() diff --git a/scripts/podaac_grace_sync.py b/scripts/podaac_grace_sync.py index e75cd316..189c216b 100644 --- a/scripts/podaac_grace_sync.py +++ b/scripts/podaac_grace_sync.py @@ -182,38 +182,38 @@ import gravity_toolkit.time import gravity_toolkit.utilities -#-- PURPOSE: sync local GRACE/GRACE-FO files with JPL PO.DAAC drive server +# PURPOSE: sync local GRACE/GRACE-FO files with JPL PO.DAAC drive server def podaac_grace_sync(DIRECTORY, PROC=[], DREL=[], VERSION=[], AOD1B=False, NEWSLETTERS=False, TIMEOUT=None, LOG=False, LIST=False, CLOBBER=False, CHECKSUM=False, MODE=None): - #-- check if directory exists and recursively create if not + # check if directory exists and recursively create if not os.makedirs(DIRECTORY,MODE) if not os.path.exists(DIRECTORY) else None - #-- remote https server for GRACE data + # remote https server for GRACE data HOST = 'https://podaac-tools.jpl.nasa.gov' - #-- mission shortnames + # mission shortnames shortname = {'grace':'GRAC', 'grace-fo':'GRFO'} - #-- RL04/RL05 have been moved on PO.DAAC to the retired directory + # RL04/RL05 have been moved on PO.DAAC to the retired directory retired = {} retired['RL04'] = 'retired' retired['RL05'] = 'retired' retired['RL06'] = '' - #-- datasets for each processing center + # datasets for each processing center DSET = {} DSET['CSR'] = ['GAC', 'GAD', 'GSM'] DSET['GFZ'] = ['GAA', 'GAB', 'GAC', 'GAD', 'GSM'] DSET['JPL'] = ['GAA', 'GAB', 'GAC', 'GAD', 'GSM'] - #-- remote subdirectories for newsletters (note capital for grace-fo) + # remote subdirectories for newsletters (note capital for grace-fo) newsletter_sub = {} newsletter_sub['grace'] = ['grace','docs','newsletters'] newsletter_sub['grace-fo'] = ['gracefo','docs','Newsletters'] - #-- compile HTML parser for lxml + # compile HTML parser for lxml parser = lxml.etree.HTMLParser() - #-- create log file with list of synchronized files (or print to terminal) + # create log file with list of synchronized files (or print to terminal) if LOG: - #-- format: PODAAC_sync_2002-04-01.log + # format: PODAAC_sync_2002-04-01.log today = time.strftime('%Y-%m-%d',time.localtime()) LOGFILE = f'PODAAC_sync_{today}.log' logging.basicConfig(filename=os.path.join(DIRECTORY,LOGFILE), @@ -222,70 +222,70 @@ def podaac_grace_sync(DIRECTORY, PROC=[], DREL=[], VERSION=[], logging.info('CENTERS={0}'.format(','.join(PROC))) logging.info('RELEASES={0}'.format(','.join(DREL))) else: - #-- standard output (terminal output) + # standard output (terminal output) logging.basicConfig(level=logging.INFO) - #-- Degree 1 (geocenter) coefficients + # Degree 1 (geocenter) coefficients logging.info('Degree 1 Coefficients:') PATH = [HOST,'drive','files','allData','gracefo','docs'] remote_dir = posixpath.join(*PATH) local_dir = 
os.path.join(DIRECTORY,'geocenter') - #-- check if geocenter directory exists and recursively create if not + # check if geocenter directory exists and recursively create if not os.makedirs(local_dir,MODE) if not os.path.exists(local_dir) else None - #-- TN-13 JPL degree 1 files - #-- compile regular expression operator for remote files + # TN-13 JPL degree 1 files + # compile regular expression operator for remote files R1 = re.compile(r'TN-13_GEOC_(CSR|GFZ|JPL)_(.*?).txt', re.VERBOSE) - #-- open connection with PO.DAAC drive server at remote directory + # open connection with PO.DAAC drive server at remote directory files,mtimes = gravity_toolkit.utilities.drive_list(PATH, timeout=TIMEOUT,build=False,parser=parser,pattern=R1,sort=True) - #-- for each file on the remote server + # for each file on the remote server for colname,remote_mtime in zip(files,mtimes): - #-- remote and local versions of the file + # remote and local versions of the file remote_file = posixpath.join(remote_dir,colname) local_file = os.path.join(local_dir,colname) http_pull_file(remote_file, remote_mtime, local_file, TIMEOUT=TIMEOUT, LIST=LIST, CLOBBER=CLOBBER, CHECKSUM=CHECKSUM, MODE=MODE) - #-- SLR C2,0 coefficients + # SLR C2,0 coefficients logging.info('C2,0 Coefficients:') PATH = [HOST,'drive','files','allData','grace','docs'] remote_dir = posixpath.join(*PATH) local_dir = os.path.expanduser(DIRECTORY) - #-- compile regular expression operator for remote files + # compile regular expression operator for remote files R1 = re.compile(r'TN-(05|07|11)_C20_SLR.txt', re.VERBOSE) - #-- open connection with PO.DAAC drive server at remote directory + # open connection with PO.DAAC drive server at remote directory files,mtimes = gravity_toolkit.utilities.drive_list(PATH, timeout=TIMEOUT,build=False,parser=parser,pattern=R1,sort=True) - #-- for each file on the remote server + # for each file on the remote server for colname,remote_mtime in zip(files,mtimes): - #-- remote and local versions of the file + # remote and local versions of the file remote_file = posixpath.join(remote_dir,colname) local_file = os.path.join(local_dir,colname) http_pull_file(remote_file, remote_mtime, local_file, TIMEOUT=TIMEOUT, LIST=LIST, CLOBBER=CLOBBER, CHECKSUM=CHECKSUM, MODE=MODE) - #-- SLR C3,0 coefficients + # SLR C3,0 coefficients logging.info('C3,0 Coefficients:') PATH = [HOST,'drive','files','allData','gracefo','docs'] remote_dir = posixpath.join(*PATH) local_dir = os.path.expanduser(DIRECTORY) - #-- compile regular expression operator for remote files + # compile regular expression operator for remote files R1 = re.compile(r'TN-(14)_C30_C20_GSFC_SLR.txt', re.VERBOSE) - #-- open connection with PO.DAAC drive server at remote directory + # open connection with PO.DAAC drive server at remote directory files,mtimes = gravity_toolkit.utilities.drive_list(PATH, timeout=TIMEOUT,build=False,parser=parser,pattern=R1,sort=True) - #-- for each file on the remote server + # for each file on the remote server for colname,remote_mtime in zip(files,mtimes): - #-- remote and local versions of the file + # remote and local versions of the file remote_file = posixpath.join(remote_dir,colname) local_file = os.path.join(local_dir,colname) http_pull_file(remote_file, remote_mtime, local_file, TIMEOUT=TIMEOUT, LIST=LIST, CLOBBER=CLOBBER, CHECKSUM=CHECKSUM, MODE=MODE) - #-- TN-08 GAE, TN-09 GAF and TN-10 GAG ECMWF atmosphere correction products + # TN-08 GAE, TN-09 GAF and TN-10 GAG ECMWF atmosphere correction products logging.info('TN-08 GAE, TN-09 GAF 
and TN-10 GAG products:') PATH = [HOST,'drive','files','allData','grace','docs'] remote_dir = posixpath.join(*PATH) @@ -294,151 +294,151 @@ def podaac_grace_sync(DIRECTORY, PROC=[], DREL=[], VERSION=[], ECMWF_files.append('TN-08_GAE-2_2006032-2010031_0000_EIGEN_G---_0005.gz') ECMWF_files.append('TN-09_GAF-2_2010032-2015131_0000_EIGEN_G---_0005.gz') ECMWF_files.append('TN-10_GAG-2_2015132-2099001_0000_EIGEN_G---_0005.gz') - #-- compile regular expression operator for remote files + # compile regular expression operator for remote files R1 = re.compile(r'({0}|{1}|{2})'.format(*ECMWF_files), re.VERBOSE) - #-- open connection with PO.DAAC drive server at remote directory + # open connection with PO.DAAC drive server at remote directory files,mtimes = gravity_toolkit.utilities.drive_list(PATH, timeout=TIMEOUT,build=False,parser=parser,pattern=R1,sort=True) - #-- for each file on the remote server + # for each file on the remote server for colname,remote_mtime in zip(files,mtimes): - #-- remote and local versions of the file + # remote and local versions of the file remote_file = posixpath.join(remote_dir,colname) local_file = os.path.join(local_dir,colname) http_pull_file(remote_file, remote_mtime, local_file, TIMEOUT=TIMEOUT, LIST=LIST, CLOBBER=CLOBBER, CHECKSUM=CHECKSUM, MODE=MODE) - #-- GRACE and GRACE-FO newsletters + # GRACE and GRACE-FO newsletters if NEWSLETTERS: - #-- local newsletter directory (place GRACE and GRACE-FO together) + # local newsletter directory (place GRACE and GRACE-FO together) local_dir = os.path.join(DIRECTORY,'newsletters') - #-- check if newsletters directory exists and recursively create if not + # check if newsletters directory exists and recursively create if not os.makedirs(local_dir,MODE) if not os.path.exists(local_dir) else None - #-- for each satellite mission (grace, grace-fo) + # for each satellite mission (grace, grace-fo) for i,mi in enumerate(['grace','grace-fo']): logging.info(f'{mi} Newsletters:') PATH = [HOST,'drive','files','allData',*newsletter_sub[mi]] remote_dir = posixpath.join(*PATH) - #-- compile regular expression operator for remote files + # compile regular expression operator for remote files NAME = mi.upper().replace('-','_') R1 = re.compile(rf'{NAME}_SDS_NL_(\d+).pdf', re.VERBOSE) - #-- open connection with PO.DAAC drive server at remote directory + # open connection with PO.DAAC drive server at remote directory files,mtimes = gravity_toolkit.utilities.drive_list(PATH, timeout=TIMEOUT,build=False,parser=parser,pattern=R1,sort=True) - #-- for each file on the remote server + # for each file on the remote server for colname,remote_mtime in zip(files,mtimes): - #-- remote and local versions of the file + # remote and local versions of the file remote_file = posixpath.join(remote_dir,colname) local_file = os.path.join(local_dir,colname) http_pull_file(remote_file, remote_mtime, local_file, TIMEOUT=TIMEOUT, LIST=LIST, CLOBBER=CLOBBER, CHECKSUM=CHECKSUM, MODE=MODE) - #-- GRACE/GRACE-FO AOD1B dealiasing products + # GRACE/GRACE-FO AOD1B dealiasing products if AOD1B: logging.info('GRACE L1B Dealiasing Products:') - #-- for each data release (RL04, RL05, RL06) + # for each data release (RL04, RL05, RL06) for rl in DREL: - #-- print string of exact data product + # print string of exact data product logging.info(f'GFZ/AOD1B/{rl}') - #-- remote and local directory for exact data product + # remote and local directory for exact data product local_dir = os.path.join(DIRECTORY,'AOD1B',rl) - #-- check if AOD1B directory exists and recursively create 
if not + # check if AOD1B directory exists and recursively create if not os.makedirs(local_dir,MODE) if not os.path.exists(local_dir) else None - #-- query CMR for dataset + # query CMR for dataset ids,urls,mtimes = gravity_toolkit.utilities.cmr( mission='grace', level='L1B', center='GFZ', release=rl, product='AOD1B', start_date='2002-01-01T00:00:00', provider='PODAAC', endpoint='data') - #-- for each id, url and modification time + # for each id, url and modification time for id,url,mtime in zip(ids,urls,mtimes): - #-- retrieve GRACE/GRACE-FO files + # retrieve GRACE/GRACE-FO files granule = gravity_toolkit.utilities.url_split(url)[-1] http_pull_file(url, mtime, os.path.join(local_dir,granule), TIMEOUT=TIMEOUT, LIST=LIST, CLOBBER=CLOBBER, CHECKSUM=CHECKSUM, MODE=MODE) - #-- GRACE/GRACE-FO level-2 spherical harmonic products + # GRACE/GRACE-FO level-2 spherical harmonic products logging.info('GRACE/GRACE-FO L2 Global Spherical Harmonics:') - #-- for each processing center (CSR, GFZ, JPL) + # for each processing center (CSR, GFZ, JPL) for pr in PROC: - #-- for each data release (RL04, RL05, RL06) + # for each data release (RL04, RL05, RL06) for rl in DREL: - #-- for each level-2 product (GAC, GAD, GSM, GAA, GAB) + # for each level-2 product (GAC, GAD, GSM, GAA, GAB) for ds in DSET[pr]: - #-- local directory for exact data product + # local directory for exact data product local_dir = os.path.join(DIRECTORY, pr, rl, ds) - #-- check if directory exists and recursively create if not + # check if directory exists and recursively create if not if not os.path.exists(local_dir): os.makedirs(local_dir,MODE) - #-- list of GRACE/GRACE-FO files for index + # list of GRACE/GRACE-FO files for index grace_files = [] - #-- for each satellite mission (grace, grace-fo) + # for each satellite mission (grace, grace-fo) for i,mi in enumerate(['grace','grace-fo']): - #-- print string of exact data product + # print string of exact data product logging.info(f'{mi} {pr}/{rl}/{ds}') - #-- query CMR for dataset + # query CMR for dataset ids,urls,mtimes = gravity_toolkit.utilities.cmr( mission=mi, center=pr, release=rl, product=ds, version=VERSION[i], provider='PODAAC', endpoint='data') - #-- regular expression operator for data product + # regular expression operator for data product rx = gravity_toolkit.utilities.compile_regex_pattern( pr, rl, ds, mission=shortname[mi]) - #-- for each id, url and modification time + # for each id, url and modification time for id,url,mtime in zip(ids,urls,mtimes): - #-- retrieve GRACE/GRACE-FO files + # retrieve GRACE/GRACE-FO files granule = gravity_toolkit.utilities.url_split(url)[-1] http_pull_file(url, mtime, os.path.join(local_dir,granule), TIMEOUT=TIMEOUT, LIST=LIST, CLOBBER=CLOBBER, CHECKSUM=CHECKSUM, MODE=MODE) - #-- find local GRACE/GRACE-FO files to create index + # find local GRACE/GRACE-FO files to create index granules = [f for f in os.listdir(local_dir) if rx.match(f)] - #-- reduce list of GRACE/GRACE-FO files to unique dates + # reduce list of GRACE/GRACE-FO files to unique dates granules = gravity_toolkit.time.reduce_by_date(granules) - #-- extend list of GRACE/GRACE-FO files with granules + # extend list of GRACE/GRACE-FO files with granules grace_files.extend(granules) - #-- outputting GRACE/GRACE-FO filenames to index + # outputting GRACE/GRACE-FO filenames to index with open(os.path.join(local_dir,'index.txt'),'w') as fid: for fi in sorted(grace_files): print(fi, file=fid) - #-- change permissions of index file + # change permissions of index file 
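MODE here is a plain integer parsed from an octal string by the --mode option defined below; a quick sanity check of that conversion (illustrative only):

# permission strings are parsed with int(x, base=8), so '775' maps to 0o775
assert int('775', base=8) == 0o775 == 509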
os.chmod(os.path.join(local_dir,'index.txt'), MODE) - #-- close log file and set permissions level to MODE + # close log file and set permissions level to MODE if LOG: os.chmod(os.path.join(DIRECTORY,LOGFILE), MODE) -#-- PURPOSE: pull file from a remote host checking if file exists locally -#-- and if the remote file is newer than the local file +# PURPOSE: pull file from a remote host checking if file exists locally +# and if the remote file is newer than the local file def http_pull_file(remote_file, remote_mtime, local_file, TIMEOUT=120, LIST=False, CLOBBER=False, CHECKSUM=False, MODE=0o775): - #-- if file exists in file system: check if remote file is newer + # if file exists in file system: check if remote file is newer TEST = False OVERWRITE = ' (clobber)' - #-- check if local version of file exists + # check if local version of file exists if CHECKSUM and os.access(local_file, os.F_OK): - #-- generate checksum hash for local file - #-- open the local_file in binary read mode + # generate checksum hash for local file + # open the local_file in binary read mode local_hash = gravity_toolkit.utilities.get_hash(local_file) - #-- Create and submit request. - #-- There are a wide range of exceptions that can be thrown here - #-- including HTTPError and URLError. + # Create and submit request. + # There are a wide range of exceptions that can be thrown here + # including HTTPError and URLError. req=gravity_toolkit.utilities.urllib2.Request(remote_file) resp=gravity_toolkit.utilities.urllib2.urlopen(req,timeout=TIMEOUT) - #-- copy remote file contents to bytesIO object + # copy remote file contents to bytesIO object remote_buffer = io.BytesIO(resp.read()) remote_buffer.seek(0) - #-- generate checksum hash for remote file + # generate checksum hash for remote file remote_hash = gravity_toolkit.utilities.get_hash(remote_buffer) - #-- compare checksums + # compare checksums if (local_hash != remote_hash): TEST = True OVERWRITE = f' (checksums: {local_hash} {remote_hash})' elif os.access(local_file, os.F_OK): - #-- check last modification time of local file + # check last modification time of local file local_mtime = os.stat(local_file).st_mtime - #-- if remote file is newer: overwrite the local file + # if remote file is newer: overwrite the local file if (gravity_toolkit.utilities.even(remote_mtime) > gravity_toolkit.utilities.even(local_mtime)): TEST = True @@ -446,36 +446,36 @@ def http_pull_file(remote_file, remote_mtime, local_file, TIMEOUT=120, else: TEST = True OVERWRITE = ' (new)' - #-- if file does not exist locally, is to be overwritten, or CLOBBER is set + # if file does not exist locally, is to be overwritten, or CLOBBER is set if TEST or CLOBBER: - #-- Printing files transferred + # Printing files transferred logging.info(f'{remote_file} --> ') logging.info(f'\t{local_file}{OVERWRITE}\n') - #-- if executing copy command (not only printing the files) + # if executing copy command (not only printing the files) if not LIST: - #-- chunked transfer encoding size + # chunked transfer encoding size CHUNK = 16 * 1024 - #-- copy bytes or transfer file + # copy bytes or transfer file if CHECKSUM and os.access(local_file, os.F_OK): - #-- store bytes to file using chunked transfer encoding + # store bytes to file using chunked transfer encoding remote_buffer.seek(0) with open(local_file, 'wb') as f: shutil.copyfileobj(remote_buffer, f, CHUNK) else: - #-- Create and submit request. - #-- There are a range of exceptions that can be thrown here - #-- including HTTPError and URLError. 
+ # Create and submit request. + # There are a range of exceptions that can be thrown here + # including HTTPError and URLError. request = gravity_toolkit.utilities.urllib2.Request(remote_file) response = gravity_toolkit.utilities.urllib2.urlopen(request, timeout=TIMEOUT) - #-- copy remote file contents to local file + # copy remote file contents to local file with open(local_file, 'wb') as f: shutil.copyfileobj(response, f, CHUNK) - #-- keep remote modification time of file and local access time + # keep remote modification time of file and local access time os.utime(local_file, (os.stat(local_file).st_atime, remote_mtime)) os.chmod(local_file, MODE) -#-- PURPOSE: create argument parser +# PURPOSE: create argument parser def arguments(): parser = argparse.ArgumentParser( description="""Syncs GRACE/GRACE-FO and auxiliary data from the @@ -485,8 +485,8 @@ def arguments(): Gets the monthly GRACE/GRACE-FO newsletters. """ ) - #-- command line parameters - #-- NASA Earthdata credentials + # command line parameters + # NASA Earthdata credentials parser.add_argument('--user','-U', type=str, default=os.environ.get('EARTHDATA_USERNAME'), help='Username for NASA Earthdata Login') @@ -497,44 +497,44 @@ def arguments(): type=lambda p: os.path.abspath(os.path.expanduser(p)), default=os.path.join(os.path.expanduser('~'),'.netrc'), help='Path to .netrc file for authentication') - #-- working data directory + # working data directory parser.add_argument('--directory','-D', type=lambda p: os.path.abspath(os.path.expanduser(p)), default=os.getcwd(), help='Working data directory') - #-- GRACE/GRACE-FO processing center + # GRACE/GRACE-FO processing center parser.add_argument('--center','-c', metavar='PROC', type=str, nargs='+', default=['CSR','GFZ','JPL'], choices=['CSR','GFZ','JPL'], help='GRACE/GRACE-FO processing center') - #-- GRACE/GRACE-FO data release + # GRACE/GRACE-FO data release parser.add_argument('--release','-r', metavar='DREL', type=str, nargs='+', default=['RL06'], choices=['RL06'], help='GRACE/GRACE-FO data release') - #-- GRACE/GRACE-FO data version + # GRACE/GRACE-FO data version parser.add_argument('--version','-v', metavar='VERSION', type=str, nargs=2, default=['0','1'], choices=['0','1','2','3'], help='GRACE/GRACE-FO Level-2 data version') - #-- GRACE/GRACE-FO dealiasing products + # GRACE/GRACE-FO dealiasing products parser.add_argument('--aod1b','-a', default=False, action='store_true', help='Sync GRACE/GRACE-FO Level-1B dealiasing products') - #-- GRACE/GRACE-FO newsletters + # GRACE/GRACE-FO newsletters parser.add_argument('--newsletters','-n', default=False, action='store_true', help='Sync GRACE/GRACE-FO Newsletters') - #-- connection timeout + # connection timeout parser.add_argument('--timeout','-t', type=int, default=360, help='Timeout in seconds for blocking operations') - #-- Output log file in form - #-- PODAAC_sync_2002-04-01.log + # Output log file in form + # PODAAC_sync_2002-04-01.log parser.add_argument('--log','-l', default=False, action='store_true', help='Output log file') - #-- sync options + # sync options parser.add_argument('--list','-L', default=False, action='store_true', help='Only print files that could be transferred') @@ -544,40 +544,40 @@ def arguments(): parser.add_argument('--clobber','-C', default=False, action='store_true', help='Overwrite existing data in transfer') - #-- permissions mode of the directories and files synced (number in octal) + # permissions mode of the directories and files synced (number in octal) parser.add_argument('--mode','-M', 
type=lambda x: int(x,base=8), default=0o775, help='Permission mode of directories and files synced') - #-- return the parser + # return the parser return parser -#-- This is the main part of the program that calls the individual functions +# This is the main part of the program that calls the individual functions def main(): - #-- Read the system arguments listed after the program + # Read the system arguments listed after the program parser = arguments() args,_ = parser.parse_known_args() - #-- JPL PO.DAAC drive hostname + # JPL PO.DAAC drive hostname HOST = 'podaac-tools.jpl.nasa.gov' - #-- get NASA Earthdata and JPL PO.DAAC drive credentials + # get NASA Earthdata and JPL PO.DAAC drive credentials try: args.user,_,args.webdav = netrc.netrc(args.netrc).authenticators(HOST) except: - #-- check that NASA Earthdata credentials were entered + # check that NASA Earthdata credentials were entered if not args.user: prompt = f'Username for {HOST}: ' args.user = builtins.input(prompt) - #-- enter WebDAV password securely from command-line + # enter WebDAV password securely from command-line if not args.webdav: prompt = f'Password for {args.user}@{HOST}: ' args.webdav = getpass.getpass(prompt) - #-- build a urllib opener for PO.DAAC Drive - #-- Add the username and password for NASA Earthdata Login system + # build a urllib opener for PO.DAAC Drive + # Add the username and password for NASA Earthdata Login system gravity_toolkit.utilities.build_opener(args.user,args.webdav) - #-- check internet connection before attempting to run program - #-- check JPL PO.DAAC Drive credentials before attempting to run program + # check internet connection before attempting to run program + # check JPL PO.DAAC Drive credentials before attempting to run program DRIVE = f'https://{HOST}/drive/files' if gravity_toolkit.utilities.check_credentials(DRIVE): podaac_grace_sync(args.directory, PROC=args.center, @@ -587,6 +587,6 @@ def main(): CLOBBER=args.clobber, CHECKSUM=args.checksum, MODE=args.mode) -#-- run main program +# run main program if __name__ == '__main__': main() diff --git a/scripts/podaac_webdav.py b/scripts/podaac_webdav.py index b38799b7..1a6f3678 100644 --- a/scripts/podaac_webdav.py +++ b/scripts/podaac_webdav.py @@ -67,16 +67,16 @@ import lxml.etree import gravity_toolkit.utilities -#-- PURPOSE: retrieve PO.DAAC Drive WebDAV credentials +# PURPOSE: retrieve PO.DAAC Drive WebDAV credentials def podaac_webdav(USER, PASSWORD, parser): - #-- build opener for retrieving PO.DAAC Drive WebDAV credentials - #-- Add the username and password for NASA Earthdata Login system + # build opener for retrieving PO.DAAC Drive WebDAV credentials + # Add the username and password for NASA Earthdata Login system URS = 'https://urs.earthdata.nasa.gov' gravity_toolkit.utilities.build_opener(USER, PASSWORD, password_manager=True, authorization_header=True, urs=URS) - #-- All calls to urllib2.urlopen will now use handler - #-- Make sure not to include the protocol in with the URL, or - #-- HTTPPasswordMgrWithDefaultRealm will be confused. + # All calls to urllib2.urlopen will now use handler + # Make sure not to include the protocol in with the URL, or + # HTTPPasswordMgrWithDefaultRealm will be confused. 
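A minimal sketch of the registration pattern described above, using the standard library directly; the hostname and credentials are placeholders:

import urllib.request

# register credentials against the bare hostname (no https:// prefix)
password_mgr = urllib.request.HTTPPasswordMgrWithDefaultRealm()
password_mgr.add_password(None, 'urs.earthdata.nasa.gov', 'username', 'password')
handler = urllib.request.HTTPBasicAuthHandler(password_mgr)
# subsequent urlopen calls through the default opener reuse the credentials
urllib.request.install_opener(urllib.request.build_opener(handler))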
HOST = posixpath.join('https://podaac-tools.jpl.nasa.gov','drive') parameters = gravity_toolkit.utilities.urlencode( {'client_id':'lRY01RPdFZ2BKR77Mv9ivQ', 'response_type':'code', @@ -84,27 +84,27 @@ def podaac_webdav(USER, PASSWORD, parser): 'redirect_uri':posixpath.join(HOST,'authenticated'), 'required_scope': 'country+study_area'} ) - #-- retrieve cookies from NASA Earthdata URS + # retrieve cookies from NASA Earthdata URS request = gravity_toolkit.utilities.urllib2.Request( url=posixpath.join(URS,'oauth',f'authorize?{parameters}')) gravity_toolkit.utilities.urllib2.urlopen(request) - #-- read and parse request for webdav password + # read and parse request for webdav password request = gravity_toolkit.utilities.urllib2.Request(url=HOST) response = gravity_toolkit.utilities.urllib2.urlopen(request,timeout=20) tree = lxml.etree.parse(response, parser) WEBDAV, = tree.xpath('//input[@id="password"]/@value') - #-- return webdav password + # return webdav password return WEBDAV -#-- PURPOSE: create argument parser +# PURPOSE: create argument parser def arguments(): parser = argparse.ArgumentParser( description="""Retrieves and prints a user's PO.DAAC WebDAV credentials """ ) - #-- command line parameters - #-- NASA Earthdata credentials + # command line parameters + # NASA Earthdata credentials parser.add_argument('--user','-U', type=str, default=os.environ.get('EARTHDATA_USERNAME'), help='Username for NASA Earthdata Login') @@ -115,50 +115,50 @@ def arguments(): type=lambda p: os.path.abspath(os.path.expanduser(p)), default=os.path.join(os.path.expanduser('~'),'.netrc'), help='Path to .netrc file for authentication') - #-- append to netrc + # append to netrc parser.add_argument('--append','-A', default=False, action='store_true', help='Append .netrc file instead of printing') - #-- return the parser + # return the parser return parser -#-- This is the main part of the program that calls the individual functions +# This is the main part of the program that calls the individual functions def main(): - #-- Read the system arguments listed after the program + # Read the system arguments listed after the program parser = arguments() args,_ = parser.parse_known_args() - #-- NASA Earthdata hostname + # NASA Earthdata hostname URS = 'urs.earthdata.nasa.gov' - #-- JPL PO.DAAC drive hostname + # JPL PO.DAAC drive hostname HOST = 'podaac-tools.jpl.nasa.gov' - #-- get NASA Earthdata credentials + # get NASA Earthdata credentials try: args.user,_,args.password = netrc.netrc(args.netrc).authenticators(URS) except: - #-- check that NASA Earthdata credentials were entered + # check that NASA Earthdata credentials were entered if not args.user: prompt = f'Username for {URS}: ' args.user = builtins.input(prompt) - #-- enter password securely from command-line + # enter password securely from command-line if not args.password: prompt = f'Password for {args.user}@{URS}: ' args.password = getpass.getpass(prompt) - #-- check internet connection before attempting to run program + # check internet connection before attempting to run program DRIVE = posixpath.join('https://podaac-tools.jpl.nasa.gov','drive') if gravity_toolkit.utilities.check_connection(DRIVE): - #-- compile HTML parser for lxml + # compile HTML parser for lxml WEBDAV = podaac_webdav(args.user, args.password, lxml.etree.HTMLParser()) - #-- output to terminal or append to netrc file + # output to terminal or append to netrc file if args.append: - #-- append to netrc file and set permissions level + # append to netrc file and set permissions level 
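For reference, the line appended below follows the standard .netrc entry pattern machine <host> login <user> password <secret>; the credentials in this example are placeholders:

machine podaac-tools.jpl.nasa.gov login username password webdav_password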
with open(args.netrc,'a+') as f: f.write(f'machine {HOST} login {args.user} password {WEBDAV}\n') os.chmod(args.netrc, 0o600) else: print(f'\nWebDAV Password for {args.user}@{HOST}:\n\t{WEBDAV}') -#-- run main program +# run main program if __name__ == '__main__': main() diff --git a/scripts/regress_grace_maps.py b/scripts/regress_grace_maps.py index d9402eb3..4acf3453 100755 --- a/scripts/regress_grace_maps.py +++ b/scripts/regress_grace_maps.py @@ -100,7 +100,7 @@ from gravity_toolkit.tsamplitude import tsamplitude from gravity_toolkit.spatial import spatial -#-- PURPOSE: keep track of threads +# PURPOSE: keep track of threads def info(args): logging.info(os.path.basename(sys.argv[0])) logging.info(args) @@ -109,7 +109,7 @@ def info(args): logging.info(f'parent process: {os.getppid():d}') logging.info(f'process id: {os.getpid():d}') -#-- program module to run with specified parameters +# program module to run with specified parameters def regress_grace_maps(LMAX, RAD, START=None, END=None, @@ -129,66 +129,66 @@ def regress_grace_maps(LMAX, RAD, VERBOSE=0, MODE=0o775): - #-- output filename suffix + # output filename suffix suffix = dict(ascii='txt', netCDF4='nc', HDF5='H5')[DATAFORM] - #-- flag for spherical harmonic order + # flag for spherical harmonic order order_str = f'M{MMAX:d}' if MMAX and (MMAX != LMAX) else '' - #-- Calculating the Gaussian smoothing for radius RAD + # Calculating the Gaussian smoothing for radius RAD gw_str = f'_r{RAD:0.0f}km' if (RAD != 0) else '' - #-- destriped GRACE/GRACE-FO coefficients + # destriped GRACE/GRACE-FO coefficients ds_str = '_FL' if DESTRIPE else '' - #-- distributing removed mass uniformly over ocean + # distributing removed mass uniformly over ocean ocean_str = '_OCN' if REDISTRIBUTE_REMOVED else '' - #-- input and output spatial units + # input and output spatial units unit_list = ['cmwe', 'mmGH', 'mmCU', u'\u03BCGal', 'mbar'] unit_name = ['Equivalent Water Thickness', 'Geoid Height', 'Elastic Crustal Uplift', 'Gravitational Undulation', 'Equivalent Surface Pressure'] - #-- input file format + # input file format input_format = '{0}{1}_L{2:d}{3}{4}{5}_{6:03d}.{7}' - #-- output file format + # output file format output_format = '{0}{1}_L{2:d}{3}{4}{5}_{6}{7}_{8:03d}-{9:03d}.{10}' - #-- GRACE months to read + # GRACE months to read months = sorted(set(np.arange(START,END+1)) - set(MISSING)) nmon = len(months) - #-- Output Degree Spacing + # Output Degree Spacing dlon,dlat = (DDEG[0],DDEG[0]) if (len(DDEG) == 1) else (DDEG[0],DDEG[1]) - #-- Output Degree Interval + # Output Degree Interval if (INTERVAL == 1): - #-- (-180:180,90:-90) + # (-180:180,90:-90) nlon = np.int64((360.0/dlon)+1.0) nlat = np.int64((180.0/dlat)+1.0) elif (INTERVAL == 2): - #-- (Degree spacing)/2 + # (Degree spacing)/2 nlon = np.int64(360.0/dlon) nlat = np.int64(180.0/dlat) elif (INTERVAL == 3): - #-- non-global grid set with BOUNDS parameter + # non-global grid set with BOUNDS parameter minlon,maxlon,minlat,maxlat = BOUNDS.copy() lon = np.arange(minlon+dlon/2.0,maxlon+dlon/2.0,dlon) lat = np.arange(maxlat-dlat/2.0,minlat-dlat/2.0,-dlat) nlon = len(lon) nlat = len(lat) - #-- Setting output parameters for each fit type + # Setting output parameters for each fit type coef_str = ['x{0:d}'.format(o) for o in range(ORDER+1)] unit_suffix = [' yr^{0:d}'.format(-o) if o else '' for o in range(ORDER+1)] - if (ORDER == 0):#-- Mean + if (ORDER == 0):# Mean unit_longname = ['Mean'] - elif (ORDER == 1):#-- Trend + elif (ORDER == 1):# Trend unit_longname = ['Constant','Trend'] - elif
(ORDER == 2):#-- Quadratic + elif (ORDER == 2):# Quadratic unit_longname = ['Constant','Linear','Quadratic'] - #-- filename strings for cyclical terms + # filename strings for cyclical terms cyclic_str = {} cyclic_str['SEMI'] = ['SS','SC'] cyclic_str['ANN'] = ['AS','AC'] cyclic_str['S2'] = ['S2S','S2C'] - #-- unit longnames for cyclical terms + # unit longnames for cyclical terms cyclic_longname = {} cyclic_longname['SEMI'] = ['Semi-Annual Sine', 'Semi-Annual Cosine'] cyclic_longname['ANN'] = ['Annual Sine', 'Annual Cosine'] @@ -206,83 +206,83 @@ def regress_grace_maps(LMAX, RAD, unit_suffix.extend(['','']) amp_str.append(flag) - #-- input data spatial object + # input data spatial object spatial_list = [] for t,grace_month in enumerate(months): - #-- input GRACE/GRACE-FO spatial file + # input GRACE/GRACE-FO spatial file fi = input_format.format(FILE_PREFIX,unit_list[UNITS-1],LMAX, order_str,gw_str,ds_str,grace_month,suffix) - #-- read GRACE/GRACE-FO spatial file + # read GRACE/GRACE-FO spatial file if (DATAFORM == 'ascii'): dinput = spatial(spacing=[dlon,dlat],nlon=nlon, nlat=nlat).from_ascii(os.path.join(OUTPUT_DIRECTORY,fi)) elif (DATAFORM == 'netCDF4'): - #-- netcdf (.nc) + # netcdf (.nc) dinput = spatial().from_netCDF4(os.path.join(OUTPUT_DIRECTORY,fi)) elif (DATAFORM == 'HDF5'): - #-- HDF5 (.H5) + # HDF5 (.H5) dinput = spatial().from_HDF5(os.path.join(OUTPUT_DIRECTORY,fi)) - #-- append to spatial list + # append to spatial list dinput.month = grace_month nlat,nlon = dinput.shape spatial_list.append(dinput) - #-- concatenate list to single spatial object + # concatenate list to single spatial object grid = spatial().from_list(spatial_list) spatial_list = None - #-- Fitting seasonal components + # Fitting seasonal components ncomp = len(coef_str) ncycles = 2*len(CYCLES) - #-- Allocating memory for output variables + # Allocating memory for output variables out = dinput.zeros_like() out.data = np.zeros((nlat,nlon,ncomp)) out.error = np.zeros((nlat,nlon,ncomp)) out.mask = np.ones((nlat,nlon,ncomp),dtype=bool) - #-- Fit Significance + # Fit Significance FS = {} - #-- SSE: Sum of Squares Error - #-- AIC: Akaike information criterion - #-- BIC: Bayesian information criterion - #-- R2Adj: Adjusted Coefficient of Determination + # SSE: Sum of Squares Error + # AIC: Akaike information criterion + # BIC: Bayesian information criterion + # R2Adj: Adjusted Coefficient of Determination for key in ['SSE','AIC','BIC','R2Adj']: FS[key] = dinput.zeros_like() - #-- calculate the regression coefficients and fit significance + # calculate the regression coefficients and fit significance for i in range(nlat): for j in range(nlon): - #-- Calculating the regression coefficients + # Calculating the regression coefficients tsbeta = tsregress(grid.time, grid.data[i,j,:], ORDER=ORDER, CYCLES=CYCLES, CONF=0.95) - #-- save regression components + # save regression components for k in range(0, ncomp): out.data[i,j,k] = tsbeta['beta'][k] out.error[i,j,k] = tsbeta['error'][k] out.mask[i,j,k] = False - #-- Fit significance terms - #-- Degrees of Freedom + # Fit significance terms + # Degrees of Freedom nu = tsbeta['DOF'] - #-- Converting Mean Square Error to Sum of Squares Error + # Converting Mean Square Error to Sum of Squares Error FS['SSE'].data[i,j] = tsbeta['MSE']*nu FS['AIC'].data[i,j] = tsbeta['AIC'] FS['BIC'].data[i,j] = tsbeta['BIC'] FS['R2Adj'].data[i,j] = tsbeta['R2Adj'] - #-- list of output files + # list of output files output_files = [] - #-- Output spatial files + # Output spatial files for i in 
range(0,ncomp): - #-- output spatial file name + # output spatial file name f1 = output_format.format(FILE_PREFIX,unit_list[UNITS-1],LMAX, order_str,gw_str,ds_str,coef_str[i],'',START,END,suffix) f2 = output_format.format(FILE_PREFIX,unit_list[UNITS-1],LMAX, order_str,gw_str,ds_str,coef_str[i],'_ERROR',START,END,suffix) - #-- full attributes + # full attributes UNITS_TITLE = '{0}{1}'.format(unit_list[UNITS-1],unit_suffix[i]) LONGNAME = unit_name[UNITS-1] FILE_TITLE = 'GRACE/GRACE-FO_Spatial_Data_{0}'.format(unit_longname[i]) - #-- output regression fit to file + # output regression fit to file output = out.index(i, date=False) output_data(output, FILENAME=os.path.join(OUTPUT_DIRECTORY,f1), DATAFORM=DATAFORM, UNITS=UNITS_TITLE, LONGNAME=LONGNAME, @@ -290,157 +290,157 @@ def regress_grace_maps(LMAX, RAD, output_data(output, FILENAME=os.path.join(OUTPUT_DIRECTORY,f2), DATAFORM=DATAFORM, UNITS=UNITS_TITLE, LONGNAME=LONGNAME, TITLE=FILE_TITLE, KEY='error', VERBOSE=VERBOSE, MODE=MODE) - #-- add output files to list object + # add output files to list object output_files.append(os.path.join(OUTPUT_DIRECTORY,f1)) output_files.append(os.path.join(OUTPUT_DIRECTORY,f2)) - #-- if fitting coefficients with cyclical components + # if fitting coefficients with cyclical components if (ncycles > 0): - #-- output spatial titles for amplitudes + # output spatial titles for amplitudes amp_title = {'ANN':'Annual Amplitude','SEMI':'Semi-Annual Amplitude', 'S2':'S2 Tidal Alias Amplitude'} ph_title = {'ANN':'Annual Phase','SEMI':'Semi-Annual Phase', 'S2':'S2 Tidal Alias Phase'} - #-- output amplitude and phase of cyclical components + # output amplitude and phase of cyclical components for i,flag in enumerate(amp_str): - #-- Indice pointing to the cyclical components + # Index pointing to the cyclical components j = 1 + ORDER + 2*i - #-- Allocating memory for output amplitude and phase + # Allocating memory for output amplitude and phase amp = dinput.zeros_like() ph = dinput.zeros_like() - #-- calculating amplitude and phase of spatial field + # calculating amplitude and phase of spatial field amp.data,ph.data = tsamplitude(out.data[:,:,j],out.data[:,:,j+1]) - #-- convert phase from -180:180 to 0:360 + # convert phase from -180:180 to 0:360 ii,jj = np.nonzero(ph.data < 0) ph.data[ii,jj] += 360.0 - #-- Amplitude Error + # Amplitude Error comp1 = out.error[:,:,j]*out.data[:,:,j]/amp.data comp2 = out.error[:,:,j+1]*out.data[:,:,j+1]/amp.data amp.error = np.sqrt(comp1**2 + comp2**2) - #-- Phase Error (degrees) + # Phase Error (degrees) comp1 = out.error[:,:,j]*out.data[:,:,j+1]/(amp.data**2) comp2 = out.error[:,:,j+1]*out.data[:,:,j]/(amp.data**2) ph.error = (180.0/np.pi)*np.sqrt(comp1**2 + comp2**2) - #-- output file names for amplitude, phase and errors + # output file names for amplitude, phase and errors f3 = output_format.format(FILE_PREFIX,unit_list[UNITS-1],LMAX, order_str,gw_str,ds_str,flag,'',START,END,suffix) f4 = output_format.format(FILE_PREFIX,unit_list[UNITS-1],LMAX, order_str,gw_str,ds_str,flag,'_PHASE',START,END,suffix) - #-- output spatial error file name + # output spatial error file name f5 = output_format.format(FILE_PREFIX,unit_list[UNITS-1],LMAX, order_str,gw_str,ds_str,flag,'_ERROR',START,END,suffix) f6 = output_format.format(FILE_PREFIX,unit_list[UNITS-1],LMAX, order_str,gw_str,ds_str,flag,'_PHASE_ERROR',START,END,suffix) - #-- full attributes + # full attributes AMP_UNITS = unit_list[UNITS-1] PH_UNITS = 'degrees' LONGNAME = unit_name[UNITS-1] AMP_TITLE =
'GRACE/GRACE-FO_Spatial_Data_{0}'.format(amp_title[flag]) PH_TITLE = 'GRACE/GRACE-FO_Spatial_Data_{0}'.format(ph_title[flag]) - #-- Output seasonal amplitude and phase to files + # Output seasonal amplitude and phase to files output_data(amp, FILENAME=os.path.join(OUTPUT_DIRECTORY,f3), DATAFORM=DATAFORM, UNITS=AMP_UNITS, LONGNAME=LONGNAME, TITLE=AMP_TITLE, VERBOSE=VERBOSE, MODE=MODE) output_data(ph, FILENAME=os.path.join(OUTPUT_DIRECTORY,f4), DATAFORM=DATAFORM, UNITS=PH_UNITS, LONGNAME='Phase', TITLE=PH_TITLE, VERBOSE=VERBOSE, MODE=MODE) - #-- Output seasonal amplitude and phase error to files + # Output seasonal amplitude and phase error to files output_data(amp, FILENAME=os.path.join(OUTPUT_DIRECTORY,f5), DATAFORM=DATAFORM, UNITS=AMP_UNITS, LONGNAME=LONGNAME, TITLE=AMP_TITLE, KEY='error', VERBOSE=VERBOSE, MODE=MODE) output_data(ph, FILENAME=os.path.join(OUTPUT_DIRECTORY,f6), DATAFORM=DATAFORM, UNITS=PH_UNITS, LONGNAME='Phase', TITLE=PH_TITLE, KEY='error', VERBOSE=VERBOSE, MODE=MODE) - #-- add output files to list object + # add output files to list object output_files.append(os.path.join(OUTPUT_DIRECTORY,f3)) output_files.append(os.path.join(OUTPUT_DIRECTORY,f4)) output_files.append(os.path.join(OUTPUT_DIRECTORY,f5)) output_files.append(os.path.join(OUTPUT_DIRECTORY,f6)) - #-- Output fit significance + # Output fit significance signif_longname = {'SSE':'Sum of Squares Error', 'AIC':'Akaike information criterion', 'BIC':'Bayesian information criterion', 'R2Adj':'Adjusted Coefficient of Determination'} - #-- for each fit significance term + # for each fit significance term for key,fs in FS.items(): - #-- output file names for fit significance + # output file names for fit significance signif_str = '{0}_'.format(key) f7 = output_format.format(FILE_PREFIX,unit_list[UNITS-1],LMAX, order_str,gw_str,ds_str,signif_str,coef_str[ORDER],START,END,suffix) - #-- full attributes + # full attributes LONGNAME = signif_longname[key] - #-- output fit significance to file + # output fit significance to file output_data(fs, FILENAME=os.path.join(OUTPUT_DIRECTORY,f7), DATAFORM=DATAFORM, UNITS=key, LONGNAME=LONGNAME, TITLE=nu, VERBOSE=VERBOSE, MODE=MODE) - #-- add output files to list object + # add output files to list object output_files.append(os.path.join(OUTPUT_DIRECTORY,f7)) - #-- return the list of output files + # return the list of output files return output_files -#-- PURPOSE: wrapper function for outputting data to file +# PURPOSE: wrapper function for outputting data to file def output_data(data, FILENAME=None, KEY='data', DATAFORM=None, UNITS=None, LONGNAME=None, TITLE=None, VERBOSE=0, MODE=0o775): output = data.copy() setattr(output,'data',getattr(data,KEY)) if (DATAFORM == 'ascii'): - #-- ascii (.txt) + # ascii (.txt) output.to_ascii(FILENAME,date=False,verbose=VERBOSE) elif (DATAFORM == 'netCDF4'): - #-- netcdf (.nc) + # netcdf (.nc) output.to_netCDF4(FILENAME,date=False,verbose=VERBOSE, units=UNITS,longname=LONGNAME,title=TITLE) elif (DATAFORM == 'HDF5'): - #-- HDF5 (.H5) + # HDF5 (.H5) output.to_HDF5(FILENAME,date=False,verbose=VERBOSE, units=UNITS,longname=LONGNAME,title=TITLE) - #-- change the permissions mode of the output file + # change the permissions mode of the output file os.chmod(FILENAME, MODE) -#-- PURPOSE: print a file log for the GRACE/GRACE-FO regression +# PURPOSE: print a file log for the GRACE/GRACE-FO regression def output_log_file(arguments,output_files): - #-- format: GRACE_processing_run_2002-04-01_PID-70335.log + # format: GRACE_processing_run_2002-04-01_PID-70335.log 
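utilities.create_unique_file, called just below, is assumed here to open the first unused variant of the requested filename, roughly as in this sketch (an illustration of the idea, not the library's actual code):

import os

def create_unique_file(filename):
    # append an incrementing counter to the stem until the name is unused
    stem, ext = os.path.splitext(filename)
    counter = 1
    while True:
        try:
            # mode 'x' raises FileExistsError if the file already exists
            return open(filename, mode='x')
        except FileExistsError:
            filename = f'{stem}_{counter:d}{ext}'
            counter += 1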
args = (time.strftime('%Y-%m-%d',time.localtime()), os.getpid()) LOGFILE = 'GRACE_processing_run_{0}_PID-{1:d}.log'.format(*args) - #-- create a unique log and open the log file + # create a unique log and open the log file DIRECTORY = os.path.expanduser(arguments.output_directory) fid = utilities.create_unique_file(os.path.join(DIRECTORY,LOGFILE)) logging.basicConfig(stream=fid, level=logging.INFO) - #-- print argument values sorted alphabetically + # print argument values sorted alphabetically logging.info('ARGUMENTS:') for arg, value in sorted(vars(arguments).items()): logging.info('{0}: {1}'.format(arg, value)) - #-- print output files + # print output files logging.info('\n\nOUTPUT FILES:') for f in output_files: logging.info('{0}'.format(f)) - #-- close the log file + # close the log file fid.close() -#-- PURPOSE: print a error file log for the GRACE/GRACE-FO regression +# PURPOSE: print an error file log for the GRACE/GRACE-FO regression def output_error_log_file(arguments): - #-- format: GRACE_processing_failed_run_2002-04-01_PID-70335.log + # format: GRACE_processing_failed_run_2002-04-01_PID-70335.log args = (time.strftime('%Y-%m-%d',time.localtime()), os.getpid()) LOGFILE = 'GRACE_processing_failed_run_{0}_PID-{1:d}.log'.format(*args) - #-- create a unique log and open the log file + # create a unique log and open the log file DIRECTORY = os.path.expanduser(arguments.output_directory) fid = utilities.create_unique_file(os.path.join(DIRECTORY,LOGFILE)) logging.basicConfig(stream=fid, level=logging.INFO) - #-- print argument values sorted alphabetically + # print argument values sorted alphabetically logging.info('ARGUMENTS:') for arg, value in sorted(vars(arguments).items()): logging.info('{0}: {1}'.format(arg, value)) - #-- print traceback error + # print traceback error logging.info('\n\nTRACEBACK ERROR:') traceback.print_exc(file=fid) - #-- close the log file + # close the log file fid.close() -#-- PURPOSE: create argument parser +# PURPOSE: create argument parser def arguments(): parser = argparse.ArgumentParser( description="""Reads in GRACE/GRACE-FO spatial files and calculates the @@ -449,7 +449,7 @@ def arguments(): fromfile_prefix_chars="@" ) parser.convert_arg_line_to_args = utilities.convert_arg_line_to_args - #-- command line parameters + # command line parameters parser.add_argument('--output-directory','-O', type=lambda p: os.path.abspath(os.path.expanduser(p)), default=os.getcwd(), @@ -457,14 +457,14 @@ parser.add_argument('--file-prefix','-P', type=str, help='Prefix string for input and output files') - #-- maximum spherical harmonic degree and order + # maximum spherical harmonic degree and order parser.add_argument('--lmax','-l', type=int, default=60, help='Maximum spherical harmonic degree') parser.add_argument('--mmax','-m', type=int, default=None, help='Maximum spherical harmonic order') - #-- start and end GRACE/GRACE-FO months + # start and end GRACE/GRACE-FO months parser.add_argument('--start','-S', type=int, default=4, help='Starting GRACE/GRACE-FO month for time series regression') @@ -476,19 +476,19 @@ parser.add_argument('--missing','-N', metavar='MISSING', type=int, nargs='+', default=MISSING, help='Missing GRACE/GRACE-FO months') - #-- Gaussian smoothing radius (km) + # Gaussian smoothing radius (km) parser.add_argument('--radius','-R', type=float, default=0, help='Gaussian smoothing radius (km)') - #-- Use a decorrelation (destriping) filter + # Use a decorrelation (destriping) filter
parser.add_argument('--destripe','-d', default=False, action='store_true', help='Use decorrelation (destriping) filter') - #-- output units + # output units parser.add_argument('--units','-U', type=int, default=1, choices=[1,2,3,4,5], help='Output units') - #-- output grid parameters + # output grid parameters parser.add_argument('--spacing', type=float, nargs='+', default=[0.5,0.5], metavar=('dlon','dlat'), help='Spatial resolution of output data') @@ -499,55 +499,55 @@ def arguments(): parser.add_argument('--bounds', type=float, nargs=4, metavar=('lon_min','lon_max','lat_min','lat_max'), help='Bounding box for non-global grid') - #-- input data format (ascii, netCDF4, HDF5) + # input data format (ascii, netCDF4, HDF5) parser.add_argument('--format','-F', type=str, default='netCDF4', choices=['ascii','netCDF4','HDF5'], help='Input/output data format') parser.add_argument('--redistribute-removed', default=False, action='store_true', help='Redistribute removed mass fields over the ocean') - #-- regression parameters - #-- 0: mean - #-- 1: trend - #-- 2: acceleration + # regression parameters + # 0: mean + # 1: trend + # 2: acceleration parser.add_argument('--order', type=int, default=2, help='Regression fit polynomial order') - #-- regression fit cyclical terms + # regression fit cyclical terms parser.add_argument('--cycles', type=float, default=[0.5,1.0,161.0/365.25], nargs='+', help='Regression fit cyclical terms') - #-- Output log file for each job in forms - #-- GRACE_processing_run_2002-04-01_PID-00000.log - #-- GRACE_processing_failed_run_2002-04-01_PID-00000.log + # Output log file for each job in forms + # GRACE_processing_run_2002-04-01_PID-00000.log + # GRACE_processing_failed_run_2002-04-01_PID-00000.log parser.add_argument('--log', default=False, action='store_true', help='Output log file for each job') - #-- print information about each input and output file + # print information about each input and output file parser.add_argument('--verbose','-V', action='count', default=0, help='Verbose output of run') - #-- permissions mode of the local directories and files (number in octal) + # permissions mode of the local directories and files (number in octal) parser.add_argument('--mode','-M', type=lambda x: int(x,base=8), default=0o775, help='Permissions mode of output files') - #-- return the parser + # return the parser return parser -#-- This is the main part of the program that calls the individual functions +# This is the main part of the program that calls the individual functions def main(): - #-- Read the system arguments listed after the program + # Read the system arguments listed after the program parser = arguments() args,_ = parser.parse_known_args() - #-- create logger + # create logger loglevels = [logging.CRITICAL,logging.INFO,logging.DEBUG] logging.basicConfig(level=loglevels[args.verbose]) - #-- try to run the analysis with listed parameters + # try to run the analysis with listed parameters try: info(args) - #-- run regress_grace_maps algorithm with parameters + # run regress_grace_maps algorithm with parameters output_files = regress_grace_maps( args.lmax, args.radius, @@ -569,17 +569,17 @@ def main(): VERBOSE=args.verbose, MODE=args.mode) except Exception as e: - #-- if there has been an error exception - #-- print the type, value, and stack trace of the - #-- current exception being handled + # if there has been an error exception + # print the type, value, and stack trace of the + # current exception being handled logging.critical(f'process id {os.getpid():d} 
failed') logging.error(traceback.format_exc()) - if args.log:#-- write failed job completion log file + if args.log:# write failed job completion log file output_error_log_file(args) else: - if args.log:#-- write successful job completion log file + if args.log:# write successful job completion log file output_log_file(args,output_files) -#-- run main program +# run main program if __name__ == '__main__': main() diff --git a/scripts/run_grace_date.py b/scripts/run_grace_date.py index f72ca1d3..ee083c6b 100755 --- a/scripts/run_grace_date.py +++ b/scripts/run_grace_date.py @@ -76,89 +76,89 @@ from gravity_toolkit.grace_months_index import grace_months_index def run_grace_date(base_dir, PROC, DREL, VERBOSE=0, MODE=0o775): - #-- create logger + # create logger loglevels = [logging.CRITICAL,logging.INFO,logging.DEBUG] logging.basicConfig(level=loglevels[VERBOSE]) - #-- allocate python dictionaries for each processing center + # allocate python dictionaries for each processing center DSET = {} VALID = {} - #-- CSR RL04/5/6 at LMAX 60 + # CSR RL04/5/6 at LMAX 60 DSET['CSR'] = {'RL04':['GAC', 'GAD', 'GSM'], 'RL05':['GAC', 'GAD', 'GSM'], 'RL06':['GAC', 'GAD', 'GSM']} VALID['CSR'] = ['RL04','RL05','RL06'] - #-- GFZ RL04/5 at LMAX 90 - #-- GFZ RL06 at LMAX 60 + # GFZ RL04/5 at LMAX 90 + # GFZ RL06 at LMAX 60 DSET['GFZ'] = {'RL04':['GAA', 'GAB', 'GAC', 'GAD', 'GSM'], 'RL05':['GAA', 'GAB', 'GAC', 'GAD', 'GSM'], 'RL06':['GAA', 'GAB', 'GAC', 'GAD', 'GSM']} VALID['GFZ'] = ['RL04','RL05','RL06'] - #-- JPL RL04/5/6 at LMAX 60 + # JPL RL04/5/6 at LMAX 60 DSET['JPL'] = {'RL04':['GAA', 'GAB', 'GAC', 'GAD', 'GSM'], 'RL05':['GAA', 'GAB', 'GAC', 'GAD', 'GSM'], 'RL06':['GAA', 'GAB', 'GAC', 'GAD', 'GSM']} VALID['JPL'] = ['RL04','RL05','RL06'] - #-- for each processing center + # for each processing center for p in PROC: - #-- for each valid dataset release from the processing center + # for each valid dataset release from the processing center drel = [r for r in DREL if r in VALID[p]] for r in drel: - #-- for number of data products + # for number of data products for d in DSET[p][r]: logging.info('GRACE Date Program: {0} {1} {2}'.format(p,r,d)) - #-- run program for processing center, data release and product + # run program for processing center, data release and product grace_date(base_dir,PROC=p,DREL=r,DSET=d,OUTPUT=True,MODE=MODE) - #-- run grace months program for data releases + # run grace months program for data releases logging.info('GRACE Months Program') grace_months_index(base_dir, DREL=DREL, MODE=MODE) -#-- PURPOSE: create argument parser +# PURPOSE: create argument parser def arguments(): parser = argparse.ArgumentParser( description="""Wrapper program for running GRACE date and months programs """ ) - #-- command line parameters - #-- working data directory + # command line parameters + # working data directory parser.add_argument('--directory','-D', type=lambda p: os.path.abspath(os.path.expanduser(p)), default=os.getcwd(), help='Working data directory') - #-- Data processing center or satellite mission + # Data processing center or satellite mission parser.add_argument('--center','-c', metavar='PROC', type=str, nargs='+', default=['CSR','GFZ','JPL'], choices=['CSR','GFZ','JPL'], help='GRACE/GRACE-FO Processing Center') - #-- GRACE/GRACE-FO data release + # GRACE/GRACE-FO data release parser.add_argument('--release','-r', metavar='DREL', type=str, nargs='+', default=['RL06','v02.4'], help='GRACE/GRACE-FO Data Release') - #-- print information about each input and output file + # print information 
about each input and output file parser.add_argument('--verbose','-V', action='count', default=0, help='Verbose output of run') - #-- permissions mode of the local directories and files (number in octal) + # permissions mode of the local directories and files (number in octal) parser.add_argument('--mode','-M', type=lambda x: int(x,base=8), default=0o775, help='Permissions mode of output files') - #-- return the parser + # return the parser return parser -#-- This is the main part of the program that calls the individual functions +# This is the main part of the program that calls the individual functions def main(): - #-- Read the system arguments listed after the program + # Read the system arguments listed after the program parser = arguments() args,_ = parser.parse_known_args() - #-- run GRACE preliminary date program + # run GRACE preliminary date program run_grace_date(args.directory, args.center, args.release, VERBOSE=args.verbose, MODE=args.mode) -#-- run main program +# run main program if __name__ == '__main__': main() diff --git a/scripts/run_sea_level_equation.py b/scripts/run_sea_level_equation.py index d0b800f6..f7b2ac5a 100644 --- a/scripts/run_sea_level_equation.py +++ b/scripts/run_sea_level_equation.py @@ -116,7 +116,7 @@ from gravity_toolkit.harmonics import harmonics from gravity_toolkit.spatial import spatial -#-- PURPOSE: keep track of threads +# PURPOSE: keep track of threads def info(args): logging.info(os.path.basename(sys.argv[0])) logging.info(args) @@ -125,7 +125,7 @@ def info(args): logging.info(f'parent process: {os.getppid():d}') logging.info(f'process id: {os.getpid():d}') -#-- PURPOSE: Computes Sea Level Fingerprints including polar motion feedback +# PURPOSE: Computes Sea Level Fingerprints including polar motion feedback def run_sea_level_equation(INPUT_FILE, OUTPUT_FILE, LANDMASK=None, LMAX=0, @@ -139,86 +139,86 @@ def run_sea_level_equation(INPUT_FILE, OUTPUT_FILE, DATE=False, MODE=0o775): - #-- Land-Sea Mask with Antarctica from Rignot (2017) and Greenland from GEUS - #-- 0=Ocean, 1=Land, 2=Lake, 3=Small Island, 4=Ice Shelf - #-- Open the land-sea NetCDF file for reading + # Land-Sea Mask with Antarctica from Rignot (2017) and Greenland from GEUS + # 0=Ocean, 1=Land, 2=Lake, 3=Small Island, 4=Ice Shelf + # Open the land-sea NetCDF file for reading landsea = spatial().from_netCDF4(LANDMASK, date=False, varname='LSMASK') - #-- create land function + # create land function nth,nphi = landsea.shape land_function = np.zeros((nth,nphi),dtype=np.float64) - #-- calculate colatitude in radians + # calculate colatitude in radians th = (90.0 - landsea.lat)*np.pi/180.0 - #-- extract land function from file - #-- combine land and island levels for land function + # extract land function from file + # combine land and island levels for land function indx,indy = np.nonzero((landsea.data >= 1) & (landsea.data <= 3)) land_function[indx,indy] = 1.0 - #-- read load love numbers + # read load love numbers hl,kl,ll = load_love_numbers(LMAX, LOVE_NUMBERS=LOVE_NUMBERS, REFERENCE=REFERENCE) - #-- read spherical harmonic coefficients of the load from input DATAFORM + # read spherical harmonic coefficients of the load from input DATAFORM if (DATAFORM == 'ascii'): - #-- read input ascii file (.txt) + # read input ascii file (.txt) load_Ylms = harmonics().from_ascii(INPUT_FILE, date=DATE) elif (DATAFORM == 'netCDF4'): - #-- read input netCDF4 file (.nc) + # read input netCDF4 file (.nc) load_Ylms = harmonics().from_netCDF4(INPUT_FILE, date=DATE) elif (DATAFORM == 'HDF5'): - #-- 
read input HDF5 file (.H5) + # read input HDF5 file (.H5) load_Ylms = harmonics().from_HDF5(INPUT_FILE, date=DATE) - #-- truncate harmonics to degree and order LMAX + # truncate harmonics to degree and order LMAX load_Ylms.truncate(lmax=LMAX, mmax=LMAX) - #-- expand dimensions to iterate over slices + # expand dimensions to iterate over slices load_Ylms.expand_dims() l1,m1,nt = load_Ylms.shape - #-- calculate the legendre functions using Holmes and Featherstone relation + # calculate the legendre functions using Holmes and Featherstone relation PLM, dPLM = plm_holmes(LMAX, np.cos(th)) - #-- allocate for pseudo-spectral sea level equation solver + # allocate for pseudo-spectral sea level equation solver sea_level = spatial(nlon=nphi, nlat=nth) sea_level.data = np.zeros((nth,nphi,nt)) sea_level.mask = np.zeros((nth,nphi,nt), dtype=bool) for i in range(nt): - #-- print iteration if running a series + # print iteration if running a series if (nt > 1): logging.info('Index {0:d} of {1:d}'.format(i+1,nt)) - #-- subset harmonics to indice + # subset harmonics to index Ylms = load_Ylms.index(i, date=DATE) - #-- run pseudo-spectral sea level equation solver + # run pseudo-spectral sea level equation solver sea_level.data[:,:,i] = sea_level_equation(Ylms.clm, Ylms.slm, landsea.lon, landsea.lat, land_function.T, LMAX=LMAX, LOVE=(hl,kl,ll), BODY_TIDE_LOVE=BODY_TIDE_LOVE, FLUID_LOVE=FLUID_LOVE, POLAR=POLAR, PLM=PLM, ITERATIONS=ITERATIONS, FILL_VALUE=0).T sea_level.mask[:,:,i] = (sea_level.data[:,:,i] == 0) - #-- copy dimensions + # copy dimensions sea_level.lon = np.copy(landsea.lon) sea_level.lat = np.copy(landsea.lat) sea_level.time = np.copy(load_Ylms.time) if DATE else None - #-- remove singleton dimensions if necessary + # remove singleton dimensions if necessary sea_level.squeeze() - #-- save as output DATAFORM + # save as output DATAFORM if (DATAFORM == 'ascii'): - #-- ascii (.txt) - #-- only print ocean points + # ascii (.txt) + # only print ocean points sea_level.fill_value = 0 sea_level.update_mask() sea_level.to_ascii(OUTPUT_FILE, date=DATE) elif (DATAFORM == 'netCDF4'): - #-- netCDF4 (.nc) + # netCDF4 (.nc) sea_level.to_netCDF4(OUTPUT_FILE, date=DATE, units='centimeters', longname='Equivalent_Water_Thickness', title='Sea_Level_Fingerprint') elif (DATAFORM == 'HDF5'): - #-- HDF5 (.H5) + # HDF5 (.H5) sea_level.to_HDF5(OUTPUT_FILE, date=DATE, units='centimeters', longname='Equivalent_Water_Thickness', title='Sea_Level_Fingerprint') - #-- set the permissions mode of the output file + # set the permissions mode of the output file os.chmod(OUTPUT_FILE, MODE) -#-- PURPOSE: create argument parser +# PURPOSE: create argument parser def arguments(): parser = argparse.ArgumentParser( description="""Solves the sea level equation with the option of @@ -227,91 +227,91 @@ def arguments(): fromfile_prefix_chars="@" ) parser.convert_arg_line_to_args = utilities.convert_arg_line_to_args - #-- command line parameters - #-- input and output file + # command line parameters + # input and output file parser.add_argument('infile', type=lambda p: os.path.abspath(os.path.expanduser(p)), nargs='?', help='Input load file') parser.add_argument('outfile', type=lambda p: os.path.abspath(os.path.expanduser(p)), nargs='?', help='Output sea level fingerprints file') - #-- land mask file + # land mask file lsmask = utilities.get_data_path(['data','landsea_hd.nc']) parser.add_argument('--mask', type=lambda p: os.path.abspath(os.path.expanduser(p)), default=lsmask, help='Land-sea mask for calculating sea level fingerprints')
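A minimal command-line sketch of this parser in use (the load and fingerprint file names are hypothetical placeholders; every other option falls back to the defaults defined in this function, e.g. a maximum degree of 240 and 6 solver iterations):

    python run_sea_level_equation.py --format netCDF4 --date --verbose \
        load_harmonics.nc sea_level_fingerprint.nc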
- #-- maximum spherical harmonic degree and order + # maximum spherical harmonic degree and order parser.add_argument('--lmax','-l', type=int, default=240, help='Maximum spherical harmonic degree') - #-- different treatments of the load Love numbers - #-- 0: Han and Wahr (1995) values from PREM - #-- 1: Gegout (2005) values from PREM - #-- 2: Wang et al. (2012) values from PREM + # different treatments of the load Love numbers + # 0: Han and Wahr (1995) values from PREM + # 1: Gegout (2005) values from PREM + # 2: Wang et al. (2012) values from PREM parser.add_argument('--love','-n', type=int, default=0, choices=[0,1,2], help='Treatment of the Load Love numbers') - #-- different treatments of the body tide Love numbers of degree 2 - #-- 0: Wahr (1981) and Wahr (1985) values from PREM - #-- 1: Farrell (1972) values from Gutenberg-Bullen oceanic mantle model + # different treatments of the body tide Love numbers of degree 2 + # 0: Wahr (1981) and Wahr (1985) values from PREM + # 1: Farrell (1972) values from Gutenberg-Bullen oceanic mantle model parser.add_argument('--body','-b', type=int, default=0, choices=[0,1], help='Treatment of the body tide Love number') - #-- different treatments of the fluid Love number of gravitational potential - #-- 0: Han and Wahr (1989) fluid love number - #-- 1: Munk and MacDonald (1960) secular love number - #-- 2: Munk and MacDonald (1960) fluid love number - #-- 3: Lambeck (1980) fluid love number + # different treatments of the fluid Love number of gravitational potential + # 0: Han and Wahr (1989) fluid love number + # 1: Munk and MacDonald (1960) secular love number + # 2: Munk and MacDonald (1960) fluid love number + # 3: Lambeck (1980) fluid love number parser.add_argument('--fluid','-f', type=int, default=0, choices=[0,1,2,3], help='Treatment of the fluid Love number') - #-- maximum number of iterations for the solver - #-- 0th iteration: distribute the water in a uniform layer (barystatic) + # maximum number of iterations for the solver + # 0th iteration: distribute the water in a uniform layer (barystatic) parser.add_argument('--iterations','-I', type=int, default=6, help='Maximum number of iterations') - #-- option for polar feedback + # option for polar feedback parser.add_argument('--polar-feedback', default=False, action='store_true', help='Include effects of polar feedback') - #-- option for setting reference frame for load love numbers - #-- reference frame options (CF, CM, CE) + # option for setting reference frame for load love numbers + # reference frame options (CF, CM, CE) parser.add_argument('--reference', type=str.upper, default='CF', choices=['CF','CM','CE'], help='Reference frame for load Love numbers') - #-- input and output data format (ascii, netCDF4, HDF5) + # input and output data format (ascii, netCDF4, HDF5) parser.add_argument('--format','-F', type=str, default='netCDF4', choices=['ascii','netCDF4','HDF5'], help='Input and output data format') - #-- Input and output files have date information + # Input and output files have date information parser.add_argument('--date','-D', default=False, action='store_true', help='Input and output files have date information') - #-- print information about processing run + # print information about processing run parser.add_argument('--verbose','-V', action='count', default=0, help='Verbose output of run') - #-- permissions mode of the output files (octal) + # permissions mode of the output files (octal) parser.add_argument('--mode','-M', type=lambda x: int(x,base=8), default=0o775, 
help='Permissions mode of output files') - #-- return the parser + # return the parser return parser -#-- This is the main part of the program that calls the individual functions +# This is the main part of the program that calls the individual functions def main(): - #-- Read the system arguments listed after the program + # Read the system arguments listed after the program parser = arguments() args,_ = parser.parse_known_args() - #-- create logger + # create logger loglevels = [logging.CRITICAL,logging.INFO,logging.DEBUG] logging.basicConfig(level=loglevels[args.verbose]) - #-- try to run the analysis with listed parameters + # try to run the analysis with listed parameters try: info(args) - #-- run sea level fingerprints program with parameters + # run sea level fingerprints program with parameters run_sea_level_equation(args.infile, args.outfile, LANDMASK=args.mask, LMAX=args.lmax, @@ -325,12 +325,12 @@ def main(): DATE=args.date, MODE=args.mode) except Exception as e: - #-- if there has been an error exception - #-- print the type, value, and stack trace of the - #-- current exception being handled + # if there has been an error exception + # print the type, value, and stack trace of the + # current exception being handled logging.critical(f'process id {os.getpid():d} failed') logging.error(traceback.format_exc()) -#-- run main program +# run main program if __name__ == '__main__': main() diff --git a/scripts/scale_grace_maps.py b/scripts/scale_grace_maps.py index 98c61620..656f7316 100644 --- a/scripts/scale_grace_maps.py +++ b/scripts/scale_grace_maps.py @@ -197,7 +197,7 @@ from gravity_toolkit.harmonic_summation import harmonic_summation from gravity_toolkit.tssmooth import tssmooth -#-- PURPOSE: keep track of threads +# PURPOSE: keep track of threads def info(args): logging.info(os.path.basename(sys.argv[0])) logging.info(args) @@ -207,9 +207,9 @@ def info(args): logging.info(f'process id: {os.getpid():d}') -#-- PURPOSE: import GRACE/GRACE-FO files for a given months range -#-- Calculates monthly scaled spatial maps from GRACE/GRACE-FO -#-- spherical harmonic coefficients +# PURPOSE: import GRACE/GRACE-FO files for a given months range +# Calculates monthly scaled spatial maps from GRACE/GRACE-FO +# spherical harmonic coefficients def scale_grace_maps(base_dir, PROC, DREL, DSET, LMAX, RAD, START=None, END=None, @@ -249,58 +249,58 @@ def scale_grace_maps(base_dir, PROC, DREL, DSET, LMAX, RAD, VERBOSE=0, MODE=0o775): - #-- recursively create output Directory if not currently existing + # recursively create output Directory if not currently existing if not os.access(OUTPUT_DIRECTORY, os.F_OK): os.makedirs(OUTPUT_DIRECTORY, mode=MODE, exist_ok=True) - #-- list object of output files for file logs (full path) + # list object of output files for file logs (full path) output_files = [] - #-- file information + # file information suffix = dict(ascii='txt', netCDF4='nc', HDF5='H5') - #-- output file format + # output file format file_format = '{0}{1}{2}_L{3:d}{4}{5}{6}_{7:03d}-{8:03d}.{9}' - #-- read arrays of kl, hl, and ll Love Numbers + # read arrays of kl, hl, and ll Love Numbers hl,kl,ll = load_love_numbers(LMAX, LOVE_NUMBERS=LOVE_NUMBERS, REFERENCE=REFERENCE) - #-- atmospheric ECMWF "jump" flag (if ATM) + # atmospheric ECMWF "jump" flag (if ATM) atm_str = '_wATM' if ATM else '' - #-- output string for both LMAX==MMAX and LMAX != MMAX cases + # output string for both LMAX==MMAX and LMAX != MMAX cases MMAX = np.copy(LMAX) if not MMAX else MMAX order_str = f'M{MMAX:d}' if (MMAX != 
LMAX) else '' - #-- output spatial units + # output spatial units unit_str = 'cmwe' unit_name = 'Equivalent Water Thickness' - #-- invalid value + # invalid value fill_value = -9999.0 - #-- Calculating the Gaussian smoothing for radius RAD + # Calculating the Gaussian smoothing for radius RAD if (RAD != 0): wt = 2.0*np.pi*gauss_weights(RAD,LMAX) gw_str = f'_r{RAD:0.0f}km' else: - #-- else = 1 + # else = 1 wt = np.ones((LMAX+1)) gw_str = '' - #-- Read Ocean function and convert to Ylms for redistribution + # Read Ocean function and convert to Ylms for redistribution if REDISTRIBUTE_REMOVED: - #-- read Land-Sea Mask and convert to spherical harmonics + # read Land-Sea Mask and convert to spherical harmonics ocean_Ylms = ocean_stokes(LANDMASK, LMAX, MMAX=MMAX, LOVE=(hl,kl,ll)) - #-- Grid spacing + # Grid spacing dlon,dlat = (DDEG[0],DDEG[0]) if (len(DDEG) == 1) else (DDEG[0],DDEG[1]) - #-- Grid dimensions - if (INTERVAL == 1):#-- (0:360, 90:-90) + # Grid dimensions + if (INTERVAL == 1):  # (0:360, 90:-90) nlon = np.int64((360.0/dlon)+1.0) nlat = np.int64((180.0/dlat)+1.0) - elif (INTERVAL == 2):#-- degree spacing/2 + elif (INTERVAL == 2):  # degree spacing/2 nlon = np.int64((360.0/dlon)) nlat = np.int64((180.0/dlat)) - #-- read data for input scale files (ascii, netCDF4, HDF5) + # read data for input scale files (ascii, netCDF4, HDF5) if (DATAFORM == 'ascii'): kfactor = spatial(spacing=[dlon,dlat],nlat=nlat,nlon=nlon).from_ascii( SCALE_FILE,date=False) @@ -316,156 +316,156 @@ def scale_grace_maps(base_dir, PROC, DREL, DSET, LMAX, RAD, kfactor = spatial().from_HDF5(SCALE_FILE,date=False) k_error = spatial().from_HDF5(ERROR_FILE,date=False) k_power = spatial().from_HDF5(POWER_FILE,date=False) - #-- input data shape + # input data shape nlat,nlon = kfactor.shape - #-- input GRACE/GRACE-FO spherical harmonic datafiles for date range - #-- replacing low-degree harmonics with SLR values if specified - #-- include degree 1 (geocenter) harmonics if specified - #-- correcting for Pole-Tide and Atmospheric Jumps if specified + # input GRACE/GRACE-FO spherical harmonic datafiles for date range + # replacing low-degree harmonics with SLR values if specified + # include degree 1 (geocenter) harmonics if specified + # correcting for Pole-Tide and Atmospheric Jumps if specified Ylms = grace_input_months(base_dir, PROC, DREL, DSET, LMAX, START, END, MISSING, SLR_C20, DEG1, MMAX=MMAX, SLR_21=SLR_21, SLR_22=SLR_22, SLR_C30=SLR_C30, SLR_C40=SLR_C40, SLR_C50=SLR_C50, DEG1_FILE=DEG1_FILE, MODEL_DEG1=MODEL_DEG1, ATM=ATM, POLE_TIDE=POLE_TIDE) - #-- create harmonics object from GRACE/GRACE-FO data + # create harmonics object from GRACE/GRACE-FO data GRACE_Ylms = harmonics().from_dict(Ylms) GRACE_Ylms.directory = Ylms['directory'] - #-- use a mean file for the static field to remove + # use a mean file for the static field to remove if MEAN_FILE: - #-- read data form for input mean file (ascii, netCDF4, HDF5, gfc) + # read data format for input mean file (ascii, netCDF4, HDF5, gfc) mean_Ylms = harmonics().from_file(MEAN_FILE,format=MEANFORM,date=False) - #-- remove the input mean + # remove the input mean GRACE_Ylms.subtract(mean_Ylms) else: GRACE_Ylms.mean(apply=True) - #-- date information of GRACE/GRACE-FO coefficients + # date information of GRACE/GRACE-FO coefficients nfiles = len(GRACE_Ylms.time) - #-- filter GRACE/GRACE-FO coefficients + # filter GRACE/GRACE-FO coefficients if DESTRIPE: - #-- destriping GRACE/GRACE-FO coefficients + # destriping GRACE/GRACE-FO coefficients ds_str = '_FL' GRACE_Ylms =
GRACE_Ylms.destripe() else: - #-- using standard GRACE/GRACE-FO harmonics + # using standard GRACE/GRACE-FO harmonics ds_str = '' - #-- input GIA spherical harmonic datafiles + # input GIA spherical harmonic datafiles GIA_Ylms_rate = read_GIA_model(GIA_FILE,GIA=GIA,LMAX=LMAX,MMAX=MMAX) gia_str = '_{0}'.format(GIA_Ylms_rate['title']) if GIA else '' - #-- calculate the monthly mass change from GIA + # calculate the monthly mass change from GIA GIA_Ylms = GRACE_Ylms.zeros_like() GIA_Ylms.time[:] = np.copy(GRACE_Ylms.time) GIA_Ylms.month[:] = np.copy(GRACE_Ylms.month) - #-- monthly GIA calculated by gia_rate*time elapsed - #-- finding change in GIA each month + # monthly GIA calculated by gia_rate*time elapsed + # finding change in GIA each month for t in range(nfiles): GIA_Ylms.clm[:,:,t] = GIA_Ylms_rate['clm']*(GIA_Ylms.time[t]-2003.3) GIA_Ylms.slm[:,:,t] = GIA_Ylms_rate['slm']*(GIA_Ylms.time[t]-2003.3) - #-- default file prefix + # default file prefix if not FILE_PREFIX: fargs = (PROC,DREL,DSET,Ylms['title'],gia_str) FILE_PREFIX = '{0}_{1}_{2}{3}{4}_'.format(*fargs) - #-- input spherical harmonic datafiles to be removed from the GRACE data - #-- Remove sets of Ylms from the GRACE data before returning + # input spherical harmonic datafiles to be removed from the GRACE data + # Remove sets of Ylms from the GRACE data before returning remove_Ylms = GRACE_Ylms.zeros_like() remove_Ylms.time[:] = np.copy(GRACE_Ylms.time) remove_Ylms.month[:] = np.copy(GRACE_Ylms.month) if REMOVE_FILES: - #-- extend list if a single format was entered for all files + # extend list if a single format was entered for all files if len(REMOVE_FORMAT) < len(REMOVE_FILES): REMOVE_FORMAT = REMOVE_FORMAT*len(REMOVE_FILES) - #-- for each file to be removed + # for each file to be removed for REMOVE_FILE,REMOVEFORM in zip(REMOVE_FILES,REMOVE_FORMAT): if REMOVEFORM in ('ascii','netCDF4','HDF5'): - #-- ascii (.txt) - #-- netCDF4 (.nc) - #-- HDF5 (.H5) + # ascii (.txt) + # netCDF4 (.nc) + # HDF5 (.H5) Ylms = harmonics().from_file(REMOVE_FILE, format=REMOVEFORM) elif REMOVEFORM in ('index-ascii','index-netCDF4','index-HDF5'): - #-- read from index file + # read from index file _,removeform = REMOVEFORM.split('-') - #-- index containing files in data format + # index containing files in data format Ylms = harmonics().from_index(REMOVE_FILE, format=removeform) - #-- reduce to GRACE/GRACE-FO months and truncate to degree and order + # reduce to GRACE/GRACE-FO months and truncate to degree and order Ylms = Ylms.subset(GRACE_Ylms.month).truncate(lmax=LMAX,mmax=MMAX) - #-- distribute removed Ylms uniformly over the ocean + # distribute removed Ylms uniformly over the ocean if REDISTRIBUTE_REMOVED: - #-- calculate ratio between total removed mass and - #-- a uniformly distributed cm of water over the ocean + # calculate ratio between total removed mass and + # a uniformly distributed cm of water over the ocean ratio = Ylms.clm[0,0,:]/ocean_Ylms.clm[0,0] - #-- for each spherical harmonic - for m in range(0,MMAX+1):#-- MMAX+1 to include MMAX - for l in range(m,LMAX+1):#-- LMAX+1 to include LMAX - #-- remove the ratio*ocean Ylms from Ylms - #-- note: x -= y is equivalent to x = x - y + # for each spherical harmonic + for m in range(0,MMAX+1):  # MMAX+1 to include MMAX + for l in range(m,LMAX+1):  # LMAX+1 to include LMAX + # remove the ratio*ocean Ylms from Ylms + # note: x -= y is equivalent to x = x - y Ylms.clm[l,m,:] -= ratio*ocean_Ylms.clm[l,m] Ylms.slm[l,m,:] -= ratio*ocean_Ylms.slm[l,m] - #-- filter removed coefficients + # filter
removed coefficients if DESTRIPE: Ylms = Ylms.destripe() - #-- add data for month t and INDEX_FILE to the total - #-- remove_clm and remove_slm matrices - #-- redistributing the mass over the ocean if specified + # add data for month t and INDEX_FILE to the total + # remove_clm and remove_slm matrices + # redistributing the mass over the ocean if specified remove_Ylms.add(Ylms) - #-- calculating GRACE/GRACE-FO error (Wahr et al. 2006) - #-- output GRACE error file (for both LMAX==MMAX and LMAX != MMAX cases) + # calculating GRACE/GRACE-FO error (Wahr et al. 2006) + # output GRACE error file (for both LMAX==MMAX and LMAX != MMAX cases) args = (PROC,DREL,DSET,LMAX,order_str,ds_str,atm_str,GRACE_Ylms.month[0], GRACE_Ylms.month[-1], suffix[DATAFORM]) delta_format = '{0}_{1}_{2}_DELTA_CLM_L{3:d}{4}{5}{6}_{7:03d}-{8:03d}.{9}' DELTA_FILE = os.path.join(GRACE_Ylms.directory,delta_format.format(*args)) - #-- check full path of the GRACE directory for delta file - #-- if file was previously calculated: will read file - #-- else: will calculate the GRACE/GRACE-FO error + # check full path of the GRACE directory for delta file + # if file was previously calculated: will read file + # else: will calculate the GRACE/GRACE-FO error if not os.access(DELTA_FILE, os.F_OK): - #-- add output delta file to list object + # add output delta file to list object output_files.append(DELTA_FILE) - #-- Delta coefficients of GRACE time series (Error components) + # Delta coefficients of GRACE time series (Error components) delta_Ylms = harmonics(lmax=LMAX,mmax=MMAX) delta_Ylms.clm = np.zeros((LMAX+1,MMAX+1)) delta_Ylms.slm = np.zeros((LMAX+1,MMAX+1)) - #-- Smoothing Half-Width (CNES is a 10-day solution) - #-- All other solutions are monthly solutions (HFWTH for annual = 6) + # Smoothing Half-Width (CNES is a 10-day solution) + # All other solutions are monthly solutions (HFWTH for annual = 6) if ((PROC == 'CNES') and (DREL in ('RL01','RL02'))): HFWTH = 19 else: HFWTH = 6 - #-- Equal to the noise of the smoothed time-series - #-- for each spherical harmonic order - for m in range(0,MMAX+1):#-- MMAX+1 to include MMAX - #-- for each spherical harmonic degree - for l in range(m,LMAX+1):#-- LMAX+1 to include LMAX - #-- Delta coefficients of GRACE time series + # Equal to the noise of the smoothed time-series + # for each spherical harmonic order + for m in range(0,MMAX+1):  # MMAX+1 to include MMAX + # for each spherical harmonic degree + for l in range(m,LMAX+1):  # LMAX+1 to include LMAX + # Delta coefficients of GRACE time series for cs,csharm in enumerate(['clm','slm']): - #-- calculate GRACE Error (Noise of smoothed time-series) - #-- With Annual and Semi-Annual Terms + # calculate GRACE Error (Noise of smoothed time-series) + # With Annual and Semi-Annual Terms val1 = getattr(GRACE_Ylms, csharm) smth = tssmooth(GRACE_Ylms.time, val1[l,m,:], HFWTH=HFWTH) - #-- number of smoothed points + # number of smoothed points nsmth = len(smth['data']) tsmth = np.mean(smth['time']) - #-- GRACE delta Ylms - #-- variance of data-(smoothed+annual+semi) + # GRACE delta Ylms + # variance of data-(smoothed+annual+semi) val2 = getattr(delta_Ylms, csharm) val2[l,m] = np.sqrt(np.sum(smth['noise']**2)/nsmth) - #-- save GRACE/GRACE-FO delta harmonics to file + # save GRACE/GRACE-FO delta harmonics to file delta_Ylms.time = np.copy(tsmth) delta_Ylms.month = np.int64(nsmth) delta_Ylms.to_file(DELTA_FILE,format=DATAFORM) else: - #-- read GRACE/GRACE-FO delta harmonics from file + # read GRACE/GRACE-FO delta harmonics from file delta_Ylms =
harmonics().from_file(DELTA_FILE,format=DATAFORM) - #-- copy time and number of smoothed fields + # copy time and number of smoothed fields tsmth = np.squeeze(delta_Ylms.time) nsmth = np.int64(delta_Ylms.month) - #-- Output spatial data object + # Output spatial data object grid = spatial() grid.lon = np.copy(kfactor.lon) grid.lat = np.copy(kfactor.lat) @@ -474,96 +474,96 @@ def scale_grace_maps(base_dir, PROC, DREL, DSET, LMAX, RAD, grid.data = np.zeros((nlat,nlon,nfiles)) grid.mask = np.zeros((nlat,nlon,nfiles),dtype=bool) - #-- Computing plms for converting to spatial domain + # Computing plms for converting to spatial domain phi = grid.lon[np.newaxis,:]*np.pi/180.0 theta = (90.0-grid.lat)*np.pi/180.0 PLM, dPLM = plm_holmes(LMAX, np.cos(theta)) - #-- square of legendre polynomials truncated to order MMAX + # square of legendre polynomials truncated to order MMAX mm = np.arange(0,MMAX+1) PLM2 = PLM[:,mm,:]**2 - #-- dfactor is the degree dependent coefficients - #-- for converting to centimeters water equivalent (cmwe) + # dfactor is the degree dependent coefficients + # for converting to centimeters water equivalent (cmwe) dfactor = units(lmax=LMAX).harmonic(hl,kl,ll).cmwe - #-- converting harmonics to truncated, smoothed coefficients in units - #-- combining harmonics to calculate output spatial fields + # converting harmonics to truncated, smoothed coefficients in units + # combining harmonics to calculate output spatial fields for i,gm in enumerate(GRACE_Ylms.month): - #-- GRACE/GRACE-FO harmonics for time t + # GRACE/GRACE-FO harmonics for time t Ylms = GRACE_Ylms.index(i) - #-- Remove GIA rate for time + # Remove GIA rate for time Ylms.subtract(GIA_Ylms.index(i)) - #-- Remove monthly files to be removed + # Remove monthly files to be removed Ylms.subtract(remove_Ylms.index(i)) - #-- smooth harmonics and convert to output units + # smooth harmonics and convert to output units Ylms.convolve(dfactor*wt) - #-- convert spherical harmonics to output spatial grid + # convert spherical harmonics to output spatial grid grid.data[:,:,i] = harmonic_summation(Ylms.clm, Ylms.slm, grid.lon, grid.lat, LMAX=LMAX, MMAX=MMAX, PLM=PLM).T - #-- copy time variables for month + # copy time variables for month grid.time[i] = np.copy(Ylms.time) grid.month[i] = np.copy(Ylms.month) - #-- update spacing and dimensions + # update spacing and dimensions grid.update_spacing() grid.update_extents() grid.update_dimensions() - #-- scale output data with kfactor + # scale output data with kfactor grid = grid.scale(kfactor.data) grid.replace_invalid(fill_value, mask=kfactor.mask) - #-- output monthly files to ascii, netCDF4 or HDF5 + # output monthly files to ascii, netCDF4 or HDF5 args = (FILE_PREFIX,'',unit_str,LMAX,order_str,gw_str,ds_str, grid.month[0],grid.month[-1],suffix[DATAFORM]) FILE=os.path.join(OUTPUT_DIRECTORY,file_format.format(*args)) if (DATAFORM == 'ascii'): - #-- ascii (.txt) + # ascii (.txt) grid.to_ascii(FILE, date=True, verbose=VERBOSE) elif (DATAFORM == 'netCDF4'): - #-- netCDF4 + # netCDF4 grid.to_netCDF4(FILE, date=True, verbose=VERBOSE, units=unit_str, longname=unit_name, title='GRACE/GRACE-FO Spatial Data') elif (DATAFORM == 'HDF5'): - #-- HDF5 + # HDF5 grid.to_HDF5(FILE, date=True, verbose=VERBOSE, units=unit_str, longname=unit_name, title='GRACE/GRACE-FO Spatial Data') - #-- set the permissions mode of the output files + # set the permissions mode of the output files os.chmod(FILE, MODE) - #-- add file to list + # add file to list output_files.append(FILE) - #-- calculate power of scaled 
GRACE/GRACE-FO data + # calculate power of scaled GRACE/GRACE-FO data scaled_power = grid.sum(power=2.0).power(0.5) - #-- calculate residual leakage errors - #-- scaled by ratio of GRACE and synthetic power + # calculate residual leakage errors + # scaled by ratio of GRACE and synthetic power ratio = scaled_power.scale(k_power.power(-1).data) error = k_error.scale(ratio.data) - #-- output monthly error files to ascii, netCDF4 or HDF5 + # output monthly error files to ascii, netCDF4 or HDF5 args = (FILE_PREFIX,'ERROR_',unit_str,LMAX,order_str,gw_str,ds_str, grid.month[0],grid.month[-1],suffix[DATAFORM]) FILE = os.path.join(OUTPUT_DIRECTORY,file_format.format(*args)) if (DATAFORM == 'ascii'): - #-- ascii (.txt) + # ascii (.txt) error.to_ascii(FILE, date=False, verbose=VERBOSE) elif (DATAFORM == 'netCDF4'): - #-- netCDF4 + # netCDF4 error.to_netCDF4(FILE, date=False, verbose=VERBOSE, units=unit_str, longname=unit_name, title='GRACE/GRACE-FO Scaling Error') elif (DATAFORM == 'HDF5'): - #-- HDF5 + # HDF5 error.to_HDF5(FILE, date=False, verbose=VERBOSE, units=unit_str, longname=unit_name, title='GRACE/GRACE-FO Scaling Error') - #-- set the permissions mode of the output files + # set the permissions mode of the output files os.chmod(FILE, MODE) - #-- add file to list + # add file to list output_files.append(FILE) - #-- Output spatial data object + # Output spatial data object delta = spatial() delta.lon = np.copy(kfactor.lon) delta.lat = np.copy(kfactor.lat) @@ -571,101 +571,101 @@ def scale_grace_maps(base_dir, PROC, DREL, DSET, LMAX, RAD, delta.month = np.copy(nsmth) delta.data = np.zeros((nlat,nlon)) delta.mask = np.zeros((nlat,nlon),dtype=bool) - #-- calculate scaled spatial error - #-- Calculating cos(m*phi)^2 and sin(m*phi)^2 + # calculate scaled spatial error + # Calculating cos(m*phi)^2 and sin(m*phi)^2 m = delta_Ylms.m[:,np.newaxis] ccos = np.cos(np.dot(m,phi))**2 ssin = np.sin(np.dot(m,phi))**2 - #-- truncate delta harmonics to spherical harmonic range + # truncate delta harmonics to spherical harmonic range Ylms = delta_Ylms.truncate(LMAX,lmin=LMIN,mmax=MMAX) - #-- convolve delta harmonics with degree dependent factors - #-- smooth harmonics and convert to output units + # convolve delta harmonics with degree dependent factors + # smooth harmonics and convert to output units Ylms = Ylms.convolve(dfactor*wt).power(2.0).scale(1.0/nsmth) - #-- Calculate fourier coefficients - d_cos = np.zeros((MMAX+1,nlat))#-- [m,th] - d_sin = np.zeros((MMAX+1,nlat))#-- [m,th] - #-- Calculating delta spatial values + # Calculate Fourier coefficients + d_cos = np.zeros((MMAX+1,nlat))  # [m,th] + d_sin = np.zeros((MMAX+1,nlat))  # [m,th] + # Calculating delta spatial values for k in range(0,nlat): - #-- summation over all spherical harmonic degrees + # summation over all spherical harmonic degrees d_cos[:,k] = np.sum(PLM2[:,:,k]*Ylms.clm, axis=0) d_sin[:,k] = np.sum(PLM2[:,:,k]*Ylms.slm, axis=0) - #-- Multiplying by c/s(phi#m) to get spatial error map + # Multiplying by cos(m*phi) and sin(m*phi) to get spatial error map delta.data[:] = np.sqrt(np.dot(ccos.T,d_cos) + np.dot(ssin.T,d_sin)).T - #-- update spacing and dimensions + # update spacing and dimensions delta.update_spacing() delta.update_extents() delta.update_dimensions() - #-- scale output harmonic errors with kfactor + # scale output harmonic errors with kfactor delta = delta.scale(kfactor.data) delta.replace_invalid(fill_value, mask=kfactor.mask) - #-- output monthly files to ascii, netCDF4 or HDF5 + # output monthly files to ascii, netCDF4 or HDF5 args =
(FILE_PREFIX,'DELTA_',unit_str,LMAX,order_str,gw_str,ds_str, grid.month[0],grid.month[-1],suffix[DATAFORM]) FILE=os.path.join(OUTPUT_DIRECTORY,file_format.format(*args)) if (DATAFORM == 'ascii'): - #-- ascii (.txt) + # ascii (.txt) delta.to_ascii(FILE, date=True, verbose=VERBOSE) elif (DATAFORM == 'netCDF4'): - #-- netCDF4 + # netCDF4 delta.to_netCDF4(FILE, date=True, verbose=VERBOSE, units=unit_str, longname=unit_name, title='GRACE/GRACE-FO Spatial Error') elif (DATAFORM == 'HDF5'): - #-- HDF5 + # HDF5 delta.to_HDF5(FILE, date=True, verbose=VERBOSE, units=unit_str, longname=unit_name, title='GRACE/GRACE-FO Spatial Error') - #-- set the permissions mode of the output files + # set the permissions mode of the output files os.chmod(FILE, MODE) - #-- add file to list + # add file to list output_files.append(FILE) - #-- return the list of output files + # return the list of output files return output_files -#-- PURPOSE: print a file log for the GRACE/GRACE-FO analysis +# PURPOSE: print a file log for the GRACE/GRACE-FO analysis def output_log_file(arguments,output_files): - #-- format: scale_GRACE_maps_run_2002-04-01_PID-70335.log + # format: scale_GRACE_maps_run_2002-04-01_PID-70335.log args = (time.strftime('%Y-%m-%d',time.localtime()), os.getpid()) LOGFILE = 'scale_GRACE_maps_run_{0}_PID-{1:d}.log'.format(*args) - #-- create a unique log and open the log file + # create a unique log and open the log file DIRECTORY = os.path.expanduser(arguments.output_directory) fid = utilities.create_unique_file(os.path.join(DIRECTORY,LOGFILE)) logging.basicConfig(stream=fid, level=logging.INFO) - #-- print argument values sorted alphabetically + # print argument values sorted alphabetically logging.info('ARGUMENTS:') for arg, value in sorted(vars(arguments).items()): logging.info('{0}: {1}'.format(arg, value)) - #-- print output files + # print output files logging.info('\n\nOUTPUT FILES:') for f in output_files: logging.info('{0}'.format(f)) - #-- close the log file + # close the log file fid.close() -#-- PURPOSE: print a error file log for the GRACE/GRACE-FO analysis +# PURPOSE: print an error file log for the GRACE/GRACE-FO analysis def output_error_log_file(arguments): - #-- format: scale_GRACE_maps_failed_run_2002-04-01_PID-70335.log + # format: scale_GRACE_maps_failed_run_2002-04-01_PID-70335.log args = (time.strftime('%Y-%m-%d',time.localtime()), os.getpid()) LOGFILE = 'scale_GRACE_maps_failed_run_{0}_PID-{1:d}.log'.format(*args) - #-- create a unique log and open the log file + # create a unique log and open the log file DIRECTORY = os.path.expanduser(arguments.output_directory) fid = utilities.create_unique_file(os.path.join(DIRECTORY,LOGFILE)) logging.basicConfig(stream=fid, level=logging.INFO) - #-- print argument values sorted alphabetically + # print argument values sorted alphabetically logging.info('ARGUMENTS:') for arg, value in sorted(vars(arguments).items()): logging.info('{0}: {1}'.format(arg, value)) - #-- print traceback error + # print traceback error logging.info('\n\nTRACEBACK ERROR:') traceback.print_exc(file=fid) - #-- close the log file + # close the log file fid.close() -#-- PURPOSE: create argument parser +# PURPOSE: create argument parser def arguments(): parser = argparse.ArgumentParser( description="""Calculates scaled spatial maps from @@ -674,8 +674,8 @@ def arguments(): fromfile_prefix_chars="@" ) parser.convert_arg_line_to_args = utilities.convert_arg_line_to_args - #-- command line parameters - #-- working data
directory parser.add_argument('--directory','-D', type=lambda p: os.path.abspath(os.path.expanduser(p)), default=os.getcwd(), @@ -687,30 +687,30 @@ def arguments(): parser.add_argument('--file-prefix','-P', type=str, help='Prefix string for input and output files') - #-- Data processing center or satellite mission + # Data processing center or satellite mission parser.add_argument('--center','-c', metavar='PROC', type=str, required=True, help='GRACE/GRACE-FO Processing Center') - #-- GRACE/GRACE-FO data release + # GRACE/GRACE-FO data release parser.add_argument('--release','-r', metavar='DREL', type=str, default='RL06', help='GRACE/GRACE-FO Data Release') - #-- GRACE/GRACE-FO Level-2 data product + # GRACE/GRACE-FO Level-2 data product parser.add_argument('--product','-p', metavar='DSET', type=str, default='GSM', help='GRACE/GRACE-FO Level-2 data product') - #-- minimum spherical harmonic degree + # minimum spherical harmonic degree parser.add_argument('--lmin', type=int, default=1, help='Minimum spherical harmonic degree') - #-- maximum spherical harmonic degree and order + # maximum spherical harmonic degree and order parser.add_argument('--lmax','-l', type=int, default=60, help='Maximum spherical harmonic degree') parser.add_argument('--mmax','-m', type=int, default=None, help='Maximum spherical harmonic order') - #-- start and end GRACE/GRACE-FO months + # start and end GRACE/GRACE-FO months parser.add_argument('--start','-S', type=int, default=4, help='Starting GRACE/GRACE-FO month') @@ -722,34 +722,34 @@ def arguments(): parser.add_argument('--missing','-N', metavar='MISSING', type=int, nargs='+', default=MISSING, help='Missing GRACE/GRACE-FO months') - #-- different treatments of the load Love numbers - #-- 0: Han and Wahr (1995) values from PREM - #-- 1: Gegout (2005) values from PREM - #-- 2: Wang et al. (2012) values from PREM + # different treatments of the load Love numbers + # 0: Han and Wahr (1995) values from PREM + # 1: Gegout (2005) values from PREM + # 2: Wang et al. 
(2012) values from PREM parser.add_argument('--love','-n', type=int, default=0, choices=[0,1,2], help='Treatment of the Load Love numbers') - #-- option for setting reference frame for gravitational load love number - #-- reference frame options (CF, CM, CE) + # option for setting reference frame for gravitational load love number + # reference frame options (CF, CM, CE) parser.add_argument('--reference', type=str.upper, default='CF', choices=['CF','CM','CE'], help='Reference frame for load Love numbers') - #-- Gaussian smoothing radius (km) + # Gaussian smoothing radius (km) parser.add_argument('--radius','-R', type=float, default=0, help='Gaussian smoothing radius (km)') - #-- Use a decorrelation (destriping) filter + # Use a decorrelation (destriping) filter parser.add_argument('--destripe','-d', default=False, action='store_true', help='Use decorrelation (destriping) filter') - #-- output grid parameters + # output grid parameters parser.add_argument('--spacing', type=float, nargs='+', default=[0.5,0.5], metavar=('dlon','dlat'), help='Spatial resolution of output data') parser.add_argument('--interval', type=int, default=2, choices=[1,2], help=('Output grid interval (1: global, 2: centered global)')) - #-- GIA model type list + # GIA model type list models = {} models['IJ05-R2'] = 'Ivins R2 GIA Models' models['W12a'] = 'Whitehouse GIA Models' @@ -763,33 +763,33 @@ def arguments(): models['ascii'] = 'reformatted GIA in ascii format' models['netCDF4'] = 'reformatted GIA in netCDF4 format' models['HDF5'] = 'reformatted GIA in HDF5 format' - #-- GIA model type + # GIA model type parser.add_argument('--gia','-G', type=str, metavar='GIA', choices=models.keys(), help='GIA model type to read') - #-- full path to GIA file + # full path to GIA file parser.add_argument('--gia-file', type=lambda p: os.path.abspath(os.path.expanduser(p)), help='GIA file to read') - #-- use atmospheric jump corrections from Fagiolini et al. (2015) + # use atmospheric jump corrections from Fagiolini et al. (2015) parser.add_argument('--atm-correction', default=False, action='store_true', help='Apply atmospheric jump correction coefficients') - #-- correct for pole tide drift follow Wahr et al. (2015) + # correct for pole tide drift following Wahr et al.
(2015) parser.add_argument('--pole-tide', default=False, action='store_true', help='Correct for pole tide drift') - #-- Update Degree 1 coefficients with SLR or derived values - #-- Tellus: GRACE/GRACE-FO TN-13 from PO.DAAC - #-- https://grace.jpl.nasa.gov/data/get-data/geocenter/ - #-- SLR: satellite laser ranging from CSR - #-- ftp://ftp.csr.utexas.edu/pub/slr/geocenter/ - #-- UCI: Sutterley and Velicogna, Remote Sensing (2019) - #-- https://www.mdpi.com/2072-4292/11/18/2108 - #-- Swenson: GRACE-derived coefficients from Sean Swenson - #-- https://doi.org/10.1029/2007JB005338 - #-- GFZ: GRACE/GRACE-FO coefficients from GFZ GravIS - #-- http://gravis.gfz-potsdam.de/corrections + # Update Degree 1 coefficients with SLR or derived values + # Tellus: GRACE/GRACE-FO TN-13 from PO.DAAC + # https://grace.jpl.nasa.gov/data/get-data/geocenter/ + # SLR: satellite laser ranging from CSR + # ftp://ftp.csr.utexas.edu/pub/slr/geocenter/ + # UCI: Sutterley and Velicogna, Remote Sensing (2019) + # https://www.mdpi.com/2072-4292/11/18/2108 + # Swenson: GRACE-derived coefficients from Sean Swenson + # https://doi.org/10.1029/2007JB005338 + # GFZ: GRACE/GRACE-FO coefficients from GFZ GravIS + # http://gravis.gfz-potsdam.de/corrections parser.add_argument('--geocenter', metavar='DEG1', type=str, choices=['Tellus','SLR','SLF','UCI','Swenson','GFZ'], @@ -800,7 +800,7 @@ def arguments(): parser.add_argument('--interpolate-geocenter', default=False, action='store_true', help='Least-squares model missing Degree 1 coefficients') - #-- replace low degree harmonics with values from Satellite Laser Ranging + # replace low degree harmonics with values from Satellite Laser Ranging parser.add_argument('--slr-c20', type=str, default=None, choices=['CSR','GFZ','GSFC'], help='Replace C20 coefficients with SLR values') @@ -819,19 +819,19 @@ def arguments(): parser.add_argument('--slr-c50', type=str, default=None, choices=['CSR','GSFC','LARES'], help='Replace C50 coefficients with SLR values') - #-- input data format (ascii, netCDF4, HDF5) + # input data format (ascii, netCDF4, HDF5) parser.add_argument('--format','-F', type=str, default='netCDF4', choices=['ascii','netCDF4','HDF5'], help='Input/output data format') - #-- mean file to remove + # mean file to remove parser.add_argument('--mean-file', type=lambda p: os.path.abspath(os.path.expanduser(p)), help='GRACE/GRACE-FO mean file to remove from the harmonic data') - #-- input data format (ascii, netCDF4, HDF5) + # input data format (ascii, netCDF4, HDF5) parser.add_argument('--mean-format', type=str, default='netCDF4', choices=['ascii','netCDF4','HDF5','gfc'], help='Input data format for GRACE/GRACE-FO mean file') - #-- monthly files to be removed from the GRACE/GRACE-FO data + # monthly files to be removed from the GRACE/GRACE-FO data parser.add_argument('--remove-file', type=lambda p: os.path.abspath(os.path.expanduser(p)), nargs='+', help='Monthly files to be removed from the GRACE/GRACE-FO data') @@ -844,7 +844,7 @@ def arguments(): parser.add_argument('--redistribute-removed', default=False, action='store_true', help='Redistribute removed mass fields over the ocean') - #-- scaling factor files + # scaling factor files parser.add_argument('--scale-file', type=lambda p: os.path.abspath(os.path.expanduser(p)), required=True, help='Scaling factor file') @@ -854,42 +854,42 @@ def arguments(): parser.add_argument('--power-file', type=lambda p: os.path.abspath(os.path.expanduser(p)), required=True, help='Scaling factor power file') - #-- land-sea mask for redistributing 
fluxes + # land-sea mask for redistributing fluxes lsmask = utilities.get_data_path(['data','landsea_hd.nc']) parser.add_argument('--mask', type=lambda p: os.path.abspath(os.path.expanduser(p)), default=lsmask, help='Land-sea mask for redistributing land water flux') - #-- Output log file for each job in forms - #-- scale_GRACE_maps_run_2002-04-01_PID-00000.log - #-- scale_GRACE_maps_failed_run_2002-04-01_PID-00000.log + # Output log file for each job in forms + # scale_GRACE_maps_run_2002-04-01_PID-00000.log + # scale_GRACE_maps_failed_run_2002-04-01_PID-00000.log parser.add_argument('--log', default=False, action='store_true', help='Output log file for each job') - #-- print information about processing run + # print information about processing run parser.add_argument('--verbose','-V', action='count', default=0, help='Verbose output of processing run') - #-- permissions mode of the local directories and files (number in octal) + # permissions mode of the local directories and files (number in octal) parser.add_argument('--mode','-M', type=lambda x: int(x,base=8), default=0o775, help='Permissions mode of output files') - #-- return the parser + # return the parser return parser -#-- This is the main part of the program that calls the individual functions +# This is the main part of the program that calls the individual functions def main(): - #-- Read the system arguments listed after the program + # Read the system arguments listed after the program parser = arguments() args,_ = parser.parse_known_args() - #-- create logger + # create logger loglevels = [logging.CRITICAL,logging.INFO,logging.DEBUG] logging.basicConfig(level=loglevels[args.verbose]) - #-- try to run the analysis with listed parameters + # try to run the analysis with listed parameters try: info(args) - #-- run scale_grace_maps algorithm with parameters + # run scale_grace_maps algorithm with parameters output_files = scale_grace_maps( args.directory, args.center, @@ -935,17 +935,17 @@ def main(): VERBOSE=args.verbose, MODE=args.mode) except Exception as e: - #-- if there has been an error exception - #-- print the type, value, and stack trace of the - #-- current exception being handled + # if there has been an error exception + # print the type, value, and stack trace of the + # current exception being handled logging.critical(f'process id {os.getpid():d} failed') logging.error(traceback.format_exc()) - if args.log:#-- write failed job completion log file + if args.log:  # write failed job completion log file output_error_log_file(args) else: - if args.log:#-- write successful job completion log file + if args.log:  # write successful job completion log file output_log_file(args,output_files) -#-- run main program +# run main program if __name__ == '__main__': main() diff --git a/test/test_download_and_read.py b/test/test_download_and_read.py index 26e9ec27..0506e774 100644 --- a/test/test_download_and_read.py +++ b/test/test_download_and_read.py @@ -15,11 +15,11 @@ from gravity_toolkit.read_GRACE_harmonics import read_GRACE_harmonics from read_GRACE_geocenter.read_GRACE_geocenter import read_GRACE_geocenter -#-- PURPOSE: Download a GRACE file from PO.DAAC and check that read program runs +# PURPOSE: Download a GRACE file from PO.DAAC and check that read program runs def test_podaac_download_and_read(username,webdav): HOST=['https://podaac-tools.jpl.nasa.gov','drive','files','allData','grace', 'L2','CSR','RL06','GSM-2_2002095-2002120_GRAC_UTCSR_BA01_0600.gz'] - #-- download and
read as virtual file object FILE = gravity_toolkit.utilities.from_drive(HOST,username=username, password=webdav,verbose=True) Ylms = read_GRACE_harmonics(FILE, 60) @@ -29,11 +29,11 @@ def test_podaac_download_and_read(username,webdav): assert all((Ylms[key] == val) for key,val in test.items()) assert (Ylms['clm'][2,0] == -0.484169355584e-03) -#-- PURPOSE: Download a GRACE file from GFZ and check that read program runs +# PURPOSE: Download a GRACE file from GFZ and check that read program runs def test_gfz_ftp_download_and_read(): HOST=['isdcftp.gfz-potsdam.de','grace','Level-2','CSR','RL06', 'GSM-2_2002095-2002120_GRAC_UTCSR_BA01_0600.gz'] - #-- download and read as virtual file object + # download and read as virtual file object FILE = gravity_toolkit.utilities.from_ftp(HOST,verbose=True) Ylms = read_GRACE_harmonics(FILE, 60) keys = ['time', 'start', 'end', 'clm', 'slm', 'eclm', 'eslm', 'header'] @@ -42,23 +42,23 @@ def test_gfz_ftp_download_and_read(): assert all((Ylms[key] == val) for key,val in test.items()) assert (Ylms['clm'][2,0] == -0.484169355584e-03) -#-- PURPOSE: Download a GRACE-FO COST-G file from the GFZ ICGEM +# PURPOSE: Download a GRACE-FO COST-G file from the GFZ ICGEM def test_gfz_icgem_costg_download_and_read(): - #-- attempt to download from ftp server + # attempt to download from ftp server try: HOST=['icgem.gfz-potsdam.de','02_COST-G','Grace-FO', 'GSM-2_2018152-2018181_GRFO_COSTG_BF01_0100.gfc'] FILE = gravity_toolkit.utilities.from_ftp(HOST,verbose=True) except: pass - #-- attempt to download from http server + # attempt to download from http server try: HOST=['http://icgem.gfz-potsdam.de','getseries','02_COST-G', 'Grace-FO','GSM-2_2018152-2018181_GRFO_COSTG_BF01_0100.gfc'] FILE = gravity_toolkit.utilities.from_http(HOST,verbose=True) except: return - #-- read as virtual file object + # read as virtual file object Ylms = read_GRACE_harmonics(FILE, 60) keys = ['time', 'start', 'end', 'clm', 'slm', 'eclm', 'eslm', 'header'] test = dict(start=2458270.5, end=2458299.5) @@ -66,15 +66,15 @@ def test_gfz_icgem_costg_download_and_read(): assert all((Ylms[key] == val) for key,val in test.items()) assert (Ylms['clm'][2,0] == -0.484165436067e-03) -#-- PURPOSE: Download a Swarm file from ESA and check that read program runs +# PURPOSE: Download a Swarm file from ESA and check that read program runs def test_esa_swarm_download_and_read(): - #-- build url for Swarm file + # build url for Swarm file HOST='https://swarm-diss.eo.esa.int' swarm_file='SW_OPER_EGF_SHA_2__20131201T000000_20131231T235959_0101.ZIP' parameters = gravity_toolkit.utilities.urlencode({'file': posixpath.join('swarm','Level2longterm','EGF',swarm_file)}) remote_file = [HOST,'?do=download&{0}'.format(parameters)] - #-- download and read as virtual file object + # download and read as virtual file object gravity_toolkit.utilities.from_http(remote_file, local=swarm_file,verbose=True) Ylms = read_gfc_harmonics(swarm_file) @@ -83,15 +83,15 @@ def test_esa_swarm_download_and_read(): assert all((key in Ylms.keys()) for key in keys) assert all((Ylms[key] == val) for key,val in test.items()) assert (Ylms['clm'][2,0] == -0.48416530506600003e-03) - #-- clean up + # clean up os.remove(swarm_file) -#-- PURPOSE: Download a GRACE ITSG GRAZ file and check that read program runs +# PURPOSE: Download a GRACE ITSG GRAZ file and check that read program runs def test_itsg_graz_download_and_read(): HOST=['http://ftp.tugraz.at','outgoing','ITSG','GRACE', 'ITSG-Grace_operational','monthly','monthly_n60', 
'ITSG-Grace_operational_n60_2018-06.gfc'] - #-- download and read as virtual file object + # download and read as virtual file object gravity_toolkit.utilities.from_http(HOST,local=HOST[-1],verbose=True) Ylms = read_gfc_harmonics(HOST[-1]) keys = ['time', 'start', 'end', 'clm', 'slm', 'eclm', 'eslm'] @@ -99,39 +99,39 @@ def test_itsg_graz_download_and_read(): assert all((key in Ylms.keys()) for key in keys) assert all((Ylms[key] == val) for key,val in test.items()) assert (Ylms['clm'][2,0] == -0.4841694727612e-03) - #-- clean up + # clean up os.remove(HOST[-1]) -#-- PURPOSE: Download Sutterley and Velicogna (2019) geocenter files +# PURPOSE: Download Sutterley and Velicogna (2019) geocenter files @pytest.fixture(scope="module", autouse=True) def download_geocenter(): - #-- download geocenter files to filepath + # download geocenter files to filepath filename = inspect.getframeinfo(inspect.currentframe()).filename filepath = os.path.dirname(os.path.abspath(filename)) gravity_toolkit.utilities.from_figshare(filepath,verbose=True) - #-- run tests + # run tests yield - #-- clean up + # clean up shutil.rmtree(os.path.join(filepath,'geocenter')) -#-- parameterize processing center and data release +# parameterize processing center and data release @pytest.mark.parametrize("PROC", ['CSR','GFZ','JPL']) @pytest.mark.parametrize("DREL", ['RL06']) -#-- PURPOSE: read Sutterley and Velicogna (2019) geocenter files +# PURPOSE: read Sutterley and Velicogna (2019) geocenter files def test_geocenter_read(PROC, DREL): filename = inspect.getframeinfo(inspect.currentframe()).filename filepath = os.path.dirname(os.path.abspath(filename)) MODEL = dict(RL04='OMCT', RL05='OMCT', RL06='MPIOM') args = (PROC,DREL,MODEL[DREL],'SLF_iter') FILE = '{0}_{1}_{2}_{3}.txt'.format(*args) - #-- assert that file exists + # assert that file exists geocenter_file = os.path.join(filepath,'geocenter',FILE) assert os.access(geocenter_file, os.F_OK) - #-- test geocenter read program + # test geocenter read program DEG1 = read_GRACE_geocenter(geocenter_file) keys = ['time', 'JD', 'month', 'C10', 'C11', 'S11','header'] assert all((key in DEG1.keys()) for key in keys) - #-- test geocenter class + # test geocenter class DATA = gravity_toolkit.geocenter().from_UCI(geocenter_file) for key in ['time', 'month', 'C10', 'C11', 'S11']: val = getattr(DATA, key) diff --git a/test/test_legendre.py b/test/test_legendre.py index 5c91df75..a4371bcd 100644 --- a/test/test_legendre.py +++ b/test/test_legendre.py @@ -5,7 +5,7 @@ import numpy as np import gravity_toolkit -#-- PURPOSE: test unnormalized Legendre polynomials +# PURPOSE: test unnormalized Legendre polynomials def test_unnormalized(l=3, x=[-1.0, -0.9, -0.8]): obs = gravity_toolkit.legendre(l, x) expected = np.array([ @@ -16,7 +16,7 @@ def test_unnormalized(l=3, x=[-1.0, -0.9, -0.8]): ]) assert np.isclose(obs, expected, atol=1e-05).all() -#-- PURPOSE: test fully-normalized Legendre polynomials +# PURPOSE: test fully-normalized Legendre polynomials def test_normalized(l=3, x=[-1.0, -0.9, -0.8]): obs = gravity_toolkit.legendre(l, x, NORMALIZE=True) expected = np.array([ @@ -27,7 +27,7 @@ def test_normalized(l=3, x=[-1.0, -0.9, -0.8]): ]) assert np.isclose(obs, expected, atol=1e-05).all() -#-- PURPOSE: test fully-normalized zonal Legendre polynomials +# PURPOSE: test fully-normalized zonal Legendre polynomials def test_zonal(l=3, x=[-1.0, -0.9, -0.8]): obs,_ = gravity_toolkit.legendre_polynomials(l, x) expected = np.array([ @@ -38,14 +38,14 @@ def test_zonal(l=3, x=[-1.0, -0.9, -0.8]): ]) 
assert np.isclose(obs, expected, atol=1e-05).all() -#-- PURPOSE: compare fully-normalized Legendre polynomials +# PURPOSE: compare fully-normalized Legendre polynomials def test_plms(l=240, x=0.1): obs = gravity_toolkit.legendre(l, x, NORMALIZE=True) - #-- calculate associated Legendre polynomials + # calculate associated Legendre polynomials holmes,_ = gravity_toolkit.plm_holmes(l, x) colombo,_ = gravity_toolkit.plm_colombo(l, x) mohlenkamp = gravity_toolkit.plm_mohlenkamp(l, x) - #-- compare Legendre polynomials + # compare Legendre polynomials assert np.isclose(obs, holmes[l,:]).all() assert np.isclose(holmes, colombo).all() assert np.isclose(holmes, mohlenkamp).all() diff --git a/test/test_love_numbers.py b/test/test_love_numbers.py index 2547bed7..b77ed8b7 100644 --- a/test/test_love_numbers.py +++ b/test/test_love_numbers.py @@ -12,7 +12,7 @@ import gravity_toolkit.read_love_numbers from gravity_toolkit.utilities import get_data_path -#-- PURPOSE: Define Load Love Numbers for lower degree harmonics +# PURPOSE: Define Load Love Numbers for lower degree harmonics def get_love_numbers(): """ Gets a list of Load Love Numbers for degrees 0 to 3 @@ -22,7 +22,7 @@ def get_love_numbers(): ll = [0.0, 0.13026466961444, 0.023882296795977, 0.069842389427609] return dict(hl=hl,kl=kl,ll=ll) -#-- PURPOSE: Check that Load Love Numbers match expected for reference frame +# PURPOSE: Check that Load Love Numbers match expected for reference frame def test_love_numbers(): # valid low degree Love numbers for reference frame CF VALID = get_love_numbers() @@ -35,7 +35,7 @@ def test_love_numbers(): for v,t in zip(VALID[key],TEST[key])) assert (TEST['l'].max() == 1000) -#-- PURPOSE: Check that Gegout (2005) Load Love Numbers can be read +# PURPOSE: Check that Gegout (2005) Load Love Numbers can be read def test_Gegout_love_numbers(): # path to load Love numbers file love_numbers_file = get_data_path(['data','Load_Love2_CE.dat']) @@ -45,11 +45,11 @@ def test_Gegout_love_numbers(): HEADER=3, COLUMNS=COLUMNS, FORMAT='dict', REFERENCE='CM') assert (TEST['l'].max() == 1024) -#-- PURPOSE: Check that Wang et al. (2012) Load Love Numbers can be read +# PURPOSE: Check that Wang et al. (2012) Load Love Numbers can be read def test_Wang_love_numbers(): # path to load Love numbers file (truncated from degree 46341) love_numbers_file = get_data_path(['data','PREM-LLNs-truncated.dat']) - COLUMNS = ['l','hl','ll','kl','nl','nk'] + COLUMNS = ['l','hl','ll','kl','nl','nk'] # read load Love numbers and convert to reference frame CE TEST = gravity_toolkit.read_love_numbers(love_numbers_file, HEADER=1, COLUMNS=COLUMNS, FORMAT='dict', REFERENCE='CE') diff --git a/test/test_time.py b/test/test_time.py index c5ae8b18..a7c1cad8 100644 --- a/test/test_time.py +++ b/test/test_time.py @@ -15,18 +15,18 @@ import gravity_toolkit.time import gravity_toolkit.utilities -#-- parameterize calendar dates +# parameterize calendar dates @pytest.mark.parametrize("YEAR", np.random.randint(1992,2020,size=2)) @pytest.mark.parametrize("MONTH", np.random.randint(1,13,size=2)) -#-- PURPOSE: verify forward and backwards time conversions +# PURPOSE: verify forward and backwards time conversions def test_julian(YEAR,MONTH): - #-- days per month in a leap and a standard year - #-- only difference is February (29 vs. 28) + # days per month in a leap and a standard year + # only difference is February (29 vs. 
28) dpm_leap = np.array([31,29,31,30,31,30,31,31,30,31,30,31]) dpm_stnd = np.array([31,28,31,30,31,30,31,31,30,31,30,31]) DPM = dpm_stnd if np.mod(YEAR,4) else dpm_leap assert (np.sum(DPM) == gravity_toolkit.time.calendar_days(YEAR).sum()) - #-- calculate Modified Julian Day (MJD) from calendar date + # calculate Modified Julian Day (MJD) from calendar date DAY = np.random.randint(1,DPM[MONTH-1]+1) HOUR = np.random.randint(0,23+1) MINUTE = np.random.randint(0,59+1) @@ -34,11 +34,11 @@ def test_julian(YEAR,MONTH): MJD = gravity_toolkit.time.convert_calendar_dates(YEAR, MONTH, DAY, hour=HOUR, minute=MINUTE, second=SECOND, epoch=(1858,11,17,0,0,0)) - #-- convert MJD to calendar date + # convert MJD to calendar date JD = np.squeeze(MJD) + 2400000.5 YY,MM,DD,HH,MN,SS = gravity_toolkit.time.convert_julian(JD, format='tuple', astype=np.float64) - #-- assert dates + # assert dates eps = np.finfo(np.float16).eps assert (YY == YEAR) assert (MM == MONTH) @@ -47,41 +47,41 @@ def test_julian(YEAR,MONTH): assert (MN == MINUTE) assert (np.abs(SS - SECOND) < eps) -#-- parameterize calendar dates +# parameterize calendar dates @pytest.mark.parametrize("YEAR", np.random.randint(1992,2020,size=2)) @pytest.mark.parametrize("MONTH", np.random.randint(1,13,size=2)) -#-- PURPOSE: verify forward and backwards time conversions +# PURPOSE: verify forward and backwards time conversions def test_decimal_dates(YEAR,MONTH): - #-- days per month in a leap and a standard year - #-- only difference is February (29 vs. 28) + # days per month in a leap and a standard year + # only difference is February (29 vs. 28) dpm_leap = np.array([31,29,31,30,31,30,31,31,30,31,30,31]) dpm_stnd = np.array([31,28,31,30,31,30,31,31,30,31,30,31]) DPM = dpm_stnd if np.mod(YEAR,4) else dpm_leap assert (np.sum(DPM) == gravity_toolkit.time.calendar_days(YEAR).sum()) - #-- calculate Modified Julian Day (MJD) from calendar date + # calculate Modified Julian Day (MJD) from calendar date DAY = np.random.randint(1,DPM[MONTH-1]+1) HOUR = np.random.randint(0,23+1) MINUTE = np.random.randint(0,59+1) SECOND = 60.0*np.random.random_sample(1) - #-- calculate year-decimal time + # calculate year-decimal time tdec = gravity_toolkit.time.convert_calendar_decimal(YEAR, MONTH, day=DAY, hour=HOUR, minute=MINUTE, second=SECOND) - #-- day of the year 1 = Jan 1, 365 = Dec 31 (std) + # day of the year 1 = Jan 1, 365 = Dec 31 (std) day_temp = np.mod(tdec, 1)*np.sum(DPM) DofY = np.floor(day_temp) + 1 - #-- cumulative sum of the calendar dates + # cumulative sum of the calendar dates day_cumulative = np.cumsum(np.concatenate(([0],DPM))) + 1 - #-- finding which month date is in + # finding which month date is in i = np.nonzero((DofY >= day_cumulative[0:-1]) & (DofY < day_cumulative[1:])) month_range = np.arange(1,13) month = month_range[i] - #-- finding day of the month + # finding day of the month day = (DofY - day_cumulative[i]) + 1 - #-- convert residuals into time (hour, minute and second) + # convert residuals into time (hour, minute and second) hour_temp = np.mod(day_temp,1)*24.0 minute_temp = np.mod(hour_temp,1)*60.0 second = np.mod(minute_temp,1)*60.0 - #-- assert dates + # assert dates eps = np.finfo(np.float16).eps assert (np.floor(tdec) == YEAR) assert (month == MONTH) @@ -90,48 +90,48 @@ def test_decimal_dates(YEAR,MONTH): assert (np.floor(minute_temp) == MINUTE) assert (np.abs(second - SECOND) < eps) -#-- PURPOSE: test UNIX time +# PURPOSE: test UNIX time def test_unix_time(): - #-- ATLAS Standard Data Epoch + # ATLAS Standard Data Epoch UNIX = 

-#-- PURPOSE: test UNIX time
+# PURPOSE: test UNIX time
 def test_unix_time():
-    #-- ATLAS Standard Data Epoch
+    # ATLAS Standard Data Epoch
     UNIX = gravity_toolkit.utilities.get_unix_time('2018-01-01 00:00:00')
     assert (UNIX == 1514764800)

-#-- PURPOSE: test parsing time strings
+# PURPOSE: test parsing time strings
 def test_parse_date_string():
-    #-- time string for Modified Julian Days
+    # time string for Modified Julian Days
     time_string = 'days since 1858-11-17T00:00:00'
     epoch,to_secs = gravity_toolkit.time.parse_date_string(time_string)
-    #-- check the epoch and the time unit conversion factors
+    # check the epoch and the time unit conversion factors
     assert np.all(epoch == [1858,11,17,0,0,0])
     assert (to_secs == 86400.0)
-    #-- time string for ATLAS Standard Data Epoch
+    # time string for ATLAS Standard Data Epoch
     time_string = 'seconds since 2018-01-01T00:00:00'
     epoch,to_secs = gravity_toolkit.time.parse_date_string(time_string)
-    #-- check the epoch and the time unit conversion factors
+    # check the epoch and the time unit conversion factors
     assert np.all(epoch == [2018,1,1,0,0,0])
     assert (to_secs == 1.0)
-    #-- time string for unitless case
+    # time string for unitless case
     time_string = '2000-01-01T12:00:00'
     epoch,to_secs = gravity_toolkit.time.parse_date_string(time_string)
-    #-- check the epoch and the time unit conversion factors
+    # check the epoch and the time unit conversion factors
     assert np.all(epoch == [2000,1,1,12,0,0])
     assert (to_secs == 0.0)
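The strings above follow the CF-style "units since epoch" convention; as a sketch of one way the parsed outputs can be used, the factor returned by parse_date_string converts offsets in the stated unit into seconds:

    import numpy as np
    import gravity_toolkit.time

    # split a CF-style units string into an epoch and a seconds factor
    epoch,to_secs = gravity_toolkit.time.parse_date_string(
        'days since 1858-11-17T00:00:00')
    assert np.all(epoch == [1858,11,17,0,0,0])
    # 2.5 days after the MJD epoch, expressed in seconds
    assert (2.5*to_secs == 216000.0)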

-#-- PURPOSE: test months adjustment for special cases
-#-- parameterize calendar dates
+# PURPOSE: test months adjustment for special cases
+# parameterize calendar dates
 @pytest.mark.parametrize("PROC", ['CSR','GFZ','GSFC','JPL'])
 def test_adjust_months(PROC):
-    #-- The 'Special Months' (Nov 2011, Dec 2011 and April 2012) with
-    #-- Accelerometer shutoffs make the relation between month number
-    #-- and date more complicated as days from other months are used
-    #-- For CSR and GFZ: Nov 2011 (119) is centered in Oct 2011 (118)
-    #-- For JPL: Dec 2011 (120) is centered in Jan 2012 (121)
-    #-- For all: May 2015 (161) is centered in Apr 2015 (160)
-    #-- For GSFC: Oct 2018 (202) is centered in Nov 2018 (203)
+    # The 'Special Months' (Nov 2011, Dec 2011 and April 2012) with
+    # Accelerometer shutoffs make the relation between month number
+    # and date more complicated as days from other months are used
+    # For CSR and GFZ: Nov 2011 (119) is centered in Oct 2011 (118)
+    # For JPL: Dec 2011 (120) is centered in Jan 2012 (121)
+    # For all: May 2015 (161) is centered in Apr 2015 (160)
+    # For GSFC: Oct 2018 (202) is centered in Nov 2018 (203)

-    #-- dates with special months for each processing center
+    # dates with special months for each processing center
     center_dates = dict(CSR=[],GFZ=[],GSFC=[],JPL=[])
-    #-- CSR dates to test (year-decimal, GRACE month)
+    # CSR dates to test (year-decimal, GRACE month)
     center_dates['CSR'].append([2011.62465753, 116])
     center_dates['CSR'].append([2011.70821918, 117])
     center_dates['CSR'].append([2011.79178082, 118])
@@ -146,7 +146,7 @@ def test_adjust_months(PROC):
     center_dates['CSR'].append([2015.31917808, 161])
     center_dates['CSR'].append([2015.53698630, 163])
     center_dates['CSR'].append([2015.62465753, 164])
-    #-- GFZ dates to test (year-decimal, GRACE month)
+    # GFZ dates to test (year-decimal, GRACE month)
     center_dates['GFZ'].append([2011.62465753, 116])
     center_dates['GFZ'].append([2011.70821918, 117])
     center_dates['GFZ'].append([2011.79178082, 118])
@@ -161,14 +161,14 @@ def test_adjust_months(PROC):
     center_dates['GFZ'].append([2015.31917808, 161])
     center_dates['GFZ'].append([2015.53698630, 163])
     center_dates['GFZ'].append([2015.62465753, 164])
-    #-- GSFC dates to test (year-decimal, GRACE month)
+    # GSFC dates to test (year-decimal, GRACE month)
     center_dates['GSFC'].append([2018.45479452, 198])
     center_dates['GSFC'].append([2018.53835616, 199])
     center_dates['GSFC'].append([2018.84794520, 202])
     center_dates['GSFC'].append([2018.87397260, 203])
     center_dates['GSFC'].append([2018.95753424, 204])
     center_dates['GSFC'].append([2019.04109589, 205])
-    #-- JPL dates to test (year-decimal, GRACE month)
+    # JPL dates to test (year-decimal, GRACE month)
     center_dates['JPL'].append([2011.62465753, 116])
     center_dates['JPL'].append([2011.70821918, 117])
     center_dates['JPL'].append([2011.79178082, 118])
@@ -184,11 +184,11 @@ def test_adjust_months(PROC):
     center_dates['JPL'].append([2015.53698630, 163])
     center_dates['JPL'].append([2015.62465753, 164])

-    #-- get dates and months for center
+    # get dates and months for center
     tdec,months = np.transpose(center_dates[PROC])
-    #-- GRACE/GRACE-FO months with duplicates
+    # GRACE/GRACE-FO months with duplicates
     temp = np.array(12.0*(tdec-2002.0)+1,dtype='i')
     assert np.any(temp != months.astype('i'))
-    #-- run months adjustment to fix special cases
+    # run months adjustment to fix special cases
     temp = gravity_toolkit.time.adjust_months(temp)
     assert np.all(temp == months.astype('i'))
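As a usage sketch: for ordinary months the naive year-decimal conversion already agrees with the published month numbers, and adjust_months passes them through unchanged; repairing the special cases listed in the comments requires the full month series, as in the test above. The three rows here are taken from the CSR list.

    import numpy as np
    import gravity_toolkit.time

    # ordinary (year-decimal, GRACE month) rows from the CSR list above
    tdec = np.array([2011.62465753, 2011.70821918, 2011.79178082])
    months = np.array([116, 117, 118])
    # naive conversion from year-decimal to GRACE month number
    temp = np.array(12.0*(tdec - 2002.0) + 1, dtype='i')
    # no special months here, so the adjustment is a pass-through
    temp = gravity_toolkit.time.adjust_months(temp)
    assert np.all(temp == months)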