Skip to content
New issue

Have a question about this project? Sign up for a free GitHub account to open an issue and contact its maintainers and the community.

By clicking “Sign up for GitHub”, you agree to our terms of service and privacy statement. We’ll occasionally send you account related emails.

Already on GitHub? Sign in to your account

Fix OOL Dependency Handling, RH Clipping, and Bufr Export Rounding #307

Merged
merged 6 commits into from
Oct 14, 2024
Merged
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
3 changes: 2 additions & 1 deletion src/pypromice/postprocess/bufr_utilities.py
Original file line number Diff line number Diff line change
Expand Up @@ -120,7 +120,8 @@ class BUFRVariables:
)
# https://vocabulary-manager.eumetsat.int/vocabularies/BUFR/WMO/32/TABLE_B/012101
# Scale: 2, unit: K
airTemperature: float = attrs.field(converter=round_converter(2))
# NOTE: The expected scale is 2, but our instantaneous data is rounded to 1 decimal.
airTemperature: float = attrs.field(converter=round_converter(1))
# There is also a Dewpoint temperature in this template: 012103 which is currently unused.
# https://vocabulary-manager.eumetsat.int/vocabularies/BUFR/WMO/32/TABLE_B/012103
# Scale: 0, unit: %
Expand Down
54 changes: 30 additions & 24 deletions src/pypromice/process/L1toL2.py
Original file line number Diff line number Diff line change
Expand Up @@ -94,7 +94,7 @@ def toL2(
.ffill().bfill())
mask = (np.abs(ds.gps_alt - baseline_elevation) < 100) & ds.gps_alt.notnull()
ds[['gps_alt','gps_lon', 'gps_lat']] = ds[['gps_alt','gps_lon', 'gps_lat']].where(mask)

# removing dlr and ulr that are missing t_rad
# this is done now because t_rad can be filtered either manually or with persistence
ds['dlr'] = ds.dlr.where(ds.t_rad.notnull())
Expand All @@ -113,12 +113,12 @@ def toL2(
if ~ds['t_i'].isnull().all():
ds['rh_i_cor'] = correctHumidity(ds['rh_i'], ds['t_i'],
T_0, T_100, ews, ei0)

# Determine cloud cover for on-ice stations
cc = calcCloudCoverage(ds['t_u'], T_0, eps_overcast, eps_clear, # Calculate cloud coverage
ds['dlr'], ds.attrs['station_id'])
ds['cc'] = (('time'), cc.data)

# Determine surface temperature
ds['t_surf'] = calcSurfaceTemperature(T_0, ds['ulr'], ds['dlr'], # Calculate surface temperature
emissivity)
Expand All @@ -136,7 +136,7 @@ def toL2(
else:
lat = ds['gps_lat'].mean()
lon = ds['gps_lon'].mean()

# smoothing tilt and rot
ds['tilt_x'] = smoothTilt(ds['tilt_x'])
ds['tilt_y'] = smoothTilt(ds['tilt_y'])
Expand All @@ -151,8 +151,8 @@ def toL2(
ZenithAngle_rad, ZenithAngle_deg = calcZenith(lat, Declination_rad, # Calculate zenith
HourAngle_rad, deg2rad,
rad2deg)


# Correct Downwelling shortwave radiation
DifFrac = 0.2 + 0.8 * cc
CorFac_all = calcCorrectionFactor(Declination_rad, phi_sensor_rad, # Calculate correction
Expand Down Expand Up @@ -186,9 +186,9 @@ def toL2(
TOA_crit_nopass = (ds['dsr_cor'] > (0.9 * isr_toa + 10)) # Determine filter
ds['dsr_cor'][TOA_crit_nopass] = np.nan # Apply filter and interpolate
ds['usr_cor'][TOA_crit_nopass] = np.nan
ds['dsr_cor'] = ds.dsr_cor.where(ds.dsr.notnull())
ds['usr_cor'] = ds.usr_cor.where(ds.usr.notnull())

ds['dsr_cor'] = ds.dsr_cor.where(ds.dsr.notnull())
ds['usr_cor'] = ds.usr_cor.where(ds.usr.notnull())
# # Check sun position
# sundown = ZenithAngle_deg >= 90
# _checkSunPos(ds, OKalbedos, sundown, sunonlowerdome, TOA_crit_nopass)
Expand All @@ -205,27 +205,33 @@ def toL2(
ds['precip_l_cor'], ds['precip_l_rate']= correctPrecip(ds['precip_l'],
ds['wspd_l'])

# Get directional wind speed
ds['wdir_u'] = ds['wdir_u'].where(ds['wspd_u'] != 0)
ds['wspd_x_u'], ds['wspd_y_u'] = calcDirWindSpeeds(ds['wspd_u'], ds['wdir_u'])
get_directional_wind_speed(ds) # Get directional wind speed

ds = clip_values(ds, vars_df)
return ds

def get_directional_wind_speed(ds: xr.Dataset) -> xr.Dataset:
"""
Calculate directional wind speed from wind speed and direction and mutates the dataset
"""

ds['wdir_u'] = ds['wdir_u'].where(ds['wspd_u'] != 0)
ds['wspd_x_u'], ds['wspd_y_u'] = calcDirWindSpeeds(ds['wspd_u'], ds['wdir_u'])

if ds.attrs['number_of_booms']==2:
ds['wdir_l'] = ds['wdir_l'].where(ds['wspd_l'] != 0)
if ds.attrs['number_of_booms']==2:
ds['wdir_l'] = ds['wdir_l'].where(ds['wspd_l'] != 0)
ds['wspd_x_l'], ds['wspd_y_l'] = calcDirWindSpeeds(ds['wspd_l'], ds['wdir_l'])

if hasattr(ds, 'wdir_i'):
if ~ds['wdir_i'].isnull().all() and ~ds['wspd_i'].isnull().all():
ds['wdir_i'] = ds['wdir_i'].where(ds['wspd_i'] != 0)
ds['wspd_x_i'], ds['wspd_y_i'] = calcDirWindSpeeds(ds['wspd_i'], ds['wdir_i'])


ds = clip_values(ds, vars_df)
ds['wdir_i'] = ds['wdir_i'].where(ds['wspd_i'] != 0)
ds['wspd_x_i'], ds['wspd_y_i'] = calcDirWindSpeeds(ds['wspd_i'], ds['wdir_i'])
return ds


def calcDirWindSpeeds(wspd, wdir, deg2rad=np.pi/180):
'''Calculate directional wind speed from wind speed and direction

Parameters
----------
wspd : xr.Dataarray
Expand All @@ -234,16 +240,16 @@ def calcDirWindSpeeds(wspd, wdir, deg2rad=np.pi/180):
Wind direction data array
deg2rad : float
Degree to radians coefficient. The default is np.pi/180

Returns
-------
wspd_x : xr.Dataarray
Wind speed in X direction
wspd_y : xr.DataArray
Wind speed in Y direction
'''
'''
wspd_x = wspd * np.sin(wdir * deg2rad)
wspd_y = wspd * np.cos(wdir * deg2rad)
wspd_y = wspd * np.cos(wdir * deg2rad)
return wspd_x, wspd_y


Expand Down Expand Up @@ -328,7 +334,7 @@ def smoothTilt(da: xr.DataArray, threshold=0.2):
either X or Y smoothed tilt inclinometer measurements
'''
# we calculate the moving standard deviation over a 3-day sliding window
# hourly resampling is necessary to make sure the same threshold can be used
# hourly resampling is necessary to make sure the same threshold can be used
# for 10 min and hourly data
moving_std_gap_filled = da.to_series().resample('h').median().rolling(
3*24, center=True, min_periods=2
Expand Down
27 changes: 15 additions & 12 deletions src/pypromice/process/value_clipping.py
Original file line number Diff line number Diff line change
Expand Up @@ -2,6 +2,8 @@
import pandas
import xarray

from pypromice.utilities.dependency_graph import DependencyGraph


def clip_values(
ds: xarray.Dataset,
Expand All @@ -27,9 +29,15 @@ def clip_values(
cols = ["lo", "hi", "OOL"]
assert set(cols) <= set(var_configurations.columns)

variable_limits = var_configurations[cols].dropna(how="all")
for var, row in variable_limits.iterrows():
variable_limits = var_configurations[cols].assign(
dependents=lambda df: df.OOL.fillna("").str.split(),
# Find the closure of dependents using the DependencyGraph class
dependents_closure=lambda df: DependencyGraph.from_child_mapping(
df.dependents
).child_closure_mapping(),
)

for var, row in variable_limits.iterrows():
if var not in list(ds.variables):
continue

Expand All @@ -38,15 +46,10 @@ def clip_values(
if ~np.isnan(row.hi):
ds[var] = ds[var].where(ds[var] <= row.hi)

other_vars = row.OOL
if isinstance(other_vars, str) and ~ds[var].isnull().all():
for o in other_vars.split():
if o not in list(ds.variables):
continue
else:
if ~np.isnan(row.lo):
ds[var] = ds[var].where(ds[var] >= row.lo)
if ~np.isnan(row.hi):
ds[var] = ds[var].where(ds[var] <= row.hi)
# Flag dependents as NaN if parent is NaN
for o in row.dependents_closure:
if o not in list(ds.variables):
continue
ds[o] = ds[o].where(ds[var].notnull())
ladsmund marked this conversation as resolved.
Show resolved Hide resolved

return ds
Loading
Loading