style: fixes for pylint
mdtanker committed Jun 4, 2024
1 parent d8d4d99 commit 118ebc3
Showing 4 changed files with 50 additions and 22 deletions.
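
The changes in this commit silence pylint's possibly-used-before-assignment and used-before-assignment checks in two ways: adding an explicit else branch that raises a ValueError when a selector string is invalid, and adding targeted "# pylint: disable=..." comments where the assignment is guaranteed in practice but pylint cannot prove it. A minimal sketch of both patterns follows; the function names and bounds are illustrative placeholders, not polartoolkit's API.

    from __future__ import annotations


    def pick_layer(version: str) -> str:
        if version == "sources":
            layer = "ATA_GeoMAP_sources_v2022_08"
        elif version == "quality":
            layer = "ATA_GeoMAP_quality_v2022_08"
        else:
            # Fix 1: an exhaustive `else` that raises, so pylint can prove
            # `layer` is always bound before it is used below.
            msg = "invalid version string"
            raise ValueError(msg)
        return layer


    def pick_region(hemisphere: str, region: tuple | None = None) -> tuple:
        # placeholder bounds, for illustration only
        if hemisphere == "south":
            initial_region = (-3500e3, 3500e3, -3500e3, 3500e3)
        elif hemisphere == "north":
            initial_region = (-700e3, 700e3, -3500e3, -500e3)
        if region is None:
            # Fix 2: the assignment is guaranteed by the calling context, but
            # pylint cannot see that, so the check is silenced locally.
            region = initial_region  # pylint: disable=possibly-used-before-assignment
        return region
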
27 changes: 16 additions & 11 deletions src/polartoolkit/fetch.py
@@ -280,6 +280,9 @@ def sample_shp(name: str) -> str:
known_hash = "70e86b3bf9775dd824014afb91da470263edf23159a9fe34107897d1bae9623e"
elif name == "Roosevelt_Island":
known_hash = "83434284808d067b8b18b649e41287a63f01eb2ce581b2c34ee44ae3a1a5ca2a"
else:
msg = "invalid name string"
raise ValueError(msg)
path = pooch.retrieve(
url=f"https://github.com/mdtanker/polartoolkit/raw/main/data/{name}.zip",
path=f"{pooch.os_cache('pooch')}/polartoolkit/shapefiles",
@@ -565,9 +568,9 @@ def preprocessing_5k(fname: str, action: str, _pooch2: typing.Any) -> str:
initial_registration = "g"

if region is None:
region = initial_region
region = initial_region # pylint: disable=possibly-used-before-assignment
if registration is None:
registration = initial_registration
registration = initial_registration # pylint: disable=possibly-used-before-assignment

# This is the path to the processed (magnitude) grid
path = pooch.retrieve(
@@ -577,13 +580,13 @@ def preprocessing_5k(fname: str, action: str, _pooch2: typing.Any) -> str:
downloader=EarthDataDownloader(),
known_hash="fa0957618b8bd98099f4a419d7dc0e3a2c562d89e9791b4d0ed55e6017f52416",
progressbar=True,
processor=preprocessor,
processor=preprocessor, # pylint: disable=possibly-used-before-assignment
)

with xr.open_dataarray(path) as grid:
resampled = resample_grid(
grid,
initial_spacing=initial_spacing,
initial_spacing=initial_spacing, # pylint: disable=possibly-used-before-assignment
initial_region=initial_region,
initial_registration=initial_registration,
spacing=spacing,
@@ -810,6 +813,9 @@ def geomap(
layer = "ATA_GeoMAP_sources_v2022_08"
elif version == "quality":
layer = "ATA_GeoMAP_quality_v2022_08"
else:
msg = "invalid version string"
raise ValueError(msg)

if region is None:
data = pyogrio.read_dataframe(fname2, layer=layer)
@@ -1621,9 +1627,9 @@ def preprocessing_5k(fname: str, action: str, _pooch2: typing.Any) -> str:
initial_registration = "p"

if region is None:
region = initial_region
region = initial_region # pylint: disable=possibly-used-before-assignment
if registration is None:
registration = initial_registration
registration = initial_registration # pylint: disable=possibly-used-before-assignment

if layer == "surface":
path = pooch.retrieve(
@@ -1632,7 +1638,7 @@ def preprocessing_5k(fname: str, action: str, _pooch2: typing.Any) -> str:
path=f"{pooch.os_cache('pooch')}/polartoolkit/topography",
known_hash="7748a79fffa41024c175cff7142066940b3e88f710eaf4080193c46b2b59e1f0",
progressbar=True,
processor=preprocessor,
processor=preprocessor, # pylint: disable=possibly-used-before-assignment
)
elif layer == "bed":
path = pooch.retrieve(
@@ -1641,7 +1647,7 @@ def preprocessing_5k(fname: str, action: str, _pooch2: typing.Any) -> str:
path=f"{pooch.os_cache('pooch')}/polartoolkit/topography",
known_hash="74d55acb219deb87dc5be019d6dafeceb7b1ebcf9095866f257671d12670a5e2",
progressbar=True,
processor=preprocessor,
processor=preprocessor, # pylint: disable=possibly-used-before-assignment
)
else:
msg = "invalid layer string"
@@ -1651,7 +1657,7 @@ def preprocessing_5k(fname: str, action: str, _pooch2: typing.Any) -> str:

resampled = resample_grid(
grid,
initial_spacing=initial_spacing,
initial_spacing=initial_spacing, # pylint: disable=possibly-used-before-assignment
initial_region=initial_region,
initial_registration=initial_registration,
spacing=spacing,
@@ -3725,12 +3731,11 @@ def preprocessing(fname: str, action: str, _pooch2: typing.Any) -> str:
region,
registration,
)

else:
msg = "invalid version string"
raise ValueError(msg)

return typing.cast(xr.DataArray, resampled)
return typing.cast(xr.DataArray, resampled) # pylint: disable=possibly-used-before-assignment


def gia(
20 changes: 16 additions & 4 deletions src/polartoolkit/maps.py
@@ -1024,6 +1024,9 @@ def add_colorbar(
bins=bins,
)[0]
max_bin_height = bins.max() / bins.sum() * 100
else:
msg = "hist_type must be 0 or 1"
raise ValueError(msg)

if zmin == zmax:
msg = "Grid is a constant value, can't make a colorbar histogram!"
@@ -1142,7 +1145,7 @@ def add_coast(
raise ValueError(msg)

fig.plot(
data,
data, # pylint: disable=used-before-assignment
projection=projection,
region=region,
pen=pen,
@@ -1638,7 +1641,9 @@ def interactive_map(
points_ll: pd.DataFrame = utils.epsg3031_to_latlon(points)
elif hemisphere == "north":
points_ll = utils.epsg3413_to_latlon(points)

else:
msg = "hemisphere must be north or south"
raise ValueError(msg)
# if points supplied, center map on points
center_ll = [np.nanmedian(points_ll.lat), np.nanmedian(points_ll.lon)]
# add points to geodataframe
@@ -1656,12 +1661,17 @@ def interactive_map(
center_ll = [-90, 0]
elif hemisphere == "north":
center_ll = [90, -45]

else:
msg = "hemisphere must be north or south"
raise ValueError(msg)
if center_yx is not None:
if hemisphere == "south":
center_ll = utils.epsg3031_to_latlon(center_yx)
elif hemisphere == "north":
center_ll = utils.epsg3413_to_latlon(center_yx)
else:
msg = "hemisphere must be north or south"
raise ValueError(msg)

if hemisphere == "south":
if basemap_type == "BlueMarble":
@@ -1689,7 +1699,9 @@ def interactive_map(
else:
msg = "invalid string for basemap_type"
raise ValueError(msg)

else:
msg = "hemisphere must be north or south"
raise ValueError(msg)
# create the map
m = ipyleaflet.Map(
center=center_ll,
8 changes: 7 additions & 1 deletion src/polartoolkit/profile.py
@@ -386,6 +386,9 @@ def default_layers(
spacing=spacing,
verbose=verbose,
)
else:
msg = "version must be either 'bedmap2' or 'bedmachine'"
raise ValueError(msg)

layer_names = [
"ice",
@@ -914,6 +917,9 @@ def plot_profile(
y = layers_reg[3]
elif kwargs.get("start_end_label_position", "B") == "B":
y = layers_reg[2]
else:
msg = "invalid start_end_label_position string"
raise ValueError(msg)

fig.text(
x=x1,
@@ -1518,7 +1524,7 @@ def rel_dist(
df1 = df.copy()

# from https://stackoverflow.com/a/75824992/18686384
df1["x_lag"] = df1["x"].shift(1)
df1["x_lag"] = df1["x"].shift(1) # pylint: disable=used-before-assignment
df1["y_lag"] = df1["y"].shift(1)
df1["rel_dist"] = np.sqrt(
(df1["x"] - df1["x_lag"]) ** 2 + (df1["y"] - df1["y_lag"]) ** 2
17 changes: 11 additions & 6 deletions src/polartoolkit/utils.py
@@ -688,6 +688,9 @@ def mask_from_shp(
xds = grid.rio.write_crs(crs).rio.set_spatial_dims(
original_dims[1], original_dims[0]
)
else:
msg = "can't supply both xr_grid and grid_file."
raise ValueError(msg)

masked_grd = xds.rio.clip(
shp.geometry,
@@ -703,7 +706,7 @@
output = mask_grd

try:
output = output.drop_vars("spatial_ref")
output = output.drop_vars("spatial_ref") # pylint: disable=used-before-assignment
except ValueError as e:
logging.exception(e)

@@ -1778,12 +1781,12 @@ def get_min_max(
invert=False,
)

if robust:
if robust is True:
v_min, v_max = np.nanquantile(masked, [0.02, 0.98])
else:
elif robust is False:
v_min, v_max = np.nanmin(masked), np.nanmax(masked)

assert v_min <= v_max, "min value should be less than or equal to max value"
assert v_min <= v_max, "min value should be less than or equal to max value" # pylint: disable=possibly-used-before-assignment
return (v_min, v_max)


@@ -1914,9 +1917,8 @@ def mask_from_polygon(
ds = grid.to_dataset()
elif isinstance(grid, xr.DataArray):
ds = grid.to_dataset()

# if no grid given, make a dummy one with supplied region and spacing
if grid is None:
elif grid is None:
coords = vd.grid_coordinates(
region=region,
spacing=spacing,
@@ -1925,6 +1927,9 @@
ds = vd.make_xarray_grid(
coords, np.ones_like(coords[0]), dims=("y", "x"), data_names="z"
)
else:
msg = "grid must be a xr.DataArray, a filename, or None"
raise ValueError(msg)

masked = vd.convexhull_mask(
data_coords,
Expand Down
