Cleaned test_xarray and added aoi, large_aoi to init
Jack-Hayes committed Dec 11, 2024
1 parent 2468431 commit 999d960
Showing 4 changed files with 54 additions and 56 deletions.
2 changes: 1 addition & 1 deletion src/coincident/io/xarray.py
@@ -37,7 +37,7 @@ def to_dataset(
bands: list[str] | None = None,
aoi: gpd.GeoDataFrame | None = None,
mask: bool = False,
**kwargs: dict[str, Any],
**kwargs: Any,
) -> xr.DataArray:
"""
Convert a GeoDataFrame to an xarray DataArray using odc.stac
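For context on the one-line signature change above: when a type checker sees `**kwargs: T`, the annotation `T` describes each individual keyword value (so `kwargs` itself is a `dict[str, T]`). Annotating `**kwargs: dict[str, Any]` therefore told mypy that every keyword value must be a dict, which is why a call like `to_dataset(..., resolution=0.1)` was rejected. A minimal sketch of the difference, with hypothetical function names:

```python
from typing import Any


def old_style(**kwargs: dict[str, Any]) -> None:
    # mypy reads this as: every keyword value must itself be a dict
    ...


def new_style(**kwargs: Any) -> None:
    # mypy reads this as: keyword values may be anything (kwargs is dict[str, Any])
    ...


# old_style(resolution=0.1)  # mypy error: "float" incompatible with "dict[str, Any]"
new_style(resolution=0.1)  # accepted
```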
17 changes: 17 additions & 0 deletions tests/__init__.py
@@ -1,5 +1,22 @@
from __future__ import annotations

import geopandas as gpd
import pytest

# import os
# if not os.environ.get('MAXAR_API_KEY'):
# os.environ['MAXAR_API_KEY'] = 'fake-test-key'


@pytest.fixture
def aoi():
# 11 vertices, 1,361km^2
aoi_url = "https://raw.githubusercontent.com/SlideRuleEarth/sliderule-python/main/data/grandmesa.geojson"
return gpd.read_file(aoi_url)


@pytest.fixture
def large_aoi():
# 260 vertices, large area 269,590 km^2
aoi_url = "https://raw.githubusercontent.com/unitedstates/districts/refs/heads/gh-pages/states/CO/shape.geojson"
return gpd.read_file(aoi_url)
21 changes: 6 additions & 15 deletions tests/test_search.py
@@ -1,3 +1,7 @@
# ruff: noqa: F401
# ruff: noqa: F811
# F401 for ruff deeming 'aoi' and 'large_aoi' being unused imports
# F811 for ruff deeming 'aoi' and 'large_aoi' variables being undefined
from __future__ import annotations

import typing
@@ -7,12 +11,13 @@
from geopandas.testing import assert_geodataframe_equal

import coincident
from tests import aoi, large_aoi # Importing the fixture from __init__.py

# Decorate tests requiring internet (slow & flaky)
network = pytest.mark.network

try:
import maxar_platform.discovery # noqa: F401
import maxar_platform.discovery

not_authenticated = False
except: # noqa: E722
@@ -22,20 +27,6 @@
)


@pytest.fixture
def aoi():
# 11 vertices, 1,361km^2
aoi_url = "https://raw.githubusercontent.com/SlideRuleEarth/sliderule-python/main/data/grandmesa.geojson"
return gpd.read_file(aoi_url)


@pytest.fixture
def large_aoi():
# 260 vertices, large area 269,590 km^2
aoi_url = "https://raw.githubusercontent.com/unitedstates/districts/refs/heads/gh-pages/states/CO/shape.geojson"
return gpd.read_file(aoi_url)


@typing.no_type_check
def test_no_dataset_specified():
with pytest.raises(
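Both test modules alias the mark (`network = pytest.mark.network`) and decorate the slow, internet-dependent tests with `@network`. A minimal sketch of how such a custom mark is typically registered and filtered, assuming a conftest.py hook (the repository may instead register it in its pytest configuration file):

```python
# conftest.py (illustrative, not necessarily this repository's setup)
import pytest


def pytest_configure(config: pytest.Config) -> None:
    # Register the custom mark so pytest does not warn about an unknown marker.
    config.addinivalue_line(
        "markers", "network: test requires internet access (slow & flaky)"
    )


# Deselect the networked tests with:  pytest -m "not network"
```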
70 changes: 30 additions & 40 deletions tests/test_xarray.py
@@ -1,84 +1,74 @@
# ruff: noqa: F401
# ruff: noqa: F811
# F401 for ruff deeming 'aoi' being unused import
# F811 for ruff deeming 'aoi' variable being undefined
from __future__ import annotations

import matplotlib as mpl # because ruff
import pytest
import xarray as xr

import coincident
from coincident.io.xarray import plot_esa_worldcover, to_dataset
from tests import aoi # Importing the fixture from __init__.py

# These tests are based on the workflow from https://coincident.readthedocs.io/en/latest/examples/contextual_data.html
# I couldn't think of a way to generalize these tests since the
# xarray funcs are pretty tailored to COP30 and ESA WC
# Decorate tests requiring internet (slow & flaky)
network = pytest.mark.network

# mypy doesn't like the resolution argument in to_dataset
# Argument "resolution" to "to_dataset" has incompatible type "float"; expected "dict[str, Any]" [arg-type]

@pytest.fixture
def search_aoi():
"""Fixture to load a simplified AOI from realistic lidar data."""
workunit = "CO_WestCentral_2019"
df_wesm = coincident.search.wesm.read_wesm_csv()
gf_lidar = coincident.search.wesm.load_by_fid(
df_wesm[df_wesm.workunit == workunit].index
)
return gf_lidar.simplify(0.01)


@pytest.mark.network
def test_to_dataset_with_cop30(search_aoi):
@network
def test_to_dataset_with_cop30(aoi):
"""Test `to_dataset` functionality with COP30 dataset."""
gf_cop30 = coincident.search.search(
dataset="cop30",
intersects=search_aoi,
intersects=aoi,
)
ds = to_dataset(
gf_cop30,
aoi=search_aoi,
resolution=0.00081, # ~90m
mask=True,
aoi=aoi,
resolution=0.1, # ~1km
).compute()
assert isinstance(ds, xr.Dataset), "Expected output to be an xarray Dataset."
assert "data" in ds.data_vars, "Expected 'data' variable in the Dataset."


@pytest.mark.network
def test_to_dataset_with_worldcover(search_aoi):
@network
def test_to_dataset_with_worldcover(aoi):
"""Test `to_dataset` functionality with WorldCover dataset."""
gf_wc = coincident.search.search(
dataset="worldcover",
intersects=search_aoi,
intersects=aoi,
datetime=["2020"],
)
ds = to_dataset(
gf_wc,
bands=["map"],
aoi=search_aoi,
resolution=0.00081, # ~90m
mask=True,
aoi=aoi,
resolution=0.1, # ~1km
).compute()
ds = ds.rename(map="landcover")
assert isinstance(ds, xr.Dataset), "Expected output to be an xarray Dataset."
assert "landcover" in ds.data_vars, "Expected 'landcover' variable in the Dataset."
assert "map" in ds.data_vars, "Expected 'map' variable in the Dataset."


# Tests for `plot_esa_worldcover`
@pytest.mark.network
def test_plot_esa_worldcover_valid(search_aoi):
@network
def test_plot_esa_worldcover_valid(aoi):
"""Test `plot_esa_worldcover` with valid WorldCover dataset."""
gf_wc = coincident.search.search(
dataset="worldcover",
intersects=search_aoi,
datetime=["2020"],
intersects=aoi,
datetime=["2021"],
)
ds = to_dataset(
gf_wc,
bands=["map"],
aoi=search_aoi,
resolution=0.00081, # ~90m
mask=True,
aoi=aoi,
resolution=0.1, # ~1km
).compute()
ds = ds.rename(map="landcover")
ax = plot_esa_worldcover(ds)
assert ax is not None, "Expected a valid Matplotlib Axes object."
assert len(ax.images) > 0, "Expected at least one image in the plot."
ax.set_title("ESA WorldCover")
# https://matplotlib.org/stable/users/prev_whats_new/whats_new_3.4.0.html
# https://github.com/matplotlib/matplotlib/blob/main/lib/matplotlib/tests/test_contour.py#L146
assert any(
isinstance(c, mpl.collections.QuadMesh) for c in ax.get_children()
), "Expected at least one pcolormesh object in the plot."

2 comments on commit 999d960

@Jack-Hayes
Member Author

@scottyhq
This commit addresses the following changes and improvements:

Added aoi and large_aoi to tests/__init__.py:
- These fixtures were moved to __init__.py so they can be reused across multiple test files.
- This avoids redefinition issues and lets the test functions in tests/test_search.py and tests/test_xarray.py access the fixtures without redefining them in each file.

Refactored tests in tests/test_search.py and tests/test_xarray.py:
- Added the # ruff: noqa: F811 and # ruff: noqa: F401 comments to suppress the unused-import and redefinition warnings ruff raises for the aoi and large_aoi fixtures.
- Cleaned up and standardized the imports and fixture usage for better clarity and to avoid conflicts (a minimal illustration of the resulting pattern is sketched below).

Resolved conversations and feedback from PR reviews:
- The issues raised during the PR review (referenced in the links below) were addressed by reorganizing the test fixtures and cleaning up tests/test_xarray.py and tests/test_search.py.
- Adjusted the code so that new tests can easily use the aoi and large_aoi fixtures, with no redundant fixture declarations inside the test files themselves.
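A minimal illustration of the resulting pattern, with a hypothetical test module name and assertion (the real test files import the fixtures from tests/__init__.py exactly as shown in the diff above):

```python
# tests/test_example.py (hypothetical)
# ruff: noqa: F401
# ruff: noqa: F811
from __future__ import annotations

from tests import aoi  # fixture defined in tests/__init__.py


def test_aoi_has_geometry(aoi):
    # pytest resolves the `aoi` argument to the imported fixture's return value
    assert not aoi.empty
```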
Review feedback references:

@scottyhq
Member

> These fixtures were moved to __init__.py so they can be reused across multiple test files.

I should've read the pytest docs before suggesting this. I ended up moving them to conftest.py as recommended here https://docs.pytest.org/en/latest/how-to/fixtures.html#scope-sharing-fixtures-across-classes-modules-packages-or-session
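A minimal sketch of that conftest.py approach: fixtures defined here are discovered automatically for the whole tests/ package, so the test modules need no imports and no ruff noqa pragmas. The fixture bodies below simply mirror the ones added in this diff; the actual follow-up commit may differ:

```python
# tests/conftest.py (illustrative)
from __future__ import annotations

import geopandas as gpd
import pytest


@pytest.fixture
def aoi() -> gpd.GeoDataFrame:
    # 11 vertices, ~1,361 km^2 (Grand Mesa)
    url = "https://raw.githubusercontent.com/SlideRuleEarth/sliderule-python/main/data/grandmesa.geojson"
    return gpd.read_file(url)


@pytest.fixture
def large_aoi() -> gpd.GeoDataFrame:
    # 260 vertices, ~269,590 km^2 (Colorado)
    url = "https://raw.githubusercontent.com/unitedstates/districts/refs/heads/gh-pages/states/CO/shape.geojson"
    return gpd.read_file(url)
```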
