From 478116f759d247cfc6d4e35c596e7b5736ef5add Mon Sep 17 00:00:00 2001
From: Scott Henderson
Date: Fri, 1 Nov 2024 17:59:10 +0100
Subject: [PATCH] Add cascading_search function (#18)

* towards cascade search

* add cascading search, restructure docs
---
 README.md                             |  40 +----
 docs/conf.py                          |   2 +-
 docs/examples/cascading_search.ipynb  | 213 ++++++++++++++++++++++++++
 docs/examples/index.md                |  11 ++
 docs/{ => examples}/quickstart.ipynb  |   2 +-
 docs/index.md                         |  23 +--
 docs/installation.md                  |  18 ---
 docs/user_guide/contribute.md         |  19 +++
 docs/{ => user_guide}/datasets.md     |   0
 docs/user_guide/index.md              |  29 ++++
 docs/user_guide/installation.md       |  37 +++++
 docs/{ => user_guide}/introduction.md |   2 +-
 pixi.lock                             |   4 +-
 pyproject.toml                        |   3 +-
 src/coincident/search/__init__.py     |   4 +-
 src/coincident/search/main.py         |  74 +++++++++
 src/coincident/search/stac.py         |   8 +-
 tests/test_search.py                  |  59 ++++---
 18 files changed, 442 insertions(+), 106 deletions(-)
 create mode 100644 docs/examples/cascading_search.ipynb
 create mode 100644 docs/examples/index.md
 rename docs/{ => examples}/quickstart.ipynb (99%)
 delete mode 100644 docs/installation.md
 create mode 100644 docs/user_guide/contribute.md
 rename docs/{ => user_guide}/datasets.md (100%)
 create mode 100644 docs/user_guide/index.md
 create mode 100644 docs/user_guide/installation.md
 rename docs/{ => user_guide}/introduction.md (95%)

diff --git a/README.md b/README.md
index 5224122..04d6af3 100644
--- a/README.md
+++ b/README.md
@@ -2,6 +2,7 @@

 [![Actions Status][actions-badge]][actions-link]
 [![Documentation Status][rtd-badge]][rtd-link]
+
 [![Open in GitHub Codespaces](https://github.com/codespaces/badge.svg)](https://codespaces.new/uw-cryo/coincident)

@@ -32,44 +33,7 @@ See here for more information:

 **This tool is under active development, there are no stable releases yet!**

-## Development
-
-Use [pixi](https://pixi.sh) for environment management
-
-```bash
-git clone https://github.com/uw-cryo/coincident.git
-cd coincident
-git checkout -b newfeature
-pixi shell --environment dev # type `exit` to deactivate
-pre-commit install
-
-# Or run pre-configured commands:
-pixi run networktest # or 'test'
-pixi run precommit # also runs automatically upon commits
-pixi run lint
-pixi run docs
-```
-
-## Authentication
-
-Some datasets require authentication to _search_ (Maxar) others only require
-authentication to _download_ data (NASA). `coincident` assumes you have the
-following Environment Variables defined:
-
-```bash
-export EARTHDATA_USERNAME=aaaaa
-export EARTHDATA_PASSWORD=bbbbb
-export MAXAR_API_KEY=ccccc
-export PC_SDK_SUBSCRIPTION_KEY=ddddd
-```
-
-Sign up for credentials at the following webpages:
-
-- [](https://urs.earthdata.nasa.gov)
-- [](https://developers.maxar.com/docs/authentication/guides/api-key)
-- [](https://planetarycomputer.developer.azure-api.net)
-
-### Acknowledgements
+## Acknowledgements

 - Python packaging template provided by

diff --git a/docs/conf.py b/docs/conf.py
index 57c86af..7007742 100644
--- a/docs/conf.py
+++ b/docs/conf.py
@@ -65,7 +65,7 @@

 # NOTE: consider adding back in once for distinct sections (user guide, examples, API reference)
 # https://pydata-sphinx-theme.readthedocs.io/en/stable/user_guide/layout.html#primary-sidebar-left
-html_sidebars = {"**": []}
+# html_sidebars = {"**": []}

 myst_enable_extensions = [
     "colon_fence",
diff --git a/docs/examples/cascading_search.ipynb b/docs/examples/cascading_search.ipynb
new file mode 100644
index 0000000..b325354
--- /dev/null
+++ b/docs/examples/cascading_search.ipynb
@@ -0,0 +1,213 @@
+{
+ "cells": [
+  {
+   "cell_type": "markdown",
+   "metadata": {},
+   "source": [
+    "# Cascading search\n",
+    "\n",
+    "In the [Quickstart](./quickstart.ipynb) notebook we covered searching datasets one-by-one and gradually reducing the spatial domain of our search based on overlapping footprints.\n",
+    "\n",
+    "`coincident` also provides a [`cascading_search()`](#coincident.search.cascading_search) method as a convenience to perform this same type of search in one go."
+   ]
+  },
+  {
+   "cell_type": "code",
+   "execution_count": null,
+   "metadata": {},
+   "outputs": [],
+   "source": [
+    "import coincident\n",
+    "import geopandas as gpd\n",
+    "import xyzservices.providers as xyz\n",
+    "import matplotlib.pyplot as plt\n",
+    "\n",
+    "%matplotlib inline"
+   ]
+  },
+  {
+   "cell_type": "markdown",
+   "metadata": {},
+   "source": [
+    "## Identify a primary dataset\n",
+    "\n",
+    "Start by loading a full-resolution polygon of a 3DEP LiDAR workunit which has a known start_datetime and end_datetime:"
+   ]
+  },
+  {
+   "cell_type": "code",
+   "execution_count": null,
+   "metadata": {},
+   "outputs": [],
+   "source": [
+    "workunit = \"CO_WestCentral_2019\"\n",
+    "df_wesm = coincident.search.wesm.read_wesm_csv()\n",
+    "gf_lidar = coincident.search.wesm.load_by_fid(\n",
+    "    df_wesm[df_wesm.workunit == workunit].index\n",
+    ")\n",
+    "gf_lidar"
+   ]
+  },
+  {
+   "cell_type": "markdown",
+   "metadata": {},
+   "source": [
+    "## Search secondary datasets\n",
+    "\n",
+    "Provide a list of secondary datasets that will be searched in order. Each entry is a tuple of a dataset alias and a temporal pad in days, applied before the primary dataset's start date and after its end date:"
+   ]
+  },
+  {
+   "cell_type": "code",
+   "execution_count": null,
+   "metadata": {},
+   "outputs": [],
+   "source": [
+    "secondary_datasets = [\n",
+    "    (\"maxar\", 14),  # +/- 14 days from lidar\n",
+    "    (\"gedi\", 40),  # +/- 40 days from lidar\n",
+    "    (\"icesat-2\", 60),\n",
+    "]\n",
+    "\n",
+    "dataframes = coincident.search.cascading_search(\n",
+    "    gf_lidar,\n",
+    "    secondary_datasets,\n",
+    "    min_overlap_area=30,  # km^2\n",
+    ")"
+   ]
+  },
+  {
+   "cell_type": "markdown",
+   "metadata": {},
+   "source": [
+    "### Visualize results\n",
+    "\n",
+    "Below we visualize cropped footprints from each secondary dataset."
+   ]
+  },
+  {
+   "cell_type": "code",
+   "execution_count": null,
+   "metadata": {},
+   "outputs": [],
+   "source": [
+    "gf_maxar = dataframes[0]\n",
+    "print(len(gf_maxar))\n",
+    "m = gf_lidar.explore(\n",
+    "    style_kwds=dict(fill=None, color=\"black\"), tiles=xyz.CartoDB.Positron\n",
+    ")  # basemap\n",
+    "gf_maxar.explore(m=m, column=\"datetime\")"
+   ]
+  },
+  {
+   "cell_type": "code",
+   "execution_count": null,
+   "metadata": {},
+   "outputs": [],
+   "source": [
+    "gf_gedi = dataframes[1]\n",
+    "print(len(gf_gedi))\n",
+    "m = gf_lidar.explore(\n",
+    "    style_kwds=dict(fill=None, color=\"black\"), tiles=xyz.CartoDB.Positron\n",
+    ")  # basemap\n",
+    "gf_gedi.explore(m=m, column=\"datetime\")"
+   ]
+  },
+  {
+   "cell_type": "code",
+   "execution_count": null,
+   "metadata": {},
+   "outputs": [],
+   "source": [
+    "gf_is2 = dataframes[2]\n",
+    "print(len(gf_is2))\n",
+    "m = gf_lidar.explore(\n",
+    "    style_kwds=dict(fill=None, color=\"black\"), tiles=xyz.CartoDB.Positron\n",
+    ")  # basemap\n",
+    "gf_is2.explore(m=m, column=\"datetime\")"
+   ]
+  },
+  {
+   "cell_type": "code",
+   "execution_count": null,
+   "metadata": {},
+   "outputs": [],
+   "source": [
+    "fig, ax = plt.subplots(figsize=(10, 5))\n",
+    "for df in dataframes:\n",
+    "    if \"constellation\" in df.columns:\n",
+    "        label = df.constellation.iloc[0]\n",
+    "    else:\n",
+    "        label = df.collection.iloc[0]\n",
+    "    plt.scatter(x=df[\"datetime\"], y=df[\"collection\"], s=50, marker=\"d\", label=label)\n",
+    "\n",
+    "# NOTE: probably a more robust way to set aspect depending on date range\n",
+    "ax.set_aspect(6)\n",
+    "plt.axvline(\n",
+    "    gf_lidar.start_datetime.iloc[0],\n",
+    "    color=\"black\",\n",
+    "    linestyle=\"--\",\n",
+    "    linewidth=0.5,\n",
+    "    label=\"LiDAR\",\n",
+    ")\n",
+    "plt.axvline(gf_lidar.end_datetime.iloc[0], color=\"black\", linestyle=\"--\", linewidth=0.5)\n",
+    "plt.title(\"CO_WestCentral_2019 Overlaps\")\n",
+    "plt.legend(loc=\"lower right\")\n",
+    "fig.autofmt_xdate()"
+   ]
+  },
+  {
+   "cell_type": "markdown",
+   "metadata": {},
+   "source": [
+    "## Save results\n",
+    "\n",
+    "The footprints of the last secondary dataset show where we have *spatial* intersections across all datasets. We save this as a single MultiPolygon to use in QGIS or [geojson.io](https://geojson.io):"
+   ]
+  },
+  {
+   "cell_type": "code",
+   "execution_count": null,
+   "metadata": {},
+   "outputs": [],
+   "source": [
+    "gf_is2.dissolve()[[\"geometry\"]].to_file(\"/tmp/CO_WestCentral_2019_overlaps.geojson\")"
+   ]
+  },
+  {
+   "cell_type": "markdown",
+   "metadata": {},
+   "source": [
+    "## Summary\n",
+    "\n",
+    "- The CO_WestCentral_2019 USGS 3DEP LiDAR was acquired between 2019-08-21 and 2019-09-19\n",
+    "- We found 7 Maxar stereo acquisitions within 14 days of the LiDAR\n",
+    "- We found 14 GEDI acquisitions that overlap the lidar+stereo footprints within 40 days of the LiDAR\n",
+    "- We found 7 ICESat-2 acquisitions that overlap the combined lidar+stereo+GEDI footprints within 60 days of the LiDAR\n",
+    "\n",
+    "The final 'overlap' polygons each have at least 30 km^2 of area in which all of the datasets intersect. Acquisition dates for any given footprint vary from 60 days before the LiDAR was acquired through 60 days afterwards.
" + ] + } + ], + "metadata": { + "kernelspec": { + "display_name": "dev", + "language": "python", + "name": "python3" + }, + "language_info": { + "codemirror_mode": { + "name": "ipython", + "version": 3 + }, + "file_extension": ".py", + "mimetype": "text/x-python", + "name": "python", + "nbconvert_exporter": "python", + "pygments_lexer": "ipython3", + "version": "3.12.7" + } + }, + "nbformat": 4, + "nbformat_minor": 2 +} diff --git a/docs/examples/index.md b/docs/examples/index.md new file mode 100644 index 0000000..1f95832 --- /dev/null +++ b/docs/examples/index.md @@ -0,0 +1,11 @@ +# Examples + +This section contains Jupyter Notebooks with narrative workflows using the +`coincident` library. + +```{toctree} +:maxdepth: 1 + +quickstart +cascading_search +``` diff --git a/docs/quickstart.ipynb b/docs/examples/quickstart.ipynb similarity index 99% rename from docs/quickstart.ipynb rename to docs/examples/quickstart.ipynb index 863be35..6491ee0 100644 --- a/docs/quickstart.ipynb +++ b/docs/examples/quickstart.ipynb @@ -14,7 +14,7 @@ "1. reduce to region with coincident maxar stereo within an acceptable temporal range\n", "1. optionally reduce further with additional datasets such as icesat-2 and gedi altimetry\n", "\n", - "This notebook provides and example starting from USGS 3DEP LiDAR in Colorado, USA" + "This notebook provides an example starting from USGS 3DEP LiDAR in Colorado, USA" ] }, { diff --git a/docs/index.md b/docs/index.md index e5cecd0..27c73f4 100644 --- a/docs/index.md +++ b/docs/index.md @@ -5,26 +5,9 @@ ``` ```{toctree} -:maxdepth: 2 :hidden: -installation -introduction -datasets -``` - -```{toctree} -:maxdepth: 2 -:hidden: -:caption: Examples - -quickstart -``` - -```{toctree} -:maxdepth: 2 -:hidden: -:caption: API - -api +user_guide/index.md +examples/index.md +api.rst ``` diff --git a/docs/installation.md b/docs/installation.md deleted file mode 100644 index 2ed0b0a..0000000 --- a/docs/installation.md +++ /dev/null @@ -1,18 +0,0 @@ -# Installation - -If you'd like to install into an existing environment we recommend installing -coincident directly from GitHub: - -```bash -pip install git+https://github.com/uw-cryo/coincident.git@main -``` - -Alternatively, you can install a fresh locked environment using the -[GitHub CLI](https://cli.github.com) and [pixi.sh](https://pixi.sh/latest/): - -```bash -gh repo clone uw-cryo/coincident -cd coincident -pixi install -pixi shell -``` diff --git a/docs/user_guide/contribute.md b/docs/user_guide/contribute.md new file mode 100644 index 0000000..8f5a429 --- /dev/null +++ b/docs/user_guide/contribute.md @@ -0,0 +1,19 @@ +# Contributing Guide + +Contributions are welcome! 
+
+We recommend using [pixi](https://pixi.sh) for environment management:
+
+```bash
+git clone https://github.com/uw-cryo/coincident.git
+cd coincident
+git checkout -b newfeature
+pixi shell --environment dev # type `exit` to deactivate
+pre-commit install
+
+# Or run pre-configured commands:
+pixi run networktest # or 'test'
+pixi run precommit # also runs automatically upon commits
+pixi run lint
+pixi run docs
+```
diff --git a/docs/datasets.md b/docs/user_guide/datasets.md
similarity index 100%
rename from docs/datasets.md
rename to docs/user_guide/datasets.md
diff --git a/docs/user_guide/index.md b/docs/user_guide/index.md
new file mode 100644
index 0000000..11fee0b
--- /dev/null
+++ b/docs/user_guide/index.md
@@ -0,0 +1,29 @@
+# User Guide
+
+This section contains basic guidance on installing and using the `coincident`
+library.
+
+```{toctree}
+:maxdepth: 1
+:hidden:
+:caption: Getting Started
+
+installation
+introduction
+```
+
+```{toctree}
+:maxdepth: 1
+:hidden:
+:caption: Datasets
+
+datasets
+```
+
+```{toctree}
+:maxdepth: 1
+:hidden:
+:caption: Contributing
+
+contribute
+```
diff --git a/docs/user_guide/installation.md b/docs/user_guide/installation.md
new file mode 100644
index 0000000..dac1b6d
--- /dev/null
+++ b/docs/user_guide/installation.md
@@ -0,0 +1,37 @@
+# Installation
+
+If you'd like to install into an existing environment, we recommend installing
+coincident directly from GitHub:
+
+```bash
+pip install git+https://github.com/uw-cryo/coincident.git@main
+```
+
+Alternatively, you can install a fresh locked environment using the
+[GitHub CLI](https://cli.github.com) and [pixi.sh](https://pixi.sh/latest/):
+
+```bash
+gh repo clone uw-cryo/coincident
+cd coincident
+pixi install
+pixi shell
+```
+
+## Authentication
+
+Some datasets require authentication to _search_ (Maxar), while others only
+require authentication to _download_ data (NASA). `coincident` assumes you
+have the following environment variables defined:
+
+```bash
+export EARTHDATA_USERNAME=aaaaa
+export EARTHDATA_PASSWORD=bbbbb
+export MAXAR_API_KEY=ccccc
+export PC_SDK_SUBSCRIPTION_KEY=ddddd
+```
+
+Sign up for credentials at the following webpages:
+
+- https://urs.earthdata.nasa.gov
+- https://developers.maxar.com/docs/authentication/guides/api-key
+- https://planetarycomputer.developer.azure-api.net
diff --git a/docs/introduction.md b/docs/user_guide/introduction.md
similarity index 95%
rename from docs/introduction.md
rename to docs/user_guide/introduction.md
index 2161aca..b0d6749 100644
--- a/docs/introduction.md
+++ b/docs/user_guide/introduction.md
@@ -39,4 +39,4 @@ gf.explore(column="workunit", popup=True)
 `coincident` also provides a number of convenience functions, some of which
 only pertain to specific datasets. For example, loading raster imagery via
 [Xarray](https://docs.xarray.dev/en/stable) or creating visualizations of browse
-imagery. Refer to [the API Docs](./api) for a listing of functions.
+imagery. Refer to [the API Docs](../api) for a listing of functions.
diff --git a/pixi.lock b/pixi.lock
index fa6a627..2324a29 100644
--- a/pixi.lock
+++ b/pixi.lock
@@ -3133,9 +3133,9 @@ packages:
   requires_python: '>=3.8'
 - kind: pypi
   name: coincident
-  version: 0.1.dev32+ge147cb5
+  version: 0.1.dev34+gd35fb3f.d20241101
   path: .
-  sha256: 144af2002bfff5ea2468d95487eeaafe1ea5f1f473c2660f2362a27d8a83aea5
+  sha256: a63116f47c1734a666e812f4002a1e91949d0229437b571ba9ae4e913c2c5a0b
   requires_dist:
   - cloudpathlib[s3]<0.21,>=0.20.0
   - geopandas<2,>=1.0.1
diff --git a/pyproject.toml b/pyproject.toml
index ed9e622..ac527ca 100644
--- a/pyproject.toml
+++ b/pyproject.toml
@@ -192,7 +192,8 @@ messages_control.disable = [
   "missing-function-docstring",
   "wrong-import-position",
   "C0103", # Allow ALLCAPS variable names
-  "W0212", # Access to a protected member _...
+  "W0212", # Access to a protected member
+  "W0102", # Dangerous default value as argument []
 ]

 [tool.pixi.project]
diff --git a/src/coincident/search/__init__.py b/src/coincident/search/__init__.py
index cb094a4..a4a7b6a 100644
--- a/src/coincident/search/__init__.py
+++ b/src/coincident/search/__init__.py
@@ -6,7 +6,7 @@

 from __future__ import annotations

-from coincident.search.main import search
+from coincident.search.main import cascading_search, search
 from coincident.search.stac import to_pystac_items

-__all__ = ["search", "to_pystac_items"]
+__all__ = ["search", "cascading_search", "to_pystac_items"]
diff --git a/src/coincident/search/main.py b/src/coincident/search/main.py
index 8d15e39..9669812 100644
--- a/src/coincident/search/main.py
+++ b/src/coincident/search/main.py
@@ -10,6 +10,7 @@

 from coincident.datasets import _alias_to_Dataset
 from coincident.datasets.general import Dataset
+from coincident.overlaps import subset_by_minimum_area
 from coincident.search import stac, wesm

 _pystac_client = _ItemSearch("no_url")
@@ -212,3 +213,76 @@ def _validate_spatial_bounds(
     if len(intersects) > 1:
         message = "GeoDataFrame contains multiple geometries, search requires a single geometry"
         raise ValueError(message)
+
+
+def cascading_search(
+    primary_dataset: gpd.GeoDataFrame,
+    secondary_datasets: list[tuple[str, int]] = [  # noqa: B006
+        ("maxar", 14),
+        ("icesat", 40),
+        ("gedi", 40),
+    ],
+    min_overlap_area: float = 20,
+) -> list[gpd.GeoDataFrame]:
+    """
+    Perform a cascading search to find overlapping datasets acquired within specific time ranges.
+
+    Each secondary dataset is searched based only on spatial overlap areas with previously searched datasets. In other words, the overlapping area is progressively reduced.
+
+    The temporal buffer is applied as either (datetime-buffer <= acquisition <= datetime+buffer)
+    or (start_datetime-buffer <= acquisition <= end_datetime+buffer).
+
+    Parameters
+    ----------
+    primary_dataset : gpd.GeoDataFrame
+        The primary dataset, having either a 'datetime' column or 'start_datetime' and 'end_datetime' columns.
+
+    secondary_datasets : list of tuple, optional
+        Each tuple contains the name of a secondary dataset and a temporal buffer in days.
+
+    min_overlap_area : float, optional
+        The minimum overlap area in km^2. Default is 20.
+
+    Returns
+    -------
+    list of gpd.GeoDataFrame
+        A list of GeoDataFrames containing the search results for each secondary dataset.
+    """
+    # Do searches on simple geometry, but intersect results with original geometry
+    search_geometry = primary_dataset.simplify(0.01)  # or convex_hull?
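+    # NOTE: a tolerance of 0.01 degrees (roughly 1 km) keeps request payloads
+    # small; results are re-intersected with detailed_geometry below, so the
+    # final overlaps are not affected by this simplification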
+ detailed_geometry = primary_dataset[["geometry"]] + + if "end_datetime" in primary_dataset.columns: + start = primary_dataset.start_datetime.iloc[0] + end = primary_dataset.end_datetime.iloc[0] + else: + start = end = primary_dataset.datetime.iloc[0] + + results = [] + for dataset, temporal_buffer in secondary_datasets: + pad = gpd.pd.Timedelta(days=temporal_buffer) + date_range = [start - pad, end + pad] + + # Search secondary dataset + gfs = search( + dataset=dataset, + intersects=search_geometry, + datetime=date_range, + ) + + if dataset == "maxar": + gfs["stereo_pair_id"] = gfs.stereo_pair_identifiers.str[0] + gfs = gfs.dissolve(by="stereo_pair_id", as_index=False) + + # Keep track of original footprints + gfs["original_geometry"] = gfs["geometry"] + + gf_i = gfs.overlay(detailed_geometry, how="intersection") + gf_i = subset_by_minimum_area(gf_i, min_overlap_area) + results.append(gf_i) + + # We've refined our search polygon again, so update search GeoDataFrame + detailed_geometry = gf_i.dissolve()[["geometry"]] + search_geometry = detailed_geometry.simplify(0.01) + + return results diff --git a/src/coincident/search/stac.py b/src/coincident/search/stac.py index 4c38215..6378696 100644 --- a/src/coincident/search/stac.py +++ b/src/coincident/search/stac.py @@ -17,14 +17,14 @@ try: import maxar_platform except ImportError: - message = "'maxar-platform' package not found. Install for maxar functionality: https://pypi.org/project/maxar-platform/" - warnings.warn(message, stacklevel=2) + msg_notfound = "'maxar-platform' package not found. Install for maxar functionality: https://pypi.org/project/maxar-platform/" + warnings.warn(msg_notfound, stacklevel=2) try: import maxar_platform.discovery except maxar_platform.session.NoSessionCredentials: - message = "Unable to authenticate with Maxar API. Please set MAXAR_API_KEY environment variable." - warnings.warn(message, stacklevel=2) + msg_noauth = "Unable to authenticate with Maxar API. Please set MAXAR_API_KEY environment variable." 
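+    # NOTE: warn rather than raise so `coincident` stays importable and
+    # non-Maxar searches keep working when Maxar credentials are not set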
+    warnings.warn(msg_noauth, stacklevel=2)


 def to_geopandas(
diff --git a/tests/test_search.py b/tests/test_search.py
index 9c0cc70..b7611e7 100644
--- a/tests/test_search.py
+++ b/tests/test_search.py
@@ -4,7 +4,7 @@
 import pytest
 from geopandas.testing import assert_geodataframe_equal

-import coincident as m
+import coincident

 # Decorate tests requiring internet (slow & flaky)
 network = pytest.mark.network
@@ -12,13 +12,14 @@

 @pytest.fixture
 def aoi():
+    # 11 vertices, ~1,361 km^2
     aoi_url = "https://raw.githubusercontent.com/SlideRuleEarth/sliderule-python/main/data/grandmesa.geojson"
     return gpd.read_file(aoi_url)


 @pytest.fixture
 def large_aoi():
-    # 260 vertices, large area
+    # 260 vertices, large area (~269,590 km^2)
     aoi_url = "https://raw.githubusercontent.com/unitedstates/districts/refs/heads/gh-pages/states/CO/shape.geojson"
     return gpd.read_file(aoi_url)

@@ -27,36 +28,56 @@ def test_no_dataset_specified():
     with pytest.raises(
         TypeError, match="missing 1 required positional argument: 'dataset'"
     ):
-        m.search.search(intersects="-120, 40, -121, 41")  # type: ignore[call-arg]
+        coincident.search.search(intersects="-120, 40, -121, 41")  # type: ignore[call-arg]


 def test_unknown_dataset_specified():
     with pytest.raises(ValueError, match="is not a supported dataset"):
-        m.search.search(dataset="typo", intersects="-120, 40, -121, 41")
+        coincident.search.search(dataset="typo", intersects="-120, 40, -121, 41")


 def test_polygon_invalid_type():
     with pytest.raises(
         ValueError, match="intersects value must be a GeoDataFrame or GeoSeries"
     ):
-        m.search.search(dataset="3dep", intersects="-120, 40, -121, 41")
+        coincident.search.search(dataset="3dep", intersects="-120, 40, -121, 41")


 def test_to_geopandas_empty_search_result():
     with pytest.raises(ValueError, match="ItemCollection is empty"):
-        m.search.stac.to_geopandas([])
+        coincident.search.stac.to_geopandas([])


 def test_unconstrained_search_warns():
     with pytest.warns(match="Neither `bbox` nor `intersects` provided"):
-        m.search.search(dataset="tdx")
+        coincident.search.search(dataset="tdx")
+
+
+@network
+def test_cascading_search(aoi):
+    aoi["datetime"] = gpd.pd.to_datetime("2019-06-12", utc=True)
+    pad = 30
+    secondary_datasets = [("icesat-2", pad), ("gedi", pad)]
+    results = coincident.search.cascading_search(aoi, secondary_datasets)
+
+    expected_min = aoi.datetime.iloc[0] - gpd.pd.Timedelta(days=pad)
+    expected_max = aoi.datetime.iloc[0] + gpd.pd.Timedelta(days=pad)
+    actual_min = results[0].datetime.min()
+    actual_max = results[0].datetime.max()
+
+    assert isinstance(results, list)
+    assert len(results) == 2
+    assert len(results[0]) == 5
+    assert len(results[1]) == 4
+    assert actual_min >= expected_min
+    assert actual_max <= expected_max


 # TODO: add more assertions / tests for this section
 @network
 @pytest.mark.filterwarnings("ignore:Server does not conform")
 def test_maxar_search(aoi):
-    gf = m.search.search(
+    gf = coincident.search.search(
         dataset="maxar",
         intersects=aoi,
         datetime="2023",
@@ -71,7 +92,7 @@

 @network
 def test_maxar_large_aoi(large_aoi):
-    gf = m.search.search(
+    gf = coincident.search.search(
         dataset="maxar",
         intersects=large_aoi,
         datetime="2023",
@@ -85,7 +106,7 @@
 # =======
 @network
 def test_icesat2_search(aoi):
-    gf = m.search.search(
+    gf = coincident.search.search(
         dataset="icesat-2",
         intersects=aoi,
         datetime="2023",
@@ -96,7 +117,7 @@

 @network
 def test_gedi_search(aoi):
-    gf = m.search.search(
+    gf = coincident.search.search(
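+        # NOTE: a bare year string like "2022" is expanded to that full
+        # calendar year, following the pystac-client datetime convention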
dataset="gedi", intersects=aoi, datetime="2022", @@ -106,7 +127,9 @@ def test_gedi_search(aoi): @network def test_tdx_search(aoi): - gf = m.search.search(dataset="tdx", intersects=aoi, datetime=["2009", "2020"]) + gf = coincident.search.search( + dataset="tdx", intersects=aoi, datetime=["2009", "2020"] + ) assert len(gf) == 48 assert gf["sar:product_type"].unique() == "SSC" @@ -115,20 +138,20 @@ def test_tdx_search(aoi): # ======= @network def test_cop30_search(aoi): - gf = m.search.search(dataset="cop30", intersects=aoi) + gf = coincident.search.search(dataset="cop30", intersects=aoi) assert len(gf) == 4 @network def test_worldcover_search(aoi): - gf = m.search.search(dataset="worldcover", intersects=aoi, datetime="2020") + gf = coincident.search.search(dataset="worldcover", intersects=aoi, datetime="2020") assert len(gf) == 4 @network def test_round_trip_parquet(aoi): outpath = "/tmp/search_results.parquet" - A = m.search.search(dataset="cop30", intersects=aoi) + A = coincident.search.search(dataset="cop30", intersects=aoi) A.to_parquet(outpath) B = gpd.read_parquet(outpath) assert_geodataframe_equal(A, B) @@ -138,7 +161,7 @@ def test_round_trip_parquet(aoi): # ======= @network def test_wesm_search(aoi): - gf = m.search.search( + gf = coincident.search.search( dataset="3dep", intersects=aoi, ) @@ -148,7 +171,7 @@ def test_wesm_search(aoi): # NOTE ~10s on wifi @network def test_get_swath_polygon(): - gf = m.search.wesm.get_swath_polygons("CO_CameronPkFire_1_2021") + gf = coincident.search.wesm.get_swath_polygons("CO_CameronPkFire_1_2021") assert isinstance(gf, gpd.GeoDataFrame) assert len(gf) == 51 assert "start_datetime" in gf.columns @@ -158,4 +181,4 @@ def test_get_swath_polygon(): @network def test_swath_polygon_not_found(): with pytest.raises(ValueError, match="No swath polygons found for workunit="): - m.search.wesm.get_swath_polygons("AL_SWCentral_1_B22") + coincident.search.wesm.get_swath_polygons("AL_SWCentral_1_B22")