From 65d7f8dad57c0179899d10f8e8d95e1aa9444f25 Mon Sep 17 00:00:00 2001 From: Panos Mavrogiorgos Date: Wed, 15 May 2024 08:05:34 +0300 Subject: [PATCH 01/15] deps: Add `multifutures`, `tenacity`, `httpx` and `deprecated` --- poetry.lock | 53 ++++++++++++++++++++++++++++--- pyproject.toml | 8 ++++- requirements/requirements-dev.txt | 7 ++-- requirements/requirements.txt | 4 ++- 4 files changed, 64 insertions(+), 8 deletions(-) diff --git a/poetry.lock b/poetry.lock index 4766ea0..1584a91 100644 --- a/poetry.lock +++ b/poetry.lock @@ -1813,6 +1813,25 @@ files = [ {file = "multidict-6.0.5.tar.gz", hash = "sha256:f7e301075edaf50500f0b341543c41194d8df3ae5caf4702f2095f3ca73dd8da"}, ] +[[package]] +name = "multifutures" +version = "0.3.2" +description = "Multiprocessing/multithreading made easy" +optional = false +python-versions = ">=3.9" +files = [ + {file = "multifutures-0.3.2-py3-none-any.whl", hash = "sha256:4ab88743d9734df949c58a97342ebeafacb4a933ebba3a57d67f69223a3b31d7"}, + {file = "multifutures-0.3.2.tar.gz", hash = "sha256:bd3745c42c9424b8c7001d0a9c358df217f29531a5517532cdcba3b4e5590beb"}, +] + +[package.dependencies] +exceptiongroup = "*" +limits = "*" +tqdm = "*" + +[package.extras] +full = ["loky", "psutil"] + [[package]] name = "mypy" version = "1.10.0" @@ -2217,13 +2236,13 @@ xmp = ["defusedxml"] [[package]] name = "platformdirs" -version = "4.2.1" +version = "4.2.2" description = "A small Python package for determining appropriate platform-specific dirs, e.g. a `user data dir`." optional = false python-versions = ">=3.8" files = [ - {file = "platformdirs-4.2.1-py3-none-any.whl", hash = "sha256:17d5a1161b3fd67b390023cb2d3b026bbd40abde6fdb052dfbd3a29c3ba22ee1"}, - {file = "platformdirs-4.2.1.tar.gz", hash = "sha256:031cd18d4ec63ec53e82dceaac0417d218a6863f7745dfcc9efe7793b7039bdf"}, + {file = "platformdirs-4.2.2-py3-none-any.whl", hash = "sha256:2d7a1657e36a80ea911db832a8a6ece5ee53d8de21edd5cc5879af6530b1bfee"}, + {file = "platformdirs-4.2.2.tar.gz", hash = "sha256:38b7b51f512eed9e84a22788b4bce1de17c0adb134d6becb09836e37d8654cd3"}, ] [package.extras] @@ -3270,6 +3289,21 @@ pure-eval = "*" [package.extras] tests = ["cython", "littleutils", "pygments", "pytest", "typeguard"] +[[package]] +name = "tenacity" +version = "8.3.0" +description = "Retry code until it succeeds" +optional = false +python-versions = ">=3.8" +files = [ + {file = "tenacity-8.3.0-py3-none-any.whl", hash = "sha256:3649f6443dbc0d9b01b9d8020a9c4ec7a1ff5f6f3c6c8a036ef371f573fe9185"}, + {file = "tenacity-8.3.0.tar.gz", hash = "sha256:953d4e6ad24357bceffbc9707bc74349aca9d245f68eb65419cf0c249a1949a2"}, +] + +[package.extras] +doc = ["reno", "sphinx"] +test = ["pytest", "tornado (>=4.5)", "typeguard"] + [[package]] name = "tinycss2" version = "1.3.0" @@ -3365,6 +3399,17 @@ files = [ docs = ["myst-parser", "pydata-sphinx-theme", "sphinx"] test = ["argcomplete (>=3.0.3)", "mypy (>=1.7.0)", "pre-commit", "pytest (>=7.0,<8.2)", "pytest-mock", "pytest-mypy-testing"] +[[package]] +name = "types-deprecated" +version = "1.2.9.20240311" +description = "Typing stubs for Deprecated" +optional = false +python-versions = ">=3.8" +files = [ + {file = "types-Deprecated-1.2.9.20240311.tar.gz", hash = "sha256:0680e89989a8142707de8103f15d182445a533c1047fd9b7e8c5459101e9b90a"}, + {file = "types_Deprecated-1.2.9.20240311-py3-none-any.whl", hash = "sha256:d7793aaf32ff8f7e49a8ac781de4872248e0694c4b75a7a8a186c51167463f9d"}, +] + [[package]] name = "types-requests" version = "2.31.0.6" @@ -3693,4 +3738,4 @@ testing = ["big-O", "jaraco.functools", 
"jaraco.itertools", "more-itertools", "p [metadata] lock-version = "2.0" python-versions = ">=3.9, <4.0" -content-hash = "ac7b95f54ecb7360c01a2a125b8b69613a6a87816ef6fb8f9cdf688b9c1b919e" +content-hash = "0bbdf70240eb6c9c4252770db75516f2c4c65cf8f3c0c0ff08b24be40728a075" diff --git a/pyproject.toml b/pyproject.toml index d5cc1f8..7a365ef 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -31,16 +31,20 @@ classifiers = [ python = ">=3.9, <4.0" beautifulsoup4 = "*" dataretrieval = ">=1" +deprecated = "*" erddapy = "*" geopandas = "*" html5lib = "*" +httpx = "*" limits = "*" lxml = {version = "*", extras = ["html_clean"]} +multifutures = {version = "*"} numpy = "*" pandas = "*" pydantic = ">2" requests = "*" Shapely = "*" +tenacity = "*" tqdm = "*" typing-extensions = "*" xarray = "*" @@ -54,6 +58,7 @@ pytest = "*" pytest-cov = ">=3.0" pytest-recording = "*" pytest-xdist = "*" +types-deprecated = "*" types-requests = "*" urllib3 = "<2" # https://github.com/kevin1024/vcrpy/issues/688 @@ -93,10 +98,11 @@ log_cli = true filterwarnings = [ 'ignore:distutils Version classes are deprecated. Use packaging.version instead:DeprecationWarning', 'ignore:Deprecated call to `pkg_resources.declare_namespace:DeprecationWarning', + 'ignore::DeprecationWarning', ] [tool.mypy] -python_version = "3.8" +python_version = "3.9" plugins = ["pydantic.mypy"] show_error_codes = true show_column_numbers = true diff --git a/requirements/requirements-dev.txt b/requirements/requirements-dev.txt index 8de6cfe..cf276f5 100644 --- a/requirements/requirements-dev.txt +++ b/requirements/requirements-dev.txt @@ -29,7 +29,7 @@ docutils==0.21.2 ; python_version >= "3.9" and python_version < "4.0" dunamai==1.21.1 ; python_version >= "3.9" and python_version < "4.0" entrypoints==0.4 ; python_version >= "3.9" and python_version < "4.0" erddapy==2.2.0 ; python_version >= "3.9" and python_version < "4.0" -exceptiongroup==1.2.1 ; python_version >= "3.9" and python_version < "3.11" +exceptiongroup==1.2.1 ; python_version >= "3.9" and python_version < "4.0" execnet==2.1.1 ; python_version >= "3.9" and python_version < "4.0" executing==2.0.1 ; python_version >= "3.9" and python_version < "4.0" fastjsonschema==2.19.1 ; python_version >= "3.9" and python_version < "4.0" @@ -66,6 +66,7 @@ matplotlib-inline==0.1.7 ; python_version >= "3.9" and python_version < "4.0" matplotlib==3.8.4 ; python_version >= "3.9" and python_version < "4.0" mistune==0.8.4 ; python_version >= "3.9" and python_version < "4.0" multidict==6.0.5 ; python_version >= "3.9" and python_version < "4.0" +multifutures==0.3.2 ; python_version >= "3.9" and python_version < "4.0" mypy-extensions==1.0.0 ; python_version >= "3.9" and python_version < "4.0" mypy==1.10.0 ; python_version >= "3.9" and python_version < "4.0" nbclient==0.10.0 ; python_version >= "3.9" and python_version < "4.0" @@ -79,7 +80,7 @@ pandocfilters==1.5.1 ; python_version >= "3.9" and python_version < "4.0" parso==0.8.4 ; python_version >= "3.9" and python_version < "4.0" pexpect==4.9.0 ; python_version >= "3.9" and python_version < "4.0" and sys_platform != "win32" pillow==10.3.0 ; python_version >= "3.9" and python_version < "4.0" -platformdirs==4.2.1 ; python_version >= "3.9" and python_version < "4.0" +platformdirs==4.2.2 ; python_version >= "3.9" and python_version < "4.0" pluggy==1.5.0 ; python_version >= "3.9" and python_version < "4.0" prompt-toolkit==3.0.43 ; python_version >= "3.9" and python_version < "4.0" psutil==5.9.8 ; python_version >= "3.9" and python_version < "4.0" @@ -120,12 +121,14 @@ 
sphinxcontrib-qthelp==1.0.7 ; python_version >= "3.9" and python_version < "4.0"
sphinxcontrib-serializinghtml==1.1.10 ; python_version >= "3.9" and python_version < "4.0"
sphinxext-opengraph==0.9.1 ; python_version >= "3.9" and python_version < "4.0"
stack-data==0.6.3 ; python_version >= "3.9" and python_version < "4.0"
+tenacity==8.3.0 ; python_version >= "3.9" and python_version < "4.0"
tinycss2==1.3.0 ; python_version >= "3.9" and python_version < "4.0"
toml==0.10.2 ; python_version >= "3.9" and python_version < "4.0"
tomli==2.0.1 ; python_full_version <= "3.11.0a6" and python_version >= "3.9"
tornado==6.4 ; python_version >= "3.9" and python_version < "4.0"
tqdm==4.66.4 ; python_version >= "3.9" and python_version < "4.0"
traitlets==5.14.3 ; python_version >= "3.9" and python_version < "4.0"
+types-deprecated==1.2.9.20240311 ; python_version >= "3.9" and python_version < "4.0"
types-requests==2.31.0.6 ; python_version >= "3.9" and python_version < "4.0"
types-urllib3==1.26.25.14 ; python_version >= "3.9" and python_version < "4.0"
typing-extensions==4.11.0 ; python_version >= "3.9" and python_version < "4.0"
diff --git a/requirements/requirements.txt b/requirements/requirements.txt
index d38c766..3731cb9 100644
--- a/requirements/requirements.txt
+++ b/requirements/requirements.txt
@@ -11,7 +11,7 @@ colorama==0.4.6 ; python_version >= "3.9" and python_version < "4.0" and platfor
dataretrieval==1.0.8 ; python_version >= "3.9" and python_version < "4.0"
deprecated==1.2.14 ; python_version >= "3.9" and python_version < "4.0"
erddapy==2.2.0 ; python_version >= "3.9" and python_version < "4.0"
-exceptiongroup==1.2.1 ; python_version >= "3.9" and python_version < "3.11"
+exceptiongroup==1.2.1 ; python_version >= "3.9" and python_version < "4.0"
fiona==1.9.6 ; python_version >= "3.9" and python_version < "4.0"
geopandas==0.14.4 ; python_version >= "3.9" and python_version < "4.0"
h11==0.14.0 ; python_version >= "3.9" and python_version < "4.0"
@@ -25,6 +25,7 @@ limits==3.12.0 ; python_version >= "3.9" and python_version < "4.0"
lxml-html-clean==0.1.1 ; python_version >= "3.9" and python_version < "4.0"
lxml==5.2.2 ; python_version >= "3.9" and python_version < "4.0"
lxml[html-clean]==5.2.2 ; python_version >= "3.9" and python_version < "4.0"
+multifutures==0.3.2 ; python_version >= "3.9" and python_version < "4.0"
numpy==1.26.4 ; python_version >= "3.9" and python_version < "4.0"
packaging==24.0 ; python_version >= "3.9" and python_version < "4.0"
pandas==2.2.2 ; python_version >= "3.9" and python_version < "4.0"
@@ -38,6 +39,7 @@ shapely==2.0.4 ; python_version >= "3.9" and python_version < "4.0"
six==1.16.0 ; python_version >= "3.9" and python_version < "4.0"
sniffio==1.3.1 ; python_version >= "3.9" and python_version < "4.0"
soupsieve==2.5 ; python_version >= "3.9" and python_version < "4.0"
+tenacity==8.3.0 ; python_version >= "3.9" and python_version < "4.0"
tqdm==4.66.4 ; python_version >= "3.9" and python_version < "4.0"
typing-extensions==4.11.0 ; python_version >= "3.9" and python_version < "4.0"
tzdata==2024.1 ; python_version >= "3.9" and python_version < "4.0"
From 8f1040d55b18f86a23e8e00e82eb5c9d3123d4a1 Mon Sep 17 00:00:00 2001
From: Panos Mavrogiorgos
Date: Fri, 26 Jan 2024 19:45:11 +0200
Subject: [PATCH 02/15] ioc: Add `fetch_ioc_station()`

---
 docs/source/ioc.rst | 5 +
 searvey/__init__.py | 2 +
 searvey/custom_types.py | 15 +-
 searvey/ioc.py | 352 ++++++++++++++++++++++++++++++++++++++++
 searvey/utils.py | 14 +-
 tests/ioc_api_test.py | 229 ++++++++++++++++++++++++++
 6 
files changed, 607 insertions(+), 10 deletions(-) create mode 100644 tests/ioc_api_test.py diff --git a/docs/source/ioc.rst b/docs/source/ioc.rst index d02f5cc..42c7c0e 100644 --- a/docs/source/ioc.rst +++ b/docs/source/ioc.rst @@ -13,3 +13,8 @@ A list of IOC stations is provided with the ``get_ioc_stations()`` function with The station data can be retrieved with .. autofunction:: searvey.ioc.get_ioc_data + +New API +------- + +.. autofunction:: searvey.ioc.fetch_ioc_station diff --git a/searvey/__init__.py b/searvey/__init__.py index 50de672..073e307 100644 --- a/searvey/__init__.py +++ b/searvey/__init__.py @@ -1,6 +1,7 @@ from __future__ import annotations from searvey.coops import get_coops_stations +from searvey.ioc import fetch_ioc_station from searvey.ioc import get_ioc_data from searvey.ioc import get_ioc_stations from searvey.stations import get_stations @@ -9,6 +10,7 @@ __all__: list[str] = [ + "fetch_ioc_station", "get_coops_stations", "get_ioc_data", "get_ioc_stations", diff --git a/searvey/custom_types.py b/searvey/custom_types.py index fbb9b7a..0721383 100644 --- a/searvey/custom_types.py +++ b/searvey/custom_types.py @@ -1,17 +1,16 @@ from __future__ import annotations import datetime -from typing import Any -from typing import Dict +import typing as T from typing import TypeVar -from typing import Union +import numpy as np import numpy.typing as npt import pandas as pd -from typing_extensions import TypeAlias # "from typing" in Python 3.9+ +from typing_extensions import TypeAlias -StrDict: TypeAlias = Dict[str, Any] -DateTimeLike: TypeAlias = Union[str, datetime.date, datetime.datetime, pd.Timestamp] - -ScalarOrArray = TypeVar("ScalarOrArray", int, float, npt.NDArray[Any]) +StrDict: TypeAlias = T.Dict[str, T.Any] +ScalarOrArray = TypeVar("ScalarOrArray", int, float, npt.NDArray[T.Any]) +DateTimeLike: TypeAlias = T.Union[str, datetime.date, datetime.datetime, pd.Timestamp] +DatetimeLike: TypeAlias = T.Union[str, datetime.date, pd.Timestamp, datetime.datetime, np.datetime64] diff --git a/searvey/ioc.py b/searvey/ioc.py index 6749ca5..815b8f9 100644 --- a/searvey/ioc.py +++ b/searvey/ioc.py @@ -9,25 +9,33 @@ # We parse all 3 of them and we merge them. from __future__ import annotations +import collections import functools import io import logging +import typing as T import warnings +from collections import abc from typing import Optional from typing import Union import bs4 import geopandas as gpd import html5lib # noqa: F401 # imported but unused +import httpx import limits import lxml # noqa: F401 # imported but unused +import multifutures import pandas as pd import requests +import tenacity import xarray as xr +from deprecated import deprecated from shapely.geometry import MultiPolygon from shapely.geometry import Polygon from .custom_types import DateTimeLike +from .custom_types import DatetimeLike from .multi import multiprocess from .multi import multithread from .rate_limit import RateLimit @@ -35,6 +43,7 @@ from .utils import get_region from .utils import merge_datasets from .utils import NOW +from .utils import pairwise from .utils import resolve_timestamp @@ -250,6 +259,10 @@ def normalize_ioc_station_data(ioc_code: str, df: pd.DataFrame, truncate_seconds return df +@deprecated( + version="0.3.11", + reason="This function is deprecated and will be removed in the future. 
Replace it with `fetch_ioc_station`.", +) def get_ioc_station_data( ioc_code: str, endtime: DateTimeLike = NOW, @@ -284,6 +297,10 @@ def get_ioc_station_data( return df +@deprecated( + version="0.3.11", + reason="This function is deprecated and will be removed in the future. Replace it with `fetch_ioc_station`.", +) def get_ioc_data( ioc_metadata: pd.DataFrame, endtime: DateTimeLike = NOW, @@ -371,3 +388,338 @@ def get_ioc_data( # Do the final merging ds = xr.merge(datasets) return ds + + +############## API ################ + + +BASE_URL = "https://www.ioc-sealevelmonitoring.org/service.php?query=data×tart={timestart}×top={timestop}&code={ioc_code}" + +IOC_URL_TS_FORMAT = "%Y-%m-%dT%H:%M:%S" +IOC_JSON_TS_FORMAT = "%Y-%m-%d %H:%M:%S" + + +def _before_sleep(retry_state: T.Any) -> None: # pragma: no cover + logger.warning( + "Retrying %s: attempt %s ended with: %s", + retry_state.fn, + retry_state.attempt_number, + retry_state.outcome, + ) + + +RETRY: T.Callable[..., T.Any] = tenacity.retry( + stop=(tenacity.stop_after_delay(90) | tenacity.stop_after_attempt(10)), + wait=tenacity.wait_random(min=2, max=10), + retry=tenacity.retry_if_exception_type(httpx.TransportError), + before_sleep=_before_sleep, +) + + +def _fetch_url( + url: str, + client: httpx.Client, +) -> str: + try: + response = client.get(url) + except Exception: + logger.warning("Failed to retrieve: %s", url) + raise + data = response.text + return data + + +@RETRY +def fetch_url( + url: str, + client: httpx.Client, + rate_limit: multifutures.RateLimit | None = None, + **kwargs: T.Any, +) -> str: + if rate_limit is not None: # pragma: no cover + while rate_limit.reached(): + multifutures.wait() # pragma: no cover + return _fetch_url( + url=url, + client=client, + ) + + +def _parse_ioc_responses( + ioc_responses: list[multifutures.FutureResult], + executor: multifutures.ExecutorProtocol | None, +) -> list[multifutures.FutureResult]: + # Parse the json files using pandas + # This is a CPU heavy process, so let's use multiprocess + # Not all the urls contain data, so let's filter them out + kwargs = [] + for result in ioc_responses: + station_id = result.kwargs["station_id"] # type: ignore[index] + # if a url doesn't have any data instead of a 404, it returns an empty list `[]` + if result.result == "[]": + continue + # For some stations though we get a json like this: + # '[{"error":"code \'blri\' not found"}]' + # '[{"error":"code \'bmda2\' not found"}]' + # we should ignore these, too + elif result.result == f"""[{{"error":"code '{station_id}' not found"}}]""": + continue + # And if the IOC code does not match some pattern (5 letters?) 
+        elif result.result == '[{"error":"Incorrect code"}]':
+            continue
+        else:
+            kwargs.append(dict(station_id=station_id, content=io.StringIO(result.result)))
+    logger.debug("Starting JSON parsing")
+    results = multifutures.multiprocess(_parse_json, func_kwargs=kwargs, check=False, executor=executor)
+    multifutures.check_results(results)
+    logger.debug("Finished JSON parsing")
+    return results
+
+
+def _ioc_date(ts: pd.Timestamp) -> str:
+    formatted: str = ts.strftime(IOC_URL_TS_FORMAT)
+    return formatted
+
+
+def _generate_urls(
+    station_id: str,
+    start_date: pd.Timestamp,
+    end_date: pd.Timestamp,
+) -> list[str]:
+    if end_date < start_date:
+        raise ValueError(f"'end_date' must be after 'start_date': {end_date} vs {start_date}")
+    if end_date == start_date:
+        return []
+    duration = end_date - start_date
+    periods = duration.days // 30 + 2
+    urls = []
+    date_range = pd.date_range(start_date, end_date, periods=periods, unit="us", inclusive="both")
+    for start, stop in pairwise(date_range):
+        timestart = _ioc_date(start)
+        timestop = _ioc_date(stop)
+        url = BASE_URL.format(ioc_code=station_id, timestart=timestart, timestop=timestop)
+        urls.append(url)
+    return urls
+
+
+def _normalize_df(df: pd.DataFrame) -> pd.DataFrame:
+    normalized = (
+        df[df.sensor.isin(IOC_STATION_DATA_COLUMNS.values())]
+        .assign(stime=pd.DatetimeIndex(pd.to_datetime(df.stime.str.strip(), format=IOC_JSON_TS_FORMAT)))
+        .rename(columns={"stime": "time"})
+    )
+    # Occasionally IOC contains complete garbage, e.g. duplicate timestamps on the same sensor. We should drop those.
+    # https://www.ioc-sealevelmonitoring.org/service.php?query=data&timestart=2022-03-12T11:03:40&timestop=2022-04-11T09:04:26&code=acnj
+    duplicated_timestamps = normalized[["time", "sensor"]].duplicated()
+    if duplicated_timestamps.sum() > 0:
+        normalized = normalized[~duplicated_timestamps]
+        logger.warning(
+            "%s: Dropped duplicates: %d rows", normalized.attrs["station_id"], duplicated_timestamps.sum()
+        )
+    normalized = normalized.pivot(index="time", columns="sensor", values="slevel")
+    normalized._mgr.items.name = ""
+    return normalized
+
+
+def _parse_json(content: str, station_id: str) -> pd.DataFrame:
+    df = pd.read_json(content, orient="records")
+    df.attrs["station_id"] = f"IOC-{station_id}"
+    df = _normalize_df(df)
+    return df
+
+
+def _group_results(
+    station_ids: abc.Collection[str],
+    parsed_responses: list[multifutures.FutureResult],
+) -> dict[str, pd.DataFrame]:
+    # Group per IOC code
+    df_groups = collections.defaultdict(list)
+    for item in parsed_responses:
+        df_groups[item.kwargs["station_id"]].append(item.result)  # type: ignore[index]
+
+    # Concatenate dataframes and remove duplicates
+    dataframes: dict[str, pd.DataFrame] = {}
+    for station_id in station_ids:
+        if station_id in df_groups:
+            df_group = df_groups[station_id]
+            df = pd.concat(df_group)
+            df = df.sort_index()
+            logger.debug("IOC-%s: Total timestamps : %d", station_id, len(df))
+            df = df[~df.index.duplicated()]
+            logger.debug("IOC-%s: Unique timestamps: %d", station_id, len(df))
+        else:
+            logger.warning("IOC-%s: No data. 
Creating a dummy dataframe", station_id) + df = T.cast( + pd.DataFrame, pd.DataFrame(columns=["time"], dtype="datetime64[ns]").set_index("time") + ) + dataframes[station_id] = df + logger.debug("IOC-%s: Finished conversion to pandas", station_id) + + return dataframes + + +def _retrieve_ioc_data( + station_ids: abc.Collection[str], + start_dates: abc.Collection[pd.Timestamp], + end_dates: abc.Collection[pd.Timestamp], + rate_limit: multifutures.RateLimit, + http_client: httpx.Client, + executor: multifutures.ExecutorProtocol | None, +) -> list[multifutures.FutureResult]: + kwargs = [] + for station_id, start_date, end_date in zip(station_ids, start_dates, end_dates): + for url in _generate_urls(station_id=station_id, start_date=start_date, end_date=end_date): + if url: + kwargs.append( + dict( + station_id=station_id, + url=url, + client=http_client, + rate_limit=rate_limit, + ), + ) + with http_client: + logger.debug("Starting data retrieval") + results = multifutures.multithread( + func=fetch_url, func_kwargs=kwargs, check=False, executor=executor + ) + logger.debug("Finished data retrieval") + multifutures.check_results(results) + return results + + +def _resolve_rate_limit(rate_limit: multifutures.RateLimit | None) -> multifutures.RateLimit: + if rate_limit is None: + rate_limit = multifutures.RateLimit(rate_limit=limits.parse("5/second")) + return rate_limit + + +def _resolve_http_client(http_client: httpx.Client | None) -> httpx.Client: + if http_client is None: + timeout = httpx.Timeout(timeout=10, read=30) + http_client = httpx.Client(timeout=timeout) + return http_client + + +def _fetch_ioc( + station_ids: abc.Collection[str], + start_dates: pd.DatetimeIndex, + end_dates: pd.DatetimeIndex, + *, + rate_limit: multifutures.RateLimit | None, + http_client: httpx.Client | None, + multiprocessing_executor: multifutures.ExecutorProtocol | None, + multithreading_executor: multifutures.ExecutorProtocol | None, +) -> dict[str, pd.DataFrame]: + rate_limit = _resolve_rate_limit(rate_limit) + http_client = _resolve_http_client(http_client) + start_dates = _to_utc(start_dates) + end_dates = _to_utc(end_dates) + # Fetch json files from the IOC website + # We use multithreading in order to be able to use RateLimit + to take advantage of higher performance + ioc_responses: list[multifutures.FutureResult] = _retrieve_ioc_data( + station_ids=station_ids, + start_dates=start_dates, + end_dates=end_dates, + rate_limit=rate_limit, + http_client=http_client, + executor=multithreading_executor, + ) + # Parse the json files using pandas + # This is a CPU heavy process, so we are using multiprocessing here + parsed_responses: list[multifutures.FutureResult] = _parse_ioc_responses( + ioc_responses=ioc_responses, + executor=multiprocessing_executor, + ) + # OK, now we have a list of dataframes. 
We need to group them per ioc_code, concatenate them and remove duplicates
+    dataframes = _group_results(station_ids=station_ids, parsed_responses=parsed_responses)
+    return dataframes
+
+
+def _to_utc(index: pd.DatetimeIndex) -> pd.DatetimeIndex:
+    if index.tz:
+        index = index.tz_convert("utc")
+    else:
+        index = index.tz_localize("utc")
+    return index
+
+
+def _to_datetime_index(ts: pd.Timestamp) -> pd.DatetimeIndex:
+    index = pd.DatetimeIndex([ts])
+    return index
+
+
+def _resolve_start_date(now: pd.Timestamp, start_date: DatetimeLike | None) -> pd.DatetimeIndex:
+    if start_date is None:
+        resolved_start_date = T.cast(pd.Timestamp, now - pd.Timedelta(days=7))
+    else:
+        resolved_start_date = pd.to_datetime(start_date)
+    index = _to_datetime_index(resolved_start_date)
+    return index
+
+
+def _resolve_end_date(now: pd.Timestamp, end_date: DatetimeLike | None) -> pd.DatetimeIndex:
+    if end_date is None:
+        resolved_end_date = now
+    else:
+        resolved_end_date = pd.to_datetime(end_date)
+    index = _to_datetime_index(resolved_end_date)
+    return index
+
+
+def fetch_ioc_station(
+    station_id: str,
+    start_date: DatetimeLike | None = None,
+    end_date: DatetimeLike | None = None,
+    *,
+    rate_limit: multifutures.RateLimit | None = None,
+    http_client: httpx.Client | None = None,
+    multiprocessing_executor: multifutures.ExecutorProtocol | None = None,
+    multithreading_executor: multifutures.ExecutorProtocol | None = None,
+) -> pd.DataFrame:
+    """
+    Make a query to the IOC API for tide gauge data for ``station_id``
+    and return the results as a ``pandas.DataFrame``.
+
+    ``start_date`` and ``end_date`` can be of any type that is valid for ``pandas.to_datetime()``.
+    If ``start_date`` or ``end_date`` are timezone-aware timestamps they are coerced to UTC.
+    The returned data are always in UTC.
+
+    Each query to the IOC API can request up to 30 days of data.
+    When we request data for larger time spans, multiple requests are made.
+    This is where ``rate_limit``, ``multiprocessing_executor`` and ``multithreading_executor``
+    come into play.
+
+    In order to make the data retrieval more efficient, a multithreading pool is spawned
+    and the requests are executed concurrently, while adhering to the ``rate_limit``.
+    The parsing of the JSON responses is a CPU-heavy process, so it is done in a multiprocessing pool.
+
+    If no arguments are specified, then sensible defaults are used, but if the pools need to be
+    configured, an `executor` instance needs to be passed as an argument. For example:
+
+    .. code-block:: python
+
+        executor = concurrent.futures.ProcessPoolExecutor(max_workers=4)
+        df = fetch_ioc_station("acap", multiprocessing_executor=executor)
+
+    :param station_id: The station identifier. In IOC terminology, this is called ``ioc_code``.
+    :param start_date: The starting date of the query. Defaults to 7 days ago.
+    :param end_date: The finishing date of the query. Defaults to "now".
+    :param rate_limit: The rate limit for making requests to the IOC servers. Defaults to 5 requests/second.
+    :param http_client: The ``httpx.Client``.
+    :param multiprocessing_executor: An instance of a class implementing the ``concurrent.futures.Executor`` API.
+    :param multithreading_executor: An instance of a class implementing the ``concurrent.futures.Executor`` API.
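+
+    A rough sketch of overriding the rate limit (the station code and the
+    "2/second" value below are illustrative, not recommendations):
+
+    .. code-block:: python
+
+        import limits
+        import multifutures
+
+        # mirror the default used by searvey, but with a lower rate
+        rate_limit = multifutures.RateLimit(rate_limit=limits.parse("2/second"))
+        df = fetch_ioc_station("acap", "2023-01-01", "2023-02-01", rate_limit=rate_limit)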
+ """ + logger.info("IOC-%s: Starting scraping: %s - %s", station_id, start_date, end_date) + now = pd.Timestamp.now("utc") + df = _fetch_ioc( + station_ids=[station_id], + start_dates=_resolve_start_date(now, start_date), + end_dates=_resolve_end_date(now, end_date), + rate_limit=rate_limit, + http_client=http_client, + multiprocessing_executor=multiprocessing_executor, + multithreading_executor=multithreading_executor, + )[station_id] + logger.info("IOC-%s: Finished scraping: %s - %s", station_id, start_date, end_date) + return df diff --git a/searvey/utils.py b/searvey/utils.py index 9631534..db5e089 100644 --- a/searvey/utils.py +++ b/searvey/utils.py @@ -1,6 +1,7 @@ from __future__ import annotations import itertools +import typing as T from typing import Dict from typing import Final from typing import Iterable @@ -25,11 +26,20 @@ NOW: Final = "now" +try: + from itertools import pairwise +except ImportError: + + def pairwise(iterable: T.Iterable[_T]) -> T.Iterator[tuple[_T, _T]]: + # pairwise('ABCDEFG') --> AB BC CD DE EF FG + a, b = itertools.tee(iterable) + next(b, None) + return zip(a, b) + + # https://gis.stackexchange.com/questions/201789/verifying-formula-that-will-convert-longitude-0-360-to-180-to-180 # lon1 is the longitude varying from -180 to 180 or 180W-180E # lon3 is the longitude variable from 0 to 360 (all positive) - - def lon1_to_lon3(lon1: ScalarOrArray) -> ScalarOrArray: return lon1 % 360 diff --git a/tests/ioc_api_test.py b/tests/ioc_api_test.py new file mode 100644 index 0000000..1d04ae9 --- /dev/null +++ b/tests/ioc_api_test.py @@ -0,0 +1,229 @@ +from __future__ import annotations + +import unittest.mock + +import httpx +import multifutures +import pandas as pd +import pytest + +from searvey import fetch_ioc_station +from searvey.ioc import _fetch_url +from searvey.ioc import _generate_urls +from searvey.ioc import _ioc_date +from searvey.ioc import _resolve_end_date +from searvey.ioc import _resolve_http_client +from searvey.ioc import _resolve_rate_limit +from searvey.ioc import _resolve_start_date +from searvey.ioc import _to_utc +from searvey.ioc import fetch_url + + +def test_fetch_url(): + url = "https://google.com" + response = _fetch_url(url, client=httpx.Client()) + assert "The document has moved" in response + + +def test_fetch_url_failure(): + url = "http://localhost" + with pytest.raises(httpx.ConnectError) as exc: + _fetch_url(url, client=httpx.Client(timeout=0)) + assert "in progress" in str(exc) + + +def test_fetch_url_full(): + url = "https://google.com" + response = fetch_url(url, client=httpx.Client(), rate_limit=multifutures.RateLimit()) + assert "The document has moved" in response + + +def test_generate_urls(): + station_id = "AAA" + start_date = pd.Timestamp("2023-01-01") + end_date = pd.Timestamp("2023-06-01") + urls = _generate_urls( + station_id=station_id, + start_date=start_date, + end_date=end_date, + ) + assert len(urls) == 6 + assert all(isinstance(url, str) for url in urls) + assert all(station_id in url for url in urls) + assert _ioc_date(start_date) in urls[0] + assert _ioc_date(end_date) in urls[-1] + + +def test_generate_urls_raises_common_start_date_and_end_date(): + station_id = "AAA" + date = pd.Timestamp("2023-01-01") + date = pd.Timestamp("2023-06-01") + urls = _generate_urls( + station_id=station_id, + start_date=date, + end_date=date, + ) + assert len(urls) == 0 + assert not urls + assert urls == [] + + +def test_generate_urls_raises_when_end_date_before_start_date(): + start_date = pd.Timestamp("2023-01-01") + end_date = 
pd.Timestamp("2022-01-01") + with pytest.raises(ValueError) as exc: + _generate_urls( + station_id="aaaa", + start_date=start_date, + end_date=end_date, + ) + assert str(exc.value) == f"'end_date' must be after 'start_date': {end_date} vs {start_date}" + + +def test_resolve_rate_limit_returns_object_as_is(): + rate_limit = multifutures.RateLimit() + resolved = _resolve_rate_limit(rate_limit=rate_limit) + assert resolved is rate_limit + + +def test_resolve_http_client_returns_object_as_is(): + http_client = httpx.Client(timeout=httpx.Timeout(timeout=10, read=30)) + resolved = _resolve_http_client(http_client=http_client) + assert resolved is http_client + + +def test_to_utc(): + # timestamp + ts = pd.Timestamp("2004") + ts_utc = pd.Timestamp("2004", tz="utc") + assert _to_utc(ts) == ts_utc + # DatetimeIndex + index = pd.DatetimeIndex(["2004"]) + index_utc = pd.Timestamp("2004", tz="utc") + assert _to_utc(index) == index_utc + + +def test_to_utc_with_tz(): + # timestamp + ts_cet = pd.Timestamp("2004-01-01T01:00:00", tz="CET") + ts_utc = pd.Timestamp("2004-01-01T00:00:00", tz="utc") + assert _to_utc(ts_cet) == ts_utc + # DatetimeIndex + index_cet = pd.DatetimeIndex([ts_cet]) + index_utc = pd.DatetimeIndex([ts_utc]) + assert _to_utc(index_cet) == index_utc + + +def test_resolve_start_date_default(): + now = pd.Timestamp.now(tz="utc") + expected = now - pd.Timedelta(days=7) + resolved = _resolve_start_date(now=now, start_date=None) + assert resolved == expected + + +def test_resolve_start_date_specific_value(): + now = pd.Timestamp.now(tz="utc") + start_date = "2004" + expected = pd.DatetimeIndex([start_date]) + assert _resolve_start_date(now, start_date) == expected + + +def test_resolve_end_date_default(): + now = pd.Timestamp.now(tz="utc") + expected = now + resolved = _resolve_end_date(now=now, end_date=None) + assert resolved == expected + + +def test_resolve_end_date_specific_value(): + now = pd.Timestamp.now(tz="utc") + end_date = "2004" + expected = pd.DatetimeIndex([end_date]) + assert _resolve_end_date(now, end_date) == expected + + +@unittest.mock.patch("searvey.ioc.fetch_url") +def test_fetch_ioc_station_empty_responses(mocked_fetch_url): + station_id = "blri" + start_date = "2023-09-01" + end_date = "2023-12-10" + # The period between start_date and end_date should hit 4 URLs + mocked_fetch_url.side_effect = [ + f"""[{{"error":"code '{station_id}' not found"}}]""", + f"""[{{"error":"code '{station_id}' not found"}}]""", + '[{"error":"Incorrect code"}]', + "[]", + ] + df = fetch_ioc_station( + station_id=station_id, + start_date=start_date, + end_date=end_date, + ) + assert isinstance(df, pd.DataFrame) + assert df.empty + + +@unittest.mock.patch("searvey.ioc.fetch_url") +def test_fetch_ioc_station_normal_call(mocked_fetch_url): + station_id = "acnj" + start_date = "2022-03-12T11:04:00" + end_date = "2022-03-12T11:06:00" + mocked_fetch_url.side_effect = [ + """ [\ + {"slevel":0.905,"stime":"2022-03-12 11:04:00","sensor":"wls"}, + {"slevel":0.906,"stime":"2022-03-12 11:05:00","sensor":"wls"}, + {"slevel":0.896,"stime":"2022-03-12 11:06:00","sensor":"wls"} + ]""", + ] + df = fetch_ioc_station( + station_id=station_id, + start_date=start_date, + end_date=end_date, + ) + assert isinstance(df, pd.DataFrame) + assert not df.empty + assert len(df) == 3 + + +@unittest.mock.patch("searvey.ioc.fetch_url") +def test_fetch_ioc_station_duplicated_timestamps(mocked_fetch_url): + station_id = "acnj" + start_date = "2022-03-12T11:04:00" + end_date = "2022-03-12T11:06:00" + 
mocked_fetch_url.side_effect = [ + """ [\ + {"slevel":0.905,"stime":"2022-03-12 11:04:00","sensor":"wls"}, + {"slevel":0.906,"stime":"2022-03-12 11:05:00","sensor":"wls"}, + {"slevel":0.906,"stime":"2022-03-12 11:05:00","sensor":"wls"}, + {"slevel":0.906,"stime":"2022-03-12 11:05:00","sensor":"wls"}, + {"slevel":0.896,"stime":"2022-03-12 11:06:00","sensor":"wls"} + ]""", + ] + df = fetch_ioc_station( + station_id=station_id, + start_date=start_date, + end_date=end_date, + ) + assert isinstance(df, pd.DataFrame) + assert not df.empty + assert len(df) == 3 + assert df.wls.max() == 0.906 + assert df.wls.min() == 0.896 + assert df.wls.median() == 0.905 + + +def test_ioc_webserver(): + station_id = "acnj" + start_date = "2022-03-12T11:04:00" + end_date = "2022-03-12T11:06:00" + df = fetch_ioc_station( + station_id=station_id, + start_date=start_date, + end_date=end_date, + ) + assert isinstance(df, pd.DataFrame) + assert not df.empty + assert len(df) == 3 + assert df.wls.max() == 0.906 + assert df.wls.min() == 0.896 + assert df.wls.median() == 0.905 From 6e32c93bf3a65021f0db863051b2ed869d863e73 Mon Sep 17 00:00:00 2001 From: "Soroosh.Mani" Date: Thu, 1 Feb 2024 17:02:28 -0500 Subject: [PATCH 03/15] coops: Add `fetch_coops_station()` - COOPS ignore errors vs raise - Add more documentation for COOPS - Get all types of stations --- docs/source/coops.rst | 6 + examples/coops_data.ipynb | 125 ++--- searvey/__init__.py | 2 + searvey/coops.py | 984 +++++++++++++++++++++++++++++++------- searvey/stations.py | 7 +- 5 files changed, 876 insertions(+), 248 deletions(-) diff --git a/docs/source/coops.rst b/docs/source/coops.rst index 811f60c..adc5980 100644 --- a/docs/source/coops.rst +++ b/docs/source/coops.rst @@ -55,3 +55,9 @@ CO-OPS query class The ``COOPS_Query`` class lets you send an individual query to the CO-OPS API by specifying a station, data product, and time interval. .. autoclass:: searvey.coops.COOPS_Query + +New API +------- + +.. autofunction:: searvey.coops.get_coops_stations +.. 
autofunction:: searvey.coops.fetch_coops_station diff --git a/examples/coops_data.ipynb b/examples/coops_data.ipynb index 150c5f6..0e95288 100644 --- a/examples/coops_data.ipynb +++ b/examples/coops_data.ipynb @@ -61,10 +61,10 @@ }, "outputs": [], "source": [ - "from searvey.coops import coops_stations\n", + "from searvey.coops import get_coops_stations\n", "\n", - "stations = coops_stations()\n", - "stations" + "all_stations = get_coops_stations(metadata_source='main')\n", + "all_stations" ] }, { @@ -81,7 +81,7 @@ "figure, axis = pyplot.subplots(1, 1)\n", "figure.set_size_inches(12, 12 / 1.61803398875)\n", "\n", - "stations.plot(ax=axis)\n", + "all_stations.drop_duplicates().plot(ax=axis, column='status', legend=True)\n", "countries.plot(color='lightgrey', ax=axis, zorder=-1)\n", "\n", "axis.set_title(f'all CO-OPS stations')" @@ -110,13 +110,12 @@ "outputs": [], "source": [ "import shapely\n", - "from searvey.coops import coops_stations_within_region\n", "\n", "# This is the actualBBox of the east coast, but this takes too long to retrieve.\n", "# Since we execute this notebook on CI, let's actually use Florida\n", "#east_coast = shapely.geometry.box(-85, 25, -65, 45)\n", "east_coast = shapely.geometry.box(-85, 25, -65, 30)\n", - "east_coast_stations = coops_stations_within_region(region=east_coast)\n", + "east_coast_stations = get_coops_stations(metadata_source='main', region=east_coast)\n", "east_coast_stations" ] }, @@ -134,7 +133,7 @@ "figure, axis = pyplot.subplots(1, 1)\n", "figure.set_size_inches(12, 12 / 1.61803398875)\n", "\n", - "east_coast_stations.plot(ax=axis)\n", + "east_coast_stations.plot(ax=axis, column='status', legend=True)\n", "\n", "xlim = axis.get_xlim()\n", "ylim = axis.get_ylim()\n", @@ -153,7 +152,7 @@ } }, "source": [ - "# retrieve a [CO-OPS data product from all stations within an arbitrary polygon](https://searvey.readthedocs.io/en/latest/coops.html#searvey.coops.coops_product_within_region)" + "# retrieve a CO-OPS data product from a [specific station](https://searvey.readthedocs.io/en/latest/coops.html#searvey.coops.COOPS_Station)" ] }, { @@ -170,16 +169,18 @@ }, "outputs": [], "source": [ + "import pytz\n", "from datetime import datetime\n", - "from searvey.coops import coops_product_within_region\n", + "from searvey.coops import fetch_coops_station\n", "\n", - "water_levels = coops_product_within_region(\n", - " 'water_level',\n", - " region=east_coast,\n", - " start_date=datetime(2022, 4, 2, 12),\n", - " end_date=datetime(2022, 4, 2, 12, 30),\n", + "station_id = '1612480'\n", + "station_water_levels = fetch_coops_station(\n", + " station_id=station_id,\n", + " start_date=datetime(2022, 4, 2, 12, tzinfo=pytz.utc),\n", + " end_date=datetime(2022, 4, 2, 12, 30, tzinfo=pytz.utc),\n", + " product='water_level',\n", ")\n", - "water_levels" + "station_water_levels" ] }, { @@ -196,88 +197,58 @@ }, "outputs": [], "source": [ - "import pandas\n", + "import pandas as pd\n", "\n", "figure, axis = pyplot.subplots(1, 1)\n", "figure.set_size_inches(12, 12 / 1.61803398875)\n", "\n", - "water_levels.max('t').plot.scatter(x='x', y='y', hue='v', cmap='viridis', ax=axis)\n", - "\n", - "xlim = axis.get_xlim()\n", - "ylim = axis.get_ylim()\n", - "countries.plot(color='lightgrey', ax=axis, zorder=-1)\n", - "axis.set_xlim(xlim)\n", - "axis.set_ylim(ylim)\n", + "axis.errorbar(\n", + " station_water_levels.index,\n", + " station_water_levels['value'],\n", + " yerr=station_water_levels['sigma'].values,\n", + ")\n", "\n", - "start_date = 
pandas.to_datetime(water_levels['t'].min().values)\n",
-    "end_date = pandas.to_datetime(water_levels['t'].max().values)\n",
-    "axis.set_title(f'maximum water levels observed by CO-OPS stations on the U.S. East Coast between {start_date} and {end_date}')"
+    "start_date = pd.to_datetime(station_water_levels.index.min())\n",
+    "end_date = pd.to_datetime(station_water_levels.index.max())\n",
+    "axis.set_title(f'water level observed by CO-OPS station {station_id} between {start_date} and {end_date}')"
    ]
   },
   {
    "cell_type": "markdown",
    "metadata": {},
    "source": [
     "# retrieve a CO-OPS data product from a list of stations"
    ]
   },
   {
    "cell_type": "code",
    "execution_count": null,
    "metadata": {},
    "outputs": [],
    "source": [
     "from multifutures import multiprocess\n",
     "\n",
     "stations = east_coast_stations[(east_coast_stations.status == 'active') & (east_coast_stations.station_type == 'waterlevels')]\n",
     "station_water_levels = pd.concat(\n",
     "    [\n",
     "        result.result for result in multiprocess(\n",
     "            fetch_coops_station,\n",
     "            [\n",
     "                {\n",
     "                    'station_id': nos_id,\n",
     "                    'start_date': datetime(2022, 4, 2, 12, tzinfo=pytz.utc),\n",
     "                    'end_date': datetime(2022, 4, 2, 12, 30, tzinfo=pytz.utc),\n",
     "                    'product': 'water_level',\n",
     "                } for nos_id, _ in stations.iterrows()\n",
     "            ],\n",
     "        ) \n",
     "    ],\n",
     "    keys=[nos_id for nos_id, _ in stations.iterrows()],\n",
     "    names=['nos_id', 'time']\n",
     ")\n",
     "station_water_levels"
    ]
   },
-  {
-   "cell_type": "code",
-   "execution_count": null,
-   "metadata": {
-    "jupyter": {
-     "outputs_hidden": false
-    },
-    "pycharm": {
-     "name": "#%%\n"
-    },
-    "tags": []
-   },
-   "outputs": [],
-   "source": [
-    "figure, axis = pyplot.subplots(1, 1)\n",
-    "figure.set_size_inches(12, 12 / 1.61803398875)\n",
-    "\n",
-    "axis.errorbar(\n",
-    "    station_water_levels['t'],\n",
-    "    station_water_levels['v'].isel(nos_id=0),\n",
-    "    yerr=station_water_levels['s'].isel(nos_id=0).values,\n",
-    ")\n",
-    "\n",
-    "start_date = pandas.to_datetime(station_water_levels['t'].min().values)\n",
-    "end_date = pandas.to_datetime(station_water_levels['t'].max().values)\n",
-    "axis.set_title(f'water level observed by CO-OPS station {station.id} (\"{station.name}\") between {start_date} and {end_date}')"
-   ]
-  },
   {
    "cell_type": "markdown",
    "metadata": {
@@ -294,7 +265,7 @@
  ],
  "metadata": {
   "kernelspec": {
-   "display_name": "python3",
+   "display_name": "Python 3 (ipykernel)",
    "language": "python",
    "name": "python3"
   },
@@ -308,7 +279,7 @@
   "name": "python",
   "nbconvert_exporter": "python",
   "pygments_lexer": "ipython3",
-   "version": "3.9.16"
+   "version": "3.10.9"
  }
 },
 "nbformat": 4,
diff --git a/searvey/__init__.py b/searvey/__init__.py
index 073e307..244081a 100644
--- a/searvey/__init__.py
+++ b/searvey/__init__.py
@@ -1,5 +1,6 @@
 from __future__ import annotations
 
+from searvey.coops import fetch_coops_station
 from searvey.coops import get_coops_stations
 from searvey.ioc import fetch_ioc_station
 from searvey.ioc import 
get_ioc_data @@ -10,6 +11,7 @@ __all__: list[str] = [ + "fetch_coops_station", "fetch_ioc_station", "get_coops_stations", "get_ioc_data", diff --git a/searvey/coops.py b/searvey/coops.py index c3cfda3..90f5863 100644 --- a/searvey/coops.py +++ b/searvey/coops.py @@ -2,12 +2,14 @@ # https://api.tidesandcurrents.noaa.gov/api/prod/ from __future__ import annotations +import collections import json import logging +import typing as T import warnings -from abc import ABC -from abc import abstractmethod +from collections import abc from datetime import datetime +from datetime import timedelta from enum import Enum from functools import lru_cache from pathlib import Path @@ -17,132 +19,89 @@ from typing import Union import geopandas +import httpx +import limits +import multifutures import numpy import pandas +import pandas as pd import requests import shapely +import tenacity import xarray from bs4 import BeautifulSoup from bs4 import element +from deprecated import deprecated from geopandas import GeoDataFrame from pandas import DataFrame from pandas import Series from shapely.geometry import MultiPolygon -from shapely.geometry import Point from shapely.geometry import Polygon from xarray import Dataset +from .custom_types import DatetimeLike from .utils import get_region +from .utils import pairwise logger = logging.getLogger(__name__) +# constants +COOPS_URL_TS_FORMAT = "%Y%m%d %H:%M" +COOPS_BASE_URL = "https://tidesandcurrents.noaa.gov/api/datagetter?" -class StationDataProduct(Enum): - pass - -class StationDataInterval(Enum): - pass - - -class StationDatum(Enum): - pass - - -class StationStatus(Enum): +class COOPS_StationStatus(Enum): ACTIVE = "active" DISCONTINUED = "discontinued" -class StationMetadataSource(Enum): - MAIN = "main" - NWS = "nws" - - -class Station(ABC): - """ - abstraction of a specific data station - """ - - id: str - location: Point - - def __init__(self, id: str, location: Point): - self.id = id - self.location = location - - @abstractmethod - def product( - self, - product: StationDataProduct, - start_date: datetime, - end_date: datetime | None = None, - interval: StationDataInterval | None = None, - datum: StationDatum | None = None, - ) -> Dataset: - """ - retrieve data for the current station within the specified parameters - - :param product: name of data product - :param start_date: start date - :param end_date: end date - :param interval: time interval of data - :param datum: vertical datum - :return: data for the current station within the specified parameters - """ - raise NotImplementedError() - - def __str__(self) -> str: - return f'{self.__class__.__name__} - "{self.id}" {self.location}' - - -class StationQuery(ABC): - """ - abstraction of an individual station data query - """ - - station_id: str - product: StationDataProduct - start_date: datetime - end_date: datetime - interval: StationDataInterval - datum: StationDatum - - def __init__( - self, - station_id: str, - product: StationDataProduct, - start_date: datetime, - end_date: datetime | None = None, - interval: StationDataInterval | None = None, - datum: StationDatum | None = None, - ): - self.station_id = station_id - self.product = product - self.start_date = start_date - self.end_date = end_date - self.interval = interval - self.datum = datum +StationStatus = COOPS_StationStatus - @property - @abstractmethod - def query(self) -> Dict[str, Any]: - raise NotImplementedError() - @property - @abstractmethod - def data(self) -> DataFrame: - """ - :return: data for the current query parameters - """ - 
raise NotImplementedError() - - def __str__(self) -> str: - return f'{self.__class__.__name__} - {self.product} at station "{self.station_id}" between {self.start_date} and {self.end_date} over {self.interval} in {self.datum}' +class COOPS_StationMetadataSource(Enum): + MAIN = "main" + NWS = "nws" -class COOPS_Product(StationDataProduct): # noqa: N801 +StationMetadataSource = COOPS_StationMetadataSource + +StationTypes = [ + "waterlevels", + "waterlevelsandmet", + "airgap", + "datums", + "supersededdatums", + "benchmarks", + "supersededbenchmarks", + "historicwl", + "met", + "harcon", + "tidepredictions", + "currentpredictions", + "currents", + "historiccurrents", + "surveycurrents", + "cond", + "watertemp", + "physocean", + "tcoon", + "visibility", + "1minute", + "historicmet", + "historicphysocean", + "highwater", + "lowwater", + "hightideflooding", + "ofs", + "partnerstations", +] + + +# NOTE: +# DAILY_MEAN is only available for Great Lakes stations and at LST +# for SALINITY couldn't find a station that provides data! +class COOPS_Product(Enum): # noqa: N801 WATER_LEVEL = ( "water_level" # Preliminary or verified water levels, depending on availability. @@ -157,10 +116,10 @@ class COOPS_Product(StationDataProduct): # noqa: N801 "visibility" # Visibility from the station's visibility sensor. A measure of atmospheric clarity. ) HUMIDITY = "humidity" # Relative humidity as measured at the station. - SALINITY = "salinity" # Salinity and specific gravity data for the station. + # SALINITY = "salinity" # Salinity and specific gravity data for the station. HOURLY_HEIGHT = "hourly_height" # Verified hourly height water level data for the station. HIGH_LOW = "high_low" # Verified high/low water level data for the station. - DAILY_MEAN = "daily_mean" # Verified daily mean water level data for the station. + # DAILY_MEAN = "daily_mean" # Verified daily mean water level data for the station. MONTHLY_MEAN = "monthly_mean" # Verified monthly mean water level data for the station. 
ONE_MINUTE_WATER_LEVEL = ( "one_minute_water_level" @@ -175,12 +134,251 @@ class COOPS_Product(StationDataProduct): # noqa: N801 ) -class COOPS_Interval(StationDataInterval): # noqa: N801 +COOPS_ProductFieldsNameMap = { + COOPS_Product.WATER_LEVEL: {"t": "time", "v": "value", "s": "sigma", "f": "flags", "q": "quality"}, + COOPS_Product.HOURLY_HEIGHT: {"t": "time", "v": "value", "s": "sigma", "f": "flags"}, + COOPS_Product.HIGH_LOW: {"t": "time", "v": "value", "ty": "type", "f": "flags"}, + COOPS_Product.DAILY_MEAN: {"t": "time", "v": "value", "f": "flags"}, + COOPS_Product.MONTHLY_MEAN: { + "year": "year", + "month": "month", + "highest": "highest", + "MHHW": "MHHW", + "MHW": "MHW", + "MSL": "MSL", + "MTL": "MTL", + "MLW": "MLW", + "MLLW": "MLLW", + "DTL": "DTL", + "GT": "GT", + "MN": "MN", + "DHQ": "DHQ", + "DLQ": "DLQ", + "HWI": "HWI", + "LWI": "LWI", + "lowest": "lowest", + "inferred": "inferred", + }, + COOPS_Product.ONE_MINUTE_WATER_LEVEL: {"t": "time", "v": "value"}, + COOPS_Product.PREDICTIONS: {"t": "time", "v": "value"}, + COOPS_Product.AIR_GAP: {"t": "time", "v": "value", "s": "sigma", "f": "flags"}, + COOPS_Product.WIND: { + "t": "time", + "s": "speed", + "d": "degree", + "dr": "direction", + "g": "gust", + "f": "flags", + }, + COOPS_Product.AIR_PRESSURE: {"t": "time", "v": "value", "f": "flags"}, + COOPS_Product.AIR_TEMPERATURE: {"t": "time", "v": "value", "f": "flags"}, + COOPS_Product.VISIBILITY: {"t": "time", "v": "value", "f": "flags"}, + COOPS_Product.HUMIDITY: {"t": "time", "v": "value", "f": "flags"}, + COOPS_Product.WATER_TEMPERATURE: {"t": "time", "v": "value", "f": "flags"}, + COOPS_Product.CONDUCTIVITY: {"t": "time", "v": "value", "f": "flags"}, + COOPS_Product.SALINITY: {"t": "time", "s": "salinity", "g": "specific_gravity"}, + COOPS_Product.CURRENTS: {"t": "time", "s": "speed", "d": "direction", "b": "bin"}, + COOPS_Product.CURRENTS_PREDICTIONS: { + "Time": "time", + "Velocity_Major": "velocity", + "meanEbbDir": "ebb_dir", + "meanFloodDir": "flood_dir", + "Bin": "bin", + "Depth": "depth", + "Speed": "speed", + "Direction": "direction", + }, +} + + +COOPS_ProductFieldTypes = { + "DHQ": float, + "DLQ": float, + "DTL": float, + "GT": float, + "HWI": int, + "LWI": int, + "MHHW": float, + "MHW": float, + "MLLW": float, + "MLW": float, + "MN": float, + "MSL": float, + "MTL": float, + "bin": int, + "degree": float, + "depth": float, + "direction": str, + "ebb_dir": float, + "flags": str, # TODO: + "flood_dir": float, + "gust": float, + "highest": float, + "inferred": lambda x: bool(int(x)), + "lowest": float, + "month": int, + "quality": str, + "salinity": float, + "sigma": float, + "specific_gravity": float, + "speed": float, + "time": float, + "type": str, + "value": float, + "velocity": float, + "year": int, +} + + +class COOPS_Interval(Enum): # noqa: N801 H = "h" # Hourly Met data and harmonic predictions will be returned HILO = "hilo" # High/Low tide predictions for all stations. 
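+    # NOTE: the integer members below appear to be interval lengths in
+    # minutes (cf. the CO-OPS API docs); `h` means hourly data.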
+ MAX_SLACK = "max_slack" # Max flood/ebb currents (time and speed) and slack water (times) + ONE = 1 + FIVE = 5 + SIX = 6 + TEN = 10 + FIFTEEN = 15 + THIRTY = 30 + SIXTY = 60 + NONE = None + + +COOPS_ProductIntervalMap = { + COOPS_Product.WATER_LEVEL: [COOPS_Interval.NONE], + COOPS_Product.HOURLY_HEIGHT: [COOPS_Interval.NONE], + COOPS_Product.HIGH_LOW: [COOPS_Interval.NONE], + COOPS_Product.DAILY_MEAN: [COOPS_Interval.NONE], + COOPS_Product.MONTHLY_MEAN: [COOPS_Interval.NONE], + COOPS_Product.ONE_MINUTE_WATER_LEVEL: [COOPS_Interval.NONE], + COOPS_Product.PREDICTIONS: [ + COOPS_Interval.H, + COOPS_Interval.ONE, + COOPS_Interval.FIVE, + COOPS_Interval.SIX, + COOPS_Interval.TEN, + COOPS_Interval.FIFTEEN, + COOPS_Interval.THIRTY, + COOPS_Interval.SIXTY, + COOPS_Interval.HILO, + COOPS_Interval.NONE, + ], + COOPS_Product.CURRENTS: [COOPS_Interval.H, COOPS_Interval.SIX, COOPS_Interval.NONE], + COOPS_Product.CURRENTS_PREDICTIONS: [ + COOPS_Interval.H, + COOPS_Interval.ONE, + COOPS_Interval.SIX, + COOPS_Interval.TEN, + COOPS_Interval.THIRTY, + COOPS_Interval.SIXTY, + COOPS_Interval.MAX_SLACK, + COOPS_Interval.NONE, + ], + COOPS_Product.AIR_GAP: [COOPS_Interval.SIX, COOPS_Interval.H, COOPS_Interval.NONE], + COOPS_Product.WIND: [COOPS_Interval.SIX, COOPS_Interval.H, COOPS_Interval.NONE], + COOPS_Product.AIR_PRESSURE: [COOPS_Interval.SIX, COOPS_Interval.H, COOPS_Interval.NONE], + COOPS_Product.AIR_TEMPERATURE: [COOPS_Interval.SIX, COOPS_Interval.H, COOPS_Interval.NONE], + COOPS_Product.VISIBILITY: [COOPS_Interval.SIX, COOPS_Interval.H, COOPS_Interval.NONE], + COOPS_Product.HUMIDITY: [COOPS_Interval.SIX, COOPS_Interval.H, COOPS_Interval.NONE], + COOPS_Product.WATER_TEMPERATURE: [COOPS_Interval.SIX, COOPS_Interval.H, COOPS_Interval.NONE], + COOPS_Product.CONDUCTIVITY: [COOPS_Interval.SIX, COOPS_Interval.H, COOPS_Interval.NONE], + COOPS_Product.SALINITY: [COOPS_Interval.SIX, COOPS_Interval.H, COOPS_Interval.NONE], + COOPS_Product.DATUMS: [COOPS_Interval.NONE], +} + + +COOPS_MaxInterval = { + COOPS_Product.WATER_LEVEL: {COOPS_Interval.NONE: timedelta(days=30)}, + COOPS_Product.HOURLY_HEIGHT: {COOPS_Interval.NONE: timedelta(days=365)}, + COOPS_Product.HIGH_LOW: {COOPS_Interval.NONE: timedelta(days=365)}, + COOPS_Product.DAILY_MEAN: {COOPS_Interval.NONE: timedelta(days=3650)}, + COOPS_Product.MONTHLY_MEAN: {COOPS_Interval.NONE: timedelta(days=73000)}, + COOPS_Product.ONE_MINUTE_WATER_LEVEL: {COOPS_Interval.NONE: timedelta(days=4)}, + COOPS_Product.PREDICTIONS: { + COOPS_Interval.H: timedelta(days=365), + COOPS_Interval.ONE: timedelta(days=365), + COOPS_Interval.FIVE: timedelta(days=365), + COOPS_Interval.SIX: timedelta(days=365), + COOPS_Interval.TEN: timedelta(days=365), + COOPS_Interval.FIFTEEN: timedelta(days=365), + COOPS_Interval.THIRTY: timedelta(days=365), + COOPS_Interval.SIXTY: timedelta(days=365), + COOPS_Interval.HILO: timedelta(days=365), + COOPS_Interval.NONE: timedelta(days=365), + }, + COOPS_Product.CURRENTS: { + COOPS_Interval.H: timedelta(days=365), + COOPS_Interval.SIX: timedelta(days=30), + COOPS_Interval.NONE: timedelta(days=30), + }, + COOPS_Product.CURRENTS_PREDICTIONS: { + COOPS_Interval.H: timedelta(days=30), + COOPS_Interval.ONE: timedelta(days=30), + COOPS_Interval.SIX: timedelta(days=30), + COOPS_Interval.TEN: timedelta(days=30), + COOPS_Interval.THIRTY: timedelta(days=30), + COOPS_Interval.SIXTY: timedelta(days=30), + COOPS_Interval.MAX_SLACK: timedelta(days=30), + COOPS_Interval.NONE: timedelta(days=30), + }, + COOPS_Product.AIR_GAP: { + COOPS_Interval.H: timedelta(days=365), + 
COOPS_Interval.SIX: timedelta(days=30),
+        COOPS_Interval.NONE: timedelta(days=30),
+    },
+    COOPS_Product.WIND: {
+        COOPS_Interval.H: timedelta(days=365),
+        COOPS_Interval.SIX: timedelta(days=30),
+        COOPS_Interval.NONE: timedelta(days=30),
+    },
+    COOPS_Product.AIR_PRESSURE: {
+        COOPS_Interval.SIX: timedelta(days=30),
+        COOPS_Interval.H: timedelta(days=365),
+        COOPS_Interval.NONE: timedelta(days=30),
+    },
+    COOPS_Product.AIR_TEMPERATURE: {
+        COOPS_Interval.H: timedelta(days=365),
+        COOPS_Interval.SIX: timedelta(days=30),
+        COOPS_Interval.NONE: timedelta(days=30),
+    },
+    COOPS_Product.VISIBILITY: {
+        COOPS_Interval.SIX: timedelta(days=30),
+        COOPS_Interval.H: timedelta(days=365),
+        COOPS_Interval.NONE: timedelta(days=30),
+    },
+    COOPS_Product.HUMIDITY: {
+        COOPS_Interval.SIX: timedelta(days=30),
+        COOPS_Interval.H: timedelta(days=365),
+        COOPS_Interval.NONE: timedelta(days=30),
+    },
+    COOPS_Product.WATER_TEMPERATURE: {
+        COOPS_Interval.H: timedelta(days=365),
+        COOPS_Interval.SIX: timedelta(days=30),
+        COOPS_Interval.NONE: timedelta(days=30),
+    },
+    COOPS_Product.CONDUCTIVITY: {
+        COOPS_Interval.H: timedelta(days=365),
+        COOPS_Interval.SIX: timedelta(days=30),
+        COOPS_Interval.NONE: timedelta(days=30),
+    },
+    COOPS_Product.SALINITY: {
+        COOPS_Interval.H: timedelta(days=365),
+        COOPS_Interval.SIX: timedelta(days=30),
+        COOPS_Interval.NONE: timedelta(days=30),
+    },
+    COOPS_Product.DATUMS: {COOPS_Interval.NONE: timedelta(days=30)},
+}
+
+
+class COOPS_TidalDatum(Enum):  # noqa: N801
+    """
+    CRD   Only some stations on the Columbia River, WA/OR
+    IGLD  Only Great Lakes stations.
+    LWD   Only Great Lakes Stations
+    MLLW  Note! Subordinate tide prediction stations must use datum=MLLW
+    NAVD  This datum is not available for all stations.
+    """
-
-class COOPS_TidalDatum(StationDatum):  # noqa: N801
     CRD = "CRD"  # Columbia River Datum
     IGLD = "IGLD"  # International Great Lakes Datum
     LWD = "LWD"  # Great Lakes Low Water Datum (Chart Datum)
@@ -200,17 +398,21 @@ class COOPS_VelocityType(Enum):  # noqa: N801
 
 
 class COOPS_Units(Enum):  # noqa: N801
+    # Note! Visibility data is kilometers (km)
     METRIC = "metric"
+    # Note! Visibility data is Nautical Miles (nm)
     ENGLISH = "english"
 
 
 class COOPS_TimeZone(Enum):  # noqa: N801
+    # Does not apply to products of datums or
+    # monthly_mean; daily_mean (Great Lakes) must use time_zone=lst
     GMT = "gmt"  # Greenwich Mean Time
     LST = "lst"  # Local Standard Time. The time local to the requested station.
     LST_LDT = "lst_ldt"  # Local Standard/Local Daylight Time. The time local to the requested station.
 
 
-class COOPS_Station(Station):  # noqa: N801
+class COOPS_Station:
     """
     a specific CO-OPS station
     """
@@ -259,7 +461,9 @@ def __init__(self, id: Union[int, str]):
         if isinstance(metadata, DataFrame):
             metadata = metadata.iloc[0]
 
-        Station.__init__(self, id=str(metadata.name), location=metadata.geometry)
+        # NOTE: .name is different from ['name'] in this case!
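+        # (`metadata` here is a pandas Series: `.name` is the row's index
+        # label, i.e. the nos_id, while `metadata["name"]` is the station name.)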
+ self.id = str(metadata.name) + self.location = metadata["geometry"] self.nws_id = metadata["nws_id"] self.state = metadata["state"] @@ -394,7 +598,7 @@ def __repr__(self) -> str: return f"{self.__class__.__name__}({self.id})" -class COOPS_Query(StationQuery): # noqa: N801 +class COOPS_Query: """ abstraction of an individual query to the CO-OPS API https://api.tidesandcurrents.noaa.gov/api/prod/ @@ -443,10 +647,6 @@ def __init__( if interval is None: interval = COOPS_Interval.H - StationQuery.__init__( - self, station_id=station, product=product, start_date=start_date, end_date=end_date # type: ignore[arg-type] - ) - self.station_id = station self.product = product self.start_date = start_date @@ -647,8 +847,12 @@ def __coops_stations_html_tables() -> element.ResultSet: return soup.find_all("div", {"class": "table-responsive"}) +@deprecated( + version="0.3.11", + reason="This function is deprecated and will be removed in the future. Replace it with `get_coops_stations`.", +) @lru_cache(maxsize=1) -def coops_stations(station_status: StationStatus | None = None) -> GeoDataFrame: +def coops_stations(station_status: COOPS_StationStatus | None = None) -> GeoDataFrame: """ retrieve a list of CO-OPS stations with associated metadata @@ -706,8 +910,8 @@ def coops_stations(station_status: StationStatus | None = None) -> GeoDataFrame: tables = __coops_stations_html_tables() status_tables = { - StationStatus.ACTIVE: (0, "NWSTable"), - StationStatus.DISCONTINUED: (1, "HistNWSTable"), + COOPS_StationStatus.ACTIVE: (0, "NWSTable"), + COOPS_StationStatus.DISCONTINUED: (1, "HistNWSTable"), } dataframes = {} @@ -743,7 +947,7 @@ def coops_stations(station_status: StationStatus | None = None) -> GeoDataFrame: ) stations.set_index("nos_id", inplace=True) - if status == StationStatus.DISCONTINUED: + if status == COOPS_StationStatus.DISCONTINUED: with (Path(__file__).parent / "us_states.json").open() as us_states_file: us_states = json.load(us_states_file) @@ -775,8 +979,8 @@ def coops_stations(station_status: StationStatus | None = None) -> GeoDataFrame: stations["status"] = status.value dataframes[status] = stations - active_stations = dataframes[StationStatus.ACTIVE] - discontinued_stations = dataframes[StationStatus.DISCONTINUED] + active_stations = dataframes[COOPS_StationStatus.ACTIVE] + discontinued_stations = dataframes[COOPS_StationStatus.DISCONTINUED] discontinued_stations.loc[discontinued_stations.index.isin(active_stations.index), "status"] = ( StationStatus.ACTIVE.value ) @@ -789,11 +993,11 @@ def coops_stations(station_status: StationStatus | None = None) -> GeoDataFrame: discontinued_stations, ) ) - stations.loc[pandas.isna(stations["status"]), "status"] = StationStatus.ACTIVE.value + stations.loc[pandas.isna(stations["status"]), "status"] = COOPS_StationStatus.ACTIVE.value stations.sort_values(["status", "removed"], na_position="first", inplace=True) if station_status is not None: - if isinstance(station_status, StationStatus): + if isinstance(station_status, COOPS_StationStatus): station_status = station_status.value stations = stations[stations["status"] == station_status] @@ -803,9 +1007,13 @@ def coops_stations(station_status: StationStatus | None = None) -> GeoDataFrame: ) +@deprecated( + version="0.3.11", + reason="This function is deprecated and will be removed in the future. 
Replace it with `get_coops_stations`.", +) def coops_stations_within_region( region: Polygon | None = None, - station_status: StationStatus | None = None, + station_status: COOPS_StationStatus | None = None, ) -> GeoDataFrame: """ retrieve all stations within the specified region of interest @@ -833,18 +1041,23 @@ def coops_stations_within_region( [164 rows x 6 columns] """ + warnings.warn("Using older API, will be removed in the future!", DeprecationWarning) stations = coops_stations(station_status=station_status) if region is not None: return stations[stations.within(region)] return stations +@deprecated( + version="0.3.11", + reason="This function is deprecated and will be removed in the future. Replace it with `get_coops_stations`.", +) def coops_stations_within_bounds( minx: float, miny: float, maxx: float, maxy: float, - station_status: StationStatus | None = None, + station_status: COOPS_StationStatus | None = None, ) -> GeoDataFrame: return coops_stations_within_region( region=shapely.geometry.box(minx=minx, miny=miny, maxx=maxx, maxy=maxy), @@ -852,6 +1065,10 @@ def coops_stations_within_bounds( ) +@deprecated( + version="0.3.11", + reason="This function is deprecated and will be removed in the future. Replace it with `fetch_coops_station`.", +) def coops_product_within_region( product: COOPS_Product, region: Union[Polygon, MultiPolygon], @@ -859,7 +1076,7 @@ def coops_product_within_region( end_date: datetime | None = None, datum: COOPS_TidalDatum | None = None, interval: COOPS_Interval | None = None, - station_status: StationStatus | None = None, + station_status: COOPS_StationStatus | None = None, ) -> Dataset: """ retrieve CO-OPS data from within the specified region of interest @@ -892,6 +1109,7 @@ def coops_product_within_region( q (nos_id, t) object 'p' 'p' 'p' 'p' 'p' 'p' ... 
'p' 'p' 'p' 'p' 'p' """ + warnings.warn("Using older API, will be removed in the future!", DeprecationWarning) stations = coops_stations_within_region(region=region, station_status=station_status) station_data = [ COOPS_Station(station).product( @@ -908,40 +1126,41 @@ def coops_product_within_region( def normalize_coops_stations(df: pandas.DataFrame) -> geopandas.GeoDataFrame: - df = ( - df.rename( - columns={ - "id": "nos_id", - "shefcode": "nws_id", - "lat": "lat", - "lng": "lon", - "details.removed": "removed", - }, - ) - .astype( - { - "nos_id": numpy.int32, - "nws_id": "string", - "lon": numpy.float32, - "lat": numpy.float32, - "state": "string", - "name": "string", - "removed": "datetime64[ns]", - }, - ) - .set_index("nos_id")[ - [ - "nws_id", - "name", - "state", - "lon", - "lat", - "removed", - ] + df = df.rename( + columns={ + "id": "nos_id", + "shefcode": "nws_id", + "lat": "lat", + "lng": "lon", + "details.removed": "removed", + }, + ).astype( + { + "nos_id": "string", + "nws_id": "string", + "lon": numpy.float32, + "lat": numpy.float32, + "state": "string", + "name": "string", + "removed": "datetime64[ns]", + }, + )[ + [ + "nos_id", + "nws_id", + "station_type", + "name", + "state", + "lon", + "lat", + "removed", ] + ] + df["status"] = COOPS_StationStatus.ACTIVE.value + df.loc[~df.removed.isna(), "status"] = COOPS_StationStatus.DISCONTINUED.value + df = df.drop_duplicates(subset=["nos_id", "nws_id", "station_type", "status", "removed"]).set_index( + "nos_id" ) - df["status"] = StationStatus.ACTIVE.value - df.loc[~df.removed.isna(), "status"] = StationStatus.DISCONTINUED.value gdf = geopandas.GeoDataFrame( data=df, geometry=geopandas.points_from_xy(df.lon, df.lat, crs="EPSG:4326"), @@ -949,6 +1168,16 @@ def normalize_coops_stations(df: pandas.DataFrame) -> geopandas.GeoDataFrame: return gdf +def _get_single_coops_station(station_type: str) -> pd.DataFrame: + url = f"https://api.tidesandcurrents.noaa.gov/mdapi/prod/webapi/stations.json?expand=details&type={station_type}" + + df_thistype = pandas.read_json(url) + df_thistype = pandas.json_normalize(df_thistype["stations"]) + df_thistype["station_type"] = station_type + + return df_thistype + + @lru_cache(maxsize=1) def _get_coops_stations() -> geopandas.GeoDataFrame: """ @@ -957,19 +1186,13 @@ def _get_coops_stations() -> geopandas.GeoDataFrame: :return: ``geopandas.GeoDataFrame`` with the station metadata """ - url_active = "https://api.tidesandcurrents.noaa.gov/mdapi/prod/webapi/stations.json?expand=details" - url_historic = "https://api.tidesandcurrents.noaa.gov/mdapi/prod/webapi/stations.json?type=historicwl&expand=details" - - df_active = pandas.read_json(url_active) - df_active = pandas.json_normalize(df_active["stations"]) - - df_historic = pandas.read_json(url_historic) - df_historic = pandas.json_normalize(df_historic["stations"]) - df_historic = df_historic[~df_historic.id.isin(df_active.id)] + results = multifutures.multiprocess( + _get_single_coops_station, func_kwargs=[{"station_type": st_ty} for st_ty in StationTypes] + ) - df = pandas.concat((df_active, df_historic)) + df_all = pandas.concat(r.result for r in results) - coops_stations = normalize_coops_stations(df) + coops_stations = normalize_coops_stations(df_all) return coops_stations @@ -979,9 +1202,30 @@ def get_coops_stations( lon_max: Optional[float] = None, lat_min: Optional[float] = None, lat_max: Optional[float] = None, - metadata_source: Union[StationMetadataSource, str] = "nws", + metadata_source: Union[COOPS_StationMetadataSource, str] = "nws", ) -> 
geopandas.GeoDataFrame:
-    md_src = StationMetadataSource(metadata_source)
+    """
+    Return COOPS station metadata from either the COOPS metadata API at
+    https://api.tidesandcurrents.noaa.gov/mdapi/prod/webapi
+    or from the COOPS NWS product table at
+    https://access.co-ops.nos.noaa.gov/nwsproducts.html
+
+    If ``region`` is defined then the stations that are outside of the region are
+    filtered out. If the coordinates of the Bounding Box are defined then
+    stations outside of the BBox are filtered out. If both ``region`` and the
+    Bounding Box are defined, then an exception is raised.
+
+    Note: The longitudes of the COOPS stations are in the [-180, 180] range.
+
+    :param region: ``Polygon`` or ``MultiPolygon`` denoting region of interest
+    :param lon_min: The minimum Longitude of the Bounding Box.
+    :param lon_max: The maximum Longitude of the Bounding Box.
+    :param lat_min: The minimum Latitude of the Bounding Box.
+    :param lat_max: The maximum Latitude of the Bounding Box.
+    :param metadata_source: Metadata API to be used: 'nws' or 'main'
+    :return: ``geopandas.GeoDataFrame`` with the station metadata
+    """
+    md_src = COOPS_StationMetadataSource(metadata_source)

     region = get_region(
         region=region,
@@ -992,13 +1236,415 @@
         symmetric=True,
     )

-    if md_src == StationMetadataSource.MAIN:
+    if md_src == COOPS_StationMetadataSource.MAIN:
         coops_stations = _get_coops_stations()
         if region:
             coops_stations = coops_stations[coops_stations.within(region)]
-    elif md_src == StationMetadataSource.NWS:
+    elif md_src == COOPS_StationMetadataSource.NWS:
         coops_stations = coops_stations_within_region(region)
     else:
         raise ValueError("Unknown metadata source specified!")
     return coops_stations
+
+
+def _before_sleep(retry_state: T.Any) -> None:  # pragma: no cover
+    logger.warning(
+        "Retrying %s: attempt %s ended with: %s",
+        retry_state.fn,
+        retry_state.attempt_number,
+        retry_state.outcome,
+    )
+
+
+RETRY: T.Callable[..., T.Any] = tenacity.retry(
+    stop=(tenacity.stop_after_delay(90) | tenacity.stop_after_attempt(10)),
+    wait=tenacity.wait_random(min=2, max=10),
+    retry=tenacity.retry_if_exception_type(httpx.TransportError),
+    before_sleep=_before_sleep,
+)
+
+
+def _fetch_url(
+    url: str,
+    client: httpx.Client,
+) -> str:
+    try:
+        response = client.get(url)
+    except Exception:
+        logger.warning("Failed to retrieve: %s", url)
+        raise
+    data = response.text
+    return data
+
+
+@RETRY
+def fetch_url(
+    url: str,
+    client: httpx.Client,
+    rate_limit: multifutures.RateLimit | None = None,
+    **kwargs: T.Any,
+) -> str:
+    if rate_limit is not None:  # pragma: no cover
+        while rate_limit.reached():
+            multifutures.wait()  # pragma: no cover
+    return _fetch_url(
+        url=url,
+        client=client,
+    )
+
+
+def _parse_coops_responses(
+    coops_responses: list[multifutures.FutureResult],
+    executor: multifutures.ExecutorProtocol | None,
+) -> list[multifutures.FutureResult]:
+    # Parse the json files using pandas
+    # This is a CPU heavy process, so let's use multiprocess
+    # Not all the urls contain data, so let's filter them out
+    kwargs = []
+    for result in coops_responses:
+        station_id = result.kwargs["station_id"]  # type: ignore[index]
+        product = result.kwargs["product"]  # type: ignore[index]
+        if "error" in result.result:
+            msg = json.loads(result.result)["error"]["message"]
+            logger.error(f"{station_id}: Encountered an error response for {result.kwargs}!")
+            logger.error(f"--> {msg}")
+            continue
+        else:
+            kwargs.append(dict(station_id=station_id, product=product, content=result.result))
+    logger.debug("Starting JSON parsing")
+    results = multifutures.multiprocess(_parse_json, func_kwargs=kwargs, check=False, executor=executor)
+    multifutures.check_results(results)
+    logger.debug("Finished JSON parsing")
+    return results
+
+
+def _coops_date(ts: pd.Timestamp) -> str:
+    formatted: str = ts.strftime(COOPS_URL_TS_FORMAT)
+    return formatted
+
+
+def _generate_urls(
+    station_id: str,
+    start_date: pd.Timestamp,
+    end_date: pd.Timestamp,
+    product: COOPS_Product = COOPS_Product.WATER_LEVEL,
+    datum: COOPS_TidalDatum = COOPS_TidalDatum.MSL,
+    units: COOPS_Units = COOPS_Units.METRIC,
+    interval: COOPS_Interval = COOPS_Interval.NONE,
+    **aux_params: Any,
+) -> list[str]:
+    if end_date < start_date:
+        raise ValueError(f"'end_date' must be after 'start_date': {end_date} vs {start_date}")
+    if end_date == start_date:
+        return []
+    duration = end_date - start_date
+    periods = duration.days // COOPS_MaxInterval[product][interval].days + 2
+    urls = []
+    date_range = pd.date_range(start_date, end_date, periods=periods, unit="us", inclusive="both")
+    params = {
+        "station": station_id,
+        "product": product.value,
+        "datum": datum.value,
+        "units": units.value,
+        "time_zone": "gmt",  # We always work with UTC/GMT
+        "format": "json",
+        "application": "oceanmodeling/stormevents",
+    }
+    if interval.value is not None:
+        params["interval"] = interval.value
+    params.update(**aux_params)
+    for start, stop in pairwise(date_range):
+        params["begin_date"] = _coops_date(start)
+        params["end_date"] = _coops_date(stop)
+        # TODO Use httpx
+        req = requests.Request("GET", COOPS_BASE_URL, params=params)
+        urls.append(str(req.prepare().url))
+        logger.debug(urls[-1])
+    return urls
+
+
+def _normalize_df(df: pd.DataFrame, product: COOPS_Product) -> pd.DataFrame:
+    # TODO: Add more info (datum, unit, tz)?
+
+    nos_id = df.attrs["station_id"]
+    normalized = df.rename(columns=COOPS_ProductFieldsNameMap[product])
+    logger.debug("%s: df contains the following columns: %s", nos_id, normalized.columns)
+
+    normalized[normalized == ""] = numpy.nan
+    normalized = normalized.astype(
+        {k: v for k, v in COOPS_ProductFieldTypes.items() if k in df.columns}, errors="ignore"
+    )
+    # NOTE: Datum and mean products don't have time!
+    if "time" in normalized.columns:
+        normalized["time"] = pandas.to_datetime(normalized["time"], utc=True)
+        normalized.set_index("time", inplace=True)
+
+    return normalized
+
+
+def _parse_json(content: str, station_id: str, product: COOPS_Product) -> pd.DataFrame:
+    err_msg = ""
+    content_json = {}
+    try:
+        content_json = json.loads(content)
+        if not content_json:
+            err_msg = f"{station_id}: The station does not contain any data for {product.value}!"
+ except json.JSONDecodeError as e: + err_msg = f"{station_id}: Error decoding JSON {str(e)}" + + if err_msg: + logger.error(err_msg) + return pd.DataFrame() + + data = [] + if product == COOPS_Product.CURRENTS_PREDICTIONS: + data = content_json["current_predictions"]["cp"] + else: + data = content_json["data"] + + df = pd.DataFrame(data) + df.attrs["station_id"] = f"COOPS-{station_id}" + df = _normalize_df(df, product) + return df + + +def _group_results( + station_ids: abc.Collection[str], + parsed_responses: list[multifutures.FutureResult], +) -> dict[str, pd.DataFrame]: + # Group per COOPS code + df_groups = collections.defaultdict(list) + for item in parsed_responses: + df_groups[item.kwargs["station_id"]].append(item.result) # type: ignore[index] + + # Concatenate dataframes + dataframes: dict[str, pd.DataFrame] = {} + for station_id in station_ids: + if station_id in df_groups: + df_group = df_groups[station_id] + df = pd.concat(df_group) + df = df.sort_index() + logger.debug("COOPS-%s: Timestamps: %d", station_id, len(df)) + else: + logger.warning("COOPS-%s: No data. Creating a dummy dataframe", station_id) + df = T.cast( + pd.DataFrame, pd.DataFrame(columns=["time"], dtype="datetime64[ns]").set_index("time") + ) + dataframes[station_id] = df + logger.debug("COOPS-%s: Finished conversion to pandas", station_id) + + return dataframes + + +def _retrieve_coops_data( + station_ids: abc.Collection[str], + start_dates: abc.Collection[pd.Timestamp], + end_dates: abc.Collection[pd.Timestamp], + product: COOPS_Product, + datum: COOPS_TidalDatum, + units: COOPS_Units, + interval: COOPS_Interval, + rate_limit: multifutures.RateLimit, + http_client: httpx.Client, + executor: multifutures.ExecutorProtocol | None, + **aux_params: Any, +) -> list[multifutures.FutureResult]: + kwargs = [] + + valid_intervals = COOPS_ProductIntervalMap[product] + if interval not in valid_intervals: + raise ValueError( + "interval must be one of '{}'".format("', '".join(str(v.value) for v in valid_intervals)) + ) + + for station_id, start_date, end_date in zip(station_ids, start_dates, end_dates): + url_kwargs = { + "station_id": station_id, + "start_date": start_date, + "end_date": end_date, + "product": product, + "datum": datum, + "units": units, + "interval": interval, + **aux_params, + } + for url in _generate_urls(**url_kwargs): + if url: + kwargs.append( + dict( + station_id=station_id, + url=url, + client=http_client, + rate_limit=rate_limit, + product=product, + ), + ) + with http_client: + logger.debug("Starting data retrieval") + results = multifutures.multithread( + func=fetch_url, func_kwargs=kwargs, check=False, executor=executor + ) + logger.debug("Finished data retrieval") + multifutures.check_results(results) + return results + + +def _resolve_rate_limit(rate_limit: multifutures.RateLimit | None) -> multifutures.RateLimit: + if rate_limit is None: + rate_limit = multifutures.RateLimit(rate_limit=limits.parse("5/second")) + return rate_limit + + +def _resolve_http_client(http_client: httpx.Client | None) -> httpx.Client: + if http_client is None: + timeout = httpx.Timeout(timeout=10, read=30) + http_client = httpx.Client(timeout=timeout, follow_redirects=True) + return http_client + + +def _fetch_coops( + station_ids: abc.Collection[str], + start_dates: pd.DatetimeIndex, + end_dates: pd.DatetimeIndex, + *, + product: COOPS_Product | str, + datum: COOPS_TidalDatum | str, + units: COOPS_Units | str, + interval: COOPS_Interval | int | str | None, + rate_limit: multifutures.RateLimit | None, + 
http_client: httpx.Client | None,
+    multiprocessing_executor: multifutures.ExecutorProtocol | None,
+    multithreading_executor: multifutures.ExecutorProtocol | None,
+    **aux_params: Any,
+) -> dict[str, pd.DataFrame]:
+    rate_limit = _resolve_rate_limit(rate_limit)
+    http_client = _resolve_http_client(http_client)
+    start_dates = _to_utc(start_dates)
+    end_dates = _to_utc(end_dates)
+    # Fetch json files from the COOPS website
+    # We use multithreading in order to be able to use RateLimit and to take advantage of higher performance
+
+    # TODO: Process datetimes for GMT, etc?
+
+    coops_responses: list[multifutures.FutureResult] = _retrieve_coops_data(
+        station_ids=station_ids,
+        start_dates=start_dates,
+        end_dates=end_dates,
+        product=COOPS_Product(product),
+        datum=COOPS_TidalDatum(datum),
+        units=COOPS_Units(units),
+        interval=COOPS_Interval(interval),
+        rate_limit=rate_limit,
+        http_client=http_client,
+        executor=multithreading_executor,
+        **aux_params,
+    )
+    # Parse the json files using pandas
+    # This is a CPU heavy process, so we are using multiprocessing here
+    parsed_responses: list[multifutures.FutureResult] = _parse_coops_responses(
+        coops_responses=coops_responses,
+        executor=multiprocessing_executor,
+    )
+    # OK, now we have a list of dataframes. We need to group them per coops_code, concatenate them and remove duplicates
+    dataframes = _group_results(station_ids=station_ids, parsed_responses=parsed_responses)
+    return dataframes
+
+
+def _to_utc(index: pd.DatetimeIndex) -> pd.DatetimeIndex:
+    if index.tz:
+        index = index.tz_convert("utc")
+    else:
+        index = index.tz_localize("utc")
+    return index
+
+
+def _to_datetime_index(ts: pd.Timestamp) -> pd.DatetimeIndex:
+    index = pd.DatetimeIndex([ts])
+    return index
+
+
+def _resolve_start_date(now: pd.Timestamp, start_date: DatetimeLike | None) -> pd.DatetimeIndex:
+    if start_date is None:
+        resolved_start_date = T.cast(pd.Timestamp, now - pd.Timedelta(days=7))
+    else:
+        resolved_start_date = pd.to_datetime(start_date)
+    index = _to_datetime_index(resolved_start_date)
+    return index
+
+
+def _resolve_end_date(now: pd.Timestamp, end_date: DatetimeLike | None) -> pd.DatetimeIndex:
+    if end_date is None:
+        resolved_end_date = now
+    else:
+        resolved_end_date = pd.to_datetime(end_date)
+    index = _to_datetime_index(resolved_end_date)
+    return index
+
+
+def fetch_coops_station(
+    station_id: str,
+    start_date: DatetimeLike | None = None,
+    end_date: DatetimeLike | None = None,
+    *,
+    rate_limit: multifutures.RateLimit | None = None,
+    http_client: httpx.Client | None = None,
+    multiprocessing_executor: multifutures.ExecutorProtocol | None = None,
+    multithreading_executor: multifutures.ExecutorProtocol | None = None,
+    product: COOPS_Product | str = COOPS_Product.WATER_LEVEL,
+    datum: COOPS_TidalDatum | str = COOPS_TidalDatum.MSL,
+    units: COOPS_Units | str = COOPS_Units.METRIC,
+    interval: COOPS_Interval | str | int | None = COOPS_Interval.NONE,
+    **aux_params: Any,
+) -> pd.DataFrame:
+    """
+    Make a query to the COOPS API for tide gauge data for ``station_id``
+    and return the results as a ``pandas.DataFrame``.
+
+    ``start_date`` and ``end_date`` can be of any type that is valid for ``pandas.to_datetime()``.
+    If ``start_date`` or ``end_date`` are timezone-aware timestamps they are coerced to UTC.
+    The returned data are always in UTC.
+
+    Each query to the COOPS API can request up to 30 days of data.
+    When we request data for larger time spans, multiple requests are made.
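+    The exact request window depends on the product/interval combination (see ``COOPS_MaxInterval``).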
+    This is where ``rate_limit``, ``multiprocessing_executor`` and ``multithreading_executor``
+    come into play.
+
+    In order to make the data retrieval more efficient, a multithreading pool is spawned
+    and the requests are executed concurrently, while adhering to the ``rate_limit``.
+    The parsing of the JSON responses is a CPU heavy process so it is done within a multiprocessing Pool.
+
+    If no arguments are specified, then sensible defaults are used, but if the pools need to be
+    configured, an `executor` instance needs to be passed as an argument. For example:
+
+    .. code-block:: python
+
+        executor = concurrent.futures.ProcessPoolExecutor(max_workers=4)
+        df = fetch_coops_station("1612480", multiprocessing_executor=executor)
+
+    :param station_id: The station identifier. In COOPS terminology, this is called ``coops_code``.
+    :param start_date: The starting date of the query. Defaults to 7 days ago.
+    :param end_date: The finishing date of the query. Defaults to "now".
+    :param rate_limit: The rate limit for making requests to the COOPS servers. Defaults to 5 requests/second.
+    :param http_client: The ``httpx.Client``.
+    :param multiprocessing_executor: An instance of a class implementing the ``concurrent.futures.Executor`` API.
+    :param multithreading_executor: An instance of a class implementing the ``concurrent.futures.Executor`` API.
+    """
+    logger.info("COOPS-%s: Starting scraping: %s - %s", station_id, start_date, end_date)
+    now = pd.Timestamp.now("utc")
+    df = _fetch_coops(
+        station_ids=[station_id],
+        start_dates=_resolve_start_date(now, start_date),
+        end_dates=_resolve_end_date(now, end_date),
+        product=COOPS_Product(product),
+        datum=COOPS_TidalDatum(datum),
+        units=COOPS_Units(units),
+        interval=COOPS_Interval(interval),
+        **aux_params,
+        rate_limit=rate_limit,
+        http_client=http_client,
+        multiprocessing_executor=multiprocessing_executor,
+        multithreading_executor=multithreading_executor,
+    )[station_id]
+    logger.info("COOPS-%s: Finished scraping: %s - %s", station_id, start_date, end_date)
+    return df
diff --git a/searvey/stations.py b/searvey/stations.py
index 9409c73..493dcc5 100644
--- a/searvey/stations.py
+++ b/searvey/stations.py
@@ -94,11 +94,14 @@ def _get_coops_stations(
     region: Polygon | MultiPolygon | None = None,
 ) -> gpd.GeoDataFrame:
     coops_gdf = coops.get_coops_stations(region=region, metadata_source="main")
+    # TODO: We don't have station type info in station API
+    # coops_gdf = coops_gdf.set_index("station_type", append=True)
+    coops_gdf = coops_gdf[~coops_gdf.index.duplicated()]
     coops_gdf = coops_gdf.assign(
         provider=Provider.COOPS.value,
-        provider_id=coops_gdf.index,
+        provider_id=coops_gdf.index.get_level_values("nos_id"),
         country=coops_gdf.state.where(coops_gdf.state.str.len() != 2, "USA"),
-        location=coops_gdf[["name", "state"]].agg(", ".join, axis=1),
+        location=coops_gdf[["name", "state"]].fillna("").agg(", ".join, axis=1).str.strip(", "),
         lon=coops_gdf.geometry.x,
         lat=coops_gdf.geometry.y,
         is_active=coops_gdf.status == "active",

From bddf9b5eb94dd8ffbbd9beb56d77bdcad779d39b Mon Sep 17 00:00:00 2001
From: Panos Mavrogiorgos
Date: Tue, 14 May 2024 19:06:38 +0300
Subject: [PATCH 04/15] refactor: Split IOC/COOPS APIs into separate modules

---
 examples/coops_data.ipynb |   4 +-
 searvey/__init__.py       |   4 +-
 searvey/_common.py        | 111 +++++++
 searvey/_coops_api.py     | 558 +++++++++++++++++++++++++++++++++
 searvey/_ioc_api.py       | 266 ++++++++++++++++
 searvey/coops.py          | 641 +-------------------------------------
 searvey/ioc.py            | 343 --------
tests/common_test.py | 95 ++++++ tests/ioc_api_test.py | 100 +----- 9 files changed, 1041 insertions(+), 1081 deletions(-) create mode 100644 searvey/_common.py create mode 100644 searvey/_coops_api.py create mode 100644 searvey/_ioc_api.py create mode 100644 tests/common_test.py diff --git a/examples/coops_data.ipynb b/examples/coops_data.ipynb index 0e95288..7d95ccb 100644 --- a/examples/coops_data.ipynb +++ b/examples/coops_data.ipynb @@ -171,7 +171,7 @@ "source": [ "import pytz\n", "from datetime import datetime\n", - "from searvey.coops import fetch_coops_station\n", + "from searvey._coops_api import fetch_coops_station\n", "\n", "station_id = '1612480'\n", "station_water_levels = fetch_coops_station(\n", @@ -279,7 +279,7 @@ "name": "python", "nbconvert_exporter": "python", "pygments_lexer": "ipython3", - "version": "3.10.9" + "version": "3.10.14" } }, "nbformat": 4, diff --git a/searvey/__init__.py b/searvey/__init__.py index 244081a..83ddf0c 100644 --- a/searvey/__init__.py +++ b/searvey/__init__.py @@ -1,8 +1,8 @@ from __future__ import annotations -from searvey.coops import fetch_coops_station +from searvey._coops_api import fetch_coops_station +from searvey._ioc_api import fetch_ioc_station from searvey.coops import get_coops_stations -from searvey.ioc import fetch_ioc_station from searvey.ioc import get_ioc_data from searvey.ioc import get_ioc_stations from searvey.stations import get_stations diff --git a/searvey/_common.py b/searvey/_common.py new file mode 100644 index 0000000..4b77ef7 --- /dev/null +++ b/searvey/_common.py @@ -0,0 +1,111 @@ +from __future__ import annotations + +import logging +import typing as T +import warnings +from datetime import timedelta + +import httpx +import limits +import multifutures +import pandas as pd +import tenacity + +from .custom_types import DatetimeLike + +logger = logging.getLogger(__name__) + + +def _to_utc(index: pd.DatetimeIndex | pd.Timestamp) -> pd.DatetimeIndex: + if index.tz: + ref = index + if isinstance(ref, pd.Timestamp): + ref = pd.DatetimeIndex([ref]) + if index.tz.utcoffset(ref[0]) != timedelta(): + warnings.warn("Converting to UTC!\nData is retrieved and stored in UTC time") + index = index.tz_convert("utc") + else: + warnings.warn("Assuming UTC!\nData is retrieved and stored in UTC time") + index = index.tz_localize("utc") + return index + + +def _to_datetime_index(ts: pd.Timestamp) -> pd.DatetimeIndex: + index = pd.DatetimeIndex([ts]) + return index + + +def _resolve_start_date(now: pd.Timestamp, start_date: DatetimeLike | None) -> pd.DatetimeIndex: + if start_date is None: + resolved_start_date = T.cast(pd.Timestamp, now - pd.Timedelta(days=7)) + else: + resolved_start_date = pd.to_datetime(start_date) + index = _to_datetime_index(resolved_start_date) + return index + + +def _resolve_end_date(now: pd.Timestamp, end_date: DatetimeLike | None) -> pd.DatetimeIndex: + if end_date is None: + resolved_end_date = now + else: + resolved_end_date = pd.to_datetime(end_date) + index = _to_datetime_index(resolved_end_date) + return index + + +def _resolve_rate_limit(rate_limit: multifutures.RateLimit | None) -> multifutures.RateLimit: + if rate_limit is None: + rate_limit = multifutures.RateLimit(rate_limit=limits.parse("5/second")) + return rate_limit + + +def _resolve_http_client(http_client: httpx.Client | None) -> httpx.Client: + if http_client is None: + timeout = httpx.Timeout(timeout=10, read=30) + http_client = httpx.Client(timeout=timeout) + return http_client + + +def _before_sleep(retry_state: T.Any) -> None: # pragma: 
no cover + logger.warning( + "Retrying %s: attempt %s ended with: %s", + retry_state.fn, + retry_state.attempt_number, + retry_state.outcome, + ) + + +RETRY: T.Callable[..., T.Any] = tenacity.retry( + stop=(tenacity.stop_after_delay(90) | tenacity.stop_after_attempt(10)), + wait=tenacity.wait_random(min=2, max=10), + retry=tenacity.retry_if_exception_type(httpx.TransportError), + before_sleep=_before_sleep, +) + + +def _fetch_url_main(url: str, client: httpx.Client, redirect: bool = False) -> str: + try: + response = client.get(url, follow_redirects=redirect) + except Exception: + logger.warning("Failed to retrieve: %s", url) + raise + data = response.text + return data + + +@RETRY +def _fetch_url( + url: str, + client: httpx.Client, + rate_limit: multifutures.RateLimit | None = None, + redirect: bool = False, + **kwargs: T.Any, +) -> str: + if rate_limit is not None: # pragma: no cover + while rate_limit.reached(): + multifutures.wait() # pragma: no cover + return _fetch_url_main( + url=url, + client=client, + redirect=redirect, + ) diff --git a/searvey/_coops_api.py b/searvey/_coops_api.py new file mode 100644 index 0000000..0611cec --- /dev/null +++ b/searvey/_coops_api.py @@ -0,0 +1,558 @@ +from __future__ import annotations + +import collections +import json +import logging +import typing as T +from collections import abc +from datetime import timedelta +from typing import Any + +import httpx +import multifutures +import numpy +import pandas +import pandas as pd + +from ._common import _fetch_url +from ._common import _resolve_end_date +from ._common import _resolve_http_client +from ._common import _resolve_rate_limit +from ._common import _resolve_start_date +from ._common import _to_utc +from .coops import COOPS_Interval +from .coops import COOPS_Product +from .coops import COOPS_TidalDatum +from .coops import COOPS_Units +from .custom_types import DatetimeLike +from .utils import pairwise + + +logger = logging.getLogger(__name__) + +# constants +COOPS_URL_TS_FORMAT = "%Y%m%d %H:%M" +COOPS_BASE_URL = "https://tidesandcurrents.noaa.gov/api/datagetter?" 
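+# For illustration only: a generated water-level request URL looks roughly as
+# follows (URL-encoding aside; the station id and the dates are placeholders):
+#
+#   https://tidesandcurrents.noaa.gov/api/datagetter?station=1612480&product=water_level
+#       &datum=MSL&units=metric&time_zone=gmt&format=json
+#       &application=oceanmodeling/stormevents&begin_date=20240101 00:00&end_date=20240131 00:00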
+ +COOPS_ProductFieldTypes = { + "bin": int, + "degree": float, + "depth": float, + "direction": str, + "ebb_dir": float, + "flags": str, # TODO: + "flood_dir": float, + "gust": float, + "highest": float, + "inferred": bool, + "lowest": float, + "month": int, + "quality": str, + "salinity": float, + "sigma": float, + "specific_gravity": float, + "speed": float, + "time": "datetime64[ns]", + "type": str, + "value": float, + "velocity": float, + "year": int, + "datum": str, +} + + +COOPS_ProductIntervalMap = { + COOPS_Product.WATER_LEVEL: [COOPS_Interval.NONE], + COOPS_Product.HOURLY_HEIGHT: [COOPS_Interval.NONE], + COOPS_Product.HIGH_LOW: [COOPS_Interval.NONE], + # COOPS_Product.DAILY_MEAN: [COOPS_Interval.NONE], + COOPS_Product.MONTHLY_MEAN: [COOPS_Interval.NONE], + COOPS_Product.ONE_MINUTE_WATER_LEVEL: [COOPS_Interval.NONE], + COOPS_Product.PREDICTIONS: [ + COOPS_Interval.H, + COOPS_Interval.ONE, + COOPS_Interval.FIVE, + COOPS_Interval.SIX, + COOPS_Interval.TEN, + COOPS_Interval.FIFTEEN, + COOPS_Interval.THIRTY, + COOPS_Interval.SIXTY, + COOPS_Interval.HILO, + COOPS_Interval.NONE, + ], + COOPS_Product.CURRENTS: [COOPS_Interval.H, COOPS_Interval.SIX, COOPS_Interval.NONE], + COOPS_Product.CURRENTS_PREDICTIONS: [ + COOPS_Interval.H, + COOPS_Interval.ONE, + COOPS_Interval.SIX, + COOPS_Interval.TEN, + COOPS_Interval.THIRTY, + COOPS_Interval.SIXTY, + COOPS_Interval.MAX_SLACK, + COOPS_Interval.NONE, + ], + COOPS_Product.AIR_GAP: [COOPS_Interval.SIX, COOPS_Interval.H, COOPS_Interval.NONE], + COOPS_Product.WIND: [COOPS_Interval.SIX, COOPS_Interval.H, COOPS_Interval.NONE], + COOPS_Product.AIR_PRESSURE: [COOPS_Interval.SIX, COOPS_Interval.H, COOPS_Interval.NONE], + COOPS_Product.AIR_TEMPERATURE: [COOPS_Interval.SIX, COOPS_Interval.H, COOPS_Interval.NONE], + COOPS_Product.VISIBILITY: [COOPS_Interval.SIX, COOPS_Interval.H, COOPS_Interval.NONE], + COOPS_Product.HUMIDITY: [COOPS_Interval.SIX, COOPS_Interval.H, COOPS_Interval.NONE], + COOPS_Product.WATER_TEMPERATURE: [COOPS_Interval.SIX, COOPS_Interval.H, COOPS_Interval.NONE], + COOPS_Product.CONDUCTIVITY: [COOPS_Interval.SIX, COOPS_Interval.H, COOPS_Interval.NONE], + # COOPS_Product.SALINITY: [COOPS_Interval.SIX, COOPS_Interval.H, COOPS_Interval.NONE], + COOPS_Product.DATUMS: [COOPS_Interval.NONE], +} + + +COOPS_MaxInterval = { + COOPS_Product.WATER_LEVEL: {COOPS_Interval.NONE: timedelta(days=30)}, + COOPS_Product.HOURLY_HEIGHT: {COOPS_Interval.NONE: timedelta(days=365)}, + COOPS_Product.HIGH_LOW: {COOPS_Interval.NONE: timedelta(days=365)}, + # COOPS_Product.DAILY_MEAN: {COOPS_Interval.NONE: timedelta(days=3650)}, + COOPS_Product.MONTHLY_MEAN: {COOPS_Interval.NONE: timedelta(days=73000)}, + COOPS_Product.ONE_MINUTE_WATER_LEVEL: {COOPS_Interval.NONE: timedelta(days=4)}, + COOPS_Product.PREDICTIONS: { + COOPS_Interval.H: timedelta(days=365), + COOPS_Interval.ONE: timedelta(days=365), + COOPS_Interval.FIVE: timedelta(days=365), + COOPS_Interval.SIX: timedelta(days=365), + COOPS_Interval.TEN: timedelta(days=365), + COOPS_Interval.FIFTEEN: timedelta(days=365), + COOPS_Interval.THIRTY: timedelta(days=365), + COOPS_Interval.SIXTY: timedelta(days=365), + COOPS_Interval.HILO: timedelta(days=365), + COOPS_Interval.NONE: timedelta(days=365), + }, + COOPS_Product.CURRENTS: { + COOPS_Interval.H: timedelta(days=365), + COOPS_Interval.SIX: timedelta(days=30), + COOPS_Interval.NONE: timedelta(days=30), + }, + COOPS_Product.CURRENTS_PREDICTIONS: { + COOPS_Interval.H: timedelta(days=30), + COOPS_Interval.ONE: timedelta(days=30), + COOPS_Interval.SIX: timedelta(days=30), 
+ COOPS_Interval.TEN: timedelta(days=30), + COOPS_Interval.THIRTY: timedelta(days=30), + COOPS_Interval.SIXTY: timedelta(days=30), + COOPS_Interval.MAX_SLACK: timedelta(days=30), + COOPS_Interval.NONE: timedelta(days=30), + }, + COOPS_Product.AIR_GAP: { + COOPS_Interval.H: timedelta(days=365), + COOPS_Interval.SIX: timedelta(days=30), + COOPS_Interval.NONE: timedelta(days=30), + }, + COOPS_Product.WIND: { + COOPS_Interval.H: timedelta(days=365), + COOPS_Interval.SIX: timedelta(days=30), + COOPS_Interval.NONE: timedelta(days=30), + }, + COOPS_Product.AIR_PRESSURE: { + COOPS_Interval.SIX: timedelta(days=30), + COOPS_Interval.H: timedelta(days=365), + COOPS_Interval.NONE: timedelta(days=30), + }, + COOPS_Product.AIR_TEMPERATURE: { + COOPS_Interval.H: timedelta(days=365), + COOPS_Interval.SIX: timedelta(days=30), + COOPS_Interval.NONE: timedelta(days=30), + }, + COOPS_Product.VISIBILITY: { + COOPS_Interval.SIX: timedelta(days=30), + COOPS_Interval.H: timedelta(days=365), + COOPS_Interval.NONE: timedelta(days=30), + }, + COOPS_Product.HUMIDITY: { + COOPS_Interval.SIX: timedelta(days=30), + COOPS_Interval.H: timedelta(days=365), + COOPS_Interval.NONE: timedelta(days=30), + }, + COOPS_Product.WATER_TEMPERATURE: { + COOPS_Interval.H: timedelta(days=365), + COOPS_Interval.SIX: timedelta(days=30), + COOPS_Interval.NONE: timedelta(days=30), + }, + COOPS_Product.CONDUCTIVITY: { + COOPS_Interval.H: timedelta(days=365), + COOPS_Interval.SIX: timedelta(days=30), + COOPS_Interval.NONE: timedelta(days=30), + }, + # COOPS_Product.SALINITY: { + # COOPS_Interval.H: timedelta(days=365), + # COOPS_Interval.SIX: timedelta(days=30), + # COOPS_Interval.NONE: timedelta(days=30), + # }, + COOPS_Product.DATUMS: {COOPS_Interval.NONE: timedelta(days=30)}, +} + + +COOPS_ProductFieldsNameMap = { + COOPS_Product.WATER_LEVEL: {"t": "time", "v": "value", "s": "sigma", "f": "flags", "q": "quality"}, + COOPS_Product.HOURLY_HEIGHT: {"t": "time", "v": "value", "s": "sigma", "f": "flags"}, + COOPS_Product.HIGH_LOW: {"t": "time", "v": "value", "ty": "type", "f": "flags"}, + # COOPS_Product.DAILY_MEAN: {"t": "time", "v": "value", "f": "flags"}, + COOPS_Product.MONTHLY_MEAN: { + "year": "year", + "month": "month", + "highest": "highest", + "MHHW": "MHHW", + "MHW": "MHW", + "MSL": "MSL", + "MTL": "MTL", + "MLW": "MLW", + "MLLW": "MLLW", + "DTL": "DTL", + "GT": "GT", + "MN": "MN", + "DHQ": "DHQ", + "DLQ": "DLQ", + "HWI": "HWI", + "LWI": "LWI", + "lowest": "lowest", + "inferred": "inferred", + }, + COOPS_Product.ONE_MINUTE_WATER_LEVEL: {"t": "time", "v": "value"}, + COOPS_Product.PREDICTIONS: {"t": "time", "v": "value"}, + COOPS_Product.AIR_GAP: {"t": "time", "v": "value", "s": "sigma", "f": "flags"}, + COOPS_Product.WIND: { + "t": "time", + "s": "speed", + "d": "degree", + "dr": "direction", + "g": "gust", + "f": "flags", + }, + COOPS_Product.AIR_PRESSURE: {"t": "time", "v": "value", "f": "flags"}, + COOPS_Product.AIR_TEMPERATURE: {"t": "time", "v": "value", "f": "flags"}, + COOPS_Product.VISIBILITY: {"t": "time", "v": "value", "f": "flags"}, + COOPS_Product.HUMIDITY: {"t": "time", "v": "value", "f": "flags"}, + COOPS_Product.WATER_TEMPERATURE: {"t": "time", "v": "value", "f": "flags"}, + COOPS_Product.CONDUCTIVITY: {"t": "time", "v": "value", "f": "flags"}, + # COOPS_Product.SALINITY: {"t": "time", "s": "salinity", "g": "specific_gravity"}, + COOPS_Product.CURRENTS: {"t": "time", "s": "speed", "d": "direction", "b": "bin"}, + COOPS_Product.CURRENTS_PREDICTIONS: { + "Time": "time", + "Velocity_Major": "velocity", + "meanEbbDir": 
"ebb_dir", + "meanFloodDir": "flood_dir", + "Bin": "bin", + "Depth": "depth", + "Speed": "speed", + "Direction": "direction", + }, + COOPS_Product.DATUMS: {"n": "datum", "v": "value"}, +} + + +def _parse_coops_responses( + coops_responses: list[multifutures.FutureResult], + executor: multifutures.ExecutorProtocol | None, +) -> list[multifutures.FutureResult]: + # Parse the json files using pandas + # This is a CPU heavy process, so let's use multiprocess + # Not all the urls contain data, so let's filter them out + kwargs = [] + for result in coops_responses: + station_id = result.kwargs["station_id"] # type: ignore[index] + product = result.kwargs["product"] # type: ignore[index] + if "error" in result.result: + msg = json.loads(result.result)["error"]["message"] + logger.error(f"{station_id}: Encountered an error response for {result.kwargs}!") + logger.error(f"--> {msg}") + continue + else: + kwargs.append(dict(station_id=station_id, product=product, content=result.result)) + logger.debug("Starting JSON parsing") + results = multifutures.multiprocess(_parse_json, func_kwargs=kwargs, check=False, executor=executor) + multifutures.check_results(results) + logger.debug("Finished JSON parsing") + return results + + +def _coops_date(ts: pd.Timestamp) -> str: + formatted: str = ts.strftime(COOPS_URL_TS_FORMAT) + return formatted + + +def _generate_urls( + station_id: str, + start_date: pd.Timestamp, + end_date: pd.Timestamp, + product: COOPS_Product = COOPS_Product.WATER_LEVEL, + datum: COOPS_TidalDatum = COOPS_TidalDatum.MSL, + units: COOPS_Units = COOPS_Units.METRIC, + interval: COOPS_Interval = COOPS_Interval.NONE, + **aux_params: Any, +) -> list[httpx.URL]: + if end_date < start_date: + raise ValueError(f"'end_date' must be after 'start_date': {end_date} vs {start_date}") + if end_date == start_date: + return [] + duration = end_date - start_date + periods = duration.days // COOPS_MaxInterval[product][interval].days + 2 + urls = [] + date_range = pd.date_range(start_date, end_date, periods=periods, unit="us", inclusive="both") + params = { + "station": station_id, + "product": product.value, + "datum": datum.value, + "units": units.value, + "time_zone": "gmt", # We always work with UTC/GMT + "format": "json", + "application": "oceanmodeling/stormevents", + } + if interval.value is not None: + params["interval"] = interval.value + params.update(**aux_params) + for start, stop in pairwise(date_range): + params["begin_date"] = _coops_date(start) + params["end_date"] = _coops_date(stop) + url = httpx.Request("GET", COOPS_BASE_URL, params=params).url + urls.append(url) + return urls + + +def _normalize_df(df: pd.DataFrame, product: COOPS_Product) -> pd.DataFrame: + # TODO: Add more info (datum, unit, tz)? + + nos_id = df.attrs["station_id"] + normalized = df.rename(columns=COOPS_ProductFieldsNameMap[product]) + logger.debug("%s: df contains the following columns: %s", nos_id, normalized.columns) + + normalized[normalized == ""] = numpy.nan + normalized = normalized.astype( + {k: v for k, v in COOPS_ProductFieldTypes.items() if k in normalized.columns}, errors="ignore" + ) + # NOTE: Datum and mean products doesn't have time! 
+    if "time" in normalized.columns:
+        normalized["time"] = pandas.to_datetime(normalized["time"], utc=True)
+        normalized.set_index("time", inplace=True)
+
+    return normalized
+
+
+def _parse_json(content: str, station_id: str, product: COOPS_Product) -> pd.DataFrame:
+    err_msg = ""
+    content_json = {}
+    try:
+        content_json = json.loads(content)
+        if not content_json:
+            err_msg = f"{station_id}: The station does not contain any data for {product.value}!"
+    except json.JSONDecodeError as e:
+        err_msg = f"{station_id}: Error decoding JSON {str(e)}"
+
+    if err_msg:
+        logger.error(err_msg)
+        return pd.DataFrame()
+
+    data = []
+    if product == COOPS_Product.CURRENTS_PREDICTIONS:
+        data = content_json["current_predictions"]["cp"]
+    elif product == COOPS_Product.PREDICTIONS:
+        data = content_json["predictions"]
+    elif product == COOPS_Product.DATUMS:
+        data = content_json["datums"]
+    else:
+        data = content_json["data"]
+
+    df = pd.DataFrame(data)
+    df.attrs["station_id"] = f"COOPS-{station_id}"
+    df = _normalize_df(df, product)
+    return df
+
+
+def _group_results(
+    station_ids: abc.Collection[str],
+    parsed_responses: list[multifutures.FutureResult],
+) -> dict[str, pd.DataFrame]:
+    # Group per COOPS code
+    df_groups = collections.defaultdict(list)
+    for item in parsed_responses:
+        df_groups[item.kwargs["station_id"]].append(item.result)  # type: ignore[index]
+
+    # Concatenate dataframes
+    dataframes: dict[str, pd.DataFrame] = {}
+    for station_id in station_ids:
+        if station_id in df_groups:
+            df_group = df_groups[station_id]
+            df = pd.concat(df_group)
+            df = df.sort_index()
+            logger.debug("COOPS-%s: Timestamps: %d", station_id, len(df))
+        else:
+            logger.warning("COOPS-%s: No data. Creating a dummy dataframe", station_id)
+            df = T.cast(
+                pd.DataFrame, pd.DataFrame(columns=["time"], dtype="datetime64[ns]").set_index("time")
+            )
+        dataframes[station_id] = df
+        logger.debug("COOPS-%s: Finished conversion to pandas", station_id)
+
+    return dataframes
+
+
+def _retrieve_coops_data(
+    station_ids: abc.Collection[str],
+    start_dates: abc.Collection[pd.Timestamp],
+    end_dates: abc.Collection[pd.Timestamp],
+    product: COOPS_Product,
+    datum: COOPS_TidalDatum,
+    units: COOPS_Units,
+    interval: COOPS_Interval,
+    rate_limit: multifutures.RateLimit,
+    http_client: httpx.Client,
+    executor: multifutures.ExecutorProtocol | None,
+    **aux_params: Any,
+) -> list[multifutures.FutureResult]:
+    kwargs = []
+
+    valid_intervals = COOPS_ProductIntervalMap[product]
+    if interval not in valid_intervals:
+        raise ValueError(
+            "interval must be one of '{}'".format("', '".join(str(v.value) for v in valid_intervals))
+        )
+
+    for station_id, start_date, end_date in zip(station_ids, start_dates, end_dates):
+        url_kwargs = {
+            "station_id": station_id,
+            "start_date": start_date,
+            "end_date": end_date,
+            "product": product,
+            "datum": datum,
+            "units": units,
+            "interval": interval,
+            **aux_params,
+        }
+        for url in _generate_urls(**url_kwargs):
+            if url:
+                kwargs.append(
+                    dict(
+                        station_id=station_id,
+                        url=url,
+                        client=http_client,
+                        rate_limit=rate_limit,
+                        product=product,
+                        redirect=True,
+                    ),
+                )
+    with http_client:
+        logger.debug("Starting data retrieval")
+        results = multifutures.multithread(
+            func=_fetch_url, func_kwargs=kwargs, check=False, executor=executor
+        )
+        logger.debug("Finished data retrieval")
+    multifutures.check_results(results)
+    return results
+
+
+def _fetch_coops(
+    station_ids: abc.Collection[str],
+    start_dates: pd.DatetimeIndex,
+    end_dates: pd.DatetimeIndex,
+    *,
+    product: COOPS_Product | str,
+    datum: COOPS_TidalDatum | str,
+    units: COOPS_Units | str,
+    interval: COOPS_Interval | int | str | None,
+    rate_limit: multifutures.RateLimit | None,
+    http_client: httpx.Client | None,
+    multiprocessing_executor: multifutures.ExecutorProtocol | None,
+    multithreading_executor: multifutures.ExecutorProtocol | None,
+    **aux_params: Any,
+) -> dict[str, pd.DataFrame]:
+    rate_limit = _resolve_rate_limit(rate_limit)
+    http_client = _resolve_http_client(http_client)
+    start_dates = _to_utc(start_dates)
+    end_dates = _to_utc(end_dates)
+    # Fetch json files from the COOPS website
+    # We use multithreading in order to be able to use RateLimit and to take advantage of higher performance
+
+    coops_responses: list[multifutures.FutureResult] = _retrieve_coops_data(
+        station_ids=station_ids,
+        start_dates=start_dates,
+        end_dates=end_dates,
+        product=COOPS_Product(product),
+        datum=COOPS_TidalDatum(datum),
+        units=COOPS_Units(units),
+        interval=COOPS_Interval(interval),
+        rate_limit=rate_limit,
+        http_client=http_client,
+        executor=multithreading_executor,
+        **aux_params,
+    )
+    # Parse the json files using pandas
+    # This is a CPU heavy process, so we are using multiprocessing here
+    parsed_responses: list[multifutures.FutureResult] = _parse_coops_responses(
+        coops_responses=coops_responses,
+        executor=multiprocessing_executor,
+    )
+    # OK, now we have a list of dataframes. We need to group them per coops_code, concatenate them and remove duplicates
+    dataframes = _group_results(station_ids=station_ids, parsed_responses=parsed_responses)
+    return dataframes
+
+
+def fetch_coops_station(
+    station_id: str,
+    start_date: DatetimeLike | None = None,
+    end_date: DatetimeLike | None = None,
+    *,
+    rate_limit: multifutures.RateLimit | None = None,
+    http_client: httpx.Client | None = None,
+    multiprocessing_executor: multifutures.ExecutorProtocol | None = None,
+    multithreading_executor: multifutures.ExecutorProtocol | None = None,
+    product: COOPS_Product | str = COOPS_Product.WATER_LEVEL,
+    datum: COOPS_TidalDatum | str = COOPS_TidalDatum.MSL,
+    units: COOPS_Units | str = COOPS_Units.METRIC,
+    interval: COOPS_Interval | str | int | None = COOPS_Interval.NONE,
+    **aux_params: Any,
+) -> pd.DataFrame:
+    """
+    Make a query to the COOPS API for tide gauge data for ``station_id``
+    and return the results as a ``pandas.DataFrame``.
+
+    ``start_date`` and ``end_date`` can be of any type that is valid for ``pandas.to_datetime()``.
+    If ``start_date`` or ``end_date`` are timezone-aware timestamps they are coerced to UTC.
+    The returned data are always in UTC.
+
+    Each query to the COOPS API can request up to 30 days of data.
+    When we request data for larger time spans, multiple requests are made.
+    This is where ``rate_limit``, ``multiprocessing_executor`` and ``multithreading_executor``
+    come into play.
+
+    In order to make the data retrieval more efficient, a multithreading pool is spawned
+    and the requests are executed concurrently, while adhering to the ``rate_limit``.
+    The parsing of the JSON responses is a CPU heavy process so it is done within a multiprocessing Pool.
+
+    If no arguments are specified, then sensible defaults are used, but if the pools need to be
+    configured, an `executor` instance needs to be passed as an argument. For example:
+
+    .. code-block:: python
+
+        executor = concurrent.futures.ProcessPoolExecutor(max_workers=4)
+        df = fetch_coops_station("1612480", multiprocessing_executor=executor)
+
+    :param station_id: The station identifier.
In COOPS terminology, this is called ``coops_code``.
+    :param start_date: The starting date of the query. Defaults to 7 days ago.
+    :param end_date: The finishing date of the query. Defaults to "now".
+    :param rate_limit: The rate limit for making requests to the COOPS servers. Defaults to 5 requests/second.
+    :param http_client: The ``httpx.Client``.
+    :param multiprocessing_executor: An instance of a class implementing the ``concurrent.futures.Executor`` API.
+    :param multithreading_executor: An instance of a class implementing the ``concurrent.futures.Executor`` API.
+    """
+    logger.info("COOPS-%s: Starting scraping: %s - %s", station_id, start_date, end_date)
+    now = pd.Timestamp.now("utc")
+    df = _fetch_coops(
+        station_ids=[station_id],
+        start_dates=_resolve_start_date(now, start_date),
+        end_dates=_resolve_end_date(now, end_date),
+        product=COOPS_Product(product),
+        datum=COOPS_TidalDatum(datum),
+        units=COOPS_Units(units),
+        interval=COOPS_Interval(interval),
+        **aux_params,
+        rate_limit=rate_limit,
+        http_client=http_client,
+        multiprocessing_executor=multiprocessing_executor,
+        multithreading_executor=multithreading_executor,
+    )[station_id]
+    logger.info("COOPS-%s: Finished scraping: %s - %s", station_id, start_date, end_date)
+    return df
diff --git a/searvey/_ioc_api.py b/searvey/_ioc_api.py
new file mode 100644
index 0000000..6f098af
--- /dev/null
+++ b/searvey/_ioc_api.py
@@ -0,0 +1,266 @@
+from __future__ import annotations
+
+import collections
+import io
+import logging
+import typing as T
+from collections import abc
+
+import httpx
+import multifutures
+import pandas as pd
+
+from ._common import _fetch_url
+from ._common import _resolve_end_date
+from ._common import _resolve_http_client
+from ._common import _resolve_rate_limit
+from ._common import _resolve_start_date
+from ._common import _to_utc
+from .custom_types import DatetimeLike
+from .ioc import IOC_STATION_DATA_COLUMNS
+from .utils import pairwise
+
+
+logger = logging.getLogger(__name__)
+
+
+BASE_URL = "https://www.ioc-sealevelmonitoring.org/service.php?query=data&timestart={timestart}&timestop={timestop}&code={ioc_code}"
+
+IOC_URL_TS_FORMAT = "%Y-%m-%dT%H:%M:%S"
+IOC_JSON_TS_FORMAT = "%Y-%m-%d %H:%M:%S"
+
+
+def _parse_ioc_responses(
+    ioc_responses: list[multifutures.FutureResult],
+    executor: multifutures.ExecutorProtocol | None,
+) -> list[multifutures.FutureResult]:
+    # Parse the json files using pandas
+    # This is a CPU heavy process, so let's use multiprocess
+    # Not all the urls contain data, so let's filter them out
+    kwargs = []
+    for result in ioc_responses:
+        station_id = result.kwargs["station_id"]  # type: ignore[index]
+        # if a url doesn't have any data instead of a 404, it returns an empty list `[]`
+        if result.result == "[]":
+            continue
+        # For some stations though we get a json like this:
+        #    '[{"error":"code \'blri\' not found"}]'
+        #    '[{"error":"code \'bmda2\' not found"}]'
+        # we should ignore these, too
+        elif result.result == f"""[{{"error":"code '{station_id}' not found"}}]""":
+            continue
+        # And if the IOC code does not match some pattern (5 letters?) then we get this error
+        elif result.result == '[{"error":"Incorrect code"}]':
+            continue
+        else:
+            kwargs.append(dict(station_id=station_id, content=io.StringIO(result.result)))
+    logger.debug("Starting JSON parsing")
+    results = multifutures.multiprocess(_parse_json, func_kwargs=kwargs, check=False, executor=executor)
+    multifutures.check_results(results)
+    logger.debug("Finished JSON parsing")
+    return results
+
+
+def _ioc_date(ts: pd.Timestamp) -> str:
+    formatted: str = ts.strftime(IOC_URL_TS_FORMAT)
+    return formatted
+
+
+def _generate_urls(
+    station_id: str,
+    start_date: pd.Timestamp,
+    end_date: pd.Timestamp,
+) -> list[str]:
+    if end_date < start_date:
+        raise ValueError(f"'end_date' must be after 'start_date': {end_date} vs {start_date}")
+    if end_date == start_date:
+        return []
+    duration = end_date - start_date
+    periods = duration.days // 30 + 2
+    urls = []
+    date_range = pd.date_range(start_date, end_date, periods=periods, unit="us", inclusive="both")
+    for start, stop in pairwise(date_range):
+        timestart = _ioc_date(start)
+        timestop = _ioc_date(stop)
+        url = BASE_URL.format(ioc_code=station_id, timestart=timestart, timestop=timestop)
+        urls.append(url)
+    return urls
+
+
+def _normalize_df(df: pd.DataFrame) -> pd.DataFrame:
+    normalized = (
+        df[df.sensor.isin(IOC_STATION_DATA_COLUMNS.values())]
+        .assign(stime=pd.DatetimeIndex(pd.to_datetime(df.stime.str.strip(), format=IOC_JSON_TS_FORMAT)))
+        .rename(columns={"stime": "time"})
+    )
+    # Occasionally IOC contains complete garbage. E.g. duplicate timestamps on the same sensor. We should drop those.
+    # https://www.ioc-sealevelmonitoring.org/service.php?query=data&timestart=2022-03-12T11:03:40&timestop=2022-04-11T09:04:26&code=acnj
+    duplicated_timestamps = normalized[["time", "sensor"]].duplicated()
+    if duplicated_timestamps.sum() > 0:
+        normalized = normalized[~duplicated_timestamps]
+        logger.warning(
+            "%s: Dropped duplicates: %d rows", normalized.attrs["station_id"], duplicated_timestamps.sum()
+        )
+    normalized = normalized.pivot(index="time", columns="sensor", values="slevel")
+    # clear the "sensor" label that pivot() leaves on the columns (reaches into pandas internals)
+    normalized._mgr.items.name = ""
+    return normalized
+
+
+def _parse_json(content: str, station_id: str) -> pd.DataFrame:
+    df = pd.read_json(content, orient="records")
+    df.attrs["station_id"] = f"IOC-{station_id}"
+    df = _normalize_df(df)
+    return df
+
+
+def _group_results(
+    station_ids: abc.Collection[str],
+    parsed_responses: list[multifutures.FutureResult],
+) -> dict[str, pd.DataFrame]:
+    # Group per IOC code
+    df_groups = collections.defaultdict(list)
+    for item in parsed_responses:
+        df_groups[item.kwargs["station_id"]].append(item.result)  # type: ignore[index]
+
+    # Concatenate dataframes and remove duplicates
+    dataframes: dict[str, pd.DataFrame] = {}
+    for station_id in station_ids:
+        if station_id in df_groups:
+            df_group = df_groups[station_id]
+            df = pd.concat(df_group)
+            df = df.sort_index()
+            logger.debug("IOC-%s: Total timestamps : %d", station_id, len(df))
+            df = df[~df.index.duplicated()]
+            logger.debug("IOC-%s: Unique timestamps: %d", station_id, len(df))
+        else:
+            logger.warning("IOC-%s: No data.
Creating a dummy dataframe", station_id)
+            df = T.cast(
+                pd.DataFrame, pd.DataFrame(columns=["time"], dtype="datetime64[ns]").set_index("time")
+            )
+        dataframes[station_id] = df
+        logger.debug("IOC-%s: Finished conversion to pandas", station_id)
+
+    return dataframes
+
+
+def _retrieve_ioc_data(
+    station_ids: abc.Collection[str],
+    start_dates: abc.Collection[pd.Timestamp],
+    end_dates: abc.Collection[pd.Timestamp],
+    rate_limit: multifutures.RateLimit,
+    http_client: httpx.Client,
+    executor: multifutures.ExecutorProtocol | None,
+) -> list[multifutures.FutureResult]:
+    kwargs = []
+    for station_id, start_date, end_date in zip(station_ids, start_dates, end_dates):
+        for url in _generate_urls(station_id=station_id, start_date=start_date, end_date=end_date):
+            if url:
+                kwargs.append(
+                    dict(
+                        station_id=station_id,
+                        url=url,
+                        client=http_client,
+                        rate_limit=rate_limit,
+                    ),
+                )
+    with http_client:
+        logger.debug("Starting data retrieval")
+        results = multifutures.multithread(
+            func=_fetch_url, func_kwargs=kwargs, check=False, executor=executor
+        )
+        logger.debug("Finished data retrieval")
+    multifutures.check_results(results)
+    return results
+
+
+def _fetch_ioc(
+    station_ids: abc.Collection[str],
+    start_dates: pd.DatetimeIndex,
+    end_dates: pd.DatetimeIndex,
+    *,
+    rate_limit: multifutures.RateLimit | None,
+    http_client: httpx.Client | None,
+    multiprocessing_executor: multifutures.ExecutorProtocol | None,
+    multithreading_executor: multifutures.ExecutorProtocol | None,
+) -> dict[str, pd.DataFrame]:
+    rate_limit = _resolve_rate_limit(rate_limit)
+    http_client = _resolve_http_client(http_client)
+    start_dates = _to_utc(start_dates)
+    end_dates = _to_utc(end_dates)
+    # Fetch json files from the IOC website
+    # We use multithreading in order to be able to use RateLimit and to take advantage of higher performance
+    ioc_responses: list[multifutures.FutureResult] = _retrieve_ioc_data(
+        station_ids=station_ids,
+        start_dates=start_dates,
+        end_dates=end_dates,
+        rate_limit=rate_limit,
+        http_client=http_client,
+        executor=multithreading_executor,
+    )
+    # Parse the json files using pandas
+    # This is a CPU heavy process, so we are using multiprocessing here
+    parsed_responses: list[multifutures.FutureResult] = _parse_ioc_responses(
+        ioc_responses=ioc_responses,
+        executor=multiprocessing_executor,
+    )
+    # OK, now we have a list of dataframes. We need to group them per ioc_code, concatenate them and remove duplicates
+    dataframes = _group_results(station_ids=station_ids, parsed_responses=parsed_responses)
+    return dataframes
+
+
+def fetch_ioc_station(
+    station_id: str,
+    start_date: DatetimeLike | None = None,
+    end_date: DatetimeLike | None = None,
+    *,
+    rate_limit: multifutures.RateLimit | None = None,
+    http_client: httpx.Client | None = None,
+    multiprocessing_executor: multifutures.ExecutorProtocol | None = None,
+    multithreading_executor: multifutures.ExecutorProtocol | None = None,
+) -> pd.DataFrame:
+    """
+    Make a query to the IOC API for tide gauge data for ``station_id``
+    and return the results as a ``pandas.DataFrame``.
+
+    ``start_date`` and ``end_date`` can be of any type that is valid for ``pandas.to_datetime()``.
+    If ``start_date`` or ``end_date`` are timezone-aware timestamps they are coerced to UTC.
+    The returned data are always in UTC.
+
+    Each query to the IOC API can request up to 30 days of data.
+    When we request data for larger time spans, multiple requests are made.
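+    Any timestamps that are duplicated across these requests are dropped when the chunks are merged.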
+
+
+def fetch_ioc_station(
+    station_id: str,
+    start_date: DatetimeLike | None = None,
+    end_date: DatetimeLike | None = None,
+    *,
+    rate_limit: multifutures.RateLimit | None = None,
+    http_client: httpx.Client | None = None,
+    multiprocessing_executor: multifutures.ExecutorProtocol | None = None,
+    multithreading_executor: multifutures.ExecutorProtocol | None = None,
+) -> pd.DataFrame:
+    """
+    Make a query to the IOC API for tide gauge data for ``station_id``
+    and return the results as a ``pandas.DataFrame``.
+
+    ``start_date`` and ``end_date`` can be of any type that is valid for ``pandas.to_datetime()``.
+    If ``start_date`` or ``end_date`` are timezone-aware timestamps they are coerced to UTC.
+    The returned data are always in UTC.
+
+    Each query to the IOC API can request up to 30 days of data.
+    When we request data for larger time spans, multiple requests are made.
+    This is where ``rate_limit``, ``multiprocessing_executor`` and ``multithreading_executor``
+    come into play.
+
+    In order to make the data retrieval more efficient, a multithreading pool is spawned
+    and the requests are executed concurrently, while adhering to the ``rate_limit``.
+    The parsing of the JSON responses is a CPU heavy process, so it is done within a multiprocessing pool.
+
+    If no arguments are specified, sensible defaults are used, but if the pools need to be
+    configured, an ``executor`` instance needs to be passed as an argument. For example:
+
+    .. code-block:: python
+
+        executor = concurrent.futures.ProcessPoolExecutor(max_workers=4)
+        df = fetch_ioc_station("acap", multiprocessing_executor=executor)
+
+    :param station_id: The station identifier. In IOC terminology, this is called ``ioc_code``.
+    :param start_date: The starting date of the query. Defaults to 7 days ago.
+    :param end_date: The finishing date of the query. Defaults to "now".
+    :param rate_limit: The rate limit for making requests to the IOC servers. Defaults to 5 requests/second.
+    :param http_client: The ``httpx.Client`` instance to use for the requests.
+    :param multiprocessing_executor: An instance of a class implementing the ``concurrent.futures.Executor`` API.
+    :param multithreading_executor: An instance of a class implementing the ``concurrent.futures.Executor`` API.
+    """
+    logger.info("IOC-%s: Starting scraping: %s - %s", station_id, start_date, end_date)
+    now = pd.Timestamp.now("utc")
+    df = _fetch_ioc(
+        station_ids=[station_id],
+        start_dates=_resolve_start_date(now, start_date),
+        end_dates=_resolve_end_date(now, end_date),
+        rate_limit=rate_limit,
+        http_client=http_client,
+        multiprocessing_executor=multiprocessing_executor,
+        multithreading_executor=multithreading_executor,
+    )[station_id]
+    logger.info("IOC-%s: Finished scraping: %s - %s", station_id, start_date, end_date)
+    return df
diff --git a/searvey/coops.py b/searvey/coops.py
index 90f5863..5494132 100644
--- a/searvey/coops.py
+++ b/searvey/coops.py
@@ -2,14 +2,10 @@
 # https://api.tidesandcurrents.noaa.gov/api/prod/
 from __future__ import annotations
 
-import collections
 import json
 import logging
-import typing as T
 import warnings
-from collections import abc
 from datetime import datetime
-from datetime import timedelta
 from enum import Enum
 from functools import lru_cache
 from pathlib import Path
@@ -19,15 +15,12 @@
 from typing import Union
 
 import geopandas
-import httpx
-import limits
 import multifutures
 import numpy
 import pandas
 import pandas as pd
 import requests
 import shapely
-import tenacity
 import xarray
 from bs4 import BeautifulSoup
 from bs4 import element
@@ -39,17 +32,11 @@
 from shapely.geometry import Polygon
 from xarray import Dataset
 
-from .custom_types import DatetimeLike
 from .utils import get_region
-from .utils import pairwise
 
 logger = logging.getLogger(__name__)
 
-# constants
-COOPS_URL_TS_FORMAT = "%Y%m%d %H:%M"
-COOPS_BASE_URL = "https://tidesandcurrents.noaa.gov/api/datagetter?"
- class COOPS_StationStatus(Enum): ACTIVE = "active" @@ -134,102 +121,6 @@ class COOPS_Product(Enum): # noqa: N801 ) -COOPS_ProductFieldsNameMap = { - COOPS_Product.WATER_LEVEL: {"t": "time", "v": "value", "s": "sigma", "f": "flags", "q": "quality"}, - COOPS_Product.HOURLY_HEIGHT: {"t": "time", "v": "value", "s": "sigma", "f": "flags"}, - COOPS_Product.HIGH_LOW: {"t": "time", "v": "value", "ty": "type", "f": "flags"}, - COOPS_Product.DAILY_MEAN: {"t": "time", "v": "value", "f": "flags"}, - COOPS_Product.MONTHLY_MEAN: { - "year": "year", - "month": "month", - "highest": "highest", - "MHHW": "MHHW", - "MHW": "MHW", - "MSL": "MSL", - "MTL": "MTL", - "MLW": "MLW", - "MLLW": "MLLW", - "DTL": "DTL", - "GT": "GT", - "MN": "MN", - "DHQ": "DHQ", - "DLQ": "DLQ", - "HWI": "HWI", - "LWI": "LWI", - "lowest": "lowest", - "inferred": "inferred", - }, - COOPS_Product.ONE_MINUTE_WATER_LEVEL: {"t": "time", "v": "value"}, - COOPS_Product.PREDICTIONS: {"t": "time", "v": "value"}, - COOPS_Product.AIR_GAP: {"t": "time", "v": "value", "s": "sigma", "f": "flags"}, - COOPS_Product.WIND: { - "t": "time", - "s": "speed", - "d": "degree", - "dr": "direction", - "g": "gust", - "f": "flags", - }, - COOPS_Product.AIR_PRESSURE: {"t": "time", "v": "value", "f": "flags"}, - COOPS_Product.AIR_TEMPERATURE: {"t": "time", "v": "value", "f": "flags"}, - COOPS_Product.VISIBILITY: {"t": "time", "v": "value", "f": "flags"}, - COOPS_Product.HUMIDITY: {"t": "time", "v": "value", "f": "flags"}, - COOPS_Product.WATER_TEMPERATURE: {"t": "time", "v": "value", "f": "flags"}, - COOPS_Product.CONDUCTIVITY: {"t": "time", "v": "value", "f": "flags"}, - COOPS_Product.SALINITY: {"t": "time", "s": "salinity", "g": "specific_gravity"}, - COOPS_Product.CURRENTS: {"t": "time", "s": "speed", "d": "direction", "b": "bin"}, - COOPS_Product.CURRENTS_PREDICTIONS: { - "Time": "time", - "Velocity_Major": "velocity", - "meanEbbDir": "ebb_dir", - "meanFloodDir": "flood_dir", - "Bin": "bin", - "Depth": "depth", - "Speed": "speed", - "Direction": "direction", - }, -} - - -COOPS_ProductFieldTypes = { - "DHQ": float, - "DLQ": float, - "DTL": float, - "GT": float, - "HWI": int, - "LWI": int, - "MHHW": float, - "MHW": float, - "MLLW": float, - "MLW": float, - "MN": float, - "MSL": float, - "MTL": float, - "bin": int, - "degree": float, - "depth": float, - "direction": str, - "ebb_dir": float, - "flags": str, # TODO: - "flood_dir": float, - "gust": float, - "highest": float, - "inferred": lambda x: bool(int(x)), - "lowest": float, - "month": int, - "quality": str, - "salinity": float, - "sigma": float, - "specific_gravity": float, - "speed": float, - "time": float, - "type": str, - "value": float, - "velocity": float, - "year": int, -} - - class COOPS_Interval(Enum): # noqa: N801 H = "h" # Hourly Met data and harmonic predictions will be returned HILO = "hilo" # High/Low tide predictions for all stations. 
@@ -244,132 +135,6 @@ class COOPS_Interval(Enum): # noqa: N801 NONE = None -COOPS_ProductIntervalMap = { - COOPS_Product.WATER_LEVEL: [COOPS_Interval.NONE], - COOPS_Product.HOURLY_HEIGHT: [COOPS_Interval.NONE], - COOPS_Product.HIGH_LOW: [COOPS_Interval.NONE], - COOPS_Product.DAILY_MEAN: [COOPS_Interval.NONE], - COOPS_Product.MONTHLY_MEAN: [COOPS_Interval.NONE], - COOPS_Product.ONE_MINUTE_WATER_LEVEL: [COOPS_Interval.NONE], - COOPS_Product.PREDICTIONS: [ - COOPS_Interval.H, - COOPS_Interval.ONE, - COOPS_Interval.FIVE, - COOPS_Interval.SIX, - COOPS_Interval.TEN, - COOPS_Interval.FIFTEEN, - COOPS_Interval.THIRTY, - COOPS_Interval.SIXTY, - COOPS_Interval.HILO, - COOPS_Interval.NONE, - ], - COOPS_Product.CURRENTS: [COOPS_Interval.H, COOPS_Interval.SIX, COOPS_Interval.NONE], - COOPS_Product.CURRENTS_PREDICTIONS: [ - COOPS_Interval.H, - COOPS_Interval.ONE, - COOPS_Interval.SIX, - COOPS_Interval.TEN, - COOPS_Interval.THIRTY, - COOPS_Interval.SIXTY, - COOPS_Interval.MAX_SLACK, - COOPS_Interval.NONE, - ], - COOPS_Product.AIR_GAP: [COOPS_Interval.SIX, COOPS_Interval.H, COOPS_Interval.NONE], - COOPS_Product.WIND: [COOPS_Interval.SIX, COOPS_Interval.H, COOPS_Interval.NONE], - COOPS_Product.AIR_PRESSURE: [COOPS_Interval.SIX, COOPS_Interval.H, COOPS_Interval.NONE], - COOPS_Product.AIR_TEMPERATURE: [COOPS_Interval.SIX, COOPS_Interval.H, COOPS_Interval.NONE], - COOPS_Product.VISIBILITY: [COOPS_Interval.SIX, COOPS_Interval.H, COOPS_Interval.NONE], - COOPS_Product.HUMIDITY: [COOPS_Interval.SIX, COOPS_Interval.H, COOPS_Interval.NONE], - COOPS_Product.WATER_TEMPERATURE: [COOPS_Interval.SIX, COOPS_Interval.H, COOPS_Interval.NONE], - COOPS_Product.CONDUCTIVITY: [COOPS_Interval.SIX, COOPS_Interval.H, COOPS_Interval.NONE], - COOPS_Product.SALINITY: [COOPS_Interval.SIX, COOPS_Interval.H, COOPS_Interval.NONE], - COOPS_Product.DATUMS: [COOPS_Interval.NONE], -} - - -COOPS_MaxInterval = { - COOPS_Product.WATER_LEVEL: {COOPS_Interval.NONE: timedelta(days=30)}, - COOPS_Product.HOURLY_HEIGHT: {COOPS_Interval.NONE: timedelta(days=365)}, - COOPS_Product.HIGH_LOW: {COOPS_Interval.NONE: timedelta(days=365)}, - COOPS_Product.DAILY_MEAN: {COOPS_Interval.NONE: timedelta(days=3650)}, - COOPS_Product.MONTHLY_MEAN: {COOPS_Interval.NONE: timedelta(days=73000)}, - COOPS_Product.ONE_MINUTE_WATER_LEVEL: {COOPS_Interval.NONE: timedelta(days=4)}, - COOPS_Product.PREDICTIONS: { - COOPS_Interval.H: timedelta(days=365), - COOPS_Interval.ONE: timedelta(days=365), - COOPS_Interval.FIVE: timedelta(days=365), - COOPS_Interval.SIX: timedelta(days=365), - COOPS_Interval.TEN: timedelta(days=365), - COOPS_Interval.FIFTEEN: timedelta(days=365), - COOPS_Interval.THIRTY: timedelta(days=365), - COOPS_Interval.SIXTY: timedelta(days=365), - COOPS_Interval.HILO: timedelta(days=365), - COOPS_Interval.NONE: timedelta(days=365), - }, - COOPS_Product.CURRENTS: { - COOPS_Interval.H: timedelta(days=365), - COOPS_Interval.SIX: timedelta(days=30), - COOPS_Interval.NONE: timedelta(days=30), - }, - COOPS_Product.CURRENTS_PREDICTIONS: { - COOPS_Interval.H: timedelta(days=30), - COOPS_Interval.ONE: timedelta(days=30), - COOPS_Interval.SIX: timedelta(days=30), - COOPS_Interval.TEN: timedelta(days=30), - COOPS_Interval.THIRTY: timedelta(days=30), - COOPS_Interval.SIXTY: timedelta(days=30), - COOPS_Interval.MAX_SLACK: timedelta(days=30), - COOPS_Interval.NONE: timedelta(days=30), - }, - COOPS_Product.AIR_GAP: { - COOPS_Interval.H: timedelta(days=365), - COOPS_Interval.SIX: timedelta(days=30), - COOPS_Interval.NONE: timedelta(days=30), - }, - COOPS_Product.WIND: { - 
COOPS_Interval.H: timedelta(days=365), - COOPS_Interval.SIX: timedelta(days=30), - COOPS_Interval.NONE: timedelta(days=30), - }, - COOPS_Product.AIR_PRESSURE: { - COOPS_Interval.SIX: timedelta(days=30), - COOPS_Interval.H: timedelta(days=365), - COOPS_Interval.NONE: timedelta(days=30), - }, - COOPS_Product.AIR_TEMPERATURE: { - COOPS_Interval.H: timedelta(days=365), - COOPS_Interval.SIX: timedelta(days=30), - COOPS_Interval.NONE: timedelta(days=30), - }, - COOPS_Product.VISIBILITY: { - COOPS_Interval.SIX: timedelta(days=30), - COOPS_Interval.H: timedelta(days=365), - COOPS_Interval.NONE: timedelta(days=30), - }, - COOPS_Product.HUMIDITY: { - COOPS_Interval.SIX: timedelta(days=30), - COOPS_Interval.H: timedelta(days=365), - COOPS_Interval.NONE: timedelta(days=30), - }, - COOPS_Product.WATER_TEMPERATURE: { - COOPS_Interval.H: timedelta(days=365), - COOPS_Interval.SIX: timedelta(days=30), - COOPS_Interval.NONE: timedelta(days=30), - }, - COOPS_Product.CONDUCTIVITY: { - COOPS_Interval.H: timedelta(days=365), - COOPS_Interval.SIX: timedelta(days=30), - COOPS_Interval.NONE: timedelta(days=30), - }, - COOPS_Product.SALINITY: { - COOPS_Interval.H: timedelta(days=365), - COOPS_Interval.SIX: timedelta(days=30), - COOPS_Interval.NONE: timedelta(days=30), - }, - COOPS_Product.DATUMS: {COOPS_Interval.NONE: timedelta(days=30)}, -} - - class COOPS_TidalDatum(Enum): # noqa: N801 """ CRD Only some stations on the Columbia River, WA/OR @@ -393,8 +158,8 @@ class COOPS_TidalDatum(Enum): # noqa: N801 class COOPS_VelocityType(Enum): # noqa: N801 - SPEED_DIR = "speed_dir" # Return results for speed and dirction - DEFAULT = "default" # Return results for velocity major, mean flood direction and mean ebb dirction + SPEED_DIR = "speed_dir" # Return results for speed and direction + DEFAULT = "default" # Return results for velocity major, mean flood direction and mean ebb direction class COOPS_Units(Enum): # noqa: N801 @@ -1246,405 +1011,3 @@ def get_coops_stations( raise ValueError("Unknown metadata source specified!") return coops_stations - - -def _before_sleep(retry_state: T.Any) -> None: # pragma: no cover - logger.warning( - "Retrying %s: attempt %s ended with: %s", - retry_state.fn, - retry_state.attempt_number, - retry_state.outcome, - ) - - -RETRY: T.Callable[..., T.Any] = tenacity.retry( - stop=(tenacity.stop_after_delay(90) | tenacity.stop_after_attempt(10)), - wait=tenacity.wait_random(min=2, max=10), - retry=tenacity.retry_if_exception_type(httpx.TransportError), - before_sleep=_before_sleep, -) - - -def _fetch_url( - url: str, - client: httpx.Client, -) -> str: - try: - response = client.get(url) - except Exception: - logger.warning("Failed to retrieve: %s", url) - raise - data = response.text - return data - - -@RETRY -def fetch_url( - url: str, - client: httpx.Client, - rate_limit: multifutures.RateLimit | None = None, - **kwargs: T.Any, -) -> str: - if rate_limit is not None: # pragma: no cover - while rate_limit.reached(): - multifutures.wait() # pragma: no cover - return _fetch_url( - url=url, - client=client, - ) - - -def _parse_coops_responses( - coops_responses: list[multifutures.FutureResult], - executor: multifutures.ExecutorProtocol | None, -) -> list[multifutures.FutureResult]: - # Parse the json files using pandas - # This is a CPU heavy process, so let's use multiprocess - # Not all the urls contain data, so let's filter them out - kwargs = [] - for result in coops_responses: - station_id = result.kwargs["station_id"] # type: ignore[index] - product = result.kwargs["product"] # type: 
ignore[index] - if "error" in result.result: - msg = json.loads(result.result)["error"]["message"] - logger.error(f"{station_id}: Encountered an error response for {result.kwargs}!") - logger.error(f"--> {msg}") - continue - else: - kwargs.append(dict(station_id=station_id, product=product, content=result.result)) - logger.debug("Starting JSON parsing") - results = multifutures.multiprocess(_parse_json, func_kwargs=kwargs, check=False, executor=executor) - multifutures.check_results(results) - logger.debug("Finished JSON parsing") - return results - - -def _coops_date(ts: pd.Timestamp) -> str: - formatted: str = ts.strftime(COOPS_URL_TS_FORMAT) - return formatted - - -def _generate_urls( - station_id: str, - start_date: pd.Timestamp, - end_date: pd.Timestamp, - product: COOPS_Product = COOPS_Product.WATER_LEVEL, - datum: COOPS_TidalDatum = COOPS_TidalDatum.MSL, - units: COOPS_Units = COOPS_Units.METRIC, - interval: COOPS_Interval = COOPS_Interval.NONE, - **aux_params: Any, -) -> list[str]: - if end_date < start_date: - raise ValueError(f"'end_date' must be after 'start_date': {end_date} vs {start_date}") - if end_date == start_date: - return [] - duration = end_date - start_date - periods = duration.days // COOPS_MaxInterval[product][interval].days + 2 - urls = [] - date_range = pd.date_range(start_date, end_date, periods=periods, unit="us", inclusive="both") - params = { - "station": station_id, - "product": product.value, - "datum": datum.value, - "units": units.value, - "time_zone": "gmt", # We always work with UTC/GMT - "format": "json", - "application": "oceanmodeling/stormevents", - } - if interval.value is not None: - params["interval"] = interval.value - params.update(**aux_params) - for start, stop in pairwise(date_range): - params["begin_date"] = _coops_date(start) - params["end_date"] = _coops_date(stop) - # TODO Use httpx - req = requests.Request("GET", COOPS_BASE_URL, params=params) - urls.append(str(req.prepare().url)) - print(urls[-1]) - return urls - - -def _normalize_df(df: pd.DataFrame, product: COOPS_Product) -> pd.DataFrame: - # TODO: Add more info (datum, unit, tz)? - - nos_id = df.attrs["station_id"] - normalized = df.rename(columns=COOPS_ProductFieldsNameMap[product]) - logger.debug("%s: df contains the following columns: %s", nos_id, normalized.columns) - - normalized[normalized == ""] = numpy.nan - normalized = normalized.astype( - {k: v for k, v in COOPS_ProductFieldTypes.items() if k in df.columns}, errors="ignore" - ) - # NOTE: Datum and mean products doesn't have time! - if "time" in normalized.columns: - normalized["time"] = pandas.to_datetime(normalized["time"], utc=True) - normalized.set_index("time", inplace=True) - - return normalized - - -def _parse_json(content: str, station_id: str, product: COOPS_Product) -> pd.DataFrame: - err_msg = "" - content_json = {} - try: - content_json = json.loads(content) - if not content_json: - err_msg = f"{station_id}: The station does not contain any data for product.value!" 
- except json.JSONDecodeError as e: - err_msg = f"{station_id}: Error decoding JSON {str(e)}" - - if err_msg: - logger.error(err_msg) - return pd.DataFrame() - - data = [] - if product == COOPS_Product.CURRENTS_PREDICTIONS: - data = content_json["current_predictions"]["cp"] - else: - data = content_json["data"] - - df = pd.DataFrame(data) - df.attrs["station_id"] = f"COOPS-{station_id}" - df = _normalize_df(df, product) - return df - - -def _group_results( - station_ids: abc.Collection[str], - parsed_responses: list[multifutures.FutureResult], -) -> dict[str, pd.DataFrame]: - # Group per COOPS code - df_groups = collections.defaultdict(list) - for item in parsed_responses: - df_groups[item.kwargs["station_id"]].append(item.result) # type: ignore[index] - - # Concatenate dataframes - dataframes: dict[str, pd.DataFrame] = {} - for station_id in station_ids: - if station_id in df_groups: - df_group = df_groups[station_id] - df = pd.concat(df_group) - df = df.sort_index() - logger.debug("COOPS-%s: Timestamps: %d", station_id, len(df)) - else: - logger.warning("COOPS-%s: No data. Creating a dummy dataframe", station_id) - df = T.cast( - pd.DataFrame, pd.DataFrame(columns=["time"], dtype="datetime64[ns]").set_index("time") - ) - dataframes[station_id] = df - logger.debug("COOPS-%s: Finished conversion to pandas", station_id) - - return dataframes - - -def _retrieve_coops_data( - station_ids: abc.Collection[str], - start_dates: abc.Collection[pd.Timestamp], - end_dates: abc.Collection[pd.Timestamp], - product: COOPS_Product, - datum: COOPS_TidalDatum, - units: COOPS_Units, - interval: COOPS_Interval, - rate_limit: multifutures.RateLimit, - http_client: httpx.Client, - executor: multifutures.ExecutorProtocol | None, - **aux_params: Any, -) -> list[multifutures.FutureResult]: - kwargs = [] - - valid_intervals = COOPS_ProductIntervalMap[product] - if interval not in valid_intervals: - raise ValueError( - "interval must be one of '{}'".format("', '".join(str(v.value) for v in valid_intervals)) - ) - - for station_id, start_date, end_date in zip(station_ids, start_dates, end_dates): - url_kwargs = { - "station_id": station_id, - "start_date": start_date, - "end_date": end_date, - "product": product, - "datum": datum, - "units": units, - "interval": interval, - **aux_params, - } - for url in _generate_urls(**url_kwargs): - if url: - kwargs.append( - dict( - station_id=station_id, - url=url, - client=http_client, - rate_limit=rate_limit, - product=product, - ), - ) - with http_client: - logger.debug("Starting data retrieval") - results = multifutures.multithread( - func=fetch_url, func_kwargs=kwargs, check=False, executor=executor - ) - logger.debug("Finished data retrieval") - multifutures.check_results(results) - return results - - -def _resolve_rate_limit(rate_limit: multifutures.RateLimit | None) -> multifutures.RateLimit: - if rate_limit is None: - rate_limit = multifutures.RateLimit(rate_limit=limits.parse("5/second")) - return rate_limit - - -def _resolve_http_client(http_client: httpx.Client | None) -> httpx.Client: - if http_client is None: - timeout = httpx.Timeout(timeout=10, read=30) - http_client = httpx.Client(timeout=timeout, follow_redirects=True) - return http_client - - -def _fetch_coops( - station_ids: abc.Collection[str], - start_dates: pd.DatetimeIndex, - end_dates: pd.DatetimeIndex, - *, - product: COOPS_Product | str, - datum: COOPS_TidalDatum | str, - units: COOPS_Units | str, - interval: COOPS_Interval | int | str | None, - rate_limit: multifutures.RateLimit | None, - 
http_client: httpx.Client | None, - multiprocessing_executor: multifutures.ExecutorProtocol | None, - multithreading_executor: multifutures.ExecutorProtocol | None, - **aux_params: Any, -) -> dict[str, pd.DataFrame]: - rate_limit = _resolve_rate_limit(rate_limit) - http_client = _resolve_http_client(http_client) - start_dates = _to_utc(start_dates) - end_dates = _to_utc(end_dates) - # Fetch json files from the COOPS website - # We use multithreading in order to be able to use RateLimit + to take advantage of higher performance - - # TODO: Process datetimes for GMT, etc? - - coops_responses: list[multifutures.FutureResult] = _retrieve_coops_data( - station_ids=station_ids, - start_dates=start_dates, - end_dates=end_dates, - product=COOPS_Product(product), - datum=COOPS_TidalDatum(datum), - units=COOPS_Units(units), - interval=COOPS_Interval(interval), - rate_limit=rate_limit, - http_client=http_client, - executor=multithreading_executor, - **aux_params, - ) - # Parse the json files using pandas - # This is a CPU heavy process, so we are using multiprocessing here - parsed_responses: list[multifutures.FutureResult] = _parse_coops_responses( - coops_responses=coops_responses, - executor=multiprocessing_executor, - ) - # OK, now we have a list of dataframes. We need to group them per coops_code, concatenate them and remove duplicates - dataframes = _group_results(station_ids=station_ids, parsed_responses=parsed_responses) - return dataframes - - -def _to_utc(index: pd.DatetimeIndex) -> pd.DatetimeIndex: - if index.tz: - index = index.tz_convert("utc") - else: - index = index.tz_localize("utc") - return index - - -def _to_datetime_index(ts: pd.Timestamp) -> pd.DatetimeIndex: - index = pd.DatetimeIndex([ts]) - return index - - -def _resolve_start_date(now: pd.Timestamp, start_date: DatetimeLike | None) -> pd.DatetimeIndex: - if start_date is None: - resolved_start_date = T.cast(pd.Timestamp, now - pd.Timedelta(days=7)) - else: - resolved_start_date = pd.to_datetime(start_date) - index = _to_datetime_index(resolved_start_date) - return index - - -def _resolve_end_date(now: pd.Timestamp, end_date: DatetimeLike | None) -> pd.DatetimeIndex: - if end_date is None: - resolved_end_date = now - else: - resolved_end_date = pd.to_datetime(end_date) - index = _to_datetime_index(resolved_end_date) - return index - - -def fetch_coops_station( - station_id: str, - start_date: DatetimeLike | None = None, - end_date: DatetimeLike | None = None, - *, - rate_limit: multifutures.RateLimit | None = None, - http_client: httpx.Client | None = None, - multiprocessing_executor: multifutures.ExecutorProtocol | None = None, - multithreading_executor: multifutures.ExecutorProtocol | None = None, - product: COOPS_Product | str = COOPS_Product.WATER_LEVEL, - datum: COOPS_TidalDatum | str = COOPS_TidalDatum.MSL, - units: COOPS_Units | str = COOPS_Units.METRIC, - interval: COOPS_Interval | str | int | None = COOPS_Interval.NONE, - **aux_params: Any, -) -> pd.DataFrame: - """ - Make a query to the COOPS API for tide gauge data for ``station_id`` - and return the results as a ``pandas.Dataframe``. - - ``start_date`` and ``end_date`` can be of any type that is valid for ``pandas.to_datetime()``. - If ``start_date`` or ``end_date`` are timezone-aware timestamps they are coersed to UTC. - The returned data are always in UTC. - - Each query to the COOPS API can request up to 30 days of data. - When we request data for larger time spans, multiple requests are made. 
- This is where ``rate_limit``, ``multiprocessing_executor`` and ``multithreading_executor`` - come into play. - - In order to make the data retrieval more efficient, a multithreading pool is spawned - and the requests are executed concurrently, while adhering to the ``rate_limit``. - The parsing of the JSON responses is a CPU heavy process so it is made within a multiprocessing Pool. - - If no arguments are specified, then sensible defaults are being used, but if the pools need to be - configured, an `executor` instance needs to be passed as an argument. For example: - - .. code-block:: python - - executor = concurrent.futures.ProcessPoolExecutor(max_workers=4) - df = fetch_coops_station("acap", multiprocessing_executor=executor) - - :param station_id: The station identifier. In COOPS terminology, this is called ``coops_code``. - :param start_date: The starting date of the query. Defaults to 7 days ago. - :param end_date: The finishing date of the query. Defaults to "now". - :param rate_limit: The rate limit for making requests to the COOPS servers. Defaults to 5 requests/second. - :param http_client: The ``httpx.Client``. - :param multiprocessing_executor: An instance of a class implementing the ``concurrent.futures.Executor`` API. - :param multithreading_executor: An instance of a class implementing the ``concurrent.futures.Executor`` API. - """ - logger.info("COOPS-%s: Starting scraping: %s - %s", station_id, start_date, end_date) - now = pd.Timestamp.now("utc") - df = _fetch_coops( - station_ids=[station_id], - start_dates=_resolve_start_date(now, start_date), - end_dates=_resolve_end_date(now, end_date), - product=COOPS_Product(product), - datum=COOPS_TidalDatum(datum), - units=COOPS_Units(units), - interval=COOPS_Interval(interval), - **aux_params, - rate_limit=rate_limit, - http_client=http_client, - multiprocessing_executor=multiprocessing_executor, - multithreading_executor=multithreading_executor, - )[station_id] - logger.info("COOPS-%s: Finished scraping: %s - %s", station_id, start_date, end_date) - return df diff --git a/searvey/ioc.py b/searvey/ioc.py index 815b8f9..2730f5a 100644 --- a/searvey/ioc.py +++ b/searvey/ioc.py @@ -9,33 +9,26 @@ # We parse all 3 of them and we merge them. 
 from __future__ import annotations
 
-import collections
 import functools
 import io
 import logging
-import typing as T
 import warnings
-from collections import abc
 from typing import Optional
 from typing import Union
 
 import bs4
 import geopandas as gpd
 import html5lib  # noqa: F401 # imported but unused
-import httpx
 import limits
 import lxml  # noqa: F401 # imported but unused
-import multifutures
 import pandas as pd
 import requests
-import tenacity
 import xarray as xr
 from deprecated import deprecated
 from shapely.geometry import MultiPolygon
 from shapely.geometry import Polygon
 
 from .custom_types import DateTimeLike
-from .custom_types import DatetimeLike
 from .multi import multiprocess
 from .multi import multithread
 from .rate_limit import RateLimit
@@ -43,7 +36,6 @@
 from .utils import get_region
 from .utils import merge_datasets
 from .utils import NOW
-from .utils import pairwise
 from .utils import resolve_timestamp
 
 
@@ -388,338 +380,3 @@ def get_ioc_data(
     # Do the final merging
     ds = xr.merge(datasets)
     return ds
-
-
-############## API ################
-
-
-BASE_URL = "https://www.ioc-sealevelmonitoring.org/service.php?query=data&timestart={timestart}&timestop={timestop}&code={ioc_code}"
-
-IOC_URL_TS_FORMAT = "%Y-%m-%dT%H:%M:%S"
-IOC_JSON_TS_FORMAT = "%Y-%m-%d %H:%M:%S"
-
-
-def _before_sleep(retry_state: T.Any) -> None:  # pragma: no cover
-    logger.warning(
-        "Retrying %s: attempt %s ended with: %s",
-        retry_state.fn,
-        retry_state.attempt_number,
-        retry_state.outcome,
-    )
-
-
-RETRY: T.Callable[..., T.Any] = tenacity.retry(
-    stop=(tenacity.stop_after_delay(90) | tenacity.stop_after_attempt(10)),
-    wait=tenacity.wait_random(min=2, max=10),
-    retry=tenacity.retry_if_exception_type(httpx.TransportError),
-    before_sleep=_before_sleep,
-)
-
-
-def _fetch_url(
-    url: str,
-    client: httpx.Client,
-) -> str:
-    try:
-        response = client.get(url)
-    except Exception:
-        logger.warning("Failed to retrieve: %s", url)
-        raise
-    data = response.text
-    return data
-
-
-@RETRY
-def fetch_url(
-    url: str,
-    client: httpx.Client,
-    rate_limit: multifutures.RateLimit | None = None,
-    **kwargs: T.Any,
-) -> str:
-    if rate_limit is not None:  # pragma: no cover
-        while rate_limit.reached():
-            multifutures.wait()  # pragma: no cover
-    return _fetch_url(
-        url=url,
-        client=client,
-    )
-
-
-def _parse_ioc_responses(
-    ioc_responses: list[multifutures.FutureResult],
-    executor: multifutures.ExecutorProtocol | None,
-) -> list[multifutures.FutureResult]:
-    # Parse the json files using pandas
-    # This is a CPU heavy process, so let's use multiprocess
-    # Not all the urls contain data, so let's filter them out
-    kwargs = []
-    for result in ioc_responses:
-        station_id = result.kwargs["station_id"]  # type: ignore[index]
-        # if a url doesn't have any data instead of a 404, it returns an empty list `[]`
-        if result.result == "[]":
-            continue
-        # For some stations though we get a json like this:
-        # '[{"error":"code \'blri\' not found"}]'
-        # '[{"error":"code \'bmda2\' not found"}]'
-        # we should ignore these, too
-        elif result.result == f"""[{{"error":"code '{station_id}' not found"}}]""":
-            continue
-        # And if the IOC code does not match some pattern (5 letters?) then we get this error
-        elif result.result == '[{"error":"Incorrect code"}]':
-            continue
-        else:
-            kwargs.append(dict(station_id=station_id, content=io.StringIO(result.result)))
-    logger.debug("Starting JSON parsing")
-    results = multifutures.multiprocess(_parse_json, func_kwargs=kwargs, check=False, executor=executor)
-    multifutures.check_results(results)
-    logger.debug("Finished JSON parsing")
-    return results
-
-
-def _ioc_date(ts: pd.Timestamp) -> str:
-    formatted: str = ts.strftime(IOC_URL_TS_FORMAT)
-    return formatted
-
-
-def _generate_urls(
-    station_id: str,
-    start_date: pd.Timestamp,
-    end_date: pd.Timestamp,
-) -> list[str]:
-    if end_date < start_date:
-        raise ValueError(f"'end_date' must be after 'start_date': {end_date} vs {start_date}")
-    if end_date == start_date:
-        return []
-    duration = end_date - start_date
-    periods = duration.days // 30 + 2
-    urls = []
-    date_range = pd.date_range(start_date, end_date, periods=periods, unit="us", inclusive="both")
-    for start, stop in pairwise(date_range):
-        timestart = _ioc_date(start)
-        timestop = _ioc_date(stop)
-        url = BASE_URL.format(ioc_code=station_id, timestart=timestart, timestop=timestop)
-        urls.append(url)
-    return urls
-
-
-def _normalize_df(df: pd.DataFrame) -> pd.DataFrame:
-    normalized = (
-        df[df.sensor.isin(IOC_STATION_DATA_COLUMNS.values())]
-        .assign(stime=pd.DatetimeIndex(pd.to_datetime(df.stime.str.strip(), format=IOC_JSON_TS_FORMAT)))
-        .rename(columns={"stime": "time"})
-    )
-    # Occasionaly IOC contains complete garbage. E.g. duplicate timestamps on the same sensor. We should drop those.
-    # https://www.ioc-sealevelmonitoring.org/service.php?query=data&timestart=2022-03-12T11:03:40&timestop=2022-04-11T09:04:26&code=acnj
-    duplicated_timestamps = normalized[["time", "sensor"]].duplicated()
-    if duplicated_timestamps.sum() > 0:
-        normalized = normalized[~duplicated_timestamps]
-        logger.warning(
-            "%s: Dropped duplicates: %d rows", normalized.attrs["station_id"], duplicated_timestamps.sum()
-        )
-    normalized = normalized.pivot(index="time", columns="sensor", values="slevel")
-    normalized._mgr.items.name = ""
-    return normalized
-
-
-def _parse_json(content: str, station_id: str) -> pd.DataFrame:
-    df = pd.read_json(content, orient="records")
-    df.attrs["station_id"] = f"IOC-{station_id}"
-    df = _normalize_df(df)
-    return df
-
-
-def _group_results(
-    station_ids: abc.Collection[str],
-    parsed_responses: list[multifutures.FutureResult],
-) -> dict[str, pd.DataFrame]:
-    # Group per IOC code
-    df_groups = collections.defaultdict(list)
-    for item in parsed_responses:
-        df_groups[item.kwargs["station_id"]].append(item.result)  # type: ignore[index]
-
-    # Concatenate dataframes and remove duplicates
-    dataframes: dict[str, pd.DataFrame] = {}
-    for station_id in station_ids:
-        if station_id in df_groups:
-            df_group = df_groups[station_id]
-            df = pd.concat(df_group)
-            df = df.sort_index()
-            logger.debug("IOC-%s: Total timestamps : %d", station_id, len(df))
-            df = df[~df.index.duplicated()]
-            logger.debug("IOC-%s: Unique timestamps: %d", station_id, len(df))
-        else:
-            logger.warning("IOC-%s: No data.
Creating a dummy dataframe", station_id) - df = T.cast( - pd.DataFrame, pd.DataFrame(columns=["time"], dtype="datetime64[ns]").set_index("time") - ) - dataframes[station_id] = df - logger.debug("IOC-%s: Finished conversion to pandas", station_id) - - return dataframes - - -def _retrieve_ioc_data( - station_ids: abc.Collection[str], - start_dates: abc.Collection[pd.Timestamp], - end_dates: abc.Collection[pd.Timestamp], - rate_limit: multifutures.RateLimit, - http_client: httpx.Client, - executor: multifutures.ExecutorProtocol | None, -) -> list[multifutures.FutureResult]: - kwargs = [] - for station_id, start_date, end_date in zip(station_ids, start_dates, end_dates): - for url in _generate_urls(station_id=station_id, start_date=start_date, end_date=end_date): - if url: - kwargs.append( - dict( - station_id=station_id, - url=url, - client=http_client, - rate_limit=rate_limit, - ), - ) - with http_client: - logger.debug("Starting data retrieval") - results = multifutures.multithread( - func=fetch_url, func_kwargs=kwargs, check=False, executor=executor - ) - logger.debug("Finished data retrieval") - multifutures.check_results(results) - return results - - -def _resolve_rate_limit(rate_limit: multifutures.RateLimit | None) -> multifutures.RateLimit: - if rate_limit is None: - rate_limit = multifutures.RateLimit(rate_limit=limits.parse("5/second")) - return rate_limit - - -def _resolve_http_client(http_client: httpx.Client | None) -> httpx.Client: - if http_client is None: - timeout = httpx.Timeout(timeout=10, read=30) - http_client = httpx.Client(timeout=timeout) - return http_client - - -def _fetch_ioc( - station_ids: abc.Collection[str], - start_dates: pd.DatetimeIndex, - end_dates: pd.DatetimeIndex, - *, - rate_limit: multifutures.RateLimit | None, - http_client: httpx.Client | None, - multiprocessing_executor: multifutures.ExecutorProtocol | None, - multithreading_executor: multifutures.ExecutorProtocol | None, -) -> dict[str, pd.DataFrame]: - rate_limit = _resolve_rate_limit(rate_limit) - http_client = _resolve_http_client(http_client) - start_dates = _to_utc(start_dates) - end_dates = _to_utc(end_dates) - # Fetch json files from the IOC website - # We use multithreading in order to be able to use RateLimit + to take advantage of higher performance - ioc_responses: list[multifutures.FutureResult] = _retrieve_ioc_data( - station_ids=station_ids, - start_dates=start_dates, - end_dates=end_dates, - rate_limit=rate_limit, - http_client=http_client, - executor=multithreading_executor, - ) - # Parse the json files using pandas - # This is a CPU heavy process, so we are using multiprocessing here - parsed_responses: list[multifutures.FutureResult] = _parse_ioc_responses( - ioc_responses=ioc_responses, - executor=multiprocessing_executor, - ) - # OK, now we have a list of dataframes. 
We need to group them per ioc_code, concatenate them and remove duplicates - dataframes = _group_results(station_ids=station_ids, parsed_responses=parsed_responses) - return dataframes - - -def _to_utc(index: pd.DatetimeIndex) -> pd.DatetimeIndex: - if index.tz: - index = index.tz_convert("utc") - else: - index = index.tz_localize("utc") - return index - - -def _to_datetime_index(ts: pd.Timestamp) -> pd.DatetimeIndex: - index = pd.DatetimeIndex([ts]) - return index - - -def _resolve_start_date(now: pd.Timestamp, start_date: DatetimeLike | None) -> pd.DatetimeIndex: - if start_date is None: - resolved_start_date = T.cast(pd.Timestamp, now - pd.Timedelta(days=7)) - else: - resolved_start_date = pd.to_datetime(start_date) - index = _to_datetime_index(resolved_start_date) - return index - - -def _resolve_end_date(now: pd.Timestamp, end_date: DatetimeLike | None) -> pd.DatetimeIndex: - if end_date is None: - resolved_end_date = now - else: - resolved_end_date = pd.to_datetime(end_date) - index = _to_datetime_index(resolved_end_date) - return index - - -def fetch_ioc_station( - station_id: str, - start_date: DatetimeLike | None = None, - end_date: DatetimeLike | None = None, - *, - rate_limit: multifutures.RateLimit | None = None, - http_client: httpx.Client | None = None, - multiprocessing_executor: multifutures.ExecutorProtocol | None = None, - multithreading_executor: multifutures.ExecutorProtocol | None = None, -) -> pd.DataFrame: - """ - Make a query to the IOC API for tide gauge data for ``station_id`` - and return the results as a ``pandas.Dataframe``. - - ``start_date`` and ``end_date`` can be of any type that is valid for ``pandas.to_datetime()``. - If ``start_date`` or ``end_date`` are timezone-aware timestamps they are coersed to UTC. - The returned data are always in UTC. - - Each query to the IOC API can request up to 30 days of data. - When we request data for larger time spans, multiple requests are made. - This is where ``rate_limit``, ``multiprocessing_executor`` and ``multithreading_executor`` - come into play. - - In order to make the data retrieval more efficient, a multithreading pool is spawned - and the requests are executed concurrently, while adhering to the ``rate_limit``. - The parsing of the JSON responses is a CPU heavy process so it is made within a multiprocessing Pool. - - If no arguments are specified, then sensible defaults are being used, but if the pools need to be - configured, an `executor` instance needs to be passed as an argument. For example: - - .. code-block:: python - - executor = concurrent.futures.ProcessPoolExecutor(max_workers=4) - df = fetch_ioc_station("acap", multiprocessing_executor=executor) - - :param station_id: The station identifier. In IOC terminology, this is called ``ioc_code``. - :param start_date: The starting date of the query. Defaults to 7 days ago. - :param end_date: The finishing date of the query. Defaults to "now". - :param rate_limit: The rate limit for making requests to the IOC servers. Defaults to 5 requests/second. - :param http_client: The ``httpx.Client``. - :param multiprocessing_executor: An instance of a class implementing the ``concurrent.futures.Executor`` API. - :param multithreading_executor: An instance of a class implementing the ``concurrent.futures.Executor`` API. 
- """ - logger.info("IOC-%s: Starting scraping: %s - %s", station_id, start_date, end_date) - now = pd.Timestamp.now("utc") - df = _fetch_ioc( - station_ids=[station_id], - start_dates=_resolve_start_date(now, start_date), - end_dates=_resolve_end_date(now, end_date), - rate_limit=rate_limit, - http_client=http_client, - multiprocessing_executor=multiprocessing_executor, - multithreading_executor=multithreading_executor, - )[station_id] - logger.info("IOC-%s: Finished scraping: %s - %s", station_id, start_date, end_date) - return df diff --git a/tests/common_test.py b/tests/common_test.py new file mode 100644 index 0000000..7fa4a1a --- /dev/null +++ b/tests/common_test.py @@ -0,0 +1,95 @@ +from __future__ import annotations + +import httpx +import multifutures +import pandas as pd +import pytest + +from searvey._common import _fetch_url +from searvey._common import _fetch_url_main +from searvey._common import _resolve_end_date +from searvey._common import _resolve_http_client +from searvey._common import _resolve_rate_limit +from searvey._common import _resolve_start_date +from searvey._common import _to_utc + + +def test_fetch_url(): + url = "https://google.com" + response = _fetch_url_main(url, client=httpx.Client()) + assert "The document has moved" in response + + +def test_fetch_url_failure(): + url = "http://localhost" + with pytest.raises(httpx.ConnectError) as exc: + _fetch_url_main(url, client=httpx.Client(timeout=0)) + assert "in progress" in str(exc) + + +def test_fetch_url_full(): + url = "https://google.com" + response = _fetch_url(url, client=httpx.Client(), rate_limit=multifutures.RateLimit()) + assert "The document has moved" in response + + +def test_resolve_rate_limit_returns_object_as_is(): + rate_limit = multifutures.RateLimit() + resolved = _resolve_rate_limit(rate_limit=rate_limit) + assert resolved is rate_limit + + +def test_resolve_http_client_returns_object_as_is(): + http_client = httpx.Client(timeout=httpx.Timeout(timeout=10, read=30)) + resolved = _resolve_http_client(http_client=http_client) + assert resolved is http_client + + +def test_to_utc(): + # timestamp + ts = pd.Timestamp("2004") + ts_utc = pd.Timestamp("2004", tz="utc") + assert _to_utc(ts) == ts_utc + # DatetimeIndex + index = pd.DatetimeIndex(["2004"]) + index_utc = pd.Timestamp("2004", tz="utc") + assert _to_utc(index) == index_utc + + +def test_to_utc_with_tz(): + # timestamp + ts_cet = pd.Timestamp("2004-01-01T01:00:00", tz="CET") + ts_utc = pd.Timestamp("2004-01-01T00:00:00", tz="utc") + assert _to_utc(ts_cet) == ts_utc + # DatetimeIndex + index_cet = pd.DatetimeIndex([ts_cet]) + index_utc = pd.DatetimeIndex([ts_utc]) + assert _to_utc(index_cet) == index_utc + + +def test_resolve_start_date_default(): + now = pd.Timestamp.now(tz="utc") + expected = now - pd.Timedelta(days=7) + resolved = _resolve_start_date(now=now, start_date=None) + assert resolved == expected + + +def test_resolve_start_date_specific_value(): + now = pd.Timestamp.now(tz="utc") + start_date = "2004" + expected = pd.DatetimeIndex([start_date]) + assert _resolve_start_date(now, start_date) == expected + + +def test_resolve_end_date_default(): + now = pd.Timestamp.now(tz="utc") + expected = now + resolved = _resolve_end_date(now=now, end_date=None) + assert resolved == expected + + +def test_resolve_end_date_specific_value(): + now = pd.Timestamp.now(tz="utc") + end_date = "2004" + expected = pd.DatetimeIndex([end_date]) + assert _resolve_end_date(now, end_date) == expected diff --git a/tests/ioc_api_test.py 
b/tests/ioc_api_test.py index 1d04ae9..06b6caa 100644 --- a/tests/ioc_api_test.py +++ b/tests/ioc_api_test.py @@ -2,40 +2,12 @@ import unittest.mock -import httpx -import multifutures import pandas as pd import pytest from searvey import fetch_ioc_station -from searvey.ioc import _fetch_url -from searvey.ioc import _generate_urls -from searvey.ioc import _ioc_date -from searvey.ioc import _resolve_end_date -from searvey.ioc import _resolve_http_client -from searvey.ioc import _resolve_rate_limit -from searvey.ioc import _resolve_start_date -from searvey.ioc import _to_utc -from searvey.ioc import fetch_url - - -def test_fetch_url(): - url = "https://google.com" - response = _fetch_url(url, client=httpx.Client()) - assert "The document has moved" in response - - -def test_fetch_url_failure(): - url = "http://localhost" - with pytest.raises(httpx.ConnectError) as exc: - _fetch_url(url, client=httpx.Client(timeout=0)) - assert "in progress" in str(exc) - - -def test_fetch_url_full(): - url = "https://google.com" - response = fetch_url(url, client=httpx.Client(), rate_limit=multifutures.RateLimit()) - assert "The document has moved" in response +from searvey._ioc_api import _generate_urls +from searvey._ioc_api import _ioc_date def test_generate_urls(): @@ -80,69 +52,7 @@ def test_generate_urls_raises_when_end_date_before_start_date(): assert str(exc.value) == f"'end_date' must be after 'start_date': {end_date} vs {start_date}" -def test_resolve_rate_limit_returns_object_as_is(): - rate_limit = multifutures.RateLimit() - resolved = _resolve_rate_limit(rate_limit=rate_limit) - assert resolved is rate_limit - - -def test_resolve_http_client_returns_object_as_is(): - http_client = httpx.Client(timeout=httpx.Timeout(timeout=10, read=30)) - resolved = _resolve_http_client(http_client=http_client) - assert resolved is http_client - - -def test_to_utc(): - # timestamp - ts = pd.Timestamp("2004") - ts_utc = pd.Timestamp("2004", tz="utc") - assert _to_utc(ts) == ts_utc - # DatetimeIndex - index = pd.DatetimeIndex(["2004"]) - index_utc = pd.Timestamp("2004", tz="utc") - assert _to_utc(index) == index_utc - - -def test_to_utc_with_tz(): - # timestamp - ts_cet = pd.Timestamp("2004-01-01T01:00:00", tz="CET") - ts_utc = pd.Timestamp("2004-01-01T00:00:00", tz="utc") - assert _to_utc(ts_cet) == ts_utc - # DatetimeIndex - index_cet = pd.DatetimeIndex([ts_cet]) - index_utc = pd.DatetimeIndex([ts_utc]) - assert _to_utc(index_cet) == index_utc - - -def test_resolve_start_date_default(): - now = pd.Timestamp.now(tz="utc") - expected = now - pd.Timedelta(days=7) - resolved = _resolve_start_date(now=now, start_date=None) - assert resolved == expected - - -def test_resolve_start_date_specific_value(): - now = pd.Timestamp.now(tz="utc") - start_date = "2004" - expected = pd.DatetimeIndex([start_date]) - assert _resolve_start_date(now, start_date) == expected - - -def test_resolve_end_date_default(): - now = pd.Timestamp.now(tz="utc") - expected = now - resolved = _resolve_end_date(now=now, end_date=None) - assert resolved == expected - - -def test_resolve_end_date_specific_value(): - now = pd.Timestamp.now(tz="utc") - end_date = "2004" - expected = pd.DatetimeIndex([end_date]) - assert _resolve_end_date(now, end_date) == expected - - -@unittest.mock.patch("searvey.ioc.fetch_url") +@unittest.mock.patch("searvey._common._fetch_url") def test_fetch_ioc_station_empty_responses(mocked_fetch_url): station_id = "blri" start_date = "2023-09-01" @@ -163,7 +73,7 @@ def test_fetch_ioc_station_empty_responses(mocked_fetch_url): 
assert df.empty -@unittest.mock.patch("searvey.ioc.fetch_url") +@unittest.mock.patch("searvey._common._fetch_url") def test_fetch_ioc_station_normal_call(mocked_fetch_url): station_id = "acnj" start_date = "2022-03-12T11:04:00" @@ -185,7 +95,7 @@ def test_fetch_ioc_station_normal_call(mocked_fetch_url): assert len(df) == 3 -@unittest.mock.patch("searvey.ioc.fetch_url") +@unittest.mock.patch("searvey._common._fetch_url") def test_fetch_ioc_station_duplicated_timestamps(mocked_fetch_url): station_id = "acnj" start_date = "2022-03-12T11:04:00" From a6906cc71f8d40ca52e5dceb4282e8dda68cb4b9 Mon Sep 17 00:00:00 2001 From: "Soroosh.Mani" Date: Mon, 5 Feb 2024 21:57:46 -0500 Subject: [PATCH 05/15] tests: update coops for new API --- tests/coops_test.py | 188 +++++++++++++++++++++++++++++++++++++++++++- 1 file changed, 187 insertions(+), 1 deletion(-) diff --git a/tests/coops_test.py b/tests/coops_test.py index a846107..8c2202c 100644 --- a/tests/coops_test.py +++ b/tests/coops_test.py @@ -1,8 +1,21 @@ +import contextlib from datetime import datetime +from datetime import timedelta +from urllib.parse import quote +import httpx +import numpy as np +import pandas as pd import pytest +import pytz from shapely.geometry import box +from searvey import fetch_coops_station +from searvey._coops_api import _coops_date +from searvey._coops_api import _generate_urls +from searvey._coops_api import COOPS_ProductFieldsNameMap +from searvey._coops_api import COOPS_ProductFieldTypes +from searvey.coops import COOPS_Product from searvey.coops import coops_product_within_region from searvey.coops import COOPS_Station from searvey.coops import coops_stations @@ -49,6 +62,7 @@ def test_coops_stations_main_api(): assert len(stations_main_api) > 0 assert list(stations_main_api.columns) == [ "nws_id", + "station_type", "name", "state", "lon", @@ -71,7 +85,7 @@ def test_coops_stations_main_api(): with pytest.raises(ValueError) as excinfo: get_coops_stations(metadata_source="someothersource") - assert "not a valid stationmetadatasource" in str(excinfo.value).lower() + assert "not a valid coops_stationmetadatasource" in str(excinfo.value).lower() @pytest.mark.vcr @@ -83,6 +97,7 @@ def test_coops_stations_within_region_main_api(): assert len(stations) > 0 assert list(stations.columns) == [ "nws_id", + "station_type", "name", "state", "lon", @@ -165,3 +180,174 @@ def test_coops_predictions_product(): ) assert len(data["t"]) > 0 + + +def test_generate_urls(): + station_id = "AAA" + start_date = pd.Timestamp("2023-01-01") + end_date = pd.Timestamp("2023-06-01") + urls = _generate_urls( + station_id=station_id, + start_date=start_date, + end_date=end_date, + ) + assert len(urls) == 6 + assert all(isinstance(url, httpx.URL) for url in urls) + assert all(station_id in str(url) for url in urls) + assert quote(_coops_date(start_date)) in str(urls[0]) + assert quote(_coops_date(end_date)) in str(urls[-1]) + + +def test_generate_urls_raises_common_start_date_and_end_date(): + station_id = "AAA" + date = pd.Timestamp("2023-06-01") + urls = _generate_urls( + station_id=station_id, + start_date=date, + end_date=date, + ) + assert len(urls) == 0 + assert not urls + assert urls == [] + + +def test_generate_urls_raises_when_end_date_before_start_date(): + start_date = pd.Timestamp("2023-01-01") + end_date = pd.Timestamp("2022-01-01") + with pytest.raises(ValueError) as exc: + _generate_urls( + station_id="aaaa", + start_date=start_date, + end_date=end_date, + ) + assert str(exc.value) == f"'end_date' must be after 'start_date': {end_date} 
vs {start_date}" + + +@pytest.mark.parametrize( + "station_id, product", + [ + (8654467, "water_level"), + (8636580, "one_minute_water_level"), + (8654467, "predictions"), + (8575431, "air_gap"), + (8654467, "wind"), + (8654467, "air_pressure"), + (8654467, "air_temperature"), + (8575437, "visibility"), + (8720233, "humidity"), + (8654467, "water_temperature"), + (8737048, "conductivity"), + ("cb1101", "currents"), + ("cb1101", "currents_predictions"), + (8654467, "datums"), + # (8636580, "hourly_height"), # Default dates won't work! + # (8636580, "high_low"), # Default dates won't work! + # (8518750, "monthly_mean"), # Default dates won't work! + # (8720233, "salinity"), # Can't find stations with this data! + # (8636580, "daily_mean"), # Not supported by searvey + ], +) +def test_coops_data_products_default_args(station_id, product): + df = fetch_coops_station(station_id, product=product) + assert all(col in COOPS_ProductFieldsNameMap[COOPS_Product(product)].values() for col in df.columns) + assert all( + df.dtypes[col] == COOPS_ProductFieldTypes[col] + for col in df.columns + if df.dtypes[col] != np.dtype("O") + ) + + +@pytest.fixture +def now_utc(): + # Seconds are truncated for COOPS query url + return pd.Timestamp.now("utc").floor("min") + + +@pytest.mark.parametrize( + "days_before_now, station_id, product", + [ + (7, 8654467, "water_level"), + (60, 8636580, "hourly_height"), + (60, 8636580, "high_low"), + (60, 8518750, "monthly_mean"), + (7, 8636580, "one_minute_water_level"), + (7, 8654467, "predictions"), + (7, 8575431, "air_gap"), + (7, 8654467, "wind"), + (7, 8654467, "air_pressure"), + (7, 8654467, "air_temperature"), + (7, 8575437, "visibility"), + (7, 8720233, "humidity"), + (7, 8654467, "water_temperature"), + (7, 8737048, "conductivity"), + (7, "cb1101", "currents"), + (7, "cb1101", "currents_predictions"), + (7, 8654467, "datums"), + # (7, 8720233, "salinity"), + # (30, 8636580, "daily_mean"), + ], +) +def test_coops_data_products_w_date_input(now_utc, days_before_now, station_id, product): + start_date = now_utc - timedelta(days=days_before_now) + end_date = now_utc + + df = fetch_coops_station( + station_id, + product=product, + start_date=start_date, + end_date=end_date, + ) + + assert all(col in COOPS_ProductFieldsNameMap[COOPS_Product(product)].values() for col in df.columns) + assert all( + df.dtypes[col] == COOPS_ProductFieldTypes[col] + for col in df.columns + if df.dtypes[col] != np.dtype("O") + ) + + if df.index.name == "time": + # When selecting between intervals one time prior to the first + # is also included (at most 1 out of range) + assert (~((start_date <= df.index) & (df.index <= end_date))).sum() <= 1 + assert df.index.tz is not None + assert df.index.tz.utcoffset(df.index[0]) == timedelta() + + +@pytest.mark.parametrize( + "msg_idx, date", + [ + (None, pd.Timestamp.now("utc")), + (None, pd.Timestamp.now("gmt")), + (None, pd.Timestamp.now(pytz.FixedOffset(0))), + (0, pd.Timestamp.now("est")), + (0, pd.Timestamp.now(pytz.FixedOffset(4))), + (0, pd.Timestamp.now(pytz.FixedOffset(-4))), + (1, pd.Timestamp.now()), + (1, datetime.now()), + ], +) +def test_coops_warn_utc(msg_idx, date): + msgs = ["Converting to UTC", "Assuming UTC"] + + warn_type = None + warn_count = 0 + warn_msg = "" + ctx = contextlib.nullcontext() + if msg_idx is not None: + warn_type = UserWarning + warn_count = 2 # One for start one for end! 
+        warn_msg = msgs[msg_idx]
+        ctx = pytest.warns(warn_type, match=warn_msg)
+
+    with ctx as record:
+        fetch_coops_station(
+            8654467,
+            product="water_level",
+            start_date=date - timedelta(days=7),
+            end_date=date,
+        )
+
+    if record is not None:
+        assert len(record) == warn_count
+        if warn_count:
+            assert all(warn_msg in r.message.args[0] for r in record)

From b888b4ba1c852257ce61401cc7e73fd24a889c7e Mon Sep 17 00:00:00 2001
From: Panos Mavrogiorgos
Date: Fri, 14 Jun 2024 16:12:50 +0300
Subject: [PATCH 06/15] chore: Update pre-commit

---
 .pre-commit-config.yaml | 6 +++---
 1 file changed, 3 insertions(+), 3 deletions(-)

diff --git a/.pre-commit-config.yaml b/.pre-commit-config.yaml
index 3c39871..fdaf062 100644
--- a/.pre-commit-config.yaml
+++ b/.pre-commit-config.yaml
@@ -41,13 +41,13 @@ repos:
       - id: "shellcheck"
 
   - repo: "https://github.com/python-jsonschema/check-jsonschema"
-    rev: "0.28.3"
+    rev: "0.28.5"
     hooks:
       - id: "check-github-workflows"
       - id: "check-readthedocs"
 
   - repo: "https://github.com/asottile/reorder_python_imports"
-    rev: "v3.12.0"
+    rev: "v3.13.0"
     hooks:
       - id: "reorder-python-imports"
         args:
@@ -60,7 +60,7 @@ repos:
 
   - repo: "https://github.com/charliermarsh/ruff-pre-commit"
     # Ruff version.
-    rev: 'v0.4.4'
+    rev: 'v0.4.9'
     hooks:
       - id: "ruff"

From b769ea32676a380e5b2435d5208073aa9b109b7d Mon Sep 17 00:00:00 2001
From: Panos Mavrogiorgos
Date: Mon, 17 Jun 2024 16:14:46 +0300
Subject: [PATCH 07/15] tests: switch to nbmake instead of nbconvert

---
 Makefile                          |   2 +-
 examples/IOC_data.ipynb           |   2 +-
 examples/USGS_data.ipynb          |   2 +-
 examples/coops_data.ipynb         |   2 +-
 poetry.lock                       | 882 ++++++++++++++----------
 pyproject.toml                    |   5 +-
 requirements/requirements-dev.txt |  62 +--
 requirements/requirements.txt     |  26 +-
 8 files changed, 469 insertions(+), 514 deletions(-)

diff --git a/Makefile b/Makefile
index 8379167..0e3115e 100644
--- a/Makefile
+++ b/Makefile
@@ -27,7 +27,7 @@ clean_notebooks:
 	pre-commit run nbstripout -a
 
 exec_notebooks:
-	python -m nbconvert --to notebook --execute --ExecutePreprocessor.kernel_name=python3 --stdout examples/* >/dev/null
+	pytest --nbmake --nbmake-timeout=60 --nbmake-kernel=python3 $$(git ls-files | grep ipynb)
 
 docs:
 	make -C docs html

diff --git a/examples/IOC_data.ipynb b/examples/IOC_data.ipynb
index e0480a0..6d06596 100644
--- a/examples/IOC_data.ipynb
+++ b/examples/IOC_data.ipynb
@@ -267,7 +267,7 @@
    "name": "python",
    "nbconvert_exporter": "python",
    "pygments_lexer": "ipython3",
-   "version": "3.10.9"
+   "version": "3.11.9"
   }
  },
  "nbformat": 4,

diff --git a/examples/USGS_data.ipynb b/examples/USGS_data.ipynb
index 14079c6..84d24b1 100644
--- a/examples/USGS_data.ipynb
+++ b/examples/USGS_data.ipynb
@@ -214,7 +214,7 @@
    "name": "python",
    "nbconvert_exporter": "python",
    "pygments_lexer": "ipython3",
-   "version": "3.10.9"
+   "version": "3.11.9"
   }
  },
  "nbformat": 4,

diff --git a/examples/coops_data.ipynb b/examples/coops_data.ipynb
index 7d95ccb..4ab1298 100644
--- a/examples/coops_data.ipynb
+++ b/examples/coops_data.ipynb
@@ -279,7 +279,7 @@
    "name": "python",
    "nbconvert_exporter": "python",
    "pygments_lexer": "ipython3",
-   "version": "3.10.14"
+   "version": "3.11.9"
   }
  },
  "nbformat": 4,

diff --git a/poetry.lock b/poetry.lock
index 1584a91..22fdf05 100644
--- a/poetry.lock
+++ b/poetry.lock
@@ -13,24 +13,24 @@ files = [
 
 [[package]]
 name = "annotated-types"
-version = "0.6.0"
+version = "0.7.0"
 description = "Reusable constraint types to use with typing.Annotated"
 optional = false
 python-versions = ">=3.8"
 files = [
-    {file = "annotated_types-0.6.0-py3-none-any.whl",
hash = "sha256:0641064de18ba7a25dee8f96403ebc39113d0cb953a01429249d5c7564666a43"}, - {file = "annotated_types-0.6.0.tar.gz", hash = "sha256:563339e807e53ffd9c267e99fc6d9ea23eb8443c08f112651963e24e22f84a5d"}, + {file = "annotated_types-0.7.0-py3-none-any.whl", hash = "sha256:1f02e8b43a8fbbc3f3e0d4f0f4bfc8131bcb4eebe8849b8e5c773f3a1c582a53"}, + {file = "annotated_types-0.7.0.tar.gz", hash = "sha256:aff07c09a53a08bc8cfccb9c85b05f1aa9a2a6f23728d790723543408344ce89"}, ] [[package]] name = "anyio" -version = "4.3.0" +version = "4.4.0" description = "High level compatibility layer for multiple asynchronous event loop implementations" optional = false python-versions = ">=3.8" files = [ - {file = "anyio-4.3.0-py3-none-any.whl", hash = "sha256:048e05d0f6caeed70d731f3db756d35dcc1f35747c8c403364a8332c630441b8"}, - {file = "anyio-4.3.0.tar.gz", hash = "sha256:f75253795a87df48568485fd18cdd2a3fa5c4f7c5be8e5e36637733fce06fed6"}, + {file = "anyio-4.4.0-py3-none-any.whl", hash = "sha256:c1b2d8f46a8a812513012e1107cb0e68c17159a7a594208005a57dc776e1bdc7"}, + {file = "anyio-4.4.0.tar.gz", hash = "sha256:5aadc6a1bbb7cdb0bede386cac5e2940f5e2ff3aa20277e991cf028e0585ce94"}, ] [package.dependencies] @@ -127,33 +127,15 @@ charset-normalizer = ["charset-normalizer"] html5lib = ["html5lib"] lxml = ["lxml"] -[[package]] -name = "bleach" -version = "6.1.0" -description = "An easy safelist-based HTML-sanitizing tool." -optional = false -python-versions = ">=3.8" -files = [ - {file = "bleach-6.1.0-py3-none-any.whl", hash = "sha256:3225f354cfc436b9789c66c4ee030194bee0568fbf9cbdad3bc8b5c26c5f12b6"}, - {file = "bleach-6.1.0.tar.gz", hash = "sha256:0a31f1837963c41d46bbf1331b8778e1308ea0791db03cc4e7357b97cf42a8fe"}, -] - -[package.dependencies] -six = ">=1.9.0" -webencodings = "*" - -[package.extras] -css = ["tinycss2 (>=1.1.0,<1.3)"] - [[package]] name = "certifi" -version = "2024.2.2" +version = "2024.6.2" description = "Python package for providing Mozilla's CA Bundle." 
optional = false python-versions = ">=3.6" files = [ - {file = "certifi-2024.2.2-py3-none-any.whl", hash = "sha256:dc383c07b76109f368f6106eee2b593b04a011ea4d55f652c6ca24a754d1cdd1"}, - {file = "certifi-2024.2.2.tar.gz", hash = "sha256:0569859f95fc761b18b45ef421b1290a0f65f147e92a1e5eb3e635f9a5e4e66f"}, + {file = "certifi-2024.6.2-py3-none-any.whl", hash = "sha256:ddc6c8ce995e6987e7faf5e3f1b02b302836a0e5d98ece18392cb1a36c72ad56"}, + {file = "certifi-2024.6.2.tar.gz", hash = "sha256:3cd43f1c6fa7dedc5899d69d3ad0398fd018ad1a17fba83ddaf78aa46c747516"}, ] [[package]] @@ -474,63 +456,63 @@ coverage = ">=6.0.2" [[package]] name = "coverage" -version = "7.5.1" +version = "7.5.3" description = "Code coverage measurement for Python" optional = false python-versions = ">=3.8" files = [ - {file = "coverage-7.5.1-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:c0884920835a033b78d1c73b6d3bbcda8161a900f38a488829a83982925f6c2e"}, - {file = "coverage-7.5.1-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:39afcd3d4339329c5f58de48a52f6e4e50f6578dd6099961cf22228feb25f38f"}, - {file = "coverage-7.5.1-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:4a7b0ceee8147444347da6a66be737c9d78f3353b0681715b668b72e79203e4a"}, - {file = "coverage-7.5.1-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:4a9ca3f2fae0088c3c71d743d85404cec8df9be818a005ea065495bedc33da35"}, - {file = "coverage-7.5.1-cp310-cp310-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:5fd215c0c7d7aab005221608a3c2b46f58c0285a819565887ee0b718c052aa4e"}, - {file = "coverage-7.5.1-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:4bf0655ab60d754491004a5efd7f9cccefcc1081a74c9ef2da4735d6ee4a6223"}, - {file = "coverage-7.5.1-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:61c4bf1ba021817de12b813338c9be9f0ad5b1e781b9b340a6d29fc13e7c1b5e"}, - {file = "coverage-7.5.1-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:db66fc317a046556a96b453a58eced5024af4582a8dbdc0c23ca4dbc0d5b3146"}, - {file = "coverage-7.5.1-cp310-cp310-win32.whl", hash = "sha256:b016ea6b959d3b9556cb401c55a37547135a587db0115635a443b2ce8f1c7228"}, - {file = "coverage-7.5.1-cp310-cp310-win_amd64.whl", hash = "sha256:df4e745a81c110e7446b1cc8131bf986157770fa405fe90e15e850aaf7619bc8"}, - {file = "coverage-7.5.1-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:796a79f63eca8814ca3317a1ea443645c9ff0d18b188de470ed7ccd45ae79428"}, - {file = "coverage-7.5.1-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:4fc84a37bfd98db31beae3c2748811a3fa72bf2007ff7902f68746d9757f3746"}, - {file = "coverage-7.5.1-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:6175d1a0559986c6ee3f7fccfc4a90ecd12ba0a383dcc2da30c2b9918d67d8a3"}, - {file = "coverage-7.5.1-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:1fc81d5878cd6274ce971e0a3a18a8803c3fe25457165314271cf78e3aae3aa2"}, - {file = "coverage-7.5.1-cp311-cp311-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:556cf1a7cbc8028cb60e1ff0be806be2eded2daf8129b8811c63e2b9a6c43bca"}, - {file = "coverage-7.5.1-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:9981706d300c18d8b220995ad22627647be11a4276721c10911e0e9fa44c83e8"}, - {file = "coverage-7.5.1-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:d7fed867ee50edf1a0b4a11e8e5d0895150e572af1cd6d315d557758bfa9c057"}, - {file = 
"coverage-7.5.1-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:ef48e2707fb320c8f139424a596f5b69955a85b178f15af261bab871873bb987"}, - {file = "coverage-7.5.1-cp311-cp311-win32.whl", hash = "sha256:9314d5678dcc665330df5b69c1e726a0e49b27df0461c08ca12674bcc19ef136"}, - {file = "coverage-7.5.1-cp311-cp311-win_amd64.whl", hash = "sha256:5fa567e99765fe98f4e7d7394ce623e794d7cabb170f2ca2ac5a4174437e90dd"}, - {file = "coverage-7.5.1-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:b6cf3764c030e5338e7f61f95bd21147963cf6aa16e09d2f74f1fa52013c1206"}, - {file = "coverage-7.5.1-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:2ec92012fefebee89a6b9c79bc39051a6cb3891d562b9270ab10ecfdadbc0c34"}, - {file = "coverage-7.5.1-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:16db7f26000a07efcf6aea00316f6ac57e7d9a96501e990a36f40c965ec7a95d"}, - {file = "coverage-7.5.1-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:beccf7b8a10b09c4ae543582c1319c6df47d78fd732f854ac68d518ee1fb97fa"}, - {file = "coverage-7.5.1-cp312-cp312-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:8748731ad392d736cc9ccac03c9845b13bb07d020a33423fa5b3a36521ac6e4e"}, - {file = "coverage-7.5.1-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:7352b9161b33fd0b643ccd1f21f3a3908daaddf414f1c6cb9d3a2fd618bf2572"}, - {file = "coverage-7.5.1-cp312-cp312-musllinux_1_1_i686.whl", hash = "sha256:7a588d39e0925f6a2bff87154752481273cdb1736270642aeb3635cb9b4cad07"}, - {file = "coverage-7.5.1-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:68f962d9b72ce69ea8621f57551b2fa9c70509af757ee3b8105d4f51b92b41a7"}, - {file = "coverage-7.5.1-cp312-cp312-win32.whl", hash = "sha256:f152cbf5b88aaeb836127d920dd0f5e7edff5a66f10c079157306c4343d86c19"}, - {file = "coverage-7.5.1-cp312-cp312-win_amd64.whl", hash = "sha256:5a5740d1fb60ddf268a3811bcd353de34eb56dc24e8f52a7f05ee513b2d4f596"}, - {file = "coverage-7.5.1-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:e2213def81a50519d7cc56ed643c9e93e0247f5bbe0d1247d15fa520814a7cd7"}, - {file = "coverage-7.5.1-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:5037f8fcc2a95b1f0e80585bd9d1ec31068a9bcb157d9750a172836e98bc7a90"}, - {file = "coverage-7.5.1-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:5c3721c2c9e4c4953a41a26c14f4cef64330392a6d2d675c8b1db3b645e31f0e"}, - {file = "coverage-7.5.1-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:ca498687ca46a62ae590253fba634a1fe9836bc56f626852fb2720f334c9e4e5"}, - {file = "coverage-7.5.1-cp38-cp38-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:0cdcbc320b14c3e5877ee79e649677cb7d89ef588852e9583e6b24c2e5072661"}, - {file = "coverage-7.5.1-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:57e0204b5b745594e5bc14b9b50006da722827f0b8c776949f1135677e88d0b8"}, - {file = "coverage-7.5.1-cp38-cp38-musllinux_1_1_i686.whl", hash = "sha256:8fe7502616b67b234482c3ce276ff26f39ffe88adca2acf0261df4b8454668b4"}, - {file = "coverage-7.5.1-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:9e78295f4144f9dacfed4f92935fbe1780021247c2fabf73a819b17f0ccfff8d"}, - {file = "coverage-7.5.1-cp38-cp38-win32.whl", hash = "sha256:1434e088b41594baa71188a17533083eabf5609e8e72f16ce8c186001e6b8c41"}, - {file = "coverage-7.5.1-cp38-cp38-win_amd64.whl", hash = 
"sha256:0646599e9b139988b63704d704af8e8df7fa4cbc4a1f33df69d97f36cb0a38de"}, - {file = "coverage-7.5.1-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:4cc37def103a2725bc672f84bd939a6fe4522310503207aae4d56351644682f1"}, - {file = "coverage-7.5.1-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:fc0b4d8bfeabd25ea75e94632f5b6e047eef8adaed0c2161ada1e922e7f7cece"}, - {file = "coverage-7.5.1-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:0d0a0f5e06881ecedfe6f3dd2f56dcb057b6dbeb3327fd32d4b12854df36bf26"}, - {file = "coverage-7.5.1-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:9735317685ba6ec7e3754798c8871c2f49aa5e687cc794a0b1d284b2389d1bd5"}, - {file = "coverage-7.5.1-cp39-cp39-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:d21918e9ef11edf36764b93101e2ae8cc82aa5efdc7c5a4e9c6c35a48496d601"}, - {file = "coverage-7.5.1-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:c3e757949f268364b96ca894b4c342b41dc6f8f8b66c37878aacef5930db61be"}, - {file = "coverage-7.5.1-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:79afb6197e2f7f60c4824dd4b2d4c2ec5801ceb6ba9ce5d2c3080e5660d51a4f"}, - {file = "coverage-7.5.1-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:d1d0d98d95dd18fe29dc66808e1accf59f037d5716f86a501fc0256455219668"}, - {file = "coverage-7.5.1-cp39-cp39-win32.whl", hash = "sha256:1cc0fe9b0b3a8364093c53b0b4c0c2dd4bb23acbec4c9240b5f284095ccf7981"}, - {file = "coverage-7.5.1-cp39-cp39-win_amd64.whl", hash = "sha256:dde0070c40ea8bb3641e811c1cfbf18e265d024deff6de52c5950677a8fb1e0f"}, - {file = "coverage-7.5.1-pp38.pp39.pp310-none-any.whl", hash = "sha256:6537e7c10cc47c595828b8a8be04c72144725c383c4702703ff4e42e44577312"}, - {file = "coverage-7.5.1.tar.gz", hash = "sha256:54de9ef3a9da981f7af93eafde4ede199e0846cd819eb27c88e2b712aae9708c"}, + {file = "coverage-7.5.3-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:a6519d917abb15e12380406d721e37613e2a67d166f9fb7e5a8ce0375744cd45"}, + {file = "coverage-7.5.3-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:aea7da970f1feccf48be7335f8b2ca64baf9b589d79e05b9397a06696ce1a1ec"}, + {file = "coverage-7.5.3-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:923b7b1c717bd0f0f92d862d1ff51d9b2b55dbbd133e05680204465f454bb286"}, + {file = "coverage-7.5.3-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:62bda40da1e68898186f274f832ef3e759ce929da9a9fd9fcf265956de269dbc"}, + {file = "coverage-7.5.3-cp310-cp310-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:d8b7339180d00de83e930358223c617cc343dd08e1aa5ec7b06c3a121aec4e1d"}, + {file = "coverage-7.5.3-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:25a5caf742c6195e08002d3b6c2dd6947e50efc5fc2c2205f61ecb47592d2d83"}, + {file = "coverage-7.5.3-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:05ac5f60faa0c704c0f7e6a5cbfd6f02101ed05e0aee4d2822637a9e672c998d"}, + {file = "coverage-7.5.3-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:239a4e75e09c2b12ea478d28815acf83334d32e722e7433471fbf641c606344c"}, + {file = "coverage-7.5.3-cp310-cp310-win32.whl", hash = "sha256:a5812840d1d00eafae6585aba38021f90a705a25b8216ec7f66aebe5b619fb84"}, + {file = "coverage-7.5.3-cp310-cp310-win_amd64.whl", hash = "sha256:33ca90a0eb29225f195e30684ba4a6db05dbef03c2ccd50b9077714c48153cac"}, + {file = "coverage-7.5.3-cp311-cp311-macosx_10_9_x86_64.whl", hash 
= "sha256:f81bc26d609bf0fbc622c7122ba6307993c83c795d2d6f6f6fd8c000a770d974"}, + {file = "coverage-7.5.3-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:7cec2af81f9e7569280822be68bd57e51b86d42e59ea30d10ebdbb22d2cb7232"}, + {file = "coverage-7.5.3-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:55f689f846661e3f26efa535071775d0483388a1ccfab899df72924805e9e7cd"}, + {file = "coverage-7.5.3-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:50084d3516aa263791198913a17354bd1dc627d3c1639209640b9cac3fef5807"}, + {file = "coverage-7.5.3-cp311-cp311-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:341dd8f61c26337c37988345ca5c8ccabeff33093a26953a1ac72e7d0103c4fb"}, + {file = "coverage-7.5.3-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:ab0b028165eea880af12f66086694768f2c3139b2c31ad5e032c8edbafca6ffc"}, + {file = "coverage-7.5.3-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:5bc5a8c87714b0c67cfeb4c7caa82b2d71e8864d1a46aa990b5588fa953673b8"}, + {file = "coverage-7.5.3-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:38a3b98dae8a7c9057bd91fbf3415c05e700a5114c5f1b5b0ea5f8f429ba6614"}, + {file = "coverage-7.5.3-cp311-cp311-win32.whl", hash = "sha256:fcf7d1d6f5da887ca04302db8e0e0cf56ce9a5e05f202720e49b3e8157ddb9a9"}, + {file = "coverage-7.5.3-cp311-cp311-win_amd64.whl", hash = "sha256:8c836309931839cca658a78a888dab9676b5c988d0dd34ca247f5f3e679f4e7a"}, + {file = "coverage-7.5.3-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:296a7d9bbc598e8744c00f7a6cecf1da9b30ae9ad51c566291ff1314e6cbbed8"}, + {file = "coverage-7.5.3-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:34d6d21d8795a97b14d503dcaf74226ae51eb1f2bd41015d3ef332a24d0a17b3"}, + {file = "coverage-7.5.3-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:8e317953bb4c074c06c798a11dbdd2cf9979dbcaa8ccc0fa4701d80042d4ebf1"}, + {file = "coverage-7.5.3-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:705f3d7c2b098c40f5b81790a5fedb274113373d4d1a69e65f8b68b0cc26f6db"}, + {file = "coverage-7.5.3-cp312-cp312-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:b1196e13c45e327d6cd0b6e471530a1882f1017eb83c6229fc613cd1a11b53cd"}, + {file = "coverage-7.5.3-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:015eddc5ccd5364dcb902eaecf9515636806fa1e0d5bef5769d06d0f31b54523"}, + {file = "coverage-7.5.3-cp312-cp312-musllinux_1_1_i686.whl", hash = "sha256:fd27d8b49e574e50caa65196d908f80e4dff64d7e592d0c59788b45aad7e8b35"}, + {file = "coverage-7.5.3-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:33fc65740267222fc02975c061eb7167185fef4cc8f2770267ee8bf7d6a42f84"}, + {file = "coverage-7.5.3-cp312-cp312-win32.whl", hash = "sha256:7b2a19e13dfb5c8e145c7a6ea959485ee8e2204699903c88c7d25283584bfc08"}, + {file = "coverage-7.5.3-cp312-cp312-win_amd64.whl", hash = "sha256:0bbddc54bbacfc09b3edaec644d4ac90c08ee8ed4844b0f86227dcda2d428fcb"}, + {file = "coverage-7.5.3-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:f78300789a708ac1f17e134593f577407d52d0417305435b134805c4fb135adb"}, + {file = "coverage-7.5.3-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:b368e1aee1b9b75757942d44d7598dcd22a9dbb126affcbba82d15917f0cc155"}, + {file = "coverage-7.5.3-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = 
"sha256:f836c174c3a7f639bded48ec913f348c4761cbf49de4a20a956d3431a7c9cb24"}, + {file = "coverage-7.5.3-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:244f509f126dc71369393ce5fea17c0592c40ee44e607b6d855e9c4ac57aac98"}, + {file = "coverage-7.5.3-cp38-cp38-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:c4c2872b3c91f9baa836147ca33650dc5c172e9273c808c3c3199c75490e709d"}, + {file = "coverage-7.5.3-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:dd4b3355b01273a56b20c219e74e7549e14370b31a4ffe42706a8cda91f19f6d"}, + {file = "coverage-7.5.3-cp38-cp38-musllinux_1_1_i686.whl", hash = "sha256:f542287b1489c7a860d43a7d8883e27ca62ab84ca53c965d11dac1d3a1fab7ce"}, + {file = "coverage-7.5.3-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:75e3f4e86804023e991096b29e147e635f5e2568f77883a1e6eed74512659ab0"}, + {file = "coverage-7.5.3-cp38-cp38-win32.whl", hash = "sha256:c59d2ad092dc0551d9f79d9d44d005c945ba95832a6798f98f9216ede3d5f485"}, + {file = "coverage-7.5.3-cp38-cp38-win_amd64.whl", hash = "sha256:fa21a04112c59ad54f69d80e376f7f9d0f5f9123ab87ecd18fbb9ec3a2beed56"}, + {file = "coverage-7.5.3-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:f5102a92855d518b0996eb197772f5ac2a527c0ec617124ad5242a3af5e25f85"}, + {file = "coverage-7.5.3-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:d1da0a2e3b37b745a2b2a678a4c796462cf753aebf94edcc87dcc6b8641eae31"}, + {file = "coverage-7.5.3-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:8383a6c8cefba1b7cecc0149415046b6fc38836295bc4c84e820872eb5478b3d"}, + {file = "coverage-7.5.3-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:9aad68c3f2566dfae84bf46295a79e79d904e1c21ccfc66de88cd446f8686341"}, + {file = "coverage-7.5.3-cp39-cp39-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:2e079c9ec772fedbade9d7ebc36202a1d9ef7291bc9b3a024ca395c4d52853d7"}, + {file = "coverage-7.5.3-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:bde997cac85fcac227b27d4fb2c7608a2c5f6558469b0eb704c5726ae49e1c52"}, + {file = "coverage-7.5.3-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:990fb20b32990b2ce2c5f974c3e738c9358b2735bc05075d50a6f36721b8f303"}, + {file = "coverage-7.5.3-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:3d5a67f0da401e105753d474369ab034c7bae51a4c31c77d94030d59e41df5bd"}, + {file = "coverage-7.5.3-cp39-cp39-win32.whl", hash = "sha256:e08c470c2eb01977d221fd87495b44867a56d4d594f43739a8028f8646a51e0d"}, + {file = "coverage-7.5.3-cp39-cp39-win_amd64.whl", hash = "sha256:1d2a830ade66d3563bb61d1e3c77c8def97b30ed91e166c67d0632c018f380f0"}, + {file = "coverage-7.5.3-pp38.pp39.pp310-none-any.whl", hash = "sha256:3538d8fb1ee9bdd2e2692b3b18c22bb1c19ffbefd06880f5ac496e42d7bb3884"}, + {file = "coverage-7.5.3.tar.gz", hash = "sha256:04aefca5190d1dc7a53a4c1a5a7f8568811306d7a8ee231c42fb69215571944f"}, ] [package.dependencies] @@ -556,13 +538,13 @@ tests = ["pytest", "pytest-cov", "pytest-xdist"] [[package]] name = "dataretrieval" -version = "1.0.8" +version = "1.0.9" description = "Discover and retrieve water data from U.S. federal hydrologic web services." 
optional = false python-versions = ">=3.8" files = [ - {file = "dataretrieval-1.0.8-py3-none-any.whl", hash = "sha256:fb5060a54ff8544ec544e8ec000758477a891b7e250bc08a2f9d1f2227cb266f"}, - {file = "dataretrieval-1.0.8.tar.gz", hash = "sha256:a6d407dc79ed07becb33996c88fb2ef2d9722a0a22299928266f50cd2ed8a523"}, + {file = "dataretrieval-1.0.9-py3-none-any.whl", hash = "sha256:e21751102a6e5b6677fb917cc56c09e325ef16165779ebb260306575f558373d"}, + {file = "dataretrieval-1.0.9.tar.gz", hash = "sha256:5597d82f33fb2df9e43d555e6106ec8f950e1cd96cda11f2e739bfc91b7191b0"}, ] [package.dependencies] @@ -616,17 +598,6 @@ files = [ {file = "decorator-5.1.1.tar.gz", hash = "sha256:637996211036b6385ef91435e4fae22989472f9d571faba8927ba8253acbc330"}, ] -[[package]] -name = "defusedxml" -version = "0.7.1" -description = "XML bomb protection for Python stdlib modules" -optional = false -python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*, !=3.4.*" -files = [ - {file = "defusedxml-0.7.1-py2.py3-none-any.whl", hash = "sha256:a352e7e428770286cc899e2542b6cdaedb2b4953ff269a210103ec58f6198a61"}, - {file = "defusedxml-0.7.1.tar.gz", hash = "sha256:1bb3032db185915b62d7c6209c5a8792be6a32ab2fedacc84e01b52c51aa3e69"}, -] - [[package]] name = "deprecated" version = "1.2.14" @@ -669,17 +640,6 @@ files = [ [package.dependencies] packaging = ">=20.9" -[[package]] -name = "entrypoints" -version = "0.4" -description = "Discover and load entry points from installed packages." -optional = false -python-versions = ">=3.6" -files = [ - {file = "entrypoints-0.4-py3-none-any.whl", hash = "sha256:f174b5ff827504fd3cd97cc3f8649f3693f51538c7e4bdf3ef002c8429d42f9f"}, - {file = "entrypoints-0.4.tar.gz", hash = "sha256:b706eddaa9218a19ebcd67b56818f05bb27589b1ca9e8d797b74affad4ccacd4"}, -] - [[package]] name = "erddapy" version = "2.2.0" @@ -740,13 +700,13 @@ tests = ["asttokens (>=2.1.0)", "coverage", "coverage-enable-subprocess", "ipyth [[package]] name = "fastjsonschema" -version = "2.19.1" +version = "2.20.0" description = "Fastest Python implementation of JSON schema" optional = false python-versions = "*" files = [ - {file = "fastjsonschema-2.19.1-py3-none-any.whl", hash = "sha256:3672b47bc94178c9f23dbb654bf47440155d4db9df5f7bc47643315f9c405cd0"}, - {file = "fastjsonschema-2.19.1.tar.gz", hash = "sha256:e3126a94bdc4623d3de4485f8d468a12f02a67921315ddc87836d6e456dc789d"}, + {file = "fastjsonschema-2.20.0-py3-none-any.whl", hash = "sha256:5875f0b0fa7a0043a91e93a9b8f793bcbbba9691e7fd83dca95c28ba26d21f0a"}, + {file = "fastjsonschema-2.20.0.tar.gz", hash = "sha256:3d48fc5300ee96f5d116f10fe6f28d938e6008f59a6a025c2649475b87f76a23"}, ] [package.extras] @@ -802,53 +762,53 @@ test = ["fiona[s3]", "pytest (>=7)", "pytest-cov", "pytz"] [[package]] name = "fonttools" -version = "4.51.0" +version = "4.53.0" description = "Tools to manipulate font files" optional = false python-versions = ">=3.8" files = [ - {file = "fonttools-4.51.0-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:84d7751f4468dd8cdd03ddada18b8b0857a5beec80bce9f435742abc9a851a74"}, - {file = "fonttools-4.51.0-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:8b4850fa2ef2cfbc1d1f689bc159ef0f45d8d83298c1425838095bf53ef46308"}, - {file = "fonttools-4.51.0-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:b5b48a1121117047d82695d276c2af2ee3a24ffe0f502ed581acc2673ecf1037"}, - {file = "fonttools-4.51.0-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:180194c7fe60c989bb627d7ed5011f2bef1c4d36ecf3ec64daec8302f1ae0716"}, - 
{file = "fonttools-4.51.0-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:96a48e137c36be55e68845fc4284533bda2980f8d6f835e26bca79d7e2006438"}, - {file = "fonttools-4.51.0-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:806e7912c32a657fa39d2d6eb1d3012d35f841387c8fc6cf349ed70b7c340039"}, - {file = "fonttools-4.51.0-cp310-cp310-win32.whl", hash = "sha256:32b17504696f605e9e960647c5f64b35704782a502cc26a37b800b4d69ff3c77"}, - {file = "fonttools-4.51.0-cp310-cp310-win_amd64.whl", hash = "sha256:c7e91abdfae1b5c9e3a543f48ce96013f9a08c6c9668f1e6be0beabf0a569c1b"}, - {file = "fonttools-4.51.0-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:a8feca65bab31479d795b0d16c9a9852902e3a3c0630678efb0b2b7941ea9c74"}, - {file = "fonttools-4.51.0-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:8ac27f436e8af7779f0bb4d5425aa3535270494d3bc5459ed27de3f03151e4c2"}, - {file = "fonttools-4.51.0-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:0e19bd9e9964a09cd2433a4b100ca7f34e34731e0758e13ba9a1ed6e5468cc0f"}, - {file = "fonttools-4.51.0-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:b2b92381f37b39ba2fc98c3a45a9d6383bfc9916a87d66ccb6553f7bdd129097"}, - {file = "fonttools-4.51.0-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:5f6bc991d1610f5c3bbe997b0233cbc234b8e82fa99fc0b2932dc1ca5e5afec0"}, - {file = "fonttools-4.51.0-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:9696fe9f3f0c32e9a321d5268208a7cc9205a52f99b89479d1b035ed54c923f1"}, - {file = "fonttools-4.51.0-cp311-cp311-win32.whl", hash = "sha256:3bee3f3bd9fa1d5ee616ccfd13b27ca605c2b4270e45715bd2883e9504735034"}, - {file = "fonttools-4.51.0-cp311-cp311-win_amd64.whl", hash = "sha256:0f08c901d3866a8905363619e3741c33f0a83a680d92a9f0e575985c2634fcc1"}, - {file = "fonttools-4.51.0-cp312-cp312-macosx_10_9_universal2.whl", hash = "sha256:4060acc2bfa2d8e98117828a238889f13b6f69d59f4f2d5857eece5277b829ba"}, - {file = "fonttools-4.51.0-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:1250e818b5f8a679ad79660855528120a8f0288f8f30ec88b83db51515411fcc"}, - {file = "fonttools-4.51.0-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:76f1777d8b3386479ffb4a282e74318e730014d86ce60f016908d9801af9ca2a"}, - {file = "fonttools-4.51.0-cp312-cp312-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:8b5ad456813d93b9c4b7ee55302208db2b45324315129d85275c01f5cb7e61a2"}, - {file = "fonttools-4.51.0-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:68b3fb7775a923be73e739f92f7e8a72725fd333eab24834041365d2278c3671"}, - {file = "fonttools-4.51.0-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:8e2f1a4499e3b5ee82c19b5ee57f0294673125c65b0a1ff3764ea1f9db2f9ef5"}, - {file = "fonttools-4.51.0-cp312-cp312-win32.whl", hash = "sha256:278e50f6b003c6aed19bae2242b364e575bcb16304b53f2b64f6551b9c000e15"}, - {file = "fonttools-4.51.0-cp312-cp312-win_amd64.whl", hash = "sha256:b3c61423f22165541b9403ee39874dcae84cd57a9078b82e1dce8cb06b07fa2e"}, - {file = "fonttools-4.51.0-cp38-cp38-macosx_10_9_universal2.whl", hash = "sha256:1621ee57da887c17312acc4b0e7ac30d3a4fb0fec6174b2e3754a74c26bbed1e"}, - {file = "fonttools-4.51.0-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:e9d9298be7a05bb4801f558522adbe2feea1b0b103d5294ebf24a92dd49b78e5"}, - {file = "fonttools-4.51.0-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:ee1af4be1c5afe4c96ca23badd368d8dc75f611887fb0c0dac9f71ee5d6f110e"}, - {file = 
"fonttools-4.51.0-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:c18b49adc721a7d0b8dfe7c3130c89b8704baf599fb396396d07d4aa69b824a1"}, - {file = "fonttools-4.51.0-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:de7c29bdbdd35811f14493ffd2534b88f0ce1b9065316433b22d63ca1cd21f14"}, - {file = "fonttools-4.51.0-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:cadf4e12a608ef1d13e039864f484c8a968840afa0258b0b843a0556497ea9ed"}, - {file = "fonttools-4.51.0-cp38-cp38-win32.whl", hash = "sha256:aefa011207ed36cd280babfaa8510b8176f1a77261833e895a9d96e57e44802f"}, - {file = "fonttools-4.51.0-cp38-cp38-win_amd64.whl", hash = "sha256:865a58b6e60b0938874af0968cd0553bcd88e0b2cb6e588727117bd099eef836"}, - {file = "fonttools-4.51.0-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:60a3409c9112aec02d5fb546f557bca6efa773dcb32ac147c6baf5f742e6258b"}, - {file = "fonttools-4.51.0-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:f7e89853d8bea103c8e3514b9f9dc86b5b4120afb4583b57eb10dfa5afbe0936"}, - {file = "fonttools-4.51.0-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:56fc244f2585d6c00b9bcc59e6593e646cf095a96fe68d62cd4da53dd1287b55"}, - {file = "fonttools-4.51.0-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:0d145976194a5242fdd22df18a1b451481a88071feadf251221af110ca8f00ce"}, - {file = "fonttools-4.51.0-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:c5b8cab0c137ca229433570151b5c1fc6af212680b58b15abd797dcdd9dd5051"}, - {file = "fonttools-4.51.0-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:54dcf21a2f2d06ded676e3c3f9f74b2bafded3a8ff12f0983160b13e9f2fb4a7"}, - {file = "fonttools-4.51.0-cp39-cp39-win32.whl", hash = "sha256:0118ef998a0699a96c7b28457f15546815015a2710a1b23a7bf6c1be60c01636"}, - {file = "fonttools-4.51.0-cp39-cp39-win_amd64.whl", hash = "sha256:599bdb75e220241cedc6faebfafedd7670335d2e29620d207dd0378a4e9ccc5a"}, - {file = "fonttools-4.51.0-py3-none-any.whl", hash = "sha256:15c94eeef6b095831067f72c825eb0e2d48bb4cea0647c1b05c981ecba2bf39f"}, - {file = "fonttools-4.51.0.tar.gz", hash = "sha256:dc0673361331566d7a663d7ce0f6fdcbfbdc1f59c6e3ed1165ad7202ca183c68"}, + {file = "fonttools-4.53.0-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:52a6e0a7a0bf611c19bc8ec8f7592bdae79c8296c70eb05917fd831354699b20"}, + {file = "fonttools-4.53.0-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:099634631b9dd271d4a835d2b2a9e042ccc94ecdf7e2dd9f7f34f7daf333358d"}, + {file = "fonttools-4.53.0-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:e40013572bfb843d6794a3ce076c29ef4efd15937ab833f520117f8eccc84fd6"}, + {file = "fonttools-4.53.0-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:715b41c3e231f7334cbe79dfc698213dcb7211520ec7a3bc2ba20c8515e8a3b5"}, + {file = "fonttools-4.53.0-cp310-cp310-musllinux_1_2_aarch64.whl", hash = "sha256:74ae2441731a05b44d5988d3ac2cf784d3ee0a535dbed257cbfff4be8bb49eb9"}, + {file = "fonttools-4.53.0-cp310-cp310-musllinux_1_2_x86_64.whl", hash = "sha256:95db0c6581a54b47c30860d013977b8a14febc206c8b5ff562f9fe32738a8aca"}, + {file = "fonttools-4.53.0-cp310-cp310-win32.whl", hash = "sha256:9cd7a6beec6495d1dffb1033d50a3f82dfece23e9eb3c20cd3c2444d27514068"}, + {file = "fonttools-4.53.0-cp310-cp310-win_amd64.whl", hash = "sha256:daaef7390e632283051e3cf3e16aff2b68b247e99aea916f64e578c0449c9c68"}, + {file = "fonttools-4.53.0-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:a209d2e624ba492df4f3bfad5996d1f76f03069c6133c60cd04f9a9e715595ec"}, 
+ {file = "fonttools-4.53.0-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:4f520d9ac5b938e6494f58a25c77564beca7d0199ecf726e1bd3d56872c59749"}, + {file = "fonttools-4.53.0-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:eceef49f457253000e6a2d0f7bd08ff4e9fe96ec4ffce2dbcb32e34d9c1b8161"}, + {file = "fonttools-4.53.0-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:fa1f3e34373aa16045484b4d9d352d4c6b5f9f77ac77a178252ccbc851e8b2ee"}, + {file = "fonttools-4.53.0-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:28d072169fe8275fb1a0d35e3233f6df36a7e8474e56cb790a7258ad822b6fd6"}, + {file = "fonttools-4.53.0-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:4a2a6ba400d386e904fd05db81f73bee0008af37799a7586deaa4aef8cd5971e"}, + {file = "fonttools-4.53.0-cp311-cp311-win32.whl", hash = "sha256:bb7273789f69b565d88e97e9e1da602b4ee7ba733caf35a6c2affd4334d4f005"}, + {file = "fonttools-4.53.0-cp311-cp311-win_amd64.whl", hash = "sha256:9fe9096a60113e1d755e9e6bda15ef7e03391ee0554d22829aa506cdf946f796"}, + {file = "fonttools-4.53.0-cp312-cp312-macosx_10_9_universal2.whl", hash = "sha256:d8f191a17369bd53a5557a5ee4bab91d5330ca3aefcdf17fab9a497b0e7cff7a"}, + {file = "fonttools-4.53.0-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:93156dd7f90ae0a1b0e8871032a07ef3178f553f0c70c386025a808f3a63b1f4"}, + {file = "fonttools-4.53.0-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:bff98816cb144fb7b85e4b5ba3888a33b56ecef075b0e95b95bcd0a5fbf20f06"}, + {file = "fonttools-4.53.0-cp312-cp312-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:973d030180eca8255b1bce6ffc09ef38a05dcec0e8320cc9b7bcaa65346f341d"}, + {file = "fonttools-4.53.0-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:c4ee5a24e281fbd8261c6ab29faa7fd9a87a12e8c0eed485b705236c65999109"}, + {file = "fonttools-4.53.0-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:bd5bc124fae781a4422f61b98d1d7faa47985f663a64770b78f13d2c072410c2"}, + {file = "fonttools-4.53.0-cp312-cp312-win32.whl", hash = "sha256:a239afa1126b6a619130909c8404070e2b473dd2b7fc4aacacd2e763f8597fea"}, + {file = "fonttools-4.53.0-cp312-cp312-win_amd64.whl", hash = "sha256:45b4afb069039f0366a43a5d454bc54eea942bfb66b3fc3e9a2c07ef4d617380"}, + {file = "fonttools-4.53.0-cp38-cp38-macosx_10_9_universal2.whl", hash = "sha256:93bc9e5aaa06ff928d751dc6be889ff3e7d2aa393ab873bc7f6396a99f6fbb12"}, + {file = "fonttools-4.53.0-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:2367d47816cc9783a28645bc1dac07f8ffc93e0f015e8c9fc674a5b76a6da6e4"}, + {file = "fonttools-4.53.0-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:907fa0b662dd8fc1d7c661b90782ce81afb510fc4b7aa6ae7304d6c094b27bce"}, + {file = "fonttools-4.53.0-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:3e0ad3c6ea4bd6a289d958a1eb922767233f00982cf0fe42b177657c86c80a8f"}, + {file = "fonttools-4.53.0-cp38-cp38-musllinux_1_2_aarch64.whl", hash = "sha256:73121a9b7ff93ada888aaee3985a88495489cc027894458cb1a736660bdfb206"}, + {file = "fonttools-4.53.0-cp38-cp38-musllinux_1_2_x86_64.whl", hash = "sha256:ee595d7ba9bba130b2bec555a40aafa60c26ce68ed0cf509983e0f12d88674fd"}, + {file = "fonttools-4.53.0-cp38-cp38-win32.whl", hash = "sha256:fca66d9ff2ac89b03f5aa17e0b21a97c21f3491c46b583bb131eb32c7bab33af"}, + {file = "fonttools-4.53.0-cp38-cp38-win_amd64.whl", hash = "sha256:31f0e3147375002aae30696dd1dc596636abbd22fca09d2e730ecde0baad1d6b"}, + {file = 
"fonttools-4.53.0-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:7d6166192dcd925c78a91d599b48960e0a46fe565391c79fe6de481ac44d20ac"}, + {file = "fonttools-4.53.0-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:ef50ec31649fbc3acf6afd261ed89d09eb909b97cc289d80476166df8438524d"}, + {file = "fonttools-4.53.0-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:7f193f060391a455920d61684a70017ef5284ccbe6023bb056e15e5ac3de11d1"}, + {file = "fonttools-4.53.0-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:ba9f09ff17f947392a855e3455a846f9855f6cf6bec33e9a427d3c1d254c712f"}, + {file = "fonttools-4.53.0-cp39-cp39-musllinux_1_2_aarch64.whl", hash = "sha256:0c555e039d268445172b909b1b6bdcba42ada1cf4a60e367d68702e3f87e5f64"}, + {file = "fonttools-4.53.0-cp39-cp39-musllinux_1_2_x86_64.whl", hash = "sha256:5a4788036201c908079e89ae3f5399b33bf45b9ea4514913f4dbbe4fac08efe0"}, + {file = "fonttools-4.53.0-cp39-cp39-win32.whl", hash = "sha256:d1a24f51a3305362b94681120c508758a88f207fa0a681c16b5a4172e9e6c7a9"}, + {file = "fonttools-4.53.0-cp39-cp39-win_amd64.whl", hash = "sha256:1e677bfb2b4bd0e5e99e0f7283e65e47a9814b0486cb64a41adf9ef110e078f2"}, + {file = "fonttools-4.53.0-py3-none-any.whl", hash = "sha256:6b4f04b1fbc01a3569d63359f2227c89ab294550de277fd09d8fca6185669fa4"}, + {file = "fonttools-4.53.0.tar.gz", hash = "sha256:c93ed66d32de1559b6fc348838c7572d5c0ac1e4a258e76763a5caddd8944002"}, ] [package.extras] @@ -1118,6 +1078,27 @@ qtconsole = ["qtconsole"] test = ["pickleshare", "pytest (<7.1)", "pytest-asyncio (<0.22)", "testpath"] test-extra = ["curio", "matplotlib (!=3.2.0)", "nbformat", "numpy (>=1.22)", "pandas", "pickleshare", "pytest (<7.1)", "pytest-asyncio (<0.22)", "testpath", "trio"] +[[package]] +name = "ipywidgets" +version = "8.1.3" +description = "Jupyter interactive widgets" +optional = false +python-versions = ">=3.7" +files = [ + {file = "ipywidgets-8.1.3-py3-none-any.whl", hash = "sha256:efafd18f7a142248f7cb0ba890a68b96abd4d6e88ddbda483c9130d12667eaf2"}, + {file = "ipywidgets-8.1.3.tar.gz", hash = "sha256:f5f9eeaae082b1823ce9eac2575272952f40d748893972956dc09700a6392d9c"}, +] + +[package.dependencies] +comm = ">=0.1.3" +ipython = ">=6.1.0" +jupyterlab-widgets = ">=3.0.11,<3.1.0" +traitlets = ">=4.3.1" +widgetsnbextension = ">=4.0.11,<4.1.0" + +[package.extras] +test = ["ipykernel", "jsonschema", "pytest (>=3.6.0)", "pytest-cov", "pytz"] + [[package]] name = "jedi" version = "0.19.1" @@ -1191,13 +1172,13 @@ referencing = ">=0.31.0" [[package]] name = "jupyter-client" -version = "8.6.1" +version = "8.6.2" description = "Jupyter protocol implementation and client libraries" optional = false python-versions = ">=3.8" files = [ - {file = "jupyter_client-8.6.1-py3-none-any.whl", hash = "sha256:3b7bd22f058434e3b9a7ea4b1500ed47de2713872288c0d511d19926f99b459f"}, - {file = "jupyter_client-8.6.1.tar.gz", hash = "sha256:e842515e2bab8e19186d89fdfea7abd15e39dd581f94e399f00e2af5a1652d3f"}, + {file = "jupyter_client-8.6.2-py3-none-any.whl", hash = "sha256:50cbc5c66fd1b8f65ecb66bc490ab73217993632809b6e505687de18e9dea39f"}, + {file = "jupyter_client-8.6.2.tar.gz", hash = "sha256:2bda14d55ee5ba58552a8c53ae43d215ad9868853489213f37da060ced54d8df"}, ] [package.dependencies] @@ -1210,7 +1191,7 @@ traitlets = ">=5.3" [package.extras] docs = ["ipykernel", "myst-parser", "pydata-sphinx-theme", "sphinx (>=4)", "sphinx-autodoc-typehints", "sphinxcontrib-github-alt", "sphinxcontrib-spelling"] -test = ["coverage", "ipykernel (>=6.14)", "mypy", "paramiko", "pre-commit", 
"pytest", "pytest-cov", "pytest-jupyter[client] (>=0.4.1)", "pytest-timeout"] +test = ["coverage", "ipykernel (>=6.14)", "mypy", "paramiko", "pre-commit", "pytest (<8.2.0)", "pytest-cov", "pytest-jupyter[client] (>=0.4.1)", "pytest-timeout"] [[package]] name = "jupyter-core" @@ -1233,14 +1214,14 @@ docs = ["myst-parser", "pydata-sphinx-theme", "sphinx-autodoc-typehints", "sphin test = ["ipykernel", "pre-commit", "pytest (<8)", "pytest-cov", "pytest-timeout"] [[package]] -name = "jupyterlab-pygments" -version = "0.3.0" -description = "Pygments theme using JupyterLab CSS variables" +name = "jupyterlab-widgets" +version = "3.0.11" +description = "Jupyter interactive widgets for JupyterLab" optional = false -python-versions = ">=3.8" +python-versions = ">=3.7" files = [ - {file = "jupyterlab_pygments-0.3.0-py3-none-any.whl", hash = "sha256:841a89020971da1d8693f1a99997aefc5dc424bb1b251fd6322462a1b8842780"}, - {file = "jupyterlab_pygments-0.3.0.tar.gz", hash = "sha256:721aca4d9029252b11cfa9d185e5b5af4d54772bb8072f9b7036f4170054d35d"}, + {file = "jupyterlab_widgets-3.0.11-py3-none-any.whl", hash = "sha256:78287fd86d20744ace330a61625024cf5521e1c012a352ddc0a3cdc2348becd0"}, + {file = "jupyterlab_widgets-3.0.11.tar.gz", hash = "sha256:dd5ac679593c969af29c9bed054c24f26842baa51352114736756bc035deee27"}, ] [[package]] @@ -1642,39 +1623,40 @@ files = [ [[package]] name = "matplotlib" -version = "3.8.4" +version = "3.9.0" description = "Python plotting package" optional = false python-versions = ">=3.9" files = [ - {file = "matplotlib-3.8.4-cp310-cp310-macosx_10_12_x86_64.whl", hash = "sha256:abc9d838f93583650c35eca41cfcec65b2e7cb50fd486da6f0c49b5e1ed23014"}, - {file = "matplotlib-3.8.4-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:8f65c9f002d281a6e904976007b2d46a1ee2bcea3a68a8c12dda24709ddc9106"}, - {file = "matplotlib-3.8.4-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:ce1edd9f5383b504dbc26eeea404ed0a00656c526638129028b758fd43fc5f10"}, - {file = "matplotlib-3.8.4-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:ecd79298550cba13a43c340581a3ec9c707bd895a6a061a78fa2524660482fc0"}, - {file = "matplotlib-3.8.4-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:90df07db7b599fe7035d2f74ab7e438b656528c68ba6bb59b7dc46af39ee48ef"}, - {file = "matplotlib-3.8.4-cp310-cp310-win_amd64.whl", hash = "sha256:ac24233e8f2939ac4fd2919eed1e9c0871eac8057666070e94cbf0b33dd9c338"}, - {file = "matplotlib-3.8.4-cp311-cp311-macosx_10_12_x86_64.whl", hash = "sha256:72f9322712e4562e792b2961971891b9fbbb0e525011e09ea0d1f416c4645661"}, - {file = "matplotlib-3.8.4-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:232ce322bfd020a434caaffbd9a95333f7c2491e59cfc014041d95e38ab90d1c"}, - {file = "matplotlib-3.8.4-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:6addbd5b488aedb7f9bc19f91cd87ea476206f45d7116fcfe3d31416702a82fa"}, - {file = "matplotlib-3.8.4-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:cc4ccdc64e3039fc303defd119658148f2349239871db72cd74e2eeaa9b80b71"}, - {file = "matplotlib-3.8.4-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:b7a2a253d3b36d90c8993b4620183b55665a429da8357a4f621e78cd48b2b30b"}, - {file = "matplotlib-3.8.4-cp311-cp311-win_amd64.whl", hash = "sha256:8080d5081a86e690d7688ffa542532e87f224c38a6ed71f8fbed34dd1d9fedae"}, - {file = "matplotlib-3.8.4-cp312-cp312-macosx_10_12_x86_64.whl", hash = "sha256:6485ac1f2e84676cff22e693eaa4fbed50ef5dc37173ce1f023daef4687df616"}, - {file = 
"matplotlib-3.8.4-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:c89ee9314ef48c72fe92ce55c4e95f2f39d70208f9f1d9db4e64079420d8d732"}, - {file = "matplotlib-3.8.4-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:50bac6e4d77e4262c4340d7a985c30912054745ec99756ce213bfbc3cb3808eb"}, - {file = "matplotlib-3.8.4-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:f51c4c869d4b60d769f7b4406eec39596648d9d70246428745a681c327a8ad30"}, - {file = "matplotlib-3.8.4-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:b12ba985837e4899b762b81f5b2845bd1a28f4fdd1a126d9ace64e9c4eb2fb25"}, - {file = "matplotlib-3.8.4-cp312-cp312-win_amd64.whl", hash = "sha256:7a6769f58ce51791b4cb8b4d7642489df347697cd3e23d88266aaaee93b41d9a"}, - {file = "matplotlib-3.8.4-cp39-cp39-macosx_10_12_x86_64.whl", hash = "sha256:843cbde2f0946dadd8c5c11c6d91847abd18ec76859dc319362a0964493f0ba6"}, - {file = "matplotlib-3.8.4-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:1c13f041a7178f9780fb61cc3a2b10423d5e125480e4be51beaf62b172413b67"}, - {file = "matplotlib-3.8.4-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:fb44f53af0a62dc80bba4443d9b27f2fde6acfdac281d95bc872dc148a6509cc"}, - {file = "matplotlib-3.8.4-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:606e3b90897554c989b1e38a258c626d46c873523de432b1462f295db13de6f9"}, - {file = "matplotlib-3.8.4-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:9bb0189011785ea794ee827b68777db3ca3f93f3e339ea4d920315a0e5a78d54"}, - {file = "matplotlib-3.8.4-cp39-cp39-win_amd64.whl", hash = "sha256:6209e5c9aaccc056e63b547a8152661324404dd92340a6e479b3a7f24b42a5d0"}, - {file = "matplotlib-3.8.4-pp39-pypy39_pp73-macosx_10_12_x86_64.whl", hash = "sha256:c7064120a59ce6f64103c9cefba8ffe6fba87f2c61d67c401186423c9a20fd35"}, - {file = "matplotlib-3.8.4-pp39-pypy39_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:a0e47eda4eb2614300fc7bb4657fced3e83d6334d03da2173b09e447418d499f"}, - {file = "matplotlib-3.8.4-pp39-pypy39_pp73-win_amd64.whl", hash = "sha256:493e9f6aa5819156b58fce42b296ea31969f2aab71c5b680b4ea7a3cb5c07d94"}, - {file = "matplotlib-3.8.4.tar.gz", hash = "sha256:8aac397d5e9ec158960e31c381c5ffc52ddd52bd9a47717e2a694038167dffea"}, + {file = "matplotlib-3.9.0-cp310-cp310-macosx_10_12_x86_64.whl", hash = "sha256:2bcee1dffaf60fe7656183ac2190bd630842ff87b3153afb3e384d966b57fe56"}, + {file = "matplotlib-3.9.0-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:3f988bafb0fa39d1074ddd5bacd958c853e11def40800c5824556eb630f94d3b"}, + {file = "matplotlib-3.9.0-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:fe428e191ea016bb278758c8ee82a8129c51d81d8c4bc0846c09e7e8e9057241"}, + {file = "matplotlib-3.9.0-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:eaf3978060a106fab40c328778b148f590e27f6fa3cd15a19d6892575bce387d"}, + {file = "matplotlib-3.9.0-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:2e7f03e5cbbfacdd48c8ea394d365d91ee8f3cae7e6ec611409927b5ed997ee4"}, + {file = "matplotlib-3.9.0-cp310-cp310-win_amd64.whl", hash = "sha256:13beb4840317d45ffd4183a778685e215939be7b08616f431c7795276e067463"}, + {file = "matplotlib-3.9.0-cp311-cp311-macosx_10_12_x86_64.whl", hash = "sha256:063af8587fceeac13b0936c42a2b6c732c2ab1c98d38abc3337e430e1ff75e38"}, + {file = "matplotlib-3.9.0-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:9a2fa6d899e17ddca6d6526cf6e7ba677738bf2a6a9590d702c277204a7c6152"}, + {file = 
"matplotlib-3.9.0-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:550cdda3adbd596078cca7d13ed50b77879104e2e46392dcd7c75259d8f00e85"}, + {file = "matplotlib-3.9.0-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:76cce0f31b351e3551d1f3779420cf8f6ec0d4a8cf9c0237a3b549fd28eb4abb"}, + {file = "matplotlib-3.9.0-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:c53aeb514ccbbcbab55a27f912d79ea30ab21ee0531ee2c09f13800efb272674"}, + {file = "matplotlib-3.9.0-cp311-cp311-win_amd64.whl", hash = "sha256:a5be985db2596d761cdf0c2eaf52396f26e6a64ab46bd8cd810c48972349d1be"}, + {file = "matplotlib-3.9.0-cp312-cp312-macosx_10_12_x86_64.whl", hash = "sha256:c79f3a585f1368da6049318bdf1f85568d8d04b2e89fc24b7e02cc9b62017382"}, + {file = "matplotlib-3.9.0-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:bdd1ecbe268eb3e7653e04f451635f0fb0f77f07fd070242b44c076c9106da84"}, + {file = "matplotlib-3.9.0-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:d38e85a1a6d732f645f1403ce5e6727fd9418cd4574521d5803d3d94911038e5"}, + {file = "matplotlib-3.9.0-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:0a490715b3b9984fa609116481b22178348c1a220a4499cda79132000a79b4db"}, + {file = "matplotlib-3.9.0-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:8146ce83cbc5dc71c223a74a1996d446cd35cfb6a04b683e1446b7e6c73603b7"}, + {file = "matplotlib-3.9.0-cp312-cp312-win_amd64.whl", hash = "sha256:d91a4ffc587bacf5c4ce4ecfe4bcd23a4b675e76315f2866e588686cc97fccdf"}, + {file = "matplotlib-3.9.0-cp39-cp39-macosx_10_12_x86_64.whl", hash = "sha256:616fabf4981a3b3c5a15cd95eba359c8489c4e20e03717aea42866d8d0465956"}, + {file = "matplotlib-3.9.0-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:cd53c79fd02f1c1808d2cfc87dd3cf4dbc63c5244a58ee7944497107469c8d8a"}, + {file = "matplotlib-3.9.0-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:06a478f0d67636554fa78558cfbcd7b9dba85b51f5c3b5a0c9be49010cf5f321"}, + {file = "matplotlib-3.9.0-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:81c40af649d19c85f8073e25e5806926986806fa6d54be506fbf02aef47d5a89"}, + {file = "matplotlib-3.9.0-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:52146fc3bd7813cc784562cb93a15788be0b2875c4655e2cc6ea646bfa30344b"}, + {file = "matplotlib-3.9.0-cp39-cp39-win_amd64.whl", hash = "sha256:0fc51eaa5262553868461c083d9adadb11a6017315f3a757fc45ec6ec5f02888"}, + {file = "matplotlib-3.9.0-pp39-pypy39_pp73-macosx_10_12_x86_64.whl", hash = "sha256:bd4f2831168afac55b881db82a7730992aa41c4f007f1913465fb182d6fb20c0"}, + {file = "matplotlib-3.9.0-pp39-pypy39_pp73-macosx_11_0_arm64.whl", hash = "sha256:290d304e59be2b33ef5c2d768d0237f5bd132986bdcc66f80bc9bcc300066a03"}, + {file = "matplotlib-3.9.0-pp39-pypy39_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:7ff2e239c26be4f24bfa45860c20ffccd118d270c5b5d081fa4ea409b5469fcd"}, + {file = "matplotlib-3.9.0-pp39-pypy39_pp73-win_amd64.whl", hash = "sha256:af4001b7cae70f7eaacfb063db605280058246de590fa7874f00f62259f2df7e"}, + {file = "matplotlib-3.9.0.tar.gz", hash = "sha256:e6d29ea6c19e34b30fb7d88b7081f869a03014f66fe06d62cc77d5a6ea88ed7a"}, ] [package.dependencies] @@ -1683,12 +1665,15 @@ cycler = ">=0.10" fonttools = ">=4.22.0" importlib-resources = {version = ">=3.2.0", markers = "python_version < \"3.10\""} kiwisolver = ">=1.3.1" -numpy = ">=1.21" +numpy = ">=1.23" packaging = ">=20.0" pillow = ">=8" pyparsing = ">=2.3.1" python-dateutil = ">=2.7" 
+[package.extras] +dev = ["meson-python (>=0.13.1)", "numpy (>=1.25)", "pybind11 (>=2.6)", "setuptools (>=64)", "setuptools_scm (>=7)"] + [[package]] name = "matplotlib-inline" version = "0.1.7" @@ -1892,62 +1877,24 @@ files = [ [[package]] name = "nbclient" -version = "0.10.0" +version = "0.6.8" description = "A client library for executing notebooks. Formerly nbconvert's ExecutePreprocessor." optional = false -python-versions = ">=3.8.0" -files = [ - {file = "nbclient-0.10.0-py3-none-any.whl", hash = "sha256:f13e3529332a1f1f81d82a53210322476a168bb7090a0289c795fe9cc11c9d3f"}, - {file = "nbclient-0.10.0.tar.gz", hash = "sha256:4b3f1b7dba531e498449c4db4f53da339c91d449dc11e9af3a43b4eb5c5abb09"}, -] - -[package.dependencies] -jupyter-client = ">=6.1.12" -jupyter-core = ">=4.12,<5.0.dev0 || >=5.1.dev0" -nbformat = ">=5.1" -traitlets = ">=5.4" - -[package.extras] -dev = ["pre-commit"] -docs = ["autodoc-traits", "mock", "moto", "myst-parser", "nbclient[test]", "sphinx (>=1.7)", "sphinx-book-theme", "sphinxcontrib-spelling"] -test = ["flaky", "ipykernel (>=6.19.3)", "ipython", "ipywidgets", "nbconvert (>=7.0.0)", "pytest (>=7.0,<8)", "pytest-asyncio", "pytest-cov (>=4.0)", "testpath", "xmltodict"] - -[[package]] -name = "nbconvert" -version = "6.5.4" -description = "Converting Jupyter Notebooks" -optional = false -python-versions = ">=3.7" +python-versions = ">=3.7.0" files = [ - {file = "nbconvert-6.5.4-py3-none-any.whl", hash = "sha256:d679a947f849a966cbbd0bf6e7fedcfdb64be3b20ce7cef11ad55c13f5820e19"}, - {file = "nbconvert-6.5.4.tar.gz", hash = "sha256:9e3c7c6d491374cbdd5f35d268c05809357716d346f4573186bbeab32ee50bc1"}, + {file = "nbclient-0.6.8-py3-none-any.whl", hash = "sha256:7cce8b415888539180535953f80ea2385cdbb444944cdeb73ffac1556fdbc228"}, + {file = "nbclient-0.6.8.tar.gz", hash = "sha256:268fde3457cafe1539e32eb1c6d796bbedb90b9e92bacd3e43d83413734bb0e8"}, ] [package.dependencies] -beautifulsoup4 = "*" -bleach = "*" -defusedxml = "*" -entrypoints = ">=0.2.2" -jinja2 = ">=3.0" -jupyter-core = ">=4.7" -jupyterlab-pygments = "*" -lxml = "*" -MarkupSafe = ">=2.0" -mistune = ">=0.8.1,<2" -nbclient = ">=0.5.0" -nbformat = ">=5.1" -packaging = "*" -pandocfilters = ">=1.4.1" -pygments = ">=2.4.1" -tinycss2 = "*" -traitlets = ">=5.0" +jupyter-client = ">=6.1.5" +nbformat = ">=5.0" +nest-asyncio = "*" +traitlets = ">=5.2.2" [package.extras] -all = ["ipykernel", "ipython", "ipywidgets (>=7)", "nbsphinx (>=0.2.12)", "pre-commit", "pyppeteer (>=1,<1.1)", "pytest", "pytest-cov", "pytest-dependency", "sphinx (>=1.5.1)", "sphinx-rtd-theme", "tornado (>=6.1)"] -docs = ["ipython", "nbsphinx (>=0.2.12)", "sphinx (>=1.5.1)", "sphinx-rtd-theme"] -serve = ["tornado (>=6.1)"] -test = ["ipykernel", "ipywidgets (>=7)", "pre-commit", "pyppeteer (>=1,<1.1)", "pytest", "pytest-cov", "pytest-dependency"] -webpdf = ["pyppeteer (>=1,<1.1)"] +sphinx = ["Sphinx (>=1.7)", "autodoc-traits", "mock", "moto", "myst-parser", "sphinx-book-theme"] +test = ["black", "check-manifest", "flake8", "ipykernel", "ipython", "ipywidgets", "mypy", "nbconvert", "pip (>=18.1)", "pre-commit", "pytest (>=4.1)", "pytest-asyncio", "pytest-cov (>=2.6.1)", "setuptools (>=60.0)", "testpath", "twine (>=1.11.0)", "xmltodict"] [[package]] name = "nbformat" @@ -1970,6 +1917,24 @@ traitlets = ">=5.1" docs = ["myst-parser", "pydata-sphinx-theme", "sphinx", "sphinxcontrib-github-alt", "sphinxcontrib-spelling"] test = ["pep440", "pre-commit", "pytest", "testpath"] +[[package]] +name = "nbmake" +version = "1.5.4" +description = "Pytest plugin for testing 
notebooks" +optional = false +python-versions = "<4.0.0,>=3.8.0" +files = [ + {file = "nbmake-1.5.4-py3-none-any.whl", hash = "sha256:8e440a61a7d4ab303064aa86b8d2c088177c89960e2b4a0f91a768dc9f68382b"}, + {file = "nbmake-1.5.4.tar.gz", hash = "sha256:56417fe80d50069671122955532df6e26369a23f68b9c6e2191ae9cfef19abb2"}, +] + +[package.dependencies] +ipykernel = ">=5.4.0" +nbclient = ">=0.6.6,<0.7.0" +nbformat = ">=5.0.8,<6.0.0" +Pygments = ">=2.7.3,<3.0.0" +pytest = ">=6.1.0" + [[package]] name = "nest-asyncio" version = "1.6.0" @@ -1983,58 +1948,67 @@ files = [ [[package]] name = "numpy" -version = "1.26.4" +version = "2.0.0" description = "Fundamental package for array computing in Python" optional = false python-versions = ">=3.9" files = [ - {file = "numpy-1.26.4-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:9ff0f4f29c51e2803569d7a51c2304de5554655a60c5d776e35b4a41413830d0"}, - {file = "numpy-1.26.4-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:2e4ee3380d6de9c9ec04745830fd9e2eccb3e6cf790d39d7b98ffd19b0dd754a"}, - {file = "numpy-1.26.4-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:d209d8969599b27ad20994c8e41936ee0964e6da07478d6c35016bc386b66ad4"}, - {file = "numpy-1.26.4-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:ffa75af20b44f8dba823498024771d5ac50620e6915abac414251bd971b4529f"}, - {file = "numpy-1.26.4-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:62b8e4b1e28009ef2846b4c7852046736bab361f7aeadeb6a5b89ebec3c7055a"}, - {file = "numpy-1.26.4-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:a4abb4f9001ad2858e7ac189089c42178fcce737e4169dc61321660f1a96c7d2"}, - {file = "numpy-1.26.4-cp310-cp310-win32.whl", hash = "sha256:bfe25acf8b437eb2a8b2d49d443800a5f18508cd811fea3181723922a8a82b07"}, - {file = "numpy-1.26.4-cp310-cp310-win_amd64.whl", hash = "sha256:b97fe8060236edf3662adfc2c633f56a08ae30560c56310562cb4f95500022d5"}, - {file = "numpy-1.26.4-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:4c66707fabe114439db9068ee468c26bbdf909cac0fb58686a42a24de1760c71"}, - {file = "numpy-1.26.4-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:edd8b5fe47dab091176d21bb6de568acdd906d1887a4584a15a9a96a1dca06ef"}, - {file = "numpy-1.26.4-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:7ab55401287bfec946ced39700c053796e7cc0e3acbef09993a9ad2adba6ca6e"}, - {file = "numpy-1.26.4-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:666dbfb6ec68962c033a450943ded891bed2d54e6755e35e5835d63f4f6931d5"}, - {file = "numpy-1.26.4-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:96ff0b2ad353d8f990b63294c8986f1ec3cb19d749234014f4e7eb0112ceba5a"}, - {file = "numpy-1.26.4-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:60dedbb91afcbfdc9bc0b1f3f402804070deed7392c23eb7a7f07fa857868e8a"}, - {file = "numpy-1.26.4-cp311-cp311-win32.whl", hash = "sha256:1af303d6b2210eb850fcf03064d364652b7120803a0b872f5211f5234b399f20"}, - {file = "numpy-1.26.4-cp311-cp311-win_amd64.whl", hash = "sha256:cd25bcecc4974d09257ffcd1f098ee778f7834c3ad767fe5db785be9a4aa9cb2"}, - {file = "numpy-1.26.4-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:b3ce300f3644fb06443ee2222c2201dd3a89ea6040541412b8fa189341847218"}, - {file = "numpy-1.26.4-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:03a8c78d01d9781b28a6989f6fa1bb2c4f2d51201cf99d3dd875df6fbd96b23b"}, - {file = "numpy-1.26.4-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = 
"sha256:9fad7dcb1aac3c7f0584a5a8133e3a43eeb2fe127f47e3632d43d677c66c102b"}, - {file = "numpy-1.26.4-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:675d61ffbfa78604709862923189bad94014bef562cc35cf61d3a07bba02a7ed"}, - {file = "numpy-1.26.4-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:ab47dbe5cc8210f55aa58e4805fe224dac469cde56b9f731a4c098b91917159a"}, - {file = "numpy-1.26.4-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:1dda2e7b4ec9dd512f84935c5f126c8bd8b9f2fc001e9f54af255e8c5f16b0e0"}, - {file = "numpy-1.26.4-cp312-cp312-win32.whl", hash = "sha256:50193e430acfc1346175fcbdaa28ffec49947a06918b7b92130744e81e640110"}, - {file = "numpy-1.26.4-cp312-cp312-win_amd64.whl", hash = "sha256:08beddf13648eb95f8d867350f6a018a4be2e5ad54c8d8caed89ebca558b2818"}, - {file = "numpy-1.26.4-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:7349ab0fa0c429c82442a27a9673fc802ffdb7c7775fad780226cb234965e53c"}, - {file = "numpy-1.26.4-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:52b8b60467cd7dd1e9ed082188b4e6bb35aa5cdd01777621a1658910745b90be"}, - {file = "numpy-1.26.4-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:d5241e0a80d808d70546c697135da2c613f30e28251ff8307eb72ba696945764"}, - {file = "numpy-1.26.4-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:f870204a840a60da0b12273ef34f7051e98c3b5961b61b0c2c1be6dfd64fbcd3"}, - {file = "numpy-1.26.4-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:679b0076f67ecc0138fd2ede3a8fd196dddc2ad3254069bcb9faf9a79b1cebcd"}, - {file = "numpy-1.26.4-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:47711010ad8555514b434df65f7d7b076bb8261df1ca9bb78f53d3b2db02e95c"}, - {file = "numpy-1.26.4-cp39-cp39-win32.whl", hash = "sha256:a354325ee03388678242a4d7ebcd08b5c727033fcff3b2f536aea978e15ee9e6"}, - {file = "numpy-1.26.4-cp39-cp39-win_amd64.whl", hash = "sha256:3373d5d70a5fe74a2c1bb6d2cfd9609ecf686d47a2d7b1d37a8f3b6bf6003aea"}, - {file = "numpy-1.26.4-pp39-pypy39_pp73-macosx_10_9_x86_64.whl", hash = "sha256:afedb719a9dcfc7eaf2287b839d8198e06dcd4cb5d276a3df279231138e83d30"}, - {file = "numpy-1.26.4-pp39-pypy39_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:95a7476c59002f2f6c590b9b7b998306fba6a5aa646b1e22ddfeaf8f78c3a29c"}, - {file = "numpy-1.26.4-pp39-pypy39_pp73-win_amd64.whl", hash = "sha256:7e50d0a0cc3189f9cb0aeb3a6a6af18c16f59f004b866cd2be1c14b36134a4a0"}, - {file = "numpy-1.26.4.tar.gz", hash = "sha256:2a02aba9ed12e4ac4eb3ea9421c420301a0c6460d9830d74a9df87efa4912010"}, + {file = "numpy-2.0.0-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:04494f6ec467ccb5369d1808570ae55f6ed9b5809d7f035059000a37b8d7e86f"}, + {file = "numpy-2.0.0-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:2635dbd200c2d6faf2ef9a0d04f0ecc6b13b3cad54f7c67c61155138835515d2"}, + {file = "numpy-2.0.0-cp310-cp310-macosx_14_0_arm64.whl", hash = "sha256:0a43f0974d501842866cc83471bdb0116ba0dffdbaac33ec05e6afed5b615238"}, + {file = "numpy-2.0.0-cp310-cp310-macosx_14_0_x86_64.whl", hash = "sha256:8d83bb187fb647643bd56e1ae43f273c7f4dbcdf94550d7938cfc32566756514"}, + {file = "numpy-2.0.0-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:79e843d186c8fb1b102bef3e2bc35ef81160ffef3194646a7fdd6a73c6b97196"}, + {file = "numpy-2.0.0-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:6d7696c615765091cc5093f76fd1fa069870304beaccfd58b5dcc69e55ef49c1"}, + {file = "numpy-2.0.0-cp310-cp310-musllinux_1_1_x86_64.whl", hash = 
"sha256:b4c76e3d4c56f145d41b7b6751255feefae92edbc9a61e1758a98204200f30fc"}, + {file = "numpy-2.0.0-cp310-cp310-musllinux_1_2_aarch64.whl", hash = "sha256:acd3a644e4807e73b4e1867b769fbf1ce8c5d80e7caaef0d90dcdc640dfc9787"}, + {file = "numpy-2.0.0-cp310-cp310-win32.whl", hash = "sha256:cee6cc0584f71adefe2c908856ccc98702baf95ff80092e4ca46061538a2ba98"}, + {file = "numpy-2.0.0-cp310-cp310-win_amd64.whl", hash = "sha256:ed08d2703b5972ec736451b818c2eb9da80d66c3e84aed1deeb0c345fefe461b"}, + {file = "numpy-2.0.0-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:ad0c86f3455fbd0de6c31a3056eb822fc939f81b1618f10ff3406971893b62a5"}, + {file = "numpy-2.0.0-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:e7f387600d424f91576af20518334df3d97bc76a300a755f9a8d6e4f5cadd289"}, + {file = "numpy-2.0.0-cp311-cp311-macosx_14_0_arm64.whl", hash = "sha256:34f003cb88b1ba38cb9a9a4a3161c1604973d7f9d5552c38bc2f04f829536609"}, + {file = "numpy-2.0.0-cp311-cp311-macosx_14_0_x86_64.whl", hash = "sha256:b6f6a8f45d0313db07d6d1d37bd0b112f887e1369758a5419c0370ba915b3871"}, + {file = "numpy-2.0.0-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:5f64641b42b2429f56ee08b4f427a4d2daf916ec59686061de751a55aafa22e4"}, + {file = "numpy-2.0.0-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:a7039a136017eaa92c1848152827e1424701532ca8e8967fe480fe1569dae581"}, + {file = "numpy-2.0.0-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:46e161722e0f619749d1cd892167039015b2c2817296104487cd03ed4a955995"}, + {file = "numpy-2.0.0-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:0e50842b2295ba8414c8c1d9d957083d5dfe9e16828b37de883f51fc53c4016f"}, + {file = "numpy-2.0.0-cp311-cp311-win32.whl", hash = "sha256:2ce46fd0b8a0c947ae047d222f7136fc4d55538741373107574271bc00e20e8f"}, + {file = "numpy-2.0.0-cp311-cp311-win_amd64.whl", hash = "sha256:fbd6acc766814ea6443628f4e6751d0da6593dae29c08c0b2606164db026970c"}, + {file = "numpy-2.0.0-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:354f373279768fa5a584bac997de6a6c9bc535c482592d7a813bb0c09be6c76f"}, + {file = "numpy-2.0.0-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:4d2f62e55a4cd9c58c1d9a1c9edaedcd857a73cb6fda875bf79093f9d9086f85"}, + {file = "numpy-2.0.0-cp312-cp312-macosx_14_0_arm64.whl", hash = "sha256:1e72728e7501a450288fc8e1f9ebc73d90cfd4671ebbd631f3e7857c39bd16f2"}, + {file = "numpy-2.0.0-cp312-cp312-macosx_14_0_x86_64.whl", hash = "sha256:84554fc53daa8f6abf8e8a66e076aff6ece62de68523d9f665f32d2fc50fd66e"}, + {file = "numpy-2.0.0-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:c73aafd1afca80afecb22718f8700b40ac7cab927b8abab3c3e337d70e10e5a2"}, + {file = "numpy-2.0.0-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:49d9f7d256fbc804391a7f72d4a617302b1afac1112fac19b6c6cec63fe7fe8a"}, + {file = "numpy-2.0.0-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:0ec84b9ba0654f3b962802edc91424331f423dcf5d5f926676e0150789cb3d95"}, + {file = "numpy-2.0.0-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:feff59f27338135776f6d4e2ec7aeeac5d5f7a08a83e80869121ef8164b74af9"}, + {file = "numpy-2.0.0-cp312-cp312-win32.whl", hash = "sha256:c5a59996dc61835133b56a32ebe4ef3740ea5bc19b3983ac60cc32be5a665d54"}, + {file = "numpy-2.0.0-cp312-cp312-win_amd64.whl", hash = "sha256:a356364941fb0593bb899a1076b92dfa2029f6f5b8ba88a14fd0984aaf76d0df"}, + {file = "numpy-2.0.0-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:e61155fae27570692ad1d327e81c6cf27d535a5d7ef97648a17d922224b216de"}, + 
{file = "numpy-2.0.0-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:4554eb96f0fd263041baf16cf0881b3f5dafae7a59b1049acb9540c4d57bc8cb"}, + {file = "numpy-2.0.0-cp39-cp39-macosx_14_0_arm64.whl", hash = "sha256:903703372d46bce88b6920a0cd86c3ad82dae2dbef157b5fc01b70ea1cfc430f"}, + {file = "numpy-2.0.0-cp39-cp39-macosx_14_0_x86_64.whl", hash = "sha256:3e8e01233d57639b2e30966c63d36fcea099d17c53bf424d77f088b0f4babd86"}, + {file = "numpy-2.0.0-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:1cde1753efe513705a0c6d28f5884e22bdc30438bf0085c5c486cdaff40cd67a"}, + {file = "numpy-2.0.0-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:821eedb7165ead9eebdb569986968b541f9908979c2da8a4967ecac4439bae3d"}, + {file = "numpy-2.0.0-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:9a1712c015831da583b21c5bfe15e8684137097969c6d22e8316ba66b5baabe4"}, + {file = "numpy-2.0.0-cp39-cp39-musllinux_1_2_aarch64.whl", hash = "sha256:9c27f0946a3536403efb0e1c28def1ae6730a72cd0d5878db38824855e3afc44"}, + {file = "numpy-2.0.0-cp39-cp39-win32.whl", hash = "sha256:63b92c512d9dbcc37f9d81b123dec99fdb318ba38c8059afc78086fe73820275"}, + {file = "numpy-2.0.0-cp39-cp39-win_amd64.whl", hash = "sha256:3f6bed7f840d44c08ebdb73b1825282b801799e325bcbdfa6bc5c370e5aecc65"}, + {file = "numpy-2.0.0-pp39-pypy39_pp73-macosx_10_9_x86_64.whl", hash = "sha256:9416a5c2e92ace094e9f0082c5fd473502c91651fb896bc17690d6fc475128d6"}, + {file = "numpy-2.0.0-pp39-pypy39_pp73-macosx_14_0_x86_64.whl", hash = "sha256:17067d097ed036636fa79f6a869ac26df7db1ba22039d962422506640314933a"}, + {file = "numpy-2.0.0-pp39-pypy39_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:38ecb5b0582cd125f67a629072fed6f83562d9dd04d7e03256c9829bdec027ad"}, + {file = "numpy-2.0.0-pp39-pypy39_pp73-win_amd64.whl", hash = "sha256:cef04d068f5fb0518a77857953193b6bb94809a806bd0a14983a8f12ada060c9"}, + {file = "numpy-2.0.0.tar.gz", hash = "sha256:cf5d1c9e6837f8af9f92b6bd3e86d513cdc11f60fd62185cc49ec7d1aba34864"}, ] [[package]] name = "packaging" -version = "24.0" +version = "24.1" description = "Core utilities for Python packages" optional = false -python-versions = ">=3.7" +python-versions = ">=3.8" files = [ - {file = "packaging-24.0-py3-none-any.whl", hash = "sha256:2ddfb553fdf02fb784c234c7ba6ccc288296ceabec964ad2eae3777778130bc5"}, - {file = "packaging-24.0.tar.gz", hash = "sha256:eb82c5e3e56209074766e6885bb04b8c38a0c015d0a30036ebe7ece34c9989e9"}, + {file = "packaging-24.1-py3-none-any.whl", hash = "sha256:5b8f2217dbdbd2f7f384c41c628544e6d52f2d0f53c6d0c3ea61aa5d1d7ff124"}, + {file = "packaging-24.1.tar.gz", hash = "sha256:026ed72c8ed3fcce5bf8950572258698927fd1dbda10a5e981cdf0ac37f4f002"}, ] [[package]] @@ -2108,17 +2082,6 @@ sql-other = ["SQLAlchemy (>=2.0.0)", "adbc-driver-postgresql (>=0.8.0)", "adbc-d test = ["hypothesis (>=6.46.1)", "pytest (>=7.3.2)", "pytest-xdist (>=2.2.0)"] xml = ["lxml (>=4.9.2)"] -[[package]] -name = "pandocfilters" -version = "1.5.1" -description = "Utilities for writing pandoc filters in python" -optional = false -python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*" -files = [ - {file = "pandocfilters-1.5.1-py2.py3-none-any.whl", hash = "sha256:93be382804a9cdb0a7267585f157e5d1731bbe5545a85b268d6f5fe6232de2bc"}, - {file = "pandocfilters-1.5.1.tar.gz", hash = "sha256:002b4a555ee4ebc03f8b66307e287fa492e4a77b4ea14d3f934328297bb4939e"}, -] - [[package]] name = "parso" version = "0.8.4" @@ -2267,13 +2230,13 @@ testing = ["pytest", "pytest-benchmark"] [[package]] name = 
"prompt-toolkit" -version = "3.0.43" +version = "3.0.47" description = "Library for building powerful interactive command lines in Python" optional = false python-versions = ">=3.7.0" files = [ - {file = "prompt_toolkit-3.0.43-py3-none-any.whl", hash = "sha256:a11a29cb3bf0a28a387fe5122cdb649816a957cd9261dcedf8c9f1fef33eacf6"}, - {file = "prompt_toolkit-3.0.43.tar.gz", hash = "sha256:3527b7af26106cbc65a040bcc84839a3566ec1b051bb0bfe953631e704b0ff7d"}, + {file = "prompt_toolkit-3.0.47-py3-none-any.whl", hash = "sha256:0d7bfa67001d5e39d02c224b663abc33687405033a8c422d0d675a5a13361d10"}, + {file = "prompt_toolkit-3.0.47.tar.gz", hash = "sha256:1e1b29cb58080b1e69f207c893a1a7bf16d127a5c30c9d17a25a5d77792e5360"}, ] [package.dependencies] @@ -2345,18 +2308,18 @@ files = [ [[package]] name = "pydantic" -version = "2.7.1" +version = "2.7.4" description = "Data validation using Python type hints" optional = false python-versions = ">=3.8" files = [ - {file = "pydantic-2.7.1-py3-none-any.whl", hash = "sha256:e029badca45266732a9a79898a15ae2e8b14840b1eabbb25844be28f0b33f3d5"}, - {file = "pydantic-2.7.1.tar.gz", hash = "sha256:e9dbb5eada8abe4d9ae5f46b9939aead650cd2b68f249bb3a8139dbe125803cc"}, + {file = "pydantic-2.7.4-py3-none-any.whl", hash = "sha256:ee8538d41ccb9c0a9ad3e0e5f07bf15ed8015b481ced539a1759d8cc89ae90d0"}, + {file = "pydantic-2.7.4.tar.gz", hash = "sha256:0c84efd9548d545f63ac0060c1e4d39bb9b14db8b3c0652338aecc07b5adec52"}, ] [package.dependencies] annotated-types = ">=0.4.0" -pydantic-core = "2.18.2" +pydantic-core = "2.18.4" typing-extensions = ">=4.6.1" [package.extras] @@ -2364,90 +2327,90 @@ email = ["email-validator (>=2.0.0)"] [[package]] name = "pydantic-core" -version = "2.18.2" +version = "2.18.4" description = "Core functionality for Pydantic validation and serialization" optional = false python-versions = ">=3.8" files = [ - {file = "pydantic_core-2.18.2-cp310-cp310-macosx_10_12_x86_64.whl", hash = "sha256:9e08e867b306f525802df7cd16c44ff5ebbe747ff0ca6cf3fde7f36c05a59a81"}, - {file = "pydantic_core-2.18.2-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:f0a21cbaa69900cbe1a2e7cad2aa74ac3cf21b10c3efb0fa0b80305274c0e8a2"}, - {file = "pydantic_core-2.18.2-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:0680b1f1f11fda801397de52c36ce38ef1c1dc841a0927a94f226dea29c3ae3d"}, - {file = "pydantic_core-2.18.2-cp310-cp310-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:95b9d5e72481d3780ba3442eac863eae92ae43a5f3adb5b4d0a1de89d42bb250"}, - {file = "pydantic_core-2.18.2-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:c4fcf5cd9c4b655ad666ca332b9a081112cd7a58a8b5a6ca7a3104bc950f2038"}, - {file = "pydantic_core-2.18.2-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:9b5155ff768083cb1d62f3e143b49a8a3432e6789a3abee8acd005c3c7af1c74"}, - {file = "pydantic_core-2.18.2-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:553ef617b6836fc7e4df130bb851e32fe357ce36336d897fd6646d6058d980af"}, - {file = "pydantic_core-2.18.2-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:b89ed9eb7d616ef5714e5590e6cf7f23b02d0d539767d33561e3675d6f9e3857"}, - {file = "pydantic_core-2.18.2-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:75f7e9488238e920ab6204399ded280dc4c307d034f3924cd7f90a38b1829563"}, - {file = "pydantic_core-2.18.2-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:ef26c9e94a8c04a1b2924149a9cb081836913818e55681722d7f29af88fe7b38"}, - {file = 
"pydantic_core-2.18.2-cp310-none-win32.whl", hash = "sha256:182245ff6b0039e82b6bb585ed55a64d7c81c560715d1bad0cbad6dfa07b4027"}, - {file = "pydantic_core-2.18.2-cp310-none-win_amd64.whl", hash = "sha256:e23ec367a948b6d812301afc1b13f8094ab7b2c280af66ef450efc357d2ae543"}, - {file = "pydantic_core-2.18.2-cp311-cp311-macosx_10_12_x86_64.whl", hash = "sha256:219da3f096d50a157f33645a1cf31c0ad1fe829a92181dd1311022f986e5fbe3"}, - {file = "pydantic_core-2.18.2-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:cc1cfd88a64e012b74e94cd00bbe0f9c6df57049c97f02bb07d39e9c852e19a4"}, - {file = "pydantic_core-2.18.2-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:05b7133a6e6aeb8df37d6f413f7705a37ab4031597f64ab56384c94d98fa0e90"}, - {file = "pydantic_core-2.18.2-cp311-cp311-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:224c421235f6102e8737032483f43c1a8cfb1d2f45740c44166219599358c2cd"}, - {file = "pydantic_core-2.18.2-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:b14d82cdb934e99dda6d9d60dc84a24379820176cc4a0d123f88df319ae9c150"}, - {file = "pydantic_core-2.18.2-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:2728b01246a3bba6de144f9e3115b532ee44bd6cf39795194fb75491824a1413"}, - {file = "pydantic_core-2.18.2-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:470b94480bb5ee929f5acba6995251ada5e059a5ef3e0dfc63cca287283ebfa6"}, - {file = "pydantic_core-2.18.2-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:997abc4df705d1295a42f95b4eec4950a37ad8ae46d913caeee117b6b198811c"}, - {file = "pydantic_core-2.18.2-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:75250dbc5290e3f1a0f4618db35e51a165186f9034eff158f3d490b3fed9f8a0"}, - {file = "pydantic_core-2.18.2-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:4456f2dca97c425231d7315737d45239b2b51a50dc2b6f0c2bb181fce6207664"}, - {file = "pydantic_core-2.18.2-cp311-none-win32.whl", hash = "sha256:269322dcc3d8bdb69f054681edff86276b2ff972447863cf34c8b860f5188e2e"}, - {file = "pydantic_core-2.18.2-cp311-none-win_amd64.whl", hash = "sha256:800d60565aec896f25bc3cfa56d2277d52d5182af08162f7954f938c06dc4ee3"}, - {file = "pydantic_core-2.18.2-cp311-none-win_arm64.whl", hash = "sha256:1404c69d6a676245199767ba4f633cce5f4ad4181f9d0ccb0577e1f66cf4c46d"}, - {file = "pydantic_core-2.18.2-cp312-cp312-macosx_10_12_x86_64.whl", hash = "sha256:fb2bd7be70c0fe4dfd32c951bc813d9fe6ebcbfdd15a07527796c8204bd36242"}, - {file = "pydantic_core-2.18.2-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:6132dd3bd52838acddca05a72aafb6eab6536aa145e923bb50f45e78b7251043"}, - {file = "pydantic_core-2.18.2-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:d7d904828195733c183d20a54230c0df0eb46ec746ea1a666730787353e87182"}, - {file = "pydantic_core-2.18.2-cp312-cp312-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:c9bd70772c720142be1020eac55f8143a34ec9f82d75a8e7a07852023e46617f"}, - {file = "pydantic_core-2.18.2-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:2b8ed04b3582771764538f7ee7001b02e1170223cf9b75dff0bc698fadb00cf3"}, - {file = "pydantic_core-2.18.2-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:e6dac87ddb34aaec85f873d737e9d06a3555a1cc1a8e0c44b7f8d5daeb89d86f"}, - {file = "pydantic_core-2.18.2-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:7ca4ae5a27ad7a4ee5170aebce1574b375de390bc01284f87b18d43a3984df72"}, - {file = 
"pydantic_core-2.18.2-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:886eec03591b7cf058467a70a87733b35f44707bd86cf64a615584fd72488b7c"}, - {file = "pydantic_core-2.18.2-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:ca7b0c1f1c983e064caa85f3792dd2fe3526b3505378874afa84baf662e12241"}, - {file = "pydantic_core-2.18.2-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:4b4356d3538c3649337df4074e81b85f0616b79731fe22dd11b99499b2ebbdf3"}, - {file = "pydantic_core-2.18.2-cp312-none-win32.whl", hash = "sha256:8b172601454f2d7701121bbec3425dd71efcb787a027edf49724c9cefc14c038"}, - {file = "pydantic_core-2.18.2-cp312-none-win_amd64.whl", hash = "sha256:b1bd7e47b1558ea872bd16c8502c414f9e90dcf12f1395129d7bb42a09a95438"}, - {file = "pydantic_core-2.18.2-cp312-none-win_arm64.whl", hash = "sha256:98758d627ff397e752bc339272c14c98199c613f922d4a384ddc07526c86a2ec"}, - {file = "pydantic_core-2.18.2-cp38-cp38-macosx_10_12_x86_64.whl", hash = "sha256:9fdad8e35f278b2c3eb77cbdc5c0a49dada440657bf738d6905ce106dc1de439"}, - {file = "pydantic_core-2.18.2-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:1d90c3265ae107f91a4f279f4d6f6f1d4907ac76c6868b27dc7fb33688cfb347"}, - {file = "pydantic_core-2.18.2-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:390193c770399861d8df9670fb0d1874f330c79caaca4642332df7c682bf6b91"}, - {file = "pydantic_core-2.18.2-cp38-cp38-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:82d5d4d78e4448683cb467897fe24e2b74bb7b973a541ea1dcfec1d3cbce39fb"}, - {file = "pydantic_core-2.18.2-cp38-cp38-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:4774f3184d2ef3e14e8693194f661dea5a4d6ca4e3dc8e39786d33a94865cefd"}, - {file = "pydantic_core-2.18.2-cp38-cp38-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:d4d938ec0adf5167cb335acb25a4ee69a8107e4984f8fbd2e897021d9e4ca21b"}, - {file = "pydantic_core-2.18.2-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:e0e8b1be28239fc64a88a8189d1df7fad8be8c1ae47fcc33e43d4be15f99cc70"}, - {file = "pydantic_core-2.18.2-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:868649da93e5a3d5eacc2b5b3b9235c98ccdbfd443832f31e075f54419e1b96b"}, - {file = "pydantic_core-2.18.2-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:78363590ef93d5d226ba21a90a03ea89a20738ee5b7da83d771d283fd8a56761"}, - {file = "pydantic_core-2.18.2-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:852e966fbd035a6468fc0a3496589b45e2208ec7ca95c26470a54daed82a0788"}, - {file = "pydantic_core-2.18.2-cp38-none-win32.whl", hash = "sha256:6a46e22a707e7ad4484ac9ee9f290f9d501df45954184e23fc29408dfad61350"}, - {file = "pydantic_core-2.18.2-cp38-none-win_amd64.whl", hash = "sha256:d91cb5ea8b11607cc757675051f61b3d93f15eca3cefb3e6c704a5d6e8440f4e"}, - {file = "pydantic_core-2.18.2-cp39-cp39-macosx_10_12_x86_64.whl", hash = "sha256:ae0a8a797a5e56c053610fa7be147993fe50960fa43609ff2a9552b0e07013e8"}, - {file = "pydantic_core-2.18.2-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:042473b6280246b1dbf530559246f6842b56119c2926d1e52b631bdc46075f2a"}, - {file = "pydantic_core-2.18.2-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:1a388a77e629b9ec814c1b1e6b3b595fe521d2cdc625fcca26fbc2d44c816804"}, - {file = "pydantic_core-2.18.2-cp39-cp39-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:e25add29b8f3b233ae90ccef2d902d0ae0432eb0d45370fe315d1a5cf231004b"}, - {file = "pydantic_core-2.18.2-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", 
hash = "sha256:f459a5ce8434614dfd39bbebf1041952ae01da6bed9855008cb33b875cb024c0"}, - {file = "pydantic_core-2.18.2-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:eff2de745698eb46eeb51193a9f41d67d834d50e424aef27df2fcdee1b153845"}, - {file = "pydantic_core-2.18.2-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:a8309f67285bdfe65c372ea3722b7a5642680f3dba538566340a9d36e920b5f0"}, - {file = "pydantic_core-2.18.2-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:f93a8a2e3938ff656a7c1bc57193b1319960ac015b6e87d76c76bf14fe0244b4"}, - {file = "pydantic_core-2.18.2-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:22057013c8c1e272eb8d0eebc796701167d8377441ec894a8fed1af64a0bf399"}, - {file = "pydantic_core-2.18.2-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:cfeecd1ac6cc1fb2692c3d5110781c965aabd4ec5d32799773ca7b1456ac636b"}, - {file = "pydantic_core-2.18.2-cp39-none-win32.whl", hash = "sha256:0d69b4c2f6bb3e130dba60d34c0845ba31b69babdd3f78f7c0c8fae5021a253e"}, - {file = "pydantic_core-2.18.2-cp39-none-win_amd64.whl", hash = "sha256:d9319e499827271b09b4e411905b24a426b8fb69464dfa1696258f53a3334641"}, - {file = "pydantic_core-2.18.2-pp310-pypy310_pp73-macosx_10_12_x86_64.whl", hash = "sha256:a1874c6dd4113308bd0eb568418e6114b252afe44319ead2b4081e9b9521fe75"}, - {file = "pydantic_core-2.18.2-pp310-pypy310_pp73-macosx_11_0_arm64.whl", hash = "sha256:ccdd111c03bfd3666bd2472b674c6899550e09e9f298954cfc896ab92b5b0e6d"}, - {file = "pydantic_core-2.18.2-pp310-pypy310_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:e18609ceaa6eed63753037fc06ebb16041d17d28199ae5aba0052c51449650a9"}, - {file = "pydantic_core-2.18.2-pp310-pypy310_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:6e5c584d357c4e2baf0ff7baf44f4994be121e16a2c88918a5817331fc7599d7"}, - {file = "pydantic_core-2.18.2-pp310-pypy310_pp73-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:43f0f463cf89ace478de71a318b1b4f05ebc456a9b9300d027b4b57c1a2064fb"}, - {file = "pydantic_core-2.18.2-pp310-pypy310_pp73-musllinux_1_1_aarch64.whl", hash = "sha256:e1b395e58b10b73b07b7cf740d728dd4ff9365ac46c18751bf8b3d8cca8f625a"}, - {file = "pydantic_core-2.18.2-pp310-pypy310_pp73-musllinux_1_1_x86_64.whl", hash = "sha256:0098300eebb1c837271d3d1a2cd2911e7c11b396eac9661655ee524a7f10587b"}, - {file = "pydantic_core-2.18.2-pp310-pypy310_pp73-win_amd64.whl", hash = "sha256:36789b70d613fbac0a25bb07ab3d9dba4d2e38af609c020cf4d888d165ee0bf3"}, - {file = "pydantic_core-2.18.2-pp39-pypy39_pp73-macosx_10_12_x86_64.whl", hash = "sha256:3f9a801e7c8f1ef8718da265bba008fa121243dfe37c1cea17840b0944dfd72c"}, - {file = "pydantic_core-2.18.2-pp39-pypy39_pp73-macosx_11_0_arm64.whl", hash = "sha256:3a6515ebc6e69d85502b4951d89131ca4e036078ea35533bb76327f8424531ce"}, - {file = "pydantic_core-2.18.2-pp39-pypy39_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:20aca1e2298c56ececfd8ed159ae4dde2df0781988c97ef77d5c16ff4bd5b400"}, - {file = "pydantic_core-2.18.2-pp39-pypy39_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:223ee893d77a310a0391dca6df00f70bbc2f36a71a895cecd9a0e762dc37b349"}, - {file = "pydantic_core-2.18.2-pp39-pypy39_pp73-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:2334ce8c673ee93a1d6a65bd90327588387ba073c17e61bf19b4fd97d688d63c"}, - {file = "pydantic_core-2.18.2-pp39-pypy39_pp73-musllinux_1_1_aarch64.whl", hash = "sha256:cbca948f2d14b09d20268cda7b0367723d79063f26c4ffc523af9042cad95592"}, - {file = 
"pydantic_core-2.18.2-pp39-pypy39_pp73-musllinux_1_1_x86_64.whl", hash = "sha256:b3ef08e20ec49e02d5c6717a91bb5af9b20f1805583cb0adfe9ba2c6b505b5ae"}, - {file = "pydantic_core-2.18.2-pp39-pypy39_pp73-win_amd64.whl", hash = "sha256:c6fdc8627910eed0c01aed6a390a252fe3ea6d472ee70fdde56273f198938374"}, - {file = "pydantic_core-2.18.2.tar.gz", hash = "sha256:2e29d20810dfc3043ee13ac7d9e25105799817683348823f305ab3f349b9386e"}, + {file = "pydantic_core-2.18.4-cp310-cp310-macosx_10_12_x86_64.whl", hash = "sha256:f76d0ad001edd426b92233d45c746fd08f467d56100fd8f30e9ace4b005266e4"}, + {file = "pydantic_core-2.18.4-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:59ff3e89f4eaf14050c8022011862df275b552caef8082e37b542b066ce1ff26"}, + {file = "pydantic_core-2.18.4-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:a55b5b16c839df1070bc113c1f7f94a0af4433fcfa1b41799ce7606e5c79ce0a"}, + {file = "pydantic_core-2.18.4-cp310-cp310-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:4d0dcc59664fcb8974b356fe0a18a672d6d7cf9f54746c05f43275fc48636851"}, + {file = "pydantic_core-2.18.4-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:8951eee36c57cd128f779e641e21eb40bc5073eb28b2d23f33eb0ef14ffb3f5d"}, + {file = "pydantic_core-2.18.4-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:4701b19f7e3a06ea655513f7938de6f108123bf7c86bbebb1196eb9bd35cf724"}, + {file = "pydantic_core-2.18.4-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:e00a3f196329e08e43d99b79b286d60ce46bed10f2280d25a1718399457e06be"}, + {file = "pydantic_core-2.18.4-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:97736815b9cc893b2b7f663628e63f436018b75f44854c8027040e05230eeddb"}, + {file = "pydantic_core-2.18.4-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:6891a2ae0e8692679c07728819b6e2b822fb30ca7445f67bbf6509b25a96332c"}, + {file = "pydantic_core-2.18.4-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:bc4ff9805858bd54d1a20efff925ccd89c9d2e7cf4986144b30802bf78091c3e"}, + {file = "pydantic_core-2.18.4-cp310-none-win32.whl", hash = "sha256:1b4de2e51bbcb61fdebd0ab86ef28062704f62c82bbf4addc4e37fa4b00b7cbc"}, + {file = "pydantic_core-2.18.4-cp310-none-win_amd64.whl", hash = "sha256:6a750aec7bf431517a9fd78cb93c97b9b0c496090fee84a47a0d23668976b4b0"}, + {file = "pydantic_core-2.18.4-cp311-cp311-macosx_10_12_x86_64.whl", hash = "sha256:942ba11e7dfb66dc70f9ae66b33452f51ac7bb90676da39a7345e99ffb55402d"}, + {file = "pydantic_core-2.18.4-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:b2ebef0e0b4454320274f5e83a41844c63438fdc874ea40a8b5b4ecb7693f1c4"}, + {file = "pydantic_core-2.18.4-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:a642295cd0c8df1b86fc3dced1d067874c353a188dc8e0f744626d49e9aa51c4"}, + {file = "pydantic_core-2.18.4-cp311-cp311-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:5f09baa656c904807e832cf9cce799c6460c450c4ad80803517032da0cd062e2"}, + {file = "pydantic_core-2.18.4-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:98906207f29bc2c459ff64fa007afd10a8c8ac080f7e4d5beff4c97086a3dabd"}, + {file = "pydantic_core-2.18.4-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:19894b95aacfa98e7cb093cd7881a0c76f55731efad31073db4521e2b6ff5b7d"}, + {file = "pydantic_core-2.18.4-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:0fbbdc827fe5e42e4d196c746b890b3d72876bdbf160b0eafe9f0334525119c8"}, + 
{file = "pydantic_core-2.18.4-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:f85d05aa0918283cf29a30b547b4df2fbb56b45b135f9e35b6807cb28bc47951"}, + {file = "pydantic_core-2.18.4-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:e85637bc8fe81ddb73fda9e56bab24560bdddfa98aa64f87aaa4e4b6730c23d2"}, + {file = "pydantic_core-2.18.4-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:2f5966897e5461f818e136b8451d0551a2e77259eb0f73a837027b47dc95dab9"}, + {file = "pydantic_core-2.18.4-cp311-none-win32.whl", hash = "sha256:44c7486a4228413c317952e9d89598bcdfb06399735e49e0f8df643e1ccd0558"}, + {file = "pydantic_core-2.18.4-cp311-none-win_amd64.whl", hash = "sha256:8a7164fe2005d03c64fd3b85649891cd4953a8de53107940bf272500ba8a788b"}, + {file = "pydantic_core-2.18.4-cp311-none-win_arm64.whl", hash = "sha256:4e99bc050fe65c450344421017f98298a97cefc18c53bb2f7b3531eb39bc7805"}, + {file = "pydantic_core-2.18.4-cp312-cp312-macosx_10_12_x86_64.whl", hash = "sha256:6f5c4d41b2771c730ea1c34e458e781b18cc668d194958e0112455fff4e402b2"}, + {file = "pydantic_core-2.18.4-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:2fdf2156aa3d017fddf8aea5adfba9f777db1d6022d392b682d2a8329e087cef"}, + {file = "pydantic_core-2.18.4-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:4748321b5078216070b151d5271ef3e7cc905ab170bbfd27d5c83ee3ec436695"}, + {file = "pydantic_core-2.18.4-cp312-cp312-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:847a35c4d58721c5dc3dba599878ebbdfd96784f3fb8bb2c356e123bdcd73f34"}, + {file = "pydantic_core-2.18.4-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:3c40d4eaad41f78e3bbda31b89edc46a3f3dc6e171bf0ecf097ff7a0ffff7cb1"}, + {file = "pydantic_core-2.18.4-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:21a5e440dbe315ab9825fcd459b8814bb92b27c974cbc23c3e8baa2b76890077"}, + {file = "pydantic_core-2.18.4-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:01dd777215e2aa86dfd664daed5957704b769e726626393438f9c87690ce78c3"}, + {file = "pydantic_core-2.18.4-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:4b06beb3b3f1479d32befd1f3079cc47b34fa2da62457cdf6c963393340b56e9"}, + {file = "pydantic_core-2.18.4-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:564d7922e4b13a16b98772441879fcdcbe82ff50daa622d681dd682175ea918c"}, + {file = "pydantic_core-2.18.4-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:0eb2a4f660fcd8e2b1c90ad566db2b98d7f3f4717c64fe0a83e0adb39766d5b8"}, + {file = "pydantic_core-2.18.4-cp312-none-win32.whl", hash = "sha256:8b8bab4c97248095ae0c4455b5a1cd1cdd96e4e4769306ab19dda135ea4cdb07"}, + {file = "pydantic_core-2.18.4-cp312-none-win_amd64.whl", hash = "sha256:14601cdb733d741b8958224030e2bfe21a4a881fb3dd6fbb21f071cabd48fa0a"}, + {file = "pydantic_core-2.18.4-cp312-none-win_arm64.whl", hash = "sha256:c1322d7dd74713dcc157a2b7898a564ab091ca6c58302d5c7b4c07296e3fd00f"}, + {file = "pydantic_core-2.18.4-cp38-cp38-macosx_10_12_x86_64.whl", hash = "sha256:823be1deb01793da05ecb0484d6c9e20baebb39bd42b5d72636ae9cf8350dbd2"}, + {file = "pydantic_core-2.18.4-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:ebef0dd9bf9b812bf75bda96743f2a6c5734a02092ae7f721c048d156d5fabae"}, + {file = "pydantic_core-2.18.4-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:ae1d6df168efb88d7d522664693607b80b4080be6750c913eefb77e34c12c71a"}, + {file = "pydantic_core-2.18.4-cp38-cp38-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash 
= "sha256:f9899c94762343f2cc2fc64c13e7cae4c3cc65cdfc87dd810a31654c9b7358cc"}, + {file = "pydantic_core-2.18.4-cp38-cp38-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:99457f184ad90235cfe8461c4d70ab7dd2680e28821c29eca00252ba90308c78"}, + {file = "pydantic_core-2.18.4-cp38-cp38-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:18f469a3d2a2fdafe99296a87e8a4c37748b5080a26b806a707f25a902c040a8"}, + {file = "pydantic_core-2.18.4-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:b7cdf28938ac6b8b49ae5e92f2735056a7ba99c9b110a474473fd71185c1af5d"}, + {file = "pydantic_core-2.18.4-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:938cb21650855054dc54dfd9120a851c974f95450f00683399006aa6e8abb057"}, + {file = "pydantic_core-2.18.4-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:44cd83ab6a51da80fb5adbd9560e26018e2ac7826f9626bc06ca3dc074cd198b"}, + {file = "pydantic_core-2.18.4-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:972658f4a72d02b8abfa2581d92d59f59897d2e9f7e708fdabe922f9087773af"}, + {file = "pydantic_core-2.18.4-cp38-none-win32.whl", hash = "sha256:1d886dc848e60cb7666f771e406acae54ab279b9f1e4143babc9c2258213daa2"}, + {file = "pydantic_core-2.18.4-cp38-none-win_amd64.whl", hash = "sha256:bb4462bd43c2460774914b8525f79b00f8f407c945d50881568f294c1d9b4443"}, + {file = "pydantic_core-2.18.4-cp39-cp39-macosx_10_12_x86_64.whl", hash = "sha256:44a688331d4a4e2129140a8118479443bd6f1905231138971372fcde37e43528"}, + {file = "pydantic_core-2.18.4-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:a2fdd81edd64342c85ac7cf2753ccae0b79bf2dfa063785503cb85a7d3593223"}, + {file = "pydantic_core-2.18.4-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:86110d7e1907ab36691f80b33eb2da87d780f4739ae773e5fc83fb272f88825f"}, + {file = "pydantic_core-2.18.4-cp39-cp39-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:46387e38bd641b3ee5ce247563b60c5ca098da9c56c75c157a05eaa0933ed154"}, + {file = "pydantic_core-2.18.4-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:123c3cec203e3f5ac7b000bd82235f1a3eced8665b63d18be751f115588fea30"}, + {file = "pydantic_core-2.18.4-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:dc1803ac5c32ec324c5261c7209e8f8ce88e83254c4e1aebdc8b0a39f9ddb443"}, + {file = "pydantic_core-2.18.4-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:53db086f9f6ab2b4061958d9c276d1dbe3690e8dd727d6abf2321d6cce37fa94"}, + {file = "pydantic_core-2.18.4-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:abc267fa9837245cc28ea6929f19fa335f3dc330a35d2e45509b6566dc18be23"}, + {file = "pydantic_core-2.18.4-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:a0d829524aaefdebccb869eed855e2d04c21d2d7479b6cada7ace5448416597b"}, + {file = "pydantic_core-2.18.4-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:509daade3b8649f80d4e5ff21aa5673e4ebe58590b25fe42fac5f0f52c6f034a"}, + {file = "pydantic_core-2.18.4-cp39-none-win32.whl", hash = "sha256:ca26a1e73c48cfc54c4a76ff78df3727b9d9f4ccc8dbee4ae3f73306a591676d"}, + {file = "pydantic_core-2.18.4-cp39-none-win_amd64.whl", hash = "sha256:c67598100338d5d985db1b3d21f3619ef392e185e71b8d52bceacc4a7771ea7e"}, + {file = "pydantic_core-2.18.4-pp310-pypy310_pp73-macosx_10_12_x86_64.whl", hash = "sha256:574d92eac874f7f4db0ca653514d823a0d22e2354359d0759e3f6a406db5d55d"}, + {file = "pydantic_core-2.18.4-pp310-pypy310_pp73-macosx_11_0_arm64.whl", hash = 
"sha256:1f4d26ceb5eb9eed4af91bebeae4b06c3fb28966ca3a8fb765208cf6b51102ab"}, + {file = "pydantic_core-2.18.4-pp310-pypy310_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:77450e6d20016ec41f43ca4a6c63e9fdde03f0ae3fe90e7c27bdbeaece8b1ed4"}, + {file = "pydantic_core-2.18.4-pp310-pypy310_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:d323a01da91851a4f17bf592faf46149c9169d68430b3146dcba2bb5e5719abc"}, + {file = "pydantic_core-2.18.4-pp310-pypy310_pp73-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:43d447dd2ae072a0065389092a231283f62d960030ecd27565672bd40746c507"}, + {file = "pydantic_core-2.18.4-pp310-pypy310_pp73-musllinux_1_1_aarch64.whl", hash = "sha256:578e24f761f3b425834f297b9935e1ce2e30f51400964ce4801002435a1b41ef"}, + {file = "pydantic_core-2.18.4-pp310-pypy310_pp73-musllinux_1_1_x86_64.whl", hash = "sha256:81b5efb2f126454586d0f40c4d834010979cb80785173d1586df845a632e4e6d"}, + {file = "pydantic_core-2.18.4-pp310-pypy310_pp73-win_amd64.whl", hash = "sha256:ab86ce7c8f9bea87b9d12c7f0af71102acbf5ecbc66c17796cff45dae54ef9a5"}, + {file = "pydantic_core-2.18.4-pp39-pypy39_pp73-macosx_10_12_x86_64.whl", hash = "sha256:90afc12421df2b1b4dcc975f814e21bc1754640d502a2fbcc6d41e77af5ec312"}, + {file = "pydantic_core-2.18.4-pp39-pypy39_pp73-macosx_11_0_arm64.whl", hash = "sha256:51991a89639a912c17bef4b45c87bd83593aee0437d8102556af4885811d59f5"}, + {file = "pydantic_core-2.18.4-pp39-pypy39_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:293afe532740370aba8c060882f7d26cfd00c94cae32fd2e212a3a6e3b7bc15e"}, + {file = "pydantic_core-2.18.4-pp39-pypy39_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:b48ece5bde2e768197a2d0f6e925f9d7e3e826f0ad2271120f8144a9db18d5c8"}, + {file = "pydantic_core-2.18.4-pp39-pypy39_pp73-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:eae237477a873ab46e8dd748e515c72c0c804fb380fbe6c85533c7de51f23a8f"}, + {file = "pydantic_core-2.18.4-pp39-pypy39_pp73-musllinux_1_1_aarch64.whl", hash = "sha256:834b5230b5dfc0c1ec37b2fda433b271cbbc0e507560b5d1588e2cc1148cf1ce"}, + {file = "pydantic_core-2.18.4-pp39-pypy39_pp73-musllinux_1_1_x86_64.whl", hash = "sha256:e858ac0a25074ba4bce653f9b5d0a85b7456eaddadc0ce82d3878c22489fa4ee"}, + {file = "pydantic_core-2.18.4-pp39-pypy39_pp73-win_amd64.whl", hash = "sha256:2fd41f6eff4c20778d717af1cc50eca52f5afe7805ee530a4fbd0bae284f16e9"}, + {file = "pydantic_core-2.18.4.tar.gz", hash = "sha256:ec3beeada09ff865c344ff3bc2f427f5e6c26401cc6113d77e372c3fdac73864"}, ] [package.dependencies] @@ -2522,13 +2485,13 @@ certifi = "*" [[package]] name = "pytest" -version = "8.2.0" +version = "8.2.2" description = "pytest: simple powerful testing with Python" optional = false python-versions = ">=3.8" files = [ - {file = "pytest-8.2.0-py3-none-any.whl", hash = "sha256:1733f0620f6cda4095bbf0d9ff8022486e91892245bb9e7d5542c018f612f233"}, - {file = "pytest-8.2.0.tar.gz", hash = "sha256:d507d4482197eac0ba2bae2e9babf0672eb333017bcedaa5fb1a3d42c1174b3f"}, + {file = "pytest-8.2.2-py3-none-any.whl", hash = "sha256:c434598117762e2bd304e526244f67bf66bbd7b5d6cf22138be51ff661980343"}, + {file = "pytest-8.2.2.tar.gz", hash = "sha256:de4bb8104e201939ccdc688b27a89a7be2079b22e2bd2b07f806b6ba71117977"}, ] [package.dependencies] @@ -2823,13 +2786,13 @@ rpds-py = ">=0.7.0" [[package]] name = "requests" -version = "2.31.0" +version = "2.32.3" description = "Python HTTP for Humans." 
optional = false -python-versions = ">=3.7" +python-versions = ">=3.8" files = [ - {file = "requests-2.31.0-py3-none-any.whl", hash = "sha256:58cd2187c01e70e6e26505bca751777aa9f2ee0b7f4300988b709f44e013003f"}, - {file = "requests-2.31.0.tar.gz", hash = "sha256:942c5a758f98d790eaed1a29cb6eefc7ffb0d1cf7af05c3d2791656dbd6ad1e1"}, + {file = "requests-2.32.3-py3-none-any.whl", hash = "sha256:70761cfe03c773ceb22aa2f671b4757976145175cdfca038c02654d061d6dcc6"}, + {file = "requests-2.32.3.tar.gz", hash = "sha256:55365417734eb18255590a9ff9eb97e9e1da868d4ccd6402399eaf68af20a760"}, ] [package.dependencies] @@ -2952,36 +2915,36 @@ files = [ [[package]] name = "scipy" -version = "1.13.0" +version = "1.13.1" description = "Fundamental algorithms for scientific computing in Python" optional = false python-versions = ">=3.9" files = [ - {file = "scipy-1.13.0-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:ba419578ab343a4e0a77c0ef82f088238a93eef141b2b8017e46149776dfad4d"}, - {file = "scipy-1.13.0-cp310-cp310-macosx_12_0_arm64.whl", hash = "sha256:22789b56a999265431c417d462e5b7f2b487e831ca7bef5edeb56efe4c93f86e"}, - {file = "scipy-1.13.0-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:05f1432ba070e90d42d7fd836462c50bf98bd08bed0aa616c359eed8a04e3922"}, - {file = "scipy-1.13.0-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:b8434f6f3fa49f631fae84afee424e2483289dfc30a47755b4b4e6b07b2633a4"}, - {file = "scipy-1.13.0-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:dcbb9ea49b0167de4167c40eeee6e167caeef11effb0670b554d10b1e693a8b9"}, - {file = "scipy-1.13.0-cp310-cp310-win_amd64.whl", hash = "sha256:1d2f7bb14c178f8b13ebae93f67e42b0a6b0fc50eba1cd8021c9b6e08e8fb1cd"}, - {file = "scipy-1.13.0-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:0fbcf8abaf5aa2dc8d6400566c1a727aed338b5fe880cde64907596a89d576fa"}, - {file = "scipy-1.13.0-cp311-cp311-macosx_12_0_arm64.whl", hash = "sha256:5e4a756355522eb60fcd61f8372ac2549073c8788f6114449b37e9e8104f15a5"}, - {file = "scipy-1.13.0-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:b5acd8e1dbd8dbe38d0004b1497019b2dbbc3d70691e65d69615f8a7292865d7"}, - {file = "scipy-1.13.0-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:9ff7dad5d24a8045d836671e082a490848e8639cabb3dbdacb29f943a678683d"}, - {file = "scipy-1.13.0-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:4dca18c3ffee287ddd3bc8f1dabaf45f5305c5afc9f8ab9cbfab855e70b2df5c"}, - {file = "scipy-1.13.0-cp311-cp311-win_amd64.whl", hash = "sha256:a2f471de4d01200718b2b8927f7d76b5d9bde18047ea0fa8bd15c5ba3f26a1d6"}, - {file = "scipy-1.13.0-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:d0de696f589681c2802f9090fff730c218f7c51ff49bf252b6a97ec4a5d19e8b"}, - {file = "scipy-1.13.0-cp312-cp312-macosx_12_0_arm64.whl", hash = "sha256:b2a3ff461ec4756b7e8e42e1c681077349a038f0686132d623fa404c0bee2551"}, - {file = "scipy-1.13.0-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:6bf9fe63e7a4bf01d3645b13ff2aa6dea023d38993f42aaac81a18b1bda7a82a"}, - {file = "scipy-1.13.0-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:1e7626dfd91cdea5714f343ce1176b6c4745155d234f1033584154f60ef1ff42"}, - {file = "scipy-1.13.0-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:109d391d720fcebf2fbe008621952b08e52907cf4c8c7efc7376822151820820"}, - {file = "scipy-1.13.0-cp312-cp312-win_amd64.whl", hash = "sha256:8930ae3ea371d6b91c203b1032b9600d69c568e537b7988a3073dfe4d4774f21"}, - 
{file = "scipy-1.13.0-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:5407708195cb38d70fd2d6bb04b1b9dd5c92297d86e9f9daae1576bd9e06f602"}, - {file = "scipy-1.13.0-cp39-cp39-macosx_12_0_arm64.whl", hash = "sha256:ac38c4c92951ac0f729c4c48c9e13eb3675d9986cc0c83943784d7390d540c78"}, - {file = "scipy-1.13.0-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:09c74543c4fbeb67af6ce457f6a6a28e5d3739a87f62412e4a16e46f164f0ae5"}, - {file = "scipy-1.13.0-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:28e286bf9ac422d6beb559bc61312c348ca9b0f0dae0d7c5afde7f722d6ea13d"}, - {file = "scipy-1.13.0-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:33fde20efc380bd23a78a4d26d59fc8704e9b5fd9b08841693eb46716ba13d86"}, - {file = "scipy-1.13.0-cp39-cp39-win_amd64.whl", hash = "sha256:45c08bec71d3546d606989ba6e7daa6f0992918171e2a6f7fbedfa7361c2de1e"}, - {file = "scipy-1.13.0.tar.gz", hash = "sha256:58569af537ea29d3f78e5abd18398459f195546bb3be23d16677fb26616cc11e"}, + {file = "scipy-1.13.1-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:20335853b85e9a49ff7572ab453794298bcf0354d8068c5f6775a0eabf350aca"}, + {file = "scipy-1.13.1-cp310-cp310-macosx_12_0_arm64.whl", hash = "sha256:d605e9c23906d1994f55ace80e0125c587f96c020037ea6aa98d01b4bd2e222f"}, + {file = "scipy-1.13.1-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:cfa31f1def5c819b19ecc3a8b52d28ffdcc7ed52bb20c9a7589669dd3c250989"}, + {file = "scipy-1.13.1-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:f26264b282b9da0952a024ae34710c2aff7d27480ee91a2e82b7b7073c24722f"}, + {file = "scipy-1.13.1-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:eccfa1906eacc02de42d70ef4aecea45415f5be17e72b61bafcfd329bdc52e94"}, + {file = "scipy-1.13.1-cp310-cp310-win_amd64.whl", hash = "sha256:2831f0dc9c5ea9edd6e51e6e769b655f08ec6db6e2e10f86ef39bd32eb11da54"}, + {file = "scipy-1.13.1-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:27e52b09c0d3a1d5b63e1105f24177e544a222b43611aaf5bc44d4a0979e32f9"}, + {file = "scipy-1.13.1-cp311-cp311-macosx_12_0_arm64.whl", hash = "sha256:54f430b00f0133e2224c3ba42b805bfd0086fe488835effa33fa291561932326"}, + {file = "scipy-1.13.1-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:e89369d27f9e7b0884ae559a3a956e77c02114cc60a6058b4e5011572eea9299"}, + {file = "scipy-1.13.1-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:a78b4b3345f1b6f68a763c6e25c0c9a23a9fd0f39f5f3d200efe8feda560a5fa"}, + {file = "scipy-1.13.1-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:45484bee6d65633752c490404513b9ef02475b4284c4cfab0ef946def50b3f59"}, + {file = "scipy-1.13.1-cp311-cp311-win_amd64.whl", hash = "sha256:5713f62f781eebd8d597eb3f88b8bf9274e79eeabf63afb4a737abc6c84ad37b"}, + {file = "scipy-1.13.1-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:5d72782f39716b2b3509cd7c33cdc08c96f2f4d2b06d51e52fb45a19ca0c86a1"}, + {file = "scipy-1.13.1-cp312-cp312-macosx_12_0_arm64.whl", hash = "sha256:017367484ce5498445aade74b1d5ab377acdc65e27095155e448c88497755a5d"}, + {file = "scipy-1.13.1-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:949ae67db5fa78a86e8fa644b9a6b07252f449dcf74247108c50e1d20d2b4627"}, + {file = "scipy-1.13.1-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:de3ade0e53bc1f21358aa74ff4830235d716211d7d077e340c7349bc3542e884"}, + {file = "scipy-1.13.1-cp312-cp312-musllinux_1_1_x86_64.whl", hash = 
"sha256:2ac65fb503dad64218c228e2dc2d0a0193f7904747db43014645ae139c8fad16"}, + {file = "scipy-1.13.1-cp312-cp312-win_amd64.whl", hash = "sha256:cdd7dacfb95fea358916410ec61bbc20440f7860333aee6d882bb8046264e949"}, + {file = "scipy-1.13.1-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:436bbb42a94a8aeef855d755ce5a465479c721e9d684de76bf61a62e7c2b81d5"}, + {file = "scipy-1.13.1-cp39-cp39-macosx_12_0_arm64.whl", hash = "sha256:8335549ebbca860c52bf3d02f80784e91a004b71b059e3eea9678ba994796a24"}, + {file = "scipy-1.13.1-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:d533654b7d221a6a97304ab63c41c96473ff04459e404b83275b60aa8f4b7004"}, + {file = "scipy-1.13.1-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:637e98dcf185ba7f8e663e122ebf908c4702420477ae52a04f9908707456ba4d"}, + {file = "scipy-1.13.1-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:a014c2b3697bde71724244f63de2476925596c24285c7a637364761f8710891c"}, + {file = "scipy-1.13.1-cp39-cp39-win_amd64.whl", hash = "sha256:392e4ec766654852c25ebad4f64e4e584cf19820b980bc04960bca0b0cd6eaa2"}, + {file = "scipy-1.13.1.tar.gz", hash = "sha256:095a87a0312b08dfd6a6155cbbd310a8c51800fc931b8c0b84003014b874ed3c"}, ] [package.dependencies] @@ -2994,19 +2957,18 @@ test = ["array-api-strict", "asv", "gmpy2", "hypothesis (>=6.30)", "mpmath", "po [[package]] name = "setuptools" -version = "69.5.1" +version = "70.0.0" description = "Easily download, build, install, upgrade, and uninstall Python packages" optional = false python-versions = ">=3.8" files = [ - {file = "setuptools-69.5.1-py3-none-any.whl", hash = "sha256:c636ac361bc47580504644275c9ad802c50415c7522212252c033bd15f301f32"}, - {file = "setuptools-69.5.1.tar.gz", hash = "sha256:6c1fccdac05a97e598fb0ae3bbed5904ccb317337a51139dcd51453611bbb987"}, + {file = "setuptools-70.0.0-py3-none-any.whl", hash = "sha256:54faa7f2e8d2d11bcd2c07bed282eef1046b5c080d1c32add737d7b5817b1ad4"}, + {file = "setuptools-70.0.0.tar.gz", hash = "sha256:f211a66637b8fa059bb28183da127d4e86396c991a942b028c6650d4319c3fd0"}, ] [package.extras] -docs = ["furo", "jaraco.packaging (>=9.3)", "jaraco.tidelift (>=1.4)", "pygments-github-lexers (==0.0.5)", "rst.linker (>=1.9)", "sphinx (>=3.5)", "sphinx-favicon", "sphinx-inline-tabs", "sphinx-lint", "sphinx-notfound-page (>=1,<2)", "sphinx-reredirects", "sphinxcontrib-towncrier"] -testing = ["build[virtualenv]", "filelock (>=3.4.0)", "importlib-metadata", "ini2toml[lite] (>=0.9)", "jaraco.develop (>=7.21)", "jaraco.envs (>=2.2)", "jaraco.path (>=3.2.0)", "mypy (==1.9)", "packaging (>=23.2)", "pip (>=19.1)", "pytest (>=6,!=8.1.1)", "pytest-checkdocs (>=2.4)", "pytest-cov", "pytest-enabler (>=2.2)", "pytest-home (>=0.5)", "pytest-mypy", "pytest-perf", "pytest-ruff (>=0.2.1)", "pytest-timeout", "pytest-xdist (>=3)", "tomli", "tomli-w (>=1.0.0)", "virtualenv (>=13.0.0)", "wheel"] -testing-integration = ["build[virtualenv] (>=1.0.3)", "filelock (>=3.4.0)", "jaraco.envs (>=2.2)", "jaraco.path (>=3.2.0)", "packaging (>=23.2)", "pytest", "pytest-enabler", "pytest-xdist", "tomli", "virtualenv (>=13.0.0)", "wheel"] +docs = ["furo", "jaraco.packaging (>=9.3)", "jaraco.tidelift (>=1.4)", "pygments-github-lexers (==0.0.5)", "pyproject-hooks (!=1.1)", "rst.linker (>=1.9)", "sphinx (>=3.5)", "sphinx-favicon", "sphinx-inline-tabs", "sphinx-lint", "sphinx-notfound-page (>=1,<2)", "sphinx-reredirects", "sphinxcontrib-towncrier"] +testing = ["build[virtualenv] (>=1.0.3)", "filelock (>=3.4.0)", "importlib-metadata", "ini2toml[lite] (>=0.14)", "jaraco.develop 
(>=7.21)", "jaraco.envs (>=2.2)", "jaraco.path (>=3.2.0)", "mypy (==1.9)", "packaging (>=23.2)", "pip (>=19.1)", "pyproject-hooks (!=1.1)", "pytest (>=6,!=8.1.1)", "pytest-checkdocs (>=2.4)", "pytest-cov", "pytest-enabler (>=2.2)", "pytest-home (>=0.5)", "pytest-mypy", "pytest-perf", "pytest-ruff (>=0.2.1)", "pytest-subprocess", "pytest-timeout", "pytest-xdist (>=3)", "tomli", "tomli-w (>=1.0.0)", "virtualenv (>=13.0.0)", "wheel"] [[package]] name = "shapely" @@ -3291,37 +3253,19 @@ tests = ["cython", "littleutils", "pygments", "pytest", "typeguard"] [[package]] name = "tenacity" -version = "8.3.0" +version = "8.4.1" description = "Retry code until it succeeds" optional = false python-versions = ">=3.8" files = [ - {file = "tenacity-8.3.0-py3-none-any.whl", hash = "sha256:3649f6443dbc0d9b01b9d8020a9c4ec7a1ff5f6f3c6c8a036ef371f573fe9185"}, - {file = "tenacity-8.3.0.tar.gz", hash = "sha256:953d4e6ad24357bceffbc9707bc74349aca9d245f68eb65419cf0c249a1949a2"}, + {file = "tenacity-8.4.1-py3-none-any.whl", hash = "sha256:28522e692eda3e1b8f5e99c51464efcc0b9fc86933da92415168bc1c4e2308fa"}, + {file = "tenacity-8.4.1.tar.gz", hash = "sha256:54b1412b878ddf7e1f1577cd49527bad8cdef32421bd599beac0c6c3f10582fd"}, ] [package.extras] doc = ["reno", "sphinx"] test = ["pytest", "tornado (>=4.5)", "typeguard"] -[[package]] -name = "tinycss2" -version = "1.3.0" -description = "A tiny CSS parser" -optional = false -python-versions = ">=3.8" -files = [ - {file = "tinycss2-1.3.0-py3-none-any.whl", hash = "sha256:54a8dbdffb334d536851be0226030e9505965bb2f30f21a4a82c55fb2a80fae7"}, - {file = "tinycss2-1.3.0.tar.gz", hash = "sha256:152f9acabd296a8375fbca5b84c961ff95971fcfc32e79550c8df8e29118c54d"}, -] - -[package.dependencies] -webencodings = ">=0.4" - -[package.extras] -doc = ["sphinx", "sphinx_rtd_theme"] -test = ["pytest", "ruff"] - [[package]] name = "toml" version = "0.10.2" @@ -3346,22 +3290,22 @@ files = [ [[package]] name = "tornado" -version = "6.4" +version = "6.4.1" description = "Tornado is a Python web framework and asynchronous networking library, originally developed at FriendFeed." 
optional = false -python-versions = ">= 3.8" +python-versions = ">=3.8" files = [ - {file = "tornado-6.4-cp38-abi3-macosx_10_9_universal2.whl", hash = "sha256:02ccefc7d8211e5a7f9e8bc3f9e5b0ad6262ba2fbb683a6443ecc804e5224ce0"}, - {file = "tornado-6.4-cp38-abi3-macosx_10_9_x86_64.whl", hash = "sha256:27787de946a9cffd63ce5814c33f734c627a87072ec7eed71f7fc4417bb16263"}, - {file = "tornado-6.4-cp38-abi3-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:f7894c581ecdcf91666a0912f18ce5e757213999e183ebfc2c3fdbf4d5bd764e"}, - {file = "tornado-6.4-cp38-abi3-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:e43bc2e5370a6a8e413e1e1cd0c91bedc5bd62a74a532371042a18ef19e10579"}, - {file = "tornado-6.4-cp38-abi3-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:f0251554cdd50b4b44362f73ad5ba7126fc5b2c2895cc62b14a1c2d7ea32f212"}, - {file = "tornado-6.4-cp38-abi3-musllinux_1_1_aarch64.whl", hash = "sha256:fd03192e287fbd0899dd8f81c6fb9cbbc69194d2074b38f384cb6fa72b80e9c2"}, - {file = "tornado-6.4-cp38-abi3-musllinux_1_1_i686.whl", hash = "sha256:88b84956273fbd73420e6d4b8d5ccbe913c65d31351b4c004ae362eba06e1f78"}, - {file = "tornado-6.4-cp38-abi3-musllinux_1_1_x86_64.whl", hash = "sha256:71ddfc23a0e03ef2df1c1397d859868d158c8276a0603b96cf86892bff58149f"}, - {file = "tornado-6.4-cp38-abi3-win32.whl", hash = "sha256:6f8a6c77900f5ae93d8b4ae1196472d0ccc2775cc1dfdc9e7727889145c45052"}, - {file = "tornado-6.4-cp38-abi3-win_amd64.whl", hash = "sha256:10aeaa8006333433da48dec9fe417877f8bcc21f48dda8d661ae79da357b2a63"}, - {file = "tornado-6.4.tar.gz", hash = "sha256:72291fa6e6bc84e626589f1c29d90a5a6d593ef5ae68052ee2ef000dfd273dee"}, + {file = "tornado-6.4.1-cp38-abi3-macosx_10_9_universal2.whl", hash = "sha256:163b0aafc8e23d8cdc3c9dfb24c5368af84a81e3364745ccb4427669bf84aec8"}, + {file = "tornado-6.4.1-cp38-abi3-macosx_10_9_x86_64.whl", hash = "sha256:6d5ce3437e18a2b66fbadb183c1d3364fb03f2be71299e7d10dbeeb69f4b2a14"}, + {file = "tornado-6.4.1-cp38-abi3-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:e2e20b9113cd7293f164dc46fffb13535266e713cdb87bd2d15ddb336e96cfc4"}, + {file = "tornado-6.4.1-cp38-abi3-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:8ae50a504a740365267b2a8d1a90c9fbc86b780a39170feca9bcc1787ff80842"}, + {file = "tornado-6.4.1-cp38-abi3-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:613bf4ddf5c7a95509218b149b555621497a6cc0d46ac341b30bd9ec19eac7f3"}, + {file = "tornado-6.4.1-cp38-abi3-musllinux_1_2_aarch64.whl", hash = "sha256:25486eb223babe3eed4b8aecbac33b37e3dd6d776bc730ca14e1bf93888b979f"}, + {file = "tornado-6.4.1-cp38-abi3-musllinux_1_2_i686.whl", hash = "sha256:454db8a7ecfcf2ff6042dde58404164d969b6f5d58b926da15e6b23817950fc4"}, + {file = "tornado-6.4.1-cp38-abi3-musllinux_1_2_x86_64.whl", hash = "sha256:a02a08cc7a9314b006f653ce40483b9b3c12cda222d6a46d4ac63bb6c9057698"}, + {file = "tornado-6.4.1-cp38-abi3-win32.whl", hash = "sha256:d9a566c40b89757c9aa8e6f032bcdb8ca8795d7c1a9762910c722b1635c9de4d"}, + {file = "tornado-6.4.1-cp38-abi3-win_amd64.whl", hash = "sha256:b24b8982ed444378d7f21d563f4180a2de31ced9d8d84443907a0a64da2072e7"}, + {file = "tornado-6.4.1.tar.gz", hash = "sha256:92d3ab53183d8c50f8204a51e6f91d18a15d5ef261e84d452800d4ff6fc504e9"}, ] [[package]] @@ -3377,6 +3321,7 @@ files = [ [package.dependencies] colorama = {version = "*", markers = "platform_system == 
\"Windows\""} +ipywidgets = {version = ">=6", optional = true, markers = "extra == \"notebook\""} [package.extras] dev = ["pytest (>=6)", "pytest-cov", "pytest-timeout", "pytest-xdist"] @@ -3437,13 +3382,13 @@ files = [ [[package]] name = "typing-extensions" -version = "4.11.0" +version = "4.12.2" description = "Backported and Experimental Type Hints for Python 3.8+" optional = false python-versions = ">=3.8" files = [ - {file = "typing_extensions-4.11.0-py3-none-any.whl", hash = "sha256:c1f94d72897edaf4ce775bb7558d5b79d8126906a14ea5ed1635921406c0387a"}, - {file = "typing_extensions-4.11.0.tar.gz", hash = "sha256:83f085bd5ca59c80295fc2a82ab5dac679cbe02b9f33f7d83af68e241bea51b0"}, + {file = "typing_extensions-4.12.2-py3-none-any.whl", hash = "sha256:04e5ca0351e0f3f85c6853954072df659d0d13fac324d0072316b67d7794700d"}, + {file = "typing_extensions-4.12.2.tar.gz", hash = "sha256:1a7ead55c7e559dd4dee8856e3a88b41225abfe1ce8df57b7c13915fe121ffb8"}, ] [[package]] @@ -3514,6 +3459,17 @@ files = [ {file = "webencodings-0.5.1.tar.gz", hash = "sha256:b36a1c245f2d304965eb4e0a82848379241dc04b865afcc4aab16748587e1923"}, ] +[[package]] +name = "widgetsnbextension" +version = "4.0.11" +description = "Jupyter interactive widgets for Jupyter Notebook" +optional = false +python-versions = ">=3.7" +files = [ + {file = "widgetsnbextension-4.0.11-py3-none-any.whl", hash = "sha256:55d4d6949d100e0d08b94948a42efc3ed6dfdc0e9468b2c4b128c9a2ce3a7a36"}, + {file = "widgetsnbextension-4.0.11.tar.gz", hash = "sha256:8b22a8f1910bfd188e596fe7fc05dcbd87e810c8a4ba010bdb3da86637398474"}, +] + [[package]] name = "wrapt" version = "1.16.0" @@ -3595,13 +3551,13 @@ files = [ [[package]] name = "xarray" -version = "2024.5.0" +version = "2024.6.0" description = "N-D labeled arrays and datasets in Python" optional = false python-versions = ">=3.9" files = [ - {file = "xarray-2024.5.0-py3-none-any.whl", hash = "sha256:7ddedfe2294a0ab00f02d0fbdcb9c6300ec589f3cf436a9c7b7b577a12cd9bcf"}, - {file = "xarray-2024.5.0.tar.gz", hash = "sha256:e0eb1cb265f265126795f388ed9591f3c752f2aca491f6c0576711fd15b708f2"}, + {file = "xarray-2024.6.0-py3-none-any.whl", hash = "sha256:721a7394e8ec3d592b2d8ebe21eed074ac077dc1bb1bd777ce00e41700b4866c"}, + {file = "xarray-2024.6.0.tar.gz", hash = "sha256:0b91e0bc4dc0296947947640fe31ec6e867ce258d2f7cbc10bedf4a6d68340c7"}, ] [package.dependencies] @@ -3722,20 +3678,20 @@ multidict = ">=4.0" [[package]] name = "zipp" -version = "3.18.1" +version = "3.19.2" description = "Backport of pathlib-compatible object wrapper for zip files" optional = false python-versions = ">=3.8" files = [ - {file = "zipp-3.18.1-py3-none-any.whl", hash = "sha256:206f5a15f2af3dbaee80769fb7dc6f249695e940acca08dfb2a4769fe61e538b"}, - {file = "zipp-3.18.1.tar.gz", hash = "sha256:2884ed22e7d8961de1c9a05142eb69a247f120291bc0206a00a7642f09b5b715"}, + {file = "zipp-3.19.2-py3-none-any.whl", hash = "sha256:f091755f667055f2d02b32c53771a7a6c8b47e1fdbc4b72a8b9072b3eef8015c"}, + {file = "zipp-3.19.2.tar.gz", hash = "sha256:bf1dcf6450f873a13e952a29504887c89e6de7506209e5b1bcc3460135d4de19"}, ] [package.extras] -docs = ["furo", "jaraco.packaging (>=9.3)", "jaraco.tidelift (>=1.4)", "rst.linker (>=1.9)", "sphinx (>=3.5)", "sphinx-lint"] -testing = ["big-O", "jaraco.functools", "jaraco.itertools", "more-itertools", "pytest (>=6)", "pytest-checkdocs (>=2.4)", "pytest-cov", "pytest-enabler (>=2.2)", "pytest-ignore-flaky", "pytest-mypy", "pytest-ruff (>=0.2.1)"] +doc = ["furo", "jaraco.packaging (>=9.3)", "jaraco.tidelift (>=1.4)", "rst.linker (>=1.9)", 
"sphinx (>=3.5)", "sphinx-lint"] +test = ["big-O", "importlib-resources", "jaraco.functools", "jaraco.itertools", "jaraco.test", "more-itertools", "pytest (>=6,!=8.1.*)", "pytest-checkdocs (>=2.4)", "pytest-cov", "pytest-enabler (>=2.2)", "pytest-ignore-flaky", "pytest-mypy", "pytest-ruff (>=0.2.1)"] [metadata] lock-version = "2.0" python-versions = ">=3.9, <4.0" -content-hash = "0bbdf70240eb6c9c4252770db75516f2c4c65cf8f3c0c0ff08b24be40728a075" +content-hash = "f3782e345261011758eb7dd790b4475cd697195e4b2abc7c016cc42251381cfd" diff --git a/pyproject.toml b/pyproject.toml index 7a365ef..ed357cc 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -44,7 +44,7 @@ pandas = "*" pydantic = ">2" requests = "*" Shapely = "*" -tenacity = "*" +tenacity = "*, !=8.4.0" # https://github.com/jd/tenacity/issues/471 tqdm = "*" typing-extensions = "*" xarray = "*" @@ -53,7 +53,7 @@ xarray = "*" covdefaults = "*" coverage = {version = "*", extras = ["toml"]} mypy = ">=1" -nbconvert = "*" +nbmake = "*" pytest = "*" pytest-cov = ">=3.0" pytest-recording = "*" @@ -75,6 +75,7 @@ toml = "*" ipykernel = "*" matplotlib = "*" scipy = "*" +tqdm = {version = "*", extras = ["notebook"]} [tool.poetry-dynamic-versioning] enable = true diff --git a/requirements/requirements-dev.txt b/requirements/requirements-dev.txt index cf276f5..2e7f0fc 100644 --- a/requirements/requirements-dev.txt +++ b/requirements/requirements-dev.txt @@ -1,13 +1,12 @@ alabaster==0.7.16 ; python_version >= "3.9" and python_version < "4.0" -annotated-types==0.6.0 ; python_version >= "3.9" and python_version < "4.0" -anyio==4.3.0 ; python_version >= "3.9" and python_version < "4.0" +annotated-types==0.7.0 ; python_version >= "3.9" and python_version < "4.0" +anyio==4.4.0 ; python_version >= "3.9" and python_version < "4.0" appnope==0.1.4 ; python_version >= "3.9" and python_version < "4.0" and platform_system == "Darwin" asttokens==2.4.1 ; python_version >= "3.9" and python_version < "4.0" attrs==23.2.0 ; python_version >= "3.9" and python_version < "4.0" babel==2.15.0 ; python_version >= "3.9" and python_version < "4.0" beautifulsoup4==4.12.3 ; python_version >= "3.9" and python_version < "4.0" -bleach==6.1.0 ; python_version >= "3.9" and python_version < "4.0" -certifi==2024.2.2 ; python_version >= "3.9" and python_version < "4.0" +certifi==2024.6.2 ; python_version >= "3.9" and python_version < "4.0" cffi==1.16.0 ; python_version >= "3.9" and python_version < "4.0" and implementation_name == "pypy" charset-normalizer==3.3.2 ; python_version >= "3.9" and python_version < "4.0" click-plugins==1.1.1 ; python_version >= "3.9" and python_version < "4.0" @@ -17,24 +16,22 @@ colorama==0.4.6 ; python_version >= "3.9" and python_version < "4.0" and (platfo comm==0.2.2 ; python_version >= "3.9" and python_version < "4.0" contourpy==1.2.1 ; python_version >= "3.9" and python_version < "4.0" covdefaults==2.3.0 ; python_version >= "3.9" and python_version < "4.0" -coverage==7.5.1 ; python_version >= "3.9" and python_version < "4.0" -coverage[toml]==7.5.1 ; python_version >= "3.9" and python_version < "4.0" +coverage==7.5.3 ; python_version >= "3.9" and python_version < "4.0" +coverage[toml]==7.5.3 ; python_version >= "3.9" and python_version < "4.0" cycler==0.12.1 ; python_version >= "3.9" and python_version < "4.0" -dataretrieval==1.0.8 ; python_version >= "3.9" and python_version < "4.0" +dataretrieval==1.0.9 ; python_version >= "3.9" and python_version < "4.0" debugpy==1.8.1 ; python_version >= "3.9" and python_version < "4.0" decorator==5.1.1 ; 
python_version >= "3.9" and python_version < "4.0" -defusedxml==0.7.1 ; python_version >= "3.9" and python_version < "4.0" deprecated==1.2.14 ; python_version >= "3.9" and python_version < "4.0" docutils==0.21.2 ; python_version >= "3.9" and python_version < "4.0" dunamai==1.21.1 ; python_version >= "3.9" and python_version < "4.0" -entrypoints==0.4 ; python_version >= "3.9" and python_version < "4.0" erddapy==2.2.0 ; python_version >= "3.9" and python_version < "4.0" exceptiongroup==1.2.1 ; python_version >= "3.9" and python_version < "4.0" execnet==2.1.1 ; python_version >= "3.9" and python_version < "4.0" executing==2.0.1 ; python_version >= "3.9" and python_version < "4.0" -fastjsonschema==2.19.1 ; python_version >= "3.9" and python_version < "4.0" +fastjsonschema==2.20.0 ; python_version >= "3.9" and python_version < "4.0" fiona==1.9.6 ; python_version >= "3.9" and python_version < "4.0" -fonttools==4.51.0 ; python_version >= "3.9" and python_version < "4.0" +fonttools==4.53.0 ; python_version >= "3.9" and python_version < "4.0" furo==2024.5.6 ; python_version >= "3.9" and python_version < "4.0" geopandas==0.14.4 ; python_version >= "3.9" and python_version < "4.0" h11==0.14.0 ; python_version >= "3.9" and python_version < "4.0" @@ -48,13 +45,14 @@ importlib-resources==6.4.0 ; python_version >= "3.9" and python_version < "4.0" iniconfig==2.0.0 ; python_version >= "3.9" and python_version < "4.0" ipykernel==6.29.4 ; python_version >= "3.9" and python_version < "4.0" ipython==8.18.1 ; python_version >= "3.9" and python_version < "4.0" +ipywidgets==8.1.3 ; python_version >= "3.9" and python_version < "4.0" jedi==0.19.1 ; python_version >= "3.9" and python_version < "4.0" jinja2==3.1.4 ; python_version >= "3.9" and python_version < "4.0" jsonschema-specifications==2023.12.1 ; python_version >= "3.9" and python_version < "4.0" jsonschema==4.22.0 ; python_version >= "3.9" and python_version < "4.0" -jupyter-client==8.6.1 ; python_version >= "3.9" and python_version < "4.0" +jupyter-client==8.6.2 ; python_version >= "3.9" and python_version < "4.0" jupyter-core==5.7.2 ; python_version >= "3.9" and python_version < "4.0" -jupyterlab-pygments==0.3.0 ; python_version >= "3.9" and python_version < "4.0" +jupyterlab-widgets==3.0.11 ; python_version >= "3.9" and python_version < "4.0" kiwisolver==1.4.5 ; python_version >= "3.9" and python_version < "4.0" limits==3.12.0 ; python_version >= "3.9" and python_version < "4.0" lxml-html-clean==0.1.1 ; python_version >= "3.9" and python_version < "4.0" @@ -63,49 +61,48 @@ lxml[html-clean]==5.2.2 ; python_version >= "3.9" and python_version < "4.0" m2r2==0.3.3.post2 ; python_version >= "3.9" and python_version < "4.0" markupsafe==2.1.5 ; python_version >= "3.9" and python_version < "4.0" matplotlib-inline==0.1.7 ; python_version >= "3.9" and python_version < "4.0" -matplotlib==3.8.4 ; python_version >= "3.9" and python_version < "4.0" +matplotlib==3.9.0 ; python_version >= "3.9" and python_version < "4.0" mistune==0.8.4 ; python_version >= "3.9" and python_version < "4.0" multidict==6.0.5 ; python_version >= "3.9" and python_version < "4.0" multifutures==0.3.2 ; python_version >= "3.9" and python_version < "4.0" mypy-extensions==1.0.0 ; python_version >= "3.9" and python_version < "4.0" mypy==1.10.0 ; python_version >= "3.9" and python_version < "4.0" -nbclient==0.10.0 ; python_version >= "3.9" and python_version < "4.0" -nbconvert==6.5.4 ; python_version >= "3.9" and python_version < "4.0" +nbclient==0.6.8 ; python_version >= "3.9" and python_version < 
"4.0" nbformat==5.10.4 ; python_version >= "3.9" and python_version < "4.0" +nbmake==1.5.4 ; python_version >= "3.9" and python_version < "4.0" nest-asyncio==1.6.0 ; python_version >= "3.9" and python_version < "4.0" -numpy==1.26.4 ; python_version >= "3.9" and python_version < "4.0" -packaging==24.0 ; python_version >= "3.9" and python_version < "4.0" +numpy==2.0.0 ; python_version >= "3.9" and python_version < "4.0" +packaging==24.1 ; python_version >= "3.9" and python_version < "4.0" pandas==2.2.2 ; python_version >= "3.9" and python_version < "4.0" -pandocfilters==1.5.1 ; python_version >= "3.9" and python_version < "4.0" parso==0.8.4 ; python_version >= "3.9" and python_version < "4.0" pexpect==4.9.0 ; python_version >= "3.9" and python_version < "4.0" and sys_platform != "win32" pillow==10.3.0 ; python_version >= "3.9" and python_version < "4.0" platformdirs==4.2.2 ; python_version >= "3.9" and python_version < "4.0" pluggy==1.5.0 ; python_version >= "3.9" and python_version < "4.0" -prompt-toolkit==3.0.43 ; python_version >= "3.9" and python_version < "4.0" +prompt-toolkit==3.0.47 ; python_version >= "3.9" and python_version < "4.0" psutil==5.9.8 ; python_version >= "3.9" and python_version < "4.0" ptyprocess==0.7.0 ; python_version >= "3.9" and python_version < "4.0" and sys_platform != "win32" pure-eval==0.2.2 ; python_version >= "3.9" and python_version < "4.0" pycparser==2.22 ; python_version >= "3.9" and python_version < "4.0" and implementation_name == "pypy" -pydantic-core==2.18.2 ; python_version >= "3.9" and python_version < "4.0" -pydantic==2.7.1 ; python_version >= "3.9" and python_version < "4.0" +pydantic-core==2.18.4 ; python_version >= "3.9" and python_version < "4.0" +pydantic==2.7.4 ; python_version >= "3.9" and python_version < "4.0" pygments==2.18.0 ; python_version >= "3.9" and python_version < "4.0" pyparsing==3.1.2 ; python_version >= "3.9" and python_version < "4.0" pyproj==3.6.1 ; python_version >= "3.9" and python_version < "4.0" pytest-cov==5.0.0 ; python_version >= "3.9" and python_version < "4.0" pytest-recording==0.13.1 ; python_version >= "3.9" and python_version < "4.0" pytest-xdist==3.6.1 ; python_version >= "3.9" and python_version < "4.0" -pytest==8.2.0 ; python_version >= "3.9" and python_version < "4.0" +pytest==8.2.2 ; python_version >= "3.9" and python_version < "4.0" python-dateutil==2.9.0.post0 ; python_version >= "3.9" and python_version < "4.0" pytz==2024.1 ; python_version >= "3.9" and python_version < "4.0" pywin32==306 ; sys_platform == "win32" and platform_python_implementation != "PyPy" and python_version >= "3.9" and python_version < "4.0" pyyaml==6.0.1 ; python_version >= "3.9" and python_version < "4.0" pyzmq==26.0.3 ; python_version >= "3.9" and python_version < "4.0" referencing==0.35.1 ; python_version >= "3.9" and python_version < "4.0" -requests==2.31.0 ; python_version >= "3.9" and python_version < "4.0" +requests==2.32.3 ; python_version >= "3.9" and python_version < "4.0" rpds-py==0.18.1 ; python_version >= "3.9" and python_version < "4.0" -scipy==1.13.0 ; python_version >= "3.9" and python_version < "4.0" -setuptools==69.5.1 ; python_version >= "3.9" and python_version < "4.0" +scipy==1.13.1 ; python_version >= "3.9" and python_version < "4.0" +setuptools==70.0.0 ; python_version >= "3.9" and python_version < "4.0" shapely==2.0.4 ; python_version >= "3.9" and python_version < "4.0" six==1.16.0 ; python_version >= "3.9" and python_version < "4.0" sniffio==1.3.1 ; python_version >= "3.9" and python_version < "4.0" @@ -121,23 
+118,24 @@ sphinxcontrib-qthelp==1.0.7 ; python_version >= "3.9" and python_version < "4.0" sphinxcontrib-serializinghtml==1.1.10 ; python_version >= "3.9" and python_version < "4.0" sphinxext-opengraph==0.9.1 ; python_version >= "3.9" and python_version < "4.0" stack-data==0.6.3 ; python_version >= "3.9" and python_version < "4.0" -tenacity==8.3.0 ; python_version >= "3.9" and python_version < "4.0" -tinycss2==1.3.0 ; python_version >= "3.9" and python_version < "4.0" +tenacity==8.4.1 ; python_version >= "3.9" and python_version < "4.0" toml==0.10.2 ; python_version >= "3.9" and python_version < "4.0" tomli==2.0.1 ; python_full_version <= "3.11.0a6" and python_version >= "3.9" -tornado==6.4 ; python_version >= "3.9" and python_version < "4.0" +tornado==6.4.1 ; python_version >= "3.9" and python_version < "4.0" tqdm==4.66.4 ; python_version >= "3.9" and python_version < "4.0" +tqdm[notebook]==4.66.4 ; python_version >= "3.9" and python_version < "4.0" traitlets==5.14.3 ; python_version >= "3.9" and python_version < "4.0" types-deprecated==1.2.9.20240311 ; python_version >= "3.9" and python_version < "4.0" types-requests==2.31.0.6 ; python_version >= "3.9" and python_version < "4.0" types-urllib3==1.26.25.14 ; python_version >= "3.9" and python_version < "4.0" -typing-extensions==4.11.0 ; python_version >= "3.9" and python_version < "4.0" +typing-extensions==4.12.2 ; python_version >= "3.9" and python_version < "4.0" tzdata==2024.1 ; python_version >= "3.9" and python_version < "4.0" urllib3==1.26.18 ; python_version >= "3.9" and python_version < "4.0" vcrpy==6.0.1 ; python_version >= "3.9" and python_version < "4.0" wcwidth==0.2.13 ; python_version >= "3.9" and python_version < "4.0" webencodings==0.5.1 ; python_version >= "3.9" and python_version < "4.0" +widgetsnbextension==4.0.11 ; python_version >= "3.9" and python_version < "4.0" wrapt==1.16.0 ; python_version >= "3.9" and python_version < "4.0" -xarray==2024.5.0 ; python_version >= "3.9" and python_version < "4.0" +xarray==2024.6.0 ; python_version >= "3.9" and python_version < "4.0" yarl==1.9.4 ; python_version >= "3.9" and python_version < "4.0" -zipp==3.18.1 ; python_version >= "3.9" and python_version < "3.10" +zipp==3.19.2 ; python_version >= "3.9" and python_version < "3.10" diff --git a/requirements/requirements.txt b/requirements/requirements.txt index 3731cb9..977f02b 100644 --- a/requirements/requirements.txt +++ b/requirements/requirements.txt @@ -1,14 +1,14 @@ -annotated-types==0.6.0 ; python_version >= "3.9" and python_version < "4.0" -anyio==4.3.0 ; python_version >= "3.9" and python_version < "4.0" +annotated-types==0.7.0 ; python_version >= "3.9" and python_version < "4.0" +anyio==4.4.0 ; python_version >= "3.9" and python_version < "4.0" attrs==23.2.0 ; python_version >= "3.9" and python_version < "4.0" beautifulsoup4==4.12.3 ; python_version >= "3.9" and python_version < "4.0" -certifi==2024.2.2 ; python_version >= "3.9" and python_version < "4.0" +certifi==2024.6.2 ; python_version >= "3.9" and python_version < "4.0" charset-normalizer==3.3.2 ; python_version >= "3.9" and python_version < "4.0" click-plugins==1.1.1 ; python_version >= "3.9" and python_version < "4.0" click==8.1.7 ; python_version >= "3.9" and python_version < "4.0" cligj==0.7.2 ; python_version >= "3.9" and python_version < "4" colorama==0.4.6 ; python_version >= "3.9" and python_version < "4.0" and platform_system == "Windows" -dataretrieval==1.0.8 ; python_version >= "3.9" and python_version < "4.0" +dataretrieval==1.0.9 ; python_version >= "3.9" 
and python_version < "4.0" deprecated==1.2.14 ; python_version >= "3.9" and python_version < "4.0" erddapy==2.2.0 ; python_version >= "3.9" and python_version < "4.0" exceptiongroup==1.2.1 ; python_version >= "3.9" and python_version < "4.0" @@ -26,25 +26,25 @@ lxml-html-clean==0.1.1 ; python_version >= "3.9" and python_version < "4.0" lxml==5.2.2 ; python_version >= "3.9" and python_version < "4.0" lxml[html-clean]==5.2.2 ; python_version >= "3.9" and python_version < "4.0" multifutures==0.3.2 ; python_version >= "3.9" and python_version < "4.0" -numpy==1.26.4 ; python_version >= "3.9" and python_version < "4.0" -packaging==24.0 ; python_version >= "3.9" and python_version < "4.0" +numpy==2.0.0 ; python_version >= "3.9" and python_version < "4.0" +packaging==24.1 ; python_version >= "3.9" and python_version < "4.0" pandas==2.2.2 ; python_version >= "3.9" and python_version < "4.0" -pydantic-core==2.18.2 ; python_version >= "3.9" and python_version < "4.0" -pydantic==2.7.1 ; python_version >= "3.9" and python_version < "4.0" +pydantic-core==2.18.4 ; python_version >= "3.9" and python_version < "4.0" +pydantic==2.7.4 ; python_version >= "3.9" and python_version < "4.0" pyproj==3.6.1 ; python_version >= "3.9" and python_version < "4.0" python-dateutil==2.9.0.post0 ; python_version >= "3.9" and python_version < "4.0" pytz==2024.1 ; python_version >= "3.9" and python_version < "4.0" -requests==2.31.0 ; python_version >= "3.9" and python_version < "4.0" +requests==2.32.3 ; python_version >= "3.9" and python_version < "4.0" shapely==2.0.4 ; python_version >= "3.9" and python_version < "4.0" six==1.16.0 ; python_version >= "3.9" and python_version < "4.0" sniffio==1.3.1 ; python_version >= "3.9" and python_version < "4.0" soupsieve==2.5 ; python_version >= "3.9" and python_version < "4.0" -tenacity==8.3.0 ; python_version >= "3.9" and python_version < "4.0" +tenacity==8.4.1 ; python_version >= "3.9" and python_version < "4.0" tqdm==4.66.4 ; python_version >= "3.9" and python_version < "4.0" -typing-extensions==4.11.0 ; python_version >= "3.9" and python_version < "4.0" +typing-extensions==4.12.2 ; python_version >= "3.9" and python_version < "4.0" tzdata==2024.1 ; python_version >= "3.9" and python_version < "4.0" urllib3==1.26.18 ; python_version >= "3.9" and python_version < "4.0" webencodings==0.5.1 ; python_version >= "3.9" and python_version < "4.0" wrapt==1.16.0 ; python_version >= "3.9" and python_version < "4.0" -xarray==2024.5.0 ; python_version >= "3.9" and python_version < "4.0" -zipp==3.18.1 ; python_version >= "3.9" and python_version < "3.10" +xarray==2024.6.0 ; python_version >= "3.9" and python_version < "4.0" +zipp==3.19.2 ; python_version >= "3.9" and python_version < "3.10" From 378dac396cd4a98ec171912fe0020c720129c6e4 Mon Sep 17 00:00:00 2001 From: Panos Mavrogiorgos Date: Tue, 18 Jun 2024 17:11:33 +0300 Subject: [PATCH 08/15] fix: Use the correct version for the deprecation warnings --- searvey/coops.py | 8 ++++---- searvey/ioc.py | 4 ++-- 2 files changed, 6 insertions(+), 6 deletions(-) diff --git a/searvey/coops.py b/searvey/coops.py index 5494132..826eef5 100644 --- a/searvey/coops.py +++ b/searvey/coops.py @@ -613,7 +613,7 @@ def __coops_stations_html_tables() -> element.ResultSet: @deprecated( - version="0.3.11", + version="0.4.0", reason="This function is deprecated and will be removed in the future. 
Replace it with `get_coops_stations`.", ) @lru_cache(maxsize=1) @@ -773,7 +773,7 @@ def coops_stations(station_status: COOPS_StationStatus | None = None) -> GeoData @deprecated( - version="0.3.11", + version="0.4.0", reason="This function is deprecated and will be removed in the future. Replace it with `get_coops_stations`.", ) def coops_stations_within_region( @@ -814,7 +814,7 @@ def coops_stations_within_region( @deprecated( - version="0.3.11", + version="0.4.0", reason="This function is deprecated and will be removed in the future. Replace it with `get_coops_stations`.", ) def coops_stations_within_bounds( @@ -831,7 +831,7 @@ def coops_stations_within_bounds( @deprecated( - version="0.3.11", + version="0.4.0", reason="This function is deprecated and will be removed in the future. Replace it with `fetch_coops_station`.", ) def coops_product_within_region( diff --git a/searvey/ioc.py b/searvey/ioc.py index 2730f5a..1234f6a 100644 --- a/searvey/ioc.py +++ b/searvey/ioc.py @@ -252,7 +252,7 @@ def normalize_ioc_station_data(ioc_code: str, df: pd.DataFrame, truncate_seconds @deprecated( - version="0.3.11", + version="0.4.0", reason="This function is deprecated and will be removed in the future. Replace it with `fetch_ioc_station`.", ) def get_ioc_station_data( @@ -290,7 +290,7 @@ def get_ioc_station_data( @deprecated( - version="0.3.11", + version="0.4.0", reason="This function is deprecated and will be removed in the future. Replace it with `fetch_ioc_station`.", ) def get_ioc_data( From b02de893566ebc3497fb3bd5a2c355777f9b7524 Mon Sep 17 00:00:00 2001 From: Panos Mavrogiorgos Date: Tue, 18 Jun 2024 17:17:46 +0300 Subject: [PATCH 09/15] fix: Remove spurious warnings about converting IOC timestamps to UTC IOC is always on UTC. --- searvey/_common.py | 11 ++++++++--- searvey/_coops_api.py | 4 ++-- 2 files changed, 10 insertions(+), 5 deletions(-) diff --git a/searvey/_common.py b/searvey/_common.py index 4b77ef7..70a16f6 100644 --- a/searvey/_common.py +++ b/searvey/_common.py @@ -16,16 +16,21 @@ logger = logging.getLogger(__name__) -def _to_utc(index: pd.DatetimeIndex | pd.Timestamp) -> pd.DatetimeIndex: +def _to_utc( + index: pd.DatetimeIndex | pd.Timestamp, + *, + warn: bool = False, +) -> pd.DatetimeIndex: if index.tz: ref = index if isinstance(ref, pd.Timestamp): ref = pd.DatetimeIndex([ref]) - if index.tz.utcoffset(ref[0]) != timedelta(): + if warn and index.tz.utcoffset(ref[0]) != timedelta(): warnings.warn("Converting to UTC!\nData is retrieved and stored in UTC time") index = index.tz_convert("utc") else: - warnings.warn("Assuming UTC!\nData is retrieved and stored in UTC time") + if warn: + warnings.warn("Assuming UTC!\nData is retrieved and stored in UTC time") index = index.tz_localize("utc") return index diff --git a/searvey/_coops_api.py b/searvey/_coops_api.py index 0611cec..f70d98d 100644 --- a/searvey/_coops_api.py +++ b/searvey/_coops_api.py @@ -461,8 +461,8 @@ def _fetch_coops( ) -> dict[str, pd.DataFrame]: rate_limit = _resolve_rate_limit(rate_limit) http_client = _resolve_http_client(http_client) - start_dates = _to_utc(start_dates) - end_dates = _to_utc(end_dates) + start_dates = _to_utc(start_dates, warn=True) + end_dates = _to_utc(end_dates, warn=True) # Fetch json files from the COOPS website # We use multithreading in order to be able to use RateLimit + to take advantage of higher performance From 48664f340bbae863414a21e65f7b0998f1c08fd8 Mon Sep 17 00:00:00 2001 From: Panos Mavrogiorgos Date: Tue, 18 Jun 2024 17:23:12 +0300 Subject: [PATCH 10/15] feat: Add 
`progress_bar` argument to `fetch_*` functions This argument controls whether the tqdm based progress bar will be shown or not. --- searvey/_coops_api.py | 22 ++++++++++++++++++++-- searvey/_ioc_api.py | 18 ++++++++++++++++-- 2 files changed, 36 insertions(+), 4 deletions(-) diff --git a/searvey/_coops_api.py b/searvey/_coops_api.py index f70d98d..35a8c2c 100644 --- a/searvey/_coops_api.py +++ b/searvey/_coops_api.py @@ -248,6 +248,7 @@ def _parse_coops_responses( coops_responses: list[multifutures.FutureResult], executor: multifutures.ExecutorProtocol | None, + progress_bar: bool, ) -> list[multifutures.FutureResult]: # Parse the json files using pandas # This is a CPU heavy process, so let's use multiprocess @@ -264,7 +265,13 @@ def _parse_coops_responses( else: kwargs.append(dict(station_id=station_id, product=product, content=result.result)) logger.debug("Starting JSON parsing") - results = multifutures.multiprocess(_parse_json, func_kwargs=kwargs, check=False, executor=executor) + results = multifutures.multiprocess( + _parse_json, + func_kwargs=kwargs, + check=False, + executor=executor, + progress_bar=progress_bar, + ) multifutures.check_results(results) logger.debug("Finished JSON parsing") return results @@ -401,6 +408,7 @@ def _retrieve_coops_data( rate_limit: multifutures.RateLimit, http_client: httpx.Client, executor: multifutures.ExecutorProtocol | None, + progress_bar: bool, **aux_params: Any, ) -> list[multifutures.FutureResult]: kwargs = [] @@ -437,7 +445,11 @@ def _retrieve_coops_data( with http_client: logger.debug("Starting data retrieval") results = multifutures.multithread( - func=_fetch_url, func_kwargs=kwargs, check=False, executor=executor + func=_fetch_url, + func_kwargs=kwargs, + check=False, + executor=executor, + progress_bar=progress_bar, ) logger.debug("Finished data retrieval") multifutures.check_results(results) @@ -457,6 +469,7 @@ def _fetch_coops( http_client: httpx.Client | None, multiprocessing_executor: multifutures.ExecutorProtocol | None, multithreading_executor: multifutures.ExecutorProtocol | None, + progress_bar: bool, **aux_params: Any, ) -> dict[str, pd.DataFrame]: rate_limit = _resolve_rate_limit(rate_limit) @@ -477,6 +490,7 @@ def _fetch_coops( rate_limit=rate_limit, http_client=http_client, executor=multithreading_executor, + progress_bar=progress_bar, **aux_params, ) # Parse the json files using pandas @@ -484,6 +498,7 @@ def _fetch_coops( parsed_responses: list[multifutures.FutureResult] = _parse_coops_responses( coops_responses=coops_responses, executor=multiprocessing_executor, + progress_bar=progress_bar, ) # OK, now we have a list of dataframes. We need to group them per coops_code, concatenate them and remove duplicates dataframes = _group_results(station_ids=station_ids, parsed_responses=parsed_responses) @@ -499,6 +514,7 @@ def fetch_coops_station( http_client: httpx.Client | None = None, multiprocessing_executor: multifutures.ExecutorProtocol | None = None, multithreading_executor: multifutures.ExecutorProtocol | None = None, + progress_bar: bool = False, product: COOPS_Product | str = COOPS_Product.WATER_LEVEL, datum: COOPS_TidalDatum | str = COOPS_TidalDatum.MSL, units: COOPS_Units | str = COOPS_Units.METRIC, @@ -537,6 +553,7 @@ def fetch_coops_station( :param http_client: The ``httpx.Client``. :param multiprocessing_executor: An instance of a class implementing the ``concurrent.futures.Executor`` API. :param multithreading_executor: An instance of a class implementing the ``concurrent.futures.Executor`` API. 
+ :param progress_bar: If ``True`` then a progress bar is displayed for monitoring the progress of the outgoing requests. """ logger.info("COOPS-%s: Starting scraping: %s - %s", station_id, start_date, end_date) now = pd.Timestamp.now("utc") @@ -553,6 +570,7 @@ def fetch_coops_station( http_client=http_client, multiprocessing_executor=multiprocessing_executor, multithreading_executor=multithreading_executor, + progress_bar=progress_bar, )[station_id] logger.info("COOPS-%s: Finished scraping: %s - %s", station_id, start_date, end_date) return df diff --git a/searvey/_ioc_api.py b/searvey/_ioc_api.py index 6f098af..077ad0f 100644 --- a/searvey/_ioc_api.py +++ b/searvey/_ioc_api.py @@ -33,6 +33,7 @@ def _parse_ioc_responses( ioc_responses: list[multifutures.FutureResult], executor: multifutures.ExecutorProtocol | None, + progress_bar: bool, ) -> list[multifutures.FutureResult]: # Parse the json files using pandas # This is a CPU heavy process, so let's use multiprocess @@ -55,7 +56,9 @@ def _parse_ioc_responses( else: kwargs.append(dict(station_id=station_id, content=io.StringIO(result.result))) logger.debug("Starting JSON parsing") - results = multifutures.multiprocess(_parse_json, func_kwargs=kwargs, check=False, executor=executor) + results = multifutures.multiprocess( + _parse_json, func_kwargs=kwargs, check=False, executor=executor, progress_bar=progress_bar + ) multifutures.check_results(results) logger.debug("Finished JSON parsing") return results @@ -150,6 +153,7 @@ def _retrieve_ioc_data( rate_limit: multifutures.RateLimit, http_client: httpx.Client, executor: multifutures.ExecutorProtocol | None, + progress_bar: bool, ) -> list[multifutures.FutureResult]: kwargs = [] for station_id, start_date, end_date in zip(station_ids, start_dates, end_dates): @@ -166,7 +170,11 @@ def _retrieve_ioc_data( with http_client: logger.debug("Starting data retrieval") results = multifutures.multithread( - func=_fetch_url, func_kwargs=kwargs, check=False, executor=executor + func=_fetch_url, + func_kwargs=kwargs, + check=False, + executor=executor, + progress_bar=progress_bar, ) logger.debug("Finished data retrieval") multifutures.check_results(results) @@ -182,6 +190,7 @@ def _fetch_ioc( http_client: httpx.Client | None, multiprocessing_executor: multifutures.ExecutorProtocol | None, multithreading_executor: multifutures.ExecutorProtocol | None, + progress_bar: bool, ) -> dict[str, pd.DataFrame]: rate_limit = _resolve_rate_limit(rate_limit) http_client = _resolve_http_client(http_client) @@ -196,12 +205,14 @@ def _fetch_ioc( rate_limit=rate_limit, http_client=http_client, executor=multithreading_executor, + progress_bar=progress_bar, ) # Parse the json files using pandas # This is a CPU heavy process, so we are using multiprocessing here parsed_responses: list[multifutures.FutureResult] = _parse_ioc_responses( ioc_responses=ioc_responses, executor=multiprocessing_executor, + progress_bar=progress_bar, ) # OK, now we have a list of dataframes. 
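# (illustrative note: one dataframe per HTTP response, so a station whose requested period spans several of the ~30-day request windows contributes more than one entry here.)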
We need to group them per ioc_code, concatenate them and remove duplicates dataframes = _group_results(station_ids=station_ids, parsed_responses=parsed_responses) @@ -217,6 +228,7 @@ def fetch_ioc_station( http_client: httpx.Client | None = None, multiprocessing_executor: multifutures.ExecutorProtocol | None = None, multithreading_executor: multifutures.ExecutorProtocol | None = None, + progress_bar: bool = False, ) -> pd.DataFrame: """ Make a query to the IOC API for tide gauge data for ``station_id`` @@ -250,6 +262,7 @@ def fetch_ioc_station( :param http_client: The ``httpx.Client``. :param multiprocessing_executor: An instance of a class implementing the ``concurrent.futures.Executor`` API. :param multithreading_executor: An instance of a class implementing the ``concurrent.futures.Executor`` API. + :param progress_bar: If ``True`` then a progress bar is displayed for monitoring the progress of the outgoing requests. """ logger.info("IOC-%s: Starting scraping: %s - %s", station_id, start_date, end_date) now = pd.Timestamp.now("utc") @@ -261,6 +274,7 @@ def fetch_ioc_station( http_client=http_client, multiprocessing_executor=multiprocessing_executor, multithreading_executor=multithreading_executor, + progress_bar=progress_bar, )[station_id] logger.info("IOC-%s: Finished scraping: %s - %s", station_id, start_date, end_date) return df From be215d8d85e901009add2e582842fa0142e39c5c Mon Sep 17 00:00:00 2001 From: Panos Mavrogiorgos Date: Wed, 19 Jun 2024 07:43:53 +0300 Subject: [PATCH 11/15] feat: Add `searvey.__version__` --- searvey/__init__.py | 5 +++++ tests/import_test.py | 9 +++++++++ 2 files changed, 14 insertions(+) create mode 100644 tests/import_test.py diff --git a/searvey/__init__.py b/searvey/__init__.py index 83ddf0c..5d621e9 100644 --- a/searvey/__init__.py +++ b/searvey/__init__.py @@ -1,5 +1,7 @@ from __future__ import annotations +import importlib.metadata + from searvey._coops_api import fetch_coops_station from searvey._ioc_api import fetch_ioc_station from searvey.coops import get_coops_stations @@ -9,6 +11,8 @@ from searvey.stations import Provider from searvey.usgs import get_usgs_stations +__version__ = importlib.metadata.version(__name__) + __all__: list[str] = [ "fetch_coops_station", @@ -19,4 +23,5 @@ "get_stations", "get_usgs_stations", "Provider", + "__version__", ] diff --git a/tests/import_test.py b/tests/import_test.py new file mode 100644 index 0000000..6bf3185 --- /dev/null +++ b/tests/import_test.py @@ -0,0 +1,9 @@ +from __future__ import annotations + +import importlib.metadata + +import searvey + + +def test_version(): + assert searvey.__version__ == importlib.metadata.version("searvey") From f28baefd127083b4ac679de0b4daaecdd07cfad7 Mon Sep 17 00:00:00 2001 From: Panos Mavrogiorgos Date: Tue, 18 Jun 2024 17:15:50 +0300 Subject: [PATCH 12/15] docs: Add deprecation warnings on sphinx --- searvey/coops.py | 13 +++++++++++-- searvey/ioc.py | 10 +++++++++- 2 files changed, 20 insertions(+), 3 deletions(-) diff --git a/searvey/coops.py b/searvey/coops.py index 826eef5..398e521 100644 --- a/searvey/coops.py +++ b/searvey/coops.py @@ -619,6 +619,9 @@ def __coops_stations_html_tables() -> element.ResultSet: @lru_cache(maxsize=1) def coops_stations(station_status: COOPS_StationStatus | None = None) -> GeoDataFrame: """ + .. deprecated:: 0.4.0 + Use :func:`get_coops_stations` instead. 
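+
+    For example, a minimal migration sketch (illustrative only; see :func:`get_coops_stations` for the actual signature):
+
+    .. code-block:: python
+
+        stations = get_coops_stations()
+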
+ retrieve a list of CO-OPS stations with associated metadata :param station_status: one of ``active`` or ``discontinued`` @@ -781,7 +784,10 @@ def coops_stations_within_region( station_status: COOPS_StationStatus | None = None, ) -> GeoDataFrame: """ - retrieve all stations within the specified region of interest + .. deprecated:: 0.4.0 + Use :func:`get_coops_stations` instead. + + Retrieve all stations within the specified region of interest :param region: polygon or multipolygon denoting region of interest :param station_status: one of ``active`` or ``discontinued`` @@ -844,7 +850,10 @@ def coops_product_within_region( station_status: COOPS_StationStatus | None = None, ) -> Dataset: """ - retrieve CO-OPS data from within the specified region of interest + .. deprecated:: 0.4.0 + Use :func:`fetch_coops_station` instead. + + Retrieve CO-OPS data from within the specified region of interest :param product: CO-OPS product; one of ``water_level``, ``air_temperature``, ``water_temperature``, ``wind``, ``air_pressure``, ``air_gap``, ``conductivity``, ``visibility``, ``humidity``, ``salinity``, ``hourly_height``, ``high_low``, ``daily_mean``, ``monthly_mean``, ``one_minute_water_level``, ``predictions``, ``datums``, ``currents``, ``currents_predictions`` :param region: polygon or multipolygon denoting region of interest diff --git a/searvey/ioc.py b/searvey/ioc.py index 1234f6a..4868d79 100644 --- a/searvey/ioc.py +++ b/searvey/ioc.py @@ -262,7 +262,12 @@ def get_ioc_station_data( truncate_seconds: bool = True, rate_limit: Optional[RateLimit] = None, ) -> pd.DataFrame: - """Retrieve the TimeSeries of a single IOC station.""" + """ + .. deprecated:: 0.4.0 + Use :func:`fetch_ioc_station` instead. + + Retrieve the TimeSeries of a single IOC station. + """ if rate_limit: while rate_limit.reached(identifier="IOC"): @@ -302,6 +307,9 @@ def get_ioc_data( disable_progress_bar: bool = False, ) -> xr.Dataset: """ + .. deprecated:: 0.4.0 + Use :func:`fetch_ioc_station` instead. + Return the data of the stations specified in ``ioc_metadata`` as an ``xr.Dataset``. ``truncate_seconds`` needs some explaining. IOC has more than 1000 stations. From 1497b046e2d43127fdc85d469bd166ea769bdd09 Mon Sep 17 00:00:00 2001 From: Panos Mavrogiorgos Date: Tue, 18 Jun 2024 17:24:23 +0300 Subject: [PATCH 13/15] docs: Various improvements --- docs/source/conf.py | 7 +++++++ docs/source/coops.rst | 4 ++-- docs/source/ioc.rst | 14 +++++++------- poetry.lock | 21 ++++++++++++++++++++- pyproject.toml | 1 + requirements/requirements-dev.txt | 1 + searvey/_ioc_api.py | 8 +++++++- searvey/ioc.py | 5 +++-- 8 files changed, 48 insertions(+), 13 deletions(-) diff --git a/docs/source/conf.py b/docs/source/conf.py index f14e03e..197dcba 100644 --- a/docs/source/conf.py +++ b/docs/source/conf.py @@ -73,10 +73,17 @@ def repository_root(path: PathLike = None) -> Path: "sphinx.ext.autosummary", # The Napoleon extension allows for nicer argument formatting. "sphinx.ext.napoleon", + "sphinx_autodoc_typehints", "sphinxext.opengraph", "m2r2", ] +# sphinx_autodoc_typehints settings +always_use_bars_union = True +typehints_use_rtype = False +typehints_use_signature = False +typehints_use_signature_return = False + # Add any paths that contain templates here, relative to this directory. 
templates_path = ["_templates"] diff --git a/docs/source/coops.rst b/docs/source/coops.rst index adc5980..7935a3d 100644 --- a/docs/source/coops.rst +++ b/docs/source/coops.rst @@ -59,5 +59,5 @@ The ``COOPS_Query`` class lets you send an individual query to the CO-OPS API by New API ------- -.. autofunction:: searvey.coops.get_coops_stations -.. autofunction:: searvey.coops.fetch_coops_station +.. autofunction:: searvey.get_coops_stations +.. autofunction:: searvey.fetch_coops_station diff --git a/docs/source/ioc.rst b/docs/source/ioc.rst index 42c7c0e..eaf95ee 100644 --- a/docs/source/ioc.rst +++ b/docs/source/ioc.rst @@ -6,15 +6,15 @@ website is focused on operational monitoring of sea level measuring stations acr `Intergovernmental Oceanographic Commission (IOC) `_ aggregating data from more than 170 providers. -A list of IOC stations is provided with the ``get_ioc_stations()`` function with various subsetting options. +A DataFrame with the IOC station metadata can be retrieved with ``get_ioc_stations()`` +while the station data can be fetched with ``fetch_ioc_station()``: -.. autofunction:: searvey.ioc.get_ioc_stations +.. autofunction:: searvey.get_ioc_stations -The station data can be retrieved with -.. autofunction:: searvey.ioc.get_ioc_data +.. autofunction:: searvey.fetch_ioc_station -New API -------- +Deprecated API +`````````````` -.. autofunction:: searvey.ioc.fetch_ioc_station +.. autofunction:: searvey.get_ioc_data diff --git a/poetry.lock b/poetry.lock index 22fdf05..9f64a5a 100644 --- a/poetry.lock +++ b/poetry.lock @@ -3107,6 +3107,25 @@ docs = ["sphinxcontrib-websupport"] lint = ["flake8 (>=3.5.0)", "importlib_metadata", "mypy (==1.9.0)", "pytest (>=6.0)", "ruff (==0.3.7)", "sphinx-lint", "tomli", "types-docutils", "types-requests"] test = ["cython (>=3.0)", "defusedxml (>=0.7.1)", "pytest (>=6.0)", "setuptools (>=67.0)"] +[[package]] +name = "sphinx-autodoc-typehints" +version = "2.1.1" +description = "Type hints (PEP 484) support for the Sphinx autodoc extension" +optional = false +python-versions = ">=3.9" +files = [ + {file = "sphinx_autodoc_typehints-2.1.1-py3-none-any.whl", hash = "sha256:22427d74786274add2b6d4afccb8b3c8c1843f48a704550f15a35fd948f8a4de"}, + {file = "sphinx_autodoc_typehints-2.1.1.tar.gz", hash = "sha256:0072b65f5ab2818c229d6d6c2cc993770af55d36bb7bfb16001e2fce4d14880c"}, +] + +[package.dependencies] +sphinx = ">=7.3.5" + +[package.extras] +docs = ["furo (>=2024.1.29)"] +numpy = ["nptyping (>=2.5)"] +testing = ["covdefaults (>=2.3)", "coverage (>=7.4.4)", "defusedxml (>=0.7.1)", "diff-cover (>=9)", "pytest (>=8.1.1)", "pytest-cov (>=5)", "sphobjinv (>=2.3.1)", "typing-extensions (>=4.11)"] + [[package]] name = "sphinx-basic-ng" version = "1.0.0b2" @@ -3694,4 +3713,4 @@ test = ["big-O", "importlib-resources", "jaraco.functools", "jaraco.itertools", [metadata] lock-version = "2.0" python-versions = ">=3.9, <4.0" -content-hash = "f3782e345261011758eb7dd790b4475cd697195e4b2abc7c016cc42251381cfd" +content-hash = "5f360a280427f16ccab50b40f1a192508b8df1707cd6ca18151124b088ef5442" diff --git a/pyproject.toml b/pyproject.toml index ed357cc..0adb457 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -70,6 +70,7 @@ setuptools = "*" sphinx = "*" sphinxext-opengraph = "*" toml = "*" +sphinx-autodoc-typehints = "*" [tool.poetry.group.jupyter.dependencies] ipykernel = "*" diff --git a/requirements/requirements-dev.txt b/requirements/requirements-dev.txt index 2e7f0fc..901e1a9 100644 --- a/requirements/requirements-dev.txt +++ b/requirements/requirements-dev.txt @@ 
-108,6 +108,7 @@ six==1.16.0 ; python_version >= "3.9" and python_version < "4.0" sniffio==1.3.1 ; python_version >= "3.9" and python_version < "4.0" snowballstemmer==2.2.0 ; python_version >= "3.9" and python_version < "4.0" soupsieve==2.5 ; python_version >= "3.9" and python_version < "4.0" +sphinx-autodoc-typehints==2.1.1 ; python_version >= "3.9" and python_version < "4.0" sphinx-basic-ng==1.0.0b2 ; python_version >= "3.9" and python_version < "4.0" sphinx==7.3.7 ; python_version >= "3.9" and python_version < "4.0" sphinxcontrib-applehelp==1.0.8 ; python_version >= "3.9" and python_version < "4.0" diff --git a/searvey/_ioc_api.py b/searvey/_ioc_api.py index 077ad0f..a4f4441 100644 --- a/searvey/_ioc_api.py +++ b/searvey/_ioc_api.py @@ -234,6 +234,11 @@ def fetch_ioc_station( Make a query to the IOC API for tide gauge data for ``station_id`` and return the results as a ``pandas.DataFrame``. + .. code-block:: python + + fetch_ioc_station("acap2") + fetch_ioc_station("acap2", start_date="2023-01-01", end_date="2023-01-02") + ``start_date`` and ``end_date`` can be of any type that is valid for ``pandas.to_datetime()``. If ``start_date`` or ``end_date`` are timezone-aware timestamps they are coerced to UTC. The returned data are always in UTC. @@ -259,10 +264,11 @@ def fetch_ioc_station( :param start_date: The starting date of the query. Defaults to 7 days ago. :param end_date: The finishing date of the query. Defaults to "now". :param rate_limit: The rate limit for making requests to the IOC servers. Defaults to 5 requests/second. - :param http_client: The ``httpx.Client``. + :param http_client: The ``httpx.Client``. Can be used to set up e.g. an HTTP proxy. :param multiprocessing_executor: An instance of a class implementing the ``concurrent.futures.Executor`` API. :param multithreading_executor: An instance of a class implementing the ``concurrent.futures.Executor`` API. :param progress_bar: If ``True`` then a progress bar is displayed for monitoring the progress of the outgoing requests. + :return: ``pandas.DataFrame`` with the station data. """ logger.info("IOC-%s: Starting scraping: %s - %s", station_id, start_date, end_date) now = pd.Timestamp.now("utc") diff --git a/searvey/ioc.py b/searvey/ioc.py index 4868d79..8e2dbde 100644 --- a/searvey/ioc.py +++ b/searvey/ioc.py @@ -202,12 +202,12 @@ def get_ioc_stations( Note: The longitudes of the IOC stations are in the [-180, 180] range. - :param region: ``Polygon`` or ``MultiPolygon`` denoting region of interest + :param region: ``Polygon`` or ``MultiPolygon`` denoting region of interest. :param lon_min: The minimum Longitude of the Bounding Box. :param lon_max: The maximum Longitude of the Bounding Box. :param lat_min: The minimum Latitude of the Bounding Box. :param lat_max: The maximum Latitude of the Bounding Box. - :return: ``pandas.DataFrame`` with the station metadata + :return: ``pandas.DataFrame`` with the station metadata. """ region = get_region( region=region, @@ -341,6 +341,7 @@ def get_ioc_data( :param truncate_seconds: If ``True`` then timestamps are truncated to minutes (seconds are dropped) :param rate_limit: The default rate limit is 5 requests/second. :param disable_progress_bar: If ``True`` then the progress bar is not displayed. + :returns: An ``xr.Dataset`` with the station data.
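+
+    For example, an illustrative sketch mirroring the call that the example notebook used to make:
+
+    .. code-block:: python
+
+        ioc_metadata = get_ioc_stations()
+        ds = get_ioc_data(ioc_metadata=ioc_metadata, endtime="2020-05-30", period=3)
+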
""" if period > IOC_MAX_DAYS_PER_REQUEST: From 78addc84c18f4db8ed09dbd2b2ccf95571a4c12c Mon Sep 17 00:00:00 2001 From: Panos Mavrogiorgos Date: Wed, 19 Jun 2024 07:43:14 +0300 Subject: [PATCH 14/15] docs: Update IOC notebook --- Makefile | 2 +- examples/IOC_data.ipynb | 193 ++++---- poetry.lock | 786 +++++++++++++++++++++++++++++- pyproject.toml | 1 + requirements/requirements-dev.txt | 37 +- 5 files changed, 917 insertions(+), 102 deletions(-) diff --git a/Makefile b/Makefile index 0e3115e..3edd915 100644 --- a/Makefile +++ b/Makefile @@ -27,7 +27,7 @@ clean_notebooks: pre-commit run nbstripout -a exec_notebooks: - pytest --nbmake --nbmake-timeout=60 --nbmake-kernel=python3 $$(git ls-files | grep ipynb) + pytest --ff --nbmake --nbmake-timeout=90 --nbmake-kernel=python3 $$(git ls-files | grep ipynb) docs: make -C docs html diff --git a/examples/IOC_data.ipynb b/examples/IOC_data.ipynb index 6d06596..e8b0a8c 100644 --- a/examples/IOC_data.ipynb +++ b/examples/IOC_data.ipynb @@ -11,24 +11,14 @@ "source": [ "import logging\n", "\n", - "import shapely\n", + "import hvplot.pandas\n", "import geopandas as gpd\n", "import matplotlib.pyplot as plt\n", "import pandas as pd\n", + "import shapely\n", "import xarray as xr\n", "\n", - "from searvey import ioc\n", - "\n", - "logging.basicConfig(\n", - " level=20,\n", - " style=\"{\",\n", - " format=\"{asctime:s}; {levelname:8s}; {threadName:23s}; {name:<25s} {lineno:5d}; {message:s}\",\n", - ")\n", - "\n", - "logging.getLogger(\"urllib3\").setLevel(30)\n", - "logging.getLogger(\"parso\").setLevel(30)\n", - "\n", - "logger = logging.getLogger(__name__)" + "import searvey" ] }, { @@ -38,7 +28,9 @@ "tags": [] }, "source": [ - "## Retrieve Station Metadata" + "## Retrieve Station Metadata\n", + "\n", + "In order to retrieve station metadata we need to use the `get_ioc_stations()` function which returns a `geopandas.GeoDataFrame`:" ] }, { @@ -50,8 +42,8 @@ }, "outputs": [], "source": [ - "ioc_stations = ioc.get_ioc_stations()\n", - "ioc_stations" + "ioc_stations = searvey.get_ioc_stations()\n", + "len(ioc_stations)" ] }, { @@ -63,13 +55,7 @@ }, "outputs": [], "source": [ - "figure, axis = plt.subplots(1, 1)\n", - "figure.set_size_inches(12, 12 / 1.61803398875)\n", - "\n", - "countries = gpd.read_file(gpd.datasets.get_path('naturalearth_lowres'))\n", - "_ = countries.plot(color='lightgrey', ax=axis, zorder=-1)\n", - "_ = ioc_stations.plot(ax=axis)\n", - "_ = axis.set_title(f'all IOC stations')" + "ioc_stations.columns" ] }, { @@ -81,33 +67,33 @@ }, "outputs": [], "source": [ - "ioc_stations.columns" + "with pd.option_context('display.max_columns', None):\n", + " ioc_stations.sample(3).sort_index()" ] }, { - "cell_type": "markdown", + "cell_type": "code", + "execution_count": null, "id": "5", "metadata": { "tags": [] }, + "outputs": [], "source": [ - "## Retrieve station metadata from arbitrary polygon" + "world_plot = ioc_stations.hvplot(geo=True, tiles=True, hover_cols=[\"ioc_code\", \"location\"])\n", + "world_plot.opts(width=800, height=500)" ] }, { - "cell_type": "code", - "execution_count": null, + "cell_type": "markdown", "id": "6", "metadata": { "tags": [] }, - "outputs": [], "source": [ - "east_coast = shapely.geometry.box(-85, 25, -65, 45)\n", - "east_coast\n", + "## Retrieve station metadata from arbitrary polygon\n", "\n", - "east_stations = ioc.get_ioc_stations(region=east_coast)\n", - "east_stations" + "We can filter the IOC stations using any shapely object. E.g. 
to only select stations on the East Coast of the US:" ] }, { "cell_type": "code", "execution_count": null, "id": "7", "metadata": { "tags": [] }, "outputs": [], "source": [ "east_coast = shapely.geometry.box(-85, 25, -65, 45)\n", "east_coast_stations = searvey.get_ioc_stations(region=east_coast)\n", "len(east_coast_stations)" ] }, { "cell_type": "code", "execution_count": null, "id": "8", "metadata": { "tags": [] }, "outputs": [], "source": [ "east_coast_stations.hvplot.points(geo=True, tiles=True)" ] }, { "cell_type": "markdown", "id": "9", "metadata": {}, "source": [ "## Retrieve IOC station data\n", "\n", "The function for retrieving data is called `fetch_ioc_station()` and it returns a `pandas.DataFrame`. \n", "\n", "In its simplest form it only requires the station_id (i.e. IOC_CODE) and it will retrieve the last week of data:" ] }, { "cell_type": "code", "execution_count": null, "id": "10", "metadata": {}, "outputs": [], "source": [ "df = searvey.fetch_ioc_station(\"acap2\")\n", "df" ] }, { "cell_type": "markdown", "id": "11", "metadata": {}, "source": [ "We can also explicitly specify the start and the end date. E.g. to retrieve the first 10 days of May 2024:" ] }, { "cell_type": "code", "execution_count": null, "id": "12", "metadata": {}, "outputs": [], "source": [ "df = searvey.fetch_ioc_station(\n", " station_id=\"alva\",\n", " start_date=pd.Timestamp(\"2024-05-01\"),\n", " end_date=pd.Timestamp(\"2024-05-10\"),\n", ")\n", "df" ] }, { "cell_type": "markdown", "id": "13", "metadata": {}, "source": [ "If we request more than 30 days, then multiple HTTP requests are sent to the IOC servers via multithreading and the responses are merged into a single dataframe. \n", "\n", "In this case, setting `progress_bar=True` can be helpful in monitoring the progress of the HTTP requests.
\n", + "For example to retrieve data for the first 6 months of 2020:" ] }, { "cell_type": "code", "execution_count": null, "id": "14", - "metadata": { - "tags": [] - }, + "metadata": {}, "outputs": [], "source": [ - "bahamas = ds.where(ds.country == \"Bahamas\")\n", - "bahamas" + "df = searvey.fetch_ioc_station(\n", + " station_id=\"alva\",\n", + " start_date=pd.Timestamp(\"2020-01-01\"),\n", + " end_date=pd.Timestamp(\"2020-06-01\"),\n", + " progress_bar=True,\n", + ")\n", + "df" + ] + }, + { + "cell_type": "markdown", + "id": "15", + "metadata": {}, + "source": [ + "Keep in mind that each IOC station may return dataframes with different sensors/columns. For example the `setp1` station in Bahamas returns a bunch of them:" ] }, { "cell_type": "code", "execution_count": null, - "id": "15", + "id": "16", "metadata": {}, "outputs": [], "source": [ - "bahamas.ra2.plot()" + "bahamas = searvey.fetch_ioc_station(\n", + " station_id=\"setp1\",\n", + " start_date=pd.Timestamp(\"2020-05-25\"),\n", + " end_date=pd.Timestamp(\"2020-05-30\"),\n", + " progress_bar=False,\n", + ")\n", + "bahamas" ] }, { "cell_type": "markdown", - "id": "16", - "metadata": { - "tags": [] - }, + "id": "17", + "metadata": {}, "source": [ - "Trying to fill the missing values is not that difficult, but you probably need to review the results" + "Nevertheless, the returned timeseries are **not** ready to be used. \n", + "\n", + "E.g. we see that in the last days of May the `rad` sensor was offline for some time:" ] }, { "cell_type": "code", "execution_count": null, - "id": "17", - "metadata": { - "tags": [] - }, + "id": "18", + "metadata": {}, "outputs": [], "source": [ - "bahamas.ra2.interpolate_na(dim=\"time\", method=\"linear\").plot()" + "bahamas.rad.hvplot(grid=True)" + ] + }, + { + "cell_type": "markdown", + "id": "19", + "metadata": {}, + "source": [ + "So the IOC data **do** need some data-cleaning." ] } ], "metadata": { "kernelspec": { - "display_name": "Python 3 (ipykernel)", + "display_name": "searvey", "language": "python", - "name": "python3" + "name": "searvey" }, "language_info": { "codemirror_mode": { diff --git a/poetry.lock b/poetry.lock index 9f64a5a..9165252 100644 --- a/poetry.lock +++ b/poetry.lock @@ -1,5 +1,20 @@ # This file is automatically @generated by Poetry 1.8.3 and should not be changed by hand. +[[package]] +name = "affine" +version = "2.4.0" +description = "Matrices describing affine transformation of the plane" +optional = false +python-versions = ">=3.7" +files = [ + {file = "affine-2.4.0-py3-none-any.whl", hash = "sha256:8a3df80e2b2378aef598a83c1392efd47967afec4242021a0b06b4c7cbc61a92"}, + {file = "affine-2.4.0.tar.gz", hash = "sha256:a24d818d6a836c131976d22f8c27b8d3ca32d0af64c1d8d29deb7bafa4da1eea"}, +] + +[package.extras] +dev = ["coveralls", "flake8", "pydocstyle"] +test = ["pytest (>=4.6)", "pytest-cov"] + [[package]] name = "alabaster" version = "0.7.16" @@ -127,6 +142,88 @@ charset-normalizer = ["charset-normalizer"] html5lib = ["html5lib"] lxml = ["lxml"] +[[package]] +name = "bleach" +version = "6.1.0" +description = "An easy safelist-based HTML-sanitizing tool." 
+optional = false +python-versions = ">=3.8" +files = [ + {file = "bleach-6.1.0-py3-none-any.whl", hash = "sha256:3225f354cfc436b9789c66c4ee030194bee0568fbf9cbdad3bc8b5c26c5f12b6"}, + {file = "bleach-6.1.0.tar.gz", hash = "sha256:0a31f1837963c41d46bbf1331b8778e1308ea0791db03cc4e7357b97cf42a8fe"}, +] + +[package.dependencies] +six = ">=1.9.0" +webencodings = "*" + +[package.extras] +css = ["tinycss2 (>=1.1.0,<1.3)"] + +[[package]] +name = "bokeh" +version = "3.4.1" +description = "Interactive plots and applications in the browser from Python" +optional = false +python-versions = ">=3.9" +files = [ + {file = "bokeh-3.4.1-py3-none-any.whl", hash = "sha256:1e3c502a0a8205338fc74dadbfa321f8a0965441b39501e36796a47b4017b642"}, + {file = "bokeh-3.4.1.tar.gz", hash = "sha256:d824961e4265367b0750ce58b07e564ad0b83ca64b335521cd3421e9b9f10d89"}, +] + +[package.dependencies] +contourpy = ">=1.2" +Jinja2 = ">=2.9" +numpy = ">=1.16" +packaging = ">=16.8" +pandas = ">=1.2" +pillow = ">=7.1.0" +PyYAML = ">=3.10" +tornado = ">=6.2" +xyzservices = ">=2021.09.1" + +[[package]] +name = "cartopy" +version = "0.23.0" +description = "A Python library for cartographic visualizations with Matplotlib" +optional = false +python-versions = ">=3.9" +files = [ + {file = "Cartopy-0.23.0-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:374e66f816c3bafa48ffdbf6abaefa67063b405fac5f425f9be241cdf3498352"}, + {file = "Cartopy-0.23.0-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:2bae450c4c913796cad0b7ce05aa2fa78d1788de47989f0a03183397648e24be"}, + {file = "Cartopy-0.23.0-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:a40437596e8ac5e74575eab822c661f4e725bd995cfd9e445069695fe9086b42"}, + {file = "Cartopy-0.23.0-cp310-cp310-win_amd64.whl", hash = "sha256:3292d6d403137eed80d32014c2f28de6282bed8824213f4b4c2170f388b24a1b"}, + {file = "Cartopy-0.23.0-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:86b07b6794b616674e4e485b8574e9197bca54a4467d28dd01ae0bf178f8dc2b"}, + {file = "Cartopy-0.23.0-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:8dece2aa8d5ff7bf989ded6b5f07c980fb5bb772952bc7cdeab469738abdecee"}, + {file = "Cartopy-0.23.0-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:a9dfd28352dc83d6b4e4cf85d84cb50fc4886d4c1510d61f4c7cf22477d1156f"}, + {file = "Cartopy-0.23.0-cp311-cp311-win_amd64.whl", hash = "sha256:b2671b5354e43220f8e1074e7fe30a8b9f71cb38407c78e51db9c97772f0320b"}, + {file = "Cartopy-0.23.0-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:80b9fd666fd47f6370d29f7ad4e352828d54aaf688a03d0b83b51e141cfd77fa"}, + {file = "Cartopy-0.23.0-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:43e36b8b7e7e373a5698757458fd28fafbbbf5f3ebbe2d378f6a5ec3993d6dc0"}, + {file = "Cartopy-0.23.0-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:550173b91155d4d81cd14b4892cb6cabe3dd32bd34feacaa1ec78c0e56287832"}, + {file = "Cartopy-0.23.0-cp312-cp312-win_amd64.whl", hash = "sha256:55219ee0fb069cc3254426e87382cde03546e86c3f7c6759f076823b1e3a44d9"}, + {file = "Cartopy-0.23.0-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:6279af846bf77d9817ab8792a8e38ca561878f048bba1afdae3e3a30c5432bfd"}, + {file = "Cartopy-0.23.0-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:843bf9dc0a18e1a8eed872c49e8092e8a8109e4dce285ad96752841e21e8161e"}, + {file = "Cartopy-0.23.0-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:350ff8802e2bc617c09bd6148aeb46e841775a846bfaa6e635a212d1eaf5ab66"}, + {file = "Cartopy-0.23.0-cp39-cp39-win_amd64.whl", hash 
= "sha256:b52ab2274ad7504955854ef8d6f603e41f5d7163d02b29d369cecdbd29c2fda1"}, + {file = "Cartopy-0.23.0.tar.gz", hash = "sha256:231f37b35701f2ba31d94959cca75e6da04c2eea3a7f14ce1c75ee3b0eae7676"}, +] + +[package.dependencies] +matplotlib = ">=3.5" +numpy = ">=1.21" +packaging = ">=20" +pyproj = ">=3.3.1" +pyshp = ">=2.3" +shapely = ">=1.7" + +[package.extras] +doc = ["pydata-sphinx-theme", "sphinx", "sphinx-gallery"] +ows = ["OWSLib (>=0.20.0)", "pillow (>=6.1.0)"] +plotting = ["pillow (>=6.1.0)", "scipy (>=1.3.1)"] +speedups = ["fiona", "pykdtree"] +srtm = ["beautifulsoup4"] +test = ["coveralls", "pytest (>=5.1.2)", "pytest-cov", "pytest-mpl (>=0.11)", "pytest-xdist"] + [[package]] name = "certifi" version = "2024.6.2" @@ -349,6 +446,17 @@ click = ">=4.0" [package.extras] test = ["pytest-cov"] +[[package]] +name = "cloudpickle" +version = "3.0.0" +description = "Pickler class to extend the standard pickle.Pickler functionality" +optional = false +python-versions = ">=3.8" +files = [ + {file = "cloudpickle-3.0.0-py3-none-any.whl", hash = "sha256:246ee7d0c295602a036e86369c77fecda4ab17b506496730f2f576d9016fd9c7"}, + {file = "cloudpickle-3.0.0.tar.gz", hash = "sha256:996d9a482c6fb4f33c1a35335cf8afd065d2a56e973270364840712d9131a882"}, +] + [[package]] name = "colorama" version = "0.4.6" @@ -360,6 +468,25 @@ files = [ {file = "colorama-0.4.6.tar.gz", hash = "sha256:08695f5cb7ed6e0531a20572697297273c47b8cae5a63ffc6d6ed5c201be6e44"}, ] +[[package]] +name = "colorcet" +version = "3.1.0" +description = "Collection of perceptually uniform colormaps" +optional = false +python-versions = ">=3.7" +files = [ + {file = "colorcet-3.1.0-py3-none-any.whl", hash = "sha256:2a7d59cc8d0f7938eeedd08aad3152b5319b4ba3bcb7a612398cc17a384cb296"}, + {file = "colorcet-3.1.0.tar.gz", hash = "sha256:2921b3cd81a2288aaf2d63dbc0ce3c26dcd882e8c389cc505d6886bf7aa9a4eb"}, +] + +[package.extras] +all = ["colorcet[doc]", "colorcet[examples]", "colorcet[tests-extra]", "colorcet[tests]"] +doc = ["colorcet[examples]", "nbsite (>=0.8.4)", "sphinx-copybutton"] +examples = ["bokeh", "holoviews", "matplotlib", "numpy"] +tests = ["packaging", "pre-commit", "pytest (>=2.8.5)", "pytest-cov"] +tests-examples = ["colorcet[examples]", "nbval"] +tests-extra = ["colorcet[tests]", "pytest-mpl"] + [[package]] name = "comm" version = "0.2.2" @@ -536,6 +663,35 @@ files = [ docs = ["ipython", "matplotlib", "numpydoc", "sphinx"] tests = ["pytest", "pytest-cov", "pytest-xdist"] +[[package]] +name = "dask" +version = "2024.6.2" +description = "Parallel PyData with Task Scheduling" +optional = false +python-versions = ">=3.9" +files = [ + {file = "dask-2024.6.2-py3-none-any.whl", hash = "sha256:81b80ee015b2e057b93bb2d1bf13a866136e762e2b24bf54b6b621e8b86b7708"}, + {file = "dask-2024.6.2.tar.gz", hash = "sha256:d429d6b19e85fd1306ac37c188aaf99d03bbe69a6fe59d2b42882b2ac188686f"}, +] + +[package.dependencies] +click = ">=8.1" +cloudpickle = ">=1.5.0" +fsspec = ">=2021.09.0" +importlib-metadata = {version = ">=4.13.0", markers = "python_version < \"3.12\""} +packaging = ">=20.0" +partd = ">=1.2.0" +pyyaml = ">=5.3.1" +toolz = ">=0.10.0" + +[package.extras] +array = ["numpy (>=1.21)"] +complete = ["dask[array,dataframe,diagnostics,distributed]", "lz4 (>=4.3.2)", "pyarrow (>=7.0)", "pyarrow-hotfix"] +dataframe = ["dask-expr (>=1.1,<1.2)", "dask[array]", "pandas (>=1.3)"] +diagnostics = ["bokeh (>=2.4.2)", "jinja2 (>=2.10.3)"] +distributed = ["distributed (==2024.6.2)"] +test = ["pandas[test]", "pre-commit", "pytest", "pytest-cov", "pytest-rerunfailures", 
"pytest-timeout", "pytest-xdist"] + [[package]] name = "dataretrieval" version = "1.0.9" @@ -825,6 +981,45 @@ ufo = ["fs (>=2.2.0,<3)"] unicode = ["unicodedata2 (>=15.1.0)"] woff = ["brotli (>=1.0.1)", "brotlicffi (>=0.8.0)", "zopfli (>=0.1.4)"] +[[package]] +name = "fsspec" +version = "2024.6.0" +description = "File-system specification" +optional = false +python-versions = ">=3.8" +files = [ + {file = "fsspec-2024.6.0-py3-none-any.whl", hash = "sha256:58d7122eb8a1a46f7f13453187bfea4972d66bf01618d37366521b1998034cee"}, + {file = "fsspec-2024.6.0.tar.gz", hash = "sha256:f579960a56e6d8038a9efc8f9c77279ec12e6299aa86b0769a7e9c46b94527c2"}, +] + +[package.extras] +abfs = ["adlfs"] +adl = ["adlfs"] +arrow = ["pyarrow (>=1)"] +dask = ["dask", "distributed"] +dev = ["pre-commit", "ruff"] +doc = ["numpydoc", "sphinx", "sphinx-design", "sphinx-rtd-theme", "yarl"] +dropbox = ["dropbox", "dropboxdrivefs", "requests"] +full = ["adlfs", "aiohttp (!=4.0.0a0,!=4.0.0a1)", "dask", "distributed", "dropbox", "dropboxdrivefs", "fusepy", "gcsfs", "libarchive-c", "ocifs", "panel", "paramiko", "pyarrow (>=1)", "pygit2", "requests", "s3fs", "smbprotocol", "tqdm"] +fuse = ["fusepy"] +gcs = ["gcsfs"] +git = ["pygit2"] +github = ["requests"] +gs = ["gcsfs"] +gui = ["panel"] +hdfs = ["pyarrow (>=1)"] +http = ["aiohttp (!=4.0.0a0,!=4.0.0a1)"] +libarchive = ["libarchive-c"] +oci = ["ocifs"] +s3 = ["s3fs"] +sftp = ["paramiko"] +smb = ["smbprotocol"] +ssh = ["paramiko"] +test = ["aiohttp (!=4.0.0a0,!=4.0.0a1)", "numpy", "pytest", "pytest-asyncio (!=0.22.0)", "pytest-benchmark", "pytest-cov", "pytest-mock", "pytest-recording", "pytest-rerunfailures", "requests"] +test-downstream = ["aiobotocore (>=2.5.4,<3.0.0)", "dask-expr", "dask[dataframe,test]", "moto[server] (>4,<5)", "pytest-timeout", "xarray"] +test-full = ["adlfs", "aiohttp (!=4.0.0a0,!=4.0.0a1)", "cloudpickle", "dask", "distributed", "dropbox", "dropboxdrivefs", "fastparquet", "fusepy", "gcsfs", "jinja2", "kerchunk", "libarchive-c", "lz4", "notebook", "numpy", "ocifs", "pandas", "panel", "paramiko", "pyarrow", "pyarrow (>=1)", "pyftpdlib", "pygit2", "pytest", "pytest-asyncio (!=0.22.0)", "pytest-benchmark", "pytest-cov", "pytest-mock", "pytest-recording", "pytest-rerunfailures", "python-snappy", "requests", "smbprotocol", "tqdm", "urllib3", "zarr", "zstandard"] +tqdm = ["tqdm"] + [[package]] name = "furo" version = "2024.5.6" @@ -861,6 +1056,39 @@ pandas = ">=1.4.0" pyproj = ">=3.3.0" shapely = ">=1.8.0" +[[package]] +name = "geoviews" +version = "1.12.0" +description = "GeoViews is a Python library that makes it easy to explore and visualize geographical, meteorological, and oceanographic datasets, such as those used in weather, climate, and remote sensing research." 
+optional = false +python-versions = ">=3.9" +files = [ + {file = "geoviews-1.12.0-py3-none-any.whl", hash = "sha256:5e8750d0e9a80dd4f5ce493d26cdde52880677c19d329a980b0a94e966dd3bd3"}, + {file = "geoviews-1.12.0.tar.gz", hash = "sha256:e2cbef0605e8fd1529bc643a31aeb61997f8f93c9b41a5aff8b2b355a76fa789"}, +] + +[package.dependencies] +bokeh = ">=3.4.0,<3.5.0" +cartopy = ">=0.18.0" +holoviews = ">=1.16.0" +numpy = "*" +packaging = "*" +panel = ">=1.0.0" +param = "*" +pyproj = "*" +shapely = "*" +xyzservices = "*" + +[package.extras] +all = ["cartopy (>=0.20.0)", "codecov", "datashader", "fiona", "geodatasets", "geopandas", "graphviz", "iris (>=3.5)", "lxml", "matplotlib (>2.2)", "mock", "nbsite (>=0.8.4,<0.9.0)", "nbval", "netcdf4", "pandas", "pooch", "pyct", "pytest", "pytest-cov", "pytest-github-actions-annotate-failures", "pyviz-comms", "rioxarray", "scipy", "selenium", "shapely", "xarray", "xesmf"] +build = ["bokeh (==3.4)", "param (>=1.9.2)", "pyct (>=0.4.4)", "setuptools"] +doc = ["cartopy (>=0.20.0)", "datashader", "fiona", "geodatasets", "geopandas", "graphviz", "iris (>=3.5)", "lxml", "matplotlib (>2.2)", "mock", "nbsite (>=0.8.4,<0.9.0)", "netcdf4", "pandas", "pooch", "pyct", "scipy", "selenium", "shapely", "xarray", "xesmf"] +examples-extra = ["datashader", "fiona", "geodatasets", "geopandas", "iris (>=3.5)", "matplotlib (>2.2)", "mock", "netcdf4", "pandas", "pooch", "pyct", "scipy", "shapely", "xarray", "xesmf"] +recommended = ["datashader", "geopandas", "matplotlib (>2.2)", "netcdf4", "pandas", "pooch", "pyct", "scipy", "shapely", "xarray"] +tests = ["fiona", "nbval", "pytest", "rioxarray"] +tests-ci = ["codecov", "pytest-cov", "pytest-github-actions-annotate-failures"] +tests-core = ["geopandas", "matplotlib (>2.2)", "netcdf4", "pandas", "pooch", "pytest", "pyviz-comms", "scipy", "shapely", "xarray"] + [[package]] name = "h11" version = "0.14.0" @@ -872,6 +1100,31 @@ files = [ {file = "h11-0.14.0.tar.gz", hash = "sha256:8f19fbbe99e72420ff35c00b27a34cb9937e902a8b810e2c88300c6f0a3b699d"}, ] +[[package]] +name = "holoviews" +version = "1.19.0" +description = "A high-level plotting API for the PyData ecosystem built on HoloViews." +optional = false +python-versions = ">=3.9" +files = [ + {file = "holoviews-1.19.0-py3-none-any.whl", hash = "sha256:a74b26dc3285b4f8b801e23f0e23b4ac93ab4ec162ea76c69ae585fff627a21b"}, + {file = "holoviews-1.19.0.tar.gz", hash = "sha256:cab1522f75a9b46377f9364b675befd79812e220059714470a58e21475d531ba"}, +] + +[package.dependencies] +bokeh = ">=3.1" +colorcet = "*" +numpy = ">=1.21" +packaging = "*" +pandas = ">=1.3" +panel = ">=1.0" +param = ">=2.0,<3.0" +pyviz-comms = ">=2.1" + +[package.extras] +recommended = ["matplotlib (>=3)", "plotly (>=4.0)"] +tests = ["pytest", "pytest-rerunfailures"] + [[package]] name = "html5lib" version = "1.1" @@ -938,6 +1191,49 @@ cli = ["click (==8.*)", "pygments (==2.*)", "rich (>=10,<14)"] http2 = ["h2 (>=3,<5)"] socks = ["socksio (==1.*)"] +[[package]] +name = "hvplot" +version = "0.10.0" +description = "A high-level plotting API for the PyData ecosystem built on HoloViews." 
+optional = false +python-versions = ">=3.8" +files = [ + {file = "hvplot-0.10.0-py3-none-any.whl", hash = "sha256:fe90ccb48163a6a62ae5bd6b008c2cb15cbf5b276f6ad6839ef5470b1c480d16"}, + {file = "hvplot-0.10.0.tar.gz", hash = "sha256:e87486a95bfe151ab52ef163a5e93d9cbd043992cf0b755ccadd2bf36fedd376"}, +] + +[package.dependencies] +bokeh = ">=1.0.0" +cartopy = {version = "*", optional = true, markers = "extra == \"geo\""} +colorcet = ">=2" +fiona = {version = "*", optional = true, markers = "extra == \"geo\""} +geopandas = {version = "*", optional = true, markers = "extra == \"geo\""} +geoviews = {version = ">=1.9.0", optional = true, markers = "extra == \"geo\""} +holoviews = ">=1.11.0" +numpy = ">=1.15" +packaging = "*" +pandas = "*" +panel = ">=0.11.0" +param = ">=1.12.0,<3.0" +pyproj = {version = "*", optional = true, markers = "extra == \"geo\""} +rasterio = {version = "*", optional = true, markers = "extra == \"geo\""} +rioxarray = {version = "*", optional = true, markers = "extra == \"geo\""} +spatialpandas = {version = ">=0.4.3", optional = true, markers = "extra == \"geo\""} + +[package.extras] +dev-extras = ["setuptools-scm (>=6)"] +doc = ["hvplot[examples]", "nbsite (>=0.8.4)", "sphinxext-rediraffe"] +examples = ["dask[dataframe] (>=2021.3.0)", "datashader (>=0.6.5)", "fugue[sql]", "geodatasets (>=2023.12.0)", "hvplot[fugue-sql]", "ibis-framework[duckdb]", "intake (>=0.6.5,<2.0.0)", "intake-parquet (>=0.2.3)", "intake-xarray (>=0.5.0)", "ipywidgets", "matplotlib", "networkx (>=2.6.3)", "notebook (>=5.4)", "numba (>=0.51.0)", "pillow (>=8.2.0)", "plotly", "polars", "pooch (>=1.6.0)", "s3fs (>=2022.1.0)", "scikit-image (>=0.17.2)", "scipy (>=1.5.3)", "selenium (>=3.141.0)", "streamz (>=0.3.0)", "xarray (>=0.18.2)", "xyzservices (>=2022.9.0)"] +examples-tests = ["hvplot[examples]", "hvplot[tests-nb]"] +fugue-sql = ["fugue-sql-antlr (>=0.2.0)", "jinja2", "qpd (>=0.4.4)", "sqlglot"] +geo = ["cartopy", "fiona", "geopandas", "geoviews (>=1.9.0)", "pyproj", "rasterio", "rioxarray", "spatialpandas (>=0.4.3)"] +graphviz = ["pygraphviz"] +hvdev = ["colorcet (>=0.0.1a1)", "datashader (>=0.0.1a1)", "holoviews (>=0.0.1a1)", "panel (>=0.0.1a1)", "param (>=0.0.1a1)", "pyviz-comms (>=0.0.1a1)"] +hvdev-geo = ["geoviews (>=0.0.1a1)"] +tests = ["fugue[sql]", "hvplot[fugue-sql]", "hvplot[tests-core]", "ibis-framework[duckdb]", "polars"] +tests-core = ["dask[dataframe]", "ipywidgets", "matplotlib", "parameterized", "plotly", "pooch", "pre-commit", "pytest", "pytest-cov", "ruff", "scipy", "xarray"] +tests-nb = ["nbval", "pytest-xdist"] + [[package]] name = "idna" version = "3.7" @@ -1366,6 +1662,67 @@ mongodb = ["pymongo (>4.1,<5)"] redis = ["redis (>3,!=4.5.2,!=4.5.3,<6.0.0)"] rediscluster = ["redis (>=4.2.0,!=4.5.2,!=4.5.3)"] +[[package]] +name = "linkify-it-py" +version = "2.0.3" +description = "Links recognition library with FULL unicode support." 
+optional = false +python-versions = ">=3.7" +files = [ + {file = "linkify-it-py-2.0.3.tar.gz", hash = "sha256:68cda27e162e9215c17d786649d1da0021a451bdc436ef9e0fa0ba5234b9b048"}, + {file = "linkify_it_py-2.0.3-py3-none-any.whl", hash = "sha256:6bcbc417b0ac14323382aef5c5192c0075bf8a9d6b41820a2b66371eac6b6d79"}, +] + +[package.dependencies] +uc-micro-py = "*" + +[package.extras] +benchmark = ["pytest", "pytest-benchmark"] +dev = ["black", "flake8", "isort", "pre-commit", "pyproject-flake8"] +doc = ["myst-parser", "sphinx", "sphinx-book-theme"] +test = ["coverage", "pytest", "pytest-cov"] + +[[package]] +name = "llvmlite" +version = "0.43.0" +description = "lightweight wrapper around basic LLVM functionality" +optional = false +python-versions = ">=3.9" +files = [ + {file = "llvmlite-0.43.0-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:a289af9a1687c6cf463478f0fa8e8aa3b6fb813317b0d70bf1ed0759eab6f761"}, + {file = "llvmlite-0.43.0-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:6d4fd101f571a31acb1559ae1af30f30b1dc4b3186669f92ad780e17c81e91bc"}, + {file = "llvmlite-0.43.0-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:7d434ec7e2ce3cc8f452d1cd9a28591745de022f931d67be688a737320dfcead"}, + {file = "llvmlite-0.43.0-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:6912a87782acdff6eb8bf01675ed01d60ca1f2551f8176a300a886f09e836a6a"}, + {file = "llvmlite-0.43.0-cp310-cp310-win_amd64.whl", hash = "sha256:14f0e4bf2fd2d9a75a3534111e8ebeb08eda2f33e9bdd6dfa13282afacdde0ed"}, + {file = "llvmlite-0.43.0-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:3e8d0618cb9bfe40ac38a9633f2493d4d4e9fcc2f438d39a4e854f39cc0f5f98"}, + {file = "llvmlite-0.43.0-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:e0a9a1a39d4bf3517f2af9d23d479b4175ead205c592ceeb8b89af48a327ea57"}, + {file = "llvmlite-0.43.0-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:c1da416ab53e4f7f3bc8d4eeba36d801cc1894b9fbfbf2022b29b6bad34a7df2"}, + {file = "llvmlite-0.43.0-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:977525a1e5f4059316b183fb4fd34fa858c9eade31f165427a3977c95e3ee749"}, + {file = "llvmlite-0.43.0-cp311-cp311-win_amd64.whl", hash = "sha256:d5bd550001d26450bd90777736c69d68c487d17bf371438f975229b2b8241a91"}, + {file = "llvmlite-0.43.0-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:f99b600aa7f65235a5a05d0b9a9f31150c390f31261f2a0ba678e26823ec38f7"}, + {file = "llvmlite-0.43.0-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:35d80d61d0cda2d767f72de99450766250560399edc309da16937b93d3b676e7"}, + {file = "llvmlite-0.43.0-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:eccce86bba940bae0d8d48ed925f21dbb813519169246e2ab292b5092aba121f"}, + {file = "llvmlite-0.43.0-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:df6509e1507ca0760787a199d19439cc887bfd82226f5af746d6977bd9f66844"}, + {file = "llvmlite-0.43.0-cp312-cp312-win_amd64.whl", hash = "sha256:7a2872ee80dcf6b5dbdc838763d26554c2a18aa833d31a2635bff16aafefb9c9"}, + {file = "llvmlite-0.43.0-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:9cd2a7376f7b3367019b664c21f0c61766219faa3b03731113ead75107f3b66c"}, + {file = "llvmlite-0.43.0-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:18e9953c748b105668487b7c81a3e97b046d8abf95c4ddc0cd3c94f4e4651ae8"}, + {file = "llvmlite-0.43.0-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = 
"sha256:74937acd22dc11b33946b67dca7680e6d103d6e90eeaaaf932603bec6fe7b03a"}, + {file = "llvmlite-0.43.0-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:bc9efc739cc6ed760f795806f67889923f7274276f0eb45092a1473e40d9b867"}, + {file = "llvmlite-0.43.0-cp39-cp39-win_amd64.whl", hash = "sha256:47e147cdda9037f94b399bf03bfd8a6b6b1f2f90be94a454e3386f006455a9b4"}, + {file = "llvmlite-0.43.0.tar.gz", hash = "sha256:ae2b5b5c3ef67354824fb75517c8db5fbe93bc02cd9671f3c62271626bc041d5"}, +] + +[[package]] +name = "locket" +version = "1.0.0" +description = "File-based locks for Python on Linux and Windows" +optional = false +python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*" +files = [ + {file = "locket-1.0.0-py2.py3-none-any.whl", hash = "sha256:b6c819a722f7b6bd955b80781788e4a66a55628b858d347536b7e81325a3a5e3"}, + {file = "locket-1.0.0.tar.gz", hash = "sha256:5c0d4c052a8bbbf750e056a8e65ccd309086f4f0f18a2eac306a8dfa4112a632"}, +] + [[package]] name = "lxml" version = "5.2.2" @@ -1552,6 +1909,48 @@ files = [ docutils = ">=0.19" mistune = "0.8.4" +[[package]] +name = "markdown" +version = "3.6" +description = "Python implementation of John Gruber's Markdown." +optional = false +python-versions = ">=3.8" +files = [ + {file = "Markdown-3.6-py3-none-any.whl", hash = "sha256:48f276f4d8cfb8ce6527c8f79e2ee29708508bf4d40aa410fbc3b4ee832c850f"}, + {file = "Markdown-3.6.tar.gz", hash = "sha256:ed4f41f6daecbeeb96e576ce414c41d2d876daa9a16cb35fa8ed8c2ddfad0224"}, +] + +[package.dependencies] +importlib-metadata = {version = ">=4.4", markers = "python_version < \"3.10\""} + +[package.extras] +docs = ["mdx-gh-links (>=0.2)", "mkdocs (>=1.5)", "mkdocs-gen-files", "mkdocs-literate-nav", "mkdocs-nature (>=0.6)", "mkdocs-section-index", "mkdocstrings[python]"] +testing = ["coverage", "pyyaml"] + +[[package]] +name = "markdown-it-py" +version = "3.0.0" +description = "Python port of markdown-it. Markdown parsing, done right!" 
+optional = false +python-versions = ">=3.8" +files = [ + {file = "markdown-it-py-3.0.0.tar.gz", hash = "sha256:e3f60a94fa066dc52ec76661e37c851cb232d92f9886b15cb560aaada2df8feb"}, + {file = "markdown_it_py-3.0.0-py3-none-any.whl", hash = "sha256:355216845c60bd96232cd8d8c40e8f9765cc86f46880e43a8fd22dc1a1a8cab1"}, +] + +[package.dependencies] +mdurl = ">=0.1,<1.0" + +[package.extras] +benchmarking = ["psutil", "pytest", "pytest-benchmark"] +code-style = ["pre-commit (>=3.0,<4.0)"] +compare = ["commonmark (>=0.9,<1.0)", "markdown (>=3.4,<4.0)", "mistletoe (>=1.0,<2.0)", "mistune (>=2.0,<3.0)", "panflute (>=2.3,<3.0)"] +linkify = ["linkify-it-py (>=1,<3)"] +plugins = ["mdit-py-plugins"] +profiling = ["gprof2dot"] +rtd = ["jupyter_sphinx", "mdit-py-plugins", "myst-parser", "pyyaml", "sphinx", "sphinx-copybutton", "sphinx-design", "sphinx_book_theme"] +testing = ["coverage", "pytest", "pytest-cov", "pytest-regressions"] + [[package]] name = "markupsafe" version = "2.1.5" @@ -1688,6 +2087,36 @@ files = [ [package.dependencies] traitlets = "*" +[[package]] +name = "mdit-py-plugins" +version = "0.4.1" +description = "Collection of plugins for markdown-it-py" +optional = false +python-versions = ">=3.8" +files = [ + {file = "mdit_py_plugins-0.4.1-py3-none-any.whl", hash = "sha256:1020dfe4e6bfc2c79fb49ae4e3f5b297f5ccd20f010187acc52af2921e27dc6a"}, + {file = "mdit_py_plugins-0.4.1.tar.gz", hash = "sha256:834b8ac23d1cd60cec703646ffd22ae97b7955a6d596eb1d304be1e251ae499c"}, +] + +[package.dependencies] +markdown-it-py = ">=1.0.0,<4.0.0" + +[package.extras] +code-style = ["pre-commit"] +rtd = ["myst-parser", "sphinx-book-theme"] +testing = ["coverage", "pytest", "pytest-cov", "pytest-regressions"] + +[[package]] +name = "mdurl" +version = "0.1.2" +description = "Markdown URL utilities" +optional = false +python-versions = ">=3.7" +files = [ + {file = "mdurl-0.1.2-py3-none-any.whl", hash = "sha256:84008a41e51615a49fc9966191ff91509e3c40b939176e643fd50a5c2196b8f8"}, + {file = "mdurl-0.1.2.tar.gz", hash = "sha256:bb413d29f5eea38f31dd4754dd7377d4465116fb207585f97bf925588687c1ba"}, +] + [[package]] name = "mistune" version = "0.8.4" @@ -1946,6 +2375,40 @@ files = [ {file = "nest_asyncio-1.6.0.tar.gz", hash = "sha256:6f172d5449aca15afd6c646851f4e31e02c598d553a667e38cafa997cfec55fe"}, ] +[[package]] +name = "numba" +version = "0.60.0" +description = "compiling Python code using LLVM" +optional = false +python-versions = ">=3.9" +files = [ + {file = "numba-0.60.0-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:5d761de835cd38fb400d2c26bb103a2726f548dc30368853121d66201672e651"}, + {file = "numba-0.60.0-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:159e618ef213fba758837f9837fb402bbe65326e60ba0633dbe6c7f274d42c1b"}, + {file = "numba-0.60.0-cp310-cp310-manylinux2014_aarch64.manylinux_2_17_aarch64.whl", hash = "sha256:1527dc578b95c7c4ff248792ec33d097ba6bef9eda466c948b68dfc995c25781"}, + {file = "numba-0.60.0-cp310-cp310-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:fe0b28abb8d70f8160798f4de9d486143200f34458d34c4a214114e445d7124e"}, + {file = "numba-0.60.0-cp310-cp310-win_amd64.whl", hash = "sha256:19407ced081d7e2e4b8d8c36aa57b7452e0283871c296e12d798852bc7d7f198"}, + {file = "numba-0.60.0-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:a17b70fc9e380ee29c42717e8cc0bfaa5556c416d94f9aa96ba13acb41bdece8"}, + {file = "numba-0.60.0-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:3fb02b344a2a80efa6f677aa5c40cd5dd452e1b35f8d1c2af0dfd9ada9978e4b"}, + {file = 
"numba-0.60.0-cp311-cp311-manylinux2014_aarch64.manylinux_2_17_aarch64.whl", hash = "sha256:5f4fde652ea604ea3c86508a3fb31556a6157b2c76c8b51b1d45eb40c8598703"}, + {file = "numba-0.60.0-cp311-cp311-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:4142d7ac0210cc86432b818338a2bc368dc773a2f5cf1e32ff7c5b378bd63ee8"}, + {file = "numba-0.60.0-cp311-cp311-win_amd64.whl", hash = "sha256:cac02c041e9b5bc8cf8f2034ff6f0dbafccd1ae9590dc146b3a02a45e53af4e2"}, + {file = "numba-0.60.0-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:d7da4098db31182fc5ffe4bc42c6f24cd7d1cb8a14b59fd755bfee32e34b8404"}, + {file = "numba-0.60.0-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:38d6ea4c1f56417076ecf8fc327c831ae793282e0ff51080c5094cb726507b1c"}, + {file = "numba-0.60.0-cp312-cp312-manylinux2014_aarch64.manylinux_2_17_aarch64.whl", hash = "sha256:62908d29fb6a3229c242e981ca27e32a6e606cc253fc9e8faeb0e48760de241e"}, + {file = "numba-0.60.0-cp312-cp312-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:0ebaa91538e996f708f1ab30ef4d3ddc344b64b5227b67a57aa74f401bb68b9d"}, + {file = "numba-0.60.0-cp312-cp312-win_amd64.whl", hash = "sha256:f75262e8fe7fa96db1dca93d53a194a38c46da28b112b8a4aca168f0df860347"}, + {file = "numba-0.60.0-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:01ef4cd7d83abe087d644eaa3d95831b777aa21d441a23703d649e06b8e06b74"}, + {file = "numba-0.60.0-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:819a3dfd4630d95fd574036f99e47212a1af41cbcb019bf8afac63ff56834449"}, + {file = "numba-0.60.0-cp39-cp39-manylinux2014_aarch64.manylinux_2_17_aarch64.whl", hash = "sha256:0b983bd6ad82fe868493012487f34eae8bf7dd94654951404114f23c3466d34b"}, + {file = "numba-0.60.0-cp39-cp39-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:c151748cd269ddeab66334bd754817ffc0cabd9433acb0f551697e5151917d25"}, + {file = "numba-0.60.0-cp39-cp39-win_amd64.whl", hash = "sha256:3031547a015710140e8c87226b4cfe927cac199835e5bf7d4fe5cb64e814e3ab"}, + {file = "numba-0.60.0.tar.gz", hash = "sha256:5df6158e5584eece5fc83294b949fd30b9f1125df7708862205217e068aabf16"}, +] + +[package.dependencies] +llvmlite = "==0.43.*" +numpy = ">=1.22,<2.1" + [[package]] name = "numpy" version = "2.0.0" @@ -2082,6 +2545,64 @@ sql-other = ["SQLAlchemy (>=2.0.0)", "adbc-driver-postgresql (>=0.8.0)", "adbc-d test = ["hypothesis (>=6.46.1)", "pytest (>=7.3.2)", "pytest-xdist (>=2.2.0)"] xml = ["lxml (>=4.9.2)"] +[[package]] +name = "panel" +version = "1.4.4" +description = "The powerful data exploration & web app framework for Python." 
+optional = false +python-versions = ">=3.9" +files = [ + {file = "panel-1.4.4-py3-none-any.whl", hash = "sha256:b49bb9676567b0c0730bf69348c057247080811aec56364dd4fcfba80e5e09a0"}, + {file = "panel-1.4.4.tar.gz", hash = "sha256:659e9fc5b495e6519c5d07e8148fa5eeed9bc648356ec83fc299381ba5a726ef"}, +] + +[package.dependencies] +bleach = "*" +bokeh = ">=3.4.0,<3.5.0" +linkify-it-py = "*" +markdown = "*" +markdown-it-py = "*" +mdit-py-plugins = "*" +pandas = ">=1.2" +param = ">=2.1.0,<3.0" +pyviz-comms = ">=2.0.0" +requests = "*" +tqdm = ">=4.48.0" +typing-extensions = "*" +xyzservices = ">=2021.09.1" + +[package.extras] +all = ["aiohttp", "altair", "anywidget", "channels", "croniter", "dask-expr", "datashader", "diskcache", "django (<4)", "fastparquet", "flake8", "folium", "graphviz", "holoviews (>=1.16.0)", "hvplot", "ipyleaflet", "ipympl", "ipython (>=7.0)", "ipyvolume", "ipyvuetify", "ipywidgets", "ipywidgets-bokeh", "jupyter-bokeh (>=3.0.7)", "jupyter-server", "jupyterlab", "lxml", "matplotlib", "nbsite (>=0.8.4)", "nbval", "networkx (>=2.5)", "numba (<0.58)", "numpy", "pandas (<2.1.0)", "pandas (>=1.3)", "parameterized", "pillow", "playwright", "plotly", "plotly (>=4.0)", "pre-commit", "psutil", "pydeck", "pygraphviz", "pyinstrument (>=4.0)", "pytest", "pytest-asyncio", "pytest-cov", "pytest-playwright", "pytest-rerunfailures", "pytest-xdist", "python-graphviz", "pyvista", "reacton", "scikit-image", "scikit-learn", "scipy", "seaborn", "streamz", "textual", "tomli", "twine", "vega-datasets", "vtk", "watchfiles", "xarray", "xgboost"] +all-pip = ["aiohttp", "altair", "anywidget", "channels", "croniter", "dask-expr", "datashader", "diskcache", "django (<4)", "fastparquet", "flake8", "folium", "graphviz", "holoviews (>=1.16.0)", "hvplot", "ipyleaflet", "ipympl", "ipython (>=7.0)", "ipyvolume", "ipyvuetify", "ipywidgets", "ipywidgets-bokeh", "jupyter-bokeh (>=3.0.7)", "jupyter-server", "jupyterlab", "lxml", "matplotlib", "nbsite (>=0.8.4)", "nbval", "networkx (>=2.5)", "numba (<0.58)", "numpy", "pandas (<2.1.0)", "pandas (>=1.3)", "parameterized", "pillow", "playwright", "plotly", "plotly (>=4.0)", "pre-commit", "psutil", "pydeck", "pyinstrument (>=4.0)", "pytest", "pytest-asyncio", "pytest-cov", "pytest-playwright", "pytest-rerunfailures", "pytest-xdist", "pyvista", "reacton", "scikit-image", "scikit-learn", "scipy", "seaborn", "streamz", "textual", "tomli", "twine", "vega-datasets", "vtk", "watchfiles", "xarray", "xgboost"] +build = ["bleach", "bokeh (>=3.4.0,<3.5.0)", "cryptography (<39)", "markdown", "packaging", "param (>=2.0.0)", "pyviz-comms (>=2.0.0)", "requests", "setuptools (>=42)", "tqdm (>=4.48.0)", "urllib3 (<2.0)"] +doc = ["holoviews (>=1.16.0)", "jupyterlab", "lxml", "matplotlib", "nbsite (>=0.8.4)", "pandas (<2.1.0)", "pillow", "plotly"] +examples = ["aiohttp", "altair", "channels", "croniter", "dask-expr", "datashader", "django (<4)", "fastparquet", "folium", "graphviz", "holoviews (>=1.16.0)", "hvplot", "ipyleaflet", "ipympl", "ipyvolume", "ipyvuetify", "ipywidgets", "ipywidgets-bokeh", "jupyter-bokeh (>=3.0.7)", "networkx (>=2.5)", "plotly (>=4.0)", "pydeck", "pygraphviz", "pyinstrument (>=4.0)", "python-graphviz", "pyvista", "reacton", "scikit-image", "scikit-learn", "seaborn", "streamz", "textual", "vega-datasets", "vtk", "xarray", "xgboost"] +recommended = ["holoviews (>=1.16.0)", "jupyterlab", "matplotlib", "pillow", "plotly"] +tests = ["altair", "anywidget", "diskcache", "flake8", "folium", "holoviews (>=1.16.0)", "ipympl", "ipython (>=7.0)", "ipyvuetify", 
"ipywidgets-bokeh", "nbval", "numba (<0.58)", "numpy", "pandas (>=1.3)", "parameterized", "pre-commit", "psutil", "pytest", "pytest-asyncio", "pytest-cov", "pytest-rerunfailures", "pytest-xdist", "reacton", "scipy", "textual", "twine", "watchfiles"] +tests-core = ["altair", "anywidget", "diskcache", "flake8", "folium", "holoviews (>=1.16.0)", "ipython (>=7.0)", "nbval", "numpy", "pandas (>=1.3)", "parameterized", "pre-commit", "psutil", "pytest", "pytest-asyncio", "pytest-cov", "pytest-rerunfailures", "pytest-xdist", "scipy", "textual", "watchfiles"] +ui = ["jupyter-server", "playwright", "pytest-playwright", "tomli"] + +[[package]] +name = "param" +version = "2.1.0" +description = "Make your Python code clearer and more reliable by declaring Parameters." +optional = false +python-versions = ">=3.8" +files = [ + {file = "param-2.1.0-py3-none-any.whl", hash = "sha256:f31d3745d227347d29b5868c4e4e3077df07463889b91d3bb28e634fde211e1c"}, + {file = "param-2.1.0.tar.gz", hash = "sha256:a7b30b08b547e2b78b02aeba6ed34e3c6a638f8e4824a76a96ffa2d7cf57e71f"}, +] + +[package.extras] +all = ["param[doc]", "param[lint]", "param[tests-full]"] +doc = ["nbsite (==0.8.4)", "param[examples]", "sphinx-remove-toctrees"] +examples = ["aiohttp", "pandas", "panel"] +lint = ["flake8", "pre-commit"] +tests = ["coverage[toml]", "pytest", "pytest-asyncio"] +tests-deser = ["odfpy", "openpyxl", "pyarrow", "tables", "xlrd"] +tests-examples = ["nbval", "param[examples]", "pytest (<8.1)", "pytest-asyncio", "pytest-xdist"] +tests-full = ["cloudpickle", "gmpy", "ipython", "jsonschema", "nest-asyncio", "numpy", "pandas", "param[tests-deser]", "param[tests-examples]", "param[tests]"] + [[package]] name = "parso" version = "0.8.4" @@ -2097,6 +2618,24 @@ files = [ qa = ["flake8 (==5.0.4)", "mypy (==0.971)", "types-setuptools (==67.2.0.1)"] testing = ["docopt", "pytest"] +[[package]] +name = "partd" +version = "1.4.2" +description = "Appendable key-value storage" +optional = false +python-versions = ">=3.9" +files = [ + {file = "partd-1.4.2-py3-none-any.whl", hash = "sha256:978e4ac767ec4ba5b86c6eaa52e5a2a3bc748a2ca839e8cc798f1cc6ce6efb0f"}, + {file = "partd-1.4.2.tar.gz", hash = "sha256:d022c33afbdc8405c226621b015e8067888173d85f7f5ecebb3cafed9a20f02c"}, +] + +[package.dependencies] +locket = "*" +toolz = "*" + +[package.extras] +complete = ["blosc", "numpy (>=1.20.0)", "pandas (>=1.3)", "pyzmq"] + [[package]] name = "pexpect" version = "4.9.0" @@ -2295,6 +2834,54 @@ files = [ [package.extras] tests = ["pytest"] +[[package]] +name = "pyarrow" +version = "16.1.0" +description = "Python library for Apache Arrow" +optional = false +python-versions = ">=3.8" +files = [ + {file = "pyarrow-16.1.0-cp310-cp310-macosx_10_15_x86_64.whl", hash = "sha256:17e23b9a65a70cc733d8b738baa6ad3722298fa0c81d88f63ff94bf25eaa77b9"}, + {file = "pyarrow-16.1.0-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:4740cc41e2ba5d641071d0ab5e9ef9b5e6e8c7611351a5cb7c1d175eaf43674a"}, + {file = "pyarrow-16.1.0-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:98100e0268d04e0eec47b73f20b39c45b4006f3c4233719c3848aa27a03c1aef"}, + {file = "pyarrow-16.1.0-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:f68f409e7b283c085f2da014f9ef81e885d90dcd733bd648cfba3ef265961848"}, + {file = "pyarrow-16.1.0-cp310-cp310-manylinux_2_28_aarch64.whl", hash = "sha256:a8914cd176f448e09746037b0c6b3a9d7688cef451ec5735094055116857580c"}, + {file = "pyarrow-16.1.0-cp310-cp310-manylinux_2_28_x86_64.whl", hash = 
"sha256:48be160782c0556156d91adbdd5a4a7e719f8d407cb46ae3bb4eaee09b3111bd"}, + {file = "pyarrow-16.1.0-cp310-cp310-win_amd64.whl", hash = "sha256:9cf389d444b0f41d9fe1444b70650fea31e9d52cfcb5f818b7888b91b586efff"}, + {file = "pyarrow-16.1.0-cp311-cp311-macosx_10_15_x86_64.whl", hash = "sha256:d0ebea336b535b37eee9eee31761813086d33ed06de9ab6fc6aaa0bace7b250c"}, + {file = "pyarrow-16.1.0-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:2e73cfc4a99e796727919c5541c65bb88b973377501e39b9842ea71401ca6c1c"}, + {file = "pyarrow-16.1.0-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:bf9251264247ecfe93e5f5a0cd43b8ae834f1e61d1abca22da55b20c788417f6"}, + {file = "pyarrow-16.1.0-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:ddf5aace92d520d3d2a20031d8b0ec27b4395cab9f74e07cc95edf42a5cc0147"}, + {file = "pyarrow-16.1.0-cp311-cp311-manylinux_2_28_aarch64.whl", hash = "sha256:25233642583bf658f629eb230b9bb79d9af4d9f9229890b3c878699c82f7d11e"}, + {file = "pyarrow-16.1.0-cp311-cp311-manylinux_2_28_x86_64.whl", hash = "sha256:a33a64576fddfbec0a44112eaf844c20853647ca833e9a647bfae0582b2ff94b"}, + {file = "pyarrow-16.1.0-cp311-cp311-win_amd64.whl", hash = "sha256:185d121b50836379fe012753cf15c4ba9638bda9645183ab36246923875f8d1b"}, + {file = "pyarrow-16.1.0-cp312-cp312-macosx_10_15_x86_64.whl", hash = "sha256:2e51ca1d6ed7f2e9d5c3c83decf27b0d17bb207a7dea986e8dc3e24f80ff7d6f"}, + {file = "pyarrow-16.1.0-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:06ebccb6f8cb7357de85f60d5da50e83507954af617d7b05f48af1621d331c9a"}, + {file = "pyarrow-16.1.0-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:b04707f1979815f5e49824ce52d1dceb46e2f12909a48a6a753fe7cafbc44a0c"}, + {file = "pyarrow-16.1.0-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:0d32000693deff8dc5df444b032b5985a48592c0697cb6e3071a5d59888714e2"}, + {file = "pyarrow-16.1.0-cp312-cp312-manylinux_2_28_aarch64.whl", hash = "sha256:8785bb10d5d6fd5e15d718ee1d1f914fe768bf8b4d1e5e9bf253de8a26cb1628"}, + {file = "pyarrow-16.1.0-cp312-cp312-manylinux_2_28_x86_64.whl", hash = "sha256:e1369af39587b794873b8a307cc6623a3b1194e69399af0efd05bb202195a5a7"}, + {file = "pyarrow-16.1.0-cp312-cp312-win_amd64.whl", hash = "sha256:febde33305f1498f6df85e8020bca496d0e9ebf2093bab9e0f65e2b4ae2b3444"}, + {file = "pyarrow-16.1.0-cp38-cp38-macosx_10_15_x86_64.whl", hash = "sha256:b5f5705ab977947a43ac83b52ade3b881eb6e95fcc02d76f501d549a210ba77f"}, + {file = "pyarrow-16.1.0-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:0d27bf89dfc2576f6206e9cd6cf7a107c9c06dc13d53bbc25b0bd4556f19cf5f"}, + {file = "pyarrow-16.1.0-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:0d07de3ee730647a600037bc1d7b7994067ed64d0eba797ac74b2bc77384f4c2"}, + {file = "pyarrow-16.1.0-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:fbef391b63f708e103df99fbaa3acf9f671d77a183a07546ba2f2c297b361e83"}, + {file = "pyarrow-16.1.0-cp38-cp38-manylinux_2_28_aarch64.whl", hash = "sha256:19741c4dbbbc986d38856ee7ddfdd6a00fc3b0fc2d928795b95410d38bb97d15"}, + {file = "pyarrow-16.1.0-cp38-cp38-manylinux_2_28_x86_64.whl", hash = "sha256:f2c5fb249caa17b94e2b9278b36a05ce03d3180e6da0c4c3b3ce5b2788f30eed"}, + {file = "pyarrow-16.1.0-cp38-cp38-win_amd64.whl", hash = "sha256:e6b6d3cd35fbb93b70ade1336022cc1147b95ec6af7d36906ca7fe432eb09710"}, + {file = "pyarrow-16.1.0-cp39-cp39-macosx_10_15_x86_64.whl", hash = "sha256:18da9b76a36a954665ccca8aa6bd9f46c1145f79c0bb8f4f244f5f8e799bca55"}, 
+ {file = "pyarrow-16.1.0-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:99f7549779b6e434467d2aa43ab2b7224dd9e41bdde486020bae198978c9e05e"}, + {file = "pyarrow-16.1.0-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:f07fdffe4fd5b15f5ec15c8b64584868d063bc22b86b46c9695624ca3505b7b4"}, + {file = "pyarrow-16.1.0-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:ddfe389a08ea374972bd4065d5f25d14e36b43ebc22fc75f7b951f24378bf0b5"}, + {file = "pyarrow-16.1.0-cp39-cp39-manylinux_2_28_aarch64.whl", hash = "sha256:3b20bd67c94b3a2ea0a749d2a5712fc845a69cb5d52e78e6449bbd295611f3aa"}, + {file = "pyarrow-16.1.0-cp39-cp39-manylinux_2_28_x86_64.whl", hash = "sha256:ba8ac20693c0bb0bf4b238751d4409e62852004a8cf031c73b0e0962b03e45e3"}, + {file = "pyarrow-16.1.0-cp39-cp39-win_amd64.whl", hash = "sha256:31a1851751433d89a986616015841977e0a188662fcffd1a5677453f1df2de0a"}, + {file = "pyarrow-16.1.0.tar.gz", hash = "sha256:15fbb22ea96d11f0b5768504a3f961edab25eaf4197c341720c4a387f6c60315"}, +] + +[package.dependencies] +numpy = ">=1.16.6" + [[package]] name = "pycparser" version = "2.22" @@ -2483,6 +3070,17 @@ files = [ [package.dependencies] certifi = "*" +[[package]] +name = "pyshp" +version = "2.3.1" +description = "Pure Python read/write support for ESRI Shapefile format" +optional = false +python-versions = ">=2.7" +files = [ + {file = "pyshp-2.3.1-py2.py3-none-any.whl", hash = "sha256:67024c0ccdc352ba5db777c4e968483782dfa78f8e200672a90d2d30fd8b7b49"}, + {file = "pyshp-2.3.1.tar.gz", hash = "sha256:4caec82fd8dd096feba8217858068bacb2a3b5950f43c048c6dc32a3489d5af1"}, +] + [[package]] name = "pytest" version = "8.2.2" @@ -2587,6 +3185,25 @@ files = [ {file = "pytz-2024.1.tar.gz", hash = "sha256:2a29735ea9c18baf14b448846bde5a48030ed267578472d8955cd0e7443a9812"}, ] +[[package]] +name = "pyviz-comms" +version = "3.0.2" +description = "A JupyterLab extension for rendering HoloViz content." 
+optional = false +python-versions = ">=3.8" +files = [ + {file = "pyviz_comms-3.0.2-py3-none-any.whl", hash = "sha256:31541b976a21b7738557c3ea23bd8e44e94e736b9ed269570dcc28db4449d7e3"}, + {file = "pyviz_comms-3.0.2.tar.gz", hash = "sha256:3167df932656416c4bd711205dad47e986a3ebae1f316258ddc26f9e01513ef7"}, +] + +[package.dependencies] +param = "*" + +[package.extras] +all = ["pyviz-comms[build]", "pyviz-comms[tests]"] +build = ["jupyterlab (>=4.0,<5.0)", "keyring", "rfc3986", "setuptools (>=40.8.0)", "twine"] +tests = ["flake8", "pytest"] + [[package]] name = "pywin32" version = "306" @@ -2769,6 +3386,56 @@ files = [ [package.dependencies] cffi = {version = "*", markers = "implementation_name == \"pypy\""} +[[package]] +name = "rasterio" +version = "1.3.10" +description = "Fast and direct raster I/O for use with Numpy and SciPy" +optional = false +python-versions = ">=3.8" +files = [ + {file = "rasterio-1.3.10-cp310-cp310-macosx_10_15_x86_64.whl", hash = "sha256:2ef27c3eff6f44f8b5d5de228003367c1843593edf648d85c0dc1319c00dc57d"}, + {file = "rasterio-1.3.10-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:c711b497e9ef0c4f5e1c01e34ba910708e066e1c4a69c25df18d1bcc04481287"}, + {file = "rasterio-1.3.10-cp310-cp310-manylinux2014_x86_64.whl", hash = "sha256:d1ac85857144cb8075e332e9d908b65426d30ddc1f59f7a04bcf6ed6fd3c0d47"}, + {file = "rasterio-1.3.10-cp310-cp310-win_amd64.whl", hash = "sha256:ef8a496740df1e68f7a3d3449aa3be9c3210c22f4bb78a4a9e1c290183abd9b1"}, + {file = "rasterio-1.3.10-cp311-cp311-macosx_10_15_x86_64.whl", hash = "sha256:97d867cada29f16cb83f1743217f775f8b982676fcdda77671d25abb26698159"}, + {file = "rasterio-1.3.10-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:505b3e659eb3b137192c25233bf7954bc4997b1a474bae9e129fbd5ac2619404"}, + {file = "rasterio-1.3.10-cp311-cp311-manylinux2014_x86_64.whl", hash = "sha256:30f27e309a14a70c821d10a0ea18b110968dc2e2186b06a900aebd92094f4e00"}, + {file = "rasterio-1.3.10-cp311-cp311-win_amd64.whl", hash = "sha256:cbb2eea127328302f9e3158a000363a7d9eea22537378dee4f824a7fa2d78c05"}, + {file = "rasterio-1.3.10-cp312-cp312-macosx_10_15_x86_64.whl", hash = "sha256:3a9c4fb63e050e11bcd23e53f084ca186b445f976df1f70e7abd851c4072837f"}, + {file = "rasterio-1.3.10-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:7c7ddca79444fd3b933f4cd1a1773e9f7839d0ce5d76e600bdf92ee9a79b95f8"}, + {file = "rasterio-1.3.10-cp312-cp312-manylinux2014_x86_64.whl", hash = "sha256:f9cd757e11cfb07ef39b1cc79a32497bf22aff7fec41fe330b868cb3043b4db5"}, + {file = "rasterio-1.3.10-cp312-cp312-win_amd64.whl", hash = "sha256:7e653968f64840654d277e0f86f8666ed8f3030ba36fa865f420f9bc38d619ee"}, + {file = "rasterio-1.3.10-cp38-cp38-macosx_10_15_x86_64.whl", hash = "sha256:7a22c0e0cf07dbed6576faf9a49bc4afa1afedd5a14441b64a3d3dd6d10dc274"}, + {file = "rasterio-1.3.10-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:d29d30c2271fa265913bd3db93fa213d3a0894362ec704e7273cf30443098a90"}, + {file = "rasterio-1.3.10-cp38-cp38-manylinux2014_x86_64.whl", hash = "sha256:287e8d0d0472c778aa0b6392e9c00894a80f2bace28fa6eddb76c0a895097947"}, + {file = "rasterio-1.3.10-cp38-cp38-win_amd64.whl", hash = "sha256:a420e5f25108b1c92c5d071cfd6518b3766f20a6eddb1b322d06c3d46a89fab6"}, + {file = "rasterio-1.3.10-cp39-cp39-macosx_10_15_x86_64.whl", hash = "sha256:73ea4d0e584f696ef115601bbb97ba8d2b68a67c2bb3b40999414d31b6c7cf89"}, + {file = "rasterio-1.3.10-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:e6eece6420d7d6ef9b9830633b8fcd15e86b8702cb13419abe251c16ca502cf3"}, + {file = 
"rasterio-1.3.10-cp39-cp39-manylinux2014_x86_64.whl", hash = "sha256:0bbd62b45a35cab53cb7fe72419e823e47ab31ee2d055af8e21dc7f37fe5ed6c"}, + {file = "rasterio-1.3.10-cp39-cp39-win_amd64.whl", hash = "sha256:450f2bd45335308829da90566fbcbdb8e8aa0251a9d1f6ebb60667855dfb7554"}, + {file = "rasterio-1.3.10.tar.gz", hash = "sha256:ce182c735b4f9e8735d90600607ecab15ef895eb8aa660bf665751529477e326"}, +] + +[package.dependencies] +affine = "*" +attrs = "*" +certifi = "*" +click = ">=4.0" +click-plugins = "*" +cligj = ">=0.5" +importlib-metadata = {version = "*", markers = "python_version < \"3.10\""} +numpy = "*" +setuptools = "*" +snuggs = ">=1.4.1" + +[package.extras] +all = ["boto3 (>=1.2.4)", "ghp-import", "hypothesis", "ipython (>=2.0)", "matplotlib", "numpydoc", "packaging", "pytest (>=2.8.2)", "pytest-cov (>=2.2.0)", "shapely", "sphinx", "sphinx-rtd-theme"] +docs = ["ghp-import", "numpydoc", "sphinx", "sphinx-rtd-theme"] +ipython = ["ipython (>=2.0)"] +plot = ["matplotlib"] +s3 = ["boto3 (>=1.2.4)"] +test = ["boto3 (>=1.2.4)", "hypothesis", "packaging", "pytest (>=2.8.2)", "pytest-cov (>=2.2.0)", "shapely"] + [[package]] name = "referencing" version = "0.35.1" @@ -2805,6 +3472,45 @@ urllib3 = ">=1.21.1,<3" socks = ["PySocks (>=1.5.6,!=1.5.7)"] use-chardet-on-py3 = ["chardet (>=3.0.2,<6)"] +[[package]] +name = "retrying" +version = "1.3.4" +description = "Retrying" +optional = false +python-versions = "*" +files = [ + {file = "retrying-1.3.4-py3-none-any.whl", hash = "sha256:8cc4d43cb8e1125e0ff3344e9de678fefd85db3b750b81b2240dc0183af37b35"}, + {file = "retrying-1.3.4.tar.gz", hash = "sha256:345da8c5765bd982b1d1915deb9102fd3d1f7ad16bd84a9700b85f64d24e8f3e"}, +] + +[package.dependencies] +six = ">=1.7.0" + +[[package]] +name = "rioxarray" +version = "0.15.0" +description = "geospatial xarray extension powered by rasterio" +optional = false +python-versions = ">=3.9" +files = [ + {file = "rioxarray-0.15.0-py3-none-any.whl", hash = "sha256:d7c0b2efc21075f77fe04302b916a995320004695f3c31e4f06d9ab40acd4498"}, + {file = "rioxarray-0.15.0.tar.gz", hash = "sha256:d2a8429a5b6405913c7b6f515ef2992b05139c96eb39a2dc1c9f475ce0848c9c"}, +] + +[package.dependencies] +numpy = ">=1.21" +packaging = "*" +pyproj = ">=2.2" +rasterio = ">=1.2" +xarray = ">=0.17" + +[package.extras] +all = ["dask", "mypy", "nbsphinx", "netcdf4", "pre-commit", "pylint", "pytest (>=3.6)", "pytest-cov", "pytest-timeout", "scipy", "sphinx-click", "sphinx-rtd-theme"] +dev = ["dask", "mypy", "nbsphinx", "netcdf4", "pre-commit", "pylint", "pytest (>=3.6)", "pytest-cov", "pytest-timeout", "scipy", "sphinx-click", "sphinx-rtd-theme"] +doc = ["nbsphinx", "sphinx-click", "sphinx-rtd-theme"] +interp = ["scipy"] +test = ["dask", "netcdf4", "pytest (>=3.6)", "pytest-cov", "pytest-timeout"] + [[package]] name = "rpds-py" version = "0.18.1" @@ -3060,6 +3766,24 @@ files = [ {file = "snowballstemmer-2.2.0.tar.gz", hash = "sha256:09b16deb8547d3412ad7b590689584cd0fe25ec8db3be37788be3810cbf19cb1"}, ] +[[package]] +name = "snuggs" +version = "1.4.7" +description = "Snuggs are s-expressions for Numpy" +optional = false +python-versions = "*" +files = [ + {file = "snuggs-1.4.7-py3-none-any.whl", hash = "sha256:988dde5d4db88e9d71c99457404773dabcc7a1c45971bfbe81900999942d9f07"}, + {file = "snuggs-1.4.7.tar.gz", hash = "sha256:501cf113fe3892e14e2fee76da5cd0606b7e149c411c271898e6259ebde2617b"}, +] + +[package.dependencies] +numpy = "*" +pyparsing = ">=2.1.6" + +[package.extras] +test = ["hypothesis", "pytest"] + [[package]] name = "soupsieve" version = "2.5" @@ 
-3071,6 +3795,30 @@ files = [ {file = "soupsieve-2.5.tar.gz", hash = "sha256:5663d5a7b3bfaeee0bc4372e7fc48f9cff4940b3eec54a6451cc5299f1097690"}, ] +[[package]] +name = "spatialpandas" +version = "0.4.10" +description = "Pandas extension arrays for spatial/geometric operations" +optional = false +python-versions = ">=3.9" +files = [ + {file = "spatialpandas-0.4.10-py2.py3-none-any.whl", hash = "sha256:e0ae0b0ee1ea4da1659654df9ba5a6437204f2242e3e33f5134536b45120e063"}, + {file = "spatialpandas-0.4.10.tar.gz", hash = "sha256:032e24ebb40f75c5c79cb79d7c281f2990e69ba382c0b24acb53da7bba60851c"}, +] + +[package.dependencies] +dask = "*" +fsspec = "*" +numba = "*" +pandas = "*" +param = "*" +pyarrow = ">=1.0" +retrying = "*" + +[package.extras] +examples = ["datashader", "descartes", "distributed", "geopandas", "holoviews", "matplotlib"] +tests = ["codecov", "flake8", "geopandas", "hilbertcurve", "hypothesis", "keyring", "moto[s3,server]", "pytest", "pytest-cov", "python-snappy", "rfc3986", "s3fs", "scipy", "shapely", "twine"] + [[package]] name = "sphinx" version = "7.3.7" @@ -3307,6 +4055,17 @@ files = [ {file = "tomli-2.0.1.tar.gz", hash = "sha256:de526c12914f0c550d15924c62d72abc48d6fe7364aa87328337a31007fe8a4f"}, ] +[[package]] +name = "toolz" +version = "0.12.1" +description = "List processing tools and functional utilities" +optional = false +python-versions = ">=3.7" +files = [ + {file = "toolz-0.12.1-py3-none-any.whl", hash = "sha256:d22731364c07d72eea0a0ad45bafb2c2937ab6fd38a3507bf55eae8744aa7d85"}, + {file = "toolz-0.12.1.tar.gz", hash = "sha256:ecca342664893f177a13dac0e6b41cbd8ac25a358e5f215316d43e2100224f4d"}, +] + [[package]] name = "tornado" version = "6.4.1" @@ -3421,6 +4180,20 @@ files = [ {file = "tzdata-2024.1.tar.gz", hash = "sha256:2674120f8d891909751c38abcdfd386ac0a5a1127954fbc332af6b5ceae07efd"}, ] +[[package]] +name = "uc-micro-py" +version = "1.0.3" +description = "Micro subset of unicode data files for linkify-it-py projects." 
+optional = false +python-versions = ">=3.7" +files = [ + {file = "uc-micro-py-1.0.3.tar.gz", hash = "sha256:d321b92cff673ec58027c04015fcaa8bb1e005478643ff4a500882eaab88c48a"}, + {file = "uc_micro_py-1.0.3-py3-none-any.whl", hash = "sha256:db1dffff340817673d7b466ec86114a9dc0e9d4d9b5ba229d9d60e5c12600cd5"}, +] + +[package.extras] +test = ["coverage", "pytest", "pytest-cov"] + [[package]] name = "urllib3" version = "1.26.18" @@ -3592,6 +4365,17 @@ io = ["cftime", "fsspec", "h5netcdf", "netCDF4", "pooch", "pydap", "scipy", "zar parallel = ["dask[complete]"] viz = ["matplotlib", "nc-time-axis", "seaborn"] +[[package]] +name = "xyzservices" +version = "2024.6.0" +description = "Source of XYZ tiles providers" +optional = false +python-versions = ">=3.8" +files = [ + {file = "xyzservices-2024.6.0-py3-none-any.whl", hash = "sha256:fecb2508f0f2b71c819aecf5df2c03cef001c56a4b49302e640f3b34710d25e4"}, + {file = "xyzservices-2024.6.0.tar.gz", hash = "sha256:58c1bdab4257d2551b9ef91cd48571f77b7c4d2bc45bf5e3c05ac97b3a4d7282"}, +] + [[package]] name = "yarl" version = "1.9.4" @@ -3713,4 +4497,4 @@ test = ["big-O", "importlib-resources", "jaraco.functools", "jaraco.itertools", [metadata] lock-version = "2.0" python-versions = ">=3.9, <4.0" -content-hash = "5f360a280427f16ccab50b40f1a192508b8df1707cd6ca18151124b088ef5442" +content-hash = "caed084e392ac2d5a7888a9fe38d799fb806a14a5292741f214c494f10d368a2" diff --git a/pyproject.toml b/pyproject.toml index 0adb457..ecb6f5e 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -73,6 +73,7 @@ toml = "*" sphinx-autodoc-typehints = "*" [tool.poetry.group.jupyter.dependencies] +hvplot = {version = "*", extras = ["geo"]} ipykernel = "*" matplotlib = "*" scipy = "*" diff --git a/requirements/requirements-dev.txt b/requirements/requirements-dev.txt index 901e1a9..32a20d8 100644 --- a/requirements/requirements-dev.txt +++ b/requirements/requirements-dev.txt @@ -1,3 +1,4 @@ +affine==2.4.0 ; python_version >= "3.9" and python_version < "4.0" alabaster==0.7.16 ; python_version >= "3.9" and python_version < "4.0" annotated-types==0.7.0 ; python_version >= "3.9" and python_version < "4.0" anyio==4.4.0 ; python_version >= "3.9" and python_version < "4.0" @@ -6,19 +7,25 @@ asttokens==2.4.1 ; python_version >= "3.9" and python_version < "4.0" attrs==23.2.0 ; python_version >= "3.9" and python_version < "4.0" babel==2.15.0 ; python_version >= "3.9" and python_version < "4.0" beautifulsoup4==4.12.3 ; python_version >= "3.9" and python_version < "4.0" +bleach==6.1.0 ; python_version >= "3.9" and python_version < "4.0" +bokeh==3.4.1 ; python_version >= "3.9" and python_version < "4.0" +cartopy==0.23.0 ; python_version >= "3.9" and python_version < "4.0" certifi==2024.6.2 ; python_version >= "3.9" and python_version < "4.0" cffi==1.16.0 ; python_version >= "3.9" and python_version < "4.0" and implementation_name == "pypy" charset-normalizer==3.3.2 ; python_version >= "3.9" and python_version < "4.0" click-plugins==1.1.1 ; python_version >= "3.9" and python_version < "4.0" click==8.1.7 ; python_version >= "3.9" and python_version < "4.0" cligj==0.7.2 ; python_version >= "3.9" and python_version < "4" +cloudpickle==3.0.0 ; python_version >= "3.9" and python_version < "4.0" colorama==0.4.6 ; python_version >= "3.9" and python_version < "4.0" and (platform_system == "Windows" or sys_platform == "win32") +colorcet==3.1.0 ; python_version >= "3.9" and python_version < "4.0" comm==0.2.2 ; python_version >= "3.9" and python_version < "4.0" contourpy==1.2.1 ; python_version >= "3.9" and 
python_version < "4.0" covdefaults==2.3.0 ; python_version >= "3.9" and python_version < "4.0" coverage==7.5.3 ; python_version >= "3.9" and python_version < "4.0" coverage[toml]==7.5.3 ; python_version >= "3.9" and python_version < "4.0" cycler==0.12.1 ; python_version >= "3.9" and python_version < "4.0" +dask==2024.6.2 ; python_version >= "3.9" and python_version < "4.0" dataretrieval==1.0.9 ; python_version >= "3.9" and python_version < "4.0" debugpy==1.8.1 ; python_version >= "3.9" and python_version < "4.0" decorator==5.1.1 ; python_version >= "3.9" and python_version < "4.0" @@ -32,15 +39,19 @@ executing==2.0.1 ; python_version >= "3.9" and python_version < "4.0" fastjsonschema==2.20.0 ; python_version >= "3.9" and python_version < "4.0" fiona==1.9.6 ; python_version >= "3.9" and python_version < "4.0" fonttools==4.53.0 ; python_version >= "3.9" and python_version < "4.0" +fsspec==2024.6.0 ; python_version >= "3.9" and python_version < "4.0" furo==2024.5.6 ; python_version >= "3.9" and python_version < "4.0" geopandas==0.14.4 ; python_version >= "3.9" and python_version < "4.0" +geoviews==1.12.0 ; python_version >= "3.9" and python_version < "4.0" h11==0.14.0 ; python_version >= "3.9" and python_version < "4.0" +holoviews==1.19.0 ; python_version >= "3.9" and python_version < "4.0" html5lib==1.1 ; python_version >= "3.9" and python_version < "4.0" httpcore==1.0.5 ; python_version >= "3.9" and python_version < "4.0" httpx==0.27.0 ; python_version >= "3.9" and python_version < "4.0" +hvplot[geo]==0.10.0 ; python_version >= "3.9" and python_version < "4.0" idna==3.7 ; python_version >= "3.9" and python_version < "4.0" imagesize==1.4.1 ; python_version >= "3.9" and python_version < "4.0" -importlib-metadata==7.1.0 ; python_version >= "3.9" and python_version < "3.10" +importlib-metadata==7.1.0 ; python_version >= "3.9" and python_version < "3.12" importlib-resources==6.4.0 ; python_version >= "3.9" and python_version < "4.0" iniconfig==2.0.0 ; python_version >= "3.9" and python_version < "4.0" ipykernel==6.29.4 ; python_version >= "3.9" and python_version < "4.0" @@ -55,13 +66,20 @@ jupyter-core==5.7.2 ; python_version >= "3.9" and python_version < "4.0" jupyterlab-widgets==3.0.11 ; python_version >= "3.9" and python_version < "4.0" kiwisolver==1.4.5 ; python_version >= "3.9" and python_version < "4.0" limits==3.12.0 ; python_version >= "3.9" and python_version < "4.0" +linkify-it-py==2.0.3 ; python_version >= "3.9" and python_version < "4.0" +llvmlite==0.43.0 ; python_version >= "3.9" and python_version < "4.0" +locket==1.0.0 ; python_version >= "3.9" and python_version < "4.0" lxml-html-clean==0.1.1 ; python_version >= "3.9" and python_version < "4.0" lxml==5.2.2 ; python_version >= "3.9" and python_version < "4.0" lxml[html-clean]==5.2.2 ; python_version >= "3.9" and python_version < "4.0" m2r2==0.3.3.post2 ; python_version >= "3.9" and python_version < "4.0" +markdown-it-py==3.0.0 ; python_version >= "3.9" and python_version < "4.0" +markdown==3.6 ; python_version >= "3.9" and python_version < "4.0" markupsafe==2.1.5 ; python_version >= "3.9" and python_version < "4.0" matplotlib-inline==0.1.7 ; python_version >= "3.9" and python_version < "4.0" matplotlib==3.9.0 ; python_version >= "3.9" and python_version < "4.0" +mdit-py-plugins==0.4.1 ; python_version >= "3.9" and python_version < "4.0" +mdurl==0.1.2 ; python_version >= "3.9" and python_version < "4.0" mistune==0.8.4 ; python_version >= "3.9" and python_version < "4.0" multidict==6.0.5 ; python_version >= "3.9" and python_version 
< "4.0" multifutures==0.3.2 ; python_version >= "3.9" and python_version < "4.0" @@ -71,10 +89,14 @@ nbclient==0.6.8 ; python_version >= "3.9" and python_version < "4.0" nbformat==5.10.4 ; python_version >= "3.9" and python_version < "4.0" nbmake==1.5.4 ; python_version >= "3.9" and python_version < "4.0" nest-asyncio==1.6.0 ; python_version >= "3.9" and python_version < "4.0" +numba==0.60.0 ; python_version >= "3.9" and python_version < "4.0" numpy==2.0.0 ; python_version >= "3.9" and python_version < "4.0" packaging==24.1 ; python_version >= "3.9" and python_version < "4.0" pandas==2.2.2 ; python_version >= "3.9" and python_version < "4.0" +panel==1.4.4 ; python_version >= "3.9" and python_version < "4.0" +param==2.1.0 ; python_version >= "3.9" and python_version < "4.0" parso==0.8.4 ; python_version >= "3.9" and python_version < "4.0" +partd==1.4.2 ; python_version >= "3.9" and python_version < "4.0" pexpect==4.9.0 ; python_version >= "3.9" and python_version < "4.0" and sys_platform != "win32" pillow==10.3.0 ; python_version >= "3.9" and python_version < "4.0" platformdirs==4.2.2 ; python_version >= "3.9" and python_version < "4.0" @@ -83,23 +105,29 @@ prompt-toolkit==3.0.47 ; python_version >= "3.9" and python_version < "4.0" psutil==5.9.8 ; python_version >= "3.9" and python_version < "4.0" ptyprocess==0.7.0 ; python_version >= "3.9" and python_version < "4.0" and sys_platform != "win32" pure-eval==0.2.2 ; python_version >= "3.9" and python_version < "4.0" +pyarrow==16.1.0 ; python_version >= "3.9" and python_version < "4.0" pycparser==2.22 ; python_version >= "3.9" and python_version < "4.0" and implementation_name == "pypy" pydantic-core==2.18.4 ; python_version >= "3.9" and python_version < "4.0" pydantic==2.7.4 ; python_version >= "3.9" and python_version < "4.0" pygments==2.18.0 ; python_version >= "3.9" and python_version < "4.0" pyparsing==3.1.2 ; python_version >= "3.9" and python_version < "4.0" pyproj==3.6.1 ; python_version >= "3.9" and python_version < "4.0" +pyshp==2.3.1 ; python_version >= "3.9" and python_version < "4.0" pytest-cov==5.0.0 ; python_version >= "3.9" and python_version < "4.0" pytest-recording==0.13.1 ; python_version >= "3.9" and python_version < "4.0" pytest-xdist==3.6.1 ; python_version >= "3.9" and python_version < "4.0" pytest==8.2.2 ; python_version >= "3.9" and python_version < "4.0" python-dateutil==2.9.0.post0 ; python_version >= "3.9" and python_version < "4.0" pytz==2024.1 ; python_version >= "3.9" and python_version < "4.0" +pyviz-comms==3.0.2 ; python_version >= "3.9" and python_version < "4.0" pywin32==306 ; sys_platform == "win32" and platform_python_implementation != "PyPy" and python_version >= "3.9" and python_version < "4.0" pyyaml==6.0.1 ; python_version >= "3.9" and python_version < "4.0" pyzmq==26.0.3 ; python_version >= "3.9" and python_version < "4.0" +rasterio==1.3.10 ; python_version >= "3.9" and python_version < "4.0" referencing==0.35.1 ; python_version >= "3.9" and python_version < "4.0" requests==2.32.3 ; python_version >= "3.9" and python_version < "4.0" +retrying==1.3.4 ; python_version >= "3.9" and python_version < "4.0" +rioxarray==0.15.0 ; python_version >= "3.9" and python_version < "4.0" rpds-py==0.18.1 ; python_version >= "3.9" and python_version < "4.0" scipy==1.13.1 ; python_version >= "3.9" and python_version < "4.0" setuptools==70.0.0 ; python_version >= "3.9" and python_version < "4.0" @@ -107,7 +135,9 @@ shapely==2.0.4 ; python_version >= "3.9" and python_version < "4.0" six==1.16.0 ; python_version >= "3.9" and 
python_version < "4.0"
 sniffio==1.3.1 ; python_version >= "3.9" and python_version < "4.0"
 snowballstemmer==2.2.0 ; python_version >= "3.9" and python_version < "4.0"
+snuggs==1.4.7 ; python_version >= "3.9" and python_version < "4.0"
 soupsieve==2.5 ; python_version >= "3.9" and python_version < "4.0"
+spatialpandas==0.4.10 ; python_version >= "3.9" and python_version < "4.0"
 sphinx-autodoc-typehints==2.1.1 ; python_version >= "3.9" and python_version < "4.0"
 sphinx-basic-ng==1.0.0b2 ; python_version >= "3.9" and python_version < "4.0"
 sphinx==7.3.7 ; python_version >= "3.9" and python_version < "4.0"
@@ -122,6 +152,7 @@ stack-data==0.6.3 ; python_version >= "3.9" and python_version < "4.0"
 tenacity==8.4.1 ; python_version >= "3.9" and python_version < "4.0"
 toml==0.10.2 ; python_version >= "3.9" and python_version < "4.0"
 tomli==2.0.1 ; python_full_version <= "3.11.0a6" and python_version >= "3.9"
+toolz==0.12.1 ; python_version >= "3.9" and python_version < "4.0"
 tornado==6.4.1 ; python_version >= "3.9" and python_version < "4.0"
 tqdm==4.66.4 ; python_version >= "3.9" and python_version < "4.0"
 tqdm[notebook]==4.66.4 ; python_version >= "3.9" and python_version < "4.0"
@@ -131,6 +162,7 @@ types-requests==2.31.0.6 ; python_version >= "3.9" and python_version < "4.0"
 types-urllib3==1.26.25.14 ; python_version >= "3.9" and python_version < "4.0"
 typing-extensions==4.12.2 ; python_version >= "3.9" and python_version < "4.0"
 tzdata==2024.1 ; python_version >= "3.9" and python_version < "4.0"
+uc-micro-py==1.0.3 ; python_version >= "3.9" and python_version < "4.0"
 urllib3==1.26.18 ; python_version >= "3.9" and python_version < "4.0"
 vcrpy==6.0.1 ; python_version >= "3.9" and python_version < "4.0"
 wcwidth==0.2.13 ; python_version >= "3.9" and python_version < "4.0"
@@ -138,5 +170,6 @@ webencodings==0.5.1 ; python_version >= "3.9" and python_version < "4.0"
 widgetsnbextension==4.0.11 ; python_version >= "3.9" and python_version < "4.0"
 wrapt==1.16.0 ; python_version >= "3.9" and python_version < "4.0"
 xarray==2024.6.0 ; python_version >= "3.9" and python_version < "4.0"
+xyzservices==2024.6.0 ; python_version >= "3.9" and python_version < "4.0"
 yarl==1.9.4 ; python_version >= "3.9" and python_version < "4.0"
-zipp==3.19.2 ; python_version >= "3.9" and python_version < "3.10"
+zipp==3.19.2 ; python_version >= "3.9" and python_version < "3.12"

From 536ddbce3a9be892a9777f4b67e21c4b66b5f1ee Mon Sep 17 00:00:00 2001
From: Panos Mavrogiorgos
Date: Wed, 19 Jun 2024 15:25:52 +0300
Subject: [PATCH 15/15] ci: Add workflow for testing installation from PyPI.
Fixes #145 --- .github/workflows/install_from_pypi.yml | 38 +++++++++++++++++++++++++ 1 file changed, 38 insertions(+) create mode 100644 .github/workflows/install_from_pypi.yml diff --git a/.github/workflows/install_from_pypi.yml b/.github/workflows/install_from_pypi.yml new file mode 100644 index 0000000..047b949 --- /dev/null +++ b/.github/workflows/install_from_pypi.yml @@ -0,0 +1,38 @@ +name: "Test installation from PyPI" + +on: + workflow_dispatch: + schedule: + - cron: "4 5 * * *" # Every day at 05:04 + +jobs: + test_pypi_installation: + name: test PyPI installation + runs-on: ${{ matrix.os }} + strategy: + matrix: + include: + - os: "ubuntu-latest" + python: "3.12" + steps: + - uses: actions/checkout@main + - uses: actions/setup-python@main + with: + python-version: ${{ matrix.python }} + # Debug + - run: type -a python + - run: python --version + - run: python -m pip --version + - run: python -m pip cache info + # Install the package from pypi + - run: python -m pip install searvey + - run: python -m pip freeze + # Checkout the version of code that got installed from PyPI + - run: git fetch --tags + - run: git checkout v$(python -c 'import importlib.metadata; print(importlib.metadata.version("searvey"))') + # Install test dependencies + - run: pip install -U $(cat requirements/requirements-dev.txt| grep --extended-regexp 'pytest=|pytest-recording=|urllib3=' | cut -d ';' -f1) + # Remove the source code (just to be sure that it is not being used) + - run: rm -rf searvey + # Run the tests + - run: make test
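
A note on the version pinning in the workflow above: the `git checkout v$(python -c ...)` step resolves the wheel that was just installed from PyPI back to the release tag it was built from, so the test suite that runs matches the exact revision that was published. A minimal Python sketch of that lookup, assuming (as the workflow does) that releases are tagged `v<version>`:

    # Resolve the installed "searvey" distribution to its release tag.
    # Assumption: releases are tagged "v<version>", e.g. 1.0.0 -> v1.0.0.
    import importlib.metadata

    version = importlib.metadata.version("searvey")
    print(f"v{version}")  # the workflow passes this value to `git checkout`

Removing the `searvey` source directory before running `make test` then guarantees that the tests import the package installed from PyPI rather than the checked-out working tree.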