
Commit

Merge branch 'main' into redo-blockwise-interp
dcherian authored Dec 18, 2024
2 parents 97a388e + 8afed74 commit ef24840
Showing 9 changed files with 93 additions and 31 deletions.
4 changes: 2 additions & 2 deletions .github/workflows/pypi-release.yaml
@@ -88,7 +88,7 @@ jobs:
path: dist
- name: Publish package to TestPyPI
if: github.event_name == 'push'
uses: pypa/gh-action-pypi-publish@v1.12.2
uses: pypa/gh-action-pypi-publish@v1.12.3
with:
repository_url: https://test.pypi.org/legacy/
verbose: true
@@ -110,6 +110,6 @@ jobs:
name: releases
path: dist
- name: Publish package to PyPI
uses: pypa/gh-action-pypi-publish@v1.12.2
uses: pypa/gh-action-pypi-publish@v1.12.3
with:
verbose: true
3 changes: 3 additions & 0 deletions doc/api.rst
@@ -626,12 +626,14 @@ Attributes relating to the recursive tree-like structure of a ``DataTree``.
DataTree.depth
DataTree.width
DataTree.subtree
DataTree.subtree_with_keys
DataTree.descendants
DataTree.siblings
DataTree.lineage
DataTree.parents
DataTree.ancestors
DataTree.groups
DataTree.xindexes

Data Contents
-------------
@@ -645,6 +647,7 @@ This interface echoes that of ``xarray.Dataset``.
DataTree.dims
DataTree.sizes
DataTree.data_vars
DataTree.ds
DataTree.coords
DataTree.attrs
DataTree.encoding
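
The api.rst additions above list ``DataTree.subtree_with_keys``, ``DataTree.ds``, and ``DataTree.xindexes``. A minimal usage sketch, assuming ``subtree_with_keys`` yields ``(key, node)`` pairs as its name suggests:

    import xarray as xr

    tree = xr.DataTree.from_dict(
        {
            "/": xr.Dataset({"a": ("x", [1, 2])}),
            "/child": xr.Dataset({"b": ("x", [3, 4])}),
        }
    )

    # .ds returns an immutable view of this node's own data.
    print(tree.ds)

    # .subtree_with_keys is assumed to pair each node with its key in the tree.
    for key, node in tree.subtree_with_keys:
        print(key, list(node.data_vars))
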
8 changes: 7 additions & 1 deletion doc/whats-new.rst
@@ -24,7 +24,9 @@ New Features
- Better support wrapping additional array types (e.g. ``cupy`` or ``jax``) by calling generalized
duck array operations throughout more xarray methods. (:issue:`7848`, :pull:`9798`).
By `Sam Levang <https://github.com/slevang>`_.

- Add ``unit`` keyword argument to :py:func:`date_range` and ``microsecond`` parsing to
the iso8601 parser (:pull:`9885`).
By `Kai Mühlbauer <https://github.com/kmuehlbauer>`_.

Breaking changes
~~~~~~~~~~~~~~~~
@@ -59,6 +61,10 @@ Internal Changes
~~~~~~~~~~~~~~~~
- Move non-CF related ``ensure_dtype_not_object`` from conventions to backends (:pull:`9828`).
By `Kai Mühlbauer <https://github.com/kmuehlbauer>`_.
- Move handling of scalar datetimes into ``_possibly_convert_objects``
within ``as_compatible_data``. This is consistent with how lists of these objects
will be converted (:pull:`9900`).
By `Kai Mühlbauer <https://github.com/kmuehlbauer>`_.

.. _whats-new.2024.11.0:

14 changes: 12 additions & 2 deletions xarray/coding/cftime_offsets.py
@@ -68,7 +68,12 @@
from xarray.core.utils import attempt_import, emit_user_level_warning

if TYPE_CHECKING:
from xarray.core.types import InclusiveOptions, Self, TypeAlias
from xarray.core.types import (
InclusiveOptions,
PDDatetimeUnitOptions,
Self,
TypeAlias,
)


DayOption: TypeAlias = Literal["start", "end"]
@@ -971,7 +976,6 @@ def cftime_range(
Include boundaries; whether to set each bound as closed or open.
.. versionadded:: 2023.02.0
calendar : str, default: "standard"
Calendar type for the datetimes.
@@ -1180,6 +1184,7 @@ def date_range(
normalize=False,
name=None,
inclusive: InclusiveOptions = "both",
unit: PDDatetimeUnitOptions = "ns",
calendar="standard",
use_cftime=None,
):
@@ -1210,6 +1215,10 @@
Include boundaries; whether to set each bound as closed or open.
.. versionadded:: 2023.02.0
unit : {"s", "ms", "us", "ns"}, default "ns"
Specify the desired resolution of the result.
.. versionadded:: 2024.12.0
calendar : str, default: "standard"
Calendar type for the datetimes.
use_cftime : boolean, optional
@@ -1245,6 +1254,7 @@
normalize=normalize,
name=name,
inclusive=inclusive,
unit=unit,
)
except pd.errors.OutOfBoundsDatetime as err:
if use_cftime is False:
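
A short usage sketch of the new ``unit`` argument added to ``date_range`` above; the expected dtype assumes pandas >= 2.0, where non-nanosecond resolutions are supported, and a non-cftime calendar:

    import xarray as xr

    # Ask for second resolution instead of the default nanoseconds.
    times = xr.date_range("2000-01-01", periods=3, freq="D", unit="s")
    print(times)
    print(times.dtype)  # expected: datetime64[s] with pandas >= 2.0
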
10 changes: 6 additions & 4 deletions xarray/coding/cftimeindex.py
@@ -92,14 +92,15 @@ def trailing_optional(xs):
return xs[0] + optional(trailing_optional(xs[1:]))


def build_pattern(date_sep=r"\-", datetime_sep=r"T", time_sep=r"\:"):
def build_pattern(date_sep=r"\-", datetime_sep=r"T", time_sep=r"\:", micro_sep=r"."):
pieces = [
(None, "year", r"\d{4}"),
(date_sep, "month", r"\d{2}"),
(date_sep, "day", r"\d{2}"),
(datetime_sep, "hour", r"\d{2}"),
(time_sep, "minute", r"\d{2}"),
(time_sep, "second", r"\d{2}"),
(micro_sep, "microsecond", r"\d{1,6}"),
]
pattern_list = []
for sep, name, sub_pattern in pieces:
@@ -131,11 +132,12 @@ def _parse_iso8601_with_reso(date_type, timestr):
result = parse_iso8601_like(timestr)
replace = {}

for attr in ["year", "month", "day", "hour", "minute", "second"]:
for attr in ["year", "month", "day", "hour", "minute", "second", "microsecond"]:
value = result.get(attr, None)
if value is not None:
# Note ISO8601 conventions allow for fractional seconds.
# TODO: Consider adding support for sub-second resolution?
if attr == "microsecond":
# convert match string into valid microsecond value
value = 10 ** (6 - len(value)) * int(value)
replace[attr] = int(value)
resolution = attr
return default.replace(**replace), resolution
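
The fractional-second branch above scales the matched digits up to a microsecond count (``10 ** (6 - len(value)) * int(value)``). A standalone sketch of that conversion, not the actual xarray helper:

    import re

    # Simplified stand-in for the extended pattern: two second digits optionally
    # followed by a "." and 1-6 fractional digits captured as "microsecond".
    pattern = re.compile(r"(?P<second>\d{2})(?:\.(?P<microsecond>\d{1,6}))?$")

    for text in ["56", "56.1", "56.123456"]:
        match = pattern.match(text)
        frac = match.group("microsecond")
        # Pad the fraction out to six digits, e.g. ".1" -> 100000 microseconds.
        micro = 10 ** (6 - len(frac)) * int(frac) if frac is not None else 0
        print(text, micro)
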
1 change: 1 addition & 0 deletions xarray/core/types.py
@@ -249,6 +249,7 @@ def copy(
"Y", "M", "W", "D", "h", "m", "s", "ms", "us", "μs", "ns", "ps", "fs", "as", None
]
NPDatetimeUnitOptions = Literal["D", "h", "m", "s", "ms", "us", "ns"]
PDDatetimeUnitOptions = Literal["s", "ms", "us", "ns"]

QueryEngineOptions = Literal["python", "numexpr", None]
QueryParserOptions = Literal["pandas", "python"]
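
A small sketch of how a ``Literal`` alias like the one added above constrains arguments at type-check time; the function name here is hypothetical:

    from typing import Literal

    PDDatetimeUnitOptions = Literal["s", "ms", "us", "ns"]

    def as_dtype_string(unit: PDDatetimeUnitOptions) -> str:
        # A static type checker rejects e.g. as_dtype_string("D"), since "D"
        # is not one of the pandas-supported datetime resolutions listed above.
        return f"datetime64[{unit}]"

    print(as_dtype_string("ms"))  # datetime64[ms]
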
22 changes: 10 additions & 12 deletions xarray/core/variable.py
@@ -6,7 +6,6 @@
import numbers
import warnings
from collections.abc import Callable, Hashable, Mapping, Sequence
from datetime import timedelta
from functools import partial
from types import EllipsisType
from typing import TYPE_CHECKING, Any, NoReturn, cast
@@ -232,10 +231,16 @@ def _as_nanosecond_precision(data):

def _possibly_convert_objects(values):
"""Convert arrays of datetime.datetime and datetime.timedelta objects into
datetime64 and timedelta64, according to the pandas convention. For the time
being, convert any non-nanosecond precision DatetimeIndex or TimedeltaIndex
objects to nanosecond precision. While pandas is relaxing this in version
2.0.0, in xarray we will need to make sure we are ready to handle
datetime64 and timedelta64, according to the pandas convention.
* datetime.datetime
* datetime.timedelta
* pd.Timestamp
* pd.Timedelta
For the time being, convert any non-nanosecond precision DatetimeIndex or
TimedeltaIndex objects to nanosecond precision. While pandas is relaxing this
in version 2.0.0, in xarray we will need to make sure we are ready to handle
non-nanosecond precision datetimes or timedeltas in our code before allowing
such values to pass through unchanged. Converting to nanosecond precision
through pandas.Series objects ensures that datetimes and timedeltas are
@@ -305,13 +310,6 @@ def convert_non_numpy_type(data):
if isinstance(data, tuple):
data = utils.to_0d_object_array(data)

if isinstance(data, pd.Timestamp):
# TODO: convert, handle datetime objects, too
data = np.datetime64(data.value, "ns")

if isinstance(data, timedelta):
data = np.timedelta64(getattr(data, "value", data), "ns")

# we don't want nested self-described arrays
if isinstance(data, pd.Series | pd.DataFrame):
pandas_data = data.values
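
With the scalar ``pd.Timestamp``/``timedelta`` special-casing removed above, scalar datetimes should take the same conversion path as lists of them. A hedged sketch of the expected behaviour:

    import datetime

    import pandas as pd
    import xarray as xr

    # A scalar pd.Timestamp and a list of datetime.datetime objects are both
    # expected to be coerced to datetime64 through the same code path.
    scalar_var = xr.Variable((), pd.Timestamp("2000-01-01"))
    list_var = xr.Variable("time", [datetime.datetime(2000, 1, 1)])
    print(scalar_var.dtype, list_var.dtype)  # expected: datetime64[ns] datetime64[ns]
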
52 changes: 47 additions & 5 deletions xarray/tests/test_cftimeindex.py
@@ -35,9 +35,23 @@
standard_or_gregorian = "standard"


def date_dict(year=None, month=None, day=None, hour=None, minute=None, second=None):
def date_dict(
year=None,
month=None,
day=None,
hour=None,
minute=None,
second=None,
microsecond=None,
):
return dict(
year=year, month=month, day=day, hour=hour, minute=minute, second=second
year=year,
month=month,
day=day,
hour=hour,
minute=minute,
second=second,
microsecond=microsecond,
)


@@ -86,6 +100,30 @@ def date_dict(year=None, month=None, day=None, hour=None, minute=None, second=None):
year="1999", month="01", day="01", hour="12", minute="34", second="56"
),
),
"microsecond-1": (
"19990101T123456.123456",
date_dict(
year="1999",
month="01",
day="01",
hour="12",
minute="34",
second="56",
microsecond="123456",
),
),
"microsecond-2": (
"19990101T123456.1",
date_dict(
year="1999",
month="01",
day="01",
hour="12",
minute="34",
second="56",
microsecond="1",
),
),
}


@@ -98,9 +136,12 @@ def test_parse_iso8601_like(string, expected):
result = parse_iso8601_like(string)
assert result == expected

with pytest.raises(ValueError):
parse_iso8601_like(string + "3")
parse_iso8601_like(string + ".3")
if result["microsecond"] is None:
with pytest.raises(ValueError):
parse_iso8601_like(string + "3")
if result["second"] is None:
with pytest.raises(ValueError):
parse_iso8601_like(string + ".3")


_CFTIME_CALENDARS = [
@@ -301,6 +342,7 @@ def test_cftimeindex_days_in_month_accessor(index):
("19990202T01", (1999, 2, 2, 1), "hour"),
("19990202T0101", (1999, 2, 2, 1, 1), "minute"),
("19990202T010156", (1999, 2, 2, 1, 1, 56), "second"),
("19990202T010156.123456", (1999, 2, 2, 1, 1, 56, 123456), "microsecond"),
],
)
def test_parse_iso8601_with_reso(date_type, string, date_args, reso):
10 changes: 5 additions & 5 deletions xarray/tests/test_plot.py
@@ -90,27 +90,27 @@ def text_in_fig() -> set[str]:
"""
Return the set of all text in the figure
"""
return {t.get_text() for t in plt.gcf().findobj(mpl.text.Text)} # type: ignore[attr-defined] # mpl error?
return {t.get_text() for t in plt.gcf().findobj(mpl.text.Text)}


def find_possible_colorbars() -> list[mpl.collections.QuadMesh]:
# nb. this function also matches meshes from pcolormesh
return plt.gcf().findobj(mpl.collections.QuadMesh) # type: ignore[return-value] # mpl error?
return plt.gcf().findobj(mpl.collections.QuadMesh)


def substring_in_axes(substring: str, ax: mpl.axes.Axes) -> bool:
"""
Return True if a substring is found anywhere in an axes
"""
alltxt: set[str] = {t.get_text() for t in ax.findobj(mpl.text.Text)} # type: ignore[attr-defined] # mpl error?
alltxt: set[str] = {t.get_text() for t in ax.findobj(mpl.text.Text)}
return any(substring in txt for txt in alltxt)


def substring_not_in_axes(substring: str, ax: mpl.axes.Axes) -> bool:
"""
Return True if a substring is not found anywhere in an axes
"""
alltxt: set[str] = {t.get_text() for t in ax.findobj(mpl.text.Text)} # type: ignore[attr-defined] # mpl error?
alltxt: set[str] = {t.get_text() for t in ax.findobj(mpl.text.Text)}
check = [(substring not in txt) for txt in alltxt]
return all(check)

@@ -122,7 +122,7 @@ def property_in_axes_text(
Return True if the specified text in an axes
has the property assigned to property_str
"""
alltxt: list[mpl.text.Text] = ax.findobj(mpl.text.Text) # type: ignore[assignment]
alltxt: list[mpl.text.Text] = ax.findobj(mpl.text.Text)
return all(
plt.getp(t, property) == property_str
for t in alltxt
