Merge branch 'main' into api_stability
kmuehlbauer authored Dec 16, 2024
2 parents 9fcfad4 + 0945e0e commit f2327df
Showing 19 changed files with 139 additions and 217 deletions.
8 changes: 4 additions & 4 deletions .github/workflows/ci-additional.yaml
@@ -123,7 +123,7 @@ jobs:
python -m mypy --install-types --non-interactive --cobertura-xml-report mypy_report
- name: Upload mypy coverage to Codecov
uses: codecov/codecov-action@v5.0.7
uses: codecov/codecov-action@v5.1.1
with:
file: mypy_report/cobertura.xml
flags: mypy
@@ -174,7 +174,7 @@ jobs:
python -m mypy --install-types --non-interactive --cobertura-xml-report mypy_report
- name: Upload mypy coverage to Codecov
uses: codecov/codecov-action@v5.0.7
uses: codecov/codecov-action@v5.1.1
with:
file: mypy_report/cobertura.xml
flags: mypy-min
@@ -230,7 +230,7 @@ jobs:
python -m pyright xarray/
- name: Upload pyright coverage to Codecov
uses: codecov/codecov-action@v5.0.7
uses: codecov/codecov-action@v5.1.1
with:
file: pyright_report/cobertura.xml
flags: pyright
@@ -286,7 +286,7 @@ jobs:
python -m pyright xarray/
- name: Upload pyright coverage to Codecov
uses: codecov/codecov-action@v5.0.7
uses: codecov/codecov-action@v5.1.1
with:
file: pyright_report/cobertura.xml
flags: pyright39
4 changes: 3 additions & 1 deletion .github/workflows/ci.yaml
@@ -159,7 +159,9 @@ jobs:
path: pytest.xml

- name: Upload code coverage to Codecov
uses: codecov/codecov-action@v5.0.7
uses: codecov/codecov-action@v5.1.1
env:
CODECOV_TOKEN: ${{ secrets.CODECOV_TOKEN }}
with:
file: ./coverage.xml
flags: unittests
4 changes: 2 additions & 2 deletions .github/workflows/pypi-release.yaml
@@ -88,7 +88,7 @@ jobs:
path: dist
- name: Publish package to TestPyPI
if: github.event_name == 'push'
uses: pypa/gh-action-pypi-publish@v1.12.2
uses: pypa/gh-action-pypi-publish@v1.12.3
with:
repository_url: https://test.pypi.org/legacy/
verbose: true
@@ -110,6 +110,6 @@ jobs:
name: releases
path: dist
- name: Publish package to PyPI
uses: pypa/gh-action-pypi-publish@v1.12.2
uses: pypa/gh-action-pypi-publish@v1.12.3
with:
verbose: true
2 changes: 1 addition & 1 deletion .github/workflows/upstream-dev-ci.yaml
@@ -140,7 +140,7 @@ jobs:
run: |
python -m mypy --install-types --non-interactive --cobertura-xml-report mypy_report
- name: Upload mypy coverage to Codecov
uses: codecov/codecov-action@v5.0.7
uses: codecov/codecov-action@v5.1.1
with:
file: mypy_report/cobertura.xml
flags: mypy
2 changes: 1 addition & 1 deletion asv_bench/benchmarks/__init__.py
@@ -48,7 +48,7 @@ def randn(shape, frac_nan=None, chunks=None, seed=0):

def randint(low, high=None, size=None, frac_minus=None, seed=0):
rng = np.random.default_rng(seed)
x = rng.randint(low, high, size)
x = rng.integers(low, high, size)
if frac_minus is not None:
inds = rng.choice(range(x.size), int(x.size * frac_minus))
x.flat[inds] = -1
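The benchmark fix above reflects the split in NumPy's random API: `np.random.default_rng()` returns a `Generator`, which has no `randint` method; the equivalent call is `integers`. A minimal sketch of the two interfaces side by side (seed and bounds are arbitrary):

```python
import numpy as np

# Legacy interface: RandomState exposes .randint()
legacy = np.random.RandomState(0).randint(0, 10, size=5)

# Generator interface (returned by default_rng) renames it to .integers()
modern = np.random.default_rng(0).integers(0, 10, size=5)

# Both draw integers from [0, 10); only .integers() exists on a Generator,
# which is why the benchmark helper had to change.
print(legacy, modern)
```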
18 changes: 14 additions & 4 deletions doc/whats-new.rst
@@ -14,9 +14,9 @@ What's New
np.random.seed(123456)
.. _whats-new.2024.11.1:
.. _whats-new.2024.12.0:

v.2024.11.1 (unreleased)
v.2024.12.0 (unreleased)
------------------------

New Features
@@ -28,18 +28,28 @@ New Features

Breaking changes
~~~~~~~~~~~~~~~~

- Methods including ``dropna``, ``rank``, ``idxmax``, ``idxmin`` require
non-dimension arguments to be passed as keyword arguments. The previous
behavior, which allowed ``.idxmax('foo', 'all')`` was too easily confused with
``'all'`` being a dimension. The updated equivalent is ``.idxmax('foo',
how='all')``. The previous behavior was deprecated in v2023.10.0.
By `Maximilian Roos <https://github.com/max-sixty>`_.

Deprecations
~~~~~~~~~~~~

- Finalize deprecation of ``closed`` parameters of :py:func:`cftime_range` and
:py:func:`date_range` (:pull:`9882`).
By `Kai Mühlbauer <https://github.com/kmuehlbauer>`_.

Bug fixes
~~~~~~~~~
- Fix type annotations for ``get_axis_num``. (:issue:`9822`, :pull:`9827`).
By `Bruce Merry <https://github.com/bmerry>`_.
- Fix unintended load on datasets when calling :py:meth:`DataArray.plot.scatter` (:pull:`9818`).
By `Jimmy Westling <https://github.com/illviljan>`_.
- Fix interpolation when non-numeric coordinate variables are present (:issue:`8099`, :issue:`9839`).
By `Deepak Cherian <https://github.com/dcherian>`_.


Documentation
~~~~~~~~~~~~~
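The breaking-changes entry above is easiest to see with a tiny example. This sketch is illustrative only: the dataset is invented, and `dropna` is used because its `how` argument is exactly the kind of non-dimension argument that must now be passed by keyword.

```python
import numpy as np
import xarray as xr

# Invented toy data: one variable over dimension "x" with some NaNs.
ds = xr.Dataset({"t": ("x", [1.0, np.nan, np.nan])})

# Supported: the non-dimension argument is given by keyword.
ds.dropna("x", how="all")

# No longer supported (deprecated in v2023.10.0): a positional "all" could be
# read as a dimension name, so this is expected to raise a TypeError.
# ds.dropna("x", "all")
```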
4 changes: 1 addition & 3 deletions xarray/backends/zarr.py
@@ -1135,9 +1135,7 @@ def set_variables(self, variables, check_encoding_set, writer, unlimited_dims=No
else:
encoded_attrs[DIMENSION_KEY] = dims

encoding["exists_ok" if _zarr_v3() else "overwrite"] = (
True if self._mode == "w" else False
)
encoding["overwrite"] = True if self._mode == "w" else False

zarr_array = self._create_new_array(
name=name,
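The zarr change above replaces the version-dependent `exists_ok`/`overwrite` key with a plain `overwrite`, suggesting both code paths now accept that name. As a side note, `self._mode == "w"` is already a boolean, so the remaining conditional is equivalent to the comparison itself. A minimal sketch of that mapping; `mode` here is a stand-in for the store's `self._mode`:

```python
def overwrite_flag(mode: str) -> dict:
    # Equivalent to: encoding["overwrite"] = True if mode == "w" else False
    return {"overwrite": mode == "w"}

assert overwrite_flag("w") == {"overwrite": True}   # write mode clobbers existing arrays
assert overwrite_flag("a") == {"overwrite": False}  # append mode leaves them in place
```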
73 changes: 5 additions & 68 deletions xarray/coding/cftime_offsets.py
@@ -62,15 +62,13 @@
)
from xarray.core.common import _contains_datetime_like_objects, is_np_datetime_like
from xarray.core.pdcompat import (
NoDefault,
count_not_none,
nanosecond_precision_timestamp,
no_default,
)
from xarray.core.utils import attempt_import, emit_user_level_warning

if TYPE_CHECKING:
from xarray.core.types import InclusiveOptions, Self, SideOptions, TypeAlias
from xarray.core.types import InclusiveOptions, Self, TypeAlias


DayOption: TypeAlias = Literal["start", "end"]
@@ -943,51 +941,14 @@ def _generate_range(start, end, periods, offset):
current = next_date


def _translate_closed_to_inclusive(closed):
"""Follows code added in pandas #43504."""
emit_user_level_warning(
"Following pandas, the `closed` parameter is deprecated in "
"favor of the `inclusive` parameter, and will be removed in "
"a future version of xarray.",
FutureWarning,
)
if closed is None:
inclusive = "both"
elif closed in ("left", "right"):
inclusive = closed
else:
raise ValueError(
f"Argument `closed` must be either 'left', 'right', or None. "
f"Got {closed!r}."
)
return inclusive


def _infer_inclusive(
closed: NoDefault | SideOptions, inclusive: InclusiveOptions | None
) -> InclusiveOptions:
"""Follows code added in pandas #43504."""
if closed is not no_default and inclusive is not None:
raise ValueError(
"Following pandas, deprecated argument `closed` cannot be "
"passed if argument `inclusive` is not None."
)
if closed is not no_default:
return _translate_closed_to_inclusive(closed)
if inclusive is None:
return "both"
return inclusive


def cftime_range(
start=None,
end=None,
periods=None,
freq=None,
normalize=False,
name=None,
closed: NoDefault | SideOptions = no_default,
inclusive: None | InclusiveOptions = None,
inclusive: InclusiveOptions = "both",
calendar="standard",
) -> CFTimeIndex:
"""Return a fixed frequency CFTimeIndex.
@@ -1006,16 +967,7 @@ def cftime_range(
Normalize start/end dates to midnight before generating date range.
name : str, default: None
Name of the resulting index
closed : {None, "left", "right"}, default: "NO_DEFAULT"
Make the interval closed with respect to the given frequency to the
"left", "right", or both sides (None).
.. deprecated:: 2023.02.0
Following pandas, the ``closed`` parameter is deprecated in favor
of the ``inclusive`` parameter, and will be removed in a future
version of xarray.
inclusive : {None, "both", "neither", "left", "right"}, default None
inclusive : {"both", "neither", "left", "right"}, default "both"
Include boundaries; whether to set each bound as closed or open.
.. versionadded:: 2023.02.0
@@ -1193,8 +1145,6 @@ def cftime_range(
offset = to_offset(freq)
dates = np.array(list(_generate_range(start, end, periods, offset)))

inclusive = _infer_inclusive(closed, inclusive)

if inclusive == "neither":
left_closed = False
right_closed = False
@@ -1229,8 +1179,7 @@ def date_range(
tz=None,
normalize=False,
name=None,
closed: NoDefault | SideOptions = no_default,
inclusive: None | InclusiveOptions = None,
inclusive: InclusiveOptions = "both",
calendar="standard",
use_cftime=None,
):
@@ -1257,20 +1206,10 @@ def date_range(
Normalize start/end dates to midnight before generating date range.
name : str, default: None
Name of the resulting index
closed : {None, "left", "right"}, default: "NO_DEFAULT"
Make the interval closed with respect to the given frequency to the
"left", "right", or both sides (None).
.. deprecated:: 2023.02.0
Following pandas, the `closed` parameter is deprecated in favor
of the `inclusive` parameter, and will be removed in a future
version of xarray.
inclusive : {None, "both", "neither", "left", "right"}, default: None
inclusive : {"both", "neither", "left", "right"}, default: "both"
Include boundaries; whether to set each bound as closed or open.
.. versionadded:: 2023.02.0
calendar : str, default: "standard"
Calendar type for the datetimes.
use_cftime : boolean, optional
Expand All @@ -1294,8 +1233,6 @@ def date_range(
if tz is not None:
use_cftime = False

inclusive = _infer_inclusive(closed, inclusive)

if _is_standard_calendar(calendar) and use_cftime is not True:
try:
return pd.date_range(
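With the deprecated `closed` parameter removed, endpoint handling in `cftime_range` and `date_range` goes through `inclusive` alone (default `"both"`). A short usage sketch; the dates and frequency are arbitrary:

```python
import xarray as xr

# Keep only the left endpoint: the final date ("2000-01-11") is dropped.
idx = xr.cftime_range("2000-01-01", "2000-01-11", freq="D", inclusive="left")
assert len(idx) == 10

# date_range takes the same keyword; code that previously passed
# closed="left" would now pass inclusive="left".
times = xr.date_range("2000-01-01", "2000-01-05", freq="D", inclusive="both")
assert len(times) == 5
```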
71 changes: 43 additions & 28 deletions xarray/core/dask_array_ops.py
@@ -1,6 +1,7 @@
from __future__ import annotations

import math
from functools import partial

from xarray.core import dtypes, nputils

@@ -75,6 +76,47 @@ def least_squares(lhs, rhs, rcond=None, skipna=False):
return coeffs, residuals


def _fill_with_last_one(a, b):
import numpy as np

# cumreduction apply the push func over all the blocks first so,
# the only missing part is filling the missing values using the
# last data of the previous chunk
return np.where(np.isnan(b), a, b)


def _dtype_push(a, axis, dtype=None):
from xarray.core.duck_array_ops import _push

# Not sure why the blelloch algorithm force to receive a dtype
return _push(a, axis=axis)


def _reset_cumsum(a, axis, dtype=None):
import numpy as np

cumsum = np.cumsum(a, axis=axis)
reset_points = np.maximum.accumulate(np.where(a == 0, cumsum, 0), axis=axis)
return cumsum - reset_points


def _last_reset_cumsum(a, axis, keepdims=None):
import numpy as np

# Take the last cumulative sum taking into account the reset
# This is useful for blelloch method
return np.take(_reset_cumsum(a, axis=axis), axis=axis, indices=[-1])


def _combine_reset_cumsum(a, b, axis):
import numpy as np

# It is going to sum the previous result until the first
# non nan value
bitmask = np.cumprod(b != 0, axis=axis)
return np.where(bitmask, b + a, b)


def push(array, n, axis, method="blelloch"):
"""
Dask-aware bottleneck.push
@@ -91,16 +133,6 @@ def push(array, n, axis, method="blelloch"):
# TODO: Replace all this function
# once https://github.com/pydata/xarray/issues/9229 being implemented

def _fill_with_last_one(a, b):
# cumreduction apply the push func over all the blocks first so,
# the only missing part is filling the missing values using the
# last data of the previous chunk
return np.where(np.isnan(b), a, b)

def _dtype_push(a, axis, dtype=None):
# Not sure why the blelloch algorithm force to receive a dtype
return _push(a, axis=axis)

pushed_array = da.reductions.cumreduction(
func=_dtype_push,
binop=_fill_with_last_one,
@@ -113,26 +145,9 @@ def _dtype_push(a, axis, dtype=None):
)

if n is not None and 0 < n < array.shape[axis] - 1:

def _reset_cumsum(a, axis, dtype=None):
cumsum = np.cumsum(a, axis=axis)
reset_points = np.maximum.accumulate(np.where(a == 0, cumsum, 0), axis=axis)
return cumsum - reset_points

def _last_reset_cumsum(a, axis, keepdims=None):
# Take the last cumulative sum taking into account the reset
# This is useful for blelloch method
return np.take(_reset_cumsum(a, axis=axis), axis=axis, indices=[-1])

def _combine_reset_cumsum(a, b):
# It is going to sum the previous result until the first
# non nan value
bitmask = np.cumprod(b != 0, axis=axis)
return np.where(bitmask, b + a, b)

valid_positions = da.reductions.cumreduction(
func=_reset_cumsum,
binop=_combine_reset_cumsum,
binop=partial(_combine_reset_cumsum, axis=axis),
ident=0,
x=da.isnan(array, dtype=int),
axis=axis,
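In the `push` refactor above, the helpers previously defined inside `push` (and closing over `axis`) are hoisted to module scope, with the remaining `axis` dependency bound via `functools.partial` when `_combine_reset_cumsum` is passed as the `binop`. A NumPy-only sketch of the reset-cumsum idea those helpers rely on, using an invented mask; it measures the running length of each gap of missing values so that gaps longer than `n` can be excluded from filling:

```python
from functools import partial

import numpy as np

def reset_cumsum(a, axis):
    # Cumulative sum that restarts after every zero, mirroring _reset_cumsum above.
    cumsum = np.cumsum(a, axis=axis)
    reset_points = np.maximum.accumulate(np.where(a == 0, cumsum, 0), axis=axis)
    return cumsum - reset_points

# 1 marks a missing value, 0 marks valid data (invented example).
mask = np.array([0, 1, 1, 0, 1, 1, 1, 0])
print(reset_cumsum(mask, axis=0))  # [0 1 2 0 1 2 3 0] -> length of each NaN run so far

# Binding the axis argument ahead of time, the same pattern push() now uses
# when handing the combine step to cumreduction:
combine = partial(reset_cumsum, axis=0)
print(combine(mask))
```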