Merge branch 'main' into numpy2-netcdf4

keewis authored Jul 10, 2024
2 parents 53bff5a + 879b06b commit 84f82e3
Showing 11 changed files with 240 additions and 133 deletions.
7 changes: 7 additions & 0 deletions doc/whats-new.rst
@@ -37,6 +37,10 @@ Deprecations

Bug fixes
~~~~~~~~~
- Fix scatter plot broadcasting unnecessarily. (:issue:`9129`, :pull:`9206`)
By `Jimmy Westling <https://github.com/illviljan>`_.
- Don't convert custom indexes to ``pandas`` indexes when computing a diff (:pull:`9157`)
By `Justus Magin <https://github.com/keewis>`_.
- Make :py:func:`testing.assert_allclose` work with numpy 2.0 (:issue:`9165`, :pull:`9166`).
By `Pontus Lurcock <https://github.com/pont-us>`_.
- Allow diffing objects with array attributes on variables (:issue:`9153`, :pull:`9169`).
@@ -49,6 +53,9 @@ Bug fixes
By `Michael Niklas <https://github.com/headtr1ck>`_.
- Dark themes are now properly detected for ``html[data-theme=dark]``-tags (:pull:`9200`).
By `Dieter Werthmüller <https://github.com/prisae>`_.
- Reductions no longer fail for ``np.complex_`` dtype arrays when numbagg is
installed.
By `Maximilian Roos <https://github.com/max-sixty>`_.

Documentation
~~~~~~~~~~~~~
40 changes: 23 additions & 17 deletions xarray/core/datatree.py
@@ -61,7 +61,7 @@
import pandas as pd

from xarray.core.datatree_io import T_DataTreeNetcdfEngine, T_DataTreeNetcdfTypes
from xarray.core.merge import CoercibleValue
from xarray.core.merge import CoercibleMapping, CoercibleValue
from xarray.core.types import ErrorOptions, NetcdfWriteModes, ZarrWriteModes

# """
@@ -954,23 +954,29 @@ def update(
Just like `dict.update` this is an in-place operation.
"""
# TODO separate by type
new_children: dict[str, DataTree] = {}
new_variables = {}
for k, v in other.items():
if isinstance(v, DataTree):
# avoid named node being stored under inconsistent key
new_child: DataTree = v.copy()
# Datatree's name is always a string until we fix that (#8836)
new_child.name = str(k)
new_children[str(k)] = new_child
elif isinstance(v, (DataArray, Variable)):
# TODO this should also accommodate other types that can be coerced into Variables
new_variables[k] = v
else:
raise TypeError(f"Type {type(v)} cannot be assigned to a DataTree")

vars_merge_result = dataset_update_method(self.to_dataset(), new_variables)
new_variables: CoercibleMapping

if isinstance(other, Dataset):
new_variables = other
else:
new_variables = {}
for k, v in other.items():
if isinstance(v, DataTree):
# avoid named node being stored under inconsistent key
new_child: DataTree = v.copy()
# Datatree's name is always a string until we fix that (#8836)
new_child.name = str(k)
new_children[str(k)] = new_child
elif isinstance(v, (DataArray, Variable)):
# TODO this should also accommodate other types that can be coerced into Variables
new_variables[k] = v
else:
raise TypeError(f"Type {type(v)} cannot be assigned to a DataTree")

vars_merge_result = dataset_update_method(
self.to_dataset(inherited=False), new_variables
)
data = Dataset._construct_direct(**vars_merge_result._asdict())

# TODO are there any subtleties with preserving order of children like this?
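The hunk above lets `DataTree.update` accept a whole `Dataset` as `other` (typed as a `CoercibleMapping`) in addition to a mapping of child trees, `DataArray` or `Variable` objects, and it now merges against only the node's own, non-inherited variables. A minimal usage sketch, assuming the in-progress `xarray.core.datatree.DataTree` API at the time of this commit (the import path and constructor arguments are assumptions and may differ in released versions):

```python
# Hedged sketch, not taken from the diff: exercise update() with both a
# Dataset and a plain mapping, the two branches added above.
import xarray as xr
from xarray.core.datatree import DataTree  # internal, in-progress API

tree = DataTree(data=xr.Dataset({"a": ("x", [1, 2, 3])}), name="root")

# A Dataset is now passed straight through as the CoercibleMapping ...
tree.update(xr.Dataset({"b": ("x", [4, 5, 6])}))

# ... while mappings of DataArray/Variable (or DataTree children) still go
# through the per-item type dispatch shown in the diff.
tree.update({"c": xr.DataArray([7, 8, 9], dims="x")})
```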
4 changes: 2 additions & 2 deletions xarray/core/formatting.py
@@ -898,8 +898,8 @@ def diff_coords_repr(a, b, compat, col_width=None):
"Coordinates",
summarize_variable,
col_width=col_width,
a_indexes=a.indexes,
b_indexes=b.indexes,
a_indexes=a.xindexes,
b_indexes=b.xindexes,
)


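The two-line change above makes the coordinate diff summarize `.xindexes` (xarray's own index objects) rather than `.indexes`, which coerces every index to a `pandas.Index`; this is the changelog entry about not converting custom indexes when computing a diff. A quick illustration of the difference between the two properties, assuming only a standard xarray install:

```python
# Minimal sketch: .indexes coerces to pandas.Index, while .xindexes keeps
# xarray's Index wrappers, so custom Index subclasses survive the diff repr.
import xarray as xr

ds = xr.Dataset(coords={"x": [1, 2, 3]})
print(type(ds.indexes["x"]))   # a pandas Index (coerced)
print(type(ds.xindexes["x"]))  # xarray's PandasIndex wrapper
```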
2 changes: 1 addition & 1 deletion xarray/core/nputils.py
@@ -191,7 +191,7 @@ def f(values, axis=None, **kwargs):
or kwargs.get("ddof", 0) == 1
)
# TODO: bool?
and values.dtype.kind in "uifc"
and values.dtype.kind in "uif"
# and values.dtype.isnative
and (dtype is None or np.dtype(dtype) == values.dtype)
# numbagg.nanquantile only available after 0.8.0 and with linear method
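The single-character change above ("uifc" to "uif") stops routing complex-valued arrays to numbagg, so their reductions fall back to xarray's default implementation instead of failing, matching the numbagg entry in the changelog. A rough sketch of just the dtype gate, using only numpy (variable names here are illustrative, not the real dispatch code):

```python
# Hedged sketch of the dtype check only: unsigned/signed ints and floats
# remain numbagg-eligible, complex ("c") no longer is.
import numpy as np

for arr in (np.arange(4, dtype="int64"), np.ones(3, dtype="complex128")):
    numbagg_eligible = arr.dtype.kind in "uif"  # "c" now excluded
    print(arr.dtype, "numbagg-eligible:", numbagg_eligible)
```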
15 changes: 10 additions & 5 deletions xarray/plot/dataset_plot.py
@@ -721,8 +721,8 @@ def _temp_dataarray(ds: Dataset, y: Hashable, locals_: dict[str, Any]) -> DataAr
"""Create a temporary datarray with extra coords."""
from xarray.core.dataarray import DataArray

# Base coords:
coords = dict(ds.coords)
coords = dict(ds[y].coords)
dims = set(ds[y].dims)

# Add extra coords to the DataArray from valid kwargs, if using all
# kwargs there is a risk that we add unnecessary dataarrays as
@@ -732,12 +732,17 @@ def _temp_dataarray(ds: Dataset, y: Hashable, locals_: dict[str, Any]) -> DataAr
coord_kwargs = locals_.keys() & valid_coord_kwargs
for k in coord_kwargs:
key = locals_[k]
if ds.data_vars.get(key) is not None:
coords[key] = ds[key]
darray = ds.get(key)
if darray is not None:
coords[key] = darray
dims.update(darray.dims)

# Trim dataset from unnecessary dims:
ds_trimmed = ds.drop_dims(ds.sizes.keys() - dims) # TODO: Use ds.dims in the future

# The dataarray has to include all the dims. Broadcast to that shape
# and add the additional coords:
_y = ds[y].broadcast_like(ds)
_y = ds[y].broadcast_like(ds_trimmed)

return DataArray(_y, coords=coords)

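Taken together, the hunks above make `_temp_dataarray` collect only the coordinates and dimensions that `y` (plus any variables referenced through the plot kwargs) actually uses, drop the remaining dims, and broadcast against that trimmed dataset; this is the "scatter plot broadcasting unnecessarily" fix from the changelog. A hedged usage sketch with made-up data (matplotlib required for the plot call):

```python
# Hypothetical example: "z" carries a large unrelated dim "b"; with the fix,
# plotting "y" against "x" no longer broadcasts "y" over "b".
import numpy as np
import xarray as xr

ds = xr.Dataset(
    {
        "y": ("a", np.random.rand(10)),
        "z": (("a", "b"), np.zeros((10, 100_000))),  # unrelated and big
    },
    coords={"x": ("a", np.arange(10))},
)
ds.plot.scatter(x="x", y="y")  # only the dims used by "y" are kept
```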