From 888c5c3b2345ef401182773ced36f4f365e239da Mon Sep 17 00:00:00 2001 From: nameloCmaS Date: Sun, 21 Jan 2024 13:49:39 +0000 Subject: [PATCH 01/13] Fix doc building following breaking change in Pandas 2.2.0 https://pandas.pydata.org/docs/whatsnew/v2.2.0.html#deprecate-aliases-m-q-y-etc-in-favour-of-me-qe-ye-etc-for-offsets --- doc/whats-new.rst | 1 + 1 file changed, 1 insertion(+) diff --git a/doc/whats-new.rst b/doc/whats-new.rst index f9d308171a9..041da92a2ac 100644 --- a/doc/whats-new.rst +++ b/doc/whats-new.rst @@ -7225,6 +7225,7 @@ Breaking changes such `'DJF'`: .. ipython:: python + :okwarning: ds = xray.Dataset({"t": pd.date_range("2000-01-01", periods=12, freq="M")}) ds["t.season"] From 3f8ad6b3c9481de3ec218117a50f00c974115530 Mon Sep 17 00:00:00 2001 From: nameloCmaS Date: Sun, 21 Jan 2024 22:24:00 +0000 Subject: [PATCH 02/13] Further fixes for changed Pandas 2.2 date aliases Fix docstrings following Pandas 2.2 date aliases changed --- xarray/backends/api.py | 2 +- xarray/core/_aggregations.py | 264 ++++++++++++++-------------- xarray/core/accessor_dt.py | 2 +- xarray/core/dataarray.py | 2 +- xarray/tests/test_accessor_dt.py | 2 +- xarray/tests/test_cftime_offsets.py | 4 +- 6 files changed, 138 insertions(+), 138 deletions(-) diff --git a/xarray/backends/api.py b/xarray/backends/api.py index 1d538bf94ed..670a0ec6d68 100644 --- a/xarray/backends/api.py +++ b/xarray/backends/api.py @@ -1433,7 +1433,7 @@ def save_mfdataset( >>> ds = xr.Dataset( ... {"a": ("time", np.linspace(0, 1, 48))}, - ... coords={"time": pd.date_range("2010-01-01", freq="M", periods=48)}, + ... coords={"time": pd.date_range("2010-01-01", freq="ME", periods=48)}, ... ) >>> ds diff --git a/xarray/core/_aggregations.py b/xarray/core/_aggregations.py index 0d4b4413b7c..e214c2c7c5a 100644 --- a/xarray/core/_aggregations.py +++ b/xarray/core/_aggregations.py @@ -77,7 +77,7 @@ def count( ... np.array([1, 2, 3, 0, 2, np.nan]), ... dims="time", ... coords=dict( - ... time=("time", pd.date_range("2001-01-01", freq="M", periods=6)), + ... time=("time", pd.date_range("2001-01-01", freq="ME", periods=6)), ... labels=("time", np.array(["a", "b", "c", "c", "b", "a"])), ... ), ... ) @@ -149,7 +149,7 @@ def all( ... np.array([True, True, True, True, True, False], dtype=bool), ... dims="time", ... coords=dict( - ... time=("time", pd.date_range("2001-01-01", freq="M", periods=6)), + ... time=("time", pd.date_range("2001-01-01", freq="ME", periods=6)), ... labels=("time", np.array(["a", "b", "c", "c", "b", "a"])), ... ), ... ) @@ -221,7 +221,7 @@ def any( ... np.array([True, True, True, True, True, False], dtype=bool), ... dims="time", ... coords=dict( - ... time=("time", pd.date_range("2001-01-01", freq="M", periods=6)), + ... time=("time", pd.date_range("2001-01-01", freq="ME", periods=6)), ... labels=("time", np.array(["a", "b", "c", "c", "b", "a"])), ... ), ... ) @@ -299,7 +299,7 @@ def max( ... np.array([1, 2, 3, 0, 2, np.nan]), ... dims="time", ... coords=dict( - ... time=("time", pd.date_range("2001-01-01", freq="M", periods=6)), + ... time=("time", pd.date_range("2001-01-01", freq="ME", periods=6)), ... labels=("time", np.array(["a", "b", "c", "c", "b", "a"])), ... ), ... ) @@ -386,7 +386,7 @@ def min( ... np.array([1, 2, 3, 0, 2, np.nan]), ... dims="time", ... coords=dict( - ... time=("time", pd.date_range("2001-01-01", freq="M", periods=6)), + ... time=("time", pd.date_range("2001-01-01", freq="ME", periods=6)), ... labels=("time", np.array(["a", "b", "c", "c", "b", "a"])), ... ), ... ) @@ -477,7 +477,7 @@ def mean( ... 
np.array([1, 2, 3, 0, 2, np.nan]), ... dims="time", ... coords=dict( - ... time=("time", pd.date_range("2001-01-01", freq="M", periods=6)), + ... time=("time", pd.date_range("2001-01-01", freq="ME", periods=6)), ... labels=("time", np.array(["a", "b", "c", "c", "b", "a"])), ... ), ... ) @@ -575,7 +575,7 @@ def prod( ... np.array([1, 2, 3, 0, 2, np.nan]), ... dims="time", ... coords=dict( - ... time=("time", pd.date_range("2001-01-01", freq="M", periods=6)), + ... time=("time", pd.date_range("2001-01-01", freq="ME", periods=6)), ... labels=("time", np.array(["a", "b", "c", "c", "b", "a"])), ... ), ... ) @@ -682,7 +682,7 @@ def sum( ... np.array([1, 2, 3, 0, 2, np.nan]), ... dims="time", ... coords=dict( - ... time=("time", pd.date_range("2001-01-01", freq="M", periods=6)), + ... time=("time", pd.date_range("2001-01-01", freq="ME", periods=6)), ... labels=("time", np.array(["a", "b", "c", "c", "b", "a"])), ... ), ... ) @@ -786,7 +786,7 @@ def std( ... np.array([1, 2, 3, 0, 2, np.nan]), ... dims="time", ... coords=dict( - ... time=("time", pd.date_range("2001-01-01", freq="M", periods=6)), + ... time=("time", pd.date_range("2001-01-01", freq="ME", periods=6)), ... labels=("time", np.array(["a", "b", "c", "c", "b", "a"])), ... ), ... ) @@ -890,7 +890,7 @@ def var( ... np.array([1, 2, 3, 0, 2, np.nan]), ... dims="time", ... coords=dict( - ... time=("time", pd.date_range("2001-01-01", freq="M", periods=6)), + ... time=("time", pd.date_range("2001-01-01", freq="ME", periods=6)), ... labels=("time", np.array(["a", "b", "c", "c", "b", "a"])), ... ), ... ) @@ -990,7 +990,7 @@ def median( ... np.array([1, 2, 3, 0, 2, np.nan]), ... dims="time", ... coords=dict( - ... time=("time", pd.date_range("2001-01-01", freq="M", periods=6)), + ... time=("time", pd.date_range("2001-01-01", freq="ME", periods=6)), ... labels=("time", np.array(["a", "b", "c", "c", "b", "a"])), ... ), ... ) @@ -1081,7 +1081,7 @@ def cumsum( ... np.array([1, 2, 3, 0, 2, np.nan]), ... dims="time", ... coords=dict( - ... time=("time", pd.date_range("2001-01-01", freq="M", periods=6)), + ... time=("time", pd.date_range("2001-01-01", freq="ME", periods=6)), ... labels=("time", np.array(["a", "b", "c", "c", "b", "a"])), ... ), ... ) @@ -1174,7 +1174,7 @@ def cumprod( ... np.array([1, 2, 3, 0, 2, np.nan]), ... dims="time", ... coords=dict( - ... time=("time", pd.date_range("2001-01-01", freq="M", periods=6)), + ... time=("time", pd.date_range("2001-01-01", freq="ME", periods=6)), ... labels=("time", np.array(["a", "b", "c", "c", "b", "a"])), ... ), ... ) @@ -1273,7 +1273,7 @@ def count( ... np.array([1, 2, 3, 0, 2, np.nan]), ... dims="time", ... coords=dict( - ... time=("time", pd.date_range("2001-01-01", freq="M", periods=6)), + ... time=("time", pd.date_range("2001-01-01", freq="ME", periods=6)), ... labels=("time", np.array(["a", "b", "c", "c", "b", "a"])), ... ), ... ) @@ -1339,7 +1339,7 @@ def all( ... np.array([True, True, True, True, True, False], dtype=bool), ... dims="time", ... coords=dict( - ... time=("time", pd.date_range("2001-01-01", freq="M", periods=6)), + ... time=("time", pd.date_range("2001-01-01", freq="ME", periods=6)), ... labels=("time", np.array(["a", "b", "c", "c", "b", "a"])), ... ), ... ) @@ -1405,7 +1405,7 @@ def any( ... np.array([True, True, True, True, True, False], dtype=bool), ... dims="time", ... coords=dict( - ... time=("time", pd.date_range("2001-01-01", freq="M", periods=6)), + ... time=("time", pd.date_range("2001-01-01", freq="ME", periods=6)), ... 
labels=("time", np.array(["a", "b", "c", "c", "b", "a"])), ... ), ... ) @@ -1477,7 +1477,7 @@ def max( ... np.array([1, 2, 3, 0, 2, np.nan]), ... dims="time", ... coords=dict( - ... time=("time", pd.date_range("2001-01-01", freq="M", periods=6)), + ... time=("time", pd.date_range("2001-01-01", freq="ME", periods=6)), ... labels=("time", np.array(["a", "b", "c", "c", "b", "a"])), ... ), ... ) @@ -1556,7 +1556,7 @@ def min( ... np.array([1, 2, 3, 0, 2, np.nan]), ... dims="time", ... coords=dict( - ... time=("time", pd.date_range("2001-01-01", freq="M", periods=6)), + ... time=("time", pd.date_range("2001-01-01", freq="ME", periods=6)), ... labels=("time", np.array(["a", "b", "c", "c", "b", "a"])), ... ), ... ) @@ -1639,7 +1639,7 @@ def mean( ... np.array([1, 2, 3, 0, 2, np.nan]), ... dims="time", ... coords=dict( - ... time=("time", pd.date_range("2001-01-01", freq="M", periods=6)), + ... time=("time", pd.date_range("2001-01-01", freq="ME", periods=6)), ... labels=("time", np.array(["a", "b", "c", "c", "b", "a"])), ... ), ... ) @@ -1729,7 +1729,7 @@ def prod( ... np.array([1, 2, 3, 0, 2, np.nan]), ... dims="time", ... coords=dict( - ... time=("time", pd.date_range("2001-01-01", freq="M", periods=6)), + ... time=("time", pd.date_range("2001-01-01", freq="ME", periods=6)), ... labels=("time", np.array(["a", "b", "c", "c", "b", "a"])), ... ), ... ) @@ -1826,7 +1826,7 @@ def sum( ... np.array([1, 2, 3, 0, 2, np.nan]), ... dims="time", ... coords=dict( - ... time=("time", pd.date_range("2001-01-01", freq="M", periods=6)), + ... time=("time", pd.date_range("2001-01-01", freq="ME", periods=6)), ... labels=("time", np.array(["a", "b", "c", "c", "b", "a"])), ... ), ... ) @@ -1920,7 +1920,7 @@ def std( ... np.array([1, 2, 3, 0, 2, np.nan]), ... dims="time", ... coords=dict( - ... time=("time", pd.date_range("2001-01-01", freq="M", periods=6)), + ... time=("time", pd.date_range("2001-01-01", freq="ME", periods=6)), ... labels=("time", np.array(["a", "b", "c", "c", "b", "a"])), ... ), ... ) @@ -2014,7 +2014,7 @@ def var( ... np.array([1, 2, 3, 0, 2, np.nan]), ... dims="time", ... coords=dict( - ... time=("time", pd.date_range("2001-01-01", freq="M", periods=6)), + ... time=("time", pd.date_range("2001-01-01", freq="ME", periods=6)), ... labels=("time", np.array(["a", "b", "c", "c", "b", "a"])), ... ), ... ) @@ -2104,7 +2104,7 @@ def median( ... np.array([1, 2, 3, 0, 2, np.nan]), ... dims="time", ... coords=dict( - ... time=("time", pd.date_range("2001-01-01", freq="M", periods=6)), + ... time=("time", pd.date_range("2001-01-01", freq="ME", periods=6)), ... labels=("time", np.array(["a", "b", "c", "c", "b", "a"])), ... ), ... ) @@ -2187,7 +2187,7 @@ def cumsum( ... np.array([1, 2, 3, 0, 2, np.nan]), ... dims="time", ... coords=dict( - ... time=("time", pd.date_range("2001-01-01", freq="M", periods=6)), + ... time=("time", pd.date_range("2001-01-01", freq="ME", periods=6)), ... labels=("time", np.array(["a", "b", "c", "c", "b", "a"])), ... ), ... ) @@ -2276,7 +2276,7 @@ def cumprod( ... np.array([1, 2, 3, 0, 2, np.nan]), ... dims="time", ... coords=dict( - ... time=("time", pd.date_range("2001-01-01", freq="M", periods=6)), + ... time=("time", pd.date_range("2001-01-01", freq="ME", periods=6)), ... labels=("time", np.array(["a", "b", "c", "c", "b", "a"])), ... ), ... ) @@ -2401,7 +2401,7 @@ def count( ... np.array([1, 2, 3, 0, 2, np.nan]), ... dims="time", ... coords=dict( - ... time=("time", pd.date_range("2001-01-01", freq="M", periods=6)), + ... 
time=("time", pd.date_range("2001-01-01", freq="ME", periods=6)), ... labels=("time", np.array(["a", "b", "c", "c", "b", "a"])), ... ), ... ) @@ -2499,7 +2499,7 @@ def all( ... np.array([True, True, True, True, True, False], dtype=bool), ... dims="time", ... coords=dict( - ... time=("time", pd.date_range("2001-01-01", freq="M", periods=6)), + ... time=("time", pd.date_range("2001-01-01", freq="ME", periods=6)), ... labels=("time", np.array(["a", "b", "c", "c", "b", "a"])), ... ), ... ) @@ -2597,7 +2597,7 @@ def any( ... np.array([True, True, True, True, True, False], dtype=bool), ... dims="time", ... coords=dict( - ... time=("time", pd.date_range("2001-01-01", freq="M", periods=6)), + ... time=("time", pd.date_range("2001-01-01", freq="ME", periods=6)), ... labels=("time", np.array(["a", "b", "c", "c", "b", "a"])), ... ), ... ) @@ -2701,7 +2701,7 @@ def max( ... np.array([1, 2, 3, 0, 2, np.nan]), ... dims="time", ... coords=dict( - ... time=("time", pd.date_range("2001-01-01", freq="M", periods=6)), + ... time=("time", pd.date_range("2001-01-01", freq="ME", periods=6)), ... labels=("time", np.array(["a", "b", "c", "c", "b", "a"])), ... ), ... ) @@ -2817,7 +2817,7 @@ def min( ... np.array([1, 2, 3, 0, 2, np.nan]), ... dims="time", ... coords=dict( - ... time=("time", pd.date_range("2001-01-01", freq="M", periods=6)), + ... time=("time", pd.date_range("2001-01-01", freq="ME", periods=6)), ... labels=("time", np.array(["a", "b", "c", "c", "b", "a"])), ... ), ... ) @@ -2935,7 +2935,7 @@ def mean( ... np.array([1, 2, 3, 0, 2, np.nan]), ... dims="time", ... coords=dict( - ... time=("time", pd.date_range("2001-01-01", freq="M", periods=6)), + ... time=("time", pd.date_range("2001-01-01", freq="ME", periods=6)), ... labels=("time", np.array(["a", "b", "c", "c", "b", "a"])), ... ), ... ) @@ -3060,7 +3060,7 @@ def prod( ... np.array([1, 2, 3, 0, 2, np.nan]), ... dims="time", ... coords=dict( - ... time=("time", pd.date_range("2001-01-01", freq="M", periods=6)), + ... time=("time", pd.date_range("2001-01-01", freq="ME", periods=6)), ... labels=("time", np.array(["a", "b", "c", "c", "b", "a"])), ... ), ... ) @@ -3197,7 +3197,7 @@ def sum( ... np.array([1, 2, 3, 0, 2, np.nan]), ... dims="time", ... coords=dict( - ... time=("time", pd.date_range("2001-01-01", freq="M", periods=6)), + ... time=("time", pd.date_range("2001-01-01", freq="ME", periods=6)), ... labels=("time", np.array(["a", "b", "c", "c", "b", "a"])), ... ), ... ) @@ -3331,7 +3331,7 @@ def std( ... np.array([1, 2, 3, 0, 2, np.nan]), ... dims="time", ... coords=dict( - ... time=("time", pd.date_range("2001-01-01", freq="M", periods=6)), + ... time=("time", pd.date_range("2001-01-01", freq="ME", periods=6)), ... labels=("time", np.array(["a", "b", "c", "c", "b", "a"])), ... ), ... ) @@ -3465,7 +3465,7 @@ def var( ... np.array([1, 2, 3, 0, 2, np.nan]), ... dims="time", ... coords=dict( - ... time=("time", pd.date_range("2001-01-01", freq="M", periods=6)), + ... time=("time", pd.date_range("2001-01-01", freq="ME", periods=6)), ... labels=("time", np.array(["a", "b", "c", "c", "b", "a"])), ... ), ... ) @@ -3595,7 +3595,7 @@ def median( ... np.array([1, 2, 3, 0, 2, np.nan]), ... dims="time", ... coords=dict( - ... time=("time", pd.date_range("2001-01-01", freq="M", periods=6)), + ... time=("time", pd.date_range("2001-01-01", freq="ME", periods=6)), ... labels=("time", np.array(["a", "b", "c", "c", "b", "a"])), ... ), ... ) @@ -3698,7 +3698,7 @@ def cumsum( ... np.array([1, 2, 3, 0, 2, np.nan]), ... dims="time", ... coords=dict( - ... 
time=("time", pd.date_range("2001-01-01", freq="M", periods=6)), + ... time=("time", pd.date_range("2001-01-01", freq="ME", periods=6)), ... labels=("time", np.array(["a", "b", "c", "c", "b", "a"])), ... ), ... ) @@ -3799,7 +3799,7 @@ def cumprod( ... np.array([1, 2, 3, 0, 2, np.nan]), ... dims="time", ... coords=dict( - ... time=("time", pd.date_range("2001-01-01", freq="M", periods=6)), + ... time=("time", pd.date_range("2001-01-01", freq="ME", periods=6)), ... labels=("time", np.array(["a", "b", "c", "c", "b", "a"])), ... ), ... ) @@ -3928,7 +3928,7 @@ def count( ... np.array([1, 2, 3, 0, 2, np.nan]), ... dims="time", ... coords=dict( - ... time=("time", pd.date_range("2001-01-01", freq="M", periods=6)), + ... time=("time", pd.date_range("2001-01-01", freq="ME", periods=6)), ... labels=("time", np.array(["a", "b", "c", "c", "b", "a"])), ... ), ... ) @@ -3942,7 +3942,7 @@ def count( Data variables: da (time) float64 1.0 2.0 3.0 0.0 2.0 nan - >>> ds.resample(time="3M").count() + >>> ds.resample(time="3ME").count() Dimensions: (time: 3) Coordinates: @@ -4026,7 +4026,7 @@ def all( ... np.array([True, True, True, True, True, False], dtype=bool), ... dims="time", ... coords=dict( - ... time=("time", pd.date_range("2001-01-01", freq="M", periods=6)), + ... time=("time", pd.date_range("2001-01-01", freq="ME", periods=6)), ... labels=("time", np.array(["a", "b", "c", "c", "b", "a"])), ... ), ... ) @@ -4040,7 +4040,7 @@ def all( Data variables: da (time) bool True True True True True False - >>> ds.resample(time="3M").all() + >>> ds.resample(time="3ME").all() Dimensions: (time: 3) Coordinates: @@ -4124,7 +4124,7 @@ def any( ... np.array([True, True, True, True, True, False], dtype=bool), ... dims="time", ... coords=dict( - ... time=("time", pd.date_range("2001-01-01", freq="M", periods=6)), + ... time=("time", pd.date_range("2001-01-01", freq="ME", periods=6)), ... labels=("time", np.array(["a", "b", "c", "c", "b", "a"])), ... ), ... ) @@ -4138,7 +4138,7 @@ def any( Data variables: da (time) bool True True True True True False - >>> ds.resample(time="3M").any() + >>> ds.resample(time="3ME").any() Dimensions: (time: 3) Coordinates: @@ -4228,7 +4228,7 @@ def max( ... np.array([1, 2, 3, 0, 2, np.nan]), ... dims="time", ... coords=dict( - ... time=("time", pd.date_range("2001-01-01", freq="M", periods=6)), + ... time=("time", pd.date_range("2001-01-01", freq="ME", periods=6)), ... labels=("time", np.array(["a", "b", "c", "c", "b", "a"])), ... ), ... ) @@ -4242,7 +4242,7 @@ def max( Data variables: da (time) float64 1.0 2.0 3.0 0.0 2.0 nan - >>> ds.resample(time="3M").max() + >>> ds.resample(time="3ME").max() Dimensions: (time: 3) Coordinates: @@ -4252,7 +4252,7 @@ def max( Use ``skipna`` to control whether NaNs are ignored. - >>> ds.resample(time="3M").max(skipna=False) + >>> ds.resample(time="3ME").max(skipna=False) Dimensions: (time: 3) Coordinates: @@ -4344,7 +4344,7 @@ def min( ... np.array([1, 2, 3, 0, 2, np.nan]), ... dims="time", ... coords=dict( - ... time=("time", pd.date_range("2001-01-01", freq="M", periods=6)), + ... time=("time", pd.date_range("2001-01-01", freq="ME", periods=6)), ... labels=("time", np.array(["a", "b", "c", "c", "b", "a"])), ... ), ... ) @@ -4358,7 +4358,7 @@ def min( Data variables: da (time) float64 1.0 2.0 3.0 0.0 2.0 nan - >>> ds.resample(time="3M").min() + >>> ds.resample(time="3ME").min() Dimensions: (time: 3) Coordinates: @@ -4368,7 +4368,7 @@ def min( Use ``skipna`` to control whether NaNs are ignored. 
- >>> ds.resample(time="3M").min(skipna=False) + >>> ds.resample(time="3ME").min(skipna=False) Dimensions: (time: 3) Coordinates: @@ -4462,7 +4462,7 @@ def mean( ... np.array([1, 2, 3, 0, 2, np.nan]), ... dims="time", ... coords=dict( - ... time=("time", pd.date_range("2001-01-01", freq="M", periods=6)), + ... time=("time", pd.date_range("2001-01-01", freq="ME", periods=6)), ... labels=("time", np.array(["a", "b", "c", "c", "b", "a"])), ... ), ... ) @@ -4476,7 +4476,7 @@ def mean( Data variables: da (time) float64 1.0 2.0 3.0 0.0 2.0 nan - >>> ds.resample(time="3M").mean() + >>> ds.resample(time="3ME").mean() Dimensions: (time: 3) Coordinates: @@ -4486,7 +4486,7 @@ def mean( Use ``skipna`` to control whether NaNs are ignored. - >>> ds.resample(time="3M").mean(skipna=False) + >>> ds.resample(time="3ME").mean(skipna=False) Dimensions: (time: 3) Coordinates: @@ -4587,7 +4587,7 @@ def prod( ... np.array([1, 2, 3, 0, 2, np.nan]), ... dims="time", ... coords=dict( - ... time=("time", pd.date_range("2001-01-01", freq="M", periods=6)), + ... time=("time", pd.date_range("2001-01-01", freq="ME", periods=6)), ... labels=("time", np.array(["a", "b", "c", "c", "b", "a"])), ... ), ... ) @@ -4601,7 +4601,7 @@ def prod( Data variables: da (time) float64 1.0 2.0 3.0 0.0 2.0 nan - >>> ds.resample(time="3M").prod() + >>> ds.resample(time="3ME").prod() Dimensions: (time: 3) Coordinates: @@ -4611,7 +4611,7 @@ def prod( Use ``skipna`` to control whether NaNs are ignored. - >>> ds.resample(time="3M").prod(skipna=False) + >>> ds.resample(time="3ME").prod(skipna=False) Dimensions: (time: 3) Coordinates: @@ -4621,7 +4621,7 @@ def prod( Specify ``min_count`` for finer control over when NaNs are ignored. - >>> ds.resample(time="3M").prod(skipna=True, min_count=2) + >>> ds.resample(time="3ME").prod(skipna=True, min_count=2) Dimensions: (time: 3) Coordinates: @@ -4724,7 +4724,7 @@ def sum( ... np.array([1, 2, 3, 0, 2, np.nan]), ... dims="time", ... coords=dict( - ... time=("time", pd.date_range("2001-01-01", freq="M", periods=6)), + ... time=("time", pd.date_range("2001-01-01", freq="ME", periods=6)), ... labels=("time", np.array(["a", "b", "c", "c", "b", "a"])), ... ), ... ) @@ -4738,7 +4738,7 @@ def sum( Data variables: da (time) float64 1.0 2.0 3.0 0.0 2.0 nan - >>> ds.resample(time="3M").sum() + >>> ds.resample(time="3ME").sum() Dimensions: (time: 3) Coordinates: @@ -4748,7 +4748,7 @@ def sum( Use ``skipna`` to control whether NaNs are ignored. - >>> ds.resample(time="3M").sum(skipna=False) + >>> ds.resample(time="3ME").sum(skipna=False) Dimensions: (time: 3) Coordinates: @@ -4758,7 +4758,7 @@ def sum( Specify ``min_count`` for finer control over when NaNs are ignored. - >>> ds.resample(time="3M").sum(skipna=True, min_count=2) + >>> ds.resample(time="3ME").sum(skipna=True, min_count=2) Dimensions: (time: 3) Coordinates: @@ -4858,7 +4858,7 @@ def std( ... np.array([1, 2, 3, 0, 2, np.nan]), ... dims="time", ... coords=dict( - ... time=("time", pd.date_range("2001-01-01", freq="M", periods=6)), + ... time=("time", pd.date_range("2001-01-01", freq="ME", periods=6)), ... labels=("time", np.array(["a", "b", "c", "c", "b", "a"])), ... ), ... ) @@ -4872,7 +4872,7 @@ def std( Data variables: da (time) float64 1.0 2.0 3.0 0.0 2.0 nan - >>> ds.resample(time="3M").std() + >>> ds.resample(time="3ME").std() Dimensions: (time: 3) Coordinates: @@ -4882,7 +4882,7 @@ def std( Use ``skipna`` to control whether NaNs are ignored. 
- >>> ds.resample(time="3M").std(skipna=False) + >>> ds.resample(time="3ME").std(skipna=False) Dimensions: (time: 3) Coordinates: @@ -4892,7 +4892,7 @@ def std( Specify ``ddof=1`` for an unbiased estimate. - >>> ds.resample(time="3M").std(skipna=True, ddof=1) + >>> ds.resample(time="3ME").std(skipna=True, ddof=1) Dimensions: (time: 3) Coordinates: @@ -4992,7 +4992,7 @@ def var( ... np.array([1, 2, 3, 0, 2, np.nan]), ... dims="time", ... coords=dict( - ... time=("time", pd.date_range("2001-01-01", freq="M", periods=6)), + ... time=("time", pd.date_range("2001-01-01", freq="ME", periods=6)), ... labels=("time", np.array(["a", "b", "c", "c", "b", "a"])), ... ), ... ) @@ -5006,7 +5006,7 @@ def var( Data variables: da (time) float64 1.0 2.0 3.0 0.0 2.0 nan - >>> ds.resample(time="3M").var() + >>> ds.resample(time="3ME").var() Dimensions: (time: 3) Coordinates: @@ -5016,7 +5016,7 @@ def var( Use ``skipna`` to control whether NaNs are ignored. - >>> ds.resample(time="3M").var(skipna=False) + >>> ds.resample(time="3ME").var(skipna=False) Dimensions: (time: 3) Coordinates: @@ -5026,7 +5026,7 @@ def var( Specify ``ddof=1`` for an unbiased estimate. - >>> ds.resample(time="3M").var(skipna=True, ddof=1) + >>> ds.resample(time="3ME").var(skipna=True, ddof=1) Dimensions: (time: 3) Coordinates: @@ -5122,7 +5122,7 @@ def median( ... np.array([1, 2, 3, 0, 2, np.nan]), ... dims="time", ... coords=dict( - ... time=("time", pd.date_range("2001-01-01", freq="M", periods=6)), + ... time=("time", pd.date_range("2001-01-01", freq="ME", periods=6)), ... labels=("time", np.array(["a", "b", "c", "c", "b", "a"])), ... ), ... ) @@ -5136,7 +5136,7 @@ def median( Data variables: da (time) float64 1.0 2.0 3.0 0.0 2.0 nan - >>> ds.resample(time="3M").median() + >>> ds.resample(time="3ME").median() Dimensions: (time: 3) Coordinates: @@ -5146,7 +5146,7 @@ def median( Use ``skipna`` to control whether NaNs are ignored. - >>> ds.resample(time="3M").median(skipna=False) + >>> ds.resample(time="3ME").median(skipna=False) Dimensions: (time: 3) Coordinates: @@ -5225,7 +5225,7 @@ def cumsum( ... np.array([1, 2, 3, 0, 2, np.nan]), ... dims="time", ... coords=dict( - ... time=("time", pd.date_range("2001-01-01", freq="M", periods=6)), + ... time=("time", pd.date_range("2001-01-01", freq="ME", periods=6)), ... labels=("time", np.array(["a", "b", "c", "c", "b", "a"])), ... ), ... ) @@ -5239,7 +5239,7 @@ def cumsum( Data variables: da (time) float64 1.0 2.0 3.0 0.0 2.0 nan - >>> ds.resample(time="3M").cumsum() + >>> ds.resample(time="3ME").cumsum() Dimensions: (time: 6) Dimensions without coordinates: time @@ -5248,7 +5248,7 @@ def cumsum( Use ``skipna`` to control whether NaNs are ignored. - >>> ds.resample(time="3M").cumsum(skipna=False) + >>> ds.resample(time="3ME").cumsum(skipna=False) Dimensions: (time: 6) Dimensions without coordinates: time @@ -5326,7 +5326,7 @@ def cumprod( ... np.array([1, 2, 3, 0, 2, np.nan]), ... dims="time", ... coords=dict( - ... time=("time", pd.date_range("2001-01-01", freq="M", periods=6)), + ... time=("time", pd.date_range("2001-01-01", freq="ME", periods=6)), ... labels=("time", np.array(["a", "b", "c", "c", "b", "a"])), ... ), ... ) @@ -5340,7 +5340,7 @@ def cumprod( Data variables: da (time) float64 1.0 2.0 3.0 0.0 2.0 nan - >>> ds.resample(time="3M").cumprod() + >>> ds.resample(time="3ME").cumprod() Dimensions: (time: 6) Dimensions without coordinates: time @@ -5349,7 +5349,7 @@ def cumprod( Use ``skipna`` to control whether NaNs are ignored. 
- >>> ds.resample(time="3M").cumprod(skipna=False) + >>> ds.resample(time="3ME").cumprod(skipna=False) Dimensions: (time: 6) Dimensions without coordinates: time @@ -5455,7 +5455,7 @@ def count( ... np.array([1, 2, 3, 0, 2, np.nan]), ... dims="time", ... coords=dict( - ... time=("time", pd.date_range("2001-01-01", freq="M", periods=6)), + ... time=("time", pd.date_range("2001-01-01", freq="ME", periods=6)), ... labels=("time", np.array(["a", "b", "c", "c", "b", "a"])), ... ), ... ) @@ -5546,7 +5546,7 @@ def all( ... np.array([True, True, True, True, True, False], dtype=bool), ... dims="time", ... coords=dict( - ... time=("time", pd.date_range("2001-01-01", freq="M", periods=6)), + ... time=("time", pd.date_range("2001-01-01", freq="ME", periods=6)), ... labels=("time", np.array(["a", "b", "c", "c", "b", "a"])), ... ), ... ) @@ -5637,7 +5637,7 @@ def any( ... np.array([True, True, True, True, True, False], dtype=bool), ... dims="time", ... coords=dict( - ... time=("time", pd.date_range("2001-01-01", freq="M", periods=6)), + ... time=("time", pd.date_range("2001-01-01", freq="ME", periods=6)), ... labels=("time", np.array(["a", "b", "c", "c", "b", "a"])), ... ), ... ) @@ -5734,7 +5734,7 @@ def max( ... np.array([1, 2, 3, 0, 2, np.nan]), ... dims="time", ... coords=dict( - ... time=("time", pd.date_range("2001-01-01", freq="M", periods=6)), + ... time=("time", pd.date_range("2001-01-01", freq="ME", periods=6)), ... labels=("time", np.array(["a", "b", "c", "c", "b", "a"])), ... ), ... ) @@ -5841,7 +5841,7 @@ def min( ... np.array([1, 2, 3, 0, 2, np.nan]), ... dims="time", ... coords=dict( - ... time=("time", pd.date_range("2001-01-01", freq="M", periods=6)), + ... time=("time", pd.date_range("2001-01-01", freq="ME", periods=6)), ... labels=("time", np.array(["a", "b", "c", "c", "b", "a"])), ... ), ... ) @@ -5950,7 +5950,7 @@ def mean( ... np.array([1, 2, 3, 0, 2, np.nan]), ... dims="time", ... coords=dict( - ... time=("time", pd.date_range("2001-01-01", freq="M", periods=6)), + ... time=("time", pd.date_range("2001-01-01", freq="ME", periods=6)), ... labels=("time", np.array(["a", "b", "c", "c", "b", "a"])), ... ), ... ) @@ -6066,7 +6066,7 @@ def prod( ... np.array([1, 2, 3, 0, 2, np.nan]), ... dims="time", ... coords=dict( - ... time=("time", pd.date_range("2001-01-01", freq="M", periods=6)), + ... time=("time", pd.date_range("2001-01-01", freq="ME", periods=6)), ... labels=("time", np.array(["a", "b", "c", "c", "b", "a"])), ... ), ... ) @@ -6192,7 +6192,7 @@ def sum( ... np.array([1, 2, 3, 0, 2, np.nan]), ... dims="time", ... coords=dict( - ... time=("time", pd.date_range("2001-01-01", freq="M", periods=6)), + ... time=("time", pd.date_range("2001-01-01", freq="ME", periods=6)), ... labels=("time", np.array(["a", "b", "c", "c", "b", "a"])), ... ), ... ) @@ -6315,7 +6315,7 @@ def std( ... np.array([1, 2, 3, 0, 2, np.nan]), ... dims="time", ... coords=dict( - ... time=("time", pd.date_range("2001-01-01", freq="M", periods=6)), + ... time=("time", pd.date_range("2001-01-01", freq="ME", periods=6)), ... labels=("time", np.array(["a", "b", "c", "c", "b", "a"])), ... ), ... ) @@ -6438,7 +6438,7 @@ def var( ... np.array([1, 2, 3, 0, 2, np.nan]), ... dims="time", ... coords=dict( - ... time=("time", pd.date_range("2001-01-01", freq="M", periods=6)), + ... time=("time", pd.date_range("2001-01-01", freq="ME", periods=6)), ... labels=("time", np.array(["a", "b", "c", "c", "b", "a"])), ... ), ... ) @@ -6557,7 +6557,7 @@ def median( ... np.array([1, 2, 3, 0, 2, np.nan]), ... dims="time", ... 
coords=dict( - ... time=("time", pd.date_range("2001-01-01", freq="M", periods=6)), + ... time=("time", pd.date_range("2001-01-01", freq="ME", periods=6)), ... labels=("time", np.array(["a", "b", "c", "c", "b", "a"])), ... ), ... ) @@ -6652,7 +6652,7 @@ def cumsum( ... np.array([1, 2, 3, 0, 2, np.nan]), ... dims="time", ... coords=dict( - ... time=("time", pd.date_range("2001-01-01", freq="M", periods=6)), + ... time=("time", pd.date_range("2001-01-01", freq="ME", periods=6)), ... labels=("time", np.array(["a", "b", "c", "c", "b", "a"])), ... ), ... ) @@ -6749,7 +6749,7 @@ def cumprod( ... np.array([1, 2, 3, 0, 2, np.nan]), ... dims="time", ... coords=dict( - ... time=("time", pd.date_range("2001-01-01", freq="M", periods=6)), + ... time=("time", pd.date_range("2001-01-01", freq="ME", periods=6)), ... labels=("time", np.array(["a", "b", "c", "c", "b", "a"])), ... ), ... ) @@ -6874,7 +6874,7 @@ def count( ... np.array([1, 2, 3, 0, 2, np.nan]), ... dims="time", ... coords=dict( - ... time=("time", pd.date_range("2001-01-01", freq="M", periods=6)), + ... time=("time", pd.date_range("2001-01-01", freq="ME", periods=6)), ... labels=("time", np.array(["a", "b", "c", "c", "b", "a"])), ... ), ... ) @@ -6885,7 +6885,7 @@ def count( * time (time) datetime64[ns] 2001-01-31 2001-02-28 ... 2001-06-30 labels (time) >> da.resample(time="3M").count() + >>> da.resample(time="3ME").count() array([1, 3, 1]) Coordinates: @@ -6965,7 +6965,7 @@ def all( ... np.array([True, True, True, True, True, False], dtype=bool), ... dims="time", ... coords=dict( - ... time=("time", pd.date_range("2001-01-01", freq="M", periods=6)), + ... time=("time", pd.date_range("2001-01-01", freq="ME", periods=6)), ... labels=("time", np.array(["a", "b", "c", "c", "b", "a"])), ... ), ... ) @@ -6976,7 +6976,7 @@ def all( * time (time) datetime64[ns] 2001-01-31 2001-02-28 ... 2001-06-30 labels (time) >> da.resample(time="3M").all() + >>> da.resample(time="3ME").all() array([ True, True, False]) Coordinates: @@ -7056,7 +7056,7 @@ def any( ... np.array([True, True, True, True, True, False], dtype=bool), ... dims="time", ... coords=dict( - ... time=("time", pd.date_range("2001-01-01", freq="M", periods=6)), + ... time=("time", pd.date_range("2001-01-01", freq="ME", periods=6)), ... labels=("time", np.array(["a", "b", "c", "c", "b", "a"])), ... ), ... ) @@ -7067,7 +7067,7 @@ def any( * time (time) datetime64[ns] 2001-01-31 2001-02-28 ... 2001-06-30 labels (time) >> da.resample(time="3M").any() + >>> da.resample(time="3ME").any() array([ True, True, True]) Coordinates: @@ -7153,7 +7153,7 @@ def max( ... np.array([1, 2, 3, 0, 2, np.nan]), ... dims="time", ... coords=dict( - ... time=("time", pd.date_range("2001-01-01", freq="M", periods=6)), + ... time=("time", pd.date_range("2001-01-01", freq="ME", periods=6)), ... labels=("time", np.array(["a", "b", "c", "c", "b", "a"])), ... ), ... ) @@ -7164,7 +7164,7 @@ def max( * time (time) datetime64[ns] 2001-01-31 2001-02-28 ... 2001-06-30 labels (time) >> da.resample(time="3M").max() + >>> da.resample(time="3ME").max() array([1., 3., 2.]) Coordinates: @@ -7172,7 +7172,7 @@ def max( Use ``skipna`` to control whether NaNs are ignored. - >>> da.resample(time="3M").max(skipna=False) + >>> da.resample(time="3ME").max(skipna=False) array([ 1., 3., nan]) Coordinates: @@ -7260,7 +7260,7 @@ def min( ... np.array([1, 2, 3, 0, 2, np.nan]), ... dims="time", ... coords=dict( - ... time=("time", pd.date_range("2001-01-01", freq="M", periods=6)), + ... 
time=("time", pd.date_range("2001-01-01", freq="ME", periods=6)), ... labels=("time", np.array(["a", "b", "c", "c", "b", "a"])), ... ), ... ) @@ -7271,7 +7271,7 @@ def min( * time (time) datetime64[ns] 2001-01-31 2001-02-28 ... 2001-06-30 labels (time) >> da.resample(time="3M").min() + >>> da.resample(time="3ME").min() array([1., 0., 2.]) Coordinates: @@ -7279,7 +7279,7 @@ def min( Use ``skipna`` to control whether NaNs are ignored. - >>> da.resample(time="3M").min(skipna=False) + >>> da.resample(time="3ME").min(skipna=False) array([ 1., 0., nan]) Coordinates: @@ -7369,7 +7369,7 @@ def mean( ... np.array([1, 2, 3, 0, 2, np.nan]), ... dims="time", ... coords=dict( - ... time=("time", pd.date_range("2001-01-01", freq="M", periods=6)), + ... time=("time", pd.date_range("2001-01-01", freq="ME", periods=6)), ... labels=("time", np.array(["a", "b", "c", "c", "b", "a"])), ... ), ... ) @@ -7380,7 +7380,7 @@ def mean( * time (time) datetime64[ns] 2001-01-31 2001-02-28 ... 2001-06-30 labels (time) >> da.resample(time="3M").mean() + >>> da.resample(time="3ME").mean() array([1. , 1.66666667, 2. ]) Coordinates: @@ -7388,7 +7388,7 @@ def mean( Use ``skipna`` to control whether NaNs are ignored. - >>> da.resample(time="3M").mean(skipna=False) + >>> da.resample(time="3ME").mean(skipna=False) array([1. , 1.66666667, nan]) Coordinates: @@ -7485,7 +7485,7 @@ def prod( ... np.array([1, 2, 3, 0, 2, np.nan]), ... dims="time", ... coords=dict( - ... time=("time", pd.date_range("2001-01-01", freq="M", periods=6)), + ... time=("time", pd.date_range("2001-01-01", freq="ME", periods=6)), ... labels=("time", np.array(["a", "b", "c", "c", "b", "a"])), ... ), ... ) @@ -7496,7 +7496,7 @@ def prod( * time (time) datetime64[ns] 2001-01-31 2001-02-28 ... 2001-06-30 labels (time) >> da.resample(time="3M").prod() + >>> da.resample(time="3ME").prod() array([1., 0., 2.]) Coordinates: @@ -7504,7 +7504,7 @@ def prod( Use ``skipna`` to control whether NaNs are ignored. - >>> da.resample(time="3M").prod(skipna=False) + >>> da.resample(time="3ME").prod(skipna=False) array([ 1., 0., nan]) Coordinates: @@ -7512,7 +7512,7 @@ def prod( Specify ``min_count`` for finer control over when NaNs are ignored. - >>> da.resample(time="3M").prod(skipna=True, min_count=2) + >>> da.resample(time="3ME").prod(skipna=True, min_count=2) array([nan, 0., nan]) Coordinates: @@ -7611,7 +7611,7 @@ def sum( ... np.array([1, 2, 3, 0, 2, np.nan]), ... dims="time", ... coords=dict( - ... time=("time", pd.date_range("2001-01-01", freq="M", periods=6)), + ... time=("time", pd.date_range("2001-01-01", freq="ME", periods=6)), ... labels=("time", np.array(["a", "b", "c", "c", "b", "a"])), ... ), ... ) @@ -7622,7 +7622,7 @@ def sum( * time (time) datetime64[ns] 2001-01-31 2001-02-28 ... 2001-06-30 labels (time) >> da.resample(time="3M").sum() + >>> da.resample(time="3ME").sum() array([1., 5., 2.]) Coordinates: @@ -7630,7 +7630,7 @@ def sum( Use ``skipna`` to control whether NaNs are ignored. - >>> da.resample(time="3M").sum(skipna=False) + >>> da.resample(time="3ME").sum(skipna=False) array([ 1., 5., nan]) Coordinates: @@ -7638,7 +7638,7 @@ def sum( Specify ``min_count`` for finer control over when NaNs are ignored. - >>> da.resample(time="3M").sum(skipna=True, min_count=2) + >>> da.resample(time="3ME").sum(skipna=True, min_count=2) array([nan, 5., nan]) Coordinates: @@ -7734,7 +7734,7 @@ def std( ... np.array([1, 2, 3, 0, 2, np.nan]), ... dims="time", ... coords=dict( - ... time=("time", pd.date_range("2001-01-01", freq="M", periods=6)), + ... 
time=("time", pd.date_range("2001-01-01", freq="ME", periods=6)), ... labels=("time", np.array(["a", "b", "c", "c", "b", "a"])), ... ), ... ) @@ -7745,7 +7745,7 @@ def std( * time (time) datetime64[ns] 2001-01-31 2001-02-28 ... 2001-06-30 labels (time) >> da.resample(time="3M").std() + >>> da.resample(time="3ME").std() array([0. , 1.24721913, 0. ]) Coordinates: @@ -7753,7 +7753,7 @@ def std( Use ``skipna`` to control whether NaNs are ignored. - >>> da.resample(time="3M").std(skipna=False) + >>> da.resample(time="3ME").std(skipna=False) array([0. , 1.24721913, nan]) Coordinates: @@ -7761,7 +7761,7 @@ def std( Specify ``ddof=1`` for an unbiased estimate. - >>> da.resample(time="3M").std(skipna=True, ddof=1) + >>> da.resample(time="3ME").std(skipna=True, ddof=1) array([ nan, 1.52752523, nan]) Coordinates: @@ -7857,7 +7857,7 @@ def var( ... np.array([1, 2, 3, 0, 2, np.nan]), ... dims="time", ... coords=dict( - ... time=("time", pd.date_range("2001-01-01", freq="M", periods=6)), + ... time=("time", pd.date_range("2001-01-01", freq="ME", periods=6)), ... labels=("time", np.array(["a", "b", "c", "c", "b", "a"])), ... ), ... ) @@ -7868,7 +7868,7 @@ def var( * time (time) datetime64[ns] 2001-01-31 2001-02-28 ... 2001-06-30 labels (time) >> da.resample(time="3M").var() + >>> da.resample(time="3ME").var() array([0. , 1.55555556, 0. ]) Coordinates: @@ -7876,7 +7876,7 @@ def var( Use ``skipna`` to control whether NaNs are ignored. - >>> da.resample(time="3M").var(skipna=False) + >>> da.resample(time="3ME").var(skipna=False) array([0. , 1.55555556, nan]) Coordinates: @@ -7884,7 +7884,7 @@ def var( Specify ``ddof=1`` for an unbiased estimate. - >>> da.resample(time="3M").var(skipna=True, ddof=1) + >>> da.resample(time="3ME").var(skipna=True, ddof=1) array([ nan, 2.33333333, nan]) Coordinates: @@ -7976,7 +7976,7 @@ def median( ... np.array([1, 2, 3, 0, 2, np.nan]), ... dims="time", ... coords=dict( - ... time=("time", pd.date_range("2001-01-01", freq="M", periods=6)), + ... time=("time", pd.date_range("2001-01-01", freq="ME", periods=6)), ... labels=("time", np.array(["a", "b", "c", "c", "b", "a"])), ... ), ... ) @@ -7987,7 +7987,7 @@ def median( * time (time) datetime64[ns] 2001-01-31 2001-02-28 ... 2001-06-30 labels (time) >> da.resample(time="3M").median() + >>> da.resample(time="3ME").median() array([1., 2., 2.]) Coordinates: @@ -7995,7 +7995,7 @@ def median( Use ``skipna`` to control whether NaNs are ignored. - >>> da.resample(time="3M").median(skipna=False) + >>> da.resample(time="3ME").median(skipna=False) array([ 1., 2., nan]) Coordinates: @@ -8071,7 +8071,7 @@ def cumsum( ... np.array([1, 2, 3, 0, 2, np.nan]), ... dims="time", ... coords=dict( - ... time=("time", pd.date_range("2001-01-01", freq="M", periods=6)), + ... time=("time", pd.date_range("2001-01-01", freq="ME", periods=6)), ... labels=("time", np.array(["a", "b", "c", "c", "b", "a"])), ... ), ... ) @@ -8082,7 +8082,7 @@ def cumsum( * time (time) datetime64[ns] 2001-01-31 2001-02-28 ... 2001-06-30 labels (time) >> da.resample(time="3M").cumsum() + >>> da.resample(time="3ME").cumsum() array([1., 2., 5., 5., 2., 2.]) Coordinates: @@ -8091,7 +8091,7 @@ def cumsum( Use ``skipna`` to control whether NaNs are ignored. - >>> da.resample(time="3M").cumsum(skipna=False) + >>> da.resample(time="3ME").cumsum(skipna=False) array([ 1., 2., 5., 5., 2., nan]) Coordinates: @@ -8168,7 +8168,7 @@ def cumprod( ... np.array([1, 2, 3, 0, 2, np.nan]), ... dims="time", ... coords=dict( - ... 
time=("time", pd.date_range("2001-01-01", freq="M", periods=6)), + ... time=("time", pd.date_range("2001-01-01", freq="ME", periods=6)), ... labels=("time", np.array(["a", "b", "c", "c", "b", "a"])), ... ), ... ) @@ -8179,7 +8179,7 @@ def cumprod( * time (time) datetime64[ns] 2001-01-31 2001-02-28 ... 2001-06-30 labels (time) >> da.resample(time="3M").cumprod() + >>> da.resample(time="3ME").cumprod() array([1., 2., 6., 0., 2., 2.]) Coordinates: @@ -8188,7 +8188,7 @@ def cumprod( Use ``skipna`` to control whether NaNs are ignored. - >>> da.resample(time="3M").cumprod(skipna=False) + >>> da.resample(time="3ME").cumprod(skipna=False) array([ 1., 2., 6., 0., 2., nan]) Coordinates: diff --git a/xarray/core/accessor_dt.py b/xarray/core/accessor_dt.py index b57c2f3857c..2b964edbea7 100644 --- a/xarray/core/accessor_dt.py +++ b/xarray/core/accessor_dt.py @@ -545,7 +545,7 @@ class TimedeltaAccessor(TimeAccessor[T_DataArray]): Examples -------- - >>> dates = pd.timedelta_range(start="1 day", freq="6H", periods=20) + >>> dates = pd.timedelta_range(start="1 day", freq="6h", periods=20) >>> ts = xr.DataArray(dates, dims=("time")) >>> ts diff --git a/xarray/core/dataarray.py b/xarray/core/dataarray.py index d2ea0f8a1a4..0faf889de3d 100644 --- a/xarray/core/dataarray.py +++ b/xarray/core/dataarray.py @@ -6349,7 +6349,7 @@ def curvefit( ... param="time_constant" ... ) # doctest: +NUMBER - array([1.0569203, 1.7354963, 2.9421577]) + array([1.05692035, 1.73549638, 2.9421577 ]) Coordinates: * x (x) int64 0 1 2 param None: assert_equal(actual.compute(), expected.compute()) def test_seasons(self) -> None: - dates = pd.date_range(start="2000/01/01", freq="M", periods=12) + dates = pd.date_range(start="2000/01/01", freq="ME", periods=12) dates = dates.append(pd.Index([np.datetime64("NaT")])) dates = xr.DataArray(dates) seasons = xr.DataArray( diff --git a/xarray/tests/test_cftime_offsets.py b/xarray/tests/test_cftime_offsets.py index 0ffcb5e8ab9..2de97e061c5 100644 --- a/xarray/tests/test_cftime_offsets.py +++ b/xarray/tests/test_cftime_offsets.py @@ -1406,8 +1406,8 @@ def test_date_range_like(start, freq, cal_src, cal_tgt, use_cftime, exp0, exp_pd elif "YS" in freq: freq = freq.replace("YS", "AS") expected_pandas_freq = freq - elif "Y-" in freq: - freq = freq.replace("Y-", "A-") + elif "YE-" in freq: + freq = freq.replace("YE-", "Y-") expected_pandas_freq = freq elif "h" in freq: expected_pandas_freq = freq.replace("h", "H") From fb70add4da390db367a63f1f8f5009c1a597944d Mon Sep 17 00:00:00 2001 From: nameloCmaS Date: Mon, 22 Jan 2024 00:04:29 +0000 Subject: [PATCH 03/13] Update test_cftime_offsets.py --- xarray/tests/test_cftime_offsets.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/xarray/tests/test_cftime_offsets.py b/xarray/tests/test_cftime_offsets.py index 2de97e061c5..5bf3b812da7 100644 --- a/xarray/tests/test_cftime_offsets.py +++ b/xarray/tests/test_cftime_offsets.py @@ -1407,7 +1407,7 @@ def test_date_range_like(start, freq, cal_src, cal_tgt, use_cftime, exp0, exp_pd freq = freq.replace("YS", "AS") expected_pandas_freq = freq elif "YE-" in freq: - freq = freq.replace("YE-", "Y-") + freq = freq.replace("YE-", "A-") expected_pandas_freq = freq elif "h" in freq: expected_pandas_freq = freq.replace("h", "H") From d6084f8a926e884c657c9d28c1d17a5f3cccebd8 Mon Sep 17 00:00:00 2001 From: nameloCmaS Date: Mon, 22 Jan 2024 00:56:19 +0000 Subject: [PATCH 04/13] Update whats-new.rst Changed to code-block rather than suppressing the warning as PR comment. 
--- doc/whats-new.rst | 13 +++++++++---- 1 file changed, 9 insertions(+), 4 deletions(-) diff --git a/doc/whats-new.rst b/doc/whats-new.rst index 041da92a2ac..298b102a600 100644 --- a/doc/whats-new.rst +++ b/doc/whats-new.rst @@ -7224,11 +7224,16 @@ Breaking changes - The ``season`` datetime shortcut now returns an array of string labels such `'DJF'`: - .. ipython:: python - :okwarning: + .. code-block:: ipython - ds = xray.Dataset({"t": pd.date_range("2000-01-01", periods=12, freq="M")}) - ds["t.season"] + In[92]: ds = xray.Dataset({"t": pd.date_range("2000-01-01", periods=12, freq="M")}) + + In[93]: ds["t.season"] + Out[93]: + + array(['DJF', 'DJF', 'MAM', ..., 'SON', 'SON', 'DJF'], dtype=' Date: Mon, 22 Jan 2024 01:11:45 +0000 Subject: [PATCH 05/13] Update dataarray.py Reverted due to different precision on aarch64 leading to different output. --- xarray/core/dataarray.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/xarray/core/dataarray.py b/xarray/core/dataarray.py index 0faf889de3d..83bc7c7c53c 100644 --- a/xarray/core/dataarray.py +++ b/xarray/core/dataarray.py @@ -6349,7 +6349,7 @@ def curvefit( ... param="time_constant" ... ) # doctest: +NUMBER - array([1.05692035, 1.73549638, 2.9421577 ]) + array([1.05692036, 1.73549638, 2.94215771]) Coordinates: * x (x) int64 0 1 2 param Date: Mon, 22 Jan 2024 01:15:31 +0000 Subject: [PATCH 06/13] Update test_accessor_dt.py Reverted --- xarray/tests/test_accessor_dt.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/xarray/tests/test_accessor_dt.py b/xarray/tests/test_accessor_dt.py index 1f910f9b412..387929d3fe9 100644 --- a/xarray/tests/test_accessor_dt.py +++ b/xarray/tests/test_accessor_dt.py @@ -248,7 +248,7 @@ def test_dask_accessor_method(self, method, parameters) -> None: assert_equal(actual.compute(), expected.compute()) def test_seasons(self) -> None: - dates = pd.date_range(start="2000/01/01", freq="ME", periods=12) + dates = pd.date_range(start="2000/01/01", freq="M", periods=12) dates = dates.append(pd.Index([np.datetime64("NaT")])) dates = xr.DataArray(dates) seasons = xr.DataArray( From f29beff034ca556e0c155d05b86c1ce630078f1d Mon Sep 17 00:00:00 2001 From: nameloCmaS Date: Mon, 22 Jan 2024 01:18:12 +0000 Subject: [PATCH 07/13] Update test_cftime_offsets.py Reverted changes. 
--- xarray/tests/test_cftime_offsets.py | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/xarray/tests/test_cftime_offsets.py b/xarray/tests/test_cftime_offsets.py index 5bf3b812da7..0ffcb5e8ab9 100644 --- a/xarray/tests/test_cftime_offsets.py +++ b/xarray/tests/test_cftime_offsets.py @@ -1406,8 +1406,8 @@ def test_date_range_like(start, freq, cal_src, cal_tgt, use_cftime, exp0, exp_pd elif "YS" in freq: freq = freq.replace("YS", "AS") expected_pandas_freq = freq - elif "YE-" in freq: - freq = freq.replace("YE-", "A-") + elif "Y-" in freq: + freq = freq.replace("Y-", "A-") expected_pandas_freq = freq elif "h" in freq: expected_pandas_freq = freq.replace("h", "H") From 6d866c1b218c56262e6f09c4158ee761cc1d06b1 Mon Sep 17 00:00:00 2001 From: "pre-commit-ci[bot]" <66853113+pre-commit-ci[bot]@users.noreply.github.com> Date: Mon, 22 Jan 2024 19:10:12 +0000 Subject: [PATCH 08/13] [pre-commit.ci] auto fixes from pre-commit.com hooks for more information, see https://pre-commit.ci --- doc/whats-new.rst | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/doc/whats-new.rst b/doc/whats-new.rst index 316cd034cd1..be4190551ff 100644 --- a/doc/whats-new.rst +++ b/doc/whats-new.rst @@ -7272,11 +7272,11 @@ Breaking changes .. ipython:: python :okwarning: - + In[92]: ds = xray.Dataset({"t": pd.date_range("2000-01-01", periods=12, freq="M")}) In[93]: ds["t.season"] - Out[93]: + Out[93]: array(['DJF', 'DJF', 'MAM', ..., 'SON', 'SON', 'DJF'], dtype=' Date: Mon, 22 Jan 2024 21:50:15 +0000 Subject: [PATCH 09/13] Update _aggregations.py Reversed changes --- xarray/core/_aggregations.py | 264 +++++++++++++++++------------------ 1 file changed, 132 insertions(+), 132 deletions(-) diff --git a/xarray/core/_aggregations.py b/xarray/core/_aggregations.py index e214c2c7c5a..0d4b4413b7c 100644 --- a/xarray/core/_aggregations.py +++ b/xarray/core/_aggregations.py @@ -77,7 +77,7 @@ def count( ... np.array([1, 2, 3, 0, 2, np.nan]), ... dims="time", ... coords=dict( - ... time=("time", pd.date_range("2001-01-01", freq="ME", periods=6)), + ... time=("time", pd.date_range("2001-01-01", freq="M", periods=6)), ... labels=("time", np.array(["a", "b", "c", "c", "b", "a"])), ... ), ... ) @@ -149,7 +149,7 @@ def all( ... np.array([True, True, True, True, True, False], dtype=bool), ... dims="time", ... coords=dict( - ... time=("time", pd.date_range("2001-01-01", freq="ME", periods=6)), + ... time=("time", pd.date_range("2001-01-01", freq="M", periods=6)), ... labels=("time", np.array(["a", "b", "c", "c", "b", "a"])), ... ), ... ) @@ -221,7 +221,7 @@ def any( ... np.array([True, True, True, True, True, False], dtype=bool), ... dims="time", ... coords=dict( - ... time=("time", pd.date_range("2001-01-01", freq="ME", periods=6)), + ... time=("time", pd.date_range("2001-01-01", freq="M", periods=6)), ... labels=("time", np.array(["a", "b", "c", "c", "b", "a"])), ... ), ... ) @@ -299,7 +299,7 @@ def max( ... np.array([1, 2, 3, 0, 2, np.nan]), ... dims="time", ... coords=dict( - ... time=("time", pd.date_range("2001-01-01", freq="ME", periods=6)), + ... time=("time", pd.date_range("2001-01-01", freq="M", periods=6)), ... labels=("time", np.array(["a", "b", "c", "c", "b", "a"])), ... ), ... ) @@ -386,7 +386,7 @@ def min( ... np.array([1, 2, 3, 0, 2, np.nan]), ... dims="time", ... coords=dict( - ... time=("time", pd.date_range("2001-01-01", freq="ME", periods=6)), + ... time=("time", pd.date_range("2001-01-01", freq="M", periods=6)), ... labels=("time", np.array(["a", "b", "c", "c", "b", "a"])), ... 
), ... ) @@ -477,7 +477,7 @@ def mean( ... np.array([1, 2, 3, 0, 2, np.nan]), ... dims="time", ... coords=dict( - ... time=("time", pd.date_range("2001-01-01", freq="ME", periods=6)), + ... time=("time", pd.date_range("2001-01-01", freq="M", periods=6)), ... labels=("time", np.array(["a", "b", "c", "c", "b", "a"])), ... ), ... ) @@ -575,7 +575,7 @@ def prod( ... np.array([1, 2, 3, 0, 2, np.nan]), ... dims="time", ... coords=dict( - ... time=("time", pd.date_range("2001-01-01", freq="ME", periods=6)), + ... time=("time", pd.date_range("2001-01-01", freq="M", periods=6)), ... labels=("time", np.array(["a", "b", "c", "c", "b", "a"])), ... ), ... ) @@ -682,7 +682,7 @@ def sum( ... np.array([1, 2, 3, 0, 2, np.nan]), ... dims="time", ... coords=dict( - ... time=("time", pd.date_range("2001-01-01", freq="ME", periods=6)), + ... time=("time", pd.date_range("2001-01-01", freq="M", periods=6)), ... labels=("time", np.array(["a", "b", "c", "c", "b", "a"])), ... ), ... ) @@ -786,7 +786,7 @@ def std( ... np.array([1, 2, 3, 0, 2, np.nan]), ... dims="time", ... coords=dict( - ... time=("time", pd.date_range("2001-01-01", freq="ME", periods=6)), + ... time=("time", pd.date_range("2001-01-01", freq="M", periods=6)), ... labels=("time", np.array(["a", "b", "c", "c", "b", "a"])), ... ), ... ) @@ -890,7 +890,7 @@ def var( ... np.array([1, 2, 3, 0, 2, np.nan]), ... dims="time", ... coords=dict( - ... time=("time", pd.date_range("2001-01-01", freq="ME", periods=6)), + ... time=("time", pd.date_range("2001-01-01", freq="M", periods=6)), ... labels=("time", np.array(["a", "b", "c", "c", "b", "a"])), ... ), ... ) @@ -990,7 +990,7 @@ def median( ... np.array([1, 2, 3, 0, 2, np.nan]), ... dims="time", ... coords=dict( - ... time=("time", pd.date_range("2001-01-01", freq="ME", periods=6)), + ... time=("time", pd.date_range("2001-01-01", freq="M", periods=6)), ... labels=("time", np.array(["a", "b", "c", "c", "b", "a"])), ... ), ... ) @@ -1081,7 +1081,7 @@ def cumsum( ... np.array([1, 2, 3, 0, 2, np.nan]), ... dims="time", ... coords=dict( - ... time=("time", pd.date_range("2001-01-01", freq="ME", periods=6)), + ... time=("time", pd.date_range("2001-01-01", freq="M", periods=6)), ... labels=("time", np.array(["a", "b", "c", "c", "b", "a"])), ... ), ... ) @@ -1174,7 +1174,7 @@ def cumprod( ... np.array([1, 2, 3, 0, 2, np.nan]), ... dims="time", ... coords=dict( - ... time=("time", pd.date_range("2001-01-01", freq="ME", periods=6)), + ... time=("time", pd.date_range("2001-01-01", freq="M", periods=6)), ... labels=("time", np.array(["a", "b", "c", "c", "b", "a"])), ... ), ... ) @@ -1273,7 +1273,7 @@ def count( ... np.array([1, 2, 3, 0, 2, np.nan]), ... dims="time", ... coords=dict( - ... time=("time", pd.date_range("2001-01-01", freq="ME", periods=6)), + ... time=("time", pd.date_range("2001-01-01", freq="M", periods=6)), ... labels=("time", np.array(["a", "b", "c", "c", "b", "a"])), ... ), ... ) @@ -1339,7 +1339,7 @@ def all( ... np.array([True, True, True, True, True, False], dtype=bool), ... dims="time", ... coords=dict( - ... time=("time", pd.date_range("2001-01-01", freq="ME", periods=6)), + ... time=("time", pd.date_range("2001-01-01", freq="M", periods=6)), ... labels=("time", np.array(["a", "b", "c", "c", "b", "a"])), ... ), ... ) @@ -1405,7 +1405,7 @@ def any( ... np.array([True, True, True, True, True, False], dtype=bool), ... dims="time", ... coords=dict( - ... time=("time", pd.date_range("2001-01-01", freq="ME", periods=6)), + ... time=("time", pd.date_range("2001-01-01", freq="M", periods=6)), ... 
labels=("time", np.array(["a", "b", "c", "c", "b", "a"])), ... ), ... ) @@ -1477,7 +1477,7 @@ def max( ... np.array([1, 2, 3, 0, 2, np.nan]), ... dims="time", ... coords=dict( - ... time=("time", pd.date_range("2001-01-01", freq="ME", periods=6)), + ... time=("time", pd.date_range("2001-01-01", freq="M", periods=6)), ... labels=("time", np.array(["a", "b", "c", "c", "b", "a"])), ... ), ... ) @@ -1556,7 +1556,7 @@ def min( ... np.array([1, 2, 3, 0, 2, np.nan]), ... dims="time", ... coords=dict( - ... time=("time", pd.date_range("2001-01-01", freq="ME", periods=6)), + ... time=("time", pd.date_range("2001-01-01", freq="M", periods=6)), ... labels=("time", np.array(["a", "b", "c", "c", "b", "a"])), ... ), ... ) @@ -1639,7 +1639,7 @@ def mean( ... np.array([1, 2, 3, 0, 2, np.nan]), ... dims="time", ... coords=dict( - ... time=("time", pd.date_range("2001-01-01", freq="ME", periods=6)), + ... time=("time", pd.date_range("2001-01-01", freq="M", periods=6)), ... labels=("time", np.array(["a", "b", "c", "c", "b", "a"])), ... ), ... ) @@ -1729,7 +1729,7 @@ def prod( ... np.array([1, 2, 3, 0, 2, np.nan]), ... dims="time", ... coords=dict( - ... time=("time", pd.date_range("2001-01-01", freq="ME", periods=6)), + ... time=("time", pd.date_range("2001-01-01", freq="M", periods=6)), ... labels=("time", np.array(["a", "b", "c", "c", "b", "a"])), ... ), ... ) @@ -1826,7 +1826,7 @@ def sum( ... np.array([1, 2, 3, 0, 2, np.nan]), ... dims="time", ... coords=dict( - ... time=("time", pd.date_range("2001-01-01", freq="ME", periods=6)), + ... time=("time", pd.date_range("2001-01-01", freq="M", periods=6)), ... labels=("time", np.array(["a", "b", "c", "c", "b", "a"])), ... ), ... ) @@ -1920,7 +1920,7 @@ def std( ... np.array([1, 2, 3, 0, 2, np.nan]), ... dims="time", ... coords=dict( - ... time=("time", pd.date_range("2001-01-01", freq="ME", periods=6)), + ... time=("time", pd.date_range("2001-01-01", freq="M", periods=6)), ... labels=("time", np.array(["a", "b", "c", "c", "b", "a"])), ... ), ... ) @@ -2014,7 +2014,7 @@ def var( ... np.array([1, 2, 3, 0, 2, np.nan]), ... dims="time", ... coords=dict( - ... time=("time", pd.date_range("2001-01-01", freq="ME", periods=6)), + ... time=("time", pd.date_range("2001-01-01", freq="M", periods=6)), ... labels=("time", np.array(["a", "b", "c", "c", "b", "a"])), ... ), ... ) @@ -2104,7 +2104,7 @@ def median( ... np.array([1, 2, 3, 0, 2, np.nan]), ... dims="time", ... coords=dict( - ... time=("time", pd.date_range("2001-01-01", freq="ME", periods=6)), + ... time=("time", pd.date_range("2001-01-01", freq="M", periods=6)), ... labels=("time", np.array(["a", "b", "c", "c", "b", "a"])), ... ), ... ) @@ -2187,7 +2187,7 @@ def cumsum( ... np.array([1, 2, 3, 0, 2, np.nan]), ... dims="time", ... coords=dict( - ... time=("time", pd.date_range("2001-01-01", freq="ME", periods=6)), + ... time=("time", pd.date_range("2001-01-01", freq="M", periods=6)), ... labels=("time", np.array(["a", "b", "c", "c", "b", "a"])), ... ), ... ) @@ -2276,7 +2276,7 @@ def cumprod( ... np.array([1, 2, 3, 0, 2, np.nan]), ... dims="time", ... coords=dict( - ... time=("time", pd.date_range("2001-01-01", freq="ME", periods=6)), + ... time=("time", pd.date_range("2001-01-01", freq="M", periods=6)), ... labels=("time", np.array(["a", "b", "c", "c", "b", "a"])), ... ), ... ) @@ -2401,7 +2401,7 @@ def count( ... np.array([1, 2, 3, 0, 2, np.nan]), ... dims="time", ... coords=dict( - ... time=("time", pd.date_range("2001-01-01", freq="ME", periods=6)), + ... 
time=("time", pd.date_range("2001-01-01", freq="M", periods=6)), ... labels=("time", np.array(["a", "b", "c", "c", "b", "a"])), ... ), ... ) @@ -2499,7 +2499,7 @@ def all( ... np.array([True, True, True, True, True, False], dtype=bool), ... dims="time", ... coords=dict( - ... time=("time", pd.date_range("2001-01-01", freq="ME", periods=6)), + ... time=("time", pd.date_range("2001-01-01", freq="M", periods=6)), ... labels=("time", np.array(["a", "b", "c", "c", "b", "a"])), ... ), ... ) @@ -2597,7 +2597,7 @@ def any( ... np.array([True, True, True, True, True, False], dtype=bool), ... dims="time", ... coords=dict( - ... time=("time", pd.date_range("2001-01-01", freq="ME", periods=6)), + ... time=("time", pd.date_range("2001-01-01", freq="M", periods=6)), ... labels=("time", np.array(["a", "b", "c", "c", "b", "a"])), ... ), ... ) @@ -2701,7 +2701,7 @@ def max( ... np.array([1, 2, 3, 0, 2, np.nan]), ... dims="time", ... coords=dict( - ... time=("time", pd.date_range("2001-01-01", freq="ME", periods=6)), + ... time=("time", pd.date_range("2001-01-01", freq="M", periods=6)), ... labels=("time", np.array(["a", "b", "c", "c", "b", "a"])), ... ), ... ) @@ -2817,7 +2817,7 @@ def min( ... np.array([1, 2, 3, 0, 2, np.nan]), ... dims="time", ... coords=dict( - ... time=("time", pd.date_range("2001-01-01", freq="ME", periods=6)), + ... time=("time", pd.date_range("2001-01-01", freq="M", periods=6)), ... labels=("time", np.array(["a", "b", "c", "c", "b", "a"])), ... ), ... ) @@ -2935,7 +2935,7 @@ def mean( ... np.array([1, 2, 3, 0, 2, np.nan]), ... dims="time", ... coords=dict( - ... time=("time", pd.date_range("2001-01-01", freq="ME", periods=6)), + ... time=("time", pd.date_range("2001-01-01", freq="M", periods=6)), ... labels=("time", np.array(["a", "b", "c", "c", "b", "a"])), ... ), ... ) @@ -3060,7 +3060,7 @@ def prod( ... np.array([1, 2, 3, 0, 2, np.nan]), ... dims="time", ... coords=dict( - ... time=("time", pd.date_range("2001-01-01", freq="ME", periods=6)), + ... time=("time", pd.date_range("2001-01-01", freq="M", periods=6)), ... labels=("time", np.array(["a", "b", "c", "c", "b", "a"])), ... ), ... ) @@ -3197,7 +3197,7 @@ def sum( ... np.array([1, 2, 3, 0, 2, np.nan]), ... dims="time", ... coords=dict( - ... time=("time", pd.date_range("2001-01-01", freq="ME", periods=6)), + ... time=("time", pd.date_range("2001-01-01", freq="M", periods=6)), ... labels=("time", np.array(["a", "b", "c", "c", "b", "a"])), ... ), ... ) @@ -3331,7 +3331,7 @@ def std( ... np.array([1, 2, 3, 0, 2, np.nan]), ... dims="time", ... coords=dict( - ... time=("time", pd.date_range("2001-01-01", freq="ME", periods=6)), + ... time=("time", pd.date_range("2001-01-01", freq="M", periods=6)), ... labels=("time", np.array(["a", "b", "c", "c", "b", "a"])), ... ), ... ) @@ -3465,7 +3465,7 @@ def var( ... np.array([1, 2, 3, 0, 2, np.nan]), ... dims="time", ... coords=dict( - ... time=("time", pd.date_range("2001-01-01", freq="ME", periods=6)), + ... time=("time", pd.date_range("2001-01-01", freq="M", periods=6)), ... labels=("time", np.array(["a", "b", "c", "c", "b", "a"])), ... ), ... ) @@ -3595,7 +3595,7 @@ def median( ... np.array([1, 2, 3, 0, 2, np.nan]), ... dims="time", ... coords=dict( - ... time=("time", pd.date_range("2001-01-01", freq="ME", periods=6)), + ... time=("time", pd.date_range("2001-01-01", freq="M", periods=6)), ... labels=("time", np.array(["a", "b", "c", "c", "b", "a"])), ... ), ... ) @@ -3698,7 +3698,7 @@ def cumsum( ... np.array([1, 2, 3, 0, 2, np.nan]), ... dims="time", ... coords=dict( - ... 
time=("time", pd.date_range("2001-01-01", freq="ME", periods=6)), + ... time=("time", pd.date_range("2001-01-01", freq="M", periods=6)), ... labels=("time", np.array(["a", "b", "c", "c", "b", "a"])), ... ), ... ) @@ -3799,7 +3799,7 @@ def cumprod( ... np.array([1, 2, 3, 0, 2, np.nan]), ... dims="time", ... coords=dict( - ... time=("time", pd.date_range("2001-01-01", freq="ME", periods=6)), + ... time=("time", pd.date_range("2001-01-01", freq="M", periods=6)), ... labels=("time", np.array(["a", "b", "c", "c", "b", "a"])), ... ), ... ) @@ -3928,7 +3928,7 @@ def count( ... np.array([1, 2, 3, 0, 2, np.nan]), ... dims="time", ... coords=dict( - ... time=("time", pd.date_range("2001-01-01", freq="ME", periods=6)), + ... time=("time", pd.date_range("2001-01-01", freq="M", periods=6)), ... labels=("time", np.array(["a", "b", "c", "c", "b", "a"])), ... ), ... ) @@ -3942,7 +3942,7 @@ def count( Data variables: da (time) float64 1.0 2.0 3.0 0.0 2.0 nan - >>> ds.resample(time="3ME").count() + >>> ds.resample(time="3M").count() Dimensions: (time: 3) Coordinates: @@ -4026,7 +4026,7 @@ def all( ... np.array([True, True, True, True, True, False], dtype=bool), ... dims="time", ... coords=dict( - ... time=("time", pd.date_range("2001-01-01", freq="ME", periods=6)), + ... time=("time", pd.date_range("2001-01-01", freq="M", periods=6)), ... labels=("time", np.array(["a", "b", "c", "c", "b", "a"])), ... ), ... ) @@ -4040,7 +4040,7 @@ def all( Data variables: da (time) bool True True True True True False - >>> ds.resample(time="3ME").all() + >>> ds.resample(time="3M").all() Dimensions: (time: 3) Coordinates: @@ -4124,7 +4124,7 @@ def any( ... np.array([True, True, True, True, True, False], dtype=bool), ... dims="time", ... coords=dict( - ... time=("time", pd.date_range("2001-01-01", freq="ME", periods=6)), + ... time=("time", pd.date_range("2001-01-01", freq="M", periods=6)), ... labels=("time", np.array(["a", "b", "c", "c", "b", "a"])), ... ), ... ) @@ -4138,7 +4138,7 @@ def any( Data variables: da (time) bool True True True True True False - >>> ds.resample(time="3ME").any() + >>> ds.resample(time="3M").any() Dimensions: (time: 3) Coordinates: @@ -4228,7 +4228,7 @@ def max( ... np.array([1, 2, 3, 0, 2, np.nan]), ... dims="time", ... coords=dict( - ... time=("time", pd.date_range("2001-01-01", freq="ME", periods=6)), + ... time=("time", pd.date_range("2001-01-01", freq="M", periods=6)), ... labels=("time", np.array(["a", "b", "c", "c", "b", "a"])), ... ), ... ) @@ -4242,7 +4242,7 @@ def max( Data variables: da (time) float64 1.0 2.0 3.0 0.0 2.0 nan - >>> ds.resample(time="3ME").max() + >>> ds.resample(time="3M").max() Dimensions: (time: 3) Coordinates: @@ -4252,7 +4252,7 @@ def max( Use ``skipna`` to control whether NaNs are ignored. - >>> ds.resample(time="3ME").max(skipna=False) + >>> ds.resample(time="3M").max(skipna=False) Dimensions: (time: 3) Coordinates: @@ -4344,7 +4344,7 @@ def min( ... np.array([1, 2, 3, 0, 2, np.nan]), ... dims="time", ... coords=dict( - ... time=("time", pd.date_range("2001-01-01", freq="ME", periods=6)), + ... time=("time", pd.date_range("2001-01-01", freq="M", periods=6)), ... labels=("time", np.array(["a", "b", "c", "c", "b", "a"])), ... ), ... ) @@ -4358,7 +4358,7 @@ def min( Data variables: da (time) float64 1.0 2.0 3.0 0.0 2.0 nan - >>> ds.resample(time="3ME").min() + >>> ds.resample(time="3M").min() Dimensions: (time: 3) Coordinates: @@ -4368,7 +4368,7 @@ def min( Use ``skipna`` to control whether NaNs are ignored. 
- >>> ds.resample(time="3ME").min(skipna=False) + >>> ds.resample(time="3M").min(skipna=False) Dimensions: (time: 3) Coordinates: @@ -4462,7 +4462,7 @@ def mean( ... np.array([1, 2, 3, 0, 2, np.nan]), ... dims="time", ... coords=dict( - ... time=("time", pd.date_range("2001-01-01", freq="ME", periods=6)), + ... time=("time", pd.date_range("2001-01-01", freq="M", periods=6)), ... labels=("time", np.array(["a", "b", "c", "c", "b", "a"])), ... ), ... ) @@ -4476,7 +4476,7 @@ def mean( Data variables: da (time) float64 1.0 2.0 3.0 0.0 2.0 nan - >>> ds.resample(time="3ME").mean() + >>> ds.resample(time="3M").mean() Dimensions: (time: 3) Coordinates: @@ -4486,7 +4486,7 @@ def mean( Use ``skipna`` to control whether NaNs are ignored. - >>> ds.resample(time="3ME").mean(skipna=False) + >>> ds.resample(time="3M").mean(skipna=False) Dimensions: (time: 3) Coordinates: @@ -4587,7 +4587,7 @@ def prod( ... np.array([1, 2, 3, 0, 2, np.nan]), ... dims="time", ... coords=dict( - ... time=("time", pd.date_range("2001-01-01", freq="ME", periods=6)), + ... time=("time", pd.date_range("2001-01-01", freq="M", periods=6)), ... labels=("time", np.array(["a", "b", "c", "c", "b", "a"])), ... ), ... ) @@ -4601,7 +4601,7 @@ def prod( Data variables: da (time) float64 1.0 2.0 3.0 0.0 2.0 nan - >>> ds.resample(time="3ME").prod() + >>> ds.resample(time="3M").prod() Dimensions: (time: 3) Coordinates: @@ -4611,7 +4611,7 @@ def prod( Use ``skipna`` to control whether NaNs are ignored. - >>> ds.resample(time="3ME").prod(skipna=False) + >>> ds.resample(time="3M").prod(skipna=False) Dimensions: (time: 3) Coordinates: @@ -4621,7 +4621,7 @@ def prod( Specify ``min_count`` for finer control over when NaNs are ignored. - >>> ds.resample(time="3ME").prod(skipna=True, min_count=2) + >>> ds.resample(time="3M").prod(skipna=True, min_count=2) Dimensions: (time: 3) Coordinates: @@ -4724,7 +4724,7 @@ def sum( ... np.array([1, 2, 3, 0, 2, np.nan]), ... dims="time", ... coords=dict( - ... time=("time", pd.date_range("2001-01-01", freq="ME", periods=6)), + ... time=("time", pd.date_range("2001-01-01", freq="M", periods=6)), ... labels=("time", np.array(["a", "b", "c", "c", "b", "a"])), ... ), ... ) @@ -4738,7 +4738,7 @@ def sum( Data variables: da (time) float64 1.0 2.0 3.0 0.0 2.0 nan - >>> ds.resample(time="3ME").sum() + >>> ds.resample(time="3M").sum() Dimensions: (time: 3) Coordinates: @@ -4748,7 +4748,7 @@ def sum( Use ``skipna`` to control whether NaNs are ignored. - >>> ds.resample(time="3ME").sum(skipna=False) + >>> ds.resample(time="3M").sum(skipna=False) Dimensions: (time: 3) Coordinates: @@ -4758,7 +4758,7 @@ def sum( Specify ``min_count`` for finer control over when NaNs are ignored. - >>> ds.resample(time="3ME").sum(skipna=True, min_count=2) + >>> ds.resample(time="3M").sum(skipna=True, min_count=2) Dimensions: (time: 3) Coordinates: @@ -4858,7 +4858,7 @@ def std( ... np.array([1, 2, 3, 0, 2, np.nan]), ... dims="time", ... coords=dict( - ... time=("time", pd.date_range("2001-01-01", freq="ME", periods=6)), + ... time=("time", pd.date_range("2001-01-01", freq="M", periods=6)), ... labels=("time", np.array(["a", "b", "c", "c", "b", "a"])), ... ), ... ) @@ -4872,7 +4872,7 @@ def std( Data variables: da (time) float64 1.0 2.0 3.0 0.0 2.0 nan - >>> ds.resample(time="3ME").std() + >>> ds.resample(time="3M").std() Dimensions: (time: 3) Coordinates: @@ -4882,7 +4882,7 @@ def std( Use ``skipna`` to control whether NaNs are ignored. 
- >>> ds.resample(time="3ME").std(skipna=False) + >>> ds.resample(time="3M").std(skipna=False) Dimensions: (time: 3) Coordinates: @@ -4892,7 +4892,7 @@ def std( Specify ``ddof=1`` for an unbiased estimate. - >>> ds.resample(time="3ME").std(skipna=True, ddof=1) + >>> ds.resample(time="3M").std(skipna=True, ddof=1) Dimensions: (time: 3) Coordinates: @@ -4992,7 +4992,7 @@ def var( ... np.array([1, 2, 3, 0, 2, np.nan]), ... dims="time", ... coords=dict( - ... time=("time", pd.date_range("2001-01-01", freq="ME", periods=6)), + ... time=("time", pd.date_range("2001-01-01", freq="M", periods=6)), ... labels=("time", np.array(["a", "b", "c", "c", "b", "a"])), ... ), ... ) @@ -5006,7 +5006,7 @@ def var( Data variables: da (time) float64 1.0 2.0 3.0 0.0 2.0 nan - >>> ds.resample(time="3ME").var() + >>> ds.resample(time="3M").var() Dimensions: (time: 3) Coordinates: @@ -5016,7 +5016,7 @@ def var( Use ``skipna`` to control whether NaNs are ignored. - >>> ds.resample(time="3ME").var(skipna=False) + >>> ds.resample(time="3M").var(skipna=False) Dimensions: (time: 3) Coordinates: @@ -5026,7 +5026,7 @@ def var( Specify ``ddof=1`` for an unbiased estimate. - >>> ds.resample(time="3ME").var(skipna=True, ddof=1) + >>> ds.resample(time="3M").var(skipna=True, ddof=1) Dimensions: (time: 3) Coordinates: @@ -5122,7 +5122,7 @@ def median( ... np.array([1, 2, 3, 0, 2, np.nan]), ... dims="time", ... coords=dict( - ... time=("time", pd.date_range("2001-01-01", freq="ME", periods=6)), + ... time=("time", pd.date_range("2001-01-01", freq="M", periods=6)), ... labels=("time", np.array(["a", "b", "c", "c", "b", "a"])), ... ), ... ) @@ -5136,7 +5136,7 @@ def median( Data variables: da (time) float64 1.0 2.0 3.0 0.0 2.0 nan - >>> ds.resample(time="3ME").median() + >>> ds.resample(time="3M").median() Dimensions: (time: 3) Coordinates: @@ -5146,7 +5146,7 @@ def median( Use ``skipna`` to control whether NaNs are ignored. - >>> ds.resample(time="3ME").median(skipna=False) + >>> ds.resample(time="3M").median(skipna=False) Dimensions: (time: 3) Coordinates: @@ -5225,7 +5225,7 @@ def cumsum( ... np.array([1, 2, 3, 0, 2, np.nan]), ... dims="time", ... coords=dict( - ... time=("time", pd.date_range("2001-01-01", freq="ME", periods=6)), + ... time=("time", pd.date_range("2001-01-01", freq="M", periods=6)), ... labels=("time", np.array(["a", "b", "c", "c", "b", "a"])), ... ), ... ) @@ -5239,7 +5239,7 @@ def cumsum( Data variables: da (time) float64 1.0 2.0 3.0 0.0 2.0 nan - >>> ds.resample(time="3ME").cumsum() + >>> ds.resample(time="3M").cumsum() Dimensions: (time: 6) Dimensions without coordinates: time @@ -5248,7 +5248,7 @@ def cumsum( Use ``skipna`` to control whether NaNs are ignored. - >>> ds.resample(time="3ME").cumsum(skipna=False) + >>> ds.resample(time="3M").cumsum(skipna=False) Dimensions: (time: 6) Dimensions without coordinates: time @@ -5326,7 +5326,7 @@ def cumprod( ... np.array([1, 2, 3, 0, 2, np.nan]), ... dims="time", ... coords=dict( - ... time=("time", pd.date_range("2001-01-01", freq="ME", periods=6)), + ... time=("time", pd.date_range("2001-01-01", freq="M", periods=6)), ... labels=("time", np.array(["a", "b", "c", "c", "b", "a"])), ... ), ... ) @@ -5340,7 +5340,7 @@ def cumprod( Data variables: da (time) float64 1.0 2.0 3.0 0.0 2.0 nan - >>> ds.resample(time="3ME").cumprod() + >>> ds.resample(time="3M").cumprod() Dimensions: (time: 6) Dimensions without coordinates: time @@ -5349,7 +5349,7 @@ def cumprod( Use ``skipna`` to control whether NaNs are ignored. 
- >>> ds.resample(time="3ME").cumprod(skipna=False) + >>> ds.resample(time="3M").cumprod(skipna=False) Dimensions: (time: 6) Dimensions without coordinates: time @@ -5455,7 +5455,7 @@ def count( ... np.array([1, 2, 3, 0, 2, np.nan]), ... dims="time", ... coords=dict( - ... time=("time", pd.date_range("2001-01-01", freq="ME", periods=6)), + ... time=("time", pd.date_range("2001-01-01", freq="M", periods=6)), ... labels=("time", np.array(["a", "b", "c", "c", "b", "a"])), ... ), ... ) @@ -5546,7 +5546,7 @@ def all( ... np.array([True, True, True, True, True, False], dtype=bool), ... dims="time", ... coords=dict( - ... time=("time", pd.date_range("2001-01-01", freq="ME", periods=6)), + ... time=("time", pd.date_range("2001-01-01", freq="M", periods=6)), ... labels=("time", np.array(["a", "b", "c", "c", "b", "a"])), ... ), ... ) @@ -5637,7 +5637,7 @@ def any( ... np.array([True, True, True, True, True, False], dtype=bool), ... dims="time", ... coords=dict( - ... time=("time", pd.date_range("2001-01-01", freq="ME", periods=6)), + ... time=("time", pd.date_range("2001-01-01", freq="M", periods=6)), ... labels=("time", np.array(["a", "b", "c", "c", "b", "a"])), ... ), ... ) @@ -5734,7 +5734,7 @@ def max( ... np.array([1, 2, 3, 0, 2, np.nan]), ... dims="time", ... coords=dict( - ... time=("time", pd.date_range("2001-01-01", freq="ME", periods=6)), + ... time=("time", pd.date_range("2001-01-01", freq="M", periods=6)), ... labels=("time", np.array(["a", "b", "c", "c", "b", "a"])), ... ), ... ) @@ -5841,7 +5841,7 @@ def min( ... np.array([1, 2, 3, 0, 2, np.nan]), ... dims="time", ... coords=dict( - ... time=("time", pd.date_range("2001-01-01", freq="ME", periods=6)), + ... time=("time", pd.date_range("2001-01-01", freq="M", periods=6)), ... labels=("time", np.array(["a", "b", "c", "c", "b", "a"])), ... ), ... ) @@ -5950,7 +5950,7 @@ def mean( ... np.array([1, 2, 3, 0, 2, np.nan]), ... dims="time", ... coords=dict( - ... time=("time", pd.date_range("2001-01-01", freq="ME", periods=6)), + ... time=("time", pd.date_range("2001-01-01", freq="M", periods=6)), ... labels=("time", np.array(["a", "b", "c", "c", "b", "a"])), ... ), ... ) @@ -6066,7 +6066,7 @@ def prod( ... np.array([1, 2, 3, 0, 2, np.nan]), ... dims="time", ... coords=dict( - ... time=("time", pd.date_range("2001-01-01", freq="ME", periods=6)), + ... time=("time", pd.date_range("2001-01-01", freq="M", periods=6)), ... labels=("time", np.array(["a", "b", "c", "c", "b", "a"])), ... ), ... ) @@ -6192,7 +6192,7 @@ def sum( ... np.array([1, 2, 3, 0, 2, np.nan]), ... dims="time", ... coords=dict( - ... time=("time", pd.date_range("2001-01-01", freq="ME", periods=6)), + ... time=("time", pd.date_range("2001-01-01", freq="M", periods=6)), ... labels=("time", np.array(["a", "b", "c", "c", "b", "a"])), ... ), ... ) @@ -6315,7 +6315,7 @@ def std( ... np.array([1, 2, 3, 0, 2, np.nan]), ... dims="time", ... coords=dict( - ... time=("time", pd.date_range("2001-01-01", freq="ME", periods=6)), + ... time=("time", pd.date_range("2001-01-01", freq="M", periods=6)), ... labels=("time", np.array(["a", "b", "c", "c", "b", "a"])), ... ), ... ) @@ -6438,7 +6438,7 @@ def var( ... np.array([1, 2, 3, 0, 2, np.nan]), ... dims="time", ... coords=dict( - ... time=("time", pd.date_range("2001-01-01", freq="ME", periods=6)), + ... time=("time", pd.date_range("2001-01-01", freq="M", periods=6)), ... labels=("time", np.array(["a", "b", "c", "c", "b", "a"])), ... ), ... ) @@ -6557,7 +6557,7 @@ def median( ... np.array([1, 2, 3, 0, 2, np.nan]), ... dims="time", ... 
coords=dict( - ... time=("time", pd.date_range("2001-01-01", freq="ME", periods=6)), + ... time=("time", pd.date_range("2001-01-01", freq="M", periods=6)), ... labels=("time", np.array(["a", "b", "c", "c", "b", "a"])), ... ), ... ) @@ -6652,7 +6652,7 @@ def cumsum( ... np.array([1, 2, 3, 0, 2, np.nan]), ... dims="time", ... coords=dict( - ... time=("time", pd.date_range("2001-01-01", freq="ME", periods=6)), + ... time=("time", pd.date_range("2001-01-01", freq="M", periods=6)), ... labels=("time", np.array(["a", "b", "c", "c", "b", "a"])), ... ), ... ) @@ -6749,7 +6749,7 @@ def cumprod( ... np.array([1, 2, 3, 0, 2, np.nan]), ... dims="time", ... coords=dict( - ... time=("time", pd.date_range("2001-01-01", freq="ME", periods=6)), + ... time=("time", pd.date_range("2001-01-01", freq="M", periods=6)), ... labels=("time", np.array(["a", "b", "c", "c", "b", "a"])), ... ), ... ) @@ -6874,7 +6874,7 @@ def count( ... np.array([1, 2, 3, 0, 2, np.nan]), ... dims="time", ... coords=dict( - ... time=("time", pd.date_range("2001-01-01", freq="ME", periods=6)), + ... time=("time", pd.date_range("2001-01-01", freq="M", periods=6)), ... labels=("time", np.array(["a", "b", "c", "c", "b", "a"])), ... ), ... ) @@ -6885,7 +6885,7 @@ def count( * time (time) datetime64[ns] 2001-01-31 2001-02-28 ... 2001-06-30 labels (time) >> da.resample(time="3ME").count() + >>> da.resample(time="3M").count() array([1, 3, 1]) Coordinates: @@ -6965,7 +6965,7 @@ def all( ... np.array([True, True, True, True, True, False], dtype=bool), ... dims="time", ... coords=dict( - ... time=("time", pd.date_range("2001-01-01", freq="ME", periods=6)), + ... time=("time", pd.date_range("2001-01-01", freq="M", periods=6)), ... labels=("time", np.array(["a", "b", "c", "c", "b", "a"])), ... ), ... ) @@ -6976,7 +6976,7 @@ def all( * time (time) datetime64[ns] 2001-01-31 2001-02-28 ... 2001-06-30 labels (time) >> da.resample(time="3ME").all() + >>> da.resample(time="3M").all() array([ True, True, False]) Coordinates: @@ -7056,7 +7056,7 @@ def any( ... np.array([True, True, True, True, True, False], dtype=bool), ... dims="time", ... coords=dict( - ... time=("time", pd.date_range("2001-01-01", freq="ME", periods=6)), + ... time=("time", pd.date_range("2001-01-01", freq="M", periods=6)), ... labels=("time", np.array(["a", "b", "c", "c", "b", "a"])), ... ), ... ) @@ -7067,7 +7067,7 @@ def any( * time (time) datetime64[ns] 2001-01-31 2001-02-28 ... 2001-06-30 labels (time) >> da.resample(time="3ME").any() + >>> da.resample(time="3M").any() array([ True, True, True]) Coordinates: @@ -7153,7 +7153,7 @@ def max( ... np.array([1, 2, 3, 0, 2, np.nan]), ... dims="time", ... coords=dict( - ... time=("time", pd.date_range("2001-01-01", freq="ME", periods=6)), + ... time=("time", pd.date_range("2001-01-01", freq="M", periods=6)), ... labels=("time", np.array(["a", "b", "c", "c", "b", "a"])), ... ), ... ) @@ -7164,7 +7164,7 @@ def max( * time (time) datetime64[ns] 2001-01-31 2001-02-28 ... 2001-06-30 labels (time) >> da.resample(time="3ME").max() + >>> da.resample(time="3M").max() array([1., 3., 2.]) Coordinates: @@ -7172,7 +7172,7 @@ def max( Use ``skipna`` to control whether NaNs are ignored. - >>> da.resample(time="3ME").max(skipna=False) + >>> da.resample(time="3M").max(skipna=False) array([ 1., 3., nan]) Coordinates: @@ -7260,7 +7260,7 @@ def min( ... np.array([1, 2, 3, 0, 2, np.nan]), ... dims="time", ... coords=dict( - ... time=("time", pd.date_range("2001-01-01", freq="ME", periods=6)), + ... 
time=("time", pd.date_range("2001-01-01", freq="M", periods=6)), ... labels=("time", np.array(["a", "b", "c", "c", "b", "a"])), ... ), ... ) @@ -7271,7 +7271,7 @@ def min( * time (time) datetime64[ns] 2001-01-31 2001-02-28 ... 2001-06-30 labels (time) >> da.resample(time="3ME").min() + >>> da.resample(time="3M").min() array([1., 0., 2.]) Coordinates: @@ -7279,7 +7279,7 @@ def min( Use ``skipna`` to control whether NaNs are ignored. - >>> da.resample(time="3ME").min(skipna=False) + >>> da.resample(time="3M").min(skipna=False) array([ 1., 0., nan]) Coordinates: @@ -7369,7 +7369,7 @@ def mean( ... np.array([1, 2, 3, 0, 2, np.nan]), ... dims="time", ... coords=dict( - ... time=("time", pd.date_range("2001-01-01", freq="ME", periods=6)), + ... time=("time", pd.date_range("2001-01-01", freq="M", periods=6)), ... labels=("time", np.array(["a", "b", "c", "c", "b", "a"])), ... ), ... ) @@ -7380,7 +7380,7 @@ def mean( * time (time) datetime64[ns] 2001-01-31 2001-02-28 ... 2001-06-30 labels (time) >> da.resample(time="3ME").mean() + >>> da.resample(time="3M").mean() array([1. , 1.66666667, 2. ]) Coordinates: @@ -7388,7 +7388,7 @@ def mean( Use ``skipna`` to control whether NaNs are ignored. - >>> da.resample(time="3ME").mean(skipna=False) + >>> da.resample(time="3M").mean(skipna=False) array([1. , 1.66666667, nan]) Coordinates: @@ -7485,7 +7485,7 @@ def prod( ... np.array([1, 2, 3, 0, 2, np.nan]), ... dims="time", ... coords=dict( - ... time=("time", pd.date_range("2001-01-01", freq="ME", periods=6)), + ... time=("time", pd.date_range("2001-01-01", freq="M", periods=6)), ... labels=("time", np.array(["a", "b", "c", "c", "b", "a"])), ... ), ... ) @@ -7496,7 +7496,7 @@ def prod( * time (time) datetime64[ns] 2001-01-31 2001-02-28 ... 2001-06-30 labels (time) >> da.resample(time="3ME").prod() + >>> da.resample(time="3M").prod() array([1., 0., 2.]) Coordinates: @@ -7504,7 +7504,7 @@ def prod( Use ``skipna`` to control whether NaNs are ignored. - >>> da.resample(time="3ME").prod(skipna=False) + >>> da.resample(time="3M").prod(skipna=False) array([ 1., 0., nan]) Coordinates: @@ -7512,7 +7512,7 @@ def prod( Specify ``min_count`` for finer control over when NaNs are ignored. - >>> da.resample(time="3ME").prod(skipna=True, min_count=2) + >>> da.resample(time="3M").prod(skipna=True, min_count=2) array([nan, 0., nan]) Coordinates: @@ -7611,7 +7611,7 @@ def sum( ... np.array([1, 2, 3, 0, 2, np.nan]), ... dims="time", ... coords=dict( - ... time=("time", pd.date_range("2001-01-01", freq="ME", periods=6)), + ... time=("time", pd.date_range("2001-01-01", freq="M", periods=6)), ... labels=("time", np.array(["a", "b", "c", "c", "b", "a"])), ... ), ... ) @@ -7622,7 +7622,7 @@ def sum( * time (time) datetime64[ns] 2001-01-31 2001-02-28 ... 2001-06-30 labels (time) >> da.resample(time="3ME").sum() + >>> da.resample(time="3M").sum() array([1., 5., 2.]) Coordinates: @@ -7630,7 +7630,7 @@ def sum( Use ``skipna`` to control whether NaNs are ignored. - >>> da.resample(time="3ME").sum(skipna=False) + >>> da.resample(time="3M").sum(skipna=False) array([ 1., 5., nan]) Coordinates: @@ -7638,7 +7638,7 @@ def sum( Specify ``min_count`` for finer control over when NaNs are ignored. - >>> da.resample(time="3ME").sum(skipna=True, min_count=2) + >>> da.resample(time="3M").sum(skipna=True, min_count=2) array([nan, 5., nan]) Coordinates: @@ -7734,7 +7734,7 @@ def std( ... np.array([1, 2, 3, 0, 2, np.nan]), ... dims="time", ... coords=dict( - ... time=("time", pd.date_range("2001-01-01", freq="ME", periods=6)), + ... 
time=("time", pd.date_range("2001-01-01", freq="M", periods=6)), ... labels=("time", np.array(["a", "b", "c", "c", "b", "a"])), ... ), ... ) @@ -7745,7 +7745,7 @@ def std( * time (time) datetime64[ns] 2001-01-31 2001-02-28 ... 2001-06-30 labels (time) >> da.resample(time="3ME").std() + >>> da.resample(time="3M").std() array([0. , 1.24721913, 0. ]) Coordinates: @@ -7753,7 +7753,7 @@ def std( Use ``skipna`` to control whether NaNs are ignored. - >>> da.resample(time="3ME").std(skipna=False) + >>> da.resample(time="3M").std(skipna=False) array([0. , 1.24721913, nan]) Coordinates: @@ -7761,7 +7761,7 @@ def std( Specify ``ddof=1`` for an unbiased estimate. - >>> da.resample(time="3ME").std(skipna=True, ddof=1) + >>> da.resample(time="3M").std(skipna=True, ddof=1) array([ nan, 1.52752523, nan]) Coordinates: @@ -7857,7 +7857,7 @@ def var( ... np.array([1, 2, 3, 0, 2, np.nan]), ... dims="time", ... coords=dict( - ... time=("time", pd.date_range("2001-01-01", freq="ME", periods=6)), + ... time=("time", pd.date_range("2001-01-01", freq="M", periods=6)), ... labels=("time", np.array(["a", "b", "c", "c", "b", "a"])), ... ), ... ) @@ -7868,7 +7868,7 @@ def var( * time (time) datetime64[ns] 2001-01-31 2001-02-28 ... 2001-06-30 labels (time) >> da.resample(time="3ME").var() + >>> da.resample(time="3M").var() array([0. , 1.55555556, 0. ]) Coordinates: @@ -7876,7 +7876,7 @@ def var( Use ``skipna`` to control whether NaNs are ignored. - >>> da.resample(time="3ME").var(skipna=False) + >>> da.resample(time="3M").var(skipna=False) array([0. , 1.55555556, nan]) Coordinates: @@ -7884,7 +7884,7 @@ def var( Specify ``ddof=1`` for an unbiased estimate. - >>> da.resample(time="3ME").var(skipna=True, ddof=1) + >>> da.resample(time="3M").var(skipna=True, ddof=1) array([ nan, 2.33333333, nan]) Coordinates: @@ -7976,7 +7976,7 @@ def median( ... np.array([1, 2, 3, 0, 2, np.nan]), ... dims="time", ... coords=dict( - ... time=("time", pd.date_range("2001-01-01", freq="ME", periods=6)), + ... time=("time", pd.date_range("2001-01-01", freq="M", periods=6)), ... labels=("time", np.array(["a", "b", "c", "c", "b", "a"])), ... ), ... ) @@ -7987,7 +7987,7 @@ def median( * time (time) datetime64[ns] 2001-01-31 2001-02-28 ... 2001-06-30 labels (time) >> da.resample(time="3ME").median() + >>> da.resample(time="3M").median() array([1., 2., 2.]) Coordinates: @@ -7995,7 +7995,7 @@ def median( Use ``skipna`` to control whether NaNs are ignored. - >>> da.resample(time="3ME").median(skipna=False) + >>> da.resample(time="3M").median(skipna=False) array([ 1., 2., nan]) Coordinates: @@ -8071,7 +8071,7 @@ def cumsum( ... np.array([1, 2, 3, 0, 2, np.nan]), ... dims="time", ... coords=dict( - ... time=("time", pd.date_range("2001-01-01", freq="ME", periods=6)), + ... time=("time", pd.date_range("2001-01-01", freq="M", periods=6)), ... labels=("time", np.array(["a", "b", "c", "c", "b", "a"])), ... ), ... ) @@ -8082,7 +8082,7 @@ def cumsum( * time (time) datetime64[ns] 2001-01-31 2001-02-28 ... 2001-06-30 labels (time) >> da.resample(time="3ME").cumsum() + >>> da.resample(time="3M").cumsum() array([1., 2., 5., 5., 2., 2.]) Coordinates: @@ -8091,7 +8091,7 @@ def cumsum( Use ``skipna`` to control whether NaNs are ignored. - >>> da.resample(time="3ME").cumsum(skipna=False) + >>> da.resample(time="3M").cumsum(skipna=False) array([ 1., 2., 5., 5., 2., nan]) Coordinates: @@ -8168,7 +8168,7 @@ def cumprod( ... np.array([1, 2, 3, 0, 2, np.nan]), ... dims="time", ... coords=dict( - ... 
time=("time", pd.date_range("2001-01-01", freq="ME", periods=6)), + ... time=("time", pd.date_range("2001-01-01", freq="M", periods=6)), ... labels=("time", np.array(["a", "b", "c", "c", "b", "a"])), ... ), ... ) @@ -8179,7 +8179,7 @@ def cumprod( * time (time) datetime64[ns] 2001-01-31 2001-02-28 ... 2001-06-30 labels (time) >> da.resample(time="3ME").cumprod() + >>> da.resample(time="3M").cumprod() array([1., 2., 6., 0., 2., 2.]) Coordinates: @@ -8188,7 +8188,7 @@ def cumprod( Use ``skipna`` to control whether NaNs are ignored. - >>> da.resample(time="3ME").cumprod(skipna=False) + >>> da.resample(time="3M").cumprod(skipna=False) array([ 1., 2., 6., 0., 2., nan]) Coordinates: From b445f47a6dfe9462d62932168ac20239fd41af19 Mon Sep 17 00:00:00 2001 From: nameloCmaS Date: Mon, 22 Jan 2024 21:51:44 +0000 Subject: [PATCH 10/13] Update whats-new.rst Fixed hangover from clashing commits. --- doc/whats-new.rst | 1 - 1 file changed, 1 deletion(-) diff --git a/doc/whats-new.rst b/doc/whats-new.rst index be4190551ff..f8627c6f572 100644 --- a/doc/whats-new.rst +++ b/doc/whats-new.rst @@ -7271,7 +7271,6 @@ Breaking changes such `'DJF'`: .. ipython:: python - :okwarning: In[92]: ds = xray.Dataset({"t": pd.date_range("2000-01-01", periods=12, freq="M")}) From 31b5ed973cf637ee1063718947a5e9e5e53a2576 Mon Sep 17 00:00:00 2001 From: nameloCmaS Date: Mon, 22 Jan 2024 21:55:35 +0000 Subject: [PATCH 11/13] Update generate_aggregations.py --- xarray/util/generate_aggregations.py | 8 ++++---- 1 file changed, 4 insertions(+), 4 deletions(-) diff --git a/xarray/util/generate_aggregations.py b/xarray/util/generate_aggregations.py index a8db6124499..e436cd42335 100644 --- a/xarray/util/generate_aggregations.py +++ b/xarray/util/generate_aggregations.py @@ -532,7 +532,7 @@ def generate_code(self, method, has_keep_attrs): >>> da = xr.DataArray({example_array}, ... dims="time", ... coords=dict( - ... time=("time", pd.date_range("2001-01-01", freq="M", periods=6)), + ... time=("time", pd.date_range("2001-01-01", freq="ME", periods=6)), ... labels=("time", np.array(["a", "b", "c", "c", "b", "a"])), ... ), ... ) @@ -547,7 +547,7 @@ def generate_code(self, method, has_keep_attrs): >>> da = xr.DataArray({example_array}, ... dims="time", ... coords=dict( - ... time=("time", pd.date_range("2001-01-01", freq="M", periods=6)), + ... time=("time", pd.date_range("2001-01-01", freq="ME", periods=6)), ... labels=("time", np.array(["a", "b", "c", "c", "b", "a"])), ... ), ... 
)""", @@ -589,7 +589,7 @@ def generate_code(self, method, has_keep_attrs): methods=AGGREGATION_METHODS, docref="resampling", docref_description="resampling operations", - example_call_preamble='.resample(time="3M")', + example_call_preamble='.resample(time="3ME")', definition_preamble=RESAMPLE_PREAMBLE, notes=_FLOX_RESAMPLE_NOTES, ) @@ -609,7 +609,7 @@ def generate_code(self, method, has_keep_attrs): methods=AGGREGATION_METHODS, docref="resampling", docref_description="resampling operations", - example_call_preamble='.resample(time="3M")', + example_call_preamble='.resample(time="3ME")', definition_preamble=RESAMPLE_PREAMBLE, notes=_FLOX_RESAMPLE_NOTES, ) From 5cdf81d46637cb43e1d2348f29dcd19e2b46003a Mon Sep 17 00:00:00 2001 From: nameloCmaS Date: Mon, 22 Jan 2024 22:43:03 +0000 Subject: [PATCH 12/13] Update _aggregations.py Updated with xarray/util/generate_aggregations.py --- xarray/core/_aggregations.py | 264 +++++++++++++++++------------------ 1 file changed, 132 insertions(+), 132 deletions(-) diff --git a/xarray/core/_aggregations.py b/xarray/core/_aggregations.py index 0d4b4413b7c..e214c2c7c5a 100644 --- a/xarray/core/_aggregations.py +++ b/xarray/core/_aggregations.py @@ -77,7 +77,7 @@ def count( ... np.array([1, 2, 3, 0, 2, np.nan]), ... dims="time", ... coords=dict( - ... time=("time", pd.date_range("2001-01-01", freq="M", periods=6)), + ... time=("time", pd.date_range("2001-01-01", freq="ME", periods=6)), ... labels=("time", np.array(["a", "b", "c", "c", "b", "a"])), ... ), ... ) @@ -149,7 +149,7 @@ def all( ... np.array([True, True, True, True, True, False], dtype=bool), ... dims="time", ... coords=dict( - ... time=("time", pd.date_range("2001-01-01", freq="M", periods=6)), + ... time=("time", pd.date_range("2001-01-01", freq="ME", periods=6)), ... labels=("time", np.array(["a", "b", "c", "c", "b", "a"])), ... ), ... ) @@ -221,7 +221,7 @@ def any( ... np.array([True, True, True, True, True, False], dtype=bool), ... dims="time", ... coords=dict( - ... time=("time", pd.date_range("2001-01-01", freq="M", periods=6)), + ... time=("time", pd.date_range("2001-01-01", freq="ME", periods=6)), ... labels=("time", np.array(["a", "b", "c", "c", "b", "a"])), ... ), ... ) @@ -299,7 +299,7 @@ def max( ... np.array([1, 2, 3, 0, 2, np.nan]), ... dims="time", ... coords=dict( - ... time=("time", pd.date_range("2001-01-01", freq="M", periods=6)), + ... time=("time", pd.date_range("2001-01-01", freq="ME", periods=6)), ... labels=("time", np.array(["a", "b", "c", "c", "b", "a"])), ... ), ... ) @@ -386,7 +386,7 @@ def min( ... np.array([1, 2, 3, 0, 2, np.nan]), ... dims="time", ... coords=dict( - ... time=("time", pd.date_range("2001-01-01", freq="M", periods=6)), + ... time=("time", pd.date_range("2001-01-01", freq="ME", periods=6)), ... labels=("time", np.array(["a", "b", "c", "c", "b", "a"])), ... ), ... ) @@ -477,7 +477,7 @@ def mean( ... np.array([1, 2, 3, 0, 2, np.nan]), ... dims="time", ... coords=dict( - ... time=("time", pd.date_range("2001-01-01", freq="M", periods=6)), + ... time=("time", pd.date_range("2001-01-01", freq="ME", periods=6)), ... labels=("time", np.array(["a", "b", "c", "c", "b", "a"])), ... ), ... ) @@ -575,7 +575,7 @@ def prod( ... np.array([1, 2, 3, 0, 2, np.nan]), ... dims="time", ... coords=dict( - ... time=("time", pd.date_range("2001-01-01", freq="M", periods=6)), + ... time=("time", pd.date_range("2001-01-01", freq="ME", periods=6)), ... labels=("time", np.array(["a", "b", "c", "c", "b", "a"])), ... ), ... ) @@ -682,7 +682,7 @@ def sum( ... 
np.array([1, 2, 3, 0, 2, np.nan]), ... dims="time", ... coords=dict( - ... time=("time", pd.date_range("2001-01-01", freq="M", periods=6)), + ... time=("time", pd.date_range("2001-01-01", freq="ME", periods=6)), ... labels=("time", np.array(["a", "b", "c", "c", "b", "a"])), ... ), ... ) @@ -786,7 +786,7 @@ def std( ... np.array([1, 2, 3, 0, 2, np.nan]), ... dims="time", ... coords=dict( - ... time=("time", pd.date_range("2001-01-01", freq="M", periods=6)), + ... time=("time", pd.date_range("2001-01-01", freq="ME", periods=6)), ... labels=("time", np.array(["a", "b", "c", "c", "b", "a"])), ... ), ... ) @@ -890,7 +890,7 @@ def var( ... np.array([1, 2, 3, 0, 2, np.nan]), ... dims="time", ... coords=dict( - ... time=("time", pd.date_range("2001-01-01", freq="M", periods=6)), + ... time=("time", pd.date_range("2001-01-01", freq="ME", periods=6)), ... labels=("time", np.array(["a", "b", "c", "c", "b", "a"])), ... ), ... ) @@ -990,7 +990,7 @@ def median( ... np.array([1, 2, 3, 0, 2, np.nan]), ... dims="time", ... coords=dict( - ... time=("time", pd.date_range("2001-01-01", freq="M", periods=6)), + ... time=("time", pd.date_range("2001-01-01", freq="ME", periods=6)), ... labels=("time", np.array(["a", "b", "c", "c", "b", "a"])), ... ), ... ) @@ -1081,7 +1081,7 @@ def cumsum( ... np.array([1, 2, 3, 0, 2, np.nan]), ... dims="time", ... coords=dict( - ... time=("time", pd.date_range("2001-01-01", freq="M", periods=6)), + ... time=("time", pd.date_range("2001-01-01", freq="ME", periods=6)), ... labels=("time", np.array(["a", "b", "c", "c", "b", "a"])), ... ), ... ) @@ -1174,7 +1174,7 @@ def cumprod( ... np.array([1, 2, 3, 0, 2, np.nan]), ... dims="time", ... coords=dict( - ... time=("time", pd.date_range("2001-01-01", freq="M", periods=6)), + ... time=("time", pd.date_range("2001-01-01", freq="ME", periods=6)), ... labels=("time", np.array(["a", "b", "c", "c", "b", "a"])), ... ), ... ) @@ -1273,7 +1273,7 @@ def count( ... np.array([1, 2, 3, 0, 2, np.nan]), ... dims="time", ... coords=dict( - ... time=("time", pd.date_range("2001-01-01", freq="M", periods=6)), + ... time=("time", pd.date_range("2001-01-01", freq="ME", periods=6)), ... labels=("time", np.array(["a", "b", "c", "c", "b", "a"])), ... ), ... ) @@ -1339,7 +1339,7 @@ def all( ... np.array([True, True, True, True, True, False], dtype=bool), ... dims="time", ... coords=dict( - ... time=("time", pd.date_range("2001-01-01", freq="M", periods=6)), + ... time=("time", pd.date_range("2001-01-01", freq="ME", periods=6)), ... labels=("time", np.array(["a", "b", "c", "c", "b", "a"])), ... ), ... ) @@ -1405,7 +1405,7 @@ def any( ... np.array([True, True, True, True, True, False], dtype=bool), ... dims="time", ... coords=dict( - ... time=("time", pd.date_range("2001-01-01", freq="M", periods=6)), + ... time=("time", pd.date_range("2001-01-01", freq="ME", periods=6)), ... labels=("time", np.array(["a", "b", "c", "c", "b", "a"])), ... ), ... ) @@ -1477,7 +1477,7 @@ def max( ... np.array([1, 2, 3, 0, 2, np.nan]), ... dims="time", ... coords=dict( - ... time=("time", pd.date_range("2001-01-01", freq="M", periods=6)), + ... time=("time", pd.date_range("2001-01-01", freq="ME", periods=6)), ... labels=("time", np.array(["a", "b", "c", "c", "b", "a"])), ... ), ... ) @@ -1556,7 +1556,7 @@ def min( ... np.array([1, 2, 3, 0, 2, np.nan]), ... dims="time", ... coords=dict( - ... time=("time", pd.date_range("2001-01-01", freq="M", periods=6)), + ... time=("time", pd.date_range("2001-01-01", freq="ME", periods=6)), ... 
labels=("time", np.array(["a", "b", "c", "c", "b", "a"])), ... ), ... ) @@ -1639,7 +1639,7 @@ def mean( ... np.array([1, 2, 3, 0, 2, np.nan]), ... dims="time", ... coords=dict( - ... time=("time", pd.date_range("2001-01-01", freq="M", periods=6)), + ... time=("time", pd.date_range("2001-01-01", freq="ME", periods=6)), ... labels=("time", np.array(["a", "b", "c", "c", "b", "a"])), ... ), ... ) @@ -1729,7 +1729,7 @@ def prod( ... np.array([1, 2, 3, 0, 2, np.nan]), ... dims="time", ... coords=dict( - ... time=("time", pd.date_range("2001-01-01", freq="M", periods=6)), + ... time=("time", pd.date_range("2001-01-01", freq="ME", periods=6)), ... labels=("time", np.array(["a", "b", "c", "c", "b", "a"])), ... ), ... ) @@ -1826,7 +1826,7 @@ def sum( ... np.array([1, 2, 3, 0, 2, np.nan]), ... dims="time", ... coords=dict( - ... time=("time", pd.date_range("2001-01-01", freq="M", periods=6)), + ... time=("time", pd.date_range("2001-01-01", freq="ME", periods=6)), ... labels=("time", np.array(["a", "b", "c", "c", "b", "a"])), ... ), ... ) @@ -1920,7 +1920,7 @@ def std( ... np.array([1, 2, 3, 0, 2, np.nan]), ... dims="time", ... coords=dict( - ... time=("time", pd.date_range("2001-01-01", freq="M", periods=6)), + ... time=("time", pd.date_range("2001-01-01", freq="ME", periods=6)), ... labels=("time", np.array(["a", "b", "c", "c", "b", "a"])), ... ), ... ) @@ -2014,7 +2014,7 @@ def var( ... np.array([1, 2, 3, 0, 2, np.nan]), ... dims="time", ... coords=dict( - ... time=("time", pd.date_range("2001-01-01", freq="M", periods=6)), + ... time=("time", pd.date_range("2001-01-01", freq="ME", periods=6)), ... labels=("time", np.array(["a", "b", "c", "c", "b", "a"])), ... ), ... ) @@ -2104,7 +2104,7 @@ def median( ... np.array([1, 2, 3, 0, 2, np.nan]), ... dims="time", ... coords=dict( - ... time=("time", pd.date_range("2001-01-01", freq="M", periods=6)), + ... time=("time", pd.date_range("2001-01-01", freq="ME", periods=6)), ... labels=("time", np.array(["a", "b", "c", "c", "b", "a"])), ... ), ... ) @@ -2187,7 +2187,7 @@ def cumsum( ... np.array([1, 2, 3, 0, 2, np.nan]), ... dims="time", ... coords=dict( - ... time=("time", pd.date_range("2001-01-01", freq="M", periods=6)), + ... time=("time", pd.date_range("2001-01-01", freq="ME", periods=6)), ... labels=("time", np.array(["a", "b", "c", "c", "b", "a"])), ... ), ... ) @@ -2276,7 +2276,7 @@ def cumprod( ... np.array([1, 2, 3, 0, 2, np.nan]), ... dims="time", ... coords=dict( - ... time=("time", pd.date_range("2001-01-01", freq="M", periods=6)), + ... time=("time", pd.date_range("2001-01-01", freq="ME", periods=6)), ... labels=("time", np.array(["a", "b", "c", "c", "b", "a"])), ... ), ... ) @@ -2401,7 +2401,7 @@ def count( ... np.array([1, 2, 3, 0, 2, np.nan]), ... dims="time", ... coords=dict( - ... time=("time", pd.date_range("2001-01-01", freq="M", periods=6)), + ... time=("time", pd.date_range("2001-01-01", freq="ME", periods=6)), ... labels=("time", np.array(["a", "b", "c", "c", "b", "a"])), ... ), ... ) @@ -2499,7 +2499,7 @@ def all( ... np.array([True, True, True, True, True, False], dtype=bool), ... dims="time", ... coords=dict( - ... time=("time", pd.date_range("2001-01-01", freq="M", periods=6)), + ... time=("time", pd.date_range("2001-01-01", freq="ME", periods=6)), ... labels=("time", np.array(["a", "b", "c", "c", "b", "a"])), ... ), ... ) @@ -2597,7 +2597,7 @@ def any( ... np.array([True, True, True, True, True, False], dtype=bool), ... dims="time", ... coords=dict( - ... time=("time", pd.date_range("2001-01-01", freq="M", periods=6)), + ... 
time=("time", pd.date_range("2001-01-01", freq="ME", periods=6)), ... labels=("time", np.array(["a", "b", "c", "c", "b", "a"])), ... ), ... ) @@ -2701,7 +2701,7 @@ def max( ... np.array([1, 2, 3, 0, 2, np.nan]), ... dims="time", ... coords=dict( - ... time=("time", pd.date_range("2001-01-01", freq="M", periods=6)), + ... time=("time", pd.date_range("2001-01-01", freq="ME", periods=6)), ... labels=("time", np.array(["a", "b", "c", "c", "b", "a"])), ... ), ... ) @@ -2817,7 +2817,7 @@ def min( ... np.array([1, 2, 3, 0, 2, np.nan]), ... dims="time", ... coords=dict( - ... time=("time", pd.date_range("2001-01-01", freq="M", periods=6)), + ... time=("time", pd.date_range("2001-01-01", freq="ME", periods=6)), ... labels=("time", np.array(["a", "b", "c", "c", "b", "a"])), ... ), ... ) @@ -2935,7 +2935,7 @@ def mean( ... np.array([1, 2, 3, 0, 2, np.nan]), ... dims="time", ... coords=dict( - ... time=("time", pd.date_range("2001-01-01", freq="M", periods=6)), + ... time=("time", pd.date_range("2001-01-01", freq="ME", periods=6)), ... labels=("time", np.array(["a", "b", "c", "c", "b", "a"])), ... ), ... ) @@ -3060,7 +3060,7 @@ def prod( ... np.array([1, 2, 3, 0, 2, np.nan]), ... dims="time", ... coords=dict( - ... time=("time", pd.date_range("2001-01-01", freq="M", periods=6)), + ... time=("time", pd.date_range("2001-01-01", freq="ME", periods=6)), ... labels=("time", np.array(["a", "b", "c", "c", "b", "a"])), ... ), ... ) @@ -3197,7 +3197,7 @@ def sum( ... np.array([1, 2, 3, 0, 2, np.nan]), ... dims="time", ... coords=dict( - ... time=("time", pd.date_range("2001-01-01", freq="M", periods=6)), + ... time=("time", pd.date_range("2001-01-01", freq="ME", periods=6)), ... labels=("time", np.array(["a", "b", "c", "c", "b", "a"])), ... ), ... ) @@ -3331,7 +3331,7 @@ def std( ... np.array([1, 2, 3, 0, 2, np.nan]), ... dims="time", ... coords=dict( - ... time=("time", pd.date_range("2001-01-01", freq="M", periods=6)), + ... time=("time", pd.date_range("2001-01-01", freq="ME", periods=6)), ... labels=("time", np.array(["a", "b", "c", "c", "b", "a"])), ... ), ... ) @@ -3465,7 +3465,7 @@ def var( ... np.array([1, 2, 3, 0, 2, np.nan]), ... dims="time", ... coords=dict( - ... time=("time", pd.date_range("2001-01-01", freq="M", periods=6)), + ... time=("time", pd.date_range("2001-01-01", freq="ME", periods=6)), ... labels=("time", np.array(["a", "b", "c", "c", "b", "a"])), ... ), ... ) @@ -3595,7 +3595,7 @@ def median( ... np.array([1, 2, 3, 0, 2, np.nan]), ... dims="time", ... coords=dict( - ... time=("time", pd.date_range("2001-01-01", freq="M", periods=6)), + ... time=("time", pd.date_range("2001-01-01", freq="ME", periods=6)), ... labels=("time", np.array(["a", "b", "c", "c", "b", "a"])), ... ), ... ) @@ -3698,7 +3698,7 @@ def cumsum( ... np.array([1, 2, 3, 0, 2, np.nan]), ... dims="time", ... coords=dict( - ... time=("time", pd.date_range("2001-01-01", freq="M", periods=6)), + ... time=("time", pd.date_range("2001-01-01", freq="ME", periods=6)), ... labels=("time", np.array(["a", "b", "c", "c", "b", "a"])), ... ), ... ) @@ -3799,7 +3799,7 @@ def cumprod( ... np.array([1, 2, 3, 0, 2, np.nan]), ... dims="time", ... coords=dict( - ... time=("time", pd.date_range("2001-01-01", freq="M", periods=6)), + ... time=("time", pd.date_range("2001-01-01", freq="ME", periods=6)), ... labels=("time", np.array(["a", "b", "c", "c", "b", "a"])), ... ), ... ) @@ -3928,7 +3928,7 @@ def count( ... np.array([1, 2, 3, 0, 2, np.nan]), ... dims="time", ... coords=dict( - ... 
time=("time", pd.date_range("2001-01-01", freq="M", periods=6)), + ... time=("time", pd.date_range("2001-01-01", freq="ME", periods=6)), ... labels=("time", np.array(["a", "b", "c", "c", "b", "a"])), ... ), ... ) @@ -3942,7 +3942,7 @@ def count( Data variables: da (time) float64 1.0 2.0 3.0 0.0 2.0 nan - >>> ds.resample(time="3M").count() + >>> ds.resample(time="3ME").count() Dimensions: (time: 3) Coordinates: @@ -4026,7 +4026,7 @@ def all( ... np.array([True, True, True, True, True, False], dtype=bool), ... dims="time", ... coords=dict( - ... time=("time", pd.date_range("2001-01-01", freq="M", periods=6)), + ... time=("time", pd.date_range("2001-01-01", freq="ME", periods=6)), ... labels=("time", np.array(["a", "b", "c", "c", "b", "a"])), ... ), ... ) @@ -4040,7 +4040,7 @@ def all( Data variables: da (time) bool True True True True True False - >>> ds.resample(time="3M").all() + >>> ds.resample(time="3ME").all() Dimensions: (time: 3) Coordinates: @@ -4124,7 +4124,7 @@ def any( ... np.array([True, True, True, True, True, False], dtype=bool), ... dims="time", ... coords=dict( - ... time=("time", pd.date_range("2001-01-01", freq="M", periods=6)), + ... time=("time", pd.date_range("2001-01-01", freq="ME", periods=6)), ... labels=("time", np.array(["a", "b", "c", "c", "b", "a"])), ... ), ... ) @@ -4138,7 +4138,7 @@ def any( Data variables: da (time) bool True True True True True False - >>> ds.resample(time="3M").any() + >>> ds.resample(time="3ME").any() Dimensions: (time: 3) Coordinates: @@ -4228,7 +4228,7 @@ def max( ... np.array([1, 2, 3, 0, 2, np.nan]), ... dims="time", ... coords=dict( - ... time=("time", pd.date_range("2001-01-01", freq="M", periods=6)), + ... time=("time", pd.date_range("2001-01-01", freq="ME", periods=6)), ... labels=("time", np.array(["a", "b", "c", "c", "b", "a"])), ... ), ... ) @@ -4242,7 +4242,7 @@ def max( Data variables: da (time) float64 1.0 2.0 3.0 0.0 2.0 nan - >>> ds.resample(time="3M").max() + >>> ds.resample(time="3ME").max() Dimensions: (time: 3) Coordinates: @@ -4252,7 +4252,7 @@ def max( Use ``skipna`` to control whether NaNs are ignored. - >>> ds.resample(time="3M").max(skipna=False) + >>> ds.resample(time="3ME").max(skipna=False) Dimensions: (time: 3) Coordinates: @@ -4344,7 +4344,7 @@ def min( ... np.array([1, 2, 3, 0, 2, np.nan]), ... dims="time", ... coords=dict( - ... time=("time", pd.date_range("2001-01-01", freq="M", periods=6)), + ... time=("time", pd.date_range("2001-01-01", freq="ME", periods=6)), ... labels=("time", np.array(["a", "b", "c", "c", "b", "a"])), ... ), ... ) @@ -4358,7 +4358,7 @@ def min( Data variables: da (time) float64 1.0 2.0 3.0 0.0 2.0 nan - >>> ds.resample(time="3M").min() + >>> ds.resample(time="3ME").min() Dimensions: (time: 3) Coordinates: @@ -4368,7 +4368,7 @@ def min( Use ``skipna`` to control whether NaNs are ignored. - >>> ds.resample(time="3M").min(skipna=False) + >>> ds.resample(time="3ME").min(skipna=False) Dimensions: (time: 3) Coordinates: @@ -4462,7 +4462,7 @@ def mean( ... np.array([1, 2, 3, 0, 2, np.nan]), ... dims="time", ... coords=dict( - ... time=("time", pd.date_range("2001-01-01", freq="M", periods=6)), + ... time=("time", pd.date_range("2001-01-01", freq="ME", periods=6)), ... labels=("time", np.array(["a", "b", "c", "c", "b", "a"])), ... ), ... 
) @@ -4476,7 +4476,7 @@ def mean( Data variables: da (time) float64 1.0 2.0 3.0 0.0 2.0 nan - >>> ds.resample(time="3M").mean() + >>> ds.resample(time="3ME").mean() Dimensions: (time: 3) Coordinates: @@ -4486,7 +4486,7 @@ def mean( Use ``skipna`` to control whether NaNs are ignored. - >>> ds.resample(time="3M").mean(skipna=False) + >>> ds.resample(time="3ME").mean(skipna=False) Dimensions: (time: 3) Coordinates: @@ -4587,7 +4587,7 @@ def prod( ... np.array([1, 2, 3, 0, 2, np.nan]), ... dims="time", ... coords=dict( - ... time=("time", pd.date_range("2001-01-01", freq="M", periods=6)), + ... time=("time", pd.date_range("2001-01-01", freq="ME", periods=6)), ... labels=("time", np.array(["a", "b", "c", "c", "b", "a"])), ... ), ... ) @@ -4601,7 +4601,7 @@ def prod( Data variables: da (time) float64 1.0 2.0 3.0 0.0 2.0 nan - >>> ds.resample(time="3M").prod() + >>> ds.resample(time="3ME").prod() Dimensions: (time: 3) Coordinates: @@ -4611,7 +4611,7 @@ def prod( Use ``skipna`` to control whether NaNs are ignored. - >>> ds.resample(time="3M").prod(skipna=False) + >>> ds.resample(time="3ME").prod(skipna=False) Dimensions: (time: 3) Coordinates: @@ -4621,7 +4621,7 @@ def prod( Specify ``min_count`` for finer control over when NaNs are ignored. - >>> ds.resample(time="3M").prod(skipna=True, min_count=2) + >>> ds.resample(time="3ME").prod(skipna=True, min_count=2) Dimensions: (time: 3) Coordinates: @@ -4724,7 +4724,7 @@ def sum( ... np.array([1, 2, 3, 0, 2, np.nan]), ... dims="time", ... coords=dict( - ... time=("time", pd.date_range("2001-01-01", freq="M", periods=6)), + ... time=("time", pd.date_range("2001-01-01", freq="ME", periods=6)), ... labels=("time", np.array(["a", "b", "c", "c", "b", "a"])), ... ), ... ) @@ -4738,7 +4738,7 @@ def sum( Data variables: da (time) float64 1.0 2.0 3.0 0.0 2.0 nan - >>> ds.resample(time="3M").sum() + >>> ds.resample(time="3ME").sum() Dimensions: (time: 3) Coordinates: @@ -4748,7 +4748,7 @@ def sum( Use ``skipna`` to control whether NaNs are ignored. - >>> ds.resample(time="3M").sum(skipna=False) + >>> ds.resample(time="3ME").sum(skipna=False) Dimensions: (time: 3) Coordinates: @@ -4758,7 +4758,7 @@ def sum( Specify ``min_count`` for finer control over when NaNs are ignored. - >>> ds.resample(time="3M").sum(skipna=True, min_count=2) + >>> ds.resample(time="3ME").sum(skipna=True, min_count=2) Dimensions: (time: 3) Coordinates: @@ -4858,7 +4858,7 @@ def std( ... np.array([1, 2, 3, 0, 2, np.nan]), ... dims="time", ... coords=dict( - ... time=("time", pd.date_range("2001-01-01", freq="M", periods=6)), + ... time=("time", pd.date_range("2001-01-01", freq="ME", periods=6)), ... labels=("time", np.array(["a", "b", "c", "c", "b", "a"])), ... ), ... ) @@ -4872,7 +4872,7 @@ def std( Data variables: da (time) float64 1.0 2.0 3.0 0.0 2.0 nan - >>> ds.resample(time="3M").std() + >>> ds.resample(time="3ME").std() Dimensions: (time: 3) Coordinates: @@ -4882,7 +4882,7 @@ def std( Use ``skipna`` to control whether NaNs are ignored. - >>> ds.resample(time="3M").std(skipna=False) + >>> ds.resample(time="3ME").std(skipna=False) Dimensions: (time: 3) Coordinates: @@ -4892,7 +4892,7 @@ def std( Specify ``ddof=1`` for an unbiased estimate. - >>> ds.resample(time="3M").std(skipna=True, ddof=1) + >>> ds.resample(time="3ME").std(skipna=True, ddof=1) Dimensions: (time: 3) Coordinates: @@ -4992,7 +4992,7 @@ def var( ... np.array([1, 2, 3, 0, 2, np.nan]), ... dims="time", ... coords=dict( - ... time=("time", pd.date_range("2001-01-01", freq="M", periods=6)), + ... 
time=("time", pd.date_range("2001-01-01", freq="ME", periods=6)), ... labels=("time", np.array(["a", "b", "c", "c", "b", "a"])), ... ), ... ) @@ -5006,7 +5006,7 @@ def var( Data variables: da (time) float64 1.0 2.0 3.0 0.0 2.0 nan - >>> ds.resample(time="3M").var() + >>> ds.resample(time="3ME").var() Dimensions: (time: 3) Coordinates: @@ -5016,7 +5016,7 @@ def var( Use ``skipna`` to control whether NaNs are ignored. - >>> ds.resample(time="3M").var(skipna=False) + >>> ds.resample(time="3ME").var(skipna=False) Dimensions: (time: 3) Coordinates: @@ -5026,7 +5026,7 @@ def var( Specify ``ddof=1`` for an unbiased estimate. - >>> ds.resample(time="3M").var(skipna=True, ddof=1) + >>> ds.resample(time="3ME").var(skipna=True, ddof=1) Dimensions: (time: 3) Coordinates: @@ -5122,7 +5122,7 @@ def median( ... np.array([1, 2, 3, 0, 2, np.nan]), ... dims="time", ... coords=dict( - ... time=("time", pd.date_range("2001-01-01", freq="M", periods=6)), + ... time=("time", pd.date_range("2001-01-01", freq="ME", periods=6)), ... labels=("time", np.array(["a", "b", "c", "c", "b", "a"])), ... ), ... ) @@ -5136,7 +5136,7 @@ def median( Data variables: da (time) float64 1.0 2.0 3.0 0.0 2.0 nan - >>> ds.resample(time="3M").median() + >>> ds.resample(time="3ME").median() Dimensions: (time: 3) Coordinates: @@ -5146,7 +5146,7 @@ def median( Use ``skipna`` to control whether NaNs are ignored. - >>> ds.resample(time="3M").median(skipna=False) + >>> ds.resample(time="3ME").median(skipna=False) Dimensions: (time: 3) Coordinates: @@ -5225,7 +5225,7 @@ def cumsum( ... np.array([1, 2, 3, 0, 2, np.nan]), ... dims="time", ... coords=dict( - ... time=("time", pd.date_range("2001-01-01", freq="M", periods=6)), + ... time=("time", pd.date_range("2001-01-01", freq="ME", periods=6)), ... labels=("time", np.array(["a", "b", "c", "c", "b", "a"])), ... ), ... ) @@ -5239,7 +5239,7 @@ def cumsum( Data variables: da (time) float64 1.0 2.0 3.0 0.0 2.0 nan - >>> ds.resample(time="3M").cumsum() + >>> ds.resample(time="3ME").cumsum() Dimensions: (time: 6) Dimensions without coordinates: time @@ -5248,7 +5248,7 @@ def cumsum( Use ``skipna`` to control whether NaNs are ignored. - >>> ds.resample(time="3M").cumsum(skipna=False) + >>> ds.resample(time="3ME").cumsum(skipna=False) Dimensions: (time: 6) Dimensions without coordinates: time @@ -5326,7 +5326,7 @@ def cumprod( ... np.array([1, 2, 3, 0, 2, np.nan]), ... dims="time", ... coords=dict( - ... time=("time", pd.date_range("2001-01-01", freq="M", periods=6)), + ... time=("time", pd.date_range("2001-01-01", freq="ME", periods=6)), ... labels=("time", np.array(["a", "b", "c", "c", "b", "a"])), ... ), ... ) @@ -5340,7 +5340,7 @@ def cumprod( Data variables: da (time) float64 1.0 2.0 3.0 0.0 2.0 nan - >>> ds.resample(time="3M").cumprod() + >>> ds.resample(time="3ME").cumprod() Dimensions: (time: 6) Dimensions without coordinates: time @@ -5349,7 +5349,7 @@ def cumprod( Use ``skipna`` to control whether NaNs are ignored. - >>> ds.resample(time="3M").cumprod(skipna=False) + >>> ds.resample(time="3ME").cumprod(skipna=False) Dimensions: (time: 6) Dimensions without coordinates: time @@ -5455,7 +5455,7 @@ def count( ... np.array([1, 2, 3, 0, 2, np.nan]), ... dims="time", ... coords=dict( - ... time=("time", pd.date_range("2001-01-01", freq="M", periods=6)), + ... time=("time", pd.date_range("2001-01-01", freq="ME", periods=6)), ... labels=("time", np.array(["a", "b", "c", "c", "b", "a"])), ... ), ... ) @@ -5546,7 +5546,7 @@ def all( ... np.array([True, True, True, True, True, False], dtype=bool), ... 
dims="time", ... coords=dict( - ... time=("time", pd.date_range("2001-01-01", freq="M", periods=6)), + ... time=("time", pd.date_range("2001-01-01", freq="ME", periods=6)), ... labels=("time", np.array(["a", "b", "c", "c", "b", "a"])), ... ), ... ) @@ -5637,7 +5637,7 @@ def any( ... np.array([True, True, True, True, True, False], dtype=bool), ... dims="time", ... coords=dict( - ... time=("time", pd.date_range("2001-01-01", freq="M", periods=6)), + ... time=("time", pd.date_range("2001-01-01", freq="ME", periods=6)), ... labels=("time", np.array(["a", "b", "c", "c", "b", "a"])), ... ), ... ) @@ -5734,7 +5734,7 @@ def max( ... np.array([1, 2, 3, 0, 2, np.nan]), ... dims="time", ... coords=dict( - ... time=("time", pd.date_range("2001-01-01", freq="M", periods=6)), + ... time=("time", pd.date_range("2001-01-01", freq="ME", periods=6)), ... labels=("time", np.array(["a", "b", "c", "c", "b", "a"])), ... ), ... ) @@ -5841,7 +5841,7 @@ def min( ... np.array([1, 2, 3, 0, 2, np.nan]), ... dims="time", ... coords=dict( - ... time=("time", pd.date_range("2001-01-01", freq="M", periods=6)), + ... time=("time", pd.date_range("2001-01-01", freq="ME", periods=6)), ... labels=("time", np.array(["a", "b", "c", "c", "b", "a"])), ... ), ... ) @@ -5950,7 +5950,7 @@ def mean( ... np.array([1, 2, 3, 0, 2, np.nan]), ... dims="time", ... coords=dict( - ... time=("time", pd.date_range("2001-01-01", freq="M", periods=6)), + ... time=("time", pd.date_range("2001-01-01", freq="ME", periods=6)), ... labels=("time", np.array(["a", "b", "c", "c", "b", "a"])), ... ), ... ) @@ -6066,7 +6066,7 @@ def prod( ... np.array([1, 2, 3, 0, 2, np.nan]), ... dims="time", ... coords=dict( - ... time=("time", pd.date_range("2001-01-01", freq="M", periods=6)), + ... time=("time", pd.date_range("2001-01-01", freq="ME", periods=6)), ... labels=("time", np.array(["a", "b", "c", "c", "b", "a"])), ... ), ... ) @@ -6192,7 +6192,7 @@ def sum( ... np.array([1, 2, 3, 0, 2, np.nan]), ... dims="time", ... coords=dict( - ... time=("time", pd.date_range("2001-01-01", freq="M", periods=6)), + ... time=("time", pd.date_range("2001-01-01", freq="ME", periods=6)), ... labels=("time", np.array(["a", "b", "c", "c", "b", "a"])), ... ), ... ) @@ -6315,7 +6315,7 @@ def std( ... np.array([1, 2, 3, 0, 2, np.nan]), ... dims="time", ... coords=dict( - ... time=("time", pd.date_range("2001-01-01", freq="M", periods=6)), + ... time=("time", pd.date_range("2001-01-01", freq="ME", periods=6)), ... labels=("time", np.array(["a", "b", "c", "c", "b", "a"])), ... ), ... ) @@ -6438,7 +6438,7 @@ def var( ... np.array([1, 2, 3, 0, 2, np.nan]), ... dims="time", ... coords=dict( - ... time=("time", pd.date_range("2001-01-01", freq="M", periods=6)), + ... time=("time", pd.date_range("2001-01-01", freq="ME", periods=6)), ... labels=("time", np.array(["a", "b", "c", "c", "b", "a"])), ... ), ... ) @@ -6557,7 +6557,7 @@ def median( ... np.array([1, 2, 3, 0, 2, np.nan]), ... dims="time", ... coords=dict( - ... time=("time", pd.date_range("2001-01-01", freq="M", periods=6)), + ... time=("time", pd.date_range("2001-01-01", freq="ME", periods=6)), ... labels=("time", np.array(["a", "b", "c", "c", "b", "a"])), ... ), ... ) @@ -6652,7 +6652,7 @@ def cumsum( ... np.array([1, 2, 3, 0, 2, np.nan]), ... dims="time", ... coords=dict( - ... time=("time", pd.date_range("2001-01-01", freq="M", periods=6)), + ... time=("time", pd.date_range("2001-01-01", freq="ME", periods=6)), ... labels=("time", np.array(["a", "b", "c", "c", "b", "a"])), ... ), ... ) @@ -6749,7 +6749,7 @@ def cumprod( ... 
np.array([1, 2, 3, 0, 2, np.nan]), ... dims="time", ... coords=dict( - ... time=("time", pd.date_range("2001-01-01", freq="M", periods=6)), + ... time=("time", pd.date_range("2001-01-01", freq="ME", periods=6)), ... labels=("time", np.array(["a", "b", "c", "c", "b", "a"])), ... ), ... ) @@ -6874,7 +6874,7 @@ def count( ... np.array([1, 2, 3, 0, 2, np.nan]), ... dims="time", ... coords=dict( - ... time=("time", pd.date_range("2001-01-01", freq="M", periods=6)), + ... time=("time", pd.date_range("2001-01-01", freq="ME", periods=6)), ... labels=("time", np.array(["a", "b", "c", "c", "b", "a"])), ... ), ... ) @@ -6885,7 +6885,7 @@ def count( * time (time) datetime64[ns] 2001-01-31 2001-02-28 ... 2001-06-30 labels (time) >> da.resample(time="3M").count() + >>> da.resample(time="3ME").count() array([1, 3, 1]) Coordinates: @@ -6965,7 +6965,7 @@ def all( ... np.array([True, True, True, True, True, False], dtype=bool), ... dims="time", ... coords=dict( - ... time=("time", pd.date_range("2001-01-01", freq="M", periods=6)), + ... time=("time", pd.date_range("2001-01-01", freq="ME", periods=6)), ... labels=("time", np.array(["a", "b", "c", "c", "b", "a"])), ... ), ... ) @@ -6976,7 +6976,7 @@ def all( * time (time) datetime64[ns] 2001-01-31 2001-02-28 ... 2001-06-30 labels (time) >> da.resample(time="3M").all() + >>> da.resample(time="3ME").all() array([ True, True, False]) Coordinates: @@ -7056,7 +7056,7 @@ def any( ... np.array([True, True, True, True, True, False], dtype=bool), ... dims="time", ... coords=dict( - ... time=("time", pd.date_range("2001-01-01", freq="M", periods=6)), + ... time=("time", pd.date_range("2001-01-01", freq="ME", periods=6)), ... labels=("time", np.array(["a", "b", "c", "c", "b", "a"])), ... ), ... ) @@ -7067,7 +7067,7 @@ def any( * time (time) datetime64[ns] 2001-01-31 2001-02-28 ... 2001-06-30 labels (time) >> da.resample(time="3M").any() + >>> da.resample(time="3ME").any() array([ True, True, True]) Coordinates: @@ -7153,7 +7153,7 @@ def max( ... np.array([1, 2, 3, 0, 2, np.nan]), ... dims="time", ... coords=dict( - ... time=("time", pd.date_range("2001-01-01", freq="M", periods=6)), + ... time=("time", pd.date_range("2001-01-01", freq="ME", periods=6)), ... labels=("time", np.array(["a", "b", "c", "c", "b", "a"])), ... ), ... ) @@ -7164,7 +7164,7 @@ def max( * time (time) datetime64[ns] 2001-01-31 2001-02-28 ... 2001-06-30 labels (time) >> da.resample(time="3M").max() + >>> da.resample(time="3ME").max() array([1., 3., 2.]) Coordinates: @@ -7172,7 +7172,7 @@ def max( Use ``skipna`` to control whether NaNs are ignored. - >>> da.resample(time="3M").max(skipna=False) + >>> da.resample(time="3ME").max(skipna=False) array([ 1., 3., nan]) Coordinates: @@ -7260,7 +7260,7 @@ def min( ... np.array([1, 2, 3, 0, 2, np.nan]), ... dims="time", ... coords=dict( - ... time=("time", pd.date_range("2001-01-01", freq="M", periods=6)), + ... time=("time", pd.date_range("2001-01-01", freq="ME", periods=6)), ... labels=("time", np.array(["a", "b", "c", "c", "b", "a"])), ... ), ... ) @@ -7271,7 +7271,7 @@ def min( * time (time) datetime64[ns] 2001-01-31 2001-02-28 ... 2001-06-30 labels (time) >> da.resample(time="3M").min() + >>> da.resample(time="3ME").min() array([1., 0., 2.]) Coordinates: @@ -7279,7 +7279,7 @@ def min( Use ``skipna`` to control whether NaNs are ignored. - >>> da.resample(time="3M").min(skipna=False) + >>> da.resample(time="3ME").min(skipna=False) array([ 1., 0., nan]) Coordinates: @@ -7369,7 +7369,7 @@ def mean( ... np.array([1, 2, 3, 0, 2, np.nan]), ... dims="time", ... 
coords=dict( - ... time=("time", pd.date_range("2001-01-01", freq="M", periods=6)), + ... time=("time", pd.date_range("2001-01-01", freq="ME", periods=6)), ... labels=("time", np.array(["a", "b", "c", "c", "b", "a"])), ... ), ... ) @@ -7380,7 +7380,7 @@ def mean( * time (time) datetime64[ns] 2001-01-31 2001-02-28 ... 2001-06-30 labels (time) >> da.resample(time="3M").mean() + >>> da.resample(time="3ME").mean() array([1. , 1.66666667, 2. ]) Coordinates: @@ -7388,7 +7388,7 @@ def mean( Use ``skipna`` to control whether NaNs are ignored. - >>> da.resample(time="3M").mean(skipna=False) + >>> da.resample(time="3ME").mean(skipna=False) array([1. , 1.66666667, nan]) Coordinates: @@ -7485,7 +7485,7 @@ def prod( ... np.array([1, 2, 3, 0, 2, np.nan]), ... dims="time", ... coords=dict( - ... time=("time", pd.date_range("2001-01-01", freq="M", periods=6)), + ... time=("time", pd.date_range("2001-01-01", freq="ME", periods=6)), ... labels=("time", np.array(["a", "b", "c", "c", "b", "a"])), ... ), ... ) @@ -7496,7 +7496,7 @@ def prod( * time (time) datetime64[ns] 2001-01-31 2001-02-28 ... 2001-06-30 labels (time) >> da.resample(time="3M").prod() + >>> da.resample(time="3ME").prod() array([1., 0., 2.]) Coordinates: @@ -7504,7 +7504,7 @@ def prod( Use ``skipna`` to control whether NaNs are ignored. - >>> da.resample(time="3M").prod(skipna=False) + >>> da.resample(time="3ME").prod(skipna=False) array([ 1., 0., nan]) Coordinates: @@ -7512,7 +7512,7 @@ def prod( Specify ``min_count`` for finer control over when NaNs are ignored. - >>> da.resample(time="3M").prod(skipna=True, min_count=2) + >>> da.resample(time="3ME").prod(skipna=True, min_count=2) array([nan, 0., nan]) Coordinates: @@ -7611,7 +7611,7 @@ def sum( ... np.array([1, 2, 3, 0, 2, np.nan]), ... dims="time", ... coords=dict( - ... time=("time", pd.date_range("2001-01-01", freq="M", periods=6)), + ... time=("time", pd.date_range("2001-01-01", freq="ME", periods=6)), ... labels=("time", np.array(["a", "b", "c", "c", "b", "a"])), ... ), ... ) @@ -7622,7 +7622,7 @@ def sum( * time (time) datetime64[ns] 2001-01-31 2001-02-28 ... 2001-06-30 labels (time) >> da.resample(time="3M").sum() + >>> da.resample(time="3ME").sum() array([1., 5., 2.]) Coordinates: @@ -7630,7 +7630,7 @@ def sum( Use ``skipna`` to control whether NaNs are ignored. - >>> da.resample(time="3M").sum(skipna=False) + >>> da.resample(time="3ME").sum(skipna=False) array([ 1., 5., nan]) Coordinates: @@ -7638,7 +7638,7 @@ def sum( Specify ``min_count`` for finer control over when NaNs are ignored. - >>> da.resample(time="3M").sum(skipna=True, min_count=2) + >>> da.resample(time="3ME").sum(skipna=True, min_count=2) array([nan, 5., nan]) Coordinates: @@ -7734,7 +7734,7 @@ def std( ... np.array([1, 2, 3, 0, 2, np.nan]), ... dims="time", ... coords=dict( - ... time=("time", pd.date_range("2001-01-01", freq="M", periods=6)), + ... time=("time", pd.date_range("2001-01-01", freq="ME", periods=6)), ... labels=("time", np.array(["a", "b", "c", "c", "b", "a"])), ... ), ... ) @@ -7745,7 +7745,7 @@ def std( * time (time) datetime64[ns] 2001-01-31 2001-02-28 ... 2001-06-30 labels (time) >> da.resample(time="3M").std() + >>> da.resample(time="3ME").std() array([0. , 1.24721913, 0. ]) Coordinates: @@ -7753,7 +7753,7 @@ def std( Use ``skipna`` to control whether NaNs are ignored. - >>> da.resample(time="3M").std(skipna=False) + >>> da.resample(time="3ME").std(skipna=False) array([0. , 1.24721913, nan]) Coordinates: @@ -7761,7 +7761,7 @@ def std( Specify ``ddof=1`` for an unbiased estimate. 
- >>> da.resample(time="3M").std(skipna=True, ddof=1) + >>> da.resample(time="3ME").std(skipna=True, ddof=1) array([ nan, 1.52752523, nan]) Coordinates: @@ -7857,7 +7857,7 @@ def var( ... np.array([1, 2, 3, 0, 2, np.nan]), ... dims="time", ... coords=dict( - ... time=("time", pd.date_range("2001-01-01", freq="M", periods=6)), + ... time=("time", pd.date_range("2001-01-01", freq="ME", periods=6)), ... labels=("time", np.array(["a", "b", "c", "c", "b", "a"])), ... ), ... ) @@ -7868,7 +7868,7 @@ def var( * time (time) datetime64[ns] 2001-01-31 2001-02-28 ... 2001-06-30 labels (time) >> da.resample(time="3M").var() + >>> da.resample(time="3ME").var() array([0. , 1.55555556, 0. ]) Coordinates: @@ -7876,7 +7876,7 @@ def var( Use ``skipna`` to control whether NaNs are ignored. - >>> da.resample(time="3M").var(skipna=False) + >>> da.resample(time="3ME").var(skipna=False) array([0. , 1.55555556, nan]) Coordinates: @@ -7884,7 +7884,7 @@ def var( Specify ``ddof=1`` for an unbiased estimate. - >>> da.resample(time="3M").var(skipna=True, ddof=1) + >>> da.resample(time="3ME").var(skipna=True, ddof=1) array([ nan, 2.33333333, nan]) Coordinates: @@ -7976,7 +7976,7 @@ def median( ... np.array([1, 2, 3, 0, 2, np.nan]), ... dims="time", ... coords=dict( - ... time=("time", pd.date_range("2001-01-01", freq="M", periods=6)), + ... time=("time", pd.date_range("2001-01-01", freq="ME", periods=6)), ... labels=("time", np.array(["a", "b", "c", "c", "b", "a"])), ... ), ... ) @@ -7987,7 +7987,7 @@ def median( * time (time) datetime64[ns] 2001-01-31 2001-02-28 ... 2001-06-30 labels (time) >> da.resample(time="3M").median() + >>> da.resample(time="3ME").median() array([1., 2., 2.]) Coordinates: @@ -7995,7 +7995,7 @@ def median( Use ``skipna`` to control whether NaNs are ignored. - >>> da.resample(time="3M").median(skipna=False) + >>> da.resample(time="3ME").median(skipna=False) array([ 1., 2., nan]) Coordinates: @@ -8071,7 +8071,7 @@ def cumsum( ... np.array([1, 2, 3, 0, 2, np.nan]), ... dims="time", ... coords=dict( - ... time=("time", pd.date_range("2001-01-01", freq="M", periods=6)), + ... time=("time", pd.date_range("2001-01-01", freq="ME", periods=6)), ... labels=("time", np.array(["a", "b", "c", "c", "b", "a"])), ... ), ... ) @@ -8082,7 +8082,7 @@ def cumsum( * time (time) datetime64[ns] 2001-01-31 2001-02-28 ... 2001-06-30 labels (time) >> da.resample(time="3M").cumsum() + >>> da.resample(time="3ME").cumsum() array([1., 2., 5., 5., 2., 2.]) Coordinates: @@ -8091,7 +8091,7 @@ def cumsum( Use ``skipna`` to control whether NaNs are ignored. - >>> da.resample(time="3M").cumsum(skipna=False) + >>> da.resample(time="3ME").cumsum(skipna=False) array([ 1., 2., 5., 5., 2., nan]) Coordinates: @@ -8168,7 +8168,7 @@ def cumprod( ... np.array([1, 2, 3, 0, 2, np.nan]), ... dims="time", ... coords=dict( - ... time=("time", pd.date_range("2001-01-01", freq="M", periods=6)), + ... time=("time", pd.date_range("2001-01-01", freq="ME", periods=6)), ... labels=("time", np.array(["a", "b", "c", "c", "b", "a"])), ... ), ... ) @@ -8179,7 +8179,7 @@ def cumprod( * time (time) datetime64[ns] 2001-01-31 2001-02-28 ... 2001-06-30 labels (time) >> da.resample(time="3M").cumprod() + >>> da.resample(time="3ME").cumprod() array([1., 2., 6., 0., 2., 2.]) Coordinates: @@ -8188,7 +8188,7 @@ def cumprod( Use ``skipna`` to control whether NaNs are ignored. 
- >>> da.resample(time="3M").cumprod(skipna=False) + >>> da.resample(time="3ME").cumprod(skipna=False) array([ 1., 2., 6., 0., 2., nan]) Coordinates: From e9a1641658738238f7e2f7b6b383d9a2dea51801 Mon Sep 17 00:00:00 2001 From: nameloCmaS Date: Mon, 22 Jan 2024 23:10:06 +0000 Subject: [PATCH 13/13] Update whats-new.rst Corrected the correction... --- doc/whats-new.rst | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/doc/whats-new.rst b/doc/whats-new.rst index f8627c6f572..3364087143b 100644 --- a/doc/whats-new.rst +++ b/doc/whats-new.rst @@ -7270,7 +7270,7 @@ Breaking changes - The ``season`` datetime shortcut now returns an array of string labels such `'DJF'`: - .. ipython:: python + .. code-block:: ipython In[92]: ds = xray.Dataset({"t": pd.date_range("2000-01-01", periods=12, freq="M")})
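For reference, the rename these hunks implement can be exercised directly. The snippet below is an illustrative sketch rather than part of the patch: the variable names and values are invented here, and it assumes pandas >= 2.2 together with an xarray release that accepts the new offset aliases. It mirrors the docstring examples above.

    >>> import numpy as np
    >>> import pandas as pd
    >>> import xarray as xr
    >>> da = xr.DataArray(
    ...     np.array([1, 2, 3, 0, 2, np.nan]),
    ...     dims="time",
    ...     # pandas 2.2 deprecates "M" (month-end) in favour of "ME"
    ...     coords={"time": pd.date_range("2001-01-01", freq="ME", periods=6)},
    ... )
    >>> # resampling frequencies are spelled the same way: "3M" becomes "3ME"
    >>> quarterly = da.resample(time="3ME").mean()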