
Commit

Revert "Remove backslashes that makes pytest-accept unhappy"
This reverts commit c903ba0.
etienneschalk committed Feb 5, 2024
1 parent dcaa157 commit 85f0de1
Showing 2 changed files with 16 additions and 16 deletions.
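For context, the pattern being restored is a backslash line continuation inside a numpydoc type line. Below is a minimal sketch (a made-up toy function, not xarray's actual code): inside a regular triple-quoted string, a trailing backslash joins the next physical line onto the current one, so the full set of allowed values ends up on a single logical line in the parsed docstring.

```python
# Toy function (hypothetical, for illustration only) using the restored
# backslash-continuation style in a numpydoc-formatted docstring.
def combine_example(compat="no_conflicts"):
    """Toy example of the restored docstring style.

    Parameters
    ----------
    compat : {"identical", "equals", "broadcast_equals", \
        "no_conflicts", "override"}, optional
        How to compare variables of the same name.
    """


# "\<newline>" is a line-continuation escape inside a string literal, so the two
# physical source lines of the type specification collapse into one logical line:
print(combine_example.__doc__)
```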
6 changes: 3 additions & 3 deletions xarray/core/combine.py
@@ -413,7 +413,7 @@ def combine_nested(
nested-list input along which to merge.
Must be the same length as the depth of the list passed to
``datasets``.
compat : {"identical", "equals", "broadcast_equals"
compat : {"identical", "equals", "broadcast_equals", \
"no_conflicts", "override"}, optional
String indicating how to compare variables of the same name for
potential merge conflicts:
@@ -448,7 +448,7 @@ def combine_nested(
- "override": if indexes are of same size, rewrite indexes to be
those of the first object with that dimension. Indexes for the same
dimension must have the same size in all objects.
combine_attrs : {"drop", "identical", "no_conflicts", "drop_conflicts"
combine_attrs : {"drop", "identical", "no_conflicts", "drop_conflicts", \
"override"} or callable, default: "drop"
A callable or a string indicating how to combine attrs of the objects being
merged:
@@ -742,7 +742,7 @@ def combine_by_coords(
those of the first object with that dimension. Indexes for the same
dimension must have the same size in all objects.
combine_attrs : {"drop", "identical", "no_conflicts", "drop_conflicts"
combine_attrs : {"drop", "identical", "no_conflicts", "drop_conflicts", \
"override"} or callable, default: "no_conflicts"
A callable or a string indicating how to combine attrs of the objects being
merged:
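For illustration, a small usage sketch (assuming xarray and numpy are installed; the datasets below are made up) exercising the two ``combine_nested`` parameters whose type lines are rejoined above, ``compat`` and ``combine_attrs``:

```python
import numpy as np
import xarray as xr

# Two datasets with non-overlapping coordinates along "x".
ds1 = xr.Dataset({"t": ("x", np.arange(3.0))}, coords={"x": [0, 1, 2]})
ds2 = xr.Dataset({"t": ("x", np.arange(3.0) + 3)}, coords={"x": [3, 4, 5]})

combined = xr.combine_nested(
    [ds1, ds2],
    concat_dim="x",
    compat="no_conflicts",   # one of the options listed in the docstring above
    combine_attrs="drop",    # the documented default for combine_nested
)
print(combined)
```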
26 changes: 13 additions & 13 deletions xarray/core/dataset.py
@@ -2230,7 +2230,7 @@ def to_netcdf(
Write ('w') or append ('a') mode. If mode='w', any existing file at
this location will be overwritten. If mode='a', existing variables
will be overwritten.
format : {"NETCDF4", "NETCDF4_CLASSIC", "NETCDF3_64BIT"
format : {"NETCDF4", "NETCDF4_CLASSIC", "NETCDF3_64BIT", \
"NETCDF3_CLASSIC"}, optional
File format for the resulting netCDF file:
@@ -3801,7 +3801,7 @@ def interp(
New coordinate can be a scalar, array-like or DataArray.
If DataArrays are passed as new coordinates, their dimensions are
used for the broadcasting. Missing values are skipped.
method : {"linear", "nearest", "zero", "slinear", "quadratic", "cubic", "polynomial"
method : {"linear", "nearest", "zero", "slinear", "quadratic", "cubic", "polynomial", \
"barycentric", "krogh", "pchip", "spline", "akima"}, default: "linear"
String indicating which method to use for interpolation:
@@ -4080,7 +4080,7 @@ def interp_like(
Object with an 'indexes' attribute giving a mapping from dimension
names to an 1d array-like, which provides coordinates upon
which to index the variables in this dataset. Missing values are skipped.
method : {"linear", "nearest", "zero", "slinear", "quadratic", "cubic", "polynomial"
method : {"linear", "nearest", "zero", "slinear", "quadratic", "cubic", "polynomial", \
"barycentric", "krogh", "pchip", "spline", "akima"}, default: "linear"
String indicating which method to use for interpolation:
@@ -5646,7 +5646,7 @@ def merge(
overwrite_vars : hashable or iterable of hashable, optional
If provided, update variables of these name(s) without checking for
conflicts in this dataset.
compat : {"identical", "equals", "broadcast_equals"
compat : {"identical", "equals", "broadcast_equals", \
"no_conflicts", "override", "minimal"}, default: "no_conflicts"
String indicating how to compare variables of the same name for
potential conflicts:
@@ -5662,7 +5662,7 @@ def merge(
- 'override': skip comparing and pick variable from first dataset
- 'minimal': drop conflicting coordinates
join : {"outer", "inner", "left", "right", "exact", "override"}
join : {"outer", "inner", "left", "right", "exact", "override"}, \
default: "outer"
Method for joining ``self`` and ``other`` along shared dimensions:
@@ -5677,7 +5677,7 @@ def merge(
fill_value : scalar or dict-like, optional
Value to use for newly missing values. If a dict-like, maps
variable names (including coordinates) to fill values.
combine_attrs : {"drop", "identical", "no_conflicts", "drop_conflicts"
combine_attrs : {"drop", "identical", "no_conflicts", "drop_conflicts", \
"override"} or callable, default: "override"
A callable or a string indicating how to combine attrs of the objects being
merged:
@@ -6466,7 +6466,7 @@ def interpolate_na(
----------
dim : Hashable or None, optional
Specifies the dimension along which to interpolate.
method : {"linear", "nearest", "zero", "slinear", "quadratic", "cubic", "polynomial"
method : {"linear", "nearest", "zero", "slinear", "quadratic", "cubic", "polynomial", \
"barycentric", "krogh", "pchip", "spline", "akima"}, default: "linear"
String indicating which method to use for interpolation:
@@ -7487,7 +7487,7 @@ def from_dict(cls, d: Mapping[Any, Any]) -> Self:
----------
d : dict-like
Mapping with a minimum structure of
``{"var_0": {"dims": [..], "data": [..]}
``{"var_0": {"dims": [..], "data": [..]}, \
...}``
Returns
@@ -8265,7 +8265,7 @@ def differentiate(
edge_order: Literal[1, 2] = 1,
datetime_unit: DatetimeUnitOptions | None = None,
) -> Self:
"""Differentiate with the second order accurate central
""" Differentiate with the second order accurate central
differences.
.. note::
@@ -8278,7 +8278,7 @@ def differentiate(
The coordinate to be used to compute the gradient.
edge_order : {1, 2}, default: 1
N-th order accurate differences at the boundaries.
- datetime_unit : None or {"Y", "M", "W", "D", "h", "m", "s", "ms"
+ datetime_unit : None or {"Y", "M", "W", "D", "h", "m", "s", "ms", \
"us", "ns", "ps", "fs", "as", None}, default: None
Unit to compute gradient. Only valid for datetime coordinate.
@@ -8346,7 +8346,7 @@ def integrate(
----------
coord : hashable, or sequence of hashable
Coordinate(s) used for the integration.
- datetime_unit : {'Y', 'M', 'W', 'D', 'h', 'm', 's', 'ms', 'us', 'ns'
+ datetime_unit : {'Y', 'M', 'W', 'D', 'h', 'm', 's', 'ms', 'us', 'ns', \
'ps', 'fs', 'as', None}, optional
Specify the unit if datetime coordinate is used.
@@ -8469,7 +8469,7 @@ def cumulative_integrate(
----------
coord : hashable, or sequence of hashable
Coordinate(s) used for the integration.
- datetime_unit : {'Y', 'M', 'W', 'D', 'h', 'm', 's', 'ms', 'us', 'ns'
+ datetime_unit : {'Y', 'M', 'W', 'D', 'h', 'm', 's', 'ms', 'us', 'ns', \
'ps', 'fs', 'as', None}, optional
Specify the unit if datetime coordinate is used.
@@ -8997,7 +8997,7 @@ def pad(
Mapping with the form of {dim: (pad_before, pad_after)}
describing the number of values padded along each dimension.
{dim: pad} is a shortcut for pad_before = pad_after = pad
mode : {"constant", "edge", "linear_ramp", "maximum", "mean", "median"
mode : {"constant", "edge", "linear_ramp", "maximum", "mean", "median", \
"minimum", "reflect", "symmetric", "wrap"}, default: "constant"
How to pad the DataArray (taken from numpy docs):
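Likewise, a small usage sketch (assuming xarray, numpy and scipy are available; the data is made up) touching a few of the Dataset methods whose docstrings are edited above: ``interp`` with one of the listed methods, ``pad`` with the documented default mode, and ``integrate`` over a coordinate:

```python
import numpy as np
import xarray as xr

ds = xr.Dataset({"t": ("x", np.arange(5.0))}, coords={"x": np.arange(5.0)})

# "linear" is one of the interpolation methods listed in the interp docstring.
interped = ds.interp(x=[0.5, 1.5, 2.5], method="linear")

# "constant" is the documented default padding mode.
padded = ds.pad(x=(1, 1), mode="constant")

# Integrate over the "x" coordinate (datetime_unit is left at its default, None).
area = ds.integrate(coord="x")

print(interped["t"].values)
print(padded["t"].values)
print(float(area["t"]))
```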
