Skip to content

Commit 85f6ee4

Browse files
committed
Remove problematic backslashes
1 parent be042cf commit 85f6ee4

File tree

9 files changed

+76
-73
lines changed

9 files changed

+76
-73
lines changed

xarray/backends/api.py

Lines changed: 18 additions & 16 deletions
Original file line numberDiff line numberDiff line change
@@ -419,8 +419,8 @@ def open_dataset(
419419
ends with .gz, in which case the file is gunzipped and opened with
420420
scipy.io.netcdf (only netCDF3 supported). Byte-strings or file-like
421421
objects are opened by scipy.io.netcdf (netCDF3) or h5py (netCDF4/HDF).
422-
engine : {"netcdf4", "scipy", "pydap", "h5netcdf", "pynio", \
423-
"zarr", None}, installed backend \
422+
engine : {"netcdf4", "scipy", "pydap", "h5netcdf", "pynio",,
423+
"zarr", None}, installed backend,
424424
or subclass of xarray.backends.BackendEntrypoint, optional
425425
Engine to use when reading files. If not provided, the default engine
426426
is chosen based on available dependencies, with a preference for
@@ -626,8 +626,8 @@ def open_dataarray(
626626
ends with .gz, in which case the file is gunzipped and opened with
627627
scipy.io.netcdf (only netCDF3 supported). Byte-strings or file-like
628628
objects are opened by scipy.io.netcdf (netCDF3) or h5py (netCDF4/HDF).
629-
engine : {"netcdf4", "scipy", "pydap", "h5netcdf", "pynio", \
630-
"zarr", None}, installed backend \
629+
engine : {"netcdf4", "scipy", "pydap", "h5netcdf", "pynio",,
630+
"zarr", None}, installed backend,
631631
or subclass of xarray.backends.BackendEntrypoint, optional
632632
Engine to use when reading files. If not provided, the default engine
633633
is chosen based on available dependencies, with a preference for
@@ -791,13 +791,15 @@ def open_dataarray(
791791
def open_mfdataset(
792792
paths: str | NestedSequence[str | os.PathLike],
793793
chunks: T_Chunks | None = None,
794-
concat_dim: str
795-
| DataArray
796-
| Index
797-
| Sequence[str]
798-
| Sequence[DataArray]
799-
| Sequence[Index]
800-
| None = None,
794+
concat_dim: (
795+
str
796+
| DataArray
797+
| Index
798+
| Sequence[str]
799+
| Sequence[DataArray]
800+
| Sequence[Index]
801+
| None
802+
) = None,
801803
compat: CompatOptions = "no_conflicts",
802804
preprocess: Callable[[Dataset], Dataset] | None = None,
803805
engine: T_Engine | None = None,
@@ -847,7 +849,7 @@ def open_mfdataset(
847849
combine : {"by_coords", "nested"}, optional
848850
Whether ``xarray.combine_by_coords`` or ``xarray.combine_nested`` is used to
849851
combine all the data. Default is to use ``xarray.combine_by_coords``.
850-
compat : {"identical", "equals", "broadcast_equals", \
852+
compat : {"identical", "equals", "broadcast_equals",,
851853
"no_conflicts", "override"}, default: "no_conflicts"
852854
String indicating how to compare variables of the same name for
853855
potential conflicts when merging:
@@ -866,8 +868,8 @@ def open_mfdataset(
866868
If provided, call this function on each dataset prior to concatenation.
867869
You can find the file-name from which each dataset was loaded in
868870
``ds.encoding["source"]``.
869-
engine : {"netcdf4", "scipy", "pydap", "h5netcdf", "pynio", \
870-
"zarr", None}, installed backend \
871+
engine : {"netcdf4", "scipy", "pydap", "h5netcdf", "pynio",,
872+
"zarr", None}, installed backend,
871873
or subclass of xarray.backends.BackendEntrypoint, optional
872874
Engine to use when reading files. If not provided, the default engine
873875
is chosen based on available dependencies, with a preference for
@@ -917,7 +919,7 @@ def open_mfdataset(
917919
Path of the file used to read global attributes from.
918920
By default global attributes are read from the first file provided,
919921
with wildcard matches sorted by filename.
920-
combine_attrs : {"drop", "identical", "no_conflicts", "drop_conflicts", \
922+
combine_attrs : {"drop", "identical", "no_conflicts", "drop_conflicts",,
921923
"override"} or callable, default: "override"
922924
A callable or a string indicating how to combine attrs of the objects being
923925
merged:
@@ -1390,7 +1392,7 @@ def save_mfdataset(
13901392
mode : {"w", "a"}, optional
13911393
Write ("w") or append ("a") mode. If mode="w", any existing file at
13921394
these locations will be overwritten.
1393-
format : {"NETCDF4", "NETCDF4_CLASSIC", "NETCDF3_64BIT", \
1395+
format : {"NETCDF4", "NETCDF4_CLASSIC", "NETCDF3_64BIT",,
13941396
"NETCDF3_CLASSIC"}, optional
13951397
**kwargs : additional arguments are passed along to ``to_netcdf``
13961398

xarray/core/combine.py

Lines changed: 4 additions & 4 deletions
Original file line numberDiff line numberDiff line change
@@ -372,7 +372,7 @@ def _nested_combine(
372372

373373
def combine_nested(
374374
datasets: DATASET_HYPERCUBE,
375-
concat_dim: (str | DataArray | None | Sequence[str | DataArray | pd.Index | None]),
375+
concat_dim: str | DataArray | None | Sequence[str | DataArray | pd.Index | None],
376376
compat: str = "no_conflicts",
377377
data_vars: str = "all",
378378
coords: str = "different",
@@ -413,7 +413,7 @@ def combine_nested(
413413
nested-list input along which to merge.
414414
Must be the same length as the depth of the list passed to
415415
``datasets``.
416-
compat : {"identical", "equals", "broadcast_equals", \
416+
compat : {"identical", "equals", "broadcast_equals",
417417
"no_conflicts", "override"}, optional
418418
String indicating how to compare variables of the same name for
419419
potential merge conflicts:
@@ -448,7 +448,7 @@ def combine_nested(
448448
- "override": if indexes are of same size, rewrite indexes to be
449449
those of the first object with that dimension. Indexes for the same
450450
dimension must have the same size in all objects.
451-
combine_attrs : {"drop", "identical", "no_conflicts", "drop_conflicts", \
451+
combine_attrs : {"drop", "identical", "no_conflicts", "drop_conflicts",
452452
"override"} or callable, default: "drop"
453453
A callable or a string indicating how to combine attrs of the objects being
454454
merged:
@@ -738,7 +738,7 @@ def combine_by_coords(
738738
those of the first object with that dimension. Indexes for the same
739739
dimension must have the same size in all objects.
740740
741-
combine_attrs : {"drop", "identical", "no_conflicts", "drop_conflicts", \
741+
combine_attrs : {"drop", "identical", "no_conflicts", "drop_conflicts",
742742
"override"} or callable, default: "no_conflicts"
743743
A callable or a string indicating how to combine attrs of the objects being
744744
merged:

xarray/core/computation.py

Lines changed: 8 additions & 5 deletions
Original file line numberDiff line numberDiff line change
@@ -1,6 +1,7 @@
11
"""
22
Functions for applying functions that act on arrays to xarray's labeled data.
33
"""
4+
45
from __future__ import annotations
56

67
import functools
@@ -223,7 +224,7 @@ def build_output_coords_and_indexes(
223224
exclude_dims : set, optional
224225
Dimensions excluded from the operation. Coordinates along these
225226
dimensions are dropped.
226-
combine_attrs : {"drop", "identical", "no_conflicts", "drop_conflicts", \
227+
combine_attrs : {"drop", "identical", "no_conflicts", "drop_conflicts",
227228
"override"} or callable, default: "drop"
228229
A callable or a string indicating how to combine attrs of the objects being
229230
merged:
@@ -731,9 +732,11 @@ def apply_variable_ufunc(
731732
output_dims = [broadcast_dims + out for out in signature.output_core_dims]
732733

733734
input_data = [
734-
broadcast_compat_data(arg, broadcast_dims, core_dims)
735-
if isinstance(arg, Variable)
736-
else arg
735+
(
736+
broadcast_compat_data(arg, broadcast_dims, core_dims)
737+
if isinstance(arg, Variable)
738+
else arg
739+
)
737740
for arg, core_dims in zip(args, signature.input_core_dims)
738741
]
739742

@@ -931,7 +934,7 @@ def apply_ufunc(
931934
the style of NumPy universal functions [1]_ (if this is not the case,
932935
set ``vectorize=True``). If this function returns multiple outputs, you
933936
must set ``output_core_dims`` as well.
934-
*args : Dataset, DataArray, DataArrayGroupBy, DatasetGroupBy, Variable, \
937+
*args : Dataset, DataArray, DataArrayGroupBy, DatasetGroupBy, Variable,
935938
numpy.ndarray, dask.array.Array or scalar
936939
Mix of labeled and/or unlabeled arrays to which to apply the function.
937940
input_core_dims : sequence of sequence, optional

xarray/core/concat.py

Lines changed: 1 addition & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -148,7 +148,7 @@ def concat(
148148
- "override": if indexes are of same size, rewrite indexes to be
149149
those of the first object with that dimension. Indexes for the same
150150
dimension must have the same size in all objects.
151-
combine_attrs : {"drop", "identical", "no_conflicts", "drop_conflicts", \
151+
combine_attrs : {"drop", "identical", "no_conflicts", "drop_conflicts",
152152
"override"} or callable, default: "override"
153153
A callable or a string indicating how to combine attrs of the objects being
154154
merged:

xarray/core/dataarray.py

Lines changed: 19 additions & 19 deletions
Original file line numberDiff line numberDiff line change
@@ -3437,7 +3437,7 @@ def interpolate_na(
34373437
----------
34383438
dim : Hashable or None, optional
34393439
Specifies the dimension along which to interpolate.
3440-
method : {"linear", "nearest", "zero", "slinear", "quadratic", "cubic", "polynomial", \
3440+
method : {"linear", "nearest", "zero", "slinear", "quadratic", "cubic", "polynomial",
34413441
"barycentric", "krogh", "pchip", "spline", "akima"}, default: "linear"
34423442
String indicating which method to use for interpolation:
34433443
@@ -3995,7 +3995,7 @@ def to_netcdf(
39953995
Write ('w') or append ('a') mode. If mode='w', any existing file at
39963996
this location will be overwritten. If mode='a', existing variables
39973997
will be overwritten.
3998-
format : {"NETCDF4", "NETCDF4_CLASSIC", "NETCDF3_64BIT", \
3998+
format : {"NETCDF4", "NETCDF4_CLASSIC", "NETCDF3_64BIT",
39993999
"NETCDF3_CLASSIC"}, optional
40004000
File format for the resulting netCDF file:
40014001
@@ -4983,10 +4983,12 @@ def dot(
49834983

49844984
def sortby(
49854985
self,
4986-
variables: Hashable
4987-
| DataArray
4988-
| Sequence[Hashable | DataArray]
4989-
| Callable[[Self], Hashable | DataArray | Sequence[Hashable | DataArray]],
4986+
variables: (
4987+
Hashable
4988+
| DataArray
4989+
| Sequence[Hashable | DataArray]
4990+
| Callable[[Self], Hashable | DataArray | Sequence[Hashable | DataArray]]
4991+
),
49904992
ascending: bool = True,
49914993
) -> Self:
49924994
"""Sort object by labels or values (along an axis).
@@ -5232,7 +5234,7 @@ def differentiate(
52325234
edge_order: Literal[1, 2] = 1,
52335235
datetime_unit: DatetimeUnitOptions = None,
52345236
) -> Self:
5235-
""" Differentiate the array with the second order accurate central
5237+
"""Differentiate the array with the second order accurate central
52365238
differences.
52375239
52385240
.. note::
@@ -5245,7 +5247,7 @@ def differentiate(
52455247
The coordinate to be used to compute the gradient.
52465248
edge_order : {1, 2}, default: 1
52475249
N-th order accurate differences at the boundaries.
5248-
datetime_unit : {"W", "D", "h", "m", "s", "ms", \
5250+
datetime_unit : {"W", "D", "h", "m", "s", "ms",
52495251
"us", "ns", "ps", "fs", "as", None}, optional
52505252
Unit to compute gradient. Only valid for datetime coordinate. "Y" and "M" are not available as
52515253
datetime_unit.
@@ -5304,7 +5306,7 @@ def integrate(
53045306
----------
53055307
coord : Hashable, or sequence of Hashable
53065308
Coordinate(s) used for the integration.
5307-
datetime_unit : {'Y', 'M', 'W', 'D', 'h', 'm', 's', 'ms', 'us', 'ns', \
5309+
datetime_unit : {'Y', 'M', 'W', 'D', 'h', 'm', 's', 'ms', 'us', 'ns',
53085310
'ps', 'fs', 'as', None}, optional
53095311
Specify the unit if a datetime coordinate is used.
53105312
@@ -5361,7 +5363,7 @@ def cumulative_integrate(
53615363
----------
53625364
coord : Hashable, or sequence of Hashable
53635365
Coordinate(s) used for the integration.
5364-
datetime_unit : {'Y', 'M', 'W', 'D', 'h', 'm', 's', 'ms', 'us', 'ns', \
5366+
datetime_unit : {'Y', 'M', 'W', 'D', 'h', 'm', 's', 'ms', 'us', 'ns',
53655367
'ps', 'fs', 'as', None}, optional
53665368
Specify the unit if a datetime coordinate is used.
53675369
@@ -5596,14 +5598,12 @@ def pad(
55965598
self,
55975599
pad_width: Mapping[Any, int | tuple[int, int]] | None = None,
55985600
mode: PadModeOptions = "constant",
5599-
stat_length: int
5600-
| tuple[int, int]
5601-
| Mapping[Any, tuple[int, int]]
5602-
| None = None,
5603-
constant_values: float
5604-
| tuple[float, float]
5605-
| Mapping[Any, tuple[float, float]]
5606-
| None = None,
5601+
stat_length: (
5602+
int | tuple[int, int] | Mapping[Any, tuple[int, int]] | None
5603+
) = None,
5604+
constant_values: (
5605+
float | tuple[float, float] | Mapping[Any, tuple[float, float]] | None
5606+
) = None,
56075607
end_values: int | tuple[int, int] | Mapping[Any, tuple[int, int]] | None = None,
56085608
reflect_type: PadReflectOptions = None,
56095609
keep_attrs: bool | None = None,
@@ -5625,7 +5625,7 @@ def pad(
56255625
Mapping with the form of {dim: (pad_before, pad_after)}
56265626
describing the number of values padded along each dimension.
56275627
{dim: pad} is a shortcut for pad_before = pad_after = pad
5628-
mode : {"constant", "edge", "linear_ramp", "maximum", "mean", "median", \
5628+
mode : {"constant", "edge", "linear_ramp", "maximum", "mean", "median",
56295629
"minimum", "reflect", "symmetric", "wrap"}, default: "constant"
56305630
How to pad the DataArray (taken from numpy docs):
56315631

xarray/core/dataset.py

Lines changed: 13 additions & 13 deletions
Original file line numberDiff line numberDiff line change
@@ -2230,7 +2230,7 @@ def to_netcdf(
22302230
Write ('w') or append ('a') mode. If mode='w', any existing file at
22312231
this location will be overwritten. If mode='a', existing variables
22322232
will be overwritten.
2233-
format : {"NETCDF4", "NETCDF4_CLASSIC", "NETCDF3_64BIT", \
2233+
format : {"NETCDF4", "NETCDF4_CLASSIC", "NETCDF3_64BIT",,
22342234
"NETCDF3_CLASSIC"}, optional
22352235
File format for the resulting netCDF file:
22362236
@@ -3801,7 +3801,7 @@ def interp(
38013801
New coordinate can be a scalar, array-like or DataArray.
38023802
If DataArrays are passed as new coordinates, their dimensions are
38033803
used for the broadcasting. Missing values are skipped.
3804-
method : {"linear", "nearest", "zero", "slinear", "quadratic", "cubic", "polynomial", \
3804+
method : {"linear", "nearest", "zero", "slinear", "quadratic", "cubic", "polynomial",,
38053805
"barycentric", "krogh", "pchip", "spline", "akima"}, default: "linear"
38063806
String indicating which method to use for interpolation:
38073807
@@ -4080,7 +4080,7 @@ def interp_like(
40804080
Object with an 'indexes' attribute giving a mapping from dimension
40814081
names to an 1d array-like, which provides coordinates upon
40824082
which to index the variables in this dataset. Missing values are skipped.
4083-
method : {"linear", "nearest", "zero", "slinear", "quadratic", "cubic", "polynomial", \
4083+
method : {"linear", "nearest", "zero", "slinear", "quadratic", "cubic", "polynomial",,
40844084
"barycentric", "krogh", "pchip", "spline", "akima"}, default: "linear"
40854085
String indicating which method to use for interpolation:
40864086
@@ -5646,7 +5646,7 @@ def merge(
56465646
overwrite_vars : hashable or iterable of hashable, optional
56475647
If provided, update variables of these name(s) without checking for
56485648
conflicts in this dataset.
5649-
compat : {"identical", "equals", "broadcast_equals", \
5649+
compat : {"identical", "equals", "broadcast_equals",,
56505650
"no_conflicts", "override", "minimal"}, default: "no_conflicts"
56515651
String indicating how to compare variables of the same name for
56525652
potential conflicts:
@@ -5662,7 +5662,7 @@ def merge(
56625662
- 'override': skip comparing and pick variable from first dataset
56635663
- 'minimal': drop conflicting coordinates
56645664
5665-
join : {"outer", "inner", "left", "right", "exact", "override"}, \
5665+
join : {"outer", "inner", "left", "right", "exact", "override"},,
56665666
default: "outer"
56675667
Method for joining ``self`` and ``other`` along shared dimensions:
56685668
@@ -5677,7 +5677,7 @@ def merge(
56775677
fill_value : scalar or dict-like, optional
56785678
Value to use for newly missing values. If a dict-like, maps
56795679
variable names (including coordinates) to fill values.
5680-
combine_attrs : {"drop", "identical", "no_conflicts", "drop_conflicts", \
5680+
combine_attrs : {"drop", "identical", "no_conflicts", "drop_conflicts",,
56815681
"override"} or callable, default: "override"
56825682
A callable or a string indicating how to combine attrs of the objects being
56835683
merged:
@@ -6466,7 +6466,7 @@ def interpolate_na(
64666466
----------
64676467
dim : Hashable or None, optional
64686468
Specifies the dimension along which to interpolate.
6469-
method : {"linear", "nearest", "zero", "slinear", "quadratic", "cubic", "polynomial", \
6469+
method : {"linear", "nearest", "zero", "slinear", "quadratic", "cubic", "polynomial",,
64706470
"barycentric", "krogh", "pchip", "spline", "akima"}, default: "linear"
64716471
String indicating which method to use for interpolation:
64726472
@@ -7487,7 +7487,7 @@ def from_dict(cls, d: Mapping[Any, Any]) -> Self:
74877487
----------
74887488
d : dict-like
74897489
Mapping with a minimum structure of
7490-
``{"var_0": {"dims": [..], "data": [..]}, \
7490+
``{"var_0": {"dims": [..], "data": [..]},,
74917491
...}``
74927492
74937493
Returns
@@ -8265,7 +8265,7 @@ def differentiate(
82658265
edge_order: Literal[1, 2] = 1,
82668266
datetime_unit: DatetimeUnitOptions | None = None,
82678267
) -> Self:
8268-
""" Differentiate with the second order accurate central
8268+
"""Differentiate with the second order accurate central
82698269
differences.
82708270
82718271
.. note::
@@ -8278,7 +8278,7 @@ def differentiate(
82788278
The coordinate to be used to compute the gradient.
82798279
edge_order : {1, 2}, default: 1
82808280
N-th order accurate differences at the boundaries.
8281-
datetime_unit : None or {"Y", "M", "W", "D", "h", "m", "s", "ms", \
8281+
datetime_unit : None or {"Y", "M", "W", "D", "h", "m", "s", "ms",,
82828282
"us", "ns", "ps", "fs", "as", None}, default: None
82838283
Unit to compute gradient. Only valid for datetime coordinate.
82848284
@@ -8346,7 +8346,7 @@ def integrate(
83468346
----------
83478347
coord : hashable, or sequence of hashable
83488348
Coordinate(s) used for the integration.
8349-
datetime_unit : {'Y', 'M', 'W', 'D', 'h', 'm', 's', 'ms', 'us', 'ns', \
8349+
datetime_unit : {'Y', 'M', 'W', 'D', 'h', 'm', 's', 'ms', 'us', 'ns',
83508350
'ps', 'fs', 'as', None}, optional
83518351
Specify the unit if datetime coordinate is used.
83528352
@@ -8469,7 +8469,7 @@ def cumulative_integrate(
84698469
----------
84708470
coord : hashable, or sequence of hashable
84718471
Coordinate(s) used for the integration.
8472-
datetime_unit : {'Y', 'M', 'W', 'D', 'h', 'm', 's', 'ms', 'us', 'ns', \
8472+
datetime_unit : {'Y', 'M', 'W', 'D', 'h', 'm', 's', 'ms', 'us', 'ns',
84738473
'ps', 'fs', 'as', None}, optional
84748474
Specify the unit if datetime coordinate is used.
84758475
@@ -8997,7 +8997,7 @@ def pad(
89978997
Mapping with the form of {dim: (pad_before, pad_after)}
89988998
describing the number of values padded along each dimension.
89998999
{dim: pad} is a shortcut for pad_before = pad_after = pad
9000-
mode : {"constant", "edge", "linear_ramp", "maximum", "mean", "median", \
9000+
mode : {"constant", "edge", "linear_ramp", "maximum", "mean", "median",,
90019001
"minimum", "reflect", "symmetric", "wrap"}, default: "constant"
90029002
How to pad the DataArray (taken from numpy docs):
90039003

0 commit comments

Comments
 (0)