Skip to content

Commit e7ab84d

Browse files
authored
Merge branch 'main' into eschalk/make-illegal-path-like-variable-names-when-dt-from-ds
2 parents 107603b + aeaa082 commit e7ab84d

Some content is hidden

Large Commits have some content hidden by default. Use the searchbox below for content that may be hidden.

82 files changed

+1298
-637
lines changed

.github/workflows/pypi-release.yaml

Lines changed: 2 additions & 2 deletions
Original file line numberDiff line numberDiff line change
@@ -88,7 +88,7 @@ jobs:
8888
path: dist
8989
- name: Publish package to TestPyPI
9090
if: github.event_name == 'push'
91-
uses: pypa/gh-action-pypi-publish@v1.10.0
91+
uses: pypa/gh-action-pypi-publish@v1.10.1
9292
with:
9393
repository_url: https://test.pypi.org/legacy/
9494
verbose: true
@@ -111,6 +111,6 @@ jobs:
111111
name: releases
112112
path: dist
113113
- name: Publish package to PyPI
114-
uses: pypa/gh-action-pypi-publish@v1.10.0
114+
uses: pypa/gh-action-pypi-publish@v1.10.1
115115
with:
116116
verbose: true

.pre-commit-config.yaml

Lines changed: 1 addition & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -1,6 +1,7 @@
11
# https://pre-commit.com/
22
ci:
33
autoupdate_schedule: monthly
4+
autoupdate_commit_msg: 'Update pre-commit hooks'
45
exclude: 'xarray/datatree_.*'
56
repos:
67
- repo: https://github.com/pre-commit/pre-commit-hooks

asv_bench/benchmarks/dataset_io.py

Lines changed: 1 addition & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -724,7 +724,7 @@ class PerformanceBackend(xr.backends.BackendEntrypoint):
724724
def open_dataset(
725725
self,
726726
filename_or_obj: str | os.PathLike | None,
727-
drop_variables: tuple[str] = None,
727+
drop_variables: tuple[str, ...] = None,
728728
*,
729729
mask_and_scale=True,
730730
decode_times=True,

asv_bench/benchmarks/datatree.py

Lines changed: 1 addition & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -6,7 +6,7 @@ class Datatree:
66
def setup(self):
77
run1 = DataTree.from_dict({"run1": xr.Dataset({"a": 1})})
88
self.d_few = {"run1": run1}
9-
self.d_many = {f"run{i}": run1.copy() for i in range(100)}
9+
self.d_many = {f"run{i}": xr.Dataset({"a": 1}) for i in range(100)}
1010

1111
def time_from_dict_few(self):
1212
DataTree.from_dict(self.d_few)

asv_bench/benchmarks/groupby.py

Lines changed: 1 addition & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -174,7 +174,7 @@ def setup(self, use_cftime, use_flox):
174174
# GH9426 - deep-copying CFTime object arrays is weirdly slow
175175
asda = xr.DataArray(time)
176176
labeled_time = []
177-
for year, month in zip(asda.dt.year, asda.dt.month):
177+
for year, month in zip(asda.dt.year, asda.dt.month, strict=True):
178178
labeled_time.append(cftime.datetime(year, month, 1))
179179

180180
self.da = xr.DataArray(

asv_bench/benchmarks/rolling.py

Lines changed: 1 addition & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -64,7 +64,7 @@ def time_rolling_long(self, func, pandas, use_bottleneck):
6464
def time_rolling_np(self, window_, min_periods, use_bottleneck):
6565
with xr.set_options(use_bottleneck=use_bottleneck):
6666
self.ds.rolling(x=window_, center=False, min_periods=min_periods).reduce(
67-
getattr(np, "nansum")
67+
np.nansum
6868
).load()
6969

7070
@parameterized(

doc/user-guide/testing.rst

Lines changed: 1 addition & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -193,7 +193,7 @@ different type:
193193

194194
.. ipython:: python
195195
196-
def sparse_random_arrays(shape: tuple[int]) -> sparse._coo.core.COO:
196+
def sparse_random_arrays(shape: tuple[int, ...]) -> sparse._coo.core.COO:
197197
"""Strategy which generates random sparse.COO arrays"""
198198
if shape is None:
199199
shape = npst.array_shapes()

doc/whats-new.rst

Lines changed: 40 additions & 21 deletions
Original file line numberDiff line numberDiff line change
@@ -14,16 +14,50 @@ What's New
1414
1515
np.random.seed(123456)
1616
17+
.. _whats-new.2024.09.1:
1718

18-
.. _whats-new.2024.07.1:
19-
20-
v2024.07.1 (unreleased)
19+
v2024.09.1 (unreleased)
2120
-----------------------
2221

2322
New Features
2423
~~~~~~~~~~~~
2524

26-
- Add :py:attr:`~core.accessor_dt.DatetimeAccessor.days_in_year` and :py:attr:`~core.accessor_dt.DatetimeAccessor.decimal_year` to the Datetime accessor on DataArrays. (:pull:`9105`).
25+
26+
Breaking changes
27+
~~~~~~~~~~~~~~~~
28+
29+
30+
Deprecations
31+
~~~~~~~~~~~~
32+
33+
34+
Bug fixes
35+
~~~~~~~~~
36+
37+
38+
Documentation
39+
~~~~~~~~~~~~~
40+
41+
42+
Internal Changes
43+
~~~~~~~~~~~~~~~~
44+
45+
46+
.. _whats-new.2024.09.0:
47+
48+
v2024.09.0 (Sept 11, 2024)
49+
--------------------------
50+
This release drops support for Python 3.9, and adds support for grouping by :ref:`multiple arrays <groupby.multiple>`, while providing numerous performance improvements and bug fixes.
51+
52+
Thanks to the 33 contributors to this release:
53+
Alfonso Ladino, Andrew Scherer, Anurag Nayak, David Hoese, Deepak Cherian, Diogo Teles Sant'Anna, Dom, Elliott Sales de Andrade, Eni, Holly Mandel, Illviljan, Jack Kelly, Julius Busecke, Justus Magin, Kai Mühlbauer, Manish Kumar Gupta, Matt Savoie, Maximilian Roos, Michele Claus, Miguel Jimenez, Niclas Rieger, Pascal Bourgault, Philip Chmielowiec, Spencer Clark, Stephan Hoyer, Tao Xin, Tiago Sanona, TimothyCera-NOAA, Tom Nicholas, Tom White, Virgile Andreani, oliverhiggs and tiago
54+
55+
New Features
56+
~~~~~~~~~~~~
57+
58+
- Add :py:attr:`~core.accessor_dt.DatetimeAccessor.days_in_year` and
59+
:py:attr:`~core.accessor_dt.DatetimeAccessor.decimal_year` to the
60+
``DatetimeAccessor`` on ``xr.DataArray``. (:pull:`9105`).
2761
By `Pascal Bourgault <https://github.com/aulemahal>`_.
2862

2963
Performance
@@ -38,6 +72,8 @@ Performance
3872
By `Deepak Cherian <https://github.com/dcherian>`_.
3973
- Allow data variable specific ``constant_values`` in the dataset ``pad`` function (:pull:`9353`).
4074
By `Tiago Sanona <https://github.com/tsanona>`_.
75+
- Speed up grouping by avoiding deep-copy of non-dimension coordinates (:issue:`9426`, :pull:`9393`)
76+
By `Deepak Cherian <https://github.com/dcherian>`_.
4177

4278
Breaking changes
4379
~~~~~~~~~~~~~~~~
@@ -61,11 +97,6 @@ Breaking changes
6197
zarr 2.14 2.16
6298
===================== ========= =======
6399

64-
65-
Deprecations
66-
~~~~~~~~~~~~
67-
68-
69100
Bug fixes
70101
~~~~~~~~~
71102

@@ -93,24 +124,12 @@ Bug fixes
93124
(:issue:`9408`, :pull:`9413`).
94125
By `Oliver Higgs <https://github.com/oliverhiggs>`_.
95126

96-
Performance
97-
~~~~~~~~~~~
98-
99-
- Speed up grouping by avoiding deep-copy of non-dimension coordinates (:issue:`9426`, :pull:`9393`)
100-
By `Deepak Cherian <https://github.com/dcherian>`_.
101-
102-
Documentation
103-
~~~~~~~~~~~~~
104-
105-
106127
Internal Changes
107128
~~~~~~~~~~~~~~~~
108129

109130
- Re-enable testing ``pydap`` backend with ``numpy>=2`` (:pull:`9391`).
110131
By `Miguel Jimenez <https://github.com/Mikejmnez>`_.
111132

112-
113-
114133
.. _whats-new.2024.07.0:
115134

116135
v2024.07.0 (Jul 30, 2024)

properties/test_pandas_roundtrip.py

Lines changed: 1 addition & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -80,7 +80,7 @@ def test_roundtrip_dataarray(data, arr) -> None:
8080
tuple
8181
)
8282
)
83-
coords = {name: np.arange(n) for (name, n) in zip(names, arr.shape)}
83+
coords = {name: np.arange(n) for (name, n) in zip(names, arr.shape, strict=True)}
8484
original = xr.DataArray(arr, dims=names, coords=coords)
8585
roundtripped = xr.DataArray(original.to_pandas())
8686
xr.testing.assert_identical(original, roundtripped)

pyproject.toml

Lines changed: 4 additions & 6 deletions
Original file line numberDiff line numberDiff line change
@@ -42,7 +42,7 @@ dev = [
4242
"ruff",
4343
"xarray[complete]",
4444
]
45-
io = ["netCDF4", "h5netcdf", "scipy", 'pydap; python_version<"3.10"', "zarr", "fsspec", "cftime", "pooch"]
45+
io = ["netCDF4", "h5netcdf", "scipy", 'pydap; python_version<"3.10"', "zarr<3", "fsspec", "cftime", "pooch"]
4646
parallel = ["dask[complete]"]
4747
viz = ["matplotlib", "seaborn", "nc-time-axis"]
4848

@@ -84,14 +84,13 @@ source = ["xarray"]
8484
exclude_lines = ["pragma: no cover", "if TYPE_CHECKING"]
8585

8686
[tool.mypy]
87-
enable_error_code = "redundant-self"
87+
enable_error_code = ["ignore-without-code", "redundant-self", "redundant-expr"]
8888
exclude = [
8989
'build',
9090
'xarray/util/generate_.*\.py',
9191
'xarray/datatree_/doc/.*\.py',
9292
]
9393
files = "xarray"
94-
show_error_codes = true
9594
show_error_context = true
9695
warn_redundant_casts = true
9796
warn_unused_configs = true
@@ -240,7 +239,6 @@ extend-exclude = [
240239
"doc",
241240
"_typed_ops.pyi",
242241
]
243-
target-version = "py310"
244242

245243
[tool.ruff.lint]
246244
# E402: module level import not at top of file
@@ -249,13 +247,13 @@ target-version = "py310"
249247
extend-safe-fixes = [
250248
"TID252", # absolute imports
251249
]
252-
ignore = [
250+
extend-ignore = [
253251
"E402",
254252
"E501",
255253
"E731",
256254
"UP007",
257255
]
258-
select = [
256+
extend-select = [
259257
"F", # Pyflakes
260258
"E", # Pycodestyle
261259
"W",

xarray/backends/api.py

Lines changed: 10 additions & 6 deletions
Original file line numberDiff line numberDiff line change
@@ -51,7 +51,7 @@
5151
try:
5252
from dask.delayed import Delayed
5353
except ImportError:
54-
Delayed = None # type: ignore
54+
Delayed = None # type: ignore[assignment, misc]
5555
from io import BufferedIOBase
5656

5757
from xarray.backends.common import BackendEntrypoint
@@ -1113,7 +1113,7 @@ def open_mfdataset(
11131113
list(combined_ids_paths.keys()),
11141114
list(combined_ids_paths.values()),
11151115
)
1116-
elif combine == "by_coords" and concat_dim is not None:
1116+
elif concat_dim is not None:
11171117
raise ValueError(
11181118
"When combine='by_coords', passing a value for `concat_dim` has no "
11191119
"effect. To manually combine along a specific dimension you should "
@@ -1432,7 +1432,7 @@ def to_netcdf(
14321432
store.sync()
14331433
return target.getvalue()
14341434
finally:
1435-
if not multifile and compute:
1435+
if not multifile and compute: # type: ignore[redundant-expr]
14361436
store.close()
14371437

14381438
if not compute:
@@ -1585,8 +1585,9 @@ def save_mfdataset(
15851585
multifile=True,
15861586
**kwargs,
15871587
)
1588-
for ds, path, group in zip(datasets, paths, groups)
1589-
]
1588+
for ds, path, group in zip(datasets, paths, groups, strict=True)
1589+
],
1590+
strict=True,
15901591
)
15911592

15921593
try:
@@ -1600,7 +1601,10 @@ def save_mfdataset(
16001601
import dask
16011602

16021603
return dask.delayed(
1603-
[dask.delayed(_finalize_store)(w, s) for w, s in zip(writes, stores)]
1604+
[
1605+
dask.delayed(_finalize_store)(w, s)
1606+
for w, s in zip(writes, stores, strict=True)
1607+
]
16041608
)
16051609

16061610

xarray/backends/common.py

Lines changed: 1 addition & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -431,7 +431,7 @@ def set_dimensions(self, variables, unlimited_dims=None):
431431
for v in unlimited_dims: # put unlimited_dims first
432432
dims[v] = None
433433
for v in variables.values():
434-
dims.update(dict(zip(v.dims, v.shape)))
434+
dims.update(dict(zip(v.dims, v.shape, strict=True)))
435435

436436
for dim, length in dims.items():
437437
if dim in existing_dims and length != existing_dims[dim]:

xarray/backends/file_manager.py

Lines changed: 1 addition & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -276,7 +276,7 @@ def __getstate__(self):
276276
def __setstate__(self, state) -> None:
277277
"""Restore from a pickle."""
278278
opener, args, mode, kwargs, lock, manager_id = state
279-
self.__init__( # type: ignore
279+
self.__init__( # type: ignore[misc]
280280
opener, *args, mode=mode, kwargs=kwargs, lock=lock, manager_id=manager_id
281281
)
282282

xarray/backends/h5netcdf_.py

Lines changed: 3 additions & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -208,7 +208,9 @@ def open_store_variable(self, name, var):
208208
"shuffle": var.shuffle,
209209
}
210210
if var.chunks:
211-
encoding["preferred_chunks"] = dict(zip(var.dimensions, var.chunks))
211+
encoding["preferred_chunks"] = dict(
212+
zip(var.dimensions, var.chunks, strict=True)
213+
)
212214
# Convert h5py-style compression options to NetCDF4-Python
213215
# style, if possible
214216
if var.compression == "gzip":

xarray/backends/netCDF4_.py

Lines changed: 6 additions & 2 deletions
Original file line numberDiff line numberDiff line change
@@ -278,7 +278,9 @@ def _extract_nc4_variable_encoding(
278278
chunksizes = encoding["chunksizes"]
279279
chunks_too_big = any(
280280
c > d and dim not in unlimited_dims
281-
for c, d, dim in zip(chunksizes, variable.shape, variable.dims)
281+
for c, d, dim in zip(
282+
chunksizes, variable.shape, variable.dims, strict=False
283+
)
282284
)
283285
has_original_shape = "original_shape" in encoding
284286
changed_shape = (
@@ -446,7 +448,9 @@ def open_store_variable(self, name: str, var):
446448
else:
447449
encoding["contiguous"] = False
448450
encoding["chunksizes"] = tuple(chunking)
449-
encoding["preferred_chunks"] = dict(zip(var.dimensions, chunking))
451+
encoding["preferred_chunks"] = dict(
452+
zip(var.dimensions, chunking, strict=True)
453+
)
450454
# TODO: figure out how to round-trip "endian-ness" without raising
451455
# warnings from netCDF4
452456
# encoding['endian'] = var.endian()

xarray/backends/plugins.py

Lines changed: 1 addition & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -199,7 +199,7 @@ def get_backend(engine: str | type[BackendEntrypoint]) -> BackendEntrypoint:
199199
"https://docs.xarray.dev/en/stable/getting-started-guide/installing.html"
200200
)
201201
backend = engines[engine]
202-
elif isinstance(engine, type) and issubclass(engine, BackendEntrypoint):
202+
elif issubclass(engine, BackendEntrypoint):
203203
backend = engine()
204204
else:
205205
raise TypeError(

0 commit comments

Comments
 (0)