@@ -154,7 +154,7 @@ def test_multi_index_groupby_sum() -> None:
 
 
 @requires_pandas_ge_2_2
-def test_multi_index_propagation():
+def test_multi_index_propagation() -> None:
     # regression test for GH9648
     times = pd.date_range("2023-01-01", periods=4)
     locations = ["A", "B"]
@@ -2289,7 +2289,7 @@ def test_resample_origin(self) -> None:
         times = pd.date_range("2000-01-01T02:03:01", freq="6h", periods=10)
         array = DataArray(np.arange(10), [("time", times)])
 
-        origin = "start"
+        origin: Literal["start"] = "start"
         actual = array.resample(time="24h", origin=origin).mean()
         expected = DataArray(array.to_series().resample("24h", origin=origin).mean())
         assert_identical(expected, actual)
@@ -2694,7 +2694,7 @@ def test_default_flox_method() -> None:
 
 @requires_cftime
 @pytest.mark.filterwarnings("ignore")
-def test_cftime_resample_gh_9108():
+def test_cftime_resample_gh_9108() -> None:
     import cftime
 
     ds = Dataset(
@@ -3044,7 +3044,7 @@ def test_gappy_resample_reductions(reduction):
     assert_identical(expected, actual)
 
 
-def test_groupby_transpose():
+def test_groupby_transpose() -> None:
     # GH5361
     data = xr.DataArray(
         np.random.randn(4, 2),
@@ -3104,7 +3104,7 @@ def test_lazy_grouping(grouper, expect_index):
 
 
 @requires_dask
-def test_lazy_grouping_errors():
+def test_lazy_grouping_errors() -> None:
     import dask.array
 
     data = DataArray(
@@ -3130,15 +3130,15 @@ def test_lazy_grouping_errors():
 
 
 @requires_dask
-def test_lazy_int_bins_error():
+def test_lazy_int_bins_error() -> None:
     import dask.array
 
     with pytest.raises(ValueError, match="Bin edges must be provided"):
         with raise_if_dask_computes():
             _ = BinGrouper(bins=4).factorize(DataArray(dask.array.arange(3)))
 
 
-def test_time_grouping_seasons_specified():
+def test_time_grouping_seasons_specified() -> None:
     time = xr.date_range("2001-01-01", "2002-01-01", freq="D")
     ds = xr.Dataset({"foo": np.arange(time.size)}, coords={"time": ("time", time)})
     labels = ["DJF", "MAM", "JJA", "SON"]
@@ -3147,7 +3147,7 @@ def test_time_grouping_seasons_specified():
     assert_identical(actual, expected.reindex(season=labels))
 
 
-def test_multiple_grouper_unsorted_order():
+def test_multiple_grouper_unsorted_order() -> None:
     time = xr.date_range("2001-01-01", "2003-01-01", freq="MS")
     ds = xr.Dataset({"foo": np.arange(time.size)}, coords={"time": ("time", time)})
     labels = ["DJF", "MAM", "JJA", "SON"]
@@ -3169,14 +3169,14 @@ def test_multiple_grouper_unsorted_order():
         coords={"x": [0, 1], "y": [0, 1, 2]},
         dims=["x", "y", "z"],
     )
-    actual = b.groupby(
+    actual2 = b.groupby(
         x=UniqueGrouper(labels=[1, 0]), y=UniqueGrouper(labels=[2, 0, 1])
     ).sum()
-    expected = b.reindex(x=[1, 0], y=[2, 0, 1]).transpose("z", ...)
-    assert_identical(actual, expected)
+    expected2 = b.reindex(x=[1, 0], y=[2, 0, 1]).transpose("z", ...)
+    assert_identical(actual2, expected2)
 
 
-def test_groupby_multiple_bin_grouper_missing_groups():
+def test_groupby_multiple_bin_grouper_missing_groups() -> None:
     from numpy import nan
 
     ds = xr.Dataset(
@@ -3253,7 +3253,7 @@ def test_shuffle_by(chunks, expected_chunks):
 
 
 @requires_dask
-def test_groupby_dask_eager_load_warnings():
+def test_groupby_dask_eager_load_warnings() -> None:
     ds = xr.Dataset(
         {"foo": (("z"), np.arange(12))},
         coords={"x": ("z", np.arange(12)), "y": ("z", np.arange(12))},