@@ -326,7 +326,7 @@ def apply_dataarray_vfunc(
                 variable, coords=coords, indexes=indexes, name=name, fastpath=True
             )
             for variable, coords, indexes in zip(
-                result_var, result_coords, result_indexes
+                result_var, result_coords, result_indexes, strict=True
             )
         )
     else:
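For context, a minimal standalone sketch of the PEP 618 `zip(strict=...)` semantics (Python 3.10+) that these hunks adopt: with `strict=True`, unequal input lengths raise `ValueError` instead of silently truncating to the shortest iterable.

# Sketch of the zip(strict=...) behavior used throughout this diff.
pairs = list(zip([1, 2], ["a", "b"], strict=True))  # OK: [(1, 'a'), (2, 'b')]

try:
    list(zip([1, 2, 3], ["a", "b"], strict=True))
except ValueError as err:
    # e.g. "zip() argument 2 is shorter than argument 1"
    print(err)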
@@ -407,7 +407,7 @@ def _unpack_dict_tuples(
 ) -> tuple[dict[Hashable, Variable], ...]:
     out: tuple[dict[Hashable, Variable], ...] = tuple({} for _ in range(num_outputs))
     for name, values in result_vars.items():
-        for value, results_dict in zip(values, out):
+        for value, results_dict in zip(values, out, strict=True):
             results_dict[name] = value
     return out

@@ -422,7 +422,7 @@ def _check_core_dims(signature, variable_args, name):
     """
     missing = []
     for i, (core_dims, variable_arg) in enumerate(
-        zip(signature.input_core_dims, variable_args)
+        zip(signature.input_core_dims, variable_args, strict=True)
     ):
         # Check whether all the dims are on the variable. Note that we need the
         # `hasattr` to check for a dims property, to protect against the case where
@@ -454,7 +454,7 @@ def apply_dict_of_variables_vfunc(
     grouped_by_name = collect_dict_values(args, names, fill_value)

     result_vars = {}
-    for name, variable_args in zip(names, grouped_by_name):
+    for name, variable_args in zip(names, grouped_by_name, strict=True):
         core_dim_present = _check_core_dims(signature, variable_args, name)
         if core_dim_present is True:
             result_vars[name] = func(*variable_args)
@@ -546,7 +546,7 @@ def apply_dataset_vfunc(
     if signature.num_outputs > 1:
         out = tuple(
             _fast_dataset(*args)
-            for args in zip(result_vars, list_of_coords, list_of_indexes)
+            for args in zip(result_vars, list_of_coords, list_of_indexes, strict=True)
         )
     else:
         (coord_vars,) = list_of_coords
@@ -616,11 +616,13 @@ def apply_groupby_func(func, *args):
             iterator = itertools.repeat(arg)
         iterators.append(iterator)

-    applied: Iterator = (func(*zipped_args) for zipped_args in zip(*iterators))
+    applied: Iterator = (
+        func(*zipped_args) for zipped_args in zip(*iterators, strict=False)
+    )
     applied_example, applied = peek_at(applied)
     combine = first_groupby._combine  # type: ignore[attr-defined]
     if isinstance(applied_example, tuple):
-        combined = tuple(combine(output) for output in zip(*applied))
+        combined = tuple(combine(output) for output in zip(*applied, strict=True))
     else:
         combined = combine(applied)
     return combined
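The explicit `strict=False` in this hunk is deliberate rather than an oversight: as the context lines show, some of the zipped iterators are built with `itertools.repeat(arg)`, which is infinite, so stopping at the shortest (grouped) iterator is the intended behavior and strict length checking cannot apply. A minimal sketch of that pattern:

import itertools

# One finite iterator (the grouped argument) zipped against an infinite one
# (a repeated non-grouped argument); strict=False keeps stop-at-shortest.
groups = iter([10, 20, 30])
constant = itertools.repeat("x")
print(list(zip(groups, constant, strict=False)))  # [(10, 'x'), (20, 'x'), (30, 'x')]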
@@ -637,7 +639,7 @@ def unified_dim_sizes(
                 "broadcasting cannot handle duplicate "
                 f"dimensions on a variable: {list(var.dims)}"
             )
-        for dim, size in zip(var.dims, var.shape):
+        for dim, size in zip(var.dims, var.shape, strict=True):
             if dim not in exclude_dims:
                 if dim not in dim_sizes:
                     dim_sizes[dim] = size
@@ -741,7 +743,7 @@ def apply_variable_ufunc(
             if isinstance(arg, Variable)
             else arg
         )
-        for arg, core_dims in zip(args, signature.input_core_dims)
+        for arg, core_dims in zip(args, signature.input_core_dims, strict=True)
     ]

     if any(is_chunked_array(array) for array in input_data):
@@ -766,7 +768,7 @@ def apply_variable_ufunc(
         allow_rechunk = dask_gufunc_kwargs.get("allow_rechunk", None)
         if allow_rechunk is None:
             for n, (data, core_dims) in enumerate(
-                zip(input_data, signature.input_core_dims)
+                zip(input_data, signature.input_core_dims, strict=True)
             ):
                 if is_chunked_array(data):
                     # core dimensions cannot span multiple chunks
@@ -848,7 +850,7 @@ def func(*arrays):
         )

     output: list[Variable] = []
-    for dims, data in zip(output_dims, result_data):
+    for dims, data in zip(output_dims, result_data, strict=True):
         data = as_compatible_data(data)
         if data.ndim != len(dims):
             raise ValueError(
@@ -2179,7 +2181,7 @@ def _calc_idxminmax(
     # Handle chunked arrays (e.g. dask).
     if is_chunked_array(array.data):
         chunkmanager = get_chunked_array_type(array.data)
-        chunks = dict(zip(array.dims, array.chunks))
+        chunks = dict(zip(array.dims, array.chunks, strict=True))
         dask_coord = chunkmanager.from_array(array[dim].data, chunks=chunks[dim])
         data = dask_coord[duck_array_ops.ravel(indx.data)]
         res = indx.copy(data=duck_array_ops.reshape(data, indx.shape))
@@ -2268,7 +2270,7 @@ def unify_chunks(*objects: Dataset | DataArray) -> tuple[Dataset | DataArray, ..
     _, chunked_data = chunkmanager.unify_chunks(*unify_chunks_args)
     chunked_data_iter = iter(chunked_data)
     out: list[Dataset | DataArray] = []
-    for obj, ds in zip(objects, datasets):
+    for obj, ds in zip(objects, datasets, strict=True):
         for k, v in ds._variables.items():
             if v.chunks is not None:
                 ds._variables[k] = v.copy(data=next(chunked_data_iter))