@@ -714,9 +714,6 @@ def multiple_indexing(indexers):
         ]
         multiple_indexing(indexers5)
 
-    @pytest.mark.xfail(
-        reason="zarr without dask handles negative steps in slices incorrectly",
-    )
     def test_vectorized_indexing_negative_step(self) -> None:
         # use dask explicitly when present
         open_kwargs: dict[str, Any] | None
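
This hunk removes the declarative `@pytest.mark.xfail` from the base-class test; a later hunk in this commit reintroduces the expectation as an imperative `pytest.xfail()` call inside the Zarr subclass, so only the configuration without dask is marked. A minimal sketch of the two forms (the test names and failing assertion are illustrative, not from this commit):

    import pytest

    # Declarative: every run of the test, in every subclass and every
    # environment that inherits it, is expected to fail.
    @pytest.mark.xfail(reason="known bug")
    def test_declarative() -> None:
        assert 1 + 1 == 3

    # Imperative: the expectation is decided at runtime, so it can depend
    # on the environment (e.g. whether an optional dependency is present).
    def test_imperative() -> None:
        buggy_configuration = True  # stand-in for a check like `not has_dask`
        if buggy_configuration:
            pytest.xfail(reason="known bug in this configuration only")
        assert 1 + 1 == 3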
@@ -1842,8 +1839,8 @@ def test_unsorted_index_raises(self) -> None:
         # dask first pulls items by block.
         pass
 
+    @pytest.mark.skip(reason="caching behavior differs for dask")
     def test_dataset_caching(self) -> None:
-        # caching behavior differs for dask
         pass
 
     def test_write_inconsistent_chunks(self) -> None:
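
Replacing the in-body comment with `@pytest.mark.skip(reason=...)` carries the same information, but the reason now surfaces in test output (e.g. under `pytest -rs`) instead of being buried in an empty override. Minimal sketch of the pattern used here:

    import pytest

    @pytest.mark.skip(reason="caching behavior differs for dask")
    def test_dataset_caching() -> None:
        pass  # never runs; the reason above appears in the skip report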
@@ -2261,9 +2258,6 @@ def test_encoding_kwarg_fixed_width_string(self) -> None:
         # not relevant for zarr, since we don't use EncodedStringCoder
         pass
 
-    # TODO: someone who understand caching figure out whether caching
-    # makes sense for Zarr backend
-    @pytest.mark.xfail(reason="Zarr caching not implemented")
     def test_dataset_caching(self) -> None:
         super().test_dataset_caching()
 
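
With the xfail gone, the Zarr classes now run the inherited caching test for real. For orientation, a hedged sketch of what such a caching check typically asserts in this suite (the exact base-class body may differ):

    # Hypothetical sketch, assuming a `roundtrip` helper and lazy backends
    # as in the surrounding suite.
    def test_dataset_caching(self) -> None:
        expected = Dataset({"foo": ("x", [5, 6, 7])})
        with self.roundtrip(expected) as actual:
            assert not actual.foo.variable._in_memory  # lazy after open
            actual.foo.values  # first access pulls and caches the data
            assert actual.foo.variable._in_memory  # now cached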
@@ -2712,6 +2706,14 @@ def test_attributes(self, obj) -> None:
         with pytest.raises(TypeError, match=r"Invalid attribute in Dataset.attrs."):
             ds.to_zarr(store_target, **self.version_kwargs)
 
+    def test_vectorized_indexing_negative_step(self) -> None:
+        if not has_dask:
+            pytest.xfail(
+                reason="zarr without dask handles negative steps in slices incorrectly"
+            )
+
+        super().test_vectorized_indexing_negative_step()
+
 
 @requires_zarr
 class TestZarrDictStore(ZarrBase):
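
The new override depends on a module-level `has_dask` flag whose definition lies outside this diff; in xarray's test suite such flags come from an import-probe helper. A hedged sketch of the usual shape:

    # Hedged sketch: one common way to derive a `has_dask` flag.
    try:
        import dask  # noqa: F401

        has_dask = True
    except ImportError:
        has_dask = False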
@@ -3378,8 +3380,8 @@ def roundtrip(
         ) as ds:
             yield ds
 
+    @pytest.mark.skip(reason="caching behavior differs for dask")
     def test_dataset_caching(self) -> None:
-        # caching behavior differs for dask
         pass
 
     def test_write_inconsistent_chunks(self) -> None:
@@ -3982,7 +3984,6 @@ def test_open_mfdataset_raise_on_bad_combine_args(self) -> None:
         with pytest.raises(ValueError, match="`concat_dim` has no effect"):
             open_mfdataset([tmp1, tmp2], concat_dim="x")
 
-    @pytest.mark.xfail(reason="mfdataset loses encoding currently.")
     def test_encoding_mfdataset(self) -> None:
         original = Dataset(
             {
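
Removing this xfail asserts that `open_mfdataset` now preserves encoding. Roughly the property under test, sketched with an illustrative file name and encoding key (not the test's actual data):

    import xarray as xr

    ds = xr.Dataset({"t": ("x", [1.0, 2.0])})
    ds.to_netcdf("part0.nc", encoding={"t": {"dtype": "float32"}})
    # The combined dataset should keep the on-disk encoding rather than
    # dropping it during the multi-file merge.
    combined = xr.open_mfdataset(["part0.nc"])
    assert combined["t"].encoding["dtype"] == "float32"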
@@ -4195,7 +4196,6 @@ def test_dataarray_compute(self) -> None:
         assert computed._in_memory
         assert_allclose(actual, computed, decode_bytes=False)
 
-    @pytest.mark.xfail
     def test_save_mfdataset_compute_false_roundtrip(self) -> None:
         from dask.delayed import Delayed
 
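
The bare `@pytest.mark.xfail` (no reason, non-strict) is dropped, so the deferred-write roundtrip is expected to pass. What the test exercises: with `compute=False`, `save_mfdataset` returns a `dask.delayed.Delayed` and nothing is written until it is computed. An illustrative sketch (paths and data are placeholders):

    import xarray as xr
    from dask.delayed import Delayed

    datasets = [
        xr.Dataset({"v": ("x", [0, 1])}).chunk(),
        xr.Dataset({"v": ("x", [2, 3])}).chunk(),
    ]
    delayed = xr.save_mfdataset(datasets, ["a.nc", "b.nc"], compute=False)
    assert isinstance(delayed, Delayed)
    delayed.compute()  # the files are written only at this point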
@@ -5125,15 +5125,17 @@ def test_open_fsspec() -> None:
     ds2 = open_dataset(url, engine="zarr")
     xr.testing.assert_equal(ds0, ds2)
 
-    # multi dataset
-    url = "memory://out*.zarr"
-    ds2 = open_mfdataset(url, engine="zarr")
-    xr.testing.assert_equal(xr.concat([ds, ds0], dim="time"), ds2)
-
-    # multi dataset with caching
-    url = "simplecache::memory://out*.zarr"
-    ds2 = open_mfdataset(url, engine="zarr")
-    xr.testing.assert_equal(xr.concat([ds, ds0], dim="time"), ds2)
+    # open_mfdataset requires dask
+    if has_dask:
+        # multi dataset
+        url = "memory://out*.zarr"
+        ds2 = open_mfdataset(url, engine="zarr")
+        xr.testing.assert_equal(xr.concat([ds, ds0], dim="time"), ds2)
+
+        # multi dataset with caching
+        url = "simplecache::memory://out*.zarr"
+        ds2 = open_mfdataset(url, engine="zarr")
+        xr.testing.assert_equal(xr.concat([ds, ds0], dim="time"), ds2)
 
 
 @requires_h5netcdf
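
As the new comment notes, `open_mfdataset` requires dask (the per-file datasets are opened chunked so they can be combined lazily), so the multi-dataset half of `test_open_fsspec` fails outright without it; the `if has_dask:` guard simply bypasses that half. An alternative guard with the same effect, sketched with pytest's own helper (test name illustrative):

    import pytest

    def test_open_fsspec_multi() -> None:
        pytest.importorskip("dask")  # skips cleanly when dask is missing
        ...  # the open_mfdataset assertions would follow here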