 from glob import glob
 from io import BytesIO
 from numbers import Number
-from pathlib import Path
 from typing import (
     TYPE_CHECKING,
     Callable,
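
Throughout this diff, concrete pathlib.Path checks give way to the os.PathLike protocol; the hunk above only drops the pathlib import, so an "import os" is presumably already present near the top of the module. A minimal standalone sketch of the idiom adopted by the later hunks (illustrative names, not xarray code):

import os
import pathlib

def as_plain_path(path):
    # os.PathLike matches any object that implements __fspath__, including
    # pathlib.Path; os.fspath() returns the underlying str (or bytes).
    return os.fspath(path) if isinstance(path, os.PathLike) else path

print(as_plain_path(pathlib.Path("data") / "file.nc"))  # "data/file.nc" (backslashes on Windows)
print(as_plain_path("data/file.nc"))                    # plain strings pass through unchanged
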
@@ -808,7 +807,7 @@ def open_mfdataset(
         - "override": if indexes are of same size, rewrite indexes to be
           those of the first object with that dimension. Indexes for the same
           dimension must have the same size in all objects.
-    attrs_file : str or pathlib.Path, optional
+    attrs_file : str or path-like, optional
         Path of the file used to read global attributes from.
         By default global attributes are read from the first file provided,
         with wildcard matches sorted by filename.
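
The docstring tweak above reflects that attrs_file may now be any path-like object, not only a str or pathlib.Path. A hedged usage sketch of open_mfdataset (file names are illustrative; a netCDF backend and dask are assumed to be installed):

import pathlib
import xarray as xr

files = [pathlib.Path("obs_2000.nc"), pathlib.Path("obs_2001.nc")]

# Global attributes for the combined dataset are taken from the second file;
# attrs_file can be given as a pathlib.Path (or any other os.PathLike).
ds = xr.open_mfdataset(files, attrs_file=pathlib.Path("obs_2001.nc"))
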
@@ -866,7 +865,7 @@ def open_mfdataset(
         else:
             paths = sorted(glob(_normalize_path(paths)))
     else:
-        paths = [str(p) if isinstance(p, Path) else p for p in paths]
+        paths = [os.fspath(p) if isinstance(p, os.PathLike) else p for p in paths]

     if not paths:
         raise OSError("no files to open")
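
The comprehension rewritten above now converts anything that implements __fspath__, not only pathlib.Path instances. A small illustration with a hypothetical path-like class (not part of xarray):

import os
import pathlib

class ScratchFile:
    # Hypothetical class: defining __fspath__ is enough for instances to
    # register as os.PathLike via the ABC's subclass hook.
    def __init__(self, name):
        self.name = name

    def __fspath__(self):
        return "/scratch/" + self.name

mixed = [pathlib.Path("a.nc"), ScratchFile("b.nc"), "c.nc"]

old = [str(p) if isinstance(p, pathlib.Path) else p for p in mixed]
new = [os.fspath(p) if isinstance(p, os.PathLike) else p for p in mixed]

print(old)  # ['a.nc', <ScratchFile object at ...>, 'c.nc'] -- custom object left alone
print(new)  # ['a.nc', '/scratch/b.nc', 'c.nc']             -- handled uniformly
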
@@ -958,8 +957,8 @@ def multi_file_closer():

     # read global attributes from the attrs_file or from the first dataset
     if attrs_file is not None:
-        if isinstance(attrs_file, Path):
-            attrs_file = str(attrs_file)
+        if isinstance(attrs_file, os.PathLike):
+            attrs_file = os.fspath(attrs_file)
         combined.attrs = datasets[paths.index(attrs_file)].attrs

     return combined
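
One detail worth noting in the hunk above: the attributes come from datasets[paths.index(attrs_file)], so attrs_file has to end up as exactly the string that the earlier hunk produced for the matching entry of paths; converting both sides through os.fspath keeps that lookup consistent. A small sketch of the invariant (illustrative values only):

import os
import pathlib

paths = [os.fspath(p) for p in (pathlib.Path("a.nc"), pathlib.Path("b.nc"))]

attrs_file = pathlib.Path("b.nc")
if isinstance(attrs_file, os.PathLike):
    attrs_file = os.fspath(attrs_file)

# Both sides are plain strings now, so the positional lookup succeeds.
assert paths.index(attrs_file) == 1
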
@@ -992,8 +991,8 @@ def to_netcdf(

     The ``multifile`` argument is only for the private use of save_mfdataset.
     """
-    if isinstance(path_or_file, Path):
-        path_or_file = str(path_or_file)
+    if isinstance(path_or_file, os.PathLike):
+        path_or_file = os.fspath(path_or_file)

     if encoding is None:
         encoding = {}
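
With the widened check, to_netcdf sends every path-like target down the same branch. A hedged usage sketch through the public Dataset.to_netcdf method (assumes a netCDF backend such as netcdf4 or scipy is installed):

import pathlib

import numpy as np
import xarray as xr

ds = xr.Dataset({"t": ("x", np.arange(3))})

# A plain str and a pathlib.Path (or any other os.PathLike) are now treated
# identically when resolving the output target.
ds.to_netcdf("out_str.nc")
ds.to_netcdf(pathlib.Path("out_path.nc"))
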
@@ -1134,7 +1133,7 @@ def save_mfdataset(
     ----------
     datasets : list of Dataset
         List of datasets to save.
-    paths : list of str or list of Path
+    paths : list of str or list of path-like objects
         List of paths to which to save each corresponding dataset.
     mode : {"w", "a"}, optional
         Write ("w") or append ("a") mode. If mode="w", any existing file at
@@ -1302,7 +1301,7 @@ def check_dtype(var):

 def to_zarr(
     dataset: Dataset,
-    store: Union[MutableMapping, str, Path] = None,
+    store: Union[MutableMapping, str, os.PathLike] = None,
     chunk_store=None,
     mode: str = None,
     synchronizer=None,
@@ -1326,7 +1325,7 @@ def to_zarr(
         if v.size == 0:
             v.load()

-    # expand str and Path arguments
+    # expand str and path-like arguments
     store = _normalize_path(store)
     chunk_store = _normalize_path(chunk_store)
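
For to_zarr, only the comment changes because the conversion itself is delegated to _normalize_path. A hedged sketch of what such a helper presumably does (xarray's real implementation lives elsewhere in the code base and may differ, for instance in how it detects remote URIs):

import os

def _normalize_path_sketch(path):
    # Expand any os.PathLike to str first, then absolutize local paths;
    # remote URIs (crudely detected here) and None are left untouched.
    if isinstance(path, os.PathLike):
        path = os.fspath(path)
    if isinstance(path, str) and "://" not in path:
        path = os.path.abspath(os.path.expanduser(path))
    return path

print(_normalize_path_sketch("~/data/out.zarr"))       # absolute local path
print(_normalize_path_sketch("s3://bucket/out.zarr"))  # remote URI unchanged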