@@ -2,7 +2,6 @@
 from glob import glob
 from io import BytesIO
 from numbers import Number
-from pathlib import Path
 from typing import (
     TYPE_CHECKING,
     Callable,
@@ -808,7 +807,7 @@ def open_mfdataset(
         - "override": if indexes are of same size, rewrite indexes to be
           those of the first object with that dimension. Indexes for the same
           dimension must have the same size in all objects.
-    attrs_file : str or pathlib.Path, optional
+    attrs_file : str or path-like, optional
         Path of the file used to read global attributes from.
         By default global attributes are read from the first file provided,
         with wildcard matches sorted by filename.
@@ -868,7 +867,7 @@ def open_mfdataset(
     elif isinstance(paths, os.PathLike):
         paths = [os.fspath(paths)]
     else:
-        paths = [str(p) if isinstance(p, Path) else p for p in paths]
+        paths = [os.fspath(p) if isinstance(p, os.PathLike) else p for p in paths]

     if not paths:
         raise OSError("no files to open")
@@ -960,8 +959,8 @@ def multi_file_closer():

     # read global attributes from the attrs_file or from the first dataset
     if attrs_file is not None:
-        if isinstance(attrs_file, Path):
-            attrs_file = str(attrs_file)
+        if isinstance(attrs_file, os.PathLike):
+            attrs_file = os.fspath(attrs_file)
         combined.attrs = datasets[paths.index(attrs_file)].attrs

     return combined
@@ -994,8 +993,8 @@ def to_netcdf(

     The ``multifile`` argument is only for the private use of save_mfdataset.
     """
-    if isinstance(path_or_file, Path):
-        path_or_file = str(path_or_file)
+    if isinstance(path_or_file, os.PathLike):
+        path_or_file = os.fspath(path_or_file)

     if encoding is None:
         encoding = {}
@@ -1136,7 +1135,7 @@ def save_mfdataset(
     ----------
     datasets : list of Dataset
         List of datasets to save.
-    paths : list of str or list of Path
+    paths : list of str or list of path-like objects
         List of paths to which to save each corresponding dataset.
     mode : {"w", "a"}, optional
         Write ("w") or append ("a") mode. If mode="w", any existing file at
@@ -1304,7 +1303,7 @@ def check_dtype(var):

 def to_zarr(
     dataset: Dataset,
-    store: Union[MutableMapping, str, Path] = None,
+    store: Union[MutableMapping, str, os.PathLike] = None,
     chunk_store=None,
     mode: str = None,
     synchronizer=None,
@@ -1328,7 +1327,7 @@ def to_zarr(
         if v.size == 0:
             v.load()

-    # expand str and Path arguments
+    # expand str and path-like arguments
     store = _normalize_path(store)
     chunk_store = _normalize_path(chunk_store)

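The diff above swaps explicit pathlib.Path checks for the os.PathLike protocol, so any object implementing __fspath__ (pathlib.Path or a custom path class) is accepted, not just pathlib.Path itself. A minimal sketch of that pattern follows; the helper name is illustrative and is not xarray's _normalize_path.

    import os
    from pathlib import Path

    def coerce_path_like(path):
        # Illustrative helper, not the xarray implementation: any os.PathLike
        # object is converted to a plain string via the __fspath__ protocol;
        # strings and other objects (e.g. open file handles) pass through.
        if isinstance(path, os.PathLike):
            return os.fspath(path)
        return path

    coerce_path_like("data/file.nc")            # 'data/file.nc'
    coerce_path_like(Path("data") / "file.nc")  # 'data/file.nc' (separator is platform-dependent)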