From cdc8064232903f2cbaeeb0a5c0b60dc38ddbd08a Mon Sep 17 00:00:00 2001 From: Joseph Hamman Date: Sat, 2 Feb 2019 13:16:07 -0800 Subject: [PATCH 1/4] remove bottleneck dev build from travis, this test env was failing to build --- .travis.yml | 2 -- ci/requirements-py36-bottleneck-dev.yml | 24 ------------------------ 2 files changed, 26 deletions(-) delete mode 100644 ci/requirements-py36-bottleneck-dev.yml diff --git a/.travis.yml b/.travis.yml index fbc01b4815d..ea9ee7adcf4 100644 --- a/.travis.yml +++ b/.travis.yml @@ -19,7 +19,6 @@ matrix: - EXTRA_FLAGS="--run-flaky --run-network-tests" - env: CONDA_ENV=py36-dask-dev - env: CONDA_ENV=py36-pandas-dev - - env: CONDA_ENV=py36-bottleneck-dev - env: CONDA_ENV=py36-rasterio - env: CONDA_ENV=py36-zarr-dev - env: CONDA_ENV=docs @@ -31,7 +30,6 @@ matrix: - CONDA_ENV=py36 - EXTRA_FLAGS="--run-flaky --run-network-tests" - env: CONDA_ENV=py36-pandas-dev - - env: CONDA_ENV=py36-bottleneck-dev - env: CONDA_ENV=py36-zarr-dev before_install: diff --git a/ci/requirements-py36-bottleneck-dev.yml b/ci/requirements-py36-bottleneck-dev.yml deleted file mode 100644 index 3f08648be32..00000000000 --- a/ci/requirements-py36-bottleneck-dev.yml +++ /dev/null @@ -1,24 +0,0 @@ -name: test_env -channels: - - conda-forge -dependencies: - - python=3.6 - - cftime - - dask - - distributed - - h5py - - h5netcdf - - matplotlib - - netcdf4 - - pytest - - pytest-cov - - pytest-env - - coveralls - - flake8 - - numpy - - pandas - - scipy - - seaborn - - toolz - - pip: - - git+https://github.com/kwgoodman/bottleneck.git From b59a930331f7b0894f21e8508fd0a9dde25072e1 Mon Sep 17 00:00:00 2001 From: Joseph Hamman Date: Sun, 28 Jul 2019 15:36:05 -0700 Subject: [PATCH 2/4] Use entrypoints to specify backend stores --- setup.py | 14 ++++++- xarray/backends/api.py | 73 +++++++++++++++-------------------- xarray/tests/test_backends.py | 2 +- 3 files changed, 45 insertions(+), 44 deletions(-) diff --git a/setup.py b/setup.py index 6998f001f65..a144cfc8b8a 100644 --- a/setup.py +++ b/setup.py @@ -88,6 +88,17 @@ - SciPy2015 talk: https://www.youtube.com/watch?v=X0pAhJgySxk """ # noqa +ENTRY_POINTS = { + 'xarray.backends': [ + 'netcdf4 = xarray.backends:NetCDF4DataStore.open', + 'scipy = xarray.backends:ScipyDataStore', + 'pydap = xarray.backends:PydapDataStore.open', + 'h5netcdf = xarray.backends:H5NetCDFStore', + 'pynio = xarray.backends:NioDataStore', + 'pseudonetcdf = xarray.backends:PseudoNetCDFDataStore.open', + 'cfgrib = xarray.backends:CfGribDataStore', + ] +} setup(name=DISTNAME, version=versioneer.get_version(), @@ -104,4 +115,5 @@ tests_require=TESTS_REQUIRE, url=URL, packages=find_packages(), - package_data={'xarray': ['tests/data/*']}) + package_data={'xarray': ['tests/data/*']}, + entry_points=ENTRY_POINTS) diff --git a/xarray/backends/api.py b/xarray/backends/api.py index e0f269eb51f..25c493b3430 100644 --- a/xarray/backends/api.py +++ b/xarray/backends/api.py @@ -129,6 +129,17 @@ def _get_default_engine(path, allow_remote=False): return engine +def _get_backend_cls(engine): + import entrypoints + try: + return entrypoints.get_single('xarray.backends', engine).load() + except entrypoints.NoSuchEntryPoint: + all_entrypoints = entrypoints.get_group_named('xarray.backends') + raise ValueError('unrecognized engine for open_dataset: {}\n' + 'must be one of: {}' + .format(engine, list(all_entrypoints.keys()))) + + def _normalize_path(path): if is_remote_uri(path): return path @@ -352,12 +363,6 @@ def open_dataset(filename_or_obj, group=None, decode_cf=True, -------- 
open_mfdataset """ - engines = [None, 'netcdf4', 'scipy', 'pydap', 'h5netcdf', 'pynio', - 'cfgrib', 'pseudonetcdf'] - if engine not in engines: - raise ValueError('unrecognized engine for open_dataset: {}\n' - 'must be one of: {}' - .format(engine, engines)) if autoclose is not None: warnings.warn( @@ -382,6 +387,7 @@ def open_dataset(filename_or_obj, group=None, decode_cf=True, if backend_kwargs is None: backend_kwargs = {} + extra_kwargs = {} def maybe_decode_store(store, lock=False): ds = conventions.decode_cf( @@ -417,44 +423,27 @@ def maybe_decode_store(store, lock=False): if isinstance(filename_or_obj, AbstractDataStore): store = filename_or_obj + else: + if isinstance(filename_or_obj, str): + filename_or_obj = _normalize_path(filename_or_obj) - elif isinstance(filename_or_obj, str): - filename_or_obj = _normalize_path(filename_or_obj) - - if engine is None: - engine = _get_default_engine(filename_or_obj, - allow_remote=True) - if engine == 'netcdf4': - store = backends.NetCDF4DataStore.open( - filename_or_obj, group=group, lock=lock, **backend_kwargs) - elif engine == 'scipy': - store = backends.ScipyDataStore(filename_or_obj, **backend_kwargs) - elif engine == 'pydap': - store = backends.PydapDataStore.open( - filename_or_obj, **backend_kwargs) - elif engine == 'h5netcdf': - store = backends.H5NetCDFStore( - filename_or_obj, group=group, lock=lock, **backend_kwargs) - elif engine == 'pynio': - store = backends.NioDataStore( - filename_or_obj, lock=lock, **backend_kwargs) - elif engine == 'pseudonetcdf': - store = backends.PseudoNetCDFDataStore.open( - filename_or_obj, lock=lock, **backend_kwargs) - elif engine == 'cfgrib': - store = backends.CfGribDataStore( - filename_or_obj, lock=lock, **backend_kwargs) + if engine is None: + engine = _get_default_engine(filename_or_obj, allow_remote=True) - else: - if engine not in [None, 'scipy', 'h5netcdf']: - raise ValueError("can only read bytes or file-like objects " - "with engine='scipy' or 'h5netcdf'") - engine = _get_engine_from_magic_number(filename_or_obj) - if engine == 'scipy': - store = backends.ScipyDataStore(filename_or_obj, **backend_kwargs) - elif engine == 'h5netcdf': - store = backends.H5NetCDFStore(filename_or_obj, group=group, - lock=lock, **backend_kwargs) + else: + if engine not in [None, 'scipy', 'h5netcdf']: + raise ValueError("can only read bytes or file-like objects " + "with engine='scipy' or 'h5netcdf'") + engine = _get_engine_from_magic_number(filename_or_obj) + + if engine in ['netcdf4', 'h5netcdf']: + extra_kwargs['group'] = group + extra_kwargs['lock'] = lock + elif engine in ['pynio', 'pseudonetcdf', 'cfgrib']: + extra_kwargs['lock'] = lock + + opener = _get_backend_cls(engine) + store = opener(filename_or_obj, **backend_kwargs, **extra_kwargs) with close_on_error(store): ds = maybe_decode_store(store) diff --git a/xarray/tests/test_backends.py b/xarray/tests/test_backends.py index 2235be4fbb7..aea3a28599c 100644 --- a/xarray/tests/test_backends.py +++ b/xarray/tests/test_backends.py @@ -2010,7 +2010,7 @@ def test_engine(self): open_dataset(tmp_file, engine='foobar') netcdf_bytes = data.to_netcdf() - with raises_regex(ValueError, 'unrecognized engine'): + with raises_regex(ValueError, 'can only read bytes or file-like'): open_dataset(BytesIO(netcdf_bytes), engine='foobar') def test_cross_engine_read_write_netcdf3(self): From 6a5abae146cb6164b5e41dede1511d71fdefd018 Mon Sep 17 00:00:00 2001 From: Joseph Hamman Date: Sun, 28 Jul 2019 21:58:57 -0700 Subject: [PATCH 3/4] ci and a little lint --- 
ci/requirements/py36-hypothesis.yml | 1 + ci/requirements/py36.yml | 1 + ci/requirements/py37-windows.yml | 1 + ci/requirements/py37.yml | 1 + xarray/backends/api.py | 5 +++-- 5 files changed, 7 insertions(+), 2 deletions(-) diff --git a/ci/requirements/py36-hypothesis.yml b/ci/requirements/py36-hypothesis.yml index f1aad7b39e0..7de0d4f36be 100644 --- a/ci/requirements/py36-hypothesis.yml +++ b/ci/requirements/py36-hypothesis.yml @@ -26,3 +26,4 @@ dependencies: - zarr - pydap - lxml + - entrypoints diff --git a/ci/requirements/py36.yml b/ci/requirements/py36.yml index a8481623f80..17d490774f6 100644 --- a/ci/requirements/py36.yml +++ b/ci/requirements/py36.yml @@ -33,5 +33,6 @@ dependencies: - iris>=1.10 - pydap - lxml + - entrypoints - pip: - mypy==0.711 diff --git a/ci/requirements/py37-windows.yml b/ci/requirements/py37-windows.yml index 748546c75b3..c39944f3efc 100644 --- a/ci/requirements/py37-windows.yml +++ b/ci/requirements/py37-windows.yml @@ -23,3 +23,4 @@ dependencies: - toolz - rasterio - zarr + - entrypoints diff --git a/ci/requirements/py37.yml b/ci/requirements/py37.yml index de7fa3abdc6..a75c4245807 100644 --- a/ci/requirements/py37.yml +++ b/ci/requirements/py37.yml @@ -29,6 +29,7 @@ dependencies: - cfgrib>=0.9.2 - lxml - pydap + - entrypoints - pip: - mypy==0.650 - numbagg diff --git a/xarray/backends/api.py b/xarray/backends/api.py index 25c493b3430..d1c4fe438de 100644 --- a/xarray/backends/api.py +++ b/xarray/backends/api.py @@ -428,12 +428,13 @@ def maybe_decode_store(store, lock=False): filename_or_obj = _normalize_path(filename_or_obj) if engine is None: - engine = _get_default_engine(filename_or_obj, allow_remote=True) + engine = _get_default_engine(filename_or_obj, + allow_remote=True) else: if engine not in [None, 'scipy', 'h5netcdf']: raise ValueError("can only read bytes or file-like objects " - "with engine='scipy' or 'h5netcdf'") + "with engine='scipy' or 'h5netcdf'") engine = _get_engine_from_magic_number(filename_or_obj) if engine in ['netcdf4', 'h5netcdf']: From 77cc998e798f2bcd6906677b97dbc13d5969b22a Mon Sep 17 00:00:00 2001 From: Joseph Hamman Date: Sun, 28 Jul 2019 22:09:48 -0700 Subject: [PATCH 4/4] add entrypoints to docs build --- doc/environment.yml | 1 + 1 file changed, 1 insertion(+) diff --git a/doc/environment.yml b/doc/environment.yml index b2f89bd9f96..7776403baef 100644 --- a/doc/environment.yml +++ b/doc/environment.yml @@ -24,4 +24,5 @@ dependencies: - pillow=5.4.1 - sphinx_rtd_theme=0.4.2 - mock=2.0.0 + - entrypoints=0.3 - pip
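
Note on how an external backend could hook into the entry point group that patch 2 introduces. This is a minimal sketch, not part of the series itself; the package name 'xarray-foo', the module path 'xarray_foo.store', and the class 'FooDataStore' are hypothetical placeholders.

    # setup.py of a hypothetical third-party plugin registering an engine
    # named 'foo' under the 'xarray.backends' entry point group.
    from setuptools import setup, find_packages

    setup(
        name='xarray-foo',
        version='0.1',
        packages=find_packages(),
        entry_points={
            'xarray.backends': [
                # "<engine name> = <module>:<store class or opener>"
                'foo = xarray_foo.store:FooDataStore',
            ],
        },
    )

    # Resolution side, mirroring _get_backend_cls from patch 2: the
    # 'entrypoints' library looks the engine up by name in the group and
    # loads the store class (or opener function) it points at.
    import entrypoints

    opener = entrypoints.get_single('xarray.backends', 'foo').load()
    # open_dataset would then call the loaded opener with the filename
    # plus any backend_kwargs / extra_kwargs it has collected.

With this layout, adding a new engine no longer requires editing the if/elif chain in open_dataset; installing a package that declares an 'xarray.backends' entry point is enough for the engine name to be discoverable, and unknown engines surface in the ValueError listing all registered entry points.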