diff --git a/doc/whats-new.rst b/doc/whats-new.rst
index 4d6bf2a0507..532074ba41f 100644
--- a/doc/whats-new.rst
+++ b/doc/whats-new.rst
@@ -23,6 +23,7 @@ Enhancements
   thread lock by default for reading from netCDF files. This avoids possible
   segmentation faults for reading from netCDF4 files when HDF5 is not
   configured properly for concurrent access (:issue:`444`).
+- Added support for serializing arrays of complex numbers with `engine='h5netcdf'`.
 - The new :py:func:`~xray.save_mfdataset` function allows for saving multiple
   datasets to disk simultaneously. This is useful when processing large datasets
   with dask.array. For example, to save a dataset too big to fit into memory
diff --git a/xray/backends/netCDF4_.py b/xray/backends/netCDF4_.py
index 7fb61f38cd7..4cdb65c2fb6 100644
--- a/xray/backends/netCDF4_.py
+++ b/xray/backends/netCDF4_.py
@@ -57,12 +57,14 @@ def _nc4_values_and_dtype(var):
         if len(var) > 0:
             var = var.astype('O')
         dtype = str
-    elif var.dtype.kind in ['i', 'u', 'f', 'S']:
+    elif var.dtype.kind == 'S':
         # use character arrays instead of unicode, because unicode suppot in
         # netCDF4 is still rather buggy
         data, dims = maybe_convert_to_char_array(var.data, var.dims)
         var = Variable(dims, data, var.attrs, var.encoding)
         dtype = var.dtype
+    elif var.dtype.kind in ['i', 'u', 'f', 'c']:
+        dtype = var.dtype
     else:
         raise ValueError('cannot infer dtype for netCDF4 variable')
     return var, dtype
diff --git a/xray/test/test_backends.py b/xray/test/test_backends.py
index 0dbfb0d7c00..5288d86dc90 100644
--- a/xray/test/test_backends.py
+++ b/xray/test/test_backends.py
@@ -699,6 +699,11 @@ def test_orthogonal_indexing(self):
         # doesn't work for h5py (without using dask as an intermediate layer)
         pass
 
+    def test_complex(self):
+        expected = Dataset({'x': ('y', np.ones(5) + 1j * np.ones(5))})
+        with self.roundtrip(expected) as actual:
+            self.assertDatasetEqual(expected, actual)
+
 
 @requires_dask
 @requires_netCDF4
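A minimal usage sketch of what the patch enables, for reviewers who want to try it by hand: it assumes this branch plus h5py and h5netcdf are installed, and the file name `complex.nc` is purely illustrative, not part of the change::

    import numpy as np
    import xray

    # complex-valued variables round-trip through the h5netcdf engine,
    # because dtype kind 'c' is now accepted by _nc4_values_and_dtype
    ds = xray.Dataset({'x': ('y', np.ones(5) + 1j * np.ones(5))})
    ds.to_netcdf('complex.nc', engine='h5netcdf')
    roundtripped = xray.open_dataset('complex.nc', engine='h5netcdf')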