diff --git a/doc/whats-new.rst b/doc/whats-new.rst
index 2e9d0e365a4..3d82ed9b466 100644
--- a/doc/whats-new.rst
+++ b/doc/whats-new.rst
@@ -29,6 +29,10 @@ Enhancements
 Bug fixes
 ~~~~~~~~~
 
+- Fixed ``open_dataarray`` not passing its parameters properly on to
+  ``open_dataset`` (:issue:`1359`).
+  `Stephan Hoyer <https://github.com/shoyer>`_.
+
 .. _whats-new.0.9.2:
 
 v0.9.2 (2 April, 2017)
diff --git a/xarray/backends/api.py b/xarray/backends/api.py
index f1dbcecd340..cb6eae38754 100644
--- a/xarray/backends/api.py
+++ b/xarray/backends/api.py
@@ -318,12 +318,8 @@ def maybe_decode_store(store, lock=False):
     return maybe_decode_store(store)
 
 
-def open_dataarray(filename_or_obj, group=None, decode_cf=True,
-                   mask_and_scale=True, decode_times=True,
-                   concat_characters=True, decode_coords=True, engine=None,
-                   chunks=None, lock=None, cache=None, drop_variables=None):
-    """
-    Opens an DataArray from a netCDF file containing a single data variable.
+def open_dataarray(*args, **kwargs):
+    """Open a DataArray from a netCDF file containing a single data variable.
 
     This is designed to read netCDF files with only one data variable. If
     multiple variables are present then a ValueError is raised.
@@ -353,6 +349,10 @@ def open_dataarray(filename_or_obj, group=None, decode_cf=True,
     decode_times : bool, optional
         If True, decode times encoded in the standard NetCDF datetime format
         into datetime objects. Otherwise, leave them encoded as numbers.
+    autoclose : bool, optional
+        If True, automatically close files to avoid OS Error of too many files
+        being open. However, this option doesn't work with streams, e.g.,
+        BytesIO.
     concat_characters : bool, optional
         If True, concatenate along the last dimension of character arrays to
         form string arrays. Dimensions will only be concatenated over (and
@@ -400,10 +400,7 @@ def open_dataarray(filename_or_obj, group=None, decode_cf=True,
     --------
     open_dataset
     """
-    dataset = open_dataset(filename_or_obj, group, decode_cf,
-                           mask_and_scale, decode_times,
-                           concat_characters, decode_coords, engine,
-                           chunks, lock, cache, drop_variables)
+    dataset = open_dataset(*args, **kwargs)
 
     if len(dataset.data_vars) != 1:
         raise ValueError('Given file dataset contains more than one data '
diff --git a/xarray/tests/test_backends.py b/xarray/tests/test_backends.py
index 0e73b710663..e7a2dbd4381 100644
--- a/xarray/tests/test_backends.py
+++ b/xarray/tests/test_backends.py
@@ -1522,7 +1522,7 @@ def new_dataset_and_coord_attrs():
             ds.to_netcdf(tmp_file)
 
 
-@requires_netCDF4
+@requires_scipy_or_netCDF4
 class TestDataArrayToNetCDF(TestCase):
 
     def test_dataarray_to_netcdf_no_name(self):
@@ -1554,3 +1554,14 @@ def test_dataarray_to_netcdf_coord_name_clash(self):
 
             with open_dataarray(tmp) as loaded_da:
                 self.assertDataArrayIdentical(original_da, loaded_da)
+
+    def test_open_dataarray_options(self):
+        data = DataArray(
+            np.arange(5), coords={'y': ('x', range(5))}, dims=['x'])
+
+        with create_tmp_file() as tmp:
+            data.to_netcdf(tmp)
+
+            expected = data.drop('y')
+            with open_dataarray(tmp, drop_variables=['y']) as loaded:
+                self.assertDataArrayIdentical(expected, loaded)
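
A minimal usage sketch (not part of the patch) of the behaviour the new test exercises: keyword arguments given to open_dataarray are now forwarded unchanged to open_dataset, so options such as drop_variables take effect when reading a single-variable file. The temporary file path below is illustrative only.

    import os
    import tempfile

    import numpy as np
    import xarray as xr

    # A DataArray with a non-dimension coordinate 'y' along dimension 'x'.
    data = xr.DataArray(np.arange(5), coords={'y': ('x', np.arange(5))},
                        dims=['x'])

    # Write it to a temporary netCDF file (the path is arbitrary).
    path = os.path.join(tempfile.mkdtemp(), 'single_var.nc')
    data.to_netcdf(path)

    # drop_variables is passed straight through to open_dataset, so the
    # 'y' coordinate is dropped while the file is being read.
    with xr.open_dataarray(path, drop_variables=['y']) as loaded:
        assert 'y' not in loaded.coords
        assert loaded.identical(data.drop('y'))

Forwarding *args/**kwargs keeps open_dataarray's behaviour in lockstep with open_dataset, at the cost of no longer spelling out the accepted parameters in its own signature; the docstring (including the newly documented autoclose option) carries that information instead.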