Skip to content
This repository has been archived by the owner on Oct 7, 2024. It is now read-only.

Commit

Permalink
Fix open_dataarray does not pass properly its parameters to open_data…
Browse files Browse the repository at this point in the history
…set (pydata#1367)

Fixes GH1359
  • Loading branch information
shoyer authored Apr 12, 2017
1 parent f87bb0b commit 7b9b455
Show file tree
Hide file tree
Showing 3 changed files with 23 additions and 11 deletions.
4 changes: 4 additions & 0 deletions doc/whats-new.rst
Original file line number Diff line number Diff line change
Expand Up @@ -29,6 +29,10 @@ Enhancements
Bug fixes
~~~~~~~~~

- Fixed ``open_dataarray`` not passing its parameters properly to
  ``open_dataset`` (:issue:`1359`).
  By `Stephan Hoyer <https://github.com/shoyer>`_.

.. _whats-new.0.9.2:

v0.9.2 (2 April, 2017)
Expand Down
17 changes: 7 additions & 10 deletions xarray/backends/api.py
Original file line number Diff line number Diff line change
Expand Up @@ -318,12 +318,8 @@ def maybe_decode_store(store, lock=False):
return maybe_decode_store(store)


def open_dataarray(filename_or_obj, group=None, decode_cf=True,
mask_and_scale=True, decode_times=True,
concat_characters=True, decode_coords=True, engine=None,
chunks=None, lock=None, cache=None, drop_variables=None):
"""
Opens an DataArray from a netCDF file containing a single data variable.
def open_dataarray(*args, **kwargs):
"""Open an DataArray from a netCDF file containing a single data variable.
This is designed to read netCDF files with only one data variable. If
multiple variables are present then a ValueError is raised.
Expand Down Expand Up @@ -353,6 +349,10 @@ def open_dataarray(filename_or_obj, group=None, decode_cf=True,
decode_times : bool, optional
If True, decode times encoded in the standard NetCDF datetime format
into datetime objects. Otherwise, leave them encoded as numbers.
autoclose : bool, optional
If True, automatically close files to avoid OS Error of too many files
being open. However, this option doesn't work with streams, e.g.,
BytesIO.
concat_characters : bool, optional
If True, concatenate along the last dimension of character arrays to
form string arrays. Dimensions will only be concatenated over (and
Expand Down Expand Up @@ -400,10 +400,7 @@ def open_dataarray(filename_or_obj, group=None, decode_cf=True,
--------
open_dataset
"""
dataset = open_dataset(filename_or_obj, group, decode_cf,
mask_and_scale, decode_times,
concat_characters, decode_coords, engine,
chunks, lock, cache, drop_variables)
dataset = open_dataset(*args, **kwargs)

if len(dataset.data_vars) != 1:
raise ValueError('Given file dataset contains more than one data '
Expand Down
13 changes: 12 additions & 1 deletion xarray/tests/test_backends.py
Original file line number Diff line number Diff line change
Expand Up @@ -1522,7 +1522,7 @@ def new_dataset_and_coord_attrs():
ds.to_netcdf(tmp_file)


@requires_netCDF4
@requires_scipy_or_netCDF4
class TestDataArrayToNetCDF(TestCase):

def test_dataarray_to_netcdf_no_name(self):
Expand Down Expand Up @@ -1554,3 +1554,14 @@ def test_dataarray_to_netcdf_coord_name_clash(self):

with open_dataarray(tmp) as loaded_da:
self.assertDataArrayIdentical(original_da, loaded_da)

def test_open_dataarray_options(self):
    # Regression test for GH1359: keyword options such as drop_variables
    # must be forwarded by open_dataarray to open_dataset.
    original = DataArray(np.arange(5),
                         coords={'y': ('x', range(5))}, dims=['x'])

    with create_tmp_file() as path:
        original.to_netcdf(path)

        # Dropping 'y' on open should yield the array without that coord.
        expected = original.drop('y')
        with open_dataarray(path, drop_variables=['y']) as actual:
            self.assertDataArrayIdentical(expected, actual)

0 comments on commit 7b9b455

Please sign in to comment.