Skip to content
New issue

Have a question about this project? Sign up for a free GitHub account to open an issue and contact its maintainers and the community.

By clicking “Sign up for GitHub”, you agree to our terms of service and privacy statement. We’ll occasionally send you account related emails.

Already on GitHub? Sign in to your account

update pytest config and un-xfail some tests #8246

Merged
merged 10 commits into from
Sep 30, 2023
4 changes: 3 additions & 1 deletion pyproject.toml
Original file line number Diff line number Diff line change
Expand Up @@ -155,7 +155,9 @@ select = [
known-first-party = ["xarray"]

[tool.pytest.ini_options]
addopts = '--strict-markers'
addopts = ["--strict-config", "--strict-markers"]
log_cli_level = "INFO"
minversion = "7"
filterwarnings = [
"ignore:Using a non-tuple sequence for multidimensional indexing is deprecated:FutureWarning",
]
Expand Down
40 changes: 21 additions & 19 deletions xarray/tests/test_backends.py
Original file line number Diff line number Diff line change
Expand Up @@ -714,9 +714,6 @@ def multiple_indexing(indexers):
]
multiple_indexing(indexers5)

@pytest.mark.xfail(
reason="zarr without dask handles negative steps in slices incorrectly",
)
def test_vectorized_indexing_negative_step(self) -> None:
# use dask explicitly when present
open_kwargs: dict[str, Any] | None
Expand Down Expand Up @@ -1842,8 +1839,8 @@ def test_unsorted_index_raises(self) -> None:
# dask first pulls items by block.
pass

@pytest.mark.skip(reason="caching behavior differs for dask")
def test_dataset_caching(self) -> None:
# caching behavior differs for dask
pass

def test_write_inconsistent_chunks(self) -> None:
Expand Down Expand Up @@ -2261,9 +2258,6 @@ def test_encoding_kwarg_fixed_width_string(self) -> None:
# not relevant for zarr, since we don't use EncodedStringCoder
pass

# TODO: someone who understand caching figure out whether caching
# makes sense for Zarr backend
@pytest.mark.xfail(reason="Zarr caching not implemented")
def test_dataset_caching(self) -> None:
super().test_dataset_caching()

Expand Down Expand Up @@ -2712,6 +2706,14 @@ def test_attributes(self, obj) -> None:
with pytest.raises(TypeError, match=r"Invalid attribute in Dataset.attrs."):
ds.to_zarr(store_target, **self.version_kwargs)

def test_vectorized_indexing_negative_step(self) -> None:
    # Without dask, zarr handles negative slice steps incorrectly, so the
    # base-class test is expected to fail in that configuration; with dask
    # present the indexing path is different and the test should pass.
    # NOTE: reconstructed — the rendered diff had review-thread UI text
    # injected into the middle of this call, which broke the syntax.
    if not has_dask:
        pytest.xfail(
            reason="zarr without dask handles negative steps in slices incorrectly"
        )

    super().test_vectorized_indexing_negative_step()


@requires_zarr
class TestZarrDictStore(ZarrBase):
Expand Down Expand Up @@ -3378,8 +3380,8 @@ def roundtrip(
) as ds:
yield ds

@pytest.mark.skip(reason="caching behavior differs for dask")
def test_dataset_caching(self) -> None:
# caching behavior differs for dask
pass

def test_write_inconsistent_chunks(self) -> None:
Expand Down Expand Up @@ -3982,7 +3984,6 @@ def test_open_mfdataset_raise_on_bad_combine_args(self) -> None:
with pytest.raises(ValueError, match="`concat_dim` has no effect"):
open_mfdataset([tmp1, tmp2], concat_dim="x")

@pytest.mark.xfail(reason="mfdataset loses encoding currently.")
def test_encoding_mfdataset(self) -> None:
original = Dataset(
{
Expand Down Expand Up @@ -4195,7 +4196,6 @@ def test_dataarray_compute(self) -> None:
assert computed._in_memory
assert_allclose(actual, computed, decode_bytes=False)

@pytest.mark.xfail
def test_save_mfdataset_compute_false_roundtrip(self) -> None:
from dask.delayed import Delayed

Expand Down Expand Up @@ -5125,15 +5125,17 @@ def test_open_fsspec() -> None:
ds2 = open_dataset(url, engine="zarr")
xr.testing.assert_equal(ds0, ds2)

# multi dataset
url = "memory://out*.zarr"
ds2 = open_mfdataset(url, engine="zarr")
xr.testing.assert_equal(xr.concat([ds, ds0], dim="time"), ds2)

# multi dataset with caching
url = "simplecache::memory://out*.zarr"
ds2 = open_mfdataset(url, engine="zarr")
xr.testing.assert_equal(xr.concat([ds, ds0], dim="time"), ds2)
# open_mfdataset requires dask
if has_dask:
mathause marked this conversation as resolved.
Show resolved Hide resolved
mathause marked this conversation as resolved.
Show resolved Hide resolved
# multi dataset
url = "memory://out*.zarr"
ds2 = open_mfdataset(url, engine="zarr")
xr.testing.assert_equal(xr.concat([ds, ds0], dim="time"), ds2)

# multi dataset with caching
url = "simplecache::memory://out*.zarr"
ds2 = open_mfdataset(url, engine="zarr")
xr.testing.assert_equal(xr.concat([ds, ds0], dim="time"), ds2)


@requires_h5netcdf
Expand Down
1 change: 0 additions & 1 deletion xarray/tests/test_cftimeindex.py
Original file line number Diff line number Diff line change
Expand Up @@ -1135,7 +1135,6 @@ def test_to_datetimeindex_feb_29(calendar):


@requires_cftime
@pytest.mark.xfail(reason="https://github.com/pandas-dev/pandas/issues/24263")
def test_multiindex():
index = xr.cftime_range("2001-01-01", periods=100, calendar="360_day")
mindex = pd.MultiIndex.from_arrays([index])
Expand Down
1 change: 0 additions & 1 deletion xarray/tests/test_options.py
Original file line number Diff line number Diff line change
Expand Up @@ -165,7 +165,6 @@ def test_concat_attr_retention(self) -> None:
result = concat([ds1, ds2], dim="dim1")
assert result.attrs == original_attrs

@pytest.mark.xfail
def test_merge_attr_retention(self) -> None:
da1 = create_test_dataarray_attrs(var="var1")
da2 = create_test_dataarray_attrs(var="var2")
Expand Down
4 changes: 1 addition & 3 deletions xarray/tests/test_rolling.py
Original file line number Diff line number Diff line change
Expand Up @@ -766,9 +766,7 @@ def test_ndrolling_construct(self, center, fill_value, dask) -> None:
)
assert_allclose(actual, expected)

@pytest.mark.xfail(
reason="See https://github.com/pydata/xarray/pull/4369 or docstring"
)
@requires_dask
@pytest.mark.filterwarnings("error")
@pytest.mark.parametrize("ds", (2,), indirect=True)
@pytest.mark.parametrize("name", ("mean", "max"))
Expand Down
13 changes: 9 additions & 4 deletions xarray/tests/test_sparse.py
Original file line number Diff line number Diff line change
Expand Up @@ -147,7 +147,6 @@ def test_variable_property(prop):
],
),
True,
marks=xfail(reason="Coercion to dense"),
),
param(
do("conjugate"),
Expand Down Expand Up @@ -201,7 +200,6 @@ def test_variable_property(prop):
param(
do("reduce", func="sum", dim="x"),
True,
marks=xfail(reason="Coercion to dense"),
),
param(
do("rolling_window", dim="x", window=2, window_dim="x_win"),
Expand All @@ -218,7 +216,7 @@ def test_variable_property(prop):
param(
do("var"), False, marks=xfail(reason="Missing implementation for np.nanvar")
),
param(do("to_dict"), False, marks=xfail(reason="Coercion to dense")),
param(do("to_dict"), False),
(do("where", cond=make_xrvar({"x": 10, "y": 5}) > 0.5), True),
],
ids=repr,
Expand All @@ -237,7 +235,14 @@ def test_variable_method(func, sparse_output):
assert isinstance(ret_s.data, sparse.SparseArray)
assert np.allclose(ret_s.data.todense(), ret_d.data, equal_nan=True)
else:
assert np.allclose(ret_s, ret_d, equal_nan=True)
if func.meth != "to_dict":
assert np.allclose(ret_s, ret_d)
else:
# pop the arrays from the dict
arr_s, arr_d = ret_s.pop("data"), ret_d.pop("data")

assert np.allclose(arr_s, arr_d)
assert ret_s == ret_d


@pytest.mark.parametrize(
Expand Down
24 changes: 11 additions & 13 deletions xarray/tests/test_units.py
Original file line number Diff line number Diff line change
Expand Up @@ -18,6 +18,7 @@
assert_identical,
requires_dask,
requires_matplotlib,
requires_numbagg,
)
from xarray.tests.test_plot import PlotTestCase
from xarray.tests.test_variable import _PAD_XR_NP_ARGS
Expand Down Expand Up @@ -2548,7 +2549,6 @@ def test_univariate_ufunc(self, units, error, dtype):
assert_units_equal(expected, actual)
assert_identical(expected, actual)

@pytest.mark.xfail(reason="needs the type register system for __array_ufunc__")
@pytest.mark.parametrize(
"unit,error",
(
Expand Down Expand Up @@ -3849,23 +3849,21 @@ def test_computation(self, func, variant, dtype):
method("groupby", "x"),
method("groupby_bins", "y", bins=4),
method("coarsen", y=2),
pytest.param(
method("rolling", y=3),
marks=pytest.mark.xfail(
reason="numpy.lib.stride_tricks.as_strided converts to ndarray"
),
),
pytest.param(
method("rolling_exp", y=3),
marks=pytest.mark.xfail(
reason="numbagg functions are not supported by pint"
),
),
method("rolling", y=3),
pytest.param(method("rolling_exp", y=3), marks=requires_numbagg),
method("weighted", xr.DataArray(data=np.linspace(0, 1, 10), dims="y")),
),
ids=repr,
)
def test_computation_objects(self, func, variant, dtype):
if variant == "data":
if func.name == "rolling_exp":
pytest.xfail(reason="numbagg functions are not supported by pint")
elif func.name == "rolling":
pytest.xfail(
reason="numpy.lib.stride_tricks.as_strided converts to ndarray"
)

unit = unit_registry.m

variants = {
Expand Down
45 changes: 29 additions & 16 deletions xarray/tests/test_variable.py
Original file line number Diff line number Diff line change
Expand Up @@ -885,20 +885,10 @@ def test_getitem_error(self):
"mode",
[
"mean",
pytest.param(
"median",
marks=pytest.mark.xfail(reason="median is not implemented by Dask"),
),
pytest.param(
"reflect", marks=pytest.mark.xfail(reason="dask.array.pad bug")
),
"median",
"reflect",
"edge",
pytest.param(
"linear_ramp",
marks=pytest.mark.xfail(
reason="pint bug: https://github.com/hgrecco/pint/issues/1026"
),
),
"linear_ramp",
"maximum",
"minimum",
"symmetric",
Expand Down Expand Up @@ -2345,12 +2335,35 @@ def test_dask_rolling(self, dim, window, center):
assert actual.shape == expected.shape
assert_equal(actual, expected)

@pytest.mark.xfail(
reason="https://github.com/pydata/xarray/issues/6209#issuecomment-1025116203"
)
def test_multiindex(self):
    # Pure delegation to the base-class multiindex test.
    # NOTE(review): this override adds nothing now that its xfail mark was
    # removed — presumably kept as a hook for dask-specific behavior;
    # confirm whether the override can be deleted entirely.
    super().test_multiindex()

# Re-declares the ``mode`` parametrization from the base class so that
# dask-specific expected failures can be attached: "median" is not
# implemented by dask, and "reflect" hits a dask.array.pad bug.  The
# filterwarnings mark silences dask.array.pad's integer-to-float
# conversion warning so it does not trip the strict warning filters.
@pytest.mark.parametrize(
    "mode",
    [
        "mean",
        pytest.param(
            "median",
            marks=pytest.mark.xfail(reason="median is not implemented by Dask"),
        ),
        pytest.param(
            "reflect", marks=pytest.mark.xfail(reason="dask.array.pad bug")
        ),
        "edge",
        "linear_ramp",
        "maximum",
        "minimum",
        "symmetric",
        "wrap",
    ],
)
@pytest.mark.parametrize("xr_arg, np_arg", _PAD_XR_NP_ARGS)
@pytest.mark.filterwarnings(
    r"ignore:dask.array.pad.+? converts integers to floats."
)
def test_pad(self, mode, xr_arg, np_arg):
    # Delegate to the base implementation; only the marks above differ.
    super().test_pad(mode, xr_arg, np_arg)


@requires_sparse
class TestVariableWithSparse:
Expand Down