Fix missing config errors when running test suite #1365

Merged 3 commits on Apr 12, 2017.
2 changes: 0 additions & 2 deletions conftest.py
@@ -5,5 +5,3 @@ def pytest_addoption(parser):
     """Add command-line flags for pytest."""
     parser.addoption("--run-flaky", action="store_true",
                      help="runs flaky tests")
-    parser.addoption("--skip-slow", action="store_true",
-                     help="skips slow tests")
5 changes: 5 additions & 0 deletions doc/whats-new.rst
@@ -33,6 +33,11 @@ Bug fixes
   (:issue:`1359`).
   `Stephan Hoyer <https://github.com/shoyer>`_.
 
+- Ensure the test suite works when run from an installed version of xarray
+  (:issue:`1336`). Use ``@pytest.mark.slow`` instead of a custom flag to mark
+  slow tests.
+  By `Stephan Hoyer <https://github.com/shoyer>`_
+
 .. _whats-new.0.9.2:
 
 v0.9.2 (2 April, 2017)
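
The changelog entry above captures the substance of the fix: a plain ``@pytest.mark.slow`` marker needs no command-line plumbing from ``conftest.py``, so it keeps working when the suite runs from an installed copy of xarray. A minimal sketch of that behavior (file and test names are illustrative, not taken from this PR):

    # test_slow_sketch.py -- illustrative only; not part of the xarray suite.
    import time

    import pytest


    @pytest.mark.slow
    def test_expensive():
        # The mark merely tags the test; marked tests still run by default.
        time.sleep(1)
        assert sum(range(1000)) == 499500


    def test_cheap():
        assert 1 + 1 == 2

Running ``pytest -m "not slow"`` deselects ``test_expensive`` while ``test_cheap`` still runs, with no custom ``--skip-slow`` flag to register and therefore nothing to break in an installed package.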
65 changes: 26 additions & 39 deletions xarray/tests/__init__.py
@@ -78,49 +78,36 @@
 
 # slightly simpler construction than the full functions.
 # Generally `pytest.importorskip('package')` inline is even easier
-requires_matplotlib = pytest.mark.skipif(not has_matplotlib, reason='requires matplotlib')
+requires_matplotlib = pytest.mark.skipif(
+    not has_matplotlib, reason='requires matplotlib')
+requires_scipy = pytest.mark.skipif(
+    not has_scipy, reason='requires scipy')
+requires_pydap = pytest.mark.skipif(
+    not has_pydap, reason='requires pydap')
+requires_netCDF4 = pytest.mark.skipif(
+    not has_netCDF4, reason='requires netCDF4')
+requires_h5netcdf = pytest.mark.skipif(
+    not has_h5netcdf, reason='requires h5netcdf')
+requires_pynio = pytest.mark.skipif(
+    not has_pynio, reason='requires pynio')
+requires_scipy_or_netCDF4 = pytest.mark.skipif(
+    not has_scipy and not has_netCDF4, reason='requires scipy or netCDF4')
+requires_dask = pytest.mark.skipif(
+    not has_dask, reason='requires dask')
+requires_bottleneck = pytest.mark.skipif(
+    not has_bottleneck, reason='requires bottleneck')
 
 
-def requires_scipy(test):
-    return test if has_scipy else pytest.mark.skip('requires scipy')(test)
-
-
-def requires_pydap(test):
-    return test if has_pydap else pytest.mark.skip('requires pydap.client')(test)
-
-
-def requires_netCDF4(test):
-    return test if has_netCDF4 else pytest.mark.skip('requires netCDF4')(test)
-
-
-def requires_h5netcdf(test):
-    return test if has_h5netcdf else pytest.mark.skip('requires h5netcdf')(test)
-
-
-def requires_pynio(test):
-    return test if has_pynio else pytest.mark.skip('requires pynio')(test)
-
-
-def requires_scipy_or_netCDF4(test):
-    return (test if has_scipy or has_netCDF4
-            else pytest.mark.skip('requires scipy or netCDF4')(test))
-
-
-def requires_dask(test):
-    return test if has_dask else pytest.mark.skip('requires dask')(test)
-
-
-def requires_bottleneck(test):
-    return test if has_bottleneck else pytest.mark.skip('requires bottleneck')(test)
 
+try:
+    _SKIP_FLAKY = not pytest.config.getoption("--run-flaky")
+except ValueError:
+    # Can't get config from pytest, e.g., because xarray is installed instead
+    # of being run from a development version (and hence conftest.py is not
+    # available). Don't run flaky tests.
+    _SKIP_FLAKY = True

Review comment (Contributor): Nice solution!

 flaky = pytest.mark.skipif(
-    not pytest.config.getoption("--run-flaky"),
-    reason="set --run-flaky option to run flaky tests")
-
-slow = pytest.mark.skipif(
-    pytest.config.getoption("--skip-slow"),
-    reason="set --skip-slow option to run slow tests")
+    _SKIP_FLAKY, reason="set --run-flaky option to run flaky tests")
 
 
 class TestCase(unittest.TestCase):
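
The decisive fix in this file is the ``try``/``except ValueError`` block: when xarray runs from an installed copy, the repository's ``conftest.py`` is absent, the ``--run-flaky`` option is never registered, and ``pytest.config.getoption`` raises ``ValueError``; catching it lets the module default to skipping flaky tests instead of failing at import time. (``pytest.config`` was a global on the pytest versions of that era and has since been removed from pytest.) The module-level ``skipif`` markers that replace the wrapper functions follow a pattern that is still idiomatic; a condensed, self-contained sketch, showing only two of the probed modules:

    # Condensed sketch of the availability-marker pattern adopted in this diff.
    import importlib

    import pytest


    def _importable(name):
        """Return True if the named module can be imported."""
        try:
            importlib.import_module(name)
            return True
        except ImportError:
            return False


    has_scipy = _importable('scipy')
    has_dask = _importable('dask')

    # One reusable marker per optional dependency, applied as a decorator.
    requires_scipy = pytest.mark.skipif(not has_scipy, reason='requires scipy')
    requires_dask = pytest.mark.skipif(not has_dask, reason='requires dask')


    @requires_scipy
    def test_uses_scipy():
        import scipy  # only reached when scipy is installed
        assert scipy.__name__ == 'scipy'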
10 changes: 5 additions & 5 deletions xarray/tests/test_backends.py
@@ -27,7 +27,7 @@
 from . import (TestCase, requires_scipy, requires_netCDF4, requires_pydap,
                requires_scipy_or_netCDF4, requires_dask, requires_h5netcdf,
                requires_pynio, has_netCDF4, has_scipy, assert_allclose,
-               flaky, slow)
+               flaky)
 from .test_dataset import create_test_data
 
 try:

@@ -1118,21 +1118,21 @@ def test_4_autoclose_h5netcdf(self):
     @requires_dask
     @requires_netCDF4
     @flaky
-    @slow
+    @pytest.mark.slow
     def test_1_open_large_num_files_netcdf4(self):
         self.validate_open_mfdataset_large_num_files(engine=['netcdf4'])
 
     @requires_dask
     @requires_scipy
     @flaky
-    @slow
+    @pytest.mark.slow
     def test_2_open_large_num_files_scipy(self):
         self.validate_open_mfdataset_large_num_files(engine=['scipy'])
 
     @requires_dask
     @requires_pynio
     @flaky
-    @slow
+    @pytest.mark.slow
     def test_3_open_large_num_files_pynio(self):
         self.validate_open_mfdataset_large_num_files(engine=['pynio'])

@@ -1142,7 +1142,7 @@ def test_3_open_large_num_files_pynio(self):
 
     @requires_h5netcdf
     @flaky
     @pytest.mark.xfail
-    @slow
+    @pytest.mark.slow
     def test_4_open_large_num_files_h5netcdf(self):
         self.validate_open_mfdataset_large_num_files(engine=['h5netcdf'])
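
As the decorator stacks above show, these markers compose: one test can require an optional backend, be tagged flaky, and be tagged slow at the same time, with each decorator contributing its own skip or selection rule independently. A toy version of the stacking (the probe value and names are illustrative, not the real xarray definitions):

    # Sketch: stacking dependency, flaky, and slow markers on a single test.
    import pytest

    has_dask = False  # stand-in for a real import probe

    requires_dask = pytest.mark.skipif(not has_dask, reason='requires dask')
    flaky = pytest.mark.skipif(
        True, reason='set --run-flaky option to run flaky tests')


    @requires_dask
    @flaky
    @pytest.mark.slow
    def test_open_many_files():
        # Skipped twice over here (no dask, flaky disabled), and also
        # deselectable with `pytest -m "not slow"`.
        assert True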
27 changes: 14 additions & 13 deletions xarray/tests/test_dataset.py
@@ -3342,21 +3342,21 @@ def test_dir_unicode(data_set):
 @pytest.fixture(params=[1])
 def ds(request):
     if request.param == 1:
-        return Dataset({'z1': (['y', 'x'], np.random.randn(5, 20)),
-                        'z2': (['time', 'y'], np.random.randn(30, 5))},
-                       {'x': ('x', np.linspace(0, 1.0, 20)),
-                        'time': ('time', np.linspace(0, 1.0, 30)),
-                        'c': ('y', ['a', 'b', 'c', 'd', 'e']),
-                        'y': range(5)})
+        return Dataset({'z1': (['y', 'x'], np.random.randn(2, 8)),
+                        'z2': (['time', 'y'], np.random.randn(10, 2))},
+                       {'x': ('x', np.linspace(0, 1.0, 8)),
+                        'time': ('time', np.linspace(0, 1.0, 10)),
+                        'c': ('y', ['a', 'b']),
+                        'y': range(2)})
 
     if request.param == 2:
-        return Dataset({'z1': (['time', 'y'], np.random.randn(30, 5)),
-                        'z2': (['time'], np.random.randn(30)),
-                        'z3': (['x', 'time'], np.random.randn(20, 30))},
-                       {'x': ('x', np.linspace(0, 1.0, 20)),
-                        'time': ('time', np.linspace(0, 1.0, 30)),
-                        'c': ('y', ['a', 'b', 'c', 'd', 'e']),
-                        'y': range(5)})
+        return Dataset({'z1': (['time', 'y'], np.random.randn(10, 2)),
+                        'z2': (['time'], np.random.randn(10)),
+                        'z3': (['x', 'time'], np.random.randn(8, 10))},
+                       {'x': ('x', np.linspace(0, 1.0, 8)),
+                        'time': ('time', np.linspace(0, 1.0, 10)),
+                        'c': ('y', ['a', 'b']),
+                        'y': range(2)})
 
 
 def test_rolling_properties(ds):

@@ -3440,6 +3440,7 @@ def test_rolling_pandas_compat(center, window, min_periods):
                          ds_rolling['index'])
 
 
+@pytest.mark.slow
 @pytest.mark.parametrize('ds', (1, 2), indirect=True)
 @pytest.mark.parametrize('center', (True, False))
 @pytest.mark.parametrize('min_periods', (None, 1, 2, 3))
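
The ``ds`` fixture shrunk above is driven by indirect parametrization: ``@pytest.mark.parametrize('ds', (1, 2), indirect=True)`` routes each value into the fixture as ``request.param``, so a single test body exercises both dataset layouts. A stripped-down sketch of the mechanism, with toy dictionaries standing in for ``Dataset``:

    # Sketch of the indirect-parametrization pattern used by the ds fixture.
    import pytest


    @pytest.fixture
    def ds(request):
        # With indirect=True, each parametrize value arrives as request.param.
        if request.param == 1:
            return {'layout': 'wide'}
        return {'layout': 'long'}


    @pytest.mark.parametrize('ds', (1, 2), indirect=True)
    def test_both_layouts(ds):
        assert ds['layout'] in ('wide', 'long')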