Bump dependencies incl pandas>=2 #8968

Merged · 8 commits · Apr 27, 2024
ci/requirements/bare-minimum.yml (4 changes: 2 additions & 2 deletions)

@@ -12,5 +12,5 @@ dependencies:
   - pytest-xdist
   - pytest-timeout
   - numpy=1.23
-  - packaging=22.0
-  - pandas=1.5
+  - packaging=23.1
+  - pandas=2.0
ci/requirements/min-all-deps.yml (22 changes: 11 additions & 11 deletions)

@@ -9,13 +9,13 @@ dependencies:
   # doc/user-guide/installing.rst, doc/user-guide/plotting.rst and setup.py.
   - python=3.9
   - array-api-strict=1.0  # dependency for testing the array api compat
-  - boto3=1.24
+  - boto3=1.26
   - bottleneck=1.3
   - cartopy=0.21
   - cftime=1.6
   - coveralls
-  - dask-core=2022.12
-  - distributed=2022.12
+  - dask-core=2023.4
+  - distributed=2023.4
   # Flox > 0.8 has a bug with numbagg versions
   # It will require numbagg > 0.6
   # so we should just skip that series eventually
@@ -25,24 +25,24 @@ dependencies:
   # h5py and hdf5 tend to cause conflicts
   # for e.g. hdf5 1.12 conflicts with h5py=3.1
   # prioritize bumping other packages instead
-  - h5py=3.7
+  - h5py=3.8
   - hdf5=1.12
   - hypothesis
   - iris=3.4
   - lxml=4.9  # Optional dep of pydap
-  - matplotlib-base=3.6
+  - matplotlib-base=3.7
   - nc-time-axis=1.4
   # netcdf follows a 1.major.minor[.patch] convention
   # (see https://github.com/Unidata/netcdf4-python/issues/1090)
   - netcdf4=1.6.0
   - numba=0.56
   - numbagg=0.2.1
   - numpy=1.23
-  - packaging=22.0
-  - pandas=1.5
+  - packaging=23.1
+  - pandas=2.0
   - pint=0.22
   - pip
-  - pydap=3.3
+  - pydap=3.4
   - pytest
   - pytest-cov
   - pytest-env
@@ -51,7 +51,7 @@ dependencies:
   - rasterio=1.3
   - scipy=1.10
   - seaborn=0.12
-  - sparse=0.13
+  - sparse=0.14
   - toolz=0.12
-  - typing_extensions=4.4
-  - zarr=2.13
+  - typing_extensions=4.5
+  - zarr=2.14
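These pins are what the minimum-dependency CI job installs. As a side note, not part of the PR, the exact pins can be read back programmatically from the file shown above; a minimal sketch, assuming PyYAML is available:

# Minimal sketch, not part of this PR: list the exact version pins from the
# min-all-deps environment file shown above. Requires PyYAML.
import yaml

with open("ci/requirements/min-all-deps.yml") as f:
    env = yaml.safe_load(f)

pins = {}
for dep in env["dependencies"]:
    if isinstance(dep, str) and "=" in dep:
        name, _, version = dep.partition("=")
        pins[name] = version

print(pins.get("pandas"))  # expected to print "2.0" after this change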
doc/whats-new.rst (17 changes: 17 additions & 0 deletions)

@@ -35,6 +35,23 @@ Breaking changes
 - The PyNIO backend has been deleted (:issue:`4491`, :pull:`7301`).
   By `Deepak Cherian <https://github.com/dcherian>`_.

+- The minimum versions of some dependencies were changed; in particular, our minimum supported pandas version is now pandas 2.0.
+
+  ===================== ========= =======
+  Package               Old       New
+  ===================== ========= =======
+  dask-core             2022.12   2023.4
+  distributed           2022.12   2023.4
+  h5py                  3.7       3.8
+  matplotlib-base       3.6       3.7
+  packaging             22.0      23.1
+  pandas                1.5       2.0
+  pydap                 3.3       3.4
+  sparse                0.13      0.14
+  typing_extensions     4.4       4.5
+  zarr                  2.13      2.14
+  ===================== ========= =======
+

 Bug fixes
 ~~~~~~~~~
pyproject.toml (4 changes: 2 additions & 2 deletions)

@@ -24,8 +24,8 @@ requires-python = ">=3.9"

 dependencies = [
   "numpy>=1.23",
-  "packaging>=22",
-  "pandas>=1.5",
+  "packaging>=23.1",
+  "pandas>=2.0",
 ]

 [project.optional-dependencies]
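These are the runtime floors enforced at install time. A quick check that an existing environment already satisfies the new minimums, shown as a sketch for illustration rather than anything shipped by this PR:

# Sketch, not part of this PR: confirm an installed environment satisfies the
# new runtime minimums declared in pyproject.toml.
from importlib.metadata import version
from packaging.version import Version

minimums = {"numpy": "1.23", "packaging": "23.1", "pandas": "2.0"}
for name, floor in minimums.items():
    installed = Version(version(name))
    assert installed >= Version(floor), f"{name} {installed} is older than required {floor}"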
xarray/tests/__init__.py (5 changes: 0 additions & 5 deletions)

@@ -141,11 +141,6 @@ def _importorskip(
 requires_numbagg_or_bottleneck = pytest.mark.skipif(
     not has_scipy_or_netCDF4, reason="requires scipy or netCDF4"
 )
-# _importorskip does not work for development versions
-has_pandas_version_two = Version(pd.__version__).major >= 2
-requires_pandas_version_two = pytest.mark.skipif(
-    not has_pandas_version_two, reason="requires pandas 2.0.0"
-)
 has_numpy_array_api, requires_numpy_array_api = _importorskip("numpy", "1.26.0")
 has_h5netcdf_ros3, requires_h5netcdf_ros3 = _importorskip("h5netcdf", "1.3.0")

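With pandas >= 2.0 guaranteed by the install requirements, the dedicated pandas-2 guard is no longer needed. Version-gated skips continue to go through the _importorskip helper visible above; a rough sketch of that pattern follows (the pandas 3.0.0 bound is purely illustrative, not something this PR adds):

# Rough sketch, not code from this PR: _importorskip returns a
# (has_x, requires_x) pair keyed on an optional minimum version.
# The "3.0.0" bound below is illustrative only.
from xarray.tests import _importorskip

has_pandas_3, requires_pandas_3 = _importorskip("pandas", "3.0.0")

@requires_pandas_3
def test_needs_pandas_3() -> None:
    ...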
xarray/tests/test_dataset.py (2 changes: 0 additions & 2 deletions)

@@ -61,7 +61,6 @@
     requires_cupy,
     requires_dask,
     requires_numexpr,
-    requires_pandas_version_two,
     requires_pint,
     requires_scipy,
     requires_sparse,
@@ -3431,7 +3430,6 @@ def test_expand_dims_kwargs_python36plus(self) -> None:
         )
         assert_identical(other_way_expected, other_way)

-    @requires_pandas_version_two
     def test_expand_dims_non_nanosecond_conversion(self) -> None:
         # Regression test for https://github.com/pydata/xarray/issues/7493#issuecomment-1953091000
         with pytest.warns(UserWarning, match="non-nanosecond precision"):
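Dropping the decorator means the non-nanosecond regression test now runs unconditionally. For illustration only, a sketch of the behavior it exercises under pandas >= 2 (not code from this PR):

# Illustrative sketch, not code from this PR: a non-nanosecond datetime used
# as a new coordinate warns and is coerced to datetime64[ns].
import numpy as np
import pytest
import xarray as xr

with pytest.warns(UserWarning, match="non-nanosecond precision"):
    ds = xr.Dataset().expand_dims(time=[np.datetime64("2018-01-01", "s")])
assert ds["time"].dtype == np.dtype("datetime64[ns]")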
xarray/tests/test_groupby.py (7 changes: 2 additions & 5 deletions)

@@ -22,7 +22,6 @@
     create_test_data,
     has_cftime,
     has_flox,
-    has_pandas_version_two,
     requires_dask,
     requires_flox,
     requires_scipy,
@@ -93,7 +92,7 @@ def test_groupby_sizes_property(dataset) -> None:
     assert dataset.groupby("x").sizes == dataset.isel(x=1).sizes
     with pytest.warns(UserWarning, match="The `squeeze` kwarg"):
         assert dataset.groupby("y").sizes == dataset.isel(y=1).sizes
-    dataset = dataset.drop("cat")
+    dataset = dataset.drop_vars("cat")
     stacked = dataset.stack({"xy": ("x", "y")})
     with pytest.warns(UserWarning, match="The `squeeze` kwarg"):
         assert stacked.groupby("xy").sizes == stacked.isel(xy=0).sizes
@@ -2172,7 +2171,6 @@ def test_upsample_interpolate_dask(self, chunked_time: bool) -> None:
         # done here due to floating point arithmetic
         assert_allclose(expected, actual, rtol=1e-16)

-    @pytest.mark.skipif(has_pandas_version_two, reason="requires pandas < 2.0.0")
     def test_resample_base(self) -> None:
         times = pd.date_range("2000-01-01T02:03:01", freq="6h", periods=10)
         array = DataArray(np.arange(10), [("time", times)])
@@ -2204,11 +2202,10 @@ def test_resample_origin(self) -> None:
         expected = DataArray(array.to_series().resample("24h", origin=origin).mean())
         assert_identical(expected, actual)

-    @pytest.mark.skipif(has_pandas_version_two, reason="requires pandas < 2.0.0")
     @pytest.mark.parametrize(
         "loffset",
         [
-            "-12H",
+            "-12h",
             datetime.timedelta(hours=-12),
             pd.Timedelta(hours=-12),
             pd.DateOffset(hours=-12),
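Two incidental updates ride along with the decorator removals: Dataset.drop is replaced by the non-deprecated drop_vars, and the loffset case switches to the lowercase hour alias that newer pandas prefers. A minimal illustration of the two spellings (not code from this PR):

# Minimal illustration, not code from this PR: the non-deprecated drop_vars
# spelling and the lowercase hour frequency alias preferred by newer pandas.
import pandas as pd
import xarray as xr

ds = xr.Dataset({"cat": ("x", [1, 2, 3])})
ds = ds.drop_vars("cat")  # preferred over the deprecated ds.drop("cat")

# uppercase "6H" is deprecated in recent pandas in favor of "6h"
times = pd.date_range("2000-01-01", periods=4, freq="6h")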
xarray/tests/test_variable.py (18 changes: 6 additions & 12 deletions)

@@ -36,12 +36,10 @@
     assert_equal,
     assert_identical,
     assert_no_warnings,
-    has_pandas_version_two,
     raise_if_dask_computes,
     requires_bottleneck,
     requires_cupy,
     requires_dask,
-    requires_pandas_version_two,
     requires_pint,
     requires_sparse,
     source_ndarray,
@@ -2645,7 +2643,6 @@ def test_datetime(self):
         assert np.ndarray == type(actual)
         assert np.dtype("datetime64[ns]") == actual.dtype

-    @requires_pandas_version_two
     def test_tz_datetime(self) -> None:
         tz = pytz.timezone("America/New_York")
         times_ns = pd.date_range("2000", periods=1, tz=tz)
@@ -2938,7 +2935,7 @@ def test_from_pint_wrapping_dask(self, Var):


 @pytest.mark.parametrize(
-    ("values", "warns_under_pandas_version_two"),
+    ("values", "warns"),
     [
         (np.datetime64("2000-01-01", "ns"), False),
         (np.datetime64("2000-01-01", "s"), True),
@@ -2957,9 +2954,9 @@ def test_from_pint_wrapping_dask(self, Var):
     ],
     ids=lambda x: f"{x}",
 )
-def test_datetime_conversion_warning(values, warns_under_pandas_version_two) -> None:
+def test_datetime_conversion_warning(values, warns) -> None:
     dims = ["time"] if isinstance(values, (np.ndarray, pd.Index, pd.Series)) else []
-    if warns_under_pandas_version_two and has_pandas_version_two:
+    if warns:
         with pytest.warns(UserWarning, match="non-nanosecond precision datetime"):
             var = Variable(dims, values)
     else:
@@ -2979,7 +2976,6 @@ def test_datetime_conversion_warning(values, warns_under_pandas_version_two) ->
 )


-@requires_pandas_version_two
 def test_pandas_two_only_datetime_conversion_warnings() -> None:
     # Note these tests rely on pandas features that are only present in pandas
     # 2.0.0 and above, and so for now cannot be parametrized.
@@ -3014,7 +3010,7 @@ def test_pandas_two_only_datetime_conversion_warnings() -> None:


 @pytest.mark.parametrize(
-    ("values", "warns_under_pandas_version_two"),
+    ("values", "warns"),
     [
         (np.timedelta64(10, "ns"), False),
         (np.timedelta64(10, "s"), True),
@@ -3026,9 +3022,9 @@ def test_pandas_two_only_datetime_conversion_warnings() -> None:
     ],
     ids=lambda x: f"{x}",
 )
-def test_timedelta_conversion_warning(values, warns_under_pandas_version_two) -> None:
+def test_timedelta_conversion_warning(values, warns) -> None:
     dims = ["time"] if isinstance(values, (np.ndarray, pd.Index)) else []
-    if warns_under_pandas_version_two and has_pandas_version_two:
+    if warns:
         with pytest.warns(UserWarning, match="non-nanosecond precision timedelta"):
             var = Variable(dims, values)
     else:
@@ -3039,7 +3035,6 @@ def test_timedelta_conversion_warning(values, warns_under_pandas_version_two) ->
     assert var.dtype == np.dtype("timedelta64[ns]")


-@requires_pandas_version_two
 def test_pandas_two_only_timedelta_conversion_warning() -> None:
     # Note this test relies on a pandas feature that is only present in pandas
     # 2.0.0 and above, and so for now cannot be parametrized.
@@ -3050,7 +3045,6 @@ def test_pandas_two_only_timedelta_conversion_warning() -> None:
     assert var.dtype == np.dtype("timedelta64[ns]")


-@requires_pandas_version_two
 @pytest.mark.parametrize(
     ("index", "dtype"),
     [
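With pandas 2.0 as the floor, the conversion-warning tests above always run and the parametrize flag is simply named warns. For illustration, a sketch of the behavior they cover (not code from this PR):

# Illustrative sketch, not code from this PR: constructing a Variable from
# non-nanosecond datetime data warns and coerces to nanosecond units.
import warnings
import numpy as np
from xarray import Variable

with warnings.catch_warnings(record=True) as record:
    warnings.simplefilter("always")
    var = Variable(["time"], np.array(["2000-01-01"], dtype="datetime64[s]"))

assert var.dtype == np.dtype("datetime64[ns]")
assert any("non-nanosecond precision" in str(w.message) for w in record)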