diff --git a/.github/workflows/ci-additional.yaml b/.github/workflows/ci-additional.yaml index a2e3734f534..6f069af5da6 100644 --- a/.github/workflows/ci-additional.yaml +++ b/.github/workflows/ci-additional.yaml @@ -134,18 +134,16 @@ jobs: name: codecov-umbrella fail_ci_if_error: false - mypy38: - name: Mypy 3.8 + mypy39: + name: Mypy 3.9 runs-on: "ubuntu-latest" needs: detect-ci-trigger - # temporarily skipping due to https://github.com/pydata/xarray/issues/6551 - if: needs.detect-ci-trigger.outputs.triggered == 'false' defaults: run: shell: bash -l {0} env: CONDA_ENV_FILE: ci/requirements/environment.yml - PYTHON_VERSION: "3.8" + PYTHON_VERSION: "3.9" steps: - uses: actions/checkout@v3 @@ -185,7 +183,7 @@ jobs: uses: codecov/codecov-action@v3.1.1 with: file: mypy_report/cobertura.xml - flags: mypy38 + flags: mypy39 env_vars: PYTHON_VERSION name: codecov-umbrella fail_ci_if_error: false diff --git a/.github/workflows/ci.yaml b/.github/workflows/ci.yaml index 2d190efc14c..c4881bc4871 100644 --- a/.github/workflows/ci.yaml +++ b/.github/workflows/ci.yaml @@ -42,15 +42,15 @@ jobs: matrix: os: ["ubuntu-latest", "macos-latest", "windows-latest"] # Bookend python versions - python-version: ["3.8", "3.10", "3.11"] + python-version: ["3.9", "3.10", "3.11"] env: [""] include: # Minimum python version: - env: "bare-minimum" - python-version: "3.8" + python-version: "3.9" os: ubuntu-latest - env: "min-all-deps" - python-version: "3.8" + python-version: "3.9" os: ubuntu-latest # Latest python version: - env: "all-but-dask" diff --git a/.github/workflows/pypi-release.yaml b/.github/workflows/pypi-release.yaml index c8aa377a20e..8f61ab7503f 100644 --- a/.github/workflows/pypi-release.yaml +++ b/.github/workflows/pypi-release.yaml @@ -18,7 +18,7 @@ jobs: - uses: actions/setup-python@v4 name: Install Python with: - python-version: 3.8 + python-version: "3.11" - name: Install dependencies run: | @@ -53,7 +53,7 @@ jobs: - uses: actions/setup-python@v4 name: Install Python 
with: - python-version: 3.8 + python-version: "3.11" - uses: actions/download-artifact@v3 with: name: releases diff --git a/.pre-commit-config.yaml b/.pre-commit-config.yaml index 2351aa220e8..e219eaee03b 100644 --- a/.pre-commit-config.yaml +++ b/.pre-commit-config.yaml @@ -29,7 +29,7 @@ repos: hooks: - id: pyupgrade args: - - "--py38-plus" + - "--py39-plus" # https://github.com/python/black#version-control-integration - repo: https://github.com/psf/black rev: 22.12.0 diff --git a/asv_bench/asv.conf.json b/asv_bench/asv.conf.json index b43e4d1e4e8..6f8a306fc43 100644 --- a/asv_bench/asv.conf.json +++ b/asv_bench/asv.conf.json @@ -40,7 +40,7 @@ // The Pythons you'd like to test against. If not provided, defaults // to the current version of Python used to run `asv`. - "pythons": ["3.8"], + "pythons": ["3.10"], // The matrix of dependencies to test. Each key is the name of a // package (in PyPI) and the values are version numbers. An empty diff --git a/ci/min_deps_check.py b/ci/min_deps_check.py index 23621c55560..1ddde75131d 100755 --- a/ci/min_deps_check.py +++ b/ci/min_deps_check.py @@ -1,11 +1,13 @@ +#!/usr/bin/env python """Fetch from conda database all available versions of the xarray dependencies and their publication date. Compare it against requirements/py37-min-all-deps.yml to verify the policy on obsolete dependencies is being followed. 
Print a pretty report :) """ import itertools import sys +from collections.abc import Iterator from datetime import datetime -from typing import Dict, Iterator, Optional, Tuple +from typing import Optional import conda.api # type: ignore[import] import yaml @@ -29,7 +31,7 @@ POLICY_MONTHS = {"python": 24, "numpy": 18} POLICY_MONTHS_DEFAULT = 12 -POLICY_OVERRIDE: Dict[str, Tuple[int, int]] = {} +POLICY_OVERRIDE: dict[str, tuple[int, int]] = {} errors = [] @@ -43,7 +45,7 @@ def warning(msg: str) -> None: print("WARNING:", msg) -def parse_requirements(fname) -> Iterator[Tuple[str, int, int, Optional[int]]]: +def parse_requirements(fname) -> Iterator[tuple[str, int, int, Optional[int]]]: """Load requirements/py37-min-all-deps.yml Yield (package name, major version, minor version, [patch version]) @@ -75,7 +77,7 @@ def parse_requirements(fname) -> Iterator[Tuple[str, int, int, Optional[int]]]: raise ValueError("expected major.minor or major.minor.patch: " + row) -def query_conda(pkg: str) -> Dict[Tuple[int, int], datetime]: +def query_conda(pkg: str) -> dict[tuple[int, int], datetime]: """Query the conda repository for a specific package Return map of {(major version, minor version): publication date} @@ -115,7 +117,7 @@ def metadata(entry): def process_pkg( pkg: str, req_major: int, req_minor: int, req_patch: Optional[int] -) -> Tuple[str, str, str, str, str, str]: +) -> tuple[str, str, str, str, str, str]: """Compare package version from requirements file to available versions in conda. 
Return row to build pandas dataframe: diff --git a/ci/requirements/bare-minimum.yml b/ci/requirements/bare-minimum.yml index fe1af1af027..e8d05180966 100644 --- a/ci/requirements/bare-minimum.yml +++ b/ci/requirements/bare-minimum.yml @@ -3,13 +3,13 @@ channels: - conda-forge - nodefaults dependencies: - - python=3.8 + - python=3.9 - coveralls - pip - pytest - pytest-cov - pytest-env - pytest-xdist - - numpy=1.20 + - numpy=1.21 - packaging=21.3 - - pandas=1.3 + - pandas=1.4 diff --git a/ci/requirements/doc.yml b/ci/requirements/doc.yml index 35fdcb8cdb7..d8823e7cbbc 100644 --- a/ci/requirements/doc.yml +++ b/ci/requirements/doc.yml @@ -8,8 +8,8 @@ dependencies: - bottleneck - cartopy - cfgrib>=0.9 - - dask-core>=2.30 - - h5netcdf>=0.7.4 + - dask-core>=2022.1 + - h5netcdf>=0.13 - ipykernel - ipython - iris>=2.3 @@ -18,9 +18,9 @@ dependencies: - nbsphinx - netcdf4>=1.5 - numba - - numpy>=1.20,<1.24 - - packaging>=21.0 - - pandas>=1.3 + - numpy>=1.21,<1.24 + - packaging>=21.3 + - pandas>=1.4 - pooch - pip - pydata-sphinx-theme>=0.4.3 @@ -35,7 +35,7 @@ dependencies: - sphinx-copybutton - sphinx-design - sphinx!=4.4.0 - - zarr>=2.4 + - zarr>=2.10 - pip: - sphinxext-rediraffe - sphinxext-opengraph diff --git a/ci/requirements/min-all-deps.yml b/ci/requirements/min-all-deps.yml index c7e157992f0..4df2a3d7b19 100644 --- a/ci/requirements/min-all-deps.yml +++ b/ci/requirements/min-all-deps.yml @@ -7,7 +7,7 @@ dependencies: # Run ci/min_deps_check.py to verify that this file respects the policy. # When upgrading python, numpy, or pandas, must also change # doc/user-guide/installing.rst, doc/user-guide/plotting.rst and setup.py. - - python=3.8 + - python=3.9 - boto3=1.20 - bottleneck=1.3 - cartopy=0.20 @@ -15,10 +15,10 @@ dependencies: - cfgrib=0.9 - cftime=1.5 - coveralls - - dask-core=2021.11 - - distributed=2021.11 + - dask-core=2022.1 + - distributed=2022.1 - flox=0.5 - - h5netcdf=0.11 + - h5netcdf=0.13 # h5py and hdf5 tend to cause conflicts # for e.g. 
hdf5 1.12 conflicts with h5py=3.1 # prioritize bumping other packages instead @@ -26,16 +26,16 @@ dependencies: - hdf5=1.12 - hypothesis - iris=3.1 - - lxml=4.6 # Optional dep of pydap + - lxml=4.7 # Optional dep of pydap - matplotlib-base=3.5 - nc-time-axis=1.4 # netcdf follows a 1.major.minor[.patch] convention # (see https://github.com/Unidata/netcdf4-python/issues/1090) - netcdf4=1.5.7 - - numba=0.54 - - numpy=1.20 + - numba=0.55 + - numpy=1.21 - packaging=21.3 - - pandas=1.3 + - pandas=1.4 - pint=0.18 - pip - pseudonetcdf=3.2 diff --git a/doc/contributing.rst b/doc/contributing.rst index 0913702fd83..748fe4457e7 100644 --- a/doc/contributing.rst +++ b/doc/contributing.rst @@ -154,7 +154,7 @@ We'll now kick off a two-step process: .. code-block:: sh # Create and activate the build environment - conda create -c conda-forge -n xarray-tests python=3.8 + conda create -c conda-forge -n xarray-tests python=3.10 # This is for Linux and MacOS conda env update -f ci/requirements/environment.yml @@ -571,9 +571,9 @@ A test run of this yields ((xarray) $ pytest test_cool_feature.py -v =============================== test session starts ================================ - platform darwin -- Python 3.6.4, pytest-3.2.1, py-1.4.34, pluggy-0.4.0 -- - cachedir: ../../.cache - plugins: cov-2.5.1, hypothesis-3.23.0 + platform darwin -- Python 3.10.6, pytest-7.2.0, pluggy-1.0.0 -- + cachedir: .pytest_cache + plugins: hypothesis-6.56.3, cov-4.0.0 collected 11 items test_cool_feature.py::test_dtypes[int8] PASSED @@ -599,7 +599,9 @@ which match ``int8``. 
((xarray) bash-3.2$ pytest test_cool_feature.py -v -k int8 =========================== test session starts =========================== - platform darwin -- Python 3.6.2, pytest-3.2.1, py-1.4.31, pluggy-0.4.0 + platform darwin -- Python 3.10.6, pytest-7.2.0, pluggy-1.0.0 -- + cachedir: .pytest_cache + plugins: hypothesis-6.56.3, cov-4.0.0 collected 11 items test_cool_feature.py::test_dtypes[int8] PASSED @@ -645,8 +647,7 @@ Performance matters and it is worth considering whether your code has introduced performance regressions. *xarray* is starting to write a suite of benchmarking tests using `asv `__ to enable easy monitoring of the performance of critical *xarray* operations. -These benchmarks are all found in the ``xarray/asv_bench`` directory. asv -supports both python2 and python3. +These benchmarks are all found in the ``xarray/asv_bench`` directory. To use all features of asv, you will need either ``conda`` or ``virtualenv``. For more details please check the `asv installation @@ -699,7 +700,7 @@ environment by:: or, to use a specific Python interpreter,:: - asv run -e -E existing:python3.6 + asv run -e -E existing:python3.10 This will display stderr from the benchmarks, and use your local ``python`` that comes from your ``$PATH``. diff --git a/doc/getting-started-guide/installing.rst b/doc/getting-started-guide/installing.rst index 7124b0f87a5..5aa7fc0cee4 100644 --- a/doc/getting-started-guide/installing.rst +++ b/doc/getting-started-guide/installing.rst @@ -6,10 +6,10 @@ Installation Required dependencies --------------------- -- Python (3.8 or later) -- `numpy `__ (1.20 or later) +- Python (3.9 or later) +- `numpy `__ (1.21 or later) - `packaging `__ (21.3 or later) -- `pandas `__ (1.3 or later) +- `pandas `__ (1.4 or later) .. 
_optional-dependencies: diff --git a/doc/whats-new.rst b/doc/whats-new.rst index b0f6a07841b..800386fda5b 100644 --- a/doc/whats-new.rst +++ b/doc/whats-new.rst @@ -27,6 +27,21 @@ New Features Breaking changes ~~~~~~~~~~~~~~~~ +- Support for ``python 3.8`` has been dropped and the minimum versions of some + dependencies were changed (:pull:`7461`): + + ===================== ========= ======== + Package Old New + ===================== ========= ======== + python 3.8 3.9 + numpy 1.20 1.21 + pandas 1.3 1.4 + dask 2021.11 2022.1 + distributed 2021.11 2022.1 + h5netcdf 0.11 0.13 + lxml 4.6 4.7 + numba 0.54 0.55 + ===================== ========= ======== Deprecations ~~~~~~~~~~~~ diff --git a/requirements.txt b/requirements.txt index 4eb5c4a6b67..952710518d0 100644 --- a/requirements.txt +++ b/requirements.txt @@ -2,6 +2,6 @@ # it exists to let GitHub build the repository dependency graph # https://help.github.com/en/github/visualizing-repository-data-with-graphs/listing-the-packages-that-a-repository-depends-on -numpy >= 1.20 +numpy >= 1.21 packaging >= 21.3 -pandas >= 1.3 +pandas >= 1.4 diff --git a/setup.cfg b/setup.cfg index 70b810307be..dfc64284f58 100644 --- a/setup.cfg +++ b/setup.cfg @@ -64,7 +64,6 @@ classifiers = Intended Audience :: Science/Research Programming Language :: Python Programming Language :: Python :: 3 - Programming Language :: Python :: 3.8 Programming Language :: Python :: 3.9 Programming Language :: Python :: 3.10 Programming Language :: Python :: 3.11 @@ -74,10 +73,10 @@ classifiers = packages = find: zip_safe = False # https://mypy.readthedocs.io/en/latest/installed_packages.html include_package_data = True -python_requires = >=3.8 +python_requires = >=3.9 install_requires = - numpy >= 1.20 # recommended to use >= 1.22 for full quantile method support - pandas >= 1.3 + numpy >= 1.21 # recommended to use >= 1.22 for full quantile method support + pandas >= 1.4 packaging >= 21.3 [options.extras_require] diff --git a/xarray/backends/api.py 
b/xarray/backends/api.py index 2c13117b483..8891ac2986b 100644 --- a/xarray/backends/api.py +++ b/xarray/backends/api.py @@ -1,27 +1,12 @@ from __future__ import annotations import os +from collections.abc import Hashable, Iterable, Mapping, MutableMapping, Sequence from functools import partial from glob import glob from io import BytesIO from numbers import Number -from typing import ( - TYPE_CHECKING, - Any, - Callable, - Dict, - Final, - Hashable, - Iterable, - Literal, - Mapping, - MutableMapping, - Sequence, - Type, - Union, - cast, - overload, -) +from typing import TYPE_CHECKING, Any, Callable, Final, Literal, Union, cast, overload import numpy as np @@ -59,11 +44,11 @@ T_Engine = Union[ T_NetcdfEngine, Literal["pydap", "pynio", "pseudonetcdf", "cfgrib", "zarr"], - Type[BackendEntrypoint], + type[BackendEntrypoint], str, # no nice typing support for custom backends None, ] - T_Chunks = Union[int, Dict[Any, Any], Literal["auto"], None] + T_Chunks = Union[int, dict[Any, Any], Literal["auto"], None] T_NetcdfTypes = Literal[ "NETCDF4", "NETCDF4_CLASSIC", "NETCDF3_64BIT", "NETCDF3_CLASSIC" ] diff --git a/xarray/backends/common.py b/xarray/backends/common.py index 47b50ddc064..050493e3034 100644 --- a/xarray/backends/common.py +++ b/xarray/backends/common.py @@ -4,7 +4,8 @@ import os import time import traceback -from typing import TYPE_CHECKING, Any, ClassVar, Iterable +from collections.abc import Iterable +from typing import TYPE_CHECKING, Any, ClassVar import numpy as np diff --git a/xarray/backends/file_manager.py b/xarray/backends/file_manager.py index 478419b9bbb..91fd15fcaa4 100644 --- a/xarray/backends/file_manager.py +++ b/xarray/backends/file_manager.py @@ -5,7 +5,8 @@ import threading import uuid import warnings -from typing import Any, Hashable +from collections.abc import Hashable +from typing import Any from xarray.backends.locks import acquire from xarray.backends.lru_cache import LRUCache diff --git a/xarray/backends/h5netcdf_.py 
b/xarray/backends/h5netcdf_.py index 69acd6202c1..51d79005b7f 100644 --- a/xarray/backends/h5netcdf_.py +++ b/xarray/backends/h5netcdf_.py @@ -152,13 +152,12 @@ def open( if format not in [None, "NETCDF4"]: raise ValueError("invalid format for h5netcdf backend") - kwargs = {"invalid_netcdf": invalid_netcdf} + kwargs = { + "invalid_netcdf": invalid_netcdf, + "decode_vlen_strings": decode_vlen_strings, + } if phony_dims is not None: kwargs["phony_dims"] = phony_dims - if Version(h5netcdf.__version__) >= Version("0.10.0") and Version( - h5netcdf.core.h5py.__version__ - ) >= Version("3.0.0"): - kwargs["decode_vlen_strings"] = decode_vlen_strings if lock is None: if mode == "r": diff --git a/xarray/backends/locks.py b/xarray/backends/locks.py index 1de8fc8f7b8..bba12a29609 100644 --- a/xarray/backends/locks.py +++ b/xarray/backends/locks.py @@ -3,7 +3,8 @@ import multiprocessing import threading import weakref -from typing import Any, MutableMapping +from collections.abc import MutableMapping +from typing import Any try: from dask.utils import SerializableLock diff --git a/xarray/backends/lru_cache.py b/xarray/backends/lru_cache.py index c60ef3fc6b3..c09bcb19006 100644 --- a/xarray/backends/lru_cache.py +++ b/xarray/backends/lru_cache.py @@ -2,7 +2,8 @@ import threading from collections import OrderedDict -from typing import Any, Callable, Iterator, MutableMapping, TypeVar +from collections.abc import Iterator, MutableMapping +from typing import Any, Callable, TypeVar K = TypeVar("K") V = TypeVar("V") diff --git a/xarray/coding/times.py b/xarray/coding/times.py index 5a9e0e4b6da..f9e79863d46 100644 --- a/xarray/coding/times.py +++ b/xarray/coding/times.py @@ -2,9 +2,10 @@ import re import warnings +from collections.abc import Hashable from datetime import datetime, timedelta from functools import partial -from typing import TYPE_CHECKING, Callable, Hashable, Union +from typing import TYPE_CHECKING, Callable, Union import numpy as np import pandas as pd @@ -350,19 
+351,12 @@ def _infer_time_units_from_diff(unique_timedeltas) -> str: time_units = _NETCDF_TIME_UNITS_CFTIME unit_timedelta = _unit_timedelta_cftime zero_timedelta = timedelta(microseconds=0) - timedeltas = unique_timedeltas else: time_units = _NETCDF_TIME_UNITS_NUMPY unit_timedelta = _unit_timedelta_numpy zero_timedelta = np.timedelta64(0, "ns") - # Note that the modulus operator was only implemented for np.timedelta64 - # arrays as of NumPy version 1.16.0. Once our minimum version of NumPy - # supported is greater than or equal to this we will no longer need to cast - # unique_timedeltas to a TimedeltaIndex. In the meantime, however, the - # modulus operator works for TimedeltaIndex objects. - timedeltas = pd.TimedeltaIndex(unique_timedeltas) for time_unit in time_units: - if np.all(timedeltas % unit_timedelta(time_unit) == zero_timedelta): + if np.all(unique_timedeltas % unit_timedelta(time_unit) == zero_timedelta): return time_unit return "seconds" diff --git a/xarray/coding/variables.py b/xarray/coding/variables.py index 111db535e32..c290307b4b6 100644 --- a/xarray/coding/variables.py +++ b/xarray/coding/variables.py @@ -2,8 +2,9 @@ from __future__ import annotations import warnings +from collections.abc import Hashable, MutableMapping from functools import partial -from typing import TYPE_CHECKING, Any, Callable, Hashable, MutableMapping, Tuple, Union +from typing import TYPE_CHECKING, Any, Callable, Union import numpy as np import pandas as pd @@ -13,7 +14,7 @@ from xarray.core.variable import Variable if TYPE_CHECKING: - T_VarTuple = Tuple[Tuple[Hashable, ...], Any, dict, dict] + T_VarTuple = tuple[tuple[Hashable, ...], Any, dict, dict] T_Name = Union[Hashable, None] diff --git a/xarray/conventions.py b/xarray/conventions.py index 908180270bc..74dc686ee95 100644 --- a/xarray/conventions.py +++ b/xarray/conventions.py @@ -2,16 +2,8 @@ import warnings from collections import defaultdict -from typing import ( - TYPE_CHECKING, - Any, - Hashable, - Iterable, - 
Mapping, - MutableMapping, - Tuple, - Union, -) +from collections.abc import Hashable, Iterable, Mapping, MutableMapping +from typing import TYPE_CHECKING, Any, Union import numpy as np import pandas as pd @@ -48,7 +40,7 @@ from xarray.backends.common import AbstractDataStore from xarray.core.dataset import Dataset - T_VarTuple = Tuple[Tuple[Hashable, ...], Any, dict, dict] + T_VarTuple = tuple[tuple[Hashable, ...], Any, dict, dict] T_Name = Union[Hashable, None] T_Variables = Mapping[Any, Variable] T_Attrs = MutableMapping[Any, Any] diff --git a/xarray/core/_aggregations.py b/xarray/core/_aggregations.py index 34732810b24..ccffe8daf04 100644 --- a/xarray/core/_aggregations.py +++ b/xarray/core/_aggregations.py @@ -3,7 +3,8 @@ from __future__ import annotations -from typing import TYPE_CHECKING, Any, Callable, Sequence +from collections.abc import Sequence +from typing import TYPE_CHECKING, Any, Callable from xarray.core import duck_array_ops from xarray.core.options import OPTIONS diff --git a/xarray/core/accessor_str.py b/xarray/core/accessor_str.py index 2e96abf9d47..16e22ec1c66 100644 --- a/xarray/core/accessor_str.py +++ b/xarray/core/accessor_str.py @@ -42,9 +42,11 @@ import codecs import re import textwrap +from collections.abc import Hashable, Mapping from functools import reduce from operator import or_ as set_union -from typing import TYPE_CHECKING, Any, Callable, Generic, Hashable, Mapping, Pattern +from re import Pattern +from typing import TYPE_CHECKING, Any, Callable, Generic from unicodedata import normalize import numpy as np diff --git a/xarray/core/alignment.py b/xarray/core/alignment.py index 38978a5e4f3..45d448dd113 100644 --- a/xarray/core/alignment.py +++ b/xarray/core/alignment.py @@ -3,21 +3,9 @@ import functools import operator from collections import defaultdict +from collections.abc import Hashable, Iterable, Mapping from contextlib import suppress -from typing import ( - TYPE_CHECKING, - Any, - Callable, - Dict, - Generic, - Hashable, - 
Iterable, - Mapping, - Tuple, - Type, - TypeVar, - cast, -) +from typing import TYPE_CHECKING, Any, Callable, Generic, TypeVar, cast import numpy as np import pandas as pd @@ -98,10 +86,10 @@ def reindex_variables( return new_variables -CoordNamesAndDims = Tuple[Tuple[Hashable, Tuple[Hashable, ...]], ...] -MatchingIndexKey = Tuple[CoordNamesAndDims, Type[Index]] -NormalizedIndexes = Dict[MatchingIndexKey, Index] -NormalizedIndexVars = Dict[MatchingIndexKey, Dict[Hashable, Variable]] +CoordNamesAndDims = tuple[tuple[Hashable, tuple[Hashable, ...]], ...] +MatchingIndexKey = tuple[CoordNamesAndDims, type[Index]] +NormalizedIndexes = dict[MatchingIndexKey, Index] +NormalizedIndexVars = dict[MatchingIndexKey, dict[Hashable, Variable]] class Aligner(Generic[DataAlignable]): diff --git a/xarray/core/combine.py b/xarray/core/combine.py index d470dcb6192..790ff629b2a 100644 --- a/xarray/core/combine.py +++ b/xarray/core/combine.py @@ -3,7 +3,8 @@ import itertools import warnings from collections import Counter -from typing import TYPE_CHECKING, Iterable, Literal, Sequence, Union +from collections.abc import Iterable, Sequence +from typing import TYPE_CHECKING, Literal, Union import pandas as pd diff --git a/xarray/core/common.py b/xarray/core/common.py index 783847cd60d..095d15e32f1 100644 --- a/xarray/core/common.py +++ b/xarray/core/common.py @@ -1,21 +1,11 @@ from __future__ import annotations import warnings +from collections.abc import Hashable, Iterable, Iterator, Mapping from contextlib import suppress from html import escape from textwrap import dedent -from typing import ( - TYPE_CHECKING, - Any, - Callable, - Hashable, - Iterable, - Iterator, - Mapping, - TypeVar, - Union, - overload, -) +from typing import TYPE_CHECKING, Any, Callable, TypeVar, Union, overload import numpy as np import pandas as pd @@ -969,36 +959,28 @@ def _resample( dim_name: Hashable = dim dim_coord = self[dim] - # TODO: remove once pandas=1.1 is the minimum required version - with 
warnings.catch_warnings(): - warnings.filterwarnings( - "ignore", - r"'(base|loffset)' in .resample\(\) and in Grouper\(\) is deprecated.", - category=FutureWarning, + if isinstance(self._indexes[dim_name].to_pandas_index(), CFTimeIndex): + from xarray.core.resample_cftime import CFTimeGrouper + + grouper = CFTimeGrouper( + freq=freq, + closed=closed, + label=label, + base=base, + loffset=loffset, + origin=origin, + offset=offset, + ) + else: + grouper = pd.Grouper( + freq=freq, + closed=closed, + label=label, + base=base, + offset=offset, + origin=origin, + loffset=loffset, ) - - if isinstance(self._indexes[dim_name].to_pandas_index(), CFTimeIndex): - from xarray.core.resample_cftime import CFTimeGrouper - - grouper = CFTimeGrouper( - freq=freq, - closed=closed, - label=label, - base=base, - loffset=loffset, - origin=origin, - offset=offset, - ) - else: - grouper = pd.Grouper( - freq=freq, - closed=closed, - label=label, - base=base, - offset=offset, - origin=origin, - loffset=loffset, - ) group = DataArray( dim_coord, coords=dim_coord.coords, dims=dim_coord.dims, name=RESAMPLE_DIM ) diff --git a/xarray/core/computation.py b/xarray/core/computation.py index de488f7c2b1..2b3cdaeb143 100644 --- a/xarray/core/computation.py +++ b/xarray/core/computation.py @@ -8,19 +8,8 @@ import operator import warnings from collections import Counter -from typing import ( - TYPE_CHECKING, - AbstractSet, - Any, - Callable, - Hashable, - Iterable, - Mapping, - Sequence, - TypeVar, - Union, - overload, -) +from collections.abc import Hashable, Iterable, Mapping, Sequence +from typing import TYPE_CHECKING, AbstractSet, Any, Callable, TypeVar, Union, overload import numpy as np @@ -913,7 +902,6 @@ def apply_ufunc( dimensions as input and vectorize it automatically with :py:func:`numpy.vectorize`. This option exists for convenience, but is almost always slower than supplying a pre-vectorized function. - Using this option requires NumPy version 1.12 or newer. 
join : {"outer", "inner", "left", "right", "exact"}, default: "exact" Method for joining the indexes of the passed objects along each dimension, and the variables of Dataset objects with mismatched diff --git a/xarray/core/concat.py b/xarray/core/concat.py index 95a2199f9c5..f092911948f 100644 --- a/xarray/core/concat.py +++ b/xarray/core/concat.py @@ -1,6 +1,7 @@ from __future__ import annotations -from typing import TYPE_CHECKING, Any, Hashable, Iterable, cast, overload +from collections.abc import Hashable, Iterable +from typing import TYPE_CHECKING, Any, cast, overload import pandas as pd diff --git a/xarray/core/coordinates.py b/xarray/core/coordinates.py index 986b4cab443..32809a54ddd 100644 --- a/xarray/core/coordinates.py +++ b/xarray/core/coordinates.py @@ -1,8 +1,9 @@ from __future__ import annotations import warnings +from collections.abc import Hashable, Iterator, Mapping, Sequence from contextlib import contextmanager -from typing import TYPE_CHECKING, Any, Hashable, Iterator, List, Mapping, Sequence +from typing import TYPE_CHECKING, Any import numpy as np import pandas as pd @@ -28,17 +29,11 @@ # arbitrary DataArray objects to datasets _THIS_ARRAY = ReprObject("") -# TODO: Remove when min python version >= 3.9: -GenericAlias = type(List[int]) - class Coordinates(Mapping[Hashable, "T_DataArray"]): _data: DataWithCoords __slots__ = ("_data",) - # TODO: Remove when min python version >= 3.9: - __class_getitem__ = classmethod(GenericAlias) - def __getitem__(self, key: Hashable) -> T_DataArray: raise NotImplementedError() diff --git a/xarray/core/dataarray.py b/xarray/core/dataarray.py index dc8dcbd3776..8fefef5aff7 100644 --- a/xarray/core/dataarray.py +++ b/xarray/core/dataarray.py @@ -2,20 +2,9 @@ import datetime import warnings +from collections.abc import Hashable, Iterable, Mapping, Sequence from os import PathLike -from typing import ( - TYPE_CHECKING, - Any, - Callable, - Hashable, - Iterable, - Literal, - Mapping, - NoReturn, - Sequence, - cast, 
- overload, -) +from typing import TYPE_CHECKING, Any, Callable, Literal, NoReturn, cast, overload import numpy as np import pandas as pd diff --git a/xarray/core/dataset.py b/xarray/core/dataset.py index 2a45a8f87ab..3670fec8be6 100644 --- a/xarray/core/dataset.py +++ b/xarray/core/dataset.py @@ -8,27 +8,20 @@ import sys import warnings from collections import defaultdict -from html import escape -from numbers import Number -from operator import methodcaller -from os import PathLike -from typing import ( - IO, - TYPE_CHECKING, - Any, - Callable, +from collections.abc import ( Collection, - Generic, Hashable, Iterable, Iterator, - Literal, Mapping, MutableMapping, Sequence, - cast, - overload, ) +from html import escape +from numbers import Number +from operator import methodcaller +from os import PathLike +from typing import IO, TYPE_CHECKING, Any, Callable, Generic, Literal, cast, overload import numpy as np import pandas as pd diff --git a/xarray/core/formatting.py b/xarray/core/formatting.py index 1473d513a01..ed548771809 100644 --- a/xarray/core/formatting.py +++ b/xarray/core/formatting.py @@ -6,10 +6,10 @@ import functools import math from collections import defaultdict +from collections.abc import Collection, Hashable from datetime import datetime, timedelta from itertools import chain, zip_longest from reprlib import recursive_repr -from typing import Collection, Hashable import numpy as np import pandas as pd diff --git a/xarray/core/groupby.py b/xarray/core/groupby.py index a6516611efc..45227fd9c2b 100644 --- a/xarray/core/groupby.py +++ b/xarray/core/groupby.py @@ -2,20 +2,8 @@ import datetime import warnings -from typing import ( - TYPE_CHECKING, - Any, - Callable, - Generic, - Hashable, - Iterator, - Literal, - Mapping, - Sequence, - TypeVar, - Union, - cast, -) +from collections.abc import Hashable, Iterator, Mapping, Sequence +from typing import TYPE_CHECKING, Any, Callable, Generic, Literal, TypeVar, Union, cast import numpy as np import pandas as 
pd diff --git a/xarray/core/indexes.py b/xarray/core/indexes.py index ab17d561ae2..5f42c50e26f 100644 --- a/xarray/core/indexes.py +++ b/xarray/core/indexes.py @@ -3,19 +3,8 @@ import collections.abc import copy from collections import defaultdict -from typing import ( - TYPE_CHECKING, - Any, - Dict, - Generic, - Hashable, - Iterable, - Iterator, - Mapping, - Sequence, - TypeVar, - cast, -) +from collections.abc import Hashable, Iterable, Iterator, Mapping, Sequence +from typing import TYPE_CHECKING, Any, Generic, TypeVar, cast import numpy as np import pandas as pd @@ -32,7 +21,7 @@ from xarray.core.types import ErrorOptions, T_Index from xarray.core.variable import Variable -IndexVars = Dict[Any, "Variable"] +IndexVars = dict[Any, "Variable"] class Index: @@ -979,7 +968,7 @@ def sel(self, labels, method=None, tolerance=None) -> IndexSelResult: # variable(s) attrs and encoding metadata are propagated # when replacing the indexes in the resulting xarray object new_vars = new_index.create_variables() - indexes = cast(Dict[Any, Index], {k: new_index for k in new_vars}) + indexes = cast(dict[Any, Index], {k: new_index for k in new_vars}) # add scalar variable for each dropped level variables = new_vars diff --git a/xarray/core/indexing.py b/xarray/core/indexing.py index e49a072df1d..5448c18d8e9 100644 --- a/xarray/core/indexing.py +++ b/xarray/core/indexing.py @@ -4,25 +4,20 @@ import functools import operator from collections import Counter, defaultdict +from collections.abc import Hashable, Iterable, Mapping from contextlib import suppress from dataclasses import dataclass, field from datetime import timedelta from html import escape -from typing import TYPE_CHECKING, Any, Callable, Hashable, Iterable, Mapping +from typing import TYPE_CHECKING, Any, Callable import numpy as np import pandas as pd -from packaging.version import Version from xarray.core import duck_array_ops from xarray.core.nputils import NumpyVIndexAdapter from xarray.core.options import OPTIONS 
-from xarray.core.pycompat import ( - array_type, - integer_types, - is_duck_dask_array, - mod_version, -) +from xarray.core.pycompat import array_type, integer_types, is_duck_dask_array from xarray.core.types import T_Xarray from xarray.core.utils import ( NDArrayMixin, @@ -1386,29 +1381,18 @@ def __getitem__(self, key): return value def __setitem__(self, key, value): - if mod_version("dask") >= Version("2021.04.1"): - if isinstance(key, BasicIndexer): - self.array[key.tuple] = value - elif isinstance(key, VectorizedIndexer): - self.array.vindex[key.tuple] = value - elif isinstance(key, OuterIndexer): - num_non_slices = sum( - 0 if isinstance(k, slice) else 1 for k in key.tuple + if isinstance(key, BasicIndexer): + self.array[key.tuple] = value + elif isinstance(key, VectorizedIndexer): + self.array.vindex[key.tuple] = value + elif isinstance(key, OuterIndexer): + num_non_slices = sum(0 if isinstance(k, slice) else 1 for k in key.tuple) + if num_non_slices > 1: + raise NotImplementedError( + "xarray can't set arrays with multiple " + "array indices to dask yet." ) - if num_non_slices > 1: - raise NotImplementedError( - "xarray can't set arrays with multiple " - "array indices to dask yet." - ) - self.array[key.tuple] = value - else: - raise TypeError( - "This variable's data is stored in a dask array, " - "and the installed dask version does not support item " - "assignment. To assign to this variable, you must either upgrade dask or" - "first load the variable into memory explicitly using the .load() " - "method or accessing its .values attribute." 
- ) + self.array[key.tuple] = value def transpose(self, order): return self.array.transpose(order) diff --git a/xarray/core/merge.py b/xarray/core/merge.py index 77cfb9bed75..740e3c8c4e2 100644 --- a/xarray/core/merge.py +++ b/xarray/core/merge.py @@ -1,19 +1,8 @@ from __future__ import annotations from collections import defaultdict -from typing import ( - TYPE_CHECKING, - AbstractSet, - Any, - Hashable, - Iterable, - Mapping, - NamedTuple, - Optional, - Sequence, - Tuple, - Union, -) +from collections.abc import Hashable, Iterable, Mapping, Sequence +from typing import TYPE_CHECKING, AbstractSet, Any, NamedTuple, Optional, Union import pandas as pd @@ -40,9 +29,9 @@ ArrayLike = Any VariableLike = Union[ ArrayLike, - Tuple[DimsLike, ArrayLike], - Tuple[DimsLike, ArrayLike, Mapping], - Tuple[DimsLike, ArrayLike, Mapping, Mapping], + tuple[DimsLike, ArrayLike], + tuple[DimsLike, ArrayLike, Mapping], + tuple[DimsLike, ArrayLike, Mapping, Mapping], ] XarrayValue = Union[DataArray, Variable, VariableLike] DatasetLike = Union[Dataset, Mapping[Any, XarrayValue]] @@ -170,7 +159,7 @@ def _assert_compat_valid(compat): raise ValueError(f"compat={compat!r} invalid: must be {set(_VALID_COMPAT)}") -MergeElement = Tuple[Variable, Optional[Index]] +MergeElement = tuple[Variable, Optional[Index]] def _assert_prioritized_valid( diff --git a/xarray/core/missing.py b/xarray/core/missing.py index 3676db9a487..97703b576fa 100644 --- a/xarray/core/missing.py +++ b/xarray/core/missing.py @@ -2,20 +2,20 @@ import datetime as dt import warnings +from collections.abc import Hashable, Sequence from functools import partial from numbers import Number -from typing import TYPE_CHECKING, Any, Callable, Hashable, Sequence, get_args +from typing import TYPE_CHECKING, Any, Callable, get_args import numpy as np import pandas as pd -from packaging.version import Version from xarray.core import utils from xarray.core.common import _contains_datetime_like_objects, ones_like from xarray.core.computation 
import apply_ufunc from xarray.core.duck_array_ops import datetime_to_numeric, push, timedelta_to_numeric from xarray.core.options import OPTIONS, _get_keep_attrs -from xarray.core.pycompat import is_duck_dask_array, mod_version +from xarray.core.pycompat import is_duck_dask_array from xarray.core.types import Interp1dOptions, InterpOptions from xarray.core.utils import OrderedSet, is_scalar from xarray.core.variable import Variable, broadcast_variables @@ -740,12 +740,7 @@ def interp_func(var, x, new_x, method: InterpOptions, kwargs): else: dtype = var.dtype - if mod_version("dask") < Version("2020.12"): - # Using meta and dtype at the same time doesn't work. - # Remove this whenever the minimum requirement for dask is 2020.12: - meta = None - else: - meta = var._meta + meta = var._meta return da.blockwise( _dask_aware_interpnd, diff --git a/xarray/core/parallel.py b/xarray/core/parallel.py index 92d1f777705..38ac1532f29 100644 --- a/xarray/core/parallel.py +++ b/xarray/core/parallel.py @@ -3,16 +3,8 @@ import collections import itertools import operator -from typing import ( - TYPE_CHECKING, - Any, - Callable, - DefaultDict, - Hashable, - Iterable, - Mapping, - Sequence, -) +from collections.abc import Hashable, Iterable, Mapping, Sequence +from typing import TYPE_CHECKING, Any, Callable, DefaultDict import numpy as np diff --git a/xarray/core/pycompat.py b/xarray/core/pycompat.py index fa788821b4f..95387523bc8 100644 --- a/xarray/core/pycompat.py +++ b/xarray/core/pycompat.py @@ -2,7 +2,7 @@ from importlib import import_module from types import ModuleType -from typing import TYPE_CHECKING, Any, Literal, Tuple, Type +from typing import TYPE_CHECKING, Any, Literal import numpy as np from packaging.version import Version @@ -13,7 +13,7 @@ if TYPE_CHECKING: ModType = Literal["dask", "pint", "cupy", "sparse"] - DuckArrayTypes = Tuple[Type[Any], ...] # TODO: improve this? maybe Generic + DuckArrayTypes = tuple[type[Any], ...] # TODO: improve this? 
maybe Generic class DuckArrayModule: diff --git a/xarray/core/resample.py b/xarray/core/resample.py index 8b66aac5042..9d67fbbe9e3 100644 --- a/xarray/core/resample.py +++ b/xarray/core/resample.py @@ -1,7 +1,8 @@ from __future__ import annotations import warnings -from typing import TYPE_CHECKING, Any, Callable, Hashable, Iterable, Sequence +from collections.abc import Hashable, Iterable, Sequence +from typing import TYPE_CHECKING, Any, Callable import numpy as np diff --git a/xarray/core/rolling.py b/xarray/core/rolling.py index c06a2b40b21..e1a475b6910 100644 --- a/xarray/core/rolling.py +++ b/xarray/core/rolling.py @@ -4,16 +4,8 @@ import itertools import math import warnings -from typing import ( - TYPE_CHECKING, - Any, - Callable, - Generic, - Hashable, - Iterator, - Mapping, - TypeVar, -) +from collections.abc import Hashable, Iterator, Mapping +from typing import TYPE_CHECKING, Any, Callable, Generic, TypeVar import numpy as np diff --git a/xarray/core/rolling_exp.py b/xarray/core/rolling_exp.py index faf81c8de13..91edd3acb7c 100644 --- a/xarray/core/rolling_exp.py +++ b/xarray/core/rolling_exp.py @@ -1,6 +1,7 @@ from __future__ import annotations -from typing import Any, Generic, Mapping +from collections.abc import Mapping +from typing import Any, Generic import numpy as np from packaging.version import Version diff --git a/xarray/core/types.py b/xarray/core/types.py index 220aeb8da7a..fc3c6712be2 100644 --- a/xarray/core/types.py +++ b/xarray/core/types.py @@ -1,18 +1,14 @@ from __future__ import annotations import datetime +from collections.abc import Hashable, Iterable, Sequence from typing import ( TYPE_CHECKING, Any, Callable, - Hashable, - Iterable, - List, Literal, Protocol, - Sequence, SupportsIndex, - Tuple, TypeVar, Union, ) @@ -74,13 +70,13 @@ def dtype(self) -> np.dtype: # character codes, type strings or comma-separated fields, e.g., 'float64' str, # (flexible_dtype, itemsize) - Tuple[_DTypeLikeNested, int], + tuple[_DTypeLikeNested, int], # 
(fixed_dtype, shape) - Tuple[_DTypeLikeNested, _ShapeLike], + tuple[_DTypeLikeNested, _ShapeLike], # (base_dtype, new_dtype) - Tuple[_DTypeLikeNested, _DTypeLikeNested], + tuple[_DTypeLikeNested, _DTypeLikeNested], # because numpy does the same? - List[Any], + list[Any], # anything with a dtype attribute _SupportsDType, ] diff --git a/xarray/core/utils.py b/xarray/core/utils.py index 86c644de5f0..d4a1727887b 100644 --- a/xarray/core/utils.py +++ b/xarray/core/utils.py @@ -46,22 +46,24 @@ import re import sys import warnings -from enum import Enum -from typing import ( - TYPE_CHECKING, - Any, - Callable, +from collections.abc import ( Collection, Container, - Generic, Hashable, Iterable, Iterator, - Literal, Mapping, MutableMapping, MutableSet, Sequence, +) +from enum import Enum +from typing import ( + TYPE_CHECKING, + Any, + Callable, + Generic, + Literal, TypeVar, cast, overload, diff --git a/xarray/core/variable.py b/xarray/core/variable.py index 98aa538af0e..831ece25b67 100644 --- a/xarray/core/variable.py +++ b/xarray/core/variable.py @@ -5,18 +5,9 @@ import math import numbers import warnings +from collections.abc import Hashable, Iterable, Mapping, Sequence from datetime import timedelta -from typing import ( - TYPE_CHECKING, - Any, - Callable, - Hashable, - Iterable, - Literal, - Mapping, - NoReturn, - Sequence, -) +from typing import TYPE_CHECKING, Any, Callable, Literal, NoReturn import numpy as np import pandas as pd diff --git a/xarray/core/weighted.py b/xarray/core/weighted.py index fc6f280b596..46fd8edbe1b 100644 --- a/xarray/core/weighted.py +++ b/xarray/core/weighted.py @@ -1,6 +1,7 @@ from __future__ import annotations -from typing import TYPE_CHECKING, Generic, Hashable, Iterable, Literal, Sequence, cast +from collections.abc import Hashable, Iterable, Sequence +from typing import TYPE_CHECKING, Generic, Literal, cast import numpy as np from numpy.typing import ArrayLike diff --git a/xarray/plot/accessor.py b/xarray/plot/accessor.py index 
62a02400222..ff707602545 100644 --- a/xarray/plot/accessor.py +++ b/xarray/plot/accessor.py @@ -1,7 +1,8 @@ from __future__ import annotations import functools -from typing import TYPE_CHECKING, Any, Hashable, Iterable, Literal, NoReturn, overload +from collections.abc import Hashable, Iterable +from typing import TYPE_CHECKING, Any, Literal, NoReturn, overload import numpy as np diff --git a/xarray/plot/dataarray_plot.py b/xarray/plot/dataarray_plot.py index f1e41478d4c..f18495e5e94 100644 --- a/xarray/plot/dataarray_plot.py +++ b/xarray/plot/dataarray_plot.py @@ -2,21 +2,11 @@ import functools import warnings -from typing import ( - TYPE_CHECKING, - Any, - Callable, - Hashable, - Iterable, - Literal, - MutableMapping, - cast, - overload, -) +from collections.abc import Hashable, Iterable, MutableMapping +from typing import TYPE_CHECKING, Any, Callable, Literal, cast, overload import numpy as np import pandas as pd -from packaging.version import Version from xarray.core.alignment import broadcast from xarray.core.concat import concat @@ -990,12 +980,7 @@ def newplotfunc( ax = get_axis(figsize, size, aspect, ax, **subplot_kws) # Using 30, 30 minimizes rotation of the plot. Making it easier to # build on your intuition from 2D plots: - plt = import_matplotlib_pyplot() - if Version(plt.matplotlib.__version__) < Version("3.5.0"): - ax.view_init(azim=30, elev=30) - else: - # https://github.com/matplotlib/matplotlib/pull/19873 - ax.view_init(azim=30, elev=30, vertical_axis="y") + ax.view_init(azim=30, elev=30, vertical_axis="y") else: ax = get_axis(figsize, size, aspect, ax, **subplot_kws) @@ -1242,8 +1227,6 @@ def scatter( Wraps :py:func:`matplotlib:matplotlib.pyplot.scatter`. 
""" - plt = import_matplotlib_pyplot() - if "u" in kwargs or "v" in kwargs: raise ValueError("u, v are not allowed in scatter plots.") @@ -1260,16 +1243,7 @@ def scatter( if sizeplt is not None: kwargs.update(s=sizeplt.to_numpy().ravel()) - if Version(plt.matplotlib.__version__) < Version("3.5.0"): - # Plot the data. 3d plots has the z value in upward direction - # instead of y. To make jumping between 2d and 3d easy and intuitive - # switch the order so that z is shown in the depthwise direction: - axis_order = ["x", "z", "y"] - else: - # Switching axis order not needed in 3.5.0, can also simplify the code - # that uses axis_order: - # https://github.com/matplotlib/matplotlib/pull/19873 - axis_order = ["x", "y", "z"] + axis_order = ["x", "y", "z"] plts_dict: dict[str, DataArray | None] = dict(x=xplt, y=yplt, z=zplt) plts_or_none = [plts_dict[v] for v in axis_order] diff --git a/xarray/plot/dataset_plot.py b/xarray/plot/dataset_plot.py index 88d05b34a2b..eda37aadffe 100644 --- a/xarray/plot/dataset_plot.py +++ b/xarray/plot/dataset_plot.py @@ -3,7 +3,8 @@ import functools import inspect import warnings -from typing import TYPE_CHECKING, Any, Callable, Hashable, Iterable, TypeVar, overload +from collections.abc import Hashable, Iterable +from typing import TYPE_CHECKING, Any, Callable, TypeVar, overload from xarray.core.alignment import broadcast from xarray.plot import dataarray_plot diff --git a/xarray/plot/facetgrid.py b/xarray/plot/facetgrid.py index c510b0767ba..4d90c160400 100644 --- a/xarray/plot/facetgrid.py +++ b/xarray/plot/facetgrid.py @@ -3,17 +3,8 @@ import functools import itertools import warnings -from typing import ( - TYPE_CHECKING, - Any, - Callable, - Generic, - Hashable, - Iterable, - Literal, - TypeVar, - cast, -) +from collections.abc import Hashable, Iterable +from typing import TYPE_CHECKING, Any, Callable, Generic, Literal, TypeVar, cast import numpy as np diff --git a/xarray/plot/utils.py b/xarray/plot/utils.py index 
46d180447d8..373b303def2 100644 --- a/xarray/plot/utils.py +++ b/xarray/plot/utils.py @@ -3,18 +3,10 @@ import itertools import textwrap import warnings +from collections.abc import Hashable, Iterable, Mapping, Sequence from datetime import datetime from inspect import getfullargspec -from typing import ( - TYPE_CHECKING, - Any, - Callable, - Hashable, - Iterable, - Mapping, - Sequence, - overload, -) +from typing import TYPE_CHECKING, Any, Callable, overload import numpy as np import pandas as pd diff --git a/xarray/testing.py b/xarray/testing.py index 481a23340fd..b6a88135ee1 100644 --- a/xarray/testing.py +++ b/xarray/testing.py @@ -1,7 +1,8 @@ """Testing functions exposed to the user API""" import functools import warnings -from typing import Hashable, Set, Union +from collections.abc import Hashable +from typing import Union import numpy as np import pandas as pd @@ -356,7 +357,7 @@ def _assert_dataset_invariants(ds: Dataset, check_default_indexes: bool): assert type(ds._dims) is dict, ds._dims assert all(isinstance(v, int) for v in ds._dims.values()), ds._dims - var_dims: Set[Hashable] = set() + var_dims: set[Hashable] = set() for v in ds._variables.values(): var_dims.update(v.dims) assert ds._dims.keys() == var_dims, (set(ds._dims), var_dims) diff --git a/xarray/tests/__init__.py b/xarray/tests/__init__.py index 6970a34b63d..f1dd12a5675 100644 --- a/xarray/tests/__init__.py +++ b/xarray/tests/__init__.py @@ -64,7 +64,6 @@ def _importorskip( has_pydap, requires_pydap = _importorskip("pydap.client") has_netCDF4, requires_netCDF4 = _importorskip("netCDF4") has_h5netcdf, requires_h5netcdf = _importorskip("h5netcdf") -has_h5netcdf_0_12, requires_h5netcdf_0_12 = _importorskip("h5netcdf", minversion="0.12") has_pynio, requires_pynio = _importorskip("Nio") has_pseudonetcdf, requires_pseudonetcdf = _importorskip("PseudoNetCDF") has_cftime, requires_cftime = _importorskip("cftime") diff --git a/xarray/tests/test_backends.py b/xarray/tests/test_backends.py index 
b923184800e..b49500bd00c 100644 --- a/xarray/tests/test_backends.py +++ b/xarray/tests/test_backends.py @@ -13,10 +13,11 @@ import tempfile import uuid import warnings +from collections.abc import Iterator from contextlib import ExitStack from io import BytesIO from pathlib import Path -from typing import TYPE_CHECKING, Any, Final, Iterator, cast +from typing import TYPE_CHECKING, Any, Final, cast import numpy as np import pandas as pd @@ -58,7 +59,6 @@ assert_identical, assert_no_warnings, has_dask, - has_h5netcdf_0_12, has_netCDF4, has_scipy, mock, @@ -68,7 +68,6 @@ requires_dask, requires_fsspec, requires_h5netcdf, - requires_h5netcdf_0_12, requires_iris, requires_netCDF4, requires_pseudonetcdf, @@ -2881,42 +2880,12 @@ def create_store(self): with create_tmp_file() as tmp_file: yield backends.H5NetCDFStore.open(tmp_file, "w") - @pytest.mark.parametrize( - "invalid_netcdf, warntype, num_warns", - [ - pytest.param( - None, - FutureWarning, - 1, - marks=pytest.mark.skipif(has_h5netcdf_0_12, reason="raises"), - ), - pytest.param( - False, - FutureWarning, - 1, - marks=pytest.mark.skipif(has_h5netcdf_0_12, reason="raises"), - ), - (True, None, 0), - ], - ) - def test_complex(self, invalid_netcdf, warntype, num_warns) -> None: + def test_complex(self) -> None: expected = Dataset({"x": ("y", np.ones(5) + 1j * np.ones(5))}) - save_kwargs = {"invalid_netcdf": invalid_netcdf} - with warnings.catch_warnings(record=True) as record: - with self.roundtrip(expected, save_kwargs=save_kwargs) as actual: - assert_equal(expected, actual) - - recorded_num_warns = 0 - if warntype: - for warning in record: - if issubclass(warning.category, warntype) and ( - "complex dtypes" in str(warning.message) - ): - recorded_num_warns += 1 - - assert recorded_num_warns == num_warns + save_kwargs = {"invalid_netcdf": True} + with self.roundtrip(expected, save_kwargs=save_kwargs) as actual: + assert_equal(expected, actual) - @requires_h5netcdf_0_12 @pytest.mark.parametrize("invalid_netcdf", [None, 
False]) def test_complex_error(self, invalid_netcdf) -> None: @@ -3092,11 +3061,7 @@ def test_open_dataset_group(self) -> None: v = group.createVariable("x", "int") v[...] = 42 - kwargs = {} - if Version(h5netcdf.__version__) >= Version("0.10.0") and Version( - h5netcdf.core.h5py.__version__ - ) >= Version("3.0.0"): - kwargs = dict(decode_vlen_strings=True) + kwargs = {"decode_vlen_strings": True} h5 = h5netcdf.File(tmp_file, mode="r", **kwargs) store = backends.H5NetCDFStore(h5["g"]) @@ -3119,11 +3084,7 @@ def test_deepcopy(self) -> None: v = nc.createVariable("y", np.int32, ("x",)) v[:] = np.arange(10) - kwargs = {} - if Version(h5netcdf.__version__) >= Version("0.10.0") and Version( - h5netcdf.core.h5py.__version__ - ) >= Version("3.0.0"): - kwargs = dict(decode_vlen_strings=True) + kwargs = {"decode_vlen_strings": True} h5 = h5netcdf.File(tmp_file, mode="r", **kwargs) store = backends.H5NetCDFStore(h5) @@ -4861,7 +4822,7 @@ def test_pickle_rasterio(self) -> None: assert_equal(actual, rioda) def test_ENVI_tags(self) -> None: - rasterio = pytest.importorskip("rasterio", minversion="1.0a") + rasterio = pytest.importorskip("rasterio") from rasterio.transform import from_origin # Create an ENVI file with some tags in the ENVI namespace @@ -5004,8 +4965,7 @@ def test_rasterio_vrt_with_transform_and_size(self) -> None: # Test open_rasterio() support of WarpedVRT with transform, width and # height (issue #2864) - # https://github.com/rasterio/rasterio/1768 - rasterio = pytest.importorskip("rasterio", minversion="1.0.28") + rasterio = pytest.importorskip("rasterio") from affine import Affine from rasterio.warp import calculate_default_transform @@ -5034,8 +4994,7 @@ def test_rasterio_vrt_with_transform_and_size(self) -> None: def test_rasterio_vrt_with_src_crs(self) -> None: # Test open_rasterio() support of WarpedVRT with specified src_crs - # https://github.com/rasterio/rasterio/1768 - rasterio = pytest.importorskip("rasterio", minversion="1.0.28") + 
rasterio = pytest.importorskip("rasterio") # create geotiff with no CRS and specify it manually with create_tmp_geotiff(crs=None) as (tmp_file, expected): diff --git a/xarray/tests/test_backends_file_manager.py b/xarray/tests/test_backends_file_manager.py index 1bd66164436..528ee88d29a 100644 --- a/xarray/tests/test_backends_file_manager.py +++ b/xarray/tests/test_backends_file_manager.py @@ -7,7 +7,6 @@ import pytest -# from xarray.backends import file_manager from xarray.backends.file_manager import CachingFileManager from xarray.backends.lru_cache import LRUCache from xarray.core.options import set_options diff --git a/xarray/tests/test_cftimeindex.py b/xarray/tests/test_cftimeindex.py index a27c7f99dfc..2049fd3915d 100644 --- a/xarray/tests/test_cftimeindex.py +++ b/xarray/tests/test_cftimeindex.py @@ -363,85 +363,57 @@ def test_get_loc(date_type, index): @requires_cftime def test_get_slice_bound(date_type, index): - # The kind argument is required in earlier versions of pandas even though it - # is not used by CFTimeIndex. This logic can be removed once our minimum - # version of pandas is at least 1.3. 
- if Version(pd.__version__) < Version("1.3"): - kind_args = ("getitem",) - else: - kind_args = () - - result = index.get_slice_bound("0001", "left", *kind_args) + result = index.get_slice_bound("0001", "left") expected = 0 assert result == expected - result = index.get_slice_bound("0001", "right", *kind_args) + result = index.get_slice_bound("0001", "right") expected = 2 assert result == expected - result = index.get_slice_bound(date_type(1, 3, 1), "left", *kind_args) + result = index.get_slice_bound(date_type(1, 3, 1), "left") expected = 2 assert result == expected - result = index.get_slice_bound(date_type(1, 3, 1), "right", *kind_args) + result = index.get_slice_bound(date_type(1, 3, 1), "right") expected = 2 assert result == expected @requires_cftime def test_get_slice_bound_decreasing_index(date_type, monotonic_decreasing_index): - # The kind argument is required in earlier versions of pandas even though it - # is not used by CFTimeIndex. This logic can be removed once our minimum - # version of pandas is at least 1.3. 
- if Version(pd.__version__) < Version("1.3"): - kind_args = ("getitem",) - else: - kind_args = () - - result = monotonic_decreasing_index.get_slice_bound("0001", "left", *kind_args) + result = monotonic_decreasing_index.get_slice_bound("0001", "left") expected = 2 assert result == expected - result = monotonic_decreasing_index.get_slice_bound("0001", "right", *kind_args) + result = monotonic_decreasing_index.get_slice_bound("0001", "right") expected = 4 assert result == expected - result = monotonic_decreasing_index.get_slice_bound( - date_type(1, 3, 1), "left", *kind_args - ) + result = monotonic_decreasing_index.get_slice_bound(date_type(1, 3, 1), "left") expected = 2 assert result == expected - result = monotonic_decreasing_index.get_slice_bound( - date_type(1, 3, 1), "right", *kind_args - ) + result = monotonic_decreasing_index.get_slice_bound(date_type(1, 3, 1), "right") expected = 2 assert result == expected @requires_cftime def test_get_slice_bound_length_one_index(date_type, length_one_index): - # The kind argument is required in earlier versions of pandas even though it - # is not used by CFTimeIndex. This logic can be removed once our minimum - # version of pandas is at least 1.3. 
- if Version(pd.__version__) <= Version("1.3"): - kind_args = ("getitem",) - else: - kind_args = () - - result = length_one_index.get_slice_bound("0001", "left", *kind_args) + result = length_one_index.get_slice_bound("0001", "left") expected = 0 assert result == expected - result = length_one_index.get_slice_bound("0001", "right", *kind_args) + result = length_one_index.get_slice_bound("0001", "right") expected = 1 assert result == expected - result = length_one_index.get_slice_bound(date_type(1, 3, 1), "left", *kind_args) + result = length_one_index.get_slice_bound(date_type(1, 3, 1), "left") expected = 1 assert result == expected - result = length_one_index.get_slice_bound(date_type(1, 3, 1), "right", *kind_args) + result = length_one_index.get_slice_bound(date_type(1, 3, 1), "right") expected = 1 assert result == expected diff --git a/xarray/tests/test_combine.py b/xarray/tests/test_combine.py index 66da86459a1..53ec8f2f66c 100644 --- a/xarray/tests/test_combine.py +++ b/xarray/tests/test_combine.py @@ -5,7 +5,6 @@ import numpy as np import pytest -from packaging.version import Version from xarray import ( DataArray, @@ -1142,16 +1141,12 @@ def test_combine_by_coords_raises_for_differing_calendars(): da_1 = DataArray([0], dims=["time"], coords=[time_1], name="a").to_dataset() da_2 = DataArray([1], dims=["time"], coords=[time_2], name="a").to_dataset() - if Version(cftime.__version__) >= Version("1.5"): - error_msg = ( - "Cannot combine along dimension 'time' with mixed types." - " Found:.*" - " If importing data directly from a file then setting" - " `use_cftime=True` may fix this issue." - ) - else: - error_msg = r"cannot compare .* \(different calendars\)" - + error_msg = ( + "Cannot combine along dimension 'time' with mixed types." + " Found:.*" + " If importing data directly from a file then setting" + " `use_cftime=True` may fix this issue." 
+ ) with pytest.raises(TypeError, match=error_msg): combine_by_coords([da_1, da_2]) diff --git a/xarray/tests/test_computation.py b/xarray/tests/test_computation.py index d70fd9d0d8d..bfe51c5fcd8 100644 --- a/xarray/tests/test_computation.py +++ b/xarray/tests/test_computation.py @@ -8,7 +8,6 @@ import pandas as pd import pytest from numpy.testing import assert_allclose, assert_array_equal -from packaging.version import Version import xarray as xr from xarray.core.alignment import broadcast @@ -23,7 +22,6 @@ result_name, unified_dim_sizes, ) -from xarray.core.pycompat import mod_version from xarray.tests import ( has_dask, raise_if_dask_computes, @@ -31,8 +29,6 @@ requires_dask, ) -dask_version = mod_version("dask") - def assert_identical(a, b): """A version of this function which accepts numpy arrays""" @@ -1310,13 +1306,8 @@ def test_vectorize_dask_dtype_without_output_dtypes(data_array) -> None: assert expected.dtype == actual.dtype -@pytest.mark.skipif( - dask_version > Version("2021.06"), - reason="dask/dask#7669: can no longer pass output_dtypes and meta", -) @requires_dask def test_vectorize_dask_dtype_meta() -> None: - # meta dtype takes precedence data_array = xr.DataArray([[0, 1, 2], [1, 2, 3]], dims=("x", "y")) expected = xr.DataArray([1, 2], dims=["x"]) @@ -1326,7 +1317,6 @@ def test_vectorize_dask_dtype_meta() -> None: input_core_dims=[["y"]], vectorize=True, dask="parallelized", - output_dtypes=[int], dask_gufunc_kwargs=dict(meta=np.ndarray((0, 0), dtype=float)), ) diff --git a/xarray/tests/test_dask.py b/xarray/tests/test_dask.py index 452e08b15d4..577debbce21 100644 --- a/xarray/tests/test_dask.py +++ b/xarray/tests/test_dask.py @@ -9,13 +9,11 @@ import numpy as np import pandas as pd import pytest -from packaging.version import Version import xarray as xr from xarray import DataArray, Dataset, Variable from xarray.core import duck_array_ops from xarray.core.duck_array_ops import lazy_array_equiv -from xarray.core.pycompat import mod_version from 
xarray.testing import assert_chunks_equal from xarray.tests import ( assert_allclose, @@ -33,7 +31,6 @@ dask = pytest.importorskip("dask") da = pytest.importorskip("dask.array") dd = pytest.importorskip("dask.dataframe") -dask_version = mod_version("dask") ON_WINDOWS = sys.platform == "win32" @@ -118,9 +115,6 @@ def test_indexing(self): self.assertLazyAndIdentical(u[:1], v[:1]) self.assertLazyAndIdentical(u[[0, 1], [0, 1, 2]], v[[0, 1], [0, 1, 2]]) - @pytest.mark.skipif( - dask_version < Version("2021.04.1"), reason="Requires dask >= 2021.04.1" - ) @pytest.mark.parametrize( "expected_data, index", [ @@ -139,14 +133,6 @@ def test_setitem_dask_array(self, expected_data, index): arr[index] = 99 assert_identical(arr, expected) - @pytest.mark.skipif( - dask_version >= Version("2021.04.1"), reason="Requires dask < 2021.04.1" - ) - def test_setitem_dask_array_error(self): - with pytest.raises(TypeError, match=r"stored in a dask array"): - v = self.lazy_var - v[:1] = 0 - def test_squeeze(self): u = self.eager_var v = self.lazy_var @@ -1679,9 +1665,6 @@ def test_optimize(): arr2.compute() -# The graph_manipulation module is in dask since 2021.2 but it became usable with -# xarray only since 2021.3 -@pytest.mark.skipif(dask_version <= Version("2021.02.0"), reason="new module") def test_graph_manipulation(): """dask.graph_manipulation passes an optional parameter, "rename", to the rebuilder function returned by __dask_postperist__; also, the dsk passed to the rebuilder is diff --git a/xarray/tests/test_dataarray.py b/xarray/tests/test_dataarray.py index 7cc20fdbe23..b1a16eb1dfa 100644 --- a/xarray/tests/test_dataarray.py +++ b/xarray/tests/test_dataarray.py @@ -3,9 +3,10 @@ import pickle import sys import warnings +from collections.abc import Hashable from copy import deepcopy from textwrap import dedent -from typing import Any, Final, Hashable, cast +from typing import Any, Final, cast import numpy as np import pandas as pd @@ -4079,15 +4080,8 @@ def test_pad_constant(self) 
-> None: expected = xr.DataArray([1, 9, 1], dims="x") assert_identical(actual, expected) - if Version(np.__version__) >= Version("1.20"): - with pytest.raises(ValueError, match="cannot convert float NaN to integer"): - ar.pad(x=1, constant_values=np.NaN) - else: - actual = ar.pad(x=1, constant_values=np.NaN) - expected = xr.DataArray( - [-9223372036854775808, 9, -9223372036854775808], dims="x" - ) - assert_identical(actual, expected) + with pytest.raises(ValueError, match="cannot convert float NaN to integer"): + ar.pad(x=1, constant_values=np.NaN) def test_pad_coords(self) -> None: ar = DataArray( diff --git a/xarray/tests/test_dataset.py b/xarray/tests/test_dataset.py index 6ab4c40cfa7..30e5c10aaf8 100644 --- a/xarray/tests/test_dataset.py +++ b/xarray/tests/test_dataset.py @@ -4,10 +4,11 @@ import re import sys import warnings +from collections.abc import Hashable from copy import copy, deepcopy from io import StringIO from textwrap import dedent -from typing import Any, Hashable +from typing import Any import numpy as np import pandas as pd diff --git a/xarray/tests/test_distributed.py b/xarray/tests/test_distributed.py index 1c7f1c94019..896259cde5a 100644 --- a/xarray/tests/test_distributed.py +++ b/xarray/tests/test_distributed.py @@ -185,7 +185,6 @@ def test_dask_distributed_zarr_integration_test( loop, consolidated: bool, compute: bool ) -> None: if consolidated: - pytest.importorskip("zarr", minversion="2.2.1.dev2") write_kwargs: dict[str, Any] = {"consolidated": True} read_kwargs: dict[str, Any] = {"backend_kwargs": {"consolidated": True}} else: diff --git a/xarray/tests/test_plot.py b/xarray/tests/test_plot.py index 8d69fb46342..e7f0323e3b4 100644 --- a/xarray/tests/test_plot.py +++ b/xarray/tests/test_plot.py @@ -3,9 +3,10 @@ import contextlib import inspect import math +from collections.abc import Hashable from copy import copy from datetime import datetime -from typing import Any, Callable, Hashable, Literal +from typing import Any, Callable, Literal 
import numpy as np import pandas as pd diff --git a/xarray/tests/test_sparse.py b/xarray/tests/test_sparse.py index fd48bab5f41..a5faa78dd38 100644 --- a/xarray/tests/test_sparse.py +++ b/xarray/tests/test_sparse.py @@ -7,11 +7,10 @@ import numpy as np import pandas as pd import pytest -from packaging.version import Version import xarray as xr from xarray import DataArray, Variable -from xarray.core.pycompat import array_type, mod_version +from xarray.core.pycompat import array_type from xarray.tests import assert_equal, assert_identical, requires_dask filterwarnings = pytest.mark.filterwarnings @@ -20,7 +19,6 @@ sparse = pytest.importorskip("sparse") sparse_array_type = array_type("sparse") -sparse_version = mod_version("sparse") def assert_sparse_equal(a, b): @@ -856,10 +854,6 @@ def test_sparse_coords(self): ) -@pytest.mark.xfail( - sparse_version < Version("0.13.0"), - reason="https://github.com/pydata/xarray/issues/5654", -) @requires_dask def test_chunk(): s = sparse.COO.from_numpy(np.array([0, 0, 1, 2])) diff --git a/xarray/tests/test_utils.py b/xarray/tests/test_utils.py index c1aab2801dd..ef4d054e422 100644 --- a/xarray/tests/test_utils.py +++ b/xarray/tests/test_utils.py @@ -1,6 +1,6 @@ from __future__ import annotations -from typing import Hashable, Iterable, Sequence +from collections.abc import Hashable, Iterable, Sequence import numpy as np import pandas as pd diff --git a/xarray/tests/test_weighted.py b/xarray/tests/test_weighted.py index 8083e545962..f93192de926 100644 --- a/xarray/tests/test_weighted.py +++ b/xarray/tests/test_weighted.py @@ -1,6 +1,7 @@ from __future__ import annotations -from typing import Any, Iterable +from collections.abc import Iterable +from typing import Any import numpy as np import pytest