Enable type hint checking (#5956)
* Enable type hint checking

* Add whatsnew entry

* Add class variable annotation

* Update line numbers in doctests

* Implement suggestions from code review

* Update docs/src/whatsnew/latest.rst

Co-authored-by: Martin Yeo <[email protected]>

* Workaround for python/mypy#1465

Co-authored-by: Martin Yeo <[email protected]>

* Also apply workaround for bounds

* Add type hint

---------

Co-authored-by: Martin Yeo <[email protected]>
bouweandela and trexfeathers authored Jun 26, 2024
1 parent 1bea71c commit 7d6a3f6
Showing 51 changed files with 269 additions and 189 deletions.
8 changes: 8 additions & 0 deletions .pre-commit-config.yaml
@@ -62,6 +62,14 @@ repos:
      - id: sort-all
        types: [file, python]

+  - repo: https://github.com/pre-commit/mirrors-mypy
+    rev: 'v1.9.0'
+    hooks:
+      - id: mypy
+        additional_dependencies:
+          - 'types-requests'
+        exclude: 'noxfile\.py|docs/src/conf\.py'

  - repo: https://github.com/numpy/numpydoc
    rev: v1.7.0
    hooks:
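The new hook runs mypy over the repository on every commit. As a rough sketch of the kind of error static checking now catches (hypothetical function, not from this commit):

    def add(a: int, b: int) -> int:
        return a + b

    add("1", "2")  # mypy: argument has incompatible type "str"; expected "int"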
4 changes: 3 additions & 1 deletion benchmarks/asv_delegated_conda.py
@@ -13,6 +13,7 @@
from pathlib import Path
from shutil import copy2, copytree, rmtree
from tempfile import TemporaryDirectory
+ from typing import Callable

from asv import util as asv_util
from asv.config import Config
@@ -99,6 +100,7 @@ def name(self):
def _update_info(self) -> None:
"""Make sure class properties reflect the actual environment being used."""
# Follow symlink if it has been created.
+ self._path: str
actual_path = Path(self._path).resolve()
self._path = str(actual_path)
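The bare `self._path: str` is a declaration without an assignment: it gives mypy a type for an attribute that is first bound elsewhere (e.g. in a parent class), so the reassignment below it type-checks. A minimal sketch with hypothetical names:

    from pathlib import Path

    class Environment:
        def _update_info(self) -> None:
            self._path: str  # declaration only; no value is assigned here
            self._path = str(Path(self._path).resolve())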

@@ -132,7 +134,7 @@ def copy_asv_files(src_parent: Path, dst_parent: Path) -> None:
# happened. Also a non-issue when copying in the reverse
# direction because the cache dir is temporary.
if src_path.is_dir():
- func = copytree
+ func: Callable = copytree
else:
func = copy2
func(src_path, dst_path)
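Without an annotation, mypy pins `func` to the type of `copytree` from the first branch and then rejects the assignment of `copy2`, whose signature differs; widening the variable to `Callable` accepts both. A sketch with hypothetical names:

    from pathlib import Path
    from shutil import copy2, copytree
    from typing import Callable

    def copy_any(src: Path, dst: Path) -> None:
        if src.is_dir():
            func: Callable = copytree  # widened so the other branch also type-checks
        else:
            func = copy2
        func(src, dst)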
2 changes: 1 addition & 1 deletion benchmarks/benchmarks/cube.py
@@ -55,7 +55,7 @@ def time_create(self, _, cube_creation_strategy: str) -> None:
new_cube.attributes = self.cube_kwargs["attributes"]
new_cube.cell_methods = self.cube_kwargs["cell_methods"]
for coord, dims in self.cube_kwargs["dim_coords_and_dims"]:
- coord: coords.DimCoord # Type hint to help linters.
+ assert isinstance(coord, coords.DimCoord) # Type hint to help linters.
new_cube.add_dim_coord(coord, dims)
for coord, dims in self.cube_kwargs["aux_coords_and_dims"]:
new_cube.add_aux_coord(coord, dims)
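An `assert isinstance(...)` narrows the loop variable's type for mypy and also checks it at runtime, whereas the previous bare annotation was only a hint for linters and presumably did not satisfy mypy here. A sketch with hypothetical types:

    from typing import Any

    class DimCoord: ...

    def add_coords(pairs: list[tuple[Any, int]]) -> None:
        for coord, dims in pairs:
            assert isinstance(coord, DimCoord)  # narrows coord from Any to DimCoord
            print(coord, dims)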
@@ -210,7 +210,7 @@ def track_filesize_saved(self, n_cubesphere):
return os.path.getsize("tmp.nc") * 1.0e-6


- CombineRegionsSaveData.track_filesize_saved.unit = "Mb"
+ CombineRegionsSaveData.track_filesize_saved.unit = "Mb"  # type: ignore[attr-defined]


class CombineRegionsFileStreamedCalc(MixinCombineRegions):
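ASV reads benchmark metadata such as `unit` from attributes attached to the tracking function, but plain function objects declare no such attribute, so mypy reports an `attr-defined` error; the scoped ignore silences exactly that error code. A sketch with hypothetical names:

    def track_filesize() -> float:
        return 1.0

    track_filesize.unit = "Mb"  # type: ignore[attr-defined]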
12 changes: 6 additions & 6 deletions benchmarks/benchmarks/load/__init__.py
@@ -15,16 +15,16 @@
class LoadAndRealise:
# For data generation
timeout = 600.0
- params = [
+ params = (
[(50, 50, 2), (1280, 960, 5), (2, 2, 1000)],
[False, True],
["FF", "PP", "NetCDF"],
- ]
+ )
param_names = ["xyz", "compressed", "file_format"]

def setup_cache(self) -> dict:
file_type_args = self.params[2]
- file_path_dict = {}
+ file_path_dict: dict[tuple[int, int, int], dict[bool, dict[str, str]]] = {}
for xyz in self.params[0]:
file_path_dict[xyz] = {}
x, y, z = xyz
@@ -59,7 +59,7 @@ def time_realise(self, _, __, ___, ____) -> None:

class STASHConstraint:
# xyz sizes mimic LoadAndRealise to maximise file reuse.
- params = [[(2, 2, 2), (1280, 960, 5), (2, 2, 1000)], ["FF", "PP"]]
+ params = ([(2, 2, 2), (1280, 960, 5), (2, 2, 1000)], ["FF", "PP"])
param_names = ["xyz", "file_format"]

def setup_cache(self) -> dict:
@@ -78,7 +78,7 @@ def time_stash_constraint(self, _, __, ___) -> None:


class TimeConstraint:
- params = [[3, 20], ["FF", "PP", "NetCDF"]]
+ params = ([3, 20], ["FF", "PP", "NetCDF"])
param_names = ["time_dim_len", "file_format"]

def setup_cache(self) -> dict:
@@ -139,7 +139,7 @@ class StructuredFF:
avoiding the cost of merging.
"""

- params = [[(2, 2, 2), (1280, 960, 5), (2, 2, 1000)], [False, True]]
+ params = ([(2, 2, 2), (1280, 960, 5), (2, 2, 1000)], [False, True])
param_names = ["xyz", "structured_loading"]

def setup_cache(self) -> dict:
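Turning `params` from a list into a tuple presumably helps mypy: list elements are unified into a single element type, while a tuple keeps a distinct type per position. A sketch:

    # As a list, mypy unifies the two elements into one broad element type.
    params_list = [[(2, 2, 2)], ["FF", "PP"]]
    # As a tuple, each position keeps its own type:
    # tuple[list[tuple[int, int, int]], list[str]]
    params = ([(2, 2, 2)], ["FF", "PP"])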
20 changes: 11 additions & 9 deletions benchmarks/bm_runner.py
@@ -136,7 +136,7 @@ def _setup_common() -> None:

def _asv_compare(*commits: str, overnight_mode: bool = False) -> None:
"""Run through a list of commits comparing each one to the next."""
- commits = [commit[:8] for commit in commits]
+ commits = tuple(commit[:8] for commit in commits)
for i in range(len(commits) - 1):
before = commits[i]
after = commits[i + 1]
@@ -235,19 +235,19 @@ def _gh_create_reports(commit_sha: str, results_full: str, results_shifts: str)

for login_type in ("author", "mergedBy"):
gh_query = f'.["{login_type}"]["login"]'
- command = shlex.split(
+ commandlist = shlex.split(
f"gh pr view {pr_tag[1:]} "
f"--json {login_type} -q '{gh_query}' "
f"--repo {repo}"
)
- login = _subprocess_runner_capture(command)
+ login = _subprocess_runner_capture(commandlist)

- command = [
+ commandlist = [
"curl",
"-s",
f"https://api.github.com/users/{login}",
]
- login_info = _subprocess_runner_capture(command)
+ login_info = _subprocess_runner_capture(commandlist)
is_user = '"type": "User"' in login_info
if is_user:
assignee = login
@@ -313,7 +313,7 @@ class _SubParserGenerator(ABC):
description: str = NotImplemented
epilog: str = NotImplemented

- def __init__(self, subparsers: ArgumentParser.add_subparsers) -> None:
+ def __init__(self, subparsers: argparse._SubParsersAction[ArgumentParser]) -> None:
self.subparser: ArgumentParser = subparsers.add_parser(
self.name,
description=self.description,
@@ -476,10 +476,12 @@ def csperf(args: argparse.Namespace, run_type: Literal["cperf", "sperf"]) -> None
environ["ON_DEMAND_BENCHMARKS"] = "True"
commit_range = "upstream/main^!"

- asv_command = ASV_HARNESS.format(posargs=commit_range) + f" --bench={run_type}"
+ asv_command_str = (
+     ASV_HARNESS.format(posargs=commit_range) + f" --bench={run_type}"
+ )

# Only do a single round.
- asv_command = shlex.split(re.sub(r"rounds=\d", "rounds=1", asv_command))
+ asv_command = shlex.split(re.sub(r"rounds=\d", "rounds=1", asv_command_str))
try:
_subprocess_runner([*asv_command, *args.asv_args], asv=True)
except subprocess.CalledProcessError as err:
@@ -584,7 +586,7 @@ def func(args: argparse.Namespace) -> None:
environ["DATA_GEN_PYTHON"] = str(python_path)
_setup_common()
# get path of data-gen environment, setup by previous call
- python_path = environ["DATA_GEN_PYTHON"]
+ python_path = Path(environ["DATA_GEN_PYTHON"])
# allow 'on-demand' benchmarks
environ["ON_DEMAND_BENCHMARKS"] = "1"
asv_command = [
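mypy fixes a variable to its first inferred type, so reusing one name for values of different types fails to type-check; hence the renames to `commandlist` and `asv_command_str`, and the conversion of `environ[...]` (a `str`) back to `Path`. A sketch with hypothetical names:

    import shlex
    from pathlib import Path

    asv_command_str = "asv run --bench=cperf"   # str
    asv_command = shlex.split(asv_command_str)  # list[str] gets its own name
    python_path = Path("/usr/bin/python3")      # convert str -> Path at the boundary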
16 changes: 8 additions & 8 deletions docs/src/further_topics/filtering_warnings.rst
@@ -47,9 +47,9 @@ Warnings:

>>> my_operation()
...
- iris/coord_systems.py:444: IrisUserWarning: Setting inverse_flattening does not affect other properties of the GeogCS object. To change other properties set them explicitly or create a new GeogCS instance.
+ iris/coord_systems.py:445: IrisUserWarning: Setting inverse_flattening does not affect other properties of the GeogCS object. To change other properties set them explicitly or create a new GeogCS instance.
warnings.warn(wmsg, category=iris.warnings.IrisUserWarning)
- iris/coord_systems.py:770: IrisDefaultingWarning: Discarding false_easting and false_northing that are not used by Cartopy.
+ iris/coord_systems.py:771: IrisDefaultingWarning: Discarding false_easting and false_northing that are not used by Cartopy.
warnings.warn(

Warnings can be suppressed using the Python warnings filter with the ``ignore``
@@ -110,7 +110,7 @@ You can target specific Warning messages, e.g.
... warnings.filterwarnings("ignore", message="Discarding false_easting")
... my_operation()
...
- iris/coord_systems.py:444: IrisUserWarning: Setting inverse_flattening does not affect other properties of the GeogCS object. To change other properties set them explicitly or create a new GeogCS instance.
+ iris/coord_systems.py:445: IrisUserWarning: Setting inverse_flattening does not affect other properties of the GeogCS object. To change other properties set them explicitly or create a new GeogCS instance.
warnings.warn(wmsg, category=iris.warnings.IrisUserWarning)

::
@@ -125,16 +125,16 @@
.. doctest:: filtering_warnings

>>> with warnings.catch_warnings():
- ... warnings.filterwarnings("ignore", module="iris.coord_systems", lineno=444)
+ ... warnings.filterwarnings("ignore", module="iris.coord_systems", lineno=445)
... my_operation()
...
- iris/coord_systems.py:770: IrisDefaultingWarning: Discarding false_easting and false_northing that are not used by Cartopy.
+ iris/coord_systems.py:771: IrisDefaultingWarning: Discarding false_easting and false_northing that are not used by Cartopy.
warnings.warn(

::

- python -W ignore:::iris.coord_systems:444
- export PYTHONWARNINGS=ignore:::iris.coord_systems:444
+ python -W ignore:::iris.coord_systems:445
+ export PYTHONWARNINGS=ignore:::iris.coord_systems:445

Warnings from a Common Source
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
@@ -188,7 +188,7 @@ module during execution:
... )
... my_operation()
...
- iris/coord_systems.py:444: IrisUserWarning: Setting inverse_flattening does not affect other properties of the GeogCS object. To change other properties set them explicitly or create a new GeogCS instance.
+ iris/coord_systems.py:445: IrisUserWarning: Setting inverse_flattening does not affect other properties of the GeogCS object. To change other properties set them explicitly or create a new GeogCS instance.
warnings.warn(wmsg, category=iris.warnings.IrisUserWarning)

----
3 changes: 3 additions & 0 deletions docs/src/whatsnew/latest.rst
@@ -145,6 +145,9 @@ This document explains the changes made to Iris for this release

#. `@rcomer`_ made some :meth:`~iris.cube.Cube.slices_over` tests go faster (:pull:`5973`)

+ #. `@bouweandela`_ enabled mypy checks for type hints.
+    The entire team would like to thank Bouwe for putting in the hard
+    work on an unglamorous but highly valuable contribution. (:pull:`5956`)

.. comment
Whatsnew author names (@github name) in alphabetical order. Note that,
8 changes: 6 additions & 2 deletions lib/iris/__init__.py
@@ -94,6 +94,7 @@ def callback(cube, field, filename):
import itertools
import os.path
import threading
+ from typing import Callable, Literal

import iris._constraints
import iris.config
@@ -189,7 +190,7 @@ def __repr__(self):
return msg.format(self.datum_support, self.pandas_ndim, self.save_split_attrs)

# deprecated_options = {'example_future_flag': 'warning',}
- deprecated_options = {}
+ deprecated_options: dict[str, Literal["error", "warning"]] = {}

def __setattr__(self, name, value):
if name in self.deprecated_options:
@@ -248,7 +249,10 @@ def context(self, **kwargs):

# Initialise the site configuration dictionary.
#: Iris site configuration dictionary.
- site_configuration = {}
+ site_configuration: dict[
+     Literal["cf_profile", "cf_patch", "cf_patch_conventions"],
+     Callable | Literal[False] | None,
+ ] = {}

try:
from iris.site_config import update as _update
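The annotation restricts `site_configuration` to the three recognised keys and to values that are a callable, `False`, or `None`. A minimal sketch of the pattern:

    from typing import Callable, Literal

    config: dict[Literal["cf_profile", "cf_patch"], Callable | Literal[False] | None] = {}
    config["cf_profile"] = False    # OK
    config["unknown"] = None        # mypy: invalid key literal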
3 changes: 2 additions & 1 deletion lib/iris/_lazy_data.py
@@ -9,6 +9,7 @@
"""

from functools import lru_cache, wraps
+ from types import ModuleType
from typing import Sequence

import dask
@@ -376,7 +377,7 @@ def _combine(
lazy = any(is_lazy_data(a) for a in arrays)
masked = any(is_masked_data(a) for a in arrays)

- array_module = np
+ array_module: ModuleType = np
if masked:
if lazy:
# Avoid inconsistent array type when slicing resulting array
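Each imported module object has its own static type, so reassigning `array_module` to a different array library in the masked/lazy branches needs the common `ModuleType` annotation. A sketch:

    from types import ModuleType

    import numpy as np
    import numpy.ma as ma

    masked = True
    array_module: ModuleType = np
    if masked:
        array_module = ma  # legal only because of the ModuleType annotation
    print(array_module.zeros(2))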
2 changes: 1 addition & 1 deletion lib/iris/analysis/__init__.py
@@ -1848,7 +1848,7 @@ def interp_order(length):
and lazy data.
"""
- MAX_RUN.name = lambda: "max_run"
+ MAX_RUN.name = lambda: "max_run"  # type: ignore[method-assign]


GMEAN = Aggregator("geometric_mean", scipy.stats.mstats.gmean)
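Replacing `name` with a lambda overwrites what mypy sees as a method, hence the `method-assign` error code. A sketch with hypothetical names:

    class Aggregator:
        def __init__(self, cell_method: str) -> None:
            self._cell_method = cell_method

        def name(self) -> str:
            return self._cell_method

    MAX_RUN = Aggregator("maximum run")
    MAX_RUN.name = lambda: "max_run"  # type: ignore[method-assign]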
2 changes: 1 addition & 1 deletion lib/iris/analysis/maths.py
@@ -558,7 +558,7 @@ def exponentiate(cube, exponent, in_place=False):
)
if cube.has_lazy_data():

- def power(data):
+ def power(data, out=None):
return operator.pow(data, exponent)

else:
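When one name is bound to different function definitions in different branches, mypy requires the signatures to be compatible; the unused `out=None` presumably mirrors the eager branch, which forwards to a NumPy ufunc. A sketch:

    import numpy as np

    exponent = 2
    lazy = True
    if lazy:
        def power(data, out=None):  # "out" keeps both definitions compatible
            return data ** exponent
    else:
        def power(data, out=None):
            return np.power(data, exponent, out=out)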
4 changes: 2 additions & 2 deletions lib/iris/common/metadata.py
@@ -141,7 +141,7 @@ class BaseMetadata(metaclass=_NamedTupleMeta):

DEFAULT_NAME = "unknown" # the fall-back name for metadata identity

- _members = (
+ _members: str | Iterable[str] = (
"standard_name",
"long_name",
"var_name",
@@ -870,7 +870,7 @@ def equal(self, other, lenient=None):
class CoordMetadata(BaseMetadata):
"""Metadata container for a :class:`~iris.coords.Coord`."""

_members = ("coord_system", "climatological")
_members: str | Iterable[str] = ("coord_system", "climatological")

__slots__ = ()

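A subclass may only redeclare a class attribute with a type compatible with the base declaration, so both occurrences of `_members` are widened to `str | Iterable[str]`, presumably to accommodate subclasses that assign a single name as a plain string. A sketch:

    from collections.abc import Iterable

    class BaseMetadata:
        _members: str | Iterable[str] = ("standard_name", "long_name")

    class CoordMetadata(BaseMetadata):
        _members: str | Iterable[str] = ("coord_system", "climatological")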
12 changes: 6 additions & 6 deletions lib/iris/common/resolve.py
@@ -31,7 +31,7 @@


_AuxCoverage = namedtuple(
"AuxCoverage",
"_AuxCoverage",
[
"cube",
"common_items_aux",
Expand All @@ -45,18 +45,18 @@
)

_CategoryItems = namedtuple(
"CategoryItems",
"_CategoryItems",
["items_dim", "items_aux", "items_scalar"],
)

_DimCoverage = namedtuple(
"DimCoverage",
"_DimCoverage",
["cube", "metadata", "coords", "dims_common", "dims_local", "dims_free"],
)

_Item = namedtuple("Item", ["metadata", "coord", "dims"])
_Item = namedtuple("_Item", ["metadata", "coord", "dims"])

_PreparedFactory = namedtuple("PreparedFactory", ["container", "dependencies"])
_PreparedFactory = namedtuple("_PreparedFactory", ["container", "dependencies"])


@dataclass
Expand Down Expand Up @@ -95,7 +95,7 @@ def create_coord(self, metadata):
return result


_PreparedMetadata = namedtuple("PreparedMetadata", ["combined", "src", "tgt"])
_PreparedMetadata = namedtuple("_PreparedMetadata", ["combined", "src", "tgt"])


class Resolve:
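mypy requires the name passed to `namedtuple` to match the variable it is assigned to, which is why every factory name here gains the leading underscore. A sketch:

    from collections import namedtuple

    _Item = namedtuple("_Item", ["metadata", "coord", "dims"])  # names must match
    item = _Item(metadata=None, coord=None, dims=())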
3 changes: 2 additions & 1 deletion lib/iris/coord_systems.py
@@ -7,6 +7,7 @@
from abc import ABCMeta, abstractmethod
from functools import cached_property
import re
+ from typing import ClassVar
import warnings

import cartopy.crs as ccrs
@@ -48,7 +49,7 @@ def _float_or_None(arg):
class CoordSystem(metaclass=ABCMeta):
"""Abstract base class for coordinate systems."""

- grid_mapping_name = None
+ grid_mapping_name: ClassVar[str | None] = None

def __eq__(self, other):
"""Override equality.
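`ClassVar` marks `grid_mapping_name` as a class-level attribute that subclasses override, rather than a per-instance attribute. A sketch with hypothetical names:

    from typing import ClassVar

    class CoordSystem:
        grid_mapping_name: ClassVar[str | None] = None

    class GeogCS(CoordSystem):
        grid_mapping_name = "latitude_longitude"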
14 changes: 12 additions & 2 deletions lib/iris/coords.py
@@ -2704,7 +2704,12 @@ def _new_points_requirements(self, points):
emsg = "The {!r} {} points array must be strictly monotonic."
raise ValueError(emsg.format(self.name(), self.__class__.__name__))

- @Coord._values.setter
+ @property
+ def _values(self):
+     # Overridden just to allow .setter override.
+     return super()._values
+
+ @_values.setter
def _values(self, points):
# DimCoord always realises the points, to allow monotonicity checks.
# Ensure it is an actual array, and also make our own copy so that we
@@ -2796,7 +2801,12 @@ def _new_bounds_requirements(self, bounds):

return bounds

- @Coord.bounds.setter
+ @property
+ def bounds(self):
+     # Overridden just to allow .setter override.
+     return super().bounds
+
+ @bounds.setter
def bounds(self, bounds):
if bounds is not None:
# Ensure we have a realised array of new bounds values.
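This is the python/mypy#1465 workaround named in the commit message: mypy cannot follow `@Base.prop.setter` used to override just the setter of an inherited property, so the subclass redefines the whole property and delegates the getter to the parent. A sketch with hypothetical names:

    class Base:
        @property
        def points(self):
            return self._points

        @points.setter
        def points(self, value):
            self._points = value

    class Derived(Base):
        @property
        def points(self):
            # Redeclared only so the setter below can be overridden.
            return super().points

        @points.setter
        def points(self, value):
            self._points = float(value)  # stricter subclass behaviour

    d = Derived()
    d.points = "3"   # uses the overriding setter
    print(d.points)  # 3.0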