Skip to content

Commit

Permalink
Merge pull request #5186 from bjlittle/codespell
Browse files Browse the repository at this point in the history
Adopt codespell
  • Loading branch information
tkknight authored Apr 18, 2023
2 parents 7b6fa46 + 3038bb2 commit 2eac400
Show file tree
Hide file tree
Showing 72 changed files with 158 additions and 117 deletions.
18 changes: 18 additions & 0 deletions .git-blame-ignore-revs
Original file line number Diff line number Diff line change
@@ -1,2 +1,20 @@
# Format: numpy array format (#5235)
c18dcd8dafef0cc7bbbf80dfce66f76a46ce59c5

# style: flake8 (#3755)
7c86bc0168684345dc475457b1a77dadc77ce9bb

# style: black (#3518)
ffcfad475e0593e1e40895453cf1df154e5f6f2c

# style: isort (#4174)
15bbcc5ac3d539cb6e820148b66e7cf55d91c5d2

# style: blacken-docs (#4205)
1572e180243e492d8ff76fa8cdefb82ef6f90415

# style: sort-all (#4353)
64705dbc40881233aae45f051d96049150369e53

# style: codespell (#5186)
417aa6bbd9b10d25cad7def54d47ef4d718bc38d
7 changes: 7 additions & 0 deletions .pre-commit-config.yaml
Original file line number Diff line number Diff line change
Expand Up @@ -28,6 +28,13 @@ repos:
# Don't commit to main branch.
- id: no-commit-to-branch

- repo: https://github.com/codespell-project/codespell
rev: "v2.2.2"
hooks:
- id: codespell
types_or: [asciidoc, python, markdown, rst]
additional_dependencies: [tomli]

- repo: https://github.com/psf/black
rev: 23.3.0
hooks:
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -50,7 +50,7 @@ def _make_region_cubes(self, full_mesh_cube):
i_faces = np.concatenate([i_faces[:, 2:], i_faces[:, :2]], axis=1)
# flatten to get [2 3 4 0 1 (-) 8 9 10 6 7 (-) 13 14 15 11 12 ...]
i_faces = i_faces.flatten()
# reduce back to orignal length, wrap any overflows into valid range
# reduce back to original length, wrap any overflows into valid range
i_faces = i_faces[:n_faces] % n_faces

# Divide into regions -- always slightly uneven, since 7 doesn't divide
Expand Down
2 changes: 1 addition & 1 deletion benchmarks/benchmarks/sperf/combine_regions.py
Original file line number Diff line number Diff line change
Expand Up @@ -46,7 +46,7 @@ def _make_region_cubes(self, full_mesh_cube):
i_faces = np.concatenate([i_faces[:, 2:], i_faces[:, :2]], axis=1)
# flatten to get [2 3 4 0 1 (-) 8 9 10 6 7 (-) 13 14 15 11 12 ...]
i_faces = i_faces.flatten()
# reduce back to orignal length, wrap any overflows into valid range
# reduce back to original length, wrap any overflows into valid range
i_faces = i_faces[:n_faces] % n_faces

# Divide into regions -- always slightly uneven, since 7 doesn't divide
Expand Down
2 changes: 1 addition & 1 deletion codecov.yml
Original file line number Diff line number Diff line change
Expand Up @@ -6,4 +6,4 @@ coverage:
target: auto
# coverage can drop by up to <threshold>% while still posting success
threshold: 3%
patch: off
patch: off
Original file line number Diff line number Diff line change
Expand Up @@ -78,7 +78,7 @@ def make_plot(projection_name, projection_crs):
y_points = y_lower + y_delta * np.concatenate(
(zeros, steps, ones, steps[::-1])
)
# Get the Iris coordinate sytem of the X coordinate (Y should be the same).
# Get the Iris coordinate system of the X coordinate (Y should be the same).
cs_data1 = x_coord.coord_system
# Construct an equivalent Cartopy coordinate reference system ("crs").
crs_data1 = cs_data1.as_cartopy_crs()
Expand Down
2 changes: 1 addition & 1 deletion docs/gallery_code/general/plot_rotated_pole_mapping.py
Original file line number Diff line number Diff line change
Expand Up @@ -40,7 +40,7 @@ def main():
plt.gca().coastlines()
iplt.show()

# Plot #3: Contourf overlayed by coloured point data
# Plot #3: Contourf overlaid by coloured point data
plt.figure()
qplt.contourf(air_pressure)
iplt.points(air_pressure, c=air_pressure.data)
Expand Down
2 changes: 1 addition & 1 deletion docs/gallery_code/meteorology/plot_lagged_ensemble.py
Original file line number Diff line number Diff line change
Expand Up @@ -115,7 +115,7 @@ def main():
# Get the time for the entire plot.
time = last_time_coord.units.num2date(last_time_coord.bounds[0, 0])

# Set a global title for the postage stamps with the date formated by
# Set a global title for the postage stamps with the date formatted by
# "monthname year".
time_string = time.strftime("%B %Y")
plt.suptitle(f"Surface temperature ensemble forecasts for {time_string}")
Expand Down
2 changes: 1 addition & 1 deletion docs/src/IEP/IEP001.adoc
Original file line number Diff line number Diff line change
Expand Up @@ -119,7 +119,7 @@ cube.sel(height=1.5)
The semantics of position-based slices will continue to match that of normal Python slices. The start position is included, the end position is excluded.
Value-based slices will be stricly inclusive, with both the start and end values included. This behaviour differs from normal Python slices but is in common with pandas.
Value-based slices will be strictly inclusive, with both the start and end values included. This behaviour differs from normal Python slices but is in common with pandas.
Just as for normal Python slices, we do not need to provide the ability to control the include/exclude behaviour for slicing.
Expand Down
1 change: 1 addition & 0 deletions docs/src/common_links.inc
Original file line number Diff line number Diff line change
Expand Up @@ -6,6 +6,7 @@
.. _flake8: https://flake8.pycqa.org/en/stable/
.. _.flake8.yml: https://github.com/SciTools/iris/blob/main/.flake8
.. _cirrus-ci: https://cirrus-ci.com/github/SciTools/iris
.. _codespell: https://github.com/codespell-project/codespell
.. _conda: https://docs.conda.io/en/latest/
.. _contributor: https://github.com/SciTools/scitools.org.uk/blob/master/contributors.json
.. _core developers: https://github.com/SciTools/scitools.org.uk/blob/master/contributors.json
Expand Down
2 changes: 1 addition & 1 deletion docs/src/community/iris_xarray.rst
Original file line number Diff line number Diff line change
Expand Up @@ -11,7 +11,7 @@ you can be prepared, and to help you choose the best package for your use case.
Overall Experience
------------------

Iris is the more specialised package, focussed on making it as easy
Iris is the more specialised package, focused on making it as easy
as possible to work with meteorological and climatological data. Iris
is built to natively handle many key concepts, such as the CF conventions,
coordinate systems and bounded coordinates. Iris offers a smaller toolkit of
Expand Down
2 changes: 1 addition & 1 deletion docs/src/conf.py
Original file line number Diff line number Diff line change
Expand Up @@ -81,7 +81,7 @@ def autolog(message):
# add some sample files from the developers guide..
sys.path.append(os.path.abspath(os.path.join("developers_guide")))

# why isnt the iris path added to it is discoverable too? We dont need to,
# why isn't the iris path added to it is discoverable too? We dont need to,
# the sphinext to generate the api rst knows where the source is. If it
# is added then the travis build will likely fail.

Expand Down
8 changes: 8 additions & 0 deletions docs/src/developers_guide/contributing_ci_tests.rst
Original file line number Diff line number Diff line change
Expand Up @@ -117,6 +117,14 @@ pull-requests given the `Iris`_ GitHub repository `.pre-commit-config.yaml`_.

See the `pre-commit.ci dashboard`_ for details of recent past and active Iris jobs.

.. note::

The `codespell`_ ``pre-commit`` hook checks the spelling of the whole codebase
and documentation. This hook is configured in the ``[tool.codespell]`` section
of the ``pyproject.toml`` file.

Append to the ``ignore-words-list`` option any **valid words** that are
considered **not** a typo and should **not** be corrected by `codespell`_.

.. _.pre-commit-config.yaml: https://github.com/SciTools/iris/blob/main/.pre-commit-config.yaml
.. _pre-commit.ci dashboard: https://results.pre-commit.ci/repo/github/5312648
2 changes: 1 addition & 1 deletion docs/src/developers_guide/gitwash/configure_git.rst
Original file line number Diff line number Diff line change
Expand Up @@ -144,7 +144,7 @@ and it gives graph / text output something like this (but with color!)::
| * 4aff2a8 - fixed bug 35, and added a test in test_bugfixes (2 weeks ago) [Hugo]
|/
* a7ff2e5 - Added notes on discussion/proposal made during Data Array Summit. (2 weeks ago) [Corran Webster]
* 68f6752 - Initial implimentation of AxisIndexer - uses 'index_by' which needs to be changed to a call on an Axes object - this is all very sketchy right now. (2 weeks ago) [Corr
* 68f6752 - Initial implementation of AxisIndexer - uses 'index_by' which needs to be changed to a call on an Axes object - this is all very sketchy right now. (2 weeks ago) [Corr
* 376adbd - Merge pull request #46 from terhorst/main (2 weeks ago) [Jonathan Terhorst]
|\
| * b605216 - updated joshu example to current api (3 weeks ago) [Jonathan Terhorst]
Expand Down
2 changes: 1 addition & 1 deletion docs/src/developers_guide/release.rst
Original file line number Diff line number Diff line change
Expand Up @@ -173,7 +173,7 @@ Ensure that the artifacts are successfully uploaded and available on
from PyPI::

> conda deactivate
> conda env create --file ./requrements/ci/iris.yml
> conda env create --file ./requirements/iris.yml
> . activate iris-dev
> python -m pip install --no-deps scitools-iris

Expand Down
2 changes: 1 addition & 1 deletion docs/src/further_topics/ugrid/operations.rst
Original file line number Diff line number Diff line change
Expand Up @@ -897,7 +897,7 @@ previously initialised regridder:

.. code-block:: python
# Extract a different cube defined on te same Mesh.
# Extract a different cube defined on the same Mesh.
>>> mesh_cube2 = mesh_cubes.extract_cube("precipitation_flux")
>>> print(mesh_cube2)
precipitation_flux / (kg m-2 s-1) (-- : 1; -- : 13824)
Expand Down
2 changes: 1 addition & 1 deletion docs/src/userguide/plotting_examples/1d_with_legend.py
Original file line number Diff line number Diff line change
Expand Up @@ -31,7 +31,7 @@
plt.grid(True)

# Provide some axis labels
plt.ylabel("Temerature / kelvin")
plt.ylabel("Temperature / kelvin")
plt.xlabel("Longitude / degrees")

# And a sensible title
Expand Down
4 changes: 2 additions & 2 deletions docs/src/whatsnew/1.7.rst
Original file line number Diff line number Diff line change
Expand Up @@ -21,14 +21,14 @@ Features
transparent; for example, before the introduction of biggus, MemoryErrors
were likely for very large datasets::

>>> result = extremely_large_cube.collapsed('time', iris.analyis.MEAN)
>>> result = extremely_large_cube.collapsed('time', iris.analysis.MEAN)
MemoryError

Now, for supported operations, the evaluation is lazy (i.e. it doesn't take
place until the actual data is subsequently requested) and can handle data
larger than available system memory::

>>> result = extremely_large_cube.collapsed('time', iris.analyis.MEAN)
>>> result = extremely_large_cube.collapsed('time', iris.analysis.MEAN)
>>> print(type(result))
<class 'iris.cube.Cube'>

Expand Down
2 changes: 1 addition & 1 deletion docs/src/whatsnew/2.0.rst
Original file line number Diff line number Diff line change
Expand Up @@ -36,7 +36,7 @@ Features
* The *new* in-place arithmetic operators :data:`__iadd__`, :data:`__idiv__`,
:data:`__imul__`, :data:`__isub__`, and :data:`__itruediv__` have been
added to support :class:`~iris.cube.Cube` operations :data:`+=`,
:data:`/=`, :data:`*=`, and :data:`-=`. Note that, for **divison**
:data:`/=`, :data:`*=`, and :data:`-=`. Note that, for **division**
*__future__.division* is always in effect.

* Changes to the :class:`iris.coords.Coord`:
Expand Down
2 changes: 1 addition & 1 deletion docs/src/whatsnew/3.0.rst
Original file line number Diff line number Diff line change
Expand Up @@ -142,7 +142,7 @@ v3.0.4 (22 July 2021)
Firstly, ancillary-variables or cell-measures with long names can now widen the cube "dimensions map" to fit,
whereas previously printing these cases caused an Exception.
Secondly, cube units are now always printed, whereas previously they were missed out any time that the
"dimensions map" was widened to accomodate long coordinate names.
"dimensions map" was widened to accommodate long coordinate names.
(:pull:`4233`)(:pull:`4238`)

💼 **Internal**
Expand Down
2 changes: 1 addition & 1 deletion docs/src/whatsnew/3.2.rst
Original file line number Diff line number Diff line change
Expand Up @@ -167,7 +167,7 @@ v3.2.1 (11 Mar 2022)
as well as some long-standing bugs with vertical coordinates and number
formats. (:pull:`4411`)

#. `@rcomer`_ fixed :meth:`~iris.cube.Cube.subset` to alway return ``None`` if
#. `@rcomer`_ fixed :meth:`~iris.cube.Cube.subset` to always return ``None`` if
no value match is found. (:pull:`4417`)

#. `@wjbenfold`_ changed :meth:`iris.util.points_step` to stop it from warning
Expand Down
2 changes: 1 addition & 1 deletion docs/src/whatsnew/3.5.rst
Original file line number Diff line number Diff line change
Expand Up @@ -45,7 +45,7 @@ This document explains the changes made to Iris for this release
:ref:`documentation page<community_plugins>` for further information.
(:pull:`5144`)

#. `@rcomer`_ enabled lazy evaluation of :obj:`~iris.analysis.RMS` calcuations
#. `@rcomer`_ enabled lazy evaluation of :obj:`~iris.analysis.RMS` calculations
with weights. (:pull:`5017`)

#. `@schlunma`_ allowed the usage of cubes, coordinates, cell measures, or
Expand Down
7 changes: 5 additions & 2 deletions docs/src/whatsnew/latest.rst
Original file line number Diff line number Diff line change
Expand Up @@ -20,6 +20,7 @@ This document explains the changes made to Iris for this release
And finally, get in touch with us on :issue:`GitHub<new/choose>` if you have
any issues or feature requests for improving Iris. Enjoy!


📢 Announcements
================

Expand Down Expand Up @@ -68,10 +69,12 @@ This document explains the changes made to Iris for this release
#. `@tkknight`_ migrated to `sphinx-design`_ over the legacy `sphinx-panels`_.
(:pull:`5127`)


💼 Internal
===========

#. N/A
#. `@bjlittle`_ added the `codespell`_ `pre-commit`_ ``git-hook`` to automate
spell checking within the code-base. (:pull:`5186`)


.. comment
Expand All @@ -85,4 +88,4 @@ This document explains the changes made to Iris for this release
Whatsnew resources in alphabetical order:
.. _sphinx-panels: https://github.com/executablebooks/sphinx-panels
.. _sphinx-design: https://github.com/executablebooks/sphinx-design
.. _sphinx-design: https://github.com/executablebooks/sphinx-design
2 changes: 1 addition & 1 deletion lib/iris/_concatenate.py
Original file line number Diff line number Diff line change
Expand Up @@ -22,7 +22,7 @@
#
# * Cope with auxiliary coordinate factories.
#
# * Allow concatentation over a user specified dimension.
# * Allow concatenation over a user specified dimension.
#


Expand Down
8 changes: 4 additions & 4 deletions lib/iris/_merge.py
Original file line number Diff line number Diff line change
Expand Up @@ -298,7 +298,7 @@ class _CoordSignature(
):
"""
Criterion for identifying a specific type of :class:`iris.cube.Cube`
based on its scalar and vector coorinate data and metadata, and
based on its scalar and vector coordinate data and metadata, and
auxiliary coordinate factories.
Args:
Expand Down Expand Up @@ -516,7 +516,7 @@ class _Relation(namedtuple("Relation", ["separable", "inseparable"])):
* separable:
A set of independent candidate dimension names.
* inseperable:
* inseparable:
A set of dependent candidate dimension names.
"""
Expand Down Expand Up @@ -1419,7 +1419,7 @@ def _define_space(self, space, positions, indexes, function_matrix):
"""

# Heuristic reordering of coordinate defintion indexes into
# Heuristic reordering of coordinate definition indexes into
# preferred dimension order.
def axis_and_name(name):
axis_dict = {"T": 1, "Z": 2, "Y": 3, "X": 4}
Expand Down Expand Up @@ -1467,7 +1467,7 @@ def axis_and_name(name):
}
else:
# TODO: Consider appropriate sort order (ascending,
# decending) i.e. use CF positive attribute.
# descending) i.e. use CF positive attribute.
cells = sorted(indexes[name])
points = np.array(
[cell.point for cell in cells],
Expand Down
4 changes: 2 additions & 2 deletions lib/iris/analysis/_area_weighted.py
Original file line number Diff line number Diff line change
Expand Up @@ -433,7 +433,7 @@ def _spherical_area(y_bounds, x_bounds, radius=1.0):
Args:
* y_bounds:
An (n, 2) shaped NumPy array of latitide bounds in radians.
An (n, 2) shaped NumPy array of latitude bounds in radians.
* x_bounds:
An (m, 2) shaped NumPy array of longitude bounds in radians.
* radius:
Expand Down Expand Up @@ -586,7 +586,7 @@ def _regrid_area_weighted_array(
y_dim = src_data.ndim - 2

# Create empty "pre-averaging" data array that will enable the
# src_data data coresponding to a given target grid point,
# src_data data corresponding to a given target grid point,
# to be stacked per point.
# Note that dtype is not preserved and that the array mask
# allows for regions that do not overlap.
Expand Down
2 changes: 1 addition & 1 deletion lib/iris/analysis/_interpolation.py
Original file line number Diff line number Diff line change
Expand Up @@ -213,7 +213,7 @@ def __init__(self, src_cube, coords, method, extrapolation_mode):
# Trigger any deferred loading of the source cube's data and snapshot
# its state to ensure that the interpolator is impervious to external
# changes to the original source cube. The data is loaded to prevent
# the snaphot having lazy data, avoiding the potential for the
# the snapshot having lazy data, avoiding the potential for the
# same data to be loaded again and again.
if src_cube.has_lazy_data():
src_cube.data
Expand Down
4 changes: 2 additions & 2 deletions lib/iris/analysis/_regrid.py
Original file line number Diff line number Diff line change
Expand Up @@ -239,7 +239,7 @@ def _regrid_indices(cells, depth, points):
x_indices = _regrid_indices(tx_cells, tx_depth, sx_points)
y_indices = _regrid_indices(ty_cells, ty_depth, sy_points)

# Now construct a sparse M x N matix, where M is the flattened target
# Now construct a sparse M x N matrix, where M is the flattened target
# space, and N is the flattened source space. The sparse matrix will then
# be populated with those source cube points that contribute to a specific
# target cube cell.
Expand Down Expand Up @@ -1021,7 +1021,7 @@ def _create_cube(
The dimensions of the X and Y coordinate within the source Cube.
tgt_coords : tuple of :class:`iris.coords.Coord`\\ 's
Either two 1D :class:`iris.coords.DimCoord`\\ 's, two 1D
:class:`iris.experimental.ugrid.DimCoord`\\ 's or two ND
:class:`iris.experimental.ugrid.DimCoord`\\ 's or two n-D
:class:`iris.coords.AuxCoord`\\ 's representing the new grid's
X and Y coordinates.
num_tgt_dims : int
Expand Down
2 changes: 1 addition & 1 deletion lib/iris/analysis/cartography.py
Original file line number Diff line number Diff line change
Expand Up @@ -1013,7 +1013,7 @@ def _transform_distance_vectors_tolerance_mask(
"""
Return a mask that can be applied to data array to mask elements
where the magnitude of vectors are not preserved due to numerical
errors introduced by the tranformation between coordinate systems.
errors introduced by the transformation between coordinate systems.
Args:
* src_crs (`cartopy.crs.Projection`):
Expand Down
4 changes: 2 additions & 2 deletions lib/iris/analysis/trajectory.py
Original file line number Diff line number Diff line change
Expand Up @@ -347,7 +347,7 @@ def interpolate(cube, sample_points, method=None):
for columns_coord in columns.dim_coords + columns.aux_coords:
src_dims = cube.coord_dims(columns_coord)
if not squish_my_dims.isdisjoint(src_dims):
# Mapping the cube indicies onto the coord
# Mapping the cube indices onto the coord
initial_coord_inds = [initial_inds[ind] for ind in src_dims]
# Making the final ones the same way as for the cube
# 0 will always appear in the initial ones because we know this
Expand Down Expand Up @@ -660,7 +660,7 @@ def _nearest_neighbour_indices_ndcoords(cube, sample_points, cache=None):
for c, (coord, coord_dims) in enumerate(
sample_space_coords_and_dims
):
# Index of this datum along this coordinate (could be nD).
# Index of this datum along this coordinate (could be n-D).
if coord_dims:
keys = tuple(ndi[ind] for ind in coord_dims)
else:
Expand Down
4 changes: 2 additions & 2 deletions lib/iris/common/metadata.py
Original file line number Diff line number Diff line change
Expand Up @@ -52,7 +52,7 @@

def hexdigest(item):
"""
Calculate a hexidecimal string hash representation of the provided item.
Calculate a hexadecimal string hash representation of the provided item.
Calculates a 64-bit non-cryptographic hash of the provided item, using
the extremely fast ``xxhash`` hashing algorithm, and returns the hexdigest
Expand All @@ -67,7 +67,7 @@ def hexdigest(item):
The item that requires to have its hexdigest calculated.
Returns:
The string hexidecimal representation of the item's 64-bit hash.
The string hexadecimal representation of the item's 64-bit hash.
"""
# Special case: deal with numpy arrays.
Expand Down
Loading

0 comments on commit 2eac400

Please sign in to comment.