Skip to content
New issue

Have a question about this project? Sign up for a free GitHub account to open an issue and contact its maintainers and the community.

By clicking “Sign up for GitHub”, you agree to our terms of service and privacy statement. We’ll occasionally send you account related emails.

Already on GitHub? Sign in to your account

[pre-commit.ci] pre-commit autoupdate #5150

Merged
merged 2 commits into from
Feb 7, 2023
Merged
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
2 changes: 1 addition & 1 deletion .pre-commit-config.yaml
Original file line number Diff line number Diff line change
Expand Up @@ -29,7 +29,7 @@ repos:
- id: no-commit-to-branch

- repo: https://github.com/psf/black
rev: 22.12.0
rev: 23.1.0
hooks:
- id: black
pass_filenames: false
Expand Down
1 change: 0 additions & 1 deletion docs/gallery_code/general/plot_lineplot_with_legend.py
Original file line number Diff line number Diff line change
Expand Up @@ -24,7 +24,6 @@ def main():
)

for cube in temperature.slices("longitude"):

# Create a string label to identify this cube (i.e. latitude: value).
cube_label = "latitude: %s" % cube.coord("latitude").points[0]

Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -26,7 +26,6 @@


def make_plot(projection_name, projection_crs):

# Create a matplotlib Figure.
plt.figure()

Expand Down
1 change: 0 additions & 1 deletion docs/gallery_code/general/plot_zonal_means.py
Original file line number Diff line number Diff line change
Expand Up @@ -16,7 +16,6 @@


def main():

# Loads air_temp.pp and "collapses" longitude into a single, average value.
fname = iris.sample_data_path("air_temp.pp")
temperature = iris.load_cube(fname)
Expand Down
1 change: 0 additions & 1 deletion docs/gallery_code/meteorology/plot_lagged_ensemble.py
Original file line number Diff line number Diff line change
Expand Up @@ -86,7 +86,6 @@ def main():

# Iterate over all possible latitude longitude slices.
for cube in last_timestep.slices(["latitude", "longitude"]):

# Get the ensemble member number from the ensemble coordinate.
ens_member = cube.coord("realization").points[0]

Expand Down
1 change: 0 additions & 1 deletion docs/src/userguide/plotting_examples/1d_with_legend.py
Original file line number Diff line number Diff line change
Expand Up @@ -13,7 +13,6 @@
temperature = temperature[5:9, :]

for cube in temperature.slices("longitude"):

# Create a string label to identify this cube (i.e. latitude: value)
cube_label = "latitude: %s" % cube.coord("latitude").points[0]

Expand Down
1 change: 0 additions & 1 deletion lib/iris/__init__.py
Original file line number Diff line number Diff line change
Expand Up @@ -175,7 +175,6 @@ def __init__(self, datum_support=False, pandas_ndim=False):
self.__dict__["pandas_ndim"] = pandas_ndim

def __repr__(self):

# msg = ('Future(example_future_flag={})')
# return msg.format(self.example_future_flag)
msg = "Future(datum_support={}, pandas_ndim={})"
Expand Down
1 change: 1 addition & 0 deletions lib/iris/_merge.py
Original file line number Diff line number Diff line change
Expand Up @@ -1418,6 +1418,7 @@ def _define_space(self, space, positions, indexes, function_matrix):
participates in a functional relationship.

"""

# Heuristic reordering of coordinate definition indexes into
# preferred dimension order.
def axis_and_name(name):
Expand Down
1 change: 0 additions & 1 deletion lib/iris/analysis/__init__.py
Original file line number Diff line number Diff line change
Expand Up @@ -296,7 +296,6 @@ def _dimensional_metadata_comparison(*cubes, object_get=None):
# for coordinate groups
for cube, coords in zip(cubes, all_coords):
for coord in coords:

# if this coordinate has already been processed, then continue on
# to the next one
if id(coord) in processed_coords:
Expand Down
2 changes: 1 addition & 1 deletion lib/iris/analysis/_area_weighted.py
Original file line number Diff line number Diff line change
Expand Up @@ -853,7 +853,7 @@ def _calculate_regrid_area_weighted_weights(
cached_x_bounds = []
cached_x_indices = []
max_x_indices = 0
for (x_0, x_1) in grid_x_bounds:
for x_0, x_1 in grid_x_bounds:
if grid_x_decreasing:
x_0, x_1 = x_1, x_0
x_bounds, x_indices = _cropped_bounds(src_x_bounds, x_0, x_1)
Expand Down
2 changes: 1 addition & 1 deletion lib/iris/analysis/_interpolation.py
Original file line number Diff line number Diff line change
Expand Up @@ -268,7 +268,7 @@ def _account_for_circular(self, points, data):
"""
from iris.analysis.cartography import wrap_lons

for (circular, modulus, index, dim, offset) in self._circulars:
for circular, modulus, index, dim, offset in self._circulars:
if modulus:
# Map all the requested values into the range of the source
# data (centred over the centre of the source data to allow
Expand Down
1 change: 0 additions & 1 deletion lib/iris/analysis/_scipy_interpolate.py
Original file line number Diff line number Diff line change
Expand Up @@ -225,7 +225,6 @@ def compute_interp_weights(self, xi, method=None):
prepared = (xi_shape, method) + self._find_indices(xi.T)

if method == "linear":

xi_shape, method, indices, norm_distances, out_of_bounds = prepared

# Allocate arrays for describing the sparse matrix.
Expand Down
1 change: 0 additions & 1 deletion lib/iris/analysis/calculus.py
Original file line number Diff line number Diff line change
Expand Up @@ -594,7 +594,6 @@ def curl(i_cube, j_cube, k_cube=None):
horiz_cs, (iris.coord_systems.GeogCS, iris.coord_systems.RotatedGeogCS)
)
if not spherical_coords:

# TODO Implement some mechanism for conforming to a common grid
dj_dx = _curl_differentiate(j_cube, x_coord)
prototype_diff = dj_dx
Expand Down
1 change: 0 additions & 1 deletion lib/iris/analysis/trajectory.py
Original file line number Diff line number Diff line change
Expand Up @@ -85,7 +85,6 @@ def __init__(self, waypoints, sample_count=10):
cur_seg = segments[cur_seg_i]
len_accum = cur_seg.length
for p in range(self.sample_count):

# calculate the sample position along our total length
sample_at_len = p * sample_step

Expand Down
2 changes: 2 additions & 0 deletions lib/iris/common/metadata.py
Original file line number Diff line number Diff line change
Expand Up @@ -969,6 +969,7 @@ def _combine_lenient(self, other):
A list of combined metadata member values.

"""

# Perform "strict" combination for "coord_system" and "climatological".
def func(field):
left = getattr(self, field)
Expand Down Expand Up @@ -1024,6 +1025,7 @@ def _difference_lenient(self, other):
A list of difference metadata member values.

"""

# Perform "strict" difference for "coord_system" and "climatological".
def func(field):
left = getattr(self, field)
Expand Down
1 change: 1 addition & 0 deletions lib/iris/coord_categorisation.py
Original file line number Diff line number Diff line change
Expand Up @@ -90,6 +90,7 @@ def vectorised_fn(*args):
# coordinates only
#


# Private "helper" function
def _pt_date(coord, time):
"""
Expand Down
2 changes: 0 additions & 2 deletions lib/iris/coord_systems.py
Original file line number Diff line number Diff line change
Expand Up @@ -478,7 +478,6 @@ def datum(self, value):

@classmethod
def from_datum(cls, datum, longitude_of_prime_meridian=None):

crs = super().__new__(cls)

crs._semi_major_axis = None
Expand Down Expand Up @@ -949,7 +948,6 @@ def __init__(
false_northing=None,
ellipsoid=None,
):

"""
Constructs a Geostationary coord system.

Expand Down
1 change: 0 additions & 1 deletion lib/iris/coords.py
Original file line number Diff line number Diff line change
Expand Up @@ -2846,7 +2846,6 @@ def _new_bounds_requirements(self, bounds):
n_bounds = bounds.shape[-1]
n_points = bounds.shape[0]
if n_points > 1:

directions = set()
for b_index in range(n_bounds):
monotonic, direction = iris.util.monotonic(
Expand Down
1 change: 0 additions & 1 deletion lib/iris/cube.py
Original file line number Diff line number Diff line change
Expand Up @@ -2688,7 +2688,6 @@ def subset(self, coord):
coord_to_extract in self.aux_coords
and len(coord_to_extract.points) == 1
):

# Default to returning None
result = None

Expand Down
4 changes: 1 addition & 3 deletions lib/iris/experimental/ugrid/mesh.py
Original file line number Diff line number Diff line change
Expand Up @@ -3127,9 +3127,7 @@ def _construct_access_arrays(self):
flat_inds_safe = al.where(missing_inds, 0, flat_inds_nomask)
# Here's the core indexing operation.
# The comma applies all inds-array values to the *first* dimension.
bounds = node_points[
flat_inds_safe,
]
bounds = node_points[flat_inds_safe,]
# Fix 'missing' locations, and restore the proper shape.
bounds = al.ma.masked_array(bounds, missing_inds)
bounds = bounds.reshape(indices.shape)
Expand Down
5 changes: 5 additions & 0 deletions lib/iris/experimental/ugrid/metadata.py
Original file line number Diff line number Diff line change
Expand Up @@ -53,6 +53,7 @@ def _combine_lenient(self, other):
A list of combined metadata member values.

"""

# Perform "strict" combination for "cf_role", "start_index", "location_axis".
def func(field):
left = getattr(self, field)
Expand Down Expand Up @@ -113,6 +114,7 @@ def _difference_lenient(self, other):
A list of difference metadata member values.

"""

# Perform "strict" difference for "cf_role", "start_index", "location_axis".
def func(field):
left = getattr(self, field)
Expand Down Expand Up @@ -233,6 +235,7 @@ def _difference_lenient(self, other):
A list of difference metadata member values.

"""

# Perform "strict" difference for "topology_dimension",
# "node_dimension", "edge_dimension" and "face_dimension".
def func(field):
Expand Down Expand Up @@ -297,6 +300,7 @@ def _combine_lenient(self, other):
A list of combined metadata member values.

"""

# It is actually "strict" : return None except where members are equal.
def func(field):
left = getattr(self, field)
Expand Down Expand Up @@ -352,6 +356,7 @@ def _difference_lenient(self, other):
A list of different metadata member values.

"""

# Perform "strict" difference for location / axis.
def func(field):
left = getattr(self, field)
Expand Down
1 change: 0 additions & 1 deletion lib/iris/fileformats/abf.py
Original file line number Diff line number Diff line change
Expand Up @@ -219,7 +219,6 @@ def load_cubes(filespecs, callback=None):

for filespec in filespecs:
for filename in glob.glob(filespec):

field = ABFField(filename)
cube = field.to_cube()

Expand Down
2 changes: 0 additions & 2 deletions lib/iris/fileformats/name_loaders.py
Original file line number Diff line number Diff line change
Expand Up @@ -994,7 +994,6 @@ def load_NAMEIII_version2(filename):
# using the next() method. This will come in handy as we wish to
# progress through the file line by line.
with open(filename, "r") as file_handle:

# define a dictionary to hold the header metadata about this file
header = read_header(file_handle)

Expand All @@ -1005,7 +1004,6 @@ def load_NAMEIII_version2(filename):
column_headings = {}
datacol1 = header["Number of preliminary cols"]
for line in file_handle:

data = [col.strip() for col in line.split(",")][:-1]

# If first column is not zero we have reached the end
Expand Down
4 changes: 1 addition & 3 deletions lib/iris/fileformats/pp.py
Original file line number Diff line number Diff line change
Expand Up @@ -625,7 +625,7 @@ def __getstate__(self):
def __setstate__(self, state):
# Because we have __slots__, this is needed to support Pickle.load()
# (Use setattr, as there is no object dictionary.)
for (key, value) in state:
for key, value in state:
setattr(self, key, value)

def __eq__(self, other):
Expand Down Expand Up @@ -2029,10 +2029,8 @@ def pp_filter(field):
res = True
if field.stash not in _STASH_ALLOW:
if pp_constraints.get("stash"):

res = False
for call_func in pp_constraints["stash"]:

if call_func(str(field.stash)):
res = True
break
Expand Down
1 change: 0 additions & 1 deletion lib/iris/fileformats/pp_load_rules.py
Original file line number Diff line number Diff line change
Expand Up @@ -756,7 +756,6 @@ def date2year(t_in):
)
)
):

coords_and_dims.append(
_new_coord_and_dims(
do_vector,
Expand Down
2 changes: 1 addition & 1 deletion lib/iris/fileformats/rules.py
Original file line number Diff line number Diff line change
Expand Up @@ -394,7 +394,7 @@ def _load_pairs_from_fields_and_filenames(
yield (cube, field)

regrid_cache = {}
for (cube, factories, field) in results_needing_reference:
for cube, factories, field in results_needing_reference:
_resolve_factory_references(
cube, factories, concrete_reference_targets, regrid_cache
)
Expand Down
1 change: 1 addition & 0 deletions lib/iris/fileformats/um/_fast_load_structured_fields.py
Original file line number Diff line number Diff line change
Expand Up @@ -133,6 +133,7 @@ def element_arrays_and_dims(self):

def _field_vector_element_arrays(self):
"""Define the field components used in the structure analysis."""

# Define functions to make t1 and t2 values as date-time tuples.
# These depend on header version (PPField2 has no seconds values).
def t1_fn(fld):
Expand Down
1 change: 0 additions & 1 deletion lib/iris/quickplot.py
Original file line number Diff line number Diff line change
Expand Up @@ -45,7 +45,6 @@ def _title(cube_or_coord, with_units):
or units.is_no_unit()
or units == cf_units.Unit("1")
):

if _use_symbol(units):
units = units.symbol
elif units.is_time_reference():
Expand Down
2 changes: 0 additions & 2 deletions lib/iris/tests/graphics/__init__.py
Original file line number Diff line number Diff line change
Expand Up @@ -187,7 +187,6 @@ def check_graphic(test_id: str, results_dir: Union[str, Path]) -> None:
try:

def _create_missing(phash: str) -> None:

output_path = test_output_dir / (test_id + ".png")

print(f"Creating image file: {output_path}")
Expand All @@ -214,7 +213,6 @@ def _create_missing(phash: str) -> None:
phash = get_phash(buffer)

if test_id in repo:

expected = hex_to_hash(repo[test_id])

# Calculate hamming distance vector for the result hash.
Expand Down
1 change: 0 additions & 1 deletion lib/iris/tests/integration/test_Datums.py
Original file line number Diff line number Diff line change
Expand Up @@ -23,7 +23,6 @@ def setUp(self):
self.start_crs = ccrs.OSGB(False)

def test_transform_points_datum(self):

# Iris version
wgs84 = GeogCS.from_datum("WGS84")
iris_cs = LambertConformal(
Expand Down
1 change: 0 additions & 1 deletion lib/iris/tests/test_cdm.py
Original file line number Diff line number Diff line change
Expand Up @@ -349,7 +349,6 @@ def test_similar_coord(self):
)

def test_cube_summary_cell_methods(self):

cube = self.cube_2d.copy()

# Create a list of values used to create cell methods
Expand Down
2 changes: 0 additions & 2 deletions lib/iris/tests/test_cf.py
Original file line number Diff line number Diff line change
Expand Up @@ -276,9 +276,7 @@ def test_destructor(self):
didn't exist because opening the dataset had failed.
"""
with self.temp_filename(suffix=".nc") as fn:

with open(fn, "wb+") as fh:

fh.write(
b"\x89HDF\r\n\x1a\nBroken file with correct signature"
)
Expand Down
2 changes: 1 addition & 1 deletion lib/iris/tests/test_io_init.py
Original file line number Diff line number Diff line change
Expand Up @@ -126,7 +126,7 @@ def test_format_picker(self):
]

# test that each filespec is identified as the expected format
for (expected_format_name, file_spec) in test_specs:
for expected_format_name, file_spec in test_specs:
test_path = tests.get_data_path(file_spec)
with open(test_path, "rb") as test_file:
a = iff.FORMAT_AGENT.get_spec(test_path, test_file)
Expand Down
2 changes: 1 addition & 1 deletion lib/iris/tests/test_merge.py
Original file line number Diff line number Diff line change
Expand Up @@ -190,7 +190,7 @@ def setUp(self):
)

def test__ndarray_ndarray(self):
for (lazy0, lazy1) in self.lazy_combos:
for lazy0, lazy1 in self.lazy_combos:
cubes = iris.cube.CubeList()
cubes.append(self._make_cube(0, dtype=self.dtype, lazy=lazy0))
cubes.append(self._make_cube(1, dtype=self.dtype, lazy=lazy1))
Expand Down
5 changes: 1 addition & 4 deletions lib/iris/tests/test_netcdf.py
Original file line number Diff line number Diff line change
Expand Up @@ -313,9 +313,7 @@ def test_deferred_loading(self):
cube[((0, 8, 4, 2, 14, 12),)][((0, 2, 4, 1),)],
("netcdf", "netcdf_deferred_tuple_1.cml"),
)
subcube = cube[((0, 8, 4, 2, 14, 12),)][((0, 2, 4, 1),)][
(1, 3),
]
subcube = cube[((0, 8, 4, 2, 14, 12),)][((0, 2, 4, 1),)][(1, 3),]
self.assertCML(subcube, ("netcdf", "netcdf_deferred_tuple_2.cml"))

# Consecutive mixture on same dimension.
Expand Down Expand Up @@ -1417,7 +1415,6 @@ def test_process_flags(self):
}

for bits, descriptions in multiple_map.items():

ll_cube = stock.lat_lon_cube()
ll_cube.attributes["ukmo__process_flags"] = descriptions

Expand Down
3 changes: 2 additions & 1 deletion lib/iris/tests/test_nimrod.py
Original file line number Diff line number Diff line change
Expand Up @@ -80,7 +80,8 @@ def test_huge_field_load(self):
@tests.skip_data
def test_load_kwarg(self):
"""Tests that the handle_metadata_errors kwarg is effective by setting it to
False with a file with known incomplete meta-data (missing ellipsoid)."""
False with a file with known incomplete meta-data (missing ellipsoid).
"""
datafile = "u1096_ng_ek00_pressure_2km"
with self.assertRaisesRegex(
TranslationError,
Expand Down
Loading