Fix FutureWarning on Dataset.dims.__getitem__
Fixes the FutureWarning introduced by pydata/xarray#8500, which deprecates mapping-style access to Dataset.dims in favour of Dataset.sizes.
mx-moth committed Jan 11, 2024
1 parent 9bbcb32 commit f9661b7
Showing 19 changed files with 63 additions and 58 deletions.
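
Every hunk in this commit applies the same pattern: with xarray >= 2023.12.0 (pydata/xarray#8500), using Dataset.dims as a mapping from dimension names to lengths emits a FutureWarning, because dims is becoming a set-like view of the names; Dataset.sizes is the supported name-to-length mapping. A minimal sketch of the migration, using a made-up dataset rather than anything from this repository:

import numpy
import xarray

# A throwaway dataset with a single 'face' dimension of length 10.
dataset = xarray.Dataset({"depth": ("face", numpy.zeros(10))})

# Old pattern: mapping-style access on Dataset.dims.
# Still works, but emits a FutureWarning on xarray >= 2023.12.0.
face_count = dataset.dims["face"]

# New pattern: Dataset.sizes maps dimension name -> length on all versions.
face_count = dataset.sizes["face"]

# Membership tests and iteration over names keep working on Dataset.dims,
# so only mapping-style lookups and dict comparisons need to change.
assert "face" in dataset.dims
assert set(dataset.dims) == set(dataset.sizes)

The hunks below either swap dataset.dims[name] for dataset.sizes[name], or rewrite test assertions to compare against Dataset.sizes (for name-to-length mappings) or set(dataset.dims) (for bare dimension names).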
4 changes: 3 additions & 1 deletion docs/releases/development.rst
@@ -2,4 +2,6 @@
Next release (in development)
=============================

-* ...
+* Fix a ``FutureWarning`` on accessing :attr:`xarray.Dataset.dims`
+  with xarray >= 2023.12.0
+  (:pr:`124`, :pr:`pydata/xarray#8500`).
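
A project that wants to keep this warning from coming back could promote FutureWarning to an error around the code under test; a hypothetical guard test, not part of this commit:

import warnings
import xarray

def test_no_dims_future_warning() -> None:
    # Hypothetical regression guard: any FutureWarning raised in this block
    # (such as mapping-style access to Dataset.dims) fails the test.
    dataset = xarray.Dataset({"depth": ("face", [0.0, 1.0, 2.0])})
    with warnings.catch_warnings():
        warnings.simplefilter("error", FutureWarning)
        assert dataset.sizes["face"] == 3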
4 changes: 2 additions & 2 deletions src/emsarray/conventions/_base.py
@@ -1767,7 +1767,7 @@ def grid_shape(self) -> Dict[GridKind, Sequence[int]]:
"""
return {
grid_kind: tuple(
-                self.dataset.dims[dim]
+                self.dataset.sizes[dim]
for dim in self.grid_dimensions[grid_kind]
)
for grid_kind in self.grid_kinds
@@ -1877,7 +1877,7 @@ def wind(
grid_kind = self.default_grid_kind

dimensions = self.grid_dimensions[grid_kind]
-        sizes = [self.dataset.dims[dim] for dim in dimensions]
+        sizes = [self.dataset.sizes[dim] for dim in dimensions]

return utils.wind_dimension(
data_array,
5 changes: 4 additions & 1 deletion src/emsarray/conventions/arakawa_c.py
@@ -69,7 +69,10 @@ def i_dimension(self) -> Hashable:
@cached_property
def shape(self) -> Tuple[int, int]:
"""The shape of this grid, as a tuple of ``(j, i)``."""
-        return (self.dataset.dims[self.j_dimension], self.dataset.dims[self.i_dimension])
+        return (
+            self.dataset.sizes[self.j_dimension],
+            self.dataset.sizes[self.i_dimension],
+        )

@cached_property
def size(self) -> int:
8 changes: 4 additions & 4 deletions src/emsarray/conventions/grid.py
@@ -173,8 +173,8 @@ def x_dimension(self) -> Hashable:
@cached_property
def shape(self) -> Tuple[int, int]:
"""The shape of this grid, as a tuple of ``(y, x)``."""
-        dims = self.dataset.dims
-        return (dims[self.y_dimension], dims[self.x_dimension])
+        sizes = self.dataset.sizes
+        return (sizes[self.y_dimension], sizes[self.x_dimension])

@cached_property
def size(self) -> int:
@@ -346,7 +346,7 @@ def _get_or_make_bounds(self, coordinate: xarray.DataArray) -> xarray.DataArray:
if (
len(bounds.dims) == 2
and bounds.dims[0] == coordinate.dims[0]
-            and self.dataset.dims[bounds.dims[1]] == 2
+            and self.dataset.sizes[bounds.dims[1]] == 2
):
return bounds
else:
@@ -481,7 +481,7 @@ def _get_or_make_bounds(self, coordinate: xarray.DataArray) -> xarray.DataArray:
len(bounds.dims) == 3
and bounds.dims[0] == self.y_dimension
and bounds.dims[1] == self.x_dimension
-            and self.dataset.dims[bounds.dims[2]] == 4
+            and self.dataset.sizes[bounds.dims[2]] == 4
):
return cast(xarray.DataArray, bounds)
else:
12 changes: 6 additions & 6 deletions src/emsarray/conventions/ugrid.py
@@ -890,10 +890,10 @@ def two_dimension(self) -> Hashable:
"""
two = 'Two'
# Check for the standard name
-        if two in self.dataset.dims and self.dataset.dims[two] == 2:
+        if two in self.dataset.sizes and self.dataset.sizes[two] == 2:
return two
# Check for any other dimension of size 2
-        for name, size in self.dataset.dims.items():
+        for name, size in self.dataset.sizes.items():
if size == 2:
return name
# Make up a new dimension with the standard name
@@ -975,7 +975,7 @@ def max_node_dimension(self) -> Hashable:
@property
def node_count(self) -> int:
"""The number of nodes in the dataset."""
-        return self.dataset.dims[self.node_dimension]
+        return self.dataset.sizes[self.node_dimension]

@property
def edge_count(self) -> int:
@@ -986,7 +986,7 @@ def edge_count(self) -> int:
# This dimension may not be defined, so ignore KeyErrors. We can
# compute it below.
with suppress(KeyError):
-            return self.dataset.dims[self.edge_dimension]
+            return self.dataset.sizes[self.edge_dimension]

# By computing the edge_node array we can determine how many edges exist
return self.edge_node_array.shape[0]
@@ -996,12 +996,12 @@ def face_count(self) -> int:
"""
The number of faces in the dataset.
"""
-        return self.dataset.dims[self.face_dimension]
+        return self.dataset.sizes[self.face_dimension]

@property
def max_node_count(self) -> int:
"""The maximum number of nodes / edges per face."""
-        return self.dataset.dims[self.max_node_dimension]
+        return self.dataset.sizes[self.max_node_dimension]


class UGridKind(str, enum.Enum):
4 changes: 2 additions & 2 deletions src/emsarray/transect.py
@@ -422,7 +422,7 @@ def make_poly_collection(
(distance_bounds[index, 1], depth_bounds[depth_index][0]),
]
for depth_index in range(transect_dataset.coords['depth'].size)
-            for index in range(transect_dataset.dims['index'])
+            for index in range(transect_dataset.sizes['index'])
]
return PolyCollection(vertices, **kwargs)

@@ -474,7 +474,7 @@ def make_ocean_floor_poly_collection(
(distance_bounds[index, 1], deepest),
(distance_bounds[index, 1], bathymetry_values[linear_indices[index]]),
]
-            for index in range(transect_dataset.dims['index'])
+            for index in range(transect_dataset.sizes['index'])
]
return PolyCollection(vertices, **kwargs)

2 changes: 1 addition & 1 deletion src/emsarray/utils.py
@@ -454,7 +454,7 @@ def check_data_array_dimensions_match(dataset: xarray.Dataset, data_array: xarra
f"Data array has unknown dimension {dimension} of size {data_array_size}"
)

-        dataset_size = dataset.dims[dimension]
+        dataset_size = dataset.sizes[dimension]
if data_array_size != dataset_size:
raise ValueError(
"Dimension mismatch between dataset and data array: "
2 changes: 1 addition & 1 deletion tests/cli/commands/test_extract_points.py
@@ -35,7 +35,7 @@ def test_extract_points(
point_dataset = xarray.open_dataset(out_path)

assert 'point' in point_dataset.dims
-    assert point_dataset.dims['point'] == num_points
+    assert point_dataset.sizes['point'] == num_points
assert_equal(points_df['name'], point_dataset['name'].values)
assert_allclose(points_df['lon'], point_dataset['lon'].values)
assert_allclose(points_df['lat'], point_dataset['lat'].values)
2 changes: 1 addition & 1 deletion tests/conventions/test_base.py
@@ -398,7 +398,7 @@ def test_select_index():
ds_point = convention.select_index(SimpleGridIndex(y, x))

# The x and y dims should have been dropped, as they are now of size 1
-    assert ds_point.dims == {'t': 5, 'z': 5}
+    assert ds_point.sizes == {'t': 5, 'z': 5}
# The x and y coords should be single values
assert ds_point.coords['x'].values == x
assert ds_point.coords['y'].values == y
6 changes: 3 additions & 3 deletions tests/conventions/test_cfgrid1d.py
@@ -348,7 +348,7 @@ def test_drop_geometry(datasets: pathlib.Path):
dataset = xarray.open_dataset(datasets / 'cfgrid1d.nc')

dropped = dataset.ems.drop_geometry()
-    assert dropped.dims.keys() == {'lon', 'lat'}
+    assert set(dropped.dims) == {'lon', 'lat'}

topology = dataset.ems.topology
assert topology.longitude_name in dataset.variables
@@ -408,7 +408,7 @@ def test_make_clip_mask():
assert_equal(mask.data_vars['cell_mask'].values, expected_cells)

assert mask.attrs == {'type': 'CFGrid mask'}
-    assert mask.dims == {
+    assert mask.sizes == {
topology.longitude_name: topology.longitude.size,
topology.latitude_name: topology.latitude.size,
}
@@ -448,7 +448,7 @@ def test_apply_clip_mask(tmp_path):
# Check that the variable and dimension keys were preserved
assert set(dataset.data_vars.keys()) == set(clipped.data_vars.keys())
assert set(dataset.coords.keys()) == set(clipped.coords.keys())
-    assert set(dataset.dims.keys()) == set(clipped.dims.keys())
+    assert set(dataset.dims) == set(clipped.dims)

# Check that the new topology seems reasonable
assert clipped.ems.topology.longitude.size == 5
8 changes: 4 additions & 4 deletions tests/conventions/test_cfgrid2d.py
@@ -323,8 +323,8 @@ def test_face_centres():
face_centres = convention.face_centres
lons = dataset.variables['longitude'].values
lats = dataset.variables['latitude'].values
-    for j in range(dataset.dims['j']):
-        for i in range(dataset.dims['i']):
+    for j in range(dataset.sizes['j']):
+        for i in range(dataset.sizes['i']):
lon = lons[j, i]
lat = lats[j, i]
linear_index = convention.ravel_index((j, i))
@@ -391,7 +391,7 @@ def test_wind():
dataset = make_dataset(j_size=5, i_size=7)
convention: ShocSimple = dataset.ems

-    time_size = dataset.dims['time']
+    time_size = dataset.sizes['time']
values = numpy.arange(time_size * convention.grid_size[CFGridKind.face])
flat_array = xarray.DataArray(
data=values.reshape((time_size, -1)),
@@ -410,7 +410,7 @@ def test_drop_geometry(datasets: pathlib.Path):
dataset = xarray.open_dataset(datasets / 'cfgrid2d.nc')

dropped = dataset.ems.drop_geometry()
-    assert dropped.dims.keys() == {'i', 'j'}
+    assert set(dropped.dims) == {'i', 'j'}

topology = dataset.ems.topology
assert topology.longitude_name in dataset.variables
16 changes: 8 additions & 8 deletions tests/conventions/test_shoc_standard.py
@@ -256,8 +256,8 @@ def test_face_centres():
face_centres = convention.face_centres
lons = dataset['x_centre'].values
lats = dataset['y_centre'].values
-    for j in range(dataset.dims['j_centre']):
-        for i in range(dataset.dims['i_centre']):
+    for j in range(dataset.sizes['j_centre']):
+        for i in range(dataset.sizes['i_centre']):
lon = lons[j, i]
lat = lats[j, i]
linear_index = convention.ravel_index((ArakawaCGridKind.face, j, i))
@@ -375,7 +375,7 @@ def test_select_index_face():
# because of how xarray handles multidimensional coordinates
'x_centre', 'y_centre',
}
-    assert face.dims == {'record': 4, 'k_centre': 5}
+    assert face.sizes == {'record': 4, 'k_centre': 5}
assert face['x_centre'].values == dataset['x_centre'].values[3, 4]
assert face['y_centre'].values == dataset['y_centre'].values[3, 4]

@@ -393,7 +393,7 @@ def test_select_index_edge():
# because of how xarray handles multidimensional coordinates
'x_left', 'y_left'
}
-    assert left.dims == {'record': 4, 'k_centre': 5}
+    assert left.sizes == {'record': 4, 'k_centre': 5}

back = convention.select_index((ArakawaCGridKind.back, 3, 4))
assert set(back.data_vars.keys()) == {
Expand All @@ -404,7 +404,7 @@ def test_select_index_edge():
# because of how xarray handles multidimensional coordinates
'x_back', 'y_back'
}
-    assert back.dims == {'record': 4, 'k_centre': 5}
+    assert back.sizes == {'record': 4, 'k_centre': 5}


def test_select_index_grid():
Expand All @@ -420,14 +420,14 @@ def test_select_index_grid():
# because of how xarray handles multidimensional coordinates
'x_grid', 'y_grid'
}
-    assert node.dims == {'record': 4, 'k_centre': 5}
+    assert node.sizes == {'record': 4, 'k_centre': 5}


def test_drop_geometry(datasets: pathlib.Path):
dataset = xarray.open_dataset(datasets / 'shoc_standard.nc')

dropped = dataset.ems.drop_geometry()
-    assert dropped.dims.keys() == {'face_i', 'face_j'}
+    assert set(dropped.dims) == {'face_i', 'face_j'}
for topology in [dataset.ems.face, dataset.ems.back, dataset.ems.left, dataset.ems.node]:
assert topology.longitude_name in dataset.variables
assert topology.longitude_name in dataset.variables
@@ -587,7 +587,7 @@ def test_apply_clip_mask(tmp_path):
# Check that the variable and dimension keys were preserved
assert set(dataset.data_vars.keys()) == set(clipped.data_vars.keys())
assert set(dataset.coords.keys()) == set(clipped.coords.keys())
-    assert set(dataset.dims.keys()) == set(clipped.dims.keys())
+    assert set(dataset.dims) == set(clipped.dims)

# Check that the new topology seems reasonable
assert clipped.ems.face.longitude.shape == (3, 3)
20 changes: 10 additions & 10 deletions tests/conventions/test_ugrid.py
@@ -378,7 +378,7 @@ def test_polygons():
polygons = dataset.ems.polygons

# Should be one item for every face
-    assert len(polygons) == dataset.dims['nMesh2_face']
+    assert len(polygons) == dataset.sizes['nMesh2_face']

# There should be no empty polygons
assert all(poly is not None for poly in polygons)
@@ -405,7 +405,7 @@ def test_face_centres_from_variables():
face_centres = convention.face_centres
lons = dataset['Mesh2_face_x'].values
lats = dataset['Mesh2_face_y'].values
-    for face in range(dataset.dims['nMesh2_face']):
+    for face in range(dataset.sizes['nMesh2_face']):
lon = lons[face]
lat = lats[face]
linear_index = convention.ravel_index((UGridKind.face, face))
@@ -417,7 +417,7 @@ def test_face_centres_from_centroids():
convention: UGrid = dataset.ems

face_centres = convention.face_centres
-    for face in range(dataset.dims['nMesh2_face']):
+    for face in range(dataset.sizes['nMesh2_face']):
linear_index = convention.ravel_index((UGridKind.face, face))
polygon = convention.polygons[linear_index]
lon, lat = polygon.centroid.coords[0]
@@ -459,7 +459,7 @@ def test_make_geojson_geometry():
def test_ravel():
dataset = make_dataset(width=3)
convention: UGrid = dataset.ems
-    for linear_index in range(dataset.dims['nMesh2_face']):
+    for linear_index in range(dataset.sizes['nMesh2_face']):
index = (UGridKind.face, linear_index)
assert convention.ravel_index(index) == linear_index
assert convention.wind_index(linear_index) == index
@@ -524,7 +524,7 @@ def test_drop_geometry_minimal():
]

dropped = dataset.ems.drop_geometry()
-    assert dropped.dims.keys() == {
+    assert set(dropped.dims) == {
# These still exist because there are variables defined on them
topology.face_dimension, 'Mesh2_layers', 'record'
}
@@ -576,7 +576,7 @@ def test_drop_geometry_full():

print(list(dataset.variables.keys()))
dropped = dataset.ems.drop_geometry()
-    assert dropped.dims.keys() == {
+    assert set(dropped.dims) == {
# These still exist because there are variables defined on them
topology.face_dimension, topology.edge_dimension,
'Mesh2_layers', 'record'
@@ -654,7 +654,7 @@ def test_mask_from_face_indices_without_edges():
node_indices = [12, 13, 14, 17, 18, 19, 20, 23, 24, 25, 26]

mask = mask_from_face_indices(numpy.array(face_indices), topology)
-    assert mask.dims == {
+    assert mask.sizes == {
'old_node_index': topology.node_count,
'old_face_index': topology.face_count,
}
@@ -677,7 +677,7 @@ def test_mask_from_face_indices_with_edges():
node_indices = [12, 13, 14, 17, 18, 19, 20, 23, 24, 25, 26]

mask = mask_from_face_indices(numpy.array(face_indices), topology)
-    assert mask.dims == {
+    assert mask.sizes == {
'old_node_index': topology.node_count,
'old_edge_index': topology.edge_count,
'old_face_index': topology.face_count,
@@ -713,7 +713,7 @@ def test_apply_clip_mask(tmp_path):

# Check that the variable and dimension keys were preserved
assert set(dataset.variables.keys()) == set(clipped.variables.keys())
-    assert set(dataset.dims.keys()) == set(clipped.dims.keys())
+    assert set(dataset.dims) == set(clipped.dims)

# Check that the new topology seems reasonable
assert clipped.ems.topology.face_count == len(face_indices)
@@ -762,7 +762,7 @@ def test_make_and_apply_clip_mask(tmp_path):
# Make a clip mask
clip_mask = dataset.ems.make_clip_mask(polygon, buffer=1)
clip_mask.to_netcdf(tmp_path / "clip.nc")
-    assert clip_mask.dims == {
+    assert clip_mask.sizes == {
'old_face_index': topology.face_count,
'old_edge_index': topology.edge_count,
'old_node_index': topology.node_count,
2 changes: 1 addition & 1 deletion tests/masking/test_mask_dataset.py
@@ -37,7 +37,7 @@ def test_standard_mask_from_centres():
# The centre should be the same as the input array.
# Left should be one bigger in the i dimension, back in j.
# Grid should be one bigger in both j and i dimensions.
-    assert mask.dims == {
+    assert mask.sizes == {
'j_centre': 6, 'i_centre': 5,
'j_node': 7, 'i_node': 6,
'j_left': 6, 'i_left': 6,
4 changes: 2 additions & 2 deletions tests/operations/depth/test_normalize_depth_variables.py
@@ -108,8 +108,8 @@ def test_normalize_depth_variable(
assert_equal(out['depth_name'].values, expected_depths)
assert out['depth_name'].dims == ('depth_dimension',)

-    assert out.dims['depth_coord'] == 6
-    assert out.dims['depth_dimension'] == 6
+    assert out.sizes['depth_coord'] == 6
+    assert out.sizes['depth_dimension'] == 6

# Check that a warning was raised if the positive: 'up'/'down' attribute
# was not set
2 changes: 1 addition & 1 deletion tests/operations/depth/test_ocean_floor.py
@@ -36,7 +36,7 @@ def test_ocean_floor():

floor_dataset = ocean_floor(dataset, ['depth'], non_spatial_variables=['time'])

-    assert floor_dataset.dims == {
+    assert floor_dataset.sizes == {
't': 5,
'x': 5,
'y': 5,