
Merging models #914

Closed. Wants to merge 33 commits from merging_models into main.

Changes from 14 commits

33 commits
cd6f532
Add merging of models in ribasim Python
SouthEndMusic Dec 20, 2023
8a99430
Allow partial basin state input in Ribasim core
SouthEndMusic Dec 20, 2023
f3e0710
column name fix
SouthEndMusic Dec 20, 2023
c3ef344
Add allocation network id offsetting
SouthEndMusic Dec 20, 2023
f7e81a1
Add allocation network plotting
SouthEndMusic Dec 20, 2023
bacb441
Add test model which merges other testmodels
SouthEndMusic Dec 20, 2023
7accfdc
Add method for adding edges to existing model
SouthEndMusic Dec 20, 2023
be96b1d
support deleting nodes by id
SouthEndMusic Dec 20, 2023
3b7b44c
Remove scipy dependency, add subnetworks to legend
SouthEndMusic Dec 21, 2023
042e595
Make MyPy somewhat happier
SouthEndMusic Dec 21, 2023
553c956
Add tests
SouthEndMusic Dec 21, 2023
ff040aa
Expand plot testing
SouthEndMusic Dec 21, 2023
6049ae3
Add docstrings
SouthEndMusic Dec 22, 2023
e4976d1
Make test model runnable
SouthEndMusic Dec 22, 2023
d018e8f
Merge branch 'main' into merging_models
SouthEndMusic Jan 8, 2024
f450f72
spacial -> spatial :D
SouthEndMusic Jan 8, 2024
455dd85
Merge branch 'main' into merging_models
SouthEndMusic Jan 8, 2024
290d4bc
Merge branch 'main' into merging_models
SouthEndMusic Jan 8, 2024
ccf1034
Merge branch 'main' into merging_models
SouthEndMusic Jan 8, 2024
d7d8fb5
Merge branch 'main' into merging_models
SouthEndMusic Jan 8, 2024
e88ad64
Merge branch 'main' into merging_models
SouthEndMusic Jan 8, 2024
1612cd3
Merge branch 'main' into merging_models
SouthEndMusic Jan 8, 2024
185071a
Merge branch 'main' into merging_models
SouthEndMusic Jan 9, 2024
8fe0949
Process some comments
SouthEndMusic Jan 9, 2024
d7e1790
Merge branch 'main' into merging_models
SouthEndMusic Jan 9, 2024
cfae794
Test model translation
SouthEndMusic Jan 9, 2024
6e8bdae
rename merge methods
SouthEndMusic Jan 9, 2024
d1a2b26
Merge branch 'main' into merging_models
SouthEndMusic Jan 10, 2024
7db5ff4
Comments adressed
SouthEndMusic Jan 11, 2024
2f550f9
Merge branch 'main' into merging_models
SouthEndMusic Jan 11, 2024
31a37c1
Add offset to to indices of new edges
SouthEndMusic Jan 11, 2024
12445e9
Merge branch 'main' into merging_models
SouthEndMusic Jan 11, 2024
096b9a0
Merge branch 'main' into merging_models
visr Jan 12, 2024
12 changes: 3 additions & 9 deletions core/src/bmi.jl
@@ -73,15 +73,9 @@
end
@debug "Read database into memory."

storage = if isempty(state)
# default to nearly empty basins, perhaps make required input
fill(1.0, n)
else
storages, errors = get_storages_from_levels(parameters.basin, state.level)
if errors
error("Encountered errors while parsing the initial levels of basins.")
end
storages
storage, errors = get_storages_from_levels(parameters.basin, state.node_id, state.level)
if errors
error("Encountered errors while parsing the initial levels of basins.")

[Codecov: added line #L78 not covered by tests]
end
@assert length(storage) == n "Basin / state length differs from number of Basins"
# Integrals for PID control
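With this change an initial Basin state no longer has to cover every basin: storages default to 1.0 and only the basins listed in the state table are converted from level to storage. A minimal sketch of what a partial state table can look like on the input side; the column names follow the `state.node_id` / `state.level` fields used above, while the IDs and levels are made-up illustration values.

```python
import pandas as pd

# Partial Basin state: the model has basins 1, 2 and 3, but only 1 and 3 get
# an initial level here; basin 2 falls back to the default storage of 1.0.
# The scan in get_storages_from_levels assumes the state rows follow the
# basin node ID order.
state = pd.DataFrame({"node_id": [1, 3], "level": [1.4, 0.8]})
```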
27 changes: 22 additions & 5 deletions core/src/utils.jl
@@ -376,12 +376,29 @@ end
"""Compute the storages of the basins based on the water level of the basins."""
function get_storages_from_levels(
basin::Basin,
levels::Vector,
state_node_id::Vector{Int},
state_level::Vector{Float64},
)::Tuple{Vector{Float64}, Bool}
storages = Float64[]

for (i, level) in enumerate(levels)
push!(storages, get_storage_from_level(basin, i, level))
(; node_id) = basin

storages = fill(1.0, length(node_id))
n_specified_states = length(state_node_id)

if n_specified_states > 0
basin_state_index = 1
basin_state_node_id = state_node_id[1]

for (i, id) in enumerate(node_id)
if basin_state_node_id == id.value
storages[i] =
get_storage_from_level(basin, i, state_level[basin_state_index])
basin_state_index += 1
if basin_state_index > n_specified_states
break
end
basin_state_node_id = state_node_id[basin_state_index]
end
end
end
return storages, any(isnan.(storages))
end
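For reference, a standalone Python sketch of the same matching logic (not Ribasim API): `storage_from_level` stands in for the Julia `get_storage_from_level` interpolation, and the loop mirrors the single forward pass over the sorted basin node IDs.

```python
def storages_from_levels(basin_node_ids, state_node_ids, state_levels, storage_from_level):
    # Default every basin to a storage of 1.0, as the Julia code does.
    storages = [1.0] * len(basin_node_ids)
    j = 0  # index into the specified states, assumed ordered like the basins
    for i, node_id in enumerate(basin_node_ids):
        if j < len(state_node_ids) and state_node_ids[j] == node_id:
            storages[i] = storage_from_level(i, state_levels[j])
            j += 1
    # NaN storages (e.g. a level below the basin bottom) signal an error.
    return storages, any(s != s for s in storages)

# e.g. storages_from_levels([1, 2, 3], [1, 3], [1.4, 0.8], lambda i, lvl: 100.0 * lvl)
```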
24 changes: 13 additions & 11 deletions core/test/utils_test.jl
@@ -110,24 +110,26 @@ end
]
storage = Ribasim.profile_storage(level, area)
basin = Ribasim.Basin(
Indices(Ribasim.NodeID[1]),
zeros(1),
zeros(1),
zeros(1),
zeros(1),
zeros(1),
zeros(1),
[area],
[level],
[storage],
Indices(Ribasim.NodeID[1, 2]),
zeros(2),
zeros(2),
zeros(2),
zeros(2),
zeros(2),
zeros(2),
[area, area],
[level, level],
[storage, storage],
StructVector{Ribasim.BasinTimeV1}(undef, 0),
)

logger = TestLogger()
with_logger(logger) do
storages, errors = Ribasim.get_storages_from_levels(basin, [-1.0])
storages, errors = Ribasim.get_storages_from_levels(basin, [1], [-1.0])
@test isnan(storages[1])
@test errors
# Storage for basin with unspecified level is set to 1.0
@test storages[2] == 1.0
end

@test length(logger.logs) == 1
33 changes: 33 additions & 0 deletions python/ribasim/ribasim/geometry/edge.py
@@ -1,3 +1,4 @@
from copy import deepcopy
from typing import Any

import matplotlib.pyplot as plt
@@ -9,6 +10,7 @@
from numpy.typing import NDArray
from pandera.typing import Series
from pandera.typing.geopandas import GeoSeries
from shapely.geometry import LineString

from ribasim.input_base import SpatialTableModel

@@ -38,6 +40,37 @@
Table describing the flow connections.
"""

def translate_spacially(
self, offset_spacial: tuple[float, float], inplace: bool = True
) -> "Edge":
"""Add the same spacial offset to all edges."""
if inplace:
edge = self
else:
edge = deepcopy(self)

[Codecov: added line #L50 not covered by tests]

edge.df.geometry = edge.df.geometry.apply(
lambda linestring: LineString(
[
(point[0] + offset_spacial[0], point[1] + offset_spacial[1])
for point in linestring.coords
]
)
)
return edge

def offset_allocation_network_ids(
self, offset_allocation_network_id: int, inplace: bool = True
) -> "Edge":
"""Add the same offset to all node IDs."""
if inplace:
edge = self
else:
edge = deepcopy(self)

[Codecov: added line #L69 not covered by tests]

edge.df.allocation_network_id += offset_allocation_network_id
return edge

def get_where_edge_type(self, edge_type: str) -> NDArray[np.bool_]:
return (self.df.edge_type == edge_type).to_numpy()

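A short usage sketch for the new `Edge` helpers, assuming `edge` is a ribasim `Edge` table whose `edge.df` GeoDataFrame is already populated (the variable name is illustrative):

```python
# Shift the whole edge geometry 10 km east, leaving the original untouched.
shifted = edge.translate_spacially((10_000.0, 0.0), inplace=False)

# Renumber allocation networks in place: subnetwork 1 becomes 6, and so on.
edge.offset_allocation_network_ids(5)

# get_where_edge_type returns a boolean mask over edge.df, e.g. for flow edges.
flow_edges = edge.df[edge.get_where_edge_type("flow")]
```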
73 changes: 73 additions & 0 deletions python/ribasim/ribasim/geometry/node.py
@@ -1,14 +1,18 @@
from collections.abc import Sequence
from copy import deepcopy
from typing import Any

import geopandas as gpd
import matplotlib.pyplot as plt
import numpy as np
import pandas as pd
import pandera as pa
import shapely
from matplotlib.patches import Patch
from numpy.typing import NDArray
from pandera.typing import Series
from pandera.typing.geopandas import GeoSeries
from shapely.geometry import Point

from ribasim.input_base import SpatialTableModel

@@ -59,6 +63,34 @@

return node_id, node_type

def translate_spacially(
self, offset_spacial: tuple[float, float], inplace: bool = True
) -> "Node":
"""Add the same spacial offset to all nodes."""
if inplace:
node = self
else:
node = deepcopy(self)

[Codecov: added line #L73 not covered by tests]

node.df.geometry = node.df.geometry.apply(
lambda point: Point(
point.x + offset_spacial[0], point.y + offset_spacial[1]
)
)
return node

def offset_allocation_network_ids(
self, offset_allocation_network_id: int, inplace: bool = True
) -> "Node":
"""Add the same offset to all node IDs."""
if inplace:
node = self
else:
node = deepcopy(self)

[Codecov: added line #L89 not covered by tests]

node.df.allocation_network_id += offset_allocation_network_id
return node

def geometry_from_connectivity(
self, from_id: Sequence[int], to_id: Sequence[int]
) -> NDArray[Any]:
@@ -130,6 +162,47 @@
to_id = node_index[edge_node_id[:, 1]].to_numpy()
return from_id, to_id

def plot_allocation_networks(self, ax=None, zorder=None) -> Any:
if ax is None:
_, ax = plt.subplots()
ax.axis("off")

[Codecov: added lines #L167-L168 not covered by tests]

COLOR_SUBNETWORK = "black"
COLOR_MAIN_NETWORK = "blue"
ALPHA = 0.25

contains_main_network = False
contains_subnetworks = False

for allocation_subnetwork_id, df_subnetwork in self.df.groupby(
"allocation_network_id"
):
if allocation_subnetwork_id is None:
continue

[Codecov: added line #L181 not covered by tests]
elif allocation_subnetwork_id == 1:
contains_main_network = True
color = COLOR_MAIN_NETWORK
else:
contains_subnetworks = True
color = COLOR_SUBNETWORK

hull = gpd.GeoDataFrame(
geometry=[df_subnetwork.geometry.unary_union.convex_hull]
)
hull.plot(ax=ax, color=color, alpha=ALPHA, zorder=zorder)

handles = []
labels = []

if contains_main_network:
handles.append(Patch(facecolor=COLOR_MAIN_NETWORK, alpha=ALPHA))
labels.append("Main network")
if contains_subnetworks:
handles.append(Patch(facecolor=COLOR_SUBNETWORK, alpha=ALPHA))
labels.append("Subnetwork")

return handles, labels

def plot(self, ax=None, zorder=None) -> Any:
"""
Plot the nodes. Each node type is given a separate marker.
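A plotting sketch for the new `plot_allocation_networks`, assuming `node` is a populated ribasim `Node` table; the returned handles and labels feed directly into a matplotlib legend:

```python
import matplotlib.pyplot as plt

fig, ax = plt.subplots()
# Draw the convex hulls of the main network and subnetworks behind the nodes.
handles, labels = node.plot_allocation_networks(ax=ax, zorder=-1)
node.plot(ax=ax)  # draw the nodes themselves on top of the hulls
ax.legend(handles, labels, title="Allocation networks")
plt.show()
```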
107 changes: 107 additions & 0 deletions python/ribasim/ribasim/input_base.py
@@ -3,6 +3,7 @@
from collections.abc import Callable, Generator
from contextlib import closing
from contextvars import ContextVar
from copy import deepcopy
from pathlib import Path
from sqlite3 import Connection, connect
from typing import (
@@ -159,6 +160,19 @@
class TableModel(FileModel, Generic[TableT]):
df: DataFrame[TableT] | None = Field(default=None, exclude=True, repr=False)

def __eq__(self, other) -> bool:
if not type(self) == type(other):
return False

[Codecov: added line #L165 not covered by tests]
if self.filepath != other.filepath:
return False

[Codecov: added line #L167 not covered by tests]

if self.df is None and other.df is None:
return True
elif isinstance(self.df, (pd.DataFrame, gpd.GeoDataFrame)):
return self.df.equals(other.df)
else:
return False

[Codecov: added line #L174 not covered by tests]

@field_validator("df")
@classmethod
def prefix_extra_columns(cls, v: DataFrame[TableT]):
@@ -202,6 +216,44 @@

return node_ids

def offset_node_ids(self, offset_node_id: int) -> "TableModel[TableT]":
"""Add the same offset to all node IDs."""
copy = deepcopy(self)
df = copy.df
if isinstance(df, (pd.DataFrame, gpd.GeoDataFrame)):
df.index += offset_node_id
for name_column in [
"node_id",
"from_node_id",
"to_node_id",
"listen_feature_id",
]:
if hasattr(df, name_column):
df[name_column] += offset_node_id
return copy

def merge_table(
self, table_added: "TableModel[TableT]", inplace: bool = True
) -> "TableModel[TableT]":
"""Merge an added table of the same type into this table."""
assert type(self) == type(
table_added
), "Can only merge tables of the same type."

if inplace:
table = self

[Codecov: added line #L244 not covered by tests]
else:
table = deepcopy(self)

table.df = pd.concat(
[
table.df,
table_added.df,
]
)

return table

@classmethod
def _load(cls, filepath: Path | None) -> dict[str, Any]:
db = context_file_loading.get().get("database")
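A usage sketch for the new `TableModel` helpers, assuming `static_a` and `static_b` are two tables of the same `TableModel` subclass (for instance the static tables of the two models being merged):

```python
# offset_node_ids always returns a copy; merge_table copies when inplace=False.
static_b_shifted = static_b.offset_node_ids(100)
merged = static_a.merge_table(static_b_shifted, inplace=False)
assert len(merged.df) == len(static_a.df) + len(static_b.df)
```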
@@ -421,6 +473,61 @@
ids = self.node_ids()
return list(ids), len(ids) * [self.get_input_type()]

def offset_node_ids(self, offset_node_id: int) -> "NodeModel":
"""Add the same offset to all node IDs in all underlying tables."""
node_copy = deepcopy(self)
for field in node_copy.fields():
attr = getattr(node_copy, field)
if isinstance(attr, TableModel):
table = attr
setattr(
node_copy,
field,
table.offset_node_ids(offset_node_id),
)
return node_copy

def merge_node(self, node_added: "NodeModel", inplace: bool = True) -> "NodeModel":
"""Merge an added node of the same type into this node."""
assert type(self) == type(node_added), "Can only merge nodes of the same type."

if inplace:
node = self
else:
node = deepcopy(self)

for field in node_added.fields():
attr = getattr(node_added, field)
if isinstance(attr, TableModel):
table_added = attr
table_node = getattr(node, field)
if table_added.df is not None:
if table_node.df is not None:
table_added = table_node.merge_table(table_added, inplace=False)

setattr(node, field, table_added)
return node

def delete_by_ids(self, node_ids: list[int], inplace: bool = True) -> "NodeModel":
"""Delete all rows of the underlying tables whose node ID is in the given list."""
if inplace:
node = self
else:
node = deepcopy(self)

[Codecov: added line #L516 not covered by tests]

for field in node.fields():
attr = getattr(node, field)
if isinstance(attr, TableModel):
df = attr.df
if isinstance(df, (pd.DataFrame, gpd.GeoDataFrame)):
df = df[~df.node_id.isin(node_ids)]
if df.empty:
attr.df = None
else:
attr.df = df

return node

def _save(self, directory: DirectoryPath, input_dir: DirectoryPath, **kwargs):
for field in self.fields():
getattr(self, field)._save(
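And at the `NodeModel` level, a sketch of the merge workflow these methods enable, assuming `basin_a` and `basin_b` are the Basin node models of the two models being merged and that model A only uses node IDs below 100:

```python
# Renumber model B's basins so their IDs do not collide with model A's,
# then fold their tables into model A's Basin node.
basin_b = basin_b.offset_node_ids(100)
basin_a.merge_node(basin_b)  # in place by default

# Afterwards, unwanted rows (e.g. duplicated boundary basins) can be dropped by ID.
basin_a.delete_by_ids([101, 102], inplace=True)
```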