Merging models #914

Closed
wants to merge 33 commits into from

Commits (33)
cd6f532
Add merging of models in ribasim Python
SouthEndMusic Dec 20, 2023
8a99430
Allow partial basin state input in Ribasim core
SouthEndMusic Dec 20, 2023
f3e0710
column name fix
SouthEndMusic Dec 20, 2023
c3ef344
Add allocation network id offsetting
SouthEndMusic Dec 20, 2023
f7e81a1
Add allocation network plotting
SouthEndMusic Dec 20, 2023
bacb441
Add test model which merges other testmodels
SouthEndMusic Dec 20, 2023
7accfdc
Add method for adding edges to existing model
SouthEndMusic Dec 20, 2023
be96b1d
support deleting nodes by id
SouthEndMusic Dec 20, 2023
3b7b44c
Remove scipy dependency, add subnetworks to legend
SouthEndMusic Dec 21, 2023
042e595
Make MyPy somewhat happier
SouthEndMusic Dec 21, 2023
553c956
Add tests
SouthEndMusic Dec 21, 2023
ff040aa
Expand plot testing
SouthEndMusic Dec 21, 2023
6049ae3
Add docstrings
SouthEndMusic Dec 22, 2023
e4976d1
Make test model runnable
SouthEndMusic Dec 22, 2023
d018e8f
Merge branch 'main' into merging_models
SouthEndMusic Jan 8, 2024
f450f72
spacial -> spatial :D
SouthEndMusic Jan 8, 2024
455dd85
Merge branch 'main' into merging_models
SouthEndMusic Jan 8, 2024
290d4bc
Merge branch 'main' into merging_models
SouthEndMusic Jan 8, 2024
ccf1034
Merge branch 'main' into merging_models
SouthEndMusic Jan 8, 2024
d7d8fb5
Merge branch 'main' into merging_models
SouthEndMusic Jan 8, 2024
e88ad64
Merge branch 'main' into merging_models
SouthEndMusic Jan 8, 2024
1612cd3
Merge branch 'main' into merging_models
SouthEndMusic Jan 8, 2024
185071a
Merge branch 'main' into merging_models
SouthEndMusic Jan 9, 2024
8fe0949
Process some comments
SouthEndMusic Jan 9, 2024
d7e1790
Merge branch 'main' into merging_models
SouthEndMusic Jan 9, 2024
cfae794
Test model translation
SouthEndMusic Jan 9, 2024
6e8bdae
rename merge methods
SouthEndMusic Jan 9, 2024
d1a2b26
Merge branch 'main' into merging_models
SouthEndMusic Jan 10, 2024
7db5ff4
Comments addressed
SouthEndMusic Jan 11, 2024
2f550f9
Merge branch 'main' into merging_models
SouthEndMusic Jan 11, 2024
31a37c1
Add offset to indices of new edges
SouthEndMusic Jan 11, 2024
12445e9
Merge branch 'main' into merging_models
SouthEndMusic Jan 11, 2024
096b9a0
Merge branch 'main' into merging_models
visr Jan 12, 2024
12 changes: 3 additions & 9 deletions core/src/bmi.jl
@@ -73,15 +73,9 @@
     end
     @debug "Read database into memory."

-    storage = if isempty(state)
-        # default to nearly empty basins, perhaps make required input
-        fill(1.0, n)
-    else
-        storages, errors = get_storages_from_levels(parameters.basin, state.level)
-        if errors
-            error("Encountered errors while parsing the initial levels of basins.")
-        end
-        storages
+    storage, errors = get_storages_from_levels(parameters.basin, state.node_id, state.level)
+    if errors
+        error("Encountered errors while parsing the initial levels of basins.")
     end
     @assert length(storage) == n "Basin / state length differs from number of Basins"
     # Integrals for PID control
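With this change the Basin / state table no longer has to cover every basin: any basin without a specified level starts from the default storage of 1.0. As a rough illustration on the Python side (the profile and level values below are made up, not taken from this PR):

```python
import pandas as pd
import ribasim

# Profile for three basins (node IDs 1, 2 and 3); values are illustrative.
profile = pd.DataFrame(
    data={
        "node_id": [1, 1, 2, 2, 3, 3],
        "area": [0.01, 1000.0] * 3,
        "level": [0.0, 1.0] * 3,
    }
)

# Partial state: only basins 1 and 3 get an initial level.
# Basin 2 is omitted and starts from the default storage of 1.0.
state = pd.DataFrame(data={"node_id": [1, 3], "level": [0.04, 0.8]})

basin = ribasim.Basin(profile=profile, state=state)
```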
27 changes: 22 additions & 5 deletions core/src/utils.jl
@@ -376,12 +376,29 @@ end
 """Compute the storages of the basins based on the water level of the basins."""
 function get_storages_from_levels(
     basin::Basin,
-    levels::Vector,
+    state_node_id::Vector{Int},
+    state_level::Vector{Float64},
 )::Tuple{Vector{Float64}, Bool}
-    storages = Float64[]
-
-    for (i, level) in enumerate(levels)
-        push!(storages, get_storage_from_level(basin, i, level))
+    (; node_id) = basin
+
+    storages = fill(1.0, length(node_id))
+    n_specified_states = length(state_node_id)
+
+    if n_specified_states > 0
+        basin_state_index = 1
+        basin_state_node_id = state_node_id[1]
+
+        for (i, id) in enumerate(node_id)
+            if basin_state_node_id == id.value
+                storages[i] =
+                    get_storage_from_level(basin, i, state_level[basin_state_index])
+                basin_state_index += 1
+                if basin_state_index > n_specified_states
+                    break
+                end
+                basin_state_node_id = state_node_id[basin_state_index]
+            end
+        end
     end
     return storages, any(isnan.(storages))
 end
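Note that the loop above advances through `state_node_id` while iterating over `basin.node_id`, so it implicitly assumes the state node IDs are a subset of the basin node IDs and appear in the same (sorted) order. A rough Python sketch of the same matching idea, for illustration only (the Julia code above is the actual implementation):

```python
def storages_from_levels(basin_node_ids, state_node_ids, state_levels, storage_from_level):
    """Default every basin to a storage of 1.0, then override the basins whose
    initial level was specified. Assumes state_node_ids is a sorted subset of
    basin_node_ids, mirroring the Julia loop above."""
    storages = [1.0] * len(basin_node_ids)
    j = 0  # index into the specified states
    for i, node_id in enumerate(basin_node_ids):
        if j < len(state_node_ids) and state_node_ids[j] == node_id:
            storages[i] = storage_from_level(i, state_levels[j])
            j += 1
    return storages
```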
24 changes: 13 additions & 11 deletions core/test/utils_test.jl
@@ -110,24 +110,26 @@ end
     ]
     storage = Ribasim.profile_storage(level, area)
     basin = Ribasim.Basin(
-        Indices(Ribasim.NodeID[1]),
-        zeros(1),
-        zeros(1),
-        zeros(1),
-        zeros(1),
-        zeros(1),
-        zeros(1),
-        [area],
-        [level],
-        [storage],
+        Indices(Ribasim.NodeID[1, 2]),
+        zeros(2),
+        zeros(2),
+        zeros(2),
+        zeros(2),
+        zeros(2),
+        zeros(2),
+        [area, area],
+        [level, level],
+        [storage, storage],
         StructVector{Ribasim.BasinTimeV1}(undef, 0),
     )

     logger = TestLogger()
     with_logger(logger) do
-        storages, errors = Ribasim.get_storages_from_levels(basin, [-1.0])
+        storages, errors = Ribasim.get_storages_from_levels(basin, [1], [-1.0])
         @test isnan(storages[1])
         @test errors
+        # Storage for basin with unspecified level is set to 1.0
+        @test storages[2] == 1.0
     end

     @test length(logger.logs) == 1
13 changes: 13 additions & 0 deletions python/ribasim/ribasim/geometry/edge.py
@@ -1,3 +1,4 @@
from copy import deepcopy
from typing import Any

import matplotlib.pyplot as plt
@@ -39,6 +40,18 @@
    Table describing the flow connections.
    """

    def offset_allocation_network_ids(
        self, offset_allocation_network_id: int, inplace: bool = True
    ) -> "Edge":
        """Add the same offset to all allocation network IDs."""
        if inplace:
            edge = self
        else:
            edge = deepcopy(self)

        edge.df.allocation_network_id += offset_allocation_network_id
        return edge

    def get_where_edge_type(self, edge_type: str) -> NDArray[np.bool_]:
        return (self.df.edge_type == edge_type).to_numpy()

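For illustration, the new helper shifts the `allocation_network_id` column of the edge table, either in place or on a copy (the `edge` object and the offset of 100 below are hypothetical):

```python
# Shift all allocation network IDs by 100, e.g. so the subnetworks of a model
# that is about to be merged in do not collide with the existing ones.
edge.offset_allocation_network_ids(100)  # modifies edge in place
shifted = edge.offset_allocation_network_ids(100, inplace=False)  # edge stays untouched
```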
56 changes: 56 additions & 0 deletions python/ribasim/ribasim/geometry/node.py
@@ -1,11 +1,14 @@
from collections.abc import Sequence
from copy import deepcopy
from typing import Any

import geopandas as gpd
import matplotlib.pyplot as plt
import numpy as np
import pandas as pd
import pandera as pa
import shapely
from matplotlib.patches import Patch
from numpy.typing import NDArray
from pandera.typing import Series
from pandera.typing.geopandas import GeoSeries
@@ -59,6 +62,18 @@

        return node_id, node_type

    def offset_allocation_network_ids(
        self, offset_allocation_network_id: int, inplace: bool = True
    ) -> "Node":
        """Add the same offset to all allocation network IDs."""
        if inplace:
            node = self
        else:
            node = deepcopy(self)

        node.df.allocation_network_id += offset_allocation_network_id
        return node

    def geometry_from_connectivity(
        self, from_id: Sequence[int], to_id: Sequence[int]
    ) -> NDArray[Any]:
@@ -130,6 +145,47 @@
        to_id = node_index[edge_node_id[:, 1]].to_numpy()
        return from_id, to_id

    def plot_allocation_networks(self, ax=None, zorder=None) -> Any:
        if ax is None:
            _, ax = plt.subplots()
            ax.axis("off")

        COLOR_SUBNETWORK = "black"
        COLOR_MAIN_NETWORK = "blue"
        ALPHA = 0.25

        contains_main_network = False
        contains_subnetworks = False

        for allocation_subnetwork_id, df_subnetwork in self.df.groupby(
            "allocation_network_id"
        ):
            if allocation_subnetwork_id is None:
                continue
            elif allocation_subnetwork_id == 1:
                contains_main_network = True
                color = COLOR_MAIN_NETWORK
            else:
                contains_subnetworks = True
                color = COLOR_SUBNETWORK

            hull = gpd.GeoDataFrame(
                geometry=[df_subnetwork.geometry.unary_union.convex_hull]
            )
            hull.plot(ax=ax, color=color, alpha=ALPHA, zorder=zorder)

        handles = []
        labels = []

        if contains_main_network:
            handles.append(Patch(facecolor=COLOR_MAIN_NETWORK, alpha=ALPHA))
            labels.append("Main network")
        if contains_subnetworks:
            handles.append(Patch(facecolor=COLOR_SUBNETWORK, alpha=ALPHA))
            labels.append("Subnetwork")

        return handles, labels

    def plot(self, ax=None, zorder=None) -> Any:
        """
        Plot the nodes. Each node type is given a separate marker.
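plot_allocation_networks draws a convex hull per allocation network and returns matplotlib legend handles and labels instead of adding a legend itself, so the caller can combine them with the regular node legend. A small usage sketch (the `node` object below is assumed to be a Node table with allocation_network_id filled in; it is not part of this diff):

```python
import matplotlib.pyplot as plt

fig, ax = plt.subplots()
handles, labels = node.plot_allocation_networks(ax=ax)  # one hull per subnetwork
node.plot(ax=ax)  # regular node markers on top
ax.legend(handles, labels, loc="lower left")
plt.show()
```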
145 changes: 145 additions & 0 deletions python/ribasim/ribasim/input_base.py
@@ -3,6 +3,7 @@
from collections.abc import Callable, Generator
from contextlib import closing
from contextvars import ContextVar
from copy import deepcopy
from pathlib import Path
from sqlite3 import Connection, connect
from typing import (
@@ -12,6 +13,7 @@
)

import geopandas as gpd
import numpy as np
import pandas as pd
import pandera as pa
from pandera.typing import DataFrame
@@ -159,6 +161,19 @@
class TableModel(FileModel, Generic[TableT]):
    df: DataFrame[TableT] | None = Field(default=None, exclude=True, repr=False)

    def __eq__(self, other) -> bool:
        if not type(self) == type(other):
            return False
        if self.filepath != other.filepath:
            return False

        if self.df is None and other.df is None:
            return True
        elif isinstance(self.df, (pd.DataFrame, gpd.GeoDataFrame)):
            return self.df.equals(other.df)
        else:
            return False

    @field_validator("df")
    @classmethod
    def prefix_extra_columns(cls, v: DataFrame[TableT]):
@@ -202,6 +217,81 @@

        return node_ids

    def offset_node_ids(
        self, offset_node_id: int, inplace: bool = True
    ) -> "TableModel[TableT]":
        """Add the same offset to all node IDs."""
        if inplace:
            node = self
        else:
            node = deepcopy(self)

        df = node.df
        if isinstance(df, (pd.DataFrame, gpd.GeoDataFrame)):
            df.index += offset_node_id
            for name_column in [
                "node_id",
                "from_node_id",
                "to_node_id",
                "listen_feature_id",
            ]:
                if hasattr(df, name_column):
                    df[name_column] += offset_node_id

        return node

    def offset_edge_ids(
        self, offset_edge_id: int, inplace=True
    ) -> "TableModel[TableT]":
        if self.tablename() == "Edge" and isinstance(self.df, gpd.GeoDataFrame):
            if inplace:
                edge = self
            else:
                edge = deepcopy(self)

            df = edge.df
            if isinstance(df, gpd.GeoDataFrame):
                df.index += offset_edge_id

            return edge

    def merge(
        self, table_added: "TableModel[TableT]", inplace: bool = True
    ) -> "TableModel[TableT]":
        """Merge an added table of the same type into this table."""
        assert type(self) == type(
            table_added
        ), "Can only merge tables of the same type."

        if inplace:
            table = self
        else:
            table = deepcopy(self)

        if table_added.df is not None and table.df is not None:
            if isinstance(self.df, gpd.GeoDataFrame):
                common_node_ids = np.intersect1d(
                    table.df.index.to_numpy(), table_added.df.index.to_numpy()
                )
            else:
                common_node_ids = np.intersect1d(
                    table.df.node_id.to_numpy(), table_added.df.node_id.to_numpy()
                )

            assert (
                common_node_ids.size == 0
            ), f"Self and added table (of type {type(self)}) have common IDs: {common_node_ids}."

            table.df = pd.concat(
                [
                    table.df,
                    table_added.df,
                ]
            )  # type: ignore

        elif table_added.df is not None:
            table.df = table_added.df

        return table

    @classmethod
    def _load(cls, filepath: Path | None) -> dict[str, Any]:
        db = context_file_loading.get().get("database")
@@ -430,6 +520,61 @@
        ids = self.node_ids()
        return list(ids), len(ids) * [self.get_input_type()]

    def offset_node_ids(self, offset_node_id: int, inplace: bool = True) -> "NodeModel":
        """Add the same offset to all node IDs in all underlying tables."""
        if inplace:
            node = self
        else:
            node = deepcopy(self)
        for field in node.fields():
            attr = getattr(node, field)
            if isinstance(attr, TableModel):
                table = attr
                setattr(
                    node,
                    field,
                    table.offset_node_ids(offset_node_id),
                )
        return node

    def merge(self, node_added: "NodeModel", inplace: bool = True) -> "NodeModel":
        """Merge an added node of the same type into this node."""
        assert type(self) == type(node_added), "Can only merge nodes of the same type."

        if inplace:
            node = self
        else:
            node = deepcopy(self)

        for field in node_added.fields():
            attr = getattr(node_added, field)
            if isinstance(attr, TableModel):
                table_added = attr
                table_node = getattr(node, field)
                table_added = table_node.merge(table_added, inplace=False)
                setattr(node, field, table_added)
        return node

    def delete_by_ids(self, node_ids: list[int], inplace: bool = True) -> "NodeModel":
        """Delete all rows of the underlying tables whose node ID is in the given list."""
        if inplace:
            node = self
        else:
            node = deepcopy(self)

        for field in node.fields():
            attr = getattr(node, field)
            if isinstance(attr, TableModel):
                df = attr.df
                if isinstance(df, (pd.DataFrame, gpd.GeoDataFrame)):
                    df = df[~df.node_id.isin(node_ids)]  # type: ignore
                    if df.empty:
                        attr.df = None
                    else:
                        attr.df = df

        return node

    def _save(self, directory: DirectoryPath, input_dir: DirectoryPath, **kwargs):
        for field in self.fields():
            getattr(self, field)._save(
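Taken together, these TableModel and NodeModel helpers are the building blocks for merging one model into another: offset the IDs of the model being added so they cannot collide, then concatenate the tables per node type. A rough sketch of that flow is given below; the `merge_models` helper and the `nodes()` / `network` accessors are assumptions for illustration, not part of this diff:

```python
from copy import deepcopy

def merge_models(model, model_added, offset_node_id, offset_edge_id, offset_allocation_network_id):
    """Sketch: merge model_added into a copy of model, assuming model.nodes()
    returns the NodeModel fields by name and model.network holds the Node and
    Edge tables (hypothetical accessors)."""
    model = deepcopy(model)
    model_added = deepcopy(model_added)

    # Shift all IDs of the added model so they do not overlap with the base model.
    for node_model in model_added.nodes().values():
        node_model.offset_node_ids(offset_node_id)
    model_added.network.node.offset_allocation_network_ids(offset_allocation_network_id)
    model_added.network.edge.offset_allocation_network_ids(offset_allocation_network_id)
    model_added.network.edge.offset_edge_ids(offset_edge_id)

    # Concatenate the tables node type by node type.
    for name, node_model_added in model_added.nodes().items():
        getattr(model, name).merge(node_model_added)
    return model
```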