diff --git a/core/src/callback.jl b/core/src/callback.jl index 62bb8e380..14c7ffafd 100644 --- a/core/src/callback.jl +++ b/core/src/callback.jl @@ -123,29 +123,27 @@ function get_value( (; basin, flow_boundary, level_boundary) = p if variable == "level" - hasindex_basin, basin_idx = id_index(basin.node_id, node_id) - level_boundary_idx = findsorted(level_boundary.node_id, node_id) - - if hasindex_basin + if node_id.type == NodeType.Basin + _, basin_idx = id_index(basin.node_id, node_id) _, level = get_area_and_level(basin, basin_idx, u[basin_idx]) - elseif level_boundary_idx !== nothing + elseif node_id.type == NodeType.LevelBoundary + level_boundary_idx = findsorted(level_boundary.node_id, node_id) level = level_boundary.level[level_boundary_idx](t + Δt) else error( "Level condition node '$node_id' is neither a basin nor a level boundary.", ) end - value = level elseif variable == "flow_rate" - flow_boundary_idx = findsorted(flow_boundary.node_id, node_id) - - if flow_boundary_idx === nothing + if node_id.type == NodeType.FlowBoundary + flow_boundary_idx = findsorted(flow_boundary.node_id, node_id) + value = flow_boundary.flow_rate[flow_boundary_idx](t + Δt) + else error("Flow condition node $node_id is not a flow boundary.") end - value = flow_boundary.flow_rate[flow_boundary_idx](t + Δt) else error("Unsupported condition variable $variable.") end @@ -418,7 +416,7 @@ function update_basin(integrator)::Nothing ) for row in timeblock - hasindex, i = id_index(node_id, NodeID(row.node_id)) + hasindex, i = id_index(node_id, NodeID(NodeType.Basin, row.node_id)) @assert hasindex "Table 'Basin / time' contains non-Basin IDs" set_table_row!(table, row, i) end @@ -461,7 +459,7 @@ function update_tabulated_rating_curve!(integrator)::Nothing id = first(group).node_id level = [row.level for row in group] flow_rate = [row.flow_rate for row in group] - i = searchsortedfirst(node_id, NodeID(id)) + i = searchsortedfirst(node_id, NodeID(NodeType.TabulatedRatingCurve, id)) tables[i] = LinearInterpolation(flow_rate, level; extrapolate = true) end return nothing diff --git a/core/src/graph.jl b/core/src/graph.jl index 82bd61175..a08888062 100644 --- a/core/src/graph.jl +++ b/core/src/graph.jl @@ -10,7 +10,7 @@ function create_graph(db::DB, config::Config, chunk_sizes::Vector{Int})::MetaGra execute(db, "SELECT fid, type, allocation_network_id FROM Node ORDER BY fid") edge_rows = execute( db, - "SELECT fid, from_node_id, to_node_id, edge_type, allocation_network_id FROM Edge ORDER BY fid", + "SELECT fid, from_node_type, from_node_id, to_node_type, to_node_id, edge_type, allocation_network_id FROM Edge ORDER BY fid", ) # Node IDs per subnetwork node_ids = Dict{Int, Set{NodeID}}() @@ -34,7 +34,7 @@ function create_graph(db::DB, config::Config, chunk_sizes::Vector{Int})::MetaGra graph_data = nothing, ) for row in node_rows - node_id = NodeID(row.fid) + node_id = NodeID(row.type, row.fid) # Process allocation network ID if ismissing(row.allocation_network_id) allocation_network_id = 0 @@ -51,15 +51,23 @@ function create_graph(db::DB, config::Config, chunk_sizes::Vector{Int})::MetaGra flow_vertical_dict[node_id] = flow_vertical_counter end end - for (; fid, from_node_id, to_node_id, edge_type, allocation_network_id) in edge_rows + for (; + fid, + from_node_type, + from_node_id, + to_node_type, + to_node_id, + edge_type, + allocation_network_id, + ) in edge_rows try # hasfield does not work edge_type = getfield(EdgeType, Symbol(edge_type)) catch error("Invalid edge type $edge_type.") end - id_src = 
NodeID(from_node_id) - id_dst = NodeID(to_node_id) + id_src = NodeID(from_node_type, from_node_id) + id_dst = NodeID(to_node_type, to_node_id) if ismissing(allocation_network_id) allocation_network_id = 0 end diff --git a/core/src/parameter.jl b/core/src/parameter.jl index da492b245..d4c33d41e 100644 --- a/core/src/parameter.jl +++ b/core/src/parameter.jl @@ -1,14 +1,35 @@ +# EdgeType.flow and NodeType.FlowBoundary +@enumx EdgeType flow control none +@eval @enumx NodeType $(config.nodetypes...) + +# Support creating a NodeType enum instance from a symbol or string +function NodeType.T(s::Symbol)::NodeType.T + symbol_map = EnumX.symbol_map(NodeType.T) + for (sym, val) in symbol_map + sym == s && return NodeType.T(val) + end + throw(ArgumentError("Invalid value for NodeType: $s")) +end + +NodeType.T(str::AbstractString) = NodeType.T(Symbol(str)) + struct NodeID + type::NodeType.T value::Int end +NodeID(type::Symbol, value::Int) = NodeID(NodeType.T(type), value) +NodeID(type::AbstractString, value::Int) = NodeID(NodeType.T(type), value) + Base.Int(id::NodeID) = id.value -Base.convert(::Type{NodeID}, value::Int) = NodeID(value) Base.convert(::Type{Int}, id::NodeID) = id.value Base.broadcastable(id::NodeID) = Ref(id) -Base.show(io::IO, id::NodeID) = print(io, '#', Int(id)) +Base.show(io::IO, id::NodeID) = print(io, id.type, " #", Int(id)) function Base.isless(id_1::NodeID, id_2::NodeID)::Bool + if id_1.type != id_2.type + error("Cannot compare NodeIDs of different types") + end return Int(id_1) < Int(id_2) end @@ -64,8 +85,6 @@ struct Allocation } end -@enumx EdgeType flow control none - """ Type for storing metadata of nodes in the graph type: type of the node @@ -318,7 +337,7 @@ struct Pump{T} <: AbstractParameterNode control_mapping, is_pid_controlled, ) where {T} - if valid_flow_rates(node_id, get_tmp(flow_rate, 0), control_mapping, :Pump) + if valid_flow_rates(node_id, get_tmp(flow_rate, 0), control_mapping) return new{T}( node_id, active, @@ -363,7 +382,7 @@ struct Outlet{T} <: AbstractParameterNode control_mapping, is_pid_controlled, ) where {T} - if valid_flow_rates(node_id, get_tmp(flow_rate, 0), control_mapping, :Outlet) + if valid_flow_rates(node_id, get_tmp(flow_rate, 0), control_mapping) return new{T}( node_id, active, diff --git a/core/src/read.jl b/core/src/read.jl index 39a24ef12..8052588d3 100644 --- a/core/src/read.jl +++ b/core/src/read.jl @@ -32,7 +32,7 @@ function parse_static_and_time( # of the current type vals_out = [] - node_ids = get_ids(db, nodetype) + node_ids = NodeID.(nodetype, get_ids(db, nodetype)) node_names = get_names(db, nodetype) n_nodes = length(node_ids) @@ -75,17 +75,21 @@ function parse_static_and_time( end # Get node IDs of static nodes if the static table exists - static_node_ids = if static === nothing - Set{Int}() + if static === nothing + static_node_id_vec = NodeID[] + static_node_ids = Set{NodeID}() else - Set(static.node_id) + static_node_id_vec = NodeID.(nodetype, static.node_id) + static_node_ids = Set(static_node_id_vec) end # Get node IDs of transient nodes if the time table exists time_node_ids = if time === nothing - Set{Int}() + time_node_id_vec = NodeID[] + time_node_ids = Set{NodeID}() else - Set(time.node_id) + time_node_id_vec = NodeID.(nodetype, time.node_id) + time_node_ids = Set(time_node_id_vec) end errors = false @@ -95,7 +99,7 @@ function parse_static_and_time( for (node_idx, (node_id, node_name)) in enumerate(zip(node_ids, node_names)) if node_id in static_node_ids # The interval of rows of the static table that have the current 
node_id - rows = searchsorted(static.node_id, node_id) + rows = searchsorted(static_node_id_vec, node_id) # The rows of the static table that have the current node_id static_id = view(static, rows) # Here it is assumed that the parameters of a node are given by a single @@ -124,16 +128,16 @@ function parse_static_and_time( end # Add the parameter values to the control mapping control_state_key = coalesce(control_state, "") - control_mapping[(NodeID(node_id), control_state_key)] = + control_mapping[(node_id, control_state_key)] = NamedTuple{Tuple(parameter_names)}(Tuple(parameter_values)) end elseif node_id in time_node_ids # TODO replace (time, node_id) order by (node_id, time) # this fits our access pattern better, so we can use views - idx = findall(==(node_id), time.node_id) + idx = findall(==(node_id), time_node_id_vec) time_subset = time[idx] - time_first_idx = searchsortedfirst(time_subset.node_id, node_id) + time_first_idx = searchsortedfirst(time_node_id_vec[idx], node_id) for parameter_name in parameter_names # If the parameter is interpolatable, create an interpolation object @@ -175,10 +179,10 @@ function static_and_time_node_ids( static::StructVector, time::StructVector, node_type::String, -)::Tuple{Set{Int}, Set{Int}, Vector{Int}, Vector{String}, Bool} - static_node_ids = Set(static.node_id) - time_node_ids = Set(time.node_id) - node_ids = get_ids(db, node_type) +)::Tuple{Set{NodeID}, Set{NodeID}, Vector{NodeID}, Vector{String}, Bool} + static_node_ids = Set(NodeID.(node_type, static.node_id)) + time_node_ids = Set(NodeID.(node_type, time.node_id)) + node_ids = NodeID.(node_type, get_ids(db, node_type)) node_names = get_names(db, node_type) doubles = intersect(static_node_ids, time_node_ids) errors = false @@ -237,7 +241,7 @@ function LinearResistance(db::DB, config::Config)::LinearResistance end return LinearResistance( - NodeID.(parsed_parameters.node_id), + NodeID.(NodeType.LinearResistance, parsed_parameters.node_id), BitVector(parsed_parameters.active), parsed_parameters.resistance, parsed_parameters.max_flow_rate, @@ -269,7 +273,10 @@ function TabulatedRatingCurve(db::DB, config::Config)::TabulatedRatingCurve # If it has a control_state add it to control_mapping. # The last rating curve forms the initial condition and activity. source = "static" - rows = searchsorted(static.node_id, node_id) + rows = searchsorted( + NodeID.(NodeType.TabulatedRatingCurve, static.node_id), + node_id, + ) static_id = view(static, rows) local is_active, interpolation # coalesce control_state to nothing to avoid boolean groupby logic on missing @@ -279,8 +286,10 @@ function TabulatedRatingCurve(db::DB, config::Config)::TabulatedRatingCurve is_active = coalesce(first(group).active, true) interpolation, is_valid = qh_interpolation(node_id, StructVector(group)) if !ismissing(control_state) - control_mapping[(NodeID(node_id), control_state)] = - (; tables = interpolation, active = is_active) + control_mapping[( + NodeID(NodeType.TabulatedRatingCurve, node_id), + control_state, + )] = (; tables = interpolation, active = is_active) end end push!(interpolations, interpolation) @@ -294,11 +303,11 @@ function TabulatedRatingCurve(db::DB, config::Config)::TabulatedRatingCurve push!(interpolations, interpolation) push!(active, true) else - @error "TabulatedRatingCurve node $(repr(node_name)) #$node_id data not in any table." + @error "$node_id data not in any table." 
errors = true end if !is_valid - @error "A Q(h) relationship for TabulatedRatingCurve $(repr(node_name)) #$node_id from the $source table has repeated levels, this can not be interpolated." + @error "A Q(h) relationship for $node_id from the $source table has repeated levels, this can not be interpolated." errors = true end end @@ -307,13 +316,7 @@ function TabulatedRatingCurve(db::DB, config::Config)::TabulatedRatingCurve error("Errors occurred when parsing TabulatedRatingCurve data.") end - return TabulatedRatingCurve( - NodeID.(node_ids), - active, - interpolations, - time, - control_mapping, - ) + return TabulatedRatingCurve(node_ids, active, interpolations, time, control_mapping) end function ManningResistance(db::DB, config::Config)::ManningResistance @@ -326,7 +329,7 @@ function ManningResistance(db::DB, config::Config)::ManningResistance end return ManningResistance( - NodeID.(parsed_parameters.node_id), + NodeID.(NodeType.ManningResistance, parsed_parameters.node_id), BitVector(parsed_parameters.active), parsed_parameters.length, parsed_parameters.manning_n, @@ -345,7 +348,7 @@ function FractionalFlow(db::DB, config::Config)::FractionalFlow end return FractionalFlow( - NodeID.(parsed_parameters.node_id), + NodeID.(NodeType.FractionalFlow, parsed_parameters.node_id), parsed_parameters.fraction, parsed_parameters.control_mapping, ) @@ -376,11 +379,7 @@ function LevelBoundary(db::DB, config::Config)::LevelBoundary error("Errors occurred when parsing LevelBoundary data.") end - return LevelBoundary( - NodeID.(node_ids), - parsed_parameters.active, - parsed_parameters.level, - ) + return LevelBoundary(node_ids, parsed_parameters.active, parsed_parameters.level) end function FlowBoundary(db::DB, config::Config)::FlowBoundary @@ -417,18 +416,14 @@ function FlowBoundary(db::DB, config::Config)::FlowBoundary error("Errors occurred when parsing FlowBoundary data.") end - return FlowBoundary( - NodeID.(node_ids), - parsed_parameters.active, - parsed_parameters.flow_rate, - ) + return FlowBoundary(node_ids, parsed_parameters.active, parsed_parameters.flow_rate) end function Pump(db::DB, config::Config, chunk_sizes::Vector{Int})::Pump static = load_structvector(db, config, PumpStaticV1) defaults = (; min_flow_rate = 0.0, max_flow_rate = Inf, active = true) parsed_parameters, valid = parse_static_and_time(db, config, "Pump"; static, defaults) - is_pid_controlled = falses(length(NodeID.(parsed_parameters.node_id))) + is_pid_controlled = falses(length(NodeID.(NodeType.Pump, parsed_parameters.node_id))) if !valid error("Errors occurred when parsing Pump data.") @@ -442,7 +437,7 @@ function Pump(db::DB, config::Config, chunk_sizes::Vector{Int})::Pump end return Pump( - NodeID.(parsed_parameters.node_id), + NodeID.(NodeType.Pump, parsed_parameters.node_id), BitVector(parsed_parameters.active), flow_rate, parsed_parameters.min_flow_rate, @@ -457,7 +452,7 @@ function Outlet(db::DB, config::Config, chunk_sizes::Vector{Int})::Outlet defaults = (; min_flow_rate = 0.0, max_flow_rate = Inf, min_crest_level = -Inf, active = true) parsed_parameters, valid = parse_static_and_time(db, config, "Outlet"; static, defaults) - is_pid_controlled = falses(length(NodeID.(parsed_parameters.node_id))) + is_pid_controlled = falses(length(NodeID.(NodeType.Outlet, parsed_parameters.node_id))) if !valid error("Errors occurred when parsing Outlet data.") @@ -471,7 +466,7 @@ function Outlet(db::DB, config::Config, chunk_sizes::Vector{Int})::Outlet end return Outlet( - NodeID.(parsed_parameters.node_id), + 
NodeID.(NodeType.Outlet, parsed_parameters.node_id), BitVector(parsed_parameters.active), flow_rate, parsed_parameters.min_flow_rate, @@ -484,7 +479,7 @@ end function Terminal(db::DB, config::Config)::Terminal static = load_structvector(db, config, TerminalStaticV1) - return Terminal(NodeID.(static.node_id)) + return Terminal(NodeID.(NodeType.Terminal, static.node_id)) end function Basin(db::DB, config::Config, chunk_sizes::Vector{Int})::Basin @@ -515,7 +510,7 @@ function Basin(db::DB, config::Config, chunk_sizes::Vector{Int})::Basin check_no_nans(table, "Basin") return Basin( - Indices(NodeID.(node_id)), + Indices(NodeID.(NodeType.Basin, node_id)), precipitation, potential_evaporation, drainage, @@ -538,7 +533,8 @@ function DiscreteControl(db::DB, config::Config)::DiscreteControl rows = execute(db, "SELECT from_node_id, edge_type FROM Edge ORDER BY fid") for (; from_node_id, edge_type) in rows if edge_type == "control" - control_state[NodeID(from_node_id)] = ("undefined_state", 0.0) + control_state[NodeID(NodeType.DiscreteControl, from_node_id)] = + ("undefined_state", 0.0) end end @@ -548,7 +544,8 @@ function DiscreteControl(db::DB, config::Config)::DiscreteControl for (node_id, truth_state, control_state_) in zip(logic.node_id, logic.truth_state, logic.control_state) - logic_mapping[(NodeID(node_id), truth_state)] = control_state_ + logic_mapping[(NodeID(NodeType.DiscreteControl, node_id), truth_state)] = + control_state_ end logic_mapping = expand_logic_mapping(logic_mapping) @@ -562,8 +559,8 @@ function DiscreteControl(db::DB, config::Config)::DiscreteControl ) return DiscreteControl( - NodeID.(condition.node_id), # Not unique - NodeID.(condition.listen_feature_id), + NodeID.(NodeType.DiscreteControl, condition.node_id), # Not unique + NodeID.(condition.listen_feature_type, condition.listen_feature_id), condition.variable, look_ahead, condition.greater_than, @@ -626,9 +623,9 @@ function PidControl(db::DB, config::Config, chunk_sizes::Vector{Int})::PidContro end return PidControl( - NodeID.(node_ids), + node_ids, BitVector(parsed_parameters.active), - NodeID.(parsed_parameters.listen_node_id), + NodeID.(parsed_parameters.listen_node_type, parsed_parameters.listen_node_id), parsed_parameters.target, pid_parameters, pid_error, @@ -643,6 +640,8 @@ function User(db::DB, config::Config)::User static_node_ids, time_node_ids, node_ids, _, valid = static_and_time_node_ids(db, static, time, "User") + time_node_id_vec = NodeID.(NodeType.User, time.node_id) + if !valid error("Problems encountered when parsing User static and time node IDs.") end @@ -676,7 +675,7 @@ function User(db::DB, config::Config)::User if node_id in static_node_ids push!(demand_from_timeseries, false) - rows = searchsorted(static.node_id, node_id) + rows = searchsorted(NodeID.(NodeType.User, static.node_id), node_id) static_id = view(static, rows) for p in priorities idx = findsorted(static_id.priority, p) @@ -715,7 +714,7 @@ function User(db::DB, config::Config)::User end push!(demand_itp, demand_itp_node_id) - first_row_idx = searchsortedfirst(time.node_id, node_id) + first_row_idx = searchsortedfirst(time_node_id_vec, node_id) first_row = time[first_row_idx] is_active = true else @@ -748,8 +747,6 @@ function User(db::DB, config::Config)::User abstracted = Float64[], ) - node_ids = NodeID.(node_ids) - return User( node_ids, active, @@ -773,7 +770,7 @@ function Subgrid(db::DB, config::Config, basin::Basin)::Subgrid has_error = false for group in IterTools.groupby(row -> row.subgrid_id, tables) subgrid_id = 
first(getproperty.(group, :subgrid_id))
-        node_id = NodeID(first(getproperty.(group, :node_id)))
+        node_id = NodeID(NodeType.Basin, first(getproperty.(group, :node_id)))
         basin_level = getproperty.(group, :basin_level)
         subgrid_level = getproperty.(group, :subgrid_level)
@@ -855,12 +852,16 @@ function Parameters(db::DB, config::Config)::Parameters
     # Set is_pid_controlled to true for those pumps and outlets that are PID controlled
     for id in pid_control.node_id
         id_controlled = only(outneighbor_labels_type(graph, id, EdgeType.control))
-        pump_idx = findsorted(pump.node_id, id_controlled)
-        if pump_idx === nothing
+        if id_controlled.type == NodeType.Pump
+            pump_idx = findsorted(pump.node_id, id_controlled)
+            pump.is_pid_controlled[pump_idx] = true
+        elseif id_controlled.type == NodeType.Outlet
             outlet_idx = findsorted(outlet.node_id, id_controlled)
             outlet.is_pid_controlled[outlet_idx] = true
         else
-            pump.is_pid_controlled[pump_idx] = true
+            error(
+                "Only Pump and Outlet can be controlled by PidController, got $id_controlled",
+            )
         end
     end
diff --git a/core/src/schema.jl b/core/src/schema.jl
index da9c61421..5209152ce 100644
--- a/core/src/schema.jl
+++ b/core/src/schema.jl
@@ -1,7 +1,5 @@
 # These schemas define the name of database tables and the configuration file structure
 # The identifier is parsed as ribasim.nodetype.kind, no capitals or underscores are allowed.
-@schema "ribasim.node" Node
-@schema "ribasim.edge" Edge
 @schema "ribasim.discretecontrol.condition" DiscreteControlCondition
 @schema "ribasim.discretecontrol.logic" DiscreteControlLogic
 @schema "ribasim.basin.static" BasinStatic
@@ -58,22 +56,6 @@ function nodetype(
     return Symbol(node[begin:length(n)]), k
 end
-@version NodeV1 begin
-    fid::Int
-    name::String = isnothing(s) ? "" : String(s)
-    type::String = in(Symbol(type), nodetypes) ? type : error("Unknown node type $type")
-    allocation_network_id::Union{Missing, Int}
-end
-
-@version EdgeV1 begin
-    fid::Int
-    name::String = isnothing(s) ? "" : String(s)
-    from_node_id::Int
-    to_node_id::Int
-    edge_type::String
-    allocation_network_id::Union{Missing, Int}
-end
-
 @version PumpStaticV1 begin
     node_id::Int
     active::Union{Missing, Bool}
@@ -199,6 +181,7 @@ end
 @version DiscreteControlConditionV1 begin
     node_id::Int
+    listen_feature_type::Union{Missing, String}
     listen_feature_id::Int
     variable::String
     greater_than::Float64
@@ -214,6 +197,7 @@ end
 @version PidControlStaticV1 begin
     node_id::Int
     active::Union{Missing, Bool}
+    listen_node_type::Union{Missing, String}
     listen_node_id::Int
     target::Float64
     proportional::Float64
@@ -224,6 +208,7 @@ end
 @version PidControlTimeV1 begin
     node_id::Int
+    listen_node_type::Union{Missing, String}
     listen_node_id::Int
     time::DateTime
     target::Float64
diff --git a/core/src/util.jl b/core/src/util.jl
index ba3c37fe4..5251e73de 100644
--- a/core/src/util.jl
+++ b/core/src/util.jl
@@ -35,7 +35,7 @@ function get_storage_from_level(basin::Basin, state_idx::Int, level::Float64)::F
     if level < bottom
         node_id = basin.node_id.values[state_idx]
-        @error "The level $level of basin $node_id is lower than the bottom of this basin $bottom."
+        @error "The level $level of $node_id is lower than the bottom of this basin; $bottom."
         return NaN
     end
@@ -181,15 +181,8 @@ end
 For an element `id` and a vector of elements `ids`, get the range of indices of the last
 consecutive block of `id`. Returns the empty range `1:0` if `id` is not in `ids`.
- -```jldoctest -# 1 2 3 4 5 6 7 8 9 -Ribasim.findlastgroup(2, [5,4,2,2,5,2,2,2,1]) -# output -6:8 -``` """ -function findlastgroup(id::Int, ids::AbstractVector{Int})::UnitRange{Int} +function findlastgroup(id::NodeID, ids::AbstractVector{NodeID})::UnitRange{Int} idx_block_end = findlast(==(id), ids) if idx_block_end === nothing return 1:0 @@ -209,11 +202,12 @@ function get_scalar_interpolation( starttime::DateTime, t_end::Float64, time::AbstractVector, - node_id::Int, + node_id::NodeID, param::Symbol; default_value::Float64 = 0.0, )::Tuple{LinearInterpolation, Bool} - rows = searchsorted(time.node_id, node_id) + nodetype = node_id.type + rows = searchsorted(NodeID.(nodetype, time.node_id), node_id) parameter = getfield.(time, param)[rows] parameter = coalesce(parameter, default_value) times = seconds_since.(time.time[rows], starttime) @@ -271,10 +265,11 @@ From a table with columns node_id, flow_rate (Q) and level (h), create a LinearInterpolation from level to flow rate for a given node_id. """ function qh_interpolation( - node_id::Int, + node_id::NodeID, table::StructVector, )::Tuple{LinearInterpolation, Bool} - rowrange = findlastgroup(node_id, table.node_id) + nodetype = node_id.type + rowrange = findlastgroup(node_id, NodeID.(nodetype, table.node_id)) @assert !isempty(rowrange) "timeseries starts after model start time" return qh_interpolation(table.level[rowrange], table.flow_rate[rowrange]) end @@ -383,17 +378,15 @@ function get_level( storage::Union{AbstractArray, Number} = 0, )::Union{Real, Nothing} (; basin, level_boundary) = p - hasindex, i = id_index(basin.node_id, node_id) - current_level = get_tmp(basin.current_level, storage) - return if hasindex + if node_id.type == NodeType.Basin + _, i = id_index(basin.node_id, node_id) + current_level = get_tmp(basin.current_level, storage) current_level[i] - else + elseif node_id.type == NodeType.LevelBoundary i = findsorted(level_boundary.node_id, node_id) - if i === nothing - nothing - else - level_boundary.level[i](t) - end + level_boundary.level[i](t) + else + nothing end end @@ -479,7 +472,7 @@ function expand_logic_mapping( if haskey(logic_mapping_expanded, new_key) control_state_existing = logic_mapping_expanded[new_key] control_states = sort([control_state, control_state_existing]) - msg = "Multiple control states found for DiscreteControl node $node_id for truth state `$truth_state_new`: $control_states." + msg = "Multiple control states found for $node_id for truth state `$truth_state_new`: $control_states." 
@assert control_state_existing == control_state msg else logic_mapping_expanded[new_key] = control_state diff --git a/core/src/validation.jl b/core/src/validation.jl index 185e60e04..0ce8fc846 100644 --- a/core/src/validation.jl +++ b/core/src/validation.jl @@ -98,8 +98,6 @@ sort_by_subgrid_level(row) = (row.subgrid_id, row.basin_level) # get the right sort by function given the Schema, with sort_by_id as the default sort_by_function(table::StructVector{<:Legolas.AbstractRecord}) = sort_by_id -sort_by_function(table::StructVector{NodeV1}) = sort_by_fid -sort_by_function(table::StructVector{EdgeV1}) = sort_by_fid sort_by_function(table::StructVector{TabulatedRatingCurveStaticV1}) = sort_by_id_state_level sort_by_function(table::StructVector{BasinProfileV1}) = sort_by_id_level sort_by_function(table::StructVector{UserStaticV1}) = sort_by_priority @@ -174,21 +172,20 @@ function valid_profiles( for (id, levels, areas) in zip(node_id, level, area) if !allunique(levels) errors = true - @error "Basin $id has repeated levels, this cannot be interpolated." + @error "$id has repeated levels, this cannot be interpolated." end if areas[1] <= 0 errors = true @error( - "Basin profiles cannot start with area <= 0 at the bottom for numerical reasons.", - node_id = id, + "$id profile cannot start with area <= 0 at the bottom for numerical reasons.", area = areas[1], ) end if areas[end] < areas[end - 1] errors = true - @error "Basin profiles cannot have decreasing area at the top since extrapolating could lead to negative areas, found decreasing top areas for node $id." + @error "$id profile cannot have decreasing area at the top since extrapolating could lead to negative areas." end end return !errors @@ -201,7 +198,6 @@ function valid_flow_rates( node_id::Vector{NodeID}, flow_rate::Vector, control_mapping::Dict{Tuple{NodeID, String}, NamedTuple}, - node_type::Symbol, )::Bool errors = false @@ -217,7 +213,7 @@ function valid_flow_rates( if flow_rate_ < 0.0 errors = true control_state = key[2] - @error "$node_type flow rates must be non-negative, found $flow_rate_ for control state '$control_state' of $id_controlled." + @error "$id_controlled flow rates must be non-negative, found $flow_rate_ for control state '$control_state'." end end @@ -227,7 +223,7 @@ function valid_flow_rates( end if flow_rate_ < 0.0 errors = true - @error "$node_type flow rates must be non-negative, found $flow_rate_ for static $id." + @error "$id flow rates must be non-negative, found $flow_rate_." end end @@ -246,7 +242,7 @@ function valid_pid_connectivity( for (id, listen_id) in zip(pid_control_node_id, pid_control_listen_node_id) has_index, _ = id_index(basin_node_id, listen_id) if !has_index - @error "Listen node $listen_id of PidControl node $id is not a Basin" + @error "Listen node $listen_id of $id is not a Basin" errors = true end @@ -255,7 +251,7 @@ function valid_pid_connectivity( if controlled_id in pump_node_id pump_intake_id = inflow_id(graph, controlled_id) if pump_intake_id != listen_id - @error "Listen node $listen_id of PidControl node $id is not upstream of controlled pump $controlled_id" + @error "Listen node $listen_id of $id is not upstream of controlled $controlled_id" errors = true end else @@ -296,7 +292,7 @@ function valid_fractional_flow( if src_outneighbor_ids ⊈ node_id_set errors = true @error( - "Node $src_id combines fractional flow outneighbors with other outneigbor types." + "$src_id combines fractional flow outneighbors with other outneigbor types." 
) end @@ -353,17 +349,17 @@ function valid_subgrid( if !(node_id in keys(node_to_basin)) errors = true - @error "The node_id of the Basin / subgrid_level does not refer to a basin." node_id subgrid_id + @error "The node_id of the Basin / subgrid does not exist." node_id subgrid_id end if !allunique(basin_level) errors = true - @error "Basin / subgrid_level subgrid_id $(subgrid_id) has repeated basin levels, this cannot be interpolated." + @error "Basin / subgrid subgrid_id $(subgrid_id) has repeated basin levels, this cannot be interpolated." end if !allunique(subgrid_level) errors = true - @error "Basin / subgrid_level subgrid_id $(subgrid_id) has repeated element levels, this cannot be interpolated." + @error "Basin / subgrid subgrid_id $(subgrid_id) has repeated element levels, this cannot be interpolated." end return !errors @@ -381,7 +377,7 @@ function valid_demand( for (col, id) in zip(demand_itp, node_id) for (demand_p_itp, p_itp) in zip(col, priorities) if any(demand_p_itp.u .< 0.0) - @error "Demand of user node $id with priority $p_itp should be non-negative" + @error "Demand of $id with priority $p_itp should be non-negative" errors = true end end @@ -444,22 +440,22 @@ function valid_n_neighbors(node::AbstractParameterNode, graph::MetaGraph)::Bool n_outneighbors = count(x -> true, outneighbor_labels_type(graph, id, edge_type)) if n_inneighbors < bounds.in_min - @error "Nodes of type $node_type must have at least $(bounds.in_min) $edge_type inneighbor(s) (got $n_inneighbors for node $id)." + @error "$id must have at least $(bounds.in_min) $edge_type inneighbor(s) (got $n_inneighbors)." errors = true end if n_inneighbors > bounds.in_max - @error "Nodes of type $node_type can have at most $(bounds.in_max) $edge_type inneighbor(s) (got $n_inneighbors for node $id)." + @error "$id can have at most $(bounds.in_max) $edge_type inneighbor(s) (got $n_inneighbors)." errors = true end if n_outneighbors < bounds.out_min - @error "Nodes of type $node_type must have at least $(bounds.out_min) $edge_type outneighbor(s) (got $n_outneighbors for node $id)." + @error "$id must have at least $(bounds.out_min) $edge_type outneighbor(s) (got $n_outneighbors)." errors = true end if n_outneighbors > bounds.out_max - @error "Nodes of type $node_type can have at most $(bounds.out_max) $edge_type outneighbor(s) (got $n_outneighbors for node $id)." + @error "$id can have at most $(bounds.out_max) $edge_type outneighbor(s) (got $n_outneighbors)." errors = true end end @@ -521,7 +517,7 @@ function valid_discrete_control(p::Parameters, config::Config)::Bool if !isempty(truth_states_wrong_length) errors = true - @error "DiscreteControl node $id has $n_conditions condition(s), which is inconsistent with these truth state(s): $truth_states_wrong_length." + @error "$id has $n_conditions condition(s), which is inconsistent with these truth state(s): $truth_states_wrong_length." end # Check whether these control states are defined for the @@ -548,7 +544,7 @@ function valid_discrete_control(p::Parameters, config::Config)::Bool if !isempty(undefined_control_states) undefined_list = collect(undefined_control_states) node_type = typeof(node).name.name - @error "These control states from DiscreteControl node $id are not defined for controlled $node_type $id_outneighbor: $undefined_list." + @error "These control states from $id are not defined for controlled $id_outneighbor: $undefined_list." 
errors = true end end diff --git a/core/test/allocation_test.jl b/core/test/allocation_test.jl index 5ca7074d8..63d504143 100644 --- a/core/test/allocation_test.jl +++ b/core/test/allocation_test.jl @@ -23,17 +23,17 @@ end end - Ribasim.set_flow!(graph, NodeID(1), NodeID(2), 4.5) # Source flow + Ribasim.set_flow!(graph, NodeID(:FlowBoundary, 1), NodeID(:Basin, 2), 4.5) # Source flow allocation_model = p.allocation.allocation_models[1] Ribasim.allocate!(p, allocation_model, 0.0) F = allocation_model.problem[:F] - @test JuMP.value(F[(NodeID(2), NodeID(6))]) ≈ 0.0 - @test JuMP.value(F[(NodeID(2), NodeID(10))]) ≈ 0.5 - @test JuMP.value(F[(NodeID(8), NodeID(12))]) ≈ 0.0 - @test JuMP.value(F[(NodeID(6), NodeID(8))]) ≈ 0.0 - @test JuMP.value(F[(NodeID(1), NodeID(2))]) ≈ 0.5 - @test JuMP.value(F[(NodeID(6), NodeID(11))]) ≈ 0.0 + @test JuMP.value(F[(NodeID(:Basin, 2), NodeID(:Basin, 6))]) ≈ 0.0 + @test JuMP.value(F[(NodeID(:Basin, 2), NodeID(:User, 10))]) ≈ 0.5 + @test JuMP.value(F[(NodeID(:Basin, 8), NodeID(:User, 12))]) ≈ 0.0 + @test JuMP.value(F[(NodeID(:Basin, 6), NodeID(:Basin, 8))]) ≈ 0.0 + @test JuMP.value(F[(NodeID(:FlowBoundary, 1), NodeID(:Basin, 2))]) ≈ 0.5 + @test JuMP.value(F[(NodeID(:Basin, 6), NodeID(:User, 11))]) ≈ 0.0 allocated = p.user.allocated @test allocated[1] ≈ [0.0, 0.5] @@ -42,9 +42,9 @@ # Test getting and setting user demands (; user) = p - Ribasim.set_user_demand!(user, NodeID(11), 2, Float64(π)) + Ribasim.set_user_demand!(user, NodeID(:User, 11), 2, Float64(π)) @test user.demand[4] ≈ π - @test Ribasim.get_user_demand(user, NodeID(11), 2) ≈ π + @test Ribasim.get_user_demand(user, NodeID(:User, 11), 2) ≈ π end @testitem "Allocation objective types" begin @@ -65,12 +65,12 @@ end @test objective isa JuMP.QuadExpr # Quadratic expression F = problem[:F] @test JuMP.UnorderedPair{JuMP.VariableRef}( - F[(NodeID(4), NodeID(5))], - F[(NodeID(4), NodeID(5))], + F[(NodeID(:Basin, 4), NodeID(:User, 5))], + F[(NodeID(:Basin, 4), NodeID(:User, 5))], ) in keys(objective.terms) # F[4,5]^2 term @test JuMP.UnorderedPair{JuMP.VariableRef}( - F[(NodeID(4), NodeID(6))], - F[(NodeID(4), NodeID(6))], + F[(NodeID(:Basin, 4), NodeID(:User, 6))], + F[(NodeID(:Basin, 4), NodeID(:User, 6))], ) in keys(objective.terms) # F[4,6]^2 term config = Ribasim.Config(toml_path; allocation_objective_type = "quadratic_relative") @@ -82,12 +82,12 @@ end @test objective.aff.constant == 2.0 F = problem[:F] @test JuMP.UnorderedPair{JuMP.VariableRef}( - F[(NodeID(4), NodeID(5))], - F[(NodeID(4), NodeID(5))], + F[(NodeID(:Basin, 4), NodeID(:User, 5))], + F[(NodeID(:Basin, 4), NodeID(:User, 5))], ) in keys(objective.terms) # F[4,5]^2 term @test JuMP.UnorderedPair{JuMP.VariableRef}( - F[(NodeID(4), NodeID(6))], - F[(NodeID(4), NodeID(6))], + F[(NodeID(:Basin, 4), NodeID(:User, 6))], + F[(NodeID(:Basin, 4), NodeID(:User, 6))], ) in keys(objective.terms) # F[4,6]^2 term config = Ribasim.Config(toml_path; allocation_objective_type = "linear_absolute") @@ -100,12 +100,12 @@ end F = problem[:F] F_abs = problem[:F_abs] - @test objective.terms[F_abs[NodeID(5)]] == 1.0 - @test objective.terms[F_abs[NodeID(6)]] == 1.0 - @test objective.terms[F[(NodeID(4), NodeID(6))]] ≈ 0.125 - @test objective.terms[F[(NodeID(1), NodeID(2))]] ≈ 0.125 - @test objective.terms[F[(NodeID(4), NodeID(5))]] ≈ 0.125 - @test objective.terms[F[(NodeID(2), NodeID(4))]] ≈ 0.125 + @test objective.terms[F_abs[NodeID(:User, 5)]] == 1.0 + @test objective.terms[F_abs[NodeID(:User, 6)]] == 1.0 + @test objective.terms[F[(NodeID(:Basin, 4), NodeID(:User, 6))]] ≈ 
0.125 + @test objective.terms[F[(NodeID(:FlowBoundary, 1), NodeID(:Basin, 2))]] ≈ 0.125 + @test objective.terms[F[(NodeID(:Basin, 4), NodeID(:User, 5))]] ≈ 0.125 + @test objective.terms[F[(NodeID(:Basin, 2), NodeID(:Basin, 4))]] ≈ 0.125 config = Ribasim.Config(toml_path; allocation_objective_type = "linear_relative") model = Ribasim.run(config) @@ -117,12 +117,13 @@ end F = problem[:F] F_abs = problem[:F_abs] - @test objective.terms[F_abs[NodeID(5)]] == 1.0 - @test objective.terms[F_abs[NodeID(6)]] == 1.0 - @test objective.terms[F[(NodeID(4), NodeID(6))]] ≈ 62.585499316005475 - @test objective.terms[F[(NodeID(1), NodeID(2))]] ≈ 62.585499316005475 - @test objective.terms[F[(NodeID(4), NodeID(5))]] ≈ 62.585499316005475 - @test objective.terms[F[(NodeID(2), NodeID(4))]] ≈ 62.585499316005475 + @test objective.terms[F_abs[NodeID(:User, 5)]] == 1.0 + @test objective.terms[F_abs[NodeID(:User, 6)]] == 1.0 + @test objective.terms[F[(NodeID(:Basin, 4), NodeID(:User, 6))]] ≈ 62.585499316005475 + @test objective.terms[F[(NodeID(:FlowBoundary, 1), NodeID(:Basin, 2))]] ≈ + 62.585499316005475 + @test objective.terms[F[(NodeID(:Basin, 4), NodeID(:User, 5))]] ≈ 62.585499316005475 + @test objective.terms[F[(NodeID(:Basin, 2), NodeID(:Basin, 4))]] ≈ 62.585499316005475 end @testitem "Allocation with controlled fractional flow" begin @@ -139,12 +140,12 @@ end problem = model.integrator.p.allocation.allocation_models[1].problem F = problem[:F] @test JuMP.normalized_coefficient( - problem[:fractional_flow][(NodeID(3), NodeID(5))], - F[(NodeID(2), NodeID(3))], + problem[:fractional_flow][(NodeID(:TabulatedRatingCurve, 3), NodeID(:Basin, 5))], + F[(NodeID(:Basin, 2), NodeID(:TabulatedRatingCurve, 3))], ) ≈ -0.75 @test JuMP.normalized_coefficient( - problem[:fractional_flow][(NodeID(3), NodeID(8))], - F[(NodeID(2), NodeID(3))], + problem[:fractional_flow][(NodeID(:TabulatedRatingCurve, 3), NodeID(:Basin, 8))], + F[(NodeID(:Basin, 2), NodeID(:TabulatedRatingCurve, 3))], ) ≈ -0.25 solve!(model) @@ -161,8 +162,8 @@ end allocated_9_after = groups[(9, 1)][groups[(9, 1)].time .> t_control, :].allocated @test all( allocated_9_before ./ allocated_6_before .<= - control_mapping[(NodeID(7), "A")].fraction / - control_mapping[(NodeID(4), "A")].fraction, + control_mapping[(NodeID(:FractionalFlow, 7), "A")].fraction / + control_mapping[(NodeID(:FractionalFlow, 4), "A")].fraction, ) @test all(allocated_9_after ./ allocated_6_after .<= 1.0) @@ -172,12 +173,12 @@ end fractional_flow_constraints = model.integrator.p.allocation.allocation_models[1].problem[:fractional_flow] @test JuMP.normalized_coefficient( - problem[:fractional_flow][(NodeID(3), NodeID(5))], - F[(NodeID(2), NodeID(3))], + problem[:fractional_flow][(NodeID(:TabulatedRatingCurve, 3), NodeID(:Basin, 5))], + F[(NodeID(:Basin, 2), NodeID(:TabulatedRatingCurve, 3))], ) ≈ -0.75 @test JuMP.normalized_coefficient( - problem[:fractional_flow][(NodeID(3), NodeID(8))], - F[(NodeID(2), NodeID(3))], + problem[:fractional_flow][(NodeID(:TabulatedRatingCurve, 3), NodeID(:Basin, 8))], + F[(NodeID(:Basin, 2), NodeID(:TabulatedRatingCurve, 3))], ) ≈ -0.25 end @@ -202,30 +203,34 @@ end # Connections from main network to subnetworks @test isempty(main_network_connections[1]) - @test only(main_network_connections[2]) == (NodeID(2), NodeID(11)) - @test only(main_network_connections[3]) == (NodeID(6), NodeID(24)) - @test only(main_network_connections[4]) == (NodeID(10), NodeID(38)) + @test only(main_network_connections[2]) == (NodeID(:Basin, 2), NodeID(:Pump, 11)) + @test 
only(main_network_connections[3]) == (NodeID(:Basin, 6), NodeID(:Pump, 24)) + @test only(main_network_connections[4]) == (NodeID(:Basin, 10), NodeID(:Pump, 38)) # main-sub connections are part of main network allocation graph allocation_edges_main_network = graph[].edge_ids[1] - @test Tuple{NodeID, NodeID}[(2, 11), (6, 24), (10, 38)] ⊆ allocation_edges_main_network + @test [ + (NodeID(:Basin, 2), NodeID(:Pump, 11)), + (NodeID(:Basin, 6), NodeID(:Pump, 24)), + (NodeID(:Basin, 10), NodeID(:Pump, 38)), + ] ⊆ allocation_edges_main_network # Subnetworks interpreted as users require variables and constraints to # support absolute value expressions in the objective function allocation_model_main_network = Ribasim.get_allocation_model(p, 1) problem = allocation_model_main_network.problem - @test problem[:F_abs].axes[1] == NodeID[11, 24, 38] - @test problem[:abs_positive].axes[1] == NodeID[11, 24, 38] - @test problem[:abs_negative].axes[1] == NodeID[11, 24, 38] + @test problem[:F_abs].axes[1] == NodeID.(:Pump, [11, 24, 38]) + @test problem[:abs_positive].axes[1] == NodeID.(:Pump, [11, 24, 38]) + @test problem[:abs_negative].axes[1] == NodeID.(:Pump, [11, 24, 38]) # In each subnetwork, the connection from the main network to the subnetwork is # interpreted as a source @test Ribasim.get_allocation_model(p, 3).problem[:source].axes[1] == - Tuple{NodeID, NodeID}[(2, 11)] + [(NodeID(:Basin, 2), NodeID(:Pump, 11))] @test Ribasim.get_allocation_model(p, 5).problem[:source].axes[1] == - Tuple{NodeID, NodeID}[(6, 24)] + [(NodeID(:Basin, 6), NodeID(:Pump, 24))] @test Ribasim.get_allocation_model(p, 7).problem[:source].axes[1] == - Tuple{NodeID, NodeID}[(10, 38)] + [(NodeID(:Basin, 10), NodeID(:Pump, 38))] end @testitem "allocation with main network optimization problem" begin @@ -253,9 +258,11 @@ end Ribasim.allocate!(p, allocation_model, t; collect_demands = true) end - @test subnetwork_demands[(NodeID(2), NodeID(11))] ≈ [4.0, 4.0, 0.0] - @test subnetwork_demands[(NodeID(6), NodeID(24))] ≈ [0.001333333333, 0.0, 0.0] - @test subnetwork_demands[(NodeID(10), NodeID(38))] ≈ [0.001, 0.002, 0.002] + @test subnetwork_demands[(NodeID(:Basin, 2), NodeID(:Pump, 11))] ≈ [4.0, 4.0, 0.0] + @test subnetwork_demands[(NodeID(:Basin, 6), NodeID(:Pump, 24))] ≈ + [0.001333333333, 0.0, 0.0] + @test subnetwork_demands[(NodeID(:Basin, 10), NodeID(:Pump, 38))] ≈ + [0.001, 0.002, 0.002] # Solving for the main network, # containing subnetworks as users @@ -267,17 +274,19 @@ end objective = JuMP.objective_function(problem) objective_variables = keys(objective.terms) F_abs = problem[:F_abs] - @test F_abs[NodeID(11)] ∈ objective_variables - @test F_abs[NodeID(24)] ∈ objective_variables - @test F_abs[NodeID(38)] ∈ objective_variables + @test F_abs[NodeID(:Pump, 11)] ∈ objective_variables + @test F_abs[NodeID(:Pump, 24)] ∈ objective_variables + @test F_abs[NodeID(:Pump, 38)] ∈ objective_variables # Running full allocation algorithm - Ribasim.set_flow!(graph, NodeID(1), NodeID(2), 4.5) + Ribasim.set_flow!(graph, NodeID(:FlowBoundary, 1), NodeID(:Basin, 2), 4.5) Ribasim.update_allocation!((; p, t)) - @test subnetwork_allocateds[NodeID(2), NodeID(11)] ≈ [4.0, 0.49766666, 0.0] - @test subnetwork_allocateds[NodeID(6), NodeID(24)] ≈ [0.00133333333, 0.0, 0.0] - @test subnetwork_allocateds[NodeID(10), NodeID(38)] ≈ [0.001, 0.0, 0.0] + @test subnetwork_allocateds[NodeID(:Basin, 2), NodeID(:Pump, 11)] ≈ + [4.0, 0.49766666, 0.0] + @test subnetwork_allocateds[NodeID(:Basin, 6), NodeID(:Pump, 24)] ≈ + [0.00133333333, 0.0, 0.0] + @test 
subnetwork_allocateds[NodeID(:Basin, 10), NodeID(:Pump, 38)] ≈ [0.001, 0.0, 0.0] @test user.allocated[2] ≈ [4.0, 0.0, 0.0] @test user.allocated[7] ≈ [0.001, 0.0, 0.0] diff --git a/core/test/control_test.jl b/core/test/control_test.jl index 99aaa5af1..2a76aa402 100644 --- a/core/test/control_test.jl +++ b/core/test/control_test.jl @@ -1,5 +1,6 @@ @testitem "Pump discrete control" begin using PreallocationTools: get_tmp + using Ribasim: NodeID toml_path = normpath(@__DIR__, "../../generated_testmodels/pump_discrete_control/ribasim.toml") @@ -10,16 +11,16 @@ # Control input pump_control_mapping = p.pump.control_mapping - @test pump_control_mapping[(Ribasim.NodeID(4), "off")].flow_rate == 0 - @test pump_control_mapping[(Ribasim.NodeID(4), "on")].flow_rate == 1.0e-5 - - logic_mapping::Dict{Tuple{Ribasim.NodeID, String}, String} = Dict( - (Ribasim.NodeID(5), "TT") => "on", - (Ribasim.NodeID(6), "F") => "active", - (Ribasim.NodeID(5), "TF") => "off", - (Ribasim.NodeID(5), "FF") => "on", - (Ribasim.NodeID(5), "FT") => "off", - (Ribasim.NodeID(6), "T") => "inactive", + @test pump_control_mapping[(NodeID(:Pump, 4), "off")].flow_rate == 0 + @test pump_control_mapping[(NodeID(:Pump, 4), "on")].flow_rate == 1.0e-5 + + logic_mapping::Dict{Tuple{NodeID, String}, String} = Dict( + (NodeID(:DiscreteControl, 5), "TT") => "on", + (NodeID(:DiscreteControl, 6), "F") => "active", + (NodeID(:DiscreteControl, 5), "TF") => "off", + (NodeID(:DiscreteControl, 5), "FF") => "on", + (NodeID(:DiscreteControl, 5), "FT") => "off", + (NodeID(:DiscreteControl, 6), "T") => "inactive", ) @test discrete_control.logic_mapping == logic_mapping @@ -182,6 +183,8 @@ end end @testitem "Set PID target with DiscreteControl" begin + using Ribasim: NodeID + toml_path = normpath( @__DIR__, "../../generated_testmodels/discrete_control_of_pid_control/ribasim.toml", @@ -195,8 +198,9 @@ end level = Ribasim.get_storages_and_levels(model).level[1, :] target_high = - pid_control.control_mapping[(Ribasim.NodeID(6), "target_high")].target.u[1] - target_low = pid_control.control_mapping[(Ribasim.NodeID(6), "target_low")].target.u[1] + pid_control.control_mapping[(NodeID(:PidControl, 6), "target_high")].target.u[1] + target_low = + pid_control.control_mapping[(NodeID(:PidControl, 6), "target_low")].target.u[1] t_target_jump = discrete_control.record.time[2] t_idx_target_jump = searchsortedlast(timesteps, t_target_jump) diff --git a/core/test/create_test.jl b/core/test/create_test.jl index 946d6664f..838add56d 100644 --- a/core/test/create_test.jl +++ b/core/test/create_test.jl @@ -2,27 +2,27 @@ using MetaGraphsNext using Graphs using Logging - using Ribasim + using Ribasim: NodeID using Accessors: @set graph = MetaGraph( DiGraph(); - label_type = Ribasim.NodeID, + label_type = NodeID, vertex_data_type = Ribasim.NodeMetadata, edge_data_type = Symbol, graph_data = Tuple, ) - graph[Ribasim.NodeID(1)] = Ribasim.NodeMetadata(Symbol(:delft), 1) - graph[Ribasim.NodeID(2)] = Ribasim.NodeMetadata(Symbol(:denhaag), -1) + graph[NodeID(:Basin, 1)] = Ribasim.NodeMetadata(Symbol(:delft), 1) + graph[NodeID(:Basin, 2)] = Ribasim.NodeMetadata(Symbol(:denhaag), -1) graph[1, 2] = :yes - node_ids = Dict{Int, Set{Ribasim.NodeID}}() - node_ids[0] = Set{Ribasim.NodeID}() - node_ids[-1] = Set{Ribasim.NodeID}() - push!(node_ids[0], Ribasim.NodeID(1)) - push!(node_ids[-1], Ribasim.NodeID(2)) + node_ids = Dict{Int, Set{NodeID}}() + node_ids[0] = Set{NodeID}() + node_ids[-1] = Set{NodeID}() + push!(node_ids[0], NodeID(:Basin, 1)) + push!(node_ids[-1], NodeID(:Basin, 2)) 
graph_data = (; node_ids,) graph = @set graph.graph_data = graph_data @@ -45,35 +45,35 @@ end using MetaGraphsNext using Graphs using Logging - using Ribasim + using Ribasim: NodeID graph = MetaGraph( DiGraph(); - label_type = Ribasim.NodeID, + label_type = NodeID, vertex_data_type = Ribasim.NodeMetadata, edge_data_type = Symbol, graph_data = Tuple, ) - node_ids = Dict{Int, Set{Ribasim.NodeID}}() - node_ids[1] = Set{Ribasim.NodeID}() - push!(node_ids[1], Ribasim.NodeID(1)) - push!(node_ids[1], Ribasim.NodeID(2)) - push!(node_ids[1], Ribasim.NodeID(3)) - node_ids[2] = Set{Ribasim.NodeID}() - push!(node_ids[2], Ribasim.NodeID(4)) - push!(node_ids[2], Ribasim.NodeID(5)) - push!(node_ids[2], Ribasim.NodeID(6)) + node_ids = Dict{Int, Set{NodeID}}() + node_ids[1] = Set{NodeID}() + push!(node_ids[1], NodeID(:Basin, 1)) + push!(node_ids[1], NodeID(:Basin, 2)) + push!(node_ids[1], NodeID(:Basin, 3)) + node_ids[2] = Set{NodeID}() + push!(node_ids[2], NodeID(:Basin, 4)) + push!(node_ids[2], NodeID(:Basin, 5)) + push!(node_ids[2], NodeID(:Basin, 6)) - graph[Ribasim.NodeID(1)] = Ribasim.NodeMetadata(Symbol(:delft), 1) - graph[Ribasim.NodeID(2)] = Ribasim.NodeMetadata(Symbol(:denhaag), 1) - graph[Ribasim.NodeID(3)] = Ribasim.NodeMetadata(Symbol(:rdam), 1) - graph[Ribasim.NodeID(4)] = Ribasim.NodeMetadata(Symbol(:adam), 2) - graph[Ribasim.NodeID(5)] = Ribasim.NodeMetadata(Symbol(:utrecht), 2) - graph[Ribasim.NodeID(6)] = Ribasim.NodeMetadata(Symbol(:leiden), 2) + graph[NodeID(:Basin, 1)] = Ribasim.NodeMetadata(Symbol(:delft), 1) + graph[NodeID(:Basin, 2)] = Ribasim.NodeMetadata(Symbol(:denhaag), 1) + graph[NodeID(:Basin, 3)] = Ribasim.NodeMetadata(Symbol(:rdam), 1) + graph[NodeID(:Basin, 4)] = Ribasim.NodeMetadata(Symbol(:adam), 2) + graph[NodeID(:Basin, 5)] = Ribasim.NodeMetadata(Symbol(:utrecht), 2) + graph[NodeID(:Basin, 6)] = Ribasim.NodeMetadata(Symbol(:leiden), 2) - graph[Ribasim.NodeID(1), Ribasim.NodeID(2)] = :yes - graph[Ribasim.NodeID(1), Ribasim.NodeID(3)] = :yes + graph[NodeID(:Basin, 1), NodeID(:Basin, 2)] = :yes + graph[NodeID(:Basin, 1), NodeID(:Basin, 3)] = :yes graph[4, 5] = :yes logger = TestLogger() diff --git a/core/test/io_test.jl b/core/test/io_test.jl index 3c3eb6bad..a22db944d 100644 --- a/core/test/io_test.jl +++ b/core/test/io_test.jl @@ -50,9 +50,13 @@ end end @testitem "findlastgroup" begin - @test Ribasim.findlastgroup(2, [5, 4, 2, 2, 5, 2, 2, 2, 1]) === 6:8 - @test Ribasim.findlastgroup(2, [2]) === 1:1 - @test Ribasim.findlastgroup(3, [5, 4, 2, 2, 5, 2, 2, 2, 1]) === 1:0 + using Ribasim: NodeID, findlastgroup + + @test findlastgroup(NodeID(:Pump, 2), NodeID.(:Pump, [5, 4, 2, 2, 5, 2, 2, 2, 1])) === + 6:8 + @test findlastgroup(NodeID(:Pump, 2), NodeID.(:Pump, [2])) === 1:1 + @test findlastgroup(NodeID(:Pump, 3), NodeID.(:Pump, [5, 4, 2, 2, 5, 2, 2, 2, 1])) === + 1:0 end @testitem "table sort" begin diff --git a/core/test/run_models_test.jl b/core/test/run_models_test.jl index 74f74668c..23b5ae794 100644 --- a/core/test/run_models_test.jl +++ b/core/test/run_models_test.jl @@ -458,8 +458,12 @@ end @test all(isapprox.(h_expected, h_actual; atol = 0.02)) # Test for conservation of mass, flow at the beginning == flow at the end n_self_loops = length(p.graph[].flow_dict) - @test Ribasim.get_flow(p.graph, NodeID(1), NodeID(2), 0) ≈ 5.0 atol = 0.001 skip = - Sys.isapple() - @test Ribasim.get_flow(p.graph, NodeID(101), NodeID(102), 0) ≈ 5.0 atol = 0.001 skip = - Sys.isapple() + @test Ribasim.get_flow(p.graph, NodeID(:FlowBoundary, 1), NodeID(:Basin, 2), 0) ≈ 5.0 atol = + 0.001 skip = 
Sys.isapple() + @test Ribasim.get_flow( + p.graph, + NodeID(:ManningResistance, 101), + NodeID(:LevelBoundary, 102), + 0, + ) ≈ 5.0 atol = 0.001 skip = Sys.isapple() end diff --git a/core/test/utils_test.jl b/core/test/utils_test.jl index d4263a878..a9784bc20 100644 --- a/core/test/utils_test.jl +++ b/core/test/utils_test.jl @@ -1,22 +1,21 @@ @testitem "NodeID" begin using Ribasim: NodeID - id = NodeID(2) - @test sprint(show, id) === "#2" - @test id < NodeID(3) + + id = NodeID(:Basin, 2) + @test sprint(show, id) === "Basin #2" + @test id < NodeID(:Basin, 3) + @test_throws ErrorException id < NodeID(:Pump, 3) @test Int(id) === 2 @test convert(Int, id) === 2 - @test convert(NodeID, 2) === NodeID(2) - a = [1, 0, 3] - a[id] = id - @test a[2] === 2 end @testitem "id_index" begin using Dictionaries: Indices + using Ribasim: NodeID - ids = Indices(Ribasim.NodeID[2, 4, 6]) - @test Ribasim.id_index(ids, Ribasim.NodeID(4)) === (true, 2) - @test Ribasim.id_index(ids, Ribasim.NodeID(5)) === (false, 0) + ids = Indices(NodeID.(:Basin, [2, 4, 6])) + @test Ribasim.id_index(ids, NodeID(:Basin, 4)) === (true, 2) + @test Ribasim.id_index(ids, NodeID(:Basin, 5)) === (false, 0) end @testitem "profile_storage" begin @@ -29,6 +28,7 @@ end @testitem "bottom" begin using Dictionaries: Indices using StructArrays: StructVector + using Ribasim: NodeID # create two basins with different bottoms/levels area = [[0.01, 1.0], [0.01, 1.0]] @@ -36,7 +36,7 @@ end darea = zeros(2) storage = Ribasim.profile_storage.(level, area) basin = Ribasim.Basin( - Indices(Ribasim.NodeID[5, 7]), + Indices(NodeID.(:Basin, [5, 7])), [2.0, 3.0], [2.0, 3.0], [2.0, 3.0], @@ -50,32 +50,32 @@ end ) @test basin.level[2][1] === 4.0 - @test Ribasim.basin_bottom(basin, Ribasim.NodeID(5)) === 0.0 - @test Ribasim.basin_bottom(basin, Ribasim.NodeID(7)) === 4.0 - @test Ribasim.basin_bottom(basin, Ribasim.NodeID(6)) === nothing + @test Ribasim.basin_bottom(basin, NodeID(:Basin, 5)) === 0.0 + @test Ribasim.basin_bottom(basin, NodeID(:Basin, 7)) === 4.0 + @test Ribasim.basin_bottom(basin, NodeID(:Basin, 6)) === nothing @test Ribasim.basin_bottoms( basin, - Ribasim.NodeID(5), - Ribasim.NodeID(7), - Ribasim.NodeID(6), + NodeID(:Basin, 5), + NodeID(:Basin, 7), + NodeID(:Pump, 6), ) === (0.0, 4.0) @test Ribasim.basin_bottoms( basin, - Ribasim.NodeID(5), - Ribasim.NodeID(0), - Ribasim.NodeID(6), + NodeID(:Basin, 5), + NodeID(:Basin, 0), + NodeID(:Pump, 6), ) === (0.0, 0.0) @test Ribasim.basin_bottoms( basin, - Ribasim.NodeID(0), - Ribasim.NodeID(7), - Ribasim.NodeID(6), + NodeID(:Basin, 0), + NodeID(:Basin, 7), + NodeID(:Pump, 6), ) === (4.0, 4.0) - @test_throws "No bottom defined on either side of #6" Ribasim.basin_bottoms( + @test_throws "No bottom defined on either side of Pump #6" Ribasim.basin_bottoms( basin, - Ribasim.NodeID(0), - Ribasim.NodeID(1), - Ribasim.NodeID(6), + NodeID(:Basin, 0), + NodeID(:Basin, 1), + NodeID(:Pump, 6), ) end @@ -83,6 +83,7 @@ end using Dictionaries: Indices using StructArrays: StructVector using Logging + using Ribasim: NodeID level = [ 0.0, @@ -110,7 +111,7 @@ end ] storage = Ribasim.profile_storage(level, area) basin = Ribasim.Basin( - Indices(Ribasim.NodeID[1]), + Indices(NodeID.(:Basin, [1])), zeros(1), zeros(1), zeros(1), @@ -131,7 +132,7 @@ end @test length(logger.logs) == 1 @test logger.logs[1].level == Error @test logger.logs[1].message == - "The level -1.0 of basin #1 is lower than the bottom of this basin 0.0." + "The level -1.0 of Basin #1 is lower than the bottom of this basin; 0.0." 
# Converting from storages to levels and back should return the same storages storages = range(0.0, 2 * storage[end], 50) @@ -142,19 +143,21 @@ end end @testitem "Expand logic_mapping" begin - logic_mapping = Dict{Tuple{Ribasim.NodeID, String}, String}() - logic_mapping[(Ribasim.NodeID(1), "*T*")] = "foo" - logic_mapping[(Ribasim.NodeID(2), "FF")] = "bar" + using Ribasim: NodeID + + logic_mapping = Dict{Tuple{NodeID, String}, String}() + logic_mapping[(NodeID(:DiscreteControl, 1), "*T*")] = "foo" + logic_mapping[(NodeID(:DiscreteControl, 2), "FF")] = "bar" logic_mapping_expanded = Ribasim.expand_logic_mapping(logic_mapping) - @test logic_mapping_expanded[(Ribasim.NodeID(1), "TTT")] == "foo" - @test logic_mapping_expanded[(Ribasim.NodeID(1), "FTT")] == "foo" - @test logic_mapping_expanded[(Ribasim.NodeID(1), "TTF")] == "foo" - @test logic_mapping_expanded[(Ribasim.NodeID(1), "FTF")] == "foo" - @test logic_mapping_expanded[(Ribasim.NodeID(2), "FF")] == "bar" + @test logic_mapping_expanded[(NodeID(:DiscreteControl, 1), "TTT")] == "foo" + @test logic_mapping_expanded[(NodeID(:DiscreteControl, 1), "FTT")] == "foo" + @test logic_mapping_expanded[(NodeID(:DiscreteControl, 1), "TTF")] == "foo" + @test logic_mapping_expanded[(NodeID(:DiscreteControl, 1), "FTF")] == "foo" + @test logic_mapping_expanded[(NodeID(:DiscreteControl, 2), "FF")] == "bar" @test length(logic_mapping_expanded) == 5 - new_key = (Ribasim.NodeID(3), "duck") + new_key = (NodeID(:DiscreteControl, 3), "duck") logic_mapping[new_key] = "quack" @test_throws "Truth state 'duck' contains illegal characters or is empty." Ribasim.expand_logic_mapping( @@ -163,7 +166,7 @@ end delete!(logic_mapping, new_key) - new_key = (Ribasim.NodeID(3), "") + new_key = (NodeID(:DiscreteControl, 3), "") logic_mapping[new_key] = "bar" @test_throws "Truth state '' contains illegal characters or is empty." Ribasim.expand_logic_mapping( @@ -172,16 +175,16 @@ end delete!(logic_mapping, new_key) - new_key = (Ribasim.NodeID(1), "FTT") + new_key = (NodeID(:DiscreteControl, 1), "FTT") logic_mapping[new_key] = "foo" # This should not throw an error, as although "FTT" for node_id = 1 is already covered above, this is consistent Ribasim.expand_logic_mapping(logic_mapping) - new_key = (Ribasim.NodeID(1), "TTF") + new_key = (NodeID(:DiscreteControl, 1), "TTF") logic_mapping[new_key] = "bar" - @test_throws "AssertionError: Multiple control states found for DiscreteControl node #1 for truth state `TTF`: [\"bar\", \"foo\"]." Ribasim.expand_logic_mapping( + @test_throws "AssertionError: Multiple control states found for DiscreteControl #1 for truth state `TTF`: [\"bar\", \"foo\"]." 
Ribasim.expand_logic_mapping( logic_mapping, ) end @@ -249,11 +252,46 @@ end @testitem "low_storage_factor" begin using Ribasim: NodeID, low_storage_factor, Indices - @test low_storage_factor([-2.0], Indices(NodeID[5]), NodeID(5), 2.0) === 0.0 - @test low_storage_factor([0.0f0], Indices(NodeID[5]), NodeID(5), 2.0) === 0.0f0 - @test low_storage_factor([0.0], Indices(NodeID[5]), NodeID(5), 2.0) === 0.0 - @test low_storage_factor([1.0f0], Indices(NodeID[5]), NodeID(5), 2.0) === 0.5f0 - @test low_storage_factor([1.0], Indices(NodeID[5]), NodeID(5), 2.0) === 0.5 - @test low_storage_factor([3.0f0], Indices(NodeID[5]), NodeID(5), 2.0) === 1.0f0 - @test low_storage_factor([3.0], Indices(NodeID[5]), NodeID(5), 2.0) === 1.0 + @test low_storage_factor( + [-2.0], + Indices(NodeID.(:Basin, [5])), + NodeID(:Basin, 5), + 2.0, + ) === 0.0 + @test low_storage_factor( + [0.0f0], + Indices(NodeID.(:Basin, [5])), + NodeID(:Basin, 5), + 2.0, + ) === 0.0f0 + @test low_storage_factor( + [0.0], + Indices(NodeID.(:Basin, [5])), + NodeID(:Basin, 5), + 2.0, + ) === 0.0 + @test low_storage_factor( + [1.0f0], + Indices(NodeID.(:Basin, [5])), + NodeID(:Basin, 5), + 2.0, + ) === 0.5f0 + @test low_storage_factor( + [1.0], + Indices(NodeID.(:Basin, [5])), + NodeID(:Basin, 5), + 2.0, + ) === 0.5 + @test low_storage_factor( + [3.0f0], + Indices(NodeID.(:Basin, [5])), + NodeID(:Basin, 5), + 2.0, + ) === 1.0f0 + @test low_storage_factor( + [3.0], + Indices(NodeID.(:Basin, [5])), + NodeID(:Basin, 5), + 2.0, + ) === 1.0 end diff --git a/core/test/validation_test.jl b/core/test/validation_test.jl index df2119a59..2f7f5a250 100644 --- a/core/test/validation_test.jl +++ b/core/test/validation_test.jl @@ -4,7 +4,7 @@ using DataInterpolations: LinearInterpolation using Logging - node_id = Indices([NodeID(1)]) + node_id = Indices([NodeID(:Basin, 1)]) level = [[0.0, 0.0, 1.0]] area = [[0.0, 100.0, 90]] @@ -19,12 +19,11 @@ "Basin #1 has repeated levels, this cannot be interpolated." @test logger.logs[2].level == Error @test logger.logs[2].message == - "Basin profiles cannot start with area <= 0 at the bottom for numerical reasons." - @test logger.logs[2].kwargs[:node_id] == NodeID(1) + "Basin #1 profile cannot start with area <= 0 at the bottom for numerical reasons." @test logger.logs[2].kwargs[:area] == 0 @test logger.logs[3].level == Error @test logger.logs[3].message == - "Basin profiles cannot have decreasing area at the top since extrapolating could lead to negative areas, found decreasing top areas for node #1." + "Basin #1 profile cannot have decreasing area at the top since extrapolating could lead to negative areas." itp, valid = qh_interpolation([0.0, 0.0], [1.0, 2.0]) @test !valid @@ -55,10 +54,10 @@ end @test length(logger.logs) == 2 @test logger.logs[1].level == Error @test logger.logs[1].message == - "A Q(h) relationship for TabulatedRatingCurve \"\" #1 from the static table has repeated levels, this can not be interpolated." + "A Q(h) relationship for TabulatedRatingCurve #1 from the static table has repeated levels, this can not be interpolated." @test logger.logs[2].level == Error @test logger.logs[2].message == - "A Q(h) relationship for TabulatedRatingCurve \"\" #2 from the time table has repeated levels, this can not be interpolated." + "A Q(h) relationship for TabulatedRatingCurve #2 from the time table has repeated levels, this can not be interpolated." end @testitem "Neighbor count validation" begin @@ -75,29 +74,30 @@ end graph_data = nothing, ) - for i in 1:6 - type = i in [1, 6] ? 
diff --git a/core/test/validation_test.jl b/core/test/validation_test.jl
index df2119a59..2f7f5a250 100644
--- a/core/test/validation_test.jl
+++ b/core/test/validation_test.jl
@@ -4,7 +4,7 @@
     using DataInterpolations: LinearInterpolation
     using Logging

-    node_id = Indices([NodeID(1)])
+    node_id = Indices([NodeID(:Basin, 1)])
     level = [[0.0, 0.0, 1.0]]
     area = [[0.0, 100.0, 90]]
@@ -19,12 +19,11 @@
           "Basin #1 has repeated levels, this cannot be interpolated."
     @test logger.logs[2].level == Error
     @test logger.logs[2].message ==
-          "Basin profiles cannot start with area <= 0 at the bottom for numerical reasons."
-    @test logger.logs[2].kwargs[:node_id] == NodeID(1)
+          "Basin #1 profile cannot start with area <= 0 at the bottom for numerical reasons."
     @test logger.logs[2].kwargs[:area] == 0
     @test logger.logs[3].level == Error
     @test logger.logs[3].message ==
-          "Basin profiles cannot have decreasing area at the top since extrapolating could lead to negative areas, found decreasing top areas for node #1."
+          "Basin #1 profile cannot have decreasing area at the top since extrapolating could lead to negative areas."

     itp, valid = qh_interpolation([0.0, 0.0], [1.0, 2.0])
     @test !valid
@@ -55,10 +54,10 @@ end
     @test length(logger.logs) == 2
     @test logger.logs[1].level == Error
     @test logger.logs[1].message ==
-          "A Q(h) relationship for TabulatedRatingCurve \"\" #1 from the static table has repeated levels, this can not be interpolated."
+          "A Q(h) relationship for TabulatedRatingCurve #1 from the static table has repeated levels, this can not be interpolated."
     @test logger.logs[2].level == Error
     @test logger.logs[2].message ==
-          "A Q(h) relationship for TabulatedRatingCurve \"\" #2 from the time table has repeated levels, this can not be interpolated."
+          "A Q(h) relationship for TabulatedRatingCurve #2 from the time table has repeated levels, this can not be interpolated."
 end

 @testitem "Neighbor count validation" begin
@@ -75,29 +74,30 @@ end
         graph_data = nothing,
     )

-    for i in 1:6
-        type = i in [1, 6] ? :pump : :other
-        graph[NodeID(i)] = NodeMetadata(type, 9)
-    end
+    graph[NodeID(:Pump, 1)] = NodeMetadata(:pump, 9)
+    graph[NodeID(:Basin, 2)] = NodeMetadata(:pump, 9)
+    graph[NodeID(:Basin, 3)] = NodeMetadata(:pump, 9)
+    graph[NodeID(:Basin, 4)] = NodeMetadata(:pump, 9)
+    graph[NodeID(:FractionalFlow, 5)] = NodeMetadata(:pump, 9)
+    graph[NodeID(:Pump, 6)] = NodeMetadata(:pump, 9)

     function set_edge_metadata!(id_1, id_2, edge_type)
-        graph[NodeID(id_1), NodeID(id_2)] =
-            EdgeMetadata(0, edge_type, 0, NodeID(id_1), NodeID(id_2), false, NodeID[])
+        graph[id_1, id_2] = EdgeMetadata(0, edge_type, 0, id_1, id_2, false, NodeID[])
         return nothing
     end

-    set_edge_metadata!(2, 1, EdgeType.flow)
-    set_edge_metadata!(3, 1, EdgeType.flow)
-    set_edge_metadata!(6, 2, EdgeType.flow)
-    set_edge_metadata!(5, 6, EdgeType.control)
+    set_edge_metadata!(NodeID(:Basin, 2), NodeID(:Pump, 1), EdgeType.flow)
+    set_edge_metadata!(NodeID(:Basin, 3), NodeID(:Pump, 1), EdgeType.flow)
+    set_edge_metadata!(NodeID(:Pump, 6), NodeID(:Basin, 2), EdgeType.flow)
+    set_edge_metadata!(NodeID(:FractionalFlow, 5), NodeID(:Pump, 6), EdgeType.control)

     pump = Ribasim.Pump(
-        Ribasim.NodeID[1, 6],
+        NodeID.(:Pump, [1, 6]),
         [true, true],
         [0.0, 0.0],
         [0.0, 0.0],
         [1.0, 1.0],
-        Dict{Tuple{Ribasim.NodeID, String}, NamedTuple}(),
+        Dict{Tuple{NodeID, String}, NamedTuple}(),
         falses(2),
     )
@@ -108,21 +108,23 @@ end
     @test length(logger.logs) == 3
     @test logger.logs[1].level == Error
-    @test logger.logs[1].message ==
-          "Nodes of type Ribasim.Pump{Vector{Float64}} can have at most 1 flow inneighbor(s) (got 2 for node #1)."
+    @test logger.logs[1].message == "Pump #1 can have at most 1 flow inneighbor(s) (got 2)."
     @test logger.logs[2].level == Error
     @test logger.logs[2].message ==
-          "Nodes of type Ribasim.Pump{Vector{Float64}} must have at least 1 flow outneighbor(s) (got 0 for node #1)."
+          "Pump #1 must have at least 1 flow outneighbor(s) (got 0)."
     @test logger.logs[3].level == Error
     @test logger.logs[3].message ==
-          "Nodes of type Ribasim.Pump{Vector{Float64}} must have at least 1 flow inneighbor(s) (got 0 for node #6)."
+          "Pump #6 must have at least 1 flow inneighbor(s) (got 0)."

-    set_edge_metadata!(2, 5, EdgeType.flow)
-    set_edge_metadata!(5, 3, EdgeType.flow)
-    set_edge_metadata!(5, 4, EdgeType.flow)
+    set_edge_metadata!(NodeID(:Basin, 2), NodeID(:FractionalFlow, 5), EdgeType.flow)
+    set_edge_metadata!(NodeID(:FractionalFlow, 5), NodeID(:Basin, 3), EdgeType.flow)
+    set_edge_metadata!(NodeID(:FractionalFlow, 5), NodeID(:Basin, 4), EdgeType.flow)

-    fractional_flow =
-        Ribasim.FractionalFlow([NodeID(5)], [1.0], Dict{Tuple{Int, String}, NamedTuple}())
+    fractional_flow = Ribasim.FractionalFlow(
+        [NodeID(:FractionalFlow, 5)],
+        [1.0],
+        Dict{Tuple{Int, String}, NamedTuple}(),
+    )

     logger = TestLogger(; min_level = Debug)
     with_logger(logger) do
@@ -132,10 +134,10 @@ end
     @test length(logger.logs) == 2
     @test logger.logs[1].level == Error
     @test logger.logs[1].message ==
-          "Nodes of type Ribasim.FractionalFlow can have at most 1 flow outneighbor(s) (got 2 for node #5)."
+          "FractionalFlow #5 can have at most 1 flow outneighbor(s) (got 2)."
     @test logger.logs[2].level == Error
     @test logger.logs[2].message ==
-          "Nodes of type Ribasim.FractionalFlow can have at most 0 control outneighbor(s) (got 1 for node #5)."
+          "FractionalFlow #5 can have at most 0 control outneighbor(s) (got 1)."

     @test_throws "'n_neighbor_bounds_flow' not defined for Val{:foo}()." Ribasim.n_neighbor_bounds_flow(
         :foo,
@@ -152,9 +154,9 @@ end
     using MetaGraphsNext: MetaGraph
     using Ribasim: NodeID, NodeMetadata, EdgeMetadata, NodeID, EdgeType

-    pid_control_node_id = NodeID[1, 6]
-    pid_control_listen_node_id = NodeID[3, 5]
-    pump_node_id = NodeID[2, 4]
+    pid_control_node_id = NodeID.(:PidControl, [1, 6])
+    pid_control_listen_node_id = [NodeID(:Terminal, 3), NodeID(:Basin, 5)]
+    pump_node_id = NodeID.(:Pump, [2, 4])

     graph = MetaGraph(
         DiGraph();
@@ -164,27 +166,26 @@ end
         graph_data = nothing,
     )

-    graph[NodeID(1)] = NodeMetadata(:pid_control, 0)
-    graph[NodeID(6)] = NodeMetadata(:pid_control, 0)
-    graph[NodeID(2)] = NodeMetadata(:pump, 0)
-    graph[NodeID(4)] = NodeMetadata(:pump, 0)
-    graph[NodeID(3)] = NodeMetadata(:something_else, 0)
-    graph[NodeID(5)] = NodeMetadata(:basin, 0)
-    graph[NodeID(7)] = NodeMetadata(:basin, 0)
+    graph[NodeID(:PidControl, 1)] = NodeMetadata(:pid_control, 0)
+    graph[NodeID(:PidControl, 6)] = NodeMetadata(:pid_control, 0)
+    graph[NodeID(:Pump, 2)] = NodeMetadata(:pump, 0)
+    graph[NodeID(:Pump, 4)] = NodeMetadata(:pump, 0)
+    graph[NodeID(:Terminal, 3)] = NodeMetadata(:something_else, 0)
+    graph[NodeID(:Basin, 5)] = NodeMetadata(:basin, 0)
+    graph[NodeID(:Basin, 7)] = NodeMetadata(:basin, 0)

     function set_edge_metadata!(id_1, id_2, edge_type)
-        graph[NodeID(id_1), NodeID(id_2)] =
-            EdgeMetadata(0, edge_type, 0, NodeID(id_1), NodeID(id_2), false, NodeID[])
+        graph[id_1, id_2] = EdgeMetadata(0, edge_type, 0, id_1, id_2, false, NodeID[])
         return nothing
     end

-    set_edge_metadata!(3, 4, EdgeType.flow)
-    set_edge_metadata!(7, 2, EdgeType.flow)
+    set_edge_metadata!(NodeID(:Terminal, 3), NodeID(:Pump, 4), EdgeType.flow)
+    set_edge_metadata!(NodeID(:Basin, 7), NodeID(:Pump, 2), EdgeType.flow)

-    set_edge_metadata!(1, 4, EdgeType.control)
-    set_edge_metadata!(6, 2, EdgeType.control)
+    set_edge_metadata!(NodeID(:PidControl, 1), NodeID(:Pump, 4), EdgeType.control)
+    set_edge_metadata!(NodeID(:PidControl, 6), NodeID(:Pump, 2), EdgeType.control)

-    basin_node_id = Indices(NodeID[5, 7])
+    basin_node_id = Indices(NodeID.(:Basin, [5, 7]))

     logger = TestLogger()
     with_logger(logger) do
@@ -199,10 +200,11 @@ end
     @test length(logger.logs) == 2
     @test logger.logs[1].level == Error
-    @test logger.logs[1].message == "Listen node #3 of PidControl node #1 is not a Basin"
+    @test logger.logs[1].message ==
+          "Listen node Terminal #3 of PidControl #1 is not a Basin"
     @test logger.logs[2].level == Error
     @test logger.logs[2].message ==
-          "Listen node #5 of PidControl node #6 is not upstream of controlled pump #2"
+          "Listen node Basin #5 of PidControl #6 is not upstream of controlled Pump #2"
 end

 @testitem "FractionalFlow validation" begin
@@ -235,24 +237,24 @@ end
     @test length(logger.logs) == 4
     @test logger.logs[1].level == Error
     @test logger.logs[1].message ==
-          "Node #7 combines fractional flow outneighbors with other outneigbor types."
+          "TabulatedRatingCurve #7 combines fractional flow outneighbors with other outneigbor types."
     @test logger.logs[2].level == Error
     @test logger.logs[2].message ==
           "Fractional flow nodes must have non-negative fractions."
-    @test logger.logs[2].kwargs[:node_id] == NodeID(3)
+    @test logger.logs[2].kwargs[:node_id] == NodeID(:FractionalFlow, 3)
     @test logger.logs[2].kwargs[:fraction] ≈ -0.1
     @test logger.logs[2].kwargs[:control_state] == ""
     @test logger.logs[3].level == Error
     @test logger.logs[3].message ==
           "The sum of fractional flow fractions leaving a node must be ≈1."
-    @test logger.logs[3].kwargs[:node_id] == NodeID(7)
+    @test logger.logs[3].kwargs[:node_id] == NodeID(:TabulatedRatingCurve, 7)
     @test logger.logs[3].kwargs[:fraction_sum] ≈ 0.4
     @test logger.logs[3].kwargs[:control_state] == ""
     @test logger.logs[4].level == Error
     @test logger.logs[4].message == "Cannot connect a basin to a fractional_flow."
     @test logger.logs[4].kwargs[:edge_id] == 6
-    @test logger.logs[4].kwargs[:id_src] == NodeID(2)
-    @test logger.logs[4].kwargs[:id_dst] == NodeID(8)
+    @test logger.logs[4].kwargs[:id_src] == NodeID(:Basin, 2)
+    @test logger.logs[4].kwargs[:id_dst] == NodeID(:FractionalFlow, 8)
 end

 @testitem "DiscreteControl logic validation" begin
@@ -278,55 +280,55 @@ end
     @test length(logger.logs) == 5
     @test logger.logs[1].level == Error
     @test logger.logs[1].message ==
-          "DiscreteControl node #5 has 3 condition(s), which is inconsistent with these truth state(s): [\"FFFF\"]."
+          "DiscreteControl #5 has 3 condition(s), which is inconsistent with these truth state(s): [\"FFFF\"]."
     @test logger.logs[2].level == Error
     @test logger.logs[2].message ==
-          "These control states from DiscreteControl node #5 are not defined for controlled Pump #2: [\"foo\"]."
+          "These control states from DiscreteControl #5 are not defined for controlled Pump #2: [\"foo\"]."
     @test logger.logs[3].level == Error
     @test logger.logs[3].message ==
-          "Look ahead supplied for non-timeseries listen variable 'level' from listen node #1."
+          "Look ahead supplied for non-timeseries listen variable 'level' from listen node Basin #1."
     @test logger.logs[4].level == Error
     @test logger.logs[4].message ==
-          "Look ahead for listen variable 'flow_rate' from listen node #4 goes past timeseries end during simulation."
+          "Look ahead for listen variable 'flow_rate' from listen node FlowBoundary #4 goes past timeseries end during simulation."
     @test logger.logs[5].level == Error
     @test logger.logs[5].message ==
-          "Negative look ahead supplied for listen variable 'flow_rate' from listen node #4."
+          "Negative look ahead supplied for listen variable 'flow_rate' from listen node FlowBoundary #4."
 end

 @testitem "Pump/outlet flow rate sign validation" begin
     using Logging
+    using Ribasim: NodeID

     logger = TestLogger()
     with_logger(logger) do
         @test_throws "Invalid Outlet flow rate(s)." Ribasim.Outlet(
-            [Ribasim.NodeID(1)],
+            [NodeID(:Outlet, 1)],
             [true],
             [-1.0],
             [NaN],
             [NaN],
             [NaN],
-            Dict{Tuple{Ribasim.NodeID, String}, NamedTuple}(),
+            Dict{Tuple{NodeID, String}, NamedTuple}(),
             [false],
         )
     end

     @test length(logger.logs) == 1
     @test logger.logs[1].level == Error
-    @test logger.logs[1].message ==
-          "Outlet flow rates must be non-negative, found -1.0 for static #1."
+    @test logger.logs[1].message == "Outlet #1 flow rates must be non-negative, found -1.0."

     logger = TestLogger()
     with_logger(logger) do
         @test_throws "Invalid Pump flow rate(s)." Ribasim.Pump(
-            Ribasim.NodeID[1],
+            [NodeID(:Pump, 1)],
             [true],
             [-1.0],
             [NaN],
             [NaN],
-            Dict{Tuple{Ribasim.NodeID, String}, NamedTuple}(
-                (Ribasim.NodeID(1), "foo") => (; flow_rate = -1.0),
+            Dict{Tuple{NodeID, String}, NamedTuple}(
+                (NodeID(:Pump, 1), "foo") => (; flow_rate = -1.0),
             ),
             [false],
         )
@@ -336,7 +338,7 @@ end
     @test length(logger.logs) == 1
     @test logger.logs[1].level == Error
     @test logger.logs[1].message ==
-          "Pump flow rates must be non-negative, found -1.0 for control state 'foo' of #1."
+          "Pump #1 flow rates must be non-negative, found -1.0 for control state 'foo'."
 end
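The two constructor tests above fix the new wording for invalid flow rates: the node ID, now printed with its type, leads the message, and control states are reported per `control_mapping` entry. As a rough sketch of the kind of non-negativity check behind those messages, with hypothetical names and not Ribasim's actual validation code:

```julia
# Hypothetical sketch: check static flow rates and control-state flow rates,
# logging one error per offending value, as the expected messages above suggest.
function check_flow_rates(node_id, flow_rate, control_mapping)::Bool
    ok = true
    for (id, rate) in zip(node_id, flow_rate)
        if rate < 0
            @error "$id flow rates must be non-negative, found $rate."
            ok = false
        end
    end
    for ((id, control_state), fields) in control_mapping
        if haskey(fields, :flow_rate) && fields.flow_rate < 0
            @error "$id flow rates must be non-negative, found $(fields.flow_rate) for control state '$control_state'."
            ok = false
        end
    end
    return ok
end
```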

 @testitem "Edge type validation" begin
@@ -368,25 +370,24 @@ end
     using Ribasim: valid_subgrid, NodeID
     using Logging

-    node_to_basin = Dict(NodeID(9) => 1)
+    node_to_basin = Dict(NodeID(:Basin, 9) => 1)

     logger = TestLogger()
     with_logger(logger) do
-        @test !valid_subgrid(1, NodeID(10), node_to_basin, [-1.0, 0.0], [-1.0, 0.0])
+        @test !valid_subgrid(1, NodeID(:Basin, 10), node_to_basin, [-1.0, 0.0], [-1.0, 0.0])
     end

     @test length(logger.logs) == 1
     @test logger.logs[1].level == Error
-    @test logger.logs[1].message ==
-          "The node_id of the Basin / subgrid_level does not refer to a basin."
-    @test logger.logs[1].kwargs[:node_id] == NodeID(10)
+    @test logger.logs[1].message == "The node_id of the Basin / subgrid does not exist."
+    @test logger.logs[1].kwargs[:node_id] == NodeID(:Basin, 10)
     @test logger.logs[1].kwargs[:subgrid_id] == 1

     logger = TestLogger()
     with_logger(logger) do
         @test !valid_subgrid(
             1,
-            NodeID(9),
+            NodeID(:Basin, 9),
             node_to_basin,
             [-1.0, 0.0, 0.0],
             [-1.0, 0.0, 0.0],
@@ -396,20 +397,22 @@ end
     @test length(logger.logs) == 2
     @test logger.logs[1].level == Error
     @test logger.logs[1].message ==
-          "Basin / subgrid_level subgrid_id 1 has repeated basin levels, this cannot be interpolated."
+          "Basin / subgrid subgrid_id 1 has repeated basin levels, this cannot be interpolated."
     @test logger.logs[2].level == Error
     @test logger.logs[2].message ==
-          "Basin / subgrid_level subgrid_id 1 has repeated element levels, this cannot be interpolated."
+          "Basin / subgrid subgrid_id 1 has repeated element levels, this cannot be interpolated."
 end

 @testitem "negative demand" begin
     using Logging
     using DataInterpolations: LinearInterpolation
+    using Ribasim: NodeID
+
     logger = TestLogger()
     with_logger(logger) do
         @test_throws "Invalid demand" Ribasim.User(
-            [Ribasim.NodeID(1)],
+            [NodeID(:User, 1)],
             [true],
             [0.0],
             [[LinearInterpolation([-5.0, -5.0], [-1.8, 1.8])]],
@@ -425,5 +428,5 @@ end
     @test length(logger.logs) == 1
     @test logger.logs[1].level == Error
     @test logger.logs[1].message ==
-          "Demand of user node #1 with priority 1 should be non-negative"
+          "Demand of User #1 with priority 1 should be non-negative"
 end
diff --git a/docs/contribute/addnode.qmd b/docs/contribute/addnode.qmd
index b8c456a4b..b5d77ecb6 100644
--- a/docs/contribute/addnode.qmd
+++ b/docs/contribute/addnode.qmd
@@ -57,7 +57,7 @@ function NewNodeType(db::DB, config::Config)::NewNodeType

     # Unpack the fields of static as inputs for the NewNodeType constructor
     return NewNodeType(
-        NodeID.(parsed_parameters.node_id),
+        NodeID.(NodeType.NewNodeType, parsed_parameters.node_id),
         parsed_parameters.some_property,
         parsed_parameters.control_mapping)
 end
diff --git a/docs/core/allocation.qmd b/docs/core/allocation.qmd
index e09d11765..6bf3d266d 100644
--- a/docs/core/allocation.qmd
+++ b/docs/core/allocation.qmd
@@ -233,6 +233,7 @@ The following is an example of an optimization problem for the example shown [he
 ```{julia}
 # | code-fold: true
 using Ribasim
+using Ribasim: NodeID
 using SQLite

 toml_path = normpath(@__DIR__, "../../generated_testmodels/allocation_example/ribasim.toml")
@@ -242,7 +243,7 @@ allocation_model = p.allocation.allocation_models[1]
 t = 0.0
 priority_idx = 1

-Ribasim.set_flow!(p.graph, Ribasim.NodeID(1), Ribasim.NodeID(2), 1.0)
+Ribasim.set_flow!(p.graph, NodeID(:FlowBoundary, 1), NodeID(:Basin, 2), 1.0)

 Ribasim.adjust_source_capacities!(allocation_model, p, priority_idx)
 Ribasim.adjust_edge_capacities!(allocation_model, p, priority_idx)
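All of the Julia-side hunks above follow the same pattern: a bare integer `NodeID(i)` is replaced by a constructor that also carries the node type, which is what the shortened messages such as "Basin #1" or "Pump #6" interpolate. A small usage sketch, assuming the Ribasim package from this changeset is loaded:

```julia
using Ribasim: NodeID

id = NodeID(:User, 1)          # node type as a Symbol plus the integer ID
ids = NodeID.(:Pump, [1, 6])   # broadcasts over a vector of integer IDs

# Interpolating an ID yields the "<Type> #<id>" form the updated messages use:
"Demand of $id with priority 1 should be non-negative"
# -> "Demand of User #1 with priority 1 should be non-negative"
```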
diff --git a/python/ribasim/ribasim/geometry/edge.py b/python/ribasim/ribasim/geometry/edge.py
index 3abd3216e..763f3c2b3 100644
--- a/python/ribasim/ribasim/geometry/edge.py
+++ b/python/ribasim/ribasim/geometry/edge.py
@@ -17,7 +17,9 @@ class EdgeSchema(pa.SchemaModel):
     name: Series[str] = pa.Field(default="")
+    from_node_type: Series[str] = pa.Field(nullable=True)
     from_node_id: Series[int] = pa.Field(default=0, coerce=True)
+    to_node_type: Series[str] = pa.Field(nullable=True)
     to_node_id: Series[int] = pa.Field(default=0, coerce=True)
     edge_type: Series[str] = pa.Field(default="flow", coerce=True)
     allocation_network_id: Series[pd.Int64Dtype] = pa.Field(
diff --git a/python/ribasim/ribasim/model.py b/python/ribasim/ribasim/model.py
index 4f1188409..f9a23b336 100644
--- a/python/ribasim/ribasim/model.py
+++ b/python/ribasim/ribasim/model.py
@@ -309,6 +309,28 @@ def validate_model(self):
         self.validate_model_node_field_ids()
         self.validate_model_node_ids()

+    def _add_node_type(self, df: pd.DataFrame | None, id_col: str, type_col: str):
+        node = self.network.node.df
+        assert node is not None
+        if df is not None:
+            df[type_col] = node.loc[df[id_col], "type"].to_numpy()
+
+    def _add_node_types(self):
+        """Add the from/to node types to tables that reference external node IDs.
+
+        Only valid with globally unique node IDs, which is assured by using the node index.
+        """
+        self._add_node_type(self.network.edge.df, "from_node_id", "from_node_type")
+        self._add_node_type(self.network.edge.df, "to_node_id", "to_node_type")
+        id_col, type_col = "listen_node_id", "listen_node_type"
+        self._add_node_type(self.pid_control.static.df, id_col, type_col)
+        self._add_node_type(self.pid_control.time.df, id_col, type_col)
+        self._add_node_type(
+            self.discrete_control.condition.df,
+            "listen_feature_id",
+            "listen_feature_type",
+        )
+
     @classmethod
     def read(cls, filepath: FilePath) -> "Model":
         """Read model from TOML file."""
@@ -325,6 +347,7 @@ def write(self, filepath: Path | str) -> Path:
             filepath: FilePath ending in .toml
         """
         self.validate_model()
+        self._add_node_types()
         filepath = Path(filepath)
         if not filepath.suffix == ".toml":
             raise ValueError(f"Filepath '{filepath}' is not a .toml file.")
diff --git a/python/ribasim/ribasim/schemas.py b/python/ribasim/ribasim/schemas.py
index c907d6d54..d6af47bd7 100644
--- a/python/ribasim/ribasim/schemas.py
+++ b/python/ribasim/ribasim/schemas.py
@@ -50,6 +50,7 @@ class BasinTimeSchema(_BaseSchema):

 class DiscreteControlConditionSchema(_BaseSchema):
     node_id: Series[int] = pa.Field(nullable=False)
+    listen_feature_type: Series[str] = pa.Field(nullable=True)
     listen_feature_id: Series[int] = pa.Field(nullable=False)
     variable: Series[str] = pa.Field(nullable=False)
     greater_than: Series[float] = pa.Field(nullable=False)
@@ -62,15 +63,6 @@ class DiscreteControlLogicSchema(_BaseSchema):
     control_state: Series[str] = pa.Field(nullable=False)

-
-class EdgeSchema(_BaseSchema):
-    fid: Series[int] = pa.Field(nullable=False)
-    name: Series[str] = pa.Field(nullable=False)
-    from_node_id: Series[int] = pa.Field(nullable=False)
-    to_node_id: Series[int] = pa.Field(nullable=False)
-    edge_type: Series[str] = pa.Field(nullable=False)
-    allocation_network_id: Series[int] = pa.Field(nullable=True)
-

 class FlowBoundaryStaticSchema(_BaseSchema):
     node_id: Series[int] = pa.Field(nullable=False)
     active: Series[pa.BOOL] = pa.Field(nullable=True)
@@ -119,13 +111,6 @@ class ManningResistanceStaticSchema(_BaseSchema):
     control_state: Series[str] = pa.Field(nullable=True)

-
-class NodeSchema(_BaseSchema):
-    fid: Series[int] = pa.Field(nullable=False)
-    name: Series[str] = pa.Field(nullable=False)
-    type: Series[str] = pa.Field(nullable=False)
-    allocation_network_id: Series[int] = pa.Field(nullable=True)
-

 class OutletStaticSchema(_BaseSchema):
     node_id: Series[int] = pa.Field(nullable=False)
     active: Series[pa.BOOL] = pa.Field(nullable=True)
@@ -139,6 +124,7 @@ class OutletStaticSchema(_BaseSchema):

 class PidControlStaticSchema(_BaseSchema):
     node_id: Series[int] = pa.Field(nullable=False)
     active: Series[pa.BOOL] = pa.Field(nullable=True)
+    listen_node_type: Series[str] = pa.Field(nullable=True)
     listen_node_id: Series[int] = pa.Field(nullable=False)
     target: Series[float] = pa.Field(nullable=False)
     proportional: Series[float] = pa.Field(nullable=False)
@@ -149,6 +135,7 @@ class PidControlTimeSchema(_BaseSchema):

 class PidControlTimeSchema(_BaseSchema):
     node_id: Series[int] = pa.Field(nullable=False)
+    listen_node_type: Series[str] = pa.Field(nullable=True)
     listen_node_id: Series[int] = pa.Field(nullable=False)
     time: Series[Timestamp] = pa.Field(nullable=False)
     target: Series[float] = pa.Field(nullable=False)
diff --git a/python/ribasim_testmodels/ribasim_testmodels/invalid.py b/python/ribasim_testmodels/ribasim_testmodels/invalid.py
index c77bde12b..5591df6be 100644
--- a/python/ribasim_testmodels/ribasim_testmodels/invalid.py
+++ b/python/ribasim_testmodels/ribasim_testmodels/invalid.py
@@ -137,7 +137,7 @@ def invalid_fractional_flow_model():
     )

     # Setup the edges:
-    # Invalid: Node #7 combines fractional flow outneighbors with other outneigbor types.
+    # Invalid: TabulatedRatingCurve #7 combines FractionalFlow outneighbors with other outneigbor types.
     from_id = np.array([1, 7, 7, 3, 7, 4, 2], dtype=np.int64)
     to_id = np.array([7, 2, 3, 5, 4, 6, 8], dtype=np.int64)
     lines = node.geometry_from_connectivity(from_id, to_id)