Add direct JuMP integration bypassing expensive initialize #3

Merged: 2 commits, Mar 29, 2022
1 change: 1 addition & 0 deletions Project.toml
@@ -4,6 +4,7 @@ authors = ["odow <[email protected]>"]
version = "0.1.0"

[deps]
JuMP = "4076af6c-e467-56ae-b986-b466b2749572"
MathOptInterface = "b8f27783-ece8-5eb3-8dc8-9495eed66fee"
SparseArrays = "2f01184e-e22b-5df5-ae63-d93ebab69eaf"
Symbolics = "0c5d862f-8b57-4792-8d23-62f2024744c7"
14 changes: 14 additions & 0 deletions README.md
@@ -36,9 +36,23 @@ Pkg.add("https://github.com/odow/SymbolicAD.jl")

## Use with JuMP

There are two ways to use SymbolicAD with JuMP.

First, you can use `SymbolicAD.Optimizer`:
```julia
using JuMP
import Ipopt
import SymbolicAD
model = Model(() -> SymbolicAD.Optimizer(Ipopt.Optimizer))
```

Second, you can set `SymbolicAD.optimize_hook` as an optimize hook:
```julia
using JuMP
import Ipopt
import SymbolicAD
model = Model(Ipopt.Optimizer)
set_optimize_hook(model, SymbolicAD.optimize_hook)
```

In general, the `SymbolicAD.optimize_hook` approach should be faster, because it builds the NLP block directly from JuMP's expression data instead of going through an expensive `MOI.initialize` call on a `JuMP.NLPEvaluator`.
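As a rough way to check this on your own problem, you can time both approaches side by side (a sketch only; `build_model()` stands in for whatever function constructs your JuMP model):
```julia
using JuMP
import Ipopt
import SymbolicAD

# Approach 1: wrap Ipopt in SymbolicAD.Optimizer.
model_1 = build_model()
set_optimizer(model_1, () -> SymbolicAD.Optimizer(Ipopt.Optimizer))
@time optimize!(model_1)

# Approach 2: keep Ipopt as the optimizer and install the optimize hook.
model_2 = build_model()
set_optimizer(model_2, Ipopt.Optimizer)
set_optimize_hook(model_2, SymbolicAD.optimize_hook)
@time optimize!(model_2)
```
First-call timings include Julia compilation, so run each approach twice before comparing.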
16 changes: 11 additions & 5 deletions src/MOI_wrapper.jl
@@ -1,16 +1,22 @@
struct Optimizer{O} <: MOI.AbstractOptimizer
use_threads::Bool
backend::AbstractNonlinearOracleBackend
inner::O
function Optimizer(inner; use_threads::Bool = false)
function Optimizer(
inner;
backend::AbstractNonlinearOracleBackend = DefaultBackend(),
)
model = MOI.instantiate(inner)
return new{typeof(model)}(use_threads, model)
return new{typeof(model)}(backend, model)
end
end

function MOI.set(model::Optimizer, ::MOI.NLPBlock, value)
@info("Replacing NLPBlock with the SymbolicAD one")
nlp_block = SymbolicAD.nlp_block_data(value.evaluator)
MOI.set(model.inner, MOI.NLPBlock(), nlp_block)
MOI.set(
model.inner,
MOI.NLPBlock(),
_nlp_block_data(value.evaluator; backend = model.backend),
)
return
end
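The `backend` keyword added above selects how the nonlinear oracle is evaluated; `SymbolicAD.ThreadedBackend()`, which the benchmarks in `test/runtests.jl` also use, is the alternative to the default. A minimal sketch of passing it through `SymbolicAD.Optimizer`, assuming the README setup:
```julia
using JuMP
import Ipopt
import SymbolicAD

model = Model(
    () -> SymbolicAD.Optimizer(
        Ipopt.Optimizer;
        backend = SymbolicAD.ThreadedBackend(),
    ),
)
```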

1 change: 1 addition & 0 deletions src/SymbolicAD.jl
@@ -1,6 +1,7 @@
module SymbolicAD

import Base.Meta: isexpr
import JuMP
import MathOptInterface
import SparseArrays
import Symbolics
103 changes: 90 additions & 13 deletions src/nonlinear_oracle.jl
@@ -156,6 +156,17 @@ mutable struct _NonlinearOracle{B} <: MOI.AbstractNLPEvaluator
eval_constraint_timer::Float64
eval_constraint_jacobian_timer::Float64
eval_hessian_lagrangian_timer::Float64
function _NonlinearOracle(backend::B, objective, constraints) where {B}
return new{B}(
backend,
objective,
constraints,
Dict{UInt,_SymbolicsFunction}(),
0.0,
0.0,
0.0,
)
end
end

function MOI.initialize(
@@ -339,16 +350,16 @@ function MOI.eval_hessian_lagrangian(
end

"""
nlp_block_data(
_nlp_block_data(
d::MOI.AbstractNLPEvaluator;
backend::AbstractNonlinearOracleBackend = DefaultBackend(),
backend::AbstractNonlinearOracleBackend,
)

Convert an AbstractNLPEvaluator into a SymbolicAD instance of MOI.NLPBlockData.
"""
function nlp_block_data(
function _nlp_block_data(
d::MOI.AbstractNLPEvaluator;
backend::AbstractNonlinearOracleBackend = DefaultBackend(),
backend::AbstractNonlinearOracleBackend,
)
MOI.initialize(d, [:ExprGraph])
objective = d.has_nlobj ? _Function(MOI.objective_expr(d)) : nothing
@@ -359,15 +370,7 @@ function nlp_block_data(
f, bound = _nonlinear_constraint(MOI.constraint_expr(d, i))
functions[i], bounds[i] = f, bound
end
oracle = _NonlinearOracle(
backend,
objective,
functions,
Dict{UInt,_SymbolicsFunction}(),
0.0,
0.0,
0.0,
)
oracle = _NonlinearOracle(backend, objective, functions)
return MOI.NLPBlockData(bounds, oracle, d.has_nlobj)
end
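# For example (a sketch mirroring the tests): given a JuMP model `model` and an
# MOI optimizer `dest` that `model` has been copied to,
#
#     d = JuMP.NLPEvaluator(model)
#     block = _nlp_block_data(d; backend = DefaultBackend())
#     MOI.set(dest, MOI.NLPBlock(), block)
#
# installs an NLPBlock whose evaluator is the `_NonlinearOracle` above.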

@@ -479,3 +482,77 @@ function MOI.eval_hessian_lagrangian(
oracle.eval_hessian_lagrangian_timer += time() - start
return
end

###
### JuMP integration
###

function _to_expr(
nlp_data::JuMP._NLPData,
data::JuMP._NonlinearExprData,
subexpressions::Vector{Expr},
)
tree = Any[]
for node in data.nd
expr = if node.nodetype == JuMP._Derivatives.CALL
Expr(:call, JuMP._Derivatives.operators[node.index])
elseif node.nodetype == JuMP._Derivatives.CALLUNIVAR
Expr(:call, JuMP._Derivatives.univariate_operators[node.index])
elseif node.nodetype == JuMP._Derivatives.SUBEXPRESSION
subexpressions[node.index]
elseif node.nodetype == JuMP._Derivatives.MOIVARIABLE
MOI.VariableIndex(node.index)
elseif node.nodetype == JuMP._Derivatives.PARAMETER
nlp_data.nlparamvalues[node.index]
else
@assert node.nodetype == JuMP._Derivatives.VALUE
data.const_values[node.index]
end
if 1 <= node.parent <= length(tree)
push!(tree[node.parent].args, expr)
end
push!(tree, expr)
end
return tree[1]
end
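# Illustrative sketch of the walk above: JuMP stores each nonlinear expression as
# a flat, root-first list of nodes with parent indices. For an expression like
# `p * sin(x)` (with the parameter `p == 1` and `x` the model's first variable),
# the node list is roughly
#
#     CALL :*,  PARAMETER p,  CALLUNIVAR :sin,  MOIVARIABLE x
#
# and appending each node to its parent's `args` reconstructs
# `Expr(:call, :*, 1.0, Expr(:call, :sin, MOI.VariableIndex(1)))`, with the
# parameter inlined as its current value.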

_to_expr(::JuMP._NLPData, ::Nothing, ::Vector{Expr}) = nothing

function _nlp_block_data(
model::JuMP.Model;
backend::AbstractNonlinearOracleBackend,
)
return _nlp_block_data(model.nlp_data; backend = backend)
end

function _nlp_block_data(
nlp_data::JuMP._NLPData;
backend::AbstractNonlinearOracleBackend,
)
subexpressions = map(nlp_data.nlexpr) do expr
return _to_expr(nlp_data, expr, Expr[])::Expr
end
nlobj = _to_expr(nlp_data, nlp_data.nlobj, subexpressions)
objective = nlobj === nothing ? nothing : _Function(nlobj)
functions = map(nlp_data.nlconstr) do c
return _Function(_to_expr(nlp_data, c.terms, subexpressions))
end
return MOI.NLPBlockData(
[MOI.NLPBoundsPair(c.lb, c.ub) for c in nlp_data.nlconstr],
_NonlinearOracle(backend, objective, functions),
objective !== nothing,
)
end

function optimize_hook(
model::JuMP.Model;
backend::AbstractNonlinearOracleBackend = DefaultBackend(),
)
# Build the NLPBlockData directly from `model.nlp_data`, avoiding the expensive
# `MOI.initialize(d, [:ExprGraph])` pass through a `JuMP.NLPEvaluator`.
nlp_block = _nlp_block_data(model; backend = backend)
# Temporarily clear `nlp_data` so that `optimize!` passes this NLPBlock through
# instead of attaching JuMP's default evaluator; restore it afterwards.
nlp_data = model.nlp_data
model.nlp_data = nothing
MOI.set(model, MOI.NLPBlock(), nlp_block)
# `ignore_optimize_hook = true` prevents this hook from calling itself.
JuMP.optimize!(model; ignore_optimize_hook = true)
model.nlp_data = nlp_data
return
end
47 changes: 42 additions & 5 deletions test/runtests.jl
@@ -19,11 +19,15 @@ function runtests()
return
end

function run_unit_benchmark(model::JuMP.Model)
function run_unit_benchmark(model::JuMP.Model; direct_model::Bool = false)
@info "Constructing oracles"

@time d = JuMP.NLPEvaluator(model)
@time nlp_block = SymbolicAD.nlp_block_data(d)
@time nlp_block = if direct_model
SymbolicAD._nlp_block_data(model; backend = SymbolicAD.DefaultBackend())
else
SymbolicAD._nlp_block_data(d; backend = SymbolicAD.DefaultBackend())
end
oracle = nlp_block.evaluator

@info "MOI.initialize"
@@ -159,7 +163,7 @@ function run_solution_benchmark(model::JuMP.Model)

serial_model = Ipopt.Optimizer()
MOI.copy_to(serial_model, model)
serial_nlp_block = SymbolicAD.nlp_block_data(
serial_nlp_block = SymbolicAD._nlp_block_data(
JuMP.NLPEvaluator(model);
backend = SymbolicAD.DefaultBackend(),
)
@@ -175,7 +179,7 @@

threaded_model = Ipopt.Optimizer()
MOI.copy_to(threaded_model, model)
threaded_nlp_block = SymbolicAD.nlp_block_data(
threaded_nlp_block = SymbolicAD._nlp_block_data(
JuMP.NLPEvaluator(model);
backend = SymbolicAD.ThreadedBackend(),
)
@@ -252,7 +256,8 @@ end

function test_case5_pjm_unit()
model = power_model("pglib_opf_case5_pjm.m")
run_unit_benchmark(model)
run_unit_benchmark(model; direct_model = false)
run_unit_benchmark(model; direct_model = true)
return
end

@@ -312,6 +317,38 @@ function test_optimizer_case5_pjm()
return
end

function test_optimizer_case5_pjm_optimize_hook()
model = power_model("pglib_opf_case5_pjm.m")
set_optimizer(model, Ipopt.Optimizer)
set_optimize_hook(model, SymbolicAD.optimize_hook)
optimize!(model)
symbolic_obj = objective_value(model)
set_optimize_hook(model, nothing)
optimize!(model)
Test.@test isapprox(objective_value(model), symbolic_obj, atol = 1e-6)
return
end

"""
test_optimize_hook()

This model is chosen to cover the range of node types in JuMP's nonlinear
expression data structure: calls, univariate calls, subexpressions, parameters,
variables, and constants.
"""
function test_optimize_hook()
model = Model(Ipopt.Optimizer)
@variable(model, 0 <= x <= 2π, start = π)
@NLexpression(model, y, sin(x))
@NLparameter(model, p == 1)
@NLconstraint(model, sqrt(y + 2) <= p + 1)
@NLobjective(model, Min, p * y)
set_optimize_hook(model, SymbolicAD.optimize_hook)
optimize!(model)
Test.@test isapprox(objective_value(model), -1; atol = 1e-6)
Test.@test isapprox(value(x), 1.5 * π; atol = 1e-6)
return
end

end # module

RunTests.runtests()