diff --git a/Project.toml b/Project.toml
index 6f7346a..a3bf68b 100644
--- a/Project.toml
+++ b/Project.toml
@@ -4,6 +4,7 @@ authors = ["odow "]
 version = "0.1.0"
 
 [deps]
+JuMP = "4076af6c-e467-56ae-b986-b466b2749572"
 MathOptInterface = "b8f27783-ece8-5eb3-8dc8-9495eed66fee"
 SparseArrays = "2f01184e-e22b-5df5-ae63-d93ebab69eaf"
 Symbolics = "0c5d862f-8b57-4792-8d23-62f2024744c7"
diff --git a/README.md b/README.md
index 51abe26..c101da3 100644
--- a/README.md
+++ b/README.md
@@ -36,9 +36,23 @@ Pkg.add("https://github.com/odow/SymbolicAD.jl")
 
 ## Use with JuMP
 
+There are two ways to use SymbolicAD with JuMP.
+
+First, you can use `SymbolicAD.Optimizer`:
 ```julia
 using JuMP
 import Ipopt
 import SymbolicAD
 model = Model(() -> SymbolicAD.Optimizer(Ipopt.Optimizer))
 ```
+
+Second, you can set a `SymbolicAD.optimize_hook`:
+```julia
+using JuMP
+import Ipopt
+import SymbolicAD
+model = Model(Ipopt.Optimizer)
+set_optimize_hook(model, SymbolicAD.optimize_hook)
+```
+
+In general, the `SymbolicAD.optimize_hook` approach should be faster.
diff --git a/src/MOI_wrapper.jl b/src/MOI_wrapper.jl
index 82cf398..5eb57eb 100644
--- a/src/MOI_wrapper.jl
+++ b/src/MOI_wrapper.jl
@@ -1,16 +1,22 @@
 struct Optimizer{O} <: MOI.AbstractOptimizer
-    use_threads::Bool
+    backend::AbstractNonlinearOracleBackend
     inner::O
-    function Optimizer(inner; use_threads::Bool = false)
+    function Optimizer(
+        inner;
+        backend::AbstractNonlinearOracleBackend = DefaultBackend(),
+    )
         model = MOI.instantiate(inner)
-        return new{typeof(model)}(use_threads, model)
+        return new{typeof(model)}(backend, model)
     end
 end
 
 function MOI.set(model::Optimizer, ::MOI.NLPBlock, value)
     @info("Replacing NLPBlock with the SymbolicAD one")
-    nlp_block = SymbolicAD.nlp_block_data(value.evaluator)
-    MOI.set(model.inner, MOI.NLPBlock(), nlp_block)
+    MOI.set(
+        model.inner,
+        MOI.NLPBlock(),
+        _nlp_block_data(value.evaluator; backend = model.backend),
+    )
     return
 end
diff --git a/src/SymbolicAD.jl b/src/SymbolicAD.jl
index ecb70c0..5fd126d 100644
--- a/src/SymbolicAD.jl
+++ b/src/SymbolicAD.jl
@@ -1,6 +1,7 @@
 module SymbolicAD
 
 import Base.Meta: isexpr
+import JuMP
 import MathOptInterface
 import SparseArrays
 import Symbolics
diff --git a/src/nonlinear_oracle.jl b/src/nonlinear_oracle.jl
index cce98bf..766c8ff 100644
--- a/src/nonlinear_oracle.jl
+++ b/src/nonlinear_oracle.jl
@@ -156,6 +156,17 @@ mutable struct _NonlinearOracle{B} <: MOI.AbstractNLPEvaluator
     eval_constraint_timer::Float64
     eval_constraint_jacobian_timer::Float64
     eval_hessian_lagrangian_timer::Float64
+    function _NonlinearOracle(backend::B, objective, constraints) where {B}
+        return new{B}(
+            backend,
+            objective,
+            constraints,
+            Dict{UInt,_SymbolicsFunction}(),
+            0.0,
+            0.0,
+            0.0,
+        )
+    end
 end
 
 function MOI.initialize(
@@ -339,16 +350,16 @@ function MOI.eval_hessian_lagrangian(
 end
 
 """
-    nlp_block_data(
+    _nlp_block_data(
         d::MOI.AbstractNLPEvaluator;
-        backend::AbstractNonlinearOracleBackend = DefaultBackend(),
+        backend::AbstractNonlinearOracleBackend,
     )
 
 Convert an AbstractNLPEvaluator into a SymbolicAD instance of MOI.NLPBlockData.
 """
-function nlp_block_data(
+function _nlp_block_data(
     d::MOI.AbstractNLPEvaluator;
-    backend::AbstractNonlinearOracleBackend = DefaultBackend(),
+    backend::AbstractNonlinearOracleBackend,
 )
     MOI.initialize(d, [:ExprGraph])
     objective = d.has_nlobj ? _Function(MOI.objective_expr(d)) : nothing
@@ -359,15 +370,7 @@ function nlp_block_data(
         f, bound = _nonlinear_constraint(MOI.constraint_expr(d, i))
         functions[i], bounds[i] = f, bound
     end
-    oracle = _NonlinearOracle(
-        backend,
-        objective,
-        functions,
-        Dict{UInt,_SymbolicsFunction}(),
-        0.0,
-        0.0,
-        0.0,
-    )
+    oracle = _NonlinearOracle(backend, objective, functions)
     return MOI.NLPBlockData(bounds, oracle, d.has_nlobj)
 end
 
@@ -479,3 +482,77 @@ function MOI.eval_hessian_lagrangian(
     oracle.eval_hessian_lagrangian_timer += time() - start
     return
 end
+
+###
+### JuMP integration
+###
+
+function _to_expr(
+    nlp_data::JuMP._NLPData,
+    data::JuMP._NonlinearExprData,
+    subexpressions::Vector{Expr},
+)
+    tree = Any[]
+    for node in data.nd
+        expr = if node.nodetype == JuMP._Derivatives.CALL
+            Expr(:call, JuMP._Derivatives.operators[node.index])
+        elseif node.nodetype == JuMP._Derivatives.CALLUNIVAR
+            Expr(:call, JuMP._Derivatives.univariate_operators[node.index])
+        elseif node.nodetype == JuMP._Derivatives.SUBEXPRESSION
+            subexpressions[node.index]
+        elseif node.nodetype == JuMP._Derivatives.MOIVARIABLE
+            MOI.VariableIndex(node.index)
+        elseif node.nodetype == JuMP._Derivatives.PARAMETER
+            nlp_data.nlparamvalues[node.index]
+        else
+            @assert node.nodetype == JuMP._Derivatives.VALUE
+            data.const_values[node.index]
+        end
+        if 1 <= node.parent <= length(tree)
+            push!(tree[node.parent].args, expr)
+        end
+        push!(tree, expr)
+    end
+    return tree[1]
+end
+
+_to_expr(::JuMP._NLPData, ::Nothing, ::Vector{Expr}) = nothing
+
+function _nlp_block_data(
+    model::JuMP.Model;
+    backend::AbstractNonlinearOracleBackend,
+)
+    return _nlp_block_data(model.nlp_data; backend = backend)
+end
+
+function _nlp_block_data(
+    nlp_data::JuMP._NLPData;
+    backend::AbstractNonlinearOracleBackend,
+)
+    subexpressions = map(nlp_data.nlexpr) do expr
+        return _to_expr(nlp_data, expr, Expr[])::Expr
+    end
+    nlobj = _to_expr(nlp_data, nlp_data.nlobj, subexpressions)
+    objective = nlobj === nothing ? nothing : _Function(nlobj)
+    functions = map(nlp_data.nlconstr) do c
+        return _Function(_to_expr(nlp_data, c.terms, subexpressions))
+    end
+    return MOI.NLPBlockData(
+        [MOI.NLPBoundsPair(c.lb, c.ub) for c in nlp_data.nlconstr],
+        _NonlinearOracle(backend, objective, functions),
+        objective !== nothing,
+    )
+end
+
+function optimize_hook(
+    model::JuMP.Model;
+    backend::AbstractNonlinearOracleBackend = DefaultBackend(),
+)
+    nlp_block = _nlp_block_data(model; backend = backend)
+    nlp_data = model.nlp_data
+    model.nlp_data = nothing
+    MOI.set(model, MOI.NLPBlock(), nlp_block)
+    JuMP.optimize!(model; ignore_optimize_hook = true)
+    model.nlp_data = nlp_data
+    return
+end
diff --git a/test/runtests.jl b/test/runtests.jl
index 6cc30ee..383787c 100644
--- a/test/runtests.jl
+++ b/test/runtests.jl
@@ -19,11 +19,15 @@ function runtests()
     return
 end
 
-function run_unit_benchmark(model::JuMP.Model)
+function run_unit_benchmark(model::JuMP.Model; direct_model::Bool = false)
     @info "Constructing oracles"
     @time d = JuMP.NLPEvaluator(model)
-    @time nlp_block = SymbolicAD.nlp_block_data(d)
+    @time nlp_block = if direct_model
+        SymbolicAD._nlp_block_data(model; backend = SymbolicAD.DefaultBackend())
+    else
+        SymbolicAD._nlp_block_data(d; backend = SymbolicAD.DefaultBackend())
+    end
     oracle = nlp_block.evaluator
 
     @info "MOI.initialize"
@@ -159,7 +163,7 @@ function run_solution_benchmark(model::JuMP.Model)
 
     serial_model = Ipopt.Optimizer()
     MOI.copy_to(serial_model, model)
-    serial_nlp_block = SymbolicAD.nlp_block_data(
+    serial_nlp_block = SymbolicAD._nlp_block_data(
         JuMP.NLPEvaluator(model);
         backend = SymbolicAD.DefaultBackend(),
     )
@@ -175,7 +179,7 @@ function run_solution_benchmark(model::JuMP.Model)
 
     threaded_model = Ipopt.Optimizer()
     MOI.copy_to(threaded_model, model)
-    threaded_nlp_block = SymbolicAD.nlp_block_data(
+    threaded_nlp_block = SymbolicAD._nlp_block_data(
         JuMP.NLPEvaluator(model);
         backend = SymbolicAD.ThreadedBackend(),
     )
@@ -252,7 +256,8 @@ end
 
 function test_case5_pjm_unit()
     model = power_model("pglib_opf_case5_pjm.m")
-    run_unit_benchmark(model)
+    run_unit_benchmark(model; direct_model = false)
+    run_unit_benchmark(model; direct_model = true)
     return
 end
 
@@ -312,6 +317,38 @@ function test_optimizer_case5_pjm()
     return
 end
 
+function test_optimizer_case5_pjm_optimize_hook()
+    model = power_model("pglib_opf_case5_pjm.m")
+    set_optimizer(model, Ipopt.Optimizer)
+    set_optimize_hook(model, SymbolicAD.optimize_hook)
+    optimize!(model)
+    symbolic_obj = objective_value(model)
+    set_optimize_hook(model, nothing)
+    optimize!(model)
+    Test.@test isapprox(objective_value(model), symbolic_obj; atol = 1e-6)
+    return
+end
+
+"""
+    test_optimize_hook()
+
+This model is chosen to cover the range of node types in JuMP's nonlinear
+expression data structure (calls, subexpressions, parameters, and constants).
+"""
+function test_optimize_hook()
+    model = Model(Ipopt.Optimizer)
+    @variable(model, 0 <= x <= 2π, start = π)
+    @NLexpression(model, y, sin(x))
+    @NLparameter(model, p == 1)
+    @NLconstraint(model, sqrt(y + 2) <= p + 1)
+    @NLobjective(model, Min, p * y)
+    set_optimize_hook(model, SymbolicAD.optimize_hook)
+    optimize!(model)
+    Test.@test isapprox(objective_value(model), -1; atol = 1e-6)
+    Test.@test isapprox(value(x), 1.5 * π; atol = 1e-6)
+    return
+end
+
 end  # module
 
 RunTests.runtests()
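
Reviewer note: a minimal end-to-end sketch of the hook added in this diff, assuming this branch plus Ipopt. The plain form `set_optimize_hook(model, SymbolicAD.optimize_hook)` (as in the README and tests above) uses `DefaultBackend`; the closure form below is only needed to select a non-default backend, and `ThreadedBackend` is the one the solution benchmark exercises.

```julia
using JuMP
import Ipopt
import SymbolicAD

model = Model(Ipopt.Optimizer)
@variable(model, 0 <= x <= 2π, start = π)
@NLobjective(model, Min, sin(x))

# `optimize_hook` builds the NLPBlock with a SymbolicAD oracle instead of
# JuMP's AD, calls `optimize!(model; ignore_optimize_hook = true)`, and then
# restores `model.nlp_data`. Wrapping it in a closure lets us pick a backend.
set_optimize_hook(
    model,
    m -> SymbolicAD.optimize_hook(m; backend = SymbolicAD.ThreadedBackend()),
)
optimize!(model)
```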