diff --git a/Project.toml b/Project.toml index 78bfcf5..8f7c9be 100644 --- a/Project.toml +++ b/Project.toml @@ -5,6 +5,8 @@ version = "0.7.0" [deps] MathOptInterface = "b8f27783-ece8-5eb3-8dc8-9495eed66fee" +DiffOpt = "930fe3bc-9c6b-11ea-2d94-6184641e85e7" +JuMP = "4076af6c-e467-56ae-b986-b466b2749572" [compat] GLPK = "1" @@ -16,6 +18,7 @@ julia = "1.6" [extras] GLPK = "60bf3e95-4087-53dc-ae20-288a0d20c6a6" +HiGHS = "87dc4568-4c63-4d18-b0c0-bb2238e4078b" Ipopt = "b6b21f68-93f8-5de0-b562-5493be1d77c9" JuMP = "4076af6c-e467-56ae-b986-b466b2749572" LinearAlgebra = "37e2e46d-f89d-539d-b4ee-838fcccc9c8e" @@ -23,4 +26,4 @@ SCS = "c946c3f1-0d1f-5ce8-9dea-7daa1f7e2d13" Test = "8dfed614-e22c-5e08-85e1-65c5234f0b40" [targets] -test = ["GLPK", "Ipopt", "JuMP", "LinearAlgebra", "SCS", "Test"] +test = ["GLPK", "HiGHS", "Ipopt", "JuMP", "LinearAlgebra", "SCS", "Test"] diff --git a/src/MOI_wrapper.jl b/src/MOI_wrapper.jl index 70f3aff..6f90795 100644 --- a/src/MOI_wrapper.jl +++ b/src/MOI_wrapper.jl @@ -99,7 +99,9 @@ function MOI.is_empty(model::Optimizer) # isempty(model.multiplicative_parameters) && isempty(model.dual_value_of_parameters) && - model.number_of_parameters_in_model == 0 + model.number_of_parameters_in_model == 0 && + isempty(model.parameter_input_forward) && + isempty(model.parameter_output_backward) end function MOI.empty!(model::Optimizer{T}) where {T} @@ -133,6 +135,8 @@ function MOI.empty!(model::Optimizer{T}) where {T} empty!(model.dual_value_of_parameters) # model.number_of_parameters_in_model = 0 + empty!(model.parameter_input_forward) + empty!(model.parameter_output_backward) return end diff --git a/src/ParametricOptInterface.jl b/src/ParametricOptInterface.jl index 951ece7..29a475d 100644 --- a/src/ParametricOptInterface.jl +++ b/src/ParametricOptInterface.jl @@ -162,6 +162,10 @@ mutable struct Optimizer{T,OT<:MOI.ModelLike} <: MOI.AbstractOptimizer number_of_parameters_in_model::Int64 constraints_interpretation::ConstraintsInterpretationCode save_original_objective_and_constraints::Bool + + # sensitivity data + parameter_input_forward::Dict{ParameterIndex,T} + parameter_output_backward::Dict{ParameterIndex,T} function Optimizer( optimizer::OT; evaluate_duals::Bool = true, @@ -219,6 +223,8 @@ mutable struct Optimizer{T,OT<:MOI.ModelLike} <: MOI.AbstractOptimizer 0, ONLY_CONSTRAINTS, save_original_objective_and_constraints, + Dict{ParameterIndex,T}(), + Dict{ParameterIndex,T}(), ) end end @@ -248,5 +254,6 @@ end include("duals.jl") include("update_parameters.jl") include("MOI_wrapper.jl") +include("diff.jl") end # module diff --git a/src/diff.jl b/src/diff.jl new file mode 100644 index 0000000..95a6071 --- /dev/null +++ b/src/diff.jl @@ -0,0 +1,501 @@ +using DiffOpt + +# forward mode + +function _affine_constraint_set_forward!( + model::Optimizer{T}, + affine_constraint_cache_inner, +) where {T} + for (inner_ci, pf) in affine_constraint_cache_inner + cte = zero(T) + terms = MOI.ScalarAffineTerm{T}[] + sizehint!(terms, 0) + for term in pf.p + p = p_idx(term.variable) + sensitivity = get(model.parameter_input_forward, p, 0.0) + cte += sensitivity * term.coefficient + end + if !iszero(cte) + MOI.set( + model.optimizer, + DiffOpt.ForwardConstraintFunction(), + inner_ci, + MOI.ScalarAffineFunction{T}(terms, cte), + ) + end + end + return +end + +function _vector_affine_constraint_set_forward!( + model::Optimizer{T}, + vector_affine_constraint_cache_inner, +) where {T} + for (inner_ci, pf) in vector_affine_constraint_cache_inner + cte = zeros(T, length(pf.c)) + terms = 
MOI.VectorAffineTerm{T}[] + sizehint!(terms, 0) + for term in pf.p + p = p_idx(term.scalar_term.variable) + sensitivity = get(model.parameter_input_forward, p, 0.0) + cte[term.output_index] += sensitivity * term.scalar_term.coefficient + end + if !iszero(cte) + MOI.set( + model.optimizer, + DiffOpt.ForwardConstraintFunction(), + inner_ci, + MOI.VectorAffineFunction{T}(terms, cte), + ) + end + end + return +end + +function _quadratic_constraint_set_forward!( + model::Optimizer{T}, + quadratic_constraint_cache_inner, +) where {T} + for (inner_ci, pf) in quadratic_constraint_cache_inner + cte = zero(T) + terms = MOI.ScalarAffineTerm{T}[] + # terms_dict = Dict{MOI.VariableIndex,T}() # canonicalize? + sizehint!(terms, length(pf.pv)) + for term in pf.p + p = p_idx(term.variable) + sensitivity = get(model.parameter_input_forward, p, 0.0) + cte += sensitivity * term.coefficient + end + for term in pf.pp + p_1 = p_idx(term.variable_1) + p_2 = p_idx(term.variable_2) + sensitivity_1 = get(model.parameter_input_forward, p_1, 0.0) + sensitivity_2 = get(model.parameter_input_forward, p_2, 0.0) + cte += + sensitivity_1 * model.parameters[p_2] * term.coefficient / + ifelse(term.variable_1 === term.variable_2, 2, 1) + cte += + model.parameters[p_1] * sensitivity_2 * term.coefficient / + ifelse(term.variable_1 === term.variable_2, 2, 1) + end + # canonicalize? + for term in pf.pv + p = p_idx(term.variable_1) + sensitivity = get(model.parameter_input_forward, p, NaN) + if !isnan(sensitivity) + push!( + terms, + MOI.ScalarAffineTerm{T}( + sensitivity * term.coefficient, + term.variable_2, + ), + ) + end + end + if !iszero(cte) || !isempty(terms) + MOI.set( + model.optimizer, + DiffOpt.ForwardConstraintFunction(), + inner_ci, + MOI.ScalarAffineFunction{T}(terms, cte), + ) + end + end + return +end + +function _affine_objective_set_forward!(model::Optimizer{T}) where {T} + cte = zero(T) + terms = MOI.ScalarAffineTerm{T}[] + pf = model.affine_objective_cache + sizehint!(terms, 0) + for term in pf.p + p = p_idx(term.variable) + sensitivity = get(model.parameter_input_forward, p, 0.0) + cte += sensitivity * term.coefficient + end + if !iszero(cte) + MOI.set( + model.optimizer, + DiffOpt.ForwardObjectiveFunction(), + MOI.ScalarAffineFunction{T}(terms, cte), + ) + end + return +end + +function _quadratic_objective_set_forward!(model::Optimizer{T}) where {T} + cte = zero(T) + terms = MOI.ScalarAffineTerm{T}[] + pf = model.quadratic_objective_cache + sizehint!(terms, length(pf.pv)) + for term in pf.p + p = p_idx(term.variable) + sensitivity = get(model.parameter_input_forward, p, 0.0) + cte += sensitivity * term.coefficient + end + for term in pf.pp + p_1 = p_idx(term.variable_1) + p_2 = p_idx(term.variable_2) + sensitivity_1 = get(model.parameter_input_forward, p_1, 0.0) + sensitivity_2 = get(model.parameter_input_forward, p_2, 0.0) + cte += + sensitivity_1 * model.parameters[p_2] * term.coefficient / + ifelse(term.variable_1 === term.variable_2, 2, 1) + cte += + model.parameters[p_1] * sensitivity_2 * term.coefficient / + ifelse(term.variable_1 === term.variable_2, 2, 1) + end + # canonicalize? 
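+ # parameter-times-variable terms: a parameter direction dp turns coef * p * v
+ # into the affine perturbation (coef * dp) * v on the inner variable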
+ for term in pf.pv + p = p_idx(term.variable_1) + sensitivity = get(model.parameter_input_forward, p, NaN) + if !isnan(sensitivity) + push!( + terms, + MOI.ScalarAffineTerm{T}( + sensitivity * term.coefficient, + term.variable_2, + ), + ) + end + end + if !iszero(cte) || !isempty(terms) + MOI.set( + model.optimizer, + DiffOpt.ForwardObjectiveFunction(), + MOI.ScalarAffineFunction{T}(terms, cte), + ) + end + return +end + +function _empty_input_cache!(model::Optimizer) + _empty_input_cache!(model.optimizer) + return +end +function _empty_input_cache!(model::MOI.Bridges.AbstractBridgeOptimizer) + _empty_input_cache!(model.model) + return +end +function _empty_input_cache!(model::MOI.Utilities.CachingOptimizer) + _empty_input_cache!(model.optimizer) + return +end +function _empty_input_cache!(model::DiffOpt.Optimizer) + empty!(model.input_cache) + return +end + +function DiffOpt.forward_differentiate!(model::Optimizer{T}) where {T} + # TODO: add a reset option + _empty_input_cache!(model) + for (F, S) in MOI.Utilities.DoubleDicts.nonempty_outer_keys( + model.affine_constraint_cache, + ) + _affine_constraint_set_forward!( + model, + model.affine_constraint_cache[F, S], + ) + end + for (F, S) in MOI.Utilities.DoubleDicts.nonempty_outer_keys( + model.vector_affine_constraint_cache, + ) + _vector_affine_constraint_set_forward!( + model, + model.vector_affine_constraint_cache[F, S], + ) + end + for (F, S) in MOI.Utilities.DoubleDicts.nonempty_outer_keys( + model.quadratic_constraint_cache, + ) + _quadratic_constraint_set_forward!( + model, + model.quadratic_constraint_cache[F, S], + ) + end + if model.affine_objective_cache !== nothing + _affine_objective_set_forward!(model) + elseif model.quadratic_objective_cache !== nothing + _quadratic_objective_set_forward!(model) + end + DiffOpt.forward_differentiate!(model.optimizer) + return +end + +struct ForwardParameter <: MOI.AbstractVariableAttribute end + +function MOI.set( + model::Optimizer, + ::ForwardParameter, + variable::MOI.VariableIndex, + value::Number, +) + if _is_variable(variable) + error("Trying to set a forward parameter sensitivity for a variable") + end + parameter = p_idx(variable) + model.parameter_input_forward[parameter] = value + return +end + +function MOI.get( + model::Optimizer, + attr::DiffOpt.ForwardVariablePrimal, + variable::MOI.VariableIndex, +) + if _is_parameter(variable) + error("Trying to get a forward variable sensitivity for a parameter") + end + return MOI.get(model.optimizer, attr, model.variables[variable]) +end + +# reverse mode + +using JuMP + +function _affine_constraint_get_reverse!( + model::Optimizer{T}, + affine_constraint_cache_inner, +) where {T} + for (inner_ci, pf) in affine_constraint_cache_inner + if isempty(pf.p) + continue + end + grad_pf_cte = MOI.constant( + MOI.get( + model.optimizer, + DiffOpt.ReverseConstraintFunction(), + inner_ci, + ), + ) + for term in pf.p + p = p_idx(term.variable) + value = get!(model.parameter_output_backward, p, 0.0) + model.parameter_output_backward[p] = + value + term.coefficient * grad_pf_cte + end + end + return +end + +function _vector_affine_constraint_get_reverse!( + model::Optimizer{T}, + vector_affine_constraint_cache_inner, +) where {T} + for (inner_ci, pf) in vector_affine_constraint_cache_inner + if isempty(pf.p) + continue + end + grad_pf_cte = MOI.constant( + MOI.get( + model.optimizer, + DiffOpt.ReverseConstraintFunction(), + inner_ci, + ), + ) + for term in pf.p + p = p_idx(term.scalar_term.variable) + value = get!(model.parameter_output_backward, p, 
0.0) + model.parameter_output_backward[p] = + value + + term.scalar_term.coefficient * grad_pf_cte[term.output_index] + end + end + return +end + +function _quadratic_constraint_get_reverse!( + model::Optimizer{T}, + quadratic_constraint_cache_inner, +) where {T} + for (inner_ci, pf) in quadratic_constraint_cache_inner + if isempty(pf.p) && isempty(pf.pv) && isempty(pf.pp) + continue + end + grad_pf = MOI.get( + model.optimizer, + DiffOpt.ReverseConstraintFunction(), + inner_ci, + ) + grad_pf_cte = MOI.constant(grad_pf) + for term in pf.p + p = p_idx(term.variable) + value = get!(model.parameter_output_backward, p, 0.0) + model.parameter_output_backward[p] = + value + term.coefficient * grad_pf_cte + end + for term in pf.pp + p_1 = p_idx(term.variable_1) + p_2 = p_idx(term.variable_2) + value_1 = get!(model.parameter_output_backward, p_1, 0.0) + value_2 = get!(model.parameter_output_backward, p_2, 0.0) + # TODO: why there is no factor of 2 here???? + # ANS: probably because it was SET + model.parameter_output_backward[p_1] = + value_1 + + term.coefficient * grad_pf_cte * model.parameters[p_2] / + ifelse(term.variable_1 === term.variable_2, 1, 1) + model.parameter_output_backward[p_2] = + value_2 + + term.coefficient * grad_pf_cte * model.parameters[p_1] / + ifelse(term.variable_1 === term.variable_2, 1, 1) + end + for term in pf.pv + p = p_idx(term.variable_1) + v = term.variable_2 # check if inner or outer (should be inner) + value = get!(model.parameter_output_backward, p, 0.0) + model.parameter_output_backward[p] = + value + term.coefficient * JuMP.coefficient(grad_pf, v) # * fixed value of the parameter ? + end + end + return +end + +function _affine_objective_get_reverse!(model::Optimizer{T}) where {T} + pf = model.affine_objective_cache + if isempty(pf.p) + return + end + grad_pf = MOI.get(model.optimizer, DiffOpt.ReverseObjectiveFunction()) + grad_pf_cte = MOI.constant(grad_pf) + for term in pf.p + p = p_idx(term.variable) + value = get!(model.parameter_output_backward, p, 0.0) + model.parameter_output_backward[p] = + value + term.coefficient * grad_pf_cte + end + return +end +function _quadratic_objective_get_reverse!(model::Optimizer{T}) where {T} + pf = model.quadratic_objective_cache + if isempty(pf.p) && isempty(pf.pv) && isempty(pf.pp) + return + end + grad_pf = MOI.get(model.optimizer, DiffOpt.ReverseObjectiveFunction()) + grad_pf_cte = MOI.constant(grad_pf) + for term in pf.p + p = p_idx(term.variable) + value = get!(model.parameter_output_backward, p, 0.0) + model.parameter_output_backward[p] = + value + term.coefficient * grad_pf_cte + end + for term in pf.pp + p_1 = p_idx(term.variable_1) + p_2 = p_idx(term.variable_2) + value_1 = get!(model.parameter_output_backward, p_1, 0.0) + value_2 = get!(model.parameter_output_backward, p_2, 0.0) + model.parameter_output_backward[p_1] = + value_1 + + term.coefficient * grad_pf_cte * model.parameters[p_2] / + ifelse(term.variable_1 === term.variable_2, 2, 1) + model.parameter_output_backward[p_2] = + value_2 + + term.coefficient * grad_pf_cte * model.parameters[p_1] / + ifelse(term.variable_1 === term.variable_2, 2, 1) + end + for term in pf.pv + p = p_idx(term.variable_1) + v = term.variable_2 # check if inner or outer (should be inner) + value = get!(model.parameter_output_backward, p, 0.0) + model.parameter_output_backward[p] = + value + term.coefficient * JuMP.coefficient(grad_pf, v) # * fixed value of the parameter ? 
+ end + return +end + +function DiffOpt.reverse_differentiate!(model::Optimizer) + # TODO: add a reset option + DiffOpt.reverse_differentiate!(model.optimizer) + empty!(model.parameter_output_backward) + sizehint!(model.parameter_output_backward, length(model.parameters)) + for (F, S) in MOI.Utilities.DoubleDicts.nonempty_outer_keys( + model.affine_constraint_cache, + ) + _affine_constraint_get_reverse!( + model, + model.affine_constraint_cache[F, S], + ) + end + for (F, S) in MOI.Utilities.DoubleDicts.nonempty_outer_keys( + model.vector_affine_constraint_cache, + ) + _vector_affine_constraint_get_reverse!( + model, + model.vector_affine_constraint_cache[F, S], + ) + end + for (F, S) in MOI.Utilities.DoubleDicts.nonempty_outer_keys( + model.quadratic_constraint_cache, + ) + _quadratic_constraint_get_reverse!( + model, + model.quadratic_constraint_cache[F, S], + ) + end + if model.affine_objective_cache !== nothing + _affine_objective_get_reverse!(model) + elseif model.quadratic_objective_cache !== nothing + _quadratic_objective_get_reverse!(model) + end + return +end + +function MOI.set( + model::Optimizer, + attr::DiffOpt.ReverseVariablePrimal, + variable::MOI.VariableIndex, + value::Number, +) + if _is_parameter(variable) + error("Trying to set a backward variable sensitivity for a parameter") + end + MOI.set(model.optimizer, attr, variable, value) + return +end + +struct ReverseParameter <: MOI.AbstractVariableAttribute end + +MOI.is_set_by_optimize(::ReverseParameter) = true + +function MOI.get( + model::Optimizer, + ::ReverseParameter, + variable::MOI.VariableIndex, +) + if _is_variable(variable) + error("Trying to get a backward parameter sensitivity for a variable") + end + p = p_idx(variable) + return get(model.parameter_output_backward, p, 0.0) +end + +# extras to handle model_dirty + +# FIXME Workaround for https://github.com/jump-dev/JuMP.jl/issues/2797 +function _moi_get_result(model::MOI.ModelLike, args...) + if MOI.get(model, MOI.TerminationStatus()) == MOI.OPTIMIZE_NOT_CALLED + throw(OptimizeNotCalled()) + end + return MOI.get(model, args...) +end + +function _moi_get_result(model::MOI.Utilities.CachingOptimizer, args...) + if MOI.Utilities.state(model) == MOI.Utilities.NO_OPTIMIZER + throw(NoOptimizer()) + elseif MOI.get(model, MOI.TerminationStatus()) == MOI.OPTIMIZE_NOT_CALLED + throw(OptimizeNotCalled()) + end + return MOI.get(model, args...) 
+end + +function MOI.get( + model::JuMP.Model, + attr::ReverseParameter, + var_ref::JuMP.VariableRef, +) + JuMP.check_belongs_to_model(var_ref, model) + return _moi_get_result(JuMP.backend(model), attr, JuMP.index(var_ref)) +end + +# TODO: ignore ops that are 0 diff --git a/src/parametric_functions.jl b/src/parametric_functions.jl index bc29a4d..48510db 100644 --- a/src/parametric_functions.jl +++ b/src/parametric_functions.jl @@ -174,7 +174,10 @@ function _parametric_constant( end for term in f.pp param_constant += - term.coefficient * + ( + term.coefficient / + ifelse(term.variable_1 == term.variable_2, 2, 1) + ) * model.parameters[p_idx(term.variable_1)] * model.parameters[p_idx(term.variable_2)] end @@ -211,7 +214,10 @@ function _delta_parametric_constant( model.updated_parameters[p2], ) delta_constant += - term.coefficient * + ( + term.coefficient / + ifelse(term.variable_1 == term.variable_2, 2, 1) + ) * (new_1 * new_2 - model.parameters[p1] * model.parameters[p2]) end end diff --git a/test/jump_diff_param.jl b/test/jump_diff_param.jl new file mode 100644 index 0000000..24e9a67 --- /dev/null +++ b/test/jump_diff_param.jl @@ -0,0 +1,428 @@ +# using Revise + +using JuMP +using DiffOpt +using Test +import ParametricOptInterface as POI +using HiGHS +using SCS + +function test_diff_rhs() + model = Model(() -> POI.Optimizer(DiffOpt.Optimizer(HiGHS.Optimizer()))) + set_silent(model) + @variable(model, x) + @variable(model, p in MOI.Parameter(3.0)) + @constraint(model, cons, x >= 3 * p) + @objective(model, Min, 2x) + optimize!(model) + @test MOI.get(model, MOI.VariablePrimal(), x) ≈ 9 + # the function is + # x(p) = 3p, hence x'(p) = 3 + # differentiate w.r.t. p + MOI.set(model, POI.ForwardParameter(), p, 1) + DiffOpt.forward_differentiate!(model) + @test MOI.get(model, DiffOpt.ForwardVariablePrimal(), x) ≈ 3 + # again with different "direction" + MOI.set(model, POI.ForwardParameter(), p, 2) + DiffOpt.forward_differentiate!(model) + @test MOI.get(model, DiffOpt.ForwardVariablePrimal(), x) ≈ 6 + # + MOI.set(model, POI.ParameterValue(), p, 2.0) + optimize!(model) + @test MOI.get(model, MOI.VariablePrimal(), x) ≈ 6 + # differentiate w.r.t. p + MOI.set(model, POI.ForwardParameter(), p, 1) + DiffOpt.forward_differentiate!(model) + @test MOI.get(model, DiffOpt.ForwardVariablePrimal(), x) ≈ 3 + # again with different "direction" + MOI.set(model, POI.ForwardParameter(), p, 2) + DiffOpt.forward_differentiate!(model) + @test MOI.get(model, DiffOpt.ForwardVariablePrimal(), x) ≈ 6 + # + # test reverse mode + # + MOI.set(model, DiffOpt.ReverseVariablePrimal(), x, 1) + DiffOpt.reverse_differentiate!(model) + @test MOI.get(model, POI.ReverseParameter(), p) ≈ 3 + # again with different "direction" + MOI.set(model, DiffOpt.ReverseVariablePrimal(), x, 2) + DiffOpt.reverse_differentiate!(model) + @test MOI.get(model, POI.ReverseParameter(), p) ≈ 6 + return +end + +function test_diff_vector_rhs() + model = direct_model(POI.Optimizer(DiffOpt.diff_optimizer(SCS.Optimizer))) + set_silent(model) + @variable(model, x) + @variable(model, p in MOI.Parameter(3.0)) + @constraint(model, cons, [x - 3 * p] in MOI.Zeros(1)) + + # FIXME + @constraint(model, fake_soc, [0, 0, 0] in SecondOrderCone()) + + @objective(model, Min, 2x) + optimize!(model) + @test isapprox(MOI.get(model, MOI.VariablePrimal(), x), 9, atol = 1e-3) + # the function is + # x(p) = 3p, hence x'(p) = 3 + # differentiate w.r.t. 
p + for p_val in 0:3 + MOI.set(model, POI.ParameterValue(), p, p_val) + optimize!(model) + @test isapprox( + MOI.get(model, MOI.VariablePrimal(), x), + 3 * p_val, + atol = 1e-3, + ) + for direction in 0:3 + MOI.set(model, POI.ForwardParameter(), p, direction) + DiffOpt.forward_differentiate!(model) + @test isapprox( + MOI.get(model, DiffOpt.ForwardVariablePrimal(), x), + direction * 3, + atol = 1e-3, + ) + # reverse mode + MOI.set(model, DiffOpt.ReverseVariablePrimal(), x, direction) + DiffOpt.reverse_differentiate!(model) + @test isapprox( + MOI.get(model, POI.ReverseParameter(), p), + direction * 3, + atol = 1e-3, + ) + end + end + return +end + +function test_affine_changes() + model = Model(() -> POI.Optimizer(DiffOpt.Optimizer(HiGHS.Optimizer()))) + set_silent(model) + p_val = 3.0 + pc_val = 1.0 + @variable(model, x) + @variable(model, p in MOI.Parameter(p_val)) + @variable(model, pc in MOI.Parameter(pc_val)) + @constraint(model, cons, pc * x >= 3 * p) + @objective(model, Min, 2x) + optimize!(model) + @test MOI.get(model, MOI.VariablePrimal(), x) ≈ 3 * p_val / pc_val + # the function is + # x(p, pc) = 3p / pc, hence dx/dp = 3 / pc, dx/dpc = -3p / pc^2 + # differentiate w.r.t. p + for direction_p in 1:2 + MOI.set(model, POI.ForwardParameter(), p, direction_p) + DiffOpt.forward_differentiate!(model) + @test MOI.get(model, DiffOpt.ForwardVariablePrimal(), x) ≈ + direction_p * 3 / pc_val + end + # update p + p_val = 2.0 + MOI.set(model, POI.ParameterValue(), p, p_val) + optimize!(model) + @test MOI.get(model, MOI.VariablePrimal(), x) ≈ 3 * p_val / pc_val + # differentiate w.r.t. p + for direction_p in 1:2 + MOI.set(model, POI.ForwardParameter(), p, direction_p) + DiffOpt.forward_differentiate!(model) + @test MOI.get(model, DiffOpt.ForwardVariablePrimal(), x) ≈ + direction_p * 3 / pc_val + end + # differentiate w.r.t. 
pc + # stop differentiating with respect to p + direction_p = 0.0 + MOI.set(model, POI.ForwardParameter(), p, direction_p) + for direction_pc in 1:2 + MOI.set(model, POI.ForwardParameter(), pc, direction_pc) + DiffOpt.forward_differentiate!(model) + @test MOI.get(model, DiffOpt.ForwardVariablePrimal(), x) ≈ + -direction_pc * 3 * p_val / pc_val^2 + end + # update pc + pc_val = 2.0 + MOI.set(model, POI.ParameterValue(), pc, pc_val) + optimize!(model) + @test MOI.get(model, MOI.VariablePrimal(), x) ≈ 3 * p_val / pc_val + for direction_pc in 1:2 + MOI.set(model, POI.ForwardParameter(), pc, direction_pc) + DiffOpt.forward_differentiate!(model) + @test MOI.get(model, DiffOpt.ForwardVariablePrimal(), x) ≈ + -direction_pc * 3 * p_val / pc_val^2 + end + # test combines directions + for direction_pc in 1:2, direction_p in 1:2 + MOI.set(model, POI.ForwardParameter(), p, direction_p) + MOI.set(model, POI.ForwardParameter(), pc, direction_pc) + DiffOpt.forward_differentiate!(model) + @test MOI.get(model, DiffOpt.ForwardVariablePrimal(), x) ≈ + -direction_pc * 3 * p_val / pc_val^2 + direction_p * 3 / pc_val + end + return +end + +function test_affine_changes_compact() + model = Model(() -> POI.Optimizer(DiffOpt.Optimizer(HiGHS.Optimizer()))) + set_silent(model) + p_val = 3.0 + pc_val = 1.0 + @variable(model, x) + @variable(model, p in MOI.Parameter(p_val)) + @variable(model, pc in MOI.Parameter(pc_val)) + @constraint(model, cons, pc * x >= 3 * p) + @objective(model, Min, 2x) + # the function is + # x(p, pc) = 3p / pc, hence dx/dp = 3 / pc, dx/dpc = -3p / pc^2 + for p_val in 1:3, pc_val in 1:3 + MOI.set(model, POI.ParameterValue(), p, p_val) + MOI.set(model, POI.ParameterValue(), pc, pc_val) + optimize!(model) + @test MOI.get(model, MOI.VariablePrimal(), x) ≈ 3 * p_val / pc_val + for direction_pc in 0:2, direction_p in 0:2 + MOI.set(model, POI.ForwardParameter(), p, direction_p) + MOI.set(model, POI.ForwardParameter(), pc, direction_pc) + DiffOpt.forward_differentiate!(model) + @test MOI.get(model, DiffOpt.ForwardVariablePrimal(), x) ≈ + -direction_pc * 3 * p_val / pc_val^2 + + direction_p * 3 / pc_val + end + for direction_x in 0:2 + MOI.set(model, DiffOpt.ReverseVariablePrimal(), x, direction_x) + DiffOpt.reverse_differentiate!(model) + @test MOI.get(model, POI.ReverseParameter(), p) ≈ + direction_x * 3 / pc_val + @test MOI.get(model, POI.ReverseParameter(), pc) ≈ + -direction_x * 3 * p_val / pc_val^2 + end + end + return +end + +function test_quadratic_rhs_changes() + model = Model(() -> POI.Optimizer(DiffOpt.Optimizer(HiGHS.Optimizer()))) + set_silent(model) + p_val = 2.0 + q_val = 2.0 + r_val = 2.0 + s_val = 2.0 + t_val = 2.0 + @variable(model, x) + @variable(model, p in MOI.Parameter(p_val)) + @variable(model, q in MOI.Parameter(q_val)) + @variable(model, r in MOI.Parameter(r_val)) + @variable(model, s in MOI.Parameter(s_val)) + @variable(model, t in MOI.Parameter(t_val)) + @constraint(model, cons, 11 * t * x >= 1 + 3 * p * q + 5 * r^2 + 7 * s) + @objective(model, Min, 2x) + # the function is + # x(p, q, r, s, t) = (1 + 3pq + 5r^2 + 7s) / (11t) + # hence + # dx/dp = 3q / (11t) + # dx/dq = 3p / (11t) + # dx/dr = 10r / (11t) + # dx/ds = 7 / (11t) + # dx/dt = - (1 + 3pq + 5r^2 + 7s) / (11t^2) + optimize!(model) + @test MOI.get(model, MOI.VariablePrimal(), x) ≈ + (1 + 3 * p_val * q_val + 5 * r_val^2 + 7 * s_val) / (11 * t_val) + for p_val in 2:3, q_val in 2:3, r_val in 2:3, s_val in 2:3, t_val in 2:3 + MOI.set(model, POI.ParameterValue(), p, p_val) + MOI.set(model, POI.ParameterValue(), q, q_val) + 
MOI.set(model, POI.ParameterValue(), r, r_val) + MOI.set(model, POI.ParameterValue(), s, s_val) + MOI.set(model, POI.ParameterValue(), t, t_val) + optimize!(model) + @test MOI.get(model, MOI.VariablePrimal(), x) ≈ + (1 + 3 * p_val * q_val + 5 * r_val^2 + 7 * s_val) / (11 * t_val) + for dir_p in 0:2, dir_q in 0:2, dir_r in 0:2, dir_s in 0:2, dir_t in 0:2 + MOI.set(model, POI.ForwardParameter(), p, dir_p) + MOI.set(model, POI.ForwardParameter(), q, dir_q) + MOI.set(model, POI.ForwardParameter(), r, dir_r) + MOI.set(model, POI.ForwardParameter(), s, dir_s) + MOI.set(model, POI.ForwardParameter(), t, dir_t) + DiffOpt.forward_differentiate!(model) + @test isapprox( + MOI.get(model, DiffOpt.ForwardVariablePrimal(), x), + dir_p * 3 * q_val / (11 * t_val) + + dir_q * 3 * p_val / (11 * t_val) + + dir_r * 10 * r_val / (11 * t_val) + + dir_s * 7 / (11 * t_val) + + dir_t * ( + -(1 + 3 * p_val * q_val + 5 * r_val^2 + 7 * s_val) / + (11 * t_val^2) + ), + atol = 1e-10, + ) + end + for dir_x in 0:3 + MOI.set(model, DiffOpt.ReverseVariablePrimal(), x, dir_x) + DiffOpt.reverse_differentiate!(model) + @test isapprox( + MOI.get(model, POI.ReverseParameter(), p), + dir_x * 3 * q_val / (11 * t_val), + atol = 1e-10, + ) + @test isapprox( + MOI.get(model, POI.ReverseParameter(), q), + dir_x * 3 * p_val / (11 * t_val), + atol = 1e-10, + ) + @test isapprox( + MOI.get(model, POI.ReverseParameter(), r), + dir_x * 10 * r_val / (11 * t_val), + atol = 1e-10, + ) + @test isapprox( + MOI.get(model, POI.ReverseParameter(), s), + dir_x * 7 / (11 * t_val), + atol = 1e-10, + ) + @test isapprox( + MOI.get(model, POI.ReverseParameter(), t), + dir_x * ( + -(1 + 3 * p_val * q_val + 5 * r_val^2 + 7 * s_val) / + (11 * t_val^2) + ), + atol = 1e-10, + ) + end + end + return +end + +function test_affine_changes_compact_max() + model = Model(() -> POI.Optimizer(DiffOpt.Optimizer(HiGHS.Optimizer()))) + set_silent(model) + p_val = 3.0 + pc_val = 1.0 + @variable(model, x) + @variable(model, p in MOI.Parameter(p_val)) + @variable(model, pc in MOI.Parameter(pc_val)) + @constraint(model, cons, pc * x >= 3 * p) + @objective(model, Max, -2x) + # the function is + # x(p, pc) = 3p / pc, hence dx/dp = 3 / pc, dx/dpc = -3p / pc^2 + for p_val in 1:3, pc_val in 1:3 + MOI.set(model, POI.ParameterValue(), p, p_val) + MOI.set(model, POI.ParameterValue(), pc, pc_val) + optimize!(model) + @test MOI.get(model, MOI.VariablePrimal(), x) ≈ 3 * p_val / pc_val + for direction_pc in 0:2, direction_p in 0:2 + MOI.set(model, POI.ForwardParameter(), p, direction_p) + MOI.set(model, POI.ForwardParameter(), pc, direction_pc) + DiffOpt.forward_differentiate!(model) + @test MOI.get(model, DiffOpt.ForwardVariablePrimal(), x) ≈ + -direction_pc * 3 * p_val / pc_val^2 + + direction_p * 3 / pc_val + end + end + return +end + +function test_diff_affine_objective() + model = Model(() -> POI.Optimizer(DiffOpt.Optimizer(HiGHS.Optimizer()))) + set_silent(model) + p_val = 3.0 + @variable(model, x) + @variable(model, p in MOI.Parameter(p_val)) + @constraint(model, cons, x >= 3) + @objective(model, Min, 2x + 3p) + # x(p, pc) = 3, hence dx/dp = 0 + for p_val in 1:2 + MOI.set(model, POI.ParameterValue(), p, p_val) + optimize!(model) + @test MOI.get(model, MOI.VariablePrimal(), x) ≈ 3 + for direction_p in 0:2 + MOI.set(model, POI.ForwardParameter(), p, direction_p) + DiffOpt.forward_differentiate!(model) + @test MOI.get(model, DiffOpt.ForwardVariablePrimal(), x) ≈ 0.0 + # reverse mode + MOI.set(model, DiffOpt.ReverseVariablePrimal(), x, direction_p) + 
DiffOpt.reverse_differentiate!(model) + @test MOI.get(model, POI.ReverseParameter(), p) ≈ 0.0 + end + end + return +end + +function test_diff_quadratic_objective() + model = Model(() -> POI.Optimizer(DiffOpt.Optimizer(HiGHS.Optimizer()))) + set_silent(model) + p_val = 3.0 + @variable(model, x) + @variable(model, p in MOI.Parameter(p_val)) + @constraint(model, cons, x >= 3) + @objective(model, Min, p * x) + # x(p, pc) = 3, hence dx/dp = 0 + for p_val in 1:2 + MOI.set(model, POI.ParameterValue(), p, p_val) + optimize!(model) + @test MOI.get(model, MOI.VariablePrimal(), x) ≈ 3 + for direction_p in 0:2 + MOI.set(model, POI.ForwardParameter(), p, direction_p) + DiffOpt.forward_differentiate!(model) + @test MOI.get(model, DiffOpt.ForwardVariablePrimal(), x) ≈ 0.0 + # reverse mode + MOI.set(model, DiffOpt.ReverseVariablePrimal(), x, direction_p) + DiffOpt.reverse_differentiate!(model) + @test MOI.get(model, POI.ReverseParameter(), p) ≈ 0.0 + end + end + return +end + +function test_quadratic_objective_qp() + model = Model(() -> POI.Optimizer(DiffOpt.Optimizer(HiGHS.Optimizer()))) + set_silent(model) + p_val = 3.0 + @variable(model, x) + @variable(model, p in MOI.Parameter(p_val)) + @constraint(model, cons, x >= -10) + @objective(model, Min, 3 * p * x + x * x + 5 * p + 7 * p^2) + # 2x + 3p = 0, hence x = -3p/2 + # hence dx/dp = -3/2 + for p_val in 3:3 + MOI.set(model, POI.ParameterValue(), p, p_val) + optimize!(model) + @test MOI.get(model, MOI.VariablePrimal(), x) ≈ -3p_val / 2 + for direction_p in 0:2 + MOI.set(model, POI.ForwardParameter(), p, direction_p) + DiffOpt.forward_differentiate!(model) + @test MOI.get(model, DiffOpt.ForwardVariablePrimal(), x) ≈ + direction_p * (-3 / 2) + # reverse mode + MOI.set(model, DiffOpt.ReverseVariablePrimal(), x, direction_p) + DiffOpt.reverse_differentiate!(model) + @test MOI.get(model, POI.ReverseParameter(), p) ≈ + direction_p * (-3 / 2) + end + end + return +end + +function test_diff_errors() + model = Model(() -> POI.Optimizer(DiffOpt.Optimizer(HiGHS.Optimizer()))) + set_silent(model) + @variable(model, x) + @variable(model, p in MOI.Parameter(3.0)) + @constraint(model, cons, x >= 3 * p) + @objective(model, Min, 2x) + optimize!(model) + @test MOI.get(model, MOI.VariablePrimal(), x) ≈ 9 + + @test_throws ErrorException MOI.set(model, POI.ForwardParameter(), x, 1) + @test_throws ErrorException MOI.set( + model, + DiffOpt.ReverseVariablePrimal(), + p, + 1, + ) + @test_throws ErrorException MOI.get( + model, + DiffOpt.ForwardVariablePrimal(), + p, + ) + @test_throws ErrorException MOI.get(model, POI.ReverseParameter(), x) + + return +end diff --git a/test/moi_tests.jl b/test/moi_tests.jl index da02d38..17249c2 100644 --- a/test/moi_tests.jl +++ b/test/moi_tests.jl @@ -1154,8 +1154,13 @@ function test_qp_parameter_times_variable() @test ≈(MOI.get(optimizer, MOI.VariablePrimal(), x[1]), 0.5, atol = ATOL) @test ≈(MOI.get(optimizer, MOI.VariablePrimal(), x[2]), 29.25, atol = ATOL) MOI.set(optimizer, MOI.ConstraintSet(), cy, MOI.Parameter(2.0)) + # this implies: x1 + x2 + x1^2 + x1*y + y^2 <= 30 + # becomes: 2 * x1 + x2 + x1^2 <= 30 - 4 = 26 + # then x1 = 0 and x2 = 26 and obj = 26 + # is x1 = eps >= 0, x2 = 26 - 2 * eps - eps^2 and + # obj = 26 - 2 * eps - eps^2 + 2 * eps = 26 - eps ^2 (eps == 0 to maximize) MOI.optimize!(optimizer) - @test ≈(MOI.get(optimizer, MOI.ObjectiveValue()), 22.0, atol = ATOL) + @test ≈(MOI.get(optimizer, MOI.ObjectiveValue()), 26.0, atol = ATOL) return end @@ -1198,7 +1203,13 @@ function test_qp_variable_times_parameter() @test 
≈(MOI.get(optimizer, MOI.VariablePrimal(), x[2]), 29.25, atol = ATOL) MOI.set(optimizer, MOI.ConstraintSet(), cy, MOI.Parameter(2.0)) MOI.optimize!(optimizer) - @test ≈(MOI.get(optimizer, MOI.ObjectiveValue()), 22.0, atol = ATOL) + # this implies: x1 + x2 + x1^2 + x1*y + y^2 <= 30 + # becomes: 2 * x1 + x2 + x1^2 <= 30 - 4 = 26 + # then x1 = 0 and x2 = 26 and obj = 26 + # is x1 = eps >= 0, x2 = 26 - 2 * eps - eps^2 and + # obj = 26 - 2 * eps - eps^2 + 2 * eps = 26 - eps ^2 (eps == 0 to maximize) + MOI.optimize!(optimizer) + @test ≈(MOI.get(optimizer, MOI.ObjectiveValue()), 26.0, atol = ATOL) return end @@ -1212,12 +1223,17 @@ function test_qp_parameter_times_parameter() a = [1.0, 1.0] c = [2.0, 1.0] x = MOI.add_variables(optimizer, 2) + # x1 >= 0, x2 >= 0 for x_i in x MOI.add_constraint(optimizer, x_i, MOI.GreaterThan(0.0)) end + # x1 <= 20 MOI.add_constraint(optimizer, x[1], MOI.LessThan(20.0)) + # y == 0 y, cy = MOI.add_constrained_variable(optimizer, MOI.Parameter(0.0)) + # z == 0 z, cz = MOI.add_constrained_variable(optimizer, MOI.Parameter(0.0)) + # x1 + x2 + y^2 + yz + z^2 <= 30 quad_terms = MOI.ScalarQuadraticTerm{Float64}[] push!(quad_terms, MOI.ScalarQuadraticTerm(A[1, 1], y, y)) push!(quad_terms, MOI.ScalarQuadraticTerm(A[1, 2], y, z)) @@ -1228,6 +1244,7 @@ function test_qp_parameter_times_parameter() 0.0, ) MOI.add_constraint(optimizer, constraint_function, MOI.LessThan(30.0)) + # max 2x1 + x2 obj_func = MOI.ScalarAffineFunction(MOI.ScalarAffineTerm.(c, [x[1], x[2]]), 0.0) MOI.set( @@ -1248,16 +1265,28 @@ function test_qp_parameter_times_parameter() 10.0, atol = ATOL, ) + # now x1 + x2 + y^2 + yz + z^2 <= 30 + # implies x1 + x2 <= 26 MOI.set(optimizer, MOI.ConstraintSet(), cy, MOI.Parameter(2.0)) MOI.optimize!(optimizer) - @test isapprox(MOI.get(optimizer, MOI.ObjectiveValue()), 42.0, atol = ATOL) + # hence x1 = 20, x2 = 6 + # and obj = 46 + @test isapprox(MOI.get(optimizer, MOI.ObjectiveValue()), 46.0, atol = ATOL) MOI.set(optimizer, MOI.ConstraintSet(), cz, MOI.Parameter(1.0)) + # now x1 + x2 + y^2 + yz + z^2 <= 30 + # implies x1 + x2 <= 30 - 4 - 2 - 1 = 23 + # hence x1 = 20, x2 = 3 + # and obj = 43 MOI.optimize!(optimizer) - @test isapprox(MOI.get(optimizer, MOI.ObjectiveValue()), 36.0, atol = ATOL) + @test isapprox(MOI.get(optimizer, MOI.ObjectiveValue()), 43.0, atol = ATOL) MOI.set(optimizer, MOI.ConstraintSet(), cy, MOI.Parameter(-1.0)) MOI.set(optimizer, MOI.ConstraintSet(), cz, MOI.Parameter(-1.0)) + # now x1 + x2 + y^2 + yz + z^2 <= 30 + # implies x1 + x2 <= 30 -1 -1 -1 = 27 + # hence x1 = 20, x2 = 7 + # and obj = 47 MOI.optimize!(optimizer) - @test isapprox(MOI.get(optimizer, MOI.ObjectiveValue()), 45.0, atol = ATOL) + @test isapprox(MOI.get(optimizer, MOI.ObjectiveValue()), 47.0, atol = ATOL) return end diff --git a/test/runtests.jl b/test/runtests.jl index cbdff78..41af114 100644 --- a/test/runtests.jl +++ b/test/runtests.jl @@ -23,6 +23,7 @@ end include("moi_tests.jl") include("jump_tests.jl") +include("jump_diff_param.jl") for name in names(@__MODULE__; all = true) if startswith("$name", "test_")