Skip to content
New issue

Have a question about this project? Sign up for a free GitHub account to open an issue and contact its maintainers and the community.

By clicking “Sign up for GitHub”, you agree to our terms of service and privacy statement. We’ll occasionally send you account-related emails.

Already on GitHub? Sign in to your account

WIP: Add support for maximisation #71

Merged
merged 4 commits into from
May 17, 2019
Merged
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
21 changes: 14 additions & 7 deletions src/decomposition.jl
Original file line number Diff line number Diff line change
@@ -1,12 +1,13 @@
# Create a local artificial variable for constraint `constr_id` in formulation `f`.
# Artificial variables penalise infeasibility, so the cost sign follows the
# objective sense: +10.0 when minimising, -10.0 when maximising.
# (This span in the diff fused the removed and added cost lines; this is the
# post-merge version keeping only the added lines.)
set_loc_art_var(f::Formulation, constr_id::ConstrId) = setvar!(
    f, string("local_art_", constr_id), MastArtVar;
    cost = (getobjsense(f) == MinSense ? 10.0 : -10.0),
    lb = 0.0, ub = Inf, kind = Continuous, sense = Positive
)

# Create a global positive/negative artificial variable in formulation `f`.
# The large penalty keeps artificial variables out of optimal solutions; its
# sign follows the objective sense (positive cost when minimising, negative
# when maximising).
# (This span in the diff fused the removed and added cost lines; this is the
# post-merge version keeping only the added lines.)
set_glob_art_var(f::Formulation, is_pos::Bool) = setvar!(
    f, string("global_", (is_pos ? "pos" : "neg"), "_art_var"),
    MastArtVar; cost = (getobjsense(f) == MinSense ? 100000.0 : -100000.0),
    lb = 0.0, ub = Inf, kind = Continuous, sense = Positive
)

function initialize_local_art_vars(master::Formulation,
Expand All @@ -15,11 +16,15 @@ function initialize_local_art_vars(master::Formulation,
for (constr_id, constr) in constrs_in_form
v = setvar!(
master, string("local_art_of_", getname(constr)),
MastArtVar; cost = 10000.0, lb = 0.0, ub = Inf,
# cost = getincval(constr), lb = 0.0, ub = Inf,
kind = Continuous, sense = Positive
MastArtVar;
cost = (getobjsense(master) == MinSense ? 10000.0 : -10000.0),
lb = 0.0, ub = Inf, kind = Continuous, sense = Positive
)
matrix[constr_id, getid(v)] = 1.0
if getsense(getcurdata(constr)) == Greater
matrix[constr_id, getid(v)] = 1.0
elseif getsense(getcurdata(constr)) == Less
matrix[constr_id, getid(v)] = -1.0
end
end
return
end
Expand Down Expand Up @@ -181,6 +186,7 @@ function reformulate!(prob::Problem, annotations::Annotations,
# Create master formulation
master_form = Formulation{DwMaster}(
prob.form_counter; parent_formulation = reformulation,
obj_sense = getobjsense(get_original_formulation(prob)),
moi_optimizer = prob.master_factory()
)
setmaster!(reformulation, master_form)
Expand All @@ -198,6 +204,7 @@ function reformulate!(prob::Problem, annotations::Annotations,
elseif BD.getformulation(annotation) == BD.DwPricingSp
f = Formulation{DwSp}(
prob.form_counter; parent_formulation = master_form,
obj_sense = getobjsense(master_form),
moi_optimizer = prob.pricing_factory()
)
formulations[BD.getid(annotation)] = f
Expand Down
6 changes: 5 additions & 1 deletion src/formulation.jl
Original file line number Diff line number Diff line change
Expand Up @@ -480,7 +480,11 @@ function computereducedcost(form::Formulation, var_id, dual_sol::DualSolution)
coefficient_matrix = getcoefmatrix(form)
for (constr_id, dual_val) in getsol(dual_sol)
coeff = coefficient_matrix[constr_id, var_id]
rc = rc - dual_val * coeff
if getobjsense(form) == MinSense
rc = rc - dual_val * coeff
else
rc = rc + dual_val * coeff
end
end
return rc
end
Expand Down
1 change: 1 addition & 0 deletions src/node.jl
Original file line number Diff line number Diff line change
Expand Up @@ -84,6 +84,7 @@ get_solver_record!(n::Node, S::Type{<:AbstractSolver}) = n.solver_records[S]
"""
    to_be_pruned(n::Node)

Return `true` when the node's LP gap or IP gap is closed (within a `1e-7`
tolerance), i.e. the node does not need further exploration.
"""
function to_be_pruned(n::Node)
    gap_closed(g) = g <= 0.0000001
    return gap_closed(lp_gap(n.incumbents)) || gap_closed(ip_gap(n.incumbents))
end

Expand Down
1 change: 1 addition & 0 deletions src/solsandbounds.jl
Original file line number Diff line number Diff line change
Expand Up @@ -87,6 +87,7 @@ Base.:+(b1::B, b2::B) where {B <: AbstractBound} = B(float(b1) + float(b2))
Base.:/(b1::B, b2::B) where {B <: AbstractBound} = B(float(b1) / float(b2))

# Order bounds against reals (in both argument orders) and against each other
# by their underlying float value, so comparisons like `bound < 0.0` and
# `0.0 < bound` both work.
Base.isless(b::AbstractBound, r::Real) = b.value < r
Base.isless(r::Real, b::AbstractBound) = r < b.value
Base.isless(b1::B, b2::B) where {B <: AbstractBound} = float(b1) < float(b2)

abstract type AbstractSolution end
Expand Down
13 changes: 8 additions & 5 deletions src/solvers/colgen.jl
Original file line number Diff line number Diff line change
Expand Up @@ -55,7 +55,7 @@ function insert_cols_in_master!(master_form::Formulation,
nb_of_gen_col = 0

for sp_sol in sp_sols
if getvalue(sp_sol) < -0.0001 # TODO use tolerance
if contrib_improves_mlp(getbound(sp_sol))
nb_of_gen_col += 1
ref = getvarcounter(master_form) + 1
name = string("MC", sp_uid, "_", ref)
Expand Down Expand Up @@ -91,16 +91,19 @@ function insert_cols_in_master!(master_form::Formulation,
return nb_of_gen_col
end

# Whether a pricing-subproblem solution can improve the master LP: its reduced
# cost must be negative (beyond a 1e-8 tolerance) when minimising, and positive
# when maximising. Dispatch on the bound's sense parameter selects the test.
contrib_improves_mlp(sp_primal_bound::PrimalBound{MinSense}) = (sp_primal_bound < 0.0 - 1e-8)
contrib_improves_mlp(sp_primal_bound::PrimalBound{MaxSense}) = (sp_primal_bound > 0.0 + 1e-8)

"""
    compute_pricing_db_contrib(sp_form, sp_sol_primal_bound, sp_lb, sp_ub)

Compute the contribution of a pricing subproblem to the dual bound. Since
convexity constraints are not automated and there is no stabilization, the
contribution is just the reduced cost times the multiplicity: the subproblem's
upper multiplicity `sp_ub` when the solution improves the master LP, its lower
multiplicity `sp_lb` otherwise.

(This span in the diff fused the removed `sp_sol_value` branch with the added
`contrib_improves_mlp` branch; this is the post-merge version keeping only the
added lines. The "multiplicty" comment typo is also fixed.)
"""
function compute_pricing_db_contrib(sp_form::Formulation,
                                    sp_sol_primal_bound::PrimalBound{S},
                                    sp_lb::Float64,
                                    sp_ub::Float64) where {S}
    if contrib_improves_mlp(sp_sol_primal_bound)
        contrib = sp_sol_primal_bound * sp_ub
    else
        contrib = sp_sol_primal_bound * sp_lb
    end
    return contrib
end
Expand Down
25 changes: 25 additions & 0 deletions test/models/gap.jl
Original file line number Diff line number Diff line change
Expand Up @@ -34,3 +34,28 @@ function gap_with_penalties(data)

return gap
end

"""
    maximization_gap(data)

Build a JuMP/BlockDecomposition model of a generalized assignment problem with a
*maximisation* objective (job-machine costs reused as rewards), to exercise
Coluna's support for `Max` objectives. `data` is assumed to provide `cost`,
`capacity`, `machines`, `jobs` and `weight` fields — TODO confirm against the
test data definition, which is not visible here.
"""
function maximization_gap(data)
# Coluna optimizer with GLPK for both master and pricing subproblems.
gap = BlockModel(with_optimizer(Coluna.Optimizer, # params = params,
master_factory = with_optimizer(GLPK.Optimizer),
pricing_factory = with_optimizer(GLPK.Optimizer)),
bridge_constraints=false
)

rewards = data.cost
capacities = data.capacity

# Machines form the decomposition axis: one pricing subproblem per machine.
@axis(M, data.machines)

@variable(gap, x[m in M, j in data.jobs], Bin)
# Cover constraints are `<=` (not `==`): a job may stay unassigned since
# assigning it can only increase the objective when its reward is positive.
@constraint(gap, cov[j in data.jobs], sum(x[m,j] for m in M) <= 1)

# Knapsack capacity constraint for each machine.
@constraint(gap, knp[m in M],
sum(data.weight[j,m]*x[m,j] for j in data.jobs) <= capacities[m])

@objective(gap, Max, sum(rewards[j,m]*x[m,j] for m in M, j in data.jobs))

@dantzig_wolfe_decomposition(gap, dec, M)

return gap
end
8 changes: 8 additions & 0 deletions test/runtests.jl
Original file line number Diff line number Diff line change
Expand Up @@ -35,6 +35,14 @@ end
#@show JuMP.objective_value(problem)
end

@testset "gap with maximisation objective function" begin
# Expected optimum: JuMP.objective_value(problem) == 416.4 (not asserted yet;
# the @show below is left commented out).
problem = maximization_gap(data_gap)
println("\e[1;42m GAP maximization objective function \e[00m")
JuMP.optimize!(problem)
#@show JuMP.objective_value(problem)
end

# model, x = sgap_5_100()
# JuMP.optimize!(model)

Expand Down