jump-dev / Xpress.jl

A Julia interface to the FICO Xpress Optimization suite
https://www.fico.com/en/products/fico-xpress-optimization
65 stars 30 forks source link

Benders implementation with callbacks #169

Closed klorel closed 2 years ago

klorel commented 2 years ago

Hello, I tried to use callbacks on a dummy Benders decomposition example (see here), but it seems the callback is not called. Any hints to make it work?

using JuMP
import GLPK
import Xpress
# `Printf` is required by `print_iteration` below (`Printf.@sprintf`); it was
# missing from the original script and caused an UndefVarError at runtime.
import Printf

# Objective coefficients of the first-stage (x) and second-stage (y) variables.
c_1 = [1, 4];
c_2 = [2, 3];
dim_x = length(c_1);
dim_y = length(c_2);
# Data of the coupling constraints: A_1 * x + A_2 * y .<= b.
b = [-2; -3];
A_1 = [1 -3; -1 -3];
A_2 = [1 -2; -1 -1];
# Lower bound for the epigraph variable θ in the master problem.
M = -1000;

MAXIMUM_ITERATIONS = 100;
ABSOLUTE_OPTIMALITY_GAP = 1e-6;

MY_OPTIMIZER = Xpress.Optimizer;

# Print one row of the iteration log: the iteration counter `k` right-padded
# to 9 columns, followed by each value in fixed-width scientific notation.
function print_iteration(k, args...)
    fmt(v) = Printf.@sprintf("%12.4e", v)
    columns = [fmt(a) for a in args]
    println(lpad(k, 9), " ", join(columns, " "))
    return
end

"""
    solve_subproblem(x)

Build and solve the second-stage LP for a fixed first-stage decision `x`.

Returns a named tuple with the optimal objective value `obj`, the optimal
second-stage decision `y`, and the duals `π` of the coupling constraints,
which are needed to build the Benders cut in the callback.
"""
function solve_subproblem(x)
    model = Model(MY_OPTIMIZER)
    set_optimizer_attribute(model, "PRESOLVE", 0);
    @variable(model, y[1:dim_y] >= 0)
    con = @constraint(model, A_2 * y .<= b - A_1 * x)
    @objective(model, Min, c_2' * y)
    optimize!(model)
    # `@assert` may be disabled at higher optimization levels; fail loudly
    # instead when the subproblem is not solved to optimality.
    termination_status(model) == OPTIMAL ||
        error("subproblem did not solve to optimality: $(termination_status(model))")
    return (obj = objective_value(model), y = value.(y), π = dual.(con))
end

"""
    my_callback(cb_data)

A callback that implements Benders decomposition. Note how similar it is to the
inner loop of the iterative method.
"""
function my_callback(cb_data)
    global k += 1
    println("calling my_callback")
    x_k = callback_value.(cb_data, x)
    θ_k = callback_value(cb_data, θ)
    lower_bound = c_1' * x_k + θ_k
    ret = solve_subproblem(x_k)
    upper_bound = c_1' * x_k + c_2' * ret.y
    gap = (upper_bound - lower_bound) / upper_bound
    print_iteration(k, lower_bound, upper_bound, gap)
    if gap < ABSOLUTE_OPTIMALITY_GAP
        println("Terminating with the optimal solution")
        return
    end
    cut = @build_constraint(θ >= ret.obj + -ret.π' * A_1 * (x .- x_k))
    MOI.submit(model, MOI.LazyConstraint(cb_data), cut)
    return
end

# Master (first-stage) problem: integer x and the epigraph variable θ.
lazy_model = Model(MY_OPTIMIZER)

set_optimizer_attribute(lazy_model, "PRESOLVE", 0);
# Xpress must have its heuristics disabled for the lazy-constraint callback
# to be called (this was the root cause of the callback never firing).
set_optimizer_attribute(lazy_model, "HEURSTRATEGY", 0);
@variable(lazy_model, x[1:dim_x] >= 0, Int)
@variable(lazy_model, θ >= M)
@objective(lazy_model, Min, θ)
print(lazy_model)
# Iteration counter, incremented inside the callback.
k = 0

# Register the Benders callback, then solve.
MOI.set(lazy_model, MOI.LazyConstraintCallback(), my_callback)

optimize!(lazy_model)
x_optimal = value.(x)
rafabench commented 2 years ago

Hi @klorel, in this case you should add the attribute:

set_optimizer_attribute(model, "HEURSTRATEGY", 0);

Also, for some reason, this broadcast didn't work

x_k = callback_value.(cb_data, x)

Just change to something like

x_k = [callback_value(cb_data, xk) for xk in x]

I got the same result as GLPK

x_optimal = [-0.0, 1.0]
y_optimal = [0.0, 0.0]
odow commented 2 years ago

for some reason, this broadcast didn't work

You probably need to add Base.broadcastable(x::CallbackData) = Ref(x) after https://github.com/jump-dev/Xpress.jl/blob/6f95f15d1c87eed35419c0e6e09098efa78691c9/src/xprs_callbacks.jl#L2-L6

See GLPK: https://github.com/jump-dev/GLPK.jl/blob/dee196eabbc87da2a3bfcf210e3ebe17b302a178/src/MOI_wrapper/MOI_wrapper.jl#L198-L205

rafabench commented 2 years ago

You probably need to add Base.broadcastable(x::CallbackData) = Ref(x) after

Thanks! That was it. I will add a PR fixing that.

klorel commented 2 years ago

Hi everyone, Thanks for the help.

I managed to do it with Xpress, but I had to put the callback definition after the creation of the lazy_model (see below). Do you know how to get access to the JuMP model used in MOI.submit from the cb_data?

using JuMP
import GLPK
import Xpress
import Printf

# Objective coefficients of the first-stage (x) and second-stage (y) variables.
c_1 = [1, 4];
c_2 = [2, 3];
dim_x = length(c_1);
dim_y = length(c_2);
# Data of the coupling constraints: A_1 * x + A_2 * y .<= b.
b = [-2; -3];
A_1 = [1 -3; -1 -3];
A_2 = [1 -2; -1 -1];
# Lower bound for the epigraph variable θ in the master problem.
M = -1000;

MAXIMUM_ITERATIONS = 100;
ABSOLUTE_OPTIMALITY_GAP = 1e-6;

# Switch the optimizer here to compare solvers.
MY_OPTIMIZER = Xpress.Optimizer;
# MY_OPTIMIZER = GLPK.Optimizer;

# Print one row of the iteration log: the iteration counter `k` right-padded
# to 9 columns, followed by each value in fixed-width scientific notation.
function print_iteration(k, args...)
    format(v) = Printf.@sprintf("%12.4e", v)
    row = join(map(format, args), " ")
    println(lpad(k, 9), " ", row)
    return
end

"""
    solve_subproblem(x)

Build and solve the second-stage LP for a fixed first-stage decision `x`.

Returns a named tuple with the optimal objective value `obj`, the optimal
second-stage decision `y`, and the duals `π` of the coupling constraints,
which are needed to build the Benders cut in the callback.
"""
function solve_subproblem(x)
    model = Model(MY_OPTIMIZER)
    set_optimizer_attribute(model, "PRESOLVE", 0);
    set_optimizer_attribute(model, "HEURSTRATEGY", 0);
    @variable(model, y[1:dim_y] >= 0)
    con = @constraint(model, A_2 * y .<= b - A_1 * x)
    @objective(model, Min, c_2' * y)
    optimize!(model)
    # `@assert` may be disabled at higher optimization levels; fail loudly
    # instead when the subproblem is not solved to optimality.
    termination_status(model) == OPTIMAL ||
        error("subproblem did not solve to optimality: $(termination_status(model))")
    return (obj = objective_value(model), y = value.(y), π = dual.(con))
end

# Master (first-stage) problem: integer x and the epigraph variable θ.
lazy_model = Model(MY_OPTIMIZER)

set_optimizer_attribute(lazy_model, "PRESOLVE", 0);
# Disabling Xpress heuristics is required for the lazy-constraint callback
# to fire (per the discussion in this thread).
set_optimizer_attribute(lazy_model, "HEURSTRATEGY", 0);

@variable(lazy_model, x[1:dim_x] >= 0, Int)
@variable(lazy_model, θ >= M)
@objective(lazy_model, Min, θ)
print(lazy_model)
# Iteration counter, incremented inside the callback.
k = 0

"""
    my_callback(cb_data)

A callback that implements Benders decomposition. Note how similar it is to the
inner loop of the iterative method.
"""
function my_callback(cb_data)
    global k += 1
    println("calling my_callback")
    # x_k = callback_value.(cb_data, x)
    x_k = [callback_value(cb_data, xk) for xk in x]
    θ_k = callback_value(cb_data, θ)
    lower_bound = c_1' * x_k + θ_k
    ret = solve_subproblem(x_k)
    upper_bound = c_1' * x_k + c_2' * ret.y
    gap = (upper_bound - lower_bound) / upper_bound
    print_iteration(k, lower_bound, upper_bound, gap)
    if gap < ABSOLUTE_OPTIMALITY_GAP
        println("Terminating with the optimal solution")
        return
    end
    cut = @build_constraint(θ >= ret.obj + -ret.π' * A_1 * (x .- x_k))
    MOI.submit(lazy_model, MOI.LazyConstraint(cb_data), cut)
    return
end

# Register the Benders callback on the master model, then solve.
MOI.set(lazy_model, MOI.LazyConstraintCallback(), my_callback)

optimize!(lazy_model)
# Recover the optimal first-stage decision.
x_optimal = value.(x)
odow commented 2 years ago

how to get access to the Jump model used in the MOI.submit from the cb_data

You can't.

but I had to put the callback definition after the creation of the lazy_model

Yes. This is what you need to do. Just refer to lazy_model using a closure.