Open odow opened 1 year ago
Reproducible example:
julia> using JuMP, Alpine, Juniper
julia> model = Model(Alpine.Optimizer);
julia> set_attribute(model, "minlp_solver", optimizer_with_attributes(Juniper.Optimizer))
julia> @variable(model, x, Bin)
x
julia> @NLconstraint(model, x^2 <= 1.0)
x ^ 2.0 - 1.0 ≤ 0
julia> optimize!(model)
ERROR: type Symbol has no field head
Stacktrace:
[1] getproperty
@ ./Base.jl:37 [inlined]
[2] traverse_expr_linear_to_affine(expr::Symbol, lhscoeffs::Vector{…}, lhsvars::Vector{…}, rhs::Float64, bufferVal::Nothing, bufferVar::Nothing, sign::Float64, coef::Float64, level::Int64)
Is there another free minlp_solver that can be used with Alpine, such as Bonmin? I get the error below when I try to set Bonmin.
using JuMP
N1 = 5
N = 10
zp = 2
M = zp*N
Br = rand(M,N1)
Bi = rand(M,N1)
s = rand(M,1)
#using SCIP
#model = Model(SCIP.Optimizer)
using JuMP, Alpine, Ipopt, HiGHS, Juniper
ipopt = optimizer_with_attributes(Ipopt.Optimizer, "print_level" => 0)
highs = optimizer_with_attributes(HiGHS.Optimizer, "output_flag" => false)
juniper = optimizer_with_attributes(
Juniper.Optimizer,
MOI.Silent() => true,
"mip_solver" => highs,
"nl_solver" => ipopt,
)
#=
model = Model(
optimizer_with_attributes(
Alpine.Optimizer,
"nlp_solver" => ipopt,
"mip_solver" => highs,
"minlp_solver" => juniper
),
)
=#
using AmplNLWriter, Bonmin_jll
model = Model(
optimizer_with_attributes(
Alpine.Optimizer,
"nlp_solver" => ipopt,
"mip_solver" => highs,
"minlp_solver" => AmplNLWriter.Optimizer(Bonmin_jll.amplexe), # juniper
),
)
#using MAiNGO
#model=Model(optimizer_with_attributes(MAiNGO.Optimizer, "epsilonA"=> 1e-3))
@variable(model, y[1:N1], Bin)
@expression(model, yh[n=1:N1], y[n]-.5)
@expression(model, spec_r, Br * yh)
@expression(model, spec_i, Bi * yh)
@NLexpression(model, spec_sq_mag[m in 1:M], spec_r[m]^2 + spec_i[m]^2)
@variable(model, gamma)
@NLconstraint(model, gamma >= sum((spec_sq_mag[m]-s[m])^2 for m in 1:M))
@objective(model, Min, gamma)
optimize!(model)
---------------------------------------------
ERROR: LoadError: MethodError: Cannot `convert` an object of type AmplNLWriter.Optimizer to an object of type MathOptInterface.OptimizerWithAttributes
Closest candidates are:
convert(::Type{T}, ::T) where T
@ Base Base.jl:84
MathOptInterface.OptimizerWithAttributes(::Any, ::Vector{Pair{MathOptInterface.AbstractOptimizerAttribute, Any}})
@ MathOptInterface ~/.julia/packages/MathOptInterface/2CULs/src/instantiate.jl:22
MathOptInterface.OptimizerWithAttributes(::Any, Pair...) where N
@ MathOptInterface ~/.julia/packages/MathOptInterface/2CULs/src/instantiate.jl:41
...
Stacktrace:
[1] convert(::Type{Union{Nothing, MathOptInterface.OptimizerWithAttributes}}, x::AmplNLWriter.Optimizer)
@ Base ./some.jl:37
[2] setproperty!(x::Alpine.OptimizerOptions, f::Symbol, v::AmplNLWriter.Optimizer)
@ Base ./Base.jl:40
[3] set_option
@ ~/.julia/packages/Alpine/2DP5q/src/MOI_wrapper/MOI_wrapper.jl:173 [inlined]
[4] set(model::Alpine.Optimizer, param::MathOptInterface.RawOptimizerAttribute, value::AmplNLWriter.Optimizer)
@ Alpine ~/.julia/packages/Alpine/2DP5q/src/MOI_wrapper/MOI_wrapper.jl:257
[5] _instantiate_and_check(optimizer_constructor::MathOptInterface.OptimizerWithAttributes)
@ MathOptInterface ~/.julia/packages/MathOptInterface/2CULs/src/instantiate.jl:120
[6] instantiate(optimizer_constructor::Any; with_bridge_type::Type{Float64}, with_cache_type::Nothing)
@ MathOptInterface ~/.julia/packages/MathOptInterface/2CULs/src/instantiate.jl:175
[7] set_optimizer(model::Model, optimizer_constructor::Any; add_bridges::Bool)
@ JuMP ~/.julia/packages/JuMP/Gwn88/src/optimizer_interface.jl:365
[8] set_optimizer
@ ~/.julia/packages/JuMP/Gwn88/src/optimizer_interface.jl:358 [inlined]
[9] Model(optimizer_factory::Any; add_bridges::Bool)
@ JuMP ~/.julia/packages/JuMP/Gwn88/src/JuMP.jl:182
[10] Model(optimizer_factory::Any)
@ JuMP ~/.julia/packages/JuMP/Gwn88/src/JuMP.jl:174
[11] top-level scope
@ ~/julia_code/Gen7_BPM/simple_model.jl:37
in expression starting at /home/stuart/julia_code/Gen7_BPM/simple_model.jl:37
I may have been able to set the minlp_solver to Bonmin using the code below. However, I still get the same "type Symbol has no field head" error that I observed earlier when using Juniper.
using AmplNLWriter, Bonmin_jll
bonmin = optimizer_with_attributes(() -> AmplNLWriter.Optimizer(Bonmin_jll.amplexe))
model = Model(
optimizer_with_attributes(
Alpine.Optimizer,
"nlp_solver" => ipopt,
"mip_solver" => highs,
#"minlp_solver" => AmplNLWriter.Optimizer(Bonmin_jll.amplexe), # juniper
"minlp_solver" => bonmin,
),
)
ERROR: LoadError: type Symbol has no field head
Stacktrace:
  [1] getproperty
    @ ./Base.jl:37 [inlined]
  [2] traverse_expr_linear_to_affine(expr::Symbol, lhscoeffs::Vector{Any}, lhsvars::Vector{Any}, rhs::Float64, bufferVal::Nothing, bufferVar::Nothing, sign::Float64, coef::Float64, level::Int64)
    @ Alpine ~/.julia/packages/Alpine/2DP5q/src/nlexpr.jl:356
  [3] traverse_expr_linear_to_affine(expr::Expr, lhscoeffs::Vector{Any}, lhsvars::Vector{Any}, rhs::Float64, bufferVal::Nothing, bufferVar::Nothing, sign::Float64, coef::Float64, level::Int64) (repeats 4 times)
    @ Alpine ~/.julia/packages/Alpine/2DP5q/src/nlexpr.jl:374
  [4] traverse_expr_linear_to_affine(expr::Expr)
    @ Alpine ~/.julia/packages/Alpine/2DP5q/src/nlexpr.jl:332
  [5] expr_linear_to_affine(expr::Expr)
    @ Alpine ~/.julia/packages/Alpine/2DP5q/src/nlexpr.jl:287
  [6] expr_conversion(m::Alpine.Optimizer)
    @ Alpine ~/.julia/packages/Alpine/2DP5q/src/nlexpr.jl:103
  [7] process_expr(m::Alpine.Optimizer)
    @ Alpine ~/.julia/packages/Alpine/2DP5q/src/nlexpr.jl:10
  [8] load!(m::Alpine.Optimizer)
    @ Alpine ~/.julia/packages/Alpine/2DP5q/src/main_algorithm.jl:110
  [9] optimize!(m::Alpine.Optimizer)
    @ Alpine ~/.julia/packages/Alpine/2DP5q/src/main_algorithm.jl:151
 [10] optimize!
    @ ~/.julia/packages/MathOptInterface/2CULs/src/Bridges/bridge_optimizer.jl:380 [inlined]
 [11] optimize!
    @ ~/.julia/packages/MathOptInterface/2CULs/src/MathOptInterface.jl:85 [inlined]
 [12] optimize!(m::MathOptInterface.Utilities.CachingOptimizer{MathOptInterface.Bridges.LazyBridgeOptimizer{Alpine.Optimizer}, MathOptInterface.Utilities.UniversalFallback{MathOptInterface.Utilities.Model{Float64}}})
    @ MathOptInterface.Utilities ~/.julia/packages/MathOptInterface/2CULs/src/Utilities/cachingoptimizer.jl:316
 [13] optimize!(model::Model; ignore_optimize_hook::Bool, _differentiation_backend::MathOptInterface.Nonlinear.SparseReverseMode, kwargs::@Kwargs{})
    @ JuMP ~/.julia/packages/JuMP/Gwn88/src/optimizer_interface.jl:457
 [14] optimize!(model::Model)
    @ JuMP ~/.julia/packages/JuMP/Gwn88/src/optimizer_interface.jl:409
 [15] top-level scope
    @ ~/julia_code/Gen7_BPM/simple_model.jl:62
in expression starting at /home/stuart/julia_code/Gen7_BPM/simple_model.jl:62
The bug is in Alpine, so changing the solver won't work.
Just use Bonmin directly:
julia> using JuMP, AmplNLWriter, Bonmin_jll
julia> N1, N, zp = 5, 10, 2
(5, 10, 2)
julia> M = zp * N
20
julia> Br, Bi, s = rand(M, N1), rand(M, N1), rand(M, 1);
julia> model = Model(() -> AmplNLWriter.Optimizer(Bonmin_jll.amplexe));
julia> set_attribute(model, "print_level", 0)
julia> @variable(model, y[1:N1], Bin);
julia> @expression(model, yh, y .- 0.5);
julia> @expression(model, spec_r, Br * yh);
julia> @expression(model, spec_i, Bi * yh);
julia> @expression(model, spec_sq_mag, spec_r.^2 .+ spec_i.^2);
julia> @expression(model, gamma, sum((spec_sq_mag .- s).^2));
julia> @objective(model, Min, gamma);
julia> optimize!(model)
Bonmin 1.8.9 using Cbc 2.10.8 and Ipopt 3.14.13
bonmin: print_level=0
******************************************************************************
This program contains Ipopt, a library for large-scale nonlinear optimization.
Ipopt is released as open source code under the Eclipse Public License (EPL).
For more information visit https://github.com/coin-or/Ipopt
******************************************************************************
NLP0012I
Num Status Obj It time Location
NLP0014I 1 OPT 1.9649522 9 0.005499
NLP0014I 2 OPT 2.056342 11 0.005894
NLP0014I 3 OPT 2.2591955 6 0.003932
NLP0014I 4 OPT 2.24508 8 0.004399
NLP0014I 5 OPT 2.137753 16 0.009369
NLP0014I 6 OPT 2.056342 10 0.006097
NLP0014I 7 OPT 2.1016623 7 0.004559
NLP0014I 8 OPT 1.9836419 6 0.003492
NLP0014I 9 OPT 2.056342 18 0.011059
Cbc0010I After 0 nodes, 1 on tree, 1e+50 best solution, best possible -1.7976931e+308 (0.05 seconds)
NLP0014I 10 OPT 2.137753 16 0.008975
NLP0014I 11 OPT 2.24508 8 0.004563
NLP0014I 12 OPT 2.2937578 11 0.006628
NLP0014I 13 OPT 2.9417398 7 0.003827
NLP0014I 14 OPT 2.2944204 8 0.004557
NLP0014I 15 OPT 2.5390929 8 0.004841
NLP0014I 16 OPT 2.254424 8 0.004718
NLP0012I
Num Status Obj It time Location
NLP0014I 1 OPT 2.2544241 0 0
Cbc0004I Integer solution of 2.2544241 found after 51 iterations and 5 nodes (0.09 seconds)
NLP0014I 17 OPT 2.8824657 10 0.005562
NLP0014I 18 OPT 2.3713003 8 0.00458
Cbc0001I Search completed - best objective 2.254424102104494, took 69 iterations and 7 nodes (0.10 seconds)
Cbc0032I Strong branching done 5 times (97 iterations), fathomed 0 nodes and fixed 0 variables
Cbc0035I Maximum depth 2, 0 variables fixed on reduced cost
"Finished"
julia> solution_summary(model; verbose = true)
* Solver : AmplNLWriter
* Status
Result count : 1
Termination status : LOCALLY_SOLVED
Message from the solver:
"bonmin: Optimal"
* Candidate solution (result #1)
Primal status : FEASIBLE_POINT
Dual status : NO_SOLUTION
Objective value : 2.25442e+00
Dual objective value : 2.25442e+00
Primal solution :
y[1] : 1.00000e+00
y[2] : 1.00000e+00
y[3] : 0.00000e+00
y[4] : 0.00000e+00
y[5] : 0.00000e+00
* Work counters
Solve time (sec) : 2.62027e-01
P.S. Please use code quoting when posting.
https://github.com/lanl-ansi/Alpine.jl/issues/223 rises from the grave.
I don't remember if we support user-defined functions, but we should at least throw a nicer error message: