SciML / Optimization.jl

Mathematical Optimization in Julia. Local, global, gradient-based and derivative-free. Linear, Quadratic, Convex, Mixed-Integer, and Nonlinear Optimization in one simple, fast, and differentiable interface.
https://docs.sciml.ai/Optimization/stable/
MIT License
725 stars 84 forks source link

Have to specify derivatives for Optim.ZerothOrderOptimizer with bounds #548

Closed DanielVandH closed 1 year ago

DanielVandH commented 1 year ago

The following code leads to an error:

# Minimal reproduction: a box-constrained problem solved with a
# derivative-free optimizer (NelderMead) errors, even though no
# gradient should be required for a zeroth-order method.
using Optimization
# Classic Rosenbrock test function; p holds the (a, b) parameters.
rosenbrock(u, p) = (p[1] - u[1])^2 + p[2] * (u[2] - u[1]^2)^2
u0 = zeros(2)
p = [1.0, 100.0]
# The lb/ub bounds cause OptimizationOptimJL to wrap the optimizer in
# Optim.Fminbox, which is where the spurious gradient check comes from.
prob = OptimizationProblem(rosenbrock, u0, p, lb=[0.5, 0.5], ub=[1.5, 1.5])
using OptimizationOptimJL
sol = solve(prob, NelderMead())
ERROR: Use OptimizationFunction to pass the derivatives or automatically generate them with one of the autodiff backends
Stacktrace:
  [1] error(s::String)
    @ Base .\error.jl:35
  [2] OptimizationOptimJL.OptimJLOptimizationCache(prob::OptimizationProblem{true, OptimizationFunction{true, SciMLBase.NoAD, typeof(rosenbrock), Nothing, Nothing, Nothing, Nothing, Nothing, Nothing, Nothing, Nothing, Nothing, Nothing, Nothing, Nothing, Nothing, typeof(SciMLBase.DEFAULT_OBSERVED_NO_TIME), Nothing, Nothing, Nothing, Nothing, Nothing, Nothing, Nothing}, Vector{Float64}, Vector{Float64}, Vector{Float64}, Vector{Float64}, Nothing, Nothing, Nothing, Nothing, Base.Pairs{Symbol, Union{}, Tuple{}, NamedTuple{(), Tuple{}}}}, opt::Fminbox{NelderMead{Optim.AffineSimplexer, Optim.AdaptiveParameters}, Float64, Optim.var"#49#51"}, data::Base.Iterators.Cycle{Tuple{Optimization.NullData}}; progress::Bool, callback::Function, kwargs::Base.Pairs{Symbol, Nothing, NTuple{4, Symbol}, NamedTuple{(:maxiters, :maxtime, :abstol, :reltol), NTuple{4, Nothing}}})
    @ OptimizationOptimJL c:\Users\User\.julia\packages\OptimizationOptimJL\uRfW9\src\OptimizationOptimJL.jl:46
  [3] __init(prob::OptimizationProblem{true, OptimizationFunction{true, SciMLBase.NoAD, typeof(rosenbrock), Nothing, Nothing, Nothing, Nothing, Nothing, Nothing, Nothing, Nothing, Nothing, Nothing, Nothing, Nothing, Nothing, typeof(SciMLBase.DEFAULT_OBSERVED_NO_TIME), Nothing, Nothing, Nothing, Nothing, Nothing, Nothing, Nothing}, Vector{Float64}, Vector{Float64}, Vector{Float64}, Vector{Float64}, Nothing, Nothing, Nothing, Nothing, Base.Pairs{Symbol, Union{}, Tuple{}, NamedTuple{(), Tuple{}}}}, opt::NelderMead{Optim.AffineSimplexer, Optim.AdaptiveParameters}, data::Base.Iterators.Cycle{Tuple{Optimization.NullData}}; callback::Function, maxiters::Nothing, maxtime::Nothing, abstol::Nothing, reltol::Nothing, progress::Bool, kwargs::Base.Pairs{Symbol, Union{}, Tuple{}, NamedTuple{(), Tuple{}}})
    @ OptimizationOptimJL C:\Users\User\.julia\packages\OptimizationOptimJL\uRfW9\src\OptimizationOptimJL.jl:154
  [4] __init(prob::OptimizationProblem{true, OptimizationFunction{true, SciMLBase.NoAD, typeof(rosenbrock), Nothing, Nothing, Nothing, Nothing, Nothing, Nothing, Nothing, Nothing, Nothing, Nothing, Nothing, Nothing, Nothing, typeof(SciMLBase.DEFAULT_OBSERVED_NO_TIME), Nothing, Nothing, Nothing, Nothing, Nothing, Nothing, Nothing}, Vector{Float64}, Vector{Float64}, Vector{Float64}, Vector{Float64}, Nothing, Nothing, Nothing, Nothing, Base.Pairs{Symbol, Union{}, Tuple{}, NamedTuple{(), Tuple{}}}}, opt::NelderMead{Optim.AffineSimplexer, Optim.AdaptiveParameters}, data::Base.Iterators.Cycle{Tuple{Optimization.NullData}})
    @ OptimizationOptimJL C:\Users\User\.julia\packages\OptimizationOptimJL\uRfW9\src\OptimizationOptimJL.jl:121
  [5] __init(prob::OptimizationProblem{true, OptimizationFunction{true, SciMLBase.NoAD, typeof(rosenbrock), Nothing, Nothing, Nothing, Nothing, Nothing, Nothing, Nothing, Nothing, Nothing, Nothing, Nothing, Nothing, Nothing, typeof(SciMLBase.DEFAULT_OBSERVED_NO_TIME), Nothing, Nothing, Nothing, Nothing, Nothing, Nothing, Nothing}, Vector{Float64}, Vector{Float64}, Vector{Float64}, Vector{Float64}, Nothing, Nothing, Nothing, Nothing, Base.Pairs{Symbol, Union{}, Tuple{}, NamedTuple{(), Tuple{}}}}, opt::NelderMead{Optim.AffineSimplexer, Optim.AdaptiveParameters})
    @ OptimizationOptimJL C:\Users\User\.julia\packages\OptimizationOptimJL\uRfW9\src\OptimizationOptimJL.jl:121
  [6] init(::OptimizationProblem{true, OptimizationFunction{true, SciMLBase.NoAD, typeof(rosenbrock), Nothing, Nothing, Nothing, Nothing, Nothing, Nothing, Nothing, Nothing, Nothing, Nothing, Nothing, Nothing, Nothing, typeof(SciMLBase.DEFAULT_OBSERVED_NO_TIME), Nothing, Nothing, Nothing, Nothing, Nothing, Nothing, Nothing}, Vector{Float64}, Vector{Float64}, Vector{Float64}, Vector{Float64}, Nothing, Nothing, Nothing, Nothing, Base.Pairs{Symbol, Union{}, Tuple{}, NamedTuple{(), Tuple{}}}}, ::NelderMead{Optim.AffineSimplexer, Optim.AdaptiveParameters}; kwargs::Base.Pairs{Symbol, Union{}, Tuple{}, NamedTuple{(), Tuple{}}})
    @ SciMLBase C:\Users\User\.julia\packages\SciMLBase\KcGs1\src\solve.jl:148
  [7] init(::OptimizationProblem{true, OptimizationFunction{true, SciMLBase.NoAD, typeof(rosenbrock), Nothing, Nothing, Nothing, Nothing, Nothing, Nothing, Nothing, Nothing, Nothing, Nothing, Nothing, Nothing, Nothing, typeof(SciMLBase.DEFAULT_OBSERVED_NO_TIME), Nothing, Nothing, Nothing, Nothing, Nothing, Nothing, Nothing}, Vector{Float64}, Vector{Float64}, Vector{Float64}, Vector{Float64}, Nothing, Nothing, Nothing, Nothing, Base.Pairs{Symbol, Union{}, Tuple{}, NamedTuple{(), Tuple{}}}}, ::NelderMead{Optim.AffineSimplexer, Optim.AdaptiveParameters})
    @ SciMLBase C:\Users\User\.julia\packages\SciMLBase\KcGs1\src\solve.jl:146
  [8] solve(::OptimizationProblem{true, OptimizationFunction{true, SciMLBase.NoAD, typeof(rosenbrock), Nothing, Nothing, Nothing, Nothing, Nothing, Nothing, Nothing, Nothing, Nothing, Nothing, Nothing, Nothing, Nothing, typeof(SciMLBase.DEFAULT_OBSERVED_NO_TIME), Nothing, Nothing, Nothing, Nothing, Nothing, Nothing, Nothing}, Vector{Float64}, Vector{Float64}, Vector{Float64}, Vector{Float64}, Nothing, Nothing, Nothing, Nothing, Base.Pairs{Symbol, Union{}, Tuple{}, NamedTuple{(), Tuple{}}}}, ::NelderMead{Optim.AffineSimplexer, Optim.AdaptiveParameters}; kwargs::Base.Pairs{Symbol, Union{}, Tuple{}, NamedTuple{(), Tuple{}}})
    @ SciMLBase C:\Users\User\.julia\packages\SciMLBase\KcGs1\src\solve.jl:83
  [9] solve(::OptimizationProblem{true, OptimizationFunction{true, SciMLBase.NoAD, typeof(rosenbrock), Nothing, Nothing, Nothing, Nothing, Nothing, Nothing, Nothing, Nothing, Nothing, Nothing, Nothing, Nothing, Nothing, typeof(SciMLBase.DEFAULT_OBSERVED_NO_TIME), Nothing, Nothing, Nothing, Nothing, Nothing, Nothing, Nothing}, Vector{Float64}, Vector{Float64}, Vector{Float64}, Vector{Float64}, Nothing, Nothing, Nothing, Nothing, Base.Pairs{Symbol, Union{}, Tuple{}, NamedTuple{(), Tuple{}}}}, ::NelderMead{Optim.AffineSimplexer, Optim.AdaptiveParameters})
    @ SciMLBase C:\Users\User\.julia\packages\SciMLBase\KcGs1\src\solve.jl:80
 [10] top-level scope
    @ Untitled-1:7

because OptimJLOptimizationCache has

!(opt isa Optim.ZerothOrderOptimizer) && f.grad === nothing &&
        error("Use OptimizationFunction to pass the derivatives or automatically generate them with one of the autodiff backends")

but, with constraints, opt in this case is

opt = Fminbox{NelderMead{Optim.AffineSimplexer, Optim.AdaptiveParameters}, Float64, Optim.var"#49#51"}(NelderMead{Optim.AffineSimplexer, Optim.AdaptiveParameters}(Optim.AffineSimplexer(0.025, 0.5), Optim.AdaptiveParameters(1.0, 1.0, 0.75, 1.0)), NaN, 0.001, Optim.var"#49#51"())

I think something like

# Return `true` if `opt` needs user-supplied (or AD-generated) derivatives.
# Written with multiple dispatch rather than an `isa` chain:
#   * `Optim.ConstrainedOptimizer` (e.g. IPNewton) always needs derivatives.
#   * Other `AbstractConstrainedOptimizer`s (e.g. Fminbox) are wrappers, so
#     defer to the wrapped inner optimizer in `opt.method` — Fminbox is not a
#     ConstrainedOptimizer, which is why the extra case is needed.
#   * Zeroth-order methods (e.g. NelderMead) never need derivatives.
#   * Everything else (first/second order) does.
# NOTE(review): assumes ConstrainedOptimizer <: AbstractConstrainedOptimizer
# and that ZerothOrderOptimizer is disjoint from both, per Optim.jl's type
# hierarchy — confirm against the installed Optim version.
_optim_requires_grad(::Optim.ConstrainedOptimizer) = true
_optim_requires_grad(wrapper::Optim.AbstractConstrainedOptimizer) =
    _optim_requires_grad(wrapper.method)
_optim_requires_grad(::Optim.ZerothOrderOptimizer) = false
_optim_requires_grad(::Any) = true

is needed, e.g.

# Expected results of the proposed predicate (comments show the Bool returned):
# derivative-free methods are exempt, even when wrapped in Fminbox.
_optim_requires_grad(NelderMead()) # false
_optim_requires_grad(Fminbox(NelderMead())) # false
_optim_requires_grad(LBFGS()) # true
_optim_requires_grad(Fminbox(LBFGS())) # true
_optim_requires_grad(IPNewton()) # true

Haven't tested anything for any of the other packages.

Vaibhavdixit02 commented 1 year ago

This is fixed on master. Will be doing a release in a few days. Do you mind using master?