JuliaOpt / MathProgBase.jl

DEPRECATED: Solver-independent functions (i.e. linprog and mixintprog) and low-level interface for Mathematical Programming
Other
80 stars 38 forks source link

ForwardDiff to calculate Gradient #199

Open rt5592 opened 6 years ago

rt5592 commented 6 years ago

Hi — can I use ForwardDiff within a MathProgBase `NLPEvaluator` to calculate the gradient and Hessian vectors? For instance, I tried using it as shown below, but the solution isn't correct.

# Evaluator for the HS071 test problem. `type` was removed after Julia 0.6;
# `mutable struct` is the direct modern equivalent (an immutable `struct`
# would also work here since there are no fields).
mutable struct HS071 <: MathProgBase.AbstractNLPEvaluator
end

# hs071
# min x1 * x4 * (x1 + x2 + x3) + x3
# st  x1 * x2 * x3 * x4 >= 25
#     x1^2 + x2^2 + x3^2 + x4^2 = 40
#     1 <= x1, x2, x3, x4 <= 5
# Start at (1,5,5,1)
# End at (1.000..., 4.743..., 3.821..., 1.379...)

# Validate the derivative features the solver asks for. Only the dense
# gradient, Jacobian, and Hessian are implemented; anything else is an error.
function MathProgBase.initialize(d::HS071, requested_features::Vector{Symbol})
    supported = [:Grad, :Jac, :Hess]
    for feat in requested_features
        feat in supported && continue
        # TODO: implement Jac-vec and Hess-vec products
        # for solvers that need them
        error("Unsupported feature $feat")
    end
end

# Advertise which derivative features this evaluator can provide.
function MathProgBase.features_available(d::HS071)
    return [:Grad, :Jac, :Hess]
end
# Objective of HS071: x1*x4*(x1 + x2 + x3) + x3.
function MathProgBase.eval_f(d::HS071, x)
    x1, x2, x3, x4 = x[1], x[2], x[3], x[4]
    return x1 * x4 * (x1 + x2 + x3) + x3
end

# Write the objective gradient at `x` into the caller-supplied buffer `grad_f`.
#
# The original code rebound the *local variable* `grad_f` to an anonymous
# function, which (a) never evaluates anything and (b) leaves the caller's
# buffer untouched — that is why the solution came out wrong. The MathProgBase
# contract is to mutate `grad_f` in place.
#
# Note also that `eval_f` takes two arguments `(d, x)`, so ForwardDiff must be
# given a one-argument closure that captures `d`.
function MathProgBase.eval_grad_f(d::HS071, grad_f, x)
    # In-place automatic differentiation: fills grad_f without allocating.
    ForwardDiff.gradient!(grad_f, y -> MathProgBase.eval_f(d, y), x)
    return grad_f
end

Any feedback is appreciated!