Status: Open — tmigot opened this issue 1 month ago
See the following tests:
using CUDA, ADNLPModels, NLPModels, Symbolics
# Build the HS6 test problem (f(x) = (1 - x1)^2 s.t. 10(x2 - x1^2) = 0),
# parameterized by the storage type `S` so it can run on CPU or GPU arrays.
hs6_autodiff(::Type{T}; kwargs...) where {T <: Number} = hs6_autodiff(Vector{T}; kwargs...)

function hs6_autodiff(::Type{S} = Vector{Float64}; kwargs...) where {S}
    x0 = S([-12 // 10; 1])
    f(x) = (1 - x[1])^2
    c(x) = [10 * (x[2] - x[1]^2)]
    # Equality constraint: lcon == ucon == 0.
    lcon = fill!(S(undef, 1), 0)
    ucon = fill!(S(undef, 1), 0)
    return ADNLPModel(f, x0, c, lcon, ucon, name = "hs6_autodiff"; kwargs...)
end

# GPU setup: storage type, in-place constraint, starting point, output buffer.
S = CuArray{Float64}
function c!(cx, x)
    # NOTE(review): the RHS literal allocates a temporary CPU Vector before the
    # broadcast copy — likely relevant to the reported GPU failure; kept as-is
    # to reproduce the issue faithfully.
    cx .= [10 * (x[2] - x[1]^2)]
    return cx
end
x0 = S([-12 // 10; 1])
output = similar(x0, 1)
# Detect the sparsity pattern of the Jacobian of the in-place constraint `c!`.
# FIX: the original passed `cx`, which is never defined in this script; the
# preallocated constraint buffer is named `output` (`output = similar(x0, 1)`).
J = Symbolics.jacobian_sparsity(c!, output, x0)
See the following tests:
# HS6 test problem generator (duplicate of the snippet above, kept to mirror
# the issue report): f(x) = (1 - x1)^2 subject to 10(x2 - x1^2) = 0.
hs6_autodiff(::Type{T}; kwargs...) where {T <: Number} = hs6_autodiff(Vector{T}; kwargs...)

function hs6_autodiff(::Type{S} = Vector{Float64}; kwargs...) where {S}
    x0 = S([-12 // 10; 1])
    f(x) = (1 - x[1])^2
    c(x) = [10 * (x[2] - x[1]^2)]
    # Equality constraint bounds: both zero.
    lcon = fill!(S(undef, 1), 0)
    ucon = fill!(S(undef, 1), 0)
    return ADNLPModel(f, x0, c, lcon, ucon, name = "hs6_autodiff"; kwargs...)
end

# GPU-side script state used by the calls below.
S = CuArray{Float64}
function c!(cx, x)
    # Temporary-array broadcast preserved verbatim from the issue report.
    cx .= [10 * (x[2] - x[1]^2)]
    return cx
end
x0 = S([-12 // 10; 1])
output = similar(x0, 1)
nlp = hs6_autodiff(CuArray{Float64})  # instantiate HS6 with GPU (CuArray) storage
# NOTE(review): this appears to be the failing call reported in the issue — a
# sparse-Jacobian AD backend built for the CuArray model (2 vars, 1 constraint).
ADNLPModels.SparseADJacobian(2, x -> (1 - x[1])^2, 1, c!, x0 = x0)
J = ADNLPModels.compute_jacobian_sparsity(c!, output, x0)  # sparsity via ADNLPModels helper
# Same sparsity-detection call as above, driven through Symbolics directly.
# FIX: `cx` is undefined in this script — the constraint output buffer is
# named `output` (created via `output = similar(x0, 1)`).
J = Symbolics.jacobian_sparsity(c!, output, x0)