JuliaSmoothOptimizers / ADNLPModels.jl

Other
37 stars 14 forks source link

hprod/jprod not GPU-compatible #225

Open tmigot opened 4 months ago

tmigot commented 4 months ago

See the following tests:

```julia
hs6_autodiff(::Type{T}; kwargs...) where {T <: Number} = hs6_autodiff(Vector{T}; kwargs...)

function hs6_autodiff(::Type{S} = Vector{Float64}; kwargs...) where {S}
  x0 = S([-12 // 10; 1])
  f(x) = (1 - x[1])^2
  c(x) = [10 * (x[2] - x[1]^2)]
  lcon = fill!(S(undef, 1), 0)
  ucon = fill!(S(undef, 1), 0)

  return ADNLPModel(f, x0, c, lcon, ucon, name = "hs6_autodiff"; kwargs...)
end
```

```julia
nlp = hs6_autodiff(CuArray{Float64})
CUDA.allowscalar()
jth_hprod(nlp, nlp.meta.x0, nlp.meta.x0, 1) # same for hprod(nlp, nlp.meta.x0, nlp.meta.x0)
```

#=

ERROR: GPU compilation of MethodInstance for (::GPUArrays.var"#map_kernel#38"{…})(::CUDA.CuKernelContext, ::CuDeviceVector{…}, ::Base.Broadcast.Broadcasted{…}, ::Int64) failed KernelError: passing and using non-bitstype argument

Argument 4 to your kernel function is of type Base.Broadcast.Broadcasted{Base.Broadcast.DefaultArrayStyle{1}, Tuple{Base.OneTo{Int64}}, ForwardDiff.var"#85#86"{ForwardDiff.Tag{ADNLPModels.var"#lag#141"{Int64, var"#f#6", Int64, ADNLPModels.var"#c!#319"{var"#c#7"}, ADNLPModels.var"#lag#134#142"}, Float64}}, Tuple{Base.Broadcast.Extruded{Vector{ForwardDiff.Dual{ForwardDiff.Tag{ADNLPModels.var"#lag#141"{Int64, var"#f#6", Int64, ADNLPModels.var"#c!#319"{var"#c#7"}, ADNLPModels.var"#lag#134#142"}, Float64}, Float64, 1}}, Tuple{Bool}, Tuple{Int64}}}}, which is not isbits: .args is of type Tuple{Base.Broadcast.Extruded{Vector{ForwardDiff.Dual{ForwardDiff.Tag{ADNLPModels.var"#lag#141"{Int64, var"#f#6", Int64, ADNLPModels.var"#c!#319"{var"#c#7"}, ADNLPModels.var"#lag#134#142"}, Float64}, Float64, 1}}, Tuple{Bool}, Tuple{Int64}}} which is not isbits. .1 is of type Base.Broadcast.Extruded{Vector{ForwardDiff.Dual{ForwardDiff.Tag{ADNLPModels.var"#lag#141"{Int64, var"#f#6", Int64, ADNLPModels.var"#c!#319"{var"#c#7"}, ADNLPModels.var"#lag#134#142"}, Float64}, Float64, 1}}, Tuple{Bool}, Tuple{Int64}} which is not isbits. .x is of type Vector{ForwardDiff.Dual{ForwardDiff.Tag{ADNLPModels.var"#lag#141"{Int64, var"#f#6", Int64, ADNLPModels.var"#c!#319"{var"#c#7"}, ADNLPModels.var"#lag#134#142"}, Float64}, Float64, 1}} which is not isbits. =#