EnzymeAD / Enzyme.jl

Julia bindings for the Enzyme automatic differentiator
https://enzyme.mit.edu
MIT License

"Invoke is not a generic function" #1676


swilliamson7 commented 4 months ago

I'm in the process of trying to minimize a bigger error, but with my smaller code I've hit an "invoke is not a generic function" error that I haven't seen before. This might well be me doing something silly, but the error output seems to point to Enzyme code, so I'm not sure.

The code I'm running:

using Enzyme

function integrate(J, nt, k, r, x0, data_steps, data)

    dt = 0.001

    # state-transition matrix for the three-mass spring system (forward Euler, step dt)
    A = [1 0 0 dt 0 0;
        0 1 0 0 dt 0;
        0 0 1 0 0 dt;
        -2*k*dt k*dt 0 1-r*dt 0 0;
        k*dt -3*k*dt k*dt 0 1-r*dt 0;
        0 k*dt -2*k*dt 0 0 1-r*dt
    ]

    # observation operator (identity here)
    E = [1 0 0 0 0 0;
        0 1 0 0 0 0;
        0 0 1 0 0 0;
        0 0 0 1 0 0;
        0 0 0 0 1 0;
        0 0 0 0 0 1
    ]

    sigma_forcing = 0.01

    function q(t)
        q_1 = 0.1 * cos(2 * pi * t / (2.5 / r))
        q_2 = 0.0
        q_3 = 0.0
        q_4 = 0.0
        q_5 = 0.0
        q_6 = 0.0

        return [q_1, q_2, q_3, q_4, q_5, q_6]
    end

    function q_kf(t)
        return 0.5 * q(t)
    end

    # deterministic forcing enters only the first state component
    B = [1 0 0 0 0 0;
        0 0 0 0 0 0;
        0 0 0 0 0 0;
        0 0 0 0 0 0;
        0 0 0 0 0 0;
        0 0 0 0 0 0]

    function u(t)
        return sigma_forcing * randn(6)
    end

    # stochastic forcing enters only the first state component
    Gamma = [1 0 0 0 0 0;
        0 0 0 0 0 0;
        0 0 0 0 0 0;
        0 0 0 0 0 0;
        0 0 0 0 0 0;
        0 0 0 0 0 0]

    all_states = zeros(6, nt+1)
    all_states[:,1] = x0

    x = x0

    current_timestep = dt
    for j in 1:nt+1

        x = A * x + B * q(current_timestep) + Gamma * u(current_timestep)

        all_states[:, j] = x
        current_timestep += dt

        if j in data_steps

            J += sum((E * data[:, j] - E * x).^2)

        end

        data[:, j] = x

    end

    return nothing

end

J = 0.0 
nt = 10000
k = 30.0  # Float64, since k is marked Active below
r = 0.5
x0 = [1., 2., 3., 0., 0., 0.]
data_steps = 3000:300:7000
data = zeros(6, nt+1)

integrate(J, nt, k, r, x0, data_steps, data)

data = data + 0.5 .* randn(6, nt+1)

dk = 0.0
dx0 = zeros(6)

autodiff(ReverseWithPrimal,
    integrate,
    Active(J),
    Const(nt),
    Active(k),
    Const(r),
    Const(x0),
    Const(data_steps),
    Const(data)
)

and the error I see:

julia> include("three_mass_spring.jl")
┌ Warning: Using fallback BLAS replacements for (["dsymv_64_"]), performance may be degraded
└ @ Enzyme.Compiler ~/.julia/packages/GPUCompiler/Y4hSX/src/utils.jl:59
ERROR: LoadError: invoke is not a generic function
Stacktrace:
  [1] error(s::String)
    @ Base ./error.jl:35
  [2] codegen_world_age_generator(world::UInt64, source::Any, self::Any, ft::Type, tt::Type)
    @ Enzyme ~/.julia/packages/Enzyme/aEyGD/src/utils.jl:109
  [3] augfwd_with_return(::Val{…}, ::Val{…}, ::Type{…}, ::Val{…}, ::Type{…}, ::Type{…}, ::typeof(invoke), ::Nothing, ::Const{…}, ::Const{…}, ::Const{…}, ::Const{…}, ::Const{…}, ::Const{…}, ::Active{…}, ::Const{…}, ::Const{…}, ::Const{…}, ::Const{…}, ::Const{…}, ::Const{…}, ::Active{…}, ::Const{…}, ::Const{…}, ::Const{…}, ::Const{…}, ::Const{…}, ::Const{…}, ::Active{…}, ::Active{…}, ::Active{…}, ::Const{…}, ::Active{…}, ::Const{…}, ::Const{…}, ::Active{…}, ::Active{…}, ::Active{…}, ::Const{…}, ::Active{…}, ::Const{…}, ::Const{…}, ::Active{…}, ::Active{…}, ::Const{…}, ::Const{…}, ::Active{…})
    @ Enzyme.Compiler ~/.julia/packages/Enzyme/aEyGD/src/rules/jitrules.jl:836
  [4] runtime_iterate_augfwd(activity::Type{…}, width::Val{…}, ModifiedBetween::Val{…}, RT::Val{…}, f::typeof(invoke), df::Nothing, primal_1::Tuple{…}, shadow_1_1::Nothing, primal_2::Tuple{…}, shadow_2_1::Base.RefValue{…})
    @ Enzyme.Compiler ~/.julia/packages/Enzyme/aEyGD/src/rules/jitrules.jl:68
  [5] hvcat
    @ /Applications/Julia-1.10.app/Contents/Resources/julia/share/julia/stdlib/v1.10/SparseArrays/src/sparsevector.jl:1268 [inlined]
  [6] integrate
    @ ~/Documents/GitHub/ShallowWaters_work/trying_to_minimize_difficult_bugs/three_mass_spring.jl:12 [inlined]
  [7] integrate
    @ ~/Documents/GitHub/ShallowWaters_work/trying_to_minimize_difficult_bugs/three_mass_spring.jl:0 [inlined]
  [8] diffejulia_integrate_1449_inner_5wrap
    @ ~/Documents/GitHub/ShallowWaters_work/trying_to_minimize_difficult_bugs/three_mass_spring.jl:0
  [9] macro expansion
    @ Enzyme.Compiler ~/.julia/packages/Enzyme/aEyGD/src/compiler.jl:6673 [inlined]
 [10] enzyme_call
    @ Enzyme.Compiler ~/.julia/packages/Enzyme/aEyGD/src/compiler.jl:6273 [inlined]
 [11] CombinedAdjointThunk
    @ Enzyme.Compiler ~/.julia/packages/Enzyme/aEyGD/src/compiler.jl:6150 [inlined]
 [12] autodiff
    @ Enzyme ~/.julia/packages/Enzyme/aEyGD/src/Enzyme.jl:314 [inlined]
 [13] autodiff
    @ Enzyme ~/.julia/packages/Enzyme/aEyGD/src/Enzyme.jl:338 [inlined]
 [14] autodiff(::ReverseMode{…}, ::typeof(integrate), ::Active{…}, ::Const{…}, ::Active{…}, ::Const{…}, ::Const{…}, ::Const{…}, ::Const{…})
    @ Enzyme ~/.julia/packages/Enzyme/aEyGD/src/Enzyme.jl:323
 [15] top-level scope
    @ ~/Documents/GitHub/ShallowWaters_work/trying_to_minimize_difficult_bugs/three_mass_spring.jl:105
 [16] include(fname::String)
    @ Base.MainInclude ./client.jl:489
 [17] top-level scope
    @ REPL[3]:1
in expression starting at /Users/swilliamson/Documents/GitHub/ShallowWaters_work/trying_to_minimize_difficult_bugs/three_mass_spring.jl:105
Some type information was truncated. Use `show(err)` to see complete types.
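
Frame [5] of the stack trace points at hvcat, i.e. the bracketed [ ... ; ... ] matrix literals inside integrate, so that is at least where differentiation falls over. One thing worth trying while this is open is to build the matrices without the literal syntax, so the differentiated code never goes through hvcat. A minimal sketch for A (an untested rewrite, assuming dt, k, and r are in scope):

# build A elementwise instead of via the [ ... ; ... ] literal,
# which lowers to a call to hvcat (the frame where the trace fails)
A = zeros(6, 6)
for i in 1:3
    A[i, i]     = 1.0        # position carries over
    A[i, i+3]   = dt         # plus dt times the matching velocity
    A[i+3, i+3] = 1 - r*dt   # velocity is damped by r
end
A[4, 1] = -2k*dt; A[4, 2] =  k*dt
A[5, 1] =  k*dt;  A[5, 2] = -3k*dt; A[5, 3] = k*dt
A[6, 2] =  k*dt;  A[6, 3] = -2k*dt
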
wsmoses commented 4 months ago

What version of things are you on? This worked for me [though I needed to add Enzyme.API.runtimeActivity!(true)].
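
For reference, the flag is a global switch in this generation of Enzyme.jl (0.12.x) and goes at the top of the script, before anything is differentiated:

using Enzyme
Enzyme.API.runtimeActivity!(true)  # set before the first autodiff call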

swilliamson7 commented 4 months ago

I'm on Enzyme v0.12.22, but I don't have the runtime activity flag, so I can try adding that.

swilliamson7 commented 4 months ago

Whoops, that was wrong; I'm actually on Enzyme v0.12.25#main.

swilliamson7 commented 4 months ago

Adding the flag did indeed make the error go away, but, weirdly, the error still happens with the version of my code that uses a struct:

using Enzyme

Enzyme.API.runtimeActivity!(true)

mutable struct model
    J::Float64
    nt::Int
    k::Float64
    r::Float64
    x0::Vector{Float64}
    data_steps::StepRange{Int64, Int64}
    data::Array{Float64}
end

function integrate(model)

    nt = model.nt
    dt = 0.001
    x0 = model.x0
    k = model.k
    r = model.r
    data = model.data
    data_steps = model.data_steps
    J = model.J

    A = [1 0 0 dt 0 0; 
        0 1 0 0 dt 0;
        0 0 1 0 0 dt;
        -2*k*dt k*dt 0 1-r*dt 0 0;
        k*dt -3*k*dt k*dt 0 1-r*dt 0;
        0 k*dt -2*k*dt 0 0 1-r*dt
    ]

    E = [1 0 0 0 0 0; 
        0 1 0 0 0 0;
        0 0 1 0 0 0;
        0 0 0 1 0 0;
        0 0 0 0 1 0;
        0 0 0 0 0 1
    ]

    sigma_forcing = 0.01

    # forcing matrices below are unused in this reduced loop
    B = [1 0 0 0 0 0;
        0 0 0 0 0 0;
        0 0 0 0 0 0;
        0 0 0 0 0 0;
        0 0 0 0 0 0;
        0 0 0 0 0 0]

    function u(t)
        return sigma_forcing * randn(6)
    end

    Gamma = [1 0 0 0 0 0;
        0 0 0 0 0 0;
        0 0 0 0 0 0;
        0 0 0 0 0 0;
        0 0 0 0 0 0;
        0 0 0 0 0 0]

    all_states = zeros(6, nt+1)
    all_states[:,1] = x0

    x = x0

    current_timestep = dt
    for j in 1:nt+1

        x = A * x

        all_states[:, j] = x
        current_timestep += dt

        if j in data_steps

            J += sum((E * data[:, j] - E * x).^2)

        end

        data[:, j] = x

    end

    return nothing

end

nt = 10000

parameters = model(
    0.0,
    10000,
    30.,
    0.5,
    [1., 2., 3., 0., 0., 0.],
    3000:300:7000,
    zeros(6, nt+1)
)

integrate(parameters)

data = parameters.data + 0.5 .* randn(6, nt+1)

parameters_for_enzyme = model(
    0.0,
    10000,
    30.,
    0.5,
    [1., 2., 3., 0., 0., 0.],
    3000:300:7000,
    data
)

derivatives = Enzyme.make_zero(parameters_for_enzyme)

autodiff(ReverseWithPrimal,
    integrate,
    Duplicated(parameters_for_enzyme, derivatives)
)
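
One more wrinkle with the struct version: integrate accumulates J into a local and returns nothing, so the cost never leaves the function, and derivatives.k would stay zero even once this compiles. A sketch of one way to wire the seed through the shadow struct, assuming integrate is changed to end with model.J = J before returning (a hypothetical adjustment, not the code above):

derivatives = Enzyme.make_zero(parameters_for_enzyme)
derivatives.J = 1.0   # seed the adjoint of the stored cost

# the reverse pass propagates the seed back into the shadow's fields
autodiff(Reverse, integrate, Duplicated(parameters_for_enzyme, derivatives))

derivatives.k   # would then hold dJ/dk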