SciML / DataDrivenDiffEq.jl

Data driven modeling and automated discovery of dynamical systems for the SciML Scientific Machine Learning organization
https://docs.sciml.ai/DataDrivenDiffEq/stable/
MIT License

Michaelis Menten Example with 3 subproblems #377

Closed · sceptri closed 1 year ago

sceptri commented 1 year ago

For my Bachelor's thesis I need to use Datasets, so I was playing around with the Michaelis-Menten example.

By default, it features two subproblems and everything works fine. But if we add a third subproblem, for example

problem_3 = ODEProblem(michaelis_menten, 3 * u0, (8.0, 12.0));
solution_3 = solve(problem_3, Tsit5(), saveat=0.1);

and

data = (
    Experiment_1=(X=Array(solution_1), t=solution_1.t, DX=michaelis_menten(Array(solution_1), [], solution_1.t)),
    Experiment_2=(X=Array(solution_2), t=solution_2.t, DX=michaelis_menten(Array(solution_2), [], solution_2.t)),
    Experiment_3=(X=Array(solution_3), t=solution_3.t, DX=michaelis_menten(Array(solution_3), [], solution_3.t))
)

Then the solver crashes and throws this error:

julia> res = solve(prob, basis, opt, normalize=false, denoise=false, by=:min, sampler=sampler, maxiter=1000);
ERROR: BoundsError: attempt to access 2×0 view(::Matrix{Float64}, :, 124:123) with eltype Float64 at index [1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14, 15, 16, 17, 18, 19, 20, 21, 22, 23, 24, 25, 26, 27, 28, 29, 30, 31, 32, 33, 34, 35, 36, 37, 38, 39, 40, 41, 42, 43, 44, 45, 46, 47, 48, 49, 50, 51, 52, 53, 54, 55, 56, 57, 58, 59, 60, 61, 62, 63, 64, 65, 66, 67, 68, 69, 70, 71, 72, 73, 74, 75, 76, 77, 78, 79, 80, 81, 82]
Stacktrace:
 [1] copyto_unaliased!(deststyle::IndexLinear, dest::SubArray{Float64, 2, Matrix{Float64}, Tuple{Base.Slice{Base.OneTo{Int64}}, UnitRange{Int64}}, true}, srcstyle::IndexLinear, src::Matrix{Float64})
   @ Base ./abstractarray.jl:1032
 [2] copyto!
   @ ./abstractarray.jl:1018 [inlined]
 [3] map(f::typeof(copyto!), t::Tuple{SubArray{Float64, 2, Matrix{Float64}, Tuple{Base.Slice{Base.OneTo{Int64}}, UnitRange{Int64}}, true}, Vector{Float64}, SubArray{Float64, 1, Vector{Float64}, Tuple{UnitRange{Int64}}, true}}, s::Tuple{Matrix{Float64}, Vector{Float64}, Vector{Float64}})
   @ Base ./tuple.jl:250
 [4] get_implicit_oop_args(x::DataDrivenDataset{Float64, true, DataDrivenDiffEq.Continuous})
   @ DataDrivenDiffEq ~/.julia/packages/DataDrivenDiffEq/s9jl3/src/problem/set.jl:188
 [5] solve!(p::DataDrivenDiffEq.SparseIdentificationProblem{Array{Float64, 3}, DataDrivenDataset{Float64, true, DataDrivenDiffEq.Continuous}, Basis, Vector{UnitRange{Int64}}, UnitRange{Int64}, ImplicitOptimizer{StepRangeLen{Float64, Base.TwicePrecision{Float64}, Base.TwicePrecision{Float64}, Int64}}, DataDrivenCommonOptions{Float64, Base.Pairs{Symbol, Symbol, Tuple{Symbol}, NamedTuple{(:by,), Tuple{Symbol}}}}})
   @ DataDrivenDiffEq ~/.julia/packages/DataDrivenDiffEq/s9jl3/src/solve/sparse_identification.jl:84
 [6] solve(::DataDrivenDataset{Float64, true, DataDrivenDiffEq.Continuous}, ::Vararg{Any}; kwargs::Base.Pairs{Symbol, Any, NTuple{5, Symbol}, NamedTuple{(:normalize, :denoise, :by, :sampler, :maxiter), Tuple{Bool, Bool, Symbol, DataSampler{Tuple{Split, Batcher}}, Int64}}})
   @ CommonSolve ~/.julia/packages/CommonSolve/TGRvG/src/CommonSolve.jl:23
 [7] top-level scope
   @ REPL[1]:1

Everything else stayed the same. Why does this happen? Or am I doing something wrong?

The full code to reproduce this issue is:

using DataDrivenDiffEq
using LinearAlgebra
using ModelingToolkit
using OrdinaryDiffEq

function michaelis_menten(u, p, t)
    [0.6 - 1.5u[1] / (0.3 + u[1])]
end

u0 = [0.5]

problem_1 = ODEProblem(michaelis_menten, u0, (0.0, 4.0));
solution_1 = solve(problem_1, Tsit5(), saveat=0.1);
problem_2 = ODEProblem(michaelis_menten, 2 * u0, (4.0, 8.0));
solution_2 = solve(problem_2, Tsit5(), saveat=0.1);
problem_3 = ODEProblem(michaelis_menten, 3 * u0, (8.0, 12.0));
solution_3 = solve(problem_3, Tsit5(), saveat=0.1);

function michaelis_menten(X::AbstractMatrix, p, t::AbstractVector)
    reduce(hcat, map((x, ti) -> michaelis_menten(x, p, ti), eachcol(X), t))
end

data = (
    Experiment_1=(X=Array(solution_1), t=solution_1.t, DX=michaelis_menten(Array(solution_1), [], solution_1.t)),
    Experiment_2=(X=Array(solution_2), t=solution_2.t, DX=michaelis_menten(Array(solution_2), [], solution_2.t)),
    Experiment_3=(X=Array(solution_3), t=solution_3.t, DX=michaelis_menten(Array(solution_3), [], solution_3.t))
)

prob = DataDrivenDiffEq.ContinuousDataset(data);

@parameters t
D = Differential(t)
@variables u[1:1](t)
h = [monomial_basis(u[1:1], 4)...]
basis = Basis([h; h .* D(u[1])], u, implicits=D.(u), iv=t)
println(basis)

sampler = DataSampler(
    Split(ratio=0.8), Batcher(n=10)
)

opt = ImplicitOptimizer(1e-1:1e-1:5e-1)
res = solve(prob, basis, opt, normalize=false, denoise=false, by=:min, sampler=sampler, maxiter=1000);

From what I tried, I am not sure whether the function (from set.jl)

function (b::AbstractBasis)(dx::AbstractMatrix, d::DataDrivenDataset)
    last = 1
    @views for (i,s) in enumerate(cumsum(d.sizes))
        b(dx[:, last:s], d.probs[i])
        last += s
    end
    return
end

is really correct. The line last += s seems strange to me, since s comes from a cumsum... Shouldn't it be last = s + 1?
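
A minimal sketch of just that index bookkeeping seems to confirm the suspicion. Here show_ranges is a hypothetical helper, and the size of 41 samples per subproblem is an assumption (it matches saveat=0.1 over spans of length 4.0, and the 82 source indices in the error correspond to a 2×41 matrix), so cumsum(sizes) == [41, 82, 123]:

# Sketch of the column bookkeeping from the function above;
# `show_ranges` is a hypothetical helper, and 41 samples per
# subproblem is an assumption based on saveat=0.1 over spans of 4.0.
function show_ranges(sizes; fixed::Bool)
    last = 1
    for s in cumsum(sizes)
        println(last, ":", s)              # columns handed to this subproblem
        last = fixed ? s + 1 : last + s    # proposed fix vs. current update
    end
end

show_ranges([41, 41, 41]; fixed=false)  # 1:41, 42:82, 124:123 (empty range!)
show_ranges([41, 41, 41]; fixed=true)   # 1:41, 42:82, 83:123

With only two subproblems the bug is masked: after the first iteration, last + s equals sizes[1] + 1, which happens to coincide with the correct start cumsum(sizes)[1] + 1, and the wrong value computed after the second slice is never used. From the third subproblem on, the ranges drift, which would explain the empty 124:123 view in the error above.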

sceptri commented 1 year ago

Closed by #380