lanl-ansi / MathOptAI.jl

Embed trained machine learning predictors in JuMP
https://lanl-ansi.github.io/MathOptAI.jl/

Add GaussianProcess #44

Closed · odow closed this 4 months ago

odow commented 4 months ago

See https://arxiv.org/pdf/2310.00763
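For context, the sketches in the comments below all reduce to constraining a posterior quantile of the GP. With posterior mean $\mu(x)$ and variance $\sigma^2(x)$, the $\alpha$-quantile at $x$ is

$$q_\alpha(x) = \mu(x) + \Phi^{-1}(\alpha)\,\sigma(x),$$

where $\Phi^{-1}$ is the standard normal quantile function; in the code, $\Phi^{-1}(\alpha)$ is computed as `Distributions.invlogcdf(Distributions.Normal(0, 1), log(α))`.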

odow commented 4 months ago
import AbstractGPs
import StatsBase
using JuMP
# Univariate example: condition a GP on noisy samples of sin, then query the
# posterior mean and variance at a new point.
x = 2π .* rand(20)
y = sin.(x)
fx = AbstractGPs.GP(AbstractGPs.Matern32Kernel())(x, 0.001)
p_fx = AbstractGPs.posterior(fx, y)
StatsBase.mean_and_var(p_fx([0.4]))

# Multivariate example: two input dimensions, passed as the rows of a matrix
# via `AbstractGPs.RowVecs`.
x = 2π .* rand(20, 2)
y = vec(sum(sin.(x); dims = 2))
kernel = AbstractGPs.Matern32Kernel()
fx = AbstractGPs.GP(kernel)(AbstractGPs.RowVecs(x), 0.01)
p_fx = AbstractGPs.posterior(fx, y)
StatsBase.mean_and_var(p_fx(AbstractGPs.RowVecs([0.4 0.5])))

An example like

model = Model()
@variable(model, 0 <= x_input <= 2π)
μ, v = StatsBase.mean_and_var(p_fx([x_input]))

is going to need a bunch of missing methods, for example:

KernelFunctions.dim(x::AbstractVector{<:JuMP.AbstractJuMPScalar}) = 1
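Alternatively (and this is what the next comments develop), the missing methods can be sidestepped by hiding the GP behind a user-defined nonlinear operator, so that the posterior is only ever evaluated at numeric points. A minimal sketch, reusing the univariate `p_fx` from above:

import AbstractGPs
import StatsBase
using JuMP

model = Model()
@variable(model, 0 <= x_input <= 2π)
# JuMP only ever calls `gp_mean` with Float64 arguments, so no methods on
# `AbstractJuMPScalar` are needed.
gp_mean(x) = only(StatsBase.mean_and_var(p_fx([x]))[1])
op_gp_mean = JuMP.add_nonlinear_operator(model, 1, gp_mean; name = :op_gp_mean)
@constraint(model, op_gp_mean(x_input) >= 0.5)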
odow commented 4 months ago
import AbstractGPs
import Distributions
import Ipopt
import Plots
import StatsBase

using JuMP

function add_gp_quantile_operator(model, p_fx, input_dim, quantile)
    function gp_q(x...)
        # λ = Φ⁻¹(quantile), the standard normal quantile function.
        λ = Distributions.invlogcdf(Distributions.Normal(0, 1), log(quantile))
        # Posterior mean and variance of the GP at the point `x`.
        μ, σ² = only.(StatsBase.mean_and_var(p_fx(collect(x))))
        # The `quantile` quantile of the Gaussian posterior at `x`.
        return μ + λ * sqrt(σ²)
    end
    return JuMP.add_nonlinear_operator(model, input_dim, gp_q; name = :op_gp_q)
end

# Univariate example
f = x -> sin(x)

x = 2π .* (0.0:0.1:1.0)
y = f.(x)
fx = AbstractGPs.GP(AbstractGPs.Matern32Kernel())(x, 0.1)
p_fx = AbstractGPs.posterior(fx, y)

model = Model(Ipopt.Optimizer)
@variable(model, 1 <= x_input <= 6, start = 3)
@objective(model, Max, x_input)
op_gp_quantile = add_gp_quantile_operator(model, p_fx, 1, 0.9)
@constraint(model, op_gp_quantile(x_input) >= 0.5)
optimize!(model)
@assert is_solved_and_feasible(model)

Plots.plot(0:0.1:2π+0.1, f; label = "True function")
Plots.scatter!(x, y; label = "Data")
Plots.plot!(0:0.1:2π+0.1, p_fx; label = "GP")
Plots.hline!([0.5]; label = false)
Plots.vline!([value(x_input)]; label = false)

[Plot: the true function, the data points, the GP posterior, the 0.5 threshold, and the optimized x_input.]

odow commented 4 months ago

Or something like this, wrapped as a MathOptAI predictor:

import AbstractGPs
import Distributions
import Ipopt
import MathOptAI
import Plots
import StatsBase

using JuMP

struct QuantileOperator{GP} <: MathOptAI.AbstractPredictor
    p_fx::GP
    quantile::Float64
end

function MathOptAI.add_predictor(
    model::JuMP.Model,
    predictor::QuantileOperator,
    x::Vector,
)
    y = JuMP.@variable(model)
    # λ = Φ⁻¹(quantile), the standard normal quantile function.
    N = Distributions.Normal(0, 1)
    λ = Distributions.invlogcdf(N, log(predictor.quantile))
    function gp_q(x...)
        input_x = collect(x)
        if length(input_x) > 1
            # Multi-input GPs expect one row per observation.
            input_x = AbstractGPs.RowVecs(input_x')
        end
        # Posterior mean and variance at `input_x`, then the Gaussian quantile.
        μ, σ² = only.(StatsBase.mean_and_var(predictor.p_fx(input_x)))
        return μ + λ * sqrt(σ²)
    end
    input_dim = length(x)
    op = JuMP.add_nonlinear_operator(model, input_dim, gp_q; name = :op_gp_q)
    JuMP.@constraint(model, y == op(x...))
    return [y]
end

# Univariate example
f = x -> sin(x)
x = 2π .* (0.0:0.1:1.0)
y = f.(x)
fx = AbstractGPs.GP(AbstractGPs.Matern32Kernel())(x, 0.1)
p_fx = AbstractGPs.posterior(fx, y)

model = Model(Ipopt.Optimizer)
@variable(model, 1 <= x_input[1:1] <= 6, start = 3)
@objective(model, Max, x_input[1])
predictor = QuantileOperator(p_fx, 0.9)
y_output = MathOptAI.add_predictor(model, predictor, x_input)
set_lower_bound(only(y_output), 0.5)
optimize!(model)
@assert is_solved_and_feasible(model)
Plots.plot(0:0.1:2π+0.1, f; label = "True function")
Plots.scatter!(x, y; label = "Data")
Plots.plot!(0:0.1:2π+0.1, p_fx; label = "GP")
Plots.hline!([0.5]; label = false)
Plots.vline!(value.(x_input); label = false)
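To exercise the `RowVecs` branch, the same predictor can be used with a two-input posterior like the one in the first comment. A sketch (untested):

# Multivariate example: two inputs, reusing the setup from the first comment.
x2 = 2π .* rand(20, 2)
y2 = vec(sum(sin.(x2); dims = 2))
fx2 = AbstractGPs.GP(AbstractGPs.Matern32Kernel())(AbstractGPs.RowVecs(x2), 0.01)
p_fx2 = AbstractGPs.posterior(fx2, y2)

model = Model(Ipopt.Optimizer)
@variable(model, 0 <= x_input[1:2] <= 2π, start = 1)
@objective(model, Max, sum(x_input))
y_output = MathOptAI.add_predictor(model, QuantileOperator(p_fx2, 0.9), x_input)
set_lower_bound(only(y_output), 0.5)
optimize!(model)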