Closed theogf closed 1 year ago
So implementing
# Base case: a single element remains — just apply `f` to it.
_hadamard(f, x::Tuple{Any}) = f(only(x))
# Recursive case: combine `f` on the head elementwise with the tail's result.
_hadamard(f, x::Tuple) = f(x[1]) .* _hadamard(f, Base.tail(x))
function kernelmatrix(κ::KernelProduct, x::AbstractVector)
    # Partially apply the input vector, then fold the elementwise (Hadamard)
    # product of the per-kernel matrices over the kernel tuple.
    compute = Base.Fix2(kernelmatrix, x)
    return _hadamard(compute, κ.kernels)
end
Does not seem to be enough...
However, when looking at @code_warntype
the output type seems fine :+1:
julia> @code_warntype kernelmatrix(nested_k, x)
MethodInstance for KernelFunctions.kernelmatrix(::KernelProduct{Tuple{SqExponentialKernel{Distances.Euclidean}, TransformedKernel{KernelSum{Tuple{LinearKernel{Float64}, KernelProduct{Tuple{ScaledKernel{CosineKernel{Distances.Euclidean}, Float64}, SqExponentialKernel{Distances.Euclidean}}}}}, SelectTransform{Int64}}}}, ::RowVecs{Float64, Matrix{Float64}, SubArray{Float64, 1, Matrix{Float64}, Tuple{Int64, Base.Slice{Base.OneTo{Int64}}}, true}})
from kernelmatrix(κ::KernelProduct, x::AbstractVector) in KernelFunctions at /home/theo/.julia/dev/KernelFunctions.jl/src/kernels/kernelproduct.jl:51
Arguments
#self#::Core.Const(KernelFunctions.kernelmatrix)
κ::KernelProduct{Tuple{SqExponentialKernel{Distances.Euclidean}, TransformedKernel{KernelSum{Tuple{LinearKernel{Float64}, KernelProduct{Tuple{ScaledKernel{CosineKernel{Distances.Euclidean}, Float64}, SqExponentialKernel{Distances.Euclidean}}}}}, SelectTransform{Int64}}}}
x::RowVecs{Float64, Matrix{Float64}, SubArray{Float64, 1, Matrix{Float64}, Tuple{Int64, Base.Slice{Base.OneTo{Int64}}}, true}}
Body::Matrix{Float64}
1 ─ %1 = Base.Fix2::Core.Const(Base.Fix2)
│ %2 = (%1)(KernelFunctions.kernelmatrix, x)::Base.Fix2{typeof(kernelmatrix), RowVecs{Float64, Matrix{Float64}, SubArray{Float64, 1, Matrix{Float64}, Tuple{Int64, Base.Slice{Base.OneTo{Int64}}}, true}}}
│ %3 = Base.getproperty(κ, :kernels)::Tuple{SqExponentialKernel{Distances.Euclidean}, TransformedKernel{KernelSum{Tuple{LinearKernel{Float64}, KernelProduct{Tuple{ScaledKernel{CosineKernel{Distances.Euclidean}, Float64}, SqExponentialKernel{Distances.Euclidean}}}}}, SelectTransform{Int64}}}
│ %4 = KernelFunctions._hadamard(%2, %3)::Matrix{Float64}
└── return %4
EDIT: Never mind — Cthulhu agrees with @inferred, so I will use this instead.
Turning it into:
# Base case: one kernel left, so its matrix is the whole product.
_hadamard(ks::Tuple{Any}, x) = kernelmatrix(only(ks), x)
# Recursive case: head kernel's matrix times (elementwise) the tail's product.
_hadamard(ks::Tuple, x) = kernelmatrix(first(ks), x) .* _hadamard(Base.tail(ks), x)
function kernelmatrix(κ::KernelProduct, x::AbstractVector)
    # Recurse over the kernel tuple directly, combining matrices elementwise.
    ks = κ.kernels
    return _hadamard(ks, x)
end
This version, however, seems to work.
So does
# Base case: a single element — apply `f` to it with the shared argument `x`.
_hadamard(f, ks::Tuple{Any}, x) = f(only(ks), x)
# Recursive case: peel off the head, recurse on the tail, combine elementwise.
_hadamard(f, ks::Tuple, x) = f(ks[1], x) .* _hadamard(f, Base.tail(ks), x)
function kernelmatrix(κ::KernelProduct, x::AbstractVector)
    # Hand `kernelmatrix` itself to the recursion as the per-kernel map,
    # so the fold stays generic over the reducing function.
    return _hadamard(kernelmatrix, κ.kernels, x)
end
Similar to #458, there seems to be an issue with products of kernels. Basic cases like
SqExponentialKernel() * SqExponentialKernel()
work fine, but more deeply nested kernels have issues. In particular, this does not seem to work. I will try to make a PR similar to #459 to solve the issue.