cornellius-gp / gpytorch

A highly efficient implementation of Gaussian Processes in PyTorch
MIT License

[Bug] Initialization of Variational Distribution with function reference instead of Multivariate Normal #2401

Closed: VolkmannB closed this issue 10 months ago

VolkmannB commented 10 months ago

🐛 Bug

In the __call__ method of _VariationalStrategy (link), a function reference is passed to initialize_variational_distribution instead of a MultivariateNormal object.
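For background, this is standard Python attribute behavior: a method defined without the @property decorator is not called on attribute access, so reading the attribute yields the bound method itself rather than its return value. A minimal, self-contained illustration (the class names here are made up for demonstration):

class Plain:
    def value(self):
        return 42

class Decorated:
    @property
    def value(self):
        return 42

print(Plain().value)      # prints a bound method, e.g. <bound method Plain.value of ...>
print(Decorated().value)  # prints 42; the property getter runs on attribute access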

To reproduce

Code snippet to reproduce

from gpytorch.distributions import MultivariateNormal
from gpytorch.models import ApproximateGP
from gpytorch.variational import _VariationalDistribution, CholeskyVariationalDistribution
import gpytorch
from gpytorch.variational._variational_strategy import _VariationalStrategy
from linear_operator.operators import LinearOperator
import torch
from torch import Tensor

class VarStratTest(gpytorch.variational._VariationalStrategy):

    def __init__(self, model: ApproximateGP | _VariationalStrategy, inducing_points: Tensor, variational_distribution: _VariationalDistribution, learn_inducing_locations: bool = True, jitter_val: float | None = None):
        super().__init__(model, inducing_points, variational_distribution, learn_inducing_locations, jitter_val)

    def prior_distribution(self) -> MultivariateNormal:
        # Prior over the function values at the inducing points
        # (note: defined as a plain method here, not as a property)
        return self.module.forward(self.inducing_points)

    def forward(self, x: Tensor, inducing_points: Tensor, inducing_values: Tensor, variational_inducing_covar: LinearOperator | None = None, **kwargs) -> MultivariateNormal:
        return self.module.forward(x)

class GPModel(ApproximateGP):
    def __init__(self, inducing_points):
        variational_distribution = CholeskyVariationalDistribution(inducing_points.size(0))
        variational_strategy = VarStratTest(self, inducing_points, variational_distribution, learn_inducing_locations=True)
        super(GPModel, self).__init__(variational_strategy)
        self.mean_module = gpytorch.means.ConstantMean()
        self.covar_module = gpytorch.kernels.ScaleKernel(gpytorch.kernels.RBFKernel())

    def forward(self, x):
        mean_x = self.mean_module(x)
        covar_x = self.covar_module(x)
        return gpytorch.distributions.MultivariateNormal(mean_x, covar_x)

inducing_points = torch.arange(-1.3, 1.3, 0.1)
model = GPModel(inducing_points=inducing_points)

# Calling the model triggers initialization of the variational distribution
x = torch.arange(-1.5, 1.5, 0.01)
model(x)

Stack trace/error message

Traceback (most recent call last):
  File "..\test.py", line 45, in <module>
    model(x)
  File "..\gpytorch\models\approximate_gp.py", line 108, in __call__
    return self.variational_strategy(inputs, prior=prior, **kwargs)
  File "..\gpytorch\variational\_variational_strategy.py", line 328, in __call__
    self._variational_distribution.initialize_variational_distribution(prior_dist)
  File "..\gpytorch\variational\cholesky_variational_distribution.py", line 56, in initialize_variational_distribution
    self.variational_mean.data.copy_(prior_dist.mean)
AttributeError: 'function' object has no attribute 'mean'

Expected Behavior

The call should return the result of model.forward(), i.e. a MultivariateNormal.


gpleiss commented 10 months ago

You need to add a @property decorator to prior_distribution.
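
Applied to the VarStratTest class from the reproduction snippet, that amounts to decorating the override. A minimal sketch, with the reporter's method body left unchanged:

    @property
    def prior_distribution(self) -> MultivariateNormal:
        # With @property, accessing self.prior_distribution evaluates this method,
        # so __call__ receives a MultivariateNormal rather than a bound method.
        return self.module.forward(self.inducing_points)

With the decorator in place, initialize_variational_distribution receives a distribution exposing .mean, so the AttributeError above no longer occurs.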