I'm using the following kernel for my Gaussian process regression, optimized with Adam. But when I print the model's optimized parameters, the lengthscales are not within the interval I provided.
class GPR(gpytorch.models.ExactGP):
    """Exact GP with a (Linear + scaled RQ) kernel; RQ lengthscales constrained to (1, 10).

    Why the printed lengthscales look out of range: GPyTorch stores *raw*
    (unconstrained) parameters internally. Iterating ``model.named_parameters()``
    shows ``raw_lengthscale``, which lives on the whole real line and can be
    negative. The value the kernel actually uses is
    ``model.covar_module.kernels[1].base_kernel.lengthscale`` — i.e.
    ``constraint.transform(raw_lengthscale)`` — and it always lies inside (1, 10).
    """

    def __init__(self, train_x, train_y, likelihood):
        super().__init__(train_x, train_y, likelihood)
        self.mean_module = gpytorch.means.ConstantMean()

        # Constrain each ARD lengthscale to the interval (1, 10).
        lengthscale_constraint1 = gpytorch.constraints.Interval(
            lower_bound=1.0, upper_bound=10.0
        )

        # Fixed typos from the original: ScaleKernel (capital K),
        # ard_num_dims (not ard_nums_dims), and the misspelled
        # variable name "lengthsclae_constraint1".
        covar_module1 = gpytorch.kernels.ScaleKernel(
            gpytorch.kernels.RQKernel(
                ard_num_dims=train_x.shape[1],
                lengthscale_constraint=lengthscale_constraint1,
            )
        )

        # Fix the outputscale at 3.542 and exclude it from optimization.
        covar_module1.outputscale = 3.542
        covar_module1.raw_outputscale.requires_grad = False

        # Freeze alpha. NOTE(review): this replaces RQKernel's existing raw_alpha
        # parameter; the effective alpha is the constraint's (positive) transform
        # of this raw value, not 1e-20 itself — confirm this gives the alpha you
        # intend, or set it via covar_module1.base_kernel.initialize(alpha=...).
        covar_module1.base_kernel.register_parameter(
            "raw_alpha", torch.nn.Parameter(torch.tensor([1e-20]))
        )
        covar_module1.base_kernel.raw_alpha.requires_grad = False

        self.covar_module = gpytorch.kernels.LinearKernel() + covar_module1

    def forward(self, x):
        # Required by gpytorch.models.ExactGP; the original snippet omitted it.
        mean_x = self.mean_module(x)
        covar_x = self.covar_module(x)
        return gpytorch.distributions.MultivariateNormal(mean_x, covar_x)
Actual output
The lengthscales are negative and not within the range.
Expected output
The lengthscales should lie in the range 1 to 10.