        torch.set_rng_state(self.rng_state)

    def test_spectral_mixture_gp_mean_abs_error(self):
        likelihood = GaussianLikelihood()
        gp_model = SpectralMixtureGPModel(train_x.data, train_y.data, likelihood)
        mll = gpytorch.mlls.ExactMarginalLogLikelihood(likelihood, gp_model)

        # Optimize the model
        gp_model.train()
        likelihood.train()
        optimizer = optim.Adam(list(gp_model.parameters()) + list(likelihood.parameters()), lr=0.1)
        optimizer.n_iter = 0
        for _ in range(50):
After Change
        torch.set_rng_state(self.rng_state)

    def test_spectral_mixture_gp_mean_abs_error(self):
        likelihood = GaussianLikelihood(
            log_noise_prior=SmoothedBoxPrior(exp(-5), exp(3), sigma=0.1, log_transform=True)
        )
        gp_model = SpectralMixtureGPModel(train_x.data, train_y.data, likelihood)
        mll = gpytorch.mlls.ExactMarginalLogLikelihood(likelihood, gp_model)

        # Optimize the model
        gp_model.train()
        likelihood.train()
        optimizer = optim.Adam(list(gp_model.parameters()), lr=0.1)
        optimizer.n_iter = 0
        with gpytorch.settings.num_trace_samples(100):
            for _ in range(150):
                optimizer.zero_grad()
                output = gp_model(train_x)
                loss = -mll(output, train_y)
                loss.backward()
                optimizer.n_iter += 1
                optimizer.step()

            # Every parameter should have received a nonzero gradient
            for param in gp_model.parameters():
                self.assertTrue(param.grad is not None)
                self.assertGreater(param.grad.norm().item(), 0)
            for param in likelihood.parameters():
                self.assertTrue(param.grad is not None)
                self.assertGreater(param.grad.norm().item(), 0)
            optimizer.step()

        # Test the model
        gp_model.eval()
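
Both versions of the test assume that SpectralMixtureGPModel, train_x, and train_y are defined elsewhere in the test module. For reference only, a minimal sketch of what such a model could look like against the current GPyTorch API; the training data, number of mixtures, and class body below are illustrative assumptions and are not taken from this change:

import math

import torch
import gpytorch

# Illustrative training data (assumed): a noiseless sine curve on [0, 1].
train_x = torch.linspace(0, 1, 15)
train_y = torch.sin(train_x * (2 * math.pi))


class SpectralMixtureGPModel(gpytorch.models.ExactGP):
    # Sketch of an exact GP with a spectral mixture kernel; hyperparameters are assumed.
    def __init__(self, train_x, train_y, likelihood):
        super().__init__(train_x, train_y, likelihood)
        self.mean_module = gpytorch.means.ConstantMean()
        self.covar_module = gpytorch.kernels.SpectralMixtureKernel(num_mixtures=4)
        # Heuristic initialization of the mixture means and scales from the data.
        self.covar_module.initialize_from_data(train_x, train_y)

    def forward(self, x):
        mean_x = self.mean_module(x)
        covar_x = self.covar_module(x)
        return gpytorch.distributions.MultivariateNormal(mean_x, covar_x)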