# Initialize the likelihood's observation noise (log scale).
prior_gp_model.likelihood.initialize(log_noise=1)

# Compute posterior distribution.
# NOTE: original line fused these two statements into one invalid line.
infer = Inference(prior_gp_model)
posterior_gp_model = infer.run(train_x, train_y)

# Find optimal model hyperparameters.
posterior_gp_model.train()
optimizer = optim.Adam(posterior_gp_model.parameters(), lr=0.1)
optimizer.n_iter = 0  # ad-hoc iteration counter stashed on the optimizer
for i in range(50):
    optimizer.zero_grad()
    output = posterior_gp_model(train_x)
    # Minimize the negative marginal log likelihood.
    loss = -posterior_gp_model.marginal_log_likelihood(output, train_y)
    loss.backward()
    optimizer.n_iter += 1
    optimizer.step()
# Test the model
# --- After change (second version of this snippet follows; diff marker) ---
optimizer.step()  # NOTE(review): orphaned tail of the training loop above — confirm it belongs inside the loop

# Compute posterior distribution by conditioning the model on the training
# data (replaces the explicit Inference step in the earlier version).
gp_model.condition(train_x, train_y)

# Test the model: switch to evaluation mode and predict at the test inputs.
gp_model.eval()
test_function_predictions = gp_model(test_x)