n = self.n
x = rnd.randn(n)
y = rnd.randn(n)
z = rnd.randn(n)
# The signature of the cost function now implies that we are on the
# product manifold, so we mimic the behavior of solvers by calling the
# cost function with a single argument: a tuple containing the tuple
# (x, y) and a single vector z.
self.assertAlmostEqual(np.sum(x ** 2 + y + z ** 3), cost(((x, y), z)))
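# For reference, the cost function under test is assumed (it is defined
# in the test fixture, not shown in this fragment) to look roughly like
# the sketch below, with the first parameter grouping x and y, wrapped
# by whichever backend supplies compute_gradient() and
# compute_hessian_vector_product():
#
#     def cost(xy, z):
#         x, y = xy
#         return np.sum(x ** 2 + y + z ** 3)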
egrad = cost.compute_gradient()
g = egrad(((x, y), z))
# We defined the cost function signature to treat the first two
# arguments as one parameter, so a call to the gradient must produce
# two elements.
self.assertIsInstance(g, (list, tuple))
self.assertEqual(len(g), 2)
g_xy, g_z = g
self.assertIsInstance(g_xy, (list, tuple))
self.assertEqual(len(g_xy), 2)
self.assertIsInstance(g_z, np.ndarray)
# Verify correctness of the gradient.
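# For cost = np.sum(x ** 2 + y + z ** 3), the Euclidean partials are
# 2 * x, a vector of ones (compared against the broadcast scalar 1
# below), and 3 * z ** 2, respectively.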
np_testing.assert_allclose(g_xy[0], 2 * x)
np_testing.assert_allclose(g_xy[1], 1)
np_testing.assert_allclose(g_z, 3 * z ** 2)
# Test the Hessian.
u, v, w = [rnd.randn(n) for _ in range(3)]
ehess = cost.compute_hessian_vector_product()
# As with the cost and the gradient, the Hessian-vector product is
# called with a point and a tangent vector, each mirroring the nested
# signature: a tuple containing a tuple and a single vector.
h = ehess(((x, y), z), ((u, v), w))
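# A sketch of the corresponding correctness checks, assuming h mirrors
# the nested structure of the gradient: the only nonzero second
# derivatives of the cost are d2/dx2 = 2 * I and d2/dz2 = diag(6 * z),
# so the Hessian-vector product should equal ((2 * u, 0), 6 * z * w).
h_xy, h_z = h
np_testing.assert_allclose(h_xy[0], 2 * u)
np_testing.assert_allclose(h_xy[1], np.zeros(n))
np_testing.assert_allclose(h_z, 6 * z * w)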