@dispatch(DiagonalGaussian, kernels.Product, InducingPoints, kernels.Product, InducingPoints)
@quadrature_fallback
def _expectation(p, kern1, feat1, kern2, feat2):
    """
    Expectation of the outer product of two (identical) Product-kernel
    feature maps under a diagonal Gaussian:
        eKzxKxz[n] = <K_{Z, x_n} K_{x_n, Z}>_{p(x_n)}

    Only the symmetric case is handled: both kernels and both feature sets
    must be the same object, the Product kernel must act on separate input
    dimensions, and p must have a diagonal covariance (p.var of rank 2).
    Under those conditions the expectation factorizes into an elementwise
    product of the per-sub-kernel expectations.

    :param p: DiagonalGaussian distribution over the inputs.
    :param kern1, kern2: Product kernels; must be identical.
    :param feat1, feat2: InducingPoints features; must be identical.
    :return: tensor combining the sub-kernel expectations via elementwise
        multiplication.
    :raises NotImplementedError: for differing kernels/features or a Product
        kernel not defined on separate dimensions.
    """
    if feat1 != feat2:
        raise NotImplementedError("Different features are not supported")
    if kern1 != kern2:
        raise NotImplementedError("Calculating the expectation over two different Product kernels is not supported")

    kern = kern1
    feat = feat1

    if not kern.on_separate_dimensions:
        raise NotImplementedError("Product currently needs to be defined on separate dimensions.")  # pragma: no cover

    # The factorization below is only valid when p's covariance is diagonal,
    # which for DiagonalGaussian means p.var is a rank-2 [N, D] tensor.
    with tf.control_dependencies([
        tf.assert_equal(tf.rank(p.var), 2,
                        message="Product currently only supports diagonal Xcov.", name="assert_Xcov_diag"),
    ]):
        # Recurse into the dispatcher for each sub-kernel, then multiply the
        # per-sub-kernel expectations together elementwise.
        _expectation_fn = lambda k: _expectation(p, k, feat, k, feat)
        return functools.reduce(tf.multiply, [_expectation_fn(k) for k in kern.kern_list])
@dispatch(DiagonalGaussian, object, (InducingFeature, type(None)), object, (InducingFeature, type(None)))