if rhs.ndimension() == 1:
    rhs = rhs.unsqueeze(1)
    is_vector = True
batch_size = max(rhs.size(0), self.size(0)) if rhs.ndimension() == 3 else None
# Here we have a root decomposition
if isinstance(self.left_lazy_tensor, RootLazyTensor):
    left_root = self.left_lazy_tensor.root.evaluate()
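When the left factor carries a root decomposition, the product with rhs never needs the full matrix: a tensor representing R Rᵀ can be applied as R (Rᵀ rhs). Below is a minimal PyTorch sketch of that identity, assuming left_root plays the role of R; the names are illustrative, not GPyTorch's API.

import torch

R = torch.randn(5, 3)          # stand-in for left_root
rhs = torch.randn(5, 2)
direct = (R @ R.T) @ rhs       # forming the full matrix R @ R.T first
via_root = R @ (R.T @ rhs)     # two thin matmuls, no full matrix
assert torch.allclose(direct, via_root, atol=1e-6)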
After Change
pass
def _matmul(self, rhs):
    output_shape = _matmul_broadcast_shape(self.shape, rhs.shape)
    output_batch_shape = torch.Size(output_shape[:-2])
    # Delegate to the evaluated (non-lazy) representation when one is available
    if self.non_lazy_self is not None:
        return self.non_lazy_self._matmul(rhs)
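The output shape here comes from broadcasting the batch dimensions of the lazy tensor against those of rhs before combining the trailing matrix dimensions. A rough sketch of that computation follows; it is a hypothetical stand-in, not GPyTorch's _matmul_broadcast_shape itself, and it assumes both operands are at least 2-D.

import torch

def broadcast_matmul_shape(shape_a, shape_b):
    # Sketch: (..., m, n) @ (..., n, p) -> (broadcast batch, m, p)
    m, n = shape_a[-2], shape_a[-1]
    n2, p = shape_b[-2], shape_b[-1]
    if n != n2:
        raise RuntimeError(f"Incompatible shapes for matmul: {shape_a} x {shape_b}")
    # Broadcast the leading batch dimensions the same way torch.matmul would
    batch = torch.broadcast_shapes(torch.Size(shape_a[:-2]), torch.Size(shape_b[:-2]))
    return torch.Size(batch) + torch.Size([m, p])

broadcast_matmul_shape(torch.Size([3, 1, 5, 4]), torch.Size([2, 4, 6]))  # torch.Size([3, 2, 5, 6])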