    S = np.diagflat(S)
else:
    if self.kernel_type == "geometric":
        S = np.diagflat(1/(1-self.lamda*Dij))
    elif self.kernel_type == "exponential":
        S = np.diagflat(np.exp(self.lamda*Dij))
return ff.dot(S).dot(ff.T)
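For context, the branch above weights the eigenvalue products Dij of the two graphs with either a geometric-series or an exponential factor and recombines them with the flanking factors ff. A minimal standalone sketch of that spectral formulation follows; the name spectral_rw_kernel is hypothetical, and it assumes symmetric adjacency matrices, all-ones start/stop vectors, and lamda small enough for the geometric series to converge.

import numpy as np

def spectral_rw_kernel(A1, A2, lamda=0.1, kernel_type="geometric"):
    # Illustrative sketch only, not the library's implementation.
    w1, V1 = np.linalg.eigh(A1)            # eigendecomposition of graph 1
    w2, V2 = np.linalg.eigh(A2)            # eigendecomposition of graph 2
    q1 = np.ones(A1.shape[0]).dot(V1)      # 1^T V1
    q2 = np.ones(A2.shape[0]).dot(V2)      # 1^T V2
    ff = np.outer(q1, q2).ravel()          # flanking factors of the product graph
    Dij = np.outer(w1, w2).ravel()         # eigenvalues of the Kronecker product
    if kernel_type == "geometric":
        S = 1.0/(1.0 - lamda*Dij)          # closed form of the geometric walk series
    elif kernel_type == "exponential":
        S = np.exp(lamda*Dij)              # exponential weighting of walk lengths
    else:
        raise ValueError("unknown kernel_type")
    return float((ff*S).dot(ff))           # ff diag(S) ff^T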
After Change
# Conjugate Gradient Method as presented in
# [Vishwanathan et al., 2006] p.12, s.4.2
Ax, Ay = X, Y                        # adjacency matrices of the two graphs
xs, ys = Ax.shape[0], Ay.shape[0]    # their node counts
mn = xs*ys                           # dimension of the Kronecker-product system

def lsf(x, lamda):
    # reshape the vector into an xs-by-ys matrix in column-major (vec) order
    xm = x.reshape((xs, ys), order="F")
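For reference, the cited scheme from [Vishwanathan et al., 2006], s.4.2 solves (I - lamda*W) x = p, where W is the Kronecker product of the two adjacency matrices, with a matrix-free matrix-vector product built from the vec-trick; the reshape(..., order="F") in lsf above is the first step of such a product. A self-contained sketch of that scheme under stated assumptions (hypothetical name rw_kernel_cg; assumes SciPy, symmetric adjacency matrices, and lamda small enough for the system to stay positive definite):

import numpy as np
from scipy.sparse.linalg import LinearOperator, cg

def rw_kernel_cg(A1, A2, lamda=0.1):
    # Illustrative sketch only, not the code introduced by this change.
    n1, n2 = A1.shape[0], A2.shape[0]
    mn = n1*n2

    def matvec(x):
        # (I - lamda*(A2 kron A1)) vec(X) == vec(X - lamda * A1 @ X @ A2)
        # for symmetric A2, so the mn-by-mn matrix is never formed.
        xm = x.reshape((n1, n2), order="F")
        ym = xm - lamda*A1.dot(xm).dot(A2)
        return ym.reshape(mn, order="F")

    op = LinearOperator((mn, mn), matvec=matvec, dtype=float)
    b = np.ones(mn)                        # all-ones start/stop vector
    x_sol, info = cg(op, b, maxiter=1000)  # solve the Kronecker system iteratively
    if info != 0:
        raise RuntimeError("conjugate gradient did not converge")
    return float(x_sol.sum())              # 1^T (I - lamda*W)^{-1} 1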