# do not use += because autograd does not support it
loss = loss + self.triplet_loss_(fX[anchor, :],
                                 fX[positive, :],
                                 fX[negative, :])
n_comparisons = n_comparisons + 1
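The per-triplet helper this loop relied on is not shown in the diff; presumably it recomputed both distances from scratch for every triplet. A hypothetical sketch of such a helper, assuming a squared Euclidean metric and illustrative margin/metric_max defaults (none of these names or values are confirmed by the diff):

import autograd.numpy as ag_np

def triplet_loss_(f_anchor, f_positive, f_negative, margin=0.2, metric_max=1.0):
    # hypothetical: anchor-positive and anchor-negative distances
    # are recomputed from scratch for every single triplet
    d_ap = ag_np.sum((f_anchor - f_positive) ** 2)
    d_an = ag_np.sum((f_anchor - f_negative) ** 2)
    return ag_np.maximum(d_ap - d_an + margin * metric_max, 0.)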
After Change
loss = 0.
n_comparisons = 0
# precompute the pairwise distance matrix once for the whole batch
distance = self.metric_(fX)
# consider every embedding as anchor
for anchor, y_anchor in enumerate(y):
    # consider every other embedding with the same label as positive
    for positive, y_positive in enumerate(y):
        # if same embedding or different labels, skip
        if (anchor == positive) or (y_anchor != y_positive):
            continue
        # consider every embedding with a different label as negative
        for negative, y_negative in enumerate(y):
            # if same label, skip
            if y_negative == y_positive:
                continue
            loss_ = distance[anchor, positive] - \
                    distance[anchor, negative] + \
                    self.margin * self.metric_max_
            if self.clamp == "positive":
                loss_ = ag_np.maximum(loss_, 0.)
            elif self.clamp == "sigmoid":
                loss_ = 1. / (1. + ag_np.exp(-loss_))
            # do not use += because autograd does not support it
            loss = loss + loss_
            n_comparisons = n_comparisons + 1

# average over all anchor-positive-negative comparisons
return loss / n_comparisons
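A minimal, self-contained way to sanity-check the rewritten loop with autograd; pairwise_distance, batch_all_triplet_loss, the toy batch, and the margin/metric_max defaults below are illustrative assumptions standing in for self.metric_ and the class attributes, not the actual implementation:

import autograd.numpy as ag_np
from autograd import grad

def pairwise_distance(fX):
    # hypothetical stand-in for self.metric_: squared Euclidean distances
    diff = fX[:, None, :] - fX[None, :, :]
    return ag_np.sum(diff ** 2, axis=2)

def batch_all_triplet_loss(fX, y, margin=0.2, metric_max=1.0):
    # same triple loop as the "After Change" code, clamp == "positive" only
    distance = pairwise_distance(fX)
    loss = 0.
    n_comparisons = 0
    for anchor, y_anchor in enumerate(y):
        for positive, y_positive in enumerate(y):
            if (anchor == positive) or (y_anchor != y_positive):
                continue
            for negative, y_negative in enumerate(y):
                if y_negative == y_positive:
                    continue
                loss_ = ag_np.maximum(distance[anchor, positive] -
                                      distance[anchor, negative] +
                                      margin * metric_max, 0.)
                # accumulate without += so autograd can trace the computation
                loss = loss + loss_
                n_comparisons = n_comparisons + 1
    return loss / n_comparisons

# toy batch: four 2-d embeddings, two labels
fX = ag_np.array([[0.0, 0.1], [0.1, 0.0], [1.0, 0.9], [0.9, 1.0]])
y = [0, 0, 1, 1]

# gradient of the averaged loss with respect to the embeddings
dloss_dfX = grad(lambda fX: batch_all_triplet_loss(fX, y))(fX)
print(dloss_dfX.shape)  # (4, 2)

Hoisting distance = self.metric_(fX) out of the loops means the pairwise distances are computed once per batch rather than once per triplet, which appears to be the main point of the change.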