print("init MultiLayerBinaryClassifier with layers %s and dropout %s" % (self.layer_sizes, self.dropout))
def forward(self, X, **kwargs):
    return self.model(X).float()
# HACK -- parameter to measure *variation* across heads in the last layer of the MLP.
# Why? To support multihead prediction for the same category, where we want multiple
# heads to predict with different functions (similar to training a mixture of models)
# -- useful for uncertainty sampling.
After Change
def forward(self, X, **kwargs):
    clf_out = self.model(X).float()
    if self.heads_per_class <= 1:
        return clf_out
    # Reshape (batch, classes * heads_per_class) -> (batch, classes, heads_per_class)
    clf_out = clf_out.view(clf_out.size(0), -1, self.heads_per_class)
    probs = clf_out
    if self.softmax:
        # F is torch.nn.functional; softmax over the class dimension, per head
        probs = F.softmax(probs, 1)
    # Mean and std across heads, per class -- the std measures head disagreement
    clf_mean = probs.mean(dim=2)
    clf_std = probs.std(dim=2)
    return clf_out, clf_mean, clf_std
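# For context, a minimal sketch of how the per-head std could drive uncertainty
# sampling. This is an assumption-laden illustration, not part of the original
# change: `select_uncertain` is a hypothetical helper, and `model` is assumed to
# be a MultiLayerBinaryClassifier built with heads_per_class > 1, so that its
# forward() returns (clf_out, clf_mean, clf_std) as above.
import torch

def select_uncertain(model, X_unlabeled, k=32):
    model.eval()
    with torch.no_grad():
        _, clf_mean, clf_std = model(X_unlabeled)
    # Average the per-class std across classes: higher values mean the heads
    # disagree more, i.e. the prediction is less certain.
    uncertainty = clf_std.mean(dim=1)
    return uncertainty.topk(k).indices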