try:
    self.model.fit(X_train, y_train)
    r = self.model.score(X_test, y_test)
except BaseException:
    r = 0.0  # on error: return the assumed smallest value of the objective function
# While negative values could be informative, they can also be very large,
# which could mess up the optimization procedure. Suggestions are welcome.
return max(r, 0.0)
# this is necessary to generate the table for the README at the end
table_template = """|Blackbox Function| Minimum | Best minimum |
|------------------|---------|--------------|
"""
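The clamp in return max(r, 0.0) matters because the default score of a sklearn regressor is R^2, which is unbounded below: a single badly mispredicting configuration can return a value many orders of magnitude below every other, and that outlier would dominate the optimizer's view of the search space. A minimal sketch of this, using made-up test values and predictions:

# Sketch only: y_test and y_bad are hypothetical values chosen to show the effect.
from sklearn.metrics import r2_score

y_test = [1.0, 2.0, 3.0]
y_bad = [1000.0, -1000.0, 500.0]  # badly mispredicted values
r = r2_score(y_test, y_bad)
print(r)            # a very large negative number (around -1.3 million here)
print(max(r, 0.0))  # clamped to 0.0, the assumed smallest objective value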
After Change
import math

from sklearn.base import ClassifierMixin, RegressorMixin
from sklearn.metrics import log_loss, r2_score

try:
    model_instance.fit(X_train, y_train)
    if isinstance(model_instance, RegressorMixin):  # R^2 metric
        y_predicted = model_instance.predict(X_test)
        score = r2_score(y_test, y_predicted)
    elif isinstance(model_instance, ClassifierMixin):  # log loss
        y_predicted = model_instance.predict_proba(X_test)
        score = -log_loss(y_test, y_predicted)  # in the context of this function, a higher score is better
    # Avoid any kind of singularities, e.g. a probability being zero
    # and thus breaking the log_loss.
    if math.isnan(score):
        score = min_obj_val
    score = max(score, min_obj_val)  # this is necessary to avoid -inf or NaN
except BaseException:
    score = min_obj_val  # on error: return the assumed smallest value of the objective function
return score
# this is necessary to generate the table for the README at the end
table_template = """|Blackbox Function| Minimum | Best minimum |
|------------------|---------|--------------|
"""
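To see what the new classifier branch produces, here is a minimal sketch of it in isolation. The labels, the predict_proba-style probabilities, and the floor min_obj_val are all made up for illustration; in the real code min_obj_val is defined outside this excerpt.

# Sketch only: all values below are hypothetical.
import math
from sklearn.metrics import log_loss

min_obj_val = -1e4  # hypothetical floor; the real value comes from the surrounding code

y_test = [0, 1]
y_predicted = [[0.9, 0.1], [0.2, 0.8]]  # predict_proba-style class probabilities
score = -log_loss(y_test, y_predicted)  # log loss is "lower is better", so negate it
if math.isnan(score):
    score = min_obj_val
score = max(score, min_obj_val)  # floor out -inf or huge losses
print(score)  # about -0.16; scores closer to 0 mean better predictions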