y_max = max([plot_tracker.data[constants.MEAN][i][-1],
             plot_tracker.data[constants.CURRENT][i][-1], y_max])
pad = 0.5 * y_max  # Padding below and above thresholds
plot_tracker.sub_plot_obj.set_ylim((y_min - pad, y_max + pad))
plot_tracker.sub_plot_obj.set_xlim(0, self._sample_ids[-1])

def _clear_annotations(self):
    """ Clear annotations, so the next frame is correctly rendered. """
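The padded-limit update above can be reproduced outside the evaluator. Below is a minimal, self-contained sketch assuming plain matplotlib; sample_ids, mean_curve, and current_curve are illustrative stand-ins, not names from the library.

# Minimal sketch of the padded-limit update, assuming plain matplotlib.
import matplotlib
matplotlib.use("Agg")  # headless backend, so the sketch runs without a display
import matplotlib.pyplot as plt

sample_ids = list(range(1, 11))
mean_curve = [0.1 * i for i in sample_ids]            # stand-in for the 'mean' series
current_curve = [0.1 * i + 0.05 for i in sample_ids]  # stand-in for the 'current' series

fig, ax = plt.subplots()
mean_line, = ax.plot([], [], label="mean")
current_line, = ax.plot([], [], label="current")

# Per-frame update: push the buffered values into the line objects, then recompute
# the axis limits with 50% headroom around the largest value seen so far.
mean_line.set_data(sample_ids, mean_curve)
current_line.set_data(sample_ids, current_curve)

y_min = 0.0
y_max = max(mean_curve[-1], current_curve[-1], 1.0)
pad = 0.5 * y_max  # padding below and above the plotted curves
ax.set_ylim(y_min - pad, y_max + pad)
ax.set_xlim(0, sample_ids[-1])
fig.savefig("padded_limits.png")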
After Change
y_min = min([plot_tracker.data[data_id][i][-1], plot_tracker.data[constants.Y_TRUE][-1], y_min])
y_max = max([plot_tracker.data[data_id][i][-1], plot_tracker.data[constants.Y_TRUE][-1], y_max])
elif metric_id == constants.DATA_POINTS:
    update_xy_limits = False
    # Process features
    data_id = "X"
    features_dict = data_buffer.get_data(metric_id=metric_id, data_id=data_id)
    feature_indices = list(features_dict.keys())
    feature_indices.sort()
    # Store the tuple of feature values in the buffer, sorted by index
    plot_tracker.data[data_id].add_element([[features_dict[feature_indices[0]],
                                             features_dict[feature_indices[1]]]])
    plot_tracker.sub_plot_obj.set_xlabel("Feature {}".format(feature_indices[0]))
    plot_tracker.sub_plot_obj.set_ylabel("Feature {}".format(feature_indices[1]))
    # TODO: consider a fading/update strategy instead of clearing the axes each frame
    plot_tracker.sub_plot_obj.clear()
    X1 = plot_tracker.data[data_id].get_queue()[-1][0]
    X2 = plot_tracker.data[data_id].get_queue()[-1][1]
    # Process target values
    data_id = "target_values"
    plot_tracker.data[data_id] = data_buffer.get_data(metric_id=metric_id, data_id=data_id)
    if not plot_tracker.data["clusters_initialized"]:
        for j in range(len(plot_tracker.data[data_id])):
            plot_tracker.data["clusters"].append(FastBuffer(plot_tracker.data["buffer_size"]))
    # Process predictions
    data_id = "predictions"
    plot_tracker.data[data_id].add_element([data_buffer.get_data(metric_id=metric_id, data_id=data_id)])
    for k, cluster in enumerate(plot_tracker.data["clusters"]):
        if plot_tracker.data[data_id].get_queue()[-1] == k:
            plot_tracker.data["clusters"][k].add_element([(X1, X2)])
        # TODO: confirm whether the buffer update belongs inside this loop
        if cluster.get_queue():
            temp = cluster.get_queue()
            plot_tracker.sub_plot_obj.scatter(*zip(*temp), label="Class {k}".format(k=k))
            plot_tracker.sub_plot_obj.legend(loc=2, bbox_to_anchor=(1.01, 1.))
    # (a standalone sketch of this per-class scatter pattern appears after this fragment)
else:
    # Default case, "mean" and "current" performance
    for data_id in data_ids:
        # Buffer data
        data = data_buffer.get_data(metric_id=metric_id, data_id=data_id)
        for i in range(self.n_models):
            plot_tracker.data[data_id][i].append(data[i])
            plot_tracker.line_objs[data_id][i].set_data(self._sample_ids, plot_tracker.data[data_id][i])
    # Process data
    for i in range(self.n_models):
        # Update annotations
        self._update_annotations(i, plot_tracker.sub_plot_obj, self.model_names[i],
                                 plot_tracker.data[constants.MEAN][i][-1],
                                 plot_tracker.data[constants.CURRENT][i][-1])
        # Update plot limits
        if metric_id in [constants.KAPPA_T, constants.KAPPA_M]:
            y_min = min([plot_tracker.data[constants.MEAN][i][-1],
                         plot_tracker.data[constants.CURRENT][i][-1], y_min])
        if metric_id in [constants.MSE, constants.MAE, constants.AMSE, constants.AMAE, constants.ARMSE]:
            y_min = -1
            y_max = max([plot_tracker.data[constants.MEAN][i][-1],
                         plot_tracker.data[constants.CURRENT][i][-1], y_max])
            pad = 0.5 * y_max  # Padding below and above thresholds

if update_xy_limits:
    plot_tracker.sub_plot_obj.set_ylim((y_min - pad, y_max + pad))
    plot_tracker.sub_plot_obj.set_xlim(0, self._sample_ids[-1])
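For the DATA_POINTS branch above, the per-class scatter pattern can be exercised in isolation. This is a sketch under stated assumptions: collections.deque(maxlen=...) stands in for skmultiflow's FastBuffer, and stream, clusters, and buffer_size are illustrative names.

# Sketch of the per-class scatter pattern; deque(maxlen=...) approximates FastBuffer.
import collections
import matplotlib
matplotlib.use("Agg")  # headless backend, so the sketch runs without a display
import matplotlib.pyplot as plt

buffer_size = 100
n_classes = 2
# One bounded buffer of (feature_1, feature_2) points per predicted class.
clusters = [collections.deque(maxlen=buffer_size) for _ in range(n_classes)]

# Toy stream of (feature_1, feature_2, predicted_class) triples.
stream = [(0.1, 0.2, 0), (0.8, 0.9, 1), (0.2, 0.1, 0), (0.7, 0.8, 1)]
for x1, x2, y_pred in stream:
    clusters[y_pred].append((x1, x2))

fig, ax = plt.subplots()
ax.clear()  # same clear-and-redraw strategy as the branch above
ax.set_xlabel("Feature 0")
ax.set_ylabel("Feature 1")
for k, cluster in enumerate(clusters):
    if cluster:
        ax.scatter(*zip(*cluster), label="Class {k}".format(k=k))
ax.legend(loc=2, bbox_to_anchor=(1.01, 1.))  # legend placed just outside the axes
fig.savefig("data_points.png")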

def _clear_annotations(self):
    """ Clear annotations, so the next frame is correctly rendered. """
    for i in range(len(self._text_annotations)):
        self._text_annotations[i].remove()
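A sketch of the clear-and-redraw annotation pattern used by _clear_annotations, again assuming plain matplotlib; the text_annotations list here is an illustrative local standing in for self._text_annotations.

# Sketch of removing the last frame's text annotations before drawing the next frame.
import matplotlib
matplotlib.use("Agg")  # headless backend, so the sketch runs without a display
import matplotlib.pyplot as plt

fig, ax = plt.subplots()
ax.plot([0, 1], [0, 1])

text_annotations = []  # illustrative stand-in for self._text_annotations
for frame in range(3):
    # Remove the previous frame's annotations so they do not pile up on the axes.
    for annotation in text_annotations:
        annotation.remove()
    text_annotations = []
    # Re-create the annotations for the current frame.
    text_annotations.append(ax.annotate("frame {}".format(frame), xy=(0.5, 0.5)))
    fig.canvas.draw_idle()
fig.savefig("annotations.png")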