for eval_value_batch in eval_value_batches:
    eval_values.append([x.tolist() for x in eval_value_batch])  # numpy.ndarray.tolist()
total_loss += cur_avg_loss * cur_num
if progress:
    pbar.update(iter_idx)
if progress:
    pbar.finish()
loss = float(total_loss) / total
data_set.reset()
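Both versions accumulate a size-weighted loss: each batch's average loss is multiplied by its example count before the final division by the overall total. A minimal standalone sketch of that accumulation, using hypothetical batch sizes and losses (not taken from the repository):

# Illustrative only: names mirror the snippet above but the data is made up.
batches = [
    {"avg_loss": 0.5, "num": 10},   # batch 1: mean loss 0.5 over 10 examples
    {"avg_loss": 0.3, "num": 30},   # batch 2: mean loss 0.3 over 30 examples
]

total_loss = 0.0
total = 0
for b in batches:
    total_loss += b["avg_loss"] * b["num"]  # undo the per-batch averaging
    total += b["num"]

loss = float(total_loss) / total  # (0.5*10 + 0.3*30) / 40 = 0.35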
After Change
self._eval_batches(batches, eval_tensor_names=eval_tensor_names, **eval_args)
if params.supervise:
    num_corrects += cur_num_corrects
    num_wrongs += cur_num_wrongs
cur_num = sum(batch[NUM] for batch in batches)
total_loss += cur_avg_loss * cur_num
for eval_value_batch in eval_value_batches:
    eval_values.append([x.tolist() for x in eval_value_batch])  # numpy.ndarray.tolist()

# For printing results to stdout and log
if params.supervise:
    total = num_corrects + num_wrongs
    loss = float(total_loss) / total
    acc = float(num_corrects) / total
    string = "%s at epoch %d: acc = %.2f%% = %d / %d, loss = %.4f" % \
             (data_set.name, epoch, 100 * acc, num_corrects, total, loss)
    logging.info(string)
    print(string)
else:
    loss, acc = None, None

# For outputting eval json files