# Record the batch data-loading time as a metric without echoing it to the
# console (should_print=False).
# NOTE(review): `Logger` and `timer` are defined elsewhere in this file —
# presumably timer["load"] is the elapsed load time for this eval batch; confirm.
Logger().log_value("eval_batch.timer.load", timer["load"], should_print=False)
for key, value in out.items():
if type(value) == torch.autograd.variable.Variable:
value = value.data
if torch.is_tensor(value):
if value.dim() != 1 or value.size(0) != 1:
# --- After change ---
if torch.is_tensor(value):
if value.dim() == 0:
//value = value.detach() // not tracked by autograd anymore
value = value.item() // get number from a torch scalar
else:
continue
if type(value) == list: