if ctx.training:
    # Allocate (1, C) buffers and fill them with the batch mean / variance
    # via the CUDA extension kernel
    mean = x.new().resize_(1, running_mean.size(0))
    var = x.new().resize_(1, running_var.size(0))
    _check_contiguous(x, mean, var)
    _check(_ext.bn_mean_var_cuda, x, mean, var)

    if ctx.is_master:  # synchronized path: master reduces the per-GPU statistics
After Change
    # Mark in-place modified tensors
    ctx.mark_dirty(x, running_mean, running_var)
else:
    # Inference: reuse the stored running statistics
    mean, var = running_mean.contiguous(), running_var.contiguous()
    ctx.mark_dirty(x)

# BN forward + activation, written back into x by the backend
backend.forward(x, mean, var, weight, bias, ctx.affine, ctx.eps)
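In both versions, every tensor that forward overwrites is registered with ctx.mark_dirty, which is what lets autograd accept the in-place update. The following is a minimal, self-contained sketch of that pattern; the InPlaceScale op is purely illustrative and not part of the library.

import torch
from torch.autograd import Function

class InPlaceScale(Function):
    # Toy op that overwrites its input with alpha * input.
    @staticmethod
    def forward(ctx, x, alpha):
        ctx.alpha = alpha
        x.mul_(alpha)        # in-place update of the input buffer
        ctx.mark_dirty(x)    # declare the mutation to autograd
        return x

    @staticmethod
    def backward(ctx, grad_output):
        # d(alpha * x)/dx = alpha; the Python float alpha gets no gradient
        return grad_output * ctx.alpha, None

x = torch.randn(8, requires_grad=True)
y = InPlaceScale.apply(x.clone(), 2.0)  # clone: a leaf requiring grad cannot be overwritten
y.sum().backward()                      # x.grad is now filled with 2.0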