# Discriminator update
d_loss.mean().backward()
self.d_optimizer.step()

# Generator update
self.g_optimizer.zero_grad()
d_loss, g_loss = self.gan.forward_loss()
for hook in self.train_hooks:
    loss = hook.forward()
    if loss[1] is not None:
        g_loss += loss[1]  # assumed: accumulate the hook's generator loss term
After Change
# Discriminator update: assign the precomputed gradients, then step
for p, np in zip(self.gan.d_parameters(), d_grads):
    p.grad = np
if len(d_grads) > 0:
    self.d_optimizer.step()

# Generator update: recompute gradients and let each train hook transform them
_, g_grads = self.calculate_gradients(["g"])
for hook in self.train_hooks:
    _, g_grads = hook.gradients(_, g_grads)
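For context, here is a minimal, self-contained sketch of the same compute-gradients, pass-through-hooks, assign-to-.grad, then-step flow in plain PyTorch. The GradientClipHook and calculate_gradients helpers below are illustrative assumptions, not the trainer's actual implementations.

import torch

class GradientClipHook:
    # hypothetical train hook: rescales gradients before the optimizer step
    def gradients(self, d_grads, g_grads):
        g_grads = [torch.clamp(g, -1.0, 1.0) for g in g_grads]
        return d_grads, g_grads

def calculate_gradients(loss, params):
    # compute gradients without touching .grad, mirroring the split between
    # "calculate" and "apply" in the changed code
    return list(torch.autograd.grad(loss, params))

model = torch.nn.Linear(4, 1)
optimizer = torch.optim.SGD(model.parameters(), lr=0.1)
hooks = [GradientClipHook()]

x, y = torch.randn(8, 4), torch.randn(8, 1)
loss = torch.nn.functional.mse_loss(model(x), y)

g_grads = calculate_gradients(loss, list(model.parameters()))
for hook in hooks:
    _, g_grads = hook.gradients(None, g_grads)

# assign the (possibly modified) gradients, then step, as in the "After Change" code
for p, grad in zip(model.parameters(), g_grads):
    p.grad = grad
optimizer.step()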