for hook in self.train_hooks:
    loss = hook.forward()
    if loss[0] is not None:
        d_loss += loss[0]
d_loss.mean().backward()
self.d_optimizer.step()
self.g_optimizer.zero_grad()
d_loss, g_loss = self.gan.forward_loss()
After Change
d_grads, _ = self.calculate_gradients(["d"])
for hook in self.train_hooks:
    d_grads, _ = hook.gradients(d_grads, _)
for p, np in zip(self.gan.d_parameters(), d_grads):
    p.grad = np
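In the new flow, hooks no longer contribute extra loss terms; they transform the gradient lists before those are written back onto the discriminator parameters as p.grad. As a minimal sketch, assuming the gradients(d_grads, g_grads) signature used above, a hook could look like the following (the class name and clipping behavior are hypothetical, shown only to illustrate the interface):

import torch

class ClipGradientHook:
    # Hypothetical hook: clips discriminator gradients before they are applied.
    def __init__(self, clip_value=1.0):
        self.clip_value = clip_value

    def gradients(self, d_grads, g_grads):
        # Receives and returns both gradient lists, matching the
        # d_grads, _ = hook.gradients(d_grads, _) call in the training loop.
        d_grads = [torch.clamp(g, -self.clip_value, self.clip_value) for g in d_grads]
        return d_grads, g_grads

Because the hook runs before the gradients are assigned and the optimizer step, any modification it makes is applied to the actual parameter update.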