# Do checkpointing.
if i == steps - 1 or time.time() >= checkpoint_time + checkpoint_interval:
    with self._session.as_default():
        manager.save()
        checkpoint_time = time.time()

def restore(self):
    """Reload the model parameters from the most recent checkpoint file."""
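For context, here is a minimal, self-contained sketch of the save/restore pattern the snippet relies on, using tf.train.Checkpoint and tf.train.CheckpointManager in TF 2.x eager mode (the snippet itself wraps the same manager.save() call in a TF 1.x session). The model, optimizer, and checkpoint directory are illustrative assumptions, not names from the original code.

import tensorflow as tf

# Illustrative model/optimizer; the real objects come from the class above.
model = tf.keras.Sequential([tf.keras.layers.Dense(4)])
optimizer = tf.keras.optimizers.Adam()

checkpoint = tf.train.Checkpoint(model=model, optimizer=optimizer)
manager = tf.train.CheckpointManager(checkpoint, directory="/tmp/ckpts",
                                     max_to_keep=3)

manager.save()                                 # write a new checkpoint
checkpoint.restore(manager.latest_checkpoint)  # reload the most recent one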
After Change
# Accumulate meta-gradients elementwise; the first meta-batch
# initializes the running sum.
if summed_gradients is None:
    summed_gradients = meta_gradients
else:
    summed_gradients = [
        s + g for s, g in zip(summed_gradients, meta_gradients)
    ]
# Apply the summed gradients in a single optimizer step.
self._tf_optimizer.apply_gradients(zip(summed_gradients, variables))
# Do checkpointing.
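As a standalone illustration of the accumulate-then-apply pattern above, the following sketch sums gradients from several batches before taking a single optimizer step. The names model, loss_fn, batches, and accumulated_step are assumed placeholders, not identifiers from the original code.

import tensorflow as tf

def accumulated_step(model, loss_fn, batches, optimizer):
    """Sum gradients over several batches, then apply them once."""
    summed_gradients = None
    for x, y in batches:
        with tf.GradientTape() as tape:
            loss = loss_fn(y, model(x))
        grads = tape.gradient(loss, model.trainable_variables)
        if summed_gradients is None:
            summed_gradients = grads
        else:
            summed_gradients = [s + g for s, g in zip(summed_gradients, grads)]
    # One parameter update using the accumulated gradients.
    optimizer.apply_gradients(zip(summed_gradients, model.trainable_variables))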