# NOTE(review): this fragment was whitespace-mangled (indentation lost) and
# contained a stray "After Change" diff marker plus a C-style `//` comment,
# both of which are SyntaxErrors in Python. Reconstructed below: the
# best-checkpoint save is placed inside the training loop, which is the only
# placement that leaves `while True:` with a body. TODO confirm against the
# original file.
optimizer = optim.Adam(model.parameters(), lr=opt.learning_rate)

# Restore optimizer state when resuming from a saved run.
# assumes infos["state_dict"] holds an optimizer state dict — TODO confirm
if infos.get("state_dict", None):
    optimizer.load_state_dict(infos["state_dict"])

# Main training loop; everything besides best-checkpointing is outside this view.
while True:
    # Snapshot the model whenever the current evaluation is the best so far.
    # (best_flag is presumably set by evaluation code elsewhere in the loop.)
    if best_flag:
        checkpoint_path = os.path.join(opt.checkpoint_path, "model-best.ckpt")
        torch.save(model.state_dict(), checkpoint_path)
        print("model saved to {}".format(checkpoint_path))
        # Persist the bookkeeping dict alongside the weights so a resume can
        # recover iteration counters, vocab, etc.
        with open(os.path.join(opt.checkpoint_path, "infos_" + opt.id + "-best.pkl"), "wb") as f:
            cPickle.dump(infos, f)