if args.fixbase_epoch > 0:
    # Warm-up ("fixbase") phase: train only the classifier head with a
    # temporary optimizer while the base network stays frozen. Requires the
    # model to expose a `classifier` nn.Module; otherwise the phase is skipped.
    if hasattr(model, "classifier") and isinstance(model.classifier, nn.Module):
        optimizer_tmp = init_optimizer(model.classifier.parameters(), **optimizer_kwargs(args))
        # NOTE(review): the warm-up training loop that should consume
        # optimizer_tmp for args.fixbase_epoch epochs appears to be missing
        # from this section — the NotImplementedError below guards the
        # incomplete path. Restore the loop before removing the raise.
        del optimizer_tmp  # moved inside this branch: it is only bound here (was a NameError on the else path)
        print("Now open all layers for training")
        raise NotImplementedError
    else:
        # Fixed SyntaxError: the original nested unescaped double quotes
        # inside a double-quoted string literal.
        print('Warn: model has no attribute "classifier" and fixbase_epoch is reset to 0')
        args.fixbase_epoch = 0
for epoch in range(args.start_epoch, args.max_epoch):
start_train_time = time.time()
train(epoch, model, criterion, optimizer, trainloader, use_gpu)