+# Training loop: resumes from the last completed epoch (nb_epochs_finished),
+# presumably restored from a checkpoint elsewhere — confirm against the loader.
+for n_epoch in range(nb_epochs_finished, nb_epochs):
+
+ # Select the optimizer from the --optim CLI flag, with a fixed learning rate.
+ # NOTE(review): the optimizer is re-created at the top of EVERY epoch, which
+ # discards Adam/AdamW internal state (first/second moment estimates) between
+ # epochs. That is harmless for plain SGD without momentum, but changes the
+ # effective behaviour of adam/adamw — confirm the per-epoch reset is
+ # intentional; otherwise construct the optimizer once, before this loop.
+ if args.optim == 'sgd':
+ optimizer = torch.optim.SGD(model.parameters(), lr = args.learning_rate)
+ elif args.optim == 'adam':
+ optimizer = torch.optim.Adam(model.parameters(), lr = args.learning_rate)
+ elif args.optim == 'adamw':
+ optimizer = torch.optim.AdamW(model.parameters(), lr = args.learning_rate)
+ else:
+ # Fail fast on an unrecognised optimizer name rather than training silently
+ # with a wrong configuration.
+ raise ValueError(f'Unknown optimizer {args.optim}.')