 for n_epoch in range(args.nb_epochs):
     learning_rate = learning_rate_scheduler.get_learning_rate()
+    log_string(f"learning_rate {n_epoch} {learning_rate}")
+
     optimizer = torch.optim.Adam(model.parameters(), lr=learning_rate)
     acc_train_loss, nb_train_samples = 0, 0
 for n_epoch in range(nb_epochs_finished, args.nb_epochs):
     learning_rate = learning_rate_scheduler.get_learning_rate()
-
-    log_string(f"learning_rate {learning_rate}")
+    log_string(f"learning_rate {n_epoch} {learning_rate}")
     if args.optim == "sgd":
         optimizer = torch.optim.SGD(model.parameters(), lr=learning_rate)
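For context, both hunks assume a learning_rate_scheduler object that is queried once per epoch through get_learning_rate(). The sketch below is a minimal, hypothetical stand-in for that interface; the class name StepDecayScheduler and its parameters are illustrative and not taken from the repository.

# Hypothetical sketch of the scheduler interface assumed by the loops above;
# the name StepDecayScheduler and its parameters are illustrative only.
class StepDecayScheduler:
    def __init__(self, base_lr=1e-3, decay=0.5, every=10):
        self.base_lr, self.decay, self.every = base_lr, decay, every
        self.n_calls = 0

    def get_learning_rate(self):
        # Called once per epoch: decay the base rate every `every` epochs.
        lr = self.base_lr * self.decay ** (self.n_calls // self.every)
        self.n_calls += 1
        return lr

# Usage mirroring the training loops above (hypothetical):
# learning_rate_scheduler = StepDecayScheduler(base_lr=1e-3)
# for n_epoch in range(args.nb_epochs):
#     learning_rate = learning_rate_scheduler.get_learning_rate()
#     log_string(f"learning_rate {n_epoch} {learning_rate}")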