beaver.git / commitdiff (parent: a113de0)
Update
author    François Fleuret <francois@fleuret.org>  Tue, 28 Mar 2023 20:36:19 +0000 (22:36 +0200)
committer François Fleuret <francois@fleuret.org>  Tue, 28 Mar 2023 20:36:19 +0000 (22:36 +0200)
beaver.py
diff --git a/beaver.py b/beaver.py
index f5b3563..7800527 100755
--- a/beaver.py
+++ b/beaver.py
@@ -265,6 +265,8 @@ def oneshot(gpt, learning_rate_scheduler, task):
     for n_epoch in range(args.nb_epochs):
         learning_rate = learning_rate_scheduler.get_learning_rate()
+        log_string(f"learning_rate {n_epoch} {learning_rate}")
+
         optimizer = torch.optim.Adam(model.parameters(), lr=learning_rate)
         acc_train_loss, nb_train_samples = 0, 0
@@ -711,8 +713,7 @@
 learning_rate_scheduler.reset()
 for n_epoch in range(nb_epochs_finished, args.nb_epochs):
     learning_rate = learning_rate_scheduler.get_learning_rate()
-
-    log_string(f"learning_rate {learning_rate}")
+    log_string(f"learning_rate {n_epoch} {learning_rate}")
 
     if args.optim == "sgd":
         optimizer = torch.optim.SGD(model.parameters(), lr=learning_rate)
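
Both hunks settle on the same pattern: the per-epoch learning-rate log line now includes the epoch index, so resumed or grepped runs stay unambiguous. Below is a minimal, self-contained sketch of that pattern; the scheduler and log_string here are hypothetical stand-ins with the same interface the diff relies on (reset() then one get_learning_rate() call per epoch), not beaver.py's actual implementations.

    import torch

    def log_string(s):
        # Stand-in for beaver.py's logger, which also timestamps and
        # tees each line to a log file.
        print(s)

    class StepScheduler:
        # Hypothetical scheduler exposing the interface used in the diff.
        def __init__(self, lr=1e-3, decay=0.5, every=10):
            self.lr, self.decay, self.every = lr, decay, every
            self.reset()

        def reset(self):
            self.n_calls = 0

        def get_learning_rate(self):
            lr = self.lr * self.decay ** (self.n_calls // self.every)
            self.n_calls += 1
            return lr

    model = torch.nn.Linear(8, 1)
    learning_rate_scheduler = StepScheduler()
    nb_epochs = 3

    learning_rate_scheduler.reset()
    for n_epoch in range(nb_epochs):
        learning_rate = learning_rate_scheduler.get_learning_rate()
        # The change made in both hunks: tag the log line with n_epoch.
        log_string(f"learning_rate {n_epoch} {learning_rate}")

        # As in the diff, the optimizer is re-created each epoch with the
        # scheduler's current value rather than mutating param_groups.
        optimizer = torch.optim.SGD(model.parameters(), lr=learning_rate)

        # One dummy gradient step so the sketch actually exercises the loop.
        loss = model(torch.randn(4, 8)).pow(2).mean()
        optimizer.zero_grad()
        loss.backward()
        optimizer.step()

Note that re-creating the optimizer every epoch, as both loops in the diff do, keeps the code simple but discards optimizer state; with Adam (the oneshot path) that means the moment estimates restart each epoch.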