From ffe183868ac8563fd82fc8312fda90f6f8a95833 Mon Sep 17 00:00:00 2001
From: =?utf8?q?Fran=C3=A7ois=20Fleuret?= <francois@fleuret.org>
Date: Wed, 10 Jan 2024 09:43:18 +0100
Subject: [PATCH] Update.

---
 main.py  | 4 ++--
 mygpt.py | 3 +++
 2 files changed, 5 insertions(+), 2 deletions(-)

diff --git a/main.py b/main.py
index 74e1d6c..c51035c 100755
--- a/main.py
+++ b/main.py
@@ -117,7 +117,7 @@ parser.add_argument("--deterministic_synthesis", action="store_true", default=Fa
 
 parser.add_argument("--no_checkpoint", action="store_true", default=False)
 
-parser.add_argument("--overwrite_results", action="store_true", default=False)
+parser.add_argument("--continue_training", action="store_true", default=False)
 
 parser.add_argument("--checkpoint_name", type=str, default="checkpoint.pth")
 
@@ -426,7 +426,7 @@ else:
 try:
     os.mkdir(args.result_dir)
 except FileExistsError:
-    if not args.overwrite_results:
+    if not args.continue_training:
         print(f"result directory {args.result_dir} already exists")
         exit(1)
 
diff --git a/mygpt.py b/mygpt.py
index d8fd227..676b921 100755
--- a/mygpt.py
+++ b/mygpt.py
@@ -574,6 +574,9 @@ class Caterpillar(nn.Module):
         ######################################################################
 
         if self.training and self.proba_gate_dropout > 0.0:
+            # This is a better implementation of "flashbacks".  A is
+            # NxExT where E is the caterpillar's row.
+
             warnings.warn("gate dropout", RuntimeWarning)
             epsilon = 0.5
 
-- 
2.39.5