From: François Fleuret Date: Fri, 7 Jul 2023 20:23:49 +0000 (+0200) Subject: Update. X-Git-Url: https://fleuret.org/cgi-bin/gitweb/gitweb.cgi?a=commitdiff_plain;h=363ce48d64d1a036b86d29564bf6ad367126c2b1;p=picoclvr.git Update. --- diff --git a/maze.py b/maze.py index c2774dd..f6715f0 100755 --- a/maze.py +++ b/maze.py @@ -324,9 +324,9 @@ def save_image( if __name__ == "__main__": device = torch.device("cuda" if torch.cuda.is_available() else "cpu") - mazes, paths = create_maze_data(8) + mazes, paths, policies = create_maze_data(8) mazes, paths = mazes.to(device), paths.to(device) - save_image("test.png", mazes, paths, paths) + save_image("test.png", mazes=mazes, target_paths=paths, predicted_paths=paths) print(path_correctness(mazes, paths)) ###################################################################### diff --git a/mygpt.py b/mygpt.py index 8cd0152..45b7b59 100755 --- a/mygpt.py +++ b/mygpt.py @@ -45,6 +45,9 @@ class BracketedSequence: def slice(self): return self.x[:, self.first : self.first + self.nb] + def complete(self): + return self.first == 0 and self.nb == self.x.size(1) + ###################################################################### @@ -120,7 +123,6 @@ class QKVAttention(nn.Module): def randw(*d): return nn.Parameter(torch.randn(*d) / math.sqrt(d[-1])) - assert causal, "TODO: Switch off the cache when non-causal!!!" self.causal = causal self.attention_dropout = attention_dropout @@ -132,6 +134,10 @@ class QKVAttention(nn.Module): def forward(self, bs_q): x_q = bs_q.x + assert ( + self.causal or bs_q.complete() + ), "Partial evaluation is only possible for causal models" + if bs_q.first == 0: self.cache_k = x_q.new_zeros( x_q.size(0), self.w_k.size(0), x_q.size(1), self.w_k.size(1)