From e6a0ef14db49f98a2b9c1b1a1be2a98af62c4919 Mon Sep 17 00:00:00 2001 From: =?utf8?q?Fran=C3=A7ois=20Fleuret?= Date: Sun, 23 Jul 2023 12:47:21 +0200 Subject: [PATCH] Update. --- tasks.py | 5 +++-- 1 file changed, 3 insertions(+), 2 deletions(-) diff --git a/tasks.py b/tasks.py index 42d9126..17904d8 100755 --- a/tasks.py +++ b/tasks.py @@ -1283,7 +1283,8 @@ class RPL(Task): ) if save_attention_image is not None: - input = self.test_input[:1].clone() + ns = torch.randint(self.test_input.size(0), (1,)).item() + input = self.test_input[ns : ns + 1].clone() last = (input != self.t_nul).max(0).values.nonzero().max() + 3 input = input[:, :last].to(self.device) @@ -1296,7 +1297,7 @@ class RPL(Task): ram = model.retrieve_attention() model.record_attention(False) - tokens_output = [self.id2token[i.item()] for i in input[0]] + tokens_output = [self.id2token[i.item()] for i in input[0]] tokens_input = ["n/a"] + tokens_output[:-1] for n_head in range(ram[0].size(1)): filename = os.path.join( -- 2.39.5