X-Git-Url: https://fleuret.org/cgi-bin/gitweb/gitweb.cgi?a=blobdiff_plain;f=tasks.py;h=0f80d4fc3499a44138f0f86ab3f2fd9f0ca38337;hb=3168a3161668caacb36ebd717e308e36c9eef2b1;hp=d680951b4090c2df8858b07786cc6292232b018a;hpb=21ed4aa91d0f1ac87ec684d8808e5ced552ad457;p=picoclvr.git

diff --git a/tasks.py b/tasks.py
index d680951..0f80d4f 100755
--- a/tasks.py
+++ b/tasks.py
@@ -1861,3 +1861,90 @@ class QMLP(Task):
 
 
 ######################################################################
+
+import escape
+
+
+class Escape(Task):
+    def __init__(
+        self,
+        nb_train_samples,
+        nb_test_samples,
+        batch_size,
+        height,
+        width,
+        T,
+        logger=None,
+        device=torch.device("cpu"),
+    ):
+        super().__init__()
+
+        self.batch_size = batch_size
+        self.device = device
+        self.height = height
+        self.width = width
+
+        states, actions, rewards = escape.generate_episodes(
+            nb_train_samples + nb_test_samples, height, width, T
+        )
+        seq = escape.episodes2seq(states, actions, rewards, lookahead_delta=5)
+        self.train_input = seq[:nb_train_samples].to(self.device)
+        self.test_input = seq[nb_train_samples:].to(self.device)
+
+        self.nb_codes = max(self.train_input.max(), self.test_input.max()) + 1
+
+        # if logger is not None:
+        #     for s, a in zip(self.train_input[:100], self.train_ar_mask[:100]):
+        #         logger(f"train_sequences {self.problem.seq2str(s)}")
+        #         a = "".join(["01"[x.item()] for x in a])
+        #         logger(f"                {a}")
+
+    def batches(self, split="train", nb_to_use=-1, desc=None):
+        assert split in {"train", "test"}
+        input = self.train_input if split == "train" else self.test_input
+        if nb_to_use > 0:
+            input = input[:nb_to_use]
+        if desc is None:
+            desc = f"epoch-{split}"
+        for batch in tqdm.tqdm(
+            input.split(self.batch_size), dynamic_ncols=True, desc=desc
+        ):
+            yield batch
+
+    def vocabulary_size(self):
+        return self.nb_codes
+
+    def produce_results(
+        self, n_epoch, model, result_dir, logger, deterministic_synthesis, nmax=1000
+    ):
+        result = self.test_input[:100].clone()
+        ar_mask = (
+            torch.arange(result.size(1), device=result.device)
+            > self.height * self.width + 2
+        ).long()[None, :]
+        ar_mask = ar_mask.expand_as(result)
+        result *= 1 - ar_mask  # paranoia: clear what will be regenerated
+
+        masked_inplace_autoregression(
+            model,
+            self.batch_size,
+            result,
+            ar_mask,
+            deterministic_synthesis,
+            device=self.device,
+        )
+
+        s, a, r, lr = escape.seq2episodes(
+            result, self.height, self.width, lookahead=True
+        )
+        episodes_str = escape.episodes2str(
+            s, a, r, lookahead_rewards=lr, unicode=True, ansi_colors=True
+        )
+
+        filename = os.path.join(result_dir, f"test_seq_{n_epoch:04d}.txt")
+        with open(filename, "w") as f:
+            f.write(episodes_str)
+        logger(f"wrote {filename}")
+
+
+######################################################################
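
For readers of this commit, a minimal usage sketch of the new task. The parameter values are hypothetical (the commit does not prescribe defaults), and it assumes tasks.py and the new escape module are importable as committed here:

import torch
import tasks

# hypothetical sizes; pick whatever fits the experiment
task = tasks.Escape(
    nb_train_samples=25000,
    nb_test_samples=1000,
    batch_size=25,
    height=5,
    width=7,
    T=25,
    device=torch.device("cuda" if torch.cuda.is_available() else "cpu"),
)

print("vocabulary_size", task.vocabulary_size())

for input in task.batches(split="train"):
    # one (batch_size, seq_len) tensor of token indices per iteration
    pass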
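
The ar_mask logic in produce_results keeps the first height * width + 3 tokens of each test sequence as a prompt (mask value 0) and regenerates everything after them (mask value 1). A standalone sketch of the same masking pattern, with made-up sizes:

import torch

height, width, seq_len = 4, 6, 100  # made-up sizes
result = torch.randint(10, (2, seq_len))  # two fake token sequences

# 1 where the model must generate, 0 where the prompt is kept
ar_mask = (torch.arange(seq_len) > height * width + 2).long()[None, :]
ar_mask = ar_mask.expand_as(result)

prompt = result * (1 - ar_mask)  # zero out the part to be generated
assert (prompt[:, height * width + 3 :] == 0).all()
assert (prompt[:, : height * width + 3] == result[:, : height * width + 3]).all()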
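
masked_inplace_autoregression is defined earlier in tasks.py and is not shown in this hunk; conceptually it fills the masked positions one token at a time, left to right. Below is a simplified greedy stand-in, assuming a mygpt-style model whose output at position t gives the logits of token t; this is my own sketch of the idea, not the committed function:

import torch


def greedy_masked_fill(model, input, ar_mask):
    # input: (N, T) token tensor, completed in place
    # ar_mask: (N, T), 1 at the positions the model must produce
    to_generate = (ar_mask.sum(0) > 0).nonzero().flatten()
    for t in to_generate.tolist():
        logits = model(input)[:, t]  # assumed (N, vocab) logits for token t
        t_next = logits.argmax(-1)  # deterministic_synthesis=True behavior
        input[:, t] = ar_mask[:, t] * t_next + (1 - ar_mask[:, t]) * input[:, t]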