+ ri = (
+ (x == self.height * self.width).long().sum(dim=-1).argmax(-1).view(-1, 1, 1)
+ )
+ rj = (
+ (x == self.height * self.width).long().sum(dim=-2).argmax(-1).view(-1, 1, 1)
+ )
+
+ m = 1 - torch.logical_or(i == ri, j == rj).long().flatten(1)
+
+ x = x.flatten(1)
+ u = torch.arange(self.height * self.width, device=x.device).reshape(1, -1)
+
+ d = (x - (m * u + (1 - m) * self.height * self.width)).abs().sum(-1)
+ return d
+
def moves(self, x):
    """Enumerate every single-move successor of each board in the batch.

    A move slides one full row left/right or one full column up/down by
    one cell (cyclically, via roll), so each board has exactly
    2 * height + 2 * width successors.

    Args:
        x: long tensor of boards, shape (batch, height, width).

    Returns:
        Tensor of shape (batch, 2 * height + 2 * width, height, width)
        holding all successor boards.
    """
    nb_moves = self.height * 2 + self.width * 2
    successors = x[:, None, :, :].expand(-1, nb_moves, -1, -1).clone()

    slot = 0
    # Row moves: roll each row one step in both directions.
    for r in range(self.height):
        for shift in (-1, 1):
            successors[:, slot, r, :] = successors[:, slot, r, :].roll(
                dims=-1, shifts=shift
            )
            slot += 1

    # Column moves: roll each column one step in both directions.
    for c in range(self.width):
        for shift in (-1, 1):
            successors[:, slot, :, c] = successors[:, slot, :, c].roll(
                dims=-1, shifts=shift
            )
            slot += 1

    return successors
+
def generate_sequences(self, nb):
    """Generate nb random game trajectories as flat token sequences.

    Starting from random boards, repeatedly applies one uniformly chosen
    legal move for nb_time_steps - 1 steps. When self.hard is set, the
    frame order is reversed (the model must predict the past).

    Args:
        nb: number of sequences to generate.

    Returns:
        (seq, ar_mask) where seq has shape
        (nb, nb_time_steps * height * width) and ar_mask is an all-ones
        int64 tensor of the same shape.
    """
    state = self.start_random(nb)
    frames = [state.flatten(1)]
    batch_idx = torch.arange(nb)

    for _ in range(self.nb_time_steps - 1):
        candidates = self.moves(state)
        # One uniformly random legal move per sequence.
        pick = torch.randint(candidates.size(1), (nb,))
        state = candidates[batch_idx, pick]
        frames.append(state.flatten(1))

    if self.hard:
        frames.reverse()

    seq = torch.cat(frames, dim=1)
    return seq, seq.new_full(seq.size(), 1, dtype=torch.int64)
+
def compute_nb_correct(self, input, ar_mask, result):
    """Count how many generated sequences form a valid trajectory.

    Splits each flat sequence back into boards, then accumulates a
    penalty: the start-state error of the first frame plus, for every
    step, the distance from the next frame to the closest legal
    successor of the current one. A sequence is correct iff its total
    penalty is zero.

    Args:
        input: unused, kept for the task-interface signature.
        ar_mask: unused, kept for the task-interface signature.
        result: long tensor (batch, nb_time_steps * height * width).

    Returns:
        (nb_total, nb_correct) as Python ints.
    """
    frames = [
        chunk.reshape(result.size(0), self.height, self.width)
        for chunk in result.split(self.height * self.width, dim=1)
    ]
    # In hard mode sequences are stored reversed; undo that first.
    if self.hard:
        frames.reverse()

    penalty = self.start_error(frames[0])

    for t in range(self.nb_time_steps - 1):
        prev, cur = frames[t], frames[t + 1]
        candidates = self.moves(prev)
        mismatch = (cur[:, None] - candidates).abs().sum((-1, -2))
        penalty = penalty + mismatch.min(dim=-1).values

    nb_total = result.size(0)
    nb_correct = (penalty == 0).long().sum().item()
    return nb_total, nb_correct
+
def seq2str(self, seq):
    """Render a flat token sequence as human-readable boards.

    Cells are two-digit numbers joined by "-", rows joined by " ",
    successive boards joined by " | ". Tokens >= height * width
    (out-of-board markers) render as "**".
    """
    cells = self.height * self.width
    boards = []
    for board in seq.split(cells):
        rows = []
        for row in board.split(self.width):
            rows.append(
                "-".join("**" if v >= cells else f"{v:02d}" for v in row)
            )
        boards.append(" ".join(rows))
    return " | ".join(boards)
+
+
+####################