Update.
author     François Fleuret <francois@fleuret.org>  Mon, 17 Jun 2024 13:41:51 +0000 (15:41 +0200)
committer  François Fleuret <francois@fleuret.org>  Mon, 17 Jun 2024 13:41:51 +0000 (15:41 +0200)
turing.py [new file with mode: 0755]

diff --git a/turing.py b/turing.py
new file mode 100755 (executable)
index 0000000..66c7f03
--- /dev/null
+++ b/turing.py
@@ -0,0 +1,50 @@
+#!/usr/bin/env python
+
+import torch
+
+
+def generate_turing_sequences(N, nb_iter=5, nb_states=4, nb_symbols=2, tape_size=5):
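+    # One random Turing machine per sequence: the transition tables are
+    # indexed by (machine, current state, symbol under the head).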
+    next_state = torch.randint(nb_states, (N, nb_states, nb_symbols))
+    next_symbol = torch.randint(nb_symbols, (N, nb_states, nb_symbols))
+    next_move = torch.randint(3, (N, nb_states, nb_symbols))
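+    # Head moves are drawn in {0, 1, 2} and shifted to {-1, 0, +1} below.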
+
+    all_n = torch.arange(N)
+
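+    # Random initial tape contents, head positions, and machine states.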
+    tape = torch.randint(nb_symbols, (N, tape_size))
+    position = torch.randint(tape_size, (N,))
+    state = torch.randint(nb_states, (N,))
+
+    result = []
+
+    for _ in range(nb_iter):
+        result.append(tape.clone())  # clone: the tape is modified in place below
+        current_symbol = tape[all_n, position]
+        tape[all_n, position] = next_symbol[all_n, state, current_symbol]
+        position = (position + next_move[all_n, state, current_symbol] - 1) % tape_size
+        state = next_state[all_n, state, current_symbol]
+
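+    # Stack the recorded tapes into a single (N, nb_iter, tape_size) tensor.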
+    result = torch.cat([x[:, None, :] for x in result], dim=1)
+
+    return result
+
+
+######################################################################
+
+if __name__ == "__main__":
+    print("Basic check.")
+
+    tapes = generate_turing_sequences(5)
+
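+    # tapes has shape (N, nb_iter, tape_size): for each iteration, print the tape of every sequence.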
+    for i in range(tapes.size(1)):
+        print(f"- {i:03d} ------------------------")
+        # for s, h, r in zip(state, position, tape):
+        # print("".join([f"{x}" for x in r]))
+        # print(" " * h + f"^[{s}]")
+        for r in tapes:
+            print("".join([f"{x}" for x in r[i]]))