Update: refactor eingather() around a recursive helper, add lambda_eingather() to precompile the gather from shapes, and switch the example to functorch.dim.
diff --git a/eingather.py b/eingather.py
index b271100..03b713c 100755
@@ -15,54 +15,99 @@ def eingather(op, src, *indexes):
     s_indexes = re.findall(r"\(([^)]*)\)", s_src)
     s_src = re.sub(r"\([^)]*\)", "_", s_src)
 
-    all_sizes = tuple(d for s in ( src, ) + indexes for d in s.size())
+    all_sizes = tuple(d for s in (src,) + indexes for d in s.size())
     s_all = "".join([s_src] + s_indexes)
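+    # output shape: for each output letter, the size of the first dim
+    # carrying that letter (src's dims first, then each index's dims)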
     shape = tuple(all_sizes[s_all.index(v)] for v in s_dst)
 
-    idx = []
-    n_index = 0
+    def do(x, s_x):
+        idx = []
+        n_index = 0
 
-    for i in range(src.dim()):
-        v = s_src[i]
-        if v == "_":
-            index, s_index = indexes[n_index], s_indexes[n_index]
-            n_index += 1
+        for i in range(x.dim()):
+            v = s_x[i]
+            if v == "_":
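+                # this dim of x is addressed by the next index tensor,
+                # itself gathered recursively to the output shape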
+                idx.append(do(indexes[n_index], s_indexes[n_index]))
+                n_index += 1
+            else:
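+                # dim i is plain output axis j: an arange along axis j,
+                # broadcast to the full output shape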
+                j = s_dst.index(v)
+                a = (
+                    torch.arange(x.size(i))
+                    .reshape((1,) * j + (-1,) + (1,) * (len(s_dst) - j - 1))
+                    .expand(shape)
+                )
+                idx.append(a)
+
+        # advanced indexing: one index tensor per dim of x
+        return x[tuple(idx)]
+
+    return do(src, s_src)
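+
+# Op syntax, e.g. "ca(eae)(a) -> ace": each bare letter ties a dim of src
+# to an output dim, each "(...)" stands for a dim of src indexed by the
+# next index tensor whose own dims are labeled by the letters inside, so
+# result[a, c, e] = src[c, a, index1[e, a, e], index2[a]].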
+
+
+def lambda_eingather(op, src_shape, *indexes_shape):
+    s_src, s_dst = re.search("^([^ ]*) *-> *(.*)", op).groups()
+    s_indexes = re.findall(r"\(([^)]*)\)", s_src)
+    s_src = re.sub(r"\([^)]*\)", "_", s_src)
 
-            sub_idx = []
+    all_sizes = tuple(d for s in (src_shape,) + indexes_shape for d in s)
+    s_all = "".join([s_src] + s_indexes)
+    shape = tuple(all_sizes[s_all.index(v)] for v in s_dst)
 
-            for i in range(index.dim()):
-                v = s_index[i]
+    def do(x_shape, s_x):
+        idx = []
+        n_index = 0
+
+        for i in range(len(x_shape)):
+            v = s_x[i]
+            if v == "_":
+                f = do(indexes_shape[n_index], s_indexes[n_index])
+                # f=f, n=n_index bind the current values (late-binding fix)
+                idx.append(
+                    lambda indexes, f=f, n=n_index: indexes[n][tuple(f(indexes))]
+                )
+                n_index += 1
+            else:
                 j = s_dst.index(v)
                 a = (
-                    torch.arange(index.size(i))
+                    torch.arange(x_shape[i])
                     .reshape((1,) * j + (-1,) + (1,) * (len(s_dst) - j - 1))
                     .expand(shape)
                 )
-                sub_idx.append(a)
+                # a=a freezes the current grid; a is rebound every pass
+                idx.append(lambda indexes, a=a: a)
+
+        return lambda indexes: [f(indexes) for f in idx]
+
+    f = do(src_shape, s_src)
+    return lambda src, *indexes: src[tuple(f(indexes))]
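+
+# Unlike eingather(), which gathers eagerly, lambda_eingather() works from
+# shapes alone: the parsing and the arange/expand grids are built once and
+# the returned closure only performs the actual indexing.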
+
+
+######################################################################
 
-            index = index[sub_idx]
-            idx.append(index)
-        else:
-            j = s_dst.index(v)
-            a = (
-                torch.arange(src.size(i))
-                .reshape((1,) * j + (-1,) + (1,) * (len(s_dst) - j - 1))
-                .expand(shape)
-            )
-            idx.append(a)
+# src = torch.rand(3, 5, 3)
 
-    return src[idx]
+# print(eingather("aba -> ab", src))
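+# i.e. result[a, b] = src[a, b, a], with shape (3, 5)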
 
+# f = lambda_eingather("aba -> ab", src.shape)
 
-#######################
+# print(f(src))
+
+# exit(0)
+
+######################################################################
 
 src = torch.rand(3, 5, 7, 11)
 index1 = torch.randint(src.size(2), (src.size(3), src.size(1), src.size(3)))
 index2 = torch.randint(src.size(3), (src.size(1),))
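+# index1 carries values for src's dim 2 and has dims (e, a, e); index2
+# carries values for src's dim 3 and has dims (a,)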
 
-# I want result[a, c, e] = src[c, a, index1[e, a, e], index2[a], e]
+# f = lambda_eingather("ca(eae)(a) -> ace", src.shape, index1.shape, index2.shape)
+
+# print(f(src, index1, index2))
+
+# result[a, c, e] = src[c, a, index1[e, a, e], index2[a]]
+
+# result = eingather("ca(eae)(a) -> ace", src, index1, index2)
+
+from functorch.dim import dims
 
-result = eingather("ca(eae)(a) -> ace", src, index1, index2)
+a, c, e = dims(3)
+result = src[c, a, index1[e, a, e], index2[a]].order(a, c, e)
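+# a, c, e are first-class dims: indexing src with them binds named axes,
+# and .order(a, c, e) lays the result out as an ordinary tensor with those
+# axes positional.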
 
 # Check