return do(src, s_src)
-#######################
+def lambda_eingather(op, src_shape, *indexes_shape):
+    """Compile an einsum-like gather spec into a reusable callable.
+
+    op has the form "ca(eae)(a) -> ace": letters name dimensions of src,
+    each parenthesized group is consumed by one of the index tensors, and
+    the part after "->" names the output dimensions.  Given the shapes of
+    src and of the index tensors, returns f such that f(src, *indexes)
+    performs the gather, e.g. for the op above
+    f(src, i1, i2)[a, c, e] == src[c, a, i1[e, a, e], i2[a]].
+    """
+    s_src, s_dst = re.search(r"^([^ ]*) *-> *(.*)", op).groups()
+    s_indexes = re.findall(r"\(([^)]*)\)", s_src)
+    s_src = re.sub(r"\([^)]*\)", "_", s_src)
+
+    # Output size of each destination variable: taken from the first
+    # dimension (of src, then of the index tensors) labeled with it.
+    all_sizes = tuple(d for s in (src_shape,) + indexes_shape for d in s)
+    s_all = "".join([s_src] + s_indexes)
+    shape = tuple(all_sizes[s_all.index(v)] for v in s_dst)
+
+    def do(x_shape, s_x):
+        # Build one index-producing closure per dimension of x_shape.
+        idx = []
+        n_index = 0
+
+        for i in range(len(x_shape)):
+            v = s_x[i]
+            if v == "_":
+                f = do(indexes_shape[n_index], s_indexes[n_index])
+                # Bind n_index and f as defaults: a plain closure captures
+                # them late, so every "_" lambda would otherwise see the
+                # final loop values and pick the wrong index tensor.
+                idx.append(lambda indexes, n=n_index, f=f: indexes[n][f(indexes)])
+                n_index += 1
+            else:
+                j = s_dst.index(v)
+                a = (
+                    torch.arange(x_shape[i])
+                    .reshape((1,) * j + (-1,) + (1,) * (len(s_dst) - j - 1))
+                    .expand(shape)
+                )
+                # Same late-binding hazard for a: bind it as a default.
+                idx.append(lambda indexes, a=a: a)
+
+        return lambda indexes: [f(indexes) for f in idx]
+
+    f = do(src_shape, s_src)
+    # Debug prints removed: print(f"{f(0)=}") called the compiled indexer
+    # with the int 0, which raised TypeError (0[n]) for any op containing
+    # a parenthesized index group.
+    return lambda src, *indexes: src[f(indexes)]
+
+
+######################################################################
+
+# src = torch.rand(3, 5, 3)
+
+# print(eingather("aba -> ab", src))
+
+# f = lambda_eingather("aba -> ab", src.shape)
+
+# print(f(src))
+
+# exit(0)
+
+######################################################################
+# Example/check driver: a 4-d source plus two index tensors for the op
+# "ca(eae)(a) -> ace".
src = torch.rand(3, 5, 7, 11)
index1 = torch.randint(src.size(2), (src.size(3), src.size(1), src.size(3)))
index2 = torch.randint(src.size(3), (src.size(1),))
+# f = lambda_eingather("ca(eae)(a) -> ace", src.shape, index1.shape, index2.shape)
+
+# print(f(src, index1, index2))
+
# result[a, c, e] = src[c, a, index1[e, a, e], index2[a]]
-result = eingather("ca(eae)(a) -> ace", src, index1, index2)
+#result = eingather("ca(eae)(a) -> ace", src, index1, index2)
+
+# Same gather expressed with first-class dims (functorch): a, c and e act
+# as named dimensions, and .order(a, c, e) fixes the output axis order.
+# NOTE(review): mid-file import — presumably kept next to the experiment it
+# supports; move to the top of the file if this variant is retained.
+from functorch.dim import dims
+
+a,c,e=dims(3)
+result=src[c,a,index1[e,a,e],index2[a]].order(a,c,e)
# Check