def lambda_eingather(op, src_shape, *indexes_shape):
    """Build a gather function from an einsum-like index expression.

    `op` is a string such as "ab -> ba" or "a(b) -> ab": letters name axes,
    and a parenthesized group names the axes of one index tensor used to
    index the corresponding dimension of `src`.

    Args:
        op: index expression, e.g. "aba -> ab" (shared letters select a
            diagonal) or "a(b) -> ab" (gather along dim 1).
        src_shape: shape of the source tensor.
        *indexes_shape: shapes of the index tensors, one per parenthesized
            group, in order of appearance in `op`.

    Returns:
        A callable `f(src, *indexes) -> torch.Tensor` whose result axes
        follow the right-hand side of `op`.
    """
    s_src, s_dst = re.search(r"^([^ ]*) *-> *(.*)", op).groups()
    # Pull out the parenthesized index groups, then replace each group in
    # the source spec with a "_" placeholder marking an indexed dimension.
    s_indexes = re.findall(r"\(([^)]*)\)", s_src)
    s_src = re.sub(r"\([^)]*\)", "_", s_src)

    # Each axis letter maps to a size, looked up across src then indexes.
    all_sizes = tuple(d for s in (src_shape,) + indexes_shape for d in s)
    s_all = "".join([s_src] + s_indexes)
    shape = tuple(all_sizes[s_all.index(v)] for v in s_dst)

    def do(x_shape, s_x):
        # For each dim of x, build a function mapping the tuple of index
        # tensors to the advanced-indexing tensor for that dim.
        idx = []
        n_index = 0

        for i in range(len(x_shape)):
            v = s_x[i]
            if v == "_":
                # Indexed dimension: recursively build the indexer for the
                # index tensor itself, then look positions up inside it.
                f = do(indexes_shape[n_index], s_indexes[n_index])
                # Bind f and n_index as defaults: a plain closure would see
                # the final loop values (late-binding pitfall).
                idx.append(
                    lambda indexes, f=f, k=n_index: indexes[k][f(indexes)]
                )
                n_index += 1
            else:
                # Plain dimension: an arange broadcast to the output shape,
                # varying along the axis that v occupies in s_dst.
                j = s_dst.index(v)
                a = (
                    torch.arange(x_shape[i])
                    .reshape((1,) * j + (-1,) + (1,) * (len(s_dst) - j - 1))
                    .expand(shape)
                )
                # Same late-binding issue: bind a as a default.
                idx.append(lambda indexes, a=a: a)

        # Return a tuple (not a list): list-of-tensor indexing is the
        # deprecated advanced-indexing form.
        return lambda indexes: tuple(f(indexes) for f in idx)

    f = do(src_shape, s_src)
    return lambda src, *indexes: src[f(indexes)]
+
######################################################################

# src = torch.rand(3, 5, 3)

# print(eingather("aba -> ab", src))

# f = lambda_eingather("aba -> ab", src.shape)

# print(f(src))

# exit(0)

######################################################################