#####################
-def eingather(op, src, index):
+def eingather(op, src, *indexes):
s_src, s_dst = re.search("^([^ ]*) *-> *(.*)", op).groups()
- s_index = re.search("\(([^)]*)\)", s_src).group(1)
+ s_indexes = re.findall("\(([^)]*)\)", s_src)
s_src = re.sub("\([^)]*\)", "_", s_src)
- shape = tuple(src.size(s_src.index(v)) for v in s_dst)
+ all_sizes = tuple(d for s in (src,) + indexes for d in s.size())
+ s_all = "".join([s_src] + s_indexes)
+ shape = tuple(all_sizes[s_all.index(v)] for v in s_dst)
- idx = []
+ def do(x, s_x):
+ idx = []
+ n_index = 0
- for i in range(index.dim()):
- v = s_index[i]
- j = s_dst.index(v)
- a = (
- torch.arange(index.size(i))
- .reshape((1,) * j + (-1,) + (1,) * (len(s_dst) - j - 1))
- .expand(shape)
- )
- idx.append(a)
+ for i in range(x.dim()):
+ v = s_x[i]
+ if v == "_":
+ idx.append(do(indexes[n_index], s_indexes[n_index]))
+ n_index += 1
+ else:
+ j = s_dst.index(v)
+ a = (
+ torch.arange(x.size(i))
+ .reshape((1,) * j + (-1,) + (1,) * (len(s_dst) - j - 1))
+ .expand(shape)
+ )
+ idx.append(a)
- index = index[idx]
+ return x[idx]
- idx = []
+ return do(src, s_src)
- for i in range(src.dim()):
- v = s_src[i]
- if v == "_":
- idx.append(index)
- else:
- j = s_dst.index(v)
- a = (
- torch.arange(src.size(i))
- .reshape((1,) * j + (-1,) + (1,) * (len(s_dst) - j - 1))
- .expand(shape)
- )
- idx.append(a)
- return src[idx]
+def lambda_eingather(op, src_shape, *indexes_shape):
+ s_src, s_dst = re.search("^([^ ]*) *-> *(.*)", op).groups()
+ s_indexes = re.findall("\(([^)]*)\)", s_src)
+ s_src = re.sub("\([^)]*\)", "_", s_src)
+
+ all_sizes = tuple(d for s in (src_shape,) + indexes_shape for d in s)
+ s_all = "".join([s_src] + s_indexes)
+ shape = tuple(all_sizes[s_all.index(v)] for v in s_dst)
+
+ def do(x_shape, s_x):
+ idx = []
+ n_index = 0
+
+ for i in range(len(x_shape)):
+ v = s_x[i]
+ if v == "_":
+ f = do(indexes_shape[n_index], s_indexes[n_index])
+ idx.append(lambda indexes: indexes[n_index][f(indexes)])
+ n_index += 1
+ else:
+ j = s_dst.index(v)
+ a = (
+ torch.arange(x_shape[i])
+ .reshape((1,) * j + (-1,) + (1,) * (len(s_dst) - j - 1))
+ .expand(shape)
+ )
+ idx.append(lambda indexes: a)
+
+ print(f"{idx=}")
+ return lambda indexes: [f(indexes) for f in idx]
+
+ f = do(src_shape, s_src)
+ print(f"{f(0)=}")
+ return lambda src, *indexes: src[f(indexes)]


######################################################################
# src = torch.rand(3, 5, 3)
# print(eingather("aba -> ab", src))

# f = lambda_eingather("aba -> ab", src.shape)

# print(f(src))

# exit(0)

######################################################################
# Sanity check: result[a, c, e] = src[c, a, index1[e, a, e], index2[a]],
# computed with first-class dims from functorch.

src = torch.rand(3, 5, 7, 11)
index1 = torch.randint(src.size(2), (src.size(3), src.size(1), src.size(3)))
index2 = torch.randint(src.size(3), (src.size(1),))

# Equivalent computations:
# result = eingather("ca(eae)(a) -> ace", src, index1, index2)
# f = lambda_eingather("ca(eae)(a) -> ace", src.shape, index1.shape, index2.shape)
# result = f(src, index1, index2)

from functorch.dim import dims

a, c, e = dims(3)
result = src[c, a, index1[e, a, e], index2[a]].order(a, c, e)

# Check against an explicit triple loop; the accumulator must be
# initialized before the `+=` below.
error = 0
for a in range(result.size(0)):
    for c in range(result.size(1)):
        for e in range(result.size(2)):
            error += (result[a, c, e] - src[c, a, index1[e, a, e], index2[a]]).abs()

print(error.item())