s_indexes = re.findall("\(([^)]*)\)", s_src)
s_src = re.sub("\([^)]*\)", "_", s_src)
- shape = tuple(src.size(s_src.index(v)) for v in s_dst)
+ all_sizes = tuple(d for s in (src,) + indexes for d in s.size())
+ s_all = "".join([s_src] + s_indexes)
+ shape = tuple(all_sizes[s_all.index(v)] for v in s_dst)
+
+ def do(x, s_x):
+ idx = []
+ n_index = 0
+
+ for i in range(x.dim()):
+ v = s_x[i]
+ if v == "_":
+ idx.append(do(indexes[n_index], s_indexes[n_index]))
+ n_index += 1
+ else:
+ j = s_dst.index(v)
+ a = (
+ torch.arange(x.size(i))
+ .reshape((1,) * j + (-1,) + (1,) * (len(s_dst) - j - 1))
+ .expand(shape)
+ )
+ idx.append(a)
- idx = []
- n_index = 0
+ return x[idx]
- for i in range(src.dim()):
- v = s_src[i]
- if v == "_":
- index, s_index = indexes[n_index], s_indexes[n_index]
- n_index += 1
+ return do(src, s_src)
- sub_idx = []
- for i in range(index.dim()):
- v = s_index[i]
def lambda_eingather(op, src_shape, *indexes_shape):
    """Pre-compile an einops-style gather specified by ``op``.

    ``op`` has the form ``"ca(eae)(a) -> ace"``: letters name dimensions of
    the source, parenthesized groups name the dimensions of each index
    tensor (consumed in order), and the part after ``->`` names the output
    dimensions.  Given only the *shapes*, this returns a function
    ``f(src, *indexes) -> Tensor`` such that, e.g. for the op above,
    ``f(src, i1, i2)[a, c, e] == src[c, a, i1[e, a, e], i2[a]]``.

    Args:
        op: the gather specification string.
        src_shape: shape of the future source tensor.
        *indexes_shape: shapes of the future index tensors, one per
            parenthesized group in ``op`` (left to right).

    Returns:
        A callable ``f(src, *indexes)`` producing the gathered tensor whose
        shape is given by the output letters of ``op``.
    """
    s_src, s_dst = re.search(r"^([^ ]*) *-> *(.*)", op).groups()
    s_indexes = re.findall(r"\(([^)]*)\)", s_src)
    s_src = re.sub(r"\([^)]*\)", "_", s_src)

    # Map every output letter to its size, looking across the source shape
    # and all index shapes.
    all_sizes = tuple(d for s in (src_shape,) + indexes_shape for d in s)
    s_all = "".join([s_src] + s_indexes)
    shape = tuple(all_sizes[s_all.index(v)] for v in s_dst)

    def do(x_shape, s_x):
        # Build, for a tensor of shape x_shape described by the letters s_x,
        # a function mapping the runtime index tensors to the tuple of
        # advanced indices that realizes the gather.
        idx = []
        n_index = 0

        for i in range(len(x_shape)):
            v = s_x[i]
            if v == "_":
                # This dimension is indexed by the n_index-th index tensor,
                # itself gathered recursively.  Bind f and n_index as default
                # arguments: a plain closure would capture them late and every
                # lambda would see the final loop values.
                f = do(indexes_shape[n_index], s_indexes[n_index])
                idx.append(
                    lambda indexes, f=f, n=n_index: indexes[n][f(indexes)]
                )
                n_index += 1
            else:
                # A plain letter: an arange broadcast to the output shape,
                # placed on the axis where the letter appears in s_dst.
                j = s_dst.index(v)
                a = (
                    torch.arange(x_shape[i])
                    .reshape((1,) * j + (-1,) + (1,) * (len(s_dst) - j - 1))
                    .expand(shape)
                )
                # Bind a now for the same late-binding reason as above.
                idx.append(lambda indexes, a=a: a)

        # A tuple is the canonical advanced-indexing form (indexing with a
        # list of tensors is deprecated in recent PyTorch).
        return lambda indexes: tuple(g(indexes) for g in idx)

    f = do(src_shape, s_src)
    return lambda src, *indexes: src[f(indexes)]
######################################################################
# Sanity check: perform the gather
#
#     result[a, c, e] = src[c, a, index1[e, a, e], index2[a]]
#
# (the equivalent of eingather("ca(eae)(a) -> ace", src, index1, index2))
# with functorch first-class dimensions, then verify it element by element.

src = torch.rand(3, 5, 7, 11)
index1 = torch.randint(src.size(2), (src.size(3), src.size(1), src.size(3)))
index2 = torch.randint(src.size(3), (src.size(1),))

from functorch.dim import dims

a, c, e = dims(3)
result = src[c, a, index1[e, a, e], index2[a]].order(a, c, e)

# Check: accumulate the absolute deviation over every output element.
# error must be initialized here — it was accumulated without being bound,
# which raises NameError on the first iteration.
error = 0
for a in range(result.size(0)):
    for c in range(result.size(1)):
        for e in range(result.size(2)):
            error += (result[a, c, e] - src[c, a, index1[e, a, e], index2[a]]).abs()
print(error.item())