From: François Fleuret
Date: Tue, 31 Oct 2023 08:14:24 +0000 (+0100)
Subject: Update.
X-Git-Url: https://fleuret.org/cgi-bin/gitweb/gitweb.cgi?a=commitdiff_plain;h=5598ee31cbcd2ebbedeadbd66518f082c66aaaa9;p=pytorch.git

Update.
---

diff --git a/eingather.py b/eingather.py
index 734edbe..c7552d7 100755
--- a/eingather.py
+++ b/eingather.py
@@ -42,15 +42,74 @@ def eingather(op, src, *indexes):
     return do(src, s_src)
 
 
-######################################################################
+def lambda_eingather(op, src_shape, *indexes_shape):
+    # Split "lhs -> rhs"; the parenthesized groups of the lhs give the
+    # subscripts of the index tensors, in order.
+    s_src, s_dst = re.search("^([^ ]*) *-> *(.*)", op).groups()
+    s_indexes = re.findall(r"\(([^)]*)\)", s_src)
+    s_src = re.sub(r"\([^)]*\)", "_", s_src)
+
+    all_sizes = tuple(d for s in (src_shape,) + indexes_shape for d in s)
+    s_all = "".join([s_src] + s_indexes)
+    shape = tuple(all_sizes[s_all.index(v)] for v in s_dst)
+
+    def do(x_shape, s_x):
+        idx = []
+        n_index = 0
+
+        for i in range(len(x_shape)):
+            v = s_x[i]
+            if v == "_":
+                f = do(indexes_shape[n_index], s_indexes[n_index])
+                # Bind n_index and f now, closures are late-binding and
+                # would otherwise see their values after the loop.
+                idx.append(lambda indexes, n=n_index, f=f: indexes[n][f(indexes)])
+                n_index += 1
+            else:
+                j = s_dst.index(v)
+                a = (
+                    torch.arange(x_shape[i])
+                    .reshape((1,) * j + (-1,) + (1,) * (len(s_dst) - j - 1))
+                    .expand(shape)
+                )
+                idx.append(lambda indexes, a=a: a)
+
+        return lambda indexes: tuple(f(indexes) for f in idx)
+
+    f = do(src_shape, s_src)
+    return lambda src, *indexes: src[f(indexes)]
+
+
+######################################################################
+
+# src = torch.rand(3, 5, 3)
+
+# print(eingather("aba -> ab", src))
+
+# f = lambda_eingather("aba -> ab", src.shape)
+
+# print(f(src))
+
+# exit(0)
+
+######################################################################
 
 src = torch.rand(3, 5, 7, 11)
 index1 = torch.randint(src.size(2), (src.size(3), src.size(1), src.size(3)))
 index2 = torch.randint(src.size(3), (src.size(1),))
 
+# f = lambda_eingather("ca(eae)(a) -> ace", src.shape, index1.shape, index2.shape)
+
+# print(f(src, index1, index2))
+
 # result[a, c, e] = src[c, a, index1[e, a, e], index2[a]]
 
-result = eingather("ca(eae)(a) -> ace", src, index1, index2)
+# result = eingather("ca(eae)(a) -> ace", src, index1, index2)
+
+from functorch.dim import dims
+
+a, c, e = dims(3)
+result = src[c, a, index1[e, a, e], index2[a]].order(a, c, e)
 
 # Check
 
diff --git a/tinyae.py b/tinyae.py
index 1608786..b4f3aba 100755
--- a/tinyae.py
+++ b/tinyae.py
@@ -55,7 +55,7 @@ def log_string(s):
 
 class AutoEncoder(nn.Module):
     def __init__(self, nb_channels, embedding_dim):
-        super(AutoEncoder, self).__init__()
+        super().__init__()
 
         self.encoder = nn.Sequential(
             nn.Conv2d(1, nb_channels, kernel_size=5),  # to 24x24
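
A note on the default arguments in do(): Python closures are late-binding, they capture variables rather than values, so the lambdas built in the loop would otherwise all see the final n_index, f, and a. A minimal standalone sketch of the pitfall:

fns = [lambda: i for i in range(3)]
print([f() for f in fns])  # [2, 2, 2], every closure reads the final i

fns = [lambda i=i: i for i in range(3)]
print([f() for f in fns])  # [0, 1, 2], defaults are evaluated at definition time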
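
Because lambda_eingather derives the whole advanced-indexing tuple from the shapes alone, the closure it returns should be reusable across tensors sharing those shapes. A hypothetical usage sketch, not part of the commit, with the names from the test section:

f = lambda_eingather("ca(eae)(a) -> ace", src.shape, index1.shape, index2.shape)
out1 = f(src, index1, index2)
out2 = f(torch.rand_like(src), index1, index2)  # same indexing plan, fresh data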
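
The functorch.dim result can be validated against the specification comment, result[a, c, e] = src[c, a, index1[e, a, e], index2[a]]. A brute-force sketch of such a check, assuming the shapes from the test section (the explicit-loop reference is an illustration, not the repository's own check):

import torch
from functorch.dim import dims

src = torch.rand(3, 5, 7, 11)
index1 = torch.randint(src.size(2), (src.size(3), src.size(1), src.size(3)))
index2 = torch.randint(src.size(3), (src.size(1),))

a, c, e = dims(3)
result = src[c, a, index1[e, a, e], index2[a]].order(a, c, e)

# Explicit-loop reference for result[a, c, e] = src[c, a, index1[e, a, e], index2[a]]
check = torch.empty(src.size(1), src.size(0), src.size(3))
for _a in range(src.size(1)):
    for _c in range(src.size(0)):
        for _e in range(src.size(3)):
            check[_a, _c, _e] = src[_c, _a, index1[_e, _a, _e], index2[_a]]

assert torch.equal(result, check)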