Update.
author    François Fleuret <francois@fleuret.org>
          Tue, 31 Oct 2023 08:14:24 +0000 (09:14 +0100)
committer François Fleuret <francois@fleuret.org>
          Tue, 31 Oct 2023 08:14:24 +0000 (09:14 +0100)
eingather.py
tinyae.py

index 734edbe..c7552d7 100755 (executable)
--- a/eingather.py
+++ b/eingather.py
@@ -42,15 +42,77 @@ def eingather(op, src, *indexes):
     return do(src, s_src)
 
 
-#######################
+def lambda_eingather(op, src_shape, *indexes_shape):
+    # Parse "lhs -> rhs"; each parenthesized group on the lhs stands for an
+    # index tensor whose dimensions are named by the letters inside it
+    s_src, s_dst = re.search(r"^([^ ]*) *-> *(.*)", op).groups()
+    s_indexes = re.findall(r"\(([^)]*)\)", s_src)
+    s_src = re.sub(r"\([^)]*\)", "_", s_src)
+
+    all_sizes = tuple(d for s in (src_shape,) + indexes_shape for d in s)
+    s_all = "".join([s_src] + s_indexes)
+    shape = tuple(all_sizes[s_all.index(v)] for v in s_dst)
+
+    def do(x_shape, s_x):
+        idx = []
+        n_index = 0
+
+        for i in range(len(x_shape)):
+            v = s_x[i]
+            if v == "_":
+                f = do(indexes_shape[n_index], s_indexes[n_index])
+                # Bind n_index and f now; a plain closure would see the
+                # values they hold at call time, after the loop has moved on
+                idx.append(lambda indexes, n=n_index, f=f: indexes[n][f(indexes)])
+                n_index += 1
+            else:
+                j = s_dst.index(v)
+                a = (
+                    torch.arange(x_shape[i])
+                    .reshape((1,) * j + (-1,) + (1,) * (len(s_dst) - j - 1))
+                    .expand(shape)
+                )
+                # Same early binding for a, which is rebuilt at each iteration
+                idx.append(lambda indexes, a=a: a)
+
+        return lambda indexes: tuple(f(indexes) for f in idx)
+
+    f = do(src_shape, s_src)
+    return lambda src, *indexes: src[f(indexes)]
+
+
+######################################################################
+
+# src = torch.rand(3, 5, 3)
+
+# print(eingather("aba -> ab", src))
+
+# f = lambda_eingather("aba -> ab", src.shape)
+
+# print(f(src))
+
+# exit(0)
+
+######################################################################
 
 src = torch.rand(3, 5, 7, 11)
 index1 = torch.randint(src.size(2), (src.size(3), src.size(1), src.size(3)))
 index2 = torch.randint(src.size(3), (src.size(1),))
 
+# f = lambda_eingather("ca(eae)(a) -> ace", src.shape, index1.shape, index2.shape)
+
+# print(f(src, index1, index2))
+
 # result[a, c, e] = src[c, a, index1[e, a, e], index2[a]]
 
-result = eingather("ca(eae)(a) -> ace", src, index1, index2)
+# result = eingather("ca(eae)(a) -> ace", src, index1, index2)
+
+from functorch.dim import dims
+
+# First-class dimensions: a, c, e are bound by the indexing expression, and
+# .order(a, c, e) lays the result out with that dimension order
+a, c, e = dims(3)
+result = src[c, a, index1[e, a, e], index2[a]].order(a, c, e)
 
 # Check
 
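As a sanity check on the two new code paths, the first-class-dims result can be compared both against the shape-specialized gather and against the indexing spec in the comment above. A minimal standalone sketch, assuming the fixed lambda_eingather from this patch is in scope and that functorch.dim is available in the installed PyTorch:

import torch
from functorch.dim import dims

src = torch.rand(3, 5, 7, 11)
index1 = torch.randint(src.size(2), (src.size(3), src.size(1), src.size(3)))
index2 = torch.randint(src.size(3), (src.size(1),))

# First-class dims version, as in the patch
a, c, e = dims(3)
result = src[c, a, index1[e, a, e], index2[a]].order(a, c, e)

# Gather specialized once for these shapes, then applied
f = lambda_eingather("ca(eae)(a) -> ace", src.shape, index1.shape, index2.shape)
assert torch.equal(result, f(src, index1, index2))

# Brute-force check of result[a, c, e] = src[c, a, index1[e, a, e], index2[a]]
for A in range(result.size(0)):
    for C in range(result.size(1)):
        for E in range(result.size(2)):
            assert result[A, C, E] == src[C, A, index1[E, A, E], index2[A]]

The first-class-dims version trades the string parsing for dimension objects that carry their binding through the indexing expression, which is why no explicit output shape has to be computed.
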
index 1608786..b4f3aba 100755 (executable)
--- a/tinyae.py
+++ b/tinyae.py
@@ -55,7 +55,7 @@ def log_string(s):
 
 class AutoEncoder(nn.Module):
     def __init__(self, nb_channels, embedding_dim):
-        super(AutoEncoder, self).__init__()
+        super().__init__()
 
         self.encoder = nn.Sequential(
             nn.Conv2d(1, nb_channels, kernel_size=5),  # to 24x24
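The tinyae.py hunk is a routine modernization of the constructor call: the zero-argument super() of Python 3 resolves the class and instance implicitly, so the call keeps working if AutoEncoder is renamed. A minimal sketch of the resulting form:

import torch.nn as nn

class AutoEncoder(nn.Module):
    def __init__(self, nb_channels, embedding_dim):
        # Python 3 zero-argument form, equivalent to
        # super(AutoEncoder, self).__init__()
        super().__init__()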