X-Git-Url: https://www.fleuret.org/cgi-bin/gitweb/gitweb.cgi?a=blobdiff_plain;f=attentiontoy1d.py;h=b463340015b2814b1f2b71c8a0611b7270d499a7;hb=4fc155881f00eda4365ff62e249138394f52a1cd;hp=2cecad8b18c9fb14e779a3bb517da0eaeceefecf;hpb=de3a0375a79cbbf4299aacc41db1426a39ca9664;p=pytorch.git

diff --git a/attentiontoy1d.py b/attentiontoy1d.py
index 2cecad8..b463340 100755
--- a/attentiontoy1d.py
+++ b/attentiontoy1d.py
@@ -7,7 +7,7 @@
 
 import torch, math, sys, argparse
 
-from torch import nn
+from torch import nn, einsum
 from torch.nn import functional as F
 
 import matplotlib.pyplot as plt
@@ -181,7 +181,7 @@ def save_sequence_images(filename, sequences, tr = None, bx = None):
 
 class AttentionLayer(nn.Module):
     def __init__(self, in_channels, out_channels, key_channels):
-        super(AttentionLayer, self).__init__()
+        super().__init__()
         self.conv_Q = nn.Conv1d(in_channels, key_channels, kernel_size = 1, bias = False)
         self.conv_K = nn.Conv1d(in_channels, key_channels, kernel_size = 1, bias = False)
         self.conv_V = nn.Conv1d(in_channels, out_channels, kernel_size = 1, bias = False)
@@ -190,9 +190,9 @@ class AttentionLayer(nn.Module):
         Q = self.conv_Q(x)
         K = self.conv_K(x)
         V = self.conv_V(x)
-        A = Q.permute(0, 2, 1).matmul(K).softmax(2)
-        x = A.matmul(V.permute(0, 2, 1)).permute(0, 2, 1)
-        return x
+        A = einsum('nct,ncs->nts', Q, K).softmax(2)
+        y = einsum('nts,ncs->nct', A, V)
+        return y
 
     def __repr__(self):
         return self._get_name() + \
@@ -205,7 +205,8 @@ class AttentionLayer(nn.Module):
     def attention(self, x):
        Q = self.conv_Q(x)
         K = self.conv_K(x)
-        return Q.permute(0, 2, 1).matmul(K).softmax(2)
+        A = einsum('nct,ncs->nts', Q, K).softmax(2)
+        return A
 
 ######################################################################
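
Note on the patch above: it replaces the permute/matmul formulation of the attention products with torch.einsum, keeping the (batch, channels, positions) layout that nn.Conv1d produces. Below is a minimal standalone sketch, not part of the repository, checking on random tensors that the two formulations compute the same attention matrix and output; the sizes n, c, t and the value-channel count are hypothetical.

import torch
from torch import einsum

n, c, t = 2, 5, 7            # batch, key channels, sequence length (hypothetical)
Q = torch.randn(n, c, t)     # queries, as conv_Q would produce
K = torch.randn(n, c, t)     # keys, as conv_K would produce
V = torch.randn(n, 3, t)     # values; out_channels need not equal key_channels

# Old formulation: shuffle the channel axis with permute and use matmul.
A_old = Q.permute(0, 2, 1).matmul(K).softmax(2)             # (n, t, s)
y_old = A_old.matmul(V.permute(0, 2, 1)).permute(0, 2, 1)   # (n, c_out, t)

# New formulation from the patch: the same contractions written as einsum,
# with n = batch, c = channels, t/s = target/source positions.
A_new = einsum('nct,ncs->nts', Q, K).softmax(2)
y_new = einsum('nts,ncs->nct', A_new, V)

assert torch.allclose(A_old, A_new, atol=1e-6)
assert torch.allclose(y_old, y_new, atol=1e-6)

The einsum strings make the index bookkeeping explicit: the attention weights A[n, t, s] contract queries and keys over the channel axis, and the output contracts A with the values over the source positions s, so no permute calls are needed.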