Simplified calls to superclass constructors.
diff --git a/attentiontoy1d.py b/attentiontoy1d.py
index d389f0c..b463340 100755
@@ -181,7 +181,7 @@ def save_sequence_images(filename, sequences, tr = None, bx = None):
 
 class AttentionLayer(nn.Module):
     def __init__(self, in_channels, out_channels, key_channels):
-        super(AttentionLayer, self).__init__()
+        super().__init__()
         self.conv_Q = nn.Conv1d(in_channels, key_channels, kernel_size = 1, bias = False)
         self.conv_K = nn.Conv1d(in_channels, key_channels, kernel_size = 1, bias = False)
         self.conv_V = nn.Conv1d(in_channels, out_channels, kernel_size = 1, bias = False)
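For context, the three 1x1 convolutions above compute the query, key, and value projections of a dot-product attention layer over a 1D sequence. The forward pass is not part of the hunk shown, so the sketch below is only an assumed reconstruction of how such projections are typically combined; it is illustrative and may differ from the actual forward() in attentiontoy1d.py.

import torch
import torch.nn as nn

class AttentionLayer(nn.Module):
    def __init__(self, in_channels, out_channels, key_channels):
        super().__init__()
        self.conv_Q = nn.Conv1d(in_channels, key_channels, kernel_size = 1, bias = False)
        self.conv_K = nn.Conv1d(in_channels, key_channels, kernel_size = 1, bias = False)
        self.conv_V = nn.Conv1d(in_channels, out_channels, kernel_size = 1, bias = False)

    def forward(self, x):
        # x: (batch, in_channels, T)
        Q = self.conv_Q(x)  # (batch, key_channels, T)
        K = self.conv_K(x)  # (batch, key_channels, T)
        V = self.conv_V(x)  # (batch, out_channels, T)
        # Attention weights between all pairs of positions, normalized over the key axis
        A = torch.einsum('nct,ncs->nts', Q, K).softmax(dim = 2)  # (batch, T, T)
        # Weighted sum of values at every output position
        return torch.einsum('nts,ncs->nct', A, V)  # (batch, out_channels, T)

# Usage sketch (hypothetical sizes): a batch of 8 sequences, 64 channels, length 128
layer = AttentionLayer(in_channels = 64, out_channels = 64, key_channels = 96)
y = layer(torch.randn(8, 64, 128))  # -> torch.Size([8, 64, 128])

The zero-argument super().__init__() adopted by this commit is equivalent to the explicit super(AttentionLayer, self).__init__() form under Python 3, which is why the change is purely a simplification.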