         attention_dropout=0.0,
         len_max=1e5,
         logger=print,
-        **kwargs,
+        args=None,
     ):
         super().__init__()
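
The same `**kwargs` → `args=None` change is applied to every constructor in this patch: the layers now read their extra options from one explicit namespace instead of a grab-bag of keyword arguments, so a misspelled option raises an `AttributeError` instead of being silently swallowed by `kwargs.get`. A minimal sketch of such a namespace, assuming the usual `argparse` setup; the flag names are hypothetical, taken from the attributes the `Caterpillar` constructor reads later in this diff:

```python
import argparse

# Hypothetical parser fragment -- only the gate-dropout options that the
# Caterpillar constructor below actually reads are declared here.
parser = argparse.ArgumentParser()
parser.add_argument("--gate_dropout_proba", type=float, default=0.0)
parser.add_argument("--gate_dropout_sync", action="store_true")
parser.add_argument("--gate_dropout_replace", action="store_true")

args = parser.parse_args([])
# The namespace is then threaded through, e.g. Caterpillar(..., args=args).
```
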
         attention_dropout=0.0,
         len_max=1e5,
         logger=print,
-        **kwargs,
+        args=None,
     ):
         super().__init__()
         attention_dropout=0.0,
         len_max=1e5,
         logger=print,
-        **kwargs,
+        args=None,
     ):
         super().__init__()
         self.caterpillar_height = caterpillar_height
         self.attention_dropout = attention_dropout
-        ######################################################################
-        # sup_args
-
-        x = kwargs.get("gate_dropout")
-        if x is None:
-            self.proba_gate_dropout = 0.0
-        else:
-            self.proba_gate_dropout = float(x)
-
-        logger(f"self.proba_gate_dropout {self.proba_gate_dropout}")
-
-        x = kwargs.get("default_bg")
-        if x is None:
-            default_bg = -math.log(caterpillar_height - 1)
-        else:
-            default_bg = float(x)
-
-        logger(f"default_bg {default_bg}")
+        self.gate_dropout_proba = args.gate_dropout_proba
+        self.gate_dropout_sync = args.gate_dropout_sync
+        self.gate_dropout_replace = args.gate_dropout_replace
         ######################################################################
+        default_bg = -math.log(caterpillar_height - 1)
         self.w_G = randw(nb_heads, caterpillar_height, dim_model)
         self.b_G = nn.Parameter(torch.full((nb_heads, caterpillar_height), default_bg))
             dim_v,
         )
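
The previously overridable `default_bg` is now hardcoded, and the value is not arbitrary: assuming, as elsewhere in this layer, that the gate is a sigmoid of `w_G·x + b_G`, a bias of `-log(H - 1)` makes each of the `caterpillar_height = H` rows open its gate with initial probability `sigmoid(-log(H - 1)) = 1/(1 + (H - 1)) = 1/H`, i.e. the rows start by sharing the input uniformly. A quick check of the arithmetic:

```python
import math
import torch

H = 4  # example caterpillar_height
b = -math.log(H - 1)
print(torch.sigmoid(torch.tensor(b)).item())  # ~0.25 == 1 / H
```
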
-    def reset_inner_loss(self):
-        self.acc_attention = 0
-        self.acc_nb = 0
+    # def reset_inner_loss(self):
+    # self.acc_attention = 0
+    # self.acc_nb = 0
-    def get_inner_loss(self):
-        # warnings.warn("l2 regularization", RuntimeWarning)
-        # return (self.acc_attention / self.acc_nb).pow(2).sum()
-        return torch.tensor([0], device=self.w_Q.device)
+    # def get_inner_loss(self):
+    # warnings.warn("l2 regularization", RuntimeWarning)
+    # return (self.acc_attention / self.acc_nb).pow(2).sum()
+    # return torch.tensor([0], device=self.w_Q.device)
     def forward(self, bs):
         # Dimensions to make the source a bit clearer, that's needed
             gated_V = gated_V.unflatten(2, (-1, L))
             gated_K = gated_K.unflatten(2, (-1, L))
-            next_V = pscan_dim(A, gated_V, init_rec_V, dim=2)
-            next_K = pscan_dim(A, gated_K, init_rec_K, dim=2)
-
-            next_V = next_V.flatten(2, 3)
-            next_K = next_K.flatten(2, 3)
+            next_V = pscan_dim(A, gated_V, init_rec_V, dim=2).flatten(2, 3)
+            next_K = pscan_dim(A, gated_K, init_rec_K, dim=2).flatten(2, 3)
             return next_V, next_K
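
The fused form is behavior-preserving: `unflatten` splits the time dimension (dim 2) into chunks of length `L`, `pscan_dim` runs the parallel scan across the chunk dimension, and `flatten(2, 3)` merges the chunks back, so chaining `.flatten(2, 3)` onto the scan result is the same computation with two fewer temporaries. A toy shape round-trip showing just the reshaping (the scan itself is elided):

```python
import torch

N, H, T, D, L = 2, 3, 8, 5, 4  # toy sizes; T must be a multiple of L
x = torch.randn(N, H, T, D)

chunks = x.unflatten(2, (-1, L))  # (N, H, T // L, L, D)
assert chunks.shape == (N, H, T // L, L, D)

# ... pscan_dim(A, chunks, init, dim=2) would scan over dim 2 here ...

y = chunks.flatten(2, 3)          # back to (N, H, T, D)
assert torch.equal(x, y)          # the round-trip is lossless
```
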
         next_V, next_K = recurrence(G, V, K)
-        if self.training and self.proba_gate_dropout > 0.0:
+        if self.training and self.gate_dropout_proba > 0.0:
             # G is NxHxRxT, where R indexes the caterpillar's rows.
             warnings.warn("gate dropout", RuntimeWarning)
+            if self.gate_dropout_sync:
+                shape_kill = (N, 1, 1)
+            else:
+                shape_kill = (N, H, R)
+
             # Pick a point in each of the NxHxR timelines and set this
             # entry and all the following ones to 1
             kill = (
-                torch.rand(N, H, R, t1 - t0, device=G.device).sort(dim=3).indices == 0
+                torch.rand(*shape_kill, t1 - t0, device=G.device).sort(dim=3).indices
+                == 0
             ).cumsum(dim=3)
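
The sort-then-cumsum idiom is worth unpacking: `.sort(dim=3).indices == 0` yields a one-hot marking where the first random draw lands in sorted order, which is a uniformly distributed position along the timeline, and `cumsum` then extends that single 1 to every later step. A self-contained demo on one toy timeline:

```python
import torch

torch.manual_seed(0)
u = torch.rand(1, 1, 1, 8)  # one toy (N, H, R) timeline of length 8

# One-hot at the sorted rank of u[..., 0] -- a uniformly random position.
one_hot = (u.sort(dim=3).indices == 0).long()
step = one_hot.cumsum(dim=3)  # 1 from that position to the end

print(one_hot.squeeze())  # e.g. tensor([0, 0, 0, 1, 0, 0, 0, 0])
print(step.squeeze())     # e.g. tensor([0, 0, 0, 1, 1, 1, 1, 1])
```
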
             # Keep these masks for only some of the NxHxR timelines
             kill = kill * (
-                torch.rand(N, H, R, 1, device=G.device) <= self.proba_gate_dropout
+                torch.rand(*shape_kill, 1, device=G.device) <= self.gate_dropout_proba
             )
             # The coefficients to keep are the complementary ones
             masked_next_V, masked_next_K = recurrence(G * mask, V, K)
-            next_V = next_V.detach() + (masked_next_V - masked_next_V.detach()) / (
-                1 - self.proba_gate_dropout
+            if self.gate_dropout_replace:
+                next_V = next_V.detach()
+                next_K = next_K.detach()
+
+            next_V = next_V + (masked_next_V - masked_next_V.detach()) / (
+                1 - self.gate_dropout_proba
             )
-            next_K = next_K.detach() + (masked_next_K - masked_next_K.detach()) / (
-                1 - self.proba_gate_dropout
+            next_K = next_K + (masked_next_K - masked_next_K.detach()) / (
+                1 - self.gate_dropout_proba
             )
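
The rewritten update is a straight-through estimator with the standard dropout rescaling: in the forward pass `(masked_next_V - masked_next_V.detach())` is exactly zero, so the value is `next_V`, while in the backward pass gradients flow only through the masked recurrence, scaled by `1 / (1 - p)` to keep their expectation unchanged; with `gate_dropout_replace` set, `next_V`/`next_K` are detached first, so the unmasked path contributes no gradient at all. A scalar sketch of the identity, with hypothetical stand-in tensors:

```python
import torch

p = 0.25
full = torch.tensor(2.0, requires_grad=True)  # stands in for next_V
masked = 0.5 * full                           # stands in for masked_next_V

out = full.detach() + (masked - masked.detach()) / (1 - p)
print(out.item())  # 2.0 -- forward value is exactly full's value

out.backward()
print(full.grad.item())  # ~0.667 -- gradient flows only through masked / (1 - p)
```
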
         self.rec_V[:, :, t0:t1] = next_V
         causal=False,
         attention_dropout=0.0,
         logger=print,
-        **kwargs,
+        args=None,
     ):
         super().__init__()
         len_max=1e5,
         attention_layer="kvrec",
         logger=print,
-        **kwargs,
+        args=None,
     ):
         super().__init__()
                     causal=causal,
                     attention_dropout=dropout,
                     logger=logger,
-                    **kwargs,
+                    args=args,
                 )
             elif attention_layer == "dumbrec":
                 return DumbRec(
                     nb_lines=nb_lines,
                     attention_dropout=dropout,
                     logger=logger,
-                    **kwargs,
+                    args=args,
                 )
             elif attention_layer == "kvrec":
                 return KVRec(
                     nb_lines=nb_lines,
                     attention_dropout=dropout,
                     logger=logger,
-                    **kwargs,
+                    args=args,
                 )
             elif attention_layer == "caterpillar":
                 return Caterpillar(
                     caterpillar_height=self.caterpillar_height,
                     attention_dropout=dropout,
                     logger=logger,
-                    **kwargs,
+                    args=args,
                 )
             else:
                 raise ValueError(f"Unknown attention type {attention_layer}.")
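
Since every branch of the factory now forwards the same `args` object, only the code that builds the model needs to change. A caller-side sketch, using `SimpleNamespace` as a hypothetical stand-in for the parsed command-line options; only the gate-dropout fields read by `Caterpillar` are spelled out, and the model class name is elided:

```python
from types import SimpleNamespace

# Hypothetical stand-in for the parsed command-line options.
args = SimpleNamespace(
    gate_dropout_proba=0.25,
    gate_dropout_sync=False,
    gate_dropout_replace=True,
)

# The namespace then rides along unchanged to whichever layer the
# attention_layer string selects, e.g.:
#     model = Model(..., attention_layer="caterpillar", args=args)
```
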