From 98927ffe4764161c154f4be17248afd5b91af721 Mon Sep 17 00:00:00 2001
From: Francois Fleuret
Date: Mon, 13 Jun 2022 15:33:56 +0200
Subject: [PATCH] Update.

---
 mygpt.py | 12 ++++++------
 1 file changed, 6 insertions(+), 6 deletions(-)

diff --git a/mygpt.py b/mygpt.py
index a23470b..080083a 100755
--- a/mygpt.py
+++ b/mygpt.py
@@ -47,16 +47,16 @@ class QKVAttention(nn.Module):
         def randw(*d):
             return nn.Parameter(torch.empty(*d).normal_(0, 1 / math.sqrt(d[-1])))
 
-        self.wq = randw(nb_heads, dim_qk, dim_in)
-        self.wk = randw(nb_heads, dim_qk, dim_in)
-        self.wv = randw(nb_heads, dim_v, dim_in)
+        self.w_q = randw(nb_heads, dim_qk, dim_in)
+        self.w_k = randw(nb_heads, dim_qk, dim_in)
+        self.w_v = randw(nb_heads, dim_v, dim_in)
         self.causal = causal
         self.attention_dropout = attention_dropout
 
     def forward(self, x):
-        q = torch.einsum('ntc,hdc->nhtd', x, self.wq)
-        k = torch.einsum('ntc,hdc->nhtd', x, self.wk)
-        v = torch.einsum('ntc,hdc->nhtd', x, self.wv)
+        q = torch.einsum('ntc,hdc->nhtd', x, self.w_q)
+        k = torch.einsum('ntc,hdc->nhtd', x, self.w_k)
+        v = torch.einsum('ntc,hdc->nhtd', x, self.w_v)
         r = math.sqrt(q.size(3))
         a = torch.einsum('nhtd,nhsd->nhts', q, k).div(r)
         if self.causal:
-- 
2.39.5
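
For context, here is a minimal, self-contained sketch of the QKVAttention module this hunk touches, with the renamed w_q/w_k/w_v parameters in place. The constructor signature and everything past the `if self.causal:` line (the mask, softmax, dropout, and output projection) are not shown by the patch and are assumptions reconstructed from the visible code, not the author's exact implementation.

import math
import torch
from torch import nn
from torch.nn import functional as F

class QKVAttention(nn.Module):
    # Assumed constructor signature, inferred from the names used in the hunk.
    def __init__(self, dim_in, dim_qk, dim_v,
                 nb_heads=1, causal=False, attention_dropout=0.0):
        super().__init__()

        def randw(*d):
            # One weight tensor per head, initialized with std 1/sqrt(fan-in).
            return nn.Parameter(torch.empty(*d).normal_(0, 1 / math.sqrt(d[-1])))

        self.w_q = randw(nb_heads, dim_qk, dim_in)
        self.w_k = randw(nb_heads, dim_qk, dim_in)
        self.w_v = randw(nb_heads, dim_v, dim_in)
        self.causal = causal
        self.attention_dropout = attention_dropout

    def forward(self, x):
        # Project the input into per-head queries, keys, and values:
        # (N, T, C) x (H, D, C) -> (N, H, T, D).
        q = torch.einsum('ntc,hdc->nhtd', x, self.w_q)
        k = torch.einsum('ntc,hdc->nhtd', x, self.w_k)
        v = torch.einsum('ntc,hdc->nhtd', x, self.w_v)
        # Scaled dot-product attention scores: (N, H, T, S).
        a = torch.einsum('nhtd,nhsd->nhts', q, k).div(math.sqrt(q.size(3)))
        if self.causal:
            # Assumed causal masking: forbid attending to future positions s > t.
            t = torch.arange(x.size(1), device=x.device)
            a = a.masked_fill(t[None, None, :, None] < t[None, None, None, :],
                              float('-inf'))
        a = F.softmax(a, dim=3)
        a = F.dropout(a, self.attention_dropout, self.training)
        # Weighted sum of values, then heads flattened back into channels.
        y = torch.einsum('nhts,nhsd->nhtd', a, v)
        return y.permute(0, 2, 1, 3).flatten(2)

Usage, with hypothetical dimensions chosen so nb_heads * dim_v matches dim_in:

att = QKVAttention(dim_in=16, dim_qk=8, dim_v=8, nb_heads=2, causal=True)
y = att(torch.randn(2, 5, 16))  # -> shape (2, 5, 16)

The rename itself is purely cosmetic (wq -> w_q, etc.); the 6 insertions and 6 deletions in the diffstat are the three parameter definitions and their three uses in forward.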