From: Francois Fleuret
Date: Mon, 13 Jun 2022 13:33:56 +0000 (+0200)
Subject: Update.
X-Git-Url: https://fleuret.org/cgi-bin/gitweb/gitweb.cgi?a=commitdiff_plain;h=98927ffe4764161c154f4be17248afd5b91af721;p=mygpt.git

Update.
---

diff --git a/mygpt.py b/mygpt.py
index a23470b..080083a 100755
--- a/mygpt.py
+++ b/mygpt.py
@@ -47,16 +47,16 @@ class QKVAttention(nn.Module):
         def randw(*d):
             return nn.Parameter(torch.empty(*d).normal_(0, 1 / math.sqrt(d[-1])))
 
-        self.wq = randw(nb_heads, dim_qk, dim_in)
-        self.wk = randw(nb_heads, dim_qk, dim_in)
-        self.wv = randw(nb_heads, dim_v, dim_in)
+        self.w_q = randw(nb_heads, dim_qk, dim_in)
+        self.w_k = randw(nb_heads, dim_qk, dim_in)
+        self.w_v = randw(nb_heads, dim_v, dim_in)
         self.causal = causal
         self.attention_dropout = attention_dropout
 
     def forward(self, x):
-        q = torch.einsum('ntc,hdc->nhtd', x, self.wq)
-        k = torch.einsum('ntc,hdc->nhtd', x, self.wk)
-        v = torch.einsum('ntc,hdc->nhtd', x, self.wv)
+        q = torch.einsum('ntc,hdc->nhtd', x, self.w_q)
+        k = torch.einsum('ntc,hdc->nhtd', x, self.w_k)
+        v = torch.einsum('ntc,hdc->nhtd', x, self.w_v)
         r = math.sqrt(q.size(3))
         a = torch.einsum('nhtd,nhsd->nhts', q, k).div(r)
         if self.causal:
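
For reference, a minimal standalone sketch of the einsum-based attention computation this patch touches, useful for checking the tensor shapes involved. The sizes N, T, C, H, D below are arbitrary illustrative values, not taken from mygpt.py:

    import math
    import torch

    N, T, C = 3, 5, 16   # batch, sequence length, input dim (x is 'ntc')
    H, D = 2, 4          # nb_heads, dim_qk (weights are 'hdc')

    x = torch.randn(N, T, C)
    w_q = torch.randn(H, D, C)   # same layout as self.w_q after the rename
    w_k = torch.randn(H, D, C)

    q = torch.einsum('ntc,hdc->nhtd', x, w_q)   # (N, H, T, D)
    k = torch.einsum('ntc,hdc->nhtd', x, w_k)   # (N, H, T, D)
    a = torch.einsum('nhtd,nhsd->nhts', q, k).div(math.sqrt(q.size(3)))
    print(a.shape)   # torch.Size([3, 2, 5, 5]): per-head attention logits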