Update.
diff --git a/mygpt.py b/mygpt.py
index 77c29ce..7117e76 100755 (executable)
--- a/mygpt.py
+++ b/mygpt.py
@@ -264,6 +264,7 @@ class MyGPT(nn.Module):
                     m.weight.fill_(1.0)
 
     def forward(self, bs):
+        # print(f"GENERATE {bs.first} {bs.first+bs.nb}")
         bs = BracketedSequence(F.pad(bs.x, (1, -1)), bs.first, bs.nb)
         bs = self.embedding(bs)
         bs = self.trunk(bs)
@@ -278,10 +279,12 @@ class MyGPT(nn.Module):
         self,
         input,
         ar_mask,
+        temperature=1.0,
         deterministic_synthesis=False,
         forbidden_tokens=None,
         forced_biases=None,
     ):
+        sum_logits = 0
         to_generate = (ar_mask.sum(0) > 0).nonzero()
         if to_generate.min() > 0:
             self(
@@ -299,8 +302,13 @@ class MyGPT(nn.Module):
             else:
                 dist = torch.distributions.categorical.Categorical(logits=logits)
                 t_next = dist.sample()
+                sum_logits += logits.log_softmax(dim=-1)[
+                    torch.arange(t_next.size(0)), t_next
+                ].sum()
             input[:, s] = ar_mask[:, s] * t_next + (1 - ar_mask[:, s]) * input[:, s]
 
+        return sum_logits
+
     def record_attention(self, v=True):
         for m in self.modules():
             if isinstance(m, QKVAttention):
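
Note on the last hunk: the added lines accumulate, at every sampling step, the log-probability of the token that was actually drawn. Below is a minimal, self-contained sketch of that accumulation pattern; the batch size, vocabulary size, and loop are illustrative stand-ins, not code from the repository.

    import torch

    sum_logits = 0
    for _ in range(3):  # stand-in for the autoregressive generation loop
        logits = torch.randn(4, 10)  # (batch, vocabulary) scores at one position
        dist = torch.distributions.categorical.Categorical(logits=logits)
        t_next = dist.sample()  # one sampled token index per batch element
        # log_softmax turns the scores into log-probabilities; the advanced index
        # picks, for each row, the log-probability of the token that was sampled.
        sum_logits += logits.log_softmax(dim=-1)[
            torch.arange(t_next.size(0)), t_next
        ].sum()

    print(sum_logits)  # scalar tensor: total log-probability of the sampled tokens

Since per-step log-probabilities add up (chain rule), the returned sum_logits is the model's log-likelihood of the sampled tokens, which a caller could use to score or rank generations.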