mygpt.git / commitdiff (parent: f3a734b)
Added the small weight embedding + id layer norm inits.
author    Francois Fleuret <francois@fleuret.org>  Mon, 8 Aug 2022 05:13:54 +0000 (07:13 +0200)
committer Francois Fleuret <francois@fleuret.org>  Mon, 8 Aug 2022 05:13:54 +0000 (07:13 +0200)
mygpt.py
diff --git a/mygpt.py b/mygpt.py
index 3bce361..ebc9a83 100755
--- a/mygpt.py
+++ b/mygpt.py
@@ -97,10 +97,6 @@ class MyGPT(nn.Module):
             AddPositionalEncoding(len_max),
         )
 
-        # Small embedding initialization
-        with torch.no_grad():
-            self.embedding[0].weight.normal_(0, 2e-2)
-
         trunk_blocks = [ ]
 
         for _ in range(nb_blocks):
@@ -128,6 +124,14 @@ class MyGPT(nn.Module):
 
         self.readout = nn.Linear(in_features = dim_model, out_features = vocabulary_size)
 
+        with torch.no_grad():
+            for m in self.modules():
+                if isinstance(m, nn.Embedding):
+                    m.weight.normal_(mean = 0, std = 2e-2)
+                elif isinstance(m, nn.LayerNorm):
+                    m.bias.zero_()
+                    m.weight.fill_(1.0)
+
     def forward(self, x):
         x = F.pad(x, (1, -1))
         x = self.embedding(x)
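
For context: the commit removes the ad-hoc init of self.embedding[0] and replaces it with a single pass over self.modules() that initializes by module type — every nn.Embedding gets small N(0, 2e-2) weights, and every nn.LayerNorm is reset to the identity (weight 1, bias 0). Below is a minimal standalone sketch of the same pattern, applied to a hypothetical Toy module (not from the repo) rather than MyGPT itself:

    import torch
    from torch import nn

    # Hypothetical Toy module, just to exercise the init loop.
    class Toy(nn.Module):
        def __init__(self, vocabulary_size = 10, dim_model = 8):
            super().__init__()
            self.embedding = nn.Embedding(vocabulary_size, dim_model)
            self.norm = nn.LayerNorm(dim_model)
            # Same pattern as the commit: walk all submodules once and
            # initialize by type instead of by attribute path.
            with torch.no_grad():
                for m in self.modules():
                    if isinstance(m, nn.Embedding):
                        m.weight.normal_(mean = 0, std = 2e-2)  # small embeddings
                    elif isinstance(m, nn.LayerNorm):
                        m.bias.zero_()        # identity layer norm
                        m.weight.fill_(1.0)

    toy = Toy()
    print(toy.embedding.weight.std())   # ~0.02
    print(toy.norm.weight.unique())     # tensor([1.])

Note that with PyTorch defaults the LayerNorm branch is a no-op (affine weight and bias already start at 1 and 0), so its role here is to make the identity init explicit and robust to future changes.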