torch.set_default_dtype(torch.float64)
+nb_hidden = 5
+hidden_dim = 100
+
res = 256
-nh = 100
input = torch.cat(
    [
for activation in [nn.ReLU, nn.Tanh, nn.Softplus, Angles]:
    for s in [1.0, 10.0]:
-        layers = [nn.Linear(2, nh), activation()]
-        nb_hidden = 4
-        for k in range(nb_hidden):
-            layers += [nn.Linear(nh, nh), activation()]
-        layers += [nn.Linear(nh, 2)]
+        layers = [nn.Linear(2, hidden_dim), activation()]
+        for k in range(nb_hidden - 1):
+            layers += [nn.Linear(hidden_dim, hidden_dim), activation()]
+        layers += [nn.Linear(hidden_dim, 2)]
        model = nn.Sequential(*layers)
        with torch.no_grad():
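
After this patch the loop still builds nb_hidden = 5 hidden layers per model (the first Linear(2, hidden_dim) plus nb_hidden - 1 more in the loop), so the architecture is unchanged; the depth and width constants are simply hoisted to the top of the script and nh is renamed to hidden_dim. Below is a minimal, self-contained sketch of the model-building loop as it reads after the change. The Angles class here is a hypothetical stand-in (its real definition, and the use of the scale s, live elsewhere in the script and are not part of this diff).

import torch
from torch import nn

torch.set_default_dtype(torch.float64)

nb_hidden = 5
hidden_dim = 100

class Angles(nn.Module):
    # Hypothetical stand-in: the real Angles activation is defined
    # elsewhere in the original script and is not shown in this diff.
    def forward(self, x):
        return torch.sin(x)

for activation in [nn.ReLU, nn.Tanh, nn.Softplus, Angles]:
    for s in [1.0, 10.0]:
        # First hidden layer plus nb_hidden - 1 further hidden layers:
        # nb_hidden hidden layers in total, same depth as before the patch.
        layers = [nn.Linear(2, hidden_dim), activation()]
        for k in range(nb_hidden - 1):
            layers += [nn.Linear(hidden_dim, hidden_dim), activation()]
        layers += [nn.Linear(hidden_dim, 2)]
        model = nn.Sequential(*layers)
        # The scale s is consumed further down in the original script
        # (not shown in this diff).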