nh = 400

# Two-hidden-layer MLP (scalar in -> scalar out) with dropout after each
# hidden activation; the dropout is what produces the uncertainty band below.
model = nn.Sequential(
    nn.Linear(1, nh),
    nn.ReLU(),
    nn.Dropout(0.25),
    nn.Linear(nh, nh),
    nn.ReLU(),
    nn.Dropout(0.25),
    nn.Linear(nh, 1),
)

# Keep the model in train mode so dropout stays active — presumably the
# predictions in `v` below are repeated stochastic forward passes
# (MC-dropout style); verify against the code that builds `v`.
model.train(True)

criterion = nn.MSELoss()
optimizer = torch.optim.Adam(model.parameters(), lr=1e-4)

# Full-batch training loop; `x` and `y` are tensors defined elsewhere in
# the file. Logs the loss every 100 steps.
for k in range(10000):
    loss = criterion(model(x), y)
    if (k + 1) % 100 == 0:
        print(k + 1, loss.item())
    optimizer.zero_grad()
    loss.backward()
    optimizer.step()

# `v` is assumed to hold several stochastic predictions per abscissa in `u`
# (samples along dim 1) — TODO confirm where `u`/`v` are produced.
mean = v.mean(1)
std = v.std(1)

# Shade the mean ± one-std band, then overlay the mean curve and the
# training points on the axes `ax` created elsewhere.
ax.fill_between(
    u.numpy(),
    (mean - std).detach().numpy(),
    (mean + std).detach().numpy(),
    color="#e0e0e0",
)
ax.plot(u.numpy(), mean.detach().numpy(), color="red")
ax.scatter(x.numpy(), y.numpy())
plt.show()