From: Francois Fleuret
Date: Sun, 4 Apr 2021 15:51:20 +0000 (+0200)
Subject: Update.
X-Git-Url: https://fleuret.org/cgi-bin/gitweb/gitweb.cgi?a=commitdiff_plain;h=0bf74b10ba4561f11d85f23d5e8f7c2cab25c269;p=pytorch.git

Update.
---

diff --git a/miniflow.py b/miniflow.py
index eb2d4c7..04b9a23 100755
--- a/miniflow.py
+++ b/miniflow.py
@@ -33,15 +33,12 @@ def sample_phi(nb):
 
 ######################################################################
 
-# START_LOG_PROBA
 def LogProba(x, ldj):
     log_p = ldj - 0.5 * (x**2 + math.log(2*pi))
     return log_p
-# END_LOG_PROBA
 
 ######################################################################
 
-# START_MODEL
 class PiecewiseLinear(nn.Module):
     def __init__(self, nb, xmin, xmax):
         super().__init__()
@@ -59,7 +56,6 @@ class PiecewiseLinear(nn.Module):
         a = (u - n).clamp(0, 1)
         x = (1 - a) * y[n] + a * y[n + 1]
         return x
-# END_MODEL
 
     def invert(self, y):
         ys = self.alpha + self.xi.exp().cumsum(0).view(1, -1)
@@ -88,7 +84,6 @@ criterion = nn.MSELoss()
 
 for k in range(nb_epochs):
     acc_loss = 0
-# START_OPTIMIZATION
     for input in train_input.split(batch_size):
         input.requires_grad_()
         output = model(input)
@@ -103,7 +98,6 @@ for k in range(nb_epochs):
         optimizer.zero_grad()
         loss.backward()
         optimizer.step()
-# END_OPTIMIZATION
         acc_loss += loss.item()
 
     if k%10 == 0: print(k, loss.item())
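
For reference, the `LogProba` function visible in the first hunk is the change-of-variables log-likelihood of a one-dimensional flow f with a standard Gaussian base: log p(x) = log N(f(x); 0, 1) + log |f'(x)|, where `ldj` is the log-determinant of the Jacobian (in 1D, simply log f'(x)). The diff elides the middle of the optimization loop (old lines 95-102), so the following is only a minimal sketch of how the visible pieces plausibly fit together; in particular, the use of torch.autograd.grad to obtain f'(x) is an assumption, not necessarily the file's actual code.

    import math
    import torch

    def LogProba(x, ldj):
        # As in the diff: Gaussian base log-density plus log-det-Jacobian.
        return ldj - 0.5 * (x**2 + math.log(2 * math.pi))

    # Sketch of one pass over the data; `model`, `optimizer`, `train_input`
    # and `batch_size` are taken to be as in the surrounding file, with
    # `model` the PiecewiseLinear flow (monotone increasing, so f'(x) > 0).
    for input in train_input.split(batch_size):
        input.requires_grad_()
        output = model(input)

        # f'(x) for each sample; create_graph=True lets the loss be
        # backpropagated through the derivative itself.
        derivatives, = torch.autograd.grad(output.sum(), input,
                                           create_graph=True)

        # Negative log-likelihood, with ldj = log f'(x) in 1D.
        loss = -LogProba(output, derivatives.log()).mean()

        optimizer.zero_grad()
        loss.backward()
        optimizer.step()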