5 Copyright (c) 2016 Idiap Research Institute, http://www.idiap.ch/
6 Written by Francois Fleuret <francois.fleuret@idiap.ch>
8 This file is free software: you can redistribute it and/or modify
9 it under the terms of the GNU General Public License version 3 as
10 published by the Free Software Foundation.
12 It is distributed in the hope that it will be useful, but WITHOUT
13 ANY WARRANTY; without even the implied warranty of MERCHANTABILITY
14 or FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public
15 License for more details.
17 You should have received a copy of the GNU General Public License
18 along with this file. If not, see <http://www.gnu.org/licenses/>.
-- Use double precision everywhere so the finite-difference gradient
-- check below is not swamped by single-precision rounding error.
26 torch.setdefaulttensortype('torch.DoubleTensor')
-- checkGrad(model, criterion, input, target)
-- Numerically verifies the model's gradients: performs one
-- forward/backward pass to obtain the analytical gradient of the loss
-- w.r.t. every parameter, then perturbs each scalar parameter by
-- +/-epsilon and compares the central-difference estimate against it,
-- accumulating the worst relative error.
-- NOTE(review): this chunk is a fragment — the lines declaring
-- `epsilon` and `err`, saving/restoring `params[i]` via `x`, closing
-- the loop, and returning `err` are not visible here (the caller below
-- concatenates the return value into a string, so presumably a number
-- is returned — confirm against the full file).
29 function checkGrad(model, criterion, input, target)
30    local params, gradParams = model:getParameters()
-- Analytical gradient: single forward/backward pass.
34    local output = model:forward(input)
35    local loss = criterion:forward(output, target)
36    local gradOutput = criterion:backward(output, target)
38    model:backward(input, gradOutput)
-- Clone: later forward/backward-free perturbation passes must not be
-- confused with this snapshot of the accumulated gradient.
39    local analyticalGradParam = gradParams:clone()
-- Central finite difference, one scalar parameter at a time.
43    for i = 1, params:size(1) do
46       params[i] = x - epsilon
47       local output0 = model:forward(input)
48       local loss0 = criterion:forward(output0, target)
50       params[i] = x + epsilon
51       local output1 = model:forward(input)
52       local loss1 = criterion:forward(output1, target)
56       local ana = analyticalGradParam[i]
-- Central difference: (f(x+eps) - f(x-eps)) / (2*eps).
57       local num = (loss1 - loss0) / (2 * epsilon)
-- Track the worst relative error seen so far.
60       err = math.max(err, torch.abs(num - ana) / torch.abs(num))
-- printTensorTable(t)
-- Pretty-prints a table of tensors, labelling each element with its
-- key before printing it.
-- NOTE(review): fragment — the body of the pairs loop (presumably a
-- recursive call on each element) and the non-table branch are not
-- visible in this chunk; confirm against the full file.
67 function printTensorTable(t)
68    if torch.type(t) == 'table' then
-- Shadowing `t` with the element inside the loop is intentional in
-- the original.
69       for i, t in pairs(t) do
70          print('-- ELEMENT [' .. i .. '] --')
-- ASCII sketch (partial) of the DAG under test.
78 -- +-- Linear(10, 10) --> ReLU --> d -->
81 -- --> a --> b -----------> c ---------------+
84 -- +--------------- e -->
-- Wire b through Linear(10,15) and a ReLU into node d.
-- NOTE(review): fragment — the creation of `dag` and of nodes
-- a/b/c/d/e, plus the other connect() calls, are not visible here.
95 dag:connect(b, nn.Linear(10, 15), nn.ReLU(), d)
-- The DAG exposes two output nodes.
100 dag:setOutput({ d, e })
102 -- We check it works when we put it into a nn.Sequential
103 model = nn.Sequential()
104    :add(nn.Linear(50, 50))
-- Random batch of 30 inputs; the model's current output is cloned and
-- reused as the regression target for the MSE gradient check.
108 local input = torch.Tensor(30, 50):uniform()
109 local output = model:updateOutput(input):clone()
112 print('Gradient estimate error ' .. checkGrad(model, nn.MSECriterion(), input, output))
-- Dump the graph structure for inspection with Graphviz.
114 print('Writing /tmp/graph.dot')
115 dag:saveDot('/tmp/graph.dot')