+-- Global Torch setup for the gradient-check script below.
+-- torch.setnumthreads(params.nbThreads)
+-- Doubles: finite-difference checks need more precision than float32.
+torch.setdefaulttensortype('torch.DoubleTensor')
+-- Fixed seed so runs are reproducible.
+torch.manualSeed(2)
+
+-- Check the analytical gradient of `model`'s parameters against a central
+-- finite-difference estimate of `criterion`'s loss, one parameter at a time.
+--
+--   model     : an nn module exposing :getParameters(), :forward(), :backward()
+--   criterion : an nn criterion exposing :forward(), :backward()
+--   input     : input fed to the model
+--   target    : target fed to the criterion
+--
+-- Returns the worst relative error over all parameters (0 when every
+-- analytical gradient matches its numerical estimate exactly).
+function checkGrad(model, criterion, input, target)
+   local params, gradParams = model:getParameters()
+
+   local epsilon = 1e-5
+
+   -- Analytical gradient via one forward/backward pass.
+   local output = model:forward(input)
+   local loss = criterion:forward(output, target)
+   local gradOutput = criterion:backward(output, target)
+   gradParams:zero()
+   model:backward(input, gradOutput)
+   local analyticalGradParam = gradParams:clone()
+
+   local err = 0
+
+   for i = 1, params:size(1) do
+      local x = params[i]
+
+      -- Central difference: (L(x + eps) - L(x - eps)) / (2 * eps).
+      params[i] = x - epsilon
+      local loss0 = criterion:forward(model:forward(input), target)
+
+      params[i] = x + epsilon
+      local loss1 = criterion:forward(model:forward(input), target)
+
+      params[i] = x -- restore the parameter before moving on
+
+      local ana = analyticalGradParam[i]
+      local num = (loss1 - loss0) / (2 * epsilon)
+
+      if num ~= ana then
+         -- math.abs, not torch.abs: num/ana are plain Lua numbers and
+         -- torch.abs expects a Tensor. Normalize by the larger magnitude so
+         -- a zero numerical gradient cannot cause a division by zero; inside
+         -- this branch at least one of num/ana is nonzero, so scale > 0.
+         local scale = math.max(math.abs(num), math.abs(ana))
+         err = math.max(err, math.abs(num - ana) / scale)
+      end
+   end
+
+   return err
+end
+