diff --git a/dagnn.lua b/dagnn.lua
index ca51841..0e7e8b0 100755
--- a/dagnn.lua
+++ b/dagnn.lua
@@ -26,7 +26,7 @@ local DAG, parent = torch.class('nn.DAG', 'nn.Container')
 function DAG:__init()
    parent.__init(self)
    -- Nodes are indexed by the module they contain
-   self.node = { }
+   self.node = {}
 end
 
 -- Apply f on t recursively; use the corresponding elements from args
@@ -76,7 +76,7 @@ function DAG:putInOrder()
       end
    until nc == 0
 
-   self.sorted = { }
+   self.sorted = {}
    for m, d in pairs(distance) do
       table.insert(self.sorted, { distance = d, nnm = m })
    end
@@ -86,20 +86,37 @@ function DAG:putInOrder()
    for i, a in ipairs(self.sorted) do self.sorted[i] = a.nnm end
 end
 
-function DAG:computeGradOutput(gradInputSucc)
-   local gi
+-- This accumulates x into a, where both are nested tables of
+-- tensors. If first is true, a is set to a copy of x.
+function DAG:nestedAccTensor(a, x, first)
+   if torch.type(x) == 'table' then
+      a = a or {}
+      for i in pairs(x) do
+         a[i] = self:nestedAccTensor(a[i], x[i], first)
+      end
+   else
+      if first then
+         if a then
+            a:resizeAs(x):copy(x)
+         else
+            a = x:clone()
+         end
+      else
+         a:add(x)
+      end
+   end
+   return a
+end
+
+function DAG:updateGradOutput(node)
+   local gradInputSucc = node.gradInputSucc
    if #gradInputSucc == 1 then
-      gi = gradInputSucc[1] -- we avoid a clone()
+      node.gradOutput = gradInputSucc[1]
    elseif #gradInputSucc > 1 then
       for k = 1, #gradInputSucc do
-         if gi then
-            gi:add(gradInputSucc[k])
-         else
-            gi = gradInputSucc[k]:clone()
-         end
+         node.gradOutput = self:nestedAccTensor(node.gradOutput, gradInputSucc[k], k == 1)
       end
    end
-   return gi
 end
 
 ----------------------------------------------------------------------
@@ -167,20 +184,26 @@ function DAG:saveDot(filename)
 
    file:write('\n')
 
-   for nnma, node in pairs(self.node) do
+   for nnmb, node in pairs(self.node) do
       file:write(
          '   ' .. node.index
-            .. ' [shape=box,label=\"' .. torch.type(nnma) .. '\"]'
+            .. ' [shape=box,label=\"' .. torch.type(nnmb) .. '\"]'
            .. '\n'
      )
 
-      for _, nnmb in pairs(node.succ) do
+      for i, nnma in pairs(node.pred) do
+         local decoration = ''
+         if #node.pred > 1 then
+            -- decoration = ' [headlabel=\"' .. i .. '\"]'
+            decoration = ' [label=\"' .. i .. '\"]'
+         end
         file:write(
            '      '
-               .. node.index
+               .. self.node[nnma].index
              .. ' -> '
              .. self.node[nnmb].index
+               .. decoration
              .. '\n'
        )
     end
 
@@ -200,7 +223,6 @@ function DAG:updateOutput(input)
    self:nestedApply(
       function(nnm, i)
          self.node[nnm].input = i
-         -- nnm:updateOutput(i)
         self:rethrowErrors(nnm, self.node[nnm].index, 'updateOutput', i)
      end,
      self.inputModules,
@@ -220,7 +242,6 @@ function DAG:updateOutput(input)
             end
          end
          node.input = i
-         -- nnm:updateOutput(i)
         self:rethrowErrors(nnm, self.node[nnm].index, 'updateOutput', i)
      end
   end
@@ -234,12 +255,13 @@ function DAG:updateOutput(input)
 end
 
 function DAG:updateGradInput(input, gradOutput)
-   assert(self.sorted, 'there has been a DAG structure change before a DAG:updateGradInput')
+   assert(self.sorted, 'There has been a DAG structure change before a DAG:updateGradInput')
 
    self:nestedApply(
       function(nnm, go)
-         -- nnm:updateGradInput(self.node[nnm].input, go)
-         self:rethrowErrors(nnm, self.node[nnm].index, 'updateGradInput', self.node[nnm].input, go)
+         local node = self.node[nnm]
+         node.gradOutput = go
+         self:rethrowErrors(nnm, node.index, 'updateGradInput', self.node[nnm].input, go)
      end,
      self.outputModules, gradOutput
   )
@@ -256,11 +278,10 @@ function DAG:updateGradInput(input, gradOutput)
    for k = #self.sorted, 1, -1 do
       local nnm = self.sorted[k]
       local node = self.node[nnm]
-      local pred, gradInputSucc = node.pred, node.gradInputSucc
+      local pred = node.pred
 
-      if #gradInputSucc > 0 then
-         node.gradOutput = self:computeGradOutput(gradInputSucc)
-         -- nnm:updateGradInput(node.input, node.gradOutput)
+      if #node.gradInputSucc > 0 then
+         self:updateGradOutput(node)
          self:rethrowErrors(nnm, self.node[nnm].index, 'updateGradInput', node.input, node.gradOutput)
       end
 
@@ -285,12 +306,31 @@ end
 function DAG:accGradParameters(input, gradOutput, scale)
    scale = scale or 1
 
-   assert(self.sorted, 'there has been a DAG structure change before a DAG:accGradParameters')
+   assert(self.sorted, 'There has been a DAG structure change before a DAG:accGradParameters')
+
+   self:nestedApply(
+      function(nnm, go) self.node[nnm].gradOutput = go end,
+      self.outputModules, gradOutput
+   )
+
+   self:nestedApply(
+      function(nnm, i) self.node[nnm].input = i end,
+      self.inputModules, input
+   )
 
    for k = 1, #self.modules do
       local nnm = self.modules[k]
       local node = self.node[nnm]
-      -- nnm:accGradParameters(node.input, node.gradOutput, scale)
-      self:rethrowErrors(nnm, k, 'accGradParameters', node.input, self:computeGradOutput(node.gradInputSucc), scale)
+      self:rethrowErrors(nnm, k, 'accGradParameters', node.input, node.gradOutput, scale)
+   end
+end
+
+function DAG:clearState()
+   self.sorted = nil
+   for _, node in pairs(self.node) do
+      node.gradInputSucc = nil
+      node.input = nil
+      node.gradOutput = nil
    end
+   return parent.clearState(self)
 end
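
The new nestedAccTensor generalizes the accumulation that computeGradOutput
used to do on plain tensors: gradients arriving from several successors are
summed even when each gradInput is a nested table of tensors. A minimal
sketch of its semantics, assuming the patched nn.DAG is loadable with
require 'dagnn' (this snippet is illustrative, not part of the patch):

require 'torch'
require 'nn'
require 'dagnn'

local dag = nn.DAG()

-- Two successors each hand back a nested table of gradients.
local g1 = { torch.ones(3), { torch.ones(2) } }
local g2 = { torch.ones(3), { torch.ones(2) } }

-- first == true copies g1 into the accumulator; later calls add in place.
local acc
acc = dag:nestedAccTensor(acc, g1, true)
acc = dag:nestedAccTensor(acc, g2, false)

print(acc[1])    -- 3-vector filled with 2
print(acc[2][1]) -- 2-vector filled with 2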
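
And a hedged end-to-end sketch of driving the container after this change.
The connect, setInput and setOutput methods live elsewhere in dagnn.lua and
their variadic chaining behavior is assumed here; only saveDot, updateOutput,
updateGradInput, accGradParameters and clearState appear in the hunks above:

require 'torch'
require 'nn'
require 'dagnn'

local a = nn.Linear(10, 10)
local b = nn.ReLU()
local c = nn.Linear(10, 10)
local d = nn.Linear(10, 10)
local e = nn.CAddTable() -- two predecessors, so its incoming edges get labeled

local model = nn.DAG()
model:connect(a, b)      -- assumed: connect(m1, m2, ...) chains m1 -> m2 -> ...
model:connect(b, c, e)   -- b has two successors, so its backward pass
model:connect(b, d, e)   -- goes through updateGradOutput/nestedAccTensor
model:setInput(a)
model:setOutput(e)

local input = torch.randn(5, 10)
local output = model:updateOutput(input) -- 5x10 tensor

local gradOutput = output:clone():fill(1)
model:updateGradInput(input, gradOutput)
model:accGradParameters(input, gradOutput, 1)

model:saveDot('/tmp/graph.dot') -- e's two incoming edges carry an index label
model:clearState()              -- frees input/gradOutput/gradInputSucc buffers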