X-Git-Url: https://fleuret.org/cgi-bin/gitweb/gitweb.cgi?a=blobdiff_plain;f=dagnn.lua;h=cf452338f3749dded66500717117464ee6c82e70;hb=3aaa833355075f2d27671761490214edfbe48255;hp=de9d29b8d3e15c2d0575144f29ee0e1ed1a881de;hpb=56a476ee19396d0e7f186b238dc7d013000acb59;p=dagnn.git

diff --git a/dagnn.lua b/dagnn.lua
index de9d29b..cf45233 100755
--- a/dagnn.lua
+++ b/dagnn.lua
@@ -26,7 +26,7 @@ local DAG, parent = torch.class('nn.DAG', 'nn.Container')
 function DAG:__init()
    parent.__init(self)
    -- Nodes are indexed by the module they contain
-   self.node = { }
+   self.node = {}
 end
 
 -- Apply f on t recursively; use the corresponding elements from args
@@ -76,7 +76,11 @@ function DAG:putInOrder()
       end
    until nc == 0
 
-   self.sorted = { }
+   for _, nnm in pairs(self.modules) do
+      assert(distance[nnm], 'Some modules are not connected to inputs')
+   end
+
+   self.sorted = {}
    for m, d in pairs(distance) do
       table.insert(self.sorted, { distance = d, nnm = m })
    end
@@ -86,18 +90,37 @@ function DAG:putInOrder()
    for i, a in ipairs(self.sorted) do self.sorted[i] = a.nnm end
 end
 
+-- This accumulates x in a where they are both nested tables of
+-- tensors. If first is true, set a = x. Behavior is undefined if a
+-- and x do not have the exact same structure.
+function DAG:nestedAccTensor(a, x, first)
+   if torch.type(x) == 'table' then
+      local b = {}
+      for i in pairs(x) do
+         b[i] = self:nestedAccTensor(a[i], x[i], first)
+      end
+      a = b
+   else
+      if first then
+         if a then
+            a:resizeAs(x):copy(x)
+         else
+            a = x:clone()
+         end
+      else
+         a:add(x)
+      end
+   end
+   return a
+end
+
 function DAG:updateGradOutput(node)
    local gradInputSucc = node.gradInputSucc
    if #gradInputSucc == 1 then
       node.gradOutput = gradInputSucc[1]
    elseif #gradInputSucc > 1 then
-      if node.gradOutput then
-         node.gradOutput:resize(gradInputSucc[1]):copy(gradInputSucc[1])
-      else
-         node.gradOutput = gradInputSucc[1]:clone()
-      end
-      for k = 2, #gradInputSucc do
-         node.gradOutput:add(gradInputSucc[k])
+      for k = 1, #gradInputSucc do
+         node.gradOutput = self:nestedAccTensor(node.gradOutput, gradInputSucc[k], k == 1)
       end
    end
 end
@@ -123,12 +146,8 @@ function DAG:setInput(i)
    self.inputModules = i
    self:nestedApply(
       function(nnm)
-         if #self.node[nnm].succ == 0 then
-            error('Input modules must have outgoing edges.')
-         end
-         if #self.node[nnm].pred > 0 then
-            error('Input modules cannog have incoming edges.')
-         end
+         assert(#self.node[nnm].succ > 0, 'Input modules must have outgoing edges.')
+         assert(#self.node[nnm].pred == 0, 'Input modules cannot have incoming edges.')
       end,
       self.inputModules
    )
@@ -139,12 +158,8 @@ function DAG:setOutput(o)
    self.outputModules = o
    self:nestedApply(
      function(nnm)
-         if #self.node[nnm].pred == 0 then
-            error('Output module must have incoming edges.')
-         end
-         if #self.node[nnm].succ > 0 then
-            error('Output module cannot have outgoing edges.')
-         end
+         assert(#self.node[nnm].pred > 0, 'Output module must have incoming edges.')
+         assert(#self.node[nnm].succ == 0, 'Output module cannot have outgoing edges.')
      end,
      self.outputModules
   )
@@ -205,8 +220,9 @@ function DAG:updateOutput(input)
 
    self:nestedApply(
       function(nnm, i)
-         self.node[nnm].input = i
-         self:rethrowErrors(nnm, self.node[nnm].index, 'updateOutput', i)
+         local node = self.node[nnm]
+         node.input = i
+         self:rethrowErrors(nnm, node.index, 'updateOutput', i)
       end,
       self.inputModules,
       input
@@ -225,7 +241,7 @@ function DAG:updateOutput(input)
             end
          end
          node.input = i
-         self:rethrowErrors(nnm, self.node[nnm].index, 'updateOutput', i)
+         self:rethrowErrors(nnm, node.index, 'updateOutput', i)
       end
    end
 
@@ -244,7 +260,7 @@ function DAG:updateGradInput(input, gradOutput)
      function(nnm, go)
         local node = self.node[nnm]
         node.gradOutput = go
-        self:rethrowErrors(nnm, node.index, 'updateGradInput', self.node[nnm].input, go)
+        self:rethrowErrors(nnm, node.index, 'updateGradInput', node.input, go)
      end,
      self.outputModules, gradOutput
   )
@@ -265,16 +281,15 @@ function DAG:updateGradInput(input, gradOutput)
 
       if #node.gradInputSucc > 0 then
          self:updateGradOutput(node)
-         self:rethrowErrors(nnm, self.node[nnm].index, 'updateGradInput', node.input, node.gradOutput)
+         self:rethrowErrors(nnm, node.index, 'updateGradInput', node.input, node.gradOutput)
       end
 
       -- We fill the gradInputSucc of our predecessors
       if #pred == 1 then
         table.insert(self.node[pred[1]].gradInputSucc, nnm.gradInput)
      elseif #pred > 1 then
-         if not torch.type(nnm.gradInput) == 'table' then
-            error('Should have a table gradInput since it has multiple predecessors')
-         end
+         assert(torch.type(nnm.gradInput) == 'table',
+                'Should have a table gradInput since it has multiple predecessors')
         for n = 1, #pred do
            table.insert(self.node[node.pred[n]].gradInputSucc, nnm.gradInput[n])
         end
@@ -287,8 +302,6 @@ function DAG:updateGradInput(input, gradOutput)
 end
 
 function DAG:accGradParameters(input, gradOutput, scale)
-   scale = scale or 1
-
    assert(self.sorted, 'There has been a DAG structure change before a DAG:accGradParameters')
 
    self:nestedApply(
@@ -307,3 +320,13 @@ function DAG:accGradParameters(input, gradOutput, scale)
          self:rethrowErrors(nnm, k, 'accGradParameters', node.input, node.gradOutput, scale)
       end
    end
+
+function DAG:clearState()
+   self.sorted = nil
+   for _, node in pairs(self.node) do
+      node.gradInputSucc = nil
+      node.input = nil
+      node.gradOutput = nil
+   end
+   return parent.clearState(self)
+end
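
Note on the new DAG:nestedAccTensor: on the first accumulation (first == true) the incoming gradient is copied into the accumulator, cloning it if the accumulator does not exist yet; subsequent calls add in place. This is exactly how the rewritten DAG:updateGradOutput drives it, passing k == 1 as the first flag. A minimal sketch of that behavior on plain tensors, assuming torch is installed and this file is loadable as 'dagnn':

   require 'torch'
   require 'nn'
   require 'dagnn'   -- assumed require path for this file

   local dag = nn.DAG()

   -- First call: no accumulator yet, so the incoming tensor is cloned.
   local acc = dag:nestedAccTensor(nil, torch.ones(3), true)

   -- Later calls accumulate in place: acc now holds { 2, 2, 2 }.
   acc = dag:nestedAccTensor(acc, torch.ones(3), false)

The same recursion handles nested tables of tensors, which the old resize/copy/add loop could not; per the comment in the patch, both arguments must share the exact same structure, anything else is undefined.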
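
For orientation, a hedged end-to-end sketch of where the patched methods are reached. The connect() wiring call is part of dagnn.lua but does not appear in this diff, so treat its exact signature as an assumption from the rest of the file; likewise, dropping 'scale = scale or 1' relies on the leaf nn modules applying their own default scale of 1:

   require 'torch'
   require 'nn'
   require 'dagnn'   -- assumed require path for this file

   local a = nn.Linear(10, 10)
   local b = nn.ReLU()

   local dag = nn.DAG()
   dag:connect(a, b)   -- assumed API, not shown in this diff; builds pred/succ edges
   dag:setInput(a)     -- now asserts: outgoing edges, no incoming ones
   dag:setOutput(b)    -- now asserts: incoming edges, no outgoing ones

   local input = torch.randn(5, 10)
   local output = dag:updateOutput(input)   -- putInOrder now also checks connectivity
   local gradOutput = output:clone():fill(1)
   dag:updateGradInput(input, gradOutput)
   dag:accGradParameters(input, gradOutput) -- nil scale defaults to 1 inside nn.Linear

   dag:clearState()    -- new: drops the sorted order and the per-node buffers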