X-Git-Url: https://fleuret.org/cgi-bin/gitweb/gitweb.cgi?p=dagnn.git;a=blobdiff_plain;f=dagnn.lua;h=b82398c0de429bd19875776fa465222a84504bbe;hp=c6d54ad5a057d72c4029ee6c84b87a6e03a30e7d;hb=HEAD;hpb=0a630b54355382dfa68c0f3d51729bad0b4c58e6

diff --git a/dagnn.lua b/dagnn.lua
index c6d54ad..b82398c 100755
--- a/dagnn.lua
+++ b/dagnn.lua
@@ -26,7 +26,22 @@ local DAG, parent = torch.class('nn.DAG', 'nn.Container')
 function DAG:__init()
    parent.__init(self)
    -- Nodes are indexed by the module they contain
-   self.node = { }
+   self.node = {}
+end
+
+-- Apply f on t recursively; use the corresponding elements from args
+-- (i.e. same keys) as second parameter to f when available; return
+-- the results from f, organized in a similarly nested table.
+function DAG:nestedApply(f, t, args)
+   if torch.type(t) == 'table' then
+      local result = {}
+      for k, s in pairs(t) do
+         result[k] = self:nestedApply(f, s, args and args[k])
+      end
+      return result
+   else
+      return f(t, args)
+   end
 end
 
 function DAG:createNode(nnm)
@@ -40,9 +55,87 @@ function DAG:createNode(nnm)
    end
 end
 
--- The main use should be to add an edge between two modules, but it
--- can also add a full sequence of modules
-function DAG:addEdge(...)
+function DAG:putInOrder()
+   if self.sorted then
+      return
+   end
+
+   local distance = {}
+   self:nestedApply(
+      function(m) distance[m] = 1 end,
+      self.inputModules
+   )
+
+   local nc
+   local nl = 0
+   repeat
+      assert(nl < #self.modules, 'Cycle detected in the graph.')
+      nc = 0
+      for nnma, node in pairs(self.node) do
+         for _, nnmb in pairs(node.succ) do
+            if distance[nnma] and (not distance[nnmb] or distance[nnmb] < distance[nnma] + 1) then
+               distance[nnmb] = distance[nnma] + 1
+               nc = nc + 1
+            end
+         end
+      end
+      nl = nl + 1
+   until nc == 0
+
+   for _, nnm in pairs(self.modules) do
+      assert(distance[nnm], 'Some modules are not connected to inputs.')
+   end
+
+   self.sorted = {}
+   for m, d in pairs(distance) do
+      table.insert(self.sorted, { distance = d, nnm = m })
+   end
+
+   table.sort(self.sorted, function(a, b) return a.distance < b.distance end)
+
+   for i, a in ipairs(self.sorted) do self.sorted[i] = a.nnm end
+end
+
+-- This accumulates x in a, where they are both nested tables of
+-- tensors with same structures / keys. If first is true, set a = x
+-- (in which case a can be nil) otherwise a = a + x. The behavior is
+-- undefined if a and x do not have the exact same structure.
+function DAG:nestedAccTensor(a, x, first)
+   if torch.type(x) == 'table' then
+      local b = {}
+      for i in pairs(x) do
+         b[i] = self:nestedAccTensor(a[i], x[i], first)
+      end
+      a = b
+   else
+      if first then
+         if a then
+            a:resizeAs(x):copy(x)
+         else
+            a = x:clone()
+         end
+      else
+         a:add(x)
+      end
+   end
+   return a
+end
+
+function DAG:updateGradOutput(node)
+   local gradInputSucc = node.gradInputSucc
+   if #gradInputSucc == 1 then
+      node.gradOutput = gradInputSucc[1]
+   elseif #gradInputSucc > 1 then
+      for k = 1, #gradInputSucc do
+         node.gradOutput = self:nestedAccTensor(node.gradOutput, gradInputSucc[k], k == 1)
+      end
+   end
+end
+
+----------------------------------------------------------------------
+
+-- Connect a sequence of modules
+function DAG:connect(...)
    self.sorted = nil
    local prev
    for _, nnm in pairs({...}) do
@@ -55,19 +148,8 @@ function DAG:addEdge(...)
    end
 end
 
--- Apply f on t recursively; use the corresponding element from args
--- (i.e. same keys) as second parameter to f when available; return
--- the results from f, organized in a similarly nested table.
-function DAG:nestedApply(f, t, args)
-   if torch.type(t) == 'table' then
-      local result = {}
-      for k, s in pairs(t) do
-         result[k] = self:nestedApply(f, s, args and args[k])
-      end
-      return result
-   else
-      return f(t, args)
-   end
+function DAG:setLabel(nnm, label)
+   self.node[nnm].label = label
 end
 
 function DAG:setInput(i)
@@ -75,12 +157,8 @@ function DAG:setInput(i)
    self.inputModules = i
    self:nestedApply(
       function(nnm)
-         if #self.node[nnm].succ == 0 then
-            error('Input modules must have outgoing edges.')
-         end
-         if #self.node[nnm].pred > 0 then
-            error('Input modules cannog have incoming edges.')
-         end
+         assert(#self.node[nnm].succ > 0, 'Input modules must have outgoing edges.')
+         assert(#self.node[nnm].pred == 0, 'Input modules cannot have incoming edges.')
       end,
       self.inputModules
    )
@@ -91,70 +169,81 @@ function DAG:setOutput(o)
    self.outputModules = o
    self:nestedApply(
       function(nnm)
-         if #self.node[nnm].pred == 0 then
-            error('Output module must have incoming edges.')
-         end
-         if #self.node[nnm].succ > 0 then
-            error('Output module cannot have outgoing edges.')
-         end
+         assert(#self.node[nnm].pred > 0, 'Output module must have incoming edges.')
+         assert(#self.node[nnm].succ == 0, 'Output module cannot have outgoing edges.')
      end,
       self.outputModules
    )
 end
 
-function DAG:putInOrder()
-   if self.sorted then
-      return
+function DAG:print()
+   self:putInOrder()
+
+   for i, d in ipairs(self.sorted) do
+      local decoration = ''
+      if self.node[d].label then
+         decoration = ' [' .. self.node[d].label .. ']'
+      end
+      print('#' .. i .. ' -> ' .. torch.type(d) .. decoration)
    end
+end
 
-   local distance = {}
-   self:nestedApply(function(m) distance[m] = 1 end, self.inputModules)
+----------------------------------------------------------------------
 
-   local nc
-   repeat
-      nc = 0
-      for nnma, node in pairs(self.node) do
-         for _, nnmb in pairs(node.succ) do
-            if distance[nnma] and (not distance[nnmb] or distance[nnmb] < distance[nnma] + 1) then
-               distance[nnmb] = distance[nnma] + 1
-               nc = nc + 1
-            end
+function DAG:saveDot(filename)
+   local file = (filename and io.open(filename, 'w')) or io.stdout
+
+   local function writeNestedCluster(prefix, list, indent)
+      local indent = indent or ''
+      if torch.type(list) == 'table' then
+         file:write(indent .. ' subgraph cluster_' .. prefix .. ' {\n');
+         for k, x in pairs(list) do
+            writeNestedCluster(prefix .. '_' .. k, x, ' ' .. indent)
          end
+         file:write(indent .. ' }\n');
+      else
+         file:write(indent .. ' ' .. self.node[list].index .. ' [color=red]\n')
       end
-   until nc == 0
-
-   self.sorted = { }
-   for m, d in pairs(distance) do
-      table.insert(self.sorted, { distance = d, nnm = m })
   end
 
-   table.sort(self.sorted, function(a, b) return a.distance < b.distance end)
+   file:write('digraph {\n')
 
-   for i, a in ipairs(self.sorted) do self.sorted[i] = a.nnm end
-end
+   file:write('\n')
 
-function DAG:computeGradOutput(gradInputSucc)
-   local gi
-   if #gradInputSucc == 1 then
-      gi = gradInputSucc[1] -- we avoid a clone()
-   elseif #gradInputSucc > 1 then
-      for k = 1, #gradInputSucc do
-         if gi then
-            gi:add(gradInputSucc[k])
-         else
-            gi = gradInputSucc[k]:clone()
+   writeNestedCluster('input', self.inputModules)
+   writeNestedCluster('output', self.outputModules)
+
+   file:write('\n')
+
+   for nnmb, node in pairs(self.node) do
+      file:write(
+         ' '
+         .. node.index
+         .. ' [shape=box,label=\"' .. (self.node[nnmb].label or torch.type(nnmb)) .. '\"]'
+         .. '\n'
+      )
+
+      for i, nnma in pairs(node.pred) do
+         local decoration = ''
+         if #node.pred > 1 then
+            -- decoration = ' [headlabel=\"' .. i .. '\"]'
+            decoration = ' [label=\"' .. i .. '\"]'
         end
+         file:write(
+            ' '
+            .. self.node[nnma].index
+            .. ' -> '
+            .. self.node[nnmb].index
+            .. decoration
+            .. '\n'
+         )
      end
+
+      file:write('\n')
   end
-   return gi
-end
 
-function DAG:print()
-   self:putInOrder()
+   file:write('}\n')
 
-   for i, d in ipairs(self.sorted) do
-      print('#' .. i .. ' -> ' .. torch.type(d))
-   end
 end
 
 ----------------------------------------------------------------------
@@ -164,9 +253,9 @@ function DAG:updateOutput(input)
 
    self:nestedApply(
       function(nnm, i)
-         self.node[nnm].input = i
-         -- nnm:updateOutput(i)
-         self:rethrowErrors(nnm, self.node[nnm].index, 'updateOutput', i)
+         local node = self.node[nnm]
+         node.input = i
+         self:rethrowErrors(nnm, node.index, 'updateOutput', i)
      end,
       self.inputModules,
       input
@@ -174,19 +263,19 @@ function DAG:updateOutput(input)
 
    for _, nnm in ipairs(self.sorted) do
       local node = self.node[nnm]
-      if #node.pred > 0 then
+      local pred = node.pred
+      if #pred > 0 then
         local i
-         if #node.pred == 1 then
-            i = node.pred[1].output
-         elseif #node.pred > 1 then
+         if #pred == 1 then
+            i = pred[1].output
+         elseif #pred > 1 then
            i = {}
-            for k = 1, #node.pred do
-               i[k] = node.pred[k].output
+            for k = 1, #pred do
+               i[k] = pred[k].output
            end
         end
         node.input = i
-         -- nnm:updateOutput(i)
-         self:rethrowErrors(nnm, self.node[nnm].index, 'updateOutput', i)
+         self:rethrowErrors(nnm, node.index, 'updateOutput', i)
      end
   end
 
@@ -199,12 +288,13 @@ function DAG:updateOutput(input)
 end
 
 function DAG:updateGradInput(input, gradOutput)
-   assert(self.sorted, 'there has been a DAG structure change before a DAG:updateGradInput')
+   assert(self.sorted, 'There has been a structure change before a DAG:updateGradInput.')
 
    self:nestedApply(
      function(nnm, go)
-         -- nnm:updateGradInput(self.node[nnm].input, go)
-         self:rethrowErrors(nnm, self.node[nnm].index, 'updateGradInput', self.node[nnm].input, go)
+         local node = self.node[nnm]
+         node.gradOutput = go
+         self:rethrowErrors(nnm, node.index, 'updateGradInput', node.input, go)
      end,
      self.outputModules, gradOutput
   )
@@ -221,75 +311,59 @@ function DAG:updateGradInput(input, gradOutput)
   for k = #self.sorted, 1, -1 do
      local nnm = self.sorted[k]
      local node = self.node[nnm]
-      local pred, gradInputSucc = node.pred, node.gradInputSucc
+      local pred = node.pred
 
-      if #gradInputSucc > 0 then
-         node.gradOutput = self:computeGradOutput(gradInputSucc)
-         -- nnm:updateGradInput(node.input, node.gradOutput)
-         self:rethrowErrors(nnm, self.node[nnm].index, 'updateGradInput', node.input, node.gradOutput)
+      if #node.gradInputSucc > 0 then
+         self:updateGradOutput(node)
+         self:rethrowErrors(nnm, node.index, 'updateGradInput', node.input, node.gradOutput)
      end
 
      -- We fill the gradInputSucc of our predecessors
      if #pred == 1 then
        table.insert(self.node[pred[1]].gradInputSucc, nnm.gradInput)
      elseif #pred > 1 then
-         if not torch.type(nnm.gradInput) == 'table' then
-            error('Should have a table gradInput since it has multiple predecessors')
-         end
+         assert(torch.type(nnm.gradInput) == 'table',
+                'Should have a table gradInput since it has multiple predecessors.')
        for n = 1, #pred do
-            table.insert(self.node[node.pred[n]].gradInputSucc, nnm.gradInput[n])
+            table.insert(self.node[pred[n]].gradInputSucc, nnm.gradInput[n])
        end
      end
   end
 
-   self.gradInput = self:nestedApply(function(m) return m.gradInput end, self.inputModules)
+   self.gradInput = self:nestedApply(
+      function(m) return m.gradInput end,
+      self.inputModules
+   )
 
   return self.gradInput
 end
 
 function DAG:accGradParameters(input, gradOutput, scale)
-   scale = scale or 1
+   assert(self.sorted, 'There has been a structure change before a DAG:accGradParameters.')
+
+   self:nestedApply(
+      function(nnm, go) self.node[nnm].gradOutput = go end,
+      self.outputModules, gradOutput
+   )
 
-   assert(self.sorted, 'there has been a DAG structure change before a DAG:accGradParameters')
+   self:nestedApply(
+      function(nnm, i) self.node[nnm].input = i end,
+      self.inputModules, input
+   )
 
   for k = 1, #self.modules do
      local nnm = self.modules[k]
      local node = self.node[nnm]
-      -- nnm:accGradParameters(node.input, node.gradOutput, scale)
-      self:rethrowErrors(nnm, k, 'accGradParameters', node.input, self:computeGradOutput(node.gradInputSucc), scale)
+      self:rethrowErrors(nnm, k, 'accGradParameters', node.input, node.gradOutput, scale)
   end
 end
 
-----------------------------------------------------------------------
-
-function DAG:dot(filename)
-   local file = (filename and io.open(filename, 'w')) or io.stdout
-
-   file:write('digraph {\n')
-
-   file:write('\n')
-
-   for nnma, node in pairs(self.node) do
-      file:write(
-         ' '
-         .. node.index
-         .. ' [shape=box,label=\"' .. torch.type(nnma) .. '\"]'
-         .. '\n'
-      )
-
-      for _, nnmb in pairs(node.succ) do
-         file:write(
-            ' '
-            .. node.index
-            .. ' -> '
-            .. self.node[nnmb].index
-            .. '\n'
-         )
-      end
-
-      file:write('\n')
+function DAG:clearState()
+   self.sorted = nil
+   for _, node in pairs(self.node) do
+      node.input = nil
+      node.gradInputSucc = nil
+      node.gradOutput = nil
   end
-
-   file:write('}\n')
-
+   return parent.clearState(self)
 end
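
The snippet below is a usage sketch of the nn.DAG API as it stands after this commit (connect, setInput, setOutput, setLabel, print, saveDot, plus the forward/backward calls inherited from nn.Module). It is illustrative only: the module choices, tensor sizes, output path, and the require name 'dagnn' are assumptions, not part of the diff.

require 'torch'
require 'nn'
require 'dagnn'   -- assumes dagnn.lua is reachable on the Lua package path

-- Two branches a -> b -> d and a -> c -> d; the CAddTable node d
-- therefore receives a table of two tensors, as built by updateOutput.
local a = nn.Linear(10, 10)
local b = nn.ReLU()
local c = nn.Linear(10, 10)
local d = nn.CAddTable()

local model = nn.DAG()
model:connect(a, b, d)
model:connect(a, c, d)
model:setInput(a)
model:setOutput(d)

model:setLabel(d, 'sum')          -- label picked up by print() and saveDot()
model:print()                     -- modules listed in topological order
model:saveDot('/tmp/dagnn.dot')   -- Graphviz dump of the graph (path is illustrative)

local x = torch.randn(5, 10)
local y = model:forward(x)
local dx = model:backward(x, torch.randn(5, 10))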