require 'torch'
require 'nn'

local DAG, parent = torch.class('nn.DAG', 'nn.Container')

function DAG:__init()
parent.__init(self)
-- Nodes are indexed by the module they contain
- self.node = { }
+ self.node = {}
end
--- Apply f on t recursively; use the corresponding element from args
+-- Apply f on t recursively; use the corresponding elements from args
-- (i.e. same keys) as second parameter to f when available; return
-- the results from f, organized in a similarly nested table.
function DAG:nestedApply(f, t, args)
   if torch.type(t) == 'table' then
      local result = {}
      for k, s in pairs(t) do
         result[k] = self:nestedApply(f, s, args and args[k])
      end
      return result
   else
      return f(t, args)
   end
end
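
-- A sketch of what nestedApply computes (the values here are
-- illustrative, not part of the class):
--
--   local dag = nn.DAG()
--   local r = dag:nestedApply(
--      function(x, y) return x + (y or 0) end,
--      { a = 1, b = { 2, 3 } },
--      { a = 10, b = { 20, 30 } }
--   )
--   -- r == { a = 11, b = { 22, 33 } }: the structure of t is
--   -- preserved, and args is matched key by key when present.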
function DAG:createNode(nnm)
   if not self.node[nnm] then
      self:add(nnm) -- Add it to the object as a Container
      local node = {}
      node.succ = {}
      node.pred = {}
      node.index = #self.modules
      self.node[nnm] = node
   end
end

function DAG:putInOrder()
   if self.sorted then
      return
   end

   local distance = {}
- self:nestedApply(function(m) distance[m] = 1 end, self.inputModules)
+ self:nestedApply(
+ function(m) distance[m] = 1 end,
+ self.inputModules
+ )
local nc
+ local nl = 0
   repeat
      nc = 0
      for nnma, node in pairs(self.node) do
         for _, nnmb in pairs(node.succ) do
            if distance[nnma] and (not distance[nnmb] or distance[nnmb] < distance[nnma] + 1) then
               distance[nnmb] = distance[nnma] + 1
               nc = nc + 1
            end
         end
      end
+     assert(nl < #self.modules, 'Cycle detected in the graph.')
+     nl = nl + 1
   until nc == 0
- self.sorted = { }
+ for _, nnm in pairs(self.modules) do
+ assert(distance[nnm], 'Some modules are not connected to inputs')
+ end
+
+ self.sorted = {}
for m, d in pairs(distance) do
table.insert(self.sorted, { distance = d, nnm = m })
end
   table.sort(self.sorted, function(a, b) return a.distance < b.distance end)

   for i, a in ipairs(self.sorted) do self.sorted[i] = a.nnm end
end
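
-- For instance, with edges a -> b, a -> c, b -> d, c -> d (built with
-- the connect method defined below) and a as the input module, the
-- relaxation converges to distance a=1, b=c=2, d=3, so self.sorted
-- lists a first and d last; modules at equal distance (here b and c)
-- may appear in either order, which is harmless since they do not
-- depend on each other.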
-function DAG:computeGradOutput(gradInputSucc)
- local gi
+-- This accumulates x in a, where they are both nested tables of
+-- tensors with same structures / keys. If first is true, set a = x
+-- (in which case a can be nil) otherwise a = a + x. The behavior is
+-- undefined if a and x do not have the exact same structure.
+function DAG:nestedAccTensor(a, x, first)
+ if torch.type(x) == 'table' then
+ local b = {}
+ for i in pairs(x) do
+        b[i] = self:nestedAccTensor(a and a[i], x[i], first) -- a may be nil when first is true
+ end
+ a = b
+ else
+ if first then
+ if a then
+ a:resizeAs(x):copy(x)
+ else
+ a = x:clone()
+ end
+ else
+ a:add(x)
+ end
+ end
+ return a
+end
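
-- A sketch of the two modes (tensor shapes are illustrative):
--
--   local acc
--   acc = dag:nestedAccTensor(acc, { torch.ones(3), torch.ones(3) }, true)
--   acc = dag:nestedAccTensor(acc, { torch.ones(3), torch.ones(3) }, false)
--   -- both entries of acc are now tensors filled with 2: the first
--   -- call copies (or clones), later calls add in place.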
+
+function DAG:updateGradOutput(node)
+ local gradInputSucc = node.gradInputSucc
if #gradInputSucc == 1 then
- gi = gradInputSucc[1] -- we avoid a clone()
+ node.gradOutput = gradInputSucc[1]
elseif #gradInputSucc > 1 then
for k = 1, #gradInputSucc do
- if gi then
- gi:add(gradInputSucc[k])
- else
- gi = gradInputSucc[k]:clone()
- end
+ node.gradOutput = self:nestedAccTensor(node.gradOutput, gradInputSucc[k], k == 1)
end
end
- return gi
end
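
-- For instance, if node.gradInputSucc is { g1, g2, g3 } with tensors
-- of identical shape, node.gradOutput ends up holding g1 + g2 + g3
-- without modifying any of the gi; with a single successor the
-- gradient tensor is reused as is, avoiding a clone.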
----------------------------------------------------------------------
-- Connect a sequence of modules: connect(a, b, c) adds the edges
-- a -> b and b -> c, creating the nodes as needed
function DAG:connect(...)
   self.sorted = nil
   local prev
   for _, nnm in pairs({...}) do
      self:createNode(nnm)
      if prev then
         table.insert(self.node[nnm].pred, prev)
         table.insert(self.node[prev].succ, nnm)
      end
      prev = nnm
   end
end
+function DAG:setLabel(nnm, label)
+ self.node[nnm].label = label
+end
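
-- A construction sketch (the module choices are illustrative):
--
--   local dag = nn.DAG()
--   local f, g, h = nn.Linear(10, 10), nn.ReLU(), nn.Linear(10, 2)
--   dag:connect(f, g, h) -- chains f -> g -> h
--   dag:setLabel(g, 'hidden non-linearity') -- shown by print/saveDot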
+
function DAG:setInput(i)
self.sorted = nil
self.inputModules = i
self:nestedApply(
function(nnm)
- if #self.node[nnm].succ == 0 then
- error('Input modules must have outgoing edges.')
- end
- if #self.node[nnm].pred > 0 then
- error('Input modules cannog have incoming edges.')
- end
+ assert(#self.node[nnm].succ > 0, 'Input modules must have outgoing edges.')
+ assert(#self.node[nnm].pred == 0, 'Input modules cannot have incoming edges.')
end,
self.inputModules
   )
end

function DAG:setOutput(o)
   self.sorted = nil
   self.outputModules = o
self:nestedApply(
function(nnm)
- if #self.node[nnm].pred == 0 then
- error('Output module must have incoming edges.')
- end
- if #self.node[nnm].succ > 0 then
- error('Output module cannot have outgoing edges.')
- end
+ assert(#self.node[nnm].pred > 0, 'Output module must have incoming edges.')
+ assert(#self.node[nnm].succ == 0, 'Output module cannot have outgoing edges.')
end,
self.outputModules
   )
end
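
-- Inputs and outputs can be single modules or nested tables of
-- modules; updateOutput then expects its input tensors in the same
-- structure. Continuing the sketch above (f1, f2, t1, t2 are
-- hypothetical):
--
--   dag:setInput(f)
--   dag:setOutput(h)
--   -- or, for a two-input graph: dag:setInput({ f1, f2 }), in which
--   -- case the forward pass takes { t1, t2 }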
function DAG:print()
   self:putInOrder()
for i, d in ipairs(self.sorted) do
- print('#' .. i .. ' -> ' .. torch.type(d))
+ local decoration = ''
+ if self.node[d].label then
+ decoration = ' [' .. self.node[d].label .. ']'
+ end
+ print('#' .. i .. ' -> ' .. torch.type(d) .. decoration)
end
end
function DAG:saveDot(filename)
local file = (filename and io.open(filename, 'w')) or io.stdout
+ local function writeNestedCluster(prefix, list, indent)
+ local indent = indent or ''
+ if torch.type(list) == 'table' then
+ file:write(indent .. ' subgraph cluster_' .. prefix .. ' {\n');
+ for k, x in pairs(list) do
+ writeNestedCluster(prefix .. '_' .. k, x, ' ' .. indent)
+ end
+ file:write(indent .. ' }\n');
+ else
+ file:write(indent .. ' ' .. self.node[list].index .. ' [color=red]\n')
+ end
+ end
+
file:write('digraph {\n')
file:write('\n')
- for nnma, node in pairs(self.node) do
+ writeNestedCluster('input', self.inputModules)
+ writeNestedCluster('output', self.outputModules)
+
+ file:write('\n')
+
+  for nnmb, node in pairs(self.node) do
file:write(
' '
.. node.index
- .. ' [shape=box,label=\"' .. torch.type(nnma) .. '\"]'
+ .. ' [shape=box,label=\"' .. (self.node[nnmb].label or torch.type(nnmb)) .. '\"]'
.. '\n'
)
- for _, nnmb in pairs(node.succ) do
+ for i, nnma in pairs(node.pred) do
+ local decoration = ''
+ if #node.pred > 1 then
+ -- decoration = ' [headlabel=\"' .. i .. '\"]'
+ decoration = ' [label=\"' .. i .. '\"]'
+ end
file:write(
' '
- .. node.index
+ .. self.node[nnma].index
.. ' -> '
.. self.node[nnmb].index
+ .. decoration
.. '\n'
)
      end
   end

   file:write('}\n')

   file:close()
end
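
-- A sketch of exporting the graph for Graphviz (the file name is
-- illustrative):
--
--   dag:saveDot('graph.dot')
--   -- then render it with: dot -Tpdf -o graph.pdf graph.dot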
function DAG:updateOutput(input)
   self:putInOrder()

   self:nestedApply(
function(nnm, i)
- self.node[nnm].input = i
- -- nnm:updateOutput(i)
- self:rethrowErrors(nnm, self.node[nnm].index, 'updateOutput', i)
+ local node = self.node[nnm]
+ node.input = i
+ self:rethrowErrors(nnm, node.index, 'updateOutput', i)
end,
self.inputModules,
      input
   )

for _, nnm in ipairs(self.sorted) do
local node = self.node[nnm]
- if #node.pred > 0 then
+ local pred = node.pred
+ if #pred > 0 then
local i
- if #node.pred == 1 then
- i = node.pred[1].output
- elseif #node.pred > 1 then
+ if #pred == 1 then
+ i = pred[1].output
+ elseif #pred > 1 then
i = {}
- for k = 1, #node.pred do
- i[k] = node.pred[k].output
+ for k = 1, #pred do
+ i[k] = pred[k].output
end
end
node.input = i
- -- nnm:updateOutput(i)
- self:rethrowErrors(nnm, self.node[nnm].index, 'updateOutput', i)
+ self:rethrowErrors(nnm, node.index, 'updateOutput', i)
end
end
   self.output = self:nestedApply(
      function(m) return m.output end,
      self.outputModules
   )

   return self.output
end
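
-- A minimal forward pass over the sketch above (batch size and
-- dimensions are illustrative):
--
--   local input = torch.randn(5, 10)
--   local output = dag:updateOutput(input) -- i.e. dag:forward(input)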
function DAG:updateGradInput(input, gradOutput)
- assert(self.sorted, 'there has been a DAG structure change before a DAG:updateGradInput')
+ assert(self.sorted, 'There has been a structure change before a DAG:updateGradInput')
self:nestedApply(
function(nnm, go)
- -- nnm:updateGradInput(self.node[nnm].input, go)
- self:rethrowErrors(nnm, self.node[nnm].index, 'updateGradInput', self.node[nnm].input, go)
+ local node = self.node[nnm]
+ node.gradOutput = go
+ self:rethrowErrors(nnm, node.index, 'updateGradInput', node.input, go)
end,
self.outputModules, gradOutput
)

   for _, node in pairs(self.node) do
      node.gradInputSucc = {}
   end

   for k = #self.sorted, 1, -1 do
local nnm = self.sorted[k]
local node = self.node[nnm]
- local pred, gradInputSucc = node.pred, node.gradInputSucc
+ local pred = node.pred
- if #gradInputSucc > 0 then
- node.gradOutput = self:computeGradOutput(gradInputSucc)
- -- nnm:updateGradInput(node.input, node.gradOutput)
- self:rethrowErrors(nnm, self.node[nnm].index, 'updateGradInput', node.input, node.gradOutput)
+ if #node.gradInputSucc > 0 then
+ self:updateGradOutput(node)
+ self:rethrowErrors(nnm, node.index, 'updateGradInput', node.input, node.gradOutput)
end
-- We fill the gradInputSucc of our predecessors
if #pred == 1 then
table.insert(self.node[pred[1]].gradInputSucc, nnm.gradInput)
elseif #pred > 1 then
- if not torch.type(nnm.gradInput) == 'table' then
- error('Should have a table gradInput since it has multiple predecessors')
- end
+ assert(torch.type(nnm.gradInput) == 'table',
+ 'Should have a table gradInput since it has multiple predecessors')
for n = 1, #pred do
- table.insert(self.node[node.pred[n]].gradInputSucc, nnm.gradInput[n])
+ table.insert(self.node[pred[n]].gradInputSucc, nnm.gradInput[n])
end
end
end
- self.gradInput = self:nestedApply(function(m) return m.gradInput end, self.inputModules)
+ self.gradInput = self:nestedApply(
+ function(m) return m.gradInput end,
+ self.inputModules
+ )
return self.gradInput
end
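
-- And the matching backward pass (criterion and target are
-- illustrative):
--
--   local criterion = nn.MSECriterion()
--   criterion:forward(output, target)
--   local gradOutput = criterion:backward(output, target)
--   dag:zeroGradParameters()
--   local gradInput = dag:updateGradInput(input, gradOutput)
--   dag:accGradParameters(input, gradOutput, 1.0)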
function DAG:accGradParameters(input, gradOutput, scale)
-  scale = scale or 1
-  assert(self.sorted, 'there has been a DAG structure change before a DAG:accGradParameters')
+  assert(self.sorted, 'There has been a structure change before a DAG:accGradParameters')
+ self:nestedApply(
+ function(nnm, go) self.node[nnm].gradOutput = go end,
+ self.outputModules, gradOutput
+ )
+
+ self:nestedApply(
+ function(nnm, i) self.node[nnm].input = i end,
+ self.inputModules, input
+ )
for k = 1, #self.modules do
local nnm = self.modules[k]
local node = self.node[nnm]
- -- nnm:accGradParameters(node.input, node.gradOutput, scale)
- self:rethrowErrors(nnm, k, 'accGradParameters', node.input, self:computeGradOutput(node.gradInputSucc), scale)
+ self:rethrowErrors(nnm, k, 'accGradParameters', node.input, node.gradOutput, scale)
+ end
+end
+
+function DAG:clearState()
+ self.sorted = nil
+ for _, node in pairs(self.node) do
+ node.input = nil
+ node.gradInputSucc = nil
+ node.gradOutput = nil
+  end
+  return parent.clearState(self)
+end