function DAG:__init()
parent.__init(self)
-- Nodes are indexed by the module they contain
- self.node = { }
+ self.node = {}
+end
+
+-- Apply f on t recursively; use the corresponding elements from args
+-- (i.e. those with the same keys) as the second argument to f when
+-- available; return the results from f, organized in a similarly
+-- nested table.
+function DAG:nestedApply(f, t, args)
+ if torch.type(t) == 'table' then
+ local result = {}
+ for k, s in pairs(t) do
+ result[k] = self:nestedApply(f, s, args and args[k])
+ end
+ return result
+ else
+ return f(t, args)
+ end
end
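-- A quick illustration of nestedApply, not part of the patch: it maps f
-- over an arbitrarily nested table of modules, pairing each leaf with
-- the same-keyed entry of args when one is given. Hypothetical modules,
-- assuming the class is registered as nn.DAG:

local dag = nn.DAG()
local a, b = nn.Linear(10, 10), nn.ReLU()
local names = dag:nestedApply(function(m) return torch.type(m) end, { a, { b } })
-- names is { 'nn.Linear', { 'nn.ReLU' } }, mirroring the nesting of t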
function DAG:createNode(nnm)
@@ ... @@
end
end
-function DAG:addEdge(nnma, nnmb)
- self.sorted = nil
- self:createNode(nnma)
- self:createNode(nnmb)
- table.insert(self.node[nnmb].pred, nnma)
- table.insert(self.node[nnma].succ, nnmb)
+function DAG:putInOrder()
+ if self.sorted then
+ return
+ end
+
+ local distance = {}
+ self:nestedApply(function(m) distance[m] = 1 end, self.inputModules)
+
+ local nc
+ repeat
+ nc = 0
+ for nnma, node in pairs(self.node) do
+ for _, nnmb in pairs(node.succ) do
+ if distance[nnma] and (not distance[nnmb] or distance[nnmb] < distance[nnma] + 1) then
+ distance[nnmb] = distance[nnma] + 1
+ nc = nc + 1
+ end
+ end
+ end
+ until nc == 0
+
+ self.sorted = {}
+ for m, d in pairs(distance) do
+ table.insert(self.sorted, { distance = d, nnm = m })
+ end
+
+ table.sort(self.sorted, function(a, b) return a.distance < b.distance end)
+
+ for i, a in ipairs(self.sorted) do self.sorted[i] = a.nnm end
end
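-- In effect putInOrder is a topological sort: distance[m] is relaxed to
-- the length of the longest path from an input to m, so ordering by it
-- places every module after all of its predecessors. A sketch using the
-- connect helper added further down (hypothetical modules):

local dag = nn.DAG()
local a, b, c = nn.Identity(), nn.Identity(), nn.CAddTable()
dag:connect(a, b, c)   -- a -> b -> c
dag:connect(a, c)      -- a -> c
dag:setInput(a)
dag:putInOrder()       -- distances: a=1, b=2, c=3, so self.sorted is { a, b, c }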
--- Apply f on t recursively; use the corresponding element from args
--- (i.e. same keys) as second parameter to f when available; return
--- the results from f, organized in a similarly nested table.
-function DAG:nestedApply(f, t, args)
- if torch.type(t) == 'table' then
- local result = {}
- for k, s in pairs(t) do
- result[k] = self:nestedApply(f, s, args and args[k])
+-- Accumulate x into a, where both are similarly nested tables of
+-- tensors. If first is true, set a to the value of x.
+function DAG:nestedAccTensor(a, x, first)
+ if torch.type(x) == 'table' then
+ a = a or {}
+ for i in pairs(x) do
+ a[i] = self:nestedAccTensor(a[i], x[i], first)
end
- return result
else
- return f(t, args)
+ if first then
+ if a then
+ a:resizeAs(x):copy(x)
+ else
+ a = x:clone()
+ end
+ else
+ a:add(x)
+ end
+ end
+ return a
+end
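-- A sketch of the accumulation semantics, not part of the patch: when
-- first is true the target takes the value of x (resized and copied, or
-- cloned when absent), afterwards x is added in place:

local dag = nn.DAG()
local acc
acc = dag:nestedAccTensor(acc, { torch.ones(3), { torch.ones(3) } }, true)
acc = dag:nestedAccTensor(acc, { torch.ones(3), { torch.ones(3) } }, false)
-- every tensor in acc now holds twos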
+
+function DAG:updateGradOutput(node)
+ local gradInputSucc = node.gradInputSucc
+ if #gradInputSucc == 1 then
+ node.gradOutput = gradInputSucc[1]
+ elseif #gradInputSucc > 1 then
+ for k = 1, #gradInputSucc do
+ node.gradOutput = self:nestedAccTensor(node.gradOutput, gradInputSucc[k], k == 1)
+ end
+ end
+end
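-- Note that with a single successor the node reuses that gradient
-- tensor directly, avoiding a copy, while with several of them the
-- k == 1 pass copies and the subsequent passes accumulate in place.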
+
+----------------------------------------------------------------------
+
+-- Connect a sequence of modules: add an edge between every two
+-- successive arguments.
+function DAG:connect(...)
+ self.sorted = nil
+ local prev
+ for _, nnm in pairs({...}) do
+ self:createNode(nnm)
+ if prev then
+ table.insert(self.node[nnm].pred, prev)
+ table.insert(self.node[prev].succ, nnm)
+ end
+ prev = nnm
end
end
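-- A wiring sketch, not part of the patch (hypothetical modules; assumes
-- the nn.DAG registration and a setOutput symmetric to the setInput
-- checked below):

local dag = nn.DAG()
local a, b, c, d = nn.Linear(10, 10), nn.ReLU(), nn.Linear(10, 10), nn.CAddTable()
dag:connect(a, b, d)   -- a -> b -> d
dag:connect(a, c, d)   -- a -> c -> d, so d will receive { b.output, c.output }
dag:setInput(a)
dag:setOutput(d)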
@@ ... @@
error('Input modules must have outgoing edges.')
end
if #self.node[nnm].pred > 0 then
- error('Input modules cannog have incoming edges.')
+ error('Input modules cannot have incoming edges.')
end
end,
self.inputModules
)
end
-function DAG:putInOrder()
- if self.sorted then
- return
+function DAG:print()
+ self:putInOrder()
+
+ for i, d in ipairs(self.sorted) do
+ print('#' .. i .. ' -> ' .. torch.type(d))
end
+end
- local distance = {}
- self:nestedApply(function(m) distance[m] = 1 end, self.inputModules)
+----------------------------------------------------------------------
- local nc
- repeat
- nc = 0
- for nnma, node in pairs(self.node) do
- for _, nnmb in pairs(node.succ) do
- if distance[nnma] and (not distance[nnmb] or distance[nnmb] < distance[nnma] + 1) then
- distance[nnmb] = distance[nnma] + 1
- nc = nc + 1
- end
- end
- end
- until nc == 0
+function DAG:saveDot(filename)
+ local file = (filename and io.open(filename, 'w')) or io.stdout
- self.sorted = { }
- for m, d in pairs(distance) do
- table.insert(self.sorted, { distance = d, nnm = m })
- end
+ file:write('digraph {\n')
- table.sort(self.sorted, function(a, b) return a.distance < b.distance end)
+ file:write('\n')
- for i, a in ipairs(self.sorted) do self.sorted[i] = a.nnm end
-end
+ for nnmb, node in pairs(self.node) do
+ file:write(
+ ' '
+ .. node.index
+ .. ' [shape=box,label=\"' .. torch.type(nnmb) .. '\"]'
+ .. '\n'
+ )
-function DAG:computeGradOutput(gradInputSucc)
- local gi
- if #gradInputSucc == 1 then
- gi = gradInputSucc[1] -- we avoid a clone()
- elseif #gradInputSucc > 1 then
- for k = 1, #gradInputSucc do
- if gi then
- gi:add(gradInputSucc[k])
- else
- gi = gradInputSucc[k]:clone()
+ for i, nnma in pairs(node.pred) do
+ local decoration = ''
+ if #node.pred > 1 then
+ -- decoration = ' [headlabel=\"' .. i .. '\"]'
+ decoration = ' [label=\"' .. i .. '\"]'
end
+ file:write(
+ ' '
+ .. self.node[nnma].index
+ .. ' -> '
+ .. self.node[nnmb].index
+ .. decoration
+ .. '\n'
+ )
end
+
+ file:write('\n')
end
- return gi
-end
-function DAG:print()
- self:putInOrder()
+ file:write('}\n')
- for i, d in ipairs(self.sorted) do
- print('#' .. i .. ' -> ' .. torch.type(d))
- end
end
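-- saveDot emits standard Graphviz syntax, one box per module and one
-- edge per dependency, labeling incoming edges by position when a
-- module has several predecessors. Continuing the sketch above:

dag:saveDot('dag.dot')
-- then render from a shell with: dot -Tpdf dag.dot -o dag.pdf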
----------------------------------------------------------------------
@@ ... @@
self:nestedApply(
function(nnm, i)
self.node[nnm].input = i
- -- nnm:updateOutput(i)
self:rethrowErrors(nnm, self.node[nnm].index, 'updateOutput', i)
end,
self.inputModules,
@@ ... @@
end
end
node.input = i
- -- nnm:updateOutput(i)
self:rethrowErrors(nnm, self.node[nnm].index, 'updateOutput', i)
end
end
@@ ... @@
end
function DAG:updateGradInput(input, gradOutput)
- assert(self.sorted, 'there has been a DAG structure change before a DAG:updateGradInput')
+ assert(self.sorted, 'There has been a DAG structure change before a DAG:updateGradInput')
self:nestedApply(
function(nnm, go)
- -- nnm:updateGradInput(self.node[nnm].input, go)
- self:rethrowErrors(nnm, self.node[nnm].index, 'updateGradInput', self.node[nnm].input, go)
+ local node = self.node[nnm]
+ node.gradOutput = go
+ self:rethrowErrors(nnm, node.index, 'updateGradInput', node.input, go)
end,
self.outputModules, gradOutput
)
@@ ... @@
for k = #self.sorted, 1, -1 do
local nnm = self.sorted[k]
local node = self.node[nnm]
- local pred, gradInputSucc = node.pred, node.gradInputSucc
+ local pred = node.pred
- if #gradInputSucc > 0 then
- node.gradOutput = self:computeGradOutput(gradInputSucc)
- -- nnm:updateGradInput(node.input, node.gradOutput)
+ if #node.gradInputSucc > 0 then
+ self:updateGradOutput(node)
self:rethrowErrors(nnm, self.node[nnm].index, 'updateGradInput', node.input, node.gradOutput)
end
@@ ... @@
function DAG:accGradParameters(input, gradOutput, scale)
scale = scale or 1
- assert(self.sorted, 'there has been a DAG structure change before a DAG:accGradParameters')
+ assert(self.sorted, 'There has been a DAG structure change before a DAG:accGradParameters')
+
+ self:nestedApply(
+ function(nnm, go) self.node[nnm].gradOutput = go end,
+ self.outputModules, gradOutput
+ )
+
+ self:nestedApply(
+ function(nnm, i) self.node[nnm].input = i end,
+ self.inputModules, input
+ )
for k = 1, #self.modules do
local nnm = self.modules[k]
local node = self.node[nnm]
- -- nnm:accGradParameters(node.input, node.gradOutput, scale)
- self:rethrowErrors(nnm, k, 'accGradParameters', node.input, self:computeGradOutput(node.gradInputSucc), scale)
+ self:rethrowErrors(nnm, k, 'accGradParameters', node.input, node.gradOutput, scale)
end
end
-----------------------------------------------------------------------
+function DAG:clearState()
+ self.sorted = nil
+ for _, node in pairs(self.node) do
+ node.gradInputSucc = nil
+ node.input = nil
+ node.gradOutput = nil
+ end
+ return parent.clearState(self)
+end
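-- An end-to-end sketch of one training step with the pieces above, not
-- part of the patch (hypothetical sizes; assumes this DAG module is
-- loaded, registered as nn.DAG, and provides a setOutput symmetric to
-- setInput):

require 'nn'

local dag = nn.DAG()
local a, b, c = nn.Linear(10, 10), nn.Tanh(), nn.Linear(10, 2)
dag:connect(a, b, c)
dag:setInput(a)
dag:setOutput(c)

local x = torch.randn(5, 10)
local y = dag:updateOutput(x)   -- forward, following self.sorted
local dy = y:clone():fill(1)    -- a stand-in gradient w.r.t. the output
dag:updateGradInput(x, dy)      -- backward, in reverse topological order
dag:accGradParameters(x, dy)    -- accumulate into the weight gradients
dag:clearState()                -- drop cached activations when done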