function DAG:__init()
parent.__init(self)
-- Nodes are indexed by the module they contain
- self.node = { }
+ self.node = {}
end
--- Apply f on t recursively; use the corresponding element from args
+-- Apply f on t recursively; use the corresponding elements from args
-- (i.e. same keys) as second parameter to f when available; return
-- the results from f, organized in a similarly nested table.
function DAG:nestedApply(f, t, args)
end
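+-- For instance, with hypothetical modules a, b, c and tensors x, y, z,
+-- self:nestedApply(f, { a, { b, c } }, { x, { y, z } }) returns
+-- { f(a, x), { f(b, y), f(c, z) } }.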
until nc == 0
- self.sorted = { }
+ self.sorted = {}
for m, d in pairs(distance) do
table.insert(self.sorted, { distance = d, nnm = m })
end
for i, a in ipairs(self.sorted) do self.sorted[i] = a.nnm end
end
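+-- self.sorted thus lists the modules by increasing distance from the
+-- input modules, i.e. in a topological order of the DAG.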
-function DAG:computeGradOutput(gradInputSucc)
- local gi
+-- Accumulate x into a, where both are nested tables of tensors with
+-- the same structure. If first is true, a is set to a copy of x
+-- instead.
+function DAG:nestedAccTensor(a, x, first)
+ if torch.type(x) == 'table' then
+ a = a or {}
+ for i in pairs(x) do
+ a[i] = self:nestedAccTensor(a[i], x[i], first)
+ end
+ else
+ if first then
+ if a then
+ a:resizeAs(x):copy(x)
+ else
+ a = x:clone()
+ end
+ else
+ a:add(x)
+ end
+ end
+ return a
+end
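+-- For instance, with hypothetical gradients g1 and g2 of identical
+-- nested structure:
+--   acc = self:nestedAccTensor(acc, g1, true)  -- acc holds a copy of g1
+--   acc = self:nestedAccTensor(acc, g2, false) -- acc now holds g1 + g2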
+
+function DAG:updateGradOutput(node)
+ local gradInputSucc = node.gradInputSucc
if #gradInputSucc == 1 then
- gi = gradInputSucc[1] -- we avoid a clone()
+ node.gradOutput = gradInputSucc[1]
elseif #gradInputSucc > 1 then
for k = 1, #gradInputSucc do
- if gi then
- gi:add(gradInputSucc[k])
- else
- gi = gradInputSucc[k]:clone()
- end
+ node.gradOutput = self:nestedAccTensor(node.gradOutput, gradInputSucc[k], k == 1)
end
end
- return gi
end
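+-- A node with a single successor borrows that successor's gradInput
+-- directly, avoiding a clone; with several successors, e.g.
+-- gradInputSucc = { g1, g2 }, node.gradOutput ends up as g1 + g2,
+-- accumulated element-wise down the nested structure.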
----------------------------------------------------------------------
file:write('\n')
- for nnma, node in pairs(self.node) do
+ for nnmb, node in pairs(self.node) do
file:write(
' '
.. node.index
- .. ' [shape=box,label=\"' .. torch.type(nnma) .. '\"]'
+ .. ' [shape=box,label=\"' .. torch.type(nnmb) .. '\"]'
.. '\n'
)
- for _, nnmb in pairs(node.succ) do
+ for i, nnma in pairs(node.pred) do
+ local decoration = ''
+ if #node.pred > 1 then
+ -- decoration = ' [headlabel=\"' .. i .. '\"]'
+ decoration = ' [label=\"' .. i .. '\"]'
+ end
file:write(
' '
- .. node.index
+ .. self.node[nnma].index
.. ' -> '
.. self.node[nnmb].index
+ .. decoration
.. '\n'
)
end
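+-- For a node with two predecessors, the emitted Graphviz fragment
+-- looks roughly like this (hypothetical indices):
+--   3 [shape=box,label="nn.CAddTable"]
+--   1 -> 3 [label="1"]
+--   2 -> 3 [label="2"]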
self:nestedApply(
function(nnm, i)
self.node[nnm].input = i
- -- nnm:updateOutput(i)
self:rethrowErrors(nnm, self.node[nnm].index, 'updateOutput', i)
end,
self.inputModules,
end
end
node.input = i
- -- nnm:updateOutput(i)
self:rethrowErrors(nnm, self.node[nnm].index, 'updateOutput', i)
end
end
end
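+-- Minimal forward-pass sketch, assuming the connect / setInput /
+-- setOutput API of this class:
+--   local dag = nn.DAG()
+--   local a, b = nn.Linear(10, 10), nn.ReLU()
+--   dag:connect(a, b)
+--   dag:setInput(a)
+--   dag:setOutput(b)
+--   local output = dag:updateOutput(torch.randn(10))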
function DAG:updateGradInput(input, gradOutput)
- assert(self.sorted, 'there has been a DAG structure change before a DAG:updateGradInput')
+ assert(self.sorted, 'There has been a DAG structure change before a DAG:updateGradInput')
self:nestedApply(
function(nnm, go)
- -- nnm:updateGradInput(self.node[nnm].input, go)
- self:rethrowErrors(nnm, self.node[nnm].index, 'updateGradInput', self.node[nnm].input, go)
+ local node = self.node[nnm]
+ node.gradOutput = go
+ self:rethrowErrors(nnm, node.index, 'updateGradInput', node.input, go)
end,
self.outputModules, gradOutput
)
for k = #self.sorted, 1, -1 do
local nnm = self.sorted[k]
local node = self.node[nnm]
- local pred, gradInputSucc = node.pred, node.gradInputSucc
+ local pred = node.pred
- if #gradInputSucc > 0 then
- node.gradOutput = self:computeGradOutput(gradInputSucc)
- -- nnm:updateGradInput(node.input, node.gradOutput)
+ if #node.gradInputSucc > 0 then
+ self:updateGradOutput(node)
self:rethrowErrors(nnm, self.node[nnm].index, 'updateGradInput', node.input, node.gradOutput)
end
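+-- E.g. for a diamond a -> { b, c } -> d, self.sorted is e.g.
+-- { a, b, c, d }, so the backward sweep handles d first, then b and c
+-- (each pushing a gradInput into a's gradInputSucc), and a last, once
+-- its gradOutput can be fully accumulated.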
function DAG:accGradParameters(input, gradOutput, scale)
scale = scale or 1
- assert(self.sorted, 'there has been a DAG structure change before a DAG:accGradParameters')
+ assert(self.sorted, 'There has been a DAG structure change before a DAG:accGradParameters')
+
+ self:nestedApply(
+ function(nnm, go) self.node[nnm].gradOutput = go end,
+ self.outputModules, gradOutput
+ )
+
+ self:nestedApply(
+ function(nnm, i) self.node[nnm].input = i end,
+ self.inputModules, input
+ )
for k = 1, #self.modules do
local nnm = self.modules[k]
local node = self.node[nnm]
- -- nnm:accGradParameters(node.input, node.gradOutput, scale)
- self:rethrowErrors(nnm, k, 'accGradParameters', node.input, self:computeGradOutput(node.gradInputSucc), scale)
+ self:rethrowErrors(nnm, k, 'accGradParameters', node.input, node.gradOutput, scale)
+ end
+end
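+-- Typical training step following the usual nn.Module protocol
+-- (hypothetical input x and loss gradient g):
+--   dag:zeroGradParameters()
+--   dag:updateOutput(x)
+--   dag:updateGradInput(x, g)
+--   dag:accGradParameters(x, g)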
+
+function DAG:clearState()
+ self.sorted = nil
+ for _, node in pairs(self.node) do
+ node.gradInputSucc = nil
+ node.input = nil
+ node.gradOutput = nil
end
+ return parent.clearState(self)
end
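+
+-- Dropping self.sorted forces the topological order to be recomputed
+-- on the next forward pass.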