end
local distance = {}
- self:nestedApply(function(m) distance[m] = 1 end, self.inputModules)
+ self:nestedApply(
+ function(m) distance[m] = 1 end,
+ self.inputModules
+ )
local nc
+ local nl = 0
repeat
nc = 0
for nnma, node in pairs(self.node) do
end
end
end
+ assert(nl < #self.modules, 'Cycle detected in the graph.')
+ nl = nl + 1
until nc == 0
for _, nnm in pairs(self.modules) do
for i, a in ipairs(self.sorted) do self.sorted[i] = a.nnm end
end
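-- Illustrative sketch, not part of the patch: why the new
-- assert(nl < #self.modules, ...) trips on a cycle. The relaxation pass
-- below is a hypothetical stand-alone reduction of what putInOrder does
-- (the exact loop body is not shown in this hunk); in an acyclic graph
-- the distances stabilize within #modules passes, so a cycle keeps
-- nc > 0 forever and the assert fires.
local succ = { a = { 'b' }, b = { 'a' } }   -- hypothetical 2-node cycle
local modules = { 'a', 'b' }
local distance = { a = 1 }
local ok, err = pcall(function()
   local nc
   local nl = 0
   repeat
      nc = 0
      for x, l in pairs(succ) do
         for _, y in ipairs(l) do
            if distance[x] and (not distance[y] or distance[y] < distance[x] + 1) then
               distance[y] = distance[x] + 1
               nc = nc + 1
            end
         end
      end
      assert(nl < #modules, 'Cycle detected in the graph.')
      nl = nl + 1
   until nc == 0
end)
print(ok, err)   -- false, '... Cycle detected in the graph.'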
--- This accumulates x in a where they are both nested tables of
--- tensors. If first is true, set a = x. Behavior is undefined if a
--- and x do not have the exact same structure.
+-- This accumulates x in a, where they are both nested tables of
+-- tensors with the same structure / keys. If first is true, set a = x
+-- (in which case a can be nil); otherwise set a = a + x. The behavior
+-- is undefined if a and x do not have the exact same structure.
function DAG:nestedAccTensor(a, x, first)
if torch.type(x) == 'table' then
local b = {}
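-- Illustrative sketch, not part of the patch: the accumulation semantics
-- described in the comment above, with plain numbers standing in for
-- tensors. 'acc' is a hypothetical stand-alone analogue of
-- DAG:nestedAccTensor, not the method itself.
local function acc(a, x, first)
   if type(x) == 'table' then
      local b = first and {} or a
      for k, v in pairs(x) do b[k] = acc(b[k], v, first) end
      return b
   else
      if first then return x else return a + x end
   end
end
local a = acc(nil, { dx = 1.0, extra = { 0.5 } }, true)    -- first: a = x
a = acc(a, { dx = 2.0, extra = { 0.25 } }, false)          -- then: a = a + x
print(a.dx, a.extra[1])   -- 3   0.75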
for _, nnm in ipairs(self.sorted) do
local node = self.node[nnm]
- if #node.pred > 0 then
+ local pred = node.pred
+ if #pred > 0 then
local i
- if #node.pred == 1 then
- i = node.pred[1].output
- elseif #node.pred > 1 then
+ if #pred == 1 then
+ i = pred[1].output
+ elseif #pred > 1 then
i = {}
- for k = 1, #node.pred do
- i[k] = node.pred[k].output
+ for k = 1, #pred do
+ i[k] = pred[k].output
end
end
node.input = i
end
function DAG:updateGradInput(input, gradOutput)
- assert(self.sorted, 'There has been a DAG structure change before a DAG:updateGradInput')
+ assert(self.sorted, 'There has been a structure change before a DAG:updateGradInput')
self:nestedApply(
function(nnm, go)
assert(torch.type(nnm.gradInput) == 'table',
'Should have a table gradInput since it has multiple predecessors')
for n = 1, #pred do
- table.insert(self.node[node.pred[n]].gradInputSucc, nnm.gradInput[n])
+ table.insert(self.node[pred[n]].gradInputSucc, nnm.gradInput[n])
end
end
end
- self.gradInput = self:nestedApply(function(m) return m.gradInput end, self.inputModules)
+ self.gradInput = self:nestedApply(
+ function(m) return m.gradInput end,
+ self.inputModules
+ )
return self.gradInput
end
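-- Illustrative sketch, not part of the patch: how the loop above routes a
-- table gradInput to multiple predecessors, entry n going to pred[n]. The
-- tables p1/p2 are hypothetical stand-ins for the self.node records of the
-- two predecessors, and plain strings stand in for gradient tensors.
local p1, p2 = { gradInputSucc = {} }, { gradInputSucc = {} }
local pred = { p1, p2 }
local nnm = { gradInput = { 'grad wrt pred 1', 'grad wrt pred 2' } }
assert(type(nnm.gradInput) == 'table',
   'Should have a table gradInput since it has multiple predecessors')
for n = 1, #pred do
   table.insert(pred[n].gradInputSucc, nnm.gradInput[n])
end
print(p1.gradInputSucc[1], p2.gradInputSucc[1])
-- grad wrt pred 1    grad wrt pred 2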
function DAG:accGradParameters(input, gradOutput, scale)
- assert(self.sorted, 'There has been a DAG structure change before a DAG:accGradParameters')
+ assert(self.sorted, 'There has been a structure change before a DAG:accGradParameters')
self:nestedApply(
function(nnm, go) self.node[nnm].gradOutput = go end,
function DAG:clearState()
self.sorted = nil
for _, node in pairs(self.node) do
- node.gradInputSucc = nil
node.input = nil
+ node.gradInputSucc = nil
node.gradOutput = nil
end
return parent.clearState(self)