-- Accumulate x into a, where both are nested tables of tensors with
-- the same structure / keys. If first is true, set a = x (in which
-- case a can be nil), otherwise a = a + x. The behavior is undefined
-- if a and x do not have the exact same structure.
--
-- Returns the accumulator (a fresh table for table-valued x, or the
-- tensor written into / cloned for tensor-valued x).
function DAG:nestedAccTensor(a, x, first)
   if torch.type(x) == 'table' then
      local b = {}
      for i in pairs(x) do
         -- a may be nil on the first accumulation (as documented
         -- above); guard the index so the nil-a contract also holds
         -- for nested tables instead of erroring on a[i].
         b[i] = self:nestedAccTensor(a and a[i], x[i], first)
      end
      a = b
   else
      if first then
         if a then
            -- Reuse the existing storage when we have one.
            a:resizeAs(x):copy(x)
         else
            a = x:clone()
         end
      else
         a:add(x)
      end
   end
   return a
end
+
-- Compute node.gradOutput from the gradients fed back by the node's
-- successors (node.gradInputSucc).
function DAG:updateGradOutput(node)
   local succ = node.gradInputSucc
   local n = #succ
   if n == 1 then
      -- A single successor: alias its gradient directly, no copy.
      node.gradOutput = succ[1]
   elseif n > 1 then
      -- Multiple successors: copy the first gradient into the
      -- accumulator, then add the rest in place.
      local acc = node.gradOutput
      for k = 1, n do
         acc = self:nestedAccTensor(acc, succ[k], k == 1)
      end
      node.gradOutput = acc
   end
end
+
+----------------------------------------------------------------------
+
-- Connect a sequence of modules: every module in the argument list is
-- registered as a node (if not already), and an edge is added from
-- each module to the next one in the list.
function DAG:connect(...)
   self.sorted = nil
   local prev
   -- Iterate the varargs with select() rather than pairs({...}):
   -- pairs does not guarantee iteration order, and the prev -> next
   -- chaining below depends on the argument order.
   for i = 1, select('#', ...) do
      local nnm = select(i, ...)
      self:createNode(nnm)
      if prev then
         table.insert(self.node[nnm].pred, prev)
         table.insert(self.node[prev].succ, nnm)
      end
      prev = nnm
   end
end
+
-- Attach a human-readable label to the node wrapping module nnm
-- (nnm must already be registered in self.node).
function DAG:setLabel(nnm, label)
   self.node[nnm].label = label
end
+
-- Declare the input modules of the graph (i may be a nested table of
-- modules) and validate that each one is a legal source node.
function DAG:setInput(i)
   self.sorted = nil
   self.inputModules = i

   -- An input node must feed somebody and must not be fed by anybody.
   local function checkInputNode(nnm)
      local node = self.node[nnm]
      assert(#node.succ > 0, 'Input modules must have outgoing edges.')
      assert(#node.pred == 0, 'Input modules cannot have incoming edges.')
   end

   self:nestedApply(checkInputNode, self.inputModules)
end
+
-- Declare the output modules of the graph (o may be a nested table of
-- modules) and validate that each one is a legal sink node.
function DAG:setOutput(o)
   self.sorted = nil
   self.outputModules = o

   -- An output node must be fed by somebody and must not feed anybody.
   local function checkOutputNode(nnm)
      local node = self.node[nnm]
      assert(#node.pred > 0, 'Output module must have incoming edges.')
      assert(#node.succ == 0, 'Output module cannot have outgoing edges.')
   end

   self:nestedApply(checkOutputNode, self.outputModules)
end
+