From d3b0a00b9f46d4ef147e8d52b9d02ebdf78ce9d3 Mon Sep 17 00:00:00 2001
From: Francois Fleuret
Date: Fri, 13 Jan 2017 22:54:37 +0100
Subject: [PATCH] Now deals with the case where a module whose input is a
 table has multiple successors.

---
 dagnn.lua | 45 ++++++++++++++++++++++++++++++++++++---------
 1 file changed, 36 insertions(+), 9 deletions(-)

diff --git a/dagnn.lua b/dagnn.lua
index de9d29b..0e7e8b0 100755
--- a/dagnn.lua
+++ b/dagnn.lua
@@ -26,7 +26,7 @@ local DAG, parent = torch.class('nn.DAG', 'nn.Container')
 function DAG:__init()
    parent.__init(self)
    -- Nodes are indexed by the module they contain
-   self.node = { }
+   self.node = {}
 end
 
 -- Apply f on t recursively; use the corresponding elements from args
@@ -76,7 +76,7 @@ function DAG:putInOrder()
       end
    until nc == 0
 
-   self.sorted = { }
+   self.sorted = {}
    for m, d in pairs(distance) do
      table.insert(self.sorted, { distance = d, nnm = m })
    end
@@ -86,18 +86,35 @@ function DAG:putInOrder()
    for i, a in ipairs(self.sorted) do self.sorted[i] = a.nnm end
 end
 
+-- This accumulates x into a, where both are nested tables of
+-- tensors. If first is true, a is set to a copy of x.
+function DAG:nestedAccTensor(a, x, first)
+   if torch.type(x) == 'table' then
+      a = a or {}
+      for i in pairs(x) do
+         a[i] = self:nestedAccTensor(a[i], x[i], first)
+      end
+   else
+      if first then
+         if a then
+            a:resizeAs(x):copy(x)
+         else
+            a = x:clone()
+         end
+      else
+         a:add(x)
+      end
+   end
+   return a
+end
+
 function DAG:updateGradOutput(node)
    local gradInputSucc = node.gradInputSucc
    if #gradInputSucc == 1 then
       node.gradOutput = gradInputSucc[1]
    elseif #gradInputSucc > 1 then
-      if node.gradOutput then
-         node.gradOutput:resize(gradInputSucc[1]):copy(gradInputSucc[1])
-      else
-         node.gradOutput = gradInputSucc[1]:clone()
-      end
-      for k = 2, #gradInputSucc do
-         node.gradOutput:add(gradInputSucc[k])
+      for k = 1, #gradInputSucc do
+         node.gradOutput = self:nestedAccTensor(node.gradOutput, gradInputSucc[k], k == 1)
       end
    end
 end
@@ -307,3 +324,13 @@ function DAG:accGradParameters(input, gradOutput, scale)
       self:rethrowErrors(nnm, k, 'accGradParameters', node.input, node.gradOutput, scale)
    end
 end
+
+function DAG:clearState()
+   self.sorted = nil
+   for _, node in pairs(self.node) do
+      node.gradInputSucc = nil
+      node.input = nil
+      node.gradOutput = nil
+   end
+   return parent.clearState(self)
+end
-- 
2.20.1
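
Illustration (not part of the patch): a minimal standalone sketch of what the
new accumulation computes, with the patched logic copied out of the DAG class
and the gradient tables made up for the example. Each gradInputSucc entry here
is a table of tensors; the first one is copied, the following ones are added
in place, entry by entry.

require 'torch'

-- Standalone copy of the patched nestedAccTensor, for illustration only.
local function nestedAccTensor(a, x, first)
   if torch.type(x) == 'table' then
      a = a or {}
      for i in pairs(x) do
         a[i] = nestedAccTensor(a[i], x[i], first)
      end
   else
      if first then
         if a then
            a:resizeAs(x):copy(x)
         else
            a = x:clone()
         end
      else
         a:add(x)
      end
   end
   return a
end

-- Two table-shaped gradients coming from two hypothetical successors.
local g1 = { torch.Tensor{1, 2}, torch.Tensor{3, 4} }
local g2 = { torch.Tensor{10, 20}, torch.Tensor{30, 40} }

local acc
acc = nestedAccTensor(acc, g1, true)   -- k == 1: copy
acc = nestedAccTensor(acc, g2, false)  -- k > 1: accumulate
print(acc[1])  -- 11  22
print(acc[2])  -- 33  44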
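
Usage sketch (also not part of the patch), assuming the connect/setInput/
setOutput API of this dagnn.lua and that the file is loadable with
require 'dagnn'. It builds the situation the subject line describes: split
outputs a table of two tensors and has two successors, so during the backward
pass its gradOutput is the accumulation of two table-shaped gradients, which
is exactly the path nestedAccTensor now handles.

require 'torch'
require 'nn'
require 'dagnn'

-- split outputs { x, x }; both sum1 and sum2 consume that table.
local split = nn.ConcatTable():add(nn.Identity()):add(nn.Identity())
local sum1, sum2, join = nn.CAddTable(), nn.CAddTable(), nn.CAddTable()

local dag = nn.DAG()
dag:connect(split, sum1, join)
dag:connect(split, sum2, join)
dag:setInput(split)
dag:setOutput(join)

local x = torch.randn(5)
local y = dag:forward(x)        -- y == 4 * x
dag:backward(x, torch.ones(5))  -- exercises the new table-gradient accumulation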