From: Francois Fleuret
Date: Thu, 12 Jan 2017 17:00:33 +0000 (+0100)
Subject: Update.
X-Git-Url: https://fleuret.org/cgi-bin/gitweb/gitweb.cgi?p=dagnn.git;a=commitdiff_plain;h=063f198047f0202fa921aa09b772369b14ae8be2

Update.
---

diff --git a/dagnn.lua b/dagnn.lua
index 158ef78..7fc1018 100755
--- a/dagnn.lua
+++ b/dagnn.lua
@@ -32,9 +32,11 @@ end
 function DAG:createNode(nnm)
    if not self.node[nnm] then
       self:add(nnm) -- Add it to the object as a Container
-      self.node[nnm] = {}
-      self.node[nnm].succ = {}
-      self.node[nnm].pred = {}
+      local node = {}
+      node.succ = {}
+      node.pred = {}
+      node.index = #self.modules
+      self.node[nnm] = node
    end
 end
 
@@ -98,14 +100,10 @@ function DAG:putInOrder()
       return
    end
 
-   -- First, we sort the nodes according to the DAG order
-   local distance = {}
-   self:nestedApply(function(m) distance[m] = 1 end, self.inputModules)
    local nc
-
    repeat
       nc = 0
       for nnma, node in pairs(self.node) do
@@ -128,6 +126,22 @@ function DAG:putInOrder()
    for i, a in ipairs(self.sorted) do self.sorted[i] = a.nnm end
 end
 
+function DAG:computeGradOutput(gradInputSucc)
+   local gi
+   if #gradInputSucc == 1 then
+      gi = gradInputSucc[1] -- we avoid a clone()
+   elseif #gradInputSucc > 1 then
+      for k = 1, #gradInputSucc do
+         if gi then
+            gi:add(gradInputSucc[k])
+         else
+            gi = gradInputSucc[k]:clone()
+         end
+      end
+   end
+   return gi
+end
+
 function DAG:print()
    self:putInOrder()
 
@@ -136,13 +150,16 @@ function DAG:print()
    end
 end
 
+----------------------------------------------------------------------
+
 function DAG:updateOutput(input)
    self:putInOrder()
 
    self:nestedApply(
       function(nnm, i)
          self.node[nnm].input = i
-         nnm:updateOutput(i)
+         -- nnm:updateOutput(i)
+         self:rethrowErrors(nnm, self.node[nnm].index, 'updateOutput', i)
       end,
       self.inputModules,
       input
@@ -161,7 +178,8 @@ function DAG:updateOutput(input)
             end
          end
          node.input = i
-         nnm:updateOutput(i)
+         -- nnm:updateOutput(i)
+         self:rethrowErrors(nnm, self.node[nnm].index, 'updateOutput', i)
       end
    end
 
@@ -173,27 +191,14 @@ function DAG:updateOutput(input)
    return self.output
 end
 
-function DAG:computeGradInput(gradInputSucc)
-   local gi
-   if #gradInputSucc == 1 then
-      gi = gradInputSucc[1] -- we avoid a clone()
-   elseif #gradInputSucc > 1 then
-      for k = 1, #gradInputSucc do
-         if gi then
-            gi:add(gradInputSucc[k])
-         else
-            gi = gradInputSucc[k]:clone()
-         end
-      end
-   end
-   return gi
-end
-
 function DAG:updateGradInput(input, gradOutput)
-   self:putInOrder()
+   assert(self.sorted, 'there has been a DAG structure change before a DAG:updateGradInput')
 
    self:nestedApply(
-      function(nnm, go) nnm:updateGradInput(self.node[nnm].input, go) end,
+      function(nnm, go)
+         -- nnm:updateGradInput(self.node[nnm].input, go)
+         self:rethrowErrors(nnm, self.node[nnm].index, 'updateGradInput', self.node[nnm].input, go)
+      end,
       self.outputModules, gradOutput
    )
@@ -212,7 +217,9 @@ function DAG:updateGradInput(input, gradOutput)
       local pred, gradInputSucc = node.pred, node.gradInputSucc
 
       if #gradInputSucc > 0 then
-         nnm:updateGradInput(node.input, self:computeGradInput(gradInputSucc))
+         node.gradOutput = self:computeGradOutput(gradInputSucc)
+         -- nnm:updateGradInput(node.input, node.gradOutput)
+         self:rethrowErrors(nnm, self.node[nnm].index, 'updateGradInput', node.input, node.gradOutput)
       end
 
       -- We fill the gradInputSucc of our predecessors
@@ -236,21 +243,14 @@ end
 function DAG:accGradParameters(input, gradOutput, scale)
    scale = scale or 1
 
-   self:putInOrder()
-
-   self:nestedApply(
-      function(nnm, go) nnm:updateGradInput(self.node[nnm].input, go) end,
-      self.outputModules, gradOutput
-   )
-
-   self:nestedApply(
-      function(nnm, i) self.node[nnm].input = i end,
-      self.inputModules, input
-   )
+   assert(self.sorted, 'there has been a DAG structure change before a DAG:accGradParameters')
 
-   for k = #self.sorted, 1, -1 do
-      local nnm = self.sorted[k]
+   for k = 1, #self.modules do
+      local nnm = self.modules[k]
       local node = self.node[nnm]
-      nnm:accGradParameters(node.input, self:computeGradInput(node.gradInputSucc), scale)
+      -- nnm:accGradParameters(node.input, node.gradOutput, scale)
+      self:rethrowErrors(nnm, k, 'accGradParameters', node.input, self:computeGradOutput(node.gradInputSucc), scale)
    end
 end
+
+----------------------------------------------------------------------
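
A note on the call-order contract this commit introduces: updateGradInput
and accGradParameters no longer invoke putInOrder themselves; they assert
that self.sorted is still valid, i.e. that updateOutput has run since the
last structure change. A minimal usage sketch of the resulting sequence,
assuming the DAG API at this revision already provides connect, setInput
and setOutput as later revisions do; the module sizes are illustrative:

   require 'torch'
   require 'nn'
   require 'dagnn'

   local a = nn.Linear(10, 8)
   local b = nn.ReLU()

   local dag = nn.DAG()
   dag:connect(a, b) -- edge a -> b
   dag:setInput(a)
   dag:setOutput(b)

   local x = torch.randn(10)
   local y = dag:updateOutput(x)     -- sorts the graph, caches node.input

   local dy = torch.randn(8)
   dag:updateGradInput(x, dy)        -- ok: self.sorted is set
   dag:accGradParameters(x, dy, 1.0) -- ok: consumes the cached gradInputSucc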
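
The helper renamed from computeGradInput to computeGradOutput sums the
gradients flowing back from a node's successors, and clones only when
there are at least two of them, so the accumulation never mutates a
successor's tensor. A standalone sketch of the same logic, under a
hypothetical function name, outside the class:

   require 'torch'

   -- Sum a list of gradient tensors the way DAG:computeGradOutput does:
   -- reuse the tensor when there is a single successor, clone before
   -- accumulating when there are several.
   local function sumGradInputSucc(gradInputSucc)
      local gi
      if #gradInputSucc == 1 then
         gi = gradInputSucc[1] -- single successor: no clone needed
      elseif #gradInputSucc > 1 then
         for k = 1, #gradInputSucc do
            if gi then
               gi:add(gradInputSucc[k]) -- accumulate into the private copy
            else
               gi = gradInputSucc[k]:clone() -- clone before mutating
            end
         end
      end
      return gi -- nil when the list is empty
   end

   local g1, g2 = torch.Tensor{1, 2}, torch.Tensor{10, 20}
   print(sumGradInputSucc({g1, g2})) -- 11 22; g1 and g2 are untouched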
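
The node.index field recorded in createNode (the position of the module
in self.modules at the time it is added) lets the nn.Container method
rethrowErrors name the failing module when a wrapped call raises, without
searching self.modules at error time. Only as a sketch of the intent, not
of the actual nn implementation, and with a hypothetical helper name, the
indirection amounts to:

   require 'torch'
   require 'nn'

   -- Run module[funcName] protected and re-raise with the module's index,
   -- roughly what self:rethrowErrors(nnm, index, 'updateOutput', i) does.
   local function callAndRethrow(container, module, index, funcName, ...)
      local ok, result = pcall(module[funcName], module, ...)
      if not ok then
         error(string.format('In %d module of %s: %s',
                             index, torch.type(container), result))
      end
      return result
   end

   -- Illustrative failure: a Linear(10, 8) fed an input of the wrong size
   -- raises, and the message carries the module's index in the container.
   local m = nn.Linear(10, 8)
   local c = nn.Sequential():add(m)
   callAndRethrow(c, m, 1, 'updateOutput', torch.randn(3))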