--[[

Copyright (c) 2016 Idiap Research Institute, http://www.idiap.ch/
Written by Francois Fleuret <francois.fleuret@idiap.ch>

This file is free software: you can redistribute it and/or modify
it under the terms of the GNU General Public License version 3 as
published by the Free Software Foundation.

It is distributed in the hope that it will be useful, but WITHOUT
ANY WARRANTY; without even the implied warranty of MERCHANTABILITY
or FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public
License for more details.

You should have received a copy of the GNU General Public License
along with this file. If not, see <http://www.gnu.org/licenses/>.

]]--

require 'torch'
require 'nn'

local DAG, parent = torch.class('nn.DAG', 'nn.Container')

function DAG:__init()
   parent.__init(self)
   -- Nodes are indexed by the module they contain
   self.node = {}
end

function DAG:createNode(nnm)
   if not self.node[nnm] then
      self:add(nnm) -- Add it to the object as a Container
      local node = {}
      node.succ = {}
      node.pred = {}
      node.index = #self.modules
      self.node[nnm] = node
   end
end

function DAG:addEdge(nnma, nnmb)
   self.sorted = nil
   self:createNode(nnma)
   self:createNode(nnmb)
   table.insert(self.node[nnmb].pred, nnma)
   table.insert(self.node[nnma].succ, nnmb)
end
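
-- For illustration, a small hypothetical graph could be wired as
-- follows (the modules a, b, c are made up for the example and are
-- reused in the sketches below):
--
--   local dag = nn.DAG()
--   local a, b, c = nn.Linear(10, 10), nn.Tanh(), nn.Tanh()
--   dag:addEdge(a, b) -- a feeds b
--   dag:addEdge(a, c) -- a also feeds c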

-- Apply f on t recursively; use the corresponding element from args
-- (i.e. same keys) as second parameter to f when available; return
-- the results from f, organized in a similarly nested table.
function DAG:nestedApply(f, t, args)
   if torch.type(t) == 'table' then
      local result = {}
      for k, s in pairs(t) do
         result[k] = self:nestedApply(f, s, args and args[k])
      end
      return result
   else
      return f(t, args)
   end
end
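
-- A sketch of the intended behavior, with f doubling its argument:
--
--   dag:nestedApply(function(x) return 2 * x end, { 1, { 2, 3 } })
--   -- returns { 2, { 4, 6 } }, mirroring the nesting of the input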

function DAG:setInput(i)
   self.sorted = nil
   self.inputModules = i
   self:nestedApply(
      function(nnm)
         if #self.node[nnm].succ == 0 then
            error('Input modules must have outgoing edges.')
         end
         if #self.node[nnm].pred > 0 then
            error('Input modules cannot have incoming edges.')
         end
      end,
      self.inputModules
   )
end

function DAG:setOutput(o)
   self.sorted = nil
   self.outputModules = o
   self:nestedApply(
      function(nnm)
         if #self.node[nnm].pred == 0 then
            error('Output modules must have incoming edges.')
         end
         if #self.node[nnm].succ > 0 then
            error('Output modules cannot have outgoing edges.')
         end
      end,
      self.outputModules
   )
end
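
-- Inputs and outputs can be single modules or nested tables of
-- modules; that structure dictates the structure of the tensors
-- exchanged with forward and backward. Continuing the hypothetical
-- graph above:
--
--   dag:setInput(a)
--   dag:setOutput({ b, c }) -- forward will return { b.output, c.output }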

function DAG:putInOrder()
   if self.sorted then
      return
   end

   -- Compute for every module the length of the longest path from an
   -- input with a fixed-point iteration; sorting by this distance
   -- gives a valid topological order
   local distance = {}
   self:nestedApply(function(m) distance[m] = 1 end, self.inputModules)

   local nc
   repeat
      nc = 0
      for nnma, node in pairs(self.node) do
         for _, nnmb in pairs(node.succ) do
            if distance[nnma] and (not distance[nnmb] or distance[nnmb] < distance[nnma] + 1) then
               distance[nnmb] = distance[nnma] + 1
               nc = nc + 1
            end
         end
      end
   until nc == 0

   self.sorted = {}
   for m, d in pairs(distance) do
      table.insert(self.sorted, { distance = d, nnm = m })
   end

   table.sort(self.sorted, function(a, b) return a.distance < b.distance end)

   for i, a in ipairs(self.sorted) do self.sorted[i] = a.nnm end
end
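
-- Worked example: for a diamond a -> b, a -> c, b -> d, c -> d, the
-- distances are a = 1, b = c = 2, d = 3, so a is evaluated first,
-- then b and c in either order, then d; every module is thus
-- evaluated after all of its predecessors.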

-- Accumulate the gradients coming from the successors of a node into
-- a single gradOutput tensor
function DAG:computeGradOutput(gradInputSucc)
   local gi
   if #gradInputSucc == 1 then
      gi = gradInputSucc[1] -- we avoid a clone()
   elseif #gradInputSucc > 1 then
      for k = 1, #gradInputSucc do
         if gi then
            gi:add(gradInputSucc[k])
         else
            gi = gradInputSucc[k]:clone()
         end
      end
   end
   return gi
end
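
-- Sketch of the accumulation, assuming two successor gradients g1, g2:
--
--   local go = dag:computeGradOutput({ g1, g2 })
--   -- go == g1:clone():add(g2); neither g1 nor g2 is modified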

function DAG:print()
   self:putInOrder()

   for i, d in ipairs(self.sorted) do
      print('#' .. i .. ' -> ' .. torch.type(d))
   end
end

----------------------------------------------------------------------

function DAG:updateOutput(input)
   self:putInOrder()

   -- Feed the input modules
   self:nestedApply(
      function(nnm, i)
         self.node[nnm].input = i
         -- nnm:updateOutput(i)
         self:rethrowErrors(nnm, self.node[nnm].index, 'updateOutput', i)
      end,
      self.inputModules,
      input
   )

   -- Process the other modules in topological order, collecting each
   -- one's input from the outputs of its predecessors
   for _, nnm in ipairs(self.sorted) do
      local node = self.node[nnm]
      if #node.pred > 0 then
         local i
         if #node.pred == 1 then
            i = node.pred[1].output
         elseif #node.pred > 1 then
            i = {}
            for k = 1, #node.pred do
               i[k] = node.pred[k].output
            end
         end
         node.input = i
         -- nnm:updateOutput(i)
         self:rethrowErrors(nnm, self.node[nnm].index, 'updateOutput', i)
      end
   end

   self.output = self:nestedApply(
      function(m) return m.output end,
      self.outputModules
   )

   return self.output
end
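
-- Forward sketch, reusing the hypothetical graph above:
--
--   local x = torch.randn(10)
--   local y = dag:forward(x)
--   -- y is { b.output, c.output }, shaped like the setOutput argument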

function DAG:updateGradInput(input, gradOutput)
   assert(self.sorted, 'there has been a DAG structure change before a DAG:updateGradInput')

   -- Feed the gradients into the output modules
   self:nestedApply(
      function(nnm, go)
         -- nnm:updateGradInput(self.node[nnm].input, go)
         self:rethrowErrors(nnm, self.node[nnm].index, 'updateGradInput', self.node[nnm].input, go)
      end,
      self.outputModules, gradOutput
   )

   self:nestedApply(
      function(nnm, i) self.node[nnm].input = i end,
      self.inputModules, input
   )

   for _, node in pairs(self.node) do
      node.gradInputSucc = {}
   end

   -- Back-propagate in reverse topological order
   for k = #self.sorted, 1, -1 do
      local nnm = self.sorted[k]
      local node = self.node[nnm]
      local pred, gradInputSucc = node.pred, node.gradInputSucc

      if #gradInputSucc > 0 then
         node.gradOutput = self:computeGradOutput(gradInputSucc)
         -- nnm:updateGradInput(node.input, node.gradOutput)
         self:rethrowErrors(nnm, self.node[nnm].index, 'updateGradInput', node.input, node.gradOutput)
      end

      -- We fill the gradInputSucc of our predecessors
      if #pred == 1 then
         table.insert(self.node[pred[1]].gradInputSucc, nnm.gradInput)
      elseif #pred > 1 then
         if torch.type(nnm.gradInput) ~= 'table' then
            error('Should have a table gradInput since it has multiple predecessors')
         end
         for n = 1, #pred do
            table.insert(self.node[pred[n]].gradInputSucc, nnm.gradInput[n])
         end
      end
   end

   self.gradInput = self:nestedApply(function(m) return m.gradInput end, self.inputModules)

   return self.gradInput
end
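
-- Backward sketch, assuming dy1 and dy2 are gradients w.r.t. the two
-- outputs of the forward pass above (updateOutput must have run
-- first, since it builds self.sorted and the per-node inputs):
--
--   local dx = dag:updateGradInput(x, { dy1, dy2 })
--   -- dx mirrors the structure passed to setInput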

function DAG:accGradParameters(input, gradOutput, scale)
   scale = scale or 1

   assert(self.sorted, 'there has been a DAG structure change before a DAG:accGradParameters')

   for k = 1, #self.modules do
      local nnm = self.modules[k]
      local node = self.node[nnm]
      -- nnm:accGradParameters(node.input, node.gradOutput, scale)
      self:rethrowErrors(nnm, k, 'accGradParameters', node.input, self:computeGradOutput(node.gradInputSucc), scale)
   end
end
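
-- End-to-end sketch of one training step with the hypothetical graph
-- built above (module names are illustrative, not part of this file):
--
--   local x = torch.randn(10)
--   local y = dag:forward(x)
--   dag:zeroGradParameters()
--   dag:backward(x, { y[1]:clone():fill(1), y[2]:clone():fill(1) })
--   dag:updateParameters(0.01)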

----------------------------------------------------------------------