--[[

Copyright (c) 2016 Idiap Research Institute, http://www.idiap.ch/
Written by Francois Fleuret <francois.fleuret@idiap.ch>

This file is free software: you can redistribute it and/or modify
it under the terms of the GNU General Public License version 3 as
published by the Free Software Foundation.

It is distributed in the hope that it will be useful, but WITHOUT
ANY WARRANTY; without even the implied warranty of MERCHANTABILITY
or FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public
License for more details.

You should have received a copy of the GNU General Public License
along with this file. If not, see <http://www.gnu.org/licenses/>.

]]--
require 'torch'
require 'nn'

local DAG, parent = torch.class('nn.DAG', 'nn.Container')

function DAG:__init()
   parent.__init(self)
   -- Nodes are indexed by the module they contain
   self.node = {}
end
-- Apply f on t recursively; use the corresponding elements from args
-- (i.e. same keys) as second parameter to f when available; return
-- the results from f, organized in a similarly nested table.
function DAG:nestedApply(f, t, args)
   if torch.type(t) == 'table' then
      local result = {}
      for k, s in pairs(t) do
         result[k] = self:nestedApply(f, s, args and args[k])
      end
      return result
   else
      return f(t, args)
   end
end
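-- A minimal illustration of this convention (the modules and the
-- nesting here are made up for the example): the result mirrors the
-- structure of t.
--
--   local r = dag:nestedApply(
--      function(m) return torch.type(m) end,
--      { a = nn.Linear(2, 2), b = { nn.ReLU(), nn.Tanh() } }
--   )
--   -- r is { a = 'nn.Linear', b = { 'nn.ReLU', 'nn.Tanh' } }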
function DAG:createNode(nnm)
   if not self.node[nnm] then
      self:add(nnm) -- Add it to the object as a Container
      local node = {}
      node.succ = {}
      node.pred = {}
      node.index = #self.modules
      self.node[nnm] = node
   end
end
function DAG:putInOrder()
   if self.sorted then
      return
   end

   -- Compute for every module its maximal distance from an input,
   -- iterating the relaxation until no distance changes; on an
   -- acyclic graph this needs at most #self.modules passes.
   local distance = {}
   self:nestedApply(function(m) distance[m] = 1 end, self.inputModules)

   local nc
   local nl = 0
   repeat
      assert(nl < #self.modules, 'Cycle detected in the graph.')
      nc = 0
      for nnma, node in pairs(self.node) do
         for _, nnmb in pairs(node.succ) do
            if distance[nnma] and (not distance[nnmb] or distance[nnmb] < distance[nnma] + 1) then
               distance[nnmb] = distance[nnma] + 1
               nc = nc + 1
            end
         end
      end
      nl = nl + 1
   until nc == 0

   for _, nnm in pairs(self.modules) do
      assert(distance[nnm], 'Some modules are not connected to inputs')
   end

   self.sorted = {}
   for m, d in pairs(distance) do
      table.insert(self.sorted, { distance = d, nnm = m })
   end

   table.sort(self.sorted, function(a, b) return a.distance < b.distance end)

   for i, a in ipairs(self.sorted) do self.sorted[i] = a.nnm end
end
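-- A worked example of the ordering (hypothetical graph): with edges
-- a -> b, b -> c and a -> c, the distances are a = 1, b = 2, c = 3,
-- so c is evaluated after b even though it is also a direct
-- successor of a. Any order compatible with the edges would do;
-- sorting by longest distance from the inputs is just a simple way
-- to obtain one.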
-- This accumulates x in a, where they are both nested tables of
-- tensors. If first is true, set a = x. Behavior is undefined if a
-- and x do not have the exact same structure.
function DAG:nestedAccTensor(a, x, first)
   if torch.type(x) == 'table' then
      local b = {}
      for i in pairs(x) do
         -- Guard with 'a and' so a nil accumulator can be populated
         b[i] = self:nestedAccTensor(a and a[i], x[i], first)
      end
      a = b
   elseif first then
      if a then
         a:resizeAs(x):copy(x)
      else
         a = x:clone()
      end
   else
      a:add(x)
   end
   return a
end
-- Accumulate into node.gradOutput the gradients collected from the
-- node's successors; with a single successor the tensor is reused
-- directly, otherwise the contributions are summed.
function DAG:updateGradOutput(node)
   local gradInputSucc = node.gradInputSucc
   if #gradInputSucc == 1 then
      node.gradOutput = gradInputSucc[1]
   elseif #gradInputSucc > 1 then
      for k = 1, #gradInputSucc do
         node.gradOutput = self:nestedAccTensor(node.gradOutput, gradInputSucc[k], k == 1)
      end
   end
end
----------------------------------------------------------------------
-- Connect a sequence of modules: each one becomes a predecessor of
-- the next. Invalidates the current topological order.
function DAG:connect(...)
   self.sorted = nil
   local prev
   for _, nnm in pairs({...}) do
      self:createNode(nnm)
      if prev then
         table.insert(self.node[nnm].pred, prev)
         table.insert(self.node[prev].succ, nnm)
      end
      prev = nnm
   end
end
function DAG:setInput(i)
   self.sorted = nil
   self.inputModules = i
   self:nestedApply(
      function(nnm)
         assert(#self.node[nnm].succ > 0, 'Input modules must have outgoing edges.')
         assert(#self.node[nnm].pred == 0, 'Input modules cannot have incoming edges.')
      end,
      self.inputModules
   )
end
function DAG:setOutput(o)
   self.sorted = nil
   self.outputModules = o
   self:nestedApply(
      function(nnm)
         assert(#self.node[nnm].pred > 0, 'Output modules must have incoming edges.')
         assert(#self.node[nnm].succ == 0, 'Output modules cannot have outgoing edges.')
      end,
      self.outputModules
   )
end
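-- A minimal usage sketch (the particular modules are made up for the
-- example, not part of this file): build a chain with a skip
-- connection, so the CAddTable receives the table
-- { b.output, a.output } as its input.
--
--   local dag = nn.DAG()
--   local a = nn.Linear(10, 10)
--   local b = nn.ReLU()
--   local c = nn.CAddTable()
--   dag:connect(a, b, c) -- chain a -> b -> c
--   dag:connect(a, c)    -- skip connection a -> c
--   dag:setInput(a)
--   dag:setOutput(c)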
function DAG:print()
   self:putInOrder()

   for i, d in ipairs(self.sorted) do
      print('#' .. i .. ' -> ' .. torch.type(d))
   end
end

----------------------------------------------------------------------
-- Write the graph in Graphviz dot format to the given file, or to
-- stdout if no filename is provided. Edges into a module with
-- several predecessors are labeled with their position in its input
-- table.
function DAG:saveDot(filename)
   local file = (filename and io.open(filename, 'w')) or io.stdout

   file:write('digraph {\n')

   file:write('\n')

   for nnmb, node in pairs(self.node) do
      file:write(
         '  '
            .. node.index
            .. ' [shape=box,label=\"' .. torch.type(nnmb) .. '\"]'
            .. '\n'
      )

      for i, nnma in pairs(node.pred) do
         local decoration = ''
         if #node.pred > 1 then
            -- decoration = ' [headlabel=\"' .. i .. '\"]'
            decoration = ' [label=\"' .. i .. '\"]'
         end
         file:write(
            '  '
               .. self.node[nnma].index
               .. ' -> '
               .. self.node[nnmb].index
               .. decoration
               .. '\n'
         )
      end

      file:write('\n')
   end

   file:write('}\n')

   if filename then
      file:close()
   end
end
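-- Typical use, assuming the dag built in the sketch above and the
-- standard Graphviz tools:
--
--   dag:saveDot('graph.dot')
--
-- and then, from a shell:
--
--   dot graph.dot -T pdf -o graph.pdf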
----------------------------------------------------------------------
function DAG:updateOutput(input)
   self:putInOrder()

   -- Feed the provided input(s) to the input modules
   self:nestedApply(
      function(nnm, i)
         local node = self.node[nnm]
         node.input = i
         self:rethrowErrors(nnm, node.index, 'updateOutput', i)
      end,
      self.inputModules,
      input
   )

   -- Then process the remaining modules in topological order; a
   -- module with several predecessors gets their outputs in a table.
   for _, nnm in ipairs(self.sorted) do
      local node = self.node[nnm]
      if #node.pred > 0 then
         local i
         if #node.pred == 1 then
            i = node.pred[1].output
         elseif #node.pred > 1 then
            i = {}
            for k = 1, #node.pred do
               i[k] = node.pred[k].output
            end
         end
         node.input = i
         self:rethrowErrors(nnm, node.index, 'updateOutput', i)
      end
   end

   self.output = self:nestedApply(
      function(m) return m.output end,
      self.outputModules
   )

   return self.output
end
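-- For instance, with the dag sketched above (hypothetical values):
--
--   local x = torch.randn(5, 10)
--   local y = dag:updateOutput(x) -- or dag:forward(x)
--
-- When a table is passed to setInput / setOutput, the input and
-- output follow the same nesting, e.g. dag:forward({ x1, x2 }).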
function DAG:updateGradInput(input, gradOutput)
   assert(self.sorted, 'There has been a DAG structure change before a DAG:updateGradInput')

   self:nestedApply(
      function(nnm, go)
         local node = self.node[nnm]
         node.gradOutput = go
         self:rethrowErrors(nnm, node.index, 'updateGradInput', node.input, go)
      end,
      self.outputModules, gradOutput
   )

   self:nestedApply(
      function(nnm, i) self.node[nnm].input = i end,
      self.inputModules, input
   )

   for _, node in pairs(self.node) do
      node.gradInputSucc = {}
   end

   -- Process the modules in reverse topological order
   for k = #self.sorted, 1, -1 do
      local nnm = self.sorted[k]
      local node = self.node[nnm]
      local pred = node.pred

      if #node.gradInputSucc > 0 then
         self:updateGradOutput(node)
         self:rethrowErrors(nnm, node.index, 'updateGradInput', node.input, node.gradOutput)
      end

      -- We fill the gradInputSucc of our predecessors
      if #pred == 1 then
         table.insert(self.node[pred[1]].gradInputSucc, nnm.gradInput)
      elseif #pred > 1 then
         assert(torch.type(nnm.gradInput) == 'table',
                'Should have a table gradInput since it has multiple predecessors')
         for n = 1, #pred do
            table.insert(self.node[pred[n]].gradInputSucc, nnm.gradInput[n])
         end
      end
   end

   self.gradInput = self:nestedApply(function(m) return m.gradInput end, self.inputModules)

   return self.gradInput
end
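-- Continuing the sketch above (hypothetical values): after a forward
-- pass, backpropagate a gradient of the same shape as the output.
--
--   local y = dag:forward(x)
--   local dy = y:clone():normal()
--   local dx = dag:backward(x, dy)
--
-- dag:backward calls updateGradInput then accGradParameters, as for
-- any nn.Module.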
function DAG:accGradParameters(input, gradOutput, scale)
   assert(self.sorted, 'There has been a DAG structure change before a DAG:accGradParameters')

   self:nestedApply(
      function(nnm, go) self.node[nnm].gradOutput = go end,
      self.outputModules, gradOutput
   )

   self:nestedApply(
      function(nnm, i) self.node[nnm].input = i end,
      self.inputModules, input
   )

   for k = 1, #self.modules do
      local nnm = self.modules[k]
      local node = self.node[nnm]
      self:rethrowErrors(nnm, k, 'accGradParameters', node.input, node.gradOutput, scale)
   end
end
function DAG:clearState()
   self.sorted = nil
   for _, node in pairs(self.node) do
      node.gradInputSucc = nil
      node.input = nil
      node.gradOutput = nil
   end
   return parent.clearState(self)
end