--[[

   Copyright (c) 2016 Idiap Research Institute, http://www.idiap.ch/
   Written by Francois Fleuret <francois.fleuret@idiap.ch>

   This file is free software: you can redistribute it and/or modify
   it under the terms of the GNU General Public License version 3 as
   published by the Free Software Foundation.

   It is distributed in the hope that it will be useful, but WITHOUT
   ANY WARRANTY; without even the implied warranty of MERCHANTABILITY
   or FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public
   License for more details.

   You should have received a copy of the GNU General Public License
   along with this file. If not, see <http://www.gnu.org/licenses/>.

]]--
require 'torch'
require 'nn'

local DAG, parent = torch.class('nn.DAG', 'nn.Container')

function DAG:__init()
   parent.__init(self)
   -- Nodes are indexed by the module they contain
   self.node = {}
end

function DAG:createNode(nnm)
   if not self.node[nnm] then
      self:add(nnm) -- Add it to the object as a Container
      local node = { pred = {}, succ = {} }
      node.index = #self.modules
      self.node[nnm] = node
   end
end

-- The main use should be to add an edge between two modules, but it
-- can also add a full sequence of modules
function DAG:connect(...)
   self.sorted = nil
   local prev
   for _, nnm in pairs({...}) do
      self:createNode(nnm)
      if prev then
         table.insert(self.node[nnm].pred, prev)
         table.insert(self.node[prev].succ, nnm)
      end
      prev = nnm
   end
end
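
-- For instance, assuming modules a, b, c and d (names here are
-- illustrative), dag:connect(a, b, c) adds the edges a->b and b->c in
-- a single call, and a later dag:connect(a, d) adds a second outgoing
-- edge from a:
--
--   local dag = nn.DAG()
--   local a, b = nn.Linear(10, 10), nn.ReLU()
--   local c, d = nn.Linear(10, 5), nn.Tanh()
--   dag:connect(a, b, c)
--   dag:connect(a, d)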

-- Apply f on t recursively; use the corresponding element from args
-- (i.e. same keys) as second parameter to f when available; return
-- the results from f, organized in a similarly nested table.
function DAG:nestedApply(f, t, args)
   if torch.type(t) == 'table' then
      local result = {}
      for k, s in pairs(t) do
         result[k] = self:nestedApply(f, s, args and args[k])
      end
      return result
   else
      return f(t, args)
   end
end
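
-- For instance, nestedApply(f, { a, { b, c } }, { 1, { 2, 3 } }) calls
-- f(a, 1), f(b, 2) and f(c, 3), and returns their results arranged in
-- a table of the same shape as the second argument.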

function DAG:setInput(i)
   self.sorted = nil
   self.inputModules = i
   self:nestedApply(
      function(nnm)
         if #self.node[nnm].succ == 0 then
            error('Input modules must have outgoing edges.')
         end
         if #self.node[nnm].pred > 0 then
            error('Input modules cannot have incoming edges.')
         end
      end,
      self.inputModules
   )
end

function DAG:setOutput(o)
   self.sorted = nil
   self.outputModules = o
   self:nestedApply(
      function(nnm)
         if #self.node[nnm].pred == 0 then
            error('Output modules must have incoming edges.')
         end
         if #self.node[nnm].succ > 0 then
            error('Output modules cannot have outgoing edges.')
         end
      end,
      self.outputModules
   )
end
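
-- A minimal end-to-end sketch (module names are illustrative); inputs
-- and outputs can be single modules or nested tables of modules:
--
--   local dag = nn.DAG()
--   local a = nn.Linear(10, 10)
--   local b = nn.ReLU()
--   local c = nn.Linear(10, 5)
--   dag:connect(a, b, c)
--   dag:setInput(a)
--   dag:setOutput(c)
--   local y = dag:updateOutput(torch.randn(10))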

function DAG:putInOrder()
   if self.sorted then
      return
   end

   -- The distance of a module is the length of the longest path from
   -- an input module to it; relax the distances until a fixed point.
   local distance = {}
   self:nestedApply(function(m) distance[m] = 1 end, self.inputModules)

   local nc
   repeat
      nc = 0
      for nnma, node in pairs(self.node) do
         for _, nnmb in pairs(node.succ) do
            if distance[nnma] and (not distance[nnmb] or distance[nnmb] < distance[nnma] + 1) then
               distance[nnmb] = distance[nnma] + 1
               nc = nc + 1
            end
         end
      end
   until nc == 0

   self.sorted = {}
   for m, d in pairs(distance) do
      table.insert(self.sorted, { distance = d, nnm = m })
   end

   table.sort(self.sorted, function(a, b) return a.distance < b.distance end)

   for i, a in ipairs(self.sorted) do self.sorted[i] = a.nnm end
end
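
-- For instance, on the diamond a->b, a->c, b->d, c->d, the distances
-- are a=1, b=2, c=2, d=3; sorting by increasing distance puts a first
-- and d last, which guarantees every module comes after all of its
-- predecessors.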

function DAG:computeGradOutput(gradInputSucc)
   local gi
   if #gradInputSucc == 1 then
      gi = gradInputSucc[1] -- we avoid a clone()
   elseif #gradInputSucc > 1 then
      for k = 1, #gradInputSucc do
         if gi then
            gi:add(gradInputSucc[k])
         else
            gi = gradInputSucc[k]:clone()
         end
      end
   end
   return gi
end
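
-- With two successors whose gradInputs are g1 and g2, this returns
-- g1:clone():add(g2), i.e. the sum of the gradients flowing back; the
-- clone protects g1 from being modified in place.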

function DAG:print()
   self:putInOrder()

   for i, d in ipairs(self.sorted) do
      print('#' .. i .. ' -> ' .. torch.type(d))
   end
end

----------------------------------------------------------------------

function DAG:updateOutput(input)
   self:putInOrder()

   self:nestedApply(
      function(nnm, i)
         self.node[nnm].input = i
         -- nnm:updateOutput(i)
         self:rethrowErrors(nnm, self.node[nnm].index, 'updateOutput', i)
      end,
      self.inputModules,
      input
   )

   for _, nnm in ipairs(self.sorted) do
      local node = self.node[nnm]
      if #node.pred > 0 then
         local i
         if #node.pred == 1 then
            i = node.pred[1].output
         elseif #node.pred > 1 then
            i = {}
            for k = 1, #node.pred do
               i[k] = node.pred[k].output
            end
         end
         node.input = i
         -- nnm:updateOutput(i)
         self:rethrowErrors(nnm, self.node[nnm].index, 'updateOutput', i)
      end
   end

   self.output = self:nestedApply(
      function(m) return m.output end,
      self.outputModules
   )

   return self.output
end
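
-- When a module has several predecessors it receives their outputs
-- collected in a table, so it must accept a table input (e.g.
-- nn.CAddTable or nn.JoinTable). A sketch with two inputs:
--
--   local dag = nn.DAG()
--   local x, y = nn.Identity(), nn.Identity()
--   local sum = nn.CAddTable()
--   dag:connect(x, sum)
--   dag:connect(y, sum)
--   dag:setInput({ x, y })
--   dag:setOutput(sum)
--   local o = dag:updateOutput({ torch.randn(5), torch.randn(5) })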

function DAG:updateGradInput(input, gradOutput)
   assert(self.sorted, 'there has been a DAG structure change before a DAG:updateGradInput')

   self:nestedApply(
      function(nnm, go)
         -- nnm:updateGradInput(self.node[nnm].input, go)
         self:rethrowErrors(nnm, self.node[nnm].index, 'updateGradInput', self.node[nnm].input, go)
      end,
      self.outputModules, gradOutput
   )

   self:nestedApply(
      function(nnm, i) self.node[nnm].input = i end,
      self.inputModules, input
   )

   for _, node in pairs(self.node) do
      node.gradInputSucc = {}
   end

   for k = #self.sorted, 1, -1 do
      local nnm = self.sorted[k]
      local node = self.node[nnm]
      local pred, gradInputSucc = node.pred, node.gradInputSucc

      if #gradInputSucc > 0 then
         node.gradOutput = self:computeGradOutput(gradInputSucc)
         -- nnm:updateGradInput(node.input, node.gradOutput)
         self:rethrowErrors(nnm, self.node[nnm].index, 'updateGradInput', node.input, node.gradOutput)
      end

      -- We fill the gradInputSucc of our predecessors
      if #pred == 1 then
         table.insert(self.node[pred[1]].gradInputSucc, nnm.gradInput)
      elseif #pred > 1 then
         if torch.type(nnm.gradInput) ~= 'table' then
            error('Should have a table gradInput since it has multiple predecessors')
         end
         for n = 1, #pred do
            table.insert(self.node[node.pred[n]].gradInputSucc, nnm.gradInput[n])
         end
      end
   end

   self.gradInput = self:nestedApply(function(m) return m.gradInput end, self.inputModules)

   return self.gradInput
end
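
-- Continuing the two-input sketch above, a full backward pass would be
-- (the gradOutput must have the shape of the DAG's output):
--
--   local x1, x2 = torch.randn(5), torch.randn(5)
--   local o = dag:updateOutput({ x1, x2 })
--   local gi = dag:updateGradInput({ x1, x2 }, torch.randn(5))
--   -- gi is a table with one gradInput per input module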

function DAG:accGradParameters(input, gradOutput, scale)
   scale = scale or 1

   assert(self.sorted, 'there has been a DAG structure change before a DAG:accGradParameters')

   for k = 1, #self.modules do
      local nnm = self.modules[k]
      local node = self.node[nnm]
      -- nnm:accGradParameters(node.input, self:computeGradOutput(node.gradInputSucc), scale)
      self:rethrowErrors(nnm, k, 'accGradParameters', node.input, self:computeGradOutput(node.gradInputSucc), scale)
   end
end
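
-- A typical training step then chains the three passes, as with any
-- nn.Module (a sketch; x, target and the criterion are illustrative):
--
--   local criterion = nn.MSECriterion()
--   local o = dag:updateOutput(x)
--   local l = criterion:forward(o, target)
--   local go = criterion:backward(o, target)
--   dag:zeroGradParameters()
--   dag:updateGradInput(x, go)
--   dag:accGradParameters(x, go)
--   dag:updateParameters(0.01)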

----------------------------------------------------------------------

function DAG:dot(filename)
   local file = (filename and io.open(filename, 'w')) or io.stdout

   file:write('digraph {\n')

   for nnma, node in pairs(self.node) do
      file:write(
         '  ' .. node.index
            .. ' [shape=box,label=\"' .. torch.type(nnma) .. '\"]'
            .. '\n'
      )

      for _, nnmb in pairs(node.succ) do
         file:write('  ' .. node.index .. ' -> ' .. self.node[nnmb].index .. '\n')
      end
   end

   file:write('}\n')

   if filename then file:close() end
end
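
-- For instance, dag:dot('graph.dot') writes a Graphviz description of
-- the DAG, one box per module, which can then be rendered with:
--
--   dot -Tpdf graph.dot -o graph.pdf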