X-Git-Url: https://www.fleuret.org/cgi-bin/gitweb/gitweb.cgi?a=blobdiff_plain;f=dagnn.lua;h=92032640e9c250a8dfbc7db04871805bd2f319d2;hb=9dad4fa1118632bfa02c01e4d6a8a5a129061a54;hp=05672e9bad5997012063b7c4f76510306cb58181;hpb=34ed0d49d9b6b03811cd92c9513edf4ec5d4d2d2;p=dagnn.git

diff --git a/dagnn.lua b/dagnn.lua
index 05672e9..9203264 100755
--- a/dagnn.lua
+++ b/dagnn.lua
@@ -1,4 +1,23 @@
+--[[
+
+   Copyright (c) 2016 Idiap Research Institute, http://www.idiap.ch/
+   Written by Francois Fleuret
+
+   This file is free software: you can redistribute it and/or modify
+   it under the terms of the GNU General Public License version 3 as
+   published by the Free Software Foundation.
+
+   It is distributed in the hope that it will be useful, but WITHOUT
+   ANY WARRANTY; without even the implied warranty of MERCHANTABILITY
+   or FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public
+   License for more details.
+
+   You should have received a copy of the GNU General Public License
+   along with this file. If not, see <http://www.gnu.org/licenses/>.
+
+]]--
+
 require 'torch'
 require 'nn'
 
 local DAG, parent = torch.class('nn.DAG', 'nn.Container')
@@ -6,47 +25,55 @@ local DAG, parent = torch.class('nn.DAG', 'nn.Container')
 
 function DAG:__init()
    parent.__init(self)
-   -- Nodes are indexed by the module they encompass
+   -- Nodes are indexed by the module they contain
    self.node = { }
 end
 
 function DAG:createNode(nnm)
    if not self.node[nnm] then
       self:add(nnm) -- Add it to the object as a Container
-      self.node[nnm] = {}
-      self.node[nnm].succ = {}
-      self.node[nnm].pred = {}
+      local node = {}
+      node.succ = {}
+      node.pred = {}
+      node.index = #self.modules
+      self.node[nnm] = node
    end
 end
 
-function DAG:addEdge(nnma, nnmb)
+-- The main use should be to add an edge between two modules, but it
+-- can also add a full sequence of modules
+function DAG:connect(...)
    self.sorted = nil
-   self:createNode(nnma)
-   self:createNode(nnmb)
-   table.insert(self.node[nnmb].pred, nnma)
-   table.insert(self.node[nnma].succ, nnmb)
+   local prev
+   for _, nnm in pairs({...}) do
+      self:createNode(nnm)
+      if prev then
+         table.insert(self.node[nnm].pred, prev)
+         table.insert(self.node[prev].succ, nnm)
+      end
+      prev = nnm
+   end
 end
 
--- Apply f on t recursively; use the corresponding a1 and a2 elements
--- (i.e. same keys) as second and third parameters to f when
--- available; return the results from f, organized in a similarly
--- nested table.
-function DAG:nestApply(f, t, a1, a2)
+-- Apply f on t recursively; use the corresponding element from args
+-- (i.e. same keys) as second parameter to f when available; return
+-- the results from f, organized in a similarly nested table.
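+--
+-- For instance, with hypothetical modules m1 and m2 and matching
+-- arguments x1 and x2, nestedApply(f, { m1, { m2 } }, { x1, { x2 } })
+-- should return { f(m1, x1), { f(m2, x2) } }.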
+function DAG:nestedApply(f, t, args)
    if torch.type(t) == 'table' then
       local result = {}
       for k, s in pairs(t) do
-         result[k] = self:nestApply(f, s, a1 and a1[k], a2 and a2[k])
+         result[k] = self:nestedApply(f, s, args and args[k])
       end
       return result
    else
-      return f(t, a1, a2)
+      return f(t, args)
    end
 end
 
 function DAG:setInput(i)
    self.sorted = nil
    self.inputModules = i
-   self:nestApply(
+   self:nestedApply(
      function(nnm)
        if #self.node[nnm].succ == 0 then
          error('Input modules must have outgoing edges.')
@@ -62,7 +89,7 @@ end
 function DAG:setOutput(o)
    self.sorted = nil
    self.outputModules = o
-   self:nestApply(
+   self:nestedApply(
      function(nnm)
        if #self.node[nnm].pred == 0 then
          error('Output module must have incoming edges.')
@@ -80,14 +107,10 @@ function DAG:putInOrder()
       return
    end
 
-   -- First, we sort the nodes according to the DAG order
-
    local distance = {}
-
-   self:nestApply(function(m) distance[m] = 1 end, self.inputModules)
+   self:nestedApply(function(m) distance[m] = 1 end, self.inputModules)
 
    local nc
-
    repeat
       nc = 0
       for nnma, node in pairs(self.node) do
@@ -110,6 +133,22 @@ function DAG:putInOrder()
    for i, a in ipairs(self.sorted) do self.sorted[i] = a.nnm end
 end
 
+function DAG:computeGradOutput(gradInputSucc)
+   local gi
+   if #gradInputSucc == 1 then
+      gi = gradInputSucc[1] -- we avoid a clone()
+   elseif #gradInputSucc > 1 then
+      for k = 1, #gradInputSucc do
+         if gi then
+            gi:add(gradInputSucc[k])
+         else
+            gi = gradInputSucc[k]:clone()
+         end
+      end
+   end
+   return gi
+end
+
 function DAG:print()
    self:putInOrder()
 
@@ -118,13 +157,16 @@ function DAG:print()
    end
 end
 
+----------------------------------------------------------------------
+
 function DAG:updateOutput(input)
    self:putInOrder()
 
-   self:nestApply(
+   self:nestedApply(
       function(nnm, i)
          self.node[nnm].input = i
-         nnm:updateOutput(i)
+         -- nnm:updateOutput(i)
+         self:rethrowErrors(nnm, self.node[nnm].index, 'updateOutput', i)
       end,
       self.inputModules,
       input
@@ -143,23 +185,35 @@ function DAG:updateOutput(input)
            end
         end
         node.input = i
-        nnm:updateOutput(i)
+        -- nnm:updateOutput(i)
+        self:rethrowErrors(nnm, self.node[nnm].index, 'updateOutput', i)
      end
   end
 
-   self.output = self:nestApply(function(m) return m.output end, self.outputModules)
+   self.output = self:nestedApply(
+      function(m) return m.output end,
+      self.outputModules
+   )
 
    return self.output
 end
 
 function DAG:updateGradInput(input, gradOutput)
-   self:putInOrder()
+   assert(self.sorted, 'there has been a DAG structure change before a DAG:updateGradInput')
 
-   self:nestApply(
-      function(nnm, go) nnm:updateGradInput(self.node[nnm].input, go) end,
+   self:nestedApply(
+      function(nnm, go)
+         -- nnm:updateGradInput(self.node[nnm].input, go)
+         self:rethrowErrors(nnm, self.node[nnm].index, 'updateGradInput', self.node[nnm].input, go)
+      end,
       self.outputModules, gradOutput
    )
 
+   self:nestedApply(
+      function(nnm, i) self.node[nnm].input = i end,
+      self.inputModules, input
+   )
+
    for _, node in pairs(self.node) do
      node.gradInputSucc = {}
   end
@@ -167,23 +221,12 @@ function DAG:updateGradInput(input, gradOutput)
    for k = #self.sorted, 1, -1 do
      local nnm = self.sorted[k]
     local node = self.node[nnm]
-     local pred, succ, gradInputSucc = node.pred, node.succ, node.gradInputSucc
+     local pred, gradInputSucc = node.pred, node.gradInputSucc
 
      if #gradInputSucc > 0 then
-        -- We update nnm:gradInput
-        local gi
-        if #gradInputSucc == 1 then
-           gi = gradInputSucc[1] -- we avoid a clone()
-        elseif #gradInputSucc > 1 then
-           for k = 1, #gradInputSucc do
-              if gi then
-                 gi:add(gradInputSucc[k])
-              else
-                 gi = gradInputSucc[k]:clone()
-              end
-           end
-        end
-        nnm:updateGradInput(node.input, gi)
+        node.gradOutput = self:computeGradOutput(gradInputSucc)
+        -- nnm:updateGradInput(node.input, node.gradOutput)
+        self:rethrowErrors(nnm, self.node[nnm].index, 'updateGradInput', node.input, node.gradOutput)
      end
 
      -- We fill the gradInputSucc of our predecessors
@@ -199,9 +242,54 @@ function DAG:updateGradInput(input, gradOutput)
      end
   end
 
-   self.gradInput = self:nestApply(function(m) return m.gradInput end, self.inputModules)
+   self.gradInput = self:nestedApply(function(m) return m.gradInput end, self.inputModules)
 
    return self.gradInput
 end
 
-return DAG
+function DAG:accGradParameters(input, gradOutput, scale)
+   scale = scale or 1
+
+   assert(self.sorted, 'there has been a DAG structure change before a DAG:accGradParameters')
+
+   for k = 1, #self.modules do
+      local nnm = self.modules[k]
+      local node = self.node[nnm]
+      -- nnm:accGradParameters(node.input, node.gradOutput, scale)
+      self:rethrowErrors(nnm, k, 'accGradParameters', node.input, self:computeGradOutput(node.gradInputSucc), scale)
+   end
+end
+
+----------------------------------------------------------------------
+
+function DAG:dot(filename)
+   local file = (filename and io.open(filename, 'w')) or io.stdout
+
+   file:write('digraph {\n')
+
+   file:write('\n')
+
+   for nnma, node in pairs(self.node) do
+      file:write(
+         '   '
+         .. node.index
+         .. ' [shape=box,label=\"' .. torch.type(nnma) .. '\"]'
+         .. '\n'
+      )
+
+      for _, nnmb in pairs(node.succ) do
+         file:write(
+            '   '
+            .. node.index
+            .. ' -> '
+            .. self.node[nnmb].index
+            .. '\n'
+         )
+      end
+
+      file:write('\n')
+   end
+
+   file:write('}\n')
+
+end
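
A minimal usage sketch of the resulting interface, assuming the file above is loadable as 'dagnn'; the module choices (nn.Linear, nn.ReLU, nn.Tanh, nn.CAddTable), the tensor sizes, and the output filename are illustrative assumptions, not part of the patch:

require 'torch'
require 'nn'
require 'dagnn'

-- Build a small diamond: a feeds b and c, whose outputs are summed
-- by d. A node with several predecessors receives their outputs
-- collected in a table, hence a table-consuming module at the join.
local a = nn.Linear(10, 10)
local b = nn.ReLU()
local c = nn.Tanh()
local d = nn.CAddTable()

local dag = nn.DAG()
dag:connect(a, b, d) -- adds the edges a -> b and b -> d
dag:connect(a, c, d) -- adds the edges a -> c and c -> d
dag:setInput(a)
dag:setOutput(d)

local input = torch.randn(5, 10)
local output = dag:updateOutput(input)

-- The backward passes assert that the graph was sorted by a
-- preceding forward pass.
local gradOutput = output:clone():normal()
dag:updateGradInput(input, gradOutput)
dag:accGradParameters(input, gradOutput, 1)

-- Export the graph structure in Graphviz dot format.
dag:dot('graph.dot')

The resulting graph.dot can then be rendered with Graphviz, e.g. dot -Tpdf graph.dot > graph.pdf.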