X-Git-Url: https://www.fleuret.org/cgi-bin/gitweb/gitweb.cgi?p=dagnn.git;a=blobdiff_plain;f=dagnn.lua;h=5921c05da410a041186e8cd8806046b4673fd027;hp=0f93d95f63a87b320d9e6d261a89c9798b4d9b55;hb=84b07c45eb8a2785a81cad7bcf6fadbac0d63f8f;hpb=d0743d66135ed7cedcb3777cfa5dda883cbeadb3

diff --git a/dagnn.lua b/dagnn.lua
index 0f93d95..5921c05 100755
--- a/dagnn.lua
+++ b/dagnn.lua
@@ -1,4 +1,23 @@
+--[[
+
+   Copyright (c) 2016 Idiap Research Institute, http://www.idiap.ch/
+   Written by Francois Fleuret
+
+   This file is free software: you can redistribute it and/or modify
+   it under the terms of the GNU General Public License version 3 as
+   published by the Free Software Foundation.
+
+   It is distributed in the hope that it will be useful, but WITHOUT
+   ANY WARRANTY; without even the implied warranty of MERCHANTABILITY
+   or FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public
+   License for more details.
+
+   You should have received a copy of the GNU General Public License
+   along with this file. If not, see <http://www.gnu.org/licenses/>.
+
+]]--
+
 require 'torch'
 require 'nn'
 
@@ -7,27 +26,10 @@ local DAG, parent = torch.class('nn.DAG', 'nn.Container')
 function DAG:__init()
    parent.__init(self)
    -- Nodes are indexed by the module they contain
-   self.node = { }
-end
-
-function DAG:createNode(nnm)
-   if not self.node[nnm] then
-      self:add(nnm) -- Add it to the object as a Container
-      self.node[nnm] = {}
-      self.node[nnm].succ = {}
-      self.node[nnm].pred = {}
-   end
+   self.node = {}
 end
 
-function DAG:addEdge(nnma, nnmb)
-   self.sorted = nil
-   self:createNode(nnma)
-   self:createNode(nnmb)
-   table.insert(self.node[nnmb].pred, nnma)
-   table.insert(self.node[nnma].succ, nnmb)
-end
-
--- Apply f on t recursively; use the corresponding element from args
+-- Apply f on t recursively; use the corresponding elements from args
 -- (i.e. same keys) as second parameter to f when available; return
 -- the results from f, organized in a similarly nested table.
 function DAG:nestedApply(f, t, args)
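
-- Not part of the patch: the body of nestedApply is hidden by the
-- hunk context here, so the following is a hedged standalone sketch
-- of the behavior the comment above describes.

local function nestedApply(f, t, args)
   if torch.type(t) == 'table' then
      local result = {}
      for k, s in pairs(t) do
         result[k] = nestedApply(f, s, args and args[k])
      end
      return result
   else
      return f(t, args)
   end
end

-- For instance, with t = { m1, { m2, m3 } } and args = { x1, { x2, x3 } },
-- nestedApply(f, t, args) returns { f(m1, x1), { f(m2, x2), f(m3, x3) } }.
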
@@ -42,6 +44,99 @@ function DAG:nestedApply(f, t, args)
    end
 end
 
+function DAG:createNode(nnm)
+   if not self.node[nnm] then
+      self:add(nnm) -- Add it to the object as a Container
+      local node = {}
+      node.succ = {}
+      node.pred = {}
+      node.index = #self.modules
+      self.node[nnm] = node
+   end
+end
+
+function DAG:putInOrder()
+   if self.sorted then
+      return
+   end
+
+   local distance = {}
+   self:nestedApply(function(m) distance[m] = 1 end, self.inputModules)
+
+   local nc
+   repeat
+      nc = 0
+      for nnma, node in pairs(self.node) do
+         for _, nnmb in pairs(node.succ) do
+            if distance[nnma] and (not distance[nnmb] or distance[nnmb] < distance[nnma] + 1) then
+               distance[nnmb] = distance[nnma] + 1
+               nc = nc + 1
+            end
+         end
+      end
+   until nc == 0
+
+   self.sorted = {}
+   for m, d in pairs(distance) do
+      table.insert(self.sorted, { distance = d, nnm = m })
+   end
+
+   table.sort(self.sorted, function(a, b) return a.distance < b.distance end)
+
+   for i, a in ipairs(self.sorted) do self.sorted[i] = a.nnm end
+end
+
+-- This accumulates x into a, where both are nested tables of
+-- tensors. If first is true, set a = x. Behavior is undefined if a
+-- and x do not have the exact same structure.
+function DAG:nestedAccTensor(a, x, first)
+   if torch.type(x) == 'table' then
+      local b = {}
+      for i in pairs(x) do
+         b[i] = self:nestedAccTensor(a[i], x[i], first)
+      end
+      a = b
+   else
+      if first then
+         if a then
+            a:resizeAs(x):copy(x)
+         else
+            a = x:clone()
+         end
+      else
+         a:add(x)
+      end
+   end
+   return a
+end
+
+function DAG:updateGradOutput(node)
+   local gradInputSucc = node.gradInputSucc
+   if #gradInputSucc == 1 then
+      node.gradOutput = gradInputSucc[1]
+   elseif #gradInputSucc > 1 then
+      for k = 1, #gradInputSucc do
+         node.gradOutput = self:nestedAccTensor(node.gradOutput, gradInputSucc[k], k == 1)
+      end
+   end
+end
+
+----------------------------------------------------------------------
+
+-- Connect a sequence of modules
+function DAG:connect(...)
+   self.sorted = nil
+   local prev
+   for _, nnm in pairs({...}) do
+      self:createNode(nnm)
+      if prev then
+         table.insert(self.node[nnm].pred, prev)
+         table.insert(self.node[prev].succ, nnm)
+      end
+      prev = nnm
+   end
+end
+
 function DAG:setInput(i)
    self.sorted = nil
    self.inputModules = i
@@ -51,7 +146,7 @@ function DAG:setInput(i)
             error('Input modules must have outgoing edges.')
          end
          if #self.node[nnm].pred > 0 then
-            error('Input modules cannog have incoming edges.')
+            error('Input modules cannot have incoming edges.')
         end
       end,
      self.inputModules
@@ -74,56 +169,64 @@ function DAG:setOutput(o)
    )
 end
 
-function DAG:putInOrder()
-   if self.sorted then
-      return
+function DAG:print()
+   self:putInOrder()
+
+   for i, d in ipairs(self.sorted) do
+      print('#' .. i .. ' -> ' .. torch.type(d))
    end
+end
 
-   -- First, we sort the nodes according to the DAG order
+----------------------------------------------------------------------
 
-   local distance = {}
+function DAG:saveDot(filename)
+   local file = (filename and io.open(filename, 'w')) or io.stdout
 
-   self:nestedApply(function(m) distance[m] = 1 end, self.inputModules)
+   file:write('digraph {\n')
 
-   local nc
+   file:write('\n')
 
-   repeat
-      nc = 0
-      for nnma, node in pairs(self.node) do
-         for _, nnmb in pairs(node.succ) do
-            if distance[nnma] and (not distance[nnmb] or distance[nnmb] < distance[nnma] + 1) then
-               distance[nnmb] = distance[nnma] + 1
-               nc = nc + 1
-            end
+   for nnmb, node in pairs(self.node) do
+      file:write(
+         '   '
+            .. node.index
+            .. ' [shape=box,label=\"' .. torch.type(nnmb) .. '\"]'
+            .. '\n'
+      )
+
+      for i, nnma in pairs(node.pred) do
+         local decoration = ''
+         if #node.pred > 1 then
+            -- decoration = ' [headlabel=\"' .. i .. '\"]'
+            decoration = ' [label=\"' .. i .. '\"]'
          end
+         file:write(
+            '   '
+               .. self.node[nnma].index
+               .. ' -> '
+               .. self.node[nnmb].index
+               .. decoration
+               .. '\n'
+         )
      end
-   until nc == 0
 
-   self.sorted = { }
-   for m, d in pairs(distance) do
-      table.insert(self.sorted, { distance = d, nnm = m })
+      file:write('\n')
    end
 
-   table.sort(self.sorted, function(a, b) return a.distance < b.distance end)
+   file:write('}\n')
 
-   for i, a in ipairs(self.sorted) do self.sorted[i] = a.nnm end
 end
 
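
-- Not part of the patch: a hedged usage sketch of the API defined
-- above (connect / setInput / setOutput / print / saveDot), with
-- made-up layer sizes. It assumes dagnn.lua is on the package path so
-- that require 'dagnn' registers nn.DAG.

require 'torch'
require 'nn'
require 'dagnn'

local a = nn.Linear(10, 10)
local b = nn.ReLU()
local c = nn.Linear(10, 5)

local model = nn.DAG()
model:connect(a, b, c)     -- chains a -> b -> c
model:setInput(a)
model:setOutput(c)

model:print()              -- one line per module, in topological order
model:saveDot('model.dot') -- Graphviz file, one box per module
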
-function DAG:print()
-   self:putInOrder()
-
-   for i, d in ipairs(self.sorted) do
-      print('#' .. i .. ' -> ' .. torch.type(d))
-   end
-end
+----------------------------------------------------------------------
 
 function DAG:updateOutput(input)
    self:putInOrder()
 
    self:nestedApply(
       function(nnm, i)
-         self.node[nnm].input = i
-         nnm:updateOutput(i)
+         local node = self.node[nnm]
+         node.input = i
+         self:rethrowErrors(nnm, node.index, 'updateOutput', i)
       end,
       self.inputModules,
       input
@@ -142,7 +245,7 @@ function DAG:updateOutput(input)
             end
          end
          node.input = i
-         nnm:updateOutput(i)
+         self:rethrowErrors(nnm, node.index, 'updateOutput', i)
       end
    end
 
@@ -154,27 +257,15 @@ function DAG:updateOutput(input)
 
    return self.output
 end
 
-function DAG:computeGradInput(gradInputSucc)
-   local gi
-   if #gradInputSucc == 1 then
-      gi = gradInputSucc[1] -- we avoid a clone()
-   elseif #gradInputSucc > 1 then
-      for k = 1, #gradInputSucc do
-         if gi then
-            gi:add(gradInputSucc[k])
-         else
-            gi = gradInputSucc[k]:clone()
-         end
-      end
-   end
-   return gi
-end
-
 function DAG:updateGradInput(input, gradOutput)
-   self:putInOrder()
+   assert(self.sorted, 'There has been a DAG structure change before a DAG:updateGradInput')
 
    self:nestedApply(
-      function(nnm, go) nnm:updateGradInput(self.node[nnm].input, go) end,
+      function(nnm, go)
+         local node = self.node[nnm]
+         node.gradOutput = go
+         self:rethrowErrors(nnm, node.index, 'updateGradInput', node.input, go)
+      end,
       self.outputModules, gradOutput
    )
 
@@ -190,10 +281,11 @@ function DAG:updateGradInput(input, gradOutput)
    for k = #self.sorted, 1, -1 do
       local nnm = self.sorted[k]
       local node = self.node[nnm]
-      local pred, gradInputSucc = node.pred, node.gradInputSucc
+      local pred = node.pred
 
-      if #gradInputSucc > 0 then
-         nnm:updateGradInput(node.input, self:computeGradInput(gradInputSucc))
+      if #node.gradInputSucc > 0 then
+         self:updateGradOutput(node)
+         self:rethrowErrors(nnm, node.index, 'updateGradInput', node.input, node.gradOutput)
       end
 
       -- We fill the gradInputSucc of our predecessors
@@ -215,12 +307,10 @@ function DAG:updateGradInput(input, gradOutput)
 end
 
 function DAG:accGradParameters(input, gradOutput, scale)
-   scale = scale or 1
-
-   self:putInOrder()
+   assert(self.sorted, 'There has been a DAG structure change before a DAG:accGradParameters')
 
    self:nestedApply(
-      function(nnm, go) nnm:updateGradInput(self.node[nnm].input, go) end,
+      function(nnm, go) self.node[nnm].gradOutput = go end,
       self.outputModules, gradOutput
    )
 
@@ -229,11 +319,19 @@ function DAG:accGradParameters(input, gradOutput, scale)
       self.inputModules, input
    )
 
-   for k = #self.sorted, 1, -1 do
-      local nnm = self.sorted[k]
+   for k = 1, #self.modules do
+      local nnm = self.modules[k]
       local node = self.node[nnm]
-      nnm:accGradParameters(node.input, self:computeGradInput(node.gradInputSucc), scale)
+      self:rethrowErrors(nnm, k, 'accGradParameters', node.input, node.gradOutput, scale)
    end
 end
 
-return DAG
+function DAG:clearState()
+   self.sorted = nil
+   for _, node in pairs(self.node) do
+      node.gradInputSucc = nil
+      node.input = nil
+      node.gradOutput = nil
+   end
+   return parent.clearState(self)
+end
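
-- Not part of the patch: a hedged sketch of a forward / backward pass
-- through a DAG in which one module feeds two successors, the case
-- the gradient accumulation in updateGradOutput / nestedAccTensor
-- exists for. Sizes are made up; nn.CAddTable merges the two branches
-- so the graph has a single output.

require 'torch'
require 'nn'
require 'dagnn'

local first = nn.Linear(10, 10)
local branch1 = nn.ReLU()
local branch2 = nn.Tanh()
local merge = nn.CAddTable()

local model = nn.DAG()
model:connect(first, branch1, merge)
model:connect(first, branch2, merge)
model:setInput(first)
model:setOutput(merge)

local x = torch.randn(10)
local y = model:forward(x)

-- The backward pass must follow a forward pass with an unchanged
-- structure: updateGradInput and accGradParameters assert that
-- self.sorted is still valid instead of calling putInOrder.
local dx = model:backward(x, torch.randn(10))

-- clearState() drops the per-node buffers and invalidates the sorted
-- order, so the next forward pass re-sorts the graph.
model:clearState()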