+-- Accumulates parameter gradients for every module in the DAG.
+-- Requires that the graph has been topologically sorted (self.sorted is set
+-- by a prior structure pass); asserts otherwise.
+--
+-- input      -- the DAG input (tensor or nested table matching self.inputModules)
+-- gradOutput -- gradient w.r.t. the DAG output (matches self.outputModules)
+-- scale      -- scaling factor passed through to each module's accGradParameters
+--
+-- NOTE(review): this relies on node.input / node.gradOutput having been
+-- populated by earlier updateOutput/updateGradInput calls for interior
+-- nodes; only the boundary nodes are (re)stamped here — confirm callers
+-- always run forward/backward first.
+function DAG:accGradParameters(input, gradOutput, scale)
+ assert(self.sorted, 'There has been a structure change before a DAG:accGradParameters.')
+
+ -- Stamp the provided gradOutput onto each output module's node record.
+ self:nestedApply(
+ function(nnm, go) self.node[nnm].gradOutput = go end,
+ self.outputModules, gradOutput
+ )
+
+ -- Stamp the provided input onto each input module's node record.
+ self:nestedApply(
+ function(nnm, i) self.node[nnm].input = i end,
+ self.inputModules, input
+ )
+
+ -- Walk every module and accumulate its parameter gradients, wrapping the
+ -- call so errors are re-raised with the module index k for diagnostics.
+ for k = 1, #self.modules do
+ local nnm = self.modules[k]
+ local node = self.node[nnm]
+ self:rethrowErrors(nnm, k, 'accGradParameters', node.input, node.gradOutput, scale)
+ end
+end
+
+-- Releases per-node cached state (inputs and gradients) and invalidates the
+-- topological sort, then delegates to the parent container's clearState so
+-- the wrapped modules clear their own buffers.
+-- Returns whatever parent.clearState returns (conventionally self).
+function DAG:clearState()
+ -- Force a re-sort before the next forward/backward pass.
+ self.sorted = nil
+ for _, node in pairs(self.node) do
+ node.input = nil
+ node.gradInputSucc = nil
+ node.gradOutput = nil
+ end
+ return parent.clearState(self)
+end