+ -- Finite-difference gradient check: for every scalar parameter, compare
+ -- the analytical gradient against a central-difference estimate of the
+ -- loss derivative, and print the relative discrepancy.
+ for i = 1, params:size(1) do
+    local x = params[i]
+
+    -- Loss with the parameter nudged down by epsilon.
+    params[i] = x - epsilon
+    local output0 = model:forward(input)
+    local loss0 = criterion:forward(output0, target)
+
+    -- Loss with the parameter nudged up by epsilon.
+    params[i] = x + epsilon
+    local output1 = model:forward(input)
+    local loss1 = criterion:forward(output1, target)
+
+    -- Restore the parameter before checking the next one.
+    params[i] = x
+
+    local ana = analyticalGradParam[i]
+    local num = (loss1 - loss0) / (2 * epsilon)
+    local err
+
+    if num == ana then
+       err = 0
+    else
+       -- Normalize by the larger magnitude so the measure is symmetric in
+       -- (ana, num) and never divides by zero: the branch above already
+       -- handled the case where both are equal (including both 0), and
+       -- dividing by |num| alone blew up whenever the numerical gradient
+       -- was 0 but the analytical one was not.  Scalars are plain Lua
+       -- numbers, so stdlib math.abs/math.max suffice here.
+       err = math.abs(num - ana) / math.max(math.abs(num), math.abs(ana))
+    end
+
+    print(
+       'CHECK '
+          .. err
+          .. ' checkGrad ' .. i
+          .. ' analytical ' .. ana
+          .. ' numerical ' .. num
+    )
+ end
+
+end
+
+ -- Recursively print a tensor, or a (possibly nested) table of tensors,
+ -- labeling each table element with its key.
+ function printTensorTable(t)
+    if torch.type(t) ~= 'table' then
+       print(tostring(t))
+       return
+    end
+    for key, element in pairs(t) do
+       print('-- ELEMENT [' .. key .. '] --')
+       printTensorTable(element)
+    end
+ end
+
+-- +- Linear(10, 10) -> ReLU ---> d --+
+-- / / \
+-- / / \
+-- --> a --> b -----------> c --------------+ e -->
+-- \ /
+-- \ /
+-- +-- Mul(-1) --------+
+
+ -- Instantiate the DAG container and the named modules from the diagram
+ -- above.  These are deliberately global: the rest of the file (not shown
+ -- in this chunk) presumably wires them together and exercises the model.
+ model = nn.DAG()
+
+ a = nn.Linear(50, 10)  -- 50-dim input projected to 10
+ b = nn.ReLU()
+ c = nn.Linear(10, 15)
+ d = nn.CMulTable()  -- element-wise product over a table of inputs
+ e = nn.CAddTable()  -- element-wise sum over a table of inputs