
github.com/torch/nn.git
author    Adam Paszke <adam.paszke@gmail.com>  2016-03-02 23:56:26 +0300
committer Adam Paszke <adam.paszke@gmail.com>  2016-03-05 15:56:03 +0300
commit    b1cf092d84bb6bfdbb6442d13cc0900e3aea7109 (patch)
tree      2a27da19f92bd6262d01abc3fd816c12f39fd7b5 /Concat.lua
parent    3a2a1b42e6e6c61addbf82f1efaa7b35c2a3144f (diff)
Improve error handling

When an error occurs in a module, all containers up to the topmost one are now reported. Also removed zeroGradParameters from ConcatTable, because it was no different from its parent's implementation.
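The diff below routes every call into a child module through a container helper (rethrowErrors) instead of calling the child directly. The helper itself is not part of this diff; the following is a minimal sketch of the idea, assuming it wraps the child call in a protected call and prefixes the error with the container's type and the failing child's index, so nested containers accumulate a trace up to the topmost one. The names and message format here are illustrative, not the actual torch/nn implementation.

-- Sketch only: illustrates the pattern the diff relies on; the real
-- nn.Container:rethrowErrors may differ in details and message format.
function Container:rethrowErrors(module, moduleIndex, funcName, ...)
   local args = {...}
   local ok, result = xpcall(
      function() return module[funcName](module, unpack(args)) end,
      function(err)
         -- Prefix the error with this container's type and the index of the
         -- failing child, so each enclosing container adds its own line.
         return string.format('In %d module of %s:\n%s',
                              moduleIndex, torch.type(self), err)
      end)
   if not ok then
      error(result, 0) -- re-raise the augmented message without extra position info
   end
   return result
end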
Diffstat (limited to 'Concat.lua')
-rw-r--r--  Concat.lua  10
1 file changed, 5 insertions(+), 5 deletions(-)
diff --git a/Concat.lua b/Concat.lua
index 402fa38..0b64398 100644
--- a/Concat.lua
+++ b/Concat.lua
@@ -9,7 +9,7 @@ end
function Concat:updateOutput(input)
local outs = {}
for i=1,#self.modules do
- local currentOutput = self.modules[i]:updateOutput(input)
+ local currentOutput = self:rethrowErrors(self.modules[i], i, 'updateOutput', input)
outs[i] = currentOutput
if i == 1 then
self.size:resize(currentOutput:dim()):copy(currentOutput:size())
@@ -34,7 +34,7 @@ function Concat:updateGradInput(input, gradOutput)
local offset = 1
for i,module in ipairs(self.modules) do
local currentOutput = module.output
- local currentGradInput = module:updateGradInput(input, gradOutput:narrow(self.dimension, offset, currentOutput:size(self.dimension)))
+ local currentGradInput = self:rethrowErrors(module, i, 'updateGradInput', input, gradOutput:narrow(self.dimension, offset, currentOutput:size(self.dimension)))
if currentGradInput then -- if the module does not produce a gradInput (for example first layer), then ignore it and move on.
if i==1 then
@@ -53,7 +53,7 @@ function Concat:accGradParameters(input, gradOutput, scale)
local offset = 1
for i,module in ipairs(self.modules) do
local currentOutput = module.output
- module:accGradParameters(
+ self:rethrowErrors(module, i, 'accGradParameters',
input,
gradOutput:narrow(self.dimension, offset, currentOutput:size(self.dimension)),
scale)
@@ -67,7 +67,7 @@ function Concat:backward(input, gradOutput, scale)
local offset = 1
for i,module in ipairs(self.modules) do
local currentOutput = module.output
- local currentGradInput = module:backward(input, gradOutput:narrow(self.dimension, offset, currentOutput:size(self.dimension)), scale)
+ local currentGradInput = self:rethrowErrors(module, i, 'backward', input, gradOutput:narrow(self.dimension, offset, currentOutput:size(self.dimension)), scale)
if currentGradInput then -- if the module does not produce a gradInput (for example first layer), then ignore it and move on.
if i==1 then
self.gradInput:copy(currentGradInput)
@@ -84,7 +84,7 @@ function Concat:accUpdateGradParameters(input, gradOutput, lr)
local offset = 1
for i,module in ipairs(self.modules) do
local currentOutput = module.output
- module:accUpdateGradParameters(
+ self:rethrowErrors(module, i, 'accUpdateGradParameters',
input,
gradOutput:narrow(self.dimension, offset, currentOutput:size(self.dimension)),
lr)
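
The commit message also mentions dropping zeroGradParameters from ConcatTable because it merely duplicated its parent's behaviour. For context, a generic container-level implementation of that method looks roughly like the following; this is a sketch based on the commit message, not the exact torch/nn source.

-- Sketch of a generic container-level zeroGradParameters, the behaviour the
-- removed ConcatTable override duplicated (illustrative, not verbatim torch/nn code).
function Container:zeroGradParameters()
   for i = 1, #self.modules do
      self.modules[i]:zeroGradParameters()
   end
end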