
github.com/soumith/cudnn.torch.git
author    soumith <soumith@fb.com>  2014-12-25 03:11:30 +0300
committer soumith <soumith@fb.com>  2014-12-25 03:11:30 +0300
commit    8616db035c1aeb376e878c48c910fa783d3b0409
tree      694a82d256b9461d4167f36f6b765fc318722170 /test
parent    2498d19b44663196db752bc7c9cd436f43ccd3d3
repro bug
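
The hooks added below exploit the IEEE 754 rule that NaN is the only value
that compares unequal to itself, so "errmax ~= errmax" is true exactly when
the error reduction produced a NaN. When that happens, the patch bundles the
tensors involved into a table and serializes it with torch.save so the
failing inputs can be reloaded and replayed offline. A minimal sketch of the
idiom, assuming Torch7 is installed (the isnan helper and the badExample.t7
filename are illustrative, not part of the patch):

    require 'torch'

    -- Self-inequality is a portable NaN test in plain Lua:
    -- NaN is the only value for which x == x is false.
    local function isnan(x)
       return x ~= x
    end

    local errmax = 0/0                    -- stand-in for error:abs():max()
    if isnan(errmax) then
       -- Bundle whatever is needed to replay the failure, then dump it
       -- to a binary .t7 archive for later inspection.
       local state = {errmax = errmax}
       torch.save('badExample.t7', state) -- illustrative filename
    end

    -- To replay a saved failure later:
    --   local state = torch.load('badExample.t7')
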
Diffstat (limited to 'test')
-rw-r--r-- test/test.lua | 75
1 file changed, 66 insertions(+), 9 deletions(-)
diff --git a/test/test.lua b/test/test.lua
index 03f7c2f..49a941b 100644
--- a/test/test.lua
+++ b/test/test.lua
@@ -411,10 +411,34 @@ function cudnntest.Tanh_single()
mytester:asserteq(rescuda:dim(), 3, 'error in dimension')
mytester:asserteq(resgrad:dim(), 3, 'error in dimension')
local error = rescuda:float() - groundtruth:float()
- mytester:assertlt(error:abs():max(), precision_forward,
+ local errmax = error:abs():max()
+ if (errmax ~= errmax) then -- NaN is the only value not equal to itself
+ local state = {}
+ state.input = input
+ state.gradOutput = gradOutput
+ state.rescuda = rescuda
+ state.resgrad = resgrad
+ state.groundtruth = groundtruth
+ state.groundgrad = groundgrad
+ print(#input)
+ torch.save('badTanh.t7', state)
+ end
+ mytester:assertlt(errmax, precision_forward,
'error on state (forward) ')
error = resgrad:float() - groundgrad:float()
- mytester:assertlt(error:abs():max(), precision_backward,
+ errmax = error:abs():max()
+ if (errmax ~= errmax) then
+ local state = {}
+ state.input = input
+ state.gradOutput = gradOutput
+ state.rescuda = rescuda
+ state.resgrad = resgrad
+ state.groundtruth = groundtruth
+ state.groundgrad = groundgrad
+ print(#input)
+ torch.save('badTanh.t7', state)
+ end
+ mytester:assertlt(errmax, precision_backward,
'error on state (backward) ')
end
@@ -478,10 +502,27 @@ function cudnntest.Sigmoid_single()
mytester:asserteq(rescuda:dim(), 3, 'error in dimension')
mytester:asserteq(resgrad:dim(), 3, 'error in dimension')
local error = rescuda:float() - groundtruth:float()
- mytester:assertlt(error:abs():max(), precision_forward,
+ local errmax = error:abs():max()
+ if (errmax ~= errmax) then
+ print(#input)
+ end
+ mytester:assertlt(errmax, precision_forward,
'error on state (forward) ')
error = resgrad:float() - groundgrad:float()
- mytester:assertlt(error:abs():max(), precision_backward,
+ errmax = error:abs():max()
+ if (errmax ~= errmax) then
+ local state = {}
+ state.input = input
+ state.gradOutput = gradOutput
+ state.rescuda = rescuda
+ state.resgrad = resgrad
+ state.groundtruth = groundtruth
+ state.groundgrad = groundgrad
+ print(#input)
+ torch.save('badSigmoid.t7', state)
+ -- state saved to badSigmoid.t7 for offline inspection
+ end
+ mytester:assertlt(errmax, precision_backward,
'error on state (backward) ')
end
@@ -544,13 +585,29 @@ function cudnntest.SoftMax_single()
cutorch.synchronize()
mytester:asserteq(rescuda:dim(), 3, 'error in dimension')
mytester:asserteq(resgrad:dim(), 3, 'error in dimension')
-
local error = rescuda:float() - groundtruth:float()
- mytester:assertlt(error:abs():max(),
- precision_forward, 'error on state (forward) ')
+ local errmax = error:abs():max()
+ if (errmax ~= errmax) then
+ local state = {}
+ state.input = input
+ state.gradOutput = gradOutput
+ state.rescuda = rescuda
+ state.resgrad = resgrad
+ state.groundtruth = groundtruth
+ state.groundgrad = groundgrad
+ print(#input)
+ torch.save('badSoftMax.t7', state)
+ -- state saved to badSoftMax.t7 for offline inspection
+ end
+ mytester:assertlt(errmax, precision_forward,
+ 'error on state (forward) ')
error = resgrad:float() - groundgrad:float()
- mytester:assertlt(error:abs():max(),
- precision_backward, 'error on state (backward) ')
+ errmax = error:abs():max()
+ if (errmax ~= errmax) then
+ print(#input)
+ end
+ mytester:assertlt(errmax, precision_backward,
+ 'error on state (backward) ')
end
function cudnntest.SoftMax_batch()