From 8616db035c1aeb376e878c48c910fa783d3b0409 Mon Sep 17 00:00:00 2001
From: soumith
Date: Wed, 24 Dec 2014 16:11:30 -0800
Subject: repro bug

---
 test/test.lua | 75 ++++++++++++++++++++++++++++++++++++++++++++++++++++-------
 1 file changed, 66 insertions(+), 9 deletions(-)

diff --git a/test/test.lua b/test/test.lua
index 03f7c2f..49a941b 100644
--- a/test/test.lua
+++ b/test/test.lua
@@ -411,10 +411,34 @@ function cudnntest.Tanh_single()
    mytester:asserteq(rescuda:dim(), 3, 'error in dimension')
    mytester:asserteq(resgrad:dim(), 3, 'error in dimension')
    local error = rescuda:float() - groundtruth:float()
-   mytester:assertlt(error:abs():max(), precision_forward,
+   local errmax = error:abs():max()
+   if (errmax ~= errmax) then
+      local state = {}
+      state.input = input
+      state.gradOutput = gradOutput
+      state.rescuda = rescuda
+      state.resgrad = resgrad
+      state.groundtruth = groundtruth
+      state.groundgrad = groundgrad
+      print(#input)
+      torch.save('badTanh.t7', state)
+   end
+   mytester:assertlt(errmax, precision_forward,
                      'error on state (forward) ')
    error = resgrad:float() - groundgrad:float()
-   mytester:assertlt(error:abs():max(), precision_backward,
+   errmax = error:abs():max()
+   if (errmax ~= errmax) then
+      local state = {}
+      state.input = input
+      state.gradOutput = gradOutput
+      state.rescuda = rescuda
+      state.resgrad = resgrad
+      state.groundtruth = groundtruth
+      state.groundgrad = groundgrad
+      print(#input)
+      torch.save('badTanh.t7', state)
+   end
+   mytester:assertlt(errmax, precision_backward,
                      'error on state (backward) ')
 end
 
@@ -478,10 +502,27 @@ function cudnntest.Sigmoid_single()
    mytester:asserteq(rescuda:dim(), 3, 'error in dimension')
    mytester:asserteq(resgrad:dim(), 3, 'error in dimension')
    local error = rescuda:float() - groundtruth:float()
-   mytester:assertlt(error:abs():max(), precision_forward,
+   local errmax = error:abs():max()
+   if (errmax ~= errmax) then
+      print(#input)
+   end
+   mytester:assertlt(errmax, precision_forward,
                      'error on state (forward) ')
    error = resgrad:float() - groundgrad:float()
-   mytester:assertlt(error:abs():max(), precision_backward,
+   errmax = error:abs():max()
+   if (errmax ~= errmax) then
+      local state = {}
+      state.input = input
+      state.gradOutput = gradOutput
+      state.rescuda = rescuda
+      state.resgrad = resgrad
+      state.groundtruth = groundtruth
+      state.groundgrad = groundgrad
+      print(#input)
+      torch.save('badSoftMax.t7', state)
+      print(#input)
+   end
+   mytester:assertlt(errmax, precision_backward,
                      'error on state (backward) ')
 end
 
@@ -544,13 +585,29 @@ function cudnntest.SoftMax_single()
    cutorch.synchronize()
    mytester:asserteq(rescuda:dim(), 3, 'error in dimension')
    mytester:asserteq(resgrad:dim(), 3, 'error in dimension')
-   local error = rescuda:float() - groundtruth:float()
-   mytester:assertlt(error:abs():max(),
-                     precision_forward, 'error on state (forward) ')
+   local errmax = error:abs():max()
+   if (errmax ~= errmax) then
+      local state = {}
+      state.input = input
+      state.gradOutput = gradOutput
+      state.rescuda = rescuda
+      state.resgrad = resgrad
+      state.groundtruth = groundtruth
+      state.groundgrad = groundgrad
+      print(#input)
+      torch.save('badSoftMax.t7', state)
+      print(#input)
+   end
+   mytester:assertlt(errmax, precision_forward,
+                     'error on state (forward) ')
    error = resgrad:float() - groundgrad:float()
-   mytester:assertlt(error:abs():max(),
-                     precision_backward, 'error on state (backward) ')
+   errmax = error:abs():max()
+   if (errmax ~= errmax) then
+      print(#input)
+   end
+   mytester:assertlt(errmax, precision_backward,
+                     'error on state (backward) ')
 end
 
 
 function cudnntest.SoftMax_batch()
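For reference, the instrumentation this patch pastes into each test is a NaN trap plus a state dump: under IEEE-754, NaN is the only value that compares unequal to itself, so errmax ~= errmax is true exactly when the accumulated error is NaN, at which point torch.save serializes every tensor needed to replay the failure offline. Below is a minimal sketch of the same pattern factored into one helper; the name dumpStateOnNaN and its signature are hypothetical (not part of this patch), and it assumes only the tensors already in scope in these tests plus the stock torch.save API.

    -- Hypothetical helper (not part of this patch): traps NaN via IEEE-754
    -- self-inequality and serializes the test state for offline repro.
    local function dumpStateOnNaN(errmax, filename, input, gradOutput,
                                  rescuda, resgrad, groundtruth, groundgrad)
       if errmax ~= errmax then           -- true only when errmax is NaN
          local state = {
             input       = input,
             gradOutput  = gradOutput,
             rescuda     = rescuda,
             resgrad     = resgrad,
             groundtruth = groundtruth,
             groundgrad  = groundgrad,
          }
          print(#input)                   -- log the input size for quick triage
          torch.save(filename, state)     -- replayable dump, e.g. 'badTanh.t7'
       end
       return errmax
    end

    -- Usage, mirroring the forward check in Tanh_single:
    local errmax = dumpStateOnNaN(
       (rescuda:float() - groundtruth:float()):abs():max(),
       'badTanh.t7', input, gradOutput,
       rescuda, resgrad, groundtruth, groundgrad)
    mytester:assertlt(errmax, precision_forward, 'error on state (forward) ')

A dump written this way can later be reloaded with torch.load and fed back through the module under test, so the failing inputs do not have to be re-randomized.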