Welcome to mirror list, hosted at ThFree Co, Russian Federation.

github.com/soumith/cudnn.torch.git - Unnamed repository; edit this file 'description' to name the repository.
summaryrefslogtreecommitdiff
path: root/test
diff options
context:
space:
mode:
authorsoumith <soumith@fb.com>2014-11-18 05:59:09 +0300
committersoumith <soumith@fb.com>2014-11-18 05:59:09 +0300
commit56b6d5426509b4d0bef7d2648fad72ab4c122c84 (patch)
treedb8a21f36fe03093c0b383a5cf6523ab4e97de13 /test
parent7b21377ffe067a86917715f522eb544239c2ec6c (diff)
adding non-batch mode
Diffstat (limited to 'test')
-rw-r--r--test/test.lua277
1 files changed, 270 insertions, 7 deletions
diff --git a/test/test.lua b/test/test.lua
index 6965219..3750418 100644
--- a/test/test.lua
+++ b/test/test.lua
@@ -9,7 +9,7 @@ local nloop = 1
local times = {}
-function cudnntest.SpatialConvolution_forward()
+function cudnntest.SpatialConvolution_forward_batch()
local bs = math.random(1,32)
local from = math.random(1,32)
local to = math.random(1,64)
@@ -36,7 +36,7 @@ function cudnntest.SpatialConvolution_forward()
end
-function cudnntest.SpatialConvolution_backward()
+function cudnntest.SpatialConvolution_backward_batch()
local bs = math.random(1,32)
local from = math.random(1,32)
local to = math.random(1,64)
@@ -84,7 +84,83 @@ function cudnntest.SpatialConvolution_backward()
mytester:assertlt(berror:abs():max(), precision_backward, 'error on bias (backward) ')
end
-function cudnntest.SpatialMaxPooling()
+-- Forward test for cudnn.SpatialConvolution in non-batch (3D input) mode.
+-- Builds a random geometry, runs nn.SpatialConvolutionMM as the reference
+-- with copied weight/bias, and checks the cudnn output elementwise.
+function cudnntest.SpatialConvolution_forward_single()
+ local from = math.random(1,32)
+ local to = math.random(1,64)
+ local ki = math.random(3,15)
+ local kj = math.random(3,15)
+ local si = 1 -- not supported by CPU version yet
+ local sj = si
+ local outi = math.random(1,64)
+ local outj = math.random(1,64)
+ -- derive the input size that yields exactly outi x outj output
+ local ini = (outi-1)*si+ki
+ local inj = (outj-1)*sj+kj
+
+ local input = torch.randn(from,inj,ini):cuda()
+ local sconv = nn.SpatialConvolutionMM(from,to,ki,kj,si,sj):cuda()
+ local groundtruth = sconv:forward(input)
+ cutorch.synchronize()
+ local gconv = cudnn.SpatialConvolution(from,to,ki,kj,si,sj):cuda()
+ -- copy parameters so both modules compute the same function
+ gconv.weight:copy(sconv.weight)
+ gconv.bias:copy(sconv.bias)
+ local rescuda = gconv:forward(input)
+ cutorch.synchronize()
+ -- non-batch mode must preserve the 3D (plane x height x width) layout
+ mytester:asserteq(rescuda:dim(), 3, 'error in dimension')
+ local error = rescuda:float() - groundtruth:float()
+ mytester:assertlt(error:abs():max(), precision_forward, 'error on state (forward) ')
+end
+
+
+-- Backward test for cudnn.SpatialConvolution in non-batch (3D input) mode.
+-- Checks gradInput, gradWeight and gradBias against nn.SpatialConvolutionMM,
+-- and additionally round-trips the cudnn module through torch.save/torch.load
+-- to verify that serialization preserves behavior.
+function cudnntest.SpatialConvolution_backward_single()
+ local from = math.random(1,32)
+ local to = math.random(1,64)
+ local ki = math.random(3,15)
+ local kj = math.random(3,15)
+ local si = 1 -- not supported by CPU version yet
+ local sj = si
+ local outi = math.random(1,64)
+ local outj = math.random(1,64)
+ -- derive the input size that yields exactly outi x outj output
+ local ini = (outi-1)*si+ki
+ local inj = (outj-1)*sj+kj
+
+ local input = torch.randn(from,inj,ini):cuda()
+ local gradOutput = torch.randn(to,outj,outi):cuda()
+ local sconv = nn.SpatialConvolutionMM(from,to,ki,kj,si,sj):cuda()
+ -- forward pass is required before backward can be called
+ sconv:forward(input)
+ sconv:zeroGradParameters()
+ local groundgrad = sconv:backward(input, gradOutput)
+ cutorch.synchronize()
+ local groundweight = sconv.gradWeight
+ local groundbias = sconv.gradBias
+
+ local gconv = cudnn.SpatialConvolution(from,to,ki,kj,si,sj):cuda()
+ gconv.weight:copy(sconv.weight)
+ gconv.bias:copy(sconv.bias)
+ gconv:forward(input)
+
+ -- serialize and deserialize
+ torch.save('modelTemp.t7', gconv)
+ gconv = torch.load('modelTemp.t7')
+
+ -- re-run forward on the deserialized module before taking gradients
+ gconv:forward(input)
+ gconv:zeroGradParameters()
+ local rescuda = gconv:backward(input, gradOutput)
+ cutorch.synchronize()
+ mytester:asserteq(rescuda:dim(), 3, 'error in dimension')
+ local weightcuda = gconv.gradWeight
+ local biascuda = gconv.gradBias
+
+ local error = rescuda:float() - groundgrad:float()
+ local werror = weightcuda:float() - groundweight:float()
+ local berror = biascuda:float() - groundbias:float()
+
+ mytester:assertlt(error:abs():max(), precision_backward, 'error on state (backward) ')
+ mytester:assertlt(werror:abs():max(), precision_backward, 'error on weight (backward) ')
+ mytester:assertlt(berror:abs():max(), precision_backward, 'error on bias (backward) ')
+end
+
+
+function cudnntest.SpatialMaxPooling_batch()
local bs = math.random(1,32)
local from = math.random(1,32)
local ki = math.random(2,4)
@@ -110,13 +186,83 @@ function cudnntest.SpatialMaxPooling()
local rescuda = gconv:forward(input)
local resgrad = gconv:backward(input, gradOutput)
cutorch.synchronize()
+ mytester:asserteq(rescuda:dim(), 4, 'error in dimension')
+ mytester:asserteq(resgrad:dim(), 4, 'error in dimension')
+ local error = rescuda:float() - groundtruth:float()
+ mytester:assertlt(error:abs():max(), precision_forward, 'error on state (forward) ')
+ error = resgrad:float() - groundgrad:float()
+ mytester:assertlt(error:abs():max(), precision_backward, 'error on state (backward) ')
+end
+
+-- Forward/backward test for cudnn.SpatialMaxPooling in non-batch (3D) mode,
+-- compared against nn.SpatialMaxPooling with non-overlapping windows
+-- (stride == kernel size). Includes a save/load round-trip of the module.
+function cudnntest.SpatialMaxPooling_single()
+ local from = math.random(1,32)
+ local ki = math.random(2,4)
+ local kj = math.random(2,4)
+ local si = ki
+ local sj = kj
+ local outi = math.random(1,64)
+ local outj = math.random(1,64)
+ -- derive the input size that pools down to exactly outi x outj
+ local ini = (outi-1)*si+ki
+ local inj = (outj-1)*sj+kj
+ local input = torch.randn(from,inj,ini):cuda()
+ local gradOutput = torch.randn(from,outj,outi):cuda()
+
+ local sconv = nn.SpatialMaxPooling(ki,kj,si,sj):cuda()
+ local groundtruth = sconv:forward(input)
+ local groundgrad = sconv:backward(input, gradOutput)
+ cutorch.synchronize()
+ local gconv = cudnn.SpatialMaxPooling(ki,kj,si,sj):cuda()
+ local rescuda = gconv:forward(input)
+ -- serialize and deserialize
+ torch.save('modelTemp.t7', gconv)
+ gconv = torch.load('modelTemp.t7')
+ local rescuda = gconv:forward(input)
+ local resgrad = gconv:backward(input, gradOutput)
+ cutorch.synchronize()
+ mytester:asserteq(rescuda:dim(), 3, 'error in dimension')
+ mytester:asserteq(resgrad:dim(), 3, 'error in dimension')
+ local error = rescuda:float() - groundtruth:float()
+ mytester:assertlt(error:abs():max(), precision_forward, 'error on state (forward) ')
+ error = resgrad:float() - groundgrad:float()
+ mytester:assertlt(error:abs():max(), precision_backward, 'error on state (backward) ')
+end
+
+-- Forward/backward test for cudnn.ReLU in non-batch (3D input) mode,
+-- compared elementwise against nn.ReLU. Includes a save/load round-trip.
+-- NOTE(review): ki/kj/si/sj are computed but unused here — elementwise
+-- modules have no kernel geometry; likely copied from the pooling test.
+function cudnntest.ReLU_single()
+ local from = math.random(1,32)
+ local ki = math.random(2,4)
+ local kj = math.random(2,4)
+ local si = ki
+ local sj = kj
+ local outi = math.random(1,64)
+ local outj = math.random(1,64)
+ -- elementwise op: output size equals input size
+ local ini = outi
+ local inj = outj
+ local input = torch.randn(from,inj,ini):cuda()
+ local gradOutput = torch.randn(from,outj,outi):cuda()
+
+ local sconv = nn.ReLU():cuda()
+ local groundtruth = sconv:forward(input)
+ local groundgrad = sconv:backward(input, gradOutput)
+ cutorch.synchronize()
+ local gconv = cudnn.ReLU():cuda()
+ local rescuda = gconv:forward(input)
+
+ -- serialize and deserialize
+ torch.save('modelTemp.t7', gconv)
+ gconv = torch.load('modelTemp.t7')
+
+ local rescuda = gconv:forward(input)
+ local resgrad = gconv:backward(input, gradOutput)
+ cutorch.synchronize()
+ mytester:asserteq(rescuda:dim(), 3, 'error in dimension')
+ mytester:asserteq(resgrad:dim(), 3, 'error in dimension')
local error = rescuda:float() - groundtruth:float()
mytester:assertlt(error:abs():max(), precision_forward, 'error on state (forward) ')
error = resgrad:float() - groundgrad:float()
mytester:assertlt(error:abs():max(), precision_backward, 'error on state (backward) ')
end
-function cudnntest.ReLU()
+function cudnntest.ReLU_batch()
local bs = math.random(1,32)
local from = math.random(1,32)
local ki = math.random(2,4)
@@ -144,13 +290,50 @@ function cudnntest.ReLU()
local rescuda = gconv:forward(input)
local resgrad = gconv:backward(input, gradOutput)
cutorch.synchronize()
+ mytester:asserteq(rescuda:dim(), 4, 'error in dimension')
+ mytester:asserteq(resgrad:dim(), 4, 'error in dimension')
local error = rescuda:float() - groundtruth:float()
mytester:assertlt(error:abs():max(), precision_forward, 'error on state (forward) ')
error = resgrad:float() - groundgrad:float()
mytester:assertlt(error:abs():max(), precision_backward, 'error on state (backward) ')
end
-function cudnntest.Tanh()
+-- Forward/backward test for cudnn.Tanh in non-batch (3D input) mode,
+-- compared elementwise against nn.Tanh. Includes a save/load round-trip.
+-- NOTE(review): ki/kj/si/sj are computed but unused — elementwise modules
+-- have no kernel geometry; appears copied from the pooling test.
+function cudnntest.Tanh_single()
+ local from = math.random(1,32)
+ local ki = math.random(2,4)
+ local kj = math.random(2,4)
+ local si = ki
+ local sj = kj
+ local outi = math.random(1,64)
+ local outj = math.random(1,64)
+ -- elementwise op: output size equals input size
+ local ini = outi
+ local inj = outj
+ local input = torch.randn(from,inj,ini):cuda()
+ local gradOutput = torch.randn(from,outj,outi):cuda()
+
+ local sconv = nn.Tanh():cuda()
+ local groundtruth = sconv:forward(input)
+ local groundgrad = sconv:backward(input, gradOutput)
+ cutorch.synchronize()
+ local gconv = cudnn.Tanh():cuda()
+ local rescuda = gconv:forward(input)
+
+ -- serialize and deserialize
+ torch.save('modelTemp.t7', gconv)
+ gconv = torch.load('modelTemp.t7')
+
+ local rescuda = gconv:forward(input)
+ local resgrad = gconv:backward(input, gradOutput)
+ cutorch.synchronize()
+ mytester:asserteq(rescuda:dim(), 3, 'error in dimension')
+ mytester:asserteq(resgrad:dim(), 3, 'error in dimension')
+ local error = rescuda:float() - groundtruth:float()
+ mytester:assertlt(error:abs():max(), precision_forward, 'error on state (forward) ')
+ error = resgrad:float() - groundgrad:float()
+ mytester:assertlt(error:abs():max(), precision_backward, 'error on state (backward) ')
+end
+
+function cudnntest.Tanh_batch()
local bs = math.random(1,32)
local from = math.random(1,32)
local ki = math.random(2,4)
@@ -178,13 +361,50 @@ function cudnntest.Tanh()
local rescuda = gconv:forward(input)
local resgrad = gconv:backward(input, gradOutput)
cutorch.synchronize()
+ mytester:asserteq(rescuda:dim(), 4, 'error in dimension')
+ mytester:asserteq(resgrad:dim(), 4, 'error in dimension')
+ local error = rescuda:float() - groundtruth:float()
+ mytester:assertlt(error:abs():max(), precision_forward, 'error on state (forward) ')
+ error = resgrad:float() - groundgrad:float()
+ mytester:assertlt(error:abs():max(), precision_backward, 'error on state (backward) ')
+end
+
+-- Forward/backward test for cudnn.Sigmoid in non-batch (3D input) mode,
+-- compared elementwise against nn.Sigmoid. Includes a save/load round-trip.
+-- NOTE(review): ki/kj/si/sj are computed but unused — elementwise modules
+-- have no kernel geometry; appears copied from the pooling test.
+function cudnntest.Sigmoid_single()
+ local from = math.random(1,32)
+ local ki = math.random(2,4)
+ local kj = math.random(2,4)
+ local si = ki
+ local sj = kj
+ local outi = math.random(1,64)
+ local outj = math.random(1,64)
+ -- elementwise op: output size equals input size
+ local ini = outi
+ local inj = outj
+ local input = torch.randn(from,inj,ini):cuda()
+ local gradOutput = torch.randn(from,outj,outi):cuda()
+
+ local sconv = nn.Sigmoid():cuda()
+ local groundtruth = sconv:forward(input)
+ local groundgrad = sconv:backward(input, gradOutput)
+ cutorch.synchronize()
+ local gconv = cudnn.Sigmoid():cuda()
+ local rescuda = gconv:forward(input)
+
+ -- serialize and deserialize
+ torch.save('modelTemp.t7', gconv)
+ gconv = torch.load('modelTemp.t7')
+
+ local rescuda = gconv:forward(input)
+ local resgrad = gconv:backward(input, gradOutput)
+ cutorch.synchronize()
+ mytester:asserteq(rescuda:dim(), 3, 'error in dimension')
+ mytester:asserteq(resgrad:dim(), 3, 'error in dimension')
local error = rescuda:float() - groundtruth:float()
mytester:assertlt(error:abs():max(), precision_forward, 'error on state (forward) ')
error = resgrad:float() - groundgrad:float()
mytester:assertlt(error:abs():max(), precision_backward, 'error on state (backward) ')
end
-function cudnntest.Sigmoid()
+function cudnntest.Sigmoid_batch()
local bs = math.random(1,32)
local from = math.random(1,32)
local ki = math.random(2,4)
@@ -212,13 +432,53 @@ function cudnntest.Sigmoid()
local rescuda = gconv:forward(input)
local resgrad = gconv:backward(input, gradOutput)
cutorch.synchronize()
+ mytester:asserteq(rescuda:dim(), 4, 'error in dimension')
+ mytester:asserteq(resgrad:dim(), 4, 'error in dimension')
local error = rescuda:float() - groundtruth:float()
mytester:assertlt(error:abs():max(), precision_forward, 'error on state (forward) ')
error = resgrad:float() - groundgrad:float()
mytester:assertlt(error:abs():max(), precision_backward, 'error on state (backward) ')
end
-function cudnntest.SoftMax()
+-- Forward/backward test for cudnn.SoftMax in non-batch (3D input) mode.
+-- nn.SoftMax normalizes over a vector, so the CPU reference operates on a
+-- flattened view of the data, and the 3D cudnn results are flattened before
+-- comparison. Includes a save/load round-trip of the module.
+-- NOTE(review): ki/kj/si/sj are computed but unused — copied from the
+-- pooling test; softmax has no kernel geometry.
+function cudnntest.SoftMax_single()
+ local from = math.random(1,32)
+ local ki = math.random(2,4)
+ local kj = math.random(2,4)
+ local si = ki
+ local sj = kj
+ local outi = math.random(1,64)
+ local outj = math.random(1,64)
+ local ini = outi
+ local inj = outj
+ local input = torch.randn(from,inj,ini):cuda()
+ local gradOutput = torch.randn(from,outj,outi):cuda()
+
+ local sconv = nn.SoftMax():cuda()
+ -- BUGFIX: use the same flattened view for forward AND backward. The
+ -- previous code fed the flattened view to forward but the 3D tensors to
+ -- backward, and then subtracted the 1D groundtruth from the 3D cudnn
+ -- output — a dimension mismatch.
+ local groundtruth = sconv:forward(input:view(-1))
+ local groundgrad = sconv:backward(input:view(-1), gradOutput:view(-1))
+ cutorch.synchronize()
+ local gconv = cudnn.SoftMax():cuda()
+ local rescuda = gconv:forward(input)
+
+ -- serialize and deserialize
+ torch.save('modelTemp.t7', gconv)
+ gconv = torch.load('modelTemp.t7')
+
+ local rescuda = gconv:forward(input)
+ local resgrad = gconv:backward(input, gradOutput)
+ cutorch.synchronize()
+ mytester:asserteq(rescuda:dim(), 3, 'error in dimension')
+ mytester:asserteq(resgrad:dim(), 3, 'error in dimension')
+
+ -- flatten the cudnn results so shapes match the 1D reference tensors
+ local error = rescuda:float():view(-1) - groundtruth:float()
+ mytester:assertlt(error:abs():max(),
+    precision_forward, 'error on state (forward) ')
+ error = resgrad:float():view(-1) - groundgrad:float()
+ mytester:assertlt(error:abs():max(),
+    precision_backward, 'error on state (backward) ')
+end
+
+function cudnntest.SoftMax_batch()
local bs = math.random(1,32)
local from = math.random(1,32)
local ki = math.random(2,4)
@@ -246,6 +506,9 @@ function cudnntest.SoftMax()
local rescuda = gconv:forward(input)
local resgrad = gconv:backward(input, gradOutput)
cutorch.synchronize()
+ mytester:asserteq(rescuda:dim(), 4, 'error in dimension')
+ mytester:asserteq(resgrad:dim(), 4, 'error in dimension')
+
local error = rescuda:float() - groundtruth:float()
mytester:assertlt(error:abs():max(),
precision_forward, 'error on state (forward) ')