Welcome to mirror list, hosted at ThFree Co, Russian Federation.

github.com/torch/nn.git - Unnamed repository; edit this file 'description' to name the repository.
summaryrefslogtreecommitdiff
diff options
context:
space:
mode:
authorSoumith Chintala <soumith@gmail.com>2015-05-18 18:32:27 +0300
committerSoumith Chintala <soumith@gmail.com>2015-05-18 18:32:27 +0300
commit0908c98eadcc947b2836be6d8a42805f1d0fa9b6 (patch)
tree5c05b7f842701bcbc15961c1b9488acb8d5660a5 /test.lua
parent3de925cff92921aafa2bf4a93fcd5395c6322862 (diff)
parentfae191262caeede9c55c4f39df9e3077412e13a7 (diff)
Merge pull request #266 from nicholas-leonard/SpatialDivisiveNormalization
Batch Mode
Diffstat (limited to 'test.lua')
-rw-r--r--test.lua162
1 files changed, 158 insertions, 4 deletions
diff --git a/test.lua b/test.lua
index d82e470..e69d3c8 100644
--- a/test.lua
+++ b/test.lua
@@ -1153,7 +1153,7 @@ function nntest.SpatialSubtractiveNormalization_2dkernel()
local nbfeatures = math.random(3,5)
local kernel = torch.Tensor(kersize,kersize):fill(1)
local module = nn.SpatialSubtractiveNormalization(nbfeatures,kernel)
- local input = torch.rand(nbfeatures,inputSize,inputSize)
+ local input = torch.rand(nbfeatures,inputSize,inputSize/2)
local err = jac.testJacobian(module,input)
mytester:assertlt(err,precision, 'error on state ')
@@ -1161,6 +1161,30 @@ function nntest.SpatialSubtractiveNormalization_2dkernel()
local ferr,berr = jac.testIO(module,input)
mytester:asserteq(ferr, 0, torch.typename(module) .. ' - i/o forward err ')
mytester:asserteq(berr, 0, torch.typename(module) .. ' - i/o backward err ')
+
+ -- test batch mode
+ local output = module:forward(input):clone()
+ local gradOutput = output:clone():uniform(0,1)
+ local gradInput = module:backward(input, gradOutput):clone()
+ local batchSize = 4
+ local input2 = torch.rand(batchSize,nbfeatures,inputSize,inputSize/2)
+ input2[2]:copy(input)
+
+ local output2 = module:forward(input2)
+ local gradOutput2 = output2:clone():uniform(0,1)
+ gradOutput2[2]:copy(gradOutput)
+ local gradInput2 = module:backward(input2, gradOutput2)
+
+ mytester:assertTensorEq(output2[2], output, 0.000001, "SpatialSubtractiveNormalization 2d forward batch err")
+ mytester:assertTensorEq(gradOutput2[2], gradOutput, 0.000001, "SpatialSubtractiveNormalization 2d backward batch err")
+
+ local err = jac.testJacobian(module,input2)
+ mytester:assertlt(err,precision, 'error on state ')
+
+ local ferr,berr = jac.testIO(module,input2)
+ mytester:asserteq(ferr, 0, torch.typename(module) .. ' - i/o forward err ')
+ mytester:asserteq(berr, 0, torch.typename(module) .. ' - i/o backward err ')
+
end
function nntest.SpatialSubtractiveNormalization_1dkernel()
@@ -1169,7 +1193,7 @@ function nntest.SpatialSubtractiveNormalization_1dkernel()
local nbfeatures = math.random(3,5)
local kernel = torch.Tensor(kersize):fill(1)
local module = nn.SpatialSubtractiveNormalization(nbfeatures,kernel)
- local input = torch.rand(nbfeatures,inputSize,inputSize)
+ local input = torch.rand(nbfeatures,inputSize,inputSize/2)
local err = jac.testJacobian(module,input)
mytester:assertlt(err,precision, 'error on state ')
@@ -1177,6 +1201,29 @@ function nntest.SpatialSubtractiveNormalization_1dkernel()
local ferr,berr = jac.testIO(module,input)
mytester:asserteq(ferr, 0, torch.typename(module) .. ' - i/o forward err ')
mytester:asserteq(berr, 0, torch.typename(module) .. ' - i/o backward err ')
+
+ -- test batch mode
+ local output = module:forward(input):clone()
+ local gradOutput = output:clone():uniform(0,1)
+ local gradInput = module:backward(input, gradOutput):clone()
+ local batchSize = 4
+ local input2 = torch.rand(batchSize,nbfeatures,inputSize,inputSize/2)
+ input2[2]:copy(input)
+
+ local output2 = module:forward(input2)
+ local gradOutput2 = output2:clone():uniform(0,1)
+ gradOutput2[2]:copy(gradOutput)
+ local gradInput2 = module:backward(input2, gradOutput2)
+
+ mytester:assertTensorEq(output2[2], output, 0.000001, "SpatialSubtractiveNormalization 1d forward batch err")
+ mytester:assertTensorEq(gradOutput2[2], gradOutput, 0.000001, "SpatialSubtractiveNormalization 1d backward batch err")
+
+ local err = jac.testJacobian(module,input2)
+ mytester:assertlt(err,precision, 'error on state ')
+
+ local ferr,berr = jac.testIO(module,input2)
+ mytester:asserteq(ferr, 0, torch.typename(module) .. ' - i/o forward err ')
+ mytester:asserteq(berr, 0, torch.typename(module) .. ' - i/o backward err ')
end
function nntest.SpatialDivisiveNormalization_2dkernel()
@@ -1185,7 +1232,7 @@ function nntest.SpatialDivisiveNormalization_2dkernel()
local nbfeatures = math.random(3,5)
local kernel = torch.Tensor(kersize,kersize):fill(1)
local module = nn.SpatialDivisiveNormalization(nbfeatures,kernel)
- local input = torch.rand(nbfeatures,inputSize,inputSize)
+ local input = torch.rand(nbfeatures,inputSize,inputSize/2)
local err = jac.testJacobian(module,input)
mytester:assertlt(err,precision, 'error on state ')
@@ -1193,6 +1240,29 @@ function nntest.SpatialDivisiveNormalization_2dkernel()
local ferr,berr = jac.testIO(module,input)
mytester:asserteq(ferr, 0, torch.typename(module) .. ' - i/o forward err ')
mytester:asserteq(berr, 0, torch.typename(module) .. ' - i/o backward err ')
+
+ -- test batch mode
+ local output = module:forward(input):clone()
+ local gradOutput = output:clone():uniform(0,1)
+ local gradInput = module:backward(input, gradOutput):clone()
+ local batchSize = 4
+ local input2 = torch.rand(batchSize,nbfeatures,inputSize,inputSize/2)
+ input2[2]:copy(input)
+
+ local output2 = module:forward(input2)
+ local gradOutput2 = output2:clone():uniform(0,1)
+ gradOutput2[2]:copy(gradOutput)
+ local gradInput2 = module:backward(input2, gradOutput2)
+
+ mytester:assertTensorEq(output2[2], output, 0.000001, "SpatialDivisiveNormalization 2d forward batch err")
+ mytester:assertTensorEq(gradOutput2[2], gradOutput, 0.000001, "SpatialDivisiveNormalization 2d backward batch err")
+
+ local err = jac.testJacobian(module,input2)
+ mytester:assertlt(err,precision, 'error on state ')
+
+ local ferr,berr = jac.testIO(module,input2)
+ mytester:asserteq(ferr, 0, torch.typename(module) .. ' - i/o forward err ')
+ mytester:asserteq(berr, 0, torch.typename(module) .. ' - i/o backward err ')
end
function nntest.SpatialDivisiveNormalization_1dkernel()
@@ -1201,7 +1271,46 @@ function nntest.SpatialDivisiveNormalization_1dkernel()
local nbfeatures = math.random(3,5)
local kernel = torch.Tensor(kersize):fill(1)
local module = nn.SpatialDivisiveNormalization(nbfeatures,kernel)
- local input = torch.rand(nbfeatures,inputSize,inputSize)
+ local input = torch.rand(nbfeatures,inputSize,inputSize/2)
+
+ local err = jac.testJacobian(module,input)
+ mytester:assertlt(err,precision, 'error on state ')
+
+ local ferr,berr = jac.testIO(module,input)
+ mytester:asserteq(ferr, 0, torch.typename(module) .. ' - i/o forward err ')
+ mytester:asserteq(berr, 0, torch.typename(module) .. ' - i/o backward err ')
+
+ -- test batch mode
+ local output = module:forward(input):clone()
+ local gradOutput = output:clone():uniform(0,1)
+ local gradInput = module:backward(input, gradOutput):clone()
+ local batchSize = 4
+ local input2 = torch.rand(batchSize,nbfeatures,inputSize,inputSize/2)
+ input2[2]:copy(input)
+
+ local output2 = module:forward(input2)
+ local gradOutput2 = output2:clone():uniform(0,1)
+ gradOutput2[2]:copy(gradOutput)
+ local gradInput2 = module:backward(input2, gradOutput2)
+
+ mytester:assertTensorEq(output2[2], output, 0.000001, "SpatialDivisiveNormalization 1d forward batch err")
+ mytester:assertTensorEq(gradOutput2[2], gradOutput, 0.000001, "SpatialDivisiveNormalization 1d backward batch err")
+
+ local err = jac.testJacobian(module,input2)
+ mytester:assertlt(err,precision, 'error on state ')
+
+ local ferr,berr = jac.testIO(module,input2)
+ mytester:asserteq(ferr, 0, torch.typename(module) .. ' - i/o forward err ')
+ mytester:asserteq(berr, 0, torch.typename(module) .. ' - i/o backward err ')
+end
+
+function nntest.SpatialContrastiveNormalization()
+ local inputSize = math.random(6,9)
+ local kersize = 3
+ local nbfeatures = math.random(3,5)
+ local kernel = torch.Tensor(kersize,kersize):fill(1)
+ local module = nn.SpatialContrastiveNormalization(nbfeatures,kernel)
+ local input = torch.rand(nbfeatures,inputSize,inputSize/2)
local err = jac.testJacobian(module,input)
mytester:assertlt(err,precision, 'error on state ')
@@ -1209,6 +1318,32 @@ function nntest.SpatialDivisiveNormalization_1dkernel()
local ferr,berr = jac.testIO(module,input)
mytester:asserteq(ferr, 0, torch.typename(module) .. ' - i/o forward err ')
mytester:asserteq(berr, 0, torch.typename(module) .. ' - i/o backward err ')
+
+ -- test batch mode and type
+ local output = module:forward(input):clone()
+ local gradOutput = output:clone():uniform(0,1)
+ local gradInput = module:backward(input, gradOutput):clone()
+ local batchSize = 4
+ local input2 = torch.rand(batchSize,nbfeatures,inputSize,inputSize/2):float()
+ input2[2]:copy(input)
+
+ module:float() -- type-cast
+ local output2 = module:forward(input2)
+ local gradOutput2 = output2:clone():uniform(0,1)
+ gradOutput2[2]:copy(gradOutput)
+ local gradInput2 = module:backward(input2, gradOutput2)
+
+ mytester:assertTensorEq(output2[2], output:float(), 0.000001, "SpatialContrastiveNormalization 2d forward batch err")
+ mytester:assertTensorEq(gradOutput2[2], gradOutput:float(), 0.000001, "SpatialContrastiveNormalization 2d backward batch err")
+
+ module:double()
+ input2 = input2:double()
+ local err = jac.testJacobian(module,input2)
+ mytester:assertlt(err,precision, 'error on state ')
+
+ local ferr,berr = jac.testIO(module,input2)
+ mytester:asserteq(ferr, 0, torch.typename(module) .. ' - i/o forward err ')
+ mytester:asserteq(berr, 0, torch.typename(module) .. ' - i/o backward err ')
end
function nntest.SpatialConvolution()
@@ -3428,6 +3563,25 @@ function nntest.Replicate()
mytester:assertTensorEq(vOutput1, expected1, precision, 'Wrong tiling of data when replicating vector.')
mytester:assertTensorEq(vOutput2, expected2, precision, 'Wrong tiling of data when replicating vector.')
+
+ -- batch mode
+ local vector = torch.rand(4,3)
+
+ local r1 = nn.Replicate(2, 1, 1)
+ local r2 = nn.Replicate(2, 2, 1)
+
+ local vOutput1 = r1:forward(vector):clone()
+ local vOutput2 = r2:forward(vector):clone()
+
+ local expected1 = torch.zeros(4, 2, 3)
+ local expected2 = torch.zeros(4, 3, 2)
+ expected1:select(2, 1):copy(vector)
+ expected1:select(2, 2):copy(vector)
+ expected2:select(3, 1):copy(vector)
+ expected2:select(3, 2):copy(vector)
+
+ mytester:assertTensorEq(vOutput1, expected1, precision, 'Wrong tiling of data when replicating batch vector.')
+ mytester:assertTensorEq(vOutput2, expected2, precision, 'Wrong tiling of data when replicating batch vector.')
end
function nntest.BatchNormalization()