github.com/torch/nn.git
author    soumith <soumith@fb.com>  2015-03-14 05:00:35 +0300
committer soumith <soumith@fb.com>  2015-03-21 08:50:48 +0300
commit    fa12c6fc92095b65f44163bb70b3ba32f0970229 (patch)
tree      37ea40667100eeb1d79b5e5df3973247f8c6f7c0 /test.lua
parent    77c0d8f81ad149ccaa0c9ab2ce22a56b88ed70eb (diff)
adding batch normalization
Diffstat (limited to 'test.lua')
-rw-r--r--  test.lua  106
1 file changed, 106 insertions, 0 deletions
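
The commit adds Jacobian and I/O tests for the new nn.BatchNormalization and nn.SpatialBatchNormalization modules. As a minimal usage sketch (not part of this commit; the constructor argument is the feature dimension as exercised by the tests below, and tensor sizes are arbitrary examples):

require 'nn'

-- per-feature batch normalization over a 2D batch (nframes x nfeatures)
local input = torch.randn(16, 8)
local bn = nn.BatchNormalization(8)      -- 8 = number of input features
local output = bn:forward(input)

-- spatial batch normalization over a 4D batch (nframes x nplanes x height x width)
local simg = torch.randn(16, 3, 5, 5)
local sbn = nn.SpatialBatchNormalization(3)  -- 3 = number of feature planes
local soutput = sbn:forward(simg)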
diff --git a/test.lua b/test.lua
index 3661d03..a2c2465 100644
--- a/test.lua
+++ b/test.lua
@@ -3188,6 +3188,112 @@ function nntest.CosineEmbeddingCriterion()
equal(grads[2], zero, 'gradient should be zero')
end
+function nntest.BatchNormalization()
+ local nframes = torch.random(50,70)
+ local indim = torch.random(1,10)
+ local input = torch.zeros(nframes, indim):uniform()
+ local module = nn.BatchNormalization(indim)
+
+ local err = jac.testJacobian(module,input)
+ mytester:assertlt(err,precision, 'error on state ')
+
+ local err = jac.testJacobianParameters(module, input,
+ module.weight, module.gradWeight)
+ mytester:assertlt(err,precision, 'error on weight ')
+
+ local err = jac.testJacobianParameters(module, input,
+ module.bias, module.gradBias)
+ mytester:assertlt(err,precision, 'error on bias ')
+
+ local err = jac.testJacobianUpdateParameters(module, input, module.weight)
+ mytester:assertlt(err,precision, 'error on weight [direct update] ')
+
+ local err = jac.testJacobianUpdateParameters(module, input, module.bias)
+ mytester:assertlt(err,precision, 'error on bias [direct update] ')
+
+ for t,err in pairs(jac.testAllUpdate(module, input,
+ 'weight', 'gradWeight')) do
+ mytester:assertlt(err, precision, string.format(
+ 'error on weight [%s]', t))
+ end
+
+ for t,err in pairs(jac.testAllUpdate(module, input,
+ 'bias', 'gradBias')) do
+ mytester:assertlt(err, precision, string.format(
+ 'error on bias [%s]', t))
+ end
+
+ -- IO
+ local ferr,berr = jac.testIO(module,input)
+ mytester:asserteq(ferr, 0, torch.typename(module) .. ' - i/o forward err ')
+ mytester:asserteq(berr, 0, torch.typename(module) .. ' - i/o backward err ')
+
+ -- batch norm without affine transform
+ module = nn.BatchNormalization(0)
+
+ local err = jac.testJacobian(module,input)
+ mytester:assertlt(err,precision, 'error on state ')
+
+ -- IO
+ local ferr,berr = jac.testIO(module,input)
+ mytester:asserteq(ferr, 0, torch.typename(module) .. ' - i/o forward err ')
+ mytester:asserteq(berr, 0, torch.typename(module) .. ' - i/o backward err ')
+end
+
+function nntest.SpatialBatchNormalization()
+ local nframes = torch.random(1,10)
+ local indim = torch.random(1,4)
+ local ini = torch.random(1,5)
+ local inj = torch.random(1,5)
+ local input = torch.zeros(nframes, indim, ini, inj):uniform()
+ local module = nn.SpatialBatchNormalization(indim)
+
+ local err = jac.testJacobian(module,input)
+ mytester:assertlt(err,precision, 'error on state ')
+
+ local err = jac.testJacobianParameters(module, input,
+ module.weight, module.gradWeight)
+ mytester:assertlt(err,precision, 'error on weight ')
+
+ local err = jac.testJacobianParameters(module, input,
+ module.bias, module.gradBias)
+ mytester:assertlt(err,precision, 'error on bias ')
+
+ local err = jac.testJacobianUpdateParameters(module, input, module.weight)
+ mytester:assertlt(err,precision, 'error on weight [direct update] ')
+
+ local err = jac.testJacobianUpdateParameters(module, input, module.bias)
+ mytester:assertlt(err,precision, 'error on bias [direct update] ')
+
+ for t,err in pairs(jac.testAllUpdate(module, input,
+ 'weight', 'gradWeight')) do
+ mytester:assertlt(err, precision, string.format(
+ 'error on weight [%s]', t))
+ end
+
+ for t,err in pairs(jac.testAllUpdate(module, input,
+ 'bias', 'gradBias')) do
+ mytester:assertlt(err, precision, string.format(
+ 'error on bias [%s]', t))
+ end
+
+ -- IO
+ local ferr,berr = jac.testIO(module,input)
+ mytester:asserteq(ferr, 0, torch.typename(module) .. ' - i/o forward err ')
+ mytester:asserteq(berr, 0, torch.typename(module) .. ' - i/o backward err ')
+
+ -- batch norm without affine transform
+ module = nn.SpatialBatchNormalization(0)
+
+ local err = jac.testJacobian(module,input)
+ mytester:assertlt(err,precision, 'error on state ')
+
+ -- IO
+ local ferr,berr = jac.testIO(module,input)
+ mytester:asserteq(ferr, 0, torch.typename(module) .. ' - i/o forward err ')
+ mytester:asserteq(berr, 0, torch.typename(module) .. ' - i/o backward err ')
+end
+
mytester:add(nntest)
if not nn then