github.com/torch/nn.git
author     Soumith Chintala <soumith@gmail.com>   2015-03-21 08:52:30 +0300
committer  Soumith Chintala <soumith@gmail.com>   2015-03-21 08:52:30 +0300
commit     a716e978686d8d5cfb3092aff9c7773883717d2b (patch)
tree       2ad7bedf709f5b96199203aa5c0a1023b1b8fa85 /test.lua
parent     c2394aefb897914f16e958ec9489d9327fa7e8c6 (diff)
parent     fa12c6fc92095b65f44163bb70b3ba32f0970229 (diff)

Merge pull request #189 from torch/batchnorm

Batch Normalization: Accelerating Deep Network Training by Reducing Internal Covariate Shift
Diffstat (limited to 'test.lua')

 test.lua | 106 +++++++++++++

 1 file changed, 106 insertions(+), 0 deletions(-)
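For context on what the new tests exercise: the paper named in the commit message normalizes each feature over the mini-batch and then applies a learned per-feature affine transform. Below is a minimal Lua/Torch sketch of that forward transform, assuming a 2D (nBatch x nFeature) tensor; batchNormForward, gamma, beta, and eps are illustrative names, not the internals of the nn.BatchNormalization module this PR merges.

require 'torch'

-- Sketch of the per-feature transform from the paper:
--   y = gamma * (x - mean) / sqrt(var + eps) + beta
local function batchNormForward(input, gamma, beta, eps)
   eps = eps or 1e-5
   local mean = input:mean(1)                          -- 1 x nFeature mini-batch mean
   local centered = input - mean:expandAs(input)
   local var = torch.pow(centered, 2):mean(1)          -- biased variance, as in the paper
   local xhat = torch.cdiv(centered, torch.sqrt(var + eps):expandAs(input))
   return torch.cmul(xhat, gamma:expandAs(input)) + beta:expandAs(input)
end

-- Usage: each output column ends up with roughly zero mean and unit variance.
local y = batchNormForward(torch.randn(16, 8), torch.ones(1, 8), torch.zeros(1, 8))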
diff --git a/test.lua b/test.lua
index f05e502..84410c9 100644
--- a/test.lua
+++ b/test.lua
@@ -3195,6 +3195,112 @@ function nntest.CosineEmbeddingCriterion()
 equal(grads[2], zero, 'gradient should be zero')
 end
+function nntest.BatchNormalization()
+ local nframes = torch.random(50,70)
+ local indim = torch.random(1,10)
+ local input = torch.zeros(nframes, indim):uniform()
+ local module = nn.BatchNormalization(indim)
+
+ local err = jac.testJacobian(module,input)
+ mytester:assertlt(err,precision, 'error on state ')
+
+ local err = jac.testJacobianParameters(module, input,
+ module.weight, module.gradWeight)
+ mytester:assertlt(err,precision, 'error on weight ')
+
+ local err = jac.testJacobianParameters(module, input,
+ module.bias, module.gradBias)
+ mytester:assertlt(err,precision, 'error on bias ')
+
+ local err = jac.testJacobianUpdateParameters(module, input, module.weight)
+ mytester:assertlt(err,precision, 'error on weight [direct update] ')
+
+ local err = jac.testJacobianUpdateParameters(module, input, module.bias)
+ mytester:assertlt(err,precision, 'error on bias [direct update] ')
+
+ for t,err in pairs(jac.testAllUpdate(module, input,
+ 'weight', 'gradWeight')) do
+ mytester:assertlt(err, precision, string.format(
+ 'error on weight [%s]', t))
+ end
+
+ for t,err in pairs(jac.testAllUpdate(module, input,
+ 'bias', 'gradBias')) do
+ mytester:assertlt(err, precision, string.format(
+ 'error on bias [%s]', t))
+ end
+
+ -- IO
+ local ferr,berr = jac.testIO(module,input)
+ mytester:asserteq(ferr, 0, torch.typename(module) .. ' - i/o forward err ')
+ mytester:asserteq(berr, 0, torch.typename(module) .. ' - i/o backward err ')
+
+ -- batch norm without affine transform
+ module = nn.BatchNormalization(0)
+
+ local err = jac.testJacobian(module,input)
+ mytester:assertlt(err,precision, 'error on state ')
+
+ -- IO
+ local ferr,berr = jac.testIO(module,input)
+ mytester:asserteq(ferr, 0, torch.typename(module) .. ' - i/o forward err ')
+ mytester:asserteq(berr, 0, torch.typename(module) .. ' - i/o backward err ')
+end
+
+function nntest.SpatialBatchNormalization()
+ local nframes = torch.random(1,10)
+ local indim = torch.random(1,4)
+ local ini = torch.random(1,5)
+ local inj = torch.random(1,5)
+ local input = torch.zeros(nframes, indim, ini, inj):uniform()
+ local module = nn.SpatialBatchNormalization(indim)
+
+ local err = jac.testJacobian(module,input)
+ mytester:assertlt(err,precision, 'error on state ')
+
+ local err = jac.testJacobianParameters(module, input,
+ module.weight, module.gradWeight)
+ mytester:assertlt(err,precision, 'error on weight ')
+
+ local err = jac.testJacobianParameters(module, input,
+ module.bias, module.gradBias)
+ mytester:assertlt(err,precision, 'error on bias ')
+
+ local err = jac.testJacobianUpdateParameters(module, input, module.weight)
+ mytester:assertlt(err,precision, 'error on weight [direct update] ')
+
+ local err = jac.testJacobianUpdateParameters(module, input, module.bias)
+ mytester:assertlt(err,precision, 'error on bias [direct update] ')
+
+ for t,err in pairs(jac.testAllUpdate(module, input,
+ 'weight', 'gradWeight')) do
+ mytester:assertlt(err, precision, string.format(
+ 'error on weight [%s]', t))
+ end
+
+ for t,err in pairs(jac.testAllUpdate(module, input,
+ 'bias', 'gradBias')) do
+ mytester:assertlt(err, precision, string.format(
+ 'error on bias [%s]', t))
+ end
+
+ -- IO
+ local ferr,berr = jac.testIO(module,input)
+ mytester:asserteq(ferr, 0, torch.typename(module) .. ' - i/o forward err ')
+ mytester:asserteq(berr, 0, torch.typename(module) .. ' - i/o backward err ')
+
+ -- batch norm without affine transform
+ module = nn.SpatialBatchNormalization(0)
+
+ local err = jac.testJacobian(module,input)
+ mytester:assertlt(err,precision, 'error on state ')
+
+ -- IO
+ local ferr,berr = jac.testIO(module,input)
+ mytester:asserteq(ferr, 0, torch.typename(module) .. ' - i/o forward err ')
+ mytester:asserteq(berr, 0, torch.typename(module) .. ' - i/o backward err ')
+end
+
 mytester:add(nntest)
 if not nn then
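The assertions above all flow through nn's Jacobian tester (the jac table), which compares each module's analytic gradient against a finite-difference estimate. Below is a minimal sketch of that idea, assuming a contiguous 2D input; checkJacobian is a hypothetical helper, and the real nn.Jacobian additionally covers parameter gradients, direct updates, and serialization (testIO).

require 'nn'

-- Hypothetical helper sketching what jac.testJacobian measures: the largest
-- gap between the module's analytic Jacobian (via updateGradInput) and a
-- central finite-difference estimate of forward().
local function checkJacobian(module, input, perturbation)
   perturbation = perturbation or 1e-6
   local output = module:forward(input)
   local nin, nout = input:nElement(), output:nElement()
   local analytic = torch.Tensor(nin, nout)
   local numeric = torch.Tensor(nin, nout)
   -- analytic: one backward pass per output coordinate
   for i = 1, nout do
      local gradOutput = torch.zeros(output:size())
      gradOutput:storage()[i] = 1
      analytic[{{}, i}]:copy(module:updateGradInput(input, gradOutput))
   end
   -- numeric: central differences on each input coordinate
   local flat = input:view(-1)   -- shares storage with input
   for i = 1, nin do
      local orig = flat[i]
      flat[i] = orig + perturbation
      local plus = module:forward(input):clone():view(-1)
      flat[i] = orig - perturbation
      local minus = module:forward(input):clone():view(-1)
      flat[i] = orig
      numeric[i]:copy((plus - minus):div(2 * perturbation))
   end
   return (analytic - numeric):abs():max()
end

-- Usage: the result should sit well below the test suite's `precision` threshold.
print(checkJacobian(nn.BatchNormalization(4), torch.randn(10, 4)))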