diff options
author | soumith <soumith@fb.com> | 2015-05-15 00:29:42 +0300 |
---|---|---|
committer | soumith <soumith@fb.com> | 2015-06-03 08:08:12 +0300 |
commit | 83a3815dc70255c978405e8e966d7b02d580cc11 (patch) | |
tree | 0a947fe2d6889de9c5d77d8947bee3129cfb2754 /test.lua | |
parent | 975360a3ddd0ee0fccf7b86a3ce5120f6a9c55bd (diff) |
batchnorm is clonable by adding the running estimates to constructor
fixing batchnorm tests
Diffstat (limited to 'test.lua')
-rw-r--r-- | test.lua | 11 |
1 file changed, 9 insertions(+), 2 deletions(-)
@@ -438,6 +438,13 @@ function nntest.Sqrt()
   local err = out:dist(in1:sqrt())
   mytester:assertlt(err, 1e-15, torch.typename(module) .. ' - forward err ')

+  -- Test zero inputs; we will avoid a div-by-zero by setting to zero
+  local zin = torch.DoubleTensor(5, 7):zero()
+  module:forward(zin)
+  local zgradout = torch.rand(5, 7)
+  local zgradin = module:backward(zin, zgradout)
+  mytester:assertTensorEq(zgradin, torch.DoubleTensor(5, 7):zero(), 0.000001, "error in sqrt backward singularity")
+
   local ini = math.random(3,5)
   local inj = math.random(3,5)
   local ink = math.random(3,5)
@@ -3471,7 +3478,7 @@ function nntest.BatchNormalization()
   mytester:asserteq(berr, 0, torch.typename(module) .. ' - i/o backward err ')

   -- batch norm without affine transform
-  module = nn.BatchNormalization(0)
+  module = nn.BatchNormalization(indim, 1e-5, 0.1, false)
   local err = jac.testJacobian(module,input)
   mytester:assertlt(err,precision, 'error on state ')
@@ -3525,7 +3532,7 @@ function nntest.SpatialBatchNormalization()
   mytester:asserteq(berr, 0, torch.typename(module) .. ' - i/o backward err ')

   -- batch norm without affine transform
-  module = nn.SpatialBatchNormalization(0)
+  module = nn.SpatialBatchNormalization(indim, 1e-5, 0.1, false)
   local err = jac.testJacobian(module,input)
   mytester:assertlt(err,precision, 'error on state ')