Welcome to mirror list, hosted at ThFree Co, Russian Federation.

github.com/torch/nn.git - Unnamed repository; edit this file 'description' to name the repository.
summaryrefslogtreecommitdiff
path: root/test
diff options
context:
space:
mode:
authorJonathan Tompson <tompson@cims.nyu.edu>2014-07-01 17:39:07 +0400
committerJonathan Tompson <tompson@cims.nyu.edu>2014-07-01 17:39:07 +0400
commite73293539b424118a5e79dff8ae74dc337a76325 (patch)
treea322a4d86711e72892050afd3d0afa0c2d12826b /test
parenta87db79806b7f9ef43c5eb4f0cba63f745c2c827 (diff)
Fixed the Linear stage when the output size is 1.
Diffstat (limited to 'test')
-rw-r--r--test/test.lua95
1 file changed, 49 insertions, 46 deletions
diff --git a/test/test.lua b/test/test.lua
index 5e4bce7..6832885 100644
--- a/test/test.lua
+++ b/test/test.lua
@@ -239,69 +239,72 @@ end
function nntest.Linear()
local ini = math.random(5,7)
- local inj = math.random(5,7)
+   local inj_vals = {math.random(5,7), 1} -- Also test the inj = 1 special case
local input = torch.Tensor(ini):zero()
- local module = nn.Linear(ini,inj)
- -- 1D
- local err = jac.testJacobian(module,input)
- mytester:assertlt(err,precision, 'error on state ')
+ for ind, inj in pairs(inj_vals) do
+ local module = nn.Linear(ini,inj)
- local err = jac.testJacobianParameters(module, input, module.weight, module.gradWeight)
- mytester:assertlt(err,precision, 'error on weight ')
+ -- 1D
+ local err = jac.testJacobian(module,input)
+ mytester:assertlt(err,precision, 'error on state ')
- local err = jac.testJacobianParameters(module, input, module.bias, module.gradBias)
- mytester:assertlt(err,precision, 'error on bias ')
+ local err = jac.testJacobianParameters(module, input, module.weight, module.gradWeight)
+ mytester:assertlt(err,precision, 'error on weight ')
- local err = jac.testJacobianUpdateParameters(module, input, module.weight)
- mytester:assertlt(err,precision, 'error on weight [direct update] ')
+ local err = jac.testJacobianParameters(module, input, module.bias, module.gradBias)
+ mytester:assertlt(err,precision, 'error on bias ')
- local err = jac.testJacobianUpdateParameters(module, input, module.bias)
- mytester:assertlt(err,precision, 'error on bias [direct update] ')
+ local err = jac.testJacobianUpdateParameters(module, input, module.weight)
+ mytester:assertlt(err,precision, 'error on weight [direct update] ')
- for t,err in pairs(jac.testAllUpdate(module, input, 'weight', 'gradWeight')) do
- mytester:assertlt(err, precision, string.format(
- 'error on weight [%s]', t))
- end
+ local err = jac.testJacobianUpdateParameters(module, input, module.bias)
+ mytester:assertlt(err,precision, 'error on bias [direct update] ')
- for t,err in pairs(jac.testAllUpdate(module, input, 'bias', 'gradBias')) do
- mytester:assertlt(err, precision, string.format(
- 'error on bias [%s]', t))
- end
+ for t,err in pairs(jac.testAllUpdate(module, input, 'weight', 'gradWeight')) do
+ mytester:assertlt(err, precision, string.format(
+ 'error on weight [%s]', t))
+ end
- -- 2D
- local nframe = math.random(50,70)
- local input = torch.Tensor(nframe, ini):zero()
+ for t,err in pairs(jac.testAllUpdate(module, input, 'bias', 'gradBias')) do
+ mytester:assertlt(err, precision, string.format(
+ 'error on bias [%s]', t))
+ end
- local err = jac.testJacobian(module,input)
- mytester:assertlt(err,precision, 'error on state ')
+ -- 2D
+ local nframe = math.random(50,70)
+ local input = torch.Tensor(nframe, ini):zero()
- local err = jac.testJacobianParameters(module, input, module.weight, module.gradWeight)
- mytester:assertlt(err,precision, 'error on weight ')
+ local err = jac.testJacobian(module,input)
+ mytester:assertlt(err,precision, 'error on state ')
- local err = jac.testJacobianParameters(module, input, module.bias, module.gradBias)
- mytester:assertlt(err,precision, 'error on weight ')
+ local err = jac.testJacobianParameters(module, input, module.weight, module.gradWeight)
+ mytester:assertlt(err,precision, 'error on weight ')
- local err = jac.testJacobianUpdateParameters(module, input, module.weight)
- mytester:assertlt(err,precision, 'error on weight [direct update] ')
+ local err = jac.testJacobianParameters(module, input, module.bias, module.gradBias)
+      mytester:assertlt(err,precision, 'error on bias ')
- local err = jac.testJacobianUpdateParameters(module, input, module.bias)
- mytester:assertlt(err,precision, 'error on bias [direct update] ')
+ local err = jac.testJacobianUpdateParameters(module, input, module.weight)
+ mytester:assertlt(err,precision, 'error on weight [direct update] ')
- for t,err in pairs(jac.testAllUpdate(module, input, 'weight', 'gradWeight')) do
- mytester:assertlt(err, precision, string.format(
- 'error on weight [%s]', t))
- end
+ local err = jac.testJacobianUpdateParameters(module, input, module.bias)
+ mytester:assertlt(err,precision, 'error on bias [direct update] ')
- for t,err in pairs(jac.testAllUpdate(module, input, 'bias', 'gradBias')) do
- mytester:assertlt(err, precision, string.format(
- 'error on bias [%s]', t))
- end
+ for t,err in pairs(jac.testAllUpdate(module, input, 'weight', 'gradWeight')) do
+ mytester:assertlt(err, precision, string.format(
+ 'error on weight [%s]', t))
+ end
- -- IO
- local ferr,berr = jac.testIO(module,input)
- mytester:asserteq(ferr, 0, torch.typename(module) .. ' - i/o forward err ')
- mytester:asserteq(berr, 0, torch.typename(module) .. ' - i/o backward err ')
+ for t,err in pairs(jac.testAllUpdate(module, input, 'bias', 'gradBias')) do
+ mytester:assertlt(err, precision, string.format(
+ 'error on bias [%s]', t))
+ end
+
+ -- IO
+ local ferr,berr = jac.testIO(module,input)
+ mytester:asserteq(ferr, 0, torch.typename(module) .. ' - i/o forward err ')
+ mytester:asserteq(berr, 0, torch.typename(module) .. ' - i/o backward err ')
+ end -- for ind, inj in pairs(inj_vals) do
end
function nntest.SparseLinear()