diff options
Diffstat (limited to 'test')
-rw-r--r-- | test/test.lua | 95 |
1 file changed, 49 insertions, 46 deletions
diff --git a/test/test.lua b/test/test.lua index 88adf96..04b1dd8 100644 --- a/test/test.lua +++ b/test/test.lua @@ -239,69 +239,72 @@ end function nntest.Linear() local ini = math.random(5,7) - local inj = math.random(5,7) + local inj_vals = {math.random(5,7), 1} -- Also test the inj = 1 special case local input = torch.Tensor(ini):zero() - local module = nn.Linear(ini,inj) - -- 1D - local err = jac.testJacobian(module,input) - mytester:assertlt(err,precision, 'error on state ') + for ind, inj in pairs(inj_vals) do + local module = nn.Linear(ini,inj) - local err = jac.testJacobianParameters(module, input, module.weight, module.gradWeight) - mytester:assertlt(err,precision, 'error on weight ') + -- 1D + local err = jac.testJacobian(module,input) + mytester:assertlt(err,precision, 'error on state ') - local err = jac.testJacobianParameters(module, input, module.bias, module.gradBias) - mytester:assertlt(err,precision, 'error on bias ') + local err = jac.testJacobianParameters(module, input, module.weight, module.gradWeight) + mytester:assertlt(err,precision, 'error on weight ') - local err = jac.testJacobianUpdateParameters(module, input, module.weight) - mytester:assertlt(err,precision, 'error on weight [direct update] ') + local err = jac.testJacobianParameters(module, input, module.bias, module.gradBias) + mytester:assertlt(err,precision, 'error on bias ') - local err = jac.testJacobianUpdateParameters(module, input, module.bias) - mytester:assertlt(err,precision, 'error on bias [direct update] ') + local err = jac.testJacobianUpdateParameters(module, input, module.weight) + mytester:assertlt(err,precision, 'error on weight [direct update] ') - for t,err in pairs(jac.testAllUpdate(module, input, 'weight', 'gradWeight')) do - mytester:assertlt(err, precision, string.format( - 'error on weight [%s]', t)) - end + local err = jac.testJacobianUpdateParameters(module, input, module.bias) + mytester:assertlt(err,precision, 'error on bias [direct update] ') - for t,err
in pairs(jac.testAllUpdate(module, input, 'bias', 'gradBias')) do - mytester:assertlt(err, precision, string.format( - 'error on bias [%s]', t)) - end + for t,err in pairs(jac.testAllUpdate(module, input, 'weight', 'gradWeight')) do + mytester:assertlt(err, precision, string.format( + 'error on weight [%s]', t)) + end - -- 2D - local nframe = math.random(50,70) - local input = torch.Tensor(nframe, ini):zero() + for t,err in pairs(jac.testAllUpdate(module, input, 'bias', 'gradBias')) do + mytester:assertlt(err, precision, string.format( + 'error on bias [%s]', t)) + end - local err = jac.testJacobian(module,input) - mytester:assertlt(err,precision, 'error on state ') + -- 2D + local nframe = math.random(50,70) + local input = torch.Tensor(nframe, ini):zero() - local err = jac.testJacobianParameters(module, input, module.weight, module.gradWeight) - mytester:assertlt(err,precision, 'error on weight ') + local err = jac.testJacobian(module,input) + mytester:assertlt(err,precision, 'error on state ') - local err = jac.testJacobianParameters(module, input, module.bias, module.gradBias) - mytester:assertlt(err,precision, 'error on weight ') + local err = jac.testJacobianParameters(module, input, module.weight, module.gradWeight) + mytester:assertlt(err,precision, 'error on weight ') - local err = jac.testJacobianUpdateParameters(module, input, module.weight) - mytester:assertlt(err,precision, 'error on weight [direct update] ') + local err = jac.testJacobianParameters(module, input, module.bias, module.gradBias) + mytester:assertlt(err,precision, 'error on weight ') - local err = jac.testJacobianUpdateParameters(module, input, module.bias) - mytester:assertlt(err,precision, 'error on bias [direct update] ') + local err = jac.testJacobianUpdateParameters(module, input, module.weight) + mytester:assertlt(err,precision, 'error on weight [direct update] ') - for t,err in pairs(jac.testAllUpdate(module, input, 'weight', 'gradWeight')) do - mytester:assertlt(err, precision, 
string.format( - 'error on weight [%s]', t)) - end + local err = jac.testJacobianUpdateParameters(module, input, module.bias) + mytester:assertlt(err,precision, 'error on bias [direct update] ') - for t,err in pairs(jac.testAllUpdate(module, input, 'bias', 'gradBias')) do - mytester:assertlt(err, precision, string.format( - 'error on bias [%s]', t)) - end + for t,err in pairs(jac.testAllUpdate(module, input, 'weight', 'gradWeight')) do + mytester:assertlt(err, precision, string.format( + 'error on weight [%s]', t)) + end - -- IO - local ferr,berr = jac.testIO(module,input) - mytester:asserteq(ferr, 0, torch.typename(module) .. ' - i/o forward err ') - mytester:asserteq(berr, 0, torch.typename(module) .. ' - i/o backward err ') + for t,err in pairs(jac.testAllUpdate(module, input, 'bias', 'gradBias')) do + mytester:assertlt(err, precision, string.format( + 'error on bias [%s]', t)) + end + + -- IO + local ferr,berr = jac.testIO(module,input) + mytester:asserteq(ferr, 0, torch.typename(module) .. ' - i/o forward err ') + mytester:asserteq(berr, 0, torch.typename(module) .. ' - i/o backward err ') + end -- for ind, inj in pairs(inj_vals) do end function nntest.SparseLinear() |