From 905ea8c1a4033af2af0f90e92e16597f442f3512 Mon Sep 17 00:00:00 2001
From: karpathy
Date: Tue, 5 May 2015 14:55:07 -0700
Subject: Adding Batch L2 Normalization Layer that makes all rows of input
 Tensor unit L2 norm

---
 test.lua | 23 +++++++++++++++++++++++
 1 file changed, 23 insertions(+)

(limited to 'test.lua')

diff --git a/test.lua b/test.lua
index 9414a66..959c369 100644
--- a/test.lua
+++ b/test.lua
@@ -3554,6 +3554,29 @@ function nntest.Padding()
    mytester:assertTensorEq(gradInput, input, 0.00001, "Padding backward error")
 end
 
+function nntest.L2Normalize()
+   local ini = math.random(6,8)
+   local inj = math.random(3,5)
+   local input = torch.randn(ini, inj)
+
+   local module = nn.L2Normalize()
+
+   -- test correctness of output
+   local output = module:forward(input)
+   local norms = torch.norm(output, 2, 2)
+   local desired_norms = torch.ones(ini)
+   mytester:assertTensorEq(norms, desired_norms, 0.000001, 'L2Normalize forward err')
+
+   -- test the Jacobian
+   local err = jac.testJacobian(module,input)
+   mytester:assertlt(err, precision, 'error on state ')
+
+   -- test IO correctness
+   local ferr, berr = jac.testIO(module,input)
+   mytester:asserteq(ferr, 0, torch.typename(module) .. ' - i/o forward err ')
+   mytester:asserteq(berr, 0, torch.typename(module) .. ' - i/o backward err ')
+end
+
 mytester:add(nntest)
 
 if not nn then
--
cgit v1.2.3
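
Note: the patch above adds only the unit test; the nn.L2Normalize module itself is not part of this diff. For context, below is a minimal sketch of what such a module could look like, assuming the standard nn.Module contract (updateOutput/updateGradInput) and plain Torch7 tensor ops. This is illustrative only, not the code from the actual commit, and it ignores the all-zero-row edge case.

require 'nn'

-- Sketch only: assumes 2D (batch x dim) input, as the test above does.
local L2Normalize, parent = torch.class('nn.L2Normalize', 'nn.Module')

function L2Normalize:__init()
   parent.__init(self)
end

function L2Normalize:updateOutput(input)
   assert(input:dim() == 2, 'only batched (2D) input is supported')
   -- per-row L2 norms, size (batch x 1)
   self.norm = input:norm(2, 2)
   -- divide each row by its norm so every row has unit L2 norm
   self.output:resizeAs(input):copy(input)
   self.output:cdiv(self.norm:expandAs(input))
   return self.output
end

function L2Normalize:updateGradInput(input, gradOutput)
   -- for y = x / ||x||:  dL/dx = g / ||x|| - x * (x . g) / ||x||^3
   local n = self.norm:expandAs(input)
   local dot = torch.cmul(input, gradOutput):sum(2):expandAs(input)
   self.gradInput:resizeAs(input):copy(gradOutput):cdiv(n)
   self.gradInput:addcdiv(-1, torch.cmul(input, dot), torch.pow(n, 3))
   return self.gradInput
end

With a module along these lines, the forward check in the test holds: torch.norm(module:forward(input), 2, 2) is a column of ones up to floating-point error, which is exactly what the assertTensorEq in the patch verifies, and jac.testJacobian numerically checks the updateGradInput formula.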