
github.com/torch/nn.git
author    Ross Goroshin <rgoroshin@gmail.com>  2014-07-22 02:15:24 +0400
committer Ross Goroshin <rgoroshin@gmail.com>  2014-07-22 02:42:14 +0400
commit    d24686035d2e29ce5eae3416ecf3172edd84d71c (patch)
tree      1f95292b83bdb5a97f69a696467f89c579895314 /test
parent    f91f38632a71e5dac622931919bcc36427408cbb (diff)
Added L1Penalty and a unit test.
Updated the test for L1Penalty. Added documentation for L1Penalty.
Diffstat (limited to 'test')
-rw-r--r--  test/test.lua | 24
1 file changed, 24 insertions, 0 deletions
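
The module under test is a pass-through layer that records an L1 penalty: forward returns the input unchanged while storing weight * ||input||_1 in self.loss, and backward returns weight * sign(input), which is exactly what the test below checks. For context, here is a minimal sketch of such a module against the standard nn.Module class API. This is an illustration consistent with the test's expectations, not the committed nn/L1Penalty.lua; the name L1PenaltySketch is hypothetical.

require 'nn'

-- Illustrative sketch only; see nn/L1Penalty.lua for the real module.
local L1PenaltySketch, parent = torch.class('nn.L1PenaltySketch', 'nn.Module')

function L1PenaltySketch:__init(weight, sizeAverage)
   parent.__init(self)
   self.weight = weight
   self.sizeAverage = sizeAverage or false
end

function L1PenaltySketch:updateOutput(input)
   local m = self.sizeAverage and 1 / input:nElement() or 1
   self.loss = m * self.weight * input:norm(1)  -- weight * sum of |x|
   self.output = input                          -- identity on the data path
   return self.output
end

function L1PenaltySketch:updateGradInput(input, gradOutput)
   local m = self.sizeAverage and 1 / input:nElement() or 1
   -- Gradient of weight*|x| is weight*sign(x); zero where x == 0.
   -- Like the test's expected gradient, this does not propagate gradOutput.
   self.gradInput = torch.sign(input):mul(m * self.weight)
   return self.gradInput
end

Because the penalty value lives in self.loss rather than in the forward output, a finite-difference (Jacobian) check cannot see the gradient that backward produces; the test below verifies the loss and the gradient directly instead.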
diff --git a/test/test.lua b/test/test.lua
index 55fe7b6..0f4c235 100644
--- a/test/test.lua
+++ b/test/test.lua
@@ -2260,6 +2260,30 @@ function nntest.FlattenTable()
    mytester:assertlt(err, precision, 'error on bprop ')
 end
 
+function nntest.L1Penalty()
+   local weight = 1
+   local sizeAverage = false
+   local m = nn.L1Penalty(weight, sizeAverage)
+
+   local input = torch.rand(2,10):add(-0.5)
+   input[1][1] = 0
+
+   local out = m:forward(input)
+   local grad = m:backward(input, torch.ones(input:size()))
+
+   local err = input:clone():abs():sum()*weight - m.loss
+   mytester:assertlt(math.abs(err), precision, 'error on fprop ')
+
+   local true_grad = (input:gt(0):typeAs(grad) +
+      input:lt(0):typeAs(grad):mul(-1)):mul(weight)
+   mytester:assertlt((true_grad - grad):abs():max(), precision,
+      'error on bprop ')
+
+   -- Note: We cannot use the Jacobian test for this Module since the backward
+   -- gradient cannot be estimated using finite differences (i.e., the loss
+   -- during BPROP is not included in the FPROP output)
+end
+
 mytester:add(nntest)
 
 if not nn then
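
As a usage sketch, the new module is meant to be dropped into a network as an identity layer whose penalty term is read off its loss field after forward and added to the training objective. The surrounding network, weight value, and data below are hypothetical.

require 'nn'

-- Hypothetical toy setup: the penalty layer passes activations through
-- unchanged and accumulates weight * L1-norm of its input in penalty.loss.
local penalty = nn.L1Penalty(1e-4, false)  -- (weight, sizeAverage)
local net = nn.Sequential()
net:add(nn.Linear(10, 10))
net:add(nn.Tanh())
net:add(penalty)
net:add(nn.Linear(10, 2))

local x = torch.rand(4, 10)
local out = net:forward(x)
-- Total objective = task criterion evaluated on `out` + the penalty term.
print('L1 penalty term:', penalty.loss)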