diff options
author | Clement Farabet <clement.farabet@gmail.com> | 2014-07-10 01:02:29 +0400 |
---|---|---|
committer | Clement Farabet <clement.farabet@gmail.com> | 2014-07-10 01:02:29 +0400 |
commit | ca59595974936b9ff42377677e7553627f397197 (patch) | |
tree | 24b6c255c9ab8e0136379308a23c3ea2e72f1344 /test | |
parent | 3f35d9bce7e0335b801f3bfe36d8a86cd53ba4ed (diff) | |
parent | ad64576acb778bae935085e2458d88968939c006 (diff) |
Merge pull request #31 from nicholas-leonard/nnx
Moving Dropout + ReLU in from nnx
Diffstat (limited to 'test')
-rw-r--r-- | test/test.lua | 30 |
1 files changed, 30 insertions, 0 deletions
diff --git a/test/test.lua b/test/test.lua
index 0c9a43c..45cc2fe 100644
--- a/test/test.lua
+++ b/test/test.lua
@@ -60,6 +60,36 @@ function nntest.CMul()
    mytester:asserteq(berr, 0, torch.typename(module) .. ' - i/o backward err ')
 end
 
+function nntest.Dropout()
+   local p = 0.2 --prob of droping out a neuron
+   local input = torch.Tensor(1000):fill((1-p))
+   local module = nn.Dropout(p)
+   -- version 2
+   local output = module:forward(input)
+   mytester:assert(math.abs(output:mean() - (1-p)) < 0.05, 'dropout output')
+   local gradInput = module:backward(input, input)
+   mytester:assert(math.abs(gradInput:mean() - (1-p)) < 0.05, 'dropout gradInput')
+   -- version 1 (old nnx version)
+   local input = input:fill(1)
+   local module = nn.Dropout(p,true)
+   local output = module:forward(input)
+   mytester:assert(math.abs(output:mean() - (1-p)) < 0.05, 'dropout output')
+   local gradInput = module:backward(input, input)
+   mytester:assert(math.abs(gradInput:mean() - (1-p)) < 0.05, 'dropout gradInput')
+end
+
+function nntest.ReLU()
+   local input = torch.randn(3,4)
+   local gradOutput = torch.randn(3,4)
+   local module = nn.ReLU()
+   local output = module:forward(input)
+   local output2 = input:clone():gt(input, 0):cmul(input)
+   mytester:assertTensorEq(output, output2, 0.000001, 'ReLU output')
+   local gradInput = module:backward(input, gradOutput)
+   local gradInput2 = input:clone():gt(input, 0):cmul(gradOutput)
+   mytester:assertTensorEq(gradInput, gradInput2, 0.000001, 'ReLU gradInput')
+end
+
 function nntest.Exp()
    local ini = math.random(10,20)
    local inj = math.random(10,20)