diff options
author | nicholas-leonard <nick@nikopia.org> | 2014-07-10 00:23:09 +0400 |
---|---|---|
committer | nicholas-leonard <nick@nikopia.org> | 2014-07-10 00:23:09 +0400 |
commit | f08c7fcae24a0302043701e5e4c36a8557b431a9 (patch) | |
tree | 895323a89309a83c8f20196fb16198916c606c40 /test | |
parent | cfb269369c9f1a1b523b2d9865df241868a601db (diff) |
added Dropout version 2
Diffstat (limited to 'test')
-rw-r--r-- | test/test.lua | 10 |
1 file changed, 9 insertions, 1 deletion
diff --git a/test/test.lua b/test/test.lua index a3c816f..cc82e9e 100644 --- a/test/test.lua +++ b/test/test.lua @@ -62,8 +62,16 @@ end function nntest.Dropout() local p = 0.2 --prob of droping out a neuron - local input = torch.Tensor(1000):fill(1) + local input = torch.Tensor(1000):fill((1-p)) local module = nn.Dropout(p) + -- version 2 + local output = module:forward(input) + mytester:assert(math.abs(output:mean() - (1-p)) < 0.05, 'dropout output') + local gradInput = module:backward(input, input) + mytester:assert(math.abs(gradInput:mean() - (1-p)) < 0.05, 'dropout gradInput') + -- version 1 (old nnx version) + local input = input:fill(1) + local module = nn.Dropout(p,true) local output = module:forward(input) mytester:assert(math.abs(output:mean() - (1-p)) < 0.05, 'dropout output') local gradInput = module:backward(input, input) |