From ae1c144739c18b50169717d8517b3b180dbc3c93 Mon Sep 17 00:00:00 2001
From: SeanNaren
Date: Sat, 11 Jun 2016 12:43:04 +0100
Subject: Added clipped ReLU

---
 test/test.lua | 15 +++++++++++++++
 1 file changed, 15 insertions(+)

(limited to 'test')

diff --git a/test/test.lua b/test/test.lua
index a448317..aac45b4 100644
--- a/test/test.lua
+++ b/test/test.lua
@@ -962,6 +962,21 @@
 function cudnntest.ReLU_batch()
    nonlinBatch('ReLU')
 end
 
+-- ClippedReLU computes min(max(0, x), ceiling) (cuDNN CLIPPED_RELU), so the
+-- expected tensor must clamp negatives to 0 as well as clip at the ceiling.
+function cudnntest.ClippedReLU_single()
+   local input = torch.randn(1, 32):cuda()
+   local ceiling = 0.1
+   -- NOTE(review): argument order (inplace, ceiling) assumed here — confirm
+   -- against cudnn.ClippedReLU's constructor signature.
+   local module = cudnn.ClippedReLU(true, ceiling):cuda()
+   local output = module:forward(input)
+   local expectedOutput = input:clone()
+   expectedOutput[expectedOutput:ge(ceiling)] = ceiling
+   expectedOutput[expectedOutput:le(0)] = 0
+   mytester:assertTensorEq(output, expectedOutput)
+end
+
 function cudnntest.Tanh_single()
    nonlinSingle('Tanh')
 end
-- 
cgit v1.2.3