
github.com/soumith/cudnn.torch.git
-rw-r--r--  ClippedReLU.lua  4
-rw-r--r--  README.md        1
-rw-r--r--  test/test.lua    3
3 files changed, 5 insertions, 3 deletions
diff --git a/ClippedReLU.lua b/ClippedReLU.lua
index 866e62a..58ebdd7 100644
--- a/ClippedReLU.lua
+++ b/ClippedReLU.lua
@@ -1,10 +1,10 @@
 local ClippedReLU, parent = torch.class('cudnn.ClippedReLU','cudnn._Pointwise')
 
-function ClippedReLU:__init(inplace, ceiling)
+function ClippedReLU:__init(ceiling, inplace)
    parent.__init(self)
-   self.inplace = inplace
    assert(ceiling, "No ceiling was given to ClippedReLU")
    self.ceiling = ceiling
+   self.inplace = inplace or false
 end
 
 function ClippedReLU:updateOutput(input)
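The constructor now takes the ceiling first and the in-place flag second, and the flag defaults to false when omitted. A minimal usage sketch of the new signature (the ceiling value 6 is arbitrary, chosen only for illustration):

-- new argument order: ceiling first, optional in-place flag second
local clipped = cudnn.ClippedReLU(6)                -- inplace defaults to false
local clippedInplace = cudnn.ClippedReLU(6, true)   -- writes the result into the input buffer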
diff --git a/README.md b/README.md
index 233395b..c55e44a 100644
--- a/README.md
+++ b/README.md
@@ -22,6 +22,7 @@ cudnn.SpatialAveragePooling(kW, kH, dW, dH, padW, padH)
 
 -- the pointwise functions take an additional optional argument. if inplace=true then they do operations in-place without using any extra memory for themselves
 cudnn.ReLU(inplace[=false])
+cudnn.ClippedReLU(ceiling, inplace[=false])
 cudnn.Tanh(inplace[=false])
 cudnn.Sigmoid(inplace[=false])
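As documented above, every pointwise module accepts an optional trailing inplace flag. A short sketch of dropping the new ClippedReLU into a network (the convolution sizes are placeholders, not part of this commit):

require 'nn'
require 'cunn'
require 'cudnn'

local net = nn.Sequential()
net:add(cudnn.SpatialConvolution(3, 16, 3, 3))   -- 3 -> 16 feature maps, 3x3 kernel
net:add(cudnn.ClippedReLU(6, true))              -- clip activations to [0, 6] in place
net:cuda()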
diff --git a/test/test.lua b/test/test.lua
index aac45b4..47305ae 100644
--- a/test/test.lua
+++ b/test/test.lua
@@ -965,10 +965,11 @@ end
 function cudnntest.ClippedReLU_single()
    local input = torch.randn(1, 32):cuda()
    local ceiling = 0.1
-   local module = cudnn.ClippedReLU(true, ceiling):cuda()
+   local module = cudnn.ClippedReLU(ceiling):cuda()
    local output = module:forward(input)
    local expectedOutput = input:clone()
    expectedOutput[expectedOutput:ge(ceiling)] = ceiling
+   expectedOutput[expectedOutput:le(0)] = 0
    mytester:assertTensorEq(output, expectedOutput)
 end
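The extra clamp in the test mirrors what ClippedReLU actually computes, output = min(max(input, 0), ceiling): the reference tensor must be clipped below at 0 as well as above at the ceiling. An equivalent way to build the expected output, as a sketch using torch.clamp in place of the two masked assignments:

local expectedOutput = input:clone():clamp(0, ceiling)   -- clip to [0, ceiling] in one call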