github.com/soumith/cudnn.torch.git
author    soumith <soumith@fb.com>  2014-11-26 05:23:52 +0300
committer soumith <soumith@fb.com>  2014-11-26 05:23:52 +0300
commit    b0e6e3f0ad10e931a1f83197b127a442e179e67e (patch)
tree      4d58247a109e9a211317625e1263a637a5981070 /ReLU.lua
parent    b38b1a17251639eb68df4acde5784a661e2c8888 (diff)
refactoring lots of duplicated code
Diffstat (limited to 'ReLU.lua')
-rw-r--r--  ReLU.lua  48
1 file changed, 2 insertions(+), 46 deletions(-)
diff --git a/ReLU.lua b/ReLU.lua
index b7f6ae1..d25e69c 100644
--- a/ReLU.lua
+++ b/ReLU.lua
@@ -1,50 +1,6 @@
-local ReLU, parent = torch.class('cudnn.ReLU','nn.Module')
-local ffi = require 'ffi'
-local C = cudnn.C
-local errcheck = cudnn.errcheck
+local ReLU, parent = torch.class('cudnn.ReLU','cudnn._Pointwise')
function ReLU:__init()
parent.__init(self)
- self.iSize = torch.LongStorage(4):fill(0)
-end
-
-function ReLU:createIODescriptors(input)
- local batch = true
- if input:dim() == 3 then
- input = input:view(1, input:size(1), input:size(2), input:size(3))
- batch = false
- end
- assert(input:dim() == 4 and input:isContiguous());
- if not self.iDesc or not self.oDesc or
- input:size(1) ~= self.iSize[1] or input:size(2) ~= self.iSize[2]
- or input:size(3) ~= self.iSize[3] or input:size(4) ~= self.iSize[4] then
- self.iSize = input:size()
- self.gradInput:resizeAs(input)
- self.output:resizeAs(input)
- self.iDesc = cudnn.toDescriptor(input)
- self.oDesc = cudnn.toDescriptor(self.output)
- if not batch then
- self.gradInput = self.gradInput:view(self.gradInput:size(2), self.gradInput:size(3), self.gradInput:size(4))
- self.output = self.output:view(self.output:size(2), self.output:size(3), self.output:size(4))
- end
- end
-end
-
-function ReLU:updateOutput(input)
- self:createIODescriptors(input)
- errcheck('cudnnActivationForward', cudnn.handle[cutorch.getDevice()-1], 'CUDNN_ACTIVATION_RELU',
- self.iDesc[0], input:data(),
- self.oDesc[0], self.output:data());
- return self.output
-end
-
-function ReLU:updateGradInput(input, gradOutput)
- assert((gradOutput:dim() == 4 or gradOutput:dim() == 3) and gradOutput:isContiguous());
- self:createIODescriptors(input)
- errcheck('cudnnActivationBackward', cudnn.handle[cutorch.getDevice()-1], 'CUDNN_ACTIVATION_RELU',
- self.oDesc[0], self.output:data(),
- self.oDesc[0], gradOutput:data(),
- self.iDesc[0], input:data(),
- self.iDesc[0], self.gradInput:data());
- return self.gradInput
+ self.mode = 'CUDNN_ACTIVATION_RELU'
end
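
For context: this commit moves the descriptor bookkeeping and the cudnnActivationForward/cudnnActivationBackward calls out of ReLU.lua into a shared cudnn._Pointwise parent class (not shown in this diff), so each pointwise activation module only declares its cuDNN activation mode. Below is a minimal sketch of how a sibling module would look under the same pattern; cudnn.Tanh and its mode string are used purely for illustration and are not part of this commit:

-- Sketch only: a sibling pointwise activation defined the same way the
-- refactored cudnn.ReLU is. The shared forward/backward logic is assumed
-- to live in cudnn._Pointwise.
local Tanh, parent = torch.class('cudnn.Tanh', 'cudnn._Pointwise')

function Tanh:__init()
   parent.__init(self)
   self.mode = 'CUDNN_ACTIVATION_TANH'
end

From the caller's side the module keeps the usual nn.Module interface, e.g. cudnn.ReLU():cuda():forward(input) on a contiguous 3D or 4D CudaTensor, the same as before the refactor.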