Welcome to mirror list, hosted at ThFree Co, Russian Federation.

github.com/soumith/cudnn.torch.git - Unnamed repository; edit this file 'description' to name the repository.
summary refs log tree commit diff
diff options
context:
space:
mode:
authorsoumith <soumith@fb.com>2014-11-18 05:59:09 +0300
committersoumith <soumith@fb.com>2014-11-18 05:59:09 +0300
commit56b6d5426509b4d0bef7d2648fad72ab4c122c84 (patch)
treedb8a21f36fe03093c0b383a5cf6523ab4e97de13 /ReLU.lua
parent7b21377ffe067a86917715f522eb544239c2ec6c (diff)
adding non-batch mode
Diffstat (limited to 'ReLU.lua')
-rw-r--r--ReLU.lua22
1 file changed, 15 insertions, 7 deletions
diff --git a/ReLU.lua b/ReLU.lua
index 7ffcea5..b7f6ae1 100644
--- a/ReLU.lua
+++ b/ReLU.lua
@@ -5,11 +5,17 @@ local errcheck = cudnn.errcheck
function ReLU:__init()
parent.__init(self)
- self.iSize = torch.LongStorage(4):fill(0)
+ self.iSize = torch.LongStorage(4):fill(0)
end
function ReLU:createIODescriptors(input)
- if not self.iDesc or not self.oDesc or
+ local batch = true
+ if input:dim() == 3 then
+ input = input:view(1, input:size(1), input:size(2), input:size(3))
+ batch = false
+ end
+ assert(input:dim() == 4 and input:isContiguous());
+ if not self.iDesc or not self.oDesc or
input:size(1) ~= self.iSize[1] or input:size(2) ~= self.iSize[2]
or input:size(3) ~= self.iSize[3] or input:size(4) ~= self.iSize[4] then
self.iSize = input:size()
@@ -17,26 +23,28 @@ function ReLU:createIODescriptors(input)
self.output:resizeAs(input)
self.iDesc = cudnn.toDescriptor(input)
self.oDesc = cudnn.toDescriptor(self.output)
+ if not batch then
+ self.gradInput = self.gradInput:view(self.gradInput:size(2), self.gradInput:size(3), self.gradInput:size(4))
+ self.output = self.output:view(self.output:size(2), self.output:size(3), self.output:size(4))
+ end
end
end
function ReLU:updateOutput(input)
- assert(input:dim() == 4 and input:isContiguous());
self:createIODescriptors(input)
errcheck('cudnnActivationForward', cudnn.handle[cutorch.getDevice()-1], 'CUDNN_ACTIVATION_RELU',
- self.iDesc[0], input:data(),
+ self.iDesc[0], input:data(),
self.oDesc[0], self.output:data());
return self.output
end
function ReLU:updateGradInput(input, gradOutput)
- assert(input:dim() == 4 and input:isContiguous());
- assert(gradOutput:dim() == 4 and gradOutput:isContiguous());
+ assert((gradOutput:dim() == 4 or gradOutput:dim() == 3) and gradOutput:isContiguous());
self:createIODescriptors(input)
errcheck('cudnnActivationBackward', cudnn.handle[cutorch.getDevice()-1], 'CUDNN_ACTIVATION_RELU',
self.oDesc[0], self.output:data(),
self.oDesc[0], gradOutput:data(),
- self.iDesc[0], input:data(),
+ self.iDesc[0], input:data(),
self.iDesc[0], self.gradInput:data());
return self.gradInput
end