diff options
author | soumith <soumith@gmail.com> | 2016-08-06 21:48:57 +0300 |
---|---|---|
committer | soumith <soumith@gmail.com> | 2016-08-06 21:50:31 +0300 |
commit | 7afb2414753b9f34ceee6c5cca022d9eb2652a83 (patch) | |
tree | ebe261c7042d02b03b3f9607d515f3609edd2d55 /SpatialDivisiveNormalization.lua | |
parent | 327e6af4bcfbcbe1c4221b6dd9190602f411a2c3 (diff) |
working double precision
Diffstat (limited to 'SpatialDivisiveNormalization.lua')
-rw-r--r-- | SpatialDivisiveNormalization.lua | 12 |
1 file changed, 6 insertions, 6 deletions
diff --git a/SpatialDivisiveNormalization.lua b/SpatialDivisiveNormalization.lua index 3462b61..6917ec8 100644 --- a/SpatialDivisiveNormalization.lua +++ b/SpatialDivisiveNormalization.lua @@ -52,8 +52,8 @@ function DivisiveNorm:createIODescriptors(input) end end -local one = torch.FloatTensor({1}); -local zero = torch.FloatTensor({0}); + + function DivisiveNorm:updateOutput(input) if not self.DivisiveNormDesc then self:resetPoolDescriptors() end @@ -61,9 +61,9 @@ function DivisiveNorm:updateOutput(input) errcheck('cudnnDivisiveNormCrossChannelForward', cudnn.getHandle(), self.DivisiveNormDesc[0], 'CUDNN_DivisiveNorm_CROSS_CHANNEL_DIM1', - one:data(), + cudnn.scalar(input, 1), self.iDesc[0], input:data(), - zero:data(), + cudnn.scalar(input, 0), self.iDesc[0], self.output:data()); return self.output end @@ -80,11 +80,11 @@ function DivisiveNorm:updateGradInput(input, gradOutput) errcheck('cudnnDivisiveNormCrossChannelBackward', cudnn.getHandle(), self.DivisiveNormDesc[0], 'CUDNN_DivisiveNorm_CROSS_CHANNEL_DIM1', - one:data(), + cudnn.scalar(input, 1), self.iDesc[0], self.output:data(), self.iDesc[0], gradOutput:data(), self.iDesc[0], input:data(), - zero:data(), + cudnn.scalar(input, 0), self.iDesc[0], self.gradInput:data()); return self.gradInput end |