diff options
author | soumith <soumith@fb.com> | 2014-11-26 06:11:14 +0300 |
---|---|---|
committer | soumith <soumith@fb.com> | 2014-11-26 06:12:14 +0300 |
commit | 875067f4eb6f5c8eab77ee1acd030fd5e2225fc5 (patch) | |
tree | ebfe4a97600a40947dfb0a875ea67840f8c0d81c | |
parent | b0e6e3f0ad10e931a1f83197b127a442e179e67e (diff) |
lint fixes
-rw-r--r-- | Pointwise.lua | 11 | ||||
-rw-r--r-- | Pooling.lua | 8 | ||||
-rw-r--r-- | SpatialConvolution.lua | 15 | ||||
-rw-r--r-- | SpatialSoftMax.lua | 13 | ||||
-rw-r--r-- | test/test.lua | 101 |
5 files changed, 77 insertions, 71 deletions
diff --git a/Pointwise.lua b/Pointwise.lua index b9157bd..f20f865 100644 --- a/Pointwise.lua +++ b/Pointwise.lua @@ -23,8 +23,12 @@ function Pointwise:createIODescriptors(input) self.iDesc = cudnn.toDescriptor(input) self.oDesc = cudnn.toDescriptor(self.output) if not batch then - self.gradInput = self.gradInput:view(self.gradInput:size(2), self.gradInput:size(3), self.gradInput:size(4)) - self.output = self.output:view(self.output:size(2), self.output:size(3), self.output:size(4)) + self.gradInput = self.gradInput:view(self.gradInput:size(2), + self.gradInput:size(3), + self.gradInput:size(4)) + self.output = self.output:view(self.output:size(2), + self.output:size(3), + self.output:size(4)) end end end @@ -39,7 +43,8 @@ function Pointwise:updateOutput(input) end function Pointwise:updateGradInput(input, gradOutput) - assert((gradOutput:dim() == 4 or gradOutput:dim() == 3) and gradOutput:isContiguous()); + assert((gradOutput:dim() == 4 or gradOutput:dim() == 3) + and gradOutput:isContiguous()); self:createIODescriptors(input) errcheck('cudnnActivationBackward', cudnn.handle[cutorch.getDevice()-1], self.mode, diff --git a/Pooling.lua b/Pooling.lua index 94c616a..97eb617 100644 --- a/Pooling.lua +++ b/Pooling.lua @@ -46,8 +46,12 @@ function Pooling:createIODescriptors(input) self.iDesc = cudnn.toDescriptor(input) self.oDesc = cudnn.toDescriptor(self.output) if not batch then - self.gradInput = self.gradInput:view(self.gradInput:size(2), self.gradInput:size(3), self.gradInput:size(4)) - self.output = self.output:view(self.output:size(2), self.output:size(3), self.output:size(4)) + self.gradInput = self.gradInput:view(self.gradInput:size(2), + self.gradInput:size(3), + self.gradInput:size(4)) + self.output = self.output:view(self.output:size(2), + self.output:size(3), + self.output:size(4)) end end end diff --git a/SpatialConvolution.lua b/SpatialConvolution.lua index d175f90..e939592 100644 --- a/SpatialConvolution.lua +++ b/SpatialConvolution.lua @@ -1,6 +1,5 @@ 
local SpatialConvolution, parent = torch.class('cudnn.SpatialConvolution', 'nn.SpatialConvolution') local ffi = require 'ffi' -local C = cudnn.C local errcheck = cudnn.errcheck function SpatialConvolution:__init(nInputPlane, nOutputPlane, kW, kH, dW, dH, padW, padH) @@ -64,8 +63,12 @@ function SpatialConvolution:createIODescriptors(input) -- create descriptor for output self.oDesc = cudnn.toDescriptor(self.output) if not batch then - self.gradInput = self.gradInput:view(self.gradInput:size(2), self.gradInput:size(3), self.gradInput:size(4)) - self.output = self.output:view(self.output:size(2), self.output:size(3), self.output:size(4)) + self.gradInput = self.gradInput:view(self.gradInput:size(2), + self.gradInput:size(3), + self.gradInput:size(4)) + self.output = self.output:view(self.output:size(2), + self.output:size(3), + self.output:size(4)) end end end @@ -87,7 +90,8 @@ end function SpatialConvolution:updateGradInput(input, gradOutput) if not self.gradInput then return end - assert((gradOutput:dim() == 3 or gradOutput:dim() == 4) and gradOutput:isContiguous()); + assert((gradOutput:dim() == 3 or gradOutput:dim() == 4) + and gradOutput:isContiguous()); if not self.weightDesc then self:resetWeightDescriptors() end self:createIODescriptors(input) errcheck('cudnnConvolutionBackwardData', cudnn.handle[cutorch.getDevice()-1], @@ -101,7 +105,8 @@ end function SpatialConvolution:accGradParameters(input, gradOutput, scale) assert(scale == nil or scale == 1) - assert((gradOutput:dim() == 3 or gradOutput:dim() == 4) and gradOutput:isContiguous()); + assert((gradOutput:dim() == 3 or gradOutput:dim() == 4) + and gradOutput:isContiguous()); self:createIODescriptors(input) if not self.weightDesc then self:resetWeightDescriptors() end -- gradBias diff --git a/SpatialSoftMax.lua b/SpatialSoftMax.lua index e245f24..3a4106d 100644 --- a/SpatialSoftMax.lua +++ b/SpatialSoftMax.lua @@ -1,6 +1,4 @@ local SpatialSoftMax, parent = torch.class('cudnn.SpatialSoftMax', 'nn.Module') 
-local ffi = require 'ffi' -local C = cudnn.C local errcheck = cudnn.errcheck function SpatialSoftMax:__init(fast) @@ -30,8 +28,12 @@ function SpatialSoftMax:createIODescriptors(input) self.iDesc = cudnn.toDescriptor(input) self.oDesc = cudnn.toDescriptor(self.output) if not batch then - self.gradInput = self.gradInput:view(self.gradInput:size(2), self.gradInput:size(3), self.gradInput:size(4)) - self.output = self.output:view(self.output:size(2), self.output:size(3), self.output:size(4)) + self.gradInput = self.gradInput:view(self.gradInput:size(2), + self.gradInput:size(3), + self.gradInput:size(4)) + self.output = self.output:view(self.output:size(2), + self.output:size(3), + self.output:size(4)) end end end @@ -47,7 +49,8 @@ function SpatialSoftMax:updateOutput(input) end function SpatialSoftMax:updateGradInput(input, gradOutput) - assert((gradOutput:dim() == 4 or gradOutput:dim() == 3) and gradOutput:isContiguous()); + assert((gradOutput:dim() == 4 or gradOutput:dim() == 3) + and gradOutput:isContiguous()); self:createIODescriptors(input) errcheck('cudnnSoftmaxBackward', cudnn.handle[cutorch.getDevice()-1], diff --git a/test/test.lua b/test/test.lua index 192f187..855ea71 100644 --- a/test/test.lua +++ b/test/test.lua @@ -7,6 +7,7 @@ local precision_backward = 1e-2 local precision_jac = 1e-3 local nloop = 1 local times = {} +local mytester function cudnntest.SpatialConvolution_forward_batch() @@ -107,7 +108,8 @@ function cudnntest.SpatialConvolution_forward_single() cutorch.synchronize() mytester:asserteq(rescuda:dim(), 3, 'error in dimension') local error = rescuda:float() - groundtruth:float() - mytester:assertlt(error:abs():max(), precision_forward, 'error on state (forward) ') + mytester:assertlt(error:abs():max(), precision_forward, + 'error on state (forward) ') end @@ -154,9 +156,12 @@ function cudnntest.SpatialConvolution_backward_single() local werror = weightcuda:float() - groundweight:float() local berror = biascuda:float() - groundbias:float() - 
mytester:assertlt(error:abs():max(), precision_backward, 'error on state (backward) ') - mytester:assertlt(werror:abs():max(), precision_backward, 'error on weight (backward) ') - mytester:assertlt(berror:abs():max(), precision_backward, 'error on bias (backward) ') + mytester:assertlt(error:abs():max(), precision_backward, + 'error on state (backward) ') + mytester:assertlt(werror:abs():max(), precision_backward, + 'error on weight (backward) ') + mytester:assertlt(berror:abs():max(), precision_backward, + 'error on bias (backward) ') end @@ -212,7 +217,7 @@ function cudnntest.SpatialMaxPooling_single() local groundgrad = sconv:backward(input, gradOutput) cutorch.synchronize() local gconv = cudnn.SpatialMaxPooling(ki,kj,si,sj):cuda() - local rescuda = gconv:forward(input) + local _ = gconv:forward(input) -- serialize and deserialize torch.save('modelTemp.t7', gconv) gconv = torch.load('modelTemp.t7') @@ -222,17 +227,15 @@ function cudnntest.SpatialMaxPooling_single() mytester:asserteq(rescuda:dim(), 3, 'error in dimension') mytester:asserteq(resgrad:dim(), 3, 'error in dimension') local error = rescuda:float() - groundtruth:float() - mytester:assertlt(error:abs():max(), precision_forward, 'error on state (forward) ') + mytester:assertlt(error:abs():max(), precision_forward, + 'error on state (forward) ') error = resgrad:float() - groundgrad:float() - mytester:assertlt(error:abs():max(), precision_backward, 'error on state (backward) ') + mytester:assertlt(error:abs():max(), precision_backward, + 'error on state (backward) ') end function cudnntest.ReLU_single() local from = math.random(1,32) - local ki = math.random(2,4) - local kj = math.random(2,4) - local si = ki - local sj = kj local outi = math.random(1,64) local outj = math.random(1,64) local ini = outi @@ -245,7 +248,7 @@ function cudnntest.ReLU_single() local groundgrad = sconv:backward(input, gradOutput) cutorch.synchronize() local gconv = cudnn.ReLU():cuda() - local rescuda = gconv:forward(input) + local 
_ = gconv:forward(input) -- serialize and deserialize torch.save('modelTemp.t7', gconv) @@ -257,18 +260,16 @@ function cudnntest.ReLU_single() mytester:asserteq(rescuda:dim(), 3, 'error in dimension') mytester:asserteq(resgrad:dim(), 3, 'error in dimension') local error = rescuda:float() - groundtruth:float() - mytester:assertlt(error:abs():max(), precision_forward, 'error on state (forward) ') + mytester:assertlt(error:abs():max(), precision_forward, + 'error on state (forward) ') error = resgrad:float() - groundgrad:float() - mytester:assertlt(error:abs():max(), precision_backward, 'error on state (backward) ') + mytester:assertlt(error:abs():max(), precision_backward, + 'error on state (backward) ') end function cudnntest.ReLU_batch() local bs = math.random(1,32) local from = math.random(1,32) - local ki = math.random(2,4) - local kj = math.random(2,4) - local si = ki - local sj = kj local outi = math.random(1,64) local outj = math.random(1,64) local ini = outi @@ -293,17 +294,15 @@ function cudnntest.ReLU_batch() mytester:asserteq(rescuda:dim(), 4, 'error in dimension') mytester:asserteq(resgrad:dim(), 4, 'error in dimension') local error = rescuda:float() - groundtruth:float() - mytester:assertlt(error:abs():max(), precision_forward, 'error on state (forward) ') + mytester:assertlt(error:abs():max(), precision_forward, + 'error on state (forward) ') error = resgrad:float() - groundgrad:float() - mytester:assertlt(error:abs():max(), precision_backward, 'error on state (backward) ') + mytester:assertlt(error:abs():max(), precision_backward, + 'error on state (backward) ') end function cudnntest.Tanh_single() local from = math.random(1,32) - local ki = math.random(2,4) - local kj = math.random(2,4) - local si = ki - local sj = kj local outi = math.random(1,64) local outj = math.random(1,64) local ini = outi @@ -316,7 +315,7 @@ function cudnntest.Tanh_single() local groundgrad = sconv:backward(input, gradOutput) cutorch.synchronize() local gconv = 
cudnn.Tanh():cuda() - local rescuda = gconv:forward(input) + local _ = gconv:forward(input) -- serialize and deserialize torch.save('modelTemp.t7', gconv) @@ -328,18 +327,16 @@ function cudnntest.Tanh_single() mytester:asserteq(rescuda:dim(), 3, 'error in dimension') mytester:asserteq(resgrad:dim(), 3, 'error in dimension') local error = rescuda:float() - groundtruth:float() - mytester:assertlt(error:abs():max(), precision_forward, 'error on state (forward) ') + mytester:assertlt(error:abs():max(), precision_forward, + 'error on state (forward) ') error = resgrad:float() - groundgrad:float() - mytester:assertlt(error:abs():max(), precision_backward, 'error on state (backward) ') + mytester:assertlt(error:abs():max(), precision_backward, + 'error on state (backward) ') end function cudnntest.Tanh_batch() local bs = math.random(1,32) local from = math.random(1,32) - local ki = math.random(2,4) - local kj = math.random(2,4) - local si = ki - local sj = kj local outi = math.random(1,64) local outj = math.random(1,64) local ini = outi @@ -364,17 +361,15 @@ function cudnntest.Tanh_batch() mytester:asserteq(rescuda:dim(), 4, 'error in dimension') mytester:asserteq(resgrad:dim(), 4, 'error in dimension') local error = rescuda:float() - groundtruth:float() - mytester:assertlt(error:abs():max(), precision_forward, 'error on state (forward) ') + mytester:assertlt(error:abs():max(), precision_forward, + 'error on state (forward) ') error = resgrad:float() - groundgrad:float() - mytester:assertlt(error:abs():max(), precision_backward, 'error on state (backward) ') + mytester:assertlt(error:abs():max(), precision_backward, + 'error on state (backward) ') end function cudnntest.Sigmoid_single() local from = math.random(1,32) - local ki = math.random(2,4) - local kj = math.random(2,4) - local si = ki - local sj = kj local outi = math.random(1,64) local outj = math.random(1,64) local ini = outi @@ -387,7 +382,7 @@ function cudnntest.Sigmoid_single() local groundgrad = 
sconv:backward(input, gradOutput) cutorch.synchronize() local gconv = cudnn.Sigmoid():cuda() - local rescuda = gconv:forward(input) + local _ = gconv:forward(input) -- serialize and deserialize torch.save('modelTemp.t7', gconv) @@ -399,18 +394,16 @@ function cudnntest.Sigmoid_single() mytester:asserteq(rescuda:dim(), 3, 'error in dimension') mytester:asserteq(resgrad:dim(), 3, 'error in dimension') local error = rescuda:float() - groundtruth:float() - mytester:assertlt(error:abs():max(), precision_forward, 'error on state (forward) ') + mytester:assertlt(error:abs():max(), precision_forward, + 'error on state (forward) ') error = resgrad:float() - groundgrad:float() - mytester:assertlt(error:abs():max(), precision_backward, 'error on state (backward) ') + mytester:assertlt(error:abs():max(), precision_backward, + 'error on state (backward) ') end function cudnntest.Sigmoid_batch() local bs = math.random(1,32) local from = math.random(1,32) - local ki = math.random(2,4) - local kj = math.random(2,4) - local si = ki - local sj = kj local outi = math.random(1,64) local outj = math.random(1,64) local ini = outi @@ -435,17 +428,15 @@ function cudnntest.Sigmoid_batch() mytester:asserteq(rescuda:dim(), 4, 'error in dimension') mytester:asserteq(resgrad:dim(), 4, 'error in dimension') local error = rescuda:float() - groundtruth:float() - mytester:assertlt(error:abs():max(), precision_forward, 'error on state (forward) ') + mytester:assertlt(error:abs():max(), precision_forward, + 'error on state (forward) ') error = resgrad:float() - groundgrad:float() - mytester:assertlt(error:abs():max(), precision_backward, 'error on state (backward) ') + mytester:assertlt(error:abs():max(), precision_backward, + 'error on state (backward) ') end function cudnntest.SoftMax_single() local from = math.random(1,32) - local ki = math.random(2,4) - local kj = math.random(2,4) - local si = ki - local sj = kj local outi = math.random(1,64) local outj = math.random(1,64) local ini = outi @@ 
-458,7 +449,7 @@ function cudnntest.SoftMax_single() local groundgrad = sconv:backward(input, gradOutput) cutorch.synchronize() local gconv = cudnn.SoftMax():cuda() - local rescuda = gconv:forward(input) + local _ = gconv:forward(input) -- serialize and deserialize torch.save('modelTemp.t7', gconv) @@ -481,10 +472,6 @@ end function cudnntest.SoftMax_batch() local bs = math.random(1,32) local from = math.random(1,32) - local ki = math.random(2,4) - local kj = math.random(2,4) - local si = ki - local sj = kj local outi = math.random(1,64) local outj = math.random(1,64) local ini = outi @@ -527,5 +514,7 @@ mytester:add(cudnntest) for i=1,cutorch.getDeviceCount() do print('Running test on device: ' .. i) cutorch.setDevice(i) - mytester:run() + mytester:run(tests) end + +os.execute('rm -f modelTemp.t7')