| author | Ronan Collobert <ronan@collobert.com> | 2012-02-03 14:04:01 +0400 |
|---|---|---|
| committer | Ronan Collobert <ronan@collobert.com> | 2012-02-03 14:04:01 +0400 |
| commit | 5a180a34b73b483ecf66ec88d42c2c371de7520c | |
| tree | 3ec9311398e4ebfb30e8771990894f73188cab9e | |
| parent | 5ff7821884e4d06273b265fb8d95ea303474c01c | |
{min,max,sum,mean,var,std}all now without 'all' in lua
| -rw-r--r-- | Add.lua | 2 |
| -rw-r--r-- | Jacobian.lua | 8 |
| -rw-r--r-- | SpatialConvolutionMap.lua | 4 |
| -rw-r--r-- | SpatialSubtractiveNormalization.lua | 2 |
| -rw-r--r-- | WeightedEuclidean.lua | 2 |
5 files changed, 9 insertions, 9 deletions
```diff
diff --git a/Add.lua b/Add.lua
--- a/Add.lua
+++ b/Add.lua
@@ -47,7 +47,7 @@ end
 function Add:accGradParameters(input, gradOutput, scale)
    scale = scale or 1
    if self.gradBias:size(1) == 1 then
-      self.gradBias[1] = self.gradBias[1] + scale*gradOutput:sumall();
+      self.gradBias[1] = self.gradBias[1] + scale*gradOutput:sum();
    else
       self.gradBias:add(scale, gradOutput)
    end
diff --git a/Jacobian.lua b/Jacobian.lua
index 04330ac..baff6fc 100644
--- a/Jacobian.lua
+++ b/Jacobian.lua
@@ -118,7 +118,7 @@ function nn.Jacobian.testJacobian (module, input, minval, maxval)
    local jac_fprop = nn.Jacobian.forward(module,input)
    local jac_bprop = nn.Jacobian.backward(module,input)
    local error = jac_fprop-jac_bprop
-   return error:abs():maxall()
+   return error:abs():max()
 end
 
 function nn.Jacobian.testJacobianParameters (module, input, param, dparam, minval, maxval)
@@ -130,7 +130,7 @@ function nn.Jacobian.testJacobianParameters (module, input, param, dparam, minva
    local jac_bprop = nn.Jacobian.backward(module, input, param, dparam)
    local jac_fprop = nn.Jacobian.forward(module, input, param)
    local error = jac_fprop - jac_bprop
-   return error:abs():maxall()
+   return error:abs():max()
 end
 
 function nn.Jacobian.testJacobianUpdateParameters (module, input, param, minval, maxval)
@@ -143,7 +143,7 @@ function nn.Jacobian.testJacobianUpdateParameters (module, input, param, minval,
    local params_fprop = nn.Jacobian.forwardUpdate(module, input, param)
 
    local error = params_fprop - params_bprop
-   return error:abs():maxall()
+   return error:abs():max()
 end
 
 function nn.Jacobian.testIO(module,input, minval, maxval)
@@ -177,7 +177,7 @@ function nn.Jacobian.testIO(module,input, minval, maxval)
 
    local errf = fo - fo2
    local errb = bo - bo2
-   return errf:abs():maxall(), errb:abs():maxall()
+   return errf:abs():max(), errb:abs():max()
 end
 
 function nn.Jacobian.testAllUpdate(module, input, weight, gradWeight)
diff --git a/SpatialConvolutionMap.lua b/SpatialConvolutionMap.lua
index 0dbff2f..4b525ba 100644
--- a/SpatialConvolutionMap.lua
+++ b/SpatialConvolutionMap.lua
@@ -65,8 +65,8 @@ function SpatialConvolutionMap:__init(conMatrix, kW, kH, dW, dH)
    self.dW = dW
    self.dH = dH
    self.connTable = conMatrix
-   self.nInputPlane = self.connTable:select(2,1):maxall()
-   self.nOutputPlane = self.connTable:select(2,2):maxall()
+   self.nInputPlane = self.connTable:select(2,1):max()
+   self.nOutputPlane = self.connTable:select(2,2):max()
    self.weight = torch.Tensor(self.connTable:size(1), kH, kW)
    self.bias = torch.Tensor(self.nOutputPlane)
diff --git a/SpatialSubtractiveNormalization.lua b/SpatialSubtractiveNormalization.lua
index 4df0fc1..070e4b8 100644
--- a/SpatialSubtractiveNormalization.lua
+++ b/SpatialSubtractiveNormalization.lua
@@ -17,7 +17,7 @@ function SpatialSubtractiveNormalization:__init(nInputPlane, kernel)
    end
 
    -- normalize kernel
-   self.kernel:div(self.kernel:sumall() * self.nInputPlane)
+   self.kernel:div(self.kernel:sum() * self.nInputPlane)
 
    -- padding values
    local padH = math.floor(self.kernel:size(1)/2)
diff --git a/WeightedEuclidean.lua b/WeightedEuclidean.lua
index 2761228..5337eec 100644
--- a/WeightedEuclidean.lua
+++ b/WeightedEuclidean.lua
@@ -46,7 +46,7 @@ function WeightedEuclidean:updateOutput(input)
       self.temp:copy(input):add(-1,self.templates:select(2,o))
       self.temp:cmul(self.temp)
       self.temp:cmul(self.diagCov:select(2,o)):cmul(self.diagCov:select(2,o))
-      self.output[o] = math.sqrt(self.temp:sumall())
+      self.output[o] = math.sqrt(self.temp:sum())
    end
    return self.output
 end
```
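In practice, the rename means the whole-tensor (scalar) reductions are now called without the `all` suffix from Lua. A minimal before/after sketch, assuming a Torch build with this commit applied; the tensor values are made up purely for illustration:

```lua
require 'torch'

local t = torch.Tensor{{1, 2}, {3, 4}}  -- illustrative values

-- before this commit (old Lua names):
--   t:sumall()   -- scalar sum of every element
--   t:maxall()   -- scalar max over every element

-- after this commit, the same scalars come from the plain names:
print(t:sum())   -- 10
print(t:max())   -- 4
print(t:mean())  -- 2.5
```

The dimension-wise forms (e.g. `t:sum(1)`) still return tensors, so it is the no-argument, scalar-returning calls that this rename affects.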