Welcome to the mirror list, hosted at ThFree Co, Russian Federation.

github.com/torch/nn.git - Unnamed repository; edit this file 'description' to name the repository.
summary | refs | log | tree | commit | diff
diff options
context:
space:
mode:
authorClement Farabet <clement.farabet@gmail.com>2012-10-22 22:53:37 +0400
committerClement Farabet <clement.farabet@gmail.com>2012-10-22 22:53:37 +0400
commitb1bd980dc1ea466a640e4af87535fed1044c978c (patch)
tree3b7517e44a8e433aaf74714770ab7a111fb3ca8e
parentffa6395c00d5cf66467733bd0b19364a503704be (diff)
Using better optimized SpatialConvolutionMap, for Spatial*Normalization
layer
-rw-r--r--  SpatialDivisiveNormalization.lua     28
-rw-r--r--  SpatialSubtractiveNormalization.lua  14
2 files changed, 15 insertions, 27 deletions
diff --git a/SpatialDivisiveNormalization.lua b/SpatialDivisiveNormalization.lua
index 7715402..23a2c0b 100644
--- a/SpatialDivisiveNormalization.lua
+++ b/SpatialDivisiveNormalization.lua
@@ -29,15 +29,11 @@ function SpatialDivisiveNormalization:__init(nInputPlane, kernel, threshold, thr
self.meanestimator = nn.Sequential()
self.meanestimator:add(nn.SpatialZeroPadding(padW, padW, padH, padH))
if kdim == 2 then
- self.meanestimator:add(nn.SpatialConvolutionMap(nn.tables.oneToOne(self.nInputPlane),
- self.kernel:size(2), self.kernel:size(1)))
+ self.meanestimator:add(nn.SpatialConvolution(self.nInputPlane, 1, self.kernel:size(2), self.kernel:size(1)))
else
- self.meanestimator:add(nn.SpatialConvolutionMap(nn.tables.oneToOne(self.nInputPlane),
- self.kernel:size(1), 1))
- self.meanestimator:add(nn.SpatialConvolutionMap(nn.tables.oneToOne(self.nInputPlane),
- 1, self.kernel:size(1)))
+ self.meanestimator:add(nn.SpatialConvolutionMap(nn.tables.oneToOne(self.nInputPlane), self.kernel:size(1), 1))
+ self.meanestimator:add(nn.SpatialConvolution(self.nInputPlane, 1, 1, self.kernel:size(1)))
end
- self.meanestimator:add(nn.Sum(1))
self.meanestimator:add(nn.Replicate(self.nInputPlane))
-- create convolutional std estimator
@@ -45,15 +41,11 @@ function SpatialDivisiveNormalization:__init(nInputPlane, kernel, threshold, thr
self.stdestimator:add(nn.Square())
self.stdestimator:add(nn.SpatialZeroPadding(padW, padW, padH, padH))
if kdim == 2 then
- self.stdestimator:add(nn.SpatialConvolutionMap(nn.tables.oneToOne(self.nInputPlane),
- self.kernel:size(2), self.kernel:size(1)))
+ self.stdestimator:add(nn.SpatialConvolution(self.nInputPlane, 1, self.kernel:size(2), self.kernel:size(1)))
else
- self.stdestimator:add(nn.SpatialConvolutionMap(nn.tables.oneToOne(self.nInputPlane),
- self.kernel:size(1), 1))
- self.stdestimator:add(nn.SpatialConvolutionMap(nn.tables.oneToOne(self.nInputPlane),
- 1, self.kernel:size(1)))
+ self.stdestimator:add(nn.SpatialConvolutionMap(nn.tables.oneToOne(self.nInputPlane), self.kernel:size(1), 1))
+ self.stdestimator:add(nn.SpatialConvolution(self.nInputPlane, 1, 1, self.kernel:size(1)))
end
- self.stdestimator:add(nn.Sum(1))
self.stdestimator:add(nn.Replicate(self.nInputPlane))
self.stdestimator:add(nn.Sqrt())
@@ -61,8 +53,8 @@ function SpatialDivisiveNormalization:__init(nInputPlane, kernel, threshold, thr
if kdim == 2 then
self.kernel:div(self.kernel:sum() * self.nInputPlane)
for i = 1,self.nInputPlane do
- self.meanestimator.modules[2].weight[i] = self.kernel
- self.stdestimator.modules[3].weight[i] = self.kernel
+ self.meanestimator.modules[2].weight[1][i] = self.kernel
+ self.stdestimator.modules[3].weight[1][i] = self.kernel
end
self.meanestimator.modules[2].bias:zero()
self.stdestimator.modules[3].bias:zero()
@@ -70,9 +62,9 @@ function SpatialDivisiveNormalization:__init(nInputPlane, kernel, threshold, thr
self.kernel:div(self.kernel:sum() * math.sqrt(self.nInputPlane))
for i = 1,self.nInputPlane do
self.meanestimator.modules[2].weight[i]:copy(self.kernel)
- self.meanestimator.modules[3].weight[i]:copy(self.kernel)
+ self.meanestimator.modules[3].weight[1][i]:copy(self.kernel)
self.stdestimator.modules[3].weight[i]:copy(self.kernel)
- self.stdestimator.modules[4].weight[i]:copy(self.kernel)
+ self.stdestimator.modules[4].weight[1][i]:copy(self.kernel)
end
self.meanestimator.modules[2].bias:zero()
self.meanestimator.modules[3].bias:zero()
diff --git a/SpatialSubtractiveNormalization.lua b/SpatialSubtractiveNormalization.lua
index dfa8fd2..f2c2c31 100644
--- a/SpatialSubtractiveNormalization.lua
+++ b/SpatialSubtractiveNormalization.lua
@@ -30,27 +30,23 @@ function SpatialSubtractiveNormalization:__init(nInputPlane, kernel)
self.meanestimator = nn.Sequential()
self.meanestimator:add(nn.SpatialZeroPadding(padW, padW, padH, padH))
if kdim == 2 then
- self.meanestimator:add(nn.SpatialConvolutionMap(nn.tables.oneToOne(self.nInputPlane),
- self.kernel:size(2), self.kernel:size(1)))
+ self.meanestimator:add(nn.SpatialConvolution(self.nInputPlane, 1, self.kernel:size(2), self.kernel:size(1)))
else
- self.meanestimator:add(nn.SpatialConvolutionMap(nn.tables.oneToOne(self.nInputPlane),
- self.kernel:size(1), 1))
- self.meanestimator:add(nn.SpatialConvolutionMap(nn.tables.oneToOne(self.nInputPlane),
- 1, self.kernel:size(1)))
+ self.meanestimator:add(nn.SpatialConvolutionMap(nn.tables.oneToOne(self.nInputPlane), self.kernel:size(1), 1))
+ self.meanestimator:add(nn.SpatialConvolution(self.nInputPlane, 1, 1, self.kernel:size(1)))
end
- self.meanestimator:add(nn.Sum(1))
self.meanestimator:add(nn.Replicate(self.nInputPlane))
-- set kernel and bias
if kdim == 2 then
for i = 1,self.nInputPlane do
- self.meanestimator.modules[2].weight[i] = self.kernel
+ self.meanestimator.modules[2].weight[1][i] = self.kernel
end
self.meanestimator.modules[2].bias:zero()
else
for i = 1,self.nInputPlane do
self.meanestimator.modules[2].weight[i]:copy(self.kernel)
- self.meanestimator.modules[3].weight[i]:copy(self.kernel)
+ self.meanestimator.modules[3].weight[1][i]:copy(self.kernel)
end
self.meanestimator.modules[2].bias:zero()
self.meanestimator.modules[3].bias:zero()