
github.com/torch/nn.git
author    soumith <soumith@fb.com>  2016-01-26 01:17:22 +0300
committer soumith <soumith@fb.com>  2016-01-26 01:17:22 +0300
commit    21dcae363958aee56052db717b2dced76e6ec3cf (patch)
tree      2812150afdc0f1f038d8fc4b0dd875980af66bc4 /Mean.lua
parent    475f5156aacfd6455b32659b51da336d7398047c (diff)
refactoring Mean into Sum
Diffstat (limited to 'Mean.lua')
-rw-r--r--  Mean.lua  43
1 file changed, 8 insertions, 35 deletions
diff --git a/Mean.lua b/Mean.lua
index 14ad0d1..8087ac9 100644
--- a/Mean.lua
+++ b/Mean.lua
@@ -1,41 +1,14 @@
-local Mean, parent = torch.class('nn.Mean', 'nn.Module')
+local Mean, parent = torch.class('nn.Mean', 'nn.Sum')
 
-function Mean:__init(dimension, nInputDims)
-   parent.__init(self)
-   dimension = dimension or 1
-   self.dimension = dimension
-   -- do not assign default value to nInputDims or it will break backward compatibility
-   self.nInputDims = nInputDims
-   self._gradInput = torch.Tensor()
-end
+--[[
 
-function Mean:_getPositiveDimension(input)
-   local dimension = self.dimension
-   if dimension < 0 then
-      dimension = input:dim() + dimension + 1
-   elseif self.nInputDims and input:dim()==(self.nInputDims+1) then
-      dimension = dimension + 1
-   end
-   return dimension
-end
+This file is still here because of backward compatibility.
 
-function Mean:updateOutput(input)
-   local dimension = self:_getPositiveDimension(input)
-   self.output:mean(input, dimension)
-   if self.output:nDimension() > 1 then
-      self.output = self.output:select(dimension, 1)
-   end
-   return self.output
-end
+Please use instead "nn.Sum(dimension, nInputDims, sizeAverage)"
+
+]]--
 
-function Mean:updateGradInput(input, gradOutput)
-   local dimension = self:_getPositiveDimension(input)
-   self._gradInput:resizeAs(gradOutput):copy(gradOutput)
-   self._gradInput:mul(1/input:size(dimension))
-   if input:nDimension() > 1 then
-      self._gradInput = nn.utils.addSingletonDimension(self._gradInput, dimension)
-   end
-   self.gradInput = self._gradInput:expandAs(input)
-   return self.gradInput
+function Mean:__init(dimension, nInputDims)
+   parent.__init(self, dimension, nInputDims, true)
 end
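
After this refactor, nn.Mean is only a thin subclass of nn.Sum that fixes sizeAverage = true, as the new comment in the file says. A minimal usage sketch (not part of the commit; assumes the torch/nn package is installed, and the 4x5 tensor is purely illustrative) showing that the two constructions should produce the same output:

-- Sketch: nn.Mean(dimension) should now be equivalent to
-- nn.Sum(dimension, nInputDims, sizeAverage) with sizeAverage = true.
require 'nn'

local input = torch.randn(4, 5)

local meanLayer = nn.Mean(1)            -- mean over dimension 1
local sumLayer  = nn.Sum(1, nil, true)  -- sum over dimension 1, divided by its size

print(meanLayer:forward(input))         -- 5-element tensor of column means
print(sumLayer:forward(input))          -- expected to match the output above

Existing code that constructs nn.Mean keeps working, since the subclass forwards dimension and nInputDims to nn.Sum unchanged and only pins sizeAverage to true.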