Welcome to mirror list, hosted at ThFree Co, Russian Federation.

github.com/torch/nn.git - Unnamed repository; edit this file 'description' to name the repository.
summaryrefslogtreecommitdiff
diff options
context:
space:
mode:
authorJonathan Tompson <jonathantompson@gmail.com>2015-03-27 00:39:43 +0300
committerJonathan Tompson <jonathantompson@gmail.com>2015-03-27 00:39:43 +0300
commit91b494dd4d53ace615651ba2574f90ffc7d4a2df (patch)
tree40566777bdb0734c7a278fa50723e41a62815760 /SpatialDropout.lua
parent6577b5535eb136e4668dbd1fe509c79df19678e1 (diff)
Added SpatialDropout + doc + test.
Diffstat (limited to 'SpatialDropout.lua')
-rwxr-xr-xSpatialDropout.lua43
1 file changed, 43 insertions, 0 deletions
diff --git a/SpatialDropout.lua b/SpatialDropout.lua
new file mode 100755
index 0000000..6736783
--- /dev/null
+++ b/SpatialDropout.lua
@@ -0,0 +1,43 @@
--- Spatial (feature-map-wise) dropout module.
-- Unlike nn.Dropout, which zeroes individual activations, this module
-- drops entire feature maps at once (see updateOutput for rationale).
local SpatialDropout, Parent = torch.class('nn.SpatialDropout', 'nn.Module')

--- Constructor.
-- @param p dropout probability (default 0.5); must satisfy 0 <= p < 1
function SpatialDropout:__init(p)
   Parent.__init(self)
   self.p = p or 0.5
   -- Validate early, consistent with nn.Dropout: a p outside [0, 1) would
   -- make bernoulli(1-p) meaningless and break the eval-mode scaling.
   assert(self.p >= 0 and self.p < 1, 'p must satisfy 0 <= p < 1')
   self.train = true
   -- Per-feature-map Bernoulli mask, resized lazily in updateOutput.
   self.noise = torch.Tensor()
end
+
--- Forward pass: drop whole feature maps during training.
-- In training mode a Bernoulli mask with keep probability (1 - p) is
-- sampled per feature map -- shape (nbatch, nfeat, 1, 1) for 4D input,
-- (nfeat, 1, 1) for 3D input -- then expanded across the spatial
-- dimensions, so each map is kept whole or zeroed whole.
-- In evaluation mode the output is scaled by (1 - p) instead
-- (non-inverted dropout), matching the expected training activation.
-- @param input 4D (nbatch, nfeat, h, w) or 3D (nfeat, h, w) tensor
-- @return self.output, same size as input
function SpatialDropout:updateOutput(input)
   self.output:resizeAs(input):copy(input)
   if self.train then
      if input:dim() == 4 then
         self.noise:resize(input:size(1), input:size(2), 1, 1)
      elseif input:dim() == 3 then
         self.noise:resize(input:size(1), 1, 1)
      else
         -- Fix: the original message mislabeled the 3D case as "2D".
         error('Input must be 4D (nbatch, nfeat, h, w) or 3D (nfeat, h, w)')
      end
      self.noise:bernoulli(1-self.p)
      -- We expand the random dropouts to the entire feature map because the
      -- features are likely correlated across the map and so the dropout
      -- should also be correlated.
      self.output:cmul(torch.expandAs(self.noise, input))
   else
      self.output:mul(1-self.p)
   end
   return self.output
end
+
--- Backward pass: mask incoming gradients with the stored dropout noise.
-- Only defined in training mode, since no noise mask exists otherwise.
-- @param input the tensor passed to updateOutput (used only for its size)
-- @param gradOutput gradient w.r.t. the module output
-- @return self.gradInput gradient w.r.t. the module input
function SpatialDropout:updateGradInput(input, gradOutput)
   if not self.train then
      error('backprop only defined while training')
   end
   -- Gradients flow only through the feature maps that were kept.
   local mask = torch.expandAs(self.noise, input)
   self.gradInput:resizeAs(gradOutput):copy(gradOutput):cmul(mask)
   return self.gradInput
end
+
--- Set the dropout probability in place.
-- Takes effect on the next updateOutput call; no validation is performed
-- here (NOTE(review): unlike __init conventions elsewhere in nn, p is
-- accepted unchecked -- callers must supply 0 <= p < 1).
-- @param p new dropout probability
function SpatialDropout:setp(p)
   self.p = p
end