| author | nicholas-leonard <nick@nikopia.org> | 2014-07-09 22:26:32 +0400 |
|---|---|---|
| committer | nicholas-leonard <nick@nikopia.org> | 2014-07-09 22:26:32 +0400 |
| commit | b7822796d2d02815076b3e10b0b32fcc3c133242 (patch) | |
| tree | 62a12e9ee155c236497873cbe5e8901c9a41cd3a | |
| parent | 0413ddd6dc0a35b5281fcaaebc73144b15f285fa (diff) | |
Dropout
| -rw-r--r-- | Dropout.lua | 41 |
|---|---|---|
| -rw-r--r-- | init.lua | 1 |

2 files changed, 42 insertions, 0 deletions
```diff
diff --git a/Dropout.lua b/Dropout.lua
new file mode 100644
index 0000000..ac6c463
--- /dev/null
+++ b/Dropout.lua
@@ -0,0 +1,41 @@
+local Dropout, Parent = torch.class('nn.Dropout', 'nn.Module')
+
+function Dropout:__init(p)
+   Parent.__init(self)
+   self.p = p or 0.5
+   self.train = true
+   if self.p >= 1 or self.p < 0 then
+      error('<Dropout> illegal percentage, must be 0 <= p < 1')
+   end
+   self.noise = torch.Tensor()
+   self.fnoise = torch.Tensor()
+end
+
+function Dropout:updateOutput(input)
+   self.output:resizeAs(input):copy(input)
+   if self.train then
+      self.fnoise = self.fnoise:float()
+      self.fnoise:resize(input:size())
+      self.noise:resizeAs(input)
+      self.fnoise:bernoulli(1-self.p)
+      self.noise:copy(self.fnoise)
+      self.output:cmul(self.noise)
+   else
+      self.output:mul(1-self.p)
+   end
+   return self.output
+end
+
+function Dropout:updateGradInput(input, gradOutput)
+   if self.train then
+      self.gradInput:resizeAs(gradOutput):copy(gradOutput)
+      self.gradInput:cmul(self.noise) -- simply mask the gradients with the noise vector
+   else
+      error('backprop only defined while training')
+   end
+   return self.gradInput
+end
+
+function Dropout:setp(p)
+   self.p = p
+end
diff --git a/init.lua b/init.lua
--- a/init.lua
+++ b/init.lua
@@ -26,6 +26,7 @@
 include('Mul.lua')
 include('MulConstant.lua')
 include('Add.lua')
 include('AddConstant.lua')
+include('Dropout.lua')
 include('CAddTable.lua')
 include('CDivTable.lua')
```
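For context, a minimal usage sketch of the module this commit adds (not part of the commit itself; it assumes the torch and nn packages are installed and uses only the API shown in the diff above):

```lua
require 'nn'  -- loads init.lua, which now includes Dropout.lua

local drop = nn.Dropout(0.3)      -- drop each unit with probability p = 0.3
local input = torch.rand(4, 10)

-- Training mode (the default): a fresh Bernoulli(1-p) mask zeroes
-- roughly 30% of the entries on every forward pass.
drop.train = true
local trainOut = drop:forward(input)

-- Gradients are masked with the same noise tensor used in the forward pass.
local gradInput = drop:backward(input, torch.ones(4, 10))

-- Evaluation mode: no masking; the output is the input scaled by (1-p).
drop.train = false
local evalOut = drop:forward(input)
```

Note the design choice: rather than inverted dropout (scaling by 1/(1-p) at training time), this implementation leaves training activations unscaled and multiplies by (1-p) at evaluation time. Since the mask entries have expected value 1-p, the expected activation is the same in both modes.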