diff options
author     Joan Puigcerver <joapuipe@gmail.com>   2016-07-05 12:27:19 +0300
committer  Joan Puigcerver <joapuipe@gmail.com>   2016-07-05 12:27:19 +0300
commit     d716d9f265d19cf529d9e62e702e1409a28018f5 (patch)
tree       bbe7893f11a5bd2c7de5a7d3f38dfc3cc4c2eaa7
parent     cfdcc281a5bb9565c884e1c89f0840ffdb184001 (diff)
Added missing changes
-rw-r--r--  BLSTM.lua    4
-rw-r--r--  GRU.lua      4
-rw-r--r--  LSTM.lua     4
-rw-r--r--  RNN.lua      6
-rw-r--r--  RNNReLU.lua  4
-rw-r--r--  RNNTanh.lua  4
6 files changed, 13 insertions, 13 deletions
--- a/BLSTM.lua
+++ b/BLSTM.lua
@@ -1,7 +1,7 @@
 local BLSTM, parent = torch.class('cudnn.BLSTM', 'cudnn.RNN')
-function BLSTM:__init(inputSize, hiddenSize, numLayers, batchFirst)
-    parent.__init(self,inputSize, hiddenSize, numLayers, batchFirst)
+function BLSTM:__init(inputSize, hiddenSize, numLayers, batchFirst, dropout)
+    parent.__init(self, inputSize, hiddenSize, numLayers, batchFirst, dropout)
     self.bidirectional = 'CUDNN_BIDIRECTIONAL'
     self.mode = 'CUDNN_LSTM'
     self.numDirections = 2
--- a/GRU.lua
+++ b/GRU.lua
@@ -1,7 +1,7 @@
 local GRU, parent = torch.class('cudnn.GRU', 'cudnn.RNN')
-function GRU:__init(inputSize, hiddenSize, numLayers, batchFirst)
-    parent.__init(self,inputSize, hiddenSize, numLayers, batchFirst)
+function GRU:__init(inputSize, hiddenSize, numLayers, batchFirst, dropout)
+    parent.__init(self,inputSize, hiddenSize, numLayers, batchFirst, dropout)
     self.mode = 'CUDNN_GRU'
     self:reset()
 end
--- a/LSTM.lua
+++ b/LSTM.lua
@@ -1,7 +1,7 @@
 local LSTM, parent = torch.class('cudnn.LSTM', 'cudnn.RNN')
-function LSTM:__init(inputSize, hiddenSize, numLayers, batchFirst)
-    parent.__init(self,inputSize, hiddenSize, numLayers, batchFirst)
+function LSTM:__init(inputSize, hiddenSize, numLayers, batchFirst, dropout)
+    parent.__init(self,inputSize, hiddenSize, numLayers, batchFirst, dropout)
     self.mode = 'CUDNN_LSTM'
     self:reset()
 end
--- a/RNN.lua
+++ b/RNN.lua
@@ -4,7 +4,7 @@
 local errcheck = cudnn.errcheck
 local DESCS = {'rnnDesc', 'dropoutDesc', 'wDesc', 'xDescs', 'yDescs', 'hxDesc', 'hyDesc', 'cxDesc', 'cyDesc'}
 
-function RNN:__init(inputSize, hiddenSize, numLayers, batchFirst)
+function RNN:__init(inputSize, hiddenSize, numLayers, batchFirst, dropout)
     parent.__init(self)
 
     self.datatype = 'CUDNN_DATA_FLOAT'
@@ -17,7 +17,7 @@ function RNN:__init(inputSize, hiddenSize, numLayers, batchFirst)
     self.numDirections = 1 -- set to 2 for bi-directional.
     self.inputMode = 'CUDNN_LINEAR_INPUT'
     self.mode = 'CUDNN_RNN_RELU'
-    self.dropout = 0
+    self.dropout = dropout or 0
     self.seed = 0x01234567
     self.batchFirst = batchFirst or false -- Set to true for batch x time x inputdim.
@@ -279,7 +279,7 @@ function RNN:updateOutput(input)
     local oSize = torch.LongStorage({self.seqLength, self.miniBatch, self.hiddenSize * self.numDirections})
     local oStride = torch.LongStorage({self.miniBatch * self.hiddenSize * self.numDirections, self.hiddenSize * self.numDirections, 1})
     self.output:resize(oSize, oStride)
-    local y = self.output
+    local y = self.output
     local w = self.weight
     local hy = self:resizeHidden(self.hiddenOutput):zero()
     local cy = self:resizeHidden(self.cellOutput):zero()
diff --git a/RNNReLU.lua b/RNNReLU.lua
index 3aa8ee9..fc262e2 100644
--- a/RNNReLU.lua
+++ b/RNNReLU.lua
@@ -1,7 +1,7 @@
 local RNNReLU, parent = torch.class('cudnn.RNNReLU', 'cudnn.RNN')
-function RNNReLU:__init(inputSize, hiddenSize, numLayers, batchFirst)
-    parent.__init(self,inputSize, hiddenSize, numLayers, batchFirst)
+function RNNReLU:__init(inputSize, hiddenSize, numLayers, batchFirst, dropout)
+    parent.__init(self,inputSize, hiddenSize, numLayers, batchFirst, dropout)
     self.mode = 'CUDNN_RNN_RELU'
     self:reset()
 end
diff --git a/RNNTanh.lua b/RNNTanh.lua
index 98fa87c..3382a52 100644
--- a/RNNTanh.lua
+++ b/RNNTanh.lua
@@ -1,7 +1,7 @@
 local RNNTanh, parent = torch.class('cudnn.RNNTanh', 'cudnn.RNN')
-function RNNTanh:__init(inputSize, hiddenSize, numLayers, batchFirst)
-    parent.__init(self,inputSize, hiddenSize, numLayers, batchFirst)
+function RNNTanh:__init(inputSize, hiddenSize, numLayers, batchFirst, dropout)
+    parent.__init(self,inputSize, hiddenSize, numLayers, batchFirst, dropout)
     self.mode = 'CUDNN_RNN_TANH'
     self:reset()
 end