
github.com/soumith/cudnn.torch.git
 BLSTM.lua   | 4 ++--
 GRU.lua     | 4 ++--
 LSTM.lua    | 4 ++--
 RNN.lua     | 6 +++---
 RNNReLU.lua | 4 ++--
 RNNTanh.lua | 4 ++--
 6 files changed, 13 insertions(+), 13 deletions(-)
diff --git a/BLSTM.lua b/BLSTM.lua
index 8feebf1..d17dbca 100644
--- a/BLSTM.lua
+++ b/BLSTM.lua
@@ -1,7 +1,7 @@
 local BLSTM, parent = torch.class('cudnn.BLSTM', 'cudnn.RNN')
 
-function BLSTM:__init(inputSize, hiddenSize, numLayers, batchFirst)
-   parent.__init(self,inputSize, hiddenSize, numLayers, batchFirst)
+function BLSTM:__init(inputSize, hiddenSize, numLayers, batchFirst, dropout)
+   parent.__init(self, inputSize, hiddenSize, numLayers, batchFirst, dropout)
    self.bidirectional = 'CUDNN_BIDIRECTIONAL'
    self.mode = 'CUDNN_LSTM'
    self.numDirections = 2
diff --git a/GRU.lua b/GRU.lua
index 615bdf7..c41fc5d 100644
--- a/GRU.lua
+++ b/GRU.lua
@@ -1,7 +1,7 @@
 local GRU, parent = torch.class('cudnn.GRU', 'cudnn.RNN')
 
-function GRU:__init(inputSize, hiddenSize, numLayers, batchFirst)
-   parent.__init(self,inputSize, hiddenSize, numLayers, batchFirst)
+function GRU:__init(inputSize, hiddenSize, numLayers, batchFirst, dropout)
+   parent.__init(self,inputSize, hiddenSize, numLayers, batchFirst, dropout)
    self.mode = 'CUDNN_GRU'
    self:reset()
 end
diff --git a/LSTM.lua b/LSTM.lua
index 29c199c..d9f10a2 100644
--- a/LSTM.lua
+++ b/LSTM.lua
@@ -1,7 +1,7 @@
 local LSTM, parent = torch.class('cudnn.LSTM', 'cudnn.RNN')
 
-function LSTM:__init(inputSize, hiddenSize, numLayers, batchFirst)
-   parent.__init(self,inputSize, hiddenSize, numLayers, batchFirst)
+function LSTM:__init(inputSize, hiddenSize, numLayers, batchFirst, dropout)
+   parent.__init(self,inputSize, hiddenSize, numLayers, batchFirst, dropout)
    self.mode = 'CUDNN_LSTM'
    self:reset()
 end
diff --git a/RNN.lua b/RNN.lua
index 5970388..4871676 100644
--- a/RNN.lua
+++ b/RNN.lua
@@ -4,7 +4,7 @@ local errcheck = cudnn.errcheck
 
 local DESCS = {'rnnDesc', 'dropoutDesc', 'wDesc', 'xDescs', 'yDescs', 'hxDesc', 'hyDesc', 'cxDesc', 'cyDesc'}
 
-function RNN:__init(inputSize, hiddenSize, numLayers, batchFirst)
+function RNN:__init(inputSize, hiddenSize, numLayers, batchFirst, dropout)
    parent.__init(self)
 
    self.datatype = 'CUDNN_DATA_FLOAT'
@@ -17,7 +17,7 @@ function RNN:__init(inputSize, hiddenSize, numLayers, batchFirst)
    self.numDirections = 1 -- set to 2 for bi-directional.
    self.inputMode = 'CUDNN_LINEAR_INPUT'
    self.mode = 'CUDNN_RNN_RELU'
-   self.dropout = 0
+   self.dropout = dropout or 0
    self.seed = 0x01234567
    self.batchFirst = batchFirst or false -- Set to true for batch x time x inputdim.
@@ -279,7 +279,7 @@ function RNN:updateOutput(input)
    local oSize = torch.LongStorage({self.seqLength, self.miniBatch, self.hiddenSize * self.numDirections})
    local oStride = torch.LongStorage({self.miniBatch * self.hiddenSize * self.numDirections, self.hiddenSize * self.numDirections, 1})
    self.output:resize(oSize, oStride)
-   local y = self.output
+   local y = self.output
    local w = self.weight
    local hy = self:resizeHidden(self.hiddenOutput):zero()
    local cy = self:resizeHidden(self.cellOutput):zero()
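The parent constructor makes the new argument optional through Lua's `or` idiom: `self.dropout = dropout or 0` falls back to 0 whenever the caller passes nil or omits the argument, so existing call sites keep their previous behavior. A minimal standalone sketch of that idiom (the makeRNNConfig helper is purely illustrative, not part of this repository):

-- Illustrative only: shows how `dropout or 0` defaults a missing argument.
local function makeRNNConfig(dropout)
   -- nil (an omitted argument) falls through to 0
   return { dropout = dropout or 0 }
end

print(makeRNNConfig().dropout)    --> 0
print(makeRNNConfig(0.5).dropout) --> 0.5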
diff --git a/RNNReLU.lua b/RNNReLU.lua
index 3aa8ee9..fc262e2 100644
--- a/RNNReLU.lua
+++ b/RNNReLU.lua
@@ -1,7 +1,7 @@
 local RNNReLU, parent = torch.class('cudnn.RNNReLU', 'cudnn.RNN')
 
-function RNNReLU:__init(inputSize, hiddenSize, numLayers, batchFirst)
-   parent.__init(self,inputSize, hiddenSize, numLayers, batchFirst)
+function RNNReLU:__init(inputSize, hiddenSize, numLayers, batchFirst, dropout)
+   parent.__init(self,inputSize, hiddenSize, numLayers, batchFirst, dropout)
    self.mode = 'CUDNN_RNN_RELU'
    self:reset()
 end
diff --git a/RNNTanh.lua b/RNNTanh.lua
index 98fa87c..3382a52 100644
--- a/RNNTanh.lua
+++ b/RNNTanh.lua
@@ -1,7 +1,7 @@
 local RNNTanh, parent = torch.class('cudnn.RNNTanh', 'cudnn.RNN')
 
-function RNNTanh:__init(inputSize, hiddenSize, numLayers, batchFirst)
-   parent.__init(self,inputSize, hiddenSize, numLayers, batchFirst)
+function RNNTanh:__init(inputSize, hiddenSize, numLayers, batchFirst, dropout)
+   parent.__init(self,inputSize, hiddenSize, numLayers, batchFirst, dropout)
    self.mode = 'CUDNN_RNN_TANH'
    self:reset()
 end
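Taken together, every constructor in the cudnn.RNN family now accepts an optional fifth argument, dropout, forwarded to the parent cudnn.RNN initializer. A hypothetical usage sketch (the layer sizes and dropout rate below are illustrative; only the signature comes from this diff):

require 'cudnn'

-- 3-layer LSTM, batch-first input, 50% dropout. cuDNN applies RNN dropout
-- between stacked layers, not on the recurrent connections within a layer.
local lstm = cudnn.LSTM(512, 512, 3, true, 0.5)

-- Omitting the argument preserves the old behavior (dropout = 0):
local gru = cudnn.GRU(512, 512, 3, true)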