
github.com/soumith/cudnn.torch.git
author    Soumith Chintala <soumith@gmail.com>  2016-06-27 18:40:20 +0300
committer GitHub <noreply@github.com>           2016-06-27 18:40:20 +0300
commit    e7b1a0c5e36b2983986c58d28d45d0a9649c84ee
tree      22cf7d4a463e20abd47803331d0bbc34ee433b18
parent    8fe112b6ea816bde1b7030eacac2d27a8d890513
parent    0880602c7f06c83e714f545b5ba2fe6f2083651d
Merge pull request #207 from nhynes/batchfirst-untranspose
Un-untranspose RNN outputs after doing grad updates

With batchFirst set, updateGradInput and accGradParameters transpose self.output into the sequence-first layout the cuDNN calls expect; the tensor was previously left in that layout, so callers saw a transposed output after the backward pass. This merge transposes it back once the gradient updates are done.
-rw-r--r--  RNN.lua | 7
1 file changed, 7 insertions, 0 deletions
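For context on why the added lines work: torch.Tensor:transpose(1, 2) returns a view that swaps two dimensions without copying, so applying the same transpose a second time restores the original layout. A minimal sketch of that round-trip (plain Torch; the tensor sizes are illustrative, not taken from the patch):

require 'torch'

-- batch-first layout: batch x seq x hidden (sizes made up for illustration)
local output = torch.randn(4, 5, 8)

-- Swap to the seq x batch x hidden layout the cuDNN kernels expect;
-- transpose returns a view, so no data is copied.
local seqFirst = output:transpose(1, 2)

-- Transposing the same two dimensions again ("un-untransposing")
-- recovers the original batch-first view.
local roundTrip = seqFirst:transpose(1, 2)
assert(roundTrip:isSameSizeAs(output))
assert(torch.eq(roundTrip, output):all())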
diff --git a/RNN.lua b/RNN.lua
index daf3b19..23b1d46 100644
--- a/RNN.lua
+++ b/RNN.lua
@@ -437,6 +437,7 @@ function RNN:updateGradInput(input, gradOutput)
              self.reserve:data(), self.reserve:size(1) * 4) -- sizeof(float)
    if (self.batchFirst) then
       self.gradInput = self.gradInput:transpose(1, 2)
+      self.output = self.output:transpose(1, 2)
    end
    return self.gradInput
 end
@@ -445,6 +446,7 @@ function RNN:accGradParameters(input, gradOutput, scale)
    if (self.batchFirst) then
       input = input:transpose(1, 2)
       gradOutput = gradOutput:transpose(1, 2)
+      self.output = self.output:transpose(1, 2)
    end
    scale = scale or 1
    if scale == 0 then return end
@@ -502,6 +504,11 @@ function RNN:accGradParameters(input, gradOutput, scale)
              self.dw:data(),
              scaleTensor:data())
    end
+
+   if (self.batchFirst) then
+      gradOutput = gradOutput:transpose(1, 2)
+      self.output = self.output:transpose(1, 2)
+   end
 end
 
 function RNN:clearDesc()
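A hypothetical usage sketch of what the merge fixes (the sizes are assumptions for illustration; it also assumes a CUDA device and that this version of cudnn.torch accepts batchFirst as the fourth constructor argument):

require 'cudnn'

local rnn = cudnn.LSTM(10, 20, 2, true):cuda()     -- batchFirst = true
local input = torch.CudaTensor(4, 5, 10):uniform() -- batch x seq x inputSize
local output = rnn:forward(input)                  -- batch x seq x hiddenSize

rnn:backward(input, output:clone():fill(1))

-- Before this patch, the backward pass left rnn.output in the transposed
-- seq x batch x hidden layout (5 x 4 x 20); with the fix it is transposed
-- back, so callers still see the batch-first layout (4 x 5 x 20).
assert(rnn.output:size(1) == 4 and rnn.output:size(2) == 5)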