author     Soumith Chintala <soumith@gmail.com>   2016-06-27 18:40:20 +0300
committer  GitHub <noreply@github.com>            2016-06-27 18:40:20 +0300
commit     e7b1a0c5e36b2983986c58d28d45d0a9649c84ee (patch)
tree       22cf7d4a463e20abd47803331d0bbc34ee433b18
parent     8fe112b6ea816bde1b7030eacac2d27a8d890513 (diff)
parent     0880602c7f06c83e714f545b5ba2fe6f2083651d (diff)
Merge pull request #207 from nhynes/batchfirst-untranspose
Un-untranspose RNN outputs after doing grad updates
RNN.lua | 7 +++++++
1 file changed, 7 insertions(+), 0 deletions(-)
@@ -437,6 +437,7 @@ function RNN:updateGradInput(input, gradOutput)
                                 self.reserve:data(), self.reserve:size(1) * 4) -- sizeof(float)
    if (self.batchFirst) then
       self.gradInput = self.gradInput:transpose(1, 2)
+      self.output = self.output:transpose(1, 2)
    end
    return self.gradInput
 end
@@ -445,6 +446,7 @@ function RNN:accGradParameters(input, gradOutput, scale)
    if (self.batchFirst) then
       input = input:transpose(1, 2)
       gradOutput = gradOutput:transpose(1, 2)
+      self.output = self.output:transpose(1, 2)
    end
    scale = scale or 1
    if scale == 0 then return end
@@ -502,6 +504,11 @@ function RNN:accGradParameters(input, gradOutput, scale)
                     self.dw:data(),
                     scaleTensor:data())
    end
+
+   if (self.batchFirst) then
+      gradOutput = gradOutput:transpose(1, 2)
+      self.output = self.output:transpose(1, 2)
+   end
 end

 function RNN:clearDesc()
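For context, a minimal sketch of the caller-visible behavior this patch addresses. cudnn's RNN kernels operate on (seq, batch, feature) tensors, so when batchFirst is set the module transposes its tensors around the cudnn calls; before this patch the backward passes left self.output in the transposed (seq-first) layout. The sketch assumes the cudnn.torch constructor signature cudnn.LSTM(inputSize, hiddenSize, numLayers, batchFirst); the sizes and tensor names are illustrative, not from the patch.

require 'cudnn'

-- Hypothetical sizes, for illustration only.
local seqLen, batch, inputSize, hiddenSize = 5, 4, 10, 20

-- batchFirst = true: caller-facing tensors are (batch, seq, feature).
local rnn = cudnn.LSTM(inputSize, hiddenSize, 1, true):cuda()

local input = torch.CudaTensor(batch, seqLen, inputSize):uniform()
local output = rnn:forward(input)          -- (batch, seqLen, hiddenSize)
local gradOutput = output:clone():uniform()

rnn:backward(input, gradOutput)

-- Before this patch: backward left rnn.output transposed to
-- (seqLen, batch, hiddenSize), so this check would fail.
-- After this patch: the layout is transposed back to batch-first.
assert(rnn.output:size(1) == batch and rnn.output:size(2) == seqLen)

Note that in Torch, transpose(1, 2) returns a view sharing the same storage, so reassigning self.output to the transposed view inside updateGradInput changes the layout that later code and callers observe; the added lines simply restore the batch-first view once the cudnn calls are done.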