author | Clement Farabet <clement.farabet@gmail.com> | 2011-11-11 07:20:51 +0400
committer | Clement Farabet <clement.farabet@gmail.com> | 2011-11-11 07:20:51 +0400
commit | 0c02ea85701e19620cbd616d9b42c0eb8b303f3b (patch)
tree | c018f8d682f547cd5477327c8bbc6fe4cb1702df
parent | 51ddc5118ddf431a44be1be6e7131f8f3b68ab6d (diff)
parent | 96466e6ade982cf157d6b979f702b3c32924dff4 (diff)
Merge branch 'master' of github.com:clementfarabet/lua---nnx
-rw-r--r-- | DistNLLCriterion.lua | 4
1 file changed, 2 insertions(+), 2 deletions(-)
```diff
diff --git a/DistNLLCriterion.lua b/DistNLLCriterion.lua
index 6f6679f..01290c4 100644
--- a/DistNLLCriterion.lua
+++ b/DistNLLCriterion.lua
@@ -39,7 +39,7 @@ function DistNLLCriterion:normalize(input, target)
    end
 end
 
-function DistNLLCriterion:denormalize(input)
+function DistNLLCriterion:denormalize()
    -- denormalize gradients
    if not self.inputIsLogProbability and not self.inputIsProbability then
       self.gradInput = self.inputLogSoftMax:backward(self.input, self.gradLogInput)
@@ -70,7 +70,7 @@ function DistNLLCriterion:backward(input, target)
    for i = 1,input:size(1) do
       self.gradLogInput[i] = -self.probTarget[i]
    end
-   self:denormalize(input)
+   self:denormalize()
    return self.gradInput
 end
```
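The change drops the redundant `input` argument from `denormalize`: as the first hunk shows, the method already reads the cached `self.input` (via `self.inputLogSoftMax:backward(self.input, ...)`), so the parameter was unused, and the call site in `backward` is updated to match. Below is a minimal usage sketch, assuming the Torch7-era `nnx` package and the standard `nn.Criterion` forward/backward convention; the tensor sizes and variable names are illustrative assumptions, not taken from the commit.

```lua
-- A minimal sketch, assuming Torch7 with the nnx package installed.
-- DistNLLCriterion measures negative log-likelihood against a full
-- target distribution rather than a single class index.
require 'nnx'

local criterion = nn.DistNLLCriterion()

-- Raw scores: the criterion log-normalizes them internally
-- (the normalize()/denormalize() pair touched by the diff above).
local input = torch.randn(10)

-- Target is a probability distribution over the 10 classes.
local target = torch.rand(10)
target:div(target:sum())

local loss = criterion:forward(input, target)        -- scalar loss
local gradInput = criterion:backward(input, target)  -- dLoss/dInput
print(loss)
```

The refactor is a small API cleanup: since `normalize` already stashes the input on `self`, passing it again to `denormalize` invited the two values to drift apart, and removing the parameter makes the cached state the single source of truth.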