diff options
author | Clement Farabet <clement.farabet@gmail.com> | 2011-11-11 00:13:02 +0400 |
---|---|---|
committer | Clement Farabet <clement.farabet@gmail.com> | 2011-11-11 00:13:02 +0400 |
commit | 96466e6ade982cf157d6b979f702b3c32924dff4 (patch) | |
tree | 01ab123eafa2dfa5ce312ab8d9c6a1f723c892e5 /DistNLLCriterion.lua | |
parent | dcbb40662044aab242223eb5151df2da2552e80d (diff) |
Removed useless param.
Diffstat (limited to 'DistNLLCriterion.lua')
-rw-r--r-- | DistNLLCriterion.lua | 4 |
1 file changed, 2 insertions, 2 deletions
diff --git a/DistNLLCriterion.lua b/DistNLLCriterion.lua index 6f6679f..01290c4 100644 --- a/DistNLLCriterion.lua +++ b/DistNLLCriterion.lua @@ -39,7 +39,7 @@ function DistNLLCriterion:normalize(input, target) end end -function DistNLLCriterion:denormalize(input) +function DistNLLCriterion:denormalize() -- denormalize gradients if not self.inputIsLogProbability and not self.inputIsProbability then self.gradInput = self.inputLogSoftMax:backward(self.input, self.gradLogInput) @@ -70,7 +70,7 @@ function DistNLLCriterion:backward(input, target) for i = 1,input:size(1) do self.gradLogInput[i] = -self.probTarget[i] end - self:denormalize(input) + self:denormalize() return self.gradInput end |