diff options
author | Andreas Köpf <andreas.koepf@xamla.com> | 2016-01-26 01:23:02 +0300 |
---|---|---|
committer | Andreas Köpf <andreas.koepf@xamla.com> | 2016-02-01 21:54:07 +0300 |
commit | 68f61cf984f582ed3d4ece5d9e9073f19e57345e (patch) | |
tree | aaf84f4efaf3ee1717fff2b3ad1a35fadb19dedf /SoftMax.lua | |
parent | ab95570bc4a26a515c30d80b37dcd68af102cb9f (diff) |
Add THNN conversion of {RReLU, Sigmoid, SmoothL1Criterion,SoftMax, SoftPlus}
Diffstat (limited to 'SoftMax.lua')
-rw-r--r-- | SoftMax.lua | 14 |
1 files changed, 12 insertions, 2 deletions
diff --git a/SoftMax.lua b/SoftMax.lua
index 22f0eda..23a444c 100644
--- a/SoftMax.lua
+++ b/SoftMax.lua
@@ -1,9 +1,19 @@
 local SoftMax, _ = torch.class('nn.SoftMax', 'nn.Module')
 
 function SoftMax:updateOutput(input)
-   return input.nn.SoftMax_updateOutput(self, input)
+   input.THNN.SoftMax_updateOutput(
+      input:cdata(),
+      self.output:cdata()
+   )
+   return self.output
 end
 
 function SoftMax:updateGradInput(input, gradOutput)
-   return input.nn.SoftMax_updateGradInput(self, input, gradOutput)
+   input.THNN.SoftMax_updateGradInput(
+      input:cdata(),
+      gradOutput:cdata(),
+      self.gradInput:cdata(),
+      self.output:cdata()
+   )
+   return self.gradInput
 end