diff options
author | Andreas Köpf <andreas.koepf@xamla.com> | 2016-01-26 01:23:02 +0300 |
---|---|---|
committer | Andreas Köpf <andreas.koepf@xamla.com> | 2016-02-01 21:54:07 +0300 |
commit | 68f61cf984f582ed3d4ece5d9e9073f19e57345e (patch) | |
tree | aaf84f4efaf3ee1717fff2b3ad1a35fadb19dedf /Sigmoid.lua | |
parent | ab95570bc4a26a515c30d80b37dcd68af102cb9f (diff) |
Add THNN conversion of {RReLU, Sigmoid, SmoothL1Criterion, SoftMax, SoftPlus}
Diffstat (limited to 'Sigmoid.lua')
-rw-r--r-- | Sigmoid.lua | 14 |
1 file changed, 12 insertions, 2 deletions
--- nn.Sigmoid: element-wise logistic activation, y = 1 / (1 + exp(-x)).
-- This module delegates both passes to the THNN C backend through the
-- tensor-type-specific dispatch table exposed at `input.THNN`, passing raw
-- cdata handles of the tensors involved.
local Sigmoid = torch.class('nn.Sigmoid', 'nn.Module')

--- Forward pass: fills self.output with sigmoid(input).
-- @param input tensor of arbitrary shape
-- @return self.output, resized/overwritten by the THNN kernel
function Sigmoid:updateOutput(input)
   input.THNN.Sigmoid_updateOutput(
      input:cdata(),
      self.output:cdata()
   )
   return self.output
end

--- Backward pass: fills self.gradInput with the gradient w.r.t. input.
-- The kernel reuses the cached forward result (self.output is passed in),
-- exploiting the identity sigmoid'(x) = y * (1 - y).
-- @param input tensor given to the preceding updateOutput call
-- @param gradOutput gradient flowing back from the next module
-- @return self.gradInput, overwritten by the THNN kernel
function Sigmoid:updateGradInput(input, gradOutput)
   input.THNN.Sigmoid_updateGradInput(
      input:cdata(),
      gradOutput:cdata(),
      self.gradInput:cdata(),
      self.output:cdata()
   )
   return self.gradInput
end