github.com/torch/nn.git
author     Andreas Köpf <andreas.koepf@xamla.com>  2016-01-05 18:23:34 +0300
committer  Andreas Köpf <andreas.koepf@xamla.com>  2016-01-05 18:23:34 +0300
commit     690aea1dcdd56f01e5da052f8d5d6447d912d643 (patch)
tree       83900a690386a7a91e129d3ee59b6ccac2cf5e5c /LeakyReLU.lua
parent     4dc0df7d35c5ab32c55d28f1137708d8af9eadfd (diff)
Add THNN conversion of {ELU, LeakyReLU, LogSigmoid, LogSoftMax, LookupTable}
Diffstat (limited to 'LeakyReLU.lua')
-rw-r--r--  LeakyReLU.lua  74
1 file changed, 41 insertions(+), 33 deletions(-)
diff --git a/LeakyReLU.lua b/LeakyReLU.lua
index fdc294c..56b7f25 100644
--- a/LeakyReLU.lua
+++ b/LeakyReLU.lua
@@ -1,33 +1,41 @@
-local LeakyReLU, parent = torch.class('nn.LeakyReLU','nn.Module')
-
-function LeakyReLU:__init(negval,ip)
- parent.__init(self)
- if type(negval) == 'boolean' then
- local ip = negval
- self.negval = 1/100
- else
- self.negval = negval or (1/100)
- end
- -- default for inplace is false
- self.inplace = ip or false
- if self.negval < 0 then
- self.inplace = false
- end
-end
-
-function LeakyReLU:__tostring__()
- return torch.type(self) .. string.format('(%g)', self.negval)
-end
-
-
-function LeakyReLU:updateOutput(input)
- input.nn.LeakyReLU_updateOutput(self, input)
- return self.output
-end
-
-function LeakyReLU:updateGradInput(input, gradOutput)
- input.nn.LeakyReLU_updateGradInput(self, input, gradOutput)
- return self.gradInput
-end
-
-
+local LeakyReLU, parent = torch.class('nn.LeakyReLU','nn.Module')
+
+function LeakyReLU:__init(negval,ip)
+ parent.__init(self)
+ if type(negval) == 'boolean' then
+ local ip = negval
+ self.negval = 1/100
+ else
+ self.negval = negval or (1/100)
+ end
+ -- default for inplace is false
+ self.inplace = ip or false
+ if self.negval < 0 then
+ self.inplace = false
+ end
+end
+
+function LeakyReLU:updateOutput(input)
+ input.THNN.LeakyReLU_updateOutput(
+ input:cdata(),
+ self.output:cdata(),
+ self.negval,
+ self.inplace
+ )
+ return self.output
+end
+
+function LeakyReLU:updateGradInput(input, gradOutput)
+ input.THNN.LeakyReLU_updateGradInput(
+ input:cdata(),
+ gradOutput:cdata(),
+ self.gradInput:cdata(),
+ self.negval,
+ self.inplace
+ )
+ return self.gradInput
+end
+
+function LeakyReLU:__tostring__()
+ return torch.type(self) .. string.format('(%g)', self.negval)
+end