github.com/marian-nmt/marian.git
author     Roman Grundkiewicz <rgrundki@exseed.ed.ac.uk>  2017-10-29 21:22:26 +0300
committer  Roman Grundkiewicz <rgrundki@exseed.ed.ac.uk>  2017-10-29 21:22:26 +0300
commit     34e13035203987c856d6ba14f18d855f5f28cb19 (patch)
tree       8f2c12ce5c14cde1d1d7b883401b11e14c339443 /src/graph/node_operators_unary.h
parent     857289d1bc0afa14d1338f7b6a29cb614b03bcac (diff)
Use PReLU in LeakyReLU
Diffstat (limited to 'src/graph/node_operators_unary.h')
-rw-r--r--  src/graph/node_operators_unary.h  48
1 file changed, 4 insertions(+), 44 deletions(-)
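
In short, the commit drops the dedicated LeakyReLUNodeOp and expresses leaky ReLU through the existing parametric ReLU node, since leaky ReLU is simply PReLU with the slope fixed at 0.01. A minimal sketch of how the expression-level helpers could look after this change (the helper names leakyrelu/prelu, the default alpha, and the argument order of the Expression<> factory are assumptions, not lines taken from this commit):

// Sketch only, not part of the diff below: leaky ReLU routed through the PReLU node.
// Names and the Expression<> factory signature are assumed here.
Expr prelu(Expr a, float alpha = 0.01f) {
  return Expression<PReLUNodeOp>(alpha, a);
}

Expr leakyrelu(Expr a) {
  // Leaky ReLU is the parametric ReLU with a fixed negative slope of 0.01,
  // so no separate LeakyReLUNodeOp is needed any more.
  return prelu(a, 0.01f);
}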
diff --git a/src/graph/node_operators_unary.h b/src/graph/node_operators_unary.h
index 819192dc..7433c12a 100644
--- a/src/graph/node_operators_unary.h
+++ b/src/graph/node_operators_unary.h
@@ -235,46 +235,6 @@ struct ReLUNodeOp : public UnaryNodeOp {
/**
* Represents a <a
- * href="https://en.wikipedia.org/wiki/Rectifier_(neural_networks)">leaky
- * rectified linear unit</a> node in an expression graph.
- * It is equivalent to the parametric ReLU with \f$ \alpha = 0.01 \f$.
- *
- * This node implements the activation function:
- * \f[
- * f(x) =
- * \begin{cases}
- * 0.01 & \text{if } x \leq 0 \\
- * x & \text{if } x > 0
- * \end{cases}
- * \f]
- *
- * and its derivative:
- * \f[
- * f^\prime(x) =
- * \begin{cases}
- * 0.01 & \text{if } x \leq 0 \\
- * 1 & \text{if } x > 0
- * \end{cases}
- * \f]
- */
-struct LeakyReLUNodeOp : public UnaryNodeOp {
- template <typename... Args>
- LeakyReLUNodeOp(Args... args) : UnaryNodeOp(args...) {}
-
- NodeOps forwardOps() {
- return {NodeOp(Element(_1 = LeakyReLU(_2), val_, child(0)->val()))};
- }
-
- NodeOps backwardOps() {
- return {NodeOp(
- Add(_1 * LeakyReLUback(_2), child(0)->grad(), adj_, child(0)->val()))};
- }
-
- const std::string type() { return "LeakyReLU"; }
-};
-
-/**
- * Represents a <a
* href="https://en.wikipedia.org/wiki/Rectifier_(neural_networks)">parametric
* rectified linear unit</a> node in an expression graph.
* For \f$ \alpha = 0.01 \f$ (the default value) it is equivalent to Leaky
@@ -308,8 +268,8 @@ struct PReLUNodeOp : public UnaryNodeOp {
}
NodeOps backwardOps() {
- return {NodeOp(
- Add(_1 * PReLUback(_2, alpha_), child(0)->grad(), adj_, child(0)->val()))};
+ return {NodeOp(Add(
+ _1 * PReLUback(_2, alpha_), child(0)->grad(), adj_, child(0)->val()))};
}
const std::string type() { return "PReLU"; }
@@ -809,8 +769,8 @@ struct TransposeNodeOp : public UnaryNodeOp {
Shape newShape(Expr a, Shape permute) {
Shape shape = a->shape();
- UTIL_THROW_IF2(shape.size() != permute.size(),
- "Shape and transpose axis have different number of dimensions");
+ ABORT_IF(shape.size() != permute.size(),
+ "Shape and transpose axis have different number of dimensions");
for(int i = 0; i < shape.size(); ++i)
shape.set(i, a->shape()[permute[i]]);
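
The last hunk only swaps the error macro (UTIL_THROW_IF2 becomes ABORT_IF) around the existing transpose shape check; the permutation logic is unchanged. As a standalone illustration of what that loop computes (a sketch using plain std::vector in place of Marian's Shape class, not code from this commit):

#include <cassert>
#include <vector>

// New axis i takes the size of old axis permute[i]; a rank mismatch aborts,
// mirroring the ABORT_IF check in newShape above.
std::vector<int> permuteShape(const std::vector<int>& shape,
                              const std::vector<int>& permute) {
  assert(shape.size() == permute.size() &&
         "Shape and transpose axis have different number of dimensions");
  std::vector<int> out(shape.size());
  for(size_t i = 0; i < shape.size(); ++i)
    out[i] = shape[permute[i]];
  return out;
}

// Example: permuteShape({2, 3, 4}, {1, 2, 0}) yields {3, 4, 2}.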