Welcome to the mirror list, hosted at ThFree Co, Russian Federation.

gitlab.xiph.org/xiph/opus.git - Unnamed repository; edit this file 'description' to name the repository.
summaryrefslogtreecommitdiff
diff options
context:
space:
mode:
authorJean-Marc Valin <jmvalin@amazon.com>2023-07-29 02:13:00 +0300
committerJean-Marc Valin <jmvalin@amazon.com>2023-08-02 00:52:49 +0300
commit5eaa4a504f865e73c0e480fb95113e67f9310ffa (patch)
tree64e7570197621dafa3ed14e6e846a89d32e86723
parent5e045405739f8b58817a0ae1a97bceb5bd113dcf (diff)
Add Gated Linear Unit (GLU)
-rw-r--r--dnn/nnet.c11
1 file changed, 11 insertions, 0 deletions
diff --git a/dnn/nnet.c b/dnn/nnet.c
index a01f3726..1c0035d0 100644
--- a/dnn/nnet.c
+++ b/dnn/nnet.c
@@ -142,6 +142,17 @@ void compute_generic_gru(const LinearLayer *input_weights, const LinearLayer *re
state[i] = h[i];
}
+void compute_gated_activation(const LinearLayer *layer, float *output, const float *input, int activation)
+{
+ int i;
+ float act1[MAX_INPUTS];
+ celt_assert(layer->nb_inputs == layer->nb_outputs);
+ compute_linear(layer, output, input);
+ compute_activation(output, output, layer->nb_outputs, ACTIVATION_SIGMOID);
+ compute_activation(act1, input, layer->nb_outputs, activation);
+ for (i=0;i<layer->nb_outputs;i++) output[i] *= act1[i];
+}
+
void compute_activation(float *output, const float *input, int N, int activation)
{
int i;