author | Jean-Marc Valin <jmvalin@amazon.com> | 2023-07-29 02:13:00 +0300
---|---|---
committer | Jean-Marc Valin <jmvalin@amazon.com> | 2023-08-02 00:52:49 +0300
commit | 5eaa4a504f865e73c0e480fb95113e67f9310ffa (patch) |
tree | 64e7570197621dafa3ed14e6e846a89d32e86723 |
parent | 5e045405739f8b58817a0ae1a97bceb5bd113dcf (diff) |
Add Gated Linear Unit (GLU)
-rw-r--r-- | dnn/nnet.c | 11 |
1 file changed, 11 insertions, 0 deletions
```diff
@@ -142,6 +142,17 @@ void compute_generic_gru(const LinearLayer *input_weights, const LinearLayer *re
       state[i] = h[i];
 }
 
+void compute_gated_activation(const LinearLayer *layer, float *output, const float *input, int activation)
+{
+   int i;
+   float act1[MAX_INPUTS];
+   celt_assert(layer->nb_inputs == layer->nb_outputs);
+   compute_linear(layer, output, input);
+   compute_activation(output, output, layer->nb_outputs, ACTIVATION_SIGMOID);
+   compute_activation(act1, input, layer->nb_outputs, activation);
+   for (i=0;i<layer->nb_outputs;i++) output[i] *= act1[i];
+}
+
 void compute_activation(float *output, const float *input, int N, int activation)
 {
    int i;
```
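For readers skimming the hunk: the new function computes an element-wise gate, `out[i] = sigmoid((W*x + b)[i]) * act(x[i])`, i.e. a sigmoid of a linear projection of the input gates an activation applied directly to the input (note that this variant gates `act(x)` itself, rather than a second linear branch as in the classic GLU formulation). The sketch below reproduces that computation as standalone C under simplifying assumptions: a plain square dense layer stands in for the repository's `LinearLayer`/`compute_linear()`, `tanh` stands in for the `activation` argument, and the names `dense_forward`, `gated_activation`, and `N` are hypothetical, not part of the commit.

```c
/* Minimal standalone sketch of the gated activation added in this commit.
 * Assumes a plain dense N x N layer; the real code uses LinearLayer and the
 * compute_linear()/compute_activation() helpers from dnn/nnet.h. */
#include <math.h>
#include <stdio.h>

#define N 4  /* layer width: nb_inputs == nb_outputs, as the assert requires */

/* y = W*x + b for a square N x N layer (row-major W) */
static void dense_forward(const float *W, const float *b,
                          const float *x, float *y)
{
   int i, j;
   for (i = 0; i < N; i++) {
      y[i] = b[i];
      for (j = 0; j < N; j++) y[i] += W[i*N + j] * x[j];
   }
}

static float sigmoid_f(float x) { return 1.f / (1.f + expf(-x)); }

/* out[i] = sigmoid(W*x + b)[i] * tanh(x[i]): the same shape of computation
 * as compute_gated_activation() with a tanh activation argument. */
static void gated_activation(const float *W, const float *b,
                             const float *x, float *out)
{
   int i;
   float gate[N];
   dense_forward(W, b, x, gate);                   /* linear part: W*x + b */
   for (i = 0; i < N; i++)
      out[i] = sigmoid_f(gate[i]) * tanhf(x[i]);   /* gate times act(x)    */
}

int main(void)
{
   /* identity weights and zero bias, so the gate reduces to sigmoid(x) */
   float W[N*N] = {1,0,0,0, 0,1,0,0, 0,0,1,0, 0,0,0,1};
   float b[N] = {0}, x[N] = {-1.f, 0.f, 0.5f, 2.f}, out[N];
   int i;
   gated_activation(W, b, x, out);
   for (i = 0; i < N; i++) printf("%f\n", out[i]);
   return 0;
}
```

Compile with e.g. `cc glu_demo.c -lm`. With identity weights and zero bias each output is `sigmoid(x[i]) * tanh(x[i])`, which makes the gating behaviour easy to check by hand.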