author     Jean-Marc Valin <jmvalin@amazon.com>   2023-10-15 04:19:46 +0300
committer  Jean-Marc Valin <jmvalin@amazon.com>   2023-10-15 04:19:46 +0300
commit     cbd3a80552a918e71ced43e7e2af35092a170629 (patch)
tree       001cbea94a1ffaa8cbe5b051d297d063c01eaec0 /dnn/nnet.c
parent     0b7c02caf4548e3bd90c39c13913146358e4de8b (diff)
minor tweaks
Diffstat (limited to 'dnn/nnet.c')
 -rw-r--r--  dnn/nnet.c | 7 ++++++-
 1 file changed, 6 insertions(+), 1 deletion(-)
@@ -150,7 +150,12 @@ void compute_glu(const LinearLayer *layer, float *output, const float *input)
    celt_assert(layer->nb_inputs == layer->nb_outputs);
    compute_linear(layer, act2, input);
    compute_activation(act2, act2, layer->nb_outputs, ACTIVATION_SIGMOID);
-   for (i=0;i<layer->nb_outputs;i++) output[i] = input[i]*act2[i];
+   if (input == output) {
+      /* Give a vectorization hint to the compiler for the in-place case. */
+      for (i=0;i<layer->nb_outputs;i++) output[i] = output[i]*act2[i];
+   } else {
+      for (i=0;i<layer->nb_outputs;i++) output[i] = input[i]*act2[i];
+   }
 }

 void compute_gated_activation(const LinearLayer *layer, float *output, const float *input, int activation)
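For readers skimming the hunk: the change splits the GLU gating loop so the compiler can tell whether the multiply runs in place. Below is a minimal standalone sketch of that pattern, assuming nothing beyond standard C; the function name gated_multiply, its signature, and the buffer names are hypothetical illustrations, not part of nnet.c.

/* Sketch only: illustrates the aliasing issue behind the patch above.
 * gated_multiply and its parameters are hypothetical names. */
#include <stddef.h>

static void gated_multiply(float *output, const float *input,
                           const float *gate, size_t n)
{
   size_t i;
   if (input == output) {
      /* In-place case: a single array is read and written at the same
         index, so the vectorizer sees no possible overlap hazard. */
      for (i=0;i<n;i++) output[i] = output[i]*gate[i];
   } else {
      /* Distinct buffers: same loop as the pre-patch code. The compiler
         may still guard this with a runtime overlap check, but the
         aliased call no longer has to take the scalar fallback. */
      for (i=0;i<n;i++) output[i] = input[i]*gate[i];
   }
}

Without the branch, output[i] = input[i]*gate[i] with unqualified pointers forces the compiler to allow for overlapping buffers, so a call where input == output would typically land in the scalar fallback of the compiler's loop versioning. Declaring the pointers restrict would be an alternative, but it would make the in-place call undefined behavior, which compute_glu() explicitly supports; hence the runtime test.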