author     Jean-Marc Valin <jmvalin@amazon.com>    2023-07-25 04:31:37 +0300
committer  Jean-Marc Valin <jmvalin@amazon.com>    2023-07-28 02:54:10 +0300
commit     b1f94b1e9229ffc801e6190775f563f3398ab27a (patch)
tree       3c270c35ff478ee28ed2a95eab3b38764b860c0c
parent     60d67b11126e87ec41ae09b34aa4a133a3af5f7c (diff)
Add compute_generic_dense()
And missing prototypes
-rw-r--r--   dnn/nnet.c   6
-rw-r--r--   dnn/nnet.h   5
2 files changed, 11 insertions, 0 deletions
diff --git a/dnn/nnet.c b/dnn/nnet.c
--- a/dnn/nnet.c
+++ b/dnn/nnet.c
@@ -102,6 +102,12 @@ void compute_linear(const LinearLayer *linear, float *out, const float *in)
    }
 }
 
+void compute_generic_dense(const LinearLayer *layer, float *output, const float *input, int activation)
+{
+   compute_linear(layer, output, input);
+   compute_activation(output, output, layer->nb_outputs, activation);
+}
+
 #define MAX_RNN_NEURONS_ALL IMAX(IMAX(MAX_RNN_NEURONS, PLC_MAX_RNN_NEURONS), DRED_MAX_RNN_NEURONS)
diff --git a/dnn/nnet.h b/dnn/nnet.h
--- a/dnn/nnet.h
+++ b/dnn/nnet.h
@@ -131,6 +131,11 @@ typedef struct {
   int dim;
 } EmbeddingLayer;
 
+void compute_linear(const LinearLayer *linear, float *out, const float *in);
+void compute_generic_dense(const LinearLayer *layer, float *output, const float *input, int activation);
+void compute_generic_gru(const LinearLayer *input_weights, const LinearLayer *recurrent_weights, float *state, const float *in);
+void compute_generic_conv1d(const LinearLayer *layer, float *output, float *mem, const float *input, int input_size, int activation);
+
 void compute_activation(float *output, const float *input, int N, int activation);
 
 void _lpcnet_compute_dense(const DenseLayer *layer, float *output, const float *input);
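For context, the pattern this commit introduces is that a dense layer is just compute_linear() followed by compute_activation() over the same buffer, with the activation passed as a parameter rather than baked into the layer. Below is a minimal self-contained sketch of that decomposition. Only the body of compute_generic_dense() is taken from the commit; the simplified LinearLayer fields, the column-major weight layout, the ACTIVATION_TANH value, and the compute_linear()/compute_activation() bodies are stand-ins for the real definitions in dnn/nnet.h and dnn/nnet.c, not copied from them.

/* Sketch only: simplified stand-ins for the real nnet.h types. */
#include <math.h>

#define ACTIVATION_TANH 2   /* hypothetical value; the real constant lives in nnet.h */

typedef struct {
   const float *bias;      /* nb_outputs entries, may be NULL */
   const float *weights;   /* nb_inputs x nb_outputs, column-major assumed */
   int nb_inputs;
   int nb_outputs;
} LinearLayer;

/* Simplified dense matrix-vector product: out = W*in + b. */
static void compute_linear(const LinearLayer *linear, float *out, const float *in)
{
   int i, j;
   for (i = 0; i < linear->nb_outputs; i++)
      out[i] = linear->bias ? linear->bias[i] : 0.f;
   for (j = 0; j < linear->nb_inputs; j++)
      for (i = 0; i < linear->nb_outputs; i++)
         out[i] += linear->weights[j*linear->nb_outputs + i]*in[j];
}

/* Simplified activation: tanh, with a pass-through (linear) fallback. */
static void compute_activation(float *output, const float *input, int N, int activation)
{
   int i;
   if (activation == ACTIVATION_TANH)
      for (i = 0; i < N; i++) output[i] = tanhf(input[i]);
   else
      for (i = 0; i < N; i++) output[i] = input[i];
}

/* The new helper from this commit: dense = linear + activation, in place. */
void compute_generic_dense(const LinearLayer *layer, float *output, const float *input, int activation)
{
   compute_linear(layer, output, input);
   compute_activation(output, output, layer->nb_outputs, activation);
}

int main(void)
{
   /* Hypothetical 2-input/3-output layer with zero bias. */
   static const float w[6] = {.1f, .2f, .3f, .4f, .5f, .6f};
   static const float b[3] = {0};
   LinearLayer layer = {b, w, 2, 3};
   float in[2] = {1.f, -1.f};
   float out[3];
   compute_generic_dense(&layer, out, in, ACTIVATION_TANH);
   return 0;
}

Keeping the activation as a runtime parameter lets the same LinearLayer primitive serve a whole family of generic compute functions, which is presumably why the header now also exposes prototypes for compute_generic_gru() and compute_generic_conv1d() alongside compute_generic_dense().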