diff options
author | Jean-Marc Valin <jmvalin@amazon.com> | 2023-07-25 04:31:37 +0300 |
---|---|---|
committer | Jean-Marc Valin <jmvalin@amazon.com> | 2023-07-26 10:17:37 +0300 |
commit | 03b3483af99c52b558370c5bce064e654aa22af0 (patch) | |
tree | eba658dc9febaf57ff75b962df66eb0155bbec15 | |
parent | f239c0394089a8f9a42e4b8dc744a778fa7e5224 (diff) |
Add compute_generic_dense()
And missing prototypes
-rw-r--r-- | dnn/nnet.c | 6 | ||||
-rw-r--r-- | dnn/nnet.h | 5 |
2 files changed, 11 insertions, 0 deletions
--- a/dnn/nnet.c
+++ b/dnn/nnet.c
@@ -102,6 +102,12 @@ void compute_linear(const LinearLayer *linear, float *out, const float *in)
    }
 }
 
+void compute_generic_dense(const LinearLayer *layer, float *output, const float *input, int activation)
+{
+   compute_linear(layer, output, input);
+   compute_activation(output, output, layer->nb_outputs, activation);
+}
+
 #define MAX_RNN_NEURONS_ALL IMAX(IMAX(MAX_RNN_NEURONS, PLC_MAX_RNN_NEURONS), DRED_MAX_RNN_NEURONS)
--- a/dnn/nnet.h
+++ b/dnn/nnet.h
@@ -131,6 +131,11 @@ typedef struct {
   int dim;
 } EmbeddingLayer;
 
+void compute_linear(const LinearLayer *linear, float *out, const float *in);
+void compute_generic_dense(const LinearLayer *layer, float *output, const float *input, int activation);
+void compute_generic_gru(const LinearLayer *input_weights, const LinearLayer *recurrent_weights, float *state, const float *in);
+void compute_generic_conv1d(const LinearLayer *layer, float *output, float *mem, const float *input, int input_size, int activation);
+
 void compute_activation(float *output, const float *input, int N, int activation);
 void _lpcnet_compute_dense(const DenseLayer *layer, float *output, const float *input);