gitlab.xiph.org/xiph/opus.git
author     Jean-Marc Valin <jmvalin@amazon.com>   2023-10-20 22:12:42 +0300
committer  Jean-Marc Valin <jmvalin@amazon.com>   2023-10-20 22:13:43 +0300
commit     1032e47d3f3376947280d2c7769c522b6474c6ad
tree       318102192efd776fd963a462fbf129e75cbf21ea
parent     7f0d456c4b3c1579f0884f2e26c55fea45d7e00a
more cleanup
-rw-r--r--  dnn/nnet.h                  | 55
-rw-r--r--  dnn/parse_lpcnet_weights.c  | 68
2 files changed, 0 insertions(+), 123 deletions(-)
diff --git a/dnn/nnet.h b/dnn/nnet.h
index e385663a..ebfb23de 100644
--- a/dnn/nnet.h
+++ b/dnn/nnet.h
@@ -94,16 +94,6 @@ typedef struct {
typedef struct {
const float *bias;
- const float *input_weights;
- const float *factor;
- int nb_inputs;
- int nb_neurons;
- int nb_channels;
- int activation;
-} MDenseLayer;
-
-typedef struct {
- const float *bias;
const float *subias;
const qweight *input_weights;
const int *input_weights_idx;
@@ -116,17 +106,6 @@ typedef struct {
typedef struct {
const float *bias;
- const float *subias;
- const float *diag_weights;
- const qweight *recurrent_weights;
- const int *idx;
- int nb_neurons;
- int activation;
- int reset_after;
-} SparseGRULayer;
-
-typedef struct {
- const float *bias;
const float *input_weights;
int nb_inputs;
int kernel_size;
@@ -151,8 +130,6 @@ void compute_activation(float *output, const float *input, int N, int activation
void _lpcnet_compute_dense(const DenseLayer *layer, float *output, const float *input);
-void compute_mdense(const MDenseLayer *layer, float *output, const float *input);
-
void compute_gruB(const GRULayer *gru, const float* gru_b_condition, float *state, const float *input);
@@ -184,15 +161,6 @@ int conv2d_init(Conv2dLayer *layer, const WeightArray *arrays,
int ktime,
int kheight);
-int mdense_init(MDenseLayer *layer, const WeightArray *arrays,
- const char *bias,
- const char *input_weights,
- const char *factor,
- int nb_inputs,
- int nb_neurons,
- int nb_channels,
- int activation);
-
int dense_init(DenseLayer *layer, const WeightArray *arrays,
const char *bias,
const char *input_weights,
@@ -211,30 +179,7 @@ int gru_init(GRULayer *layer, const WeightArray *arrays,
int activation,
int reset_after);
-int sparse_gru_init(SparseGRULayer *layer, const WeightArray *arrays,
- const char *bias,
- const char *subias,
- const char *diag_weights,
- const char *recurrent_weights,
- const char *idx,
- int nb_neurons,
- int activation,
- int reset_after);
-
-int conv1d_init(Conv1DLayer *layer, const WeightArray *arrays,
- const char *bias,
- const char *input_weights,
- int nb_inputs,
- int kernel_size,
- int nb_neurons,
- int activation);
-
void compute_conv2d(const Conv2dLayer *conv, float *out, float *mem, const float *in, int height, int hstride, int activation);
-int embedding_init(EmbeddingLayer *layer, const WeightArray *arrays,
- const char *embedding_weights,
- int nb_inputs,
- int dim);
-
#endif /* _MLP_H_ */
diff --git a/dnn/parse_lpcnet_weights.c b/dnn/parse_lpcnet_weights.c
index 9805ec8c..be2dafdc 100644
--- a/dnn/parse_lpcnet_weights.c
+++ b/dnn/parse_lpcnet_weights.c
@@ -175,24 +175,6 @@ int linear_init(LinearLayer *layer, const WeightArray *arrays,
return 0;
}
-int mdense_init(MDenseLayer *layer, const WeightArray *arrays,
- const char *bias,
- const char *input_weights,
- const char *factor,
- int nb_inputs,
- int nb_neurons,
- int nb_channels,
- int activation)
-{
- if ((layer->bias = find_array_check(arrays, bias, nb_neurons*nb_channels*sizeof(layer->bias[0]))) == NULL) return 1;
- if ((layer->input_weights = find_array_check(arrays, input_weights, nb_inputs*nb_channels*nb_neurons*sizeof(layer->input_weights[0]))) == NULL) return 1;
- if ((layer->factor = find_array_check(arrays, factor, nb_channels*nb_neurons*sizeof(layer->factor[0]))) == NULL) return 1;
- layer->nb_inputs = nb_inputs;
- layer->nb_neurons = nb_neurons;
- layer->nb_channels = nb_channels;
- layer->activation = activation;
- return 0;
-}
int dense_init(DenseLayer *layer, const WeightArray *arrays,
const char *bias,
@@ -233,45 +215,6 @@ int gru_init(GRULayer *layer, const WeightArray *arrays,
return 0;
}
-int sparse_gru_init(SparseGRULayer *layer, const WeightArray *arrays,
- const char *bias,
- const char *subias,
- const char *diag_weights,
- const char *recurrent_weights,
- const char *idx,
- int nb_neurons,
- int activation,
- int reset_after)
-{
- int total_blocks;
- if ((layer->bias = find_array_check(arrays, bias, 6*nb_neurons*sizeof(layer->bias[0]))) == NULL) return 1;
- if ((layer->subias = find_array_check(arrays, subias, 6*nb_neurons*sizeof(layer->subias[0]))) == NULL) return 1;
- if ((layer->diag_weights = find_array_check(arrays, diag_weights, 3*nb_neurons*sizeof(layer->diag_weights[0]))) == NULL) return 1;
- if ((layer->idx = find_idx_check(arrays, idx, nb_neurons, 3*nb_neurons, &total_blocks)) == NULL) return 1;
- if ((layer->recurrent_weights = find_array_check(arrays, recurrent_weights, SPARSE_BLOCK_SIZE*total_blocks*sizeof(layer->recurrent_weights[0]))) == NULL) return 1;
- layer->nb_neurons = nb_neurons;
- layer->activation = activation;
- layer->reset_after = reset_after;
- return 0;
-}
-
-int conv1d_init(Conv1DLayer *layer, const WeightArray *arrays,
- const char *bias,
- const char *input_weights,
- int nb_inputs,
- int kernel_size,
- int nb_neurons,
- int activation)
-{
- if ((layer->bias = find_array_check(arrays, bias, nb_neurons*sizeof(layer->bias[0]))) == NULL) return 1;
- if ((layer->input_weights = find_array_check(arrays, input_weights, kernel_size*nb_inputs*nb_neurons*sizeof(layer->input_weights[0]))) == NULL) return 1;
- layer->nb_inputs = nb_inputs;
- layer->kernel_size = kernel_size;
- layer->nb_neurons = nb_neurons;
- layer->activation = activation;
- return 0;
-}
-
int conv2d_init(Conv2dLayer *layer, const WeightArray *arrays,
const char *bias,
const char *float_weights,
@@ -297,17 +240,6 @@ int conv2d_init(Conv2dLayer *layer, const WeightArray *arrays,
return 0;
}
-int embedding_init(EmbeddingLayer *layer, const WeightArray *arrays,
- const char *embedding_weights,
- int nb_inputs,
- int dim)
-{
- if ((layer->embedding_weights = find_array_check(arrays, embedding_weights, nb_inputs*dim*sizeof(layer->embedding_weights[0]))) == NULL) return 1;
- layer->nb_inputs = nb_inputs;
- layer->dim = dim;
- return 0;
-}
-
#if 0
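
Note on the surviving init helpers: the removed functions above all follow the same pattern, which the retained dense_init(), gru_init() and conv2d_init() keep — each named weight blob is looked up with find_array_check(), which also validates its size, and the function returns 1 if any blob is missing or mismatched. A minimal usage sketch follows; the blob names are invented for illustration, dense_init()'s trailing parameters are assumed by analogy with the removed mdense_init() (the hunks above only show the first two string arguments), and ACTIVATION_TANH is assumed to be one of the activation constants defined in nnet.h.

/* Hypothetical caller: build a DenseLayer from a parsed WeightArray table.
 * Nothing here is taken from the patch itself; it only illustrates the
 * init pattern visible in the removed *_init() functions above. */
#include "nnet.h"

static int load_output_layer(const WeightArray *arrays, DenseLayer *layer)
{
   /* "output_bias"/"output_weights" are made-up blob names; real models use
    * the names emitted by the weight dumper. */
   if (dense_init(layer, arrays, "output_bias", "output_weights",
                  /*nb_inputs=*/128, /*nb_neurons=*/32, ACTIVATION_TANH) != 0) {
      return 1;   /* a named array was missing or had an unexpected size */
   }
   return 0;
}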