From 9d40e5cb0813464a9c6089210cbb72d99b94f253 Mon Sep 17 00:00:00 2001
From: Jean-Marc Valin <jmvalin@jmvalin.ca>
Date: Sun, 23 Jul 2023 14:21:21 -0400
Subject: [PATCH] Add loading for LinearLayer

Untested
---
 dnn/nnet.h                 | 11 +++++++++++
 dnn/parse_lpcnet_weights.c | 47 ++++++++++++++++++++++++++++++++++++++++++++++
 2 files changed, 58 insertions(+)

diff --git a/dnn/nnet.h b/dnn/nnet.h
index 92a36ec2..a8746c16 100644
--- a/dnn/nnet.h
+++ b/dnn/nnet.h
@@ -158,6 +158,17 @@ extern const WeightArray lpcnet_plc_arrays[];
 extern const WeightArray rdovae_enc_arrays[];
 extern const WeightArray rdovae_dec_arrays[];
 
+int linear_init(LinearLayer *layer, const WeightArray *arrays,
+  const char *bias,
+  const char *subias,
+  const char *weights,
+  const char *float_weights,
+  const char *weights_idx,
+  const char *diag,
+  const char *scale,
+  int nb_inputs,
+  int nb_outputs);
+
 int mdense_init(MDenseLayer *layer, const WeightArray *arrays,
   const char *bias,
   const char *input_weights,
diff --git a/dnn/parse_lpcnet_weights.c b/dnn/parse_lpcnet_weights.c
index 493ecb0a..833f972f 100644
--- a/dnn/parse_lpcnet_weights.c
+++ b/dnn/parse_lpcnet_weights.c
@@ -113,6 +113,53 @@ static const void *find_idx_check(const WeightArray *arrays, const char *name, i
   return a->data;
 }
 
+/* Initializes a LinearLayer by looking up its arrays by name in a
+   WeightArray table.  Quantized and/or float weights can be stored
+   sparse (weights_idx != NULL, with the block count taken from the
+   index array) or dense (nb_inputs*nb_outputs entries).  Returns 0 on
+   success, 1 if any requested array is missing or wrongly sized. */
+int linear_init(LinearLayer *layer, const WeightArray *arrays,
+  const char *bias,
+  const char *subias,
+  const char *weights,
+  const char *float_weights,
+  const char *weights_idx,
+  const char *diag,
+  const char *scale,
+  int nb_inputs,
+  int nb_outputs)
+{
+  int total_blocks = 0;
+  if ((layer->bias = find_array_check(arrays, bias, nb_outputs*sizeof(layer->bias[0]))) == NULL) return 1;
+  if ((layer->subias = find_array_check(arrays, subias, nb_outputs*sizeof(layer->subias[0]))) == NULL) return 1;
+  layer->weights = NULL;
+  layer->float_weights = NULL;
+  layer->weights_idx = NULL;
+  if (weights_idx != NULL) {
+    /* Sparse storage: the index array determines the total block count. */
+    if ((layer->weights_idx = find_idx_check(arrays, weights_idx, nb_outputs, nb_inputs, &total_blocks)) == NULL) return 1;
+    if (weights != NULL) {
+      if ((layer->weights = find_array_check(arrays, weights, SPARSE_BLOCK_SIZE*total_blocks*sizeof(layer->weights[0]))) == NULL) return 1;
+    }
+    if (float_weights != NULL) {
+      if ((layer->float_weights = find_array_check(arrays, float_weights, SPARSE_BLOCK_SIZE*total_blocks*sizeof(layer->float_weights[0]))) == NULL) return 1;
+    }
+  } else {
+    /* Dense storage. */
+    if (weights != NULL) {
+      if ((layer->weights = find_array_check(arrays, weights, nb_inputs*nb_outputs*sizeof(layer->weights[0]))) == NULL) return 1;
+    }
+    if (float_weights != NULL) {
+      if ((layer->float_weights = find_array_check(arrays, float_weights, nb_inputs*nb_outputs*sizeof(layer->float_weights[0]))) == NULL) return 1;
+    }
+  }
+  if ((layer->diag = find_array_check(arrays, diag, nb_outputs*sizeof(layer->diag[0]))) == NULL) return 1;
+  if ((layer->scale = find_array_check(arrays, scale, nb_outputs*sizeof(layer->scale[0]))) == NULL) return 1;
+  layer->nb_inputs = nb_inputs;
+  layer->nb_outputs = nb_outputs;
+  return 0;
+}
+
 int mdense_init(MDenseLayer *layer, const WeightArray *arrays,
   const char *bias,
   const char *input_weights,