mirror of
https://github.com/xiph/opus.git
synced 2025-05-31 07:37:42 +00:00
Pre-computing GRU_A's input contribution.
This commit is contained in:
parent
040aa437c3
commit
732fce9ab2
4 changed files with 90 additions and 13 deletions
|
@ -91,18 +91,22 @@ def dump_gru_layer(self, f, hf):
|
|||
# Register dump_gru_layer as the dumper for CuDNN-backed GRU layers.
CuDNNGRU.dump_layer = dump_gru_layer
|
||||
# Plain (non-CuDNN) GRU layers share the same dumper.
GRU.dump_layer = dump_gru_layer
|
||||
|
||||
def dump_dense_layer_impl(name, weights, bias, activation, f, hf):
|
||||
printVector(f, weights, name + '_weights')
|
||||
printVector(f, bias, name + '_bias')
|
||||
f.write('const DenseLayer {} = {{\n {}_bias,\n {}_weights,\n {}, {}, ACTIVATION_{}\n}};\n\n'
|
||||
.format(name, name, name, weights.shape[0], weights.shape[1], activation))
|
||||
hf.write('#define {}_OUT_SIZE {}\n'.format(name.upper(), weights.shape[1]))
|
||||
hf.write('extern const DenseLayer {};\n\n'.format(name));
|
||||
|
||||
def dump_dense_layer(self, f, hf):
|
||||
name = self.name
|
||||
print("printing layer " + name + " of type " + self.__class__.__name__)
|
||||
weights = self.get_weights()
|
||||
printVector(f, weights[0], name + '_weights')
|
||||
printVector(f, weights[-1], name + '_bias')
|
||||
activation = self.activation.__name__.upper()
|
||||
f.write('const DenseLayer {} = {{\n {}_bias,\n {}_weights,\n {}, {}, ACTIVATION_{}\n}};\n\n'
|
||||
.format(name, name, name, weights[0].shape[0], weights[0].shape[1], activation))
|
||||
hf.write('#define {}_OUT_SIZE {}\n'.format(name.upper(), weights[0].shape[1]))
|
||||
hf.write('extern const DenseLayer {};\n\n'.format(name));
|
||||
dump_dense_layer_impl(name, weights[0], weights[1], activation, f, hf)
|
||||
return False
|
||||
|
||||
Dense.dump_layer = dump_dense_layer
|
||||
|
||||
def dump_mdense_layer(self, f, hf):
|
||||
|
@ -141,15 +145,18 @@ def dump_conv1d_layer(self, f, hf):
|
|||
# Register the Conv1D dumper defined above.
Conv1D.dump_layer = dump_conv1d_layer
|
||||
|
||||
|
||||
def dump_embedding_layer_impl(name, weights, f, hf):
|
||||
printVector(f, weights, name + '_weights')
|
||||
f.write('const EmbeddingLayer {} = {{\n {}_weights,\n {}, {}\n}};\n\n'
|
||||
.format(name, name, weights.shape[0], weights.shape[1]))
|
||||
hf.write('#define {}_OUT_SIZE {}\n'.format(name.upper(), weights.shape[1]))
|
||||
hf.write('extern const EmbeddingLayer {};\n\n'.format(name));
|
||||
|
||||
def dump_embedding_layer(self, f, hf):
|
||||
name = self.name
|
||||
print("printing layer " + name + " of type " + self.__class__.__name__)
|
||||
weights = self.get_weights()
|
||||
printVector(f, weights[0], name + '_weights')
|
||||
f.write('const EmbeddingLayer {} = {{\n {}_weights,\n {}, {}\n}};\n\n'
|
||||
.format(name, name, weights[0].shape[0], weights[0].shape[1]))
|
||||
hf.write('#define {}_OUT_SIZE {}\n'.format(name.upper(), weights[0].shape[1]))
|
||||
hf.write('extern const EmbeddingLayer {};\n\n'.format(name));
|
||||
weights = self.get_weights()[0]
|
||||
dump_embedding_layer_impl(name, weights, f, hf)
|
||||
return False
|
||||
Embedding.dump_layer = dump_embedding_layer
|
||||
|
||||
|
@ -178,6 +185,21 @@ f.write('#ifdef HAVE_CONFIG_H\n#include "config.h"\n#endif\n\n#include "nnet.h"\
|
|||
hf.write('/*This file is automatically generated from a Keras model*/\n\n')
hf.write('#ifndef RNN_DATA_H\n#define RNN_DATA_H\n\n#include "nnet.h"\n\n')

embed_size = lpcnet.embed_size

# Pre-compute GRU_A's input contribution: fold each embedding matrix into
# the corresponding slice of gru_a's input weights, so inference can use a
# table lookup instead of an input matmul.
# NOTE(review): layers[18] is assumed to be gru_a — confirm against the model.
gru_a_input_w = model.layers[18].get_weights()[0]
E = model.get_layer('embed_sig').get_weights()[0]
dump_embedding_layer_impl('gru_a_embed_sig', np.dot(E, gru_a_input_w[:embed_size, :]), f, hf)
# The 'pred' slice is also multiplied by the signal embedding.
dump_embedding_layer_impl('gru_a_embed_pred', np.dot(E, gru_a_input_w[embed_size:2*embed_size, :]), f, hf)
E = model.get_layer('embed_exc').get_weights()[0]
dump_embedding_layer_impl('gru_a_embed_exc', np.dot(E, gru_a_input_w[2*embed_size:3*embed_size, :]), f, hf)
# Remaining rows feed the (dense) feature inputs.
W = gru_a_input_w[3*embed_size:, :]
#FIXME: dump only half the biases
b = model.layers[18].get_weights()[2]
dump_dense_layer_impl('gru_a_dense_feature', W, b, 'LINEAR', f, hf)

layer_list = []
|
||||
for i, layer in enumerate(model.layers):
|
||||
if layer.dump_layer(f, hf):
|
||||
|
|
Loading…
Add table
Add a link
Reference in a new issue