Mirror of https://github.com/xiph/opus.git
Dump embedding
commit 477d08734d
parent d93239e955
2 changed files with 13 additions and 4 deletions
@@ -42,7 +42,6 @@ def dump_gru_layer(self, f, hf):
     printVector(f, weights[0], name + '_weights')
     printVector(f, weights[1], name + '_recurrent_weights')
     printVector(f, weights[-1], name + '_bias')
-    #activation = re.search('function (.*) at', str(layer.activation)).group(1).upper()
     if hasattr(self, 'activation'):
         activation = self.activation.__name__.upper()
     else:
@@ -65,7 +64,6 @@ def dump_dense_layer(self, f, hf):
     weights = self.get_weights()
     printVector(f, weights[0], name + '_weights')
     printVector(f, weights[-1], name + '_bias')
-    #activation = re.search('function (.*) at', str(layer.activation)).group(1).upper()
     if hasattr(self, 'activation'):
         activation = self.activation.__name__.upper()
     else:
@@ -84,7 +82,6 @@ def dump_mdense_layer(self, f, hf):
     printVector(f, weights[0], name + '_weights')
     printVector(f, weights[1], name + '_bias')
     printVector(f, weights[1], name + '_factor')
-    #activation = re.search('function (.*) at', str(layer.activation)).group(1).upper()
     if hasattr(self, 'activation'):
         activation = self.activation.__name__.upper()
     else:
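All three dump functions above rely on a printVector helper defined earlier in the same script, outside this diff. As a rough sketch of what such a helper does, flattening a Keras weight array and emitting it as a C float array under the given symbol name (an illustrative stand-in, not the project's exact code):

    import numpy as np

    def printVector(f, vector, name):
        # Flatten the weight tensor and write it as a C array called `name`.
        # Illustrative stand-in only; the real helper may format differently.
        v = np.reshape(vector, (-1))
        f.write('static const float {}[{}] = {{\n'.format(name, len(v)))
        f.write(',\n'.join('   {:g}f'.format(x) for x in v))
        f.write('\n};\n\n')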
@@ -96,6 +93,18 @@ def dump_mdense_layer(self, f, hf):
     return False
 MDense.dump_layer = dump_mdense_layer
 
+def dump_embedding_layer(self, f, hf):
+    name = self.name
+    print("printing layer " + name + " of type " + self.__class__.__name__)
+    weights = self.get_weights()
+    printVector(f, weights[0], name + '_weights')
+    f.write('const EmbeddingLayer {} = {{\n {}_weights,\n {}, {}\n}};\n\n'
+            .format(name, name, weights[0].shape[0], weights[0].shape[1]))
+    hf.write('#define {}_SIZE {}\n'.format(name.upper(), weights[0].shape[1]))
+    hf.write('extern const EmbeddingLayer {};\n\n'.format(name));
+    return False
+Embedding.dump_layer = dump_embedding_layer
+
 
 model, _, _ = lpcnet.new_lpcnet_model(rnn_units1=640, use_gpu=False)
 model.compile(optimizer='adam', loss='sparse_categorical_crossentropy', metrics=['sparse_categorical_accuracy'])
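For reference, this stand-alone snippet (not part of the commit) reproduces the .c and .h text that the new dump_embedding_layer would emit for an embedding named 'embed_pitch' whose weight matrix has shape (256, 64); the format strings are copied from the hunk above, and the shape is an assumption based on Embedding(256, 64) in the model code below:

    # Illustration only: regenerate the emitted text without a Keras model.
    name = 'embed_pitch'
    rows, cols = 256, 64   # assumed weights[0].shape for Embedding(256, 64)
    c_text = ('const EmbeddingLayer {} = {{\n {}_weights,\n {}, {}\n}};\n\n'
              .format(name, name, rows, cols))
    h_text = ('#define {}_SIZE {}\n'.format(name.upper(), cols)
              + 'extern const EmbeddingLayer {};\n\n'.format(name))
    print(c_text)
    print(h_text)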
@@ -102,7 +102,7 @@ def new_lpcnet_model(rnn_units1=384, rnn_units2=16, nb_used_features = 38, use_g
     embed2 = Embedding(256, embed_size, embeddings_initializer=PCMInit(), name='embed_exc')
     cexc = Reshape((-1, embed_size))(embed2(exc))
 
-    pembed = Embedding(256, 64)
+    pembed = Embedding(256, 64, name='embed_pitch')
     cat_feat = Concatenate()([feat, Reshape((-1, 64))(pembed(pitch))])
 
     cfeat = fconv2(fconv1(cat_feat))
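Because the emitted C identifiers are derived from layer.name, giving the pitch embedding an explicit name keeps the generated symbols stable (embed_pitch, EMBED_PITCH_SIZE) instead of depending on Keras's automatic embedding_N numbering. A minimal sketch of how the monkey-patched dump_layer methods are presumably driven by the surrounding script, which is not shown in this diff (file names here are illustrative):

    # Sketch only: walk the model and let each layer class's dump_layer()
    # (attached above for GRU, Dense, MDense and now Embedding) write its
    # weights into the generated C source and header.
    with open('nnet_data.c', 'w') as f, open('nnet_data.h', 'w') as hf:
        for layer in model.layers:
            if hasattr(layer, 'dump_layer'):
                layer.dump_layer(f, hf)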