WIP: Adding a constraint

This commit is contained in:
Jean-Marc Valin 2020-12-24 02:50:20 -05:00
parent c045702e51
commit 1657bae024
2 changed files with 24 additions and 5 deletions

View file

@@ -29,6 +29,7 @@ import math
from tensorflow.keras.models import Model
from tensorflow.keras.layers import Input, GRU, Dense, Embedding, Reshape, Concatenate, Lambda, Conv1D, Multiply, Add, Bidirectional, MaxPooling1D, Activation
from tensorflow.keras import backend as K
from tensorflow.keras.constraints import Constraint
from tensorflow.keras.initializers import Initializer
from tensorflow.keras.callbacks import Callback
from mdense import MDense
@@ -115,6 +116,21 @@ class PCMInit(Initializer):
'seed': self.seed
}
class WeightClip(Constraint):
    """Keras weight constraint that clamps every weight into [-c, c].

    Applied element-wise to the layer's weight tensor after each
    gradient update, so no weight can leave the fixed range.
    """

    def __init__(self, c=2):
        # Half-width of the allowed interval [-c, c].
        self.c = c

    def __call__(self, p):
        # Element-wise clamp of the incoming weight tensor.
        return K.clip(p, -self.c, self.c)

    def get_config(self):
        # Serialized form used by Keras when saving/loading the model.
        config = {'name': self.__class__.__name__, 'c': self.c}
        return config
# Shared constraint instance applied to the GRU weights below; clips
# weights to [-0.999, 0.999] — presumably kept just under 1.0 on
# purpose (e.g. for later quantization/stability) — TODO confirm.
constraint = WeightClip(0.999)
def new_lpcnet_model(rnn_units1=384, rnn_units2=16, nb_used_features = 38, training=False, adaptation=False):
pcm = Input(shape=(None, 3))
feat = Input(shape=(None, nb_used_features))
@@ -142,8 +158,10 @@ def new_lpcnet_model(rnn_units1=384, rnn_units2=16, nb_used_features = 38, train
rep = Lambda(lambda x: K.repeat_elements(x, frame_size, 1))
rnn = GRU(rnn_units1, return_sequences=True, return_state=True, recurrent_activation="sigmoid", reset_after='true', name='gru_a')
rnn2 = GRU(rnn_units2, return_sequences=True, return_state=True, recurrent_activation="sigmoid", reset_after='true', name='gru_b')
rnn = GRU(rnn_units1, return_sequences=True, return_state=True, recurrent_activation="sigmoid", reset_after='true', name='gru_a',
recurrent_constraint = constraint)
rnn2 = GRU(rnn_units2, return_sequences=True, return_state=True, recurrent_activation="sigmoid", reset_after='true', name='gru_b',
kernel_constraint=constraint)
rnn_in = Concatenate()([cpcm, rep(cfeat)])
md = MDense(pcm_levels, activation='softmax', name='dual_fc')

View file

@@ -102,14 +102,15 @@ del pred
del in_exc
# dump models to disk as we go
checkpoint = ModelCheckpoint('lpcnet32v_384_10_G16_{epoch:02d}.h5')
checkpoint = ModelCheckpoint('lpcnet32y_384_10_G16_{epoch:02d}.h5')
#Set this to True to adapt an existing model (e.g. on new data)
adaptation = False
model.load_weights('lpcnet32v_384_10_G16_00.h5')
if adaptation:
#Adapting from an existing model
model.load_weights('lpcnet24c_384_10_G16_120.h5')
model.load_weights('lpcnet32v_384_10_G16_100.h5')
sparsify = lpcnet.Sparsify(0, 0, 1, (0.05, 0.05, 0.2))
lr = 0.0001
decay = 0
@@ -120,5 +121,5 @@ else:
decay = 5e-5
model.compile(optimizer=Adam(lr, decay=decay, beta_2=0.99), loss='sparse_categorical_crossentropy')
model.save_weights('lpcnet32v_384_10_G16_00.h5');
model.save_weights('lpcnet32y_384_10_G16_00.h5');
model.fit([in_data, features, periods], out_exc, batch_size=batch_size, epochs=nb_epochs, validation_split=0.0, callbacks=[checkpoint, sparsify])