Hard quantization for training

Also, use a stateful GRU to randomize the initialization
This commit is contained in:
Jean-Marc Valin 2021-10-04 02:53:46 -04:00
parent 3b8d64d746
commit c5a17a0716
3 changed files with 77 additions and 28 deletions

View file

@ -0,0 +1,26 @@
import numpy as np
from tensorflow.keras.utils import Sequence
class LPCNetLoader(Sequence):
    """Keras Sequence yielding shuffled mini-batches of LPCNet training data.

    Each batch is ``([in_data, features, periods], out_data)`` where
    ``in_data`` is the first 3 channels of ``data`` along the last axis and
    ``out_data`` is channel 3.  Samples are truncated to a whole number of
    batches, and the sample order is reshuffled at the end of every epoch.
    """
    def __init__(self, data, features, periods, batch_size):
        # data/features/periods appear to be 3-D arrays indexed by sample on
        # axis 0 (sliced with [i, :, :] in __getitem__) — shapes beyond that
        # are not visible here.
        self.batch_size = batch_size
        # Keep only as many samples as fit in whole batches; use the builtin
        # min since these are plain Python ints, not arrays.
        nb_samples = min(data.shape[0], features.shape[0], periods.shape[0])
        self.nb_batches = nb_samples // self.batch_size
        nb_used = self.nb_batches * self.batch_size
        self.data = data[:nb_used, :]
        self.features = features[:nb_used, :]
        self.periods = periods[:nb_used, :]
        self.on_epoch_end()
    def on_epoch_end(self):
        # Reshuffle the (global) sample order between epochs so batches
        # differ from one epoch to the next.
        self.indices = np.arange(self.nb_batches * self.batch_size)
        np.random.shuffle(self.indices)
    def __getitem__(self, index):
        # Compute this batch's shuffled sample indices once and reuse them
        # for all three arrays (the original recomputed the slice 3 times).
        batch = self.indices[index * self.batch_size:(index + 1) * self.batch_size]
        data = self.data[batch, :, :]
        in_data = data[:, :, :3]      # network inputs: channels 0..2
        out_data = data[:, :, 3:4]    # training target: channel 3
        features = self.features[batch, :, :]
        periods = self.periods[batch, :, :]
        return ([in_data, features, periods], out_data)
    def __len__(self):
        # Number of whole batches available per epoch.
        return self.nb_batches