Faster activation functions for AVX

Using a rational function approximation for tanh() and sigmoid().
Jean-Marc Valin 2021-06-29 04:05:48 -04:00
parent 5571ef1b8e
commit e35441f2cc
3 changed files with 220 additions and 13 deletions
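
The hunk below only touches the softmax path, so for background on the change the commit message describes, here is a minimal scalar sketch of a rational (Pade-style) approximation of tanh() and sigmoid(). The function names, the (5,4) Pade coefficients, and the clamping are illustrative assumptions; the AVX code added by this commit may use a different order, different coefficients, and intrinsics rather than scalar C:

/* Illustrative sketch only: the (5,4) Pade approximant of tanh
   around 0, tanh(x) ~= x*(945 + 105*x^2 + x^4) / (945 + 420*x^2 + 15*x^4). */
static inline float tanh_rational(float x)
{
   float x2 = x*x;
   float num = x*(945.f + x2*(105.f + x2));
   float den = 945.f + x2*(420.f + 15.f*x2);
   float y = num/den;
   /* The rational form overshoots +/-1 for large |x|, so clamp. */
   if (y > 1.f) y = 1.f;
   if (y < -1.f) y = -1.f;
   return y;
}

/* Exact identity: sigmoid(x) = .5 + .5*tanh(.5*x), so one
   approximation serves both activations. */
static inline float sigmoid_rational(float x)
{
   return .5f + .5f*tanh_rational(.5f*x);
}

A rational form like this suits SIMD well: each lane needs only multiplies, adds, and one division (or reciprocal estimate), with no call to exp().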

@@ -80,8 +80,9 @@ void compute_activation(float *output, float *input, int N, int activation)
          output[i] = relu(input[i]);
    } else if (activation == ACTIVATION_SOFTMAX) {
 #ifdef SOFTMAX_HACK
-      for (i=0;i<N;i++)
-         output[i] = input[i];
+      RNN_COPY(output, input, N);
+      /*for (i=0;i<N;i++)
+         output[i] = input[i];*/
 #else
       float sum = 0;
       softmax(output, input, N);
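
For context on the hunk above: RNN_COPY collapses the now commented-out element loop into a single call. In this codebase it is a memcpy-style helper defined in a common header; a sketch of an equivalent definition, given as an assumption rather than the tree's exact macro:

#include <string.h>

/* Assumed equivalent of the macro used in the hunk; the real
   definition may add size or type checks. */
#define RNN_COPY(dst, src, n) (memcpy((dst), (src), (n)*sizeof(*(dst))))

Either form implements the same identity pass-through that SOFTMAX_HACK relies on; the macro just expresses it in one call instead of a scalar loop.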