Mirror of https://github.com/xiph/opus.git, synced 2025-05-31 07:37:42 +00:00.
Faster activation functions for AVX
Using rational function approximation for tanh() and sigmoid.
This commit is contained in:
parent
5571ef1b8e
commit
e35441f2cc
3 changed files with 220 additions and 13 deletions
|
```diff
@@ -80,8 +80,9 @@ void compute_activation(float *output, float *input, int N, int activation)
          output[i] = relu(input[i]);
    } else if (activation == ACTIVATION_SOFTMAX) {
 #ifdef SOFTMAX_HACK
-      for (i=0;i<N;i++)
-         output[i] = input[i];
+      RNN_COPY(output, input, N);
+      /*for (i=0;i<N;i++)
+         output[i] = input[i];*/
 #else
       float sum = 0;
       softmax(output, input, N);
```
|
|
Loading…
Add table
Add a link
Reference in a new issue