Add swish activation support

This commit is contained in:
Jean-Marc Valin 2022-01-24 16:22:29 -05:00
parent 93d6c3975a
commit 57f5681987
2 changed files with 14 additions and 0 deletions

View file

@@ -46,6 +46,17 @@
#define SOFTMAX_HACK
#define MAX_ACTIVATIONS (4096)
/* Swish activation applied element-wise: y[i] = x[i] * sigmoid(x[i]).
 *
 * y : output array of N floats (may alias x; each element is read before
 *     its slot is written)
 * x : input array of N floats
 * N : number of elements (N == 0 is a no-op)
 *
 * The sigmoid is computed through vec_sigmoid() so the result matches the
 * (possibly vectorized/approximate) sigmoid used elsewhere in this file.
 * The input is processed in chunks of at most MAX_ACTIVATIONS: the
 * original code asserted N <= MAX_ACTIVATIONS, but celt_assert is
 * compiled out in release builds, so a larger N would have overflowed
 * the fixed-size stack buffer. Chunking removes that limit while being
 * bit-identical for all previously valid sizes.
 */
static OPUS_INLINE void vec_swish(float *y, const float *x, int N)
{
   float tmp[MAX_ACTIVATIONS];
   int done = 0;
   while (done < N) {
      int i;
      int chunk = N - done;
      if (chunk > MAX_ACTIVATIONS)
         chunk = MAX_ACTIVATIONS;
      vec_sigmoid(tmp, x + done, chunk);
      for (i=0;i<chunk;i++)
         y[done + i] = x[done + i]*tmp[i];
      done += chunk;
   }
}
static OPUS_INLINE float relu(float x)
{
@@ -75,6 +86,8 @@ void compute_activation(float *output, const float *input, int N, int activation
vec_sigmoid(output, input, N);
} else if (activation == ACTIVATION_TANH) {
vec_tanh(output, input, N);
} else if (activation == ACTIVATION_SWISH) {
vec_swish(output, input, N);
} else if (activation == ACTIVATION_RELU) {
for (i=0;i<N;i++)
output[i] = relu(input[i]);

View file

@@ -36,6 +36,7 @@
#define ACTIVATION_TANH 2
#define ACTIVATION_RELU 3
#define ACTIVATION_SOFTMAX 4
#define ACTIVATION_SWISH 5
typedef struct {
const float *bias;