Mirror of https://github.com/xiph/opus.git, synced 2025-06-02 00:27:43 +00:00
Add swish activation support
parent 93d6c3975a
commit 57f5681987
2 changed files with 14 additions and 0 deletions
dnn/nnet.c | 13 +++++++++++++
dnn/nnet.h |  1 +
dnn/nnet.c
@@ -46,6 +46,17 @@
 #define SOFTMAX_HACK
 
+#define MAX_ACTIVATIONS (4096)
+
+static OPUS_INLINE void vec_swish(float *y, const float *x, int N)
+{
+   int i;
+   float tmp[MAX_ACTIVATIONS];
+   celt_assert(N <= MAX_ACTIVATIONS);
+   vec_sigmoid(tmp, x, N);
+   for (i=0;i<N;i++)
+      y[i] = x[i]*tmp[i];
+}
 
 static OPUS_INLINE float relu(float x)
 {
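The new vec_swish() computes the swish (SiLU) activation, swish(x) = x * sigmoid(x): it fills a temporary buffer with sigmoid(x) via the existing vec_sigmoid() and then multiplies elementwise by the input. For reference, here is a minimal scalar sketch of the same math, independent of the patch's vectorized path; swish_ref() is a hypothetical helper, not part of the codebase:

#include <math.h>
#include <stdio.h>

/* Scalar reference: swish(x) = x * sigmoid(x) = x / (1 + e^-x). */
static float swish_ref(float x)
{
   return x / (1.f + expf(-x));
}

int main(void)
{
   float xs[5] = {-4.f, -1.f, 0.f, 1.f, 4.f};
   int i;
   for (i = 0; i < 5; i++)
      printf("swish(%+.1f) = %+.6f\n", xs[i], swish_ref(xs[i]));
   return 0;
}

Bounding N by MAX_ACTIVATIONS (4096) lets the sigmoid temporary live on the stack, and celt_assert() enforces that bound in debug builds.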
@@ -75,6 +86,8 @@ void compute_activation(float *output, const float *input, int N, int activation)
       vec_sigmoid(output, input, N);
    } else if (activation == ACTIVATION_TANH) {
       vec_tanh(output, input, N);
+   } else if (activation == ACTIVATION_SWISH) {
+      vec_swish(output, input, N);
    } else if (activation == ACTIVATION_RELU) {
       for (i=0;i<N;i++)
          output[i] = relu(input[i]);
dnn/nnet.h
@@ -36,6 +36,7 @@
 #define ACTIVATION_TANH 2
 #define ACTIVATION_RELU 3
 #define ACTIVATION_SOFTMAX 4
+#define ACTIVATION_SWISH 5
 
 typedef struct {
   const float *bias;
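With ACTIVATION_SWISH defined in dnn/nnet.h, callers can request swish through the existing compute_activation() entry point, whose signature appears in the hunk header above. A hypothetical caller might look like the sketch below; the "nnet.h" include path and the standalone main() are assumptions for illustration:

#include <stdio.h>
#include "nnet.h"   /* assumed include path for the dnn headers */

int main(void)
{
   float in[4] = {-2.f, -0.5f, 0.5f, 2.f};
   float out[4];
   int i;
   /* Dispatches to vec_swish() via the new branch in compute_activation(). */
   compute_activation(out, in, 4, ACTIVATION_SWISH);
   for (i = 0; i < 4; i++)
      printf("%+.2f -> %+.6f\n", in[i], out[i]);
   return 0;
}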