diff --git a/dnn/nnet.c b/dnn/nnet.c
index 567362f9..e9439363 100644
--- a/dnn/nnet.c
+++ b/dnn/nnet.c
@@ -80,8 +80,9 @@ void compute_activation(float *output, float *input, int N, int activation)
          output[i] = relu(input[i]);
    } else if (activation == ACTIVATION_SOFTMAX) {
 #ifdef SOFTMAX_HACK
-      for (i=0;i<N;i++)
-      /* ... >1 values caused by the reciprocal approximation. */
-      Y = _mm256_sub_ps(one, _mm256_rcp_ps(_mm256_add_ps(Y, one)));
+      Y = sigmoid8_approx(X);
       _mm256_storeu_ps(&y[i], Y);
    }
    for (;i<N;i++)
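
For context, the deleted intrinsic sequence evaluates the sigmoid through the identity sigmoid(x) = e^x/(1 + e^x) = 1 - 1/(1 + e^x), and the patch collapses it into a single sigmoid8_approx() call. Writing the sigmoid as a subtraction from one keeps the result bounded by 1 even though _mm256_rcp_ps only approximates the reciprocal, which is presumably what the truncated ">1 values" comment refers to. Below is a minimal scalar sketch of that identity; sigmoid_ref is a hypothetical name used only for illustration and is not part of this patch or the library.

#include <math.h>

/* Hypothetical scalar reference for the identity behind the removed AVX code:
 * sigmoid(x) = 1/(1 + e^-x) = e^x/(1 + e^x) = 1 - 1/(1 + e^x).
 * Computing it as 1 - 1/(1 + e^x) keeps the result <= 1 even when the
 * reciprocal is only approximate, as with _mm256_rcp_ps. */
static float sigmoid_ref(float x)
{
   float y = expf(x);            /* stand-in for the vectorized exp approximation */
   return 1.f - 1.f/(1.f + y);   /* bounded above by 1 by construction */
}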