stupid uninitialized variable in the mlp training caught by cppcheck

Jean-Marc Valin 2013-11-08 21:47:44 -05:00
parent 5174817d36
commit a599ccd217


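The two hunks below move the loop that zeroes error_rate in compute_gradient(): in the old code the loop ran before outDim had been assigned from net->topo[2], so its bound was read while still uninitialized; the fix defers the loop until after the layer dimensions are set.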
@@ -121,8 +121,6 @@ double compute_gradient(MLPTrain *net, float *inputs, float *outputs, int nbSamp
    double netOut[MAX_NEURONS];
    double error[MAX_NEURONS];
 
-   for (i=0;i<outDim;i++)
-      error_rate[i] = 0;
    topo = net->topo;
    inDim = net->topo[0];
    hiddenDim = net->topo[1];
@@ -135,6 +133,8 @@ double compute_gradient(MLPTrain *net, float *inputs, float *outputs, int nbSamp
    memset(W1_grad, 0, W1_size*sizeof(double));
    for (i=0;i<outDim;i++)
       netOut[i] = outputs[i];
+   for (i=0;i<outDim;i++)
+      error_rate[i] = 0;
    for (s=0;s<nbSamples;s++)
    {
       float *in, *out;
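
As an illustration only (the struct and function names below are invented for this sketch, not the actual mlp_train.c definitions), the pattern cppcheck flags and the reordering the commit applies look roughly like this in isolation:

/* Sketch of the pattern fixed above; names are illustrative only. */
struct net {
   int topo[3];                       /* input, hidden and output sizes */
};

/* Before the fix: the loop bound outDim is read before it has been
 * assigned from net->topo[2], which is the uninitialized read that
 * cppcheck reports. */
static void zero_error_rate_buggy(const struct net *net, double *error_rate)
{
   int i, outDim;
   for (i = 0; i < outDim; i++)       /* BUG: outDim is uninitialized here */
      error_rate[i] = 0;
   outDim = net->topo[2];             /* the assignment comes too late */
   (void)outDim;
}

/* After the fix: read the output dimension first, then zero the
 * accumulator, mirroring the reordering in the diff above. */
static void zero_error_rate_fixed(const struct net *net, double *error_rate)
{
   int i, outDim = net->topo[2];
   for (i = 0; i < outDim; i++)
      error_rate[i] = 0;
}

The buggy ordering may often appear to work when the stack happens to hold a harmless value, but the loop bound is indeterminate; per the commit message, a static-analysis pass with cppcheck over the source file is what surfaced the warning.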