author | Jean-Marc Valin <jean-marc.valin@octasic.com> | 2011-05-28 01:11:59 +0400
---|---|---
committer | Jean-Marc Valin <jean-marc.valin@octasic.com> | 2011-05-28 01:11:59 +0400
commit | 3a63272142d5d9199885c21c47ec297d034a6324 (patch) | |
tree | 322b1c53b030af79f0c58d350b113ac79b8e5cb6 | |
parent | d9e062e0736221a3b46b6191f415393cc921bc1d (diff) | |
Fixes a few training issues
-rw-r--r-- | src/mlp_train.c | 12 |
1 files changed, 6 insertions, 6 deletions
```diff
diff --git a/src/mlp_train.c b/src/mlp_train.c
index 2a8a23a1..328a136b 100644
--- a/src/mlp_train.c
+++ b/src/mlp_train.c
@@ -149,7 +149,7 @@ double compute_gradient(MLPTrain *net, float *inputs, float *outputs, int nbSamp
          netOut[i] = tansig_approx(sum);
          error[i] = out[i] - netOut[i];
          rms += error[i]*error[i];
-         *error_rate += fabs(error[i])>.5;
+         *error_rate += fabs(error[i])>1;
       }
       /* Back-propagate error */
       for (i=0;i<outDim;i++)
@@ -301,22 +301,22 @@ float mlp_train_backprop(MLPTrain *net, float *inputs, float *outputs, int nbSam
       }
       float mean_rate = 0, min_rate = 1e10;
-      rms = sqrt(rms/(outDim*nbSamples));
+      rms = (rms/(outDim*nbSamples));
       error_rate = (error_rate/(outDim*nbSamples));
       fprintf (stderr, "%f (%f) ", error_rate, last_rms);
       for (i=0;i<W0_size;i++)
       {
-         if (W0_oldgrad[i]*W0_grad[i] >= 0)
+         if (W0_oldgrad[i]*W0_grad[i] > 0)
             W0_rate[i] *= 1.01;
-         else
+         else if (W0_oldgrad[i]*W0_grad[i] < 0)
             W0_rate[i] *= .9;
          mean_rate += W0_rate[i];
          if (W0_rate[i] < min_rate)
             min_rate = W0_rate[i];
          if (W0_rate[i] < 1e-15)
             W0_rate[i] = 1e-15;
-         if (W0_rate[i] > 1)
-            W0_rate[i] = 1;
+         if (W0_rate[i] > .01)
+            W0_rate[i] = .01;
          W0_oldgrad[i] = W0_grad[i];
          W0_old2[i] = W0_old[i];
          W0_old[i] = W0[i];
```
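For readers skimming the patch: the first hunk widens the error-counting threshold from .5 to 1, dropping the `sqrt` leaves `rms` holding the mean squared error rather than its root, and the second hunk makes the per-weight learning-rate update purely sign-driven (in the spirit of Rprop), so a zero gradient product now leaves the rate untouched and the rate ceiling falls from 1 to .01. The sketch below isolates that rate rule only; `update_rates`, its parameters, and the demo values are illustrative and are not part of mlp_train.c.

```c
#include <stdio.h>

/* Hypothetical helper (not in mlp_train.c) sketching the per-weight rate
 * update from the patch: grow the rate by 1% while the gradient keeps its
 * sign, shrink it by 10% on a sign flip, leave it alone when the product
 * is zero, then clamp it to [rate_min, rate_max]. */
static void update_rates(float *rate, float *oldgrad, const float *grad,
                         int n, float rate_min, float rate_max)
{
   int i;
   for (i=0;i<n;i++)
   {
      float s = oldgrad[i]*grad[i];
      if (s > 0)                /* same sign: cautiously speed up */
         rate[i] *= 1.01;
      else if (s < 0)           /* sign flip: back off */
         rate[i] *= .9;
      /* s == 0: keep the rate (this is what changing ">=" to ">" buys) */
      if (rate[i] < rate_min)
         rate[i] = rate_min;
      if (rate[i] > rate_max)
         rate[i] = rate_max;
      oldgrad[i] = grad[i];     /* remember the gradient for the next pass */
   }
}

int main(void)
{
   float rate[3]    = {1e-3f, 1e-3f, 1e-3f};
   float oldgrad[3] = { 0.5f, -0.2f,  0.0f};
   float grad[3]    = { 0.4f,  0.3f,  0.1f};
   int i;
   /* The bounds mirror the values used in the patch (1e-15 and .01). */
   update_rates(rate, oldgrad, grad, 3, 1e-15f, .01f);
   for (i=0;i<3;i++)
      printf("rate[%d] = %g\n", i, rate[i]);
   return 0;
}
```

Both the zero-product case and the lower ceiling make the per-weight steps more conservative, which plausibly addresses the "training issues" named in the commit message.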