Welcome to mirror list, hosted at ThFree Co, Russian Federation.

github.com/clementfarabet/lua---nnx.git - Unnamed repository; edit this file 'description' to name the repository.
summaryrefslogtreecommitdiff
diff options
context:
space:
mode:
authorClement Farabet <clement.farabet@gmail.com>2011-07-07 10:25:08 +0400
committerClement Farabet <clement.farabet@gmail.com>2011-07-07 10:25:08 +0400
commit603dae08fd2eb10964ee9e546b5e90d7528aaf6e (patch)
tree17caa733a045e739b094683005674a0fcf706dc4 /StochasticTrainer.lua
parentc20ef531db79a83bb751b65798e4d1b77dc7833d (diff)
Added old DataSet classes.
Diffstat (limited to 'StochasticTrainer.lua')
-rw-r--r--StochasticTrainer.lua8
1 file changed, 4 insertions, 4 deletions
diff --git a/StochasticTrainer.lua b/StochasticTrainer.lua
index 526ec49..a1a78bc 100644
--- a/StochasticTrainer.lua
+++ b/StochasticTrainer.lua
@@ -23,7 +23,7 @@ function StochasticTrainer:__init(...)
{arg='learningRateDecay', type='number', help='learning rate decay (rate = rate * (1-decay), at each epoch)', default=0},
{arg='weightDecay', type='number', help='amount of weight decay (W = W - decay*W)', default=0},
{arg='momentum', type='number', help='amount of momentum on weights (dE/W = dE/dW + momentum*prev(dE/dW))', default=0},
- {arg='maxIteration', type='number', help='maximum number of epochs', default=50},
+ {arg='maxEpoch', type='number', help='maximum number of epochs', default=50},
{arg='maxTarget', type='boolean', help='replaces an CxHxW target map by a HxN target of max values (for NLL criterions)', default=false},
{arg='dispProgress', type='boolean', help='display a progress bar during training/testing', default=true},
@@ -65,7 +65,7 @@ function StochasticTrainer:train(dataset)
for t = 1,dataset:size() do
-- disp progress
if self.dispProgress then
- xlua.dispProgress(t, dataset:size())
+ xlua.progress(t, dataset:size())
end
-- load new sample
@@ -117,7 +117,7 @@ function StochasticTrainer:train(dataset)
end
-- weight decay ?
- if self.weightDecay ~= 0 then
+ if self.weightDecay ~= 0 and module.decayParameters then
module:decayParameters(self.weightDecay)
end
@@ -175,7 +175,7 @@ function StochasticTrainer:test(dataset)
for t = 1,dataset:size() do
-- disp progress
if self.dispProgress then
- xlua.dispProgress(t, dataset:size())
+ xlua.progress(t, dataset:size())
end
-- get new sample