
github.com/clementfarabet/lua---nnx.git
author     Clement Farabet <clement.farabet@gmail.com>    2011-08-24 23:08:47 +0400
committer  Clement Farabet <clement.farabet@gmail.com>    2011-08-24 23:08:47 +0400
commit     888996a806590abecc28beb27762175ccded6db8 (patch)
tree       7bebb66eab5f1f2de52ae9dc1a76b653005577ab
parent     690d2d89dd3c709cc1ac0b2e85c4cd9eaf3e9530 (diff)
Various cleanups.
-rw-r--r--  ConfusionMatrix.lua  2
-rw-r--r--  OnlineTrainer.lua    2
-rw-r--r--  lbfgs.c              1
3 files changed, 3 insertions(+), 2 deletions(-)
diff --git a/ConfusionMatrix.lua b/ConfusionMatrix.lua
index ca514f1..d9809df 100644
--- a/ConfusionMatrix.lua
+++ b/ConfusionMatrix.lua
@@ -72,7 +72,7 @@ function ConfusionMatrix:__tostring__()
          str = str .. ' ['
       end
       for p = 1,nclasses do
-         str = str .. '' .. string.format('%8d\t', self.mat[t][p])
+         str = str .. '' .. string.format('%5d ', self.mat[t][p])
       end
       if self.classes then
          if t == nclasses then
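The only change here is the cell format in ConfusionMatrix:__tostring__: '%8d\t' (8-wide, tab-separated) becomes '%5d ' (5-wide, space-separated), which keeps printed rows compact and independent of terminal tab stops. A minimal Lua sketch of the difference, using made-up per-class counts:

-- Minimal sketch (not from the repository): compare the old and new cell
-- formats on one hypothetical row of confusion-matrix counts.
local row = {3, 127, 0, 4521}

local old, new = '', ''
for _, v in ipairs(row) do
   old = old .. string.format('%8d\t', v)   -- old: 8-wide, tab-separated
   new = new .. string.format('%5d ', v)    -- new: 5-wide, space-separated
end
print('[' .. old .. ']')   -- wide columns, layout depends on tab stops
print('[' .. new .. ']')   -- compact, fixed-width columns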
diff --git a/OnlineTrainer.lua b/OnlineTrainer.lua
index 1c47d5e..2b7f2b5 100644
--- a/OnlineTrainer.lua
+++ b/OnlineTrainer.lua
@@ -67,7 +67,7 @@ function OnlineTrainer:train(dataset)
    while true do
       print('<trainer> on training set:')
-      print("<trainer> stochastic gradient descent epoch # " .. self.epoch)
+      print("<trainer> online epoch # " .. self.epoch .. '[batchSize = ' .. self.batchSize .. ']')
       self.time = sys.clock()
       self.currentError = 0
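The per-epoch banner no longer claims plain stochastic gradient descent and now also reports the batch size. A small Lua sketch of what the new line prints, with hypothetical epoch and batchSize values:

-- Hypothetical values, standalone version of the new banner line.
local epoch, batchSize = 3, 16
print("<trainer> online epoch # " .. epoch .. '[batchSize = ' .. batchSize .. ']')
-- prints: <trainer> online epoch # 3[batchSize = 16]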
diff --git a/lbfgs.c b/lbfgs.c
index 13c8cef..2970509 100644
--- a/lbfgs.c
+++ b/lbfgs.c
@@ -1422,6 +1422,7 @@ int lbfgs_run(lua_State *L) {
   // initialize the parameters for the L-BFGS optimization
   lbfgs_parameter_init(&param);
   param.max_iterations = lua_tonumber(L, 3);
+  param.linesearch = LBFGS_LINESEARCH_BACKTRACKING;
   // Start the L-BFGS optimization; this will invoke the callback functions
   // evaluate() and progress() when necessary.
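For context, lbfgs_parameter_init() fills param with libLBFGS defaults, where the line search is the More-Thuente method; the added line switches it to the backtracking line search. Below is a small standalone libLBFGS sketch, independent of the repository's lbfgs_run Lua binding and using a made-up quadratic objective, showing where that parameter sits in a typical call:

/* Standalone libLBFGS sketch (not the repository's binding): minimize
 * f(x) = sum_i (x_i - 1)^2 with the backtracking line search enabled the
 * same way as in the patch above. */
#include <stdio.h>
#include <lbfgs.h>

static lbfgsfloatval_t evaluate(void *instance, const lbfgsfloatval_t *x,
                                lbfgsfloatval_t *g, const int n,
                                const lbfgsfloatval_t step) {
    (void)instance; (void)step;
    lbfgsfloatval_t fx = 0.0;
    for (int i = 0; i < n; i++) {
        fx += (x[i] - 1.0) * (x[i] - 1.0);   /* objective value */
        g[i] = 2.0 * (x[i] - 1.0);           /* gradient */
    }
    return fx;
}

int main(void) {
    const int n = 10;
    lbfgsfloatval_t fx;
    lbfgsfloatval_t *x = lbfgs_malloc(n);
    for (int i = 0; i < n; i++) x[i] = 0.0;   /* starting point */

    lbfgs_parameter_t param;
    lbfgs_parameter_init(&param);             /* defaults: More-Thuente search */
    param.max_iterations = 100;
    param.linesearch = LBFGS_LINESEARCH_BACKTRACKING;  /* same switch as the patch */

    int ret = lbfgs(n, x, &fx, evaluate, NULL, NULL, &param);
    printf("lbfgs returned %d, f(x*) = %f\n", ret, fx);
    lbfgs_free(x);
    return 0;
}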