
github.com/torch/optim.git
author     Martin Vejmelka <vejmelkam@gmail.com>  2015-10-18 13:28:33 +0300
committer  Martin Vejmelka <vejmelkam@gmail.com>  2015-10-18 13:28:33 +0300
commit     c43737e27af414f82263a5875dc2586ae7b4d0a5 (patch)
tree       9e80a4a7cae507bd9488bb834a80a2d5a2605d1c
parent     e5db0346903c2328a80f9fcc7a5ee2ce4a1352e1 (diff)
nag: momentum default is 0.9, added check for non-positive momentum
-rw-r--r--  nag.lua  |  16
1 file changed, 9 insertions(+), 7 deletions(-)
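In practice, the change means a call to optim.nag that omits config.momentum now runs with momentum 0.9, and an explicit momentum of zero or less is rejected with an error. A minimal sketch of both behaviours follows; the quadratic objective, learning rate, and iteration count are illustrative choices, not part of the commit:

require 'torch'
require 'optim'

-- minimize f(x) = (x - 2)^2; opfunc returns the loss and its gradient
local x = torch.Tensor(1):fill(0)
local function opfunc(x)
   local fx = (x[1] - 2)^2
   local dfdx = torch.Tensor(1):fill(2 * (x[1] - 2))
   return fx, dfdx
end

-- momentum is omitted here, so after this commit it defaults to 0.9
local config = {learningRate = 0.1}
for i = 1, 100 do
   optim.nag(opfunc, x, config)
end

-- passing momentum <= 0 now raises:
--   "Momentum must be positive for Nesterov Accelerated Gradient"
-- optim.nag(opfunc, x, {momentum = 0})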
diff --git a/nag.lua b/nag.lua
index 620e449..28c1326 100644
--- a/nag.lua
+++ b/nag.lua
@@ -30,12 +30,16 @@ function optim.nag(opfunc, x, config, state)
    local lr = config.learningRate or 1e-3
    local lrd = config.learningRateDecay or 0
    local wd = config.weightDecay or 0
-   local mom = config.momentum or 0
+   local mom = config.momentum or 0.9
    local damp = config.dampening or mom
    local lrs = config.learningRates
    state.evalCounter = state.evalCounter or 0
    local nevals = state.evalCounter
 
+   if mom <= 0 then
+      error('Momentum must be positive for Nesterov Accelerated Gradient')
+   end
+
    -- (1) evaluate f(x) and df/dx
    -- first step in the direction of the momentum vector
    if not state.x_copy then
@@ -59,12 +63,10 @@ function optim.nag(opfunc, x, config, state)
    local clr = lr / (1 + nevals*lrd)
 
    -- (4) apply momentum
-   if mom ~= 0 then
-      if not state.dfdx then
-         state.dfdx = torch.Tensor():typeAs(dfdx):resizeAs(dfdx):fill(0)
-      else
-         state.dfdx:mul(mom)
-      end
+   if not state.dfdx then
+      state.dfdx = torch.Tensor():typeAs(dfdx):resizeAs(dfdx):fill(0)
+   else
+      state.dfdx:mul(mom)
    end
 
    -- (5) parameter update with single or individual learning rates
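Design note: once non-positive momentum is rejected up front, the old `if mom ~= 0 then` guard around the momentum buffer can never be false, so the second hunk removes it and unconditionally initializes state.dfdx to zeros on the first call and scales it by mom on later calls. This matches the method itself: as the comment in the first hunk says, NAG's first step is taken in the direction of the momentum vector, so the routine has no sensible momentum-free mode.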