Welcome to the mirror list, hosted at ThFree Co, Russian Federation.

github.com/torch/optim.git - Unnamed repository; edit this file 'description' to name the repository.
summaryrefslogtreecommitdiff
diff options
context:
space:
mode:
authorSoumith Chintala <soumith@gmail.com>2016-06-03 17:42:30 +0300
committerSoumith Chintala <soumith@gmail.com>2016-06-03 17:42:30 +0300
commit6759dc8a210b1f93184a23bda9c4ca5eb8c2b71a (patch)
tree8e2b911f279db71a0262063197d4ad24ad312657
parent76e666f78bde9522ca20ac3426370416e23d0283 (diff)
parenta4a9801c8004ea49a218240ae96468a4636ac9e4 (diff)
Merge pull request #115 from torch/revert-113-sgd-lrs-fix
Revert "Fix bug with sgd individual learning rates"
-rw-r--r--sgd.lua11
1 file changed, 3 insertions, 8 deletions
diff --git a/sgd.lua b/sgd.lua
index d96bd4b..ea13c55 100644
--- a/sgd.lua
+++ b/sgd.lua
@@ -69,20 +69,15 @@ function optim.sgd(opfunc, x, config, state)
end
-- (4) learning rate decay (annealing)
- local clr, clrs
- if lrs then
- clrs = lrs / (1 + nevals*lrd)
- else
- clr = lr / (1 + nevals*lrd)
- end
+ local clr = lr / (1 + nevals*lrd)
-- (5) parameter update with single or individual learning rates
if lrs then
if not state.deltaParameters then
state.deltaParameters = torch.Tensor():typeAs(x):resizeAs(dfdx)
end
- state.deltaParameters:copy(clrs):cmul(dfdx)
- x:add(-state.deltaParameters)
+ state.deltaParameters:copy(lrs):cmul(dfdx)
+ x:add(-clr, state.deltaParameters)
else
x:add(-clr, dfdx)
end