author     Soumith Chintala <soumith@gmail.com>    2016-06-03 17:42:30 +0300
committer  Soumith Chintala <soumith@gmail.com>    2016-06-03 17:42:30 +0300
commit     6759dc8a210b1f93184a23bda9c4ca5eb8c2b71a (patch)
tree       8e2b911f279db71a0262063197d4ad24ad312657
parent     76e666f78bde9522ca20ac3426370416e23d0283 (diff)
parent     a4a9801c8004ea49a218240ae96468a4636ac9e4 (diff)
Merge pull request #115 from torch/revert-113-sgd-lrs-fix
Revert "Fix bug with sgd individual learning rates"
-rw-r--r--  sgd.lua | 11 +++--------
1 file changed, 3 insertions(+), 8 deletions(-)
@@ -69,20 +69,15 @@ function optim.sgd(opfunc, x, config, state)
    end

    -- (4) learning rate decay (annealing)
-   local clr, clrs
-   if lrs then
-      clrs = lrs / (1 + nevals*lrd)
-   else
-      clr = lr / (1 + nevals*lrd)
-   end
+   local clr = lr / (1 + nevals*lrd)

    -- (5) parameter update with single or individual learning rates
    if lrs then
       if not state.deltaParameters then
          state.deltaParameters = torch.Tensor():typeAs(x):resizeAs(dfdx)
       end
-      state.deltaParameters:copy(clrs):cmul(dfdx)
-      x:add(-state.deltaParameters)
+      state.deltaParameters:copy(lrs):cmul(dfdx)
+      x:add(-clr, state.deltaParameters)
    else
       x:add(-clr, dfdx)
    end
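After this revert, `lrs` again acts as a set of per-parameter multipliers applied on top of the annealed global rate, i.e. x <- x - clr * (lrs .* dfdx) with clr = lr / (1 + nevals*lrd); the reverted change had instead annealed `lrs` directly and dropped the global `lr` from the individual-rate path. Below is a minimal standalone sketch of the restored update in plain Torch7; the values of `lr`, `lrd`, `nevals`, `lrs`, `dfdx`, and `x` are made up for illustration and are not part of the optim API.

```lua
-- Hypothetical sketch of the restored SGD step with individual
-- learning rates; all values below are illustrative.
local torch = require 'torch'

local lr, lrd, nevals = 0.1, 1e-4, 100      -- global rate, decay, #evaluations
local lrs  = torch.Tensor{1.0, 0.5, 2.0}    -- per-parameter rate multipliers
local dfdx = torch.Tensor{0.3, -0.2, 0.1}   -- gradient at x
local x    = torch.Tensor{1.0, 1.0, 1.0}    -- parameters

-- step (4): annealed global learning rate
local clr = lr / (1 + nevals*lrd)

-- step (5): x <- x - clr * (lrs .* dfdx)
local delta = torch.Tensor():typeAs(x):resizeAs(dfdx)
delta:copy(lrs):cmul(dfdx)   -- elementwise lrs .* dfdx
x:add(-clr, delta)           -- fused scale-and-add
print(x)                     -- updated parameters
```

Note that `x:add(-clr, delta)` fuses the scaling by `-clr` and the addition into one call; in the real code the scratch tensor is cached as `state.deltaParameters` so no allocation happens per step.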