
github.com/torch/optim.git
author     Chen Buskilla <chenb67@gmail.com>   2016-06-15 10:47:23 +0300
committer  Chen Buskilla <chenb67@gmail.com>   2016-06-15 10:47:23 +0300
commit     8adbeedcbc1f6b3bd4afc3f25bb0a77084000d5b (patch)
tree       f532915e9dca0d7e5482c1d16d5d6ed4c8f792ee
parent     2f75fecbbd148877ba4a7345138376b74b2d509f (diff)
add weight decay support to adamax
-rw-r--r--  adamax.lua  6
1 file changed, 6 insertions(+), 0 deletions(-)
diff --git a/adamax.lua b/adamax.lua
index 7075345..c06fddd 100644
--- a/adamax.lua
+++ b/adamax.lua
@@ -28,10 +28,16 @@ function optim.adamax(opfunc, x, config, state)
local beta1 = config.beta1 or 0.9
local beta2 = config.beta2 or 0.999
local epsilon = config.epsilon or 1e-38
+ local wd = config.weightDecay or 0
-- (1) evaluate f(x) and df/dx
local fx, dfdx = opfunc(x)
+ -- (2) weight decay
+ if wd ~= 0 then
+ dfdx:add(wd, x)
+ end
+
-- Initialization
state.t = state.t or 0
-- Exponential moving average of gradient values
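For context, a minimal usage sketch of the option this commit adds. The toy objective, tensor sizes, and the learningRate value below are illustrative assumptions, not part of the commit; only the weightDecay field of the config table is what the patch introduces.

require 'torch'
require 'optim'

-- Toy least-squares objective (assumed for illustration, not from the commit):
-- f(x) = 0.5 * ||x - target||^2, so df/dx = x - target.
local target = torch.randn(10)
local x = torch.zeros(10)

local feval = function(x)
   local diff = x - target
   local fx = 0.5 * diff:dot(diff)
   return fx, diff:clone()
end

local config = {
   learningRate = 0.002,   -- assumed value (adamax default), not from the commit
   weightDecay  = 1e-4,    -- the new option: adds wd * x to the gradient
}

for i = 1, 100 do
   optim.adamax(feval, x, config)   -- x is updated in place
end

The dfdx:add(wd, x) call in the patch adds the gradient of an L2 penalty, 0.5 * wd * ||x||^2, to the objective's gradient before the adamax moment updates, mirroring how weightDecay is handled elsewhere in optim (e.g. optim.sgd).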