diff options
author | Marco Scoffier <github@metm.org> | 2011-11-03 07:44:33 +0400 |
---|---|---|
committer | Marco Scoffier <github@metm.org> | 2011-11-03 07:44:33 +0400 |
commit | 372d8bf80c88040a4b18a9b6a72eb3e8914b88a6 (patch) | |
tree | f9c659e0a6a875b19651c65d0bfd5162588f2220 /ASGDOptimization.lua | |
parent | 947a61dcea4d0ca854953fa3168f0a9b71f90e6f (diff) |
added test() function to ASGD
Diffstat (limited to 'ASGDOptimization.lua')
-rw-r--r-- | ASGDOptimization.lua | 22 |
1 file changed, 21 insertions, 1 deletion
diff --git a/ASGDOptimization.lua b/ASGDOptimization.lua index 892d740..03a2058 100644 --- a/ASGDOptimization.lua +++ b/ASGDOptimization.lua @@ -64,4 +64,24 @@ function ASGD:optimize() -- (4c) update mu_t -- mu_t = 1/max(1,t-t0) self.mu_t = 1 / math.max(1,self.t - self.t0) -end
end

-- ASGD maintains an averaged copy of the parameters (self.a) alongside
-- the online parameters. This evaluates the model with those averaged
-- parameters instead of the current ones. Best run on batches, since
-- the full parameter vector must be copied twice per call.
function ASGD:test(_inputs, _targets) -- function test
   -- (0) lazily allocate a scratch tensor shaped like the parameter
   --     vector, then stash the online parameters in it
   if not self.backup then
      self.backup = self.parameters.new():resizeAs(self.parameters)
   end
   self.backup:copy(self.parameters)
   -- (1) swap the averaged parameters into the model
   self.parameters:copy(self.a)
   -- (2) evaluate with the averaged parameters
   self.output = self.module:forward(_inputs)
   self.error = self.criterion:forward(self.output, _targets)
   -- (3) restore the online parameters so training can resume
   self.parameters:copy(self.backup)
   return self.error
end