
github.com/clementfarabet/lua---nnx.git
author    Clement Farabet <clement.farabet@gmail.com>  2011-09-21 01:37:17 +0400
committer Clement Farabet <clement.farabet@gmail.com>  2011-09-21 01:37:17 +0400
commit    b2388376c573e7cff9d3f24b6ae4c017c4faa18f (patch)
tree      a71b2ac3659b0cef54673001314946dc83638de4
parent    9c423052291f79903ae44de99dda99590068548c (diff)
Fixed MAJOR bug: the new backward() doesn't update weights anymore,
so any module should call its accGradParameters().
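Under that convention, backward() computes the gradient with respect to the input and accumulates parameter gradients through accGradParameters(); the weights are only touched when updateParameters() is called. A minimal training-step sketch under that assumption (nn.Linear, nn.MSECriterion, the tensor sizes, and the learning rate below are illustrative, not part of this commit; SpatialFovea follows the same contract):

require 'torch'
require 'nn'

-- Illustrative module and data (assumed, for demonstration only)
local module    = nn.Linear(10, 2)
local criterion = nn.MSECriterion()
local input     = torch.randn(10)
local target    = torch.randn(2)

module:zeroGradParameters()                 -- clear previously accumulated gradients
local output  = module:forward(input)
local loss    = criterion:forward(output, target)
local gradOut = criterion:backward(output, target)
module:backward(input, gradOut)             -- accumulates gradients via accGradParameters()
module:updateParameters(0.01)               -- only now are the weights actually changed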
-rw-r--r--  SpatialFovea.lua  7
1 file changed, 7 insertions, 0 deletions
diff --git a/SpatialFovea.lua b/SpatialFovea.lua
index 11215c5..b9e3fe4 100644
--- a/SpatialFovea.lua
+++ b/SpatialFovea.lua
@@ -314,6 +314,13 @@ function SpatialFovea:zeroGradParameters()
    end
 end
+function SpatialFovea:accGradParameters(input, gradOutput, scale)
+   -- accumulate gradients for all processors
+   for idx = 1,#self.processors do
+      self.gradNarrowed[idx] = self.processors[idx]:accGradParameters(self.narrowed[idx], self.gradProcessed[idx], scale)
+   end
+end
+
 function SpatialFovea:updateParameters(learningRate)
    for idx = 1,#self.processors do
       self.processors[idx]:updateParameters(learningRate)