Welcome to mirror list, hosted at ThFree Co, Russian Federation.

github.com/clementfarabet/lua---nnx.git - Unnamed repository; edit this file 'description' to name the repository.
summaryrefslogtreecommitdiff
diff options
context:
space:
mode:
authorClement Farabet <clement.farabet@gmail.com>2011-09-04 11:04:22 +0400
committerClement Farabet <clement.farabet@gmail.com>2011-09-04 11:04:22 +0400
commitd82237992e1f8ec21ff937a7117fa439e2e8c068 (patch)
treefbb57980f052552211e32c0a1a5a67a4265e3aa8
parent816f1bf40219624d87a9f8ea6bc8d50ae2b7e7c7 (diff)
Using new sfork function.
-rw-r--r--BatchOptimization.lua14
1 file changed, 2 insertions, 12 deletions
diff --git a/BatchOptimization.lua b/BatchOptimization.lua
index ae7a6d9..ebf2144 100644
--- a/BatchOptimization.lua
+++ b/BatchOptimization.lua
@@ -14,8 +14,6 @@ function Batch:__init(...)
help='a criterion to estimate the error', req=true},
{arg='parallelize', type='number',
help='parallelize onto N cores (experimental!)', default=1},
- {arg='servers', type='number',
- help='server list, to parallelize over the network (experimental!)'},
{arg='verbose', type='number',
help='verbose level during training [0-2]', default=0}
)
@@ -23,7 +21,7 @@ function Batch:__init(...)
self.gradParameters = nnx.flattenParameters(nnx.getGradParameters(self.module))
self.evalCounter = 0
self.sampleCounter = 0
- if self.parallelize > 1 or self.servers then
+ if self.parallelize > 1 then
self:setup_mapreduce()
end
self.P = self.parallelize
@@ -308,15 +306,7 @@ function Batch:setup_mapreduce ()
]]
-- (2) startup all workers
- if self.servers then
- self.parallelize = 0
- for _,server in ipairs(self.servers) do
- self.parallelize = self.parallelize + server[1]
- end
- parallel.nfork(self.servers)
- else
- parallel.nfork(self.parallelize)
- end
+ parallel.sfork(self.parallelize)
parallel.children:exec(worker_code)
-- (3) and send them the module + criterion architecture