
github.com/clementfarabet/lua---nnx.git
author    Marco Scoffier <github@metm.org>  2011-09-28 01:20:52 +0400
committer Marco Scoffier <github@metm.org>  2011-09-28 01:20:52 +0400
commit 31b97b114f144aa286493c9a83acfd2b8ed32ff7 (patch)
tree   c5bc888232e743616edad2f5b38ac41c65bbf9e4 /init.lua
parent 07a8194cf5d9c737af1b291d2b5b057a1f369437 (diff)
parent 2f609a3e56dfb05f3b5246690b341cb44c32ba9e (diff)
Merge branch 'genetic'

Conflicts:
	BatchOptimization.lua
	nnx-1.0-1.rockspec
Diffstat (limited to 'init.lua')
-rw-r--r--  init.lua  3
1 file changed, 2 insertions(+), 1 deletion(-)
diff --git a/init.lua b/init.lua
index 6bc3617..ef4b030 100644
--- a/init.lua
+++ b/init.lua
@@ -104,6 +104,7 @@ torch.include('nnx', 'Optimization.lua')
 torch.include('nnx', 'BatchOptimization.lua')
 torch.include('nnx', 'SGDOptimization.lua')
 torch.include('nnx', 'LBFGSOptimization.lua')
+torch.include('nnx', 'GeneticSGDOptimization.lua')
 
 -- trainers:
 torch.include('nnx', 'Trainer.lua')
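
The added line registers GeneticSGDOptimization alongside the other optimizers loaded by init.lua. A minimal usage sketch, assuming the new module follows the same table-constructor convention as its siblings such as SGDOptimization; the field names below are assumptions, not the module's documented API:

  require 'nnx'
  -- build a small model and loss to optimize
  local module    = nn.Sequential()
  module:add(nn.Linear(10, 2))
  local criterion = nn.MSECriterion()
  -- assumed constructor: the {module=, criterion=} table convention
  -- used by the sibling optimizers; not verified against this commit
  local optimizer = nn.GeneticSGDOptimization{module    = module,
                                              criterion = criterion}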
@@ -212,7 +213,7 @@ function nnx.flattenParameters(parameters)
       if param:storage() == parameters[i]:storage() then
          offsets[k] = offsets[i]
          if storageOffsets[k] ~= storageOffsets[i] or elements[k] ~= elements[i] then
-            error('<nnx.flattenParameters> canot flatten shared weights with different structures')
+            error('<nnx.flattenParameters> cannot flatten shared weights with different structures')
          end
          isView = true
          break
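
The message fixed in the second hunk guards nnx.flattenParameters, which maps every parameter tensor onto one contiguous storage. As the surrounding code shows, two parameters that share a storage must also agree on storage offset and element count; otherwise they cannot be views of the same flat segment. A plain-Torch7 sketch of the mismatch the error rejects (variable names are illustrative):

  local s  = torch.Storage(8)
  -- identical views of one storage: safe to flatten as a single segment
  local w1 = torch.Tensor(s, 1, torch.LongStorage{4})
  local w2 = torch.Tensor(s, 1, torch.LongStorage{4})
  -- same storage, different offset: the structure check above fails
  local w3 = torch.Tensor(s, 3, torch.LongStorage{4})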