diff options
author | Marco Scoffier <github@metm.org> | 2011-09-28 01:20:52 +0400 |
---|---|---|
committer | Marco Scoffier <github@metm.org> | 2011-09-28 01:20:52 +0400 |
commit | 31b97b114f144aa286493c9a83acfd2b8ed32ff7 (patch) | |
tree | c5bc888232e743616edad2f5b38ac41c65bbf9e4 /init.lua | |
parent | 07a8194cf5d9c737af1b291d2b5b057a1f369437 (diff) | |
parent | 2f609a3e56dfb05f3b5246690b341cb44c32ba9e (diff) |
Merge branch 'genetic'
Conflicts:
BatchOptimization.lua
nnx-1.0-1.rockspec
Diffstat (limited to 'init.lua')
-rw-r--r-- | init.lua | 3 |
1 file changed, 2 insertions(+), 1 deletion(-)
@@ -104,6 +104,7 @@ torch.include('nnx', 'Optimization.lua')
 torch.include('nnx', 'BatchOptimization.lua')
 torch.include('nnx', 'SGDOptimization.lua')
 torch.include('nnx', 'LBFGSOptimization.lua')
+torch.include('nnx', 'GeneticSGDOptimization.lua')

 -- trainers:
 torch.include('nnx', 'Trainer.lua')
@@ -212,7 +213,7 @@ function nnx.flattenParameters(parameters)
       if param:storage() == parameters[i]:storage() then
          offsets[k] = offsets[i]
          if storageOffsets[k] ~= storageOffsets[i] or elements[k] ~= elements[i] then
-            error('<nnx.flattenParameters> canot flatten shared weights with different structures')
+            error('<nnx.flattenParameters> cannot flatten shared weights with different structures')
          end
          isView = true
          break