diff options
author | Clement Farabet <clement.farabet@gmail.com> | 2012-09-21 23:33:25 +0400 |
---|---|---|
committer | Clement Farabet <clement.farabet@gmail.com> | 2012-09-21 23:33:25 +0400 |
commit | c1cb8b84d6f7a9ab3d76dee256e79667616fa2ac (patch) | |
tree | 630e9d93cc45bf56da84917744c2ba512b730577 /Module.lua | |
parent | c4ec7d3e275074df635b78e2c1558931c5931919 (diff) |
Added an extra corner case to getParameters().
This might finally fix all the possible corners.
Fix by Michael Matthieu.
Diffstat (limited to 'Module.lua')
-rw-r--r-- | Module.lua | 26 |
1 file changed, 19 insertions, 7 deletions
@@ -170,17 +170,29 @@ function Module:getParameters() parameters[k]:stride()) parameters[k]:zero() end - if (flatParameters:sum() ~= 0) then - print("<getParameters()> WARNING: found " - .. flatParameters:sum() .. " holes in the parameters vector (i.e. " - .. flatParameters:sum() .. " storage elements that are unused, this " - .. "might be an issue for your optimization procedure)") + + local cumSumOfHoles = flatParameters:cumsum(1) + local nUsedParameters = nParameters - cumSumOfHoles[#cumSumOfHoles] + local flatUsedParameters = torch.Tensor(nUsedParameters) + local flatUsedStorage = flatUsedParameters:storage() + + for k = 1,#parameters do + local offset = cumSumOfHoles[parameters[k]:storageOffset()] + parameters[k]:set(flatUsedStorage, + parameters[k]:storageOffset() - offset, + parameters[k]:size(), + parameters[k]:stride()) end - for k, v in pairs(storages) do + for k, v in pairs(storages) do -- we could remove this loop flatParameters[{{v+1,v+k:size()}}]:copy(torch.Tensor():set(k)) end - return flatParameters + print('crap') + for k = 1,flatUsedParameters:nElement() do + flatUsedParameters[k] = flatParameters[k+cumSumOfHoles[k] ] + end + print('0') + return flatUsedParameters end -- flatten parameters and gradients |