diff options
author | Clement Farabet <clement.farabet@gmail.com> | 2011-09-29 22:58:55 +0400 |
---|---|---|
committer | Clement Farabet <clement.farabet@gmail.com> | 2011-09-29 22:58:55 +0400 |
commit | be617aeef103ad45f32c79c53fc4fa511ef5dee9 (patch) | |
tree | 69815b7e94e4d8de087d2622c53c5a92bb855ba9 /init.lua | |
parent | 90992ed65a2b7194faa68d4d6bb6047e0e38eb47 (diff) |
Allowing reflatten of already flat parameters.
Diffstat (limited to 'init.lua')
-rw-r--r-- | init.lua | 19 |
1 file changed, 19 insertions, 0 deletions
@@ -195,6 +195,25 @@ function nnx.getGradParameters(...) end function nnx.flattenParameters(parameters) + -- already flat ? + local flat = true + for k = 2,#parameters do + if parameters[k]:storage() ~= parameters[k-1]:storage() then + flat = false + break + end + end + if flat then + local nParameters = 0 + for k,param in ipairs(parameters) do + nParameters = nParameters + param:nElement() + end + flatParameters = torch.Tensor(parameters[1]:storage()) + if nParameters ~= flatParameters:nElement() then + error('<nnx.flattenParameters> weird parameters') + end + return flatParameters + end -- compute offsets of each parameter local offsets = {} local sizes = {} |