Welcome to mirror list, hosted at ThFree Co, Russian Federation.

github.com/clementfarabet/lua---nnx.git - Unnamed repository; edit this file 'description' to name the repository.
summaryrefslogtreecommitdiff
diff options
context:
space:
mode:
authorClement Farabet <clement.farabet@gmail.com>2011-09-29 22:58:55 +0400
committerClement Farabet <clement.farabet@gmail.com>2011-09-29 22:58:55 +0400
commitbe617aeef103ad45f32c79c53fc4fa511ef5dee9 (patch)
tree69815b7e94e4d8de087d2622c53c5a92bb855ba9 /init.lua
parent90992ed65a2b7194faa68d4d6bb6047e0e38eb47 (diff)
Allowing reflatten of already flat parameters.
Diffstat (limited to 'init.lua')
-rw-r--r--init.lua19
1 file changed, 19 insertions, 0 deletions
diff --git a/init.lua b/init.lua
index c1cae5f..28d3d8a 100644
--- a/init.lua
+++ b/init.lua
@@ -195,6 +195,25 @@ function nnx.getGradParameters(...)
end
function nnx.flattenParameters(parameters)
+ -- already flat ?
+ local flat = true
+ for k = 2,#parameters do
+ if parameters[k]:storage() ~= parameters[k-1]:storage() then
+ flat = false
+ break
+ end
+ end
+ if flat then
+ local nParameters = 0
+ for k,param in ipairs(parameters) do
+ nParameters = nParameters + param:nElement()
+ end
+ flatParameters = torch.Tensor(parameters[1]:storage())
+ if nParameters ~= flatParameters:nElement() then
+ error('<nnx.flattenParameters> weird parameters')
+ end
+ return flatParameters
+ end
-- compute offsets of each parameter
local offsets = {}
local sizes = {}