github.com/clementfarabet/lua---nnx.git
author     Clement Farabet <clement.farabet@gmail.com>   2011-10-02 08:39:02 +0400
committer  Clement Farabet <clement.farabet@gmail.com>   2011-10-02 08:39:02 +0400
commit     5e5c3f3b1676f7633b2ed1a2896ca2ef93dbd423 (patch)
tree       38b00f5f40bb45fcc71cad2e16eb6404d2181386 /init.lua
parent     c8406078fc871a8c1e0b7b0c967053859bf9d9f7 (diff)
More careful auto re-alloc for flattenParameters.
Diffstat (limited to 'init.lua')
-rw-r--r--  init.lua | 4
1 file changed, 2 insertions(+), 2 deletions(-)
diff --git a/init.lua b/init.lua
index bb1c160..78e758c 100644
--- a/init.lua
+++ b/init.lua
@@ -209,7 +209,7 @@ function nnx.flattenParameters(parameters)
for k,param in ipairs(parameters) do
nParameters = nParameters + param:nElement()
end
- flatParameters = torch.Tensor(parameters[1]:storage())
+ flatParameters = parameters[1].new(parameters[1]:storage())
if nParameters ~= flatParameters:nElement() then
error('<nnx.flattenParameters> weird parameters')
end
@@ -245,7 +245,7 @@ function nnx.flattenParameters(parameters)
end
end
-- create flat vector
- local flatParameters = torch.Tensor(nParameters)
+ local flatParameters = parameters[1].new(nParameters)
local storage = flatParameters:storage()
-- reallocate all parameters in flat vector
for i = 1,#parameters do
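
The point of the change: torch.Tensor(...) always allocates the process-wide default tensor type, while tensor.new(...) allocates a tensor of the same type as the tensor it is called on. A minimal sketch of the difference, assuming stock Torch7 where the default type is torch.DoubleTensor (the variable names are illustrative, not from the patch):

-- Minimal sketch (not from the patch): shows why the flat buffer is now
-- allocated with parameters[1].new(...) instead of torch.Tensor(...).
require 'torch'

local params = { torch.FloatTensor(10):fill(1), torch.FloatTensor(5):fill(2) }

-- Old allocation: uses the default tensor type (normally torch.DoubleTensor),
-- so the flat buffer no longer matches the parameters' type.
local flatOld = torch.Tensor(15)
print(torch.typename(flatOld))   -- torch.DoubleTensor

-- Patched allocation: same type as the first parameter tensor.
local flatNew = params[1].new(15)
print(torch.typename(flatNew))   -- torch.FloatTensor

With the patched lines, the flat vector created by nnx.flattenParameters follows the type of parameters[1] rather than the global default, so flattening parameters stored in, say, FloatTensors no longer produces a type mismatch during the re-allocation step.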