diff options
author | Soumith Chintala <soumith@gmail.com> | 2016-01-12 11:50:09 +0300 |
---|---|---|
committer | Soumith Chintala <soumith@gmail.com> | 2016-01-12 11:50:09 +0300 |
commit | 31b31ca3a8e711fe6a13972aa393513449f1d307 (patch) | |
tree | ae4cecd3d44d00829e094a4036ecd83789422fb5 /Module.lua | |
parent | 775399f23d4594ffcea9ce80095f0809b4bc1db3 (diff) |
Revert "Don't re-flatten parameters if they are already flattened"
Diffstat (limited to 'Module.lua')
-rw-r--r-- | Module.lua | 25 |
1 file changed, 0 insertions, 25 deletions
@@ -188,31 +188,6 @@ function Module.flatten(parameters)
    local Tensor = parameters[1].new
    local TmpTensor = Module._flattenTensorBuffer[torch.type(parameters[1])] or Tensor
-
-   local function checkContiguous(tensors)
-      local storage, start, nextOffset
-      for i, param in ipairs(parameters) do
-         if param:storage() then
-            if not storage then
-               storage = param:storage()
-               start = param:storageOffset()
-               nextOffset = start
-            end
-            if param:storage() ~= storage or not param:isContiguous() or nextOffset ~= param:storageOffset() then
-               return false
-            end
-            nextOffset = nextOffset + param:nElement()
-         end
-      end
-      return true, Tensor(storage, start, nextOffset - 1)
-   end
-
-   -- 0. If the parameters are already flattened use them as-is
-   local contiguous, flat = checkContiguous(parameters)
-   if contiguous then
-      return flat
-   end
-
    -- 1. construct the set of all unique storages referenced by parameter tensors
    local storages = {}
    local nParameters = 0