github.com/clementfarabet/lua---nnx.git
author    nicholas-leonard <nick@nikopia.org>  2016-01-19 19:23:22 +0300
committer nicholas-leonard <nick@nikopia.org>  2016-01-19 19:23:22 +0300
commit    a335f9ae41c0c56251b526d7a502a03feb513e59 (patch)
tree      ef4d91259dedfd11a60a1b88974c17ec5b661363 /SoftMaxTree.lua
parent    c080463dfcfc1cf0f2eac3628ce956141f854a8b (diff)
sharedClone is handled by dpnn
Diffstat (limited to 'SoftMaxTree.lua')
-rw-r--r--  SoftMaxTree.lua  38
1 file changed, 1 insertion(+), 37 deletions(-)
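The commit message above says that sharedClone is now handled by dpnn: the bespoke SoftMaxTree:sharedClone removed in the diff below is superseded by the generic nn.Module:sharedClone that the dpnn package adds. A minimal usage sketch, assuming dpnn is installed; the hierarchy, input size, and variable names are illustrative and not part of this commit:

require 'nn'
require 'nnx'
require 'dpnn'  -- extends nn.Module with a generic sharedClone()

-- hypothetical tree: root node 1 with two child nodes, 2 and 3
local hierarchy = {[1] = torch.IntTensor{2, 3}}
local smt = nn.SoftMaxTree(100, hierarchy, 1)

-- the generic clone shares the parameter tensors with the original
-- module, which is what the removed method did field by field
local clone = smt:sharedClone()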
diff --git a/SoftMaxTree.lua b/SoftMaxTree.lua
index 27bc1f7..e1bd3e1 100644
--- a/SoftMaxTree.lua
+++ b/SoftMaxTree.lua
@@ -307,7 +307,7 @@ function SoftMaxTree:type(type)
    if (type == 'torch.CudaTensor') then
       -- cunnx needs this for filling self.updates
       self._nodeUpdateHost = torch.IntTensor()
-      self._nodeUpdateCuda = torch.CudaTensor()
+      self._nodeUpdateCuda = torch.CudaIntTensor()
       self._paramUpdateHost = torch.IntTensor()
       self._paramUpdateCuda = torch.CudaTensor()
       self.parentChildrenCuda = self.parentChildren:type(type)
@@ -325,42 +325,6 @@ function SoftMaxTree:type(type)
    return self
 end
--- generate a Clone that shares parameters and metadata
--- without wasting memory
-function SoftMaxTree:sharedClone()
-   -- init a dummy clone (with small memory footprint)
-   local dummyTree = {[1]=torch.IntTensor{1,2}}
-   local smt = nn.SoftMaxTree(self.inputSize, dummyTree, 1, self.accUpdate)
-   -- clone should have same type
-   local type = self.weight:type()
-   smt:type(type)
-   -- share all the metadata
-   smt.rootId = self.rootId
-   smt.nChildNode = self.nChildNode
-   smt.nParentNode = self.nParentNode
-   smt.minNodeId = self.minNodeId
-   smt.maxNodeId = self.maxNodeId
-   smt.maxParentId = self.maxParentId
-   smt.maxChildId = self.maxChildId
-   smt.maxFamily = self.maxFamily
-   smt.childIds = self.childIds
-   smt.parentIds = self.parentIds
-   smt.parentChildren = self.parentChildren
-   smt.childParent = self.childParent
-   smt.maxFamilyPath = self.maxFamilyPath
-   smt.maxDept = self.maxDept
-   smt.updates = self.updates
-   if not self.accUpdate then
-      smt.gradWeight = self.gradWeight
-      smt.gradBias = self.gradBias
-   end
-   if type == 'torch.CudaTensor' then
-      smt.parentChildrenCuda = self.parentChildrenCuda
-      smt.childParentCuda = self.childParentCuda
-   end
-   return smt:share(self, 'weight', 'bias')
-end
-
 function SoftMaxTree:maxNorm(maxNorm)
    local params = self:parameters()
    if params then
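The one-line change in the first hunk pairs the host-side IntTensor with an integer CUDA tensor, presumably so the node indices that cunnx writes into self.updates keep their 32-bit integer type on the GPU rather than being held in a float tensor. A minimal sketch of that host/device pairing, assuming cutorch is installed; the buffer size and values are made up, and the names only mirror the fields above:

require 'cutorch'

-- host-side buffer of 32-bit node indices, as in self._nodeUpdateHost
local nodeUpdateHost = torch.IntTensor(10):fill(7)

-- the device-side counterpart keeps the integer element type;
-- the old torch.CudaTensor would have stored the indices as float32
local nodeUpdateCuda = torch.CudaIntTensor(10)
nodeUpdateCuda:copy(nodeUpdateHost)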