-- PushTable.lua
-- github.com/clementfarabet/lua---nnx.git

-- nn.PushTable forwards all but one element of its input table, and "pushes"
-- the element at the given index to one or more companion nn.PullTable
-- modules, which re-insert it further along the network.
local PushTable, parent = torch.class("nn.PushTable", "nn.Module")

function PushTable:__init(index)
   parent.__init(self)
   self._index = index   -- position of the input element to push
   self._pulls = {}      -- PullTable modules fed by this PushTable
   self.output = {}
   self._gradInput = torch.Tensor()   -- buffer for the pushed element's gradient
   self.gradInput = {}
   self._forward = false -- true between a forward pass and the first pull's backward
end

-- Create (and register) an nn.PullTable that will re-insert the pushed
-- element into its own output table at position `index`.
function PushTable:pull(index)
   local pull = nn.PullTable(self, index)
   table.insert(self._pulls, pull)
   return pull
end
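
--[[
Usage sketch (illustrative only; assumes the companion nn.PullTable from
this repository, and that the modules between push and pull operate on the
reduced table):

   local push = nn.PushTable(2)   -- remove the 2nd element and push it
   local pull = push:pull(2)      -- later, re-insert it as the 2nd element
   local mlp = nn.Sequential()
   mlp:add(push)                                     -- {x, y} -> {x}
   mlp:add(nn.ParallelTable():add(nn.Linear(5, 5)))  -- {x} -> {Wx + b}
   mlp:add(pull)                                     -- {Wx + b} -> {Wx + b, y}
   local output = mlp:forward{torch.randn(5), torch.randn(10)}
--]]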

-- Copy every input element except the one at self._index into the output
-- table (elements after it shift down by one), then hand the removed
-- element to every registered PullTable.
function PushTable:updateOutput(inputTable)
   for i, input in ipairs(inputTable) do
      if i < self._index then
         self.output[i] = input
      elseif i > self._index then
         self.output[i-1] = input
      end
   end

   local input = inputTable[self._index]
   for i, pull in ipairs(self._pulls) do
      pull:_updateOutput(input)
   end

   self._forward = true
   return self.output
end
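
-- For example, with self._index == 2 and inputTable == {a, b, c}, the output
-- is {a, c} and b is handed to each PullTable created via :pull().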

-- Called by each registered PullTable during backpropagation with the
-- gradient w.r.t. the pushed element. The first call after a forward pass
-- copies the gradient into the buffer; subsequent calls (from additional
-- pulls) accumulate into it.
function PushTable:_updateGradInput(gradOutput)
   if self._forward then
      -- reallocate the buffer only when its tensor type no longer matches
      if torch.type(self._gradInput) ~= torch.type(gradOutput) then
         self._gradInput = gradOutput.new()
      end
      self._gradInput:resizeAs(gradOutput)
      self._gradInput:copy(gradOutput)
   else
      self._gradInput:add(gradOutput)
   end
   self._forward = false
end

-- Splice the pushed element's gradient back into place. gradOutputTable has
-- one element less than inputTable, so entries at or after the push index
-- shift up by one to make room for the accumulated gradient.
function PushTable:updateGradInput(inputTable, gradOutputTable)
   for i, gradOutput in ipairs(gradOutputTable) do
      if i < self._index then
         self.gradInput[i] = gradOutput
      else
         self.gradInput[i+1] = gradOutput
      end
   end
   self.gradInput[self._index] = self._gradInput
   assert(#inputTable == #self.gradInput, "tables size mismatch")
   return self.gradInput
end
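
-- Continuing the example above: with gradOutputTable == {ga, gc} and gb the
-- gradient accumulated from the PullTable(s), self.gradInput becomes
-- {ga, gb, gc}, matching the layout of the original inputTable.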


function PushTable:type(type, tensorCache)
   assert(type, 'PushTable: must provide a type to convert to')

   tensorCache = tensorCache or {}

   -- Find all tensors and convert them. _pulls is skipped: each PullTable
   -- holds a reference back to this module, so recursing into it would loop.
   for key, param in pairs(self) do
      if key ~= "_pulls" then
         self[key] = nn.utils.recursiveType(param, type, tensorCache)
      end
   end
   return self
end
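
--[[
Conversion sketch (illustrative): the standard nn.Module shortcuts route
through :type(), so converting a network containing a PushTable works as
usual:

   local push = nn.PushTable(2)
   push:float()   -- equivalent to push:type('torch.FloatTensor')
--]]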