SpatialLPPooling.lua - github.com/torch/nn.git

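-- nn.SpatialLPPooling: Lp-norm pooling over spatial windows.
-- For a norm p, each output value is (sum of x^p over a kW x kH window)^(1/p).
-- The module chains Power(p) (Square when p == 2), a SpatialSubSampling layer
-- with fixed unit weights and zero bias, and Power(1/p) (Sqrt when p == 2).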
local SpatialLPPooling, parent = torch.class('nn.SpatialLPPooling', 'nn.Sequential')

function SpatialLPPooling:__init(nInputPlane, pnorm, kW, kH, dW, dH)
   parent.__init(self)

   dW = dW or kW
   dH = dH or kH
   
   self.kW = kW
   self.kH = kH
   self.dW = dW
   self.dH = dH

   self.nInputPlane = nInputPlane

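   -- Lp pooling pipeline: x -> x^p -> windowed sum -> (.)^(1/p)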
   if pnorm == 2 then
      self:add(nn.Square())
   else
      self:add(nn.Power(pnorm))
   end
   self:add(nn.SpatialSubSampling(nInputPlane, kW, kH, dW, dH))
   if pnorm == 2 then
      self:add(nn.Sqrt())
   else
      self:add(nn.Power(1/pnorm))
   end

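   -- Fix the sub-sampler to a plain windowed sum: unit weights, zero bias.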
   self:get(2).bias:zero()
   self:get(2).weight:fill(1)
end

-- The module is a Sequential: by default it would try to learn the parameters
-- of the sub-sampler. We prevent that by overriding the parameter-related
-- methods with no-ops.
function SpatialLPPooling:reset()
end

function SpatialLPPooling:accGradParameters()
end

function SpatialLPPooling:accUpdateGradParameters()
end

function SpatialLPPooling:zeroGradParameters()
end

function SpatialLPPooling:updateParameters()
end
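
-- Example usage (a minimal sketch; the plane count, window size, stride, and
-- input dimensions below are illustrative assumptions, not taken from this file):
--
--   local nn = require 'nn'
--   local pool = nn.SpatialLPPooling(16, 2, 2, 2, 2, 2) -- L2 pooling, 2x2 window, stride 2
--   local input  = torch.rand(16, 32, 32)               -- 16 input planes, 32x32 each
--   local output = pool:forward(input)                  -- 16 x 16 x 16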