-- init.lua - github.com/torch/torch7.git

-- We are using paths.require to appease mkl

-- Make this work with LuaJIT in Lua 5.2 compatibility mode, which
-- renames string.gfind (already deprecated in 5.1)
if not string.gfind then
   string.gfind = string.gmatch
end
if not table.unpack then
   table.unpack = unpack
end

require "paths"
paths.require "libtorch"

-- Keep track of Torch's thread-local state: if a Lua VM is passed to
-- another thread, thread-local variables need to be updated.
function torch.updatethreadlocals()
   torch.updateerrorhandlers()
   local tracking = torch._heaptracking
   if tracking == nil then tracking = false end
   torch.setheaptracking(tracking)
end

--- package stuff
function torch.packageLuaPath(name)
   if not name then
      local ret = string.match(torch.packageLuaPath('torch'), '(.*)/')
      if not ret then --windows?
         ret = string.match(torch.packageLuaPath('torch'), '(.*)\\')
      end
      return ret
   end
   for path in string.gmatch(package.path, "[^;]+") do
      path = string.gsub(path, "%?", name)
      local f = io.open(path)
      if f then
         f:close()
         local ret = string.match(path, "(.*)/")
         if not ret then --windows?
            ret = string.match(path, "(.*)\\")
         end
         return ret
      end
   end
end
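
--[[ Usage sketch (illustrative; the printed paths are hypothetical and depend
     on the install prefix):

       print(torch.packageLuaPath('torch'))
       --> e.g. /usr/local/share/lua/5.1/torch   (directory holding torch's init.lua)
       print(torch.packageLuaPath())
       --> the parent of that directory (resolved via the 'torch' entry itself)
]]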

local function include(file, depth)
   paths.dofile(file, 3 + (depth or 0))
end
rawset(_G, 'include', include)

function torch.include(package, file)
   dofile(torch.packageLuaPath(package) .. '/' .. file)
end
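
--[[ Usage sketch (illustrative; file and package names below are hypothetical):

       -- from inside a package's init.lua, load a sibling file relative to it:
       include('MyModule.lua')
       -- load a file belonging to another installed package:
       torch.include('mypkg', 'utils.lua')
]]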

function torch.class(...)
   local tname, parenttname, module
   if select('#', ...) == 3
      and type(select(1, ...)) == 'string'
      and type(select(2, ...)) == 'string'
      and type(select(3, ...)) == 'table'
   then
      tname = select(1, ...)
      parenttname = select(2, ...)
      module = select(3, ...)
   elseif select('#', ...) == 2
      and type(select(1, ...)) == 'string'
      and type(select(2, ...)) == 'string'
   then
      tname = select(1, ...)
      parenttname = select(2, ...)
   elseif select('#', ...) == 2
      and type(select(1, ...)) == 'string'
      and type(select(2, ...)) == 'table'
   then
      tname = select(1, ...)
      module = select(2, ...)
   elseif select('#', ...) == 1
      and type(select(1, ...)) == 'string'
   then
      tname = select(1, ...)
   else
      error('<class name> [<parent class name>] [<module table>] expected')
   end

   local function constructor(...)
      local self = {}
      torch.setmetatable(self, tname)
      if self.__init then
         self:__init(...)
      end
      return self
   end

   local function factory()
      local self = {}
      torch.setmetatable(self, tname)
      return self
   end

   local mt = torch.newmetatable(tname, parenttname, constructor, nil, factory, module)
   local mpt
   if parenttname then
      mpt = torch.getmetatable(parenttname)
   end
   return mt, mpt
end
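
--[[ Usage sketch (illustrative; class names are hypothetical). torch.class
     registers a metatable under the given name and returns it (plus the parent
     metatable, if any); the generated constructor calls __init when present.

       local Dog = torch.class('torch.Dog')
       function Dog:__init(name) self.name = name end
       function Dog:speak() return self.name .. ' says woof' end

       local Puppy, parent = torch.class('torch.Puppy', 'torch.Dog')
       function Puppy:__init(name)
          parent.__init(self, name)      -- chain to the parent initializer
       end

       local p = torch.Puppy('Rex')
       print(p:speak())                       --> Rex says woof
       print(torch.isTypeOf(p, 'torch.Dog'))  --> true
]]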

function torch.setdefaulttensortype(typename)
   assert(type(typename) == 'string', 'string expected')
   if torch.getconstructortable(typename) then
      torch.Tensor = torch.getconstructortable(typename)
      torch.Storage = torch.getconstructortable(torch.typename(torch.Tensor(1):storage()))
   else
      error(string.format("<%s> is not a string describing a torch object", typename))
   end
end
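
--[[ Usage sketch (illustrative): switch the default tensor/storage types and
     restore the double default afterwards.

       torch.setdefaulttensortype('torch.FloatTensor')
       print(torch.type(torch.Tensor(3)))          --> torch.FloatTensor
       torch.setdefaulttensortype('torch.DoubleTensor')
]]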

function torch.type(obj)
   local class = torch.typename(obj)
   if not class then
      class = type(obj)
   end
   return class
end
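
--[[ Usage sketch (illustrative): torch.type falls back to Lua's type() for
     values that are not torch objects.

       print(torch.type(torch.DoubleTensor(2)))    --> torch.DoubleTensor
       print(torch.type({}))                       --> table
       print(torch.type(1))                        --> number
]]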

--[[ See if a given object is an instance of the provided torch class. ]]
function torch.isTypeOf(obj, typeSpec)
   -- typeSpec can be provided as either a string, pattern, or the constructor.
   -- If the constructor is used, we look in the __typename field of the
   -- metatable to find a string to compare to.
   if type(typeSpec) ~= 'string' then
      typeSpec = getmetatable(typeSpec).__typename
      assert(type(typeSpec) == 'string',
             "type must be provided as [regexp] string, or factory")
   end

   local mt = getmetatable(obj)
   while mt do
      if type(mt) == 'table' and mt.__typename then
         local match = mt.__typename:match(typeSpec)
         -- Require full match for non-pattern specs
         if match and (match ~= typeSpec or match == mt.__typename) then
            return true
         end
      end
      mt = getmetatable(mt)
   end
   return false
end
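
--[[ Usage sketch (illustrative): the type specification may be an exact class
     name, a Lua pattern, or a constructor table.

       local t = torch.DoubleTensor(1)
       print(torch.isTypeOf(t, 'torch.DoubleTensor'))  --> true  (exact name)
       print(torch.isTypeOf(t, 'torch.*Tensor'))       --> true  (pattern)
       print(torch.isTypeOf(t, torch.DoubleTensor))    --> true  (constructor)
       print(torch.isTypeOf(t, 'torch.FloatTensor'))   --> false
]]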

torch.setdefaulttensortype('torch.DoubleTensor')

require('torch.Tensor')
require('torch.File')
require('torch.CmdLine')
require('torch.FFInterface')
require('torch.Tester')
require('torch.TestSuite')
require('torch.test')
function torch.totable(obj)
   if torch.isTensor(obj) or torch.isStorage(obj) then
      return obj:totable()
   else
      error("obj must be a Storage or a Tensor")
   end
end
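
--[[ Usage sketch (illustrative): converts tensors and storages to plain
     (possibly nested) Lua tables.

       local t = torch.Tensor({{1, 2}, {3, 4}})
       local tbl = torch.totable(t)     --> {{1, 2}, {3, 4}}
]]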

function torch.isTensor(obj)
   local typename = torch.typename(obj)
   if typename and typename:find('torch.*Tensor') then
      return true
   end
   return false
end

function torch.isStorage(obj)
   local typename = torch.typename(obj)
   if typename and typename:find('torch.*Storage') then
      return true
   end
   return false
end
-- alias for convenience
torch.Tensor.isTensor = torch.isTensor
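
--[[ Usage sketch (illustrative):

       print(torch.isTensor(torch.FloatTensor(2)))     --> true
       print(torch.isTensor(torch.DoubleStorage(2)))   --> false
       print(torch.isStorage(torch.DoubleStorage(2)))  --> true
       print(torch.isTensor(42))                       --> false
]]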

-- remove the following line to disable automatic heap-tracking for garbage collection
torch.setheaptracking(true)

-- Prepare (or update in place, when a state table is passed) the alias-method
-- tables for sampling from probs; returns a two-element state table that
-- torch.multinomialAlias consumes.
function torch.multinomialAliasSetup(probs, state)
   if torch.type(state) == 'table' then
      state[1], state[2] = torch.multinomialAliasSetup_(probs, state[1], state[2])
   else
      state = {}
      state[1], state[2] = torch.multinomialAliasSetup_(probs)
   end
   return state
end

-- Draw samples from a previously prepared alias state, writing the sampled
-- indices into output.
function torch.multinomialAlias(output, state)
   torch.DoubleTensor.multinomialAlias_(output, state[1], state[2])
   return output
end
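
--[[ Usage sketch (illustrative; sizes and probabilities are hypothetical):
     set up the alias tables once for a fixed distribution, then reuse the
     state for repeated sampling of indices into a LongTensor.

       local probs  = torch.DoubleTensor({0.1, 0.2, 0.3, 0.4})
       local state  = torch.multinomialAliasSetup(probs)
       local output = torch.LongTensor(100)
       torch.multinomialAlias(output, state)   -- 100 indices drawn from 1..4
]]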

return torch