Welcome to mirror list, hosted at ThFree Co, Russian Federation.

github.com/torch/torch7.git - Unnamed repository; edit this file 'description' to name the repository.
summaryrefslogtreecommitdiff
diff options
context:
space:
mode:
authorRui Guo <guorui.xt@gmail.com>2016-09-26 05:45:10 +0300
committerRui Guo <guorui.xt@gmail.com>2016-09-26 05:45:10 +0300
commitf277ca8aacda22d0c3268195015fb21b510d67e5 (patch)
tree5e9f7d9daf0ed7aac0503c480ce7e7df7d7c40f7
parent14a48d8bec257654fdc85f0be5b2d15d1c626acf (diff)
bug fix for read/writeLong in THMemoryFile
-rw-r--r--lib/TH/THMemoryFile.c11
-rw-r--r--test/longSize.lua95
-rw-r--r--test/test_sharedmem.lua28
3 files changed, 84 insertions, 50 deletions
diff --git a/lib/TH/THMemoryFile.c b/lib/TH/THMemoryFile.c
index d39b841..453e11e 100644
--- a/lib/TH/THMemoryFile.c
+++ b/lib/TH/THMemoryFile.c
@@ -1,5 +1,6 @@
#include "THMemoryFile.h"
#include "THFilePrivate.h"
+#include "stdint.h"
typedef struct THMemoryFile__
{
@@ -373,7 +374,7 @@ static size_t THMemoryFile_readLong(THFile *self, long *data, size_t n)
size_t i;
size_t nByte = 4*n;
size_t nByteRemaining = (mfself->position + nByte <= mfself->size ? nByte : mfself->size-mfself->position);
- int *storage = (int *)(mfself->storage->data + mfself->position);
+ int32_t *storage = (int32_t *)(mfself->storage->data + mfself->position);
nread = nByteRemaining/4;
for(i = 0; i < nread; i++)
data[i] = storage[i];
@@ -383,12 +384,12 @@ static size_t THMemoryFile_readLong(THFile *self, long *data, size_t n)
{
int i, big_endian = !THDiskFile_isLittleEndianCPU();
size_t nByte = 8*n;
- long *storage = (long *)(mfself->storage->data + mfself->position);
+ int32_t *storage = (int32_t *)(mfself->storage->data + mfself->position);
size_t nByteRemaining = (mfself->position + nByte <= mfself->size ? nByte : mfself->size-mfself->position);
nread = nByteRemaining/8;
for(i = 0; i < nread; i++)
data[i] = storage[2*i + big_endian];
- mfself->position += nread*4;
+ mfself->position += nread*8;
}
}
else
@@ -449,8 +450,8 @@ static size_t THMemoryFile_writeLong(THFile *self, long *data, size_t n)
{
int i;
size_t nByte = 4*n;
- int *storage = (int *)(mfself->storage->data + mfself->position);
THMemoryFile_grow(mfself, mfself->position+nByte);
+ int32_t *storage = (int32_t *)(mfself->storage->data + mfself->position);
for(i = 0; i < n; i++)
storage[i] = data[i];
mfself->position += nByte;
@@ -459,8 +460,8 @@ static size_t THMemoryFile_writeLong(THFile *self, long *data, size_t n)
{
int i, big_endian = !THDiskFile_isLittleEndianCPU();
size_t nByte = 8*n;
- long *storage = (long *)(mfself->storage->data + mfself->position);
THMemoryFile_grow(mfself, mfself->position+nByte);
+ int32_t *storage = (int32_t *)(mfself->storage->data + mfself->position);
for(i = 0; i < n; i++)
{
storage[2*i + !big_endian] = 0;
diff --git a/test/longSize.lua b/test/longSize.lua
index 82eef04..566e0f4 100644
--- a/test/longSize.lua
+++ b/test/longSize.lua
@@ -1,42 +1,59 @@
-tensor = torch.rand(2,3)
-f = torch.DiskFile('tensor8.bin','w')
-f:binary()
-f:longSize(8)
-f:writeObject(tensor)
-f:close()
-f = torch.DiskFile('tensor8.bin','r')
-f:binary()
-f:longSize(8)
-tensor2 = f:readObject()
-f:close()
-print('Tensors are same: ',tensor:norm()==tensor2:norm())
+require 'torch'
-f = torch.DiskFile('tensor4.bin','w')
-f:binary()
-f:longSize(4)
-f:writeObject(tensor)
-f:close()
-f = torch.DiskFile('tensor4.bin','r')
-f:binary()
-f:longSize(4)
-tensor2 = f:readObject()
-f:close()
-print('Tensors are same: ',tensor:norm()==tensor2:norm())
+local tester = torch.Tester()
+local tests = torch.TestSuite()
-f = torch.MemoryFile()
-f:binary()
-f:longSize(8)
-f:writeObject(tensor)
-f:seek(1)
-tensor2 = f:readObject()
-f:close()
-print('Tensors are same: ',tensor:norm()==tensor2:norm())
+local tensor = torch.rand(2,3)
-f = torch.MemoryFile()
-f:binary()
-f:longSize(4)
-f:writeObject(tensor)
-f:seek(1)
-tensor2 = f:readObject()
-f:close()
-print('Tensors are same: ',tensor:norm()==tensor2:norm())
+function tests.diskFileLongSize8()
+ f = torch.DiskFile('tensor8.bin','w')
+ f:binary()
+ f:longSize(8)
+ f:writeObject(tensor)
+ f:close()
+ f = torch.DiskFile('tensor8.bin','r')
+ f:binary()
+ f:longSize(8)
+ tensor2 = f:readObject()
+ f:close()
+ tester:assert(tensor:norm()==tensor2:norm())
+end
+
+function tests.diskFileLongSize4()
+ f = torch.DiskFile('tensor4.bin','w')
+ f:binary()
+ f:longSize(4)
+ f:writeObject(tensor)
+ f:close()
+ f = torch.DiskFile('tensor4.bin','r')
+ f:binary()
+ f:longSize(4)
+ tensor2 = f:readObject()
+ f:close()
+ tester:assert(tensor:norm()==tensor2:norm())
+end
+
+function tests.memoryFileLongSize8()
+ f = torch.MemoryFile()
+ f:binary()
+ f:longSize(8)
+ f:writeObject(tensor)
+ f:seek(1)
+ tensor2 = f:readObject()
+ f:close()
+ tester:assert(tensor:norm()==tensor2:norm())
+end
+
+function tests.memoryFileLongSize4()
+ f = torch.MemoryFile()
+ f:binary()
+ f:longSize(4)
+ f:writeObject(tensor)
+ f:seek(1)
+ tensor2 = f:readObject()
+ f:close()
+ tester:assert(tensor:norm()==tensor2:norm())
+end
+
+tester:add(tests)
+tester:run()
diff --git a/test/test_sharedmem.lua b/test/test_sharedmem.lua
index 14cdeaf..1230e59 100644
--- a/test/test_sharedmem.lua
+++ b/test/test_sharedmem.lua
@@ -1,4 +1,5 @@
require 'torch'
+local ffi = require 'ffi'
local tester = torch.Tester()
local tests = torch.TestSuite()
@@ -13,38 +14,52 @@ local function createSharedMemStorage(name, size, storageType)
return storage, shmName
end
+local function shmFilePath(shmName)
+ return (ffi.os ~= 'Windows' and '/dev/shm/' or '') .. shmName
+end
+
+local function removeShmFile(shmFileName)
+ if ffi.os == 'Windows' then
+ os.remove(shmFileName)
+ end
+end
+
function tests.createSharedMemFile()
local storage, shmName = createSharedMemStorage()
+ local shmFileName = shmFilePath(shmName)
-- check that file is at /dev/shm
- tester:assert(paths.filep('/dev/shm/' .. shmName),
- 'Shared memory file does not exist')
+ tester:assert(paths.filep(shmFileName),
+ 'Shared memory file exists')
-- collect storage and make sure that file is gone
storage = nil
collectgarbage()
collectgarbage()
- tester:assert(not paths.filep('/dev/shm/' .. shmName),
- 'Shared memory file still exists')
+ removeShmFile(shmFileName)
+ tester:assert(not paths.filep(shmFileName),
+ 'Shared memory file does not exist')
end
function tests.checkContents()
local storage, shmName = createSharedMemStorage()
+ local shmFileName = shmFilePath(shmName)
local tensor = torch.FloatTensor(storage, 1, torch.LongStorage{storage:size()})
tensor:copy(torch.rand(storage:size()))
- local sharedFile = torch.DiskFile('/dev/shm/'..shmName, 'r'):binary()
+ local sharedFile = torch.DiskFile(shmFileName, 'r'):binary()
for i = 1, storage:size() do
tester:assert(sharedFile:readFloat() == storage[i], 'value is not correct')
end
sharedFile:close()
+ removeShmFile(shmFileName)
end
function tests.testSharing()
-- since we are going to cast numbers into double (lua default)
-- we specifically generate double storage
local storage, shmName = createSharedMemStorage(nil, nil, 'DoubleStorage')
- local shmFileName = '/dev/shm/' .. shmName
+ local shmFileName = shmFilePath(shmName)
local tensor = torch.DoubleTensor(storage, 1, torch.LongStorage{storage:size()})
tensor:copy(torch.rand(storage:size()))
local tensorCopy = tensor.new():resizeAs(tensor):copy(tensor)
@@ -70,6 +85,7 @@ function tests.testSharing()
for i = 1, tensor:size(1) do
tester:asserteq(tensor[i], rval, 'content is wrong')
end
+ removeShmFile(shmFileName)
end
tester:add(tests)