Welcome to the mirror list, hosted at ThFree Co, Russian Federation.

github.com/torch/cutorch.git - Unnamed repository; edit this file 'description' to name the repository.
summary | refs | log | tree | commit | diff
diff options
context:
space:
mode:
authorTrevor Killeen <killeentm@gmail.com>2016-11-11 00:44:55 +0300
committerTrevor Killeen <killeentm@gmail.com>2016-11-12 00:23:02 +0300
commit6da426b51f3557045d90442c95bf82de6a8caf82 (patch)
tree7d3e57154b27af531a00bffad9b153ed7b3af023
parentf2fc5d495a1164f02b270d09a4cd9b3adb479e5e (diff)
[cutorch rand2gen] test fixes, add floor to geometric distribution transform
-rw-r--r--lib/THC/generic/THCTensorRandom.cu4
-rw-r--r--test/test.lua11
2 files changed, 8 insertions, 7 deletions
diff --git a/lib/THC/generic/THCTensorRandom.cu b/lib/THC/generic/THCTensorRandom.cu
index 16d1edd..28af274 100644
--- a/lib/THC/generic/THCTensorRandom.cu
+++ b/lib/THC/generic/THCTensorRandom.cu
@@ -304,9 +304,9 @@ THC_API void THCTensor_(bernoulli)(THCState* state, THCTensor *self_, double p)
#if defined(THC_REAL_IS_DOUBLE)
-GENERATE_KERNEL1(generate_geometric, double, double p, double, curand_uniform_double, (log(1-x) / log(p)) + 1)
+GENERATE_KERNEL1(generate_geometric, double, double p, double, curand_uniform_double, floor((log(1-x) / log(p)) + 1))
#else
-GENERATE_KERNEL1(generate_geometric, real, double p, float, curand_uniform, (ScalarConvert<float, real>::to((log(1-x) / log(p)) + 1)))
+GENERATE_KERNEL1(generate_geometric, real, double p, float, curand_uniform, (ScalarConvert<float, real>::to(floorf((log(1-x) / log(p)) + 1))))
#endif
THC_API void THCTensor_(geometric)(THCState* state, THCTensor *self_, double p)
diff --git a/test/test.lua b/test/test.lua
index fcba6cc..7747c3a 100644
--- a/test/test.lua
+++ b/test/test.lua
@@ -2527,7 +2527,7 @@ function test.uniform()
local t = torch.CudaTensor(sz1, sz2)
for _, typename in ipairs(float_typenames) do
- local x = t:type(t2cpu[typename])
+ local x = t:type(typename)
x:uniform(min, max)
checkIfUniformlyDistributed(x, min, max)
end
@@ -2541,7 +2541,7 @@ function test.bernoulli()
local t = torch.CudaTensor(sz1, sz2)
for _, typename in ipairs(typenames) do
- local x = t:type(t2cpu[typename])
+ local x = t:type(typename)
x:bernoulli(p)
local mean = x:sum() / (sz1 * sz2)
tester:assertalmosteq(mean, p, 0.1, "mean is not equal to p")
@@ -2579,7 +2579,7 @@ function test.logNormal()
local t = torch.CudaTensor(sz1, sz2)
for _, typename in ipairs(float_typenames) do
- local x = t:type(t2cpu[typename])
+ local x = t:type(typename)
x:logNormal(mean, std)
local logt = x:log()
tester:assertalmosteq(logt:mean(), mean, tolerance, "mean is wrong")
@@ -2595,8 +2595,9 @@ function test.geometric()
local t = torch.CudaTensor(sz1, sz2)
for _, typename in ipairs(float_typenames) do
- local x = t:type(t2cpu[typename])
+ local x = t:type(typename)
x:geometric(p)
+
local u = torch.FloatTensor(sz1, sz2):fill(1) -
((x:float() - 1) * math.log(p)):exp()
checkIfUniformlyDistributed(u, 0, 1)
@@ -2627,7 +2628,7 @@ function test.cauchy()
local t = torch.CudaTensor(sz1, sz2)
for _, typename in ipairs(float_typenames) do
- local x = t:type(t2cpu[typename])
+ local x = t:type(typename)
x:cauchy(median, sigma)
local u = ((x:float() - median) / sigma):atan() / math.pi + 0.5
checkIfUniformlyDistributed(u, 0, 1)