Welcome to the mirror list, hosted at ThFree Co, Russian Federation.

github.com/torch/optim.git - Unnamed repository; edit this file 'description' to name the repository.
summary | refs | log | tree | commit | diff
diff options
context:
space:
mode:
authorR. Gokberk Cinbis <gokberkcinbis@gmail.com>2016-08-25 00:12:00 +0300
committerGitHub <noreply@github.com>2016-08-25 00:12:00 +0300
commit40ff09870167a11a1c2eab1041d7ec8204aa47cf (patch)
treeff1863de343246f71ddce4cc03635798dc48f850
parent6c59c359d199417e1cf1b65b42d2d614713e4749 (diff)
Reduce numerical errors.
x[i]+eps-2*eps may not result in exactly the same x[i], which may increase approximation error in the gradient estimate.
-rw-r--r-- checkgrad.lua | 3
1 file changed, 2 insertions(+), 1 deletion(-)
diff --git a/checkgrad.lua b/checkgrad.lua
index aecb969..402d9fc 100644
--- a/checkgrad.lua
+++ b/checkgrad.lua
@@ -27,11 +27,12 @@ function optim.checkgrad(opfunc, x, eps)
local eps = eps or 1e-7
local dC_est = torch.Tensor():typeAs(dC):resizeAs(dC)
for i = 1,dC:size(1) do
+ local tmp = x[i]
x[i] = x[i] + eps
local C1 = opfunc(x)
x[i] = x[i] - 2 * eps
local C2 = opfunc(x)
- x[i] = x[i] + eps
+ x[i] = tmp
dC_est[i] = (C1 - C2) / (2 * eps)
end