Welcome to the mirror list, hosted at ThFree Co, Russian Federation.

github.com/clementfarabet/lua---nnx.git - Unnamed repository; edit this file 'description' to name the repository.
summaryrefslogtreecommitdiff
diff options
context:
space:
mode:
author    Clement Farabet <clement.farabet@gmail.com>  2012-02-04 04:58:36 +0400
committer Clement Farabet <clement.farabet@gmail.com>  2012-02-04 04:58:36 +0400
commit    cd87ba18021770ed72334865b9a0b96e69fedeba (patch)
tree      b80746705899268910e7b0b4b13ef7494b42eadf /generic
parent    6402f6b13f5c8a4c3adcbbe64889531666af4fde (diff)
newpack version
Diffstat (limited to 'generic')
-rw-r--r--  generic/SpatialGraph.c  |  2 +-
-rw-r--r--  generic/SpatialLinear.c | 10 +++++-----
2 files changed, 6 insertions(+), 6 deletions(-)
diff --git a/generic/SpatialGraph.c b/generic/SpatialGraph.c
index a1db79a..011dc56 100644
--- a/generic/SpatialGraph.c
+++ b/generic/SpatialGraph.c
@@ -63,7 +63,7 @@ static int nn_(SpatialGraph_updateOutput)(lua_State *L)
// add epsilon to input (to get rid of 0s)
THTensor *inputb = THTensor_(newWithSize3d)(input->size[0], input->size[1], input->size[2]);
THTensor_(copy)(inputb, input);
- THTensor_(add)(inputb, 1e-12);
+ THTensor_(add)(inputb, inputb, 1e-12);
// Sum[ (Xi * Xi+1) ]
int x,y,k;
diff --git a/generic/SpatialLinear.c b/generic/SpatialLinear.c
index 524d313..2924e50 100644
--- a/generic/SpatialLinear.c
+++ b/generic/SpatialLinear.c
@@ -32,7 +32,7 @@ static int nn_(SpatialLinear_updateOutput)(lua_State *L)
for (ik=0; ik<ichannels; ik++) {
// get input plane
THTensor_(select)(inputPlane, input, 0, ik);
- THTensor_(cadd)(outputPlane, THTensor_(get2d)(weight,ok,ik), inputPlane);
+ THTensor_(cadd)(outputPlane, outputPlane, THTensor_(get2d)(weight,ok,ik), inputPlane);
}
}
@@ -94,16 +94,16 @@ static int nn_(SpatialLinear_updateGradInput)(lua_State *L)
THTensor_(select)(input_xy, input_y, 1, x);
// compute dE/dW and dE/dB
- THTensor_(addr)(gradWeight, 1, gradOutput_xy, input_xy);
- THTensor_(cadd)(gradBias, 1, gradOutput_xy);
+ THTensor_(addr)(gradWeight, 1, gradWeight, 1, gradOutput_xy, input_xy);
+ THTensor_(cadd)(gradBias, gradBias, 1, gradOutput_xy);
// weight decay
if (weightDecay != 0) {
- THTensor_(cadd)(gradWeight, 1, weight);
+ THTensor_(cadd)(gradWeight, gradWeight, 1, weight);
}
// compute dE/dI
- THTensor_(addmv)(gradInput_xy, 1, 1, weight_t, gradOutput_xy);
+ THTensor_(addmv)(gradInput_xy, 1, gradInput_xy, 1, weight_t, gradOutput_xy);
}
}