Welcome to the mirror list, hosted at ThFree Co, Russian Federation.

github.com/marian-nmt/marian.git - Unnamed repository; edit this file 'description' to name the repository.
summaryrefslogtreecommitdiff
path: root/src
diff options
context:
space:
mode:
authorMarcin Junczys-Dowmunt <junczys@amu.edu.pl>2016-05-04 01:43:39 +0300
committerMarcin Junczys-Dowmunt <junczys@amu.edu.pl>2016-05-04 01:43:39 +0300
commite5fcec663c49b3ed23d2b444bafaf74bece4ae86 (patch)
tree69916e98eb4def58e1dfbe3199d65daf7cfa6d93 /src
parentd19f1f610f840c02f1e71512ce3ec5a2fe1f579b (diff)
more coolness
Diffstat (limited to 'src')
-rwxr-xr-xsrc/a.outbin199184 -> 156942 bytes
-rw-r--r--src/mad.h14
-rw-r--r--src/test.cpp57
3 files changed, 40 insertions, 31 deletions
diff --git a/src/a.out b/src/a.out
index ebfbbce5..d8b53810 100755
--- a/src/a.out
+++ b/src/a.out
Binary files differ
diff --git a/src/mad.h b/src/mad.h
index f55e7bee..47d27be4 100644
--- a/src/mad.h
+++ b/src/mad.h
@@ -40,7 +40,7 @@ class Vimpl : public Chainable {
virtual void set_zero_adjoint() { adj_ = 0; }
const Tensor& val() const { return val_; };
- Tensor& adj() { return adj_; };
+ Tensor& grad() { return adj_; };
protected:
const Tensor val_;
@@ -72,15 +72,15 @@ class Var {
return vimpl_->val();
}
- Tensor& adj() {
- return vimpl_->adj();
+ Tensor& grad() {
+ return vimpl_->grad();
}
VimplPtr vimpl() const {
return vimpl_;
}
- void grad() {
+ void calc_gradients() {
mad::grad(vimpl_);
}
@@ -99,7 +99,7 @@ struct LogVimpl : public OpVimpl {
LogVimpl(VimplPtr a) : OpVimpl(std::log(a->val()), a) { }
void chain() {
- a_->adj() += adj_ / a_->val();
+ a_->grad() += adj_ / a_->val();
}
};
@@ -122,8 +122,8 @@ struct PlusVimplVV : public OpVimplVV {
PlusVimplVV(VimplPtr a, VimplPtr b) : OpVimplVV(a->val() + b->val(), a, b) { }
void chain() {
- a_->adj() += adj_;
- b_->adj() += adj_;
+ a_->grad() += adj_;
+ b_->grad() += adj_;
}
};
diff --git a/src/test.cpp b/src/test.cpp
index e919925d..2d7a05f4 100644
--- a/src/test.cpp
+++ b/src/test.cpp
@@ -3,32 +3,41 @@
#include "mad.h"
-int main(int argc, char** argv) {
+mad::Var layer(size_t max) {
using namespace mad;
- {
- srand(time(NULL));
- size_t max = rand() % 20 + 1;
-
- Var x0 = 1, x1 = 2, x2 = 3;
- std::vector<Var> x = { x0, x1, x2 };
-
- Var y = 0.0;
- for(int i = 0; i < max; i++) {
- Var xi = i;
- y = y + x0 + log(x2) + x1;
- for(int j = 0; j < i; ++i) {
- y = y + xi;
- x.push_back(xi);
- }
+
+ Var x0 = 1, x1 = 2, x2 = 3;
+ Var y = 0.0;
+ for(int i = 0; i < max; i++) {
+ Var xi = i;
+ y = y + x0 + log(x2) + x1;
+ for(int j = 0; j < i; ++j) {
+ y = y + xi;
}
-
-
- set_zero_all_adjoints();
- y.grad();
-
- std::cerr << "y = " << y.val() << std::endl;
- for(int i = 0; i < x.size(); ++i)
- std::cerr << "dy/dx_" << i << " = " << x[i].adj() << std::endl;
}
+
+ return y;
+}
+
+int main(int argc, char** argv) {
+ srand(time(NULL));
+
+ using namespace mad;
+
+ Var y1 = layer(10);
+ Var y2 = layer(5);
+
+ Var y = y1 + log(y2);
+
+ set_zero_all_adjoints();
+ y.calc_gradients();
+
+ std::cerr << "y1 = " << y1.val() << std::endl;
+ std::cerr << "y2 = " << y2.val() << std::endl;
+ std::cerr << "y = " << y.val() << std::endl;
+
+ std::cerr << "dy/dy1 = " << y1.grad() << std::endl;
+ std::cerr << "dy/dy2 = " << y2.grad() << std::endl;
+
} \ No newline at end of file