Welcome to the mirror list, hosted at ThFree Co, Russian Federation.

github.com/moses-smt/nplm.git - Unnamed repository; edit this file 'description' to name the repository.
summary | refs | log | tree | commit | diff
diff options
context:
space:
mode:
author    Rico Sennrich <rico.sennrich@gmx.ch>  2015-02-24 14:00:58 +0300
committer Rico Sennrich <rico.sennrich@gmx.ch>  2015-02-24 14:00:58 +0300
commit    d3f0c1bf474f370cb47fc6420e55781e39e5a801 (patch)
tree      6b0066dabace37972bab3a83f62019373ddc368b
parent    8d5573b9ce39b995ea275162a6f376dc0ed8f0c4 (diff)
switch on more verbose output for testNeuralNetwork with option '--debug 1'
-rw-r--r--  src/param.h             |  1
-rw-r--r--  src/testNeuralNetwork.cpp  | 13
2 files changed, 12 insertions, 2 deletions
diff --git a/src/param.h b/src/param.h
index 0615690..8502312 100644
--- a/src/param.h
+++ b/src/param.h
@@ -54,6 +54,7 @@ struct param
double normalization_init;
int num_threads;
+ int debug;
bool share_embeddings;
diff --git a/src/testNeuralNetwork.cpp b/src/testNeuralNetwork.cpp
index c60f9c5..72d8a81 100644
--- a/src/testNeuralNetwork.cpp
+++ b/src/testNeuralNetwork.cpp
@@ -24,6 +24,8 @@ int main (int argc, char *argv[])
// program options //
CmdLine cmd("Tests a two-layer neural probabilistic language model.", ' ' , "0.1");
+ ValueArg<int> debug("", "debug", "Debug level. Higher debug levels print log-probabilities of each n-gram (level 1), and n-gram itself (level 2). Default: 0.", false, 0, "int", cmd);
+
ValueArg<int> num_threads("", "num_threads", "Number of threads. Default: maximum.", false, 0, "int", cmd);
ValueArg<int> minibatch_size("", "minibatch_size", "Minibatch size. Default: 64.", false, 64, "int", cmd);
@@ -38,6 +40,7 @@ int main (int argc, char *argv[])
myParam.num_threads = num_threads.getValue();
myParam.minibatch_size = minibatch_size.getValue();
+ myParam.debug = debug.getValue();
cerr << "Command line: " << endl;
cerr << boost::algorithm::join(vector<string>(argv, argv+argc), " ") << endl;
@@ -115,8 +118,14 @@ int main (int argc, char *argv[])
minibatch_log_likelihood);
log_likelihood += minibatch_log_likelihood;
- /*for (int i=0; i<current_minibatch_size; i++)
- cerr << minibatch.block(0,i,myParam.ngram_size,1) << " " << output_probs(minibatch(myParam.ngram_size-1,i),i) << endl;*/
+ if (myParam.debug > 0) {
+ for (int i=0; i<current_minibatch_size; i++) {
+ if (myParam.debug > 1) {
+ cerr << minibatch.block(0,i,myParam.ngram_size,1).transpose() << " ";
+ }
+ cerr << output_probs(minibatch(myParam.ngram_size-1,i),i) << endl;
+ }
+ }
}
cerr << "Test log-likelihood: " << log_likelihood << endl;