From 337ead1bbf4d4f1e6297a6c584a4718f8ba90173 Mon Sep 17 00:00:00 2001
From: Rico Sennrich
Date: Mon, 17 Nov 2014 11:01:00 +0000
Subject: re-apply 31412f (osx compile)

---
 src/prepareNeuralLM.cpp    | 4 ++--
 src/trainNeuralNetwork.cpp | 8 ++++----
 2 files changed, 6 insertions(+), 6 deletions(-)

diff --git a/src/prepareNeuralLM.cpp b/src/prepareNeuralLM.cpp
index 13a534a..adedc72 100644
--- a/src/prepareNeuralLM.cpp
+++ b/src/prepareNeuralLM.cpp
@@ -240,7 +240,7 @@ void writeMmapNgrams(const string &input_filename,
     if (i %500000 == 0) {
       cerr<<"Shuffled "<
     }
-    data_size_t j = uniform_int_distribution<data_size_t>(0, i-1)(rng);
+    data_size_t j = boost::random::uniform_int_distribution<data_size_t>(0, i-1)(rng);
     for (int k=0;k
@@ ... @@
-    data_size_t j = uniform_int_distribution<data_size_t>(0, i-1)(rng);
+    data_size_t j = boost::random::uniform_int_distribution<data_size_t>(0, i-1)(rng);
     for (int k=0;k
       at(i*ngram_size+k);
       mMapVec->at(i*ngram_size+k) =
diff --git a/src/trainNeuralNetwork.cpp b/src/trainNeuralNetwork.cpp
index e231c20..a4cac12 100644
--- a/src/trainNeuralNetwork.cpp
+++ b/src/trainNeuralNetwork.cpp
@@ -312,7 +312,7 @@ int main(int argc, char** argv)
       if (i %500000 == 0) {
        cerr<<"Shuffled "<
       }
-      data_size_t j = uniform_int_distribution<data_size_t>(0, i-1)(rng);
+      data_size_t j = boost::random::uniform_int_distribution<data_size_t>(0, i-1)(rng);
       for (int k=0;k
         at(i*myParam.ngram_size+k);
         training_data_flat_mmap->at(i*myParam.ngram_size+k) =
@@ -326,7 +326,7 @@ int main(int argc, char** argv)
       if (i %500000 == 0) {
        cerr<<"Shuffled "<
       }
-      data_size_t j = uniform_int_distribution<data_size_t>(0, i-1)(rng);
+      data_size_t j = boost::random::uniform_int_distribution<data_size_t>(0, i-1)(rng);
       for (int k=0;k
@@ ... @@
-      data_size_t j = uniform_int_distribution<data_size_t>(0, i-1)(rng);
+      data_size_t j = boost::random::uniform_int_distribution<data_size_t>(0, i-1)(rng);
      for (int k=0;k
        at(i*myParam.ngram_size+k);
        training_data_flat_mmap->at(i*myParam.ngram_size+k) =
@@ -396,7 +396,7 @@ int main(int argc, char** argv)
     // Randomly shuffle training data to improve learning
     for (data_size_t i=training_data_size-1; i>0; i--)
     {
-      data_size_t j = uniform_int_distribution<data_size_t>(0, i-1)(rng);
+      data_size_t j = boost::random::uniform_int_distribution<data_size_t>(0, i-1)(rng);
       training_data.col(i).swap(training_data.col(j));
     }
   }
--
cgit v1.2.3
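
All six changes in this patch are the same one-line fix: the call to uniform_int_distribution is fully qualified as boost::random::uniform_int_distribution, presumably because the unqualified name is ambiguous between Boost's template and the C++11 std::uniform_int_distribution once both namespaces are in scope, which is what the "osx compile" subject points at. The sketch below illustrates the qualified call in the same kind of in-place shuffle the patch touches. It is a standalone example, not code from the repository: the function name shuffle_ngrams, the flat std::vector<int> layout, and the typedef of data_size_t to std::uint64_t are illustrative assumptions.

// Standalone sketch (assumed names; not from the repository) showing the
// fully qualified boost::random::uniform_int_distribution call inside a
// shuffle over n-grams stored contiguously in a flat buffer.
#include <boost/random/mersenne_twister.hpp>
#include <boost/random/uniform_int_distribution.hpp>
#include <cstdint>
#include <iostream>
#include <utility>
#include <vector>

typedef std::uint64_t data_size_t;  // assumed width; stands in for the project's data_size_t

// Shuffle rows of a flat buffer in which row i occupies
// data[i*ngram_size .. i*ngram_size + ngram_size - 1].
void shuffle_ngrams(std::vector<int> &data, int ngram_size,
                    boost::random::mt19937 &rng)
{
    data_size_t num_ngrams = data.size() / ngram_size;
    if (num_ngrams < 2) return;
    for (data_size_t i = num_ngrams - 1; i > 0; i--)
    {
        // Qualified as in the patch; an unqualified call could also match
        // std::uniform_int_distribution and fail to compile as ambiguous.
        // The [0, i-1] range mirrors the patched code.
        data_size_t j =
            boost::random::uniform_int_distribution<data_size_t>(0, i - 1)(rng);
        for (int k = 0; k < ngram_size; k++)
            std::swap(data[i * ngram_size + k], data[j * ngram_size + k]);
    }
}

int main()
{
    boost::random::mt19937 rng(1234);
    std::vector<int> data = {1, 2, 3, 4, 5, 6, 7, 8, 9};  // three toy trigrams
    shuffle_ngrams(data, 3, rng);
    for (int v : data) std::cout << v << ' ';
    std::cout << '\n';
    return 0;
}

Dropping the boost::random:: qualifier from that one line while `using namespace std;` and `using namespace boost::random;` are both in effect makes the name lookup ambiguous under a C++11 standard library, which is the class of build failure this commit re-applies the fix for.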