Repository: github.com/moses-smt/mosesdecoder.git
path: root/moses/LM
author     Hieu Hoang <hieuhoang@gmail.com>  2015-12-11 20:29:10 +0300
committer  Hieu Hoang <hieuhoang@gmail.com>  2015-12-11 20:29:10 +0300
commit     e5d3306519a43dc02a85cbf2e8e67e3699bf91b8 (patch)
tree       a8dfaa1bf86781df1c06e624a967dba9301166d4 /moses/LM
parent     51d21b09c0c59b2449cce2ba2f524cd3dc5ff9b1 (diff)
parent     40ed3df9cb513681569a2fcde32a5215c38253f3 (diff)

Merge ../mosesdecoder into perf_moses2
Diffstat (limited to 'moses/LM')
-rw-r--r--  moses/LM/BackwardTest.cpp     | 5
-rw-r--r--  moses/LM/BilingualLM.cpp      | 3
-rw-r--r--  moses/LM/BilingualLM.h        | 2
-rw-r--r--  moses/LM/DALMWrapper.cpp      | 2
-rw-r--r--  moses/LM/DALMWrapper.h        | 2
-rw-r--r--  moses/LM/IRST.cpp             | 2
-rw-r--r--  moses/LM/IRST.h               | 2
-rw-r--r--  moses/LM/MaxEntSRI.cpp        | 2
-rw-r--r--  moses/LM/MaxEntSRI.h          | 2
-rw-r--r--  moses/LM/NeuralLMWrapper.cpp  | 2
-rw-r--r--  moses/LM/NeuralLMWrapper.h    | 2
-rw-r--r--  moses/LM/RDLM.cpp             | 2
-rw-r--r--  moses/LM/RDLM.h               | 2
-rw-r--r--  moses/LM/Rand.cpp             | 2
-rw-r--r--  moses/LM/Rand.h               | 2
-rw-r--r--  moses/LM/SRI.cpp              | 2
-rw-r--r--  moses/LM/SRI.h                | 2
-rw-r--r--  moses/LM/oxlm/OxLM.cpp        | 2
-rw-r--r--  moses/LM/oxlm/OxLM.h          | 2
19 files changed, 23 insertions, 19 deletions
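
Taken together, the changes below are one interface migration: the language models' Load() hook gains an options argument (declared as AllOptions::ptr const& in the headers), and wrappers such as BilingualLM keep the pointer in m_options before reading parameters and loading the model. What follows is a minimal, self-contained sketch of that pattern, not code from the commit: ExampleLM and the stand-in AllOptions struct are hypothetical, and std::shared_ptr is only a guess at what the real AllOptions::ptr typedef in moses/parameters/AllOptions.h resolves to.

// Illustrative sketch only (not from the repository). A stand-in AllOptions and a
// hypothetical ExampleLM show the shape of the new Load(AllOptions::ptr const&) hook.
#include <memory>

struct AllOptions {
  typedef std::shared_ptr<AllOptions const> ptr;  // stand-in for the real AllOptions::ptr
  // ... decoder-wide settings would live here ...
};

class ExampleLM {
public:
  // Old style: void Load();  -- options were picked up implicitly from global state.
  // New style: the caller hands the options in, and the wrapper keeps them.
  void Load(AllOptions::ptr const& opts) {
    m_options = opts;  // same idea as "m_options = opts;" in BilingualLM::Load
    // ... ReadParameters() / loadModel() equivalents would follow here ...
  }
private:
  AllOptions::ptr m_options;
};

int main() {
  AllOptions::ptr opts(new AllOptions);  // mirrors "AllOptions::ptr DefaultOptions(new AllOptions)"
  ExampleLM lm;
  lm.Load(opts);
  return 0;
}

The point of this shape is that the options travel explicitly with the call, so a test can construct its own options object (as BackwardTest.cpp now does with DefaultOptions and Sentence(DefaultOptions)) instead of depending on whatever global state happens to be set.
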
diff --git a/moses/LM/BackwardTest.cpp b/moses/LM/BackwardTest.cpp
index bd9c74379..ef45d6e60 100644
--- a/moses/LM/BackwardTest.cpp
+++ b/moses/LM/BackwardTest.cpp
@@ -28,6 +28,7 @@ Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
#include "moses/TypeDef.h"
#include "moses/StaticData.h"
+#include "moses/parameters/AllOptions.h"
//#include "BackwardLMState.h"
#include "moses/LM/Backward.h"
@@ -61,12 +62,14 @@ namespace Moses
// Apparently some Boost versions use templates and are pretty strict about types matching.
#define SLOPPY_CHECK_CLOSE(ref, value, tol) BOOST_CHECK_CLOSE(static_cast<double>(ref), static_cast<double>(value), static_cast<double>(tol));
+AllOptions::ptr DefaultOptions(new AllOptions);
+
class BackwardLanguageModelTest
{
public:
BackwardLanguageModelTest() :
- dummyInput(new Sentence),
+ dummyInput(new Sentence(DefaultOptions)),
backwardLM(
static_cast< BackwardLanguageModel<lm::ngram::ProbingModel> * >(
ConstructBackwardLM(
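
A side note on the SLOPPY_CHECK_CLOSE macro kept in this hunk's context: as its comment says, some Boost versions are strict about the three arguments of BOOST_CHECK_CLOSE having matching types, so the macro casts everything to double. A standalone illustration of the same workaround, with a made-up test name and values:

// Standalone Boost.Test example of the cast-to-double workaround; not part of the commit.
#define BOOST_TEST_MODULE sloppy_check_close_example
#include <boost/test/included/unit_test.hpp>

// Without the casts, BOOST_CHECK_CLOSE(float, double, double) may not compile on
// Boost versions that insist on matching operand types.
#define SLOPPY_CHECK_CLOSE(ref, value, tol) \
  BOOST_CHECK_CLOSE(static_cast<double>(ref), static_cast<double>(value), static_cast<double>(tol))

BOOST_AUTO_TEST_CASE(mixed_type_comparison)
{
  float  score    = 0.5f;  // e.g. a model score that comes back as float
  double expected = 0.5;   // reference value held as double
  SLOPPY_CHECK_CLOSE(expected, score, 0.01);  // tolerance is a percentage
}
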
diff --git a/moses/LM/BilingualLM.cpp b/moses/LM/BilingualLM.cpp
index f7c36a4e8..8fc88c597 100644
--- a/moses/LM/BilingualLM.cpp
+++ b/moses/LM/BilingualLM.cpp
@@ -20,8 +20,9 @@ BilingualLM::BilingualLM(const std::string &line)
}
-void BilingualLM::Load()
+void BilingualLM::Load(AllOptions::ptr const& opts)
{
+ m_options = opts;
ReadParameters();
loadModel();
}
diff --git a/moses/LM/BilingualLM.h b/moses/LM/BilingualLM.h
index cb5075fd1..ed9d99489 100644
--- a/moses/LM/BilingualLM.h
+++ b/moses/LM/BilingualLM.h
@@ -117,7 +117,7 @@ public:
return new BilingualLMState(0);
}
- void Load();
+ void Load(AllOptions::ptr const& opts);
FFState* EvaluateWhenApplied(
const Hypothesis& cur_hypo,
diff --git a/moses/LM/DALMWrapper.cpp b/moses/LM/DALMWrapper.cpp
index 60eee0250..ae21995a9 100644
--- a/moses/LM/DALMWrapper.cpp
+++ b/moses/LM/DALMWrapper.cpp
@@ -204,7 +204,7 @@ LanguageModelDALM::~LanguageModelDALM()
delete m_lm;
}
-void LanguageModelDALM::Load()
+void LanguageModelDALM::Load(AllOptions const& opts)
{
/////////////////////
// READING INIFILE //
diff --git a/moses/LM/DALMWrapper.h b/moses/LM/DALMWrapper.h
index fe724e7c8..4898dd66c 100644
--- a/moses/LM/DALMWrapper.h
+++ b/moses/LM/DALMWrapper.h
@@ -28,7 +28,7 @@ public:
LanguageModelDALM(const std::string &line);
virtual ~LanguageModelDALM();
- void Load();
+ void Load(AllOptions::ptr const& opts);
virtual const FFState *EmptyHypothesisState(const InputType &/*input*/) const;
diff --git a/moses/LM/IRST.cpp b/moses/LM/IRST.cpp
index be0213c97..10fcdcd9f 100644
--- a/moses/LM/IRST.cpp
+++ b/moses/LM/IRST.cpp
@@ -96,7 +96,7 @@ bool LanguageModelIRST::IsUseable(const FactorMask &mask) const
return ret;
}
-void LanguageModelIRST::Load()
+ void LanguageModelIRST::Load(AllOptions::ptr const& opts)
{
FactorCollection &factorCollection = FactorCollection::Instance();
diff --git a/moses/LM/IRST.h b/moses/LM/IRST.h
index 820031faf..b4c080378 100644
--- a/moses/LM/IRST.h
+++ b/moses/LM/IRST.h
@@ -88,7 +88,7 @@ public:
bool IsUseable(const FactorMask &mask) const;
- void Load();
+ void Load(AllOptions::ptr const& opts);
const FFState *EmptyHypothesisState(const InputType &/*input*/) const;
virtual LMResult GetValue(const std::vector<const Word*> &contextFactor, State* finalState = NULL) const;
diff --git a/moses/LM/MaxEntSRI.cpp b/moses/LM/MaxEntSRI.cpp
index 3e7f4df44..18fa4415f 100644
--- a/moses/LM/MaxEntSRI.cpp
+++ b/moses/LM/MaxEntSRI.cpp
@@ -66,7 +66,7 @@ LanguageModelMaxEntSRI::~LanguageModelMaxEntSRI()
delete m_srilmVocab;
}
-void LanguageModelMaxEntSRI::Load()
+void LanguageModelMaxEntSRI::Load(AllOptions const& opts)
{
m_srilmVocab = new ::Vocab();
m_srilmModel = new MEModel(*m_srilmVocab, m_nGramOrder);
diff --git a/moses/LM/MaxEntSRI.h b/moses/LM/MaxEntSRI.h
index c53a879b8..1f3004e0f 100644
--- a/moses/LM/MaxEntSRI.h
+++ b/moses/LM/MaxEntSRI.h
@@ -54,7 +54,7 @@ protected:
public:
LanguageModelMaxEntSRI(const std::string &line);
~LanguageModelMaxEntSRI();
- void Load();
+ void Load(AllOptions::ptr const& opts);
virtual LMResult GetValue(const std::vector<const Word*> &contextFactor, State* finalState = 0) const;
};
diff --git a/moses/LM/NeuralLMWrapper.cpp b/moses/LM/NeuralLMWrapper.cpp
index 22ff90bb9..f19eb97c4 100644
--- a/moses/LM/NeuralLMWrapper.cpp
+++ b/moses/LM/NeuralLMWrapper.cpp
@@ -22,7 +22,7 @@ NeuralLMWrapper::~NeuralLMWrapper()
}
-void NeuralLMWrapper::Load()
+void NeuralLMWrapper::Load(AllOptions const& opts)
{
// Set parameters required by ancestor classes
diff --git a/moses/LM/NeuralLMWrapper.h b/moses/LM/NeuralLMWrapper.h
index bd6635a7c..66a0278a9 100644
--- a/moses/LM/NeuralLMWrapper.h
+++ b/moses/LM/NeuralLMWrapper.h
@@ -27,7 +27,7 @@ public:
virtual LMResult GetValue(const std::vector<const Word*> &contextFactor, State* finalState = 0) const;
- virtual void Load();
+ virtual void Load(AllOptions::ptr const& opts);
};
diff --git a/moses/LM/RDLM.cpp b/moses/LM/RDLM.cpp
index 374274790..992233923 100644
--- a/moses/LM/RDLM.cpp
+++ b/moses/LM/RDLM.cpp
@@ -39,7 +39,7 @@ RDLM::~RDLM()
delete lm_label_base_instance_;
}
-void RDLM::Load()
+void RDLM::Load(AllOptions const& opts)
{
lm_head_base_instance_ = new nplm::neuralTM();
diff --git a/moses/LM/RDLM.h b/moses/LM/RDLM.h
index 963c1e8d5..8fdc9d641 100644
--- a/moses/LM/RDLM.h
+++ b/moses/LM/RDLM.h
@@ -208,7 +208,7 @@ public:
int /* featureID - used to index the state in the previous hypotheses */,
ScoreComponentCollection* accumulator) const;
- void Load();
+ void Load(AllOptions::ptr const& opts);
// Iterator-class that yields all children of a node; if child is virtual node of binarized tree, its children are yielded instead.
class UnbinarizedChildren
diff --git a/moses/LM/Rand.cpp b/moses/LM/Rand.cpp
index edf06fd05..00474deee 100644
--- a/moses/LM/Rand.cpp
+++ b/moses/LM/Rand.cpp
@@ -52,7 +52,7 @@ LanguageModelRandLM::~LanguageModelRandLM()
delete m_lm;
}
-void LanguageModelRandLM::Load()
+void LanguageModelRandLM::Load(AllOptions const& opts)
{
cerr << "Loading LanguageModelRandLM..." << endl;
FactorCollection &factorCollection = FactorCollection::Instance();
diff --git a/moses/LM/Rand.h b/moses/LM/Rand.h
index caf367c8c..54b5738b6 100644
--- a/moses/LM/Rand.h
+++ b/moses/LM/Rand.h
@@ -39,7 +39,7 @@ public:
LanguageModelRandLM(const std::string &line);
~LanguageModelRandLM();
- void Load();
+ void Load(AllOptions::ptr const& opts);
virtual LMResult GetValue(const std::vector<const Word*> &contextFactor, State* finalState = NULL) const;
void InitializeForInput(ttasksptr const& ttask);
void CleanUpAfterSentenceProcessing(const InputType& source);
diff --git a/moses/LM/SRI.cpp b/moses/LM/SRI.cpp
index fb60a4adb..2741cfa1e 100644
--- a/moses/LM/SRI.cpp
+++ b/moses/LM/SRI.cpp
@@ -66,7 +66,7 @@ LanguageModelSRI::~LanguageModelSRI()
delete m_srilmVocab;
}
-void LanguageModelSRI::Load()
+void LanguageModelSRI::Load(AllOptions const& opts)
{
m_srilmVocab = new ::Vocab();
m_srilmModel = new Ngram(*m_srilmVocab, m_nGramOrder);
diff --git a/moses/LM/SRI.h b/moses/LM/SRI.h
index 12d5a9626..0a6139832 100644
--- a/moses/LM/SRI.h
+++ b/moses/LM/SRI.h
@@ -54,7 +54,7 @@ protected:
public:
LanguageModelSRI(const std::string &line);
~LanguageModelSRI();
- void Load();
+ void Load(AllOptions::ptr const& opts);
virtual LMResult GetValue(const std::vector<const Word*> &contextFactor, State* finalState = 0) const;
};
diff --git a/moses/LM/oxlm/OxLM.cpp b/moses/LM/oxlm/OxLM.cpp
index 8fde54713..963795bf3 100644
--- a/moses/LM/oxlm/OxLM.cpp
+++ b/moses/LM/oxlm/OxLM.cpp
@@ -70,7 +70,7 @@ void OxLM<Model>::SetParameter(const string& key, const string& value)
}
template<class Model>
-void OxLM<Model>::Load()
+void OxLM<Model>::Load(AllOptions const& opts)
{
model.load(m_filePath);
diff --git a/moses/LM/oxlm/OxLM.h b/moses/LM/oxlm/OxLM.h
index 4056ccab9..5c73cd6c7 100644
--- a/moses/LM/oxlm/OxLM.h
+++ b/moses/LM/oxlm/OxLM.h
@@ -24,7 +24,7 @@ public:
void SetParameter(const std::string& key, const std::string& value);
- void Load();
+ void Load(AllOptions::ptr const& opts);
virtual LMResult GetValue(
const std::vector<const Word*> &contextFactor,