Repository: github.com/moses-smt/mosesdecoder.git
author     heafield <heafield@1f5c12ca-751b-0410-a591-d2e778427230>  2011-10-17 13:30:30 +0400
committer  heafield <heafield@1f5c12ca-751b-0410-a591-d2e778427230>  2011-10-17 13:30:30 +0400
commit     68a4626a4911cb1c9d9a163832d9cd772684c526 (patch)
tree       75059cb8b43a7f30c024fdbaa73162e2b0700cc8 /moses
parent     27dd30122e1ae685a5f7e1dfc76ffa5c0b5ffd92 (diff)
Remove reference counts now that we can use boost
git-svn-id: https://mosesdecoder.svn.sourceforge.net/svnroot/mosesdecoder/trunk@4380 1f5c12ca-751b-0410-a591-d2e778427230
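
The commit message above says it all: the hand-rolled reference counting, which was only compiled when WITH_THREADS was not defined, is replaced by boost::shared_ptr ownership throughout. For readers unfamiliar with the pattern, here is a minimal before/after sketch; the names (RefCountedSketch, MakeOwner) are illustrative, not Moses identifiers.

#include <boost/shared_ptr.hpp>

// Before (illustrative): an intrusive count that every owner had to
// increment on copy and decrement in its destructor, deleting the
// object when the count reached zero.
class RefCountedSketch {
public:
  RefCountedSketch() : m_referenceCount(0) {}
  unsigned int IncrementReferenceCount() { return ++m_referenceCount; }
  unsigned int DecrementReferenceCount() { return --m_referenceCount; }
private:
  unsigned int m_referenceCount;
};

// After: boost::shared_ptr does the counting. Copying the pointer
// shares ownership, and the object is deleted when the last copy
// is destroyed.
boost::shared_ptr<RefCountedSketch> MakeOwner() {
  return boost::shared_ptr<RefCountedSketch>(new RefCountedSketch());
}
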
Diffstat (limited to 'moses')
-rw-r--r--  moses/src/LM/Implementation.h   42
-rw-r--r--  moses/src/LM/Ken.cpp            75
2 files changed, 26 insertions(+), 91 deletions(-)
diff --git a/moses/src/LM/Implementation.h b/moses/src/LM/Implementation.h
index e0551f759..1ea59b6b3 100644
--- a/moses/src/LM/Implementation.h
+++ b/moses/src/LM/Implementation.h
@@ -31,9 +31,7 @@ Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
#include "Word.h"
#include "LM/Base.h"
-#ifdef WITH_THREADS
#include <boost/shared_ptr.hpp>
-#endif
namespace Moses
{
@@ -54,18 +52,8 @@ struct LMResult {
//! Abstract base class which represent a language model on a contiguous phrase
class LanguageModelImplementation
{
-#ifndef WITH_THREADS
-protected:
- /** constructor to be called by inherited class
- */
- LanguageModelImplementation() : m_referenceCount(0) {}
-
-private:
- // ref counting handled by boost if we have threads
- unsigned int m_referenceCount;
-#else
// default constructor is ok
-#endif
+
void ShiftOrPush(std::vector<const Word*> &contextFactor, const Word &word) const;
protected:
@@ -136,35 +124,14 @@ public:
//! overrideable funtions for IRST LM to cleanup. Maybe something to do with on demand/cache loading/unloading
virtual void InitializeBeforeSentenceProcessing() {};
virtual void CleanUpAfterSentenceProcessing() {};
-
-#ifndef WITH_THREADS
- // ref counting handled by boost otherwise
-
- unsigned int IncrementReferenceCount() {
- return ++m_referenceCount;
- }
-
- unsigned int DecrementReferenceCount() {
- return --m_referenceCount;
- }
-#endif
};
class LMRefCount : public LanguageModel {
public:
LMRefCount(ScoreIndexManager &scoreIndexManager, LanguageModelImplementation *impl) : m_impl(impl) {
-#ifndef WITH_THREADS
- impl->IncrementReferenceCount();
-#endif
Init(scoreIndexManager);
}
- ~LMRefCount() {
-#ifndef WITH_THREADS
- if (!m_impl->DecrementReferenceCount()) delete m_impl;
-#endif
- }
-
LanguageModel *Duplicate(ScoreIndexManager &scoreIndexManager) const {
return new LMRefCount(scoreIndexManager, *this);
}
@@ -203,17 +170,10 @@ class LMRefCount : public LanguageModel {
private:
LMRefCount(ScoreIndexManager &scoreIndexManager, const LMRefCount &copy_from) : m_impl(copy_from.m_impl) {
-#ifndef WITH_THREADS
- m_impl->IncrementReferenceCount();
-#endif
Init(scoreIndexManager);
}
-#ifdef WITH_THREADS
boost::shared_ptr<LanguageModelImplementation> m_impl;
-#else
- LanguageModelImplementation *m_impl;
-#endif
};
}
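
Net effect in Implementation.h: LMRefCount now always stores a boost::shared_ptr<LanguageModelImplementation>, so its copy constructor and destructor no longer need the #ifndef WITH_THREADS reference-count branches. A rough sketch of the resulting ownership shape, with Init and ScoreIndexManager omitted and made-up names (ImplSketch, WrapperSketch), not the actual Moses classes:

#include <boost/shared_ptr.hpp>

class ImplSketch {};  // stands in for LanguageModelImplementation

class WrapperSketch {  // stands in for LMRefCount
public:
  explicit WrapperSketch(ImplSketch *impl) : m_impl(impl) {}
  // The compiler-generated copy constructor copies m_impl, which bumps
  // the shared count; the generated destructor drops it again.
  WrapperSketch *Duplicate() const { return new WrapperSketch(*this); }
private:
  boost::shared_ptr<ImplSketch> m_impl;
};
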
diff --git a/moses/src/LM/Ken.cpp b/moses/src/LM/Ken.cpp
index 22b390c19..678cbfe83 100644
--- a/moses/src/LM/Ken.cpp
+++ b/moses/src/LM/Ken.cpp
@@ -40,9 +40,7 @@ Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
#include "StaticData.h"
#include "ChartHypothesis.h"
-#ifdef WITH_THREADS
-#include <boost/scoped_ptr.hpp>
-#endif
+#include <boost/shared_ptr.hpp>
using namespace std;
@@ -66,15 +64,6 @@ template <class Model> class LanguageModelKen : public LanguageModel {
public:
LanguageModelKen(const std::string &file, ScoreIndexManager &manager, FactorType factorType, bool lazy);
- ~LanguageModelKen() {
-#ifndef WITH_THREADS
- if (!--*m_refcount) {
- delete m_ngram;
- delete m_refcount;
- }
-#endif
- }
-
LanguageModel *Duplicate(ScoreIndexManager &scoreIndexManager) const;
bool Useable(const Phrase &phrase) const {
@@ -122,12 +111,8 @@ template <class Model> class LanguageModelKen : public LanguageModel {
}
}
-#ifdef WITH_THREADS
boost::shared_ptr<Model> m_ngram;
-#else
- Model *m_ngram;
- mutable unsigned int *m_refcount;
-#endif
+
std::vector<lm::WordIndex> m_lmIdLookup;
FactorType m_factorType;
@@ -166,18 +151,7 @@ template <class Model> LanguageModelKen<Model>::LanguageModelKen(const std::stri
config.enumerate_vocab = &builder;
config.load_method = lazy ? util::LAZY : util::POPULATE_OR_READ;
- try {
-#ifdef WITH_THREADS
- m_ngram.reset(new Model(file.c_str(), config));
-#else
- m_ngram = new Model(file.c_str(), config);
- m_refcount = new unsigned int();
- *m_refcount = 1;
-#endif
- } catch (std::exception &e) {
- std::cerr << e.what() << std::endl;
- abort();
- }
+ m_ngram.reset(new Model(file.c_str(), config));
m_beginSentenceFactor = collection.AddFactor(BOS_);
Init(manager);
@@ -193,10 +167,6 @@ template <class Model> LanguageModelKen<Model>::LanguageModelKen(ScoreIndexManag
m_lmIdLookup(copy_from.m_lmIdLookup),
m_factorType(copy_from.m_factorType),
m_beginSentenceFactor(copy_from.m_beginSentenceFactor) {
-#ifndef WITH_THREADS
- m_refcount = copy_from.m_refcount;
- ++*m_refcount;
-#endif
Init(manager);
}
@@ -349,25 +319,30 @@ template <class Model> FFState *LanguageModelKen<Model>::EvaluateChart(const Cha
} // namespace
LanguageModel *ConstructKenLM(const std::string &file, ScoreIndexManager &manager, FactorType factorType, bool lazy) {
- lm::ngram::ModelType model_type;
- if (lm::ngram::RecognizeBinary(file.c_str(), model_type)) {
- switch(model_type) {
- case lm::ngram::HASH_PROBING:
+ try {
+ lm::ngram::ModelType model_type;
+ if (lm::ngram::RecognizeBinary(file.c_str(), model_type)) {
+ switch(model_type) {
+ case lm::ngram::HASH_PROBING:
+ return new LanguageModelKen<lm::ngram::ProbingModel>(file, manager, factorType, lazy);
+ case lm::ngram::TRIE_SORTED:
+ return new LanguageModelKen<lm::ngram::TrieModel>(file, manager, factorType, lazy);
+ case lm::ngram::QUANT_TRIE_SORTED:
+ return new LanguageModelKen<lm::ngram::QuantTrieModel>(file, manager, factorType, lazy);
+ case lm::ngram::ARRAY_TRIE_SORTED:
+ return new LanguageModelKen<lm::ngram::ArrayTrieModel>(file, manager, factorType, lazy);
+ case lm::ngram::QUANT_ARRAY_TRIE_SORTED:
+ return new LanguageModelKen<lm::ngram::QuantArrayTrieModel>(file, manager, factorType, lazy);
+ default:
+ std::cerr << "Unrecognized kenlm model type " << model_type << std::endl;
+ abort();
+ }
+ } else {
return new LanguageModelKen<lm::ngram::ProbingModel>(file, manager, factorType, lazy);
- case lm::ngram::TRIE_SORTED:
- return new LanguageModelKen<lm::ngram::TrieModel>(file, manager, factorType, lazy);
- case lm::ngram::QUANT_TRIE_SORTED:
- return new LanguageModelKen<lm::ngram::QuantTrieModel>(file, manager, factorType, lazy);
- case lm::ngram::ARRAY_TRIE_SORTED:
- return new LanguageModelKen<lm::ngram::ArrayTrieModel>(file, manager, factorType, lazy);
- case lm::ngram::QUANT_ARRAY_TRIE_SORTED:
- return new LanguageModelKen<lm::ngram::QuantArrayTrieModel>(file, manager, factorType, lazy);
- default:
- std::cerr << "Unrecognized kenlm model type " << model_type << std::endl;
- abort();
}
- } else {
- return new LanguageModelKen<lm::ngram::ProbingModel>(file, manager, factorType, lazy);
+ } catch (std::exception &e) {
+ std::cerr << e.what() << std::endl;
+ abort();
}
}
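
In Ken.cpp the same shared_ptr change removes ~LanguageModelKen and the m_refcount bookkeeping, and the try/catch that previously wrapped only the model load inside the constructor now wraps the whole ConstructKenLM dispatch, so an exception thrown while detecting the binary format is also reported before abort(). A hedged sketch of that factory shape; ModelSketch and LoadOrDie are made-up names, not Moses or KenLM API:

#include <cstdlib>
#include <exception>
#include <iostream>
#include <stdexcept>
#include <boost/shared_ptr.hpp>

struct ModelSketch {
  explicit ModelSketch(const char *path) {
    if (!path || !*path) throw std::runtime_error("cannot load model");
  }
};

// Construct inside a single try block so any exception, whether from
// format detection or from loading, prints its message and aborts.
boost::shared_ptr<ModelSketch> LoadOrDie(const char *path) {
  try {
    return boost::shared_ptr<ModelSketch>(new ModelSketch(path));
  } catch (std::exception &e) {
    std::cerr << e.what() << std::endl;
    abort();
  }
}
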