Welcome to mirror list, hosted at ThFree Co, Russian Federation.

LanguageModelSkip.h « src « moses - github.com/moses-smt/mosesdecoder.git - Unnamed repository; edit this file 'description' to name the repository.
summaryrefslogtreecommitdiff
blob: e253db35b87b7a05b4c8d98d348fdf6a530a2a0b (plain)
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
47
48
49
50
51
52
53
54
55
56
57
58
59
60
61
62
63
64
65
66
67
68
69
70
71
72
73
74
75
76
77
78
79
80
81
82
83
84
85
86
87
88
89
90
91
92
93
94
95
96
97
98
99
100
101
102
103
104
105
106
107
108
109
110
111
112
113
114
115
116
117
118
119
120
121
122
// $Id$

/***********************************************************************
Moses - factored phrase-based language decoder
Copyright (C) 2006 University of Edinburgh

This library is free software; you can redistribute it and/or
modify it under the terms of the GNU Lesser General Public
License as published by the Free Software Foundation; either
version 2.1 of the License, or (at your option) any later version.

This library is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the GNU
Lesser General Public License for more details.

You should have received a copy of the GNU Lesser General Public
License along with this library; if not, write to the Free Software
Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA  02110-1301  USA
***********************************************************************/

#pragma once

#include <vector>
#include <algorithm>
#include "LanguageModelMultiFactor.h"
#include "LanguageModelSingleFactor.h"
#include "Phrase.h"
#include "FactorCollection.h"

/* Hacked-up LM which skips any factor whose string starts with '---'.
 * The order of the chunked context is hardcoded to 3 (m_realNGramOrder).
 */
class LanguageModelSkip : public LanguageModelSingleFactor
{	
protected:
	size_t m_realNGramOrder;
	LanguageModelSingleFactor *m_lmImpl;
	
public:
	/** Constructor
	* \param lmImpl SRI or IRST LM which this LM can use to load data
	*/
	LanguageModelSkip(LanguageModelSingleFactor *lmImpl
										, bool registerScore
										, ScoreIndexManager &scoreIndexManager)
	: LanguageModelSingleFactor(registerScore, scoreIndexManager)
	{
		m_lmImpl = lmImpl;		
	}
	~LanguageModelSkip()
	{
		delete m_lmImpl;
	}
	bool Load(const std::string &filePath
					, FactorType factorType
					, float weight
					, size_t nGramOrder)
	{
		m_factorType 				= factorType;
		m_weight 						= weight;
		m_filePath 					= filePath;
		m_nGramOrder 				= nGramOrder;
		
		m_realNGramOrder 		= 3;

		FactorCollection &factorCollection = FactorCollection::Instance();

		m_sentenceStartArray[m_factorType] = factorCollection.AddFactor(Output, m_factorType, BOS_);
		m_sentenceEndArray[m_factorType] = factorCollection.AddFactor(Output, m_factorType, EOS_);

		return m_lmImpl->Load(filePath, m_factorType, weight, nGramOrder);
	}
			
	float GetValue(const std::vector<const Word*> &contextFactor, State* finalState = NULL, unsigned int* len = NULL) const
	{
		if (contextFactor.size() == 0)
		{
			return 0;
		}

		// only process context where last word is a word we want
		const Factor *factor = (*contextFactor.back())[m_factorType];
		std::string strWord = factor->GetString();
		if (strWord.find("---") == 0)
			return 0;
		
		// add last word
		std::vector<const Word*> chunkContext;
		Word* chunkWord = new Word;
		chunkWord->SetFactor(m_factorType, factor);
		chunkContext.push_back(chunkWord);
		
		// create context in reverse 'cos we skip words we don't want
		for (int currPos = (int)contextFactor.size() - 2 ; currPos >= 0 && chunkContext.size() < m_realNGramOrder ; --currPos )
		{
			const Word &word = *contextFactor[currPos];
			factor = word[m_factorType];
			std::string strWord = factor->GetString();
			bool skip = strWord.find("---") == 0;
			if (skip)
				continue;

			// add word to chunked context
			Word* chunkWord = new Word;
			chunkWord->SetFactor(m_factorType, factor);
			chunkContext.push_back(chunkWord);
		}
	
		// create context factor the right way round
		std::reverse(chunkContext.begin(), chunkContext.end());

		// calc score on chunked phrase
		float ret = m_lmImpl->GetValue(chunkContext, finalState, len);

		RemoveAllInColl(chunkContext);

		return ret;
	}
};