// $Id: Vocabulary.cpp 1565 2008-02-22 14:42:01Z bojar $
#include "Vocabulary.h"
// as in beamdecoder/tables.cpp
// Split `input` on spaces and tabs and map each token to its word id,
// registering previously unseen words via StoreIfNew.
// Leading, trailing, and repeated separators are ignored.
vector<WORD_ID> Vocabulary::Tokenize( const char input[] )
{
vector< WORD_ID > ids;
int pos = 0;
while (input[pos] != '\0') {
// skip over any run of separators
while (input[pos] == ' ' || input[pos] == '\t')
pos++;
if (input[pos] == '\0')
break;
// scan to the end of the current token
int tokenStart = pos;
while (input[pos] != '\0' && input[pos] != ' ' && input[pos] != '\t')
pos++;
ids.push_back( StoreIfNew( string( input + tokenStart, pos - tokenStart ) ) );
}
return ids;
}
// Return the id of `word`, assigning a fresh id (the current vocab
// size) and storing the word the first time it is encountered.
WORD_ID Vocabulary::StoreIfNew( const WORD& word )
{
map<WORD, WORD_ID>::iterator found = lookup.find( word );
if (found == lookup.end()) {
WORD_ID newId = vocab.size();
vocab.push_back( word );
lookup[ word ] = newId;
return newId;
}
return found->second;
}
// Look up `word` without modifying the vocabulary.
// NOTE(review): returns 0 for unknown words, but 0 is also the id of
// the first word ever stored -- callers presumably reserve id 0;
// confirm against the rest of the project.
WORD_ID Vocabulary::GetWordID( const WORD &word )
{
map<WORD, WORD_ID>::iterator hit = lookup.find( word );
return (hit == lookup.end()) ? 0 : hit->second;
}
// Write the vocabulary to `fileName`, one word per line, in id order
// (line number == word id), truncating any existing file.
// On open failure the error is reported to cerr and nothing is written.
void Vocabulary::Save( string fileName )
{
ofstream vcbFile;
vcbFile.open( fileName.c_str(), ios::out | ios::ate | ios::trunc);
if (!vcbFile) {
// previously a failed open was silently ignored, losing the vocabulary
cerr << "ERROR: could not open vocabulary file for writing: " << fileName << endl;
return;
}
for(vector< WORD >::const_iterator i = vocab.begin(); i != vocab.end(); ++i) {
// '\n' instead of endl: avoids an expensive flush on every line
vcbFile << *i << '\n';
}
vcbFile.close();
}
// Load a vocabulary from `fileName`: one word per line, each stored via
// StoreIfNew, so ids are assigned in file order.  Progress and the final
// count are reported on cerr.
// NOTE(review): depends on the project-wide SAFE_GETLINE macro and
// MAX_LENGTH constant; behavior for lines longer than MAX_LENGTH is
// whatever the macro does (truncate or abort) -- confirm in Util.h.
void Vocabulary::Load( string fileName )
{
ifstream vcbFile;
char line[MAX_LENGTH];
vcbFile.open(fileName.c_str());
cerr << "loading from " << fileName << endl;
istream *fileP = &vcbFile;
int count = 0;
// eof() only turns true after a failed read, hence the re-check
// immediately after SAFE_GETLINE before using `line`.
while(!fileP->eof()) {
SAFE_GETLINE((*fileP), line, MAX_LENGTH, '\n');
if (fileP->eof()) break;
// manual strlen: find the terminating NUL left in the buffer
int length = 0;
for(; line[length] != '\0'; length++);
StoreIfNew( string( line, length ) );
count++;
}
vcbFile.close();
cerr << count << " word read, vocabulary size " << vocab.size() << endl;
}