// Copyright 2016 Google Inc.
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
//     http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.

#ifndef PRETOKENIZER_FOR_TRAINING_H_
#define PRETOKENIZER_FOR_TRAINING_H_

#include <memory>
#include <string>

#include "common.h"
#include "sentencepiece.pb.h"
#include "sentencepiece_processor.h"
#include "third_party/absl/strings/string_view.h"

namespace sentencepiece {
namespace pretokenizer {

class PretokenizerForTrainingInterface {
 public:
  PretokenizerForTrainingInterface() {}
  virtual ~PretokenizerForTrainingInterface() {}
  virtual util::Status status() const = 0;

  // Puts kUPPBoundaryStr before and after the pre-tokenizer's segmentation
  // when there are no spaces between these tokens.
  // Example 1:
  // input: 東京です
  // segmentation: piece[0] = {0, 6}, piece[1] = {6, 12}
  // output: 東京<tab>です (here kUPPBoundaryStr is <tab>)
  //
  // Example 2:
  // input: I love sentencepiece
  // segmentation: piece[0] = {0, 1}, piece[1] = {2, 6},
  //               piece[2] = {7, 15}, piece[3] = {15, 20}
  // output: I love sentence<tab>piece
  std::string PreTokenize(absl::string_view text) const;

  // Returns the pre-tokenized result.
  // Note that the pre-tokenization constraint is specified with byte
  // offsets (SentencePiece::begin, SentencePiece::end) over the input
  // text. See the illustrative sketch after this class definition.
  virtual SentencePieceText Tokenize(absl::string_view text) const = 0;

 private:
  static std::string Preprocess(absl::string_view text);
  static std::string Postprocess(const SentencePieceText &spt);
};
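
// Illustrative sketch (not part of the library): a hypothetical
// whitespace-based pre-tokenizer could implement Tokenize() by emitting one
// SentencePiece per space-delimited token and filling the byte offsets
// (SentencePiece::begin, SentencePiece::end) over the input text.
// PreTokenize() would then insert kUPPBoundaryStr only between adjacent
// pieces that are not already separated by a space, as in the examples
// above. The class name "WhitespacePretokenizer" is an assumption made for
// this sketch only.
//
//   class WhitespacePretokenizer : public PretokenizerForTrainingInterface {
//    public:
//     util::Status status() const override { return util::OkStatus(); }
//
//     SentencePieceText Tokenize(absl::string_view text) const override {
//       SentencePieceText spt;
//       size_t begin = 0;
//       while (begin < text.size()) {
//         size_t end = text.find(' ', begin);
//         if (end == absl::string_view::npos) end = text.size();
//         if (end > begin) {
//           auto *piece = spt.add_pieces();
//           piece->set_piece(std::string(text.substr(begin, end - begin)));
//           piece->set_begin(begin);
//           piece->set_end(end);
//         }
//         begin = end + 1;
//       }
//       return spt;
//     }
//   };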

}  // namespace pretokenizer
}  // namespace sentencepiece

#endif  // PRETOKENIZER_FOR_TRAINING_H_