github.com/marian-nmt/marian.git
Diffstat (limited to 'src/common/config_parser.cpp')
-rw-r--r--  src/common/config_parser.cpp  26
1 file changed, 13 insertions(+), 13 deletions(-)
diff --git a/src/common/config_parser.cpp b/src/common/config_parser.cpp
index 9705d5b7..333d87a7 100644
--- a/src/common/config_parser.cpp
+++ b/src/common/config_parser.cpp
@@ -119,10 +119,10 @@ void ConfigParser::addOptionsGeneral(cli::CLIWrapper& cli) {
cli.add<std::vector<std::string>>("--config,-c",
"Configuration file(s). If multiple, later overrides earlier");
cli.add<size_t>("--workspace,-w",
- "Preallocate arg MB of work space",
+ "Preallocate arg MB of work space",
defaultWorkspace);
cli.add<std::string>("--log",
- "Log training process information to file given by arg");
+ "Log training process information to file given by arg");
cli.add<std::string>("--log-level",
"Set verbosity level of logging: trace, debug, info, warn, err(or), critical, off",
"info");
@@ -392,17 +392,17 @@ void ConfigParser::addOptionsTraining(cli::CLIWrapper& cli) {
"Finish after this many chosen training units, 0 is infinity (e.g. 100e = 100 epochs, 10Gt = 10 billion target labels, 100Ku = 100,000 updates",
"0e");
cli.add<std::string/*SchedulerPeriod*/>("--disp-freq",
- "Display information every arg updates (append 't' for every arg target labels)",
+ "Display information every arg updates (append 't' for every arg target labels)",
"1000u");
cli.add<size_t>("--disp-first",
- "Display information for the first arg updates");
+ "Display information for the first arg updates");
cli.add<bool>("--disp-label-counts",
"Display label counts when logging loss progress",
true);
// cli.add<int>("--disp-label-index",
// "Display label counts based on i-th input stream (-1 is last)", -1);
cli.add<std::string/*SchedulerPeriod*/>("--save-freq",
- "Save model file every arg updates (append 't' for every arg target labels)",
+ "Save model file every arg updates (append 't' for every arg target labels)",
"10000u");
cli.add<std::vector<std::string>>("--logical-epoch",
"Redefine logical epoch counter as multiple of data epochs (e.g. 1e), updates (e.g. 100Ku) or labels (e.g. 1Gt). "
@@ -473,12 +473,12 @@ void ConfigParser::addOptionsTraining(cli::CLIWrapper& cli) {
cli.add<bool>("--lr-decay-repeat-warmup",
"Repeat learning rate warmup when learning rate is decayed");
cli.add<std::vector<std::string/*SchedulerPeriod*/>>("--lr-decay-inv-sqrt",
- "Decrease learning rate at arg / sqrt(no. batches) starting at arg (append 't' or 'e' for sqrt(target labels or epochs)). "
+ "Decrease learning rate at arg / sqrt(no. batches) starting at arg (append 't' or 'e' for sqrt(target labels or epochs)). "
"Add second argument to define the starting point (default: same as first value)",
{"0"});
cli.add<std::string/*SchedulerPeriod*/>("--lr-warmup",
- "Increase learning rate linearly for arg first batches (append 't' for arg first target labels)",
+ "Increase learning rate linearly for arg first batches (append 't' for arg first target labels)",
"0");
cli.add<float>("--lr-warmup-start-rate",
"Start value for learning rate warmup");
@@ -492,7 +492,7 @@ void ConfigParser::addOptionsTraining(cli::CLIWrapper& cli) {
cli.add<double>("--factor-weight",
"Weight for loss function for factors (factored vocab only) (1 to disable)", 1.0f);
cli.add<float>("--clip-norm",
- "Clip gradient norm to arg (0 to disable)",
+ "Clip gradient norm to arg (0 to disable)",
1.f); // @TODO: this is currently wrong with ce-sum and should rather be disabled or fixed by multiplying with labels
cli.add<float>("--exponential-smoothing",
"Maintain smoothed version of parameters for validation and saving with smoothing factor. 0 to disable. "
@@ -575,7 +575,7 @@ void ConfigParser::addOptionsValidation(cli::CLIWrapper& cli) {
cli.add<std::vector<std::string>>("--valid-sets",
"Paths to validation corpora: source target");
cli.add<std::string/*SchedulerPeriod*/>("--valid-freq",
- "Validate model every arg updates (append 't' for every arg target labels)",
+ "Validate model every arg updates (append 't' for every arg target labels)",
"10000u");
cli.add<std::vector<std::string>>("--valid-metrics",
"Metric to use during validation: cross-entropy, ce-mean-words, perplexity, valid-script, "
@@ -585,7 +585,7 @@ void ConfigParser::addOptionsValidation(cli::CLIWrapper& cli) {
cli.add<bool>("--valid-reset-stalled",
"Reset all stalled validation metrics when the training is restarted");
cli.add<size_t>("--early-stopping",
- "Stop if the first validation metric does not improve for arg consecutive validation steps",
+ "Stop if the first validation metric does not improve for arg consecutive validation steps",
10);
cli.add<std::string>("--early-stopping-on",
"Decide if early stopping should take into account first, all, or any validation metrics"
@@ -637,7 +637,7 @@ void ConfigParser::addOptionsValidation(cli::CLIWrapper& cli) {
cli.add<bool>("--keep-best",
"Keep best model for each validation metric");
cli.add<std::string>("--valid-log",
- "Log validation scores to file given by arg");
+ "Log validation scores to file given by arg");
cli.switchGroup(previous_group);
// clang-format on
}
@@ -942,10 +942,10 @@ void ConfigParser::addSuboptionsULR(cli::CLIWrapper& cli) {
cli.add<std::string>("--ulr-query-vectors",
"Path to file with universal sources embeddings from projection into universal space",
"");
- // keys: EK in Fig2 : is the keys of the target embbedings projected to unified space (i.e. ENU in
+ // keys: EK in Fig2 : is the keys of the target embeddings projected to unified space (i.e. ENU in
// multi-lingual case)
cli.add<std::string>("--ulr-keys-vectors",
- "Path to file with universal sources embeddings of traget keys from projection into universal space",
+ "Path to file with universal sources embeddings of target keys from projection into universal space",
"");
cli.add<bool>("--ulr-trainable-transformation",
"Make Query Transformation Matrix A trainable");