
github.com/TharinduDR/TransQuest.git
author     TharinduDR <rhtdranasinghe@gmail.com>    2020-07-08 19:52:47 +0300
committer  TharinduDR <rhtdranasinghe@gmail.com>    2020-07-08 19:52:47 +0300
commit     d68412945a3fffb440451aae1585faf5344d3187 (patch)
tree       8f6e652d7b15142e3818b5a200837b147b84e042
parent     b3a61bffb4ac084caa5423144d333dacc3372772 (diff)
031: Fine tuning
-rw-r--r--    examples/wmt_2020/en_zh/transformer_config.py    8
1 file changed, 4 insertions, 4 deletions
diff --git a/examples/wmt_2020/en_zh/transformer_config.py b/examples/wmt_2020/en_zh/transformer_config.py
index f8140d9..7c60b2d 100644
--- a/examples/wmt_2020/en_zh/transformer_config.py
+++ b/examples/wmt_2020/en_zh/transformer_config.py
@@ -17,7 +17,7 @@ transformer_config = {
     'fp16': False,
     'fp16_opt_level': 'O1',
-    'max_seq_length': 128,
+    'max_seq_length': 80,
     'train_batch_size': 8,
     'gradient_accumulation_steps': 1,
     'eval_batch_size': 8,
@@ -30,14 +30,14 @@ transformer_config = {
     'max_grad_norm': 1.0,
     'do_lower_case': False,
-    'logging_steps': 300,
-    'save_steps': 300,
+    'logging_steps': 100,
+    'save_steps': 100,
     "no_cache": False,
     'save_model_every_epoch': True,
     'save_recent_only': True,
     'n_fold': 3,
     'evaluate_during_training': True,
-    'evaluate_during_training_steps': 300,
+    'evaluate_during_training_steps': 100,
     "evaluate_during_training_verbose": True,
     'use_cached_eval_features': False,
     'save_eval_checkpoints': True,
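
For readability, the slice below shows the transformer_config entries touched by this commit as they read after the change, with the old values noted in comments. This is only a sketch of the keys visible in the diff hunks; the rest of examples/wmt_2020/en_zh/transformer_config.py is unchanged and omitted here, and the inline notes on intent are an interpretation rather than anything stated in the commit message.

# Keys from transformer_config affected by this commit (other keys omitted).
transformer_config = {
    'fp16': False,
    'fp16_opt_level': 'O1',
    'max_seq_length': 80,       # was 128; presumably shortened to cut per-batch memory/compute
    'train_batch_size': 8,
    'gradient_accumulation_steps': 1,
    'eval_batch_size': 8,

    'max_grad_norm': 1.0,
    'do_lower_case': False,
    'logging_steps': 100,       # was 300; log metrics more often
    'save_steps': 100,          # was 300; write checkpoints more often
    "no_cache": False,
    'save_model_every_epoch': True,
    'save_recent_only': True,
    'n_fold': 3,
    'evaluate_during_training': True,
    'evaluate_during_training_steps': 100,  # was 300; run dev-set evaluation every 100 steps
    "evaluate_during_training_verbose": True,
    'use_cached_eval_features': False,
    'save_eval_checkpoints': True,
}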