diff options
author | John Bauer <horatio@gmail.com> | 2022-09-10 10:08:23 +0300 |
---|---|---|
committer | John Bauer <horatio@gmail.com> | 2022-09-10 10:45:01 +0300 |
commit | 528430c1b82d48ecbf9f06bd48496bb4c6e2f3d4 (patch) | |
tree | bb5edcfb3c827fa659a825f07a91c5425fef4523 | |
parent | ac0b6ff3bf86f8a200a6020326c37d51af322821 (diff) |
Add a debug log line to reloading optimizers in conparse
-rw-r--r-- | stanza/models/constituency/trainer.py | 1 |
1 files changed, 1 insertions, 0 deletions
diff --git a/stanza/models/constituency/trainer.py b/stanza/models/constituency/trainer.py
index 1bea94a9..5e2eb8ff 100644
--- a/stanza/models/constituency/trainer.py
+++ b/stanza/models/constituency/trainer.py
@@ -137,6 +137,7 @@ class Trainer:
         if load_optimizer:
             # need to match the optimizer we build with the one that was used at training time
             build_simple_adadelta = checkpoint['args']['multistage'] and epochs_trained < checkpoint['args']['epochs'] // 2
+            logger.debug("Model loaded was built with multistage %s epochs_trained %d out of total epochs %d Building initial Adadelta optimizer: %s", checkpoint['args']['multistage'], epochs_trained, checkpoint['args']['epochs'], build_simple_adadelta)
             optimizer = build_optimizer(saved_args, model, build_simple_adadelta)
             if checkpoint.get('optimizer_state_dict', None) is not None: