Welcome to mirror list, hosted at ThFree Co, Russian Federation.

github.com/stanfordnlp/stanza.git - Unnamed repository; edit this file 'description' to name the repository.
summaryrefslogtreecommitdiff
diff options
context:
space:
mode:
authorJohn Bauer <horatio@gmail.com>2022-09-10 10:08:23 +0300
committerJohn Bauer <horatio@gmail.com>2022-09-10 10:45:01 +0300
commit528430c1b82d48ecbf9f06bd48496bb4c6e2f3d4 (patch)
treebb5edcfb3c827fa659a825f07a91c5425fef4523
parentac0b6ff3bf86f8a200a6020326c37d51af322821 (diff)
Add a debug log line to reloading optimizers in conparse
-rw-r--r--stanza/models/constituency/trainer.py1
1 file changed, 1 insertion, 0 deletions
diff --git a/stanza/models/constituency/trainer.py b/stanza/models/constituency/trainer.py
index 1bea94a9..5e2eb8ff 100644
--- a/stanza/models/constituency/trainer.py
+++ b/stanza/models/constituency/trainer.py
@@ -137,6 +137,7 @@ class Trainer:
if load_optimizer:
# need to match the optimizer we build with the one that was used at training time
build_simple_adadelta = checkpoint['args']['multistage'] and epochs_trained < checkpoint['args']['epochs'] // 2
+ logger.debug("Model loaded was built with multistage %s epochs_trained %d out of total epochs %d Building initial Adadelta optimizer: %s", checkpoint['args']['multistage'], epochs_trained, checkpoint['args']['epochs'], build_simple_adadelta)
optimizer = build_optimizer(saved_args, model, build_simple_adadelta)
if checkpoint.get('optimizer_state_dict', None) is not None: