Welcome to the mirror list, hosted at ThFree Co, Russian Federation.

github.com/stanfordnlp/stanza.git - Unnamed repository; edit this file 'description' to name the repository.
summaryrefslogtreecommitdiff
diff options
context:
space:
mode:
authorJohn Bauer <horatio@gmail.com>2022-09-10 09:45:09 +0300
committerJohn Bauer <horatio@gmail.com>2022-09-10 09:45:09 +0300
commitac0b6ff3bf86f8a200a6020326c37d51af322821 (patch)
treec4d8add247bc5f37bc5dafe3349ffdeaa1f1516a
parent4dca146a8c34393c4b3887c76b3d4261e39f2029 (diff)
Fix typo
-rw-r--r--stanza/models/constituency/utils.py2
1 file changed, 1 insertion, 1 deletion
diff --git a/stanza/models/constituency/utils.py b/stanza/models/constituency/utils.py
index ca642551..5ae19f01 100644
--- a/stanza/models/constituency/utils.py
+++ b/stanza/models/constituency/utils.py
@@ -165,7 +165,7 @@ def build_optimizer(args, model, build_simple_adadelta=False):
import madgrad
except ModuleNotFoundError as e:
raise ModuleNotFoundError("Could not create madgrad optimizer. Perhaps the madgrad package is not installed") from e
- logger.info("Building AdaBelief with lr=%f, weight_decay=%f, momentum=%f", learning_rate, weight_decay, momentum)
+ logger.info("Building madgrad with lr=%f, weight_decay=%f, momentum=%f", learning_rate, weight_decay, momentum)
optimizer = madgrad.MADGRAD(parameters, lr=learning_rate, weight_decay=weight_decay, momentum=momentum)
else:
raise ValueError("Unknown optimizer: %s" % optim)