From 6a90ad4bacf923c88438da53219c48355b847ed3 Mon Sep 17 00:00:00 2001
From: John Bauer
Date: Wed, 14 Sep 2022 12:12:54 -0700
Subject: Hide the imports of SiLU and Mish from older versions of torch. #1120

---
 stanza/models/constituency/utils.py | 11 +++++++++--
 1 file changed, 9 insertions(+), 2 deletions(-)

diff --git a/stanza/models/constituency/utils.py b/stanza/models/constituency/utils.py
index 5ae19f01..a11581dc 100644
--- a/stanza/models/constituency/utils.py
+++ b/stanza/models/constituency/utils.py
@@ -107,10 +107,17 @@ NONLINEARITY = {
     'relu': nn.ReLU,
     'gelu': nn.GELU,
     'leaky_relu': nn.LeakyReLU,
-    'silu': nn.SiLU,
-    'mish': nn.Mish,
 }
 
+# separating these out allows for backwards compatibility with earlier versions of pytorch
+# NOTE torch compatibility: if we ever *release* models with these
+# activation functions, we will need to break that compatibility
+if hasattr(nn, 'SiLU'):
+    NONLINEARITY['silu'] = nn.SiLU
+
+if hasattr(nn, 'Mish'):
+    NONLINEARITY['mish'] = nn.Mish
+
 def build_nonlinearity(nonlinearity):
     """
     Look up "nonlinearity" in a map from function name to function, build the appropriate layer.
-- 
cgit v1.2.3
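
For context, a minimal self-contained sketch of the version-guard pattern the
patch introduces. nn.SiLU first shipped in PyTorch 1.7 and nn.Mish in PyTorch
1.9, so the hasattr checks let the module import cleanly on older torch
installs instead of raising AttributeError. The body of build_nonlinearity is
not shown in the hunk, so its lookup-and-instantiate behavior below is an
assumption for illustration, not the released implementation.

    import torch.nn as nn

    # Registry of activations available on every supported torch version.
    NONLINEARITY = {
        'relu': nn.ReLU,
        'gelu': nn.GELU,
        'leaky_relu': nn.LeakyReLU,
    }

    # Guarded registrations: older torch simply skips these entries
    # (nn.SiLU appeared in torch 1.7, nn.Mish in torch 1.9).
    if hasattr(nn, 'SiLU'):
        NONLINEARITY['silu'] = nn.SiLU
    if hasattr(nn, 'Mish'):
        NONLINEARITY['mish'] = nn.Mish

    def build_nonlinearity(nonlinearity):
        """Instantiate the layer registered under this name (assumed behavior)."""
        if nonlinearity not in NONLINEARITY:
            raise ValueError("Unknown nonlinearity: %s" % nonlinearity)
        return NONLINEARITY[nonlinearity]()

    if __name__ == '__main__':
        print(build_nonlinearity('relu'))        # ReLU()
        print('silu available:', 'silu' in NONLINEARITY)

The design choice is to fail at lookup time ("unknown nonlinearity") rather
than at import time, which keeps the rest of the constituency parser usable on
older torch as long as the missing activations are never requested.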