diff options
author | Jan Buethe <jbuethe@amazon.de> | 2023-09-29 15:34:11 +0300 |
---|---|---|
committer | Jan Buethe <jbuethe@amazon.de> | 2023-09-29 15:34:11 +0300 |
commit | 49014454907d515e3c8ca8b06add78ad74c417d1 (patch) | |
tree | 78d4c836495a7442780f4f53cf38d7ae79aac13f | |
parent | c5c214df1b214375fce964949598f5b4405c655e (diff) |
fixed typo in error message
-rw-r--r-- | dnn/torch/weight-exchange/wexchange/torch/torch.py | 4 |
1 files changed, 2 insertions, 2 deletions
diff --git a/dnn/torch/weight-exchange/wexchange/torch/torch.py b/dnn/torch/weight-exchange/wexchange/torch/torch.py
index 35723c22..6befe9f4 100644
--- a/dnn/torch/weight-exchange/wexchange/torch/torch.py
+++ b/dnn/torch/weight-exchange/wexchange/torch/torch.py
@@ -194,7 +194,7 @@ def dump_torch_weights(where, module, name=None, verbose=False, **kwargs):
     elif isinstance(module, torch.nn.Embedding):
         return dump_torch_embedding_weights(where, module)
     else:
-        raise ValueError(f'dump_tf_weights: layer of type {type(module)} not supported')
+        raise ValueError(f'dump_torch_weights: layer of type {type(module)} not supported')
 
 def load_torch_weights(where, module):
     """ generic function for loading weights of some torch.nn.Module """
@@ -209,4 +209,4 @@ def load_torch_weights(where, module):
     elif isinstance(module, torch.nn.Embedding):
         load_torch_embedding_weights(where, module)
     else:
-        raise ValueError(f'dump_tf_weights: layer of type {type(module)} not supported')
\ No newline at end of file
+        raise ValueError(f'dump_torch_weights: layer of type {type(module)} not supported')
\ No newline at end of file