diff options
author | Jan Buethe <jbuethe@amazon.de> | 2024-01-09 17:22:13 +0300 |
---|---|---|
committer | Jan Buethe <jbuethe@amazon.de> | 2024-01-09 17:22:13 +0300 |
commit | f2c62e5acfcfe45da6587b532bb4416927e2e83f (patch) | |
tree | 268ac3fc07054e8d708852727f2c058fd2e2c76e | |
parent | 999ddbe09ca1e521cc8d71005c1ecd8513e47611 (diff) |
switched to absolute scale in softquant.py
-rw-r--r-- | dnn/torch/dnntools/dnntools/quantization/softquant.py | 2 |
1 file changed, 1 insertion, 1 deletion
diff --git a/dnn/torch/dnntools/dnntools/quantization/softquant.py b/dnn/torch/dnntools/dnntools/quantization/softquant.py
index ca82ceca..877c6450 100644
--- a/dnn/torch/dnntools/dnntools/quantization/softquant.py
+++ b/dnn/torch/dnntools/dnntools/quantization/softquant.py
@@ -65,7 +65,7 @@ class SoftQuant:
                 self.quantization_noise[name] = q_scaled_noise(module, weight)
             else:
                 self.quantization_noise[name] = \
-                    self.scale * weight.abs().max() * (torch.rand_like(weight) - 0.5)
+                    self.scale * (torch.rand_like(weight) - 0.5)
             with torch.no_grad():
                 weight.data[:] = weight + self.quantization_noise[name]
         else: