diff --git a/TrainingExtensions/torch/src/python/aimet_torch/experimental/v2/quantization/modules/quantize.py b/TrainingExtensions/torch/src/python/aimet_torch/experimental/v2/quantization/modules/quantize.py
index 9ee885a710d..8704457d4b1 100644
--- a/TrainingExtensions/torch/src/python/aimet_torch/experimental/v2/quantization/modules/quantize.py
+++ b/TrainingExtensions/torch/src/python/aimet_torch/experimental/v2/quantization/modules/quantize.py
@@ -185,6 +185,9 @@ def forward_wrapper(input):
             finally:
                 self.encoding_analyzer.reset_stats()
 
+    def extra_repr(self) -> str:
+        return f'shape={self.shape}, bitwidth={self.bitwidth}, symmetric={self.symmetric}'
+
 
 class Quantize(_QuantizerBase):
     """
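
For context on the change: extra_repr is the standard torch.nn.Module hook that Module.__repr__ calls to append per-module details, so overriding it here makes a quantizer's shape, bitwidth, and symmetry visible whenever the model (or the quantizer itself) is printed. Below is a minimal, self-contained sketch of the effect; ToyQuantizer and its constructor arguments are illustrative stand-ins, not the actual _QuantizerBase implementation.

import torch

class ToyQuantizer(torch.nn.Module):
    """Illustrative stand-in; attribute and format names mirror the diff."""
    def __init__(self, shape=(1,), bitwidth=8, symmetric=True):
        super().__init__()
        self.shape = shape
        self.bitwidth = bitwidth
        self.symmetric = symmetric

    def extra_repr(self) -> str:
        # Same format string as the method added in the diff
        return f'shape={self.shape}, bitwidth={self.bitwidth}, symmetric={self.symmetric}'

print(ToyQuantizer())
# ToyQuantizer(shape=(1,), bitwidth=8, symmetric=True)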