mct-quantizers-nightly 1.6.0.20250625.post1523-py3-none-any.whl → 1.6.0.20250626.post102857-py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
@@ -121,7 +121,7 @@ if FOUND_TORCH:
 
  # Compute the step size of quantized values.
  self.scales = (self.max_range - self.min_range) / (2 ** num_bits - 1)
- self.zero_points = -(self.min_range / self.scales).int() # zp has to be positive, and a <=0, so we multiply by -1
+ self.zero_points = -(self.min_range / self.scales).round().int() # zp has to be positive, and a <=0, so we multiply by -1
 
  self.scales = self.scales.to(get_working_device())
  self.zero_points = self.zero_points.to(get_working_device())
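The only functional change in this release is the added .round() above: -(min_range / scales).int() truncates toward zero, which can leave the zero point up to a full quantization step away from the nearest grid point, while -(min_range / scales).round().int() snaps it to the nearest integer. Judging by the RECORD entries further down, the touched file is weights_uniform_inferable_quantizer.py. A minimal, self-contained sketch of the computation (illustrative only, not the package's class; variable names merely mirror the diff):

import torch

def uniform_qparams(min_range: torch.Tensor, max_range: torch.Tensor, num_bits: int):
    # Illustrative sketch of affine (uniform) quantization parameters;
    # not the mct_quantizers implementation.
    # Step size that spreads the range over 2**num_bits levels:
    scales = (max_range - min_range) / (2 ** num_bits - 1)
    # min_range <= 0 (the grid must contain zero), so min_range / scales <= 0
    # and negating gives a non-negative zero point. Rounding to the nearest
    # integer, rather than truncating with .int() alone, keeps the zero point
    # on the closest grid point.
    zero_points = -(min_range / scales).round().int()
    return scales, zero_points

# A case where truncation and rounding disagree:
scales, zp = uniform_qparams(torch.tensor([-0.9]), torch.tensor([1.0]), num_bits=8)
# 0.9 / scale is roughly 120.79: truncation would give 120, rounding gives 121.
print(scales.item(), zp.item())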
@@ -270,7 +270,7 @@ if FOUND_ONNXRUNTIME_EXTENSIONS:
  max_range=max_range,
  channel_axis=channel_axis)
 
- # adjusts the quantization rage so the quantization grid include zero.
+ # adjusts the quantization range so the quantization grid include zero.
  a, b = adjust_range_to_include_zero(min_range, max_range, num_bits)
 
  # Compute the step size of quantized values.
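The second hunk only fixes a typo ("rage" → "range") in the comment above adjust_range_to_include_zero, which nudges the min/max range so that real zero lands exactly on a grid point before the step size is computed. As a rough illustration of that idea (a sketch under assumed behavior, not the library's adjust_range_to_include_zero; the function below is hypothetical):

import numpy as np

def adjust_range_to_include_zero_sketch(min_range: float, max_range: float, num_bits: int):
    # Hypothetical illustration of range nudging; not mct_quantizers' code.
    # Shift the range so that 0.0 falls exactly on one of the 2**num_bits grid
    # points while keeping the span, and therefore the step size, unchanged.
    scale = (max_range - min_range) / (2 ** num_bits - 1)
    zero_index = np.round(-min_range / scale)      # nearest grid index for 0.0
    shift = zero_index * scale + min_range         # how far 0.0 misses that index
    return min_range - shift, max_range - shift

a, b = adjust_range_to_include_zero_sketch(-0.93, 1.07, num_bits=8)
# Same width as before, and 0.0 now sits on grid index 119 (up to float error).
print(a, b, b - a)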
@@ -1,6 +1,6 @@
  Metadata-Version: 2.1
  Name: mct-quantizers-nightly
- Version: 1.6.0.20250625.post1523
+ Version: 1.6.0.20250626.post102857
  Summary: Infrastructure for support neural networks compression
  Author-email: ssi-dnn-dev@sony.com
  Classifier: Programming Language :: Python :: 3
@@ -56,9 +56,9 @@ mct_quantizers/pytorch/quantizers/weights_inferable_quantizers/weights_lut_pot_i
  mct_quantizers/pytorch/quantizers/weights_inferable_quantizers/weights_lut_symmetric_inferable_quantizer.py,sha256=DB37GVDXD1dQBhf-q-kNYRliYjzRogZfgw3SSDFUX9U,13194
  mct_quantizers/pytorch/quantizers/weights_inferable_quantizers/weights_pot_inferable_quantizer.py,sha256=99QxwHAaH2gg5r2HTUEjcjsSI0Ro_2sJFI_leYujWsc,8696
  mct_quantizers/pytorch/quantizers/weights_inferable_quantizers/weights_symmetric_inferable_quantizer.py,sha256=QfACzRLX3Klx9VAWrmXCtDAHVkzVJfW5J9z_hNcEP6o,13550
- mct_quantizers/pytorch/quantizers/weights_inferable_quantizers/weights_uniform_inferable_quantizer.py,sha256=NqOTW0Y2Rc_8tHEYo95PIn9b2za-Zaq4ZGwV5uOjYH0,15740
- mct_quantizers_nightly-1.6.0.20250625.post1523.dist-info/LICENSE.md,sha256=aYSSIb-5AFPeITTvXm1UAoe0uYBiMmSS8flvXaaFUks,10174
- mct_quantizers_nightly-1.6.0.20250625.post1523.dist-info/METADATA,sha256=xK7GmkrBnOku_HxMJ0BdAWihSPllWB2lu_aJm6beWhs,4550
- mct_quantizers_nightly-1.6.0.20250625.post1523.dist-info/WHEEL,sha256=iAkIy5fosb7FzIOwONchHf19Qu7_1wCWyFNR5gu9nU0,91
- mct_quantizers_nightly-1.6.0.20250625.post1523.dist-info/top_level.txt,sha256=_gC79XUbGAA-2qS7kip1LosCSPyA-Td4Y7o2QUZ-uwE,15
- mct_quantizers_nightly-1.6.0.20250625.post1523.dist-info/RECORD,,
+ mct_quantizers/pytorch/quantizers/weights_inferable_quantizers/weights_uniform_inferable_quantizer.py,sha256=TWb9XQ_eswBAoTkTUefVceRH2w01Joezvg3O8n2dTy4,15749
+ mct_quantizers_nightly-1.6.0.20250626.post102857.dist-info/LICENSE.md,sha256=aYSSIb-5AFPeITTvXm1UAoe0uYBiMmSS8flvXaaFUks,10174
+ mct_quantizers_nightly-1.6.0.20250626.post102857.dist-info/METADATA,sha256=ejovUStdz_QlmBfKi-s06nR77JRhimyubOqgBkpjGAY,4552
+ mct_quantizers_nightly-1.6.0.20250626.post102857.dist-info/WHEEL,sha256=iAkIy5fosb7FzIOwONchHf19Qu7_1wCWyFNR5gu9nU0,91
+ mct_quantizers_nightly-1.6.0.20250626.post102857.dist-info/top_level.txt,sha256=_gC79XUbGAA-2qS7kip1LosCSPyA-Td4Y7o2QUZ-uwE,15
+ mct_quantizers_nightly-1.6.0.20250626.post102857.dist-info/RECORD,,