mct-nightly 2.2.0.20250107.15510__py3-none-any.whl → 2.2.0.20250107.134735__py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- {mct_nightly-2.2.0.20250107.15510.dist-info → mct_nightly-2.2.0.20250107.134735.dist-info}/METADATA +1 -1
- {mct_nightly-2.2.0.20250107.15510.dist-info → mct_nightly-2.2.0.20250107.134735.dist-info}/RECORD +7 -7
- model_compression_toolkit/__init__.py +1 -1
- model_compression_toolkit/gptq/keras/gptq_loss.py +4 -3
- {mct_nightly-2.2.0.20250107.15510.dist-info → mct_nightly-2.2.0.20250107.134735.dist-info}/LICENSE.md +0 -0
- {mct_nightly-2.2.0.20250107.15510.dist-info → mct_nightly-2.2.0.20250107.134735.dist-info}/WHEEL +0 -0
- {mct_nightly-2.2.0.20250107.15510.dist-info → mct_nightly-2.2.0.20250107.134735.dist-info}/top_level.txt +0 -0
{mct_nightly-2.2.0.20250107.15510.dist-info → mct_nightly-2.2.0.20250107.134735.dist-info}/METADATA
RENAMED
@@ -1,6 +1,6 @@
 Metadata-Version: 2.1
 Name: mct-nightly
-Version: 2.2.0.20250107.15510
+Version: 2.2.0.20250107.134735
 Summary: A Model Compression Toolkit for neural networks
 Classifier: Programming Language :: Python :: 3
 Classifier: License :: OSI Approved :: Apache Software License
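The Name and Version fields above are what packaging tools report at runtime. A minimal sketch of reading them from an installed environment with the standard library (assuming the newer wheel is installed):

from importlib import metadata

# Query the installed distribution's METADATA (the file shown in this diff).
dist = metadata.distribution("mct-nightly")
print(dist.metadata["Name"])     # mct-nightly
print(dist.version)              # expected: 2.2.0.20250107.134735 for the newer wheel
print(dist.metadata["Summary"])  # A Model Compression Toolkit for neural networks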
{mct_nightly-2.2.0.20250107.15510.dist-info → mct_nightly-2.2.0.20250107.134735.dist-info}/RECORD
RENAMED
@@ -1,4 +1,4 @@
-model_compression_toolkit/__init__.py,sha256=
+model_compression_toolkit/__init__.py,sha256=E1-liXMcmhObMC_N-_-rlLX_wgeNI7RjieTYhYT0TiI,1573
 model_compression_toolkit/constants.py,sha256=i_R6uXBfO1ph_X6DNJych2x59SUojfJbn7dNjs_mZnc,3846
 model_compression_toolkit/defaultdict.py,sha256=LSc-sbZYXENMCw3U9F4GiXuv67IKpdn0Qm7Fr11jy-4,2277
 model_compression_toolkit/logger.py,sha256=3DByV41XHRR3kLTJNbpaMmikL8icd9e1N-nkQAY9oDk,4567
@@ -361,7 +361,7 @@ model_compression_toolkit/gptq/common/gradual_activation_quantization.py,sha256=
 model_compression_toolkit/gptq/common/regularization_factory.py,sha256=hyunpXepVeHyoAFJw6zNLK-3ZHBmiut3lmNisJN_L3E,2514
 model_compression_toolkit/gptq/keras/__init__.py,sha256=cco4TmeIDIh32nj9ZZXVkws4dd9F2UDrmjKzTN8G0V0,697
 model_compression_toolkit/gptq/keras/gptq_keras_implementation.py,sha256=axBwnCSjq5xk-xGymOwSOqjp39It-CVtGcCTRTf0E_4,1248
-model_compression_toolkit/gptq/keras/gptq_loss.py,sha256=
+model_compression_toolkit/gptq/keras/gptq_loss.py,sha256=k5s7D4CTqbYCHgydyevw1c2p3S2TZCECHNvK79QGE2U,7797
 model_compression_toolkit/gptq/keras/gptq_training.py,sha256=0WGiP7Gs4xX3FBs1PNaZ7w3hWRigwQXqYjBrs_-x32o,23241
 model_compression_toolkit/gptq/keras/graph_info.py,sha256=zwoeHX67nJJ5-zYLjzvMXS9TLsy9BsizARbZiDVjVSA,4473
 model_compression_toolkit/gptq/keras/quantization_facade.py,sha256=meRKqpzZe2Irf21L_rN_mkr5dqPTJHzfSFBeqv4Csp4,18536
@@ -525,8 +525,8 @@ model_compression_toolkit/xquant/pytorch/model_analyzer.py,sha256=b93o800yVB3Z-i
 model_compression_toolkit/xquant/pytorch/pytorch_report_utils.py,sha256=3jNiV5Z4BVw9cEWuLKNOlLuLdr0EMuKg6eYnSiAq3LU,3952
 model_compression_toolkit/xquant/pytorch/similarity_functions.py,sha256=CERxq5K8rqaiE-DlwhZBTUd9x69dtYJlkHOPLB54vm8,2354
 model_compression_toolkit/xquant/pytorch/tensorboard_utils.py,sha256=mkoEktLFFHtEKzzFRn_jCnxjhJolK12TZ5AQeDHzUO8,9767
-mct_nightly-2.2.0.20250107.
-mct_nightly-2.2.0.20250107.
-mct_nightly-2.2.0.20250107.
-mct_nightly-2.2.0.20250107.
-mct_nightly-2.2.0.20250107.
+mct_nightly-2.2.0.20250107.134735.dist-info/LICENSE.md,sha256=aYSSIb-5AFPeITTvXm1UAoe0uYBiMmSS8flvXaaFUks,10174
+mct_nightly-2.2.0.20250107.134735.dist-info/METADATA,sha256=C7xwYNXSPtYZ-ZmjkR7YOorNYODEUzOVUXxK6Z4_UXA,26464
+mct_nightly-2.2.0.20250107.134735.dist-info/WHEEL,sha256=A3WOREP4zgxI0fKrHUG8DC8013e3dK3n7a6HDbcEIwE,91
+mct_nightly-2.2.0.20250107.134735.dist-info/top_level.txt,sha256=gsYA8juk0Z-ZmQRKULkb3JLGdOdz8jW_cMRjisn9ga4,26
+mct_nightly-2.2.0.20250107.134735.dist-info/RECORD,,
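Each RECORD entry above follows the wheel convention path,sha256=<urlsafe-base64-digest>,<size>, which is why touching two source files also rewrites their hash and size here. A minimal sketch of verifying one entry against an installed copy (the site-packages path is a placeholder, not part of this package):

import base64
import csv
import hashlib
from pathlib import Path

# Placeholder install location; point this at your environment's site-packages.
SITE_PACKAGES = Path("venv/lib/python3.10/site-packages")

def verify_record_entry(line: str) -> bool:
    """Check one 'path,sha256=<digest>,<size>' RECORD line against the file on disk."""
    path, hash_field, size = next(csv.reader([line]))
    if not hash_field:  # the RECORD file lists itself with empty hash/size fields
        return True
    algo, _, expected = hash_field.partition("=")
    data = (SITE_PACKAGES / path).read_bytes()
    digest = base64.urlsafe_b64encode(hashlib.new(algo, data).digest())
    actual = digest.rstrip(b"=").decode()  # RECORD hashes drop base64 '=' padding
    return actual == expected and len(data) == int(size)

# Example with an unchanged entry from the listing above:
print(verify_record_entry(
    "model_compression_toolkit/constants.py,sha256=i_R6uXBfO1ph_X6DNJych2x59SUojfJbn7dNjs_mZnc,3846"))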
model_compression_toolkit/__init__.py
CHANGED
@@ -27,4 +27,4 @@ from model_compression_toolkit import data_generation
 from model_compression_toolkit import pruning
 from model_compression_toolkit.trainable_infrastructure.keras.load_model import keras_load_quantized_model
 
-__version__ = "2.2.0.20250107.15510"
+__version__ = "2.2.0.20250107.134735"
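Since __version__ is the only change in this module, a quick runtime check distinguishes the two nightlies (assuming the package imports as shown in the hunk above):

import model_compression_toolkit as mct

# Nightly version strings encode <release>.<YYYYMMDD>.<build-time>.
print(mct.__version__)  # e.g. "2.2.0.20250107.134735" for the newer wheel in this diff
release, date, build = mct.__version__.rsplit(".", 2)
print(release, date, build)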
model_compression_toolkit/gptq/keras/gptq_loss.py
CHANGED
@@ -144,8 +144,9 @@ def activation_mse(flp_act_list,
 loss_values_list.append(point_loss)
 bias_loss_list.append(bias_loss)
 if weights_for_average_loss is not None:
-
-
+    print(f"weights_for_average_loss.shape: {weights_for_average_loss.shape}")
+    print(f"tf.stack(loss_values_list).shape: {tf.stack(loss_values_list).shape}")
+    return tf.reduce_sum(weights_for_average_loss * tf.stack(loss_values_list)), tf.reduce_mean(tf.stack(bias_loss_list))
 else:
     return tf.reduce_mean(tf.stack(loss_values_list)), tf.reduce_mean(tf.stack(bias_loss_list))
 
@@ -187,4 +188,4 @@ class GPTQMultipleTensorsLoss:
 weights_for_average_loss=weights_for_average_loss,
 norm_loss=self.norm_loss)
 
-return loss_act
+return loss_act
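The added lines in the first hunk print tensor shapes and return a weighted sum of the stacked per-layer activation losses (the removed lines are not shown in this diff view). A minimal, self-contained sketch of that aggregation with made-up values:

import tensorflow as tf

# Toy stand-ins: one scalar MSE per compared activation tensor, plus per-layer weights.
loss_values_list = [tf.constant(0.12), tf.constant(0.50), tf.constant(0.03)]
bias_loss_list = [tf.constant(0.01), tf.constant(0.02), tf.constant(0.00)]
weights_for_average_loss = tf.constant([0.2, 0.5, 0.3])

stacked = tf.stack(loss_values_list)                               # shape (3,)
weighted_loss = tf.reduce_sum(weights_for_average_loss * stacked)  # weighted branch shown above
plain_loss = tf.reduce_mean(stacked)                               # unweighted (else) branch
bias_loss = tf.reduce_mean(tf.stack(bias_loss_list))

print(weighted_loss.numpy(), plain_loss.numpy(), bias_loss.numpy())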
{mct_nightly-2.2.0.20250107.15510.dist-info → mct_nightly-2.2.0.20250107.134735.dist-info}/LICENSE.md
RENAMED
File without changes
{mct_nightly-2.2.0.20250107.15510.dist-info → mct_nightly-2.2.0.20250107.134735.dist-info}/WHEEL
RENAMED
File without changes
{mct_nightly-2.2.0.20250107.15510.dist-info → mct_nightly-2.2.0.20250107.134735.dist-info}/top_level.txt
RENAMED
File without changes