mct-nightly 2.2.0.20250113.527__py3-none-any.whl → 2.2.0.20250113.134913__py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- {mct_nightly-2.2.0.20250113.527.dist-info → mct_nightly-2.2.0.20250113.134913.dist-info}/METADATA +1 -1
- {mct_nightly-2.2.0.20250113.527.dist-info → mct_nightly-2.2.0.20250113.134913.dist-info}/RECORD +7 -7
- model_compression_toolkit/__init__.py +1 -1
- model_compression_toolkit/core/keras/data_util.py +24 -5
- {mct_nightly-2.2.0.20250113.527.dist-info → mct_nightly-2.2.0.20250113.134913.dist-info}/LICENSE.md +0 -0
- {mct_nightly-2.2.0.20250113.527.dist-info → mct_nightly-2.2.0.20250113.134913.dist-info}/WHEEL +0 -0
- {mct_nightly-2.2.0.20250113.527.dist-info → mct_nightly-2.2.0.20250113.134913.dist-info}/top_level.txt +0 -0
{mct_nightly-2.2.0.20250113.527.dist-info → mct_nightly-2.2.0.20250113.134913.dist-info}/METADATA
RENAMED
@@ -1,6 +1,6 @@
 Metadata-Version: 2.2
 Name: mct-nightly
-Version: 2.2.0.20250113.527
+Version: 2.2.0.20250113.134913
 Summary: A Model Compression Toolkit for neural networks
 Classifier: Programming Language :: Python :: 3
 Classifier: License :: OSI Approved :: Apache Software License
{mct_nightly-2.2.0.20250113.527.dist-info → mct_nightly-2.2.0.20250113.134913.dist-info}/RECORD
RENAMED
@@ -1,4 +1,4 @@
-model_compression_toolkit/__init__.py,sha256=
+model_compression_toolkit/__init__.py,sha256=lAJGvoR9rqdv9OkbbBQa5WQeqHFUM7X0gMUeE_3A4Vc,1573
 model_compression_toolkit/constants.py,sha256=i_R6uXBfO1ph_X6DNJych2x59SUojfJbn7dNjs_mZnc,3846
 model_compression_toolkit/defaultdict.py,sha256=LSc-sbZYXENMCw3U9F4GiXuv67IKpdn0Qm7Fr11jy-4,2277
 model_compression_toolkit/logger.py,sha256=3DByV41XHRR3kLTJNbpaMmikL8icd9e1N-nkQAY9oDk,4567
@@ -155,7 +155,7 @@ model_compression_toolkit/core/common/visualization/tensorboard_writer.py,sha256
 model_compression_toolkit/core/keras/__init__.py,sha256=mjbqLD-KcG3eNeCYpu1GBS7VclGVOQ63x2p6mAAuba4,698
 model_compression_toolkit/core/keras/constants.py,sha256=dh4elQWt6Q6NYRht5k5RiiOcnLAq1v0MMBCJqMJzzFk,3225
 model_compression_toolkit/core/keras/custom_layer_validation.py,sha256=f-b14wuiIgitBe7d0MmofYhDCTO3IhwJgwrh-Hq_t_U,1192
-model_compression_toolkit/core/keras/data_util.py,sha256
+model_compression_toolkit/core/keras/data_util.py,sha256=jm54o-SlI1DJ-sEvRuX9OyLN68tEt0VxcqrdIjR98Ag,8366
 model_compression_toolkit/core/keras/default_framework_info.py,sha256=PYcER89eEXjKtR0T7-2Y4f7cckqoD5OQbpHePoRkMec,5030
 model_compression_toolkit/core/keras/keras_implementation.py,sha256=HwbIR7x4t-TBNbWHVvVNFk8z-KFt6zM0LWAUXQuNZrk,31753
 model_compression_toolkit/core/keras/keras_model_validation.py,sha256=1wNV2clFdC9BzIELRLSO2uKf0xqjLqlkTJudwtCeaJk,1722
@@ -525,8 +525,8 @@ model_compression_toolkit/xquant/pytorch/model_analyzer.py,sha256=b93o800yVB3Z-i
 model_compression_toolkit/xquant/pytorch/pytorch_report_utils.py,sha256=3jNiV5Z4BVw9cEWuLKNOlLuLdr0EMuKg6eYnSiAq3LU,3952
 model_compression_toolkit/xquant/pytorch/similarity_functions.py,sha256=CERxq5K8rqaiE-DlwhZBTUd9x69dtYJlkHOPLB54vm8,2354
 model_compression_toolkit/xquant/pytorch/tensorboard_utils.py,sha256=mkoEktLFFHtEKzzFRn_jCnxjhJolK12TZ5AQeDHzUO8,9767
-mct_nightly-2.2.0.20250113.
-mct_nightly-2.2.0.20250113.
-mct_nightly-2.2.0.20250113.
-mct_nightly-2.2.0.20250113.
-mct_nightly-2.2.0.20250113.
+mct_nightly-2.2.0.20250113.134913.dist-info/LICENSE.md,sha256=aYSSIb-5AFPeITTvXm1UAoe0uYBiMmSS8flvXaaFUks,10174
+mct_nightly-2.2.0.20250113.134913.dist-info/METADATA,sha256=MRbQVy655Qf1QPZeBJj9K8Hc4p0GzYq8tAfuIvtwNAk,26604
+mct_nightly-2.2.0.20250113.134913.dist-info/WHEEL,sha256=In9FTNxeP60KnTkGw7wk6mJPYd_dQSjEZmXdBdMCI-8,91
+mct_nightly-2.2.0.20250113.134913.dist-info/top_level.txt,sha256=gsYA8juk0Z-ZmQRKULkb3JLGdOdz8jW_cMRjisn9ga4,26
+mct_nightly-2.2.0.20250113.134913.dist-info/RECORD,,
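Each RECORD entry above follows the standard wheel RECORD format, `path,sha256=<digest>,<size>`: the digest is the urlsafe-base64-encoded SHA-256 of the file with the `=` padding stripped, and the size is the file length in bytes. A small sketch of how one of these entries could be recomputed locally (the helper and the example path are illustrative, not part of the package):

```python
import base64
import hashlib


def record_entry(path: str) -> str:
    """Rebuild a wheel RECORD line for a file (illustrative helper, not MCT code)."""
    with open(path, "rb") as f:
        data = f.read()
    # RECORD stores the urlsafe-base64 SHA-256 digest without '=' padding, plus the size in bytes.
    digest = base64.urlsafe_b64encode(hashlib.sha256(data).digest()).rstrip(b"=").decode()
    return f"{path},sha256={digest},{len(data)}"


# Example: recompute the entry for one of the files listed above and compare it to RECORD.
print(record_entry("model_compression_toolkit/constants.py"))
```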
model_compression_toolkit/__init__.py
CHANGED
@@ -27,4 +27,4 @@ from model_compression_toolkit import data_generation
 from model_compression_toolkit import pruning
 from model_compression_toolkit.trainable_infrastructure.keras.load_model import keras_load_quantized_model
 
-__version__ = "2.2.0.20250113.527"
+__version__ = "2.2.0.20250113.134913"
model_compression_toolkit/core/keras/data_util.py
CHANGED
@@ -134,11 +134,30 @@ class FixedSampleInfoDataset:
         self.samples = samples
         self.sample_info = sample_info
 
-        #
-
-
-
-        )
+        # Get the number of tensors in each tuple (corresponds to the number of input layers the model has)
+        num_tensors = len(samples[0])
+
+        # Create separate lists: one for each input layer and separate the tuples into lists
+        sample_tensor_lists = [[] for _ in range(num_tensors)]
+        for s in samples:
+            for i, data_tensor in enumerate(s):
+                sample_tensor_lists[i].append(data_tensor)
+
+        # In order to deal with models that have different input shapes for different layers, we need first to
+        # organize the data in a dictionary in order to use tf.data.Dataset.from_tensor_slices
+        samples_dict = {f'tensor_{i}': tensors for i, tensors in enumerate(sample_tensor_lists)}
+        info_dict = {f'info_{i}': tf.convert_to_tensor(info) for i, info in enumerate(self.sample_info)}
+        combined_dict = {**samples_dict, **info_dict}
+
+        tf_dataset = tf.data.Dataset.from_tensor_slices(combined_dict)
+
+        # Map the dataset to return tuples instead of dict
+        def reorganize_ds_outputs(ds_output):
+            tensors = tuple(ds_output[f'tensor_{i}'] for i in range(num_tensors))
+            infos = tuple(ds_output[f'info_{i}'] for i in range(len(sample_info)))
+            return tensors, infos
+
+        self.tf_dataset = tf_dataset.map(reorganize_ds_outputs)
 
     def __len__(self):
         return len(self.samples)
{mct_nightly-2.2.0.20250113.527.dist-info → mct_nightly-2.2.0.20250113.134913.dist-info}/LICENSE.md
RENAMED
File without changes
{mct_nightly-2.2.0.20250113.527.dist-info → mct_nightly-2.2.0.20250113.134913.dist-info}/WHEEL
RENAMED
File without changes
{mct_nightly-2.2.0.20250113.527.dist-info → mct_nightly-2.2.0.20250113.134913.dist-info}/top_level.txt
RENAMED
File without changes