onnx-diagnostic 0.8.11__py3-none-any.whl → 0.9.0__py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- onnx_diagnostic/__init__.py +1 -1
- onnx_diagnostic/ci_models/data/Blanca_Lake_Hudak.jpg +0 -0
- onnx_diagnostic/ci_models/data/Ice_worm_glacier.jpg +0 -0
- onnx_diagnostic/ci_models/data/__init__.py +0 -0
- onnx_diagnostic/ci_models/export_phi4_mm.py +8 -3
- onnx_diagnostic/export/api.py +11 -0
- onnx_diagnostic/export/dynamic_shapes.py +1 -1
- onnx_diagnostic/helpers/cache_helper.py +96 -30
- onnx_diagnostic/helpers/helper.py +39 -0
- onnx_diagnostic/helpers/onnx_helper.py +1 -1
- onnx_diagnostic/helpers/ort_session.py +5 -1
- onnx_diagnostic/helpers/rt_helper.py +53 -9
- onnx_diagnostic/helpers/torch_helper.py +7 -2
- onnx_diagnostic/investigate/input_observer.py +793 -152
- onnx_diagnostic/torch_export_patches/onnx_export_errors.py +32 -14
- onnx_diagnostic/torch_export_patches/patches/_patch_transformers_masking_utils.py +107 -6
- onnx_diagnostic/torch_export_patches/patches/patch_torch.py +13 -3
- onnx_diagnostic/torch_export_patches/patches/patch_transformers.py +1 -0
- onnx_diagnostic/torch_export_patches/serialization/transformers_impl.py +28 -2
- {onnx_diagnostic-0.8.11.dist-info → onnx_diagnostic-0.9.0.dist-info}/METADATA +2 -2
- {onnx_diagnostic-0.8.11.dist-info → onnx_diagnostic-0.9.0.dist-info}/RECORD +24 -21
- {onnx_diagnostic-0.8.11.dist-info → onnx_diagnostic-0.9.0.dist-info}/WHEEL +1 -1
- {onnx_diagnostic-0.8.11.dist-info → onnx_diagnostic-0.9.0.dist-info}/licenses/LICENSE.txt +0 -0
- {onnx_diagnostic-0.8.11.dist-info → onnx_diagnostic-0.9.0.dist-info}/top_level.txt +0 -0
onnx_diagnostic/torch_export_patches/onnx_export_errors.py

@@ -562,6 +562,7 @@ def _patch_transformers(
                 "[torch_export_patches] patches "
                 "transformers.masking_utils.sdpa_mask_recent_torch"
             )
+
         f_transformers_sdpa_mask_recent_torch = masking_utils.sdpa_mask_recent_torch
         masking_utils.sdpa_mask_recent_torch = (
             patch_transformers_list.patched_sdpa_mask_recent_torch
@@ -574,7 +575,9 @@ def _patch_transformers(
         )
     if masking_utils.sdpa_mask == f_transformers_sdpa_mask_recent_torch:
         if verbose:
-            print(
+            print(
+                "[torch_export_patches] patches transformers.masking_utils.sdpa_mask (1)"
+            )
         f_transformers_sdpa_mask = masking_utils.sdpa_mask
         masking_utils.sdpa_mask = patch_transformers_list.patched_sdpa_mask_recent_torch
         if patch_details:
@@ -583,8 +586,23 @@ def _patch_transformers(
                 f_transformers_sdpa_mask,
                 patch_transformers_list.patched_sdpa_mask_recent_torch,
             )
-
-
+
+    if ( # vmap
+        masking_utils
+        and patch_transformers_list.patch_masking_utils
+        and hasattr(masking_utils, "sdpa_mask")
+        and f_transformers_sdpa_mask is None
+    ):
+        if verbose:
+            print("[torch_export_patches] patches transformers.masking_utils.sdpa_mask (3)")
+        f_transformers_sdpa_mask = masking_utils.sdpa_mask
+        masking_utils.sdpa_mask = patch_transformers_list.patched_sdpa_mask
+        if patch_details:
+            patch_details.append(
+                "transformers",
+                f_transformers_sdpa_mask,
+                patch_transformers_list.patched_sdpa_mask,
+            )

    if ( # eager_mask
        masking_utils
@@ -742,17 +760,17 @@ def _unpatch_transformers(
                 "transformers.masking_utils.sdpa_mask_recent_torch"
             )

-
-
-
-
-
-
-
-
-
-
-
+    if f_transformers_sdpa_mask is not None:
+        assert f_transformers_sdpa_mask.__name__ in (
+            "sdpa_mask",
+            "sdpa_mask_recent_torch",
+        ), (
+            f"corrupted function 'sdpa_mask', its name is "
+            f"{f_transformers_sdpa_mask.__name__!r}"
+        )
+        masking_utils.sdpa_mask = f_transformers_sdpa_mask
+        if verbose:
+            print("[torch_export_patches] restored transformers.masking_utils.sdpa_mask")

    if ( # eager_mask
        masking_utils
onnx_diagnostic/torch_export_patches/patches/_patch_transformers_masking_utils.py

@@ -36,6 +36,26 @@ if patch_masking_utils:
     _ignore_bidirectional_mask_sdpa = None
     bidirectional_mask_function = None

+    try:
+        from transformers.masking_utils import _non_vmap_expansion_sdpa
+    except ImportError:
+
+        def _non_vmap_expansion_sdpa(
+            batch_indices: torch.Tensor,
+            head_indices: torch.Tensor,
+            q_indices: torch.Tensor,
+            kv_indices: torch.Tensor,
+        ):
+            """
+            https://github.com/huggingface/optimum-onnx/blob/
+            c123e8f4fab61b54a8e0e31ce74462bcacca576e/optimum/exporters/onnx/model_patcher.py#L362-L365
+            """
+            batch_indices = batch_indices[:, None, None, None]
+            head_indices = head_indices[None, :, None, None]
+            q_indices = q_indices[None, None, :, None]
+            kv_indices = kv_indices[None, None, None, :]
+            return batch_indices, head_indices, q_indices, kv_indices
+
    def patched__vmap_for_bhqkv(mask_function: Callable, bh_indices: bool = True) -> Callable:
        """manual patch for function ``transformers.masking_utils._vmap_for_bhqkv``."""
        from ...helpers import string_type
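The fallback definition above reproduces, for transformers versions that lack it, the index expansion that lets a mask function be evaluated through plain broadcasting instead of torch.vmap. A minimal self-contained sketch of the idea (illustrative only, not code from the package; `causal` is a stand-in mask function):

import torch

def causal(batch_idx, head_idx, q_idx, kv_idx):
    # a query position may attend to key positions at or before it
    return kv_idx <= q_idx

# reshape the four index vectors exactly as _non_vmap_expansion_sdpa does
b = torch.arange(2)[:, None, None, None]   # batch_indices
h = torch.arange(1)[None, :, None, None]   # head_indices
q = torch.arange(4)[None, None, :, None]   # q_indices (cache positions)
k = torch.arange(4)[None, None, None, :]   # kv_indices

mask = causal(b, h, q, k).expand(2, -1, 4, 4)  # (batch, heads, q_len, kv_len)
assert torch.equal(mask[0, 0], torch.tril(torch.ones(4, 4, dtype=torch.bool)))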
@@ -146,12 +166,13 @@ if patch_masking_utils:
             padding_mask, q_length, kv_length, kv_offset, local_size
         ):
             return None
-        if
-
-
-
-
-
+        if allow_is_bidirectional_skip and _ignore_bidirectional_mask_sdpa:
+            # transformers<=5.0: 1 parameter, 3 for transformers>5.0
+            n_parameters = len(inspect.signature(_ignore_bidirectional_mask_sdpa).parameters)
+            if _ignore_bidirectional_mask_sdpa(
+                *[padding_mask, kv_length, kv_offset][:n_parameters]
+            ):
+                return None

        if mask_function is bidirectional_mask_function:
            if padding_mask is not None:
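The `inspect.signature` call above dispatches on the callee's arity, so one call site works whether transformers ships the one-parameter or the three-parameter form of `_ignore_bidirectional_mask_sdpa`. A hedged sketch of the pattern (the two stand-in functions are illustrative, not the real transformers helpers):

import inspect

def one_param(padding_mask):                           # older-style signature
    return padding_mask is None

def three_params(padding_mask, kv_length, kv_offset):  # newer-style signature
    return padding_mask is None and kv_offset == 0

for fn in (one_param, three_params):
    n_parameters = len(inspect.signature(fn).parameters)
    # pass only as many positional arguments as the callee declares
    print(fn.__name__, fn(*[None, 8, 0][:n_parameters]))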
@@ -180,3 +201,83 @@ if patch_masking_utils:
             batch_arange, head_arange, cache_position, kv_arange
         )
         return causal_mask
+
+    def patched_sdpa_mask(
+        batch_size: int,
+        cache_position: torch.Tensor,
+        kv_length: int,
+        kv_offset: int = 0,
+        mask_function: Callable = causal_mask_function,
+        attention_mask: torch.Tensor | None = None,
+        local_size: int | None = None,
+        allow_is_causal_skip: bool = True,
+        allow_is_bidirectional_skip: bool = False,
+        allow_torch_fix: bool = True,
+        use_vmap: bool = False,
+        **kwargs,
+    ) -> torch.Tensor | None:
+        """manual patch for function ``transformers.masking_utils.sdpa_mask``."""
+        q_length = cache_position.shape[0]
+
+        # Potentially pad the 2D mask
+        padding_mask = prepare_padding_mask(attention_mask, kv_length, kv_offset)
+
+        # Under specific conditions, we can avoid materializing the mask
+        # 1. Causal masks can rely on the `is_causal` argument
+        # 2. Bidirectional do not need any further processing (no bias)
+        if allow_is_causal_skip and _ignore_causal_mask_sdpa(
+            padding_mask, q_length, kv_length, kv_offset, local_size
+        ):
+            return None
+        if allow_is_bidirectional_skip and _ignore_bidirectional_mask_sdpa(
+            padding_mask, kv_length, local_size
+        ):
+            return None
+
+        # Potentially add the padding 2D mask
+        if padding_mask is not None:
+            mask_function = and_masks(mask_function, padding_mask_function(padding_mask))
+
+        batch_arange = torch.arange(batch_size, device=cache_position.device)
+        head_arange = torch.arange(1, device=cache_position.device)
+        # Similar to `kv_arange = torch.arange(start=kv_offset,
+        # end=kv_offset + kv_length, device=cache_position.device)`
+        # but without data-dependent slicing (i.e. torch.compile friendly)
+        kv_arange = torch.arange(kv_length, device=cache_position.device) + kv_offset
+
+        # Actual mask creation
+        # Option 1: Fast non-vmap mask creation (default)
+        # PATCHED
+        use_vmap = False
+        if not use_vmap:
+            # Apply mask function element-wise through broadcasting
+            attention_mask = mask_function(
+                *_non_vmap_expansion_sdpa(batch_arange, head_arange, cache_position, kv_arange)
+            )
+            # Expand the mask to match batch size
+            # and query length if they weren't used in the mask function
+            attention_mask = attention_mask.expand(batch_size, -1, q_length, kv_length)
+
+        # Option 2: Vmap mask creation (torch>=2.6 and custom patterns)
+        # elif _is_torch_greater_or_equal_than_2_6:
+        #     This creates the 4D mask easily.
+        #     Note that we need this context manager as vmap cannot handle slicing a tensor from
+        #     scalar tensor (it internally calls `.item()` which vmap does not allow,
+        #     but this context works around it
+        #     We don't need to add an offset to the mask_function either,
+        #     as we vmap directly the correct indices for k and kv indices
+        #     with TransformGetItemToIndex():
+        #         attention_mask = _vmap_expansion_sdpa(mask_function)(
+        #             batch_arange, head_arange, cache_position, kv_arange
+        #         )
+
+        # Option 3: Error out since it indicates that the user did something custom,
+        # which they shouldn't have (torch<2.6)
+        else:
+            raise ValueError(
+                "The vmap functionality for mask creation "
+                "is only supported from torch>=2.6. "
+                "Please update your torch version or use "
+                "`use_vmap=False` with index-based masks."
+            )
+        return attention_mask
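`patched_sdpa_mask` hard-codes `use_vmap = False` because torch.export cannot trace through torch.vmap's internal slicing; the broadcast path yields the same mask. A self-contained sketch comparing the two paths (illustrative assumptions: a toy `mask_fn` and a hand-rolled vmap nesting, not the package's helpers):

import torch

def mask_fn(b, h, q, kv):
    return kv <= q  # causal rule

batch, q_len, kv_len = 2, 3, 5
b, h = torch.arange(batch), torch.arange(1)
q, kv = torch.arange(q_len), torch.arange(kv_len)

# broadcast path, as in the patched function
broadcast = mask_fn(
    b[:, None, None, None], h[None, :, None, None],
    q[None, None, :, None], kv[None, None, None, :],
).expand(batch, -1, q_len, kv_len)

# vmap path, one nesting level per index dimension
vmapped = torch.vmap(
    torch.vmap(
        torch.vmap(torch.vmap(mask_fn, (None, None, None, 0)), (None, None, 0, None)),
        (None, 0, None, None),
    ),
    (0, None, None, None),
)(b, h, q, kv)

assert torch.equal(broadcast, vmapped)  # identical (2, 1, 3, 5) boolean masks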
onnx_diagnostic/torch_export_patches/patches/patch_torch.py

@@ -188,6 +188,11 @@ def patched__broadcast_shapes(*_shapes):
     return common_shape


+def value_ranges_is_positive(value_ranges: torch.utils._sympy.value_ranges.ValueRanges):
+    """Tells if an interval is equivalent to a positive or null integer."""
+    return value_ranges.lower == 0 and value_ranges.upper > 4623372036854775806
+
+
 class patched_ShapeEnv:

     def _check_frozen(
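`value_ranges_is_positive` recognizes a bound of the form [0, ~int64_max] as "any non-negative integer". A sketch of the predicate in isolation (the namedtuple is an assumed stand-in for torch.utils._sympy.value_ranges.ValueRanges, which carries .lower/.upper bounds):

from collections import namedtuple

ValueRanges = namedtuple("ValueRanges", "lower upper")

def value_ranges_is_positive(value_ranges):
    return value_ranges.lower == 0 and value_ranges.upper > 4623372036854775806

unconstrained = ValueRanges(0, 2**63 - 1)  # effectively "any non-negative int"
tight = ValueRanges(2, 16)                 # a genuinely bounded dimension
print(value_ranges_is_positive(unconstrained))  # True
print(value_ranges_is_positive(tight))          # False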
@@ -281,7 +286,10 @@ class patched_ShapeEnv:
         )
         self._update_var_to_range(b, b_bound, self.var_to_range_sloc[a])
         tgt_bound = self.bound_sympy(tgt)
-        assert tgt_bound.issubset(
+        assert (
+            value_ranges_is_positive(tgt_bound)
+            and value_ranges_is_positive(src_bound)
+        ) or tgt_bound.issubset(
            src_bound
        ), f"{tgt_bound=} not a subset of {src_bound=}"

@@ -524,8 +532,10 @@ class patched_ShapeEnv:

        transmute_into_runtime_assert = False

-        backed_var_to_val =
-            self
+        backed_var_to_val = (
+            self.backed_var_to_val
+            if hasattr(self, "backed_var_to_val")
+            else self.var_to_val
        )
        concrete_val = None
        if not (expr.free_symbols <= backed_var_to_val.keys()):
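The `hasattr` fallback keeps the patch working across torch versions: some expose `backed_var_to_val` on the ShapeEnv, others only `var_to_val`. The pattern in isolation (the two toy classes below are illustrative, not torch internals):

class OlderShapeEnv:
    var_to_val = {"s0": 4}

class NewerShapeEnv:
    backed_var_to_val = {"s0": 4}

for env in (OlderShapeEnv(), NewerShapeEnv()):
    backed_var_to_val = (
        env.backed_var_to_val if hasattr(env, "backed_var_to_val") else env.var_to_val
    )
    print(type(env).__name__, backed_var_to_val)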
onnx_diagnostic/torch_export_patches/serialization/transformers_impl.py

@@ -24,10 +24,12 @@ from ...helpers.cache_helper import make_dynamic_cache, make_static_cache, Cache
 from . import make_serialization_function_for_dataclass

 SUPPORTED_DATACLASSES: Set[type] = set()
+
 WRONG_REGISTRATIONS = {
     DynamicCache: "4.50",
     BaseModelOutput: None,
 }
+
 SHORTEN_LAYER_NAMES = {
     "DynamicLayer": "D",
     "DynamicSlidingWindowLayer": "W",
@@ -39,6 +41,20 @@ SHORTEN_LAYER_NAMES = {
     "X": "StaticSlidingWindowLayer",
 }

+KWARGS_LAYER_NAMES = {
+    "DynamicLayer": lambda layer: "",
+    "DynamicSlidingWindowLayer": lambda layer: str(layer.sliding_window),
+    "StaticLayer": lambda layer: "",
+    "StaticSlidingWindowLayer": lambda layer: str(layer.sliding_window),
+}
+
+PARSE_LAYER_NAMES = {
+    "DynamicLayer": lambda skw: {},
+    "DynamicSlidingWindowLayer": lambda skw: dict(sliding_window=int(skw[1:])),
+    "StaticLayer": lambda skw: {},
+    "StaticSlidingWindowLayer": lambda skw: dict(sliding_window=int(skw[1:])),
+}
+

 def _flatten_key_value_cache(cache: Cache) -> Tuple[List[Any], torch.utils._pytree.Context]:
     ca = CacheKeyValue(cache)
@@ -59,7 +75,11 @@ def _flatten_key_value_cache(cache: Cache) -> Tuple[List[Any], torch.utils._pytree.Context]:
     keys = []
     for i in range(len(ca.key_cache)):
         letter = SHORTEN_LAYER_NAMES[ca.cls_layers[i].__name__]
-
+        if hasattr(cache, "layers"):
+            kwargs = KWARGS_LAYER_NAMES[ca.cls_layers[i].__name__](cache.layers[i])
+        else:
+            kwargs = ""
+        keys.extend([f"key_{letter}{kwargs}_{i}", f"value_{letter}{kwargs}_{i}"])
     return flat, keys


@@ -86,10 +106,16 @@ def _unflatten_cache(
         res = make_cache(list(zip(values[::2], values[1::2])))
     else:
         cls_layer_names = [SHORTEN_LAYER_NAMES[name.split("_")[1][0]] for name in context][::2]
+        cls_kwargs = [
+            PARSE_LAYER_NAMES[SHORTEN_LAYER_NAMES[name.split("_")[1][0]]](name.split("_")[1])
+            for name in context
+        ][::2]
         cls_layers = [
             getattr(transformers.cache_utils, cls_name) for cls_name in cls_layer_names
         ]
-        res = make_cache(
+        res = make_cache(
+            list(zip(values[::2], values[1::2])), cls_layers=cls_layers, cls_kwargs=cls_kwargs
+        )

    assert output_type is None or isinstance(
        res, output_type
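Together with KWARGS_LAYER_NAMES and PARSE_LAYER_NAMES above, the flattened key strings now encode each layer's kwargs (e.g. a sliding-window size), so _unflatten_cache can rebuild each layer with the same configuration. A simplified round-trip sketch (plain dicts stand in for the real layer objects and lookup tables):

SHORTEN = {"DynamicLayer": "D", "DynamicSlidingWindowLayer": "W"}
EXPAND = {"D": "DynamicLayer", "W": "DynamicSlidingWindowLayer"}
KWARGS = {
    "DynamicLayer": lambda layer: "",
    "DynamicSlidingWindowLayer": lambda layer: str(layer["sliding_window"]),
}
PARSE = {
    "DynamicLayer": lambda skw: {},
    "DynamicSlidingWindowLayer": lambda skw: dict(sliding_window=int(skw[1:])),
}

layer = {"sliding_window": 4096}
name = "DynamicSlidingWindowLayer"
key = f"key_{SHORTEN[name]}{KWARGS[name](layer)}_0"  # -> "key_W4096_0"
skw = key.split("_")[1]                              # -> "W4096"
cls_name = EXPAND[skw[0]]                            # first letter selects the class
kwargs = PARSE[cls_name](skw)                        # -> {"sliding_window": 4096}
print(key, cls_name, kwargs)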
{onnx_diagnostic-0.8.11.dist-info → onnx_diagnostic-0.9.0.dist-info}/METADATA

@@ -1,6 +1,6 @@
 Metadata-Version: 2.4
 Name: onnx-diagnostic
-Version: 0.8.11
+Version: 0.9.0
 Summary: Tools to help converting pytorch models into ONNX.
 Home-page: https://github.com/sdpython/onnx-diagnostic
 Author: Xavier Dupré
@@ -91,7 +91,7 @@ Enlightening Examples
 * `Export microsoft/phi-2
   <https://sdpython.github.io/doc/onnx-diagnostic/dev/auto_examples/plot_export_tiny_phi2.html>`_
 * `Export a LLM through method generate (with Tiny-LLM)
-  <https://sdpython.github.io/doc/onnx-diagnostic/dev/
+  <https://sdpython.github.io/doc/onnx-diagnostic/dev/auto_final/plot_export_tiny_llm_method_generate.html>`_

 **Torch Export**

{onnx_diagnostic-0.8.11.dist-info → onnx_diagnostic-0.9.0.dist-info}/RECORD

@@ -1,4 +1,4 @@
-onnx_diagnostic/__init__.py,sha256=
+onnx_diagnostic/__init__.py,sha256=hp8Jeb4rpAUldATxz_DyWOTlzHxT2YqTum8AVG-NRGk,173
 onnx_diagnostic/__main__.py,sha256=YmyV_Aq_ianDlHyKLHMa6h8YK3ZmFPpLVHLKjM91aCk,79
 onnx_diagnostic/_command_lines_parser.py,sha256=nR-WI-15da9e0o-z5mIKezCH4dGFU66gCbyuAFu2FzY,59315
 onnx_diagnostic/doc.py,sha256=-w2qaes0G0TM0PQFTzBVcJPh0r_IR6Vd2j2fj9iV65Y,10478
@@ -6,13 +6,16 @@ onnx_diagnostic/ext_test_case.py,sha256=5WQrMdILgr-j8hrqBs7Vi6IFMD3TXNHD7Dlr9K5y
 onnx_diagnostic/typing.py,sha256=okLjQGA_ikThoMQ1ikjeu-pCi9FiaO8uhSoKz1ufCQ8,409
 onnx_diagnostic/ci_models/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
 onnx_diagnostic/ci_models/ci_helpers.py,sha256=lblOF7z2kLcCRAwMOdqp-Tz1EL1oBywHfVokhqiTQRg,15592
-onnx_diagnostic/ci_models/export_phi4_mm.py,sha256=
+onnx_diagnostic/ci_models/export_phi4_mm.py,sha256=anCnx5lt7aGZVrzd4XCJv28-isoCX7feEHHEZDtrW-Y,41735
 onnx_diagnostic/ci_models/export_qwen25_vl.py,sha256=_rYPr8PPraWizr2MPcGuYjrJ55ilJOyKl8kg0wq4L90,20405
+onnx_diagnostic/ci_models/data/Blanca_Lake_Hudak.jpg,sha256=O_mpR1XvIxBpQix0Tgit39Vd1PvAefbPquTJ6ChEwcA,690409
+onnx_diagnostic/ci_models/data/Ice_worm_glacier.jpg,sha256=GkteS7CrGfMQ8Hdyk-ShGtXr5HomFy1mKivJEHXdM2Q,610733
+onnx_diagnostic/ci_models/data/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
 onnx_diagnostic/export/__init__.py,sha256=yEIoWiOeTwBsDhyYt2fTKuhtA0Ya1J9u9ZzMTOTWaWs,101
-onnx_diagnostic/export/api.py,sha256=
+onnx_diagnostic/export/api.py,sha256=3O21VcaJ0CyZ0X8FwXjPXTCoNfWu1q6iBHeoeDvtOmc,43583
 onnx_diagnostic/export/cf_simple_loop_for.py,sha256=OHPGQc9AC-0TBtCYpP6cm-iHP9gmNt8WYRrPlO9ewlc,21158
 onnx_diagnostic/export/control_flow_onnx.py,sha256=izGlctqQANrHzSxPMbT7hoauNbnIBdx6hb8ry7HtVmM,18263
-onnx_diagnostic/export/dynamic_shapes.py,sha256=
+onnx_diagnostic/export/dynamic_shapes.py,sha256=AOfKmGYIs_5hZPy7vi9pZDbpvu-wIlD2cD15lV8rl9E,44763
 onnx_diagnostic/export/onnx_plug.py,sha256=U13fL0BjnhMzcDGxaAOqM4TQte5Z4zKDg4ESS0iktjM,22704
 onnx_diagnostic/export/shape_helper.py,sha256=K3y8-vFXYGg5R1tgeVOm_RjCZ8-yyrvYkQ4b3ILM5H4,10990
 onnx_diagnostic/export/validate.py,sha256=t8NZjgLqLjabKpUDjO5A5nBk_-BAsN1cdq-6UJ0Jm88,6127
@@ -20,25 +23,25 @@ onnx_diagnostic/helpers/__init__.py,sha256=GJ2GT7cgnlIveVUwMZhuvUwidbTJaKv8CsSIO
 onnx_diagnostic/helpers/_log_helper.py,sha256=OTwQH0OIxs9B6nrSvR7MoxMimSw_8mU0mj133NvLk5o,16832
 onnx_diagnostic/helpers/args_helper.py,sha256=qHxXJAM4ovBSYeufVVGMRQwrOW2AuP_HFQgqgU-QBQM,4463
 onnx_diagnostic/helpers/bench_run.py,sha256=9T26icFQOSo3HB1gEw31bf-mD5BrVATrRDhuyWESxz8,16543
-onnx_diagnostic/helpers/cache_helper.py,sha256=
+onnx_diagnostic/helpers/cache_helper.py,sha256=INfp9dkGTl3HDEQYbK_fC0T9dMgTuk0pNCNXjwB3Z9w,35033
 onnx_diagnostic/helpers/config_helper.py,sha256=cWRETgFhZ7tayIZPnMqF8BF5AvTU64G2BMqyzgO7lzs,5670
 onnx_diagnostic/helpers/doc_helper.py,sha256=9rigmq36D20yCKG5VkQnODiWMG2eJ0p22UdHew66UrQ,5983
 onnx_diagnostic/helpers/dot_helper.py,sha256=hwgTJsbsUv0qq7euyPDnc1NsBZDGOwv32JXSZxIHJkE,8118
 onnx_diagnostic/helpers/fake_tensor_helper.py,sha256=59046wDIw84or6PJxLaa2CFqaWT7Y3mpYr-BB2shcBE,12027
 onnx_diagnostic/helpers/graph_helper.py,sha256=ailTFdJu1Kir1VQ4GDsm9bUDIevZmgsJVJBGu7CFbCM,14100
-onnx_diagnostic/helpers/helper.py,sha256=
+onnx_diagnostic/helpers/helper.py,sha256=hqWZ-VDrnNDehdM8nJN6mblUxZeH-He4dvy378a5LMM,67773
 onnx_diagnostic/helpers/log_helper.py,sha256=njGse2PeD1muXVYvEMl6dsWTDja76wCWipxrEMAkLiw,94504
 onnx_diagnostic/helpers/memory_peak.py,sha256=kigKbD3cq6ZCkE_nhOgNIkr9zHP9zp7ezZhut8GTrcI,6596
 onnx_diagnostic/helpers/mini_onnx_builder.py,sha256=3IAJRf7bphKN0sY714me7H1UzEA1ejJQNYY7cMbGkCo,23878
 onnx_diagnostic/helpers/model_builder_helper.py,sha256=r8LG3xh_-gwp4RODVM9jhB9HNWvccuNypwygWHoU2jA,18548
-onnx_diagnostic/helpers/onnx_helper.py,sha256=
+onnx_diagnostic/helpers/onnx_helper.py,sha256=DosgZN1ZU47xA2FN49JCfcmKF9rC8jeO9Vu5icstdOY,64437
 onnx_diagnostic/helpers/optim_helper.py,sha256=0NiYRwV9GLTub4SEny0dqEhLcajRjEhcgkeBDVr9bGQ,4424
-onnx_diagnostic/helpers/ort_session.py,sha256=
-onnx_diagnostic/helpers/rt_helper.py,sha256=
+onnx_diagnostic/helpers/ort_session.py,sha256=gYSUTuUFd8vf7Hk48HZ7B73f83ZUeCcTa6BE6ylk8as,30975
+onnx_diagnostic/helpers/rt_helper.py,sha256=ZuWYTI4Nc_z556PXnS5qtpT__YlyTOwn9ocQ1vw-hfY,40352
 onnx_diagnostic/helpers/torch_fx_graph_helper.py,sha256=7xFe4svdbr4gV3OTNcx8eJejjDyHAv4hD_RNNKSxL0c,6571
-onnx_diagnostic/helpers/torch_helper.py,sha256=
+onnx_diagnostic/helpers/torch_helper.py,sha256=WrC6pr7oT7d8P-34FVDrM7ZYtVEx62OaSGlwK2N_7sc,39782
 onnx_diagnostic/investigate/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
-onnx_diagnostic/investigate/input_observer.py,sha256=
+onnx_diagnostic/investigate/input_observer.py,sha256=vA3I_fDizIIL4m3ETl6qb5qPmmf6QcSYox06FJXOqh0,40576
 onnx_diagnostic/reference/__init__.py,sha256=rLZsxOlnb7-81F2CzepGnZLejaROg4JvgFaGR9FwVQA,208
 onnx_diagnostic/reference/evaluator.py,sha256=5PcPL_5xuEmm4j16KK0o2N5UkdUpuTSxQYc48MPWLpw,8847
 onnx_diagnostic/reference/ort_evaluator.py,sha256=J3keuxFGnQIKP9V80U61y9rpU3zE26RR7HPozeaeiYo,33929
@@ -106,7 +109,7 @@ onnx_diagnostic/tasks/zero_shot_image_classification.py,sha256=jJCMWuOqGv5ahCfjr
 onnx_diagnostic/tasks/data/__init__.py,sha256=uJoemrWgEjI6oA-tMX7r3__x-b3siPmkgqaY7bgIles,401
 onnx_diagnostic/tasks/data/dummies_imagetext2text_generation_gemma3.onnx,sha256=UbtvmWMqcZOKJ-I-HXWI1A6YR6QDaFS5u_yXm5C3ZBw,10299
 onnx_diagnostic/torch_export_patches/__init__.py,sha256=KcBsUXOzw0n-Eo6uFQUUCnJjDsHLnR_J7u-SJu0d6bo,719
-onnx_diagnostic/torch_export_patches/onnx_export_errors.py,sha256=
+onnx_diagnostic/torch_export_patches/onnx_export_errors.py,sha256=1q615Li3qQskArEjDT31uB9emeY6ByfHtOO7XIkDWhI,44023
 onnx_diagnostic/torch_export_patches/onnx_export_serialization.py,sha256=KZgr8WbLvmZte1G1hKA0zIlio5ZHz9MKIwGPqNamB6E,12690
 onnx_diagnostic/torch_export_patches/patch_details.py,sha256=UHBo4QTLF3ZgQ4951yYHIQqxOeRYNaG7x56XFcRTtg4,11794
 onnx_diagnostic/torch_export_patches/patch_expressions.py,sha256=VOsv71FsR_UZtxz4-5_VKL2sHQhOkHy9RkPJME2h7UU,3271
@@ -124,18 +127,18 @@ onnx_diagnostic/torch_export_patches/patches/_patch_transformers_funnel.py,sha25
 onnx_diagnostic/torch_export_patches/patches/_patch_transformers_gemma3.py,sha256=nVgYQk0xXpHiictN1wOHVMN2lTH9b0vfIJ4ie-uKopg,1999
 onnx_diagnostic/torch_export_patches/patches/_patch_transformers_generation_mixin.py,sha256=VIZsVHgR8NmAcBQalPl5I6ZzNgcBxjGb6ars31m9gRg,21936
 onnx_diagnostic/torch_export_patches/patches/_patch_transformers_idefics.py,sha256=kTjuTRsfkGGGhspJnMxAMQSchZgGC_IruJzpHh_FmI8,6348
-onnx_diagnostic/torch_export_patches/patches/_patch_transformers_masking_utils.py,sha256=
+onnx_diagnostic/torch_export_patches/patches/_patch_transformers_masking_utils.py,sha256=WD7424vssc_uEpcoG_E5V7edxugVratMQJSng5s3zW8,11808
 onnx_diagnostic/torch_export_patches/patches/_patch_transformers_qwen2.py,sha256=OxYdlLrwtd_KGHt3E17poduxvWFg-CfGS57-yN1i6gI,3827
 onnx_diagnostic/torch_export_patches/patches/_patch_transformers_qwen2_5.py,sha256=oYz0tr-6KH0DabpgaISytnXAGxQosoA8gV5LpksO4yI,34834
 onnx_diagnostic/torch_export_patches/patches/_patch_transformers_qwen3.py,sha256=cND9Iqo1aKdlX-BXGr9Qlq_Y4EW1L5VWSwZfqYTVazU,4888
 onnx_diagnostic/torch_export_patches/patches/_patch_transformers_rotary_embedding.py,sha256=AQTAAGhpg_Jlzc_26c9rmoZRuhmJcrFX488_W_Z0QD8,16945
 onnx_diagnostic/torch_export_patches/patches/_patch_transformers_sam_mask_decoder.py,sha256=-6TuBm3sLAFEGuW3vRfOTtE5uP6aINFfu7xMnl27Dws,5703
 onnx_diagnostic/torch_export_patches/patches/patch_helper.py,sha256=kK_CGW643iVXxa-m6pttDBS7HTyMQaPypza7iqIInn4,721
-onnx_diagnostic/torch_export_patches/patches/patch_torch.py,sha256=
-onnx_diagnostic/torch_export_patches/patches/patch_transformers.py,sha256=
+onnx_diagnostic/torch_export_patches/patches/patch_torch.py,sha256=xoWyNn-hj9d6TW-DVYSUJfhYzJJgpLiG3HZzcQA3k7s,45743
+onnx_diagnostic/torch_export_patches/patches/patch_transformers.py,sha256=UXfHCu-v1uw0CY7XSjPz35GOQKjtdwxKZUOm_ACelNw,3403
 onnx_diagnostic/torch_export_patches/serialization/__init__.py,sha256=BHLdRPtNAtNPAS-bPKEj3-foGSPvwAbZXrHzGGPDLEw,1876
 onnx_diagnostic/torch_export_patches/serialization/diffusers_impl.py,sha256=drq3EH_yjcSuIWYsVeUWm8Cx6YCZFU6bP_1PLtPfY5I,945
-onnx_diagnostic/torch_export_patches/serialization/transformers_impl.py,sha256=
+onnx_diagnostic/torch_export_patches/serialization/transformers_impl.py,sha256=mBJ8iMngSqI9JJPceq2At9rDWAlRmmRRYFudSl7wgok,12421
 onnx_diagnostic/torch_models/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
 onnx_diagnostic/torch_models/code_sample.py,sha256=D0Hv5PJQj2J8Nivoha-RzCdvVhsWYFVFEoG2D2yKm5k,12882
 onnx_diagnostic/torch_models/llms.py,sha256=soyg4yC87ptGoeulJhKqw5opGmuLvH1pn_ZDXZ4Jr8E,90
@@ -154,8 +157,8 @@ onnx_diagnostic/torch_onnx/compare.py,sha256=pXPZ168pT7J_-UOOvsbmS_dCJcwaPjA7Gxi
 onnx_diagnostic/torch_onnx/runtime_info.py,sha256=Mv6WQXNtfNF31zgLm5Hk3x2fTDS6p8UjPh51_Jn4tl0,8565
 onnx_diagnostic/torch_onnx/sbs.py,sha256=xeq4jrMnTeLvZ3UjFlMNpm-VBbP_16fPwTi1_Ujvilo,40752
 onnx_diagnostic/torch_onnx/sbs_dataclasses.py,sha256=WPu9hdz8dIXqvFYy-xtPX9liD_DUVkXOWO_vElOGksY,20277
-onnx_diagnostic-0.
-onnx_diagnostic-0.
-onnx_diagnostic-0.
-onnx_diagnostic-0.
-onnx_diagnostic-0.
+onnx_diagnostic-0.9.0.dist-info/licenses/LICENSE.txt,sha256=Vv6TXglX6Rc0d-f8aREhayhT-6PMQXEyOmI2NKlUCMc,1045
+onnx_diagnostic-0.9.0.dist-info/METADATA,sha256=-gBCPiRO-6gtbL_khtaTN8GC_UyZNzlY2PmYUo3lCW4,6900
+onnx_diagnostic-0.9.0.dist-info/WHEEL,sha256=wUyA8OaulRlbfwMtmQsvNngGrxQHAvkKcvRmdizlJi0,92
+onnx_diagnostic-0.9.0.dist-info/top_level.txt,sha256=KwNkXewmcobM3ZT1DJLVWH6ebJzA5qKg7cWqKfpGNT4,16
+onnx_diagnostic-0.9.0.dist-info/RECORD,,