onnx-diagnostic 0.8.5__py3-none-any.whl → 0.8.7__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (35)
  1. onnx_diagnostic/__init__.py +1 -1
  2. onnx_diagnostic/_command_lines_parser.py +154 -3
  3. onnx_diagnostic/ci_models/__init__.py +0 -0
  4. onnx_diagnostic/ci_models/ci_helpers.py +435 -0
  5. onnx_diagnostic/ci_models/export_phi4_mm.py +1062 -0
  6. onnx_diagnostic/ci_models/export_qwen25_vl.py +568 -0
  7. onnx_diagnostic/export/api.py +1 -0
  8. onnx_diagnostic/export/cf_simple_loop_for.py +537 -0
  9. onnx_diagnostic/export/control_flow_onnx.py +23 -17
  10. onnx_diagnostic/ext_test_case.py +23 -2
  11. onnx_diagnostic/helpers/bench_run.py +1 -1
  12. onnx_diagnostic/helpers/log_helper.py +1 -3
  13. onnx_diagnostic/helpers/optim_helper.py +116 -0
  14. onnx_diagnostic/tasks/image_text_to_text.py +15 -5
  15. onnx_diagnostic/tasks/text2text_generation.py +84 -48
  16. onnx_diagnostic/tasks/text_generation.py +3 -0
  17. onnx_diagnostic/torch_export_patches/onnx_export_errors.py +44 -2
  18. onnx_diagnostic/torch_export_patches/patch_expressions.py +4 -1
  19. onnx_diagnostic/torch_export_patches/patch_module.py +31 -23
  20. onnx_diagnostic/torch_export_patches/patches/_patch_transformers_funnel.py +80 -0
  21. onnx_diagnostic/torch_export_patches/patches/_patch_transformers_qwen2_5.py +86 -3
  22. onnx_diagnostic/torch_export_patches/patches/patch_torch.py +15 -0
  23. onnx_diagnostic/torch_export_patches/patches/patch_transformers.py +23 -24
  24. onnx_diagnostic/torch_models/hghub/hub_api.py +11 -0
  25. onnx_diagnostic/torch_models/hghub/hub_data.py +9 -1
  26. onnx_diagnostic/torch_models/hghub/hub_data_cached_configs.py +29 -8
  27. onnx_diagnostic/torch_models/hghub/model_inputs.py +24 -19
  28. onnx_diagnostic/torch_onnx/compare.py +357 -0
  29. {onnx_diagnostic-0.8.5.dist-info → onnx_diagnostic-0.8.7.dist-info}/METADATA +1 -1
  30. {onnx_diagnostic-0.8.5.dist-info → onnx_diagnostic-0.8.7.dist-info}/RECORD +33 -27
  31. onnx_diagnostic/export/control_flow.py +0 -214
  32. onnx_diagnostic/export/control_flow_research.py +0 -140
  33. {onnx_diagnostic-0.8.5.dist-info → onnx_diagnostic-0.8.7.dist-info}/WHEEL +0 -0
  34. {onnx_diagnostic-0.8.5.dist-info → onnx_diagnostic-0.8.7.dist-info}/licenses/LICENSE.txt +0 -0
  35. {onnx_diagnostic-0.8.5.dist-info → onnx_diagnostic-0.8.7.dist-info}/top_level.txt +0 -0
@@ -0,0 +1,357 @@
+ import enum
+ from dataclasses import dataclass
+ from typing import Dict, List, Optional, Tuple, Union
+ import numpy as np
+ import onnx
+ from ..helpers.onnx_helper import onnx_dtype_name
+
+
+ _NOT_SO_FAR_OPS = [
+     {"MatMul", "Gemm", "FusedMatMul"},
+     {"Conv", "FusedConv"},
+     {"MaxPool"},
+ ]
+
+
+ def _sum_sets(sets):
+     t = set()
+     for s in sets:
+         t |= s
+     return t
+
+
+ _ALL_NOT_SO_FAR_OPS = _sum_sets(_NOT_SO_FAR_OPS)
+
+
+ def _align(res: str, limit: int) -> str:
+     if len(res) == limit:
+         return res
+     if len(res) > limit:
+         return res[:limit]
+     return res + " " * (limit - len(res))
+
+
+ class ObsType(enum.IntEnum):
+     """Observation kind."""
+
+     RESULT = 1
+     INITIALIZER = 2
+     SPARSE_INITIALIZER = 4
+     INPUT = 8
+     OUTPUT = 16
+     NODE = 32
+
+     def __repr__(self):
+         return f"{self.__class__.__name__}.{self._name_}"
+
+
+ @dataclass
+ class ObsCompare:
+     """
+     The description of an observation, a node, an input, an output, an initializer.
+
+     :param position: index of this observation in the original model
+     :param kind: node type, see :class:`ObsType`
+     :param name_or_outputs: name of an initializer or the outputs of a node
+     :param itype: onnx type
+     :param index: index of an input or output
+     :param shape: shape
+     :param op_type: node op_type
+     :param comment: comment, unused
+     """
+
+     position: int
+     kind: ObsType
+     name_or_outputs: Tuple[str]
+     itype: int = 0
+     index: int = 0
+     shape: Optional[Tuple[Tuple[Union[int, str], ...]]] = None
+     op_type: str = ""
+     comment: str = ""
+
+     def __str__(self) -> str:
+         "usual"
+         els = [
+             _align(f"{self.position:04d}", 4),
+             _align(self.kind._name_, 6),
+             _align(onnx_dtype_name(self.itype) if self.itype else "?", 8),
+             _align("?" if self.shape is None else "x".join(map(str, self.shape)), 18),
+             _align(self.op_type or "", 15),
+             _align(", ".join(self.name_or_outputs), 35),
+         ]
+         return " ".join(els)
+
+     @classmethod
+     def to_str(cls, obs: Optional["ObsCompare"]) -> str:
+         assert not obs or isinstance(obs, ObsCompare), f"unexpected type {type(obs)}"
+         if obs:
+             return str(obs)
+         return " " * (4 + 6 + 8 + 18 + 15 + 35 + 5)
+
+     def distance(self, obs: "ObsCompare") -> float:
+         """Computes a cost between two observations."""
+         if self.kind != obs.kind:
+             return 1e6
+         d: float = 0
+         if self.itype != obs.itype:
+             d += 1e5
+         if self.kind == ObsType.NODE:
+             cost = 9997
+             d = 0
+             if self.op_type != obs.op_type:
+                 if self.op_type in _ALL_NOT_SO_FAR_OPS or obs.op_type in _ALL_NOT_SO_FAR_OPS:
+                     d += 1e2
+                     for aset in _NOT_SO_FAR_OPS:
+                         if self.op_type in aset and obs.op_type in aset:
+                             cost = 97
+                         elif self.op_type in aset or obs.op_type in aset:
+                             d += 5e4
+                 else:
+                     d += 9e2
+             if len(self.name_or_outputs) == 1 and len(obs.name_or_outputs) == 1:
+                 if self.name_or_outputs[0] != obs.name_or_outputs[0]:
+                     n1 = self.name_or_outputs[0]
+                     n2 = obs.name_or_outputs[0]
+                     n1 = n1.replace("_", "")
+                     n2 = n2.replace("_", "")
+                     if n1 == n2:
+                         d += 1
+                     elif (n1.startswith(("val_", "_onx_")) or "::" in n1 or "--" in n1) and (
+                         n2.startswith(("val_", "_onx_")) or "::" in n2 or "--" in n2
+                     ):
+                         # These are name given the exporter
+                         # and not inspired from the model itself.
+                         d += cost / 100
+                     else:
+                         d += cost
+             else:
+                 a = set(self.name_or_outputs) & set(obs.name_or_outputs)
+                 b = set(self.name_or_outputs) | set(obs.name_or_outputs)
+                 d += cost * (len(b) - len(a))
+             return d
+         if self.kind == ObsType.INPUT:
+             return (
+                 999.7
+                 if self.itype != obs.itype
+                 or self.shape != obs.shape
+                 or self.index != obs.index
+                 else 0
+             )
+         if self.kind == ObsType.INITIALIZER or self.kind == ObsType.SPARSE_INITIALIZER:
+             return 1e3 if self.itype != obs.itype or self.shape != obs.shape else 0
+         if self.kind == ObsType.OUTPUT:
+             return (
+                 999.1
+                 if self.itype != obs.itype
+                 or self.shape != obs.shape
+                 or self.index != obs.index
+                 else 0
+             )
+         return 1e8
+
+     @classmethod
+     def obs_sequence_from_model(
+         cls,
+         model: Union[onnx.ModelProto, onnx.GraphProto],
+     ) -> List["ObsCompare"]:
+         """
+         Creates a sequence of observations bases on a model.
+
+         :param model: model
+         :return: sequence of observations
+         """
+         graph = model if isinstance(model, onnx.GraphProto) else model.graph
+
+         shapes = {}
+         types = {}
+         for info in [*graph.value_info, *graph.input, *graph.output]:
+             if info.type.tensor_type:
+                 t = info.type.tensor_type
+                 shapes[info.name] = tuple((d.dim_param or d.dim_value) for d in t.shape.dim)
+                 types[info.name] = t.elem_type
+
+         seq: List[ObsCompare] = []
+         for init in graph.initializer:
+             obs = ObsCompare(
+                 position=len(seq),
+                 kind=ObsType.INITIALIZER,
+                 itype=init.data_type,
+                 shape=tuple(init.dims),
+                 name_or_outputs=(init.name,),
+             )
+             seq.append(obs)
+         for i, inp in enumerate(graph.input):
+             obs = ObsCompare(
+                 position=len(seq),
+                 kind=ObsType.INPUT,
+                 itype=inp.type.tensor_type.elem_type,
+                 index=i,
+                 shape=tuple(
+                     (d.dim_param or d.dim_value) for d in inp.type.tensor_type.shape.dim
+                 ),
+                 name_or_outputs=(inp.name,),
+             )
+             seq.append(obs)
+         for node in graph.node:
+             obs = ObsCompare(
+                 position=len(seq),
+                 kind=ObsType.NODE,
+                 itype=types.get(node.output[0], 0),
+                 index=i,
+                 shape=shapes.get(node.output[0], None),
+                 name_or_outputs=tuple(node.output),
+                 op_type=node.op_type,
+             )
+             seq.append(obs)
+         for i, inp in enumerate(graph.output):
+             obs = ObsCompare(
+                 position=len(seq),
+                 kind=ObsType.OUTPUT,
+                 itype=inp.type.tensor_type.elem_type,
+                 index=i,
+                 shape=tuple(
+                     (d.dim_param or d.dim_value) for d in inp.type.tensor_type.shape.dim
+                 ),
+                 name_or_outputs=(inp.name,),
+             )
+             seq.append(obs)
+         return seq
+
+
+ @dataclass
+ class ObsComparePair:
+     """
+     Defines a pair of comparison objects
+
+     :param side1: object from first side
+     :param side2: object from first side
+     :param distance: distance
+     """
+
+     side1: Optional[ObsCompare]
+     side2: Optional[ObsCompare]
+     distance: float
+
+     def __str__(self) -> str:
+         "nice display"
+         return (
+             f"{self.distance:.4e} | "
+             f"{ObsCompare.to_str(self.side1)} | {ObsCompare.to_str(self.side2)}"
+         )
+
+     @classmethod
+     def to_str(cls, seq: List["ObsComparePair"]) -> str:
+         """Displays every pair in text."""
+         return "\n".join([f"{str(pair)}" for pair in seq])
+
+     @classmethod
+     def distance_sequence(cls, s1: List["ObsCompare"], s2: List["ObsCompare"]) -> Tuple[
+         float,
+         List[Tuple[int, int]],
+         List["ObsComparePair"],
+     ]:
+         """
+         Computes the distance between two sequences of results.
+
+         :param s1: first sequence
+         :param s2: second sequence
+         :return: distance and alignment
+
+         An example:
+
+         .. runpython::
+             :showcode:
+
+             import torch
+             from onnx_diagnostic.export.api import to_onnx
+             from onnx_diagnostic.torch_onnx.compare import ObsComparePair, ObsCompare
+
+
+             class Model(torch.nn.Module):
+                 def __init__(self):
+                     super().__init__()
+                     self.conv1 = torch.nn.Conv2d(3, 16, 5)
+                     self.fc1 = torch.nn.Linear(144, 64)
+                     self.fc2 = torch.nn.Linear(64, 128)
+                     self.fc3 = torch.nn.Linear(128, 10)
+
+                 def forward(self, x):
+                     x = torch.nn.functional.max_pool2d(
+                         torch.nn.functional.relu(self.conv1(x)),
+                         (4, 4),
+                     )
+                     # x = F.max_pool2d(F.relu(self.conv2(x)), 2)
+                     x = torch.flatten(x, 1)
+                     x = torch.nn.functional.relu(self.fc1(x))
+                     x = torch.nn.functional.relu(self.fc2(x))
+                     y = self.fc3(x)
+                     return y
+
+
+             model = Model()
+             x = torch.randn((2, 3, 16, 17), dtype=torch.float32)
+             dynamic_shapes = ({0: "batch", 3: "dim"},)
+             onnx_optimized = to_onnx(
+                 model, (x,), dynamic_shapes=dynamic_shapes, exporter="custom", optimize=True
+             ).model_proto
+             onnx_not_optimized = to_onnx(
+                 model, (x,), dynamic_shapes=dynamic_shapes, exporter="custom", optimize=False
+             ).model_proto
+             seq1 = ObsCompare.obs_sequence_from_model(onnx_not_optimized)
+             seq2 = ObsCompare.obs_sequence_from_model(onnx_optimized)
+             _dist, _path, pair_cmp = ObsComparePair.distance_sequence(seq1, seq2)
+             text = ObsComparePair.to_str(pair_cmp)
+             print(text)
+         """
+         delay = max(50, abs(len(s2) - len(s1)) + 1)
+         distance: Dict[Tuple[int, int], Union[int, float]] = {(-1, -1): 0}
+         predecessor: Dict[Tuple[int, int], Optional[Tuple[int, int]]] = {(-1, -1): None}
+         insert_cost = 1e3
+         for i in range(len(s1)):
+             for j in range(max(0, i - delay), min(len(s2), i + delay)):
+                 best = distance.get((i, j), 1e100)
+                 pred = None
+                 ki, kj = i - 1, j - 1
+                 if (ki, kj) in distance:
+                     d = distance[ki, kj] + s1[i].distance(s2[j])
+                     if d < best:
+                         best = d
+                         pred = (ki, kj)
+                 ki, kj = i - 1, j
+                 if (ki, kj) in distance:
+                     d = distance[ki, kj] + insert_cost + 1
+                     if d < best:
+                         best = d
+                         pred = (ki, kj)
+                 ki, kj = i, j - 1
+                 if (ki, kj) in distance:
+                     d = distance[ki, kj] + insert_cost + 0.1
+                     if d < best:
+                         best = d
+                         pred = (ki, kj)
+                 distance[i, j] = best
+                 predecessor[i, j] = pred
+
+         # reverse
+         way = []
+         last: Optional[Tuple[int, int]] = len(s1) - 1, len(s2) - 1
+         while last is not None:
+             way.append(last)
+             last = predecessor[last]
+         indices = list(reversed(way))[1:]
+         obs_path: List[ObsComparePair] = []
+         last = -1, -1
+         for i, j in indices:
+             di = i - last[0]
+             dj = j - last[1]
+             cost = distance.get((i, j), np.nan)
+             if di == dj == 1:
+                 obs_path.append(ObsComparePair(s1[i], s2[j], distance=cost))
+             elif di == 0:
+                 obs_path.append(ObsComparePair(None, s2[j], distance=cost))
+             elif dj == 0:
+                 obs_path.append(ObsComparePair(s1[i], None, distance=cost))
+             else:
+                 raise RuntimeError(f"issue with di={di}, dj={dj}")
+             last = i, j
+         return distance[len(s1) - 1, len(s2) - 1], indices, obs_path
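The hunk above adds onnx_diagnostic/torch_onnx/compare.py: ObsCompare turns every initializer, input, node, and output of a model into one observation, and ObsComparePair.distance_sequence aligns two such sequences with an edit-distance style dynamic program. Below is a minimal sketch of how the new API could be driven on two hand-built graphs; the tiny Add/Relu vs. Add/Identity models and the _make_model helper are illustrative assumptions, not part of the package.

    # Sketch only: compare two small, hand-built ONNX graphs with the new
    # ObsCompare / ObsComparePair API added in onnx_diagnostic.torch_onnx.compare.
    import onnx
    import onnx.helper as oh
    from onnx_diagnostic.torch_onnx.compare import ObsCompare, ObsComparePair

    def _make_model(with_relu: bool) -> onnx.ModelProto:
        # Illustrative helper: X -> Add -> (Relu or Identity) -> Y.
        X = oh.make_tensor_value_info("X", onnx.TensorProto.FLOAT, ["batch", 4])
        Y = oh.make_tensor_value_info("Y", onnx.TensorProto.FLOAT, ["batch", 4])
        nodes = [oh.make_node("Add", ["X", "X"], ["a"])]
        nodes.append(oh.make_node("Relu" if with_relu else "Identity", ["a"], ["Y"]))
        graph = oh.make_graph(nodes, "g", [X], [Y])
        return oh.make_model(graph, opset_imports=[oh.make_opsetid("", 18)])

    # One observation per initializer, input, node and output of each model.
    seq1 = ObsCompare.obs_sequence_from_model(_make_model(with_relu=True))
    seq2 = ObsCompare.obs_sequence_from_model(_make_model(with_relu=False))

    # Align the two sequences; the last returned value is the list of matched pairs.
    dist, _indices, pairs = ObsComparePair.distance_sequence(seq1, seq2)
    print(f"total distance: {dist}")
    print(ObsComparePair.to_str(pairs))

This is the same pattern the docstring example in the hunk follows with two exports of one torch module; an observation that exists on only one side shows up as a pair whose other column is blank.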
@@ -1,6 +1,6 @@
  Metadata-Version: 2.4
  Name: onnx-diagnostic
- Version: 0.8.5
+ Version: 0.8.7
  Summary: Tools to help converting pytorch models into ONNX.
  Home-page: https://github.com/sdpython/onnx-diagnostic
  Author: Xavier Dupré
@@ -1,14 +1,17 @@
- onnx_diagnostic/__init__.py,sha256=dCiBK_S7EOo_rAsmsgv-laLhtKzE2uny0XIR5aO4eDk,173
+ onnx_diagnostic/__init__.py,sha256=4PdbZ6FK0yyAhH9AUn7XujsN3PJ3vNr0EsPuwV8XoWc,173
  onnx_diagnostic/__main__.py,sha256=YmyV_Aq_ianDlHyKLHMa6h8YK3ZmFPpLVHLKjM91aCk,79
- onnx_diagnostic/_command_lines_parser.py,sha256=ZFJdQP1Ee8D5a_xUch-0CHaYbbILztejTjVdyc9KrMw,52667
+ onnx_diagnostic/_command_lines_parser.py,sha256=g_udwHBHmY6X_d41Qby_DqMpEHL1p9GfUhJGBCihl8c,57784
  onnx_diagnostic/api.py,sha256=BhCl_yCd78N7TlVtPOHjeYv1QBEy39TjZ647rcHqLh0,345
  onnx_diagnostic/doc.py,sha256=t3RELgfooYnVMAi0JSpggWkQEgUsREz8NmRvn0TnLI8,2829
- onnx_diagnostic/ext_test_case.py,sha256=rVZWqFEfnvwnsD3wF4jeDblh5uj5ckZ8C6DZQ0RGb_E,49599
+ onnx_diagnostic/ext_test_case.py,sha256=A6BkrRm-QbvM8A-qRRMLt9o9ZO6wMXE9jrotggjpGfE,50460
+ onnx_diagnostic/ci_models/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
+ onnx_diagnostic/ci_models/ci_helpers.py,sha256=lblOF7z2kLcCRAwMOdqp-Tz1EL1oBywHfVokhqiTQRg,15592
+ onnx_diagnostic/ci_models/export_phi4_mm.py,sha256=FZik8jN0GdQ7sAo_a9PiuulUpbgG_4BrZugWQ7YbuaU,41668
+ onnx_diagnostic/ci_models/export_qwen25_vl.py,sha256=_rYPr8PPraWizr2MPcGuYjrJ55ilJOyKl8kg0wq4L90,20405
  onnx_diagnostic/export/__init__.py,sha256=yEIoWiOeTwBsDhyYt2fTKuhtA0Ya1J9u9ZzMTOTWaWs,101
- onnx_diagnostic/export/api.py,sha256=BX4c99gMlRYsBWk3P15FMRogArxjP4dXYXP5gILjgIk,10626
- onnx_diagnostic/export/control_flow.py,sha256=zU5n_QYhNcBllyMsl1_i6ohZt2CshqG2MokJghrvA60,7751
- onnx_diagnostic/export/control_flow_onnx.py,sha256=sODOD4v7EJj6LWhrfcdCW68r9nYKsRM4SRnqDw4TrSI,18049
- onnx_diagnostic/export/control_flow_research.py,sha256=RuYz9_eM42Bk6TKSiPV6dS68LIMZu-6WBCFCKoSvjrk,5422
+ onnx_diagnostic/export/api.py,sha256=5ESg0rcK0EFGMNl76_K2O0rvCC8hVi7Nfj0O3OI7tQA,10657
+ onnx_diagnostic/export/cf_simple_loop_for.py,sha256=OHPGQc9AC-0TBtCYpP6cm-iHP9gmNt8WYRrPlO9ewlc,21158
+ onnx_diagnostic/export/control_flow_onnx.py,sha256=izGlctqQANrHzSxPMbT7hoauNbnIBdx6hb8ry7HtVmM,18263
  onnx_diagnostic/export/dynamic_shapes.py,sha256=M2hlpHSTbkzZwGKAbrpQXng5HQrwjF5Z6wGGxEgnp74,42061
  onnx_diagnostic/export/onnx_plug.py,sha256=U13fL0BjnhMzcDGxaAOqM4TQte5Z4zKDg4ESS0iktjM,22704
  onnx_diagnostic/export/shape_helper.py,sha256=m628y0oRCQbeZkeh8JDHIfWMsSjoJoeX-IPiPGDHT-w,11273
@@ -16,7 +19,7 @@ onnx_diagnostic/export/validate.py,sha256=_PGUql2DJhIgGKo0WjTGUc5AgsZUx8fEs00MeP
  onnx_diagnostic/helpers/__init__.py,sha256=GJ2GT7cgnlIveVUwMZhuvUwidbTJaKv8CsSIOpZDsJg,83
  onnx_diagnostic/helpers/_log_helper.py,sha256=OTwQH0OIxs9B6nrSvR7MoxMimSw_8mU0mj133NvLk5o,16832
  onnx_diagnostic/helpers/args_helper.py,sha256=SRWnqC7EENg09RZlA50B_PcdiIhdbgA4C3ACfzl5nMs,4419
- onnx_diagnostic/helpers/bench_run.py,sha256=CGA6VMJZMH2gDhVueT9ypNm4PMcjGrrGFYp08nhWj9k,16539
+ onnx_diagnostic/helpers/bench_run.py,sha256=Vvzb7Wy0baIT5O0dx4RKQTx-5V08PiHxPJh6XPkY-lU,16544
  onnx_diagnostic/helpers/cache_helper.py,sha256=OLghsSUuZ8cWGkua8eH75KBF-mbVqejnNUYfFo5lRf0,28498
  onnx_diagnostic/helpers/config_helper.py,sha256=cWRETgFhZ7tayIZPnMqF8BF5AvTU64G2BMqyzgO7lzs,5670
  onnx_diagnostic/helpers/doc_helper.py,sha256=pl5MZd3_FaE8BqQnqoBuSBxoNCFcd2OJd3eITUSku5c,5897
@@ -24,11 +27,12 @@ onnx_diagnostic/helpers/dot_helper.py,sha256=hwgTJsbsUv0qq7euyPDnc1NsBZDGOwv32JX
  onnx_diagnostic/helpers/fake_tensor_helper.py,sha256=J7wnK3WTuVKnYiMzLVTAPkdJr3hQfIfMC9ZlOu7oGmI,11024
  onnx_diagnostic/helpers/graph_helper.py,sha256=hevQT5a7_QuriVPQcbT5qe18n99Doyl5h3-qshx1-uk,14093
  onnx_diagnostic/helpers/helper.py,sha256=x8EYQmgrz_G5QS_IsbeFIoDcN_sUs-CslJMHseBj1Fw,65482
- onnx_diagnostic/helpers/log_helper.py,sha256=0lJiTF87lliI-LmgpUH_V2N8NuzJ0LryH0mSYpkRaL8,93272
+ onnx_diagnostic/helpers/log_helper.py,sha256=3mWQd-nLKCctKZt9N8SpoWgLC8O7YdNQ2pfW5QXYWDQ,93232
  onnx_diagnostic/helpers/memory_peak.py,sha256=M3m4_thWFIwP5HytbJYEqaijXIv5v5BW_vlcJowIYI4,6434
  onnx_diagnostic/helpers/mini_onnx_builder.py,sha256=jR2lkRZEQ0N30H0FqeBwaxJd_w_6kyxFagrnulqFjhE,23883
  onnx_diagnostic/helpers/model_builder_helper.py,sha256=qKIq4Naqq03gk6NfqXLQjSDiKL5FFNc1AEyVX0R8GmA,18540
  onnx_diagnostic/helpers/onnx_helper.py,sha256=MshvqMSTNUUZIpkkRYGDymdW2t2KtB2BgYtOPHIDwvQ,57508
+ onnx_diagnostic/helpers/optim_helper.py,sha256=0NiYRwV9GLTub4SEny0dqEhLcajRjEhcgkeBDVr9bGQ,4424
  onnx_diagnostic/helpers/ort_session.py,sha256=XvRazj7yyepaQwYHpPkKKi9v8u_h9A4ZiFH6IxjqPKs,30502
  onnx_diagnostic/helpers/rt_helper.py,sha256=OOxHSCKZup2u7zTvVJxPkRHb4jQZ03KpkiDGrfwibMM,38135
  onnx_diagnostic/helpers/torch_fx_graph_helper.py,sha256=7xFe4svdbr4gV3OTNcx8eJejjDyHAv4hD_RNNKSxL0c,6571
@@ -85,27 +89,27 @@ onnx_diagnostic/tasks/automatic_speech_recognition.py,sha256=aMufLDGW005f7aLMZ9a
  onnx_diagnostic/tasks/feature_extraction.py,sha256=IS9z9fPNE0hhGUebBfmNZl0twdXobMc7MFKpQB9qZI0,5388
  onnx_diagnostic/tasks/fill_mask.py,sha256=5Gt6zlj0p6vuifox7Wmj-TpHXJvPS0CEH8evgdBHDNA,2640
  onnx_diagnostic/tasks/image_classification.py,sha256=nLpBBB1Gkog3Fk6pu2waiHcuQr4ILPptc9FhQ-pn460,4682
- onnx_diagnostic/tasks/image_text_to_text.py,sha256=g-xRRTVUiT6mBvYYbOxgzlHNfu9SnEgUcvTeSoqf7gE,21765
+ onnx_diagnostic/tasks/image_text_to_text.py,sha256=21gbNU8ZZCSzWWmNpwlUmROD_5pC_u74GgnzTLELzd8,22136
  onnx_diagnostic/tasks/image_to_video.py,sha256=SoF2cVIJr6P30Abp-FCuixFDh5RvTuNEOL36QthGY6U,3860
  onnx_diagnostic/tasks/mask_generation.py,sha256=fjdD3rd-O-mFL0hQy3la3JXKth_0bH2HL7Eelq-3Dbs,5057
  onnx_diagnostic/tasks/mixture_of_expert.py,sha256=al4tk1BrHidtRiHlAaiflWiJaAte0d5M8WcBioANG9k,2808
  onnx_diagnostic/tasks/object_detection.py,sha256=3FiT8ya5FCd9lwjQCRXhAwXspNwYTlAD3Gpk8aAcG5w,4279
  onnx_diagnostic/tasks/sentence_similarity.py,sha256=vPqNZgAnIvY0rKWPUTs0IlU3RFQDkXAHL7IVfRFmilY,2655
  onnx_diagnostic/tasks/summarization.py,sha256=AyDUHLjEymn4waIFf_ZgLAUJT6xqiGFKdaYAikK3wVA,5382
- onnx_diagnostic/tasks/text2text_generation.py,sha256=KUN7XSumftAy3cr2zYLR59RQ3wWYOTTTQkDuwjVm-HI,8464
+ onnx_diagnostic/tasks/text2text_generation.py,sha256=E-H5_wZX-RjExpM65-B61eaNx_lJVCCOKo5AN7FnYzc,9873
  onnx_diagnostic/tasks/text_classification.py,sha256=CGc72SpXFzTUyzAHEMPgyy_s187DaYGsRdrosxG80_Q,2711
- onnx_diagnostic/tasks/text_generation.py,sha256=80K4RzjCLPpzizxz_TGz6hFxLyGSGn6B2yaWnNa8WWk,14502
+ onnx_diagnostic/tasks/text_generation.py,sha256=qIhpVmTphnXVt-ewdSJF6GlIih0C0ewhtGtOs9IgW3U,14625
  onnx_diagnostic/tasks/text_to_image.py,sha256=mOS3Ruosi3hzRMxXLDN7ZkAbi7NnQb7MWwQP_okGVHs,2962
  onnx_diagnostic/tasks/zero_shot_image_classification.py,sha256=jJCMWuOqGv5ahCfjrcqxuYCJFhTgHV5KUf2yyv2yxYA,4624
  onnx_diagnostic/tasks/data/__init__.py,sha256=uJoemrWgEjI6oA-tMX7r3__x-b3siPmkgqaY7bgIles,401
  onnx_diagnostic/tasks/data/dummies_imagetext2text_generation_gemma3.onnx,sha256=UbtvmWMqcZOKJ-I-HXWI1A6YR6QDaFS5u_yXm5C3ZBw,10299
  onnx_diagnostic/torch_export_patches/__init__.py,sha256=0SaZedwznm1hQUCvXZsGZORV5vby954wEExr5faepGg,720
- onnx_diagnostic/torch_export_patches/onnx_export_errors.py,sha256=WPb8Ku643UIV8kDyt9JUpaJBIVXth9UbteCNctd_yis,41863
+ onnx_diagnostic/torch_export_patches/onnx_export_errors.py,sha256=XHYtU7w3vsaTMCuF5X1YtOKxgwL8eEuktXzVZpRz55o,43431
  onnx_diagnostic/torch_export_patches/onnx_export_serialization.py,sha256=0HdubI06EGpxOICqDWZoVmZkVO9gAaFADEmby197EyM,11935
  onnx_diagnostic/torch_export_patches/patch_details.py,sha256=MSraVo5ngBhihi8ssPMXSY9B4fJ17J-GAADaw3dT-rc,11794
- onnx_diagnostic/torch_export_patches/patch_expressions.py,sha256=vr4tt61cbDnaaaduzMj4UBZ8OUtr6GfDpIWwOYqjWzs,3213
+ onnx_diagnostic/torch_export_patches/patch_expressions.py,sha256=VOsv71FsR_UZtxz4-5_VKL2sHQhOkHy9RkPJME2h7UU,3271
  onnx_diagnostic/torch_export_patches/patch_inputs.py,sha256=-TgcyjVzxTb5Y-_ibssTeaA5PFz6FJrV6q84HMUAsJw,8075
- onnx_diagnostic/torch_export_patches/patch_module.py,sha256=9DYgTiFwbFMipFQP-IgjyIkXyVrDsRgwmUQXE2qKFsw,39454
+ onnx_diagnostic/torch_export_patches/patch_module.py,sha256=1Mn3xdpK1jSdRs6z1C-mJGkfGmD2TNRwLNoPaOW_EFI,40061
  onnx_diagnostic/torch_export_patches/patch_module_helper.py,sha256=2U0AdyZuU0W54QTdE7tY7imVzMnpQ5091ADNtTCkT8Y,6967
  onnx_diagnostic/torch_export_patches/eval/__init__.py,sha256=YQoOGt9XQLWqnJ15NnT7ri_jDevfvpuQwEJo38E-VRU,25056
  onnx_diagnostic/torch_export_patches/eval/model_cases.py,sha256=9h4yo9vKiK-E6zaXyAsxXGM-lCjd88ONybA1F3YcTI4,27988
@@ -114,18 +118,19 @@ onnx_diagnostic/torch_export_patches/patches/_patch_transformers_attention.py,sh
  onnx_diagnostic/torch_export_patches/patches/_patch_transformers_cache_utils.py,sha256=UdxLii-od2OpQmUJbmXmZinXeLBItVFrr75BVT1Y0zw,2041
  onnx_diagnostic/torch_export_patches/patches/_patch_transformers_causal_mask.py,sha256=h37DPVxsq8iAWECnTlKW5tVqSBgPBF52xr3uxsjdi2k,3113
  onnx_diagnostic/torch_export_patches/patches/_patch_transformers_dynamic_cache.py,sha256=lEdYqX60pyi_w6PrbCTk7NC96nB8FFcFRf_JMjXSAZE,7961
+ onnx_diagnostic/torch_export_patches/patches/_patch_transformers_funnel.py,sha256=QAMFiA8MGQgbowZzpfLsh7gXTuzXc3eGmZ7hLKF1i78,3352
  onnx_diagnostic/torch_export_patches/patches/_patch_transformers_gemma3.py,sha256=nVgYQk0xXpHiictN1wOHVMN2lTH9b0vfIJ4ie-uKopg,1999
  onnx_diagnostic/torch_export_patches/patches/_patch_transformers_generation_mixin.py,sha256=VIZsVHgR8NmAcBQalPl5I6ZzNgcBxjGb6ars31m9gRg,21936
  onnx_diagnostic/torch_export_patches/patches/_patch_transformers_idefics.py,sha256=kTjuTRsfkGGGhspJnMxAMQSchZgGC_IruJzpHh_FmI8,6348
  onnx_diagnostic/torch_export_patches/patches/_patch_transformers_masking_utils.py,sha256=HE3fovyvMiYe9EPz1UjdD9AWopX3H188SMwPb8w5mzM,7111
  onnx_diagnostic/torch_export_patches/patches/_patch_transformers_qwen2.py,sha256=OxYdlLrwtd_KGHt3E17poduxvWFg-CfGS57-yN1i6gI,3827
- onnx_diagnostic/torch_export_patches/patches/_patch_transformers_qwen2_5.py,sha256=GS7IDHyRaLAsbZE5k7KN-ZT5-ezbmEUzXPJ_xG4SulA,31601
+ onnx_diagnostic/torch_export_patches/patches/_patch_transformers_qwen2_5.py,sha256=oYz0tr-6KH0DabpgaISytnXAGxQosoA8gV5LpksO4yI,34834
  onnx_diagnostic/torch_export_patches/patches/_patch_transformers_qwen3.py,sha256=cND9Iqo1aKdlX-BXGr9Qlq_Y4EW1L5VWSwZfqYTVazU,4888
  onnx_diagnostic/torch_export_patches/patches/_patch_transformers_rotary_embedding.py,sha256=4bJ_z2gizZQla_fcCVt0dmuhzO9Vu-D7CCMWdxMlrKM,16893
  onnx_diagnostic/torch_export_patches/patches/_patch_transformers_sam_mask_decoder.py,sha256=-6TuBm3sLAFEGuW3vRfOTtE5uP6aINFfu7xMnl27Dws,5703
  onnx_diagnostic/torch_export_patches/patches/patch_helper.py,sha256=kK_CGW643iVXxa-m6pttDBS7HTyMQaPypza7iqIInn4,721
- onnx_diagnostic/torch_export_patches/patches/patch_torch.py,sha256=FfES0WWiWxmuQbGTlQ7IJS0YBG7km3IQbnMYwk_lPPU,44667
- onnx_diagnostic/torch_export_patches/patches/patch_transformers.py,sha256=VAfZj0xu3D4CG71SWv-9sYPUK4ZQTSz2-x4qxP4DxGE,3079
+ onnx_diagnostic/torch_export_patches/patches/patch_torch.py,sha256=VCs3uZHcuzosCqn9sSEskEWHJym_RrDJM6-G6FcTC08,45117
+ onnx_diagnostic/torch_export_patches/patches/patch_transformers.py,sha256=1W3iKVYx2QT2xJTKlz1UmtjySuwv-rfT5yVL9DjOfzI,3376
  onnx_diagnostic/torch_export_patches/serialization/__init__.py,sha256=BHLdRPtNAtNPAS-bPKEj3-foGSPvwAbZXrHzGGPDLEw,1876
  onnx_diagnostic/torch_export_patches/serialization/diffusers_impl.py,sha256=drq3EH_yjcSuIWYsVeUWm8Cx6YCZFU6bP_1PLtPfY5I,945
  onnx_diagnostic/torch_export_patches/serialization/transformers_impl.py,sha256=sIHFvUQoMK8ytXQYB-k7OL62z8A3f5uDaq-S5R5uN-M,10034
@@ -134,20 +139,21 @@ onnx_diagnostic/torch_models/code_sample.py,sha256=rCDZY64pkn6uIbJJSBuC5TlU_-ule
  onnx_diagnostic/torch_models/llms.py,sha256=soyg4yC87ptGoeulJhKqw5opGmuLvH1pn_ZDXZ4Jr8E,90
  onnx_diagnostic/torch_models/validate.py,sha256=fnbTl5v1n5nM2MpmCgCMaWa6c7DGpb5mZYSuHXXCJEs,94829
  onnx_diagnostic/torch_models/hghub/__init__.py,sha256=vi1Q7YHdddj1soiBN42MSvJdFqe2_KUoWafHISjwOu8,58
- onnx_diagnostic/torch_models/hghub/hub_api.py,sha256=rFbiPNLET-KdBpnv-p0nKgwHX6d7C_Z0s9zZ86_92kQ,14307
- onnx_diagnostic/torch_models/hghub/hub_data.py,sha256=8V_pAgACPLPsLRYUododg7MSL6str-T3tBEGY4OaeYQ,8724
- onnx_diagnostic/torch_models/hghub/hub_data_cached_configs.py,sha256=GimzkI8W3guATkDx7RQ-w2xNGVaFDVegfTnnmNxf4iE,292068
- onnx_diagnostic/torch_models/hghub/model_inputs.py,sha256=tCGqigRyY1omxm2rczRUvCTsweZGbF1MccWI3MmCH20,17423
+ onnx_diagnostic/torch_models/hghub/hub_api.py,sha256=V3azxUqb7mkmHQ8m5DCgg1WUU2NYBK12USEUy_sfYIA,14709
+ onnx_diagnostic/torch_models/hghub/hub_data.py,sha256=6jR8A83cGP4Xw1Wg-q1zzKFpqzoVrybqm0Fm3yurkrE,9030
+ onnx_diagnostic/torch_models/hghub/hub_data_cached_configs.py,sha256=Dxa13rsnTQ8eH_BcQvbY2bp1AYFtzuFrJ-J_urrSmeQ,292694
+ onnx_diagnostic/torch_models/hghub/model_inputs.py,sha256=XahJ-m6ajdXg6vFGUOfV5IvFwn-yjAsIOU37nISbBoo,17646
  onnx_diagnostic/torch_models/hghub/model_specific.py,sha256=j50Nu7wddJMoqmD4QzMbNdFDUUgUmSBKRzPDH55TlUQ,2498
  onnx_diagnostic/torch_models/untrained/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
  onnx_diagnostic/torch_models/untrained/llm_phi2.py,sha256=y_akbdApi136qHcEQgykwIAYVw0Yfi0lbjb3DNuafaU,3948
  onnx_diagnostic/torch_models/untrained/llm_tiny_llm.py,sha256=QXw_Bs2SzfeiQMf-tmtVl83SmVOL4-Um7Qy-f0E48QI,2507
  onnx_diagnostic/torch_onnx/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
+ onnx_diagnostic/torch_onnx/compare.py,sha256=O0lws4kzn8WAXr8-x-YMPr7oyBC9DtSIs4OfOr4S5-E,12305
  onnx_diagnostic/torch_onnx/runtime_info.py,sha256=u1bD6VXqzBCRmqmbzQtDswaPs1PH_ygr1r-CrcfXpNU,8562
  onnx_diagnostic/torch_onnx/sbs.py,sha256=8okBEIupMgw7TtKc80YFimMtwnY3GchdY05FsA9ooa0,40749
  onnx_diagnostic/torch_onnx/sbs_dataclasses.py,sha256=UctdBjzoPTQG1LS0tZ8A6E9hpoq5HWUYaJLPOPJc9FI,20299
- onnx_diagnostic-0.8.5.dist-info/licenses/LICENSE.txt,sha256=Vv6TXglX6Rc0d-f8aREhayhT-6PMQXEyOmI2NKlUCMc,1045
- onnx_diagnostic-0.8.5.dist-info/METADATA,sha256=A54IonPIcnualwiRJhvjRMfhF3p3jdXhEH1vTtZBgyE,6734
- onnx_diagnostic-0.8.5.dist-info/WHEEL,sha256=_zCd3N1l69ArxyTb8rzEoP9TpbYXkqRFSNOD5OuxnTs,91
- onnx_diagnostic-0.8.5.dist-info/top_level.txt,sha256=KwNkXewmcobM3ZT1DJLVWH6ebJzA5qKg7cWqKfpGNT4,16
- onnx_diagnostic-0.8.5.dist-info/RECORD,,
+ onnx_diagnostic-0.8.7.dist-info/licenses/LICENSE.txt,sha256=Vv6TXglX6Rc0d-f8aREhayhT-6PMQXEyOmI2NKlUCMc,1045
+ onnx_diagnostic-0.8.7.dist-info/METADATA,sha256=H6o_1MWw2N8hO2BYh499xYb0Mm_KjgxkwirLXxiHQHI,6734
+ onnx_diagnostic-0.8.7.dist-info/WHEEL,sha256=_zCd3N1l69ArxyTb8rzEoP9TpbYXkqRFSNOD5OuxnTs,91
+ onnx_diagnostic-0.8.7.dist-info/top_level.txt,sha256=KwNkXewmcobM3ZT1DJLVWH6ebJzA5qKg7cWqKfpGNT4,16
+ onnx_diagnostic-0.8.7.dist-info/RECORD,,