onnx-diagnostic 0.8.2__py3-none-any.whl → 0.8.3__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (43) hide show
  1. onnx_diagnostic/__init__.py +1 -1
  2. onnx_diagnostic/_command_lines_parser.py +387 -12
  3. onnx_diagnostic/export/api.py +91 -8
  4. onnx_diagnostic/export/control_flow.py +48 -345
  5. onnx_diagnostic/export/control_flow_onnx.py +528 -0
  6. onnx_diagnostic/export/control_flow_research.py +3 -3
  7. onnx_diagnostic/export/onnx_plug.py +396 -0
  8. onnx_diagnostic/ext_test_case.py +92 -23
  9. onnx_diagnostic/helpers/cache_helper.py +1 -1
  10. onnx_diagnostic/helpers/dot_helper.py +210 -0
  11. onnx_diagnostic/helpers/helper.py +90 -26
  12. onnx_diagnostic/helpers/mini_onnx_builder.py +3 -1
  13. onnx_diagnostic/helpers/model_builder_helper.py +27 -0
  14. onnx_diagnostic/helpers/onnx_helper.py +103 -1
  15. onnx_diagnostic/helpers/ort_session.py +37 -11
  16. onnx_diagnostic/helpers/torch_fx_graph_helper.py +164 -0
  17. onnx_diagnostic/helpers/torch_helper.py +103 -6
  18. onnx_diagnostic/reference/ort_evaluator.py +233 -28
  19. onnx_diagnostic/tasks/feature_extraction.py +15 -14
  20. onnx_diagnostic/tasks/summarization.py +72 -137
  21. onnx_diagnostic/torch_export_patches/patches/_patch_transformers_attention.py +235 -0
  22. onnx_diagnostic/torch_export_patches/patches/_patch_transformers_cache_utils.py +50 -0
  23. onnx_diagnostic/torch_export_patches/patches/_patch_transformers_causal_mask.py +89 -0
  24. onnx_diagnostic/torch_export_patches/patches/_patch_transformers_dynamic_cache.py +177 -0
  25. onnx_diagnostic/torch_export_patches/patches/_patch_transformers_gemma3.py +54 -0
  26. onnx_diagnostic/torch_export_patches/patches/_patch_transformers_generation_mixin.py +486 -0
  27. onnx_diagnostic/torch_export_patches/patches/_patch_transformers_idefics.py +156 -0
  28. onnx_diagnostic/torch_export_patches/patches/_patch_transformers_masking_utils.py +173 -0
  29. onnx_diagnostic/torch_export_patches/patches/_patch_transformers_qwen2.py +99 -0
  30. onnx_diagnostic/torch_export_patches/patches/_patch_transformers_qwen2_5.py +680 -0
  31. onnx_diagnostic/torch_export_patches/patches/_patch_transformers_qwen3.py +106 -0
  32. onnx_diagnostic/torch_export_patches/patches/_patch_transformers_rotary_embedding.py +412 -0
  33. onnx_diagnostic/torch_export_patches/patches/_patch_transformers_sam_mask_decoder.py +132 -0
  34. onnx_diagnostic/torch_export_patches/patches/patch_helper.py +28 -0
  35. onnx_diagnostic/torch_export_patches/patches/patch_transformers.py +64 -2608
  36. onnx_diagnostic/torch_models/validate.py +50 -1
  37. onnx_diagnostic/torch_onnx/sbs.py +963 -312
  38. onnx_diagnostic/torch_onnx/sbs_dataclasses.py +491 -0
  39. {onnx_diagnostic-0.8.2.dist-info → onnx_diagnostic-0.8.3.dist-info}/METADATA +1 -1
  40. {onnx_diagnostic-0.8.2.dist-info → onnx_diagnostic-0.8.3.dist-info}/RECORD +43 -24
  41. {onnx_diagnostic-0.8.2.dist-info → onnx_diagnostic-0.8.3.dist-info}/WHEEL +0 -0
  42. {onnx_diagnostic-0.8.2.dist-info → onnx_diagnostic-0.8.3.dist-info}/licenses/LICENSE.txt +0 -0
  43. {onnx_diagnostic-0.8.2.dist-info → onnx_diagnostic-0.8.3.dist-info}/top_level.txt +0 -0
@@ -0,0 +1,491 @@
1
+ import os
2
+ import textwrap
3
+ from dataclasses import dataclass
4
+ from typing import Any, Dict, List, Optional, Set, Tuple
5
+
6
+ try:
7
+ from typing import Self
8
+ except ImportError:
9
+ # python <= 3.10
10
+ Self = "Self" # type: ignore[assignment]
11
+ import onnx
12
+ import numpy as np
13
+ import torch
14
+ from ..helpers.onnx_helper import extract_subset_of_nodes, make_submodel, from_array_extended
15
+ from ..helpers.torch_helper import torch_dtype_to_onnx_dtype
16
+
17
+
18
+ def make_torch_inputs(
19
+ input_names: List[str],
20
+ onnx_name_to_ep_name: Dict[str, str],
21
+ onnx_results: Dict[str, torch.Tensor],
22
+ torch_results: Dict[str, torch.Tensor],
23
+ submodel: Optional[onnx.ModelProto],
24
+ ) -> Tuple[Dict[str, torch.Tensor], Set[str]]:
25
+ """
26
+ Gathers torch tensors instead of onnx tensors (tensors produced by the onnx model)
27
+
28
+ :param input_names: tensors to gather
29
+ :param onnx_name_to_ep_name: mapping between onnx name to names in the exported program
30
+ :param onnx_results: all onnx results (produced by the onnx model)
31
+ :param torch_results: all tensors produced by the exported program
32
+ :param submodel: onnx model, any tensor missing in `torch_results` is
33
+ added as an initializer to this model
34
+ :return: the list of tensors, the set of inputs for which there was no tensor coming
35
+ from the exported program
36
+ """
37
+ torch_inputs = {}
38
+ removed_inputs = set()
39
+ for n in input_names:
40
+ if n in onnx_name_to_ep_name:
41
+ torch_inputs[n] = torch_results[onnx_name_to_ep_name[n]]
42
+ else:
43
+ removed_inputs.add(n)
44
+ if submodel is not None:
45
+ # We add that input as an initializer because it is probably a constant.
46
+ submodel.graph.initializer.append(from_array_extended(onnx_results[n], name=n))
47
+ else:
48
+ torch_inputs[n] = onnx_results[n]
49
+ return torch_inputs, removed_inputs
50
+
51
+
52
+ @dataclass
53
+ class ReplayConfiguration:
54
+ """
55
+ Configuration specifying how to replay or dump pieces of
56
+ onnx graph in order to replay them later and investigate
57
+ later possible sources of discrepancies.
58
+
59
+ :param dump_folder: where to dump the onnx model corresponding to the
60
+ pieces to investigate
61
+ :param selected_names: list of results names to dump
62
+ :param selected_op_types: list of onnx operators to dump
63
+ :param threshold: only keep those whose discrepancies is greater than that threshold
64
+ """
65
+
66
+ dump_folder: str
67
+ selected_names: Optional[Set[str]] = None
68
+ selected_op_types: Optional[Set[str]] = None
69
+ threshold: float = 0.1
70
+
71
+ def __post_init__(self):
72
+ assert self.dump_folder, "dump_folder is empty and this is not allowed for the replay"
73
+
74
+ def select(
75
+ self,
76
+ name: Optional[str] = None,
77
+ op_type: Optional[str] = None,
78
+ err_abs: Optional[float] = None,
79
+ ) -> bool:
80
+ """
81
+ Returns True or False depending on whether a piece of the onnx model should be dumped,
82
+ around a particular node. The result is True if one of the conditions is true:
83
+
84
+ * ``name in self.selected_names``
85
+ * ``op_type in self.selected_op_types``
86
+ * ``err_abs >= self.threshold``
87
+
88
+ :param name: result name
89
+ :param op_type: operator type
90
+ :param err_abs: measured discrepancy
91
+ :return: True if this should be dumped
92
+ """
93
+ if name and self.selected_names and name in self.selected_names:
94
+ return True
95
+ if op_type and self.selected_op_types and op_type in self.selected_op_types:
96
+ return True
97
+ if err_abs is not None and self.threshold is not None and err_abs >= self.threshold:
98
+ return True
99
+ return False
100
+
101
+ def get_replay_code(self) -> str:
102
+ """
103
+ Returns a code letting the user replay the onnx model.
104
+ It looks like the following. It may have to be adapted.
105
+
106
+ .. runpython::
107
+ :showcode:
108
+
109
+ from onnx_diagnostic.torch_onnx.sbs_dataclasses import ReplayConfiguration
110
+
111
+ rc = ReplayConfiguration(dump_folder="unused")
112
+ print(rc.get_replay_code())
113
+ """
114
+ return textwrap.dedent(
115
+ """
116
+ import onnx
117
+ import torch
118
+ from onnx_diagnostic.helpers import max_diff, string_diff, string_type
119
+ from onnx_diagnostic.helpers.torch_helper import study_discrepancies
120
+ from onnx_diagnostic.helpers.onnx_helper import pretty_onnx
121
+ from onnx_diagnostic.reference import OnnxruntimeEvaluator
122
+
123
+ skws = dict(with_shape=True, with_device=True)
124
+
125
+ torch_inputs = torch.load("torch_inputs.pt")
126
+ onnx_inputs = torch.load("onnx_inputs.pt")
127
+ expected_outputs_and_mapping = torch.load("torch_outputs_and_mapping.pt")
128
+ expected = expected_outputs_and_mapping["expected"]
129
+ mapping = expected_outputs_and_mapping["mapping"]
130
+
131
+ print(f"-- torch_inputs={string_type(torch_inputs, **skws)}")
132
+ print(f"-- onnx_inputs={string_type(onnx_inputs, **skws)}")
133
+ print(f"-- expected={string_type(expected, **skws)}")
134
+ print(f"-- mapping={mapping}")
135
+
136
+ print()
137
+ print("-- model.onnx")
138
+ print()
139
+
140
+ model = onnx.load("model.onnx")
141
+ print(pretty_onnx(model))
142
+
143
+ print()
144
+ print("-- range of inputs --")
145
+ print()
146
+
147
+ for k, v in onnx_inputs.items():
148
+ print(f"-- {k}: {string_type(v, **skws, with_min_max=True)}")
149
+
150
+ print()
151
+ print("-- discrepancies of inputs --")
152
+ print()
153
+
154
+ ep_feeds = {}
155
+ for k, v in onnx_inputs.items():
156
+ tk = mapping.get(k, k)
157
+ tkv = torch_inputs[k] if k in torch_inputs else torch_inputs[tk]
158
+ ep_feeds[k] = tkv
159
+ diff = max_diff(v, tkv)
160
+ print(
161
+ f"-- {k} -> {tk} ep:{string_type(tkv, **skws)} "
162
+ f"nx:{string_type(v, **skws)} / diff {string_diff(diff)}"
163
+ )
164
+
165
+ print()
166
+ print("-- SVD --")
167
+ print()
168
+
169
+ for k, v in onnx_inputs.items():
170
+ if len(v.shape) == 2:
171
+ U, S, Vt = torch.linalg.svd(v.to(torch.float32))
172
+ print(f" -- {k}: {S[:5]}")
173
+
174
+ print()
175
+ print("-- run with onnx_inputs --")
176
+ print()
177
+
178
+ sess = OnnxruntimeEvaluator(model, whole=True)
179
+ feeds = onnx_inputs
180
+ obtained = sess.run(None, feeds)
181
+ print(f"-- obtained={string_type(obtained, **skws)}")
182
+ diff = max_diff(expected, tuple(obtained), hist=[0.1, 0.01])
183
+ print(f"-- diff: {string_diff(diff)}")
184
+ print()
185
+ print("-- plots --")
186
+
187
+ for i in range(len(expected)):
188
+ study_discrepancies(
189
+ expected[i],
190
+ obtained[i],
191
+ title=f"study output {i}",
192
+ name=f"disc{i}.png",
193
+ bins=50,
194
+ )
195
+
196
+ print()
197
+ print("-- run with torch_inputs --")
198
+ print()
199
+
200
+ obtained = sess.run(None, ep_feeds)
201
+ print(f"-- obtained={string_type(obtained, **skws)}")
202
+ diff = max_diff(expected, tuple(obtained), hist=[0.1, 0.01])
203
+ print(f"-- diff: {string_diff(diff)}")
204
+
205
+ print()
206
+ print("-- end --")
207
+ print()
208
+
209
+ if False:
210
+ # CUDA profiling
211
+ with torch.profiler.profile(
212
+ activities=[torch.profiler.ProfilerActivity.CUDA],
213
+ record_shapes=True,
214
+ with_stack=True,
215
+ ) as prof:
216
+ sess.run(None, ep_feeds)
217
+ obj = prof.key_averages()
218
+ print(obj.table())
219
+ """
220
+ )
221
+
222
+ def dump(
223
+ self,
224
+ name: str,
225
+ onnx_id_node: int,
226
+ model: onnx.ModelProto,
227
+ onnx_results: Dict[str, Any],
228
+ torch_results: Dict[str, torch.Tensor],
229
+ onnx_name_to_ep_name: Dict[str, str],
230
+ verbose: int = 0,
231
+ ) -> Optional[str]:
232
+ """
233
+ Dumps the minimal graph which can be replayed outside the model.
234
+
235
+ :param name: name of the result to look into
236
+ :param onnx_id_node: index of the node which produces it in model `model`
237
+ :param model: onnx model
238
+ :param onnx_results: all known onnx results
239
+ :param torch_results: all known torch results
240
+ :param onnx_name_to_ep_name: correspondence between onnx_node name
241
+ and exported program name
242
+ :param verbose: verbosity level
243
+ :return: the folder created to dump everything
244
+ """
245
+ if verbose:
246
+ print(f"[ReplayConfiguration.dump] extract subset of node for {name!r}")
247
+ nodes = extract_subset_of_nodes(
248
+ model=model,
249
+ name=name,
250
+ node_index=onnx_id_node,
251
+ cut_points=set(onnx_name_to_ep_name),
252
+ )
253
+ if not nodes:
254
+ if verbose:
255
+ print(
256
+ f"[ReplayConfiguration.dump] could not extract subset of node for {name!r}"
257
+ )
258
+ return None
259
+ if verbose:
260
+ print(f"[ReplayConfiguration.dump] make model with {len(nodes)} nodes")
261
+ submodel = make_submodel(
262
+ nodes,
263
+ ir_version=model.ir_version,
264
+ opset_imports=model.opset_import,
265
+ output_names=[name],
266
+ type_rank_fn=lambda name: (
267
+ torch_dtype_to_onnx_dtype(onnx_results[name].dtype),
268
+ len(onnx_results[name].shape),
269
+ ),
270
+ )
271
+ input_names = [n.name for n in submodel.graph.input]
272
+ if verbose:
273
+ print(f"[ReplayConfiguration.dump] model inputs {input_names}")
274
+ folder = os.path.join(self.dump_folder, name.replace(":", "_").replace("/", "_"))
275
+ os.makedirs(folder, exist_ok=True)
276
+ if verbose:
277
+ print(f"[ReplayConfiguration.dump] dumps into folder {folder!r}")
278
+
279
+ torch_inputs, removed_inputs = make_torch_inputs(
280
+ input_names, onnx_name_to_ep_name, onnx_results, torch_results, submodel
281
+ )
282
+
283
+ if removed_inputs:
284
+ input_names = [i for i in input_names if i not in removed_inputs]
285
+ new_inputs = [i for i in submodel.graph.input if i.name not in removed_inputs]
286
+ del submodel.graph.input[:]
287
+ submodel.graph.input.extend(new_inputs)
288
+ if verbose:
289
+ print(f"[ReplayConfiguration.dump] removed input {removed_inputs}")
290
+ print(f"[ReplayConfiguration.dump] final model inputs {input_names}")
291
+
292
+ onnx.save(submodel, os.path.join(folder, "model.onnx"))
293
+ onnx_inputs = {n: onnx_results[n] for n in input_names}
294
+ assert (
295
+ name in onnx_name_to_ep_name
296
+ ), f"Unable to find {name!r} in {onnx_name_to_ep_name}"
297
+ expected_outputs_and_mapping = dict(
298
+ expected=(torch_results[onnx_name_to_ep_name[name]],),
299
+ mapping={
300
+ k: onnx_name_to_ep_name[k] for k in input_names if k in onnx_name_to_ep_name
301
+ },
302
+ )
303
+ torch.save(torch_inputs, os.path.join(folder, "torch_inputs.pt"))
304
+ torch.save(onnx_inputs, os.path.join(folder, "onnx_inputs.pt"))
305
+ torch.save(
306
+ expected_outputs_and_mapping, os.path.join(folder, "torch_outputs_and_mapping.pt")
307
+ )
308
+ with open(os.path.join(folder, "replay.py"), "w") as f:
309
+ f.write(self.get_replay_code())
310
+ if verbose:
311
+ print(f"[ReplayConfiguration.dump] done {folder!r}")
312
+ return folder
313
+
314
+
315
+ @dataclass
316
+ class RunAlignedRecord:
317
+ """
318
+ The side-by-side run by function :func:`run_aligned
319
+ <onnx_diagnostic.torch_onnx.sbs.run_aligned>`
320
+ yields instances of this type. If both `ep_name`
321
+ and `onnx_name` are specified, then both results
322
+ appear in the exported program (torch) and the onnx model.
323
+
324
+ :param ep_id_node: node index in the exported program
325
+ :param onnx_id_node: node index in the onnx model, -1 for an initializer
326
+ :param ep_name: result name in the exported program
327
+ :param onnx_name: result name in the onnx model, usually same as `ep_name`
328
+ except for initializer
329
+ :param ep_target: target name in the exported program producing the result
330
+ :param onnx_op_type: operator type in the onnx model producing the result
331
+ :param onnx_id_output: usually 0 unless this node has multiple outputs,
332
+ in that case, it is the output index
333
+ :param ep_shape_type: shape and type of the results in the exported program
334
+ :param onnx_shape_type: shape and type of the results in the onnx mode,
335
+ it should be the same as `ep_shape_type`, anything different probably
336
+ means a bug
337
+ :param err_abs: maximum absolute error for the considered result
338
+ between the exported program and the onnx model
339
+ :param err_rel: maximum relative error
340
+ :param err_dev: 0 if the device is the same, 1 if not
341
+ :param err_nan: number of nan values disagreeing
342
+ :param err_h01: number of values for which the discrepancy is above 0.1
343
+ :param err_h001: number of values for which the discrepancy is above 0.01
344
+ :param ep_time_run: execution time for the exported program
345
+ :param onnx_time_run: execution time for the onnx model, that includes
346
+ the creation of the onnx model so that's probably not very usable
347
+ :param err_abs2: same as `err_abs` if onnx kernel is run with torch results
348
+ :param err_rel2: same as `err_rel` if onnx kernel is run with torch results
349
+ :param err_dev2: same as `err_dev` if onnx kernel is run with torch results
350
+ :param err_nan2: same as `err_nan` if onnx kernel is run with torch results
351
+ :param err_h012: same as `err_h01` if onnx kernel is run with torch results
352
+ :param err_h0012: same as `err_h001` if onnx kernel is run with torch results
353
+ :param comment: any additional information
354
+ """
355
+
356
+ ep_id_node: Optional[int] = None
357
+ onnx_id_node: Optional[int] = None
358
+ ep_name: Optional[str] = None
359
+ onnx_name: Optional[str] = None
360
+ ep_target: Optional[str] = None
361
+ onnx_op_type: Optional[str] = None
362
+ onnx_id_output: Optional[int] = None
363
+ ep_shape_type: Optional[str] = None
364
+ onnx_shape_type: Optional[str] = None
365
+ err_abs: Optional[float] = None
366
+ err_rel: Optional[float] = None
367
+ err_dev: Optional[float] = None
368
+ err_nan: Optional[float] = None
369
+ err_h01: Optional[float] = None
370
+ err_h001: Optional[float] = None
371
+ ep_time_run: Optional[float] = None
372
+ onnx_time_run: Optional[float] = None
373
+ err_abs2: Optional[float] = None
374
+ err_rel2: Optional[float] = None
375
+ err_dev2: Optional[float] = None
376
+ err_nan2: Optional[float] = None
377
+ err_h012: Optional[float] = None
378
+ err_h0012: Optional[float] = None
379
+ comment: Optional[str] = None
380
+
381
+ def __post_init__(self):
382
+ "Validation."
383
+ assert self.ep_id_node is None or self.ep_id_node >= 0, (
384
+ f"Node id are always positive in the exported program but "
385
+ f"ep_id_node={self.ep_id_node}"
386
+ )
387
+
388
+ def set_diff(self, diff: Dict[str, Any]) -> Self:
389
+ """Sets error."""
390
+ if diff is None:
391
+ return
392
+ if "abs" in diff:
393
+ self.err_abs = diff["abs"]
394
+ if "rel" in diff:
395
+ self.err_rel = diff["rel"]
396
+ if "dev" in diff:
397
+ self.err_dev = diff["dev"]
398
+ if "nan" in diff:
399
+ self.err_nan = diff["nan"]
400
+ if "rep" in diff:
401
+ self.err_h01 = diff["rep"][">0.1"]
402
+ self.err_h001 = diff["rep"][">0.01"]
403
+ return self
404
+
405
+ def set_diff2(self, diff: Dict[str, Any]) -> Self:
406
+ """Sets error."""
407
+ if diff is None:
408
+ return
409
+ if "abs" in diff:
410
+ self.err_abs2 = diff["abs"]
411
+ if "rel" in diff:
412
+ self.err_rel2 = diff["rel"]
413
+ if "dev" in diff:
414
+ self.err_dev2 = diff["dev"]
415
+ if "nan" in diff:
416
+ self.err_nan2 = diff["nan"]
417
+ if "rep" in diff:
418
+ self.err_h012 = diff["rep"][">0.1"]
419
+ self.err_h0012 = diff["rep"][">0.01"]
420
+ return self
421
+
422
+ @property
423
+ def key(
424
+ self,
425
+ ) -> Tuple[Optional[int], Optional[int], Optional[int], Optional[str], Optional[str]]:
426
+ "Creates a unique identifier."
427
+ return (
428
+ self.ep_id_node,
429
+ self.onnx_id_node,
430
+ self.onnx_id_output,
431
+ self.ep_name,
432
+ self.onnx_name,
433
+ )
434
+
435
+ def check(
436
+ self,
437
+ already_yielded: Dict[
438
+ Tuple[Optional[int], Optional[int], Optional[int], Optional[str], Optional[str]],
439
+ int,
440
+ ],
441
+ ) -> Self:
442
+ "Checks a record was not already yielded."
443
+ if self.onnx_op_type == "reset":
444
+ # no record for this one
445
+ return self
446
+ key = self.key
447
+ assert key not in already_yielded, (
448
+ f"Record with key={key} was already yielded, "
449
+ f"number of records={len(already_yielded)} and previous "
450
+ f"record at position {already_yielded[key]} (self={self})"
451
+ )
452
+ already_yielded[key] = len(already_yielded)
453
+ return self
454
+
455
+
456
+ @dataclass
457
+ class StatusRunAligned:
458
+ """
459
+ Information to display while running the side-by-side
460
+
461
+ :param max_abs: maximum absolute seen so far
462
+ :param n_inf: number of infinite values seen so far
463
+ :param n_nan: number of nan values seen so far
464
+ :param yielded_nodes: number of yielded pairs of nodes seen so far
465
+ :param last_replay: last result dumped on disk for later replay
466
+ """
467
+
468
+ max_abs: float = 0.0
469
+ n_inf: int = 0
470
+ n_nan: int = 0
471
+ yielded_nodes: int = 0
472
+ last_replay: str = ""
473
+
474
+ def to_str(self) -> str:
475
+ "Nice display."
476
+ s = (
477
+ f"yielded={self.yielded_nodes} maxabs={self.max_abs:1.3f} "
478
+ f"#inf={self.n_inf} #nan={self.n_nan}"
479
+ )
480
+ if self.last_replay:
481
+ return f"{s} -PLAY({self.last_replay})"
482
+ return s
483
+
484
+ def update(self, err_abs: float):
485
+ "Updates all attributes with the latest measure."
486
+ if np.isinf(err_abs) or np.isnan(err_abs):
487
+ self.n_inf += 1
488
+ elif err_abs > 1e6:
489
+ self.n_nan += 1
490
+ else:
491
+ self.max_abs = max(self.max_abs, err_abs)
@@ -1,6 +1,6 @@
1
1
  Metadata-Version: 2.4
2
2
  Name: onnx-diagnostic
3
- Version: 0.8.2
3
+ Version: 0.8.3
4
4
  Summary: Tools to help converting pytorch models into ONNX.
5
5
  Home-page: https://github.com/sdpython/onnx-diagnostic
6
6
  Author: Xavier Dupré
@@ -1,37 +1,41 @@
1
- onnx_diagnostic/__init__.py,sha256=V-Rg-8wiAAhPPRxZNTkkCAlGh_v3sDrFDlTo1Ptqi00,173
1
+ onnx_diagnostic/__init__.py,sha256=q-JGXWdB5HIHrAPUbiGuZ3aflvcrMyhNffVzOd5xO-8,173
2
2
  onnx_diagnostic/__main__.py,sha256=YmyV_Aq_ianDlHyKLHMa6h8YK3ZmFPpLVHLKjM91aCk,79
3
- onnx_diagnostic/_command_lines_parser.py,sha256=uDn91eWmiz9i7KEaB2vTsR06kkrM0VCkUq9kaBDiPA0,39384
3
+ onnx_diagnostic/_command_lines_parser.py,sha256=KFKyH254F7FdcKKc2QrtAjpGLpLwnduflPvqm3FM0UI,51441
4
4
  onnx_diagnostic/api.py,sha256=BhCl_yCd78N7TlVtPOHjeYv1QBEy39TjZ647rcHqLh0,345
5
5
  onnx_diagnostic/doc.py,sha256=t3RELgfooYnVMAi0JSpggWkQEgUsREz8NmRvn0TnLI8,2829
6
- onnx_diagnostic/ext_test_case.py,sha256=RU1WQi2UJNla9vcgz2XrL3q1_YN4v7rB9WnF4_58kko,44867
6
+ onnx_diagnostic/ext_test_case.py,sha256=bA4oK1mnccbt5Ien24I9nhiDgay0IjCxOeH1lPU5E-g,47798
7
7
  onnx_diagnostic/export/__init__.py,sha256=yEIoWiOeTwBsDhyYt2fTKuhtA0Ya1J9u9ZzMTOTWaWs,101
8
- onnx_diagnostic/export/api.py,sha256=gj0rhXjJRchrQhuBdyljA8DlxSNjO7S0dGV4qfoa040,6058
9
- onnx_diagnostic/export/control_flow.py,sha256=mJQHqnvf8RQ9hI69Ghnl-tJrSrWz2fK_STcbpSUZ02M,17351
10
- onnx_diagnostic/export/control_flow_research.py,sha256=qO4X52Lp_grMx0wXOEfwOpXMiRd-VFeMibfJ4VigBcU,5200
8
+ onnx_diagnostic/export/api.py,sha256=xivxtvjFAIbKquAsGLjVp9J_CZl6KWeO8IT4wii9FOI,9649
9
+ onnx_diagnostic/export/control_flow.py,sha256=zU5n_QYhNcBllyMsl1_i6ohZt2CshqG2MokJghrvA60,7751
10
+ onnx_diagnostic/export/control_flow_onnx.py,sha256=sODOD4v7EJj6LWhrfcdCW68r9nYKsRM4SRnqDw4TrSI,18049
11
+ onnx_diagnostic/export/control_flow_research.py,sha256=UMAlriHKBLoYJzdq3kCmsUIKOlYQE0OjFbJ8zkuvuwI,5220
11
12
  onnx_diagnostic/export/dynamic_shapes.py,sha256=M2hlpHSTbkzZwGKAbrpQXng5HQrwjF5Z6wGGxEgnp74,42061
13
+ onnx_diagnostic/export/onnx_plug.py,sha256=vwUyOtF5aihAO-98QSmY_sD9w0hNMnGKaSxG0cF0ZCo,14660
12
14
  onnx_diagnostic/export/shape_helper.py,sha256=m628y0oRCQbeZkeh8JDHIfWMsSjoJoeX-IPiPGDHT-w,11273
13
15
  onnx_diagnostic/export/validate.py,sha256=_PGUql2DJhIgGKo0WjTGUc5AgsZUx8fEs00MePy-w98,6043
14
16
  onnx_diagnostic/helpers/__init__.py,sha256=GJ2GT7cgnlIveVUwMZhuvUwidbTJaKv8CsSIOpZDsJg,83
15
17
  onnx_diagnostic/helpers/_log_helper.py,sha256=OTwQH0OIxs9B6nrSvR7MoxMimSw_8mU0mj133NvLk5o,16832
16
18
  onnx_diagnostic/helpers/args_helper.py,sha256=SRWnqC7EENg09RZlA50B_PcdiIhdbgA4C3ACfzl5nMs,4419
17
19
  onnx_diagnostic/helpers/bench_run.py,sha256=CGA6VMJZMH2gDhVueT9ypNm4PMcjGrrGFYp08nhWj9k,16539
18
- onnx_diagnostic/helpers/cache_helper.py,sha256=JcpRNvwNY6CRjjaZApShhdw8hCZNHS2xUhce5ZaWVtU,28446
20
+ onnx_diagnostic/helpers/cache_helper.py,sha256=OLghsSUuZ8cWGkua8eH75KBF-mbVqejnNUYfFo5lRf0,28498
19
21
  onnx_diagnostic/helpers/config_helper.py,sha256=cWRETgFhZ7tayIZPnMqF8BF5AvTU64G2BMqyzgO7lzs,5670
20
22
  onnx_diagnostic/helpers/doc_helper.py,sha256=pl5MZd3_FaE8BqQnqoBuSBxoNCFcd2OJd3eITUSku5c,5897
23
+ onnx_diagnostic/helpers/dot_helper.py,sha256=Ii6jg-1YUJPI6cPhhTeD8rc5PJR0GIiGa2PLOdWJyA8,7798
21
24
  onnx_diagnostic/helpers/fake_tensor_helper.py,sha256=J7wnK3WTuVKnYiMzLVTAPkdJr3hQfIfMC9ZlOu7oGmI,11024
22
25
  onnx_diagnostic/helpers/graph_helper.py,sha256=hevQT5a7_QuriVPQcbT5qe18n99Doyl5h3-qshx1-uk,14093
23
- onnx_diagnostic/helpers/helper.py,sha256=xzq3iBiasWy6qH1_-ydn8QzRKaBgOy4Cs3f9O5PU-3M,63027
26
+ onnx_diagnostic/helpers/helper.py,sha256=aCPkAU6iNmHA3Glt_uehEiBOIIZtXDgq9hjhdG5Ol3Y,65568
24
27
  onnx_diagnostic/helpers/log_helper.py,sha256=0lJiTF87lliI-LmgpUH_V2N8NuzJ0LryH0mSYpkRaL8,93272
25
28
  onnx_diagnostic/helpers/memory_peak.py,sha256=M3m4_thWFIwP5HytbJYEqaijXIv5v5BW_vlcJowIYI4,6434
26
- onnx_diagnostic/helpers/mini_onnx_builder.py,sha256=hGAh5pLmXMwx8vx8MUjPmvWak86nCc3pJOvdfX-mqcE,23828
27
- onnx_diagnostic/helpers/model_builder_helper.py,sha256=5V-SlOVQaGPZov6aSJ0IvYcwpDo2hsCBJpqadRUdnrk,17875
28
- onnx_diagnostic/helpers/onnx_helper.py,sha256=NC3dD1mKCjzkj5YS6rrBwo5zgfot2Lz71c2ARq-lLpI,39641
29
- onnx_diagnostic/helpers/ort_session.py,sha256=wjQ1pwzUHuOFxK8Q8Ve3Ph6CUBBC_udK7FcwuDyDMzA,29541
29
+ onnx_diagnostic/helpers/mini_onnx_builder.py,sha256=jR2lkRZEQ0N30H0FqeBwaxJd_w_6kyxFagrnulqFjhE,23883
30
+ onnx_diagnostic/helpers/model_builder_helper.py,sha256=qKIq4Naqq03gk6NfqXLQjSDiKL5FFNc1AEyVX0R8GmA,18540
31
+ onnx_diagnostic/helpers/onnx_helper.py,sha256=BCf1djXB--eY_FJv9ldD85bQ--OMI_uPDJN6SumdbdM,43057
32
+ onnx_diagnostic/helpers/ort_session.py,sha256=TFCDgcG3Nvj_1S0xTIUqDw0WTSeav0NMJFYCfT_W-dw,30505
30
33
  onnx_diagnostic/helpers/rt_helper.py,sha256=OOxHSCKZup2u7zTvVJxPkRHb4jQZ03KpkiDGrfwibMM,38135
31
- onnx_diagnostic/helpers/torch_helper.py,sha256=_-5ecz0D8CAEzkH0uW9MTRywH-NDPNCx0qVmWzTa6do,35180
34
+ onnx_diagnostic/helpers/torch_fx_graph_helper.py,sha256=7xFe4svdbr4gV3OTNcx8eJejjDyHAv4hD_RNNKSxL0c,6571
35
+ onnx_diagnostic/helpers/torch_helper.py,sha256=gtkk31BptY5JvNOvbx_1Bfyuns_mQu9gEO0upoUTdy4,38294
32
36
  onnx_diagnostic/reference/__init__.py,sha256=rLZsxOlnb7-81F2CzepGnZLejaROg4JvgFaGR9FwVQA,208
33
37
  onnx_diagnostic/reference/evaluator.py,sha256=RzNzjFDeMe-4X51Tb22N6aagazY5ktNq-mRmPcfY5EU,8848
34
- onnx_diagnostic/reference/ort_evaluator.py,sha256=nituItsP3IKDDWF9z-iGX_iAubrTcdk8pb1GVBp9sCU,26161
38
+ onnx_diagnostic/reference/ort_evaluator.py,sha256=RsTboIAL1QqudV6X3P3VxBPxLtJJF8TsxNdyy0L5epE,34773
35
39
  onnx_diagnostic/reference/quantized_tensor.py,sha256=5u67uS2uGacdMD5VYCbpojNjiesDlV_kO0fAJ0vUWGE,1098
36
40
  onnx_diagnostic/reference/report_results_comparison.py,sha256=OsyQN8EHZZoj97u74RQP-7WFpebPOso5GEDpdkLWu6M,3645
37
41
  onnx_diagnostic/reference/torch_evaluator.py,sha256=Tx1teWvfGEX5RmkDnI83UiOlo5eBOC72vPhgTWdFUF0,27689
@@ -78,7 +82,7 @@ onnx_diagnostic/reference/torch_ops/shape_ops.py,sha256=pJrNR2UB4PlWl6cv4EDl1uGl
78
82
  onnx_diagnostic/reference/torch_ops/unary_ops.py,sha256=dwu6HPr4V_roxu85U3VLTtDLx5bfxKalT_-zlQxZ5wc,1850
79
83
  onnx_diagnostic/tasks/__init__.py,sha256=kk-I2tgtb32A_ANh6Ux_u982mA2SrQKO_MDp0KsRi28,2774
80
84
  onnx_diagnostic/tasks/automatic_speech_recognition.py,sha256=aMufLDGW005f7aLMZ9alIQtg2s_WIUk5Rd9udS_BZ38,6964
81
- onnx_diagnostic/tasks/feature_extraction.py,sha256=Bt5meYvVqOFd_v8NgLKAfyqtjoEsEi6oQcQLn1vcwG4,5316
85
+ onnx_diagnostic/tasks/feature_extraction.py,sha256=IS9z9fPNE0hhGUebBfmNZl0twdXobMc7MFKpQB9qZI0,5388
82
86
  onnx_diagnostic/tasks/fill_mask.py,sha256=5Gt6zlj0p6vuifox7Wmj-TpHXJvPS0CEH8evgdBHDNA,2640
83
87
  onnx_diagnostic/tasks/image_classification.py,sha256=nLpBBB1Gkog3Fk6pu2waiHcuQr4ILPptc9FhQ-pn460,4682
84
88
  onnx_diagnostic/tasks/image_text_to_text.py,sha256=g-xRRTVUiT6mBvYYbOxgzlHNfu9SnEgUcvTeSoqf7gE,21765
@@ -87,7 +91,7 @@ onnx_diagnostic/tasks/mask_generation.py,sha256=fjdD3rd-O-mFL0hQy3la3JXKth_0bH2H
87
91
  onnx_diagnostic/tasks/mixture_of_expert.py,sha256=al4tk1BrHidtRiHlAaiflWiJaAte0d5M8WcBioANG9k,2808
88
92
  onnx_diagnostic/tasks/object_detection.py,sha256=3FiT8ya5FCd9lwjQCRXhAwXspNwYTlAD3Gpk8aAcG5w,4279
89
93
  onnx_diagnostic/tasks/sentence_similarity.py,sha256=vPqNZgAnIvY0rKWPUTs0IlU3RFQDkXAHL7IVfRFmilY,2655
90
- onnx_diagnostic/tasks/summarization.py,sha256=nc0pUCBxQgOLbW9kCIfP361XwUy9HTeaT8lrbVNTPw0,8071
94
+ onnx_diagnostic/tasks/summarization.py,sha256=AyDUHLjEymn4waIFf_ZgLAUJT6xqiGFKdaYAikK3wVA,5382
91
95
  onnx_diagnostic/tasks/text2text_generation.py,sha256=KUN7XSumftAy3cr2zYLR59RQ3wWYOTTTQkDuwjVm-HI,8464
92
96
  onnx_diagnostic/tasks/text_classification.py,sha256=CGc72SpXFzTUyzAHEMPgyy_s187DaYGsRdrosxG80_Q,2711
93
97
  onnx_diagnostic/tasks/text_generation.py,sha256=80K4RzjCLPpzizxz_TGz6hFxLyGSGn6B2yaWnNa8WWk,14502
@@ -106,15 +110,29 @@ onnx_diagnostic/torch_export_patches/patch_module_helper.py,sha256=2U0AdyZuU0W54
106
110
  onnx_diagnostic/torch_export_patches/eval/__init__.py,sha256=YQoOGt9XQLWqnJ15NnT7ri_jDevfvpuQwEJo38E-VRU,25056
107
111
  onnx_diagnostic/torch_export_patches/eval/model_cases.py,sha256=9h4yo9vKiK-E6zaXyAsxXGM-lCjd88ONybA1F3YcTI4,27988
108
112
  onnx_diagnostic/torch_export_patches/patches/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
113
+ onnx_diagnostic/torch_export_patches/patches/_patch_transformers_attention.py,sha256=kI0qgAGFxkyvx8wikQtPcik_zpPpTAQypQ1cMQsjetw,7730
114
+ onnx_diagnostic/torch_export_patches/patches/_patch_transformers_cache_utils.py,sha256=UdxLii-od2OpQmUJbmXmZinXeLBItVFrr75BVT1Y0zw,2041
115
+ onnx_diagnostic/torch_export_patches/patches/_patch_transformers_causal_mask.py,sha256=h37DPVxsq8iAWECnTlKW5tVqSBgPBF52xr3uxsjdi2k,3113
116
+ onnx_diagnostic/torch_export_patches/patches/_patch_transformers_dynamic_cache.py,sha256=lEdYqX60pyi_w6PrbCTk7NC96nB8FFcFRf_JMjXSAZE,7961
117
+ onnx_diagnostic/torch_export_patches/patches/_patch_transformers_gemma3.py,sha256=nVgYQk0xXpHiictN1wOHVMN2lTH9b0vfIJ4ie-uKopg,1999
118
+ onnx_diagnostic/torch_export_patches/patches/_patch_transformers_generation_mixin.py,sha256=VIZsVHgR8NmAcBQalPl5I6ZzNgcBxjGb6ars31m9gRg,21936
119
+ onnx_diagnostic/torch_export_patches/patches/_patch_transformers_idefics.py,sha256=kTjuTRsfkGGGhspJnMxAMQSchZgGC_IruJzpHh_FmI8,6348
120
+ onnx_diagnostic/torch_export_patches/patches/_patch_transformers_masking_utils.py,sha256=R4YwnN9ktxjjImiJtLRxiKtKLr9LuFlwkPXkTJ6BTIo,6895
121
+ onnx_diagnostic/torch_export_patches/patches/_patch_transformers_qwen2.py,sha256=OxYdlLrwtd_KGHt3E17poduxvWFg-CfGS57-yN1i6gI,3827
122
+ onnx_diagnostic/torch_export_patches/patches/_patch_transformers_qwen2_5.py,sha256=-THcoMvJ1MhLZFQP6c2IEGqpkY7sTg_xYyAwCE7_91o,29511
123
+ onnx_diagnostic/torch_export_patches/patches/_patch_transformers_qwen3.py,sha256=cND9Iqo1aKdlX-BXGr9Qlq_Y4EW1L5VWSwZfqYTVazU,4888
124
+ onnx_diagnostic/torch_export_patches/patches/_patch_transformers_rotary_embedding.py,sha256=4bJ_z2gizZQla_fcCVt0dmuhzO9Vu-D7CCMWdxMlrKM,16893
125
+ onnx_diagnostic/torch_export_patches/patches/_patch_transformers_sam_mask_decoder.py,sha256=-6TuBm3sLAFEGuW3vRfOTtE5uP6aINFfu7xMnl27Dws,5703
126
+ onnx_diagnostic/torch_export_patches/patches/patch_helper.py,sha256=kK_CGW643iVXxa-m6pttDBS7HTyMQaPypza7iqIInn4,721
109
127
  onnx_diagnostic/torch_export_patches/patches/patch_torch.py,sha256=FfES0WWiWxmuQbGTlQ7IJS0YBG7km3IQbnMYwk_lPPU,44667
110
- onnx_diagnostic/torch_export_patches/patches/patch_transformers.py,sha256=8r5NQxVfwPi9YVHmeGS5xN52wsa5IgUKzFxls3xuX4Y,109850
128
+ onnx_diagnostic/torch_export_patches/patches/patch_transformers.py,sha256=VAfZj0xu3D4CG71SWv-9sYPUK4ZQTSz2-x4qxP4DxGE,3079
111
129
  onnx_diagnostic/torch_export_patches/serialization/__init__.py,sha256=BHLdRPtNAtNPAS-bPKEj3-foGSPvwAbZXrHzGGPDLEw,1876
112
130
  onnx_diagnostic/torch_export_patches/serialization/diffusers_impl.py,sha256=drq3EH_yjcSuIWYsVeUWm8Cx6YCZFU6bP_1PLtPfY5I,945
113
131
  onnx_diagnostic/torch_export_patches/serialization/transformers_impl.py,sha256=sIHFvUQoMK8ytXQYB-k7OL62z8A3f5uDaq-S5R5uN-M,10034
114
132
  onnx_diagnostic/torch_models/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
115
133
  onnx_diagnostic/torch_models/code_sample.py,sha256=PWf7piGx7Eiv7BOTpL2bLUtWwaVcw7SMBvkSpEzZDPs,12883
116
134
  onnx_diagnostic/torch_models/llms.py,sha256=soyg4yC87ptGoeulJhKqw5opGmuLvH1pn_ZDXZ4Jr8E,90
117
- onnx_diagnostic/torch_models/validate.py,sha256=jzBgZRKCzpWH25rCoc6b0QsLQlmQXSzLzAntgXzIdWc,92569
135
+ onnx_diagnostic/torch_models/validate.py,sha256=yhcCjZJ7pgjeHQBV-rCbQJ-ot_tngEwSYoonyNhEH5g,94426
118
136
  onnx_diagnostic/torch_models/hghub/__init__.py,sha256=vi1Q7YHdddj1soiBN42MSvJdFqe2_KUoWafHISjwOu8,58
119
137
  onnx_diagnostic/torch_models/hghub/hub_api.py,sha256=rFbiPNLET-KdBpnv-p0nKgwHX6d7C_Z0s9zZ86_92kQ,14307
120
138
  onnx_diagnostic/torch_models/hghub/hub_data.py,sha256=8V_pAgACPLPsLRYUododg7MSL6str-T3tBEGY4OaeYQ,8724
@@ -126,9 +144,10 @@ onnx_diagnostic/torch_models/untrained/llm_phi2.py,sha256=y_akbdApi136qHcEQgykwI
126
144
  onnx_diagnostic/torch_models/untrained/llm_tiny_llm.py,sha256=QXw_Bs2SzfeiQMf-tmtVl83SmVOL4-Um7Qy-f0E48QI,2507
127
145
  onnx_diagnostic/torch_onnx/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
128
146
  onnx_diagnostic/torch_onnx/runtime_info.py,sha256=1g9F_Jf9AAgYQU4stbsrFXwQl-30mWlQrFbQ7val8Ps,9268
129
- onnx_diagnostic/torch_onnx/sbs.py,sha256=IoKLA5UwS6kY8g4OOf_bdQwCziIsQfBczZ3w8wo4wZM,16905
130
- onnx_diagnostic-0.8.2.dist-info/licenses/LICENSE.txt,sha256=Vv6TXglX6Rc0d-f8aREhayhT-6PMQXEyOmI2NKlUCMc,1045
131
- onnx_diagnostic-0.8.2.dist-info/METADATA,sha256=JbEdP7KIm9D1xwdr9Hot2BNxC2ZX9_YeBkdo6oy6UvQ,6734
132
- onnx_diagnostic-0.8.2.dist-info/WHEEL,sha256=_zCd3N1l69ArxyTb8rzEoP9TpbYXkqRFSNOD5OuxnTs,91
133
- onnx_diagnostic-0.8.2.dist-info/top_level.txt,sha256=KwNkXewmcobM3ZT1DJLVWH6ebJzA5qKg7cWqKfpGNT4,16
134
- onnx_diagnostic-0.8.2.dist-info/RECORD,,
147
+ onnx_diagnostic/torch_onnx/sbs.py,sha256=Q2nbj1Ovasf_HDFc5_tNVH8taJhzhgUXPY6N1uajayk,40615
148
+ onnx_diagnostic/torch_onnx/sbs_dataclasses.py,sha256=ctJitdW09gLhg900yjT-Zqbx8SU2n4ZdgyVZ47dmlvQ,18475
149
+ onnx_diagnostic-0.8.3.dist-info/licenses/LICENSE.txt,sha256=Vv6TXglX6Rc0d-f8aREhayhT-6PMQXEyOmI2NKlUCMc,1045
150
+ onnx_diagnostic-0.8.3.dist-info/METADATA,sha256=MsLK613cwgCcvJ4JkLvP2ysF48KEgTmzqm83IBUO5JM,6734
151
+ onnx_diagnostic-0.8.3.dist-info/WHEEL,sha256=_zCd3N1l69ArxyTb8rzEoP9TpbYXkqRFSNOD5OuxnTs,91
152
+ onnx_diagnostic-0.8.3.dist-info/top_level.txt,sha256=KwNkXewmcobM3ZT1DJLVWH6ebJzA5qKg7cWqKfpGNT4,16
153
+ onnx_diagnostic-0.8.3.dist-info/RECORD,,