onnx-diagnostic 0.6.3__py3-none-any.whl → 0.7.0__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (41)
  1. onnx_diagnostic/__init__.py +1 -1
  2. onnx_diagnostic/_command_lines_parser.py +87 -77
  3. onnx_diagnostic/doc.py +22 -0
  4. onnx_diagnostic/ext_test_case.py +1 -1
  5. onnx_diagnostic/helpers/cache_helper.py +59 -0
  6. onnx_diagnostic/helpers/config_helper.py +8 -4
  7. onnx_diagnostic/helpers/helper.py +30 -3
  8. onnx_diagnostic/helpers/log_helper.py +585 -0
  9. onnx_diagnostic/helpers/mini_onnx_builder.py +4 -1
  10. onnx_diagnostic/helpers/model_builder_helper.py +54 -73
  11. onnx_diagnostic/helpers/torch_helper.py +18 -2
  12. onnx_diagnostic/reference/__init__.py +1 -0
  13. onnx_diagnostic/reference/ort_evaluator.py +29 -4
  14. onnx_diagnostic/reference/report_results_comparison.py +95 -0
  15. onnx_diagnostic/reference/torch_evaluator.py +21 -0
  16. onnx_diagnostic/tasks/automatic_speech_recognition.py +3 -0
  17. onnx_diagnostic/tasks/feature_extraction.py +3 -0
  18. onnx_diagnostic/tasks/fill_mask.py +3 -0
  19. onnx_diagnostic/tasks/image_classification.py +7 -1
  20. onnx_diagnostic/tasks/image_text_to_text.py +3 -0
  21. onnx_diagnostic/tasks/mixture_of_expert.py +3 -0
  22. onnx_diagnostic/tasks/object_detection.py +3 -0
  23. onnx_diagnostic/tasks/sentence_similarity.py +3 -0
  24. onnx_diagnostic/tasks/summarization.py +3 -0
  25. onnx_diagnostic/tasks/text2text_generation.py +3 -0
  26. onnx_diagnostic/tasks/text_classification.py +3 -0
  27. onnx_diagnostic/tasks/text_generation.py +90 -43
  28. onnx_diagnostic/tasks/zero_shot_image_classification.py +3 -0
  29. onnx_diagnostic/torch_export_patches/onnx_export_errors.py +78 -25
  30. onnx_diagnostic/torch_export_patches/onnx_export_serialization.py +37 -0
  31. onnx_diagnostic/torch_export_patches/patches/patch_transformers.py +365 -17
  32. onnx_diagnostic/torch_models/hghub/hub_api.py +20 -4
  33. onnx_diagnostic/torch_models/hghub/hub_data_cached_configs.py +209 -0
  34. onnx_diagnostic/torch_models/hghub/model_inputs.py +3 -0
  35. onnx_diagnostic/torch_models/untrained/llm_tiny_llm.py +23 -50
  36. onnx_diagnostic/torch_models/{test_helper.py → validate.py} +158 -103
  37. {onnx_diagnostic-0.6.3.dist-info → onnx_diagnostic-0.7.0.dist-info}/METADATA +2 -2
  38. {onnx_diagnostic-0.6.3.dist-info → onnx_diagnostic-0.7.0.dist-info}/RECORD +41 -39
  39. {onnx_diagnostic-0.6.3.dist-info → onnx_diagnostic-0.7.0.dist-info}/WHEEL +0 -0
  40. {onnx_diagnostic-0.6.3.dist-info → onnx_diagnostic-0.7.0.dist-info}/licenses/LICENSE.txt +0 -0
  41. {onnx_diagnostic-0.6.3.dist-info → onnx_diagnostic-0.7.0.dist-info}/top_level.txt +0 -0
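Note on item 36: the validation helpers move from onnx_diagnostic/torch_models/test_helper.py to onnx_diagnostic/torch_models/validate.py. A minimal import-migration sketch for downstream code, assuming the public names are otherwise unchanged (version_summary and validate_model both appear under the new module in the hunks below):

    # 0.6.3
    from onnx_diagnostic.torch_models.test_helper import validate_model, version_summary
    # 0.7.0
    from onnx_diagnostic.torch_models.validate import validate_model, version_summary
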
@@ -147,7 +147,7 @@ def version_summary() -> Dict[str, Union[int, float, str]]:
  :showcode:

  import pprint
- from onnx_diagnostic.torch_models.test_helper import version_summary
+ from onnx_diagnostic.torch_models.validate import version_summary

  pprint.pprint(version_summary())
  """
@@ -275,6 +275,7 @@ def validate_model(
  runtime: str = "onnxruntime",
  repeat: int = 1,
  warmup: int = 0,
+ inputs2: bool = True,
  ) -> Tuple[Dict[str, Union[int, float, str]], Dict[str, Any]]:
  """
  Validates a model.
@@ -307,7 +308,7 @@ def validate_model(
  :param drop_inputs: drops this list of inputs (given their names)
  :param ortfusiontype: runs ort fusion, the parameters defines the fusion type,
  it accepts multiple values separated by ``|``,
- see :func:`onnx_diagnostic.torch_models.test_helper.run_ort_fusion`
+ see :func:`onnx_diagnostic.torch_models.validate.run_ort_fusion`
  :param input_options: additional options to define the dummy inputs
  used to export
  :param model_options: additional options when creating the model such as
@@ -318,6 +319,8 @@ def validate_model(
  only if `do_run` is true
  :param repeat: number of time to measure the model
  :param warmup: warmup the model first
+ :param inputs2: checks that the second set of inputs is reunning as well,
+ this ensures that the model does support dynamism
  :return: two dictionaries, one with some metrics,
  another one with whatever the function produces

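The new inputs2 flag (default True) makes validate_model run a second set of inputs in addition to the first one, which is how the exported model gets checked for genuine dynamic-shape support. A minimal usage sketch, assuming the model id is passed as the model_id argument and that "arnir0/Tiny-LLM" is an available model id (both are assumptions not shown in this hunk):

    from onnx_diagnostic.torch_models.validate import validate_model

    # run eagerly, export with the dynamo-based exporter, then validate the
    # ONNX model on both input sets (inputs2=True is the default)
    summary, data = validate_model(
        model_id="arnir0/Tiny-LLM",  # assumed model id
        do_run=True,
        exporter="onnx-dynamo",
        inputs2=True,
        verbose=1,
    )
    # entries suffixed with "2" (e.g. run_expected2) come from the second input set
    print({k: v for k, v in summary.items() if k.endswith("2")})
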
@@ -361,6 +364,7 @@ def validate_model(
  version_stop_if_static=str(stop_if_static),
  version_exporter=exporter or "",
  version_runtime=runtime,
+ version_inputs2=inputs2,
  )
  )
  if opset:
@@ -404,7 +408,7 @@ def validate_model(
  summary,
  None,
  (
- lambda mid=model_id, v=verbose, task=task, tr=trained, iop=iop, sub=subfolder: (
+ lambda mid=model_id, v=verbose, task=task, tr=trained, iop=iop, sub=subfolder, i2=inputs2: ( # noqa: E501
  get_untrained_model_with_inputs(
  mid,
  verbose=v,
@@ -413,10 +417,15 @@ def validate_model(
  inputs_kwargs=iop,
  model_kwargs=mop,
  subfolder=sub,
+ add_second_input=i2,
  )
  )
  ),
  )
+ assert not inputs2 or "inputs2" in data, (
+ f"inputs2 is True but second set is missing in data for "
+ f"model id {model_id!r}: {sorted(data)}"
+ )

  if exporter == "modelbuilder":
  # Models used with ModelBuilder do not like batch size > 1.
@@ -483,6 +492,16 @@ def validate_model(
  if verbose:
  print(f"[validate_model] new inputs: {string_type(data['inputs'])}")
  print(f"[validate_model] new dynamic_hapes: {string_type(data['dynamic_shapes'])}")
+ if inputs2:
+ assert (
+ "inputs2" in data
+ ), "Cannot test a second set of inputs as it was not defined."
+ data["inputs2"], _ = filter_inputs(
+ data["inputs2"],
+ drop_names=drop_inputs,
+ model=data["model"],
+ dynamic_shapes=data["dynamic_shapes"],
+ )

  if not empty(dtype):
  if isinstance(dtype, str):
@@ -492,6 +511,8 @@ def validate_model(
  data["model"] = to_any(data["model"], dtype) # type: ignore
  data["inputs"] = to_any(data["inputs"], dtype) # type: ignore
  summary["model_dtype"] = str(dtype)
+ if "inputs2" in data:
+ data["inputs2"] = to_any(data["inputs2"], dtype) # type: ignore

  if not empty(device):
  if verbose:
@@ -499,6 +520,8 @@ def validate_model(
  data["model"] = to_any(data["model"], device) # type: ignore
  data["inputs"] = to_any(data["inputs"], device) # type: ignore
  summary["model_device"] = str(device)
+ if "inputs2" in data:
+ data["inputs2"] = to_any(data["inputs2"], device) # type: ignore

  for k in ["task", "size", "n_weights"]:
  summary[f"model_{k.replace('_','')}"] = data[k]
@@ -527,35 +550,13 @@ def validate_model(
  print("[validate_model] --")

  if do_run:
- if verbose:
- print("[validate_model] -- run the model...")
- print(f"[validate_model] inputs={string_type(data['inputs'], with_shape=True)}")
- # We make a copy of the input just in case the model modifies them inplace
- hash_inputs = string_type(data["inputs"], with_shape=True)
- inputs = torch_deepcopy(data["inputs"])
- model = data["model"]
-
- expected = _quiet_or_not_quiet(
- quiet,
- "run",
- summary,
- data,
- (lambda m=model, inp=inputs: m(**torch_deepcopy(inp))),
- repeat=repeat,
- warmup=warmup,
- )
- if "ERR_run" in summary:
- return summary, data
-
- summary["model_expected"] = string_type(expected, with_shape=True)
- if verbose:
- print("[validate_model] done (run)")
- data["expected"] = expected
- assert hash_inputs == string_type(data["inputs"], with_shape=True), (
- f"The model execution did modified the inputs:\n"
- f"before: {hash_inputs}\n"
- f" after: {string_type(data['inputs'], with_shape=True)}"
+ _validate_do_run_model(
+ data, summary, "inputs", "run", "run_expected", verbose, repeat, warmup, quiet
  )
+ if inputs2:
+ _validate_do_run_model(
+ data, summary, "inputs2", "run2", "run_expected2", verbose, 1, 0, quiet
+ )

  if exporter:
  print(
@@ -578,43 +579,7 @@ def validate_model(
  data["inputs_export"] = modificator(data["inputs"]) # type: ignore

  if do_run:
- # We run a second time the model to check the patch did not
- # introduce any discrepancies
- if verbose:
- print("[validate_model] run patched model...")
- print(
- f"[validate_model] patched inputs="
- f"{string_type(data['inputs_export'], with_shape=True)}"
- )
- hash_inputs = string_type(data["inputs_export"], with_shape=True)
-
- # We make a copy of the input just in case the model modifies them inplace
- inputs = torch_deepcopy(data["inputs_export"])
- model = data["model"]
-
- expected = _quiet_or_not_quiet(
- quiet,
- "run_patched",
- summary,
- data,
- (lambda m=model, inp=inputs: m(**inp)),
- )
- if "ERR_run_patched" in summary:
- return summary, data
-
- disc = max_diff(data["expected"], expected)
- for k, v in disc.items():
- summary[f"disc_patched_{k}"] = str(v)
- if verbose:
- print("[validate_model] done (patched run)")
- print(f"[validate_model] patched discrepancies={string_diff(disc)}")
- assert hash_inputs == string_type(
- data["inputs_export"], with_shape=True
- ), (
- f"The model execution did modified the inputs:\n"
- f"before: {hash_inputs}\n"
- f" after: {string_type(data['inputs_export'], with_shape=True)}"
- )
+ _validate_do_run_exported_program(data, summary, verbose, quiet)

  # data is modified inplace
  summary_export, data = call_exporter(
@@ -707,6 +672,7 @@ def validate_model(
  runtime=runtime,
  repeat=repeat,
  warmup=warmup,
+ inputs2=inputs2,
  )
  summary.update(summary_valid)

@@ -767,6 +733,7 @@ def validate_model(
  runtime=runtime,
  repeat=repeat,
  warmup=warmup,
+ inputs2=inputs2,
  )
  summary.update(summary_valid)

@@ -779,6 +746,79 @@ def validate_model(
  return summary, data


+ def _validate_do_run_model(
+ data, summary, key, tag, expected_tag, verbose, repeat, warmup, quiet
+ ):
+ if verbose:
+ print(f"[validate_model] -- run the model inputs={key!r}...")
+ print(f"[validate_model] {key}={string_type(data[key], with_shape=True)}")
+ # We make a copy of the input just in case the model modifies them inplace
+ hash_inputs = string_type(data[key], with_shape=True)
+ inputs = torch_deepcopy(data[key])
+ model = data["model"]
+
+ expected = _quiet_or_not_quiet(
+ quiet,
+ tag,
+ summary,
+ data,
+ (lambda m=model, inp=inputs: m(**torch_deepcopy(inp))),
+ repeat=repeat,
+ warmup=warmup,
+ )
+ if f"ERR_{tag}" in summary:
+ return summary, data
+
+ summary[expected_tag] = string_type(expected, with_shape=True)
+ if verbose:
+ print(f"[validate_model] done ([{tag}])")
+ data[expected_tag] = expected
+ assert hash_inputs == string_type(data[key], with_shape=True), (
+ f"The model execution did modified the inputs:\n"
+ f"before: {hash_inputs}\n"
+ f" after: {string_type(data[key], with_shape=True)}"
+ )
+
+
+ def _validate_do_run_exported_program(data, summary, verbose, quiet):
+
+ # We run a second time the model to check the patch did not
+ # introduce any discrepancies
+ if verbose:
+ print("[validate_model] run patched model...")
+ print(
+ f"[validate_model] patched inputs="
+ f"{string_type(data['inputs_export'], with_shape=True)}"
+ )
+ hash_inputs = string_type(data["inputs_export"], with_shape=True)
+
+ # We make a copy of the input just in case the model modifies them inplace
+ inputs = torch_deepcopy(data["inputs_export"])
+ model = data["model"]
+
+ expected = _quiet_or_not_quiet(
+ quiet,
+ "run_patched",
+ summary,
+ data,
+ (lambda m=model, inp=inputs: m(**inp)),
+ )
+ if "ERR_run_patched" in summary:
+ return summary, data
+
+ disc = max_diff(data["run_expected"], expected)
+ for k, v in disc.items():
+ summary[f"disc_patched_{k}"] = str(v)
+ if verbose:
+ print("[validate_model] done (patched run)")
+ print(f"[validate_model] patched discrepancies={string_diff(disc)}")
+ assert hash_inputs == string_type(data["inputs_export"], with_shape=True), (
+ f"The model execution did modified the inputs:\n"
+ f"before: {hash_inputs}\n"
+ f" after: {string_type(data['inputs_export'], with_shape=True)}"
+ )
+
+
  def call_exporter(
  data: Dict[str, Any],
  exporter: str,
@@ -845,7 +885,11 @@ def call_exporter(
  )
  return summary, data
  raise NotImplementedError(
- f"export with {exporter!r} and optimization={optimization!r} not implemented yet"
+ f"export with {exporter!r} and optimization={optimization!r} not implemented yet, "
+ f"exporter must startswith 'onnx-', 'custom', 'export', 'modelbuilder' "
+ f"(onnx-dynamo, custom, export), optimization can 'ir', "
+ f"'default', 'default+onnxruntime', "
+ f"'default+onnxruntime+os_ort', 'ir', 'os_ort'"
  )


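The expanded NotImplementedError message enumerates the recognized exporter and optimization spellings. A hedged illustration of a combination the message treats as recognized, assuming validate_model forwards an optimization argument to call_exporter (that plumbing is not shown in this diff):

    # hypothetical: exporter and optimization values taken from the error message above
    summary, data = validate_model(
        model_id="arnir0/Tiny-LLM",   # assumed model id
        exporter="custom",            # one of onnx-dynamo, custom, export, modelbuilder
        optimization="default+onnxruntime",
        do_run=True,
    )
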
@@ -959,7 +1003,7 @@ def call_torch_export_export(
  if "ERR_export_export" in summary:
  return summary, data

- disc = max_diff(data["expected"], expected)
+ disc = max_diff(data["run_expected"], expected)
  for k, v in disc.items():
  summary[f"disc_exported_{k}"] = str(v)
  if verbose:
@@ -981,6 +1025,7 @@ def validate_onnx_model(
  runtime: str = "onnxruntime",
  repeat: int = 1,
  warmup: int = 0,
+ inputs2: bool = True,
  ) -> Tuple[Dict[str, Any], Dict[str, Any]]:
  """
  Verifies that an onnx model produces the same
@@ -996,6 +1041,8 @@ def validate_onnx_model(
  :param runtime: onnx runtime to use, onnxruntime or torch
  :param repeat: run that number of times the model
  :param warmup: warmup the model
+ :param inputs: to validate the model on the second input set
+ to make sure the exported model supports dynamism
  :return: two dictionaries, one with some metrics,
  another one with whatever the function produces
  """
@@ -1065,43 +1112,51 @@ def validate_onnx_model(
  if verbose:
  print(f"[validate_onnx_model] done (ort_session) flavour={flavour!r}")

- # make_feeds
- if verbose:
- print("[validate_onnx_model] -- make_feeds...")
- print(f"[validate_onnx_model] inputs={string_type(data['inputs'], with_shape=True)}")
- feeds = make_feeds(sess, data["inputs"], use_numpy=True, check_flatten=False)
- if verbose:
- print(f"[validate_onnx_model] ort inputs={string_type(feeds, with_shape=True)}")
- summary[_mk("onnx_ort_inputs")] = string_type(feeds, with_shape=True)
- if verbose:
- print("[validate_onnx_model] done (make_feeds)")
+ keys = [("inputs", "run_expected", "")]
+ if inputs2:
+ keys.append(("inputs2", "run_expected2", "2"))
+ for k_input, k_expected, suffix in keys:
+ # make_feeds
+ if verbose:
+ print(f"[validate_onnx_model] -- make_feeds for {k_input!r}...")
+ print(
+ f"[validate_onnx_model] inputs={string_type(data[k_input], with_shape=True)}"
+ )
+ feeds = make_feeds(sess, data[k_input], use_numpy=True, check_flatten=False)
+ if verbose:
+ print(f"[validate_onnx_model] ort inputs={string_type(feeds, with_shape=True)}")
+ summary[_mk(f"onnx_ort_inputs{suffix}")] = string_type(feeds, with_shape=True)
+ if verbose:
+ print("[validate_onnx_model] done (make_feeds)")

- # run ort
- if verbose:
- print("[validate_onnx_model] run session...")
+ # run ort
+ if verbose:
+ print("[validate_onnx_model] run session...")

- got = _quiet_or_not_quiet(
- quiet,
- _mk("time_onnx_ort_run"),
- summary,
- data,
- (lambda sess=sess, feeds=feeds: sess.run(None, feeds)),
- repeat=repeat,
- warmup=warmup,
- )
- if f"ERR_{_mk('time_onnx_ort_run')}" in summary:
- return summary, data
+ got = _quiet_or_not_quiet(
+ quiet,
+ _mk(f"time_onnx_ort_run{suffix}"),
+ summary,
+ data,
+ (lambda sess=sess, feeds=feeds: sess.run(None, feeds)),
+ repeat=repeat,
+ warmup=warmup,
+ )
+ if f"ERR_{_mk(f'time_onnx_ort_run{suffix}')}" in summary:
+ return summary, data

- if verbose:
- print("[validate_onnx_model] done (run)")
- print(f"[validate_onnx_model] got={string_type(got, with_shape=True)}")
+ summary[f"run_feeds_{k_input}"] = string_type(feeds, with_shape=True, with_device=True)
+ summary[f"run_output_{k_input}"] = string_type(got, with_shape=True, with_device=True)
+ if verbose:
+ print("[validate_onnx_model] done (run)")
+ print(f"[validate_onnx_model] got={string_type(got, with_shape=True)}")

- # compute discrepancies
- disc = max_diff(data["expected"], got, flatten=True)
- if verbose:
- print(f"[validate_onnx_model] discrepancies={string_diff(disc)}")
- for k, v in disc.items():
- summary[_mk(f"disc_onnx_ort_run_{k}")] = v
+ # compute discrepancies
+ disc = max_diff(data[k_expected], got, flatten=True)
+ if verbose:
+ print(f"[validate_onnx_model] discrepancies={string_diff(disc)}")
+ for k, v in disc.items():
+ summary[_mk(f"disc_onnx_ort_run{suffix}_{k}")] = v
  return summary, data


@@ -1,6 +1,6 @@
  Metadata-Version: 2.4
  Name: onnx-diagnostic
- Version: 0.6.3
+ Version: 0.7.0
  Summary: Investigate ONNX models
  Home-page: https://github.com/sdpython/onnx-diagnostic
  Author: Xavier Dupré
@@ -59,7 +59,7 @@ onnx-diagnostic: investigate onnx models
  .. image:: https://img.shields.io/badge/code%20style-black-000000.svg
  :target: https://github.com/psf/black

- .. image:: https://codecov.io/gh/sdpython/onnx-diagnostic/branch/main/graph/badge.svg?token=Wb9ZGDta8J
+ .. image:: https://codecov.io/gh/sdpython/onnx-diagnostic/graph/badge.svg?token=91T5ZVIP96
  :target: https://codecov.io/gh/sdpython/onnx-diagnostic

  The main feature is about `patches <https://github.com/sdpython/onnx-diagnostic/tree/main/onnx_diagnostic/torch_export_patches>`_:
@@ -1,32 +1,34 @@
- onnx_diagnostic/__init__.py,sha256=mRkq5dlSo05GQMct7d6mMZLb6s5T24eG_3mD5O3wBo0,173
+ onnx_diagnostic/__init__.py,sha256=6RCqK3fOs5Ba2WSRikalaPL72CKIWq7XGzxBULaBSQU,173
  onnx_diagnostic/__main__.py,sha256=YmyV_Aq_ianDlHyKLHMa6h8YK3ZmFPpLVHLKjM91aCk,79
- onnx_diagnostic/_command_lines_parser.py,sha256=pxG3dYpTTpRCjBRzFGEZm4ewb7xprZihP7fG08kWL04,19989
+ onnx_diagnostic/_command_lines_parser.py,sha256=FCeOpwZ-tRn3-JjOLs0K2X0ziTjDKJFzIb425vNYH7U,20876
  onnx_diagnostic/api.py,sha256=BhCl_yCd78N7TlVtPOHjeYv1QBEy39TjZ647rcHqLh0,345
- onnx_diagnostic/doc.py,sha256=O_ncetL0G4-oHIxLv8ofTIIxCT_5ESSkKxfYvgccJEc,2038
- onnx_diagnostic/ext_test_case.py,sha256=nhWz75caudvKn-svH1ppUY8uw8MoTD4cEdqMdj6PiPc,42399
+ onnx_diagnostic/doc.py,sha256=t3RELgfooYnVMAi0JSpggWkQEgUsREz8NmRvn0TnLI8,2829
+ onnx_diagnostic/ext_test_case.py,sha256=IX-DNabvsPw8UkUeXC1amw3nnzdmJ3DeERn4E1Y_omo,42416
  onnx_diagnostic/export/__init__.py,sha256=yEIoWiOeTwBsDhyYt2fTKuhtA0Ya1J9u9ZzMTOTWaWs,101
  onnx_diagnostic/export/dynamic_shapes.py,sha256=EHB7VoWNx8sVetvOgE1vgC7wHtIjWDLjanhbEJNpK88,39892
  onnx_diagnostic/export/validate.py,sha256=_PGUql2DJhIgGKo0WjTGUc5AgsZUx8fEs00MePy-w98,6043
  onnx_diagnostic/helpers/__init__.py,sha256=GJ2GT7cgnlIveVUwMZhuvUwidbTJaKv8CsSIOpZDsJg,83
  onnx_diagnostic/helpers/args_helper.py,sha256=SRWnqC7EENg09RZlA50B_PcdiIhdbgA4C3ACfzl5nMs,4419
  onnx_diagnostic/helpers/bench_run.py,sha256=CGA6VMJZMH2gDhVueT9ypNm4PMcjGrrGFYp08nhWj9k,16539
- onnx_diagnostic/helpers/cache_helper.py,sha256=soKjyIXa7EQgALd9PAUGIKYzXlJGoLevYiQDsxoqkQ4,8349
- onnx_diagnostic/helpers/config_helper.py,sha256=aZATKVbZuw8L56KQpwMNcqJ3Qi5OplzS_N3ETR3hmj0,3351
+ onnx_diagnostic/helpers/cache_helper.py,sha256=SFw-wNKXvrNo53VmvRVPdI4nBDMIlaGKv4bNx9g_h_o,10406
+ onnx_diagnostic/helpers/config_helper.py,sha256=CdMeUhmDe0LfKcdPv9-Lzt73RRs29NmUHg9uVrdFwTQ,3479
  onnx_diagnostic/helpers/doc_helper.py,sha256=pl5MZd3_FaE8BqQnqoBuSBxoNCFcd2OJd3eITUSku5c,5897
  onnx_diagnostic/helpers/graph_helper.py,sha256=hevQT5a7_QuriVPQcbT5qe18n99Doyl5h3-qshx1-uk,14093
- onnx_diagnostic/helpers/helper.py,sha256=oPybQruFcVLqvqLDhjphOZ8zZU1HHJWAlABMuTkAO8A,57090
+ onnx_diagnostic/helpers/helper.py,sha256=_6K0IvfK7ymBE8uWFAOA1ksU_fMvl2BRtlxj5SA9R2I,58203
+ onnx_diagnostic/helpers/log_helper.py,sha256=5XsNfnaFaxiLJpkm5tIe4P_Cq0ZdeoZw2hNZXzKX4Ko,22868
  onnx_diagnostic/helpers/memory_peak.py,sha256=OT6mz0muBbBZY0pjgW2_eCk_lOtFRo-5w4jFo2Z6Kok,6380
- onnx_diagnostic/helpers/mini_onnx_builder.py,sha256=R1Vu4zHzN7GIUnbMVQzpkaXj8cCyyOweWOI9-TSgAHM,20966
- onnx_diagnostic/helpers/model_builder_helper.py,sha256=xIZmsVMFHfdtYeZHVEffBtxYObAaRPiaSmwwSKkmLwY,13502
+ onnx_diagnostic/helpers/mini_onnx_builder.py,sha256=p0Xh2Br38xAqUjB2214GiNOIbCgiVZKeyVEnjdyqyFI,21091
+ onnx_diagnostic/helpers/model_builder_helper.py,sha256=RvDyPFqRboEU3HsQV_xi9oy-o3_4KuGFVzs5MhksduY,12552
  onnx_diagnostic/helpers/onnx_helper.py,sha256=pXXQjfyNTSUF-Kt72U4fnBDkYAnWYMxdSw8m0qk3xmE,39670
  onnx_diagnostic/helpers/ort_session.py,sha256=UgUUeUslDxEFBc6w6f3HMq_a7bn4TBlItmojqWquSj4,29281
  onnx_diagnostic/helpers/rt_helper.py,sha256=BXU_u1syk2RyM0HTFHKEiO6rHHhZW2UFPyUTVdeq8BU,4251
- onnx_diagnostic/helpers/torch_helper.py,sha256=mrmn4mBeRvMRJ9cEu7BbNG-AHq2OJfSm8dxgtzh-yQQ,31631
- onnx_diagnostic/reference/__init__.py,sha256=nrd09rRuwMDBCPTSZ6kSKZXp1W9W_ExO1t9duDlBnh8,146
+ onnx_diagnostic/helpers/torch_helper.py,sha256=L7qv14q4r1LcDKpEVobhySK6VE_X3h88Acvr6Kt4qEk,32244
+ onnx_diagnostic/reference/__init__.py,sha256=rLZsxOlnb7-81F2CzepGnZLejaROg4JvgFaGR9FwVQA,208
  onnx_diagnostic/reference/evaluator.py,sha256=RzNzjFDeMe-4X51Tb22N6aagazY5ktNq-mRmPcfY5EU,8848
- onnx_diagnostic/reference/ort_evaluator.py,sha256=OaWMREF8fuJwimmONpIjQ6WxQT1X2roDsdJsgR8H_Cg,24853
+ onnx_diagnostic/reference/ort_evaluator.py,sha256=1O7dHj8Aspolidg6rB2Nm7hT3HaGb4TxAgjCCD0XVcQ,26159
  onnx_diagnostic/reference/quantized_tensor.py,sha256=5u67uS2uGacdMD5VYCbpojNjiesDlV_kO0fAJ0vUWGE,1098
- onnx_diagnostic/reference/torch_evaluator.py,sha256=qAeYvSFwOCMDctc39evBEle_2bX8kuJW2QSLksofzn8,26600
+ onnx_diagnostic/reference/report_results_comparison.py,sha256=OsyQN8EHZZoj97u74RQP-7WFpebPOso5GEDpdkLWu6M,3645
+ onnx_diagnostic/reference/torch_evaluator.py,sha256=gf8EPoX4C4yGgQ-DqxXxaGU26WdEhn8Gd6iesDLqAV0,27692
  onnx_diagnostic/reference/ops/__init__.py,sha256=AbpHGcgLb-kRsJGnwFEktk7uzpZOCcBY74-YBdrKVGs,1
  onnx_diagnostic/reference/ops/op_add_add_mul_mul.py,sha256=CXQVtgVrT066gDJFwxL4nDSY4G8r08XNu3EwhWqMapU,1521
  onnx_diagnostic/reference/ops/op_attention.py,sha256=ThALMDF53v3QeG1bohi0bvX2o90HZhGJbbAFOtwEHPE,2027
@@ -69,22 +71,22 @@ onnx_diagnostic/reference/torch_ops/sequence_ops.py,sha256=3EiVKpGfN4d1Iry4hgnr3
  onnx_diagnostic/reference/torch_ops/shape_ops.py,sha256=pJrNR2UB4PlWl6cv4EDl1uGl8YTBUUMQkhJcsh5K4sA,4291
  onnx_diagnostic/reference/torch_ops/unary_ops.py,sha256=E8Ys1eZsOTsucBKoXb1_Kl5LbBDygniDvW2BvN4IPMo,1708
  onnx_diagnostic/tasks/__init__.py,sha256=5XXM-rv-Hk2gSHvqsww9DzVd9mcRifacgcPgvPCjnDM,2412
- onnx_diagnostic/tasks/automatic_speech_recognition.py,sha256=oRoYy56M0Yv_WOcn1hJXv-R9wgHkJ8rbym7j7y8oslw,6851
- onnx_diagnostic/tasks/feature_extraction.py,sha256=V-T5NpZ6EimOz00weWWxGfksZ9jQ5ZQyaP-mxuCEuJo,2223
- onnx_diagnostic/tasks/fill_mask.py,sha256=POUtgvOWv8wTOVLqxPNsj_C2WBiBWkmM72Z9mNlNqxI,2341
- onnx_diagnostic/tasks/image_classification.py,sha256=wtSkZB6Wqh2Y1O_zjfnYRZxUQPVVUlLQ_4D0H-SG-xU,4140
- onnx_diagnostic/tasks/image_text_to_text.py,sha256=6rKbts_p05VZL8wufJa6NP-MhxUOU-fuTAks5QfUVVQ,6037
- onnx_diagnostic/tasks/mixture_of_expert.py,sha256=orMx8Ly4DO0Po0tEmme4gi2flPIGip4TaAyxVik4Zgg,2685
- onnx_diagnostic/tasks/object_detection.py,sha256=o1T8NMztjdFAFA-Z5efx-8nd9W7YZZcbE8Ag5wKVxZA,3930
- onnx_diagnostic/tasks/sentence_similarity.py,sha256=okQ-TQR8j1a92_N-eT6xN56rjtu26CdlU_pk88gdbGs,2356
- onnx_diagnostic/tasks/summarization.py,sha256=qK9E7TdaB9g_Mu-f4Vdr_X8mMAUSlIapLlc8FlWSGFU,7989
- onnx_diagnostic/tasks/text2text_generation.py,sha256=x5p_5n72scF9hFNOVP8BjOr4Lga1nnqtjfStZ_n9EwQ,8406
- onnx_diagnostic/tasks/text_classification.py,sha256=OgC_G9iumzTjTNUEvMoFFNTHCD8_BkdvdYC4jUsfpHM,2412
- onnx_diagnostic/tasks/text_generation.py,sha256=jWKBFt7M-Neyjw6YSOrYY28xOkRkIerDF5YlugKK3qk,10386
- onnx_diagnostic/tasks/zero_shot_image_classification.py,sha256=N3cEG1Lq95wS1N_CWUUUCU5j-4Tp5eR8Ce68U8THYAk,4380
+ onnx_diagnostic/tasks/automatic_speech_recognition.py,sha256=7OspFypNHLSL6huvP9ms_euhHqYXyTZjAJ8Z7EXGimk,6967
+ onnx_diagnostic/tasks/feature_extraction.py,sha256=CbxbGsv3JvEQ2J9tO2DOpMHcJj5ZlCwY81ZB3hPB4D4,2339
+ onnx_diagnostic/tasks/fill_mask.py,sha256=ZWz8swzEeRbkmbY9oZ4CM1LYCWWUxnS5CqrKmUVw-u0,2457
+ onnx_diagnostic/tasks/image_classification.py,sha256=UjUAFYnwXIdPMXJdHR5MDzpsfMeIvyuKR4RqJVpGV_Q,4449
+ onnx_diagnostic/tasks/image_text_to_text.py,sha256=WE9o3DAxY9AndsFAp-g952gHFnzwqucSl8d70eD-X8Q,6153
+ onnx_diagnostic/tasks/mixture_of_expert.py,sha256=C0ugEc8OWmVyEZpsh8MJq_te1zgOHhpITtnSmGC16Ls,2801
+ onnx_diagnostic/tasks/object_detection.py,sha256=1lF5e2f2Coz1urSptEKgvUGCOSFBf0Anuq_QYOC00dA,4046
+ onnx_diagnostic/tasks/sentence_similarity.py,sha256=3MvNxjC1iEMtQL_jH1c8bmrVc5IG1lfUygrCZ0SORJk,2472
+ onnx_diagnostic/tasks/summarization.py,sha256=NLwqhpiQrU8UWd3u30VsNA3FsL315S3nlQ7ycUzJueo,8105
+ onnx_diagnostic/tasks/text2text_generation.py,sha256=mYvsq-O69fr5pitX0mWugT76QuK4xUs40Vsz9ru_XK8,8522
+ onnx_diagnostic/tasks/text_classification.py,sha256=NCCKobBQyCc7dSVj7_5N6S_RuvBlRMAdWkS2rVvrzck,2528
+ onnx_diagnostic/tasks/text_generation.py,sha256=PDh870BB-llzlu8h_aZX4Z-9QLzcGmDwX5aKJPy_K90,12504
+ onnx_diagnostic/tasks/zero_shot_image_classification.py,sha256=GKaXm8g7cK23h3wJEUc6Q-6mpmLAzQ4YkJbd-eGP7Y4,4496
  onnx_diagnostic/torch_export_patches/__init__.py,sha256=0SaZedwznm1hQUCvXZsGZORV5vby954wEExr5faepGg,720
- onnx_diagnostic/torch_export_patches/onnx_export_errors.py,sha256=MgOQwgLf6-uCGQaiUrhVNfZQ43dCp1iWGbzLbKEVyc8,18810
- onnx_diagnostic/torch_export_patches/onnx_export_serialization.py,sha256=l5HvE_FfdCtgLJBmJczH6nA8jZY1725AggiHwoAa-o0,15763
+ onnx_diagnostic/torch_export_patches/onnx_export_errors.py,sha256=op8jgnTa_1T_bGN172A6YFTtkQv_ALMNu1oukrsFt9U,20634
+ onnx_diagnostic/torch_export_patches/onnx_export_serialization.py,sha256=qoLp8ywtIg4AXqd9zdLPtc0WlBNep-n0oGMVmyBub4U,17098
  onnx_diagnostic/torch_export_patches/patch_expressions.py,sha256=vr4tt61cbDnaaaduzMj4UBZ8OUtr6GfDpIWwOYqjWzs,3213
  onnx_diagnostic/torch_export_patches/patch_inputs.py,sha256=9b4pmyT00BwLqi7WG-gliep1RUy3gXEgW6BDnlSSA-M,7689
  onnx_diagnostic/torch_export_patches/patch_module.py,sha256=R2d9IHM-RwsBKDsxuBIJnEqMoxbS9gd4YWFGG2wwV5A,39881
@@ -93,23 +95,23 @@ onnx_diagnostic/torch_export_patches/eval/__init__.py,sha256=VtkQB1o3Q2Fh99OOF6v
  onnx_diagnostic/torch_export_patches/eval/model_cases.py,sha256=DTvdHPtNQh25Akv5o3D4Jxf1L1-SJ7w14tgvj8AAns8,26577
  onnx_diagnostic/torch_export_patches/patches/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
  onnx_diagnostic/torch_export_patches/patches/patch_torch.py,sha256=KaZ8TjDa9ATgT4HllYzzoNf_51q_yOj_GuF5NYjPCrU,18913
- onnx_diagnostic/torch_export_patches/patches/patch_transformers.py,sha256=Hf-U50vzgzJ4iUjS2LAYkbfmzCEwX80Dzvdrr-Rhlp0,26456
+ onnx_diagnostic/torch_export_patches/patches/patch_transformers.py,sha256=GwcPUaSm-Zys2pWHac8Wcvpmy2h4oiFQDmx_D3GZNBA,41007
  onnx_diagnostic/torch_models/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
  onnx_diagnostic/torch_models/llms.py,sha256=soyg4yC87ptGoeulJhKqw5opGmuLvH1pn_ZDXZ4Jr8E,90
- onnx_diagnostic/torch_models/test_helper.py,sha256=tt6bgLjGRayzvkXrTelKHTjr7XU9BvhX7uE4XJq6H6o,59927
+ onnx_diagnostic/torch_models/validate.py,sha256=9UyhUSVvTsxWMaq1jZUVF-giw9tG49Op4Mi63T7ViyM,61868
  onnx_diagnostic/torch_models/hghub/__init__.py,sha256=vi1Q7YHdddj1soiBN42MSvJdFqe2_KUoWafHISjwOu8,58
- onnx_diagnostic/torch_models/hghub/hub_api.py,sha256=BgM_p57Q0gT9GOhdrmOYcnbuTTzCWp80jS4OQqWwFhs,9990
+ onnx_diagnostic/torch_models/hghub/hub_api.py,sha256=WvrnDPA80kxyG9fizK-ood3DtxQwD-GZOPqGBTmd1fM,10604
  onnx_diagnostic/torch_models/hghub/hub_data.py,sha256=885wKyZkdM-Qp5Sg6C9Ol1dxigmA8FYAko-Ys08sppo,8096
- onnx_diagnostic/torch_models/hghub/hub_data_cached_configs.py,sha256=V4Wv_73QsiWMFTw-xqj-5yZOri1NKuAub6VQa2UVIpw,259582
- onnx_diagnostic/torch_models/hghub/model_inputs.py,sha256=1h1jHJknqDHAa308_bjOzXVPgy3GIF-ikpce8CHNTmE,7804
+ onnx_diagnostic/torch_models/hghub/hub_data_cached_configs.py,sha256=dE8tHksGOsTk77jpa7mldLYzaQ5joKxDxDB0ZnwQBV4,267246
+ onnx_diagnostic/torch_models/hghub/model_inputs.py,sha256=D4iThSb3Pj89qQFXLCldhDilHAt1F1e8OS5IIfdygYQ,7966
  onnx_diagnostic/torch_models/untrained/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
  onnx_diagnostic/torch_models/untrained/llm_phi2.py,sha256=ynBTDHJHCk44NjLT_t6OiFDBdPP0rFGPteiONDxvztw,3708
- onnx_diagnostic/torch_models/untrained/llm_tiny_llm.py,sha256=7N3fGvT_4Mn4NbIo0Qk57c6DMc3OXGWyvj_P41rjwSY,3513
+ onnx_diagnostic/torch_models/untrained/llm_tiny_llm.py,sha256=QXw_Bs2SzfeiQMf-tmtVl83SmVOL4-Um7Qy-f0E48QI,2507
  onnx_diagnostic/torch_onnx/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
  onnx_diagnostic/torch_onnx/runtime_info.py,sha256=1g9F_Jf9AAgYQU4stbsrFXwQl-30mWlQrFbQ7val8Ps,9268
  onnx_diagnostic/torch_onnx/sbs.py,sha256=1EL25DeYFzlBSiFG_XjePBLvsiItRXbdDrr5-QZW2mA,16878
- onnx_diagnostic-0.6.3.dist-info/licenses/LICENSE.txt,sha256=Vv6TXglX6Rc0d-f8aREhayhT-6PMQXEyOmI2NKlUCMc,1045
- onnx_diagnostic-0.6.3.dist-info/METADATA,sha256=eJxj0KTPv1rXf-3T9KImWIF-u8g7wHXBGZm5zvXM7V8,6643
- onnx_diagnostic-0.6.3.dist-info/WHEEL,sha256=_zCd3N1l69ArxyTb8rzEoP9TpbYXkqRFSNOD5OuxnTs,91
- onnx_diagnostic-0.6.3.dist-info/top_level.txt,sha256=KwNkXewmcobM3ZT1DJLVWH6ebJzA5qKg7cWqKfpGNT4,16
- onnx_diagnostic-0.6.3.dist-info/RECORD,,
+ onnx_diagnostic-0.7.0.dist-info/licenses/LICENSE.txt,sha256=Vv6TXglX6Rc0d-f8aREhayhT-6PMQXEyOmI2NKlUCMc,1045
+ onnx_diagnostic-0.7.0.dist-info/METADATA,sha256=6UcN7eUU5naeV1gEkhd6SmfC9JJ7ehNE8ugoAYaqmA4,6631
+ onnx_diagnostic-0.7.0.dist-info/WHEEL,sha256=_zCd3N1l69ArxyTb8rzEoP9TpbYXkqRFSNOD5OuxnTs,91
+ onnx_diagnostic-0.7.0.dist-info/top_level.txt,sha256=KwNkXewmcobM3ZT1DJLVWH6ebJzA5qKg7cWqKfpGNT4,16
+ onnx_diagnostic-0.7.0.dist-info/RECORD,,