onnx-diagnostic 0.7.9__py3-none-any.whl → 0.7.11__py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- onnx_diagnostic/__init__.py +1 -1
- onnx_diagnostic/_command_lines_parser.py +8 -1
- onnx_diagnostic/helpers/cache_helper.py +12 -10
- onnx_diagnostic/helpers/helper.py +8 -0
- onnx_diagnostic/helpers/onnx_helper.py +1 -1
- onnx_diagnostic/helpers/torch_helper.py +14 -4
- onnx_diagnostic/reference/ops/op_scan.py +5 -5
- onnx_diagnostic/reference/ort_evaluator.py +2 -2
- onnx_diagnostic/tasks/__init__.py +4 -2
- onnx_diagnostic/tasks/image_to_video.py +127 -0
- onnx_diagnostic/torch_export_patches/eval/model_cases.py +3 -3
- onnx_diagnostic/torch_export_patches/onnx_export_errors.py +98 -4
- onnx_diagnostic/torch_export_patches/patches/patch_transformers.py +42 -2
- onnx_diagnostic/torch_export_patches/serialization/transformers_impl.py +0 -1
- onnx_diagnostic/torch_models/hghub/hub_api.py +69 -22
- onnx_diagnostic/torch_models/hghub/hub_data.py +5 -1
- onnx_diagnostic/torch_models/hghub/hub_data_cached_configs.py +142 -0
- onnx_diagnostic/torch_models/hghub/model_inputs.py +173 -128
- onnx_diagnostic/torch_models/hghub/model_specific.py +76 -0
- onnx_diagnostic/torch_models/untrained/llm_phi2.py +11 -3
- onnx_diagnostic/torch_models/validate.py +146 -17
- onnx_diagnostic/torch_onnx/sbs.py +1 -1
- {onnx_diagnostic-0.7.9.dist-info → onnx_diagnostic-0.7.11.dist-info}/METADATA +2 -2
- {onnx_diagnostic-0.7.9.dist-info → onnx_diagnostic-0.7.11.dist-info}/RECORD +27 -25
- {onnx_diagnostic-0.7.9.dist-info → onnx_diagnostic-0.7.11.dist-info}/WHEEL +0 -0
- {onnx_diagnostic-0.7.9.dist-info → onnx_diagnostic-0.7.11.dist-info}/licenses/LICENSE.txt +0 -0
- {onnx_diagnostic-0.7.9.dist-info → onnx_diagnostic-0.7.11.dist-info}/top_level.txt +0 -0
onnx_diagnostic/torch_models/hghub/model_specific.py (new file)

@@ -0,0 +1,76 @@
+from typing import Any, Dict, Tuple
+
+
+def instantiate_specific_model(cls_model: type, config: Any) -> object:
+    """
+    Instantiates some model requiring some specific code.
+    """
+    if cls_model.__name__ == "CosmosTransformer3DModel":
+        return instantiate_CosmosTransformer3DModel(cls_model, config)
+    return None
+
+
+def instantiate_CosmosTransformer3DModel(cls_model: type, config: Any) -> object:
+    kwargs = dict(
+        in_channels=config.in_channels,
+        out_channels=config.out_channels,
+        attention_head_dim=config.attention_head_dim,
+        mlp_ratio=config.mlp_ratio,
+        num_layers=config.num_layers,
+        text_embed_dim=config.text_embed_dim,
+        adaln_lora_dim=config.adaln_lora_dim,
+        max_size=config.max_size,
+        patch_size=config.patch_size,
+        rope_scale=config.rope_scale,
+        concat_padding_mask=config.concat_padding_mask,
+        extra_pos_embed_type=config.extra_pos_embed_type,
+    )
+    return cls_model(**kwargs)
+
+
+class SpecificConfig:
+    """Creates a specific configuration for the loaded model."""
+
+    def __init__(self, **kwargs):
+        self._atts = set(kwargs)
+        for k, v in kwargs.items():
+            setattr(self, k, v)
+
+    def to_dict(self) -> Dict[str, Any]:
+        return {k: getattr(self, k) for k in self._atts if k != "_atts"}
+
+
+def load_specific_model(
+    model_id: str, verbose: int = 0, **kwargs
+) -> Tuple[Any, str, SpecificConfig]:
+    """
+    Some models do not have any generic to be loaded.
+    This functions
+
+    :param model_id: model id
+    :param verbose: verbosiy
+    :param kwargs: additional parameters
+    :return: the model, the task associated to it, a configuration
+    """
+    assert model_id in HANDLED_MODELS, (
+        f"Unable to load model_id={model_id!r}, "
+        f"no function is mapped to this id in {sorted(HANDLED_MODELS)}"
+    )
+    return HANDLED_MODELS[model_id](model_id, verbose=verbose, **kwargs)
+
+
+def _load_bingsu_adetailer(model_id: str, verbose: int = 0) -> Tuple[Any, str, SpecificConfig]:
+    """See `Bingsu/adetailer <https://huggingface.co/Bingsu/adetailer>`_."""
+    from huggingface_hub import hf_hub_download
+    from ultralytics import YOLO
+
+    path = hf_hub_download("Bingsu/adetailer", "face_yolov8n.pt")
+    model = YOLO(path)
+    return (
+        model,
+        "object-detection",
+        SpecificConfig(architecture=type(model), image_size=224, num_channels=3),
+    )
+
+
+HANDLED_MODELS = {"Bingsu/adetailer": _load_bingsu_adetailer}
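The new `model_specific.py` registry is keyed by model id. A minimal usage sketch of how a caller is expected to consume it, assuming the optional `huggingface_hub` and `ultralytics` dependencies required by this particular loader are installed:

```python
# Hedged sketch: looks up the only id handled in 0.7.11 and loads it.
from onnx_diagnostic.torch_models.hghub.model_specific import (
    HANDLED_MODELS,
    load_specific_model,
)

assert "Bingsu/adetailer" in HANDLED_MODELS
model, task, config = load_specific_model("Bingsu/adetailer", verbose=1)
print(task)              # "object-detection"
print(config.to_dict())  # {"architecture": ..., "image_size": 224, "num_channels": 3}
```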
onnx_diagnostic/torch_models/untrained/llm_phi2.py

@@ -9,6 +9,7 @@ def get_phi2(
     sequence_length: int = 30,
     sequence_length2: int = 3,
     dynamic_rope: bool = False,
+    use_dim_not_dynamic: bool = False,
     **kwargs,
 ) -> Dict[str, Any]:
     """
@@ -18,6 +19,8 @@ def get_phi2(
     :param sequence_length: sequence length
     :param sequence_length2: new sequence length
     :param dynamic_rope: use dynamic rope (see :class:`transformers.LlamaConfig`)
+    :param use_dim_not_dynamic: uses ``torch.export.Dim`` and not a string for the batch size,
+        the sequence length and the cache length
     :param kwargs: to overwrite the configuration, example ``num_hidden_layers=1``
     :return: dictionary

@@ -62,9 +65,14 @@ def get_phi2(
     n_layers = config["num_hidden_layers"]
     num_key_value_heads = config["num_key_value_heads"]

-
-
-
+    if use_dim_not_dynamic:
+        batch = torch.export.Dim("batch", min=1, max=1024)
+        seq_length = torch.export.Dim("seq_length", min=1, max=4096)
+        cache_length = torch.export.Dim("cache_length", min=1, max=4096)
+    else:
+        batch = "batch"
+        seq_length = "seq_length"
+        cache_length = "cache_length"

     shapes = {
         "input_ids": {0: batch, 1: seq_length},
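The new flag only changes how the dynamic dimensions are declared for `torch.export`. A short sketch of the two equivalent spellings, assuming a model whose first input is `input_ids` (the surrounding `get_phi2` code builds the same `shapes` dictionary for batch, sequence length and cache length):

```python
# Sketch: a named torch.export.Dim with bounds versus a plain string; recent
# PyTorch versions accept either form inside dynamic_shapes.
import torch

use_dim_not_dynamic = True
if use_dim_not_dynamic:
    batch = torch.export.Dim("batch", min=1, max=1024)
    seq_length = torch.export.Dim("seq_length", min=1, max=4096)
else:
    batch, seq_length = "batch", "seq_length"

dynamic_shapes = {"input_ids": {0: batch, 1: seq_length}}
# ep = torch.export.export(model, (input_ids,), dynamic_shapes=dynamic_shapes)
```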
onnx_diagnostic/torch_models/validate.py

@@ -7,8 +7,6 @@ from typing import Any, Callable, Dict, List, Optional, Tuple, Union
 import time
 import numpy as np
 import onnx
-import onnxscript
-import onnxscript.rewriter.ort_fusions as ort_fusions
 import torch
 from ..export import CoupleInputsDynamicShapes
 from ..helpers import max_diff, string_type, string_diff
@@ -113,6 +111,7 @@ def _make_folder_name(
     dtype: Optional[Union[str, torch.dtype]] = None,
     device: Optional[Union[str, torch.device]] = None,
     subfolder: Optional[str] = None,
+    opset: Optional[int] = None,
 ) -> str:
     "Creates a filename unique based on the given options."
     els = [model_id.replace("/", "_")]
@@ -136,6 +135,8 @@ def _make_folder_name(
     else:
         raise AssertionError(f"unexpected value for device={device}, sdev={sdev!r}")
     els.append(sdev)
+    if opset is not None:
+        els.append(f"op{opset}")
     return "-".join(els)


@@ -246,6 +247,7 @@ def _quiet_or_not_quiet(
         summary[f"time_{suffix}_latency_std"] = a.std()
         summary[f"time_{suffix}_latency_min"] = a.min()
         summary[f"time_{suffix}_latency_min"] = a.max()
+        summary[f"time_{suffix}_n"] = len(a)
     return res


@@ -262,6 +264,16 @@ def shrink_config(cfg: Dict[str, Any]) -> Dict[str, Any]:
     return new_cfg


+def _preprocess_model_id(model_id, subfolder):
+    if subfolder or "//" not in model_id:
+        return model_id, subfolder
+    spl = model_id.split("//")
+    if spl[-1] in {"transformer", "vae"}:
+        # known subfolder
+        return "//".join(spl[:-1]), spl[-1]
+    return model_id, subfolder
+
+
 def validate_model(
     model_id: str,
     task: Optional[str] = None,
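With this change a subfolder can be encoded directly in the model id, as `model_id//subfolder`, and is split off before `validate_model` does anything else. A small illustrative sketch that re-implements the same logic (the real helper is private):

```python
# Sketch of the "model_id//subfolder" convention: only the known subfolders
# "transformer" and "vae" are recognised and split off the id.
def preprocess_model_id(model_id, subfolder=None):
    if subfolder or "//" not in model_id:
        return model_id, subfolder
    spl = model_id.split("//")
    if spl[-1] in {"transformer", "vae"}:
        return "//".join(spl[:-1]), spl[-1]
    return model_id, subfolder

assert preprocess_model_id("org/some-model//transformer") == ("org/some-model", "transformer")
assert preprocess_model_id("org/some-model//weights") == ("org/some-model//weights", None)
```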
onnx_diagnostic/torch_models/validate.py (continued)

@@ -290,6 +302,7 @@ def validate_model(
     warmup: int = 0,
     inputs2: int = 1,
     output_names: Optional[List[str]] = None,
+    ort_logs: bool = False,
 ) -> Tuple[Dict[str, Union[int, float, str]], Dict[str, Any]]:
     """
     Validates a model.
@@ -334,13 +347,15 @@ def validate_model(
     :param subfolder: version or subfolders to uses when retrieving a model id
     :param opset: onnx opset to use for the conversion
     :param runtime: onnx runtime to use to check about discrepancies,
-
+        possible values ``onnxruntime``, ``torch``, ``orteval``,
+        ``orteval10``, ``ref`` only if `do_run` is true
     :param repeat: number of time to measure the model
     :param warmup: warmup the model first
     :param inputs2: checks that the second set of inputs is reunning as well,
         this ensures that the model does support dynamism, the value is used
         as an increment to the first set of values (added to dimensions)
     :param output_names: output names the onnx exporter should use
+    :param ort_logs: increases onnxruntime verbosity when creating the session
     :return: two dictionaries, one with some metrics,
         another one with whatever the function produces

@@ -352,7 +367,7 @@ def validate_model(
     The following exporters are available:

     * ``export-nostrict``: run :func:`torch.export.export` (..., strict=False)
-    * ``onnx-dynamo``: run :func:`torch.onnx.export` (
+    * ``onnx-dynamo``: run :func:`torch.onnx.export` (...),
       models can be optimized with ``optimization`` in ``("ir", "os_ort")``
     * ``modelbuilder``: use :epkg:`ModelBuilder` to builds the onnx model
     * ``custom``: custom exporter (see :epkg:`experimental-experiment`),
@@ -361,8 +376,15 @@ def validate_model(

     The default runtime, :epkg:`onnxruntime` is used to validate a model and check the
     exported model returns the same outputs as the original one, otherwise,
-    :class:`onnx_diagnostic.reference.TorchOnnxEvaluator`
+    :class:`onnx_diagnostic.reference.TorchOnnxEvaluator`
+    if ``runtime == 'torch'`` or
+    :class:`onnx_diagnostic.reference.OnnxruntimeEvaluator`
+    if ``runtime == 'orteval'`` or
+    :class:`onnx_diagnostic.reference.ExtendedReferenceEvaluator`
+    if ``runtime == 'ref'``,
+    ``orteval10`` increases the verbosity.
     """
+    model_id, subfolder = _preprocess_model_id(model_id, subfolder)
     if isinstance(patch, bool):
         patch_kwargs = (
             dict(patch_transformers=True, patch_diffusers=True, patch=True)
@@ -412,7 +434,13 @@ def validate_model(
     folder_name = None
     if dump_folder:
         folder_name = _make_folder_name(
-            model_id,
+            model_id,
+            exporter,
+            optimization,
+            dtype=dtype,
+            device=device,
+            subfolder=subfolder,
+            opset=opset,
         )
         dump_folder = os.path.join(dump_folder, folder_name)
         if not os.path.exists(dump_folder):
@@ -712,6 +740,7 @@ def validate_model(
             print(f"[validate_model] done (dump onnx) in {duration}")
         data["onnx_filename"] = onnx_filename
         summary["time_onnx_save"] = duration
+        summary.update(compute_statistics(onnx_filename))
         if verbose:
             print(f"[validate_model] dumps statistics in {dump_folder!r}...")
         dump_stats = os.path.join(dump_folder, f"{folder_name}.stats")
@@ -742,6 +771,7 @@ def validate_model(
             repeat=repeat,
             warmup=warmup,
             inputs2=inputs2,
+            ort_logs=ort_logs,
         )
         summary.update(summary_valid)

@@ -815,6 +845,48 @@ def validate_model(
     return summary, data


+def compute_statistics(onnx_filename: str) -> Dict[str, Union[float, int]]:
+    """Computes some statistics on the model itself."""
+    onx = onnx.load(onnx_filename, load_external_data=False)
+
+    def node_iter(proto):
+        if isinstance(proto, onnx.ModelProto):
+            yield from node_iter(proto.graph)
+            for f in proto.functions:
+                yield from node_iter(f)
+        elif isinstance(proto, (onnx.FunctionProto, onnx.GraphProto)):
+            for node in proto.node:
+                yield node
+                for att in node.attribute:
+                    if att.type == onnx.AttributeProto.GRAPH:
+                        yield from node_iter(att.g)
+            if hasattr(proto, "initializer"):
+                yield from proto.initializer
+        else:
+            raise NotImplementedError(f"Unexpected type={type(proto)}")
+
+    counts: Dict[str, Union[float, int]] = {}
+    n_nodes = 0
+    n_nodes_nocst = 0
+    for proto in node_iter(onx):
+        if isinstance(proto, onnx.NodeProto):
+            key = f"n_node_{proto.op_type}"
+            n_nodes += 1
+            if proto.op_type != "Constant":
+                n_nodes_nocst += 1
+        else:
+            key = f"n_node_initializer_{proto.data_type}"
+
+        if key not in counts:
+            counts[key] = 0
+        counts[key] += 1
+
+    counts["n_node_nodes"] = n_nodes
+    counts["n_node_nodes_nocst"] = n_nodes_nocst
+    counts["n_node_functions"] = len(onx.functions)
+    return counts
+
+
 def _validate_do_run_model(
     data, summary, key, tag, expected_tag, verbose, repeat, warmup, quiet
 ):
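`compute_statistics` walks the main graph, the local functions and nested subgraphs, and its counters end up in the validation summary under keys such as `n_node_<op_type>` and `n_node_initializer_<data_type>`. A minimal usage sketch, where `model.onnx` is a placeholder for any exported file:

```python
# Sketch: calling the new helper directly on an existing ONNX file.
from onnx_diagnostic.torch_models.validate import compute_statistics

stats = compute_statistics("model.onnx")   # "model.onnx" is a placeholder path
print(stats.get("n_node_MatMul", 0))       # count of MatMul nodes, if any
print(stats["n_node_nodes"])               # total node count, Constant included
print(stats["n_node_nodes_nocst"])         # node count without Constant nodes
print(stats["n_node_functions"])           # number of local functions
```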
onnx_diagnostic/torch_models/validate.py (continued)

@@ -1100,6 +1172,7 @@ def validate_onnx_model(
     repeat: int = 1,
     warmup: int = 0,
     inputs2: int = 1,
+    ort_logs: bool = False,
 ) -> Tuple[Dict[str, Any], Dict[str, Any]]:
     """
     Verifies that an onnx model produces the same
@@ -1112,12 +1185,13 @@ def validate_onnx_model(
     :param quiet: catch exception or not
     :param verbose: verbosity
     :param flavour: use a different version of the inputs
-    :param runtime: onnx runtime to use, onnxruntime
+    :param runtime: onnx runtime to use, onnxruntime, torch, orteval, ref
     :param repeat: run that number of times the model
     :param warmup: warmup the model
     :param inputs2: to validate the model on the second input set
         to make sure the exported model supports dynamism, the value is
         used as an increment added to the first set of inputs (added to dimensions)
+    :param ort_logs: triggers the logs for onnxruntime
     :return: two dictionaries, one with some metrics,
         another one with whatever the function produces
     """
@@ -1159,23 +1233,71 @@
             f"{providers}..., flavour={flavour!r}"
         )

-    if runtime
+    if runtime == "onnxruntime":
+        if os.environ.get("DUMPORTOPT", "") in ("1", "true", "True"):
+            opts = onnxruntime.SessionOptions()
+            opts.optimized_model_filepath = f"{data['onnx_filename']}.rtopt.onnx"
+            if verbose:
+                print(
+                    f"[validate_onnx_model] saved optimized onnxruntime "
+                    f"in {opts.optimized_model_filepath!r}"
+                )
+            onnxruntime.InferenceSession(data["onnx_filename"], opts, providers=providers)
+            if verbose:
+                print("[validate_onnx_model] -- done")
+
+        if verbose:
+            print("[validate_onnx_model] runtime is onnxruntime")
+        sess_opts = onnxruntime.SessionOptions()
+        if ort_logs:
+            sess_opts.log_severity_level = 0
+            sess_opts.log_verbosity_level = 4
+        cls_runtime = lambda model, providers, _o=sess_opts: onnxruntime.InferenceSession(
+            (model.SerializeToString() if isinstance(model, onnx.ModelProto) else model),
+            _o,
+            providers=providers,
+        )
+    elif runtime == "torch":
         from ..reference import TorchOnnxEvaluator

-
-
-
-
-            providers=providers,
+        if verbose:
+            print("[validate_onnx_model] runtime is TorchOnnxEvaluator")
+        cls_runtime = (
+            lambda model, providers, _cls_=TorchOnnxEvaluator: _cls_(  # type: ignore[misc]
+                model, providers=providers, verbose=max(verbose - 1, 0)
             )
         )
-
-
-
+    elif runtime == "orteval":
+        from ..reference import OnnxruntimeEvaluator
+
+        if verbose:
+            print("[validate_onnx_model] runtime is OnnxruntimeEvaluator")
+        cls_runtime = (
+            lambda model, providers, _cls_=OnnxruntimeEvaluator: _cls_(  # type: ignore[misc]
                 model, providers=providers, verbose=max(verbose - 1, 0)
             )
         )
-
+    elif runtime == "orteval10":
+        from ..reference import OnnxruntimeEvaluator
+
+        if verbose:
+            print("[validate_onnx_model] runtime is OnnxruntimeEvaluator(verbose=10)")
+        cls_runtime = (
+            lambda model, providers, _cls_=OnnxruntimeEvaluator: _cls_(  # type: ignore[misc]
+                model, providers=providers, verbose=10
+            )
+        )
+    elif runtime == "ref":
+        from ..reference import ExtendedReferenceEvaluator
+
+        if verbose:
+            print("[validate_onnx_model] runtime is ExtendedReferenceEvaluator")
+        cls_runtime = lambda model, providers, _cls_=ExtendedReferenceEvaluator: _cls_(  # type: ignore[misc]
+            model, verbose=max(verbose - 1, 0)
+        )
+    else:
+        raise ValueError(f"Unexpecteed runtime={runtime!r}")
+
     sess = _quiet_or_not_quiet(
         quiet,
         _mk("create_onnx_ort"),
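The dispatch above maps the `runtime` string to an evaluator class. A hedged sketch of what each value resolves to; the class names come from the imports and docstring above, but whether each constructor accepts a file path exactly like `onnxruntime.InferenceSession` does is an assumption, and `model.onnx` is a placeholder:

```python
# Sketch: picking an evaluator the way validate_onnx_model does, per runtime value.
runtime = "orteval"          # one of: onnxruntime, torch, orteval, orteval10, ref
providers = ["CPUExecutionProvider"]

if runtime == "onnxruntime":
    import onnxruntime
    sess = onnxruntime.InferenceSession("model.onnx", providers=providers)
elif runtime == "torch":
    from onnx_diagnostic.reference import TorchOnnxEvaluator
    sess = TorchOnnxEvaluator("model.onnx", providers=providers)
elif runtime in ("orteval", "orteval10"):
    from onnx_diagnostic.reference import OnnxruntimeEvaluator
    sess = OnnxruntimeEvaluator(
        "model.onnx", providers=providers, verbose=10 if runtime == "orteval10" else 0
    )
else:  # "ref"
    from onnx_diagnostic.reference import ExtendedReferenceEvaluator
    sess = ExtendedReferenceEvaluator("model.onnx")
```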
onnx_diagnostic/torch_models/validate.py (continued)

@@ -1356,6 +1478,8 @@ def call_torch_export_onnx(
     if optimization == "ir":
         label, f_optim = "export_onnx_opt_ir", (lambda epo=epo: epo.optimize())
     else:
+        import onnxscript
+        import onnxscript.rewriter.ort_fusions as ort_fusions

         def _os_ort_optim(epo):
             onnxscript.optimizer.optimize_ir(epo.model)
@@ -1475,6 +1599,8 @@ def call_torch_export_custom(
         "default+onnxruntime+os_ort",
         None,
     }
+    if optimization == "none":
+        optimization = ""
     assert (
         optimization in available
     ), f"unexpected value for optimization={optimization}, available={available}"
@@ -1638,6 +1764,9 @@ def call_torch_export_custom(
         print("[call_torch_export_custom] done (export)")

     if os_ort:
+        import onnxscript
+        import onnxscript.rewriter.ort_fusions as ort_fusions
+
         if verbose:
             print("[call_torch_export_custom] conversion to IR...")
         begin = time.perf_counter()
onnx_diagnostic/torch_onnx/sbs.py

@@ -205,7 +205,7 @@ def run_aligned(
         Model(), (x,), dynamic_shapes=({0: torch.export.Dim("batch")},)
     )
     onx = torch.onnx.export(
-        Model(), (x,), dynamic_shapes=({0: torch.export.Dim("batch")},)
+        Model(), (x,), dynamic_shapes=({0: torch.export.Dim("batch")},)
     ).model_proto
     results = list(
         map(
{onnx_diagnostic-0.7.9.dist-info → onnx_diagnostic-0.7.11.dist-info}/METADATA

@@ -1,6 +1,6 @@
 Metadata-Version: 2.4
 Name: onnx-diagnostic
-Version: 0.7.
+Version: 0.7.11
 Summary: Investigate ONNX models
 Home-page: https://github.com/sdpython/onnx-diagnostic
 Author: Xavier Dupré
@@ -95,7 +95,7 @@ Getting started

     git clone https://github.com/sdpython/onnx-diagnostic.git
     cd onnx-diagnostic
-    pip install -e .
+    pip install -e . -v

 or

{onnx_diagnostic-0.7.9.dist-info → onnx_diagnostic-0.7.11.dist-info}/RECORD

@@ -1,6 +1,6 @@
-onnx_diagnostic/__init__.py,sha256=
+onnx_diagnostic/__init__.py,sha256=tyRANqD6rauk6F7FpFJN5K1FyB1baNeni92_ol7nrdU,174
 onnx_diagnostic/__main__.py,sha256=YmyV_Aq_ianDlHyKLHMa6h8YK3ZmFPpLVHLKjM91aCk,79
-onnx_diagnostic/_command_lines_parser.py,sha256=
+onnx_diagnostic/_command_lines_parser.py,sha256=EljrcTEKF4TuSdougR3i3FL4_jVDG8xizrLsLIA2JRs,33185
 onnx_diagnostic/api.py,sha256=BhCl_yCd78N7TlVtPOHjeYv1QBEy39TjZ647rcHqLh0,345
 onnx_diagnostic/doc.py,sha256=t3RELgfooYnVMAi0JSpggWkQEgUsREz8NmRvn0TnLI8,2829
 onnx_diagnostic/ext_test_case.py,sha256=emfQGiQSz5FVDhyJ1Acsv_Tast7tWl426TjtpNqxDBU,43558
@@ -12,22 +12,22 @@ onnx_diagnostic/helpers/__init__.py,sha256=GJ2GT7cgnlIveVUwMZhuvUwidbTJaKv8CsSIO
 onnx_diagnostic/helpers/_log_helper.py,sha256=OTwQH0OIxs9B6nrSvR7MoxMimSw_8mU0mj133NvLk5o,16832
 onnx_diagnostic/helpers/args_helper.py,sha256=SRWnqC7EENg09RZlA50B_PcdiIhdbgA4C3ACfzl5nMs,4419
 onnx_diagnostic/helpers/bench_run.py,sha256=CGA6VMJZMH2gDhVueT9ypNm4PMcjGrrGFYp08nhWj9k,16539
-onnx_diagnostic/helpers/cache_helper.py,sha256=
+onnx_diagnostic/helpers/cache_helper.py,sha256=zxjm0-3lHs0A7wLEejz2r2KPMPjkkva--8511MaSy74,24846
 onnx_diagnostic/helpers/config_helper.py,sha256=H2mOcMXfrcolFnt8EuqmRFkpQ3YdNRDfvm9ToI1vNH0,5618
 onnx_diagnostic/helpers/doc_helper.py,sha256=pl5MZd3_FaE8BqQnqoBuSBxoNCFcd2OJd3eITUSku5c,5897
 onnx_diagnostic/helpers/graph_helper.py,sha256=hevQT5a7_QuriVPQcbT5qe18n99Doyl5h3-qshx1-uk,14093
-onnx_diagnostic/helpers/helper.py,sha256=
+onnx_diagnostic/helpers/helper.py,sha256=mRQ-wo9P30m0Z0_v3EfEDwK_dZFTUhIVKo-5ut9DPW8,63194
 onnx_diagnostic/helpers/log_helper.py,sha256=ODtMLFfJvkyss9PJwEZFd5_8bLcliaMq0A17t0dSIFA,82771
 onnx_diagnostic/helpers/memory_peak.py,sha256=OT6mz0muBbBZY0pjgW2_eCk_lOtFRo-5w4jFo2Z6Kok,6380
 onnx_diagnostic/helpers/mini_onnx_builder.py,sha256=FgK-Kws1WpSYdYJCPyONwQYY3AjbgUHimZlaYyiNUfE,21286
 onnx_diagnostic/helpers/model_builder_helper.py,sha256=tJi4VkP0TS2yyDSxQPNu9WRoSnPCAjr6L0J49X2LdXk,12810
-onnx_diagnostic/helpers/onnx_helper.py,sha256=
+onnx_diagnostic/helpers/onnx_helper.py,sha256=oxl3x0EQowGP9kfz8aKDqnJZcvYY8FeZLsfoLJDiSUg,39826
 onnx_diagnostic/helpers/ort_session.py,sha256=UgUUeUslDxEFBc6w6f3HMq_a7bn4TBlItmojqWquSj4,29281
 onnx_diagnostic/helpers/rt_helper.py,sha256=qbV6zyMs-iH6H65WHC2tu4h0psnHg0TX5fwfO_k-glg,4623
-onnx_diagnostic/helpers/torch_helper.py,sha256=
+onnx_diagnostic/helpers/torch_helper.py,sha256=e0KkSTdoZthc5Yuf9e8XVGAx-lqOYy4DeRRe-N4QUYQ,33478
 onnx_diagnostic/reference/__init__.py,sha256=rLZsxOlnb7-81F2CzepGnZLejaROg4JvgFaGR9FwVQA,208
 onnx_diagnostic/reference/evaluator.py,sha256=RzNzjFDeMe-4X51Tb22N6aagazY5ktNq-mRmPcfY5EU,8848
-onnx_diagnostic/reference/ort_evaluator.py,sha256=
+onnx_diagnostic/reference/ort_evaluator.py,sha256=nituItsP3IKDDWF9z-iGX_iAubrTcdk8pb1GVBp9sCU,26161
 onnx_diagnostic/reference/quantized_tensor.py,sha256=5u67uS2uGacdMD5VYCbpojNjiesDlV_kO0fAJ0vUWGE,1098
 onnx_diagnostic/reference/report_results_comparison.py,sha256=OsyQN8EHZZoj97u74RQP-7WFpebPOso5GEDpdkLWu6M,3645
 onnx_diagnostic/reference/torch_evaluator.py,sha256=gf8EPoX4C4yGgQ-DqxXxaGU26WdEhn8Gd6iesDLqAV0,27692
@@ -52,7 +52,7 @@ onnx_diagnostic/reference/ops/op_qlinear_conv.py,sha256=DgiUwoj-gW5xv9CVFXPPRJbK
 onnx_diagnostic/reference/ops/op_quick_gelu.py,sha256=43QNWbOK88-h7qqe0ubMTbVt3Qo4YmNZPfrbu5kIefM,631
 onnx_diagnostic/reference/ops/op_replace_zero.py,sha256=Fe8yFJeg33_5e1RGtv6fqBZOY-qpOCv7PukjdubzChA,323
 onnx_diagnostic/reference/ops/op_rotary.py,sha256=GbJhk6id6rSelEK1VuD-LBPM6xDckpmsmJuydSJbMws,628
-onnx_diagnostic/reference/ops/op_scan.py,sha256=
+onnx_diagnostic/reference/ops/op_scan.py,sha256=qmPdrUrhOrxzjiwlOYAyyl-Ztxc_rkAU4oweJgOlbZ8,2077
 onnx_diagnostic/reference/ops/op_scatter_elements.py,sha256=D8fkrNlk22C-o3MddLpaex7vS2NT4KXDzqhYvK250zA,3775
 onnx_diagnostic/reference/ops/op_scatternd_of_shape.py,sha256=PUSRHd_CugWkEMiy9SeKApk26edTXVjDUNC8fLRRvwA,812
 onnx_diagnostic/reference/ops/op_simplified_layer_normalization.py,sha256=1ChLxn_1kYbbN6KTa0uJAHEyJlutBo-B1CY8YVs7EaM,280
@@ -72,12 +72,13 @@ onnx_diagnostic/reference/torch_ops/reduce_ops.py,sha256=9gFfraPTQbe_ZEUNCUis1JS
 onnx_diagnostic/reference/torch_ops/sequence_ops.py,sha256=3EiVKpGfN4d1Iry4hgnr3MIJyEEKUrAIDgmRGsUXXa0,2297
 onnx_diagnostic/reference/torch_ops/shape_ops.py,sha256=pJrNR2UB4PlWl6cv4EDl1uGl8YTBUUMQkhJcsh5K4sA,4291
 onnx_diagnostic/reference/torch_ops/unary_ops.py,sha256=dwu6HPr4V_roxu85U3VLTtDLx5bfxKalT_-zlQxZ5wc,1850
-onnx_diagnostic/tasks/__init__.py,sha256=
+onnx_diagnostic/tasks/__init__.py,sha256=KHMH-YONqUQD3tT6N995wyZuF0R4NIZlIH8moumqmRc,2532
 onnx_diagnostic/tasks/automatic_speech_recognition.py,sha256=umZmjGW1gDUFkqvBJnQyaL7D7-HqiwlQpsq6Ip187Dg,7150
 onnx_diagnostic/tasks/feature_extraction.py,sha256=Zh9p_Q8FqEO2_aqI0cCiq8OXuM3WUZbwItlLOmLnNl8,5537
 onnx_diagnostic/tasks/fill_mask.py,sha256=5Gt6zlj0p6vuifox7Wmj-TpHXJvPS0CEH8evgdBHDNA,2640
 onnx_diagnostic/tasks/image_classification.py,sha256=nLpBBB1Gkog3Fk6pu2waiHcuQr4ILPptc9FhQ-pn460,4682
 onnx_diagnostic/tasks/image_text_to_text.py,sha256=wkFrUaEvQAW-D-jql2xSnae1XvQBl-sSbhmAmJ76qGo,17428
+onnx_diagnostic/tasks/image_to_video.py,sha256=SoF2cVIJr6P30Abp-FCuixFDh5RvTuNEOL36QthGY6U,3860
 onnx_diagnostic/tasks/mask_generation.py,sha256=fjdD3rd-O-mFL0hQy3la3JXKth_0bH2HL7Eelq-3Dbs,5057
 onnx_diagnostic/tasks/mixture_of_expert.py,sha256=al4tk1BrHidtRiHlAaiflWiJaAte0d5M8WcBioANG9k,2808
 onnx_diagnostic/tasks/object_detection.py,sha256=3FiT8ya5FCd9lwjQCRXhAwXspNwYTlAD3Gpk8aAcG5w,4279
@@ -89,36 +90,37 @@ onnx_diagnostic/tasks/text_generation.py,sha256=hV-oK1bWjtepxkA491Va_0CWrELZbfP4
 onnx_diagnostic/tasks/text_to_image.py,sha256=mOS3Ruosi3hzRMxXLDN7ZkAbi7NnQb7MWwQP_okGVHs,2962
 onnx_diagnostic/tasks/zero_shot_image_classification.py,sha256=jJCMWuOqGv5ahCfjrcqxuYCJFhTgHV5KUf2yyv2yxYA,4624
 onnx_diagnostic/torch_export_patches/__init__.py,sha256=0SaZedwznm1hQUCvXZsGZORV5vby954wEExr5faepGg,720
-onnx_diagnostic/torch_export_patches/onnx_export_errors.py,sha256=
+onnx_diagnostic/torch_export_patches/onnx_export_errors.py,sha256=KYux1-Ea3zCxffxc-17DVfO0G_XCU1flPw_XUc_Fcmg,28008
 onnx_diagnostic/torch_export_patches/onnx_export_serialization.py,sha256=klvqiMjccwGhiRnLRVbwTi5WWkMfvtnOV5ycirPcAdA,11354
 onnx_diagnostic/torch_export_patches/patch_expressions.py,sha256=vr4tt61cbDnaaaduzMj4UBZ8OUtr6GfDpIWwOYqjWzs,3213
 onnx_diagnostic/torch_export_patches/patch_inputs.py,sha256=3ySY1nAzINSS1hAzTycwfdbPas8G5CDL2MjnaAHBkMU,7825
 onnx_diagnostic/torch_export_patches/patch_module.py,sha256=R2d9IHM-RwsBKDsxuBIJnEqMoxbS9gd4YWFGG2wwV5A,39881
 onnx_diagnostic/torch_export_patches/patch_module_helper.py,sha256=2U0AdyZuU0W54QTdE7tY7imVzMnpQ5091ADNtTCkT8Y,6967
 onnx_diagnostic/torch_export_patches/eval/__init__.py,sha256=57x62uZNA80XiWgkG8Fe0_8YJcIVrvKLPqvwLDPJwgc,24008
-onnx_diagnostic/torch_export_patches/eval/model_cases.py,sha256=
+onnx_diagnostic/torch_export_patches/eval/model_cases.py,sha256=OU8-63VDhiWtQV3scBV9JyGXn8ds74OzY2-IOZkwg0A,26580
 onnx_diagnostic/torch_export_patches/patches/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
 onnx_diagnostic/torch_export_patches/patches/patch_torch.py,sha256=TFjuw--sTYPCoVEaYlYLJuElx_CUynJR6s6ypoZtRWw,18956
-onnx_diagnostic/torch_export_patches/patches/patch_transformers.py,sha256=
+onnx_diagnostic/torch_export_patches/patches/patch_transformers.py,sha256=SsN-y2yoVaY3xRGDaIl0V449LcuwKAGBHPKm2JjQNhc,67942
 onnx_diagnostic/torch_export_patches/serialization/__init__.py,sha256=BHLdRPtNAtNPAS-bPKEj3-foGSPvwAbZXrHzGGPDLEw,1876
 onnx_diagnostic/torch_export_patches/serialization/diffusers_impl.py,sha256=drq3EH_yjcSuIWYsVeUWm8Cx6YCZFU6bP_1PLtPfY5I,945
-onnx_diagnostic/torch_export_patches/serialization/transformers_impl.py,sha256=
+onnx_diagnostic/torch_export_patches/serialization/transformers_impl.py,sha256=mcmZGekzQlLgE_o3SdKlRgCx4ewwyyAuNWZ9CaN_zrI,9317
 onnx_diagnostic/torch_models/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
 onnx_diagnostic/torch_models/llms.py,sha256=soyg4yC87ptGoeulJhKqw5opGmuLvH1pn_ZDXZ4Jr8E,90
-onnx_diagnostic/torch_models/validate.py,sha256=
+onnx_diagnostic/torch_models/validate.py,sha256=3UJzjH89dpa_pyFoFG_fZ2IwOa25gtC7RxHjKX7c2PQ,70887
 onnx_diagnostic/torch_models/hghub/__init__.py,sha256=vi1Q7YHdddj1soiBN42MSvJdFqe2_KUoWafHISjwOu8,58
-onnx_diagnostic/torch_models/hghub/hub_api.py,sha256=
-onnx_diagnostic/torch_models/hghub/hub_data.py,sha256=
-onnx_diagnostic/torch_models/hghub/hub_data_cached_configs.py,sha256=
-onnx_diagnostic/torch_models/hghub/model_inputs.py,sha256=
+onnx_diagnostic/torch_models/hghub/hub_api.py,sha256=YYSX3pLsGCTwhMFSu-6ML4Bcy09EWmg1GgXSZ5eCQjA,14515
+onnx_diagnostic/torch_models/hghub/hub_data.py,sha256=8V_pAgACPLPsLRYUododg7MSL6str-T3tBEGY4OaeYQ,8724
+onnx_diagnostic/torch_models/hghub/hub_data_cached_configs.py,sha256=3yH1pQbCYNDmRMNUCwMFf5ELnAa35ubTKD2JRF5y9Ls,287515
+onnx_diagnostic/torch_models/hghub/model_inputs.py,sha256=qg-_incL_nX9J1bit_nYV5diQN0Zqf7b10ZZfTikbjg,13701
+onnx_diagnostic/torch_models/hghub/model_specific.py,sha256=j50Nu7wddJMoqmD4QzMbNdFDUUgUmSBKRzPDH55TlUQ,2498
 onnx_diagnostic/torch_models/untrained/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
-onnx_diagnostic/torch_models/untrained/llm_phi2.py,sha256=
+onnx_diagnostic/torch_models/untrained/llm_phi2.py,sha256=JbGZmW41MPJcQgqaJc9R2G00nI79nI-lABN-ffA1lmY,4037
 onnx_diagnostic/torch_models/untrained/llm_tiny_llm.py,sha256=QXw_Bs2SzfeiQMf-tmtVl83SmVOL4-Um7Qy-f0E48QI,2507
 onnx_diagnostic/torch_onnx/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
 onnx_diagnostic/torch_onnx/runtime_info.py,sha256=1g9F_Jf9AAgYQU4stbsrFXwQl-30mWlQrFbQ7val8Ps,9268
-onnx_diagnostic/torch_onnx/sbs.py,sha256=
-onnx_diagnostic-0.7.
-onnx_diagnostic-0.7.
-onnx_diagnostic-0.7.
-onnx_diagnostic-0.7.
-onnx_diagnostic-0.7.
+onnx_diagnostic/torch_onnx/sbs.py,sha256=fN799L_G1c2RKEuNcKt_MnQri5dwD4OzeCkBBFFoUBI,16865
+onnx_diagnostic-0.7.11.dist-info/licenses/LICENSE.txt,sha256=Vv6TXglX6Rc0d-f8aREhayhT-6PMQXEyOmI2NKlUCMc,1045
+onnx_diagnostic-0.7.11.dist-info/METADATA,sha256=vfCWZZUvnv_GKZxFRDvpKFrz5JU3LDmBH0WPK6uN__I,7435
+onnx_diagnostic-0.7.11.dist-info/WHEEL,sha256=_zCd3N1l69ArxyTb8rzEoP9TpbYXkqRFSNOD5OuxnTs,91
+onnx_diagnostic-0.7.11.dist-info/top_level.txt,sha256=KwNkXewmcobM3ZT1DJLVWH6ebJzA5qKg7cWqKfpGNT4,16
+onnx_diagnostic-0.7.11.dist-info/RECORD,,
File without changes
|
|
File without changes
|
|
File without changes
|