onnx-diagnostic 0.4.2__py3-none-any.whl → 0.4.4__py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- onnx_diagnostic/__init__.py +1 -1
- onnx_diagnostic/_command_lines_parser.py +5 -0
- onnx_diagnostic/tasks/__init__.py +2 -0
- onnx_diagnostic/tasks/image_classification.py +21 -0
- onnx_diagnostic/tasks/object_detection.py +123 -0
- onnx_diagnostic/tasks/text_generation.py +13 -10
- onnx_diagnostic/torch_export_patches/__init__.py +17 -1
- onnx_diagnostic/torch_export_patches/onnx_export_errors.py +27 -29
- onnx_diagnostic/torch_export_patches/patch_module.py +304 -0
- onnx_diagnostic/torch_models/hghub/hub_api.py +40 -4
- onnx_diagnostic/torch_models/hghub/hub_data.py +13 -1
- onnx_diagnostic/torch_models/hghub/hub_data_cached_configs.py +210 -0
- onnx_diagnostic/torch_models/hghub/model_inputs.py +34 -9
- onnx_diagnostic/torch_models/test_helper.py +82 -28
- {onnx_diagnostic-0.4.2.dist-info → onnx_diagnostic-0.4.4.dist-info}/METADATA +8 -3
- {onnx_diagnostic-0.4.2.dist-info → onnx_diagnostic-0.4.4.dist-info}/RECORD +19 -17
- {onnx_diagnostic-0.4.2.dist-info → onnx_diagnostic-0.4.4.dist-info}/WHEEL +1 -1
- {onnx_diagnostic-0.4.2.dist-info → onnx_diagnostic-0.4.4.dist-info}/licenses/LICENSE.txt +0 -0
- {onnx_diagnostic-0.4.2.dist-info → onnx_diagnostic-0.4.4.dist-info}/top_level.txt +0 -0
--- a/onnx_diagnostic/torch_models/hghub/model_inputs.py
+++ b/onnx_diagnostic/torch_models/hghub/model_inputs.py
@@ -4,7 +4,7 @@ import torch
 import transformers
 from ...helpers.config_helper import update_config
 from ...tasks import reduce_model_config, random_input_kwargs
-from .hub_api import task_from_arch, get_pretrained_config
+from .hub_api import task_from_arch, task_from_id, get_pretrained_config


 def get_untrained_model_with_inputs(
@@ -18,6 +18,7 @@ def get_untrained_model_with_inputs(
     same_as_pretrained: bool = False,
     use_preinstalled: bool = True,
     add_second_input: bool = False,
+    subfolder: Optional[str] = None,
 ) -> Dict[str, Any]:
     """
     Gets a non initialized model similar to the original model
@@ -37,6 +38,7 @@ def get_untrained_model_with_inputs(
     :param use_preinstalled: use preinstalled configurations
     :param add_second_input: provides a second inputs to check a model
         supports different shapes
+    :param subfolder: subfolder to use for this model id
     :return: dictionary with a model, inputs, dynamic shapes, and the configuration

     Example:
@@ -62,24 +64,39 @@ def get_untrained_model_with_inputs(
         print(f"[get_untrained_model_with_inputs] use preinstalled {model_id!r}")
     if config is None:
         config = get_pretrained_config(
-            model_id,
+            model_id,
+            use_preinstalled=use_preinstalled,
+            subfolder=subfolder,
+            **(model_kwargs or {}),
         )
-
-
+    if hasattr(config, "architecture") and config.architecture:
+        archs = [config.architecture]
+    if type(config) is dict:
+        assert "_class_name" in config, f"Unable to get the architecture from config={config}"
+        archs = [config["_class_name"]]
+    else:
+        archs = config.architectures  # type: ignore
+    task = None
+    if archs is None:
+        task = task_from_id(model_id)
+    assert task is not None or (archs is not None and len(archs) == 1), (
         f"Unable to determine the architecture for model {model_id!r}, "
         f"architectures={archs!r}, conf={config}"
     )
-    arch = archs[0]
-    if verbose:
-        print(f"[get_untrained_model_with_inputs] architecture={arch!r}")
     if verbose:
+        print(f"[get_untrained_model_with_inputs] architectures={archs!r}")
         print(f"[get_untrained_model_with_inputs] cls={config.__class__.__name__!r}")
-    task
+    if task is None:
+        task = task_from_arch(archs[0])
     if verbose:
         print(f"[get_untrained_model_with_inputs] task={task!r}")

     # model kwagrs
     if dynamic_rope is not None:
+        assert (
+            type(config) is not dict
+        ), f"Unable to set dynamic_rope if the configuration is a dictionary\n{config}"
+        assert hasattr(config, "rope_scaling"), f"Missing 'rope_scaling' in\n{config}"
         config.rope_scaling = (
             {"rope_type": "dynamic", "factor": 10.0} if dynamic_rope else None
         )
@@ -106,7 +123,15 @@ def get_untrained_model_with_inputs(
     if inputs_kwargs:
         kwargs.update(inputs_kwargs)

-
+    if archs is not None:
+        model = getattr(transformers, archs[0])(config)
+    else:
+        assert same_as_pretrained, (
+            f"Model {model_id!r} cannot be built, the model cannot be built. "
+            f"It must be downloaded. Use same_as_pretrained=True."
+        )
+        model = None
+
     # This line is important. Some models may produce different
     # outputs even with the same inputs in training mode.
     model.eval()
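The new subfolder argument is forwarded to get_pretrained_config, so a configuration stored in a subfolder of the repository can now be resolved. A minimal sketch of how the updated entry point might be called; the model id is a placeholder and the dictionary keys are assumptions based only on the docstring quoted above ("a model, inputs, dynamic shapes, and the configuration"):

# Hedged sketch, not taken from the package's tests; key names and the model id are placeholders.
from onnx_diagnostic.torch_models.hghub.model_inputs import get_untrained_model_with_inputs

data = get_untrained_model_with_inputs(
    "microsoft/phi-2",   # any Hugging Face model id
    verbose=1,
    subfolder=None,      # new in 0.4.4: pick a subfolder of the repository when needed
)
untrained_model, inputs = data["model"], data["inputs"]
untrained_model(**inputs)  # the untrained model can be run on the generated dummy inputs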
--- a/onnx_diagnostic/torch_models/test_helper.py
+++ b/onnx_diagnostic/torch_models/test_helper.py
@@ -4,6 +4,8 @@ import os
 from typing import Any, Callable, Dict, List, Optional, Tuple, Union
 import time
 import onnx
+import onnxscript
+import onnxscript.rewriter.ort_fusions as ort_fusions
 import torch
 from ..export import CoupleInputsDynamicShapes
 from ..helpers import max_diff, string_type, string_diff
@@ -12,7 +14,7 @@ from ..helpers.rt_helper import make_feeds
 from ..helpers.torch_test_helper import to_any, torch_deepcopy
 from ..helpers.cache_helper import flatten_unflatten_for_dynamic_shapes
 from ..tasks import random_input_kwargs
-from ..torch_export_patches import
+from ..torch_export_patches import torch_export_patches
 from ..torch_export_patches.patch_inputs import use_dyn_not_str
 from .hghub import get_untrained_model_with_inputs

@@ -107,9 +109,12 @@ def _make_folder_name(
     optimization: Optional[str] = None,
     dtype: Optional[Union[str, torch.dtype]] = None,
     device: Optional[Union[str, torch.device]] = None,
+    subfolder: Optional[str] = None,
 ) -> str:
     "Creates a filename unique based on the given options."
     els = [model_id.replace("/", "_")]
+    if subfolder:
+        els.append(subfolder.replace("/", "_"))
     if exporter:
         els.append(exporter)
     if optimization:
@@ -222,6 +227,7 @@ def validate_model(
     ortfusiontype: Optional[str] = None,
     input_options: Optional[Dict[str, Any]] = None,
     model_options: Optional[Dict[str, Any]] = None,
+    subfolder: Optional[str] = None,
 ) -> Tuple[Dict[str, Union[int, float, str]], Dict[str, Any]]:
     """
     Validates a model.
@@ -242,9 +248,9 @@ def validate_model(
         depend on the the exporter
     :param quiet: if quiet, catches exception if any issue
     :param patch: applies patches (``patch_transformers=True``) before exporting,
-        see :func:`onnx_diagnostic.torch_export_patches.
+        see :func:`onnx_diagnostic.torch_export_patches.torch_export_patches`
     :param stop_if_static: stops if a dynamic dimension becomes static,
-        see :func:`onnx_diagnostic.torch_export_patches.
+        see :func:`onnx_diagnostic.torch_export_patches.torch_export_patches`
     :param dump_folder: dumps everything in a subfolder of this one
     :param drop_inputs: drops this list of inputs (given their names)
     :param ortfusiontype: runs ort fusion, the parameters defines the fusion type,
@@ -254,11 +260,11 @@ def validate_model(
         used to export
     :param model_options: additional options when creating the model such as
         ``num_hidden_layers`` or ``attn_implementation``
+    :param subfolder: version or subfolders to uses when retrieving a model id
     :return: two dictionaries, one with some metrics,
         another one with whatever the function produces
     """
     summary = version_summary()
-
     summary.update(
         dict(
             version_model_id=model_id,
@@ -280,7 +286,7 @@ def validate_model(
     folder_name = None
     if dump_folder:
         folder_name = _make_folder_name(
-            model_id, exporter, optimization, dtype=dtype, device=device
+            model_id, exporter, optimization, dtype=dtype, device=device, subfolder=subfolder
         )
         dump_folder = os.path.join(dump_folder, folder_name)
         if not os.path.exists(dump_folder):
@@ -291,11 +297,15 @@ def validate_model(
             print(f"[validate_model] dump into {folder_name!r}")

     if verbose:
-
+        if subfolder:
+            print(f"[validate_model] validate model id {model_id!r}, subfolder={subfolder!r}")
+        else:
+            print(f"[validate_model] validate model id {model_id!r}")
         if model_options:
             print(f"[validate_model] model_options={model_options!r}")
         print(f"[validate_model] get dummy inputs with input_options={input_options}...")
     summary["model_id"] = model_id
+    summary["model_subfolder"] = subfolder or ""

     iop = input_options or {}
     mop = model_options or {}
@@ -305,7 +315,7 @@ def validate_model(
         summary,
         None,
         (
-            lambda mid=model_id, v=verbose, task=task, tr=trained, iop=iop: (
+            lambda mid=model_id, v=verbose, task=task, tr=trained, iop=iop, sub=subfolder: (
                 get_untrained_model_with_inputs(
                     mid,
                     verbose=v,
@@ -313,6 +323,7 @@ def validate_model(
                     same_as_pretrained=tr,
                     inputs_kwargs=iop,
                     model_kwargs=mop,
+                    subfolder=sub,
                 )
             )
         ),
@@ -417,7 +428,7 @@ def validate_model(
                 f"[validate_model] applies patches before exporting "
                 f"stop_if_static={stop_if_static}"
             )
-        with
+        with torch_export_patches( # type: ignore
             patch_transformers=True,
             stop_if_static=stop_if_static,
             verbose=max(0, verbose - 1),
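Taken together, the validate_model hunks thread the new subfolder option into the dump-folder name, the summary, and the call to get_untrained_model_with_inputs. A hedged sketch of a call using only the keyword names visible above; the chosen values are placeholders and other parameters of the function are not shown in this diff:

# Hedged sketch; exporter, optimization, dump_folder, subfolder and verbose appear in the hunks
# above, everything else here is a placeholder.
from onnx_diagnostic.torch_models.test_helper import validate_model

summary, data = validate_model(
    "microsoft/phi-2",
    exporter="onnx-dynamo",
    optimization="ir",        # "" / "ir" / "os_ort" for the ONNX exporters (see the assert in the next hunk)
    dump_folder="dump_test",
    subfolder=None,           # new in 0.4.4, forwarded to get_untrained_model_with_inputs
    verbose=1,
)
print(summary["model_id"], summary["model_subfolder"])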
@@ -917,11 +928,10 @@ def call_torch_export_onnx(
     :return: two dictionaries, one with some metrics,
         another one with whatever the function produces
     """
-
-
-
-
-    }, f"unexpected value for optimization={optimization}"
+    available = {"", "ir", "os_ort"}
+    assert (
+        optimization in available
+    ), f"unexpected value for optimization={optimization}, available={available}"
     assert exporter in {
         "onnx-dynamo",
         "onnx-script",
@@ -1001,16 +1011,25 @@ def call_torch_export_onnx(
         print(epo)
         print("[call_torch_export_onnx] -- End of ONNXProgram")

-    if optimization
+    if optimization in {"ir", "os_ort"}:
         if verbose:
             print(f"[call_torch_export_onnx] starts optimization={optimization!r}...")
-
-
-
-
-
-
-
+        if optimization == "ir":
+            label, f_optim = "export_onnx_opt_ir", (lambda epo=epo: epo.optimize())
+        else:
+
+            def _os_ort_optim(epo):
+                onnxscript.optimizer.optimize_ir(epo.model)
+                optimized = ort_fusions.optimize_for_ort(epo.model)
+                if isinstance(optimized, tuple):
+                    for k, v in optimized[1].items():
+                        summary[f"op_opt_fused_{k}"] = v
+                    epo.model = optimized[0]
+                else:
+                    epo.model = optimized
+
+            label, f_optim = "export_onnx_opt_os_ort", (lambda epo=epo: _os_ort_optim(epo))
+        _quiet_or_not_quiet(quiet, label, summary, data, f_optim)
     if "ERR_export_onnx_opt_ir" in summary:
         return summary, data
     if verbose:
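Outside of the helper, the new "os_ort" branch amounts to two onnxscript calls applied to the ONNXProgram returned by the dynamo exporter. A self-contained sketch on a toy module; the toy model and file name are placeholders, and whether optimize_for_ort also returns a fusion report depends on the onnxscript version, which is why the code above checks for a tuple:

# Hedged sketch of what the "os_ort" path does, applied to a toy model.
import torch
import onnxscript
import onnxscript.rewriter.ort_fusions as ort_fusions


class Tiny(torch.nn.Module):
    def forward(self, x):
        return torch.nn.functional.relu(x) + 1


epo = torch.onnx.export(Tiny(), (torch.randn(2, 4),), dynamo=True)  # returns an ONNXProgram
onnxscript.optimizer.optimize_ir(epo.model)            # generic IR-level optimization
optimized = ort_fusions.optimize_for_ort(epo.model)    # fuse patterns onnxruntime executes natively
if isinstance(optimized, tuple):                       # some versions also return fusion counts
    epo.model = optimized[0]
else:
    epo.model = optimized
epo.save("tiny.onnx")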
@@ -1039,21 +1058,27 @@ def call_torch_export_custom(
     :return: two dictionaries, one with some metrics,
         another one with whatever the function produces
     """
-
+    available = {
         "",
         "default",
         "default+onnxruntime",
+        "default+os_ort",
+        "default+onnxruntime+os_ort",
         None,
-    }
-    assert
+    }
+    assert (
+        optimization in available
+    ), f"unexpected value for optimization={optimization}, available={available}"
+    available = {
         "custom",
         "custom-strict",
-        "custom-strict-
+        "custom-strict-default",
         "custom-strict-all",
         "custom-nostrict",
-        "custom-nostrict-
+        "custom-nostrict-default",
         "custom-nostrict-all",
-    }
+    }
+    assert exporter in available, f"Unexpected value for exporter={exporter!r} in {available}"
     assert "model" in data, f"model is missing from data: {sorted(data)}"
     assert "inputs_export" in data, f"inputs_export is missing from data: {sorted(data)}"
     summary: Dict[str, Union[str, int, float]] = {}
@@ -1078,10 +1103,14 @@ def call_torch_export_custom(
     from experimental_experiment.torch_interpreter import to_onnx, ExportOptions
     from experimental_experiment.xbuilder import OptimizationOptions

+    spl = optimization.split("+") if optimization else []
+    os_ort = "os_ort" in spl
+    optimization = "+".join(_ for _ in spl if _ != "os_ort")
+
     export_options = ExportOptions(
         strict=strict,
         decomposition_table=(
-            "
+            "default" if "-default" in exporter else ("all" if "-all" in exporter else None)
         ),
     )
     options = OptimizationOptions(patterns=optimization) if optimization else None
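The three added lines above simply strip the new "os_ort" token out of the optimization string before it is handed to OptimizationOptions; the boolean is then used after the export in the next hunk. The same logic in isolation, with a sample value:

# Mirrors the three lines added in the hunk above, outside of any helper.
optimization = "default+onnxruntime+os_ort"
spl = optimization.split("+") if optimization else []
os_ort = "os_ort" in spl                                  # True -> run the onnxscript/ort fusions afterwards
optimization = "+".join(_ for _ in spl if _ != "os_ort")
print(os_ort, optimization)                               # True default+onnxruntime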
@@ -1181,6 +1210,31 @@ def call_torch_export_custom(
     assert epo is not None, "no onnx export was found"
     if verbose:
         print("[call_torch_export_custom] done (export)")
+
+    if os_ort:
+        if verbose:
+            print("[call_torch_export_custom] conversion to IR...")
+        begin = time.perf_counter()
+        ir_model = epo.to_ir()
+        duration = time.perf_counter() - begin
+        summary["time_optim_to_ir"] = duration
+        if verbose:
+            print(f"[call_torch_export_custom] done in {duration}")
+            print("[call_torch_export_custom] start optimization...")
+        begin = time.perf_counter()
+        onnxscript.optimizer.optimize_ir(ir_model)
+        ir_optimized = ort_fusions.optimize_for_ort(ir_model)
+        if isinstance(ir_optimized, tuple):
+            report = ir_optimized[1]
+            for k, v in report.items():
+                summary[f"op_opt_fused_{k}"] = v
+            ir_optimized = ir_optimized[0]
+        epo.model = ir_optimized
+        duration = time.perf_counter() - begin
+        summary["time_optim_os_ort"] = duration
+        if verbose:
+            print(f"[call_torch_export_custom] done in {duration}")
+
     data["onnx_program"] = epo
     return summary, data

--- a/onnx_diagnostic-0.4.2.dist-info/METADATA
+++ b/onnx_diagnostic-0.4.4.dist-info/METADATA
@@ -1,6 +1,6 @@
 Metadata-Version: 2.4
 Name: onnx-diagnostic
-Version: 0.4.2
+Version: 0.4.4
 Summary: Investigate ONNX models
 Home-page: https://github.com/sdpython/onnx-diagnostic
 Author: Xavier Dupré
@@ -67,13 +67,13 @@ it helps exporting **pytorch models into ONNX**, mostly designed for LLMs using

 .. code-block:: python

-    with
+    with torch_export_patches(patch_transformers=True) as f:
         ep = torch.export.export(model, args, kwargs=kwargs, dynamic_shapes=dynamic_shapes)
         # ...

 It also implements tools to investigate, validate exported models (ExportedProgramm, ONNXProgram, ...).
 See `documentation of onnx-diagnostic <https://sdpython.github.io/doc/onnx-diagnostic/dev/>`_ and
-`
+`torch_export_patches <https://sdpython.github.io/doc/onnx-diagnostic/dev/api/torch_export_patches/index.html#onnx_diagnostic.torch_export_patches.torch_export_patches>`_.

 Getting started
 +++++++++++++++
@@ -93,6 +93,11 @@ or
 Enlightening Examples
 +++++++++++++++++++++

+**Where to start to export a model**
+
+* `Export microsoft/phi-2
+  <https://sdpython.github.io/doc/onnx-diagnostic/dev/auto_examples/plot_export_tiny_phi2.html>`_
+
 **Torch Export**

 * `Use DYNAMIC or AUTO when exporting if dynamic shapes has constraints
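The README snippet patched above is abbreviated; a fuller sketch combining it with the helper changed earlier in this diff might look as follows. Hedged: the dictionary keys, the model id, and ignoring the context manager's return value are assumptions on top of what the diff shows.

# Hedged sketch expanding the README snippet; not taken verbatim from the documentation.
import torch
from onnx_diagnostic.torch_export_patches import torch_export_patches
from onnx_diagnostic.torch_models.hghub.model_inputs import get_untrained_model_with_inputs

data = get_untrained_model_with_inputs("microsoft/phi-2")   # untrained copy plus dummy inputs
with torch_export_patches(patch_transformers=True):         # patches transformers before exporting
    ep = torch.export.export(
        data["model"],
        (),
        kwargs=data["inputs"],
        dynamic_shapes=data["dynamic_shapes"],
    )
print(ep)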
--- a/onnx_diagnostic-0.4.2.dist-info/RECORD
+++ b/onnx_diagnostic-0.4.4.dist-info/RECORD
@@ -1,6 +1,6 @@
-onnx_diagnostic/__init__.py,sha256=
+onnx_diagnostic/__init__.py,sha256=VQWqNIBvo3L_1EFs9AQFHV8psqXB7jqUazJt4CrKt04,164
 onnx_diagnostic/__main__.py,sha256=YmyV_Aq_ianDlHyKLHMa6h8YK3ZmFPpLVHLKjM91aCk,79
-onnx_diagnostic/_command_lines_parser.py,sha256=
+onnx_diagnostic/_command_lines_parser.py,sha256=hwh-o6DfkO9Oj1TNqC2RGgfO_lXnZlyobH16Kzffyx4,14917
 onnx_diagnostic/doc.py,sha256=MTuT7Kxyvn7KEy84liQeFeqhugJrUQhjjpx21F72Uxw,926
 onnx_diagnostic/ext_test_case.py,sha256=JTKlAXZL0EIzRCNUjP2VWHq7g9ueFSFd18JVrJY9SFM,41404
 onnx_diagnostic/export/__init__.py,sha256=yEIoWiOeTwBsDhyYt2fTKuhtA0Ya1J9u9ZzMTOTWaWs,101
@@ -49,40 +49,42 @@ onnx_diagnostic/reference/ops/op_skip_layer_normalization.py,sha256=oJ7fQNx2iQh9
 onnx_diagnostic/reference/ops/op_slice.py,sha256=yRxfYBs8b7QezyyG9JHCD8MIJHij2qR2NNDpBmD3FJI,705
 onnx_diagnostic/reference/ops/op_transpose_cast.py,sha256=ifef74rvh0Yvq1Zx51B4mfnISbxV9uRg9DFjkdL1_68,361
 onnx_diagnostic/reference/ops/op_tri_matrix.py,sha256=Yn2gxAyygcwtF5Hjau9ihXDAzul0BAkdqVimVahtFBU,519
-onnx_diagnostic/tasks/__init__.py,sha256=
+onnx_diagnostic/tasks/__init__.py,sha256=G9q-yZf-bU8udb7YtwPXf4bkfZh0lEXXzSKcG2HKvEU,1610
 onnx_diagnostic/tasks/automatic_speech_recognition.py,sha256=oRoYy56M0Yv_WOcn1hJXv-R9wgHkJ8rbym7j7y8oslw,6851
 onnx_diagnostic/tasks/feature_extraction.py,sha256=V-T5NpZ6EimOz00weWWxGfksZ9jQ5ZQyaP-mxuCEuJo,2223
 onnx_diagnostic/tasks/fill_mask.py,sha256=POUtgvOWv8wTOVLqxPNsj_C2WBiBWkmM72Z9mNlNqxI,2341
-onnx_diagnostic/tasks/image_classification.py,sha256=
+onnx_diagnostic/tasks/image_classification.py,sha256=qgT9tbXby3dACZyXXjvfpm0a7-ey2-vxMCXtjoDusJw,4210
 onnx_diagnostic/tasks/image_text_to_text.py,sha256=6rKbts_p05VZL8wufJa6NP-MhxUOU-fuTAks5QfUVVQ,6037
 onnx_diagnostic/tasks/mixture_of_expert.py,sha256=orMx8Ly4DO0Po0tEmme4gi2flPIGip4TaAyxVik4Zgg,2685
+onnx_diagnostic/tasks/object_detection.py,sha256=o1T8NMztjdFAFA-Z5efx-8nd9W7YZZcbE8Ag5wKVxZA,3930
 onnx_diagnostic/tasks/sentence_similarity.py,sha256=okQ-TQR8j1a92_N-eT6xN56rjtu26CdlU_pk88gdbGs,2356
 onnx_diagnostic/tasks/text2text_generation.py,sha256=jaJLQqKk38mAop7O3zCFQjUvmYmFTYWDVgzT7dSJPOw,7032
 onnx_diagnostic/tasks/text_classification.py,sha256=OgC_G9iumzTjTNUEvMoFFNTHCD8_BkdvdYC4jUsfpHM,2412
-onnx_diagnostic/tasks/text_generation.py,sha256=
+onnx_diagnostic/tasks/text_generation.py,sha256=Wv8DamBHte355wXe_tAeVxG4EL20y86fu7JEmUM75to,10385
 onnx_diagnostic/tasks/zero_shot_image_classification.py,sha256=N3cEG1Lq95wS1N_CWUUUCU5j-4Tp5eR8Ce68U8THYAk,4380
-onnx_diagnostic/torch_export_patches/__init__.py,sha256=
-onnx_diagnostic/torch_export_patches/onnx_export_errors.py,sha256=
+onnx_diagnostic/torch_export_patches/__init__.py,sha256=SGR2aCWfxl8--rBF3_Zm0h6ovGyp8HknhkUsS1NyVNM,675
+onnx_diagnostic/torch_export_patches/onnx_export_errors.py,sha256=9WQUBAp5okQL9aJJKyp23ZumBnKt-qahcr94a9MYWxA,16083
 onnx_diagnostic/torch_export_patches/onnx_export_serialization.py,sha256=1s1LqgqOL_hV6yqT7sgxzTKSDAL267CcZgNq8K4oTZM,14898
 onnx_diagnostic/torch_export_patches/patch_inputs.py,sha256=FQrMjwvEgPqvYY7ptfULzfexW5yJHo6Pzq_p1HDkNrY,7680
+onnx_diagnostic/torch_export_patches/patch_module.py,sha256=66DXLeblDw5oLFJ-tGJYJzso4V2_q-96pQXR3L9nsvM,10891
 onnx_diagnostic/torch_export_patches/patches/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
 onnx_diagnostic/torch_export_patches/patches/patch_torch.py,sha256=TKLxrIJUrQsy0mbQwFcvn5e2l_s4OGKu5jnwWDHC6LE,15670
 onnx_diagnostic/torch_export_patches/patches/patch_transformers.py,sha256=exiIq8zNZsY6QTzZVDMgU2ywGzs6-54Ic4vzTQ-26YQ,21863
 onnx_diagnostic/torch_models/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
 onnx_diagnostic/torch_models/llms.py,sha256=soyg4yC87ptGoeulJhKqw5opGmuLvH1pn_ZDXZ4Jr8E,90
-onnx_diagnostic/torch_models/test_helper.py,sha256=
+onnx_diagnostic/torch_models/test_helper.py,sha256=odRN3eIrK3kkvJCWTnZruUZGqGZrKR6r_Dp0dXQRDbw,49091
 onnx_diagnostic/torch_models/hghub/__init__.py,sha256=vi1Q7YHdddj1soiBN42MSvJdFqe2_KUoWafHISjwOu8,58
-onnx_diagnostic/torch_models/hghub/hub_api.py,sha256=
-onnx_diagnostic/torch_models/hghub/hub_data.py,sha256=
-onnx_diagnostic/torch_models/hghub/hub_data_cached_configs.py,sha256=
-onnx_diagnostic/torch_models/hghub/model_inputs.py,sha256=
+onnx_diagnostic/torch_models/hghub/hub_api.py,sha256=BgM_p57Q0gT9GOhdrmOYcnbuTTzCWp80jS4OQqWwFhs,9990
+onnx_diagnostic/torch_models/hghub/hub_data.py,sha256=LLXq4KIYdz80JpRTV_liHNkJu6qBjdC2cH6E4v98yGg,7972
+onnx_diagnostic/torch_models/hghub/hub_data_cached_configs.py,sha256=dYL4E7-12JwhZQKuNhoDT1avb7V3Go5nZyaoPCGR92o,251610
+onnx_diagnostic/torch_models/hghub/model_inputs.py,sha256=XIFyadFg2cbDSqOCFMm7aimh15PVp7kxOnZ_oVhnjps,6770
 onnx_diagnostic/torch_models/untrained/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
 onnx_diagnostic/torch_models/untrained/llm_phi2.py,sha256=ynBTDHJHCk44NjLT_t6OiFDBdPP0rFGPteiONDxvztw,3708
 onnx_diagnostic/torch_models/untrained/llm_tiny_llm.py,sha256=7N3fGvT_4Mn4NbIo0Qk57c6DMc3OXGWyvj_P41rjwSY,3513
 onnx_diagnostic/torch_onnx/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
 onnx_diagnostic/torch_onnx/sbs.py,sha256=HEGDHhV9pfXxpBQrpOWPNWGMsNfOebWewyAazi9poV8,16872
-onnx_diagnostic-0.4.
-onnx_diagnostic-0.4.
-onnx_diagnostic-0.4.
-onnx_diagnostic-0.4.
-onnx_diagnostic-0.4.
+onnx_diagnostic-0.4.4.dist-info/licenses/LICENSE.txt,sha256=Vv6TXglX6Rc0d-f8aREhayhT-6PMQXEyOmI2NKlUCMc,1045
+onnx_diagnostic-0.4.4.dist-info/METADATA,sha256=9c5HUQruT58E_auU0qmqSKxoJMCEKz_MU3dGvhIi_G8,5659
+onnx_diagnostic-0.4.4.dist-info/WHEEL,sha256=wXxTzcEDnjrTwFYjLPcsW_7_XihufBwmpiBeiXNBGEA,91
+onnx_diagnostic-0.4.4.dist-info/top_level.txt,sha256=KwNkXewmcobM3ZT1DJLVWH6ebJzA5qKg7cWqKfpGNT4,16
+onnx_diagnostic-0.4.4.dist-info/RECORD,,