onnx-diagnostic 0.8.5__py3-none-any.whl → 0.8.6__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
@@ -55,6 +55,7 @@ Automatically generated:
55
55
  import base64
56
56
  import json
57
57
  import textwrap
58
+ from typing import Any
58
59
  import transformers
59
60
 
60
61
  null = None
@@ -62,6 +63,22 @@ true = True
62
63
  false = False
63
64
 
64
65
 
66
+ def _enforce_default(config_type: type, **kwargs) -> Any:
67
+ config = config_type(**kwargs)
68
+ for name in [
69
+ *[k for k in kwargs if k.endswith("_token_id")],
70
+ "attention_dropout",
71
+ "hidden_size",
72
+ "hidden_act",
73
+ "intermediate_size",
74
+ "max_position_embeddings",
75
+ "vocab_size",
76
+ ]:
77
+ if name in kwargs and (not hasattr(config, name) or getattr(config, name) is None):
78
+ setattr(config, name, kwargs[name])
79
+ return config
80
+
81
+
65
82
  def _ccached_arnir0_tiny_LLM():
66
83
  "arnir0/Tiny-LLM"
67
84
  return transformers.LlamaConfig(
@@ -4691,7 +4708,8 @@ def _ccached_zai_glm_45():
4691
4708
 
4692
4709
  def _ccached_microsoft_phi3_mini_128k_instruct():
4693
4710
  "microsoft/Phi-3-mini-128k-instruct"
4694
- return transformers.Phi3Config(
4711
+ return _enforce_default(
4712
+ transformers.Phi3Config,
4695
4713
  **{
4696
4714
  "_name_or_path": "Phi-3-mini-128k-instruct",
4697
4715
  "architectures": ["Phi3ForCausalLM"],
@@ -4827,13 +4845,14 @@ def _ccached_microsoft_phi3_mini_128k_instruct():
4827
4845
  "use_cache": true,
4828
4846
  "attention_bias": false,
4829
4847
  "vocab_size": 32064,
4830
- }
4848
+ },
4831
4849
  )
4832
4850
 
4833
4851
 
4834
4852
  def _ccached_google_gemma_3_4b_it_like():
4835
4853
  "google/gemma-3-4b-it"
4836
- return transformers.Gemma3Config(
4854
+ return _enforce_default(
4855
+ transformers.Gemma3Config,
4837
4856
  **{
4838
4857
  "architectures": ["Gemma3ForConditionalGeneration"],
4839
4858
  "boi_token_index": 255999,
@@ -4863,13 +4882,14 @@ def _ccached_google_gemma_3_4b_it_like():
4863
4882
  "patch_size": 14,
4864
4883
  "vision_use_head": false,
4865
4884
  },
4866
- }
4885
+ },
4867
4886
  )
4868
4887
 
4869
4888
 
4870
4889
  def _ccached_hf_internal_testing_tiny_random_gemma3_for_causal_lm():
4871
4890
  "hf-internal-testing/tiny-random-Gemma3ForCausalLM"
4872
- return transformers.Gemma3TextConfig(
4891
+ return _enforce_default(
4892
+ transformers.Gemma3TextConfig,
4873
4893
  **{
4874
4894
  "architectures": ["Gemma3ForCausalLM"],
4875
4895
  "attention_bias": false,
@@ -4901,13 +4921,14 @@ def _ccached_hf_internal_testing_tiny_random_gemma3_for_causal_lm():
4901
4921
  "transformers_version": "4.52.0.dev0",
4902
4922
  "use_cache": true,
4903
4923
  "vocab_size": 262144,
4904
- }
4924
+ },
4905
4925
  )
4906
4926
 
4907
4927
 
4908
4928
  def _ccached_qwen_qwen2_5_vl_7b_instruct():
4909
4929
  "Qwen/Qwen2.5-VL-7B-Instruct"
4910
- return transformers.Qwen2_5_VLConfig(
4930
+ return _enforce_default(
4931
+ transformers.Qwen2_5_VLConfig,
4911
4932
  **{
4912
4933
  "architectures": ["Qwen2_5_VLForConditionalGeneration"],
4913
4934
  "attention_dropout": 0.0,
@@ -4954,5 +4975,5 @@ def _ccached_qwen_qwen2_5_vl_7b_instruct():
4954
4975
  },
4955
4976
  "rope_scaling": {"type": "mrope", "mrope_section": [16, 24, 24]},
4956
4977
  "vocab_size": 152064,
4957
- }
4978
+ },
4958
4979
  )
@@ -0,0 +1,357 @@
1
+ import enum
2
+ from dataclasses import dataclass
3
+ from typing import Dict, List, Optional, Tuple, Union
4
+ import numpy as np
5
+ import onnx
6
+ from ..helpers.onnx_helper import onnx_dtype_name
7
+
8
+
9
# Groups of operator types considered "close" when aligning two models:
# op types within the same set (e.g. MatMul and its fused variants) receive
# a reduced mismatch cost in ObsCompare.distance.
_NOT_SO_FAR_OPS = [
    {"MatMul", "Gemm", "FusedMatMul"},
    {"Conv", "FusedConv"},
    {"MaxPool"},
]
14
+
15
+
16
+ def _sum_sets(sets):
17
+ t = set()
18
+ for s in sets:
19
+ t |= s
20
+ return t
21
+
22
+
23
+ _ALL_NOT_SO_FAR_OPS = _sum_sets(_NOT_SO_FAR_OPS)
24
+
25
+
26
+ def _align(res: str, limit: int) -> str:
27
+ if len(res) == limit:
28
+ return res
29
+ if len(res) > limit:
30
+ return res[:limit]
31
+ return res + " " * (limit - len(res))
32
+
33
+
34
+ class ObsType(enum.IntEnum):
35
+ """Observation kind."""
36
+
37
+ RESULT = 1
38
+ INITIALIZER = 2
39
+ SPARSE_INITIALIZER = 4
40
+ INPUT = 8
41
+ OUTPUT = 16
42
+ NODE = 32
43
+
44
+ def __repr__(self):
45
+ return f"{self.__class__.__name__}.{self._name_}"
46
+
47
+
48
@dataclass
class ObsCompare:
    """
    The description of an observation, a node, an input, an output, an initializer.

    :param position: index of this observation in the original model
    :param kind: node type, see :class:`ObsType`
    :param name_or_outputs: name of an initializer or the outputs of a node
    :param itype: onnx type
    :param index: index of an input, output or node
    :param shape: shape
    :param op_type: node op_type
    :param comment: comment, unused
    """

    position: int
    kind: ObsType
    name_or_outputs: Tuple[str]
    itype: int = 0
    index: int = 0
    shape: Optional[Tuple[Tuple[Union[int, str], ...]]] = None
    op_type: str = ""
    comment: str = ""

    def __str__(self) -> str:
        "usual"
        els = [
            _align(f"{self.position:04d}", 4),
            _align(self.kind._name_, 6),
            _align(onnx_dtype_name(self.itype) if self.itype else "?", 8),
            _align("?" if self.shape is None else "x".join(map(str, self.shape)), 18),
            _align(self.op_type or "", 15),
            _align(", ".join(self.name_or_outputs), 35),
        ]
        return " ".join(els)

    @classmethod
    def to_str(cls, obs: Optional["ObsCompare"]) -> str:
        """Renders *obs*, or a blank line of the same width when *obs* is None."""
        assert not obs or isinstance(obs, ObsCompare), f"unexpected type {type(obs)}"
        if obs:
            return str(obs)
        # Column widths used by __str__ plus the 5 separating spaces.
        return " " * (4 + 6 + 8 + 18 + 15 + 35 + 5)

    def distance(self, obs: "ObsCompare") -> float:
        """Computes a cost between two observations (0 means identical enough)."""
        if self.kind != obs.kind:
            # Different kinds never align.
            return 1e6
        d: float = 0
        if self.itype != obs.itype:
            d += 1e5
        if self.kind == ObsType.NODE:
            # `cost` is the penalty applied to a name mismatch; it is lowered
            # when both op_types belong to the same "close" group.
            cost = 9997
            d = 0
            if self.op_type != obs.op_type:
                if self.op_type in _ALL_NOT_SO_FAR_OPS or obs.op_type in _ALL_NOT_SO_FAR_OPS:
                    d += 1e2
                    for aset in _NOT_SO_FAR_OPS:
                        if self.op_type in aset and obs.op_type in aset:
                            cost = 97
                        elif self.op_type in aset or obs.op_type in aset:
                            d += 5e4
                else:
                    d += 9e2
            if len(self.name_or_outputs) == 1 and len(obs.name_or_outputs) == 1:
                if self.name_or_outputs[0] != obs.name_or_outputs[0]:
                    raw1 = self.name_or_outputs[0]
                    raw2 = obs.name_or_outputs[0]
                    # Compare the names ignoring underscores.
                    n1 = raw1.replace("_", "")
                    n2 = raw2.replace("_", "")
                    if n1 == n2:
                        d += 1
                    # FIX: prefix checks must look at the original names,
                    # the stripped ones can never start with "val_"/"_onx_".
                    elif (raw1.startswith(("val_", "_onx_")) or "::" in raw1 or "--" in raw1) and (
                        raw2.startswith(("val_", "_onx_")) or "::" in raw2 or "--" in raw2
                    ):
                        # These are names given by the exporter
                        # and not inspired from the model itself.
                        d += cost / 100
                    else:
                        d += cost
            else:
                # Multiple outputs: penalize proportionally to the symmetric
                # difference between the two output-name sets.
                a = set(self.name_or_outputs) & set(obs.name_or_outputs)
                b = set(self.name_or_outputs) | set(obs.name_or_outputs)
                d += cost * (len(b) - len(a))
            return d
        if self.kind == ObsType.INPUT:
            return (
                999.7
                if self.itype != obs.itype
                or self.shape != obs.shape
                or self.index != obs.index
                else 0
            )
        if self.kind == ObsType.INITIALIZER or self.kind == ObsType.SPARSE_INITIALIZER:
            return 1e3 if self.itype != obs.itype or self.shape != obs.shape else 0
        if self.kind == ObsType.OUTPUT:
            return (
                999.1
                if self.itype != obs.itype
                or self.shape != obs.shape
                or self.index != obs.index
                else 0
            )
        return 1e8

    @classmethod
    def obs_sequence_from_model(
        cls,
        model: Union[onnx.ModelProto, onnx.GraphProto],
    ) -> List["ObsCompare"]:
        """
        Creates a sequence of observations based on a model.

        :param model: model
        :return: sequence of observations
        """
        graph = model if isinstance(model, onnx.GraphProto) else model.graph

        # Collect known shapes/types so nodes can be annotated with the
        # type/shape of their first output.
        shapes = {}
        types = {}
        for info in [*graph.value_info, *graph.input, *graph.output]:
            if info.type.tensor_type:
                t = info.type.tensor_type
                shapes[info.name] = tuple((d.dim_param or d.dim_value) for d in t.shape.dim)
                types[info.name] = t.elem_type

        seq: List[ObsCompare] = []
        for init in graph.initializer:
            obs = ObsCompare(
                position=len(seq),
                kind=ObsType.INITIALIZER,
                itype=init.data_type,
                shape=tuple(init.dims),
                name_or_outputs=(init.name,),
            )
            seq.append(obs)
        for i, inp in enumerate(graph.input):
            obs = ObsCompare(
                position=len(seq),
                kind=ObsType.INPUT,
                itype=inp.type.tensor_type.elem_type,
                index=i,
                shape=tuple(
                    (d.dim_param or d.dim_value) for d in inp.type.tensor_type.shape.dim
                ),
                name_or_outputs=(inp.name,),
            )
            seq.append(obs)
        # FIX: the original loop reused the stale ``i`` left over from the
        # input loop (NameError when the graph has no inputs, wrong index
        # otherwise); enumerate the nodes instead.
        for i, node in enumerate(graph.node):
            obs = ObsCompare(
                position=len(seq),
                kind=ObsType.NODE,
                itype=types.get(node.output[0], 0),
                index=i,
                shape=shapes.get(node.output[0], None),
                name_or_outputs=tuple(node.output),
                op_type=node.op_type,
            )
            seq.append(obs)
        for i, inp in enumerate(graph.output):
            obs = ObsCompare(
                position=len(seq),
                kind=ObsType.OUTPUT,
                itype=inp.type.tensor_type.elem_type,
                index=i,
                shape=tuple(
                    (d.dim_param or d.dim_value) for d in inp.type.tensor_type.shape.dim
                ),
                name_or_outputs=(inp.name,),
            )
            seq.append(obs)
        return seq
219
+
220
+
221
@dataclass
class ObsComparePair:
    """
    Defines a pair of comparison objects

    :param side1: object from first side
    :param side2: object from second side
    :param distance: distance
    """

    side1: Optional[ObsCompare]
    side2: Optional[ObsCompare]
    distance: float

    def __str__(self) -> str:
        "nice display"
        return (
            f"{self.distance:.4e} | "
            f"{ObsCompare.to_str(self.side1)} | {ObsCompare.to_str(self.side2)}"
        )

    @classmethod
    def to_str(cls, seq: List["ObsComparePair"]) -> str:
        """Displays every pair in text."""
        return "\n".join([f"{str(pair)}" for pair in seq])

    @classmethod
    def distance_sequence(cls, s1: List["ObsCompare"], s2: List["ObsCompare"]) -> Tuple[
        float,
        List[Tuple[int, int]],
        List["ObsComparePair"],
    ]:
        """
        Computes the distance between two sequences of results.

        :param s1: first sequence
        :param s2: second sequence
        :return: distance and alignment

        An example:

        .. runpython::
            :showcode:

            import torch
            from onnx_diagnostic.export.api import to_onnx
            from onnx_diagnostic.torch_onnx.compare import ObsComparePair, ObsCompare


            class Model(torch.nn.Module):
                def __init__(self):
                    super().__init__()
                    self.conv1 = torch.nn.Conv2d(3, 16, 5)
                    self.fc1 = torch.nn.Linear(144, 64)
                    self.fc2 = torch.nn.Linear(64, 128)
                    self.fc3 = torch.nn.Linear(128, 10)

                def forward(self, x):
                    x = torch.nn.functional.max_pool2d(
                        torch.nn.functional.relu(self.conv1(x)),
                        (4, 4),
                    )
                    # x = F.max_pool2d(F.relu(self.conv2(x)), 2)
                    x = torch.flatten(x, 1)
                    x = torch.nn.functional.relu(self.fc1(x))
                    x = torch.nn.functional.relu(self.fc2(x))
                    y = self.fc3(x)
                    return y


            model = Model()
            x = torch.randn((2, 3, 16, 17), dtype=torch.float32)
            dynamic_shapes = ({0: "batch", 3: "dim"},)
            onnx_optimized = to_onnx(
                model, (x,), dynamic_shapes=dynamic_shapes, exporter="custom", optimize=True
            ).model_proto
            onnx_not_optimized = to_onnx(
                model, (x,), dynamic_shapes=dynamic_shapes, exporter="custom", optimize=False
            ).model_proto
            seq1 = ObsCompare.obs_sequence_from_model(onnx_not_optimized)
            seq2 = ObsCompare.obs_sequence_from_model(onnx_optimized)
            _dist, _path, pair_cmp = ObsComparePair.distance_sequence(seq1, seq2)
            text = ObsComparePair.to_str(pair_cmp)
            print(text)
        """
        # Banded edit-distance DP: only cells with |i - j| < delay are filled,
        # which keeps the cost roughly linear for sequences of similar length.
        delay = max(50, abs(len(s2) - len(s1)) + 1)
        # distance[(i, j)] = best cost aligning s1[..i] with s2[..j];
        # (-1, -1) is the virtual start cell.
        distance: Dict[Tuple[int, int], Union[int, float]] = {(-1, -1): 0}
        predecessor: Dict[Tuple[int, int], Optional[Tuple[int, int]]] = {(-1, -1): None}
        insert_cost = 1e3
        for i in range(len(s1)):
            for j in range(max(0, i - delay), min(len(s2), i + delay)):
                best = distance.get((i, j), 1e100)
                pred = None
                # Diagonal move: match s1[i] with s2[j].
                ki, kj = i - 1, j - 1
                if (ki, kj) in distance:
                    d = distance[ki, kj] + s1[i].distance(s2[j])
                    if d < best:
                        best = d
                        pred = (ki, kj)
                # Vertical move: s1[i] unmatched (insertion on side 1).
                ki, kj = i - 1, j
                if (ki, kj) in distance:
                    d = distance[ki, kj] + insert_cost + 1
                    if d < best:
                        best = d
                        pred = (ki, kj)
                # Horizontal move: s2[j] unmatched (insertion on side 2,
                # slightly cheaper than side 1 to break ties).
                ki, kj = i, j - 1
                if (ki, kj) in distance:
                    d = distance[ki, kj] + insert_cost + 0.1
                    if d < best:
                        best = d
                        pred = (ki, kj)
                distance[i, j] = best
                predecessor[i, j] = pred

        # reverse: walk predecessors back from the final cell to rebuild the path.
        way = []
        last: Optional[Tuple[int, int]] = len(s1) - 1, len(s2) - 1
        while last is not None:
            way.append(last)
            last = predecessor[last]
        # Drop the virtual (-1, -1) start cell.
        indices = list(reversed(way))[1:]
        obs_path: List[ObsComparePair] = []
        last = -1, -1
        for i, j in indices:
            # The step sizes tell which move produced this cell.
            di = i - last[0]
            dj = j - last[1]
            cost = distance.get((i, j), np.nan)
            if di == dj == 1:
                obs_path.append(ObsComparePair(s1[i], s2[j], distance=cost))
            elif di == 0:
                obs_path.append(ObsComparePair(None, s2[j], distance=cost))
            elif dj == 0:
                obs_path.append(ObsComparePair(s1[i], None, distance=cost))
            else:
                raise RuntimeError(f"issue with di={di}, dj={dj}")
            last = i, j
        return distance[len(s1) - 1, len(s2) - 1], indices, obs_path
@@ -1,6 +1,6 @@
1
1
  Metadata-Version: 2.4
2
2
  Name: onnx-diagnostic
3
- Version: 0.8.5
3
+ Version: 0.8.6
4
4
  Summary: Tools to help converting pytorch models into ONNX.
5
5
  Home-page: https://github.com/sdpython/onnx-diagnostic
6
6
  Author: Xavier Dupré
@@ -1,14 +1,16 @@
1
- onnx_diagnostic/__init__.py,sha256=dCiBK_S7EOo_rAsmsgv-laLhtKzE2uny0XIR5aO4eDk,173
1
+ onnx_diagnostic/__init__.py,sha256=YQit5D2idhb9-wNQZzvWLT_qwRrKWBoTqMpNlBaWsGw,173
2
2
  onnx_diagnostic/__main__.py,sha256=YmyV_Aq_ianDlHyKLHMa6h8YK3ZmFPpLVHLKjM91aCk,79
3
- onnx_diagnostic/_command_lines_parser.py,sha256=ZFJdQP1Ee8D5a_xUch-0CHaYbbILztejTjVdyc9KrMw,52667
3
+ onnx_diagnostic/_command_lines_parser.py,sha256=AWT6XrphbR0C0w9J846jPcRWkoUtnSSAX7gdR-JavQ4,54258
4
4
  onnx_diagnostic/api.py,sha256=BhCl_yCd78N7TlVtPOHjeYv1QBEy39TjZ647rcHqLh0,345
5
5
  onnx_diagnostic/doc.py,sha256=t3RELgfooYnVMAi0JSpggWkQEgUsREz8NmRvn0TnLI8,2829
6
- onnx_diagnostic/ext_test_case.py,sha256=rVZWqFEfnvwnsD3wF4jeDblh5uj5ckZ8C6DZQ0RGb_E,49599
6
+ onnx_diagnostic/ext_test_case.py,sha256=KxRC6s9107hYvNgU9x2B85rj8_EhAtymPIlMpmkUNu8,50154
7
+ onnx_diagnostic/ci_models/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
8
+ onnx_diagnostic/ci_models/ci_helpers.py,sha256=6CKQ4dVHBHeF6rN_Q3Y_0ZFeLYbYQbGQO3YyW3PQyAc,15341
9
+ onnx_diagnostic/ci_models/export_qwen25_vl.py,sha256=pyxmMIps9aDNkXzDAFrG8Q9DDOsEyKRHjoVvAggjFdU,20050
7
10
  onnx_diagnostic/export/__init__.py,sha256=yEIoWiOeTwBsDhyYt2fTKuhtA0Ya1J9u9ZzMTOTWaWs,101
8
11
  onnx_diagnostic/export/api.py,sha256=BX4c99gMlRYsBWk3P15FMRogArxjP4dXYXP5gILjgIk,10626
9
- onnx_diagnostic/export/control_flow.py,sha256=zU5n_QYhNcBllyMsl1_i6ohZt2CshqG2MokJghrvA60,7751
10
- onnx_diagnostic/export/control_flow_onnx.py,sha256=sODOD4v7EJj6LWhrfcdCW68r9nYKsRM4SRnqDw4TrSI,18049
11
- onnx_diagnostic/export/control_flow_research.py,sha256=RuYz9_eM42Bk6TKSiPV6dS68LIMZu-6WBCFCKoSvjrk,5422
12
+ onnx_diagnostic/export/cf_simple_loop_for.py,sha256=0I1tRAwhmmqA-6Qaq8AiUL0Ci-HODuRAVcI9azNcxAQ,13345
13
+ onnx_diagnostic/export/control_flow_onnx.py,sha256=izGlctqQANrHzSxPMbT7hoauNbnIBdx6hb8ry7HtVmM,18263
12
14
  onnx_diagnostic/export/dynamic_shapes.py,sha256=M2hlpHSTbkzZwGKAbrpQXng5HQrwjF5Z6wGGxEgnp74,42061
13
15
  onnx_diagnostic/export/onnx_plug.py,sha256=U13fL0BjnhMzcDGxaAOqM4TQte5Z4zKDg4ESS0iktjM,22704
14
16
  onnx_diagnostic/export/shape_helper.py,sha256=m628y0oRCQbeZkeh8JDHIfWMsSjoJoeX-IPiPGDHT-w,11273
@@ -100,7 +102,7 @@ onnx_diagnostic/tasks/zero_shot_image_classification.py,sha256=jJCMWuOqGv5ahCfjr
100
102
  onnx_diagnostic/tasks/data/__init__.py,sha256=uJoemrWgEjI6oA-tMX7r3__x-b3siPmkgqaY7bgIles,401
101
103
  onnx_diagnostic/tasks/data/dummies_imagetext2text_generation_gemma3.onnx,sha256=UbtvmWMqcZOKJ-I-HXWI1A6YR6QDaFS5u_yXm5C3ZBw,10299
102
104
  onnx_diagnostic/torch_export_patches/__init__.py,sha256=0SaZedwznm1hQUCvXZsGZORV5vby954wEExr5faepGg,720
103
- onnx_diagnostic/torch_export_patches/onnx_export_errors.py,sha256=WPb8Ku643UIV8kDyt9JUpaJBIVXth9UbteCNctd_yis,41863
105
+ onnx_diagnostic/torch_export_patches/onnx_export_errors.py,sha256=OpZHNWiA0iU-6WCFZcVCj06_MopYiZQ6c6CbAuSQ8Ms,42357
104
106
  onnx_diagnostic/torch_export_patches/onnx_export_serialization.py,sha256=0HdubI06EGpxOICqDWZoVmZkVO9gAaFADEmby197EyM,11935
105
107
  onnx_diagnostic/torch_export_patches/patch_details.py,sha256=MSraVo5ngBhihi8ssPMXSY9B4fJ17J-GAADaw3dT-rc,11794
106
108
  onnx_diagnostic/torch_export_patches/patch_expressions.py,sha256=vr4tt61cbDnaaaduzMj4UBZ8OUtr6GfDpIWwOYqjWzs,3213
@@ -119,13 +121,13 @@ onnx_diagnostic/torch_export_patches/patches/_patch_transformers_generation_mixi
119
121
  onnx_diagnostic/torch_export_patches/patches/_patch_transformers_idefics.py,sha256=kTjuTRsfkGGGhspJnMxAMQSchZgGC_IruJzpHh_FmI8,6348
120
122
  onnx_diagnostic/torch_export_patches/patches/_patch_transformers_masking_utils.py,sha256=HE3fovyvMiYe9EPz1UjdD9AWopX3H188SMwPb8w5mzM,7111
121
123
  onnx_diagnostic/torch_export_patches/patches/_patch_transformers_qwen2.py,sha256=OxYdlLrwtd_KGHt3E17poduxvWFg-CfGS57-yN1i6gI,3827
122
- onnx_diagnostic/torch_export_patches/patches/_patch_transformers_qwen2_5.py,sha256=GS7IDHyRaLAsbZE5k7KN-ZT5-ezbmEUzXPJ_xG4SulA,31601
124
+ onnx_diagnostic/torch_export_patches/patches/_patch_transformers_qwen2_5.py,sha256=yALbXWi3ysJ6nzQD-rxTdxdNJiBsTbYEBIj4TdksDOA,34598
123
125
  onnx_diagnostic/torch_export_patches/patches/_patch_transformers_qwen3.py,sha256=cND9Iqo1aKdlX-BXGr9Qlq_Y4EW1L5VWSwZfqYTVazU,4888
124
126
  onnx_diagnostic/torch_export_patches/patches/_patch_transformers_rotary_embedding.py,sha256=4bJ_z2gizZQla_fcCVt0dmuhzO9Vu-D7CCMWdxMlrKM,16893
125
127
  onnx_diagnostic/torch_export_patches/patches/_patch_transformers_sam_mask_decoder.py,sha256=-6TuBm3sLAFEGuW3vRfOTtE5uP6aINFfu7xMnl27Dws,5703
126
128
  onnx_diagnostic/torch_export_patches/patches/patch_helper.py,sha256=kK_CGW643iVXxa-m6pttDBS7HTyMQaPypza7iqIInn4,721
127
129
  onnx_diagnostic/torch_export_patches/patches/patch_torch.py,sha256=FfES0WWiWxmuQbGTlQ7IJS0YBG7km3IQbnMYwk_lPPU,44667
128
- onnx_diagnostic/torch_export_patches/patches/patch_transformers.py,sha256=VAfZj0xu3D4CG71SWv-9sYPUK4ZQTSz2-x4qxP4DxGE,3079
130
+ onnx_diagnostic/torch_export_patches/patches/patch_transformers.py,sha256=Mvq8q1Lz3l3GyCD6j8WQjbrPk_V2dnc4iKm3cC_o1OA,3112
129
131
  onnx_diagnostic/torch_export_patches/serialization/__init__.py,sha256=BHLdRPtNAtNPAS-bPKEj3-foGSPvwAbZXrHzGGPDLEw,1876
130
132
  onnx_diagnostic/torch_export_patches/serialization/diffusers_impl.py,sha256=drq3EH_yjcSuIWYsVeUWm8Cx6YCZFU6bP_1PLtPfY5I,945
131
133
  onnx_diagnostic/torch_export_patches/serialization/transformers_impl.py,sha256=sIHFvUQoMK8ytXQYB-k7OL62z8A3f5uDaq-S5R5uN-M,10034
@@ -136,18 +138,19 @@ onnx_diagnostic/torch_models/validate.py,sha256=fnbTl5v1n5nM2MpmCgCMaWa6c7DGpb5m
136
138
  onnx_diagnostic/torch_models/hghub/__init__.py,sha256=vi1Q7YHdddj1soiBN42MSvJdFqe2_KUoWafHISjwOu8,58
137
139
  onnx_diagnostic/torch_models/hghub/hub_api.py,sha256=rFbiPNLET-KdBpnv-p0nKgwHX6d7C_Z0s9zZ86_92kQ,14307
138
140
  onnx_diagnostic/torch_models/hghub/hub_data.py,sha256=8V_pAgACPLPsLRYUododg7MSL6str-T3tBEGY4OaeYQ,8724
139
- onnx_diagnostic/torch_models/hghub/hub_data_cached_configs.py,sha256=GimzkI8W3guATkDx7RQ-w2xNGVaFDVegfTnnmNxf4iE,292068
141
+ onnx_diagnostic/torch_models/hghub/hub_data_cached_configs.py,sha256=Dxa13rsnTQ8eH_BcQvbY2bp1AYFtzuFrJ-J_urrSmeQ,292694
140
142
  onnx_diagnostic/torch_models/hghub/model_inputs.py,sha256=tCGqigRyY1omxm2rczRUvCTsweZGbF1MccWI3MmCH20,17423
141
143
  onnx_diagnostic/torch_models/hghub/model_specific.py,sha256=j50Nu7wddJMoqmD4QzMbNdFDUUgUmSBKRzPDH55TlUQ,2498
142
144
  onnx_diagnostic/torch_models/untrained/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
143
145
  onnx_diagnostic/torch_models/untrained/llm_phi2.py,sha256=y_akbdApi136qHcEQgykwIAYVw0Yfi0lbjb3DNuafaU,3948
144
146
  onnx_diagnostic/torch_models/untrained/llm_tiny_llm.py,sha256=QXw_Bs2SzfeiQMf-tmtVl83SmVOL4-Um7Qy-f0E48QI,2507
145
147
  onnx_diagnostic/torch_onnx/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
148
+ onnx_diagnostic/torch_onnx/compare.py,sha256=O0lws4kzn8WAXr8-x-YMPr7oyBC9DtSIs4OfOr4S5-E,12305
146
149
  onnx_diagnostic/torch_onnx/runtime_info.py,sha256=u1bD6VXqzBCRmqmbzQtDswaPs1PH_ygr1r-CrcfXpNU,8562
147
150
  onnx_diagnostic/torch_onnx/sbs.py,sha256=8okBEIupMgw7TtKc80YFimMtwnY3GchdY05FsA9ooa0,40749
148
151
  onnx_diagnostic/torch_onnx/sbs_dataclasses.py,sha256=UctdBjzoPTQG1LS0tZ8A6E9hpoq5HWUYaJLPOPJc9FI,20299
149
- onnx_diagnostic-0.8.5.dist-info/licenses/LICENSE.txt,sha256=Vv6TXglX6Rc0d-f8aREhayhT-6PMQXEyOmI2NKlUCMc,1045
150
- onnx_diagnostic-0.8.5.dist-info/METADATA,sha256=A54IonPIcnualwiRJhvjRMfhF3p3jdXhEH1vTtZBgyE,6734
151
- onnx_diagnostic-0.8.5.dist-info/WHEEL,sha256=_zCd3N1l69ArxyTb8rzEoP9TpbYXkqRFSNOD5OuxnTs,91
152
- onnx_diagnostic-0.8.5.dist-info/top_level.txt,sha256=KwNkXewmcobM3ZT1DJLVWH6ebJzA5qKg7cWqKfpGNT4,16
153
- onnx_diagnostic-0.8.5.dist-info/RECORD,,
152
+ onnx_diagnostic-0.8.6.dist-info/licenses/LICENSE.txt,sha256=Vv6TXglX6Rc0d-f8aREhayhT-6PMQXEyOmI2NKlUCMc,1045
153
+ onnx_diagnostic-0.8.6.dist-info/METADATA,sha256=9xPlJ9UHYSSIyEMqxN14mqZg31Rq9jqzHaczfqWhu-4,6734
154
+ onnx_diagnostic-0.8.6.dist-info/WHEEL,sha256=_zCd3N1l69ArxyTb8rzEoP9TpbYXkqRFSNOD5OuxnTs,91
155
+ onnx_diagnostic-0.8.6.dist-info/top_level.txt,sha256=KwNkXewmcobM3ZT1DJLVWH6ebJzA5qKg7cWqKfpGNT4,16
156
+ onnx_diagnostic-0.8.6.dist-info/RECORD,,