onnxruntime_extensions 0.14.0__cp313-cp313-macosx_11_0_universal2.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- onnxruntime_extensions/__init__.py +82 -0
- onnxruntime_extensions/_cuops.py +564 -0
- onnxruntime_extensions/_extensions_pydll.cpython-313-darwin.so +0 -0
- onnxruntime_extensions/_extensions_pydll.pyi +45 -0
- onnxruntime_extensions/_hf_cvt.py +331 -0
- onnxruntime_extensions/_ocos.py +133 -0
- onnxruntime_extensions/_ortapi2.py +274 -0
- onnxruntime_extensions/_torch_cvt.py +231 -0
- onnxruntime_extensions/_version.py +2 -0
- onnxruntime_extensions/cmd.py +66 -0
- onnxruntime_extensions/cvt.py +306 -0
- onnxruntime_extensions/onnxprocess/__init__.py +12 -0
- onnxruntime_extensions/onnxprocess/_builder.py +53 -0
- onnxruntime_extensions/onnxprocess/_onnx_ops.py +1507 -0
- onnxruntime_extensions/onnxprocess/_session.py +355 -0
- onnxruntime_extensions/onnxprocess/_tensor.py +628 -0
- onnxruntime_extensions/onnxprocess/torch_wrapper.py +31 -0
- onnxruntime_extensions/pnp/__init__.py +13 -0
- onnxruntime_extensions/pnp/_base.py +124 -0
- onnxruntime_extensions/pnp/_imagenet.py +65 -0
- onnxruntime_extensions/pnp/_nlp.py +148 -0
- onnxruntime_extensions/pnp/_onnx_ops.py +1544 -0
- onnxruntime_extensions/pnp/_torchext.py +310 -0
- onnxruntime_extensions/pnp/_unifier.py +45 -0
- onnxruntime_extensions/pnp/_utils.py +302 -0
- onnxruntime_extensions/pp_api.py +83 -0
- onnxruntime_extensions/tools/__init__.py +0 -0
- onnxruntime_extensions/tools/add_HuggingFace_CLIPImageProcessor_to_model.py +171 -0
- onnxruntime_extensions/tools/add_pre_post_processing_to_model.py +535 -0
- onnxruntime_extensions/tools/pre_post_processing/__init__.py +4 -0
- onnxruntime_extensions/tools/pre_post_processing/pre_post_processor.py +395 -0
- onnxruntime_extensions/tools/pre_post_processing/step.py +227 -0
- onnxruntime_extensions/tools/pre_post_processing/steps/__init__.py +6 -0
- onnxruntime_extensions/tools/pre_post_processing/steps/general.py +366 -0
- onnxruntime_extensions/tools/pre_post_processing/steps/nlp.py +344 -0
- onnxruntime_extensions/tools/pre_post_processing/steps/vision.py +1157 -0
- onnxruntime_extensions/tools/pre_post_processing/utils.py +139 -0
- onnxruntime_extensions/util.py +186 -0
- onnxruntime_extensions-0.14.0.dist-info/LICENSE +21 -0
- onnxruntime_extensions-0.14.0.dist-info/METADATA +102 -0
- onnxruntime_extensions-0.14.0.dist-info/RECORD +43 -0
- onnxruntime_extensions-0.14.0.dist-info/WHEEL +6 -0
- onnxruntime_extensions-0.14.0.dist-info/top_level.txt +1 -0
onnxruntime_extensions/tools/pre_post_processing/steps/general.py
@@ -0,0 +1,366 @@
# Copyright (c) Microsoft Corporation. All rights reserved.
# Licensed under the MIT License.

import onnx
from typing import List, Optional
from ..step import Step


class Identity(Step):
    """
    ONNX Identity for all inputs to the Step. Used to pass through values as-is to later Steps.
    """

    def __init__(self, num_inputs: int = 1, name: Optional[str] = None):
        """
        Args:
            num_inputs: Number of inputs to pass through. Defaults to 1.
            name: Optional name of step. Defaults to 'Identity'
        """
        super().__init__([f"in_{x}" for x in range(0, num_inputs)],
                         [f"out_{x}" for x in range(0, num_inputs)],
                         name)
        self._num_inputs = num_inputs

    def _create_graph_for_step(self, graph: onnx.GraphProto, onnx_opset: int):
        inputs = []
        outputs = []
        identity_nodes = []

        for i in range(0, self._num_inputs):
            input_type_str, input_shape_str = self._get_input_type_and_shape_strs(graph, i)
            inputs.append(f"{input_type_str}[{input_shape_str}] {self.input_names[i]}")
            outputs.append(f"{input_type_str}[{input_shape_str}] {self.output_names[i]}")
            identity_nodes.append(f"{self.output_names[i]} = Identity({self.input_names[i]})")

        identity_node_text = '\n'.join(identity_nodes)
        converter_graph = onnx.parser.parse_graph(
            f"""\
            identities ({', '.join(inputs)}) => ({', '.join(outputs)})
            {{
                {identity_node_text}
            }}
            """
        )

        return converter_graph
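
# Illustrative sketch (not part of the original file): the graph text that Identity builds
# above, written out literally for an assumed num_inputs=2 with float[1,3] inputs, so the
# onnx.parser syntax it relies on is easy to see. The type/shape values are example assumptions.
import onnx.parser

_identity_example = onnx.parser.parse_graph(
    """\
    identities (float[1,3] in_0, float[1,3] in_1) => (float[1,3] out_0, float[1,3] out_1)
    {
        out_0 = Identity(in_0)
        out_1 = Identity(in_1)
    }
    """
)
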
class ReverseAxis(Step):
    """
    Reverses the data in an axis by splitting and concatenating in reverse order.
    e.g. convert RGB ordered data to BGR.
    Output data type and shape is the same as the input.
    """

    def __init__(self, axis: int = -1, dim_value: int = -1, name: Optional[str] = None):
        """
        Args:
            axis: Axis to reverse. Default is last axis.
            dim_value: Explicit value for size of dimension being reversed.
                       This can be provided if the axis being reversed currently has a symbolic value.
                       Note that this will fail during graph execution if the actual value at runtime
                       does not match. If not provided, the size of the dimension to reverse is inferred
                       from the input shape.
            name: Optional Step name. Defaults to 'ReverseAxis'
        """
        super().__init__(["data"], ["data_with_reversed_axis"], name)
        self._axis = axis
        self._dim_value = dim_value

    def _create_graph_for_step(self, graph: onnx.GraphProto, onnx_opset: int):
        input_type_str, input_shape_str = self._get_input_type_and_shape_strs(graph, 0)
        input_dims = input_shape_str.split(",")
        split_dim = input_dims[self._axis]

        if split_dim.isdigit():
            dim_value = int(split_dim)
            if self._dim_value != -1:
                # TODO: Technically we don't require a match here. For now expect it to match.
                assert dim_value == self._dim_value
            else:
                self._dim_value = dim_value

        split_outs = []
        for i in range(0, self._dim_value):
            split_outs.append(f"split_out_{i}")

        split_attr = f"axis = {self._axis}"
        if onnx_opset >= 18:
            # Split now requires the number of outputs to be specified even though that can be easily inferred...
            split_attr += f", num_outputs = {len(split_outs)}"

        reverse_graph = onnx.parser.parse_graph(
            f"""\
            reverse_axis ({input_type_str}[{input_shape_str}] {self.input_names[0]})
                => ({input_type_str}[{input_shape_str}] {self.output_names[0]})
            {{
                {','.join(split_outs)} = Split <{split_attr}> ({self.input_names[0]})
                {self.output_names[0]} = Concat <axis = {self._axis}> ({','.join(reversed(split_outs))})
            }}
            """
        )

        return reverse_graph
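
# Illustrative sketch (not part of the original file): ReverseAxis wires a Split along the
# chosen axis followed by a Concat of the pieces in reverse order, which is equivalent to
# flipping that axis. A numpy sketch of the semantics with assumed HWC example data:
import numpy as np

_rgb = np.arange(2 * 2 * 3).reshape(2, 2, 3)                      # assumed RGB data, channels last
_bgr = np.concatenate(np.split(_rgb, 3, axis=-1)[::-1], axis=-1)  # split + concat in reverse order
assert np.array_equal(_bgr, _rgb[..., ::-1])                      # same result as reversing the last axis
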
class Split(Step):
    """
    ONNX Split
    """

    def __init__(self,
                 num_outputs: int,
                 axis: Optional[int] = None,
                 splits: Optional[List[int]] = None,
                 name: Optional[str] = None):
        """
        :param num_outputs: Number of outputs to split the input into. Unequal split is allowed for opset 18+.
        :param axis: Axis to split on. Default is 0.
        :param splits: Optional length of each output. Sum must equal dim value at 'axis'
        :param name: Optional Step name. Defaults to 'Split'
        """
        output_names = [f"{name if name else self.__class__.__name__}_{x}" for x in range(0, num_outputs)]
        super().__init__(["data"], output_names, name)
        self._num_outputs = num_outputs
        self._axis = axis if axis else 0
        self._splits = splits

        if splits and len(splits) != num_outputs:
            raise ValueError("Splits length must match num_outputs")

    def _create_graph_for_step(self, graph: onnx.GraphProto, onnx_opset: int):
        input_type_str, input_shape_str = self._get_input_type_and_shape_strs(graph, 0)
        dims = input_shape_str.split(",")

        axis = (self._axis + len(dims)) if self._axis < 0 else self._axis

        # calculate dim value of axis being split for each output
        if self._splits:
            split_dim_strs = [str(x) for x in self._splits]
            splits_input_name = ", split_sizes"
            splits_const = (f"split_sizes = Constant <value = int64[{self._num_outputs}] "
                            f"{{{', '.join(split_dim_strs)}}}>()")
        else:
            if dims[axis].isdigit():
                # integer division so the resulting dim string is a valid integer for the parser
                split_dim_str = str(int(dims[axis]) // self._num_outputs)
            else:
                split_dim_str = f"{dims[axis]}_/_{self._num_outputs}"

            split_dim_strs = [split_dim_str] * self._num_outputs
            splits_input_name = ""
            splits_const = ""

        split_outputs = []
        for i in range(0, self._num_outputs):
            dims[axis] = split_dim_strs[i]
            split_outputs.append(f"{input_type_str}[{','.join(dims)}] {self.output_names[i]}")

        # num_outputs attribute is required if opset 18+ and not providing splits input
        num_outputs = ""
        if onnx_opset >= 18 and not self._splits:
            num_outputs = f", num_outputs = {self._num_outputs}"

        split_graph = onnx.parser.parse_graph(
            f"""\
            split ({input_type_str}[{input_shape_str}] {self.input_names[0]})
                => ({",".join(split_outputs)})
            {{
                {splits_const}
                {",".join(self.output_names)}
                    = Split <axis={self._axis} {num_outputs}>({self.input_names[0]} {splits_input_name})
            }}
            """
        )

        return split_graph
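
# Illustrative sketch (not part of the original file): the Split step divides the 'axis'
# dimension into num_outputs equal parts, or into the explicit 'splits' lengths when given.
# A numpy sketch of the semantics with assumed example values:
import numpy as np

_data = np.arange(12).reshape(2, 6)
_even = np.split(_data, 3, axis=1)                               # num_outputs=3, axis=1
_uneven = np.split(_data, np.cumsum([1, 2, 3])[:-1], axis=1)     # splits=[1, 2, 3]
assert [a.shape for a in _even] == [(2, 2)] * 3
assert [a.shape for a in _uneven] == [(2, 1), (2, 2), (2, 3)]
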
class Squeeze(Step):
    """
    ONNX Squeeze
    """

    def __init__(self, axes: Optional[List[int]] = None, name: Optional[str] = None):
        """
        Args:
            axes: Axes to remove.
                  If None, remove all axes with size of 1. Requires all dimensions to have explicit values.
            name: Optional Step name. Defaults to 'Squeeze'
        """
        super().__init__(["data"], ["squeezed"], name)
        self._axes = axes

    def _create_graph_for_step(self, graph: onnx.GraphProto, onnx_opset: int):
        input_type_str, input_shape_str = self._get_input_type_and_shape_strs(graph, 0)
        dims = input_shape_str.split(",")

        axes = self._axes
        if not axes:
            axes = []
            for idx, dim in enumerate(dims):
                if not dim.isnumeric():
                    # we can't infer the output shape if there are symbolic dims
                    raise ValueError("Axes must be specified if there are symbolic dimensions.")

                if dim == '1':
                    axes.append(int(idx))

        output_dims = [dim for idx, dim in enumerate(dims) if idx not in axes]
        output_shape_str = ",".join(output_dims)

        axes_strs = [str(axis) for axis in axes]

        squeeze_graph = onnx.parser.parse_graph(
            f"""\
            squeeze ({input_type_str}[{input_shape_str}] {self.input_names[0]})
                => ({input_type_str}[{output_shape_str}] {self.output_names[0]})
            {{
                axes = Constant <value = int64[{len(axes)}] {{{','.join(axes_strs)}}}> ()
                {self.output_names[0]} = Squeeze({self.input_names[0]}, axes)
            }}
            """
        )

        return squeeze_graph
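
# Illustrative sketch (not part of the original file): the Squeeze step drops size-1
# dimensions, either the explicit 'axes' or, as in the shape handling above, every dim
# equal to 1. A numpy sketch with assumed example shapes:
import numpy as np

_x = np.zeros((1, 3, 1, 224))
assert np.squeeze(_x).shape == (3, 224)              # axes=None: all size-1 dims removed
assert np.squeeze(_x, axis=0).shape == (3, 1, 224)   # axes=[0]: only dim 0 removed
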
class Transpose(Step):
    """
    ONNX Transpose.
    """

    def __init__(self, perms: List[int], name: Optional[str] = None):
        """
        Args:
            perms: List of integers with permutations to apply.
            name: Optional Step name. Defaults to 'Transpose'
        """
        super().__init__(["X"], ["transposed"], name)
        self.perms = perms

    def _create_graph_for_step(self, graph: onnx.GraphProto, onnx_opset: int):
        input_type_str, input_shape_str = self._get_input_type_and_shape_strs(graph, 0)
        perms_str = ",".join([str(idx) for idx in self.perms])
        dims = input_shape_str.split(",")
        output_dims = [dims[axis] for axis in self.perms]
        output_shape_str = ",".join(output_dims)

        transpose_graph = onnx.parser.parse_graph(
            f"""\
            transpose ({input_type_str}[{input_shape_str}] {self.input_names[0]})
                => ({input_type_str}[{output_shape_str}] {self.output_names[0]})
            {{
                {self.output_names[0]} = Transpose <perm = [{perms_str}]> ({self.input_names[0]})
            }}
            """
        )

        return transpose_graph
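
# Illustrative sketch (not part of the original file): the Transpose step permutes the
# dimensions according to 'perms', and the output shape computed above matches
# numpy.transpose. An assumed HWC -> CHW example:
import numpy as np

_hwc = np.zeros((224, 224, 3))
_chw = np.transpose(_hwc, (2, 0, 1))   # perms=[2, 0, 1]
assert _chw.shape == (3, 224, 224)
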
class Softmax(Step):
    """
    ONNX Softmax
    """

    def __init__(self, name: Optional[str] = None):
        """
        Args:
            name: Optional Step name. Defaults to 'Softmax'
        """
        super().__init__(["data"], ["probabilities"], name)

    def _create_graph_for_step(self, graph: onnx.GraphProto, onnx_opset: int):
        input_type_str, input_shape_str = self._get_input_type_and_shape_strs(graph, 0)

        softmax_graph = onnx.parser.parse_graph(
            f"""\
            softmax ({input_type_str}[{input_shape_str}] {self.input_names[0]})
                => ({input_type_str}[{input_shape_str}] {self.output_names[0]})
            {{
                {self.output_names[0]} = Softmax ({self.input_names[0]})
            }}
            """
        )

        return softmax_graph
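
# Illustrative sketch (not part of the original file): the Softmax node generated above uses
# the operator's default axis (-1 from opset 13 onward), normalising scores into probabilities
# along the last dimension. A numpy sketch with assumed example logits:
import numpy as np

_logits = np.array([[1.0, 2.0, 3.0]])
_shifted = _logits - _logits.max(axis=-1, keepdims=True)                 # subtract max for numerical stability
_probs = np.exp(_shifted) / np.exp(_shifted).sum(axis=-1, keepdims=True)
assert np.isclose(_probs.sum(), 1.0)
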
class Unsqueeze(Step):
    """
    ONNX Unsqueeze
    """

    def __init__(self, axes: List[int], name: Optional[str] = None):
        """
        Args:
            axes: List of integers indicating the dimensions to be inserted.
            name: Optional Step name. Defaults to 'Unsqueeze'
        """
        super().__init__(["data"], ["expanded"], name)
        self._axes = axes

    def _create_graph_for_step(self, graph: onnx.GraphProto, onnx_opset: int):
        input_type_str, input_shape_str = self._get_input_type_and_shape_strs(graph, 0)
        dims = input_shape_str.split(",")

        for idx in self._axes:
            dims.insert(idx, "1")

        output_shape_str = ",".join(dims)
        axes_strs = [str(axis) for axis in self._axes]

        unsqueeze_graph = onnx.parser.parse_graph(
            f"""\
            unsqueeze ({input_type_str}[{input_shape_str}] {self.input_names[0]})
                => ({input_type_str}[{output_shape_str}] {self.output_names[0]})
            {{
                axes = Constant <value = int64[{len(self._axes)}] {{{','.join(axes_strs)}}}> ()
                {self.output_names[0]} = Unsqueeze ({self.input_names[0]}, axes)
            }}
            """
        )

        return unsqueeze_graph
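
# Illustrative sketch (not part of the original file): the Unsqueeze step inserts size-1
# dimensions at the given axes, e.g. adding a leading batch dimension. A numpy sketch with
# assumed example shapes:
import numpy as np

_chw = np.zeros((3, 224, 224))
_nchw = np.expand_dims(_chw, axis=0)   # axes=[0]
assert _nchw.shape == (1, 3, 224, 224)
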
class ArgMax(Step):
    def __init__(self, name: Optional[str] = None, axis: int = -1, keepdims: int = 0):
        """
        Brief:
            Same as ArgMax op.
        Args:
            name: Optional name of step. Defaults to 'ArgMax'
            axis: Axis to perform the ArgMax on. Defaults to -1 (the last axis).
            keepdims: Set to 1 to keep the reduced dimension with size 1. Defaults to 0 (dimension is removed).
        """
        super().__init__(["data"], ["index"], name)
        self._axis = axis
        self._keepdims = keepdims

    def _create_graph_for_step(self, graph: onnx.GraphProto, onnx_opset: int):
        input_type_str_0, input_shape_str_0 = self._get_input_type_and_shape_strs(graph, 0)
        input_shape_0 = input_shape_str_0.split(",")

        def build_input_declare():
            return f"{input_type_str_0}[{input_shape_str_0}] {self.input_names[0]}"

        axis = self._axis + len(input_shape_0) if self._axis < 0 else self._axis
        if axis >= len(input_shape_0):
            raise ValueError("axis should be in range [-rank, rank-1].")

        output_shape_str = input_shape_0.copy()
        output_shape_str[axis] = "1"
        if self._keepdims == 0:
            output_shape_str.pop(axis)

        converter_graph = onnx.parser.parse_graph(
            f"""\
            classify ({build_input_declare()})
                => (int64[{','.join(output_shape_str)}] {self.output_names[0]})
            {{
                {self.output_names[0]} = ArgMax<axis = {self._axis}, keepdims={self._keepdims}>({self.input_names[0]})
            }}
            """
        )

        return converter_graph
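
# Illustrative sketch (not part of the original file): the ArgMax step returns the index of
# the maximum along 'axis', with the reduced dimension dropped or kept as size 1 according to
# 'keepdims', mirroring the output-shape handling above. A numpy sketch with assumed scores:
import numpy as np

_scores = np.array([[0.1, 0.7, 0.2]])
_idx = np.argmax(_scores, axis=-1)          # keepdims=0: reduced axis is dropped
assert _idx.shape == (1,) and int(_idx[0]) == 1
_idx_kept = np.expand_dims(_idx, axis=-1)   # keepdims=1 would keep it as a size-1 axis
assert _idx_kept.shape == (1, 1)
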