tico 0.1.0.dev250724__py3-none-any.whl → 0.1.0.dev250728__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (29)
  1. tico/__init__.py +1 -1
  2. tico/experimental/quantization/passes/insert_quantize_on_dtype_mismatch.py +6 -0
  3. tico/serialize/circle_graph.py +7 -18
  4. tico/serialize/circle_mapping.py +58 -2
  5. tico/serialize/circle_serializer.py +9 -4
  6. tico/serialize/operators/op_any.py +7 -11
  7. tico/serialize/operators/op_avg_pool2d.py +13 -3
  8. tico/serialize/operators/op_clamp.py +4 -3
  9. tico/serialize/operators/op_conv2d.py +9 -4
  10. tico/serialize/operators/op_copy.py +26 -3
  11. tico/serialize/operators/op_cumsum.py +3 -1
  12. tico/serialize/operators/op_depthwise_conv2d.py +11 -5
  13. tico/serialize/operators/op_index_select.py +8 -1
  14. tico/serialize/operators/op_log1p.py +3 -2
  15. tico/serialize/operators/op_max_pool2d_with_indices.py +12 -2
  16. tico/serialize/operators/op_mm.py +1 -0
  17. tico/serialize/operators/op_pow.py +3 -1
  18. tico/serialize/operators/op_repeat.py +6 -2
  19. tico/serialize/operators/op_reshape.py +1 -1
  20. tico/serialize/operators/op_split_with_sizes.py +16 -8
  21. tico/serialize/operators/op_transpose_conv.py +9 -4
  22. tico/serialize/operators/op_view.py +2 -1
  23. tico/utils/serialize.py +11 -0
  24. {tico-0.1.0.dev250724.dist-info → tico-0.1.0.dev250728.dist-info}/METADATA +1 -1
  25. {tico-0.1.0.dev250724.dist-info → tico-0.1.0.dev250728.dist-info}/RECORD +29 -29
  26. {tico-0.1.0.dev250724.dist-info → tico-0.1.0.dev250728.dist-info}/LICENSE +0 -0
  27. {tico-0.1.0.dev250724.dist-info → tico-0.1.0.dev250728.dist-info}/WHEEL +0 -0
  28. {tico-0.1.0.dev250724.dist-info → tico-0.1.0.dev250728.dist-info}/entry_points.txt +0 -0
  29. {tico-0.1.0.dev250724.dist-info → tico-0.1.0.dev250728.dist-info}/top_level.txt +0 -0
tico/__init__.py CHANGED
@@ -29,7 +29,7 @@ __all__ = [
  ]
 
  # THIS LINE IS AUTOMATICALLY GENERATED BY setup.py
- __version__ = "0.1.0.dev250724"
+ __version__ = "0.1.0.dev250728"
 
  MINIMUM_SUPPORTED_VERSION = "2.5.0"
  SECURE_TORCH_VERSION = "2.6.0"
tico/experimental/quantization/passes/insert_quantize_on_dtype_mismatch.py CHANGED
@@ -376,6 +376,12 @@ def _relu_handler(node, logger):
  quantize.meta[QPARAM_KEY] = copy.deepcopy(node.meta[QPARAM_KEY])
  node.meta[QPARAM_KEY] = _u8_to_i16(node.meta[QPARAM_KEY])
  logger.debug(f"quantize_per_tensor.default is inserted after {node.name}.")
+ elif qparam_dtype(inp) == "uint8" and qparam_dtype(node) == "int16":
+ quantize = _insert_quantize_op_after(node)
+
+ quantize.meta[QPARAM_KEY] = copy.deepcopy(node.meta[QPARAM_KEY])
+ node.meta[QPARAM_KEY] = _i16_to_u8(node.meta[QPARAM_KEY])
+ logger.debug(f"quantize_per_tensor.default is inserted after {node.name}.")
  else:
  raise NotYetSupportedError("Unsupported dtype")
 
tico/serialize/circle_graph.py CHANGED
@@ -24,7 +24,7 @@ from torch._subclasses.fake_tensor import FakeTensor
 
  from tico.serialize.circle_mapping import (
  extract_circle_dtype,
- extract_shape,
+ extract_circle_shape,
  str_to_circle_dtype,
  to_circle_dtype,
  )
@@ -151,15 +151,7 @@ class CircleSubgraph(circle.SubGraph.SubGraphT):
  self.name_to_node[tensor.name] = node
  assert node.meta.get("val") is not None
  tensor.type = extract_circle_dtype(node)
- tensor.shape = list(extract_shape(node))
-
- # Handle dynamic shape
- if any(isinstance(s, torch.SymInt) for s in tensor.shape):
- tensor.shapeSignature = tensor.shape.copy()
- for idx, s in enumerate(tensor.shape):
- if isinstance(s, torch.SymInt):
- tensor.shape[idx] = 1
- tensor.shapeSignature[idx] = -1
+ tensor.shape, tensor.shapeSignature = extract_circle_shape(node) # type: ignore[assignment]
 
  if QPARAM_KEY in node.meta:
  tensor.quantization = to_circle_qparam(node.meta[QPARAM_KEY])
@@ -208,6 +200,7 @@ class CircleSubgraph(circle.SubGraph.SubGraphT):
  self,
  prefix: str,
  shape: List[int],
+ shape_signature: Optional[List[int]],
  dtype: int,
  qparam: Optional[QuantParam] = None,
  source_node: Optional[torch.fx.Node] = None,
@@ -230,6 +223,8 @@ class CircleSubgraph(circle.SubGraph.SubGraphT):
  A name prefix used to generate a unique tensor name.
  shape : List[int]
  The shape of the tensor.
+ shape_signature : Optional[List[int]]
+ The shape signature of the tensor to express Dynamic Shape. Defaults to `None` for Static Shape.
  dtype : int
  The Circle-compatible dtype of the tensor. Use `to_circle_dtype()` to convert.
  qparam : Optional[QuantParam]
@@ -250,14 +245,8 @@ class CircleSubgraph(circle.SubGraph.SubGraphT):
  if source_node is not None:
  self.name_to_node[tensor.name] = source_node
  tensor.shape = shape
-
- # Handle dynamic shape
- if any(isinstance(s, torch.SymInt) for s in tensor.shape):
- tensor.shapeSignature = tensor.shape.copy()
- for idx, s in enumerate(tensor.shape):
- if isinstance(s, torch.SymInt):
- tensor.shape[idx] = 1
- tensor.shapeSignature[idx] = -1
+ if shape_signature is not None:
+ tensor.shapeSignature = shape_signature
 
  if qparam is not None:
  tensor.quantization = to_circle_qparam(qparam)
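
Note: with the change above, `add_tensor_from_scratch` makes the caller responsible for the dynamic-shape signature. A minimal calling sketch, not part of the diff; `graph` stands for an already-built CircleSubgraph, and the prefix, shape, and dtype values are made up for illustration:

    import torch
    from tico.serialize.circle_mapping import to_circle_dtype

    # Static tensor: the new shape_signature argument is simply None.
    graph.add_tensor_from_scratch(
        prefix="example_static",
        shape=[1, 16, 16, 8],
        shape_signature=None,
        dtype=to_circle_dtype(torch.float32),
    )

    # Dynamic tensor: dynamic dims are stored as 1 in shape and as -1 in shape_signature.
    graph.add_tensor_from_scratch(
        prefix="example_dynamic_batch",
        shape=[1, 16, 16, 8],
        shape_signature=[-1, 16, 16, 8],
        dtype=to_circle_dtype(torch.float32),
    )
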
tico/serialize/circle_mapping.py CHANGED
@@ -12,7 +12,7 @@
  # See the License for the specific language governing permissions and
  # limitations under the License.
 
- from typing import Tuple, TYPE_CHECKING, Union
+ from typing import List, Optional, Tuple, TYPE_CHECKING, Union
 
  if TYPE_CHECKING:
  import torch.fx
@@ -128,6 +128,61 @@ def extract_shape(node: torch.fx.Node) -> torch.Size:
  return val_shape
 
 
+ def extract_circle_shape(node: torch.fx.Node) -> Tuple[List[int], Optional[List[int]]]:
+ return to_circle_shape(extract_shape(node))
+
+
+ def to_circle_shape(torch_shape: torch.Size) -> Tuple[List[int], Optional[List[int]]]:
+ shape: List[int] = list(torch_shape)
+ shape_signature: Optional[List[int]] = None
+
+ if any(isinstance(s, torch.SymInt) for s in shape):
+ shape_signature = shape.copy()
+ for idx, s in enumerate(shape):
+ if isinstance(s, torch.SymInt):
+ shape[idx] = 1
+ shape_signature[idx] = -1
+
+ return shape, shape_signature
+
+
+ def validate_circle_shape(shape: List[int], shape_signature: Optional[List[int]]):
+ """
+ Validate circle tensor shape and shape_signature.
+ @ref https://github.com/Samsung/TICO/issues/244
+ """
+ if shape_signature is not None:
+ if len(shape_signature) == 0:
+ raise ValueError(
+ "Invalid circle shape: shape_signature must not be an empty list. "
+ "For static shapes, use None instead of []."
+ )
+ if len(shape) != len(shape_signature):
+ raise ValueError(
+ f"Invalid circle shape: shape and shape_signature must have same length: {shape} {shape_signature}"
+ )
+ if not all(isinstance(s, int) for s in shape_signature):
+ raise ValueError(
+ f"circle tensor shape_signature must be all integer values. {shape_signature}"
+ )
+ for s, ss in zip(shape, shape_signature):
+ if ss == -1:
+ # dynamic shape dimension
+ if s != 1:
+ raise ValueError(
+ f"Invalid circle shape: {s} {ss} {shape} {shape_signature}"
+ )
+ else:
+ # static shape dimension
+ if s != ss:
+ raise ValueError(
+ f"Invalid circle shape: {s} {ss} {shape} {shape_signature}"
+ )
+
+ if not all(isinstance(s, int) for s in shape):
+ raise ValueError(f"circle tensor shape must be all integer values. {shape}")
+
+
  # Return stride of node
  def extract_stride(node: torch.fx.Node) -> Tuple[int, ...]:
  assert node.meta is not None
@@ -157,7 +212,8 @@ def check_if_i32_range(axis: Union[list, int]):
  return all(INT32_MIN <= val <= INT32_MAX for val in values)
 
 
- def circle_legalize_dtype_to(values, *, dtype: torch.dtype):
+ # TODO: Revisit this dtype legalization function as it breaks SRP
+ def circle_legalize_dtype_to(values, *, dtype: torch.dtype) -> torch.Tensor:
  """
  Legalize data types from `torch.int64` to `torch.int32`.
 
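A short usage sketch of the helpers added to circle_mapping.py above, assuming this wheel is installed; the shapes are made-up values that only illustrate the shape/shapeSignature convention the diff introduces:

    import torch
    from tico.serialize.circle_mapping import to_circle_shape, validate_circle_shape

    # Static shapes produce no signature.
    shape, sig = to_circle_shape(torch.Size([2, 3, 4]))
    assert shape == [2, 3, 4] and sig is None

    # Dynamic dims (torch.SymInt entries from an exported program) are stored as 1
    # in shape and as -1 in the signature, so a consistent pair looks like this:
    validate_circle_shape([1, 3, 4], [-1, 3, 4])  # ok: dim 0 is dynamic
    validate_circle_shape([2, 3, 4], None)        # ok: fully static

    # Inconsistent pairs raise ValueError, e.g. a -1 signature entry whose
    # shape entry is not 1:
    # validate_circle_shape([2, 3, 4], [-1, 3, 4])
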
tico/serialize/circle_serializer.py CHANGED
@@ -20,13 +20,13 @@ import torch
  from circle_schema import circle
  from torch.export.exported_program import ConstantArgument, ExportedProgram, InputKind
 
- from tico.serialize.circle_mapping import to_circle_dtype
+ from tico.serialize.circle_mapping import to_circle_dtype, to_circle_shape
  from tico.serialize.operators import *
  from tico.serialize.circle_graph import CircleModel, CircleSubgraph
  from tico.serialize.operators.hashable_opcode import OpCode
  from tico.serialize.operators.node_visitor import get_node_visitors
  from tico.utils import logging
- from tico.utils.serialize import finalise_tensor_names
+ from tico.utils.serialize import finalise_tensor_names, validate_tensor_shapes
 
 
  multiple_output_ops = [
@@ -104,8 +104,10 @@ def build_circle(ep: ExportedProgram) -> bytes:
  graph.add_operator(circle_op)
  logger.debug(f"call_function: {node.name} ({opcode}) Op exported.")
 
- # Register subgraph
  finalise_tensor_names(graph)
+ validate_tensor_shapes(graph)
+
+ # Register subgraph
  model.subgraphs.append(graph)
 
  # Encode operator codes
@@ -322,9 +324,12 @@ def _handle_get_attr_node(
  if not isinstance(attr_tensor, torch.Tensor):
  raise ValueError(f"Attribute {node.target} is not a tensor")
 
+ attr_shape, attr_shape_signature = to_circle_shape(attr_tensor.shape)
+
  graph.add_tensor_from_scratch(
  prefix=node.name,
- shape=list(attr_tensor.shape),
+ shape=attr_shape,
+ shape_signature=attr_shape_signature,
  dtype=to_circle_dtype(attr_tensor.dtype),
  source_node=node,
  )
tico/serialize/operators/op_any.py CHANGED
@@ -22,6 +22,7 @@ from circle_schema import circle
  from tico.serialize.circle_graph import CircleSubgraph
  from tico.serialize.circle_mapping import (
  circle_legalize_dtype_to,
+ extract_circle_shape,
  extract_shape,
  extract_torch_dtype,
  )
@@ -99,17 +100,10 @@ class AnyVisitor(NodeVisitor):
  keepdim = args.keepdim
 
  input_shape = list(extract_shape(input))
- dim_i32 = None
  if dim is None:
- dims = tuple(i for i in range(0, len(input_shape)))
- dim_i32 = tuple(
- circle_legalize_dtype_to(dim, dtype=torch.int32) for dim in dims
- )
- if isinstance(dim, int):
- dim_i32 = circle_legalize_dtype_to(dim, dtype=torch.int32)
- if isinstance(dim, tuple):
- dim_i32 = tuple(circle_legalize_dtype_to(d, dtype=torch.int32) for d in dim)
- assert dim_i32 is not None
+ dim = tuple(i for i in range(0, len(input_shape)))
+
+ dim_i32 = circle_legalize_dtype_to(dim, dtype=torch.int32)
 
  inputs = [
  input,
@@ -123,9 +117,11 @@ class AnyVisitor(NodeVisitor):
  if dtype_torch in [torch.int32, torch.int64, torch.float32, torch.float64]:
  dst_dtype_circle = circle.TensorType.TensorType.BOOL
  dst_dtype_torch = torch.bool
+ dst_shape, dst_shape_signature = extract_circle_shape(input)
  ne_tensor: circle.Tensor.TensorT = self.graph.add_tensor_from_scratch(
  prefix=f"{input.name}_ne",
- shape=input_shape,
+ shape=dst_shape,
+ shape_signature=dst_shape_signature,
  dtype=dst_dtype_circle,
  source_node=input,
  )
tico/serialize/operators/op_avg_pool2d.py CHANGED
@@ -22,7 +22,7 @@ import torch
  from circle_schema import circle
 
  from tico.serialize.circle_graph import CircleSubgraph
- from tico.serialize.circle_mapping import extract_circle_dtype, extract_shape
+ from tico.serialize.circle_mapping import extract_circle_dtype, extract_circle_shape
  from tico.serialize.operators.hashable_opcode import OpCode
  from tico.serialize.operators.node_visitor import NodeVisitor, register_node_visitor
  from tico.serialize.operators.utils import create_builtin_operator, get_op_index
@@ -57,7 +57,12 @@ class AvgPool2DVisitor(NodeVisitor):
  return True
 
  def has_same_padding(self, args: AvgPool2dArgs) -> bool:
- input_shape = list(extract_shape(args.input))
+ input_shape, input_shape_signature = extract_circle_shape(args.input)
+
+ if input_shape_signature is not None:
+ # TODO: support dynamic shapes
+ raise NotImplementedError("Dynamic shape is not supported yet")
+
  kernel_size = args.kernel_size
  stride = args.stride
  assert stride
@@ -137,7 +142,11 @@ class AvgPool2DVisitor(NodeVisitor):
  ],
  dtype=torch.int32,
  )
- input_shape = list(extract_shape(input))
+ input_shape, input_shape_signature = extract_circle_shape(input)
+
+ if input_shape_signature is not None:
+ raise RuntimeError("Dynamic shape is not supported yet.")
+
  input_dtype: int = extract_circle_dtype(input)
  padded_input_shape = [
  input_shape[0],
@@ -151,6 +160,7 @@ class AvgPool2DVisitor(NodeVisitor):
  padded_input_tensor = self.graph.add_tensor_from_scratch(
  prefix=f"{input.name}_pad_output",
  shape=padded_input_shape,
+ shape_signature=None,
  dtype=input_dtype,
  source_node=node,
  )
tico/serialize/operators/op_clamp.py CHANGED
@@ -23,7 +23,7 @@ from circle_schema import circle
  from tico.passes import ops
  from tico.serialize.circle_graph import CircleSubgraph
 
- from tico.serialize.circle_mapping import extract_circle_dtype, extract_shape
+ from tico.serialize.circle_mapping import extract_circle_dtype, extract_circle_shape
  from tico.serialize.operators.hashable_opcode import OpCode
  from tico.serialize.operators.node_visitor import NodeVisitor, register_node_visitor
  from tico.serialize.operators.utils import create_builtin_operator, get_op_index
@@ -101,12 +101,13 @@ class ClampVisitor(NodeVisitor):
  return self.define_minimum_node([input, max_val], [node])
 
  elif min_val is not None and max_val is not None:
- input_shape = extract_shape(input)
+ input_shape, input_shape_signature = extract_circle_shape(input)
  input_dtype = extract_circle_dtype(input)
  minimum_tensor = self.graph.add_tensor_from_scratch(
  prefix=f"{input.name}_min",
  dtype=input_dtype,
- shape=list(input_shape),
+ shape=input_shape,
+ shape_signature=input_shape_signature,
  source_node=node,
  )
  minimum_opertor = self.define_minimum_node(
tico/serialize/operators/op_conv2d.py CHANGED
@@ -20,7 +20,7 @@ if TYPE_CHECKING:
  import torch
  from circle_schema import circle
 
- from tico.serialize.circle_mapping import extract_circle_dtype, extract_shape
+ from tico.serialize.circle_mapping import extract_circle_dtype, extract_circle_shape
  from tico.serialize.operators.hashable_opcode import OpCode
  from tico.serialize.operators.node_visitor import NodeVisitor, register_node_visitor
  from tico.serialize.operators.utils import create_builtin_operator, get_op_index
@@ -111,13 +111,17 @@ class Conv2dVisitor(NodeVisitor):
 
  assert groups == 1, "Only support group 1 conv2d"
 
- input_shape = list(extract_shape(input_))
- output_shape = list(extract_shape(node))
- weight_shape = list(extract_shape(weight))
+ input_shape, input_shape_signature = extract_circle_shape(input_)
+ output_shape, _ = extract_circle_shape(node)
+ weight_shape, _ = extract_circle_shape(weight)
  assert len(input_shape) == 4, len(input_shape)
  assert len(output_shape) == 4, len(output_shape)
  assert len(weight_shape) == 4, len(weight_shape)
 
+ if input_shape_signature is not None:
+ # TODO: support dynamic shapes
+ raise NotImplementedError("Dynamic shape is not supported yet")
+
  pad_decision = identify_padding(padding, input_shape, output_shape, stride)
 
  conv_input: torch.fx.Node | circle.Tensor.TensorT = input_
@@ -143,6 +147,7 @@ class Conv2dVisitor(NodeVisitor):
  pad_output = self.graph.add_tensor_from_scratch(
  prefix=f"{node.name}_input_pad_output",
  shape=pad_output_shape,
+ shape_signature=None,
  dtype=extract_circle_dtype(input_),
  qparam=input_qparam,
  source_node=node,
tico/serialize/operators/op_copy.py CHANGED
@@ -12,7 +12,7 @@
  # See the License for the specific language governing permissions and
  # limitations under the License.
 
- from typing import Dict, List, TYPE_CHECKING, Union
+ from typing import Dict, List, Optional, TYPE_CHECKING, Union
 
  if TYPE_CHECKING:
  import torch._ops
@@ -52,7 +52,15 @@ class CopyVisitor(NodeVisitor):
  def __init__(self, op_codes: Dict[OpCode, int], graph: CircleSubgraph):
  super().__init__(op_codes, graph)
 
- def check_to_do_broadcast(self, dst: List[int], src: List[int]) -> bool:
+ def check_to_do_broadcast(
+ self,
+ dst: List[int],
+ dst_sig: Optional[List[int]],
+ src: List[int],
+ src_sig: Optional[List[int]],
+ ) -> bool:
+ assert dst_sig is None
+ assert src_sig is None
  return dst != src
 
  def define_broadcast_to_node(
@@ -102,6 +110,12 @@ class CopyVisitor(NodeVisitor):
  # To connect 'dst' to Reshape node in the graph, 'dst' must be converted to Shape op.
  dst_tensor: circle.Tensor.TensorT = self.graph.get_tensor(dst)
  dst_shape: List[int] = dst_tensor.shape
+ dst_shape_signature: Optional[List[int]] = dst_tensor.shapeSignature
+
+ if dst_shape_signature is not None:
+ # TODO: support dynamic shape
+ raise NotYetSupportedError("Dynamic shape is not supported yet.")
+
  dst_shape_tensor = torch.as_tensor(dst_shape, dtype=torch.int32)
 
  dst_shape_shape = [len(dst_shape)]
@@ -110,6 +124,7 @@ class CopyVisitor(NodeVisitor):
  shape_output = self.graph.add_tensor_from_scratch(
  prefix=f"{dst_name}_shape_output",
  shape=dst_shape_shape,
+ shape_signature=None,
  dtype=circle.TensorType.TensorType.INT32,
  source_node=node,
  )
@@ -119,9 +134,16 @@ class CopyVisitor(NodeVisitor):
 
  src_tensor: circle.Tensor.TensorT = self.graph.get_tensor(src)
  src_shape: List[int] = src_tensor.shape
+ src_shape_signature: Optional[List[int]] = src_tensor.shapeSignature
+
+ if src_shape_signature is not None:
+ # TODO: support dynamic shape
+ raise NotYetSupportedError("Dynamic shape is not supported yet.")
 
  # The src tensor must be broadcastable with the dst tensor.
- do_broadcast = self.check_to_do_broadcast(dst_shape, src_shape)
+ do_broadcast = self.check_to_do_broadcast(
+ dst_shape, dst_shape_signature, src_shape, src_shape_signature
+ )
  if do_broadcast:
  # create braodcastTo output tensor
  src_name: str = src.name
@@ -131,6 +153,7 @@ class CopyVisitor(NodeVisitor):
  self.graph.add_tensor_from_scratch(
  prefix=f"{src_name}_broadcast_to_output",
  shape=dst_shape,
+ shape_signature=dst_shape_signature,
  dtype=src_type,
  source_node=node,
  )
tico/serialize/operators/op_cumsum.py CHANGED
@@ -12,7 +12,7 @@
  # See the License for the specific language governing permissions and
  # limitations under the License.
 
- from typing import Dict, List, TYPE_CHECKING
+ from typing import Dict, List, Optional, TYPE_CHECKING
 
  if TYPE_CHECKING:
  import torch._ops
@@ -57,6 +57,7 @@ class CumsumVisitor(NodeVisitor):
  if input_dtype == torch.int32:
  input_tensor: circle.Tensor.TensorT = self.graph.get_tensor(input)
  input_shape: List[int] = input_tensor.shape
+ input_shape_signature: Optional[List[int]] = input_tensor.shapeSignature
  cast_op_index = get_op_index(
  circle.BuiltinOperator.BuiltinOperator.CAST, self._op_codes
  )
@@ -66,6 +67,7 @@ class CumsumVisitor(NodeVisitor):
  prefix=cast_name,
  dtype=cast_dtype,
  shape=input_shape,
+ shape_signature=input_shape_signature,
  source_node=node,
  )
  cast_operator = create_builtin_operator(
tico/serialize/operators/op_depthwise_conv2d.py CHANGED
@@ -20,7 +20,7 @@ if TYPE_CHECKING:
  import torch
  from circle_schema import circle
 
- from tico.serialize.circle_mapping import extract_circle_dtype, extract_shape
+ from tico.serialize.circle_mapping import extract_circle_dtype, extract_circle_shape
  from tico.serialize.operators.hashable_opcode import OpCode
  from tico.serialize.operators.node_visitor import NodeVisitor, register_node_visitor
  from tico.serialize.operators.utils import create_builtin_operator, get_op_index
@@ -115,12 +115,17 @@ class DepthwiseConv2dVisitor(NodeVisitor):
  dilation = args.dilation
  groups = args.groups
 
- input_shape = list(extract_shape(input_)) # OHWI
- output_shape = list(extract_shape(node)) # OHWI
- weight_shape = list(extract_shape(weight)) # 1HWO
+ input_shape, input_shape_signature = extract_circle_shape(input_) # OHWI
+ output_shape, _ = extract_circle_shape(node) # OHWI
+ weight_shape, _ = extract_circle_shape(weight) # 1HWO
  assert len(input_shape) == 4, len(input_shape)
  assert len(output_shape) == 4, len(output_shape)
- assert len(weight_shape) == 4
+ assert len(weight_shape) == 4, len(weight_shape)
+
+ if input_shape_signature is not None:
+ # TODO: support dynamic shapes
+ raise NotImplementedError("Dynamic shape is not supported yet")
+
  assert weight_shape[0] == 1
  assert weight_shape[3] == output_shape[3]
  assert input_shape[3] == groups
@@ -156,6 +161,7 @@ class DepthwiseConv2dVisitor(NodeVisitor):
  pad_output = self.graph.add_tensor_from_scratch(
  prefix=f"{node.name}_input_pad_output",
  shape=pad_output_shape,
+ shape_signature=None,
  dtype=extract_circle_dtype(input_),
  qparam=input_qparam,
  source_node=node,
tico/serialize/operators/op_index_select.py CHANGED
@@ -49,7 +49,14 @@ class IndexSelectVisitor(NodeVisitor):
  self._op_codes,
  )
 
+ # TODO: Revise this to be simple
  dim_i32 = circle_legalize_dtype_to(dim, dtype=torch.int32)
+ assert (
+ dim_i32.dim() == 0 or len(dim_i32) == 1
+ ), f"dim should be scalar: {dim_i32}"
+ dim_i32_item = dim_i32.item()
+ assert isinstance(dim_i32_item, int)
+
  inputs = [input, index]
  outputs = [node]
 
@@ -57,7 +64,7 @@ class IndexSelectVisitor(NodeVisitor):
 
  operator.builtinOptionsType = circle.BuiltinOptions.BuiltinOptions.GatherOptions
  option = circle.GatherOptions.GatherOptionsT()
- option.axis = dim_i32
+ option.axis = dim_i32_item
 
  operator.builtinOptions = option
 
tico/serialize/operators/op_log1p.py CHANGED
@@ -23,7 +23,7 @@ from circle_schema import circle
  from tico.serialize.circle_graph import CircleSubgraph
  from tico.serialize.circle_mapping import (
  extract_circle_dtype,
- extract_shape,
+ extract_circle_shape,
  extract_torch_dtype,
  )
  from tico.serialize.operators.hashable_opcode import OpCode
@@ -62,11 +62,12 @@ class Log1pVisitor(NodeVisitor):
  args = Log1pArgs(*node.args, **node.kwargs) # type: ignore[arg-type]
  input = args.input
 
- input_shape = list(extract_shape(input))
+ input_shape, input_shape_signature = extract_circle_shape(input)
  dst_dtype_circle = extract_circle_dtype(input)
  add_tensor: circle.Tensor.TensorT = self.graph.add_tensor_from_scratch(
  prefix=f"{input.name}_add",
  shape=input_shape,
+ shape_signature=input_shape_signature,
  dtype=dst_dtype_circle,
  source_node=node,
  )
tico/serialize/operators/op_max_pool2d_with_indices.py CHANGED
@@ -22,7 +22,11 @@ import torch
  from circle_schema import circle
 
  from tico.serialize.circle_graph import CircleSubgraph
- from tico.serialize.circle_mapping import extract_circle_dtype, extract_shape
+ from tico.serialize.circle_mapping import (
+ extract_circle_dtype,
+ extract_circle_shape,
+ extract_shape,
+ )
  from tico.serialize.operators.hashable_opcode import OpCode
  from tico.serialize.operators.node_visitor import NodeVisitor, register_node_visitor
  from tico.serialize.operators.utils import (
@@ -88,7 +92,12 @@ class MaxPool2DWithIndicesVisitor(NodeVisitor):
  ],
  dtype=torch.int32,
  )
- input_shape = list(extract_shape(input))
+ input_shape, input_shape_signature = extract_circle_shape(input)
+
+ if input_shape_signature is not None:
+ # TODO: support dynamic shape
+ raise NotImplementedError("Padding with dynamic shape is not supported")
+
  input_dtype: int = extract_circle_dtype(input)
  padded_input_shape = [
  input_shape[0],
@@ -105,6 +114,7 @@ class MaxPool2DWithIndicesVisitor(NodeVisitor):
  padded_input_tensor = self.graph.add_tensor_from_scratch(
  prefix=f"{input.name}_pad_output",
  shape=padded_input_shape,
+ shape_signature=None,
  dtype=input_dtype,
  qparam=input_qparam,
  source_node=node,
tico/serialize/operators/op_mm.py CHANGED
@@ -129,6 +129,7 @@ class MatmulDefaultVisitor(NodeVisitor):
  trs_output = self.graph.add_tensor_from_scratch(
  prefix=f"{rhs_name}_transposed_output",
  shape=rhs_shape_transpose,
+ shape_signature=None,
  dtype=rhs_type,
  source_node=node,
  )
tico/serialize/operators/op_pow.py CHANGED
@@ -12,7 +12,7 @@
  # See the License for the specific language governing permissions and
  # limitations under the License.
 
- from typing import Dict, List, TYPE_CHECKING
+ from typing import Dict, List, Optional, TYPE_CHECKING
 
  if TYPE_CHECKING:
  import torch._ops
@@ -36,6 +36,7 @@ class BasePowVisitor(NodeVisitor):
  assert isinstance(node, torch.fx.Node), type(node)
  node_tensor: circle.Tensor.TensorT = self.graph.get_tensor(node)
  node_shape: List[int] = node_tensor.shape
+ node_shape_signature: Optional[List[int]] = node_tensor.shapeSignature
  op_index = get_op_index(
  circle.BuiltinOperator.BuiltinOperator.CAST, self._op_codes
  )
@@ -45,6 +46,7 @@ class BasePowVisitor(NodeVisitor):
  prefix=cast_name,
  dtype=cast_dtype,
  shape=node_shape,
+ shape_signature=node_shape_signature,
  source_node=node,
  )
  cast_operator = create_builtin_operator(
tico/serialize/operators/op_repeat.py CHANGED
@@ -21,7 +21,7 @@ import torch
  from circle_schema import circle
 
  from tico.serialize.circle_graph import CircleSubgraph
- from tico.serialize.circle_mapping import extract_circle_dtype, extract_shape
+ from tico.serialize.circle_mapping import extract_circle_dtype, extract_circle_shape
  from tico.serialize.operators.hashable_opcode import OpCode
  from tico.serialize.operators.node_visitor import NodeVisitor, register_node_visitor
  from tico.serialize.operators.utils import create_builtin_operator, get_op_index
@@ -51,7 +51,10 @@ class RepeatVisitor(NodeVisitor):
  elif r < 0:
  raise InvalidArgumentError("Only support positive repeat value")
 
- tensor_shape = extract_shape(input)
+ tensor_shape, tensor_shape_signature = extract_circle_shape(input)
+ if tensor_shape_signature is not None:
+ # TODO: support dynamic shape
+ raise NotYetSupportedError("Repeat does not support dynamic shape yet.")
  assert len(tensor_shape) <= len(repeats)
  if len(tensor_shape) != len(repeats):
  # TODO Support len(tensor_shape) < len(repeats)
@@ -75,6 +78,7 @@ class RepeatVisitor(NodeVisitor):
  concat_output = self.graph.add_tensor_from_scratch(
  prefix=f"{node.name}_concat_{idx}",
  shape=repeated_shape,
+ shape_signature=None, # TODO: support dynamic shape
  dtype=tensor_dtype,
  source_node=node,
  )
tico/serialize/operators/op_reshape.py CHANGED
@@ -66,7 +66,7 @@ class ReshapeVisitor(NodeVisitor):
  circle.BuiltinOptions.BuiltinOptions.ReshapeOptions
  )
  option = circle.ReshapeOptions.ReshapeOptionsT()
- option.newShape = size_i32
+ option.newShape = size_i32.tolist()
 
  operator.builtinOptions = option
 
tico/serialize/operators/op_split_with_sizes.py CHANGED
@@ -58,12 +58,14 @@ class SplitWithSizesVisitor(NodeVisitor):
  inputs = [input, split_sizes_i32, axis_i32]
 
  """
- `split_with_sizes` has multiple output tensors and they are represented as `getitem`.
- Therefore, unlike other ops, node itself doesn't become a circle tensor. Instead, each `getitem` will be
+ `split_with_sizes` has multiple output tensors along with `getitem`.
+ Unlike other ops, node itself doesn't become a circle tensor. Instead, each `getitem` will be
  a circle tensor.
- Further, torch module having `split_with_sizes` may somtimes return selected outputs. At that time, `getitem`
- nodes are generated only for the ouptut selected. Since one-compiler assumes that `CircleSplitV` always has
- all the outputs, let's add unused output tensors to compensate this restriction.
+
+ torch module having `split_with_sizes` may return selected outputs by using `getitem`.
+ However, one-compiler assumes that `CircleSplitV` always have all outputs.
+
+ So, let's add unused output tensors to compensate this restriction.
  """
  outputs: List[Union[circle.Tensor.TensorT, torch.fx.node.Node]] = []
  sorted_users = sorted(node.users.keys(), key=lambda x: x.args[1]) # type: ignore[arg-type, return-value]
@@ -80,11 +82,17 @@ class SplitWithSizesVisitor(NodeVisitor):
  fake_tensor = node_val[idx]
  assert isinstance(fake_tensor, FakeTensor)
  shape = list(fake_tensor.size())
+
+ if any(isinstance(s, torch.SymInt) for s in shape):
+ # TODO: support dynamic shape
+ raise NotImplementedError("Dynamic shape is not supported yet.")
+
  dtype = to_circle_dtype(fake_tensor.dtype)
  tensor = self.graph.add_tensor_from_scratch(
- f"{node.name}_unused_{idx}",
- shape,
- dtype,
+ prefix=f"{node.name}_unused_{idx}",
+ shape=shape,
+ shape_signature=None, # TODO: support dynamic shape
+ dtype=dtype,
  source_node=node,
  )
  outputs.append(tensor)
tico/serialize/operators/op_transpose_conv.py CHANGED
@@ -23,7 +23,7 @@ from circle_schema import circle
  from tico.serialize.circle_mapping import (
  circle_legalize_dtype_to,
  extract_circle_dtype,
- extract_shape,
+ extract_circle_shape,
  )
  from tico.serialize.operators.hashable_opcode import OpCode
  from tico.serialize.operators.node_visitor import NodeVisitor, register_node_visitor
@@ -80,13 +80,17 @@ class TransposeConvVisitor(NodeVisitor):
 
  assert groups == 1, "Only support group 1"
 
- input_shape = list(extract_shape(input_))
- output_shape = list(extract_shape(node))
- weight_shape = list(extract_shape(weight))
+ input_shape, input_shape_signature = extract_circle_shape(input_)
+ output_shape, _ = extract_circle_shape(node)
+ weight_shape, _ = extract_circle_shape(weight)
  assert len(input_shape) == 4, len(input_shape)
  assert len(output_shape) == 4, len(output_shape)
  assert len(weight_shape) == 4, len(weight_shape)
 
+ if input_shape_signature is not None:
+ # TODO: support dynamic shapes
+ raise NotImplementedError("Dynamic shape is not supported yet")
+
  pad_decision = identify_padding(padding, input_shape, output_shape, stride)
 
  conv_input: torch.fx.Node | circle.Tensor.TensorT = input_
@@ -112,6 +116,7 @@ class TransposeConvVisitor(NodeVisitor):
  pad_output = self.graph.add_tensor_from_scratch(
  prefix=f"{node.name}_input_pad_output",
  shape=pad_output_shape,
+ shape_signature=None,
  dtype=extract_circle_dtype(input_),
  qparam=input_qparam,
  source_node=node,
tico/serialize/operators/op_view.py CHANGED
@@ -56,6 +56,7 @@ class ViewVisitor(NodeVisitor):
  if isinstance(size, int):
  raise Exception("scalar size conversion is not supported yet.")
 
+ # TODO: support dynamic shape
  size_i32 = circle_legalize_dtype_to(size, dtype=torch.int32)
  inputs = [input, size_i32]
  outputs = [node]
@@ -67,7 +68,7 @@ class ViewVisitor(NodeVisitor):
  circle.BuiltinOptions.BuiltinOptions.ReshapeOptions
  )
  option = circle.ReshapeOptions.ReshapeOptionsT()
- option.newShape = size_i32
+ option.newShape = size_i32.tolist()
 
  operator.builtinOptions = option
 
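For context on the option.newShape change in op_reshape.py and op_view.py above: the new size is now handed over as a plain Python list rather than as the int32 torch.Tensor returned by circle_legalize_dtype_to, presumably because the flatbuffers ReshapeOptionsT object is packed from plain ints. A standalone sketch of that conversion; torch.as_tensor stands in for the legalization helper and the size values are illustrative only:

    import torch

    size = [2, -1, 8]                                    # target size for view/reshape
    size_i32 = torch.as_tensor(size, dtype=torch.int32)  # stand-in for circle_legalize_dtype_to(...)
    new_shape = size_i32.tolist()                        # plain list of ints for ReshapeOptionsT.newShape
    assert new_shape == [2, -1, 8]
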
tico/utils/serialize.py CHANGED
@@ -14,6 +14,7 @@
 
 
  from tico.serialize.circle_graph import CircleSubgraph
+ from tico.serialize.circle_mapping import validate_circle_shape
  from tico.utils.graph import get_module_name_chain
 
 
@@ -37,3 +38,13 @@ def finalise_tensor_names(
  for tensor in graph.tensors:
  if tensor.name in graph.name_to_node:
  tensor.name = f"{get_module_name_chain(graph.name_to_node[tensor.name])}::{tensor.name}"
+
+
+ def validate_tensor_shapes(
+ graph: CircleSubgraph,
+ ) -> None:
+ """
+ Let's validate all tensors' shapes against their shape signatures.
+ """
+ for tensor in graph.tensors:
+ validate_circle_shape(tensor.shape, tensor.shapeSignature)
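
A throwaway check of the new validate_tensor_shapes hook, assuming this wheel is installed; the SimpleNamespace objects below are not real Circle tensors, they only carry the two attributes the validator reads (shape and shapeSignature), which is enough to see it pass or raise:

    from types import SimpleNamespace
    from tico.utils.serialize import validate_tensor_shapes

    fake_graph = SimpleNamespace(
        tensors=[
            SimpleNamespace(shape=[2, 3], shapeSignature=None),     # static tensor
            SimpleNamespace(shape=[1, 3], shapeSignature=[-1, 3]),  # dynamic dim 0
        ]
    )
    validate_tensor_shapes(fake_graph)  # passes; an inconsistent pair would raise ValueError
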
{tico-0.1.0.dev250724.dist-info → tico-0.1.0.dev250728.dist-info}/METADATA CHANGED
@@ -1,6 +1,6 @@
  Metadata-Version: 2.1
  Name: tico
- Version: 0.1.0.dev250724
+ Version: 0.1.0.dev250728
  Summary: Convert exported Torch module to circle
  Home-page: UNKNOWN
  License: UNKNOWN
{tico-0.1.0.dev250724.dist-info → tico-0.1.0.dev250728.dist-info}/RECORD CHANGED
@@ -1,4 +1,4 @@
- tico/__init__.py,sha256=yM9vGhAtMUsoFy-o3sf3ZOhadAaZ_-cXKO_AXBJvC9E,1883
+ tico/__init__.py,sha256=kZCpd8rEcyfgQB3nr_jtFyjo2VEY_NHzI1gBgPCFAIQ,1883
  tico/pt2_to_circle.py,sha256=gu3MD4Iqc0zMZcCZ2IT8oGbyj21CTSbT3Rgd9s2B_9A,2767
  tico/config/__init__.py,sha256=xZzCXjZ84qE-CsBi-dfaL05bqpQ3stKKfTXhnrJRyVs,142
  tico/config/base.py,sha256=q5xMqGxTUZs4mFqt5c7i_y9U00fYgdMGl9nUqIVMlCo,1248
@@ -51,7 +51,7 @@ tico/experimental/quantization/evaluation/executor/circle_executor.py,sha256=eCC
  tico/experimental/quantization/evaluation/executor/triv24_executor.py,sha256=sUoXl6oOO2arAKaNjOBg7HiQja145_Jv6qgY7XtR7A8,5159
  tico/experimental/quantization/passes/__init__.py,sha256=IO6FP_xYbGy0dW0HL26GXD3ouxARaxCK7bz9dn4blPQ,26
  tico/experimental/quantization/passes/fold_quant_ops.py,sha256=bRHYSeHdSTaz3261skIkK5Aso2Lbv7ql0zFI9ICmbDY,7028
- tico/experimental/quantization/passes/insert_quantize_on_dtype_mismatch.py,sha256=4xwOzUrnMH4MQfdOHBHHXQo1EyWSkqNNVfBg47J5UCg,14721
+ tico/experimental/quantization/passes/insert_quantize_on_dtype_mismatch.py,sha256=AtfK9kDnWyIWyVlwD4a0EEx_-5rW5Hmo5DuKZ-HyXH0,15069
  tico/experimental/quantization/passes/propagate_qparam_backward.py,sha256=TGtyW0Z2qOTgVIasBdGRgbwH31YYd6ek7OvLTmCV614,3118
  tico/experimental/quantization/passes/propagate_qparam_forward.py,sha256=RhUHGCR2RpBO5KYkQ7Z8U5u7HEwDq2wdKHLKAJCi-5c,5138
  tico/experimental/quantization/passes/quantize_bias.py,sha256=T7YxJ70N0tSK0FF9VJZA5iP0sHdnnsX9GX4AT4JDFSk,4325
@@ -96,9 +96,9 @@ tico/passes/remove_redundant_to_copy.py,sha256=tKy4XKkO2l33fMxVPQ_iFkUeFvP15kbPv
  tico/passes/restore_linear.py,sha256=xGJdNb-1CrkOKS9BnLbcblkZc6P2vVjKIi-7lRcs7Bk,4111
  tico/passes/segment_index_select.py,sha256=VVCKNLtYRkr9n5lGnlzEuQsQ0WVxEYXGchFrDnB1C40,5189
  tico/serialize/__init__.py,sha256=IO6FP_xYbGy0dW0HL26GXD3ouxARaxCK7bz9dn4blPQ,26
- tico/serialize/circle_graph.py,sha256=8H7N4shSWj8TmsS8VUZUOQbA3DSCjZY8u1k45opXpWc,12317
- tico/serialize/circle_mapping.py,sha256=C9C3ORACQOdvBdnt5KRzlT8zao_TvzQklIxH794OhP0,5719
- tico/serialize/circle_serializer.py,sha256=eb90eTgU6WAZEWPclZIkInJsqAP1KjRSF2QeY5F3c3E,10766
+ tico/serialize/circle_graph.py,sha256=gEb-vubY1qcPqaUdINabYvqTSpGNsfRKLeFV3_2NVBA,11973
+ tico/serialize/circle_mapping.py,sha256=lH5VxYMQc8xJfRG3wC_Td-wRFEJWhlAwgxVMDetz1C8,7958
+ tico/serialize/circle_serializer.py,sha256=BGK9tltKkoL1h4rcrJUgDJIGlHst7aF3cZAKJk_GPWc,10950
  tico/serialize/pack.py,sha256=5HZ9kX3x6C6CyT_FWS6FRmvx_P7Dx21orjUNQxJ2xlo,1297
  tico/serialize/quant_param.py,sha256=6nbGKdqwMI9Cx9BLXJ9A9JU4qb770S8vTM1vCZRX3Eo,1342
  tico/serialize/operators/__init__.py,sha256=LIvXsNnN4yUCS2CGNQ5XW8p8oXDTV_WHWuOEAw1t6WY,990
@@ -107,20 +107,20 @@ tico/serialize/operators/node_visitor.py,sha256=UYyCwXqSCeRyimThMShstHnt7vKM9tsu
  tico/serialize/operators/op_abs.py,sha256=Y-vy7rcqPT-qD3QS5R8zbApWWTPpjY6xuMMVnbIhYmQ,1827
  tico/serialize/operators/op_add.py,sha256=otm062DMHVAThWmOtSTZdPyP3P5-2cv5VL_UWBJeLms,2346
  tico/serialize/operators/op_alias_copy.py,sha256=Xu9OiILbGf8oddh8yTqovvLfgVs8XYV7Cg4n6CesWcg,2175
- tico/serialize/operators/op_any.py,sha256=Umsr5H7iaX9GoIDRcyqmvXo0yfswZnQ5rhiAn1lGrOY,5161
+ tico/serialize/operators/op_any.py,sha256=wrTXFQ1TWl-2ET2NGXAXI1dzfDDJsYtA71pyj2numPE,4968
  tico/serialize/operators/op_arange_start_step.py,sha256=0T5lWwh3TfsFStmVv0v5qG03KENRDBmMix08RXQ4D-U,2132
  tico/serialize/operators/op_argmax.py,sha256=ARyGHlmWVmzwCct93V5x1-VyKqhxMOvV8GuM8yQWXdo,2290
- tico/serialize/operators/op_avg_pool2d.py,sha256=vc7WCakGXtGFPV1ix5EJmboH23tQ-cSI36ePY3PHKI4,7544
+ tico/serialize/operators/op_avg_pool2d.py,sha256=OlxEezOtqdmc76d88Q5zziBBoOaIwtsHyhHMYKXHHog,7930
  tico/serialize/operators/op_bmm.py,sha256=AELjHC9ISFPIzEEl5Kr1s4GSNLZElwZmVZJWkEyCEoA,2189
  tico/serialize/operators/op_cat.py,sha256=XDYOh0XAyrM0TlxVm6Sa0OFFGrKk7aSDcGXC-hYX4gs,2204
- tico/serialize/operators/op_clamp.py,sha256=6iDFZZTMkEqJ8lpNcEhKUpAIc4h2oPkBCBWnnt4lTDw,4322
+ tico/serialize/operators/op_clamp.py,sha256=RRQVrzayDfN3PioCVJqa_yYOtcYwb5HHwkMe4E_YPmE,4408
  tico/serialize/operators/op_clone.py,sha256=vzDYJ8TS3tc2BAyd_z8nt5VqT1inpymSseMEhd9dva0,2394
  tico/serialize/operators/op_constant_pad_nd.py,sha256=OpP4AP-d1IFcWZolNa-o9ZxzXJQkMdG9WQ66soX3s-E,2675
- tico/serialize/operators/op_conv2d.py,sha256=UfYk5xnA9PqVYyjU9dUCSW0CiCmcEK3LnlnFh0WY4Gg,6599
- tico/serialize/operators/op_copy.py,sha256=vaianLQ19-2ZQZ-MdQ07YuOPeFeo_HAx2a0Qfn7I5Kk,6122
+ tico/serialize/operators/op_conv2d.py,sha256=I9OgOr1cBGFkx1Q6eyLLvY5DmKdaieMSNmVAuZ8pBa0,6842
+ tico/serialize/operators/op_copy.py,sha256=boXHfl0bcvdBVl0tpzPMA_KBonh80vVqv61N3H5-PRU,6941
  tico/serialize/operators/op_cos.py,sha256=N12bNyuTQIxRnD0eHRPdFVzRQPMy1NFM4iM8oQ4lYzw,2034
- tico/serialize/operators/op_cumsum.py,sha256=3fmOf1mIeCX1uhTBcSJmRGXejzLtO8UwaI1eEQDC6nA,3798
- tico/serialize/operators/op_depthwise_conv2d.py,sha256=wH1SFjhWJdJrb8xi2qCiCeSWNxlL8IjEwALGCxTQxbc,7034
+ tico/serialize/operators/op_cumsum.py,sha256=px9ZGUDDsdWjrql8Z1FdXfF-7CJhditxyNz5QRZbLiM,3948
+ tico/serialize/operators/op_depthwise_conv2d.py,sha256=S2naBMWAoUL3nJiV7RVg97bUvuQdB5VldE_r--rX0hA,7297
  tico/serialize/operators/op_dequantize_per_channel.py,sha256=aPcVxjdgvfSFoLnv9NL-RxO5vZYj8ulqriMP5LHIWs0,3133
  tico/serialize/operators/op_dequantize_per_tensor.py,sha256=u9aK_Xle9rDN0EHLE0YrCTlXY4Q53Ch9Di4qmx7ynps,2304
  tico/serialize/operators/op_div.py,sha256=WjeM2Ux7TyGlSNx2aVC783JvcL0xnY6FBYo1Q_kdb5Q,2201
@@ -134,33 +134,33 @@ tico/serialize/operators/op_ge.py,sha256=TrgZ6wbIEYkDGfVFNtDlfM7ZkMMWjvcks5U5Dan
  tico/serialize/operators/op_gelu.py,sha256=bS8-0rg5_bT__OI3mBDywxGx4xTO2Iqea3h-uC17MpU,2145
  tico/serialize/operators/op_gt.py,sha256=JAVbtuAUNLYhtJycJJCEkYo9QAvmiK4lTMdw5yHUd10,1886
  tico/serialize/operators/op_index.py,sha256=iDW2YSeUS_kLiWEaQ_MjrYpxZAFBbm7_GU_2B4SRe6c,3033
- tico/serialize/operators/op_index_select.py,sha256=cw7IbvixooikGxzbpUmI9tHS4kjl4lXLtO9D-GO8qLQ,2277
+ tico/serialize/operators/op_index_select.py,sha256=O2MXXWGnCgS8QG3DrWKdYKbl88VBVscmOuoGcgBEf_0,2522
  tico/serialize/operators/op_instance_norm.py,sha256=5QvLefa74BrAPsTNYsi4Y7IB8d1wer4gtWantKo2nlQ,2940
  tico/serialize/operators/op_leaky_relu.py,sha256=UJPoL7kAIp6nAjyDdda_afdOcMLHme7NE77b2y76exc,2160
  tico/serialize/operators/op_linear.py,sha256=bw_mn2CiJy8CbpPevOV0PMPh0ZMWKAybLZ9cnIKJSsk,2527
  tico/serialize/operators/op_log.py,sha256=1TKvH2lttdAHE0P84vcxmOvGBBRUs6D71Jrei7SdZHE,1827
- tico/serialize/operators/op_log1p.py,sha256=gG7Fs4UDj_Nnp7U60UtPyz0fLv1lBpJVOGGCMm-42pY,3121
+ tico/serialize/operators/op_log1p.py,sha256=c-fSBkaDFZ2Z_4LcZMnEvKCfGZbNOrWBMz6-Hdw98V8,3203
  tico/serialize/operators/op_logical_and.py,sha256=WhQ8knuq32BO-WhAqkOgpcUStPkjoPmRWuYNsKveF3w,2163
  tico/serialize/operators/op_logical_not.py,sha256=ugrVcRqR3IvUUaiRVW5cArCYJbzmkcXp88QM846jCww,2129
  tico/serialize/operators/op_lt.py,sha256=_vA7dWpV9wVBxB7JL9pLQT9BsV91NGQBq_0auAtHK5Y,2080
  tico/serialize/operators/op_max_dim.py,sha256=nS_TZl5uq4uv1LwgBD9Wddyac4atKqBiIWKIyeXse2s,2519
- tico/serialize/operators/op_max_pool2d_with_indices.py,sha256=Vab8KV4w0i70P5XPdqItXEv_hLFjscVngypOltRvBV8,5746
+ tico/serialize/operators/op_max_pool2d_with_indices.py,sha256=2Q1upWdChLpq1x7IaoP0eRNgqTt7dH7yqlRjhEKlnjM,6034
  tico/serialize/operators/op_maximum.py,sha256=JjBr6gWEnuakLuk1_feotTHfIIm3s5YqWmqhUMpSPI0,1873
  tico/serialize/operators/op_mean.py,sha256=rVQZOxCJkHFY4kQBAS1HVK0HkcqxgkSy6zvEDLX_WYQ,2267
  tico/serialize/operators/op_minimum.py,sha256=fASjQVcTPCin02umQwFPdq2ss-Ve7S5A33J3QmmQ_wQ,1873
- tico/serialize/operators/op_mm.py,sha256=Fgq_HUUKuXOQY_t8lah3SOUqTsGet-KbVttCK4-fjAk,6821
+ tico/serialize/operators/op_mm.py,sha256=XcH15gjbP5aAl9rBKFQsVvN2GE4127zNH6_0v81_ExA,6855
  tico/serialize/operators/op_mul.py,sha256=si_VdYNyFbULb50SnXHOINh0dZQ2PhRB6Fzl54ZBj5Y,3049
  tico/serialize/operators/op_ne.py,sha256=xa2WJL2tYksxw7fIJic_D9ltLEseyCII8HpR32Oq8Do,1900
  tico/serialize/operators/op_neg.py,sha256=fkI3ExyD3QF-qtxBcXqQutPNDbNL8g7lZYE7CyD2wLk,2046
  tico/serialize/operators/op_permute.py,sha256=5DfX3pfZ5FDNmrSqx3-hRwPA7vm36z7BfG-nuyyBTsM,2282
- tico/serialize/operators/op_pow.py,sha256=z_4G_J1k_keeVE6ZYKSy-kqkdJ_i4p4kHkO0dJZnz-Y,5434
+ tico/serialize/operators/op_pow.py,sha256=a-Nyy_s8d9nCIEAb5DacB1quDVmDu1VOHyAkD75u7Ts,5573
  tico/serialize/operators/op_prelu.py,sha256=0ZybL5pNvBrRvQGy4M6gELrjiEXEsb2wBDdU8x4D75I,1874
  tico/serialize/operators/op_quantize_per_tensor.py,sha256=w-vYxSPnN2gtx-pEkkcMGU0ZjiwaS4y1sxy56pKEq3E,3004
  tico/serialize/operators/op_reciprocal.py,sha256=6b9_bxjg_0EvgAitSv1MgBi4PJSEgm-21s5qtWI1UR4,2394
  tico/serialize/operators/op_relu.py,sha256=WXCR_chwEUBqjFIQ_4E2avwk-Acy76pmX20rJQCBTQo,1832
  tico/serialize/operators/op_relu6.py,sha256=ZWqEolfAKjOdUC1ZCg0iuu4dBhkJRxVYR2tUzpbvKQM,1829
- tico/serialize/operators/op_repeat.py,sha256=0wTv1Mg7kg0eHz0CT6atyVAli4T4h5rYXq5opY6op20,4235
- tico/serialize/operators/op_reshape.py,sha256=0_bJwimiGAHaKkfwfhxUw9Gebt5tnecGaEVoKhEvV0Q,2550
+ tico/serialize/operators/op_repeat.py,sha256=q05RQOheLxkWgj63Czrp6tGpjGuB0900FNRKLxXdNJ4,4524
+ tico/serialize/operators/op_reshape.py,sha256=6wErQpmDX9mAmfJRCTg_cg1uOdJZqHm8Nux8dNI53Vg,2559
  tico/serialize/operators/op_resize_nearest_neighbor.py,sha256=dXaAnZ5M_ko_tH-HolxNpHFXkDUQ8x45myskojP5XZE,2771
  tico/serialize/operators/op_round.py,sha256=pe6w_TB4xGLu0iPv4Qo0a0fIkY9DgCgXk5127TWt8pE,1837
  tico/serialize/operators/op_rsqrt.py,sha256=yl2vd8InjhLPbE0vHIrEera6DVXlY9dLgO7yZZCH3RI,1837
@@ -170,16 +170,16 @@ tico/serialize/operators/op_sigmoid.py,sha256=ZubbGG1yU5uvNkEmOmbjj3eq7d9mwEaJdC
  tico/serialize/operators/op_sin.py,sha256=MbttmHTVKhwKK6gH9Vbcbn5aAaxnQ71NdpmQAlTcojU,1827
  tico/serialize/operators/op_slice.py,sha256=g0r8lj5CIxpT6ixOKqUzwKiNhoiuIFwWjbpaiCoOg6w,5259
  tico/serialize/operators/op_softmax.py,sha256=qwYke5zfhnSL89DZbzdr5Fc9SsJf0vI-LDZjT_NFpbc,3669
- tico/serialize/operators/op_split_with_sizes.py,sha256=TgYg1cu-3BSz9SsXfAhoJbo4q5ZzFaoFArkH_obsYlU,4274
+ tico/serialize/operators/op_split_with_sizes.py,sha256=DzSnMEsBoWpmun-NAW-lS-gssVUhaclzJ_nTxL5zZtM,4491
  tico/serialize/operators/op_sqrt.py,sha256=9Q5jkuEPrim11XfSQHGDGVTMYk1TQhOfVqMVYD_eIrI,1871
  tico/serialize/operators/op_squeeze.py,sha256=QnNwfAdTC1xBm04C9DkVs8VB5YRN-4fCsIWn189QaPg,2416
  tico/serialize/operators/op_sub.py,sha256=yZskQJF0ylXVk02Uid8djPNIWDJ-0uHJar4UYhlJVkk,2479
  tico/serialize/operators/op_sum.py,sha256=B5aSwQMhyoBe2JYdE5nVQ3QeVDSzL-yuZZujsG08OdQ,2294
  tico/serialize/operators/op_tanh.py,sha256=rs7FsbQeUQ7Ak8RoQV9ymNGXHXRObojfY_SiqJiyqdA,1846
  tico/serialize/operators/op_to_copy.py,sha256=a8T0uPMavMO_md1a-4_0dlvDHyZS_xew0qB6xjf69rI,3934
- tico/serialize/operators/op_transpose_conv.py,sha256=-qdtKOlOmuFXxOBsJd5Bj3A44L7726RdqpYMRtP2br0,5553
+ tico/serialize/operators/op_transpose_conv.py,sha256=PgM0mSIeiCMk-V2jQ2L1JdF-v3mHwdsdY4nTEp2GRro,5796
  tico/serialize/operators/op_unsqueeze.py,sha256=ZHhfVXSWEiwb2VDYX5uhxbGQyzZjKT7CrbBpVGxVHBU,2310
- tico/serialize/operators/op_view.py,sha256=5EMww-ve17Vm9XPuV03Tn7vJsjpU2J8U4d_FOrlm9_o,2546
+ tico/serialize/operators/op_view.py,sha256=xxE-GvTJ1UpcHst5KXYz3qKY-eJQvXKKrSZiA2O7E40,2593
  tico/serialize/operators/op_where.py,sha256=doE81GSwygrPBm3JIfN9w7kKXxeIYKxgk0eoY22QIcg,2845
  tico/serialize/operators/utils.py,sha256=lXGpEJW1h8U_-gfc6EWjvvSiq3yJ9P-v1v3EMRT_pSk,2954
  tico/utils/__init__.py,sha256=IO6FP_xYbGy0dW0HL26GXD3ouxARaxCK7bz9dn4blPQ,26
@@ -196,7 +196,7 @@ tico/utils/padding.py,sha256=0iEcS5G3gSFySPzBz1m5nxHYZ6MlLzG_KdAFJRauwIg,3279
  tico/utils/passes.py,sha256=kGmDe__5cPaO6i5EDAoXSVe6yXEoX9hAny4ROb3ZEmQ,2409
  tico/utils/pytree_utils.py,sha256=jrk3N6X6LiUnBCX_gM1K9nywbVAJBVnszlTAgeIeDUc,5219
  tico/utils/register_custom_op.py,sha256=3-Yl6iYmx1qQA2igNHt4hYhQhQMkdPb7gF50LIY8yvc,27350
- tico/utils/serialize.py,sha256=cBtEUfi_SU_9_v0cq2CNikzn8GnzEz2RwRvUH2NkWu4,1378
+ tico/utils/serialize.py,sha256=zAzylTEEgYc_9PZie7TlNY6umQol0Ris9omrzvtBdm0,1697
  tico/utils/torch_compat.py,sha256=oc6PztVsXdHcQ3iaVR90wLLxrGaj6zFHWZ8K9rRS6q8,1795
  tico/utils/trace_decorators.py,sha256=ddLIiKQfSaQrxgF1kNpwjFTQnXENzeSfcr1kuAW4jGI,3221
  tico/utils/utils.py,sha256=A5p3iAAxRGDsZJh4ybp-Qo3MX3vk5RrmSY-R3rXqVeI,12976
@@ -205,9 +205,9 @@ tico/utils/mx/__init__.py,sha256=IO6FP_xYbGy0dW0HL26GXD3ouxARaxCK7bz9dn4blPQ,26
  tico/utils/mx/elemwise_ops.py,sha256=V6glyAHsVR1joqpsgnNytatCD_ew92xNWZ19UFDoMTA,10281
  tico/utils/mx/formats.py,sha256=uzNWyu-1onUlwQfX5cZ6fZSUfHMRqorper7_T1k3jfk,3404
  tico/utils/mx/mx_ops.py,sha256=RcfUTYVi-wilGB2sC35OeARdwDqnixv7dG5iyZ-fQT8,8555
- tico-0.1.0.dev250724.dist-info/LICENSE,sha256=kp4JLII7bzRhPb0CPD5XTDZMh22BQ7h3k3B7t8TiSbw,12644
- tico-0.1.0.dev250724.dist-info/METADATA,sha256=AEx_8OeWODxLHfRAavGNh8dHJUAdUxOshMwBvjdd5tQ,8430
- tico-0.1.0.dev250724.dist-info/WHEEL,sha256=G16H4A3IeoQmnOrYV4ueZGKSjhipXx8zc8nu9FGlvMA,92
- tico-0.1.0.dev250724.dist-info/entry_points.txt,sha256=kBKYSS_IYrSXmUYevmmepqIVPScq5vF8ulQRu3I_Zf0,59
- tico-0.1.0.dev250724.dist-info/top_level.txt,sha256=oqs7UPoNSKZEwqsX8B-KAWdQwfAa7i60pbxW_Jk7P3w,5
- tico-0.1.0.dev250724.dist-info/RECORD,,
+ tico-0.1.0.dev250728.dist-info/LICENSE,sha256=kp4JLII7bzRhPb0CPD5XTDZMh22BQ7h3k3B7t8TiSbw,12644
+ tico-0.1.0.dev250728.dist-info/METADATA,sha256=dffUxDeJCYb5AUxiLN9WbXQF8yRm0aQaYz12ScXxrao,8430
+ tico-0.1.0.dev250728.dist-info/WHEEL,sha256=G16H4A3IeoQmnOrYV4ueZGKSjhipXx8zc8nu9FGlvMA,92
+ tico-0.1.0.dev250728.dist-info/entry_points.txt,sha256=kBKYSS_IYrSXmUYevmmepqIVPScq5vF8ulQRu3I_Zf0,59
+ tico-0.1.0.dev250728.dist-info/top_level.txt,sha256=oqs7UPoNSKZEwqsX8B-KAWdQwfAa7i60pbxW_Jk7P3w,5
+ tico-0.1.0.dev250728.dist-info/RECORD,,