tico 0.1.0.dev250604__py3-none-any.whl → 0.1.0.dev250608__py3-none-any.whl

This diff shows the changes between two publicly released versions of the package, as they appear in their respective public registries. It is provided for informational purposes only.
tico/__init__.py CHANGED
@@ -21,7 +21,7 @@ from tico.config import CompileConfigV1, get_default_config
  from tico.utils.convert import convert, convert_from_exported_program, convert_from_pt2

  # THIS LINE IS AUTOMATICALLY GENERATED BY setup.py
- __version__ = "0.1.0.dev250604"
+ __version__ = "0.1.0.dev250608"

  MINIMUM_SUPPORTED_VERSION = "2.5.0"
  SECURE_TORCH_VERSION = "2.6.0"
@@ -25,7 +25,12 @@ from tico.utils import logging
  from tico.utils.passes import PassBase, PassResult
  from tico.utils.trace_decorators import trace_graph_diff_on_pass
  from tico.utils.utils import set_new_meta_val
- from tico.utils.validate_args_kwargs import ReshapeArgs
+ from tico.utils.validate_args_kwargs import (
+ AddTensorArgs,
+ PermuteArgs,
+ ReshapeArgs,
+ SoftmaxArgs,
+ )


  def passes():
@@ -70,8 +75,8 @@ class RemoveRedundantReshapePattern1(PassBase):
  # Assumes that other node do not use ops in the pattern for simplisity.
  if len(reshape1.users) != 1:
  continue
- assert len(reshape1.args) == 2, len(reshape1.args)
- reshape1_input, reshape1_size = reshape1.args
+ reshape1_args = ReshapeArgs(*reshape1.args, **reshape1.kwargs) # type: ignore[arg-type]
+ reshape1_input, reshape1_size = reshape1_args.input, reshape1_args.size
  # `(AxBxC) - aten.reshape` - (1xAxBxC)
  if [1] + list(extract_shape(reshape1_input)) != list(
  extract_shape(reshape1)
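The hunks above and below follow the same recipe across the RemoveRedundantReshapePattern passes: the length assert plus positional unpacking of `node.args` is replaced by the typed arg wrappers from `tico.utils.validate_args_kwargs`, with `**node.kwargs` forwarded so keyword-style call sites are handled as well. The snippet below is only an illustrative sketch of that wrapper pattern (the field names mirror the diff, but the real class bodies in `tico.utils.validate_args_kwargs` may differ):

# Illustrative sketch only -- not the actual tico.utils.validate_args_kwargs code.
from dataclasses import dataclass
from typing import Any, List


@dataclass
class ReshapeArgs:
    """Typed view over aten.reshape(input, size) call args/kwargs."""

    input: Any        # a torch.fx.Node in practice
    size: List[int]


# Positional call site: reshape(x, [1, 4, 8])
args = ReshapeArgs(*("x", [1, 4, 8]))
# Keyword call site: reshape(x, size=[1, 4, 8]) -- this is why **node.kwargs is forwarded
args = ReshapeArgs(*("x",), **{"size": [1, 4, 8]})
assert args.size == [1, 4, 8]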
@@ -84,8 +89,8 @@ class RemoveRedundantReshapePattern1(PassBase):
  continue
  if len(permute.users) != 1:
  continue
- assert len(permute.args) == 2, len(permute.args)
- permute_input, permute_dims = permute.args
+ permute_args = PermuteArgs(*permute.args, **permute.kwargs) # type: ignore[arg-type]
+ permute_input, permute_dims = permute_args.input, permute_args.dims
  # (1xAxBxC) - `aten.permute` - (1xAxCxB)
  if permute_dims != [0, 1, 3, 2]:
  continue
@@ -103,7 +108,8 @@ class RemoveRedundantReshapePattern1(PassBase):
  continue
  if len(reshape2.users) != 1:
  continue
- reshape2_input, reshape2_size = reshape2.args
+ reshape2_args = ReshapeArgs(*reshape2.args, **reshape2.kwargs) # type: ignore[arg-type]
+ reshape2_input, reshape2_size = reshape2_args.input, reshape2_args.size
  # (1xAxCxB) - `aten.reshape - (AxCxB)
  if list(extract_shape(reshape2_input)) != [1] + list(
  extract_shape(reshape2)
@@ -154,8 +160,8 @@ class RemoveRedundantReshapePattern2(PassBase):
  continue
  if len(reshape1.users) != 1:
  continue
- assert len(reshape1.args) == 2, len(reshape1.args)
- reshape1_input, reshape1_size = reshape1.args
+ reshape1_args = ReshapeArgs(*reshape1.args, **reshape1.kwargs) # type: ignore[arg-type]
+ reshape1_input, reshape1_size = reshape1_args.input, reshape1_args.size
  # `(AxBxC) - aten.reshape` - (1xAxBxC)
  if [1] + list(extract_shape(reshape1_input)) != list(
  extract_shape(reshape1)
@@ -168,8 +174,8 @@ class RemoveRedundantReshapePattern2(PassBase):
  continue
  if len(permute.users) != 1:
  continue
- assert len(permute.args) == 2, len(permute.args)
- permute_input, permute_dims = permute.args
+ permute_args = PermuteArgs(*permute.args, **permute.kwargs) # type: ignore[arg-type]
+ permute_input, permute_dims = permute_args.input, permute_args.dims
  # (1xAxBxC) - `aten.permute` - (Bx1xAxC)
  if permute_dims != [2, 0, 1, 3]:
  continue
@@ -180,7 +186,8 @@ class RemoveRedundantReshapePattern2(PassBase):
  continue
  if len(reshape2.users) != 1:
  continue
- reshape2_input, reshape2_size = reshape2.args
+ reshape2_args = ReshapeArgs(*reshape2.args, **reshape2.kwargs) # type: ignore[arg-type]
+ reshape2_input, reshape2_size = reshape2_args.input, reshape2_args.size
  # (Bx1xAxC) - `aten.reshape - (Bx(A*C))
  reshape2_input_shape = list(extract_shape(reshape2_input))
  assert len(reshape2_input_shape) == 4
@@ -237,8 +244,8 @@ class RemoveRedundantReshapePattern3(PassBase):
  continue
  if not reshape_1.target in ops.aten.reshape:
  continue
- assert len(reshape_1.args) == 2, len(reshape_1.args)
- softmax, reshape_1_size = reshape_1.args
+ reshape_1_args = ReshapeArgs(*reshape_1.args, **reshape_1.kwargs) # type: ignore[arg-type]
+ softmax, reshape_1_size = reshape_1_args.input, reshape_1_args.size

  # softmax
  assert isinstance(softmax, torch.fx.Node), type(softmax)
@@ -246,8 +253,12 @@ class RemoveRedundantReshapePattern3(PassBase):
  continue
  if not softmax.target in ops.aten.softmax:
  continue
- assert len(softmax.args) == 3, len(softmax.args)
- add, softmax_dim, softmax_half_to_float = softmax.args
+ softmax_args = SoftmaxArgs(*softmax.args, **softmax.kwargs) # type: ignore[arg-type]
+ add, softmax_dim, softmax_half_to_float = (
+ softmax_args.input,
+ softmax_args.dim,
+ softmax_args.half_to_float,
+ )
  assert isinstance(add, torch.fx.Node), type(add)
  assert isinstance(softmax_dim, int), type(softmax_dim)
  assert isinstance(softmax_half_to_float, bool), type(softmax_half_to_float)
@@ -259,8 +270,8 @@ class RemoveRedundantReshapePattern3(PassBase):
  # add
  if not add.target in ops.aten.add:
  continue
- assert len(add.args) == 2, len(add.args)
- reshape_2, reshape_3 = add.args
+ add_args = AddTensorArgs(*add.args, **add.kwargs) # type: ignore[arg-type]
+ reshape_2, reshape_3 = add_args.input, add_args.other
  assert isinstance(reshape_2, torch.fx.Node), type(reshape_2)
  assert isinstance(reshape_3, torch.fx.Node), type(reshape_3)

@@ -269,16 +280,16 @@ class RemoveRedundantReshapePattern3(PassBase):
  continue
  if not reshape_2.target in ops.aten.reshape:
  continue
- assert len(reshape_2.args) == 2, len(reshape_2.args)
- reshape_2_input, reshape_2_size = reshape_2.args
+ reshape_2_args = ReshapeArgs(*reshape_2.args, **reshape_2.kwargs) # type: ignore[arg-type]
+ reshape_2_input, reshape_2_size = reshape_2_args.input, reshape_2_args.size
  assert isinstance(reshape_2_input, torch.fx.Node), type(reshape_2_input)
  # reshape_3
  if not reshape_3.op == "call_function":
  continue
  if not reshape_3.target in ops.aten.reshape:
  continue
- assert len(reshape_3.args) == 2, len(reshape_3.args)
- reshape_3_input, reshape_3_size = reshape_3.args
+ reshape_3_args = ReshapeArgs(*reshape_3.args, **reshape_3.kwargs) # type: ignore[arg-type]
+ reshape_3_input, reshape_3_size = reshape_3_args.input, reshape_3_args.size
  assert isinstance(reshape_3_input, torch.fx.Node), type(reshape_3_input)

  # Check condition
@@ -342,9 +353,8 @@ class RemoveRedundantReshapePattern4(PassBase):
  continue
  if not reshape1.target in ops.aten.reshape:
  continue
- assert len(reshape1.args) == 2, len(reshape1.args)
-
- reshape1_input, size = list(reshape1.args)
+ reshape1_args = ReshapeArgs(*reshape1.args, **reshape1.kwargs) # type: ignore[arg-type]
+ reshape1_input, size = reshape1_args.input, reshape1_args.size
  assert isinstance(reshape1_input, torch.fx.Node), type(reshape1_input)
  assert isinstance(size, list), type(size)
  for s in size:
@@ -359,9 +369,9 @@ class RemoveRedundantReshapePattern4(PassBase):
  continue
  if not reshape2.target in ops.aten.reshape:
  continue
- assert len(reshape2.args) == 2, len(reshape2.args)
+ reshape2_args = ReshapeArgs(*reshape2.args, **reshape2.kwargs) # type: ignore[arg-type]

- reshape2_input, reshape2_size = list(reshape2.args)
+ reshape2_input, reshape2_size = reshape2_args.input, reshape2_args.size
  assert isinstance(reshape2_input, torch.fx.Node), type(reshape2_input)
  assert isinstance(reshape2_size, list), type(reshape2_size)
  for s in reshape2_size:
@@ -411,7 +421,7 @@ class RemoveRedundantReshapePattern5(PassBase):
  if not node.target in ops.aten.reshape:
  continue

- args = ReshapeArgs(*node.args)
+ args = ReshapeArgs(*node.args, **node.kwargs) # type: ignore[arg-type]
  output_shape = args.size
  input_shape = list(extract_shape(args.input))

@@ -91,6 +91,10 @@ class CircleSubgraph(circle.SubGraph.SubGraphT):
  self.tensors: List[circle.Tensor.TensorT] = []
  self.operators: List[circle.Operator.OperatorT] = []
  self.name_to_tid: Dict[str, int] = {}
+ # Mapping from Circle tensor names to their originating FX nodes.
+ # Used to trace back tensor definitions to their source and finalize
+ # human-readable tensor names after serialization.
+ self.name_to_node: Dict[str, torch.fx.Node] = {}
  self.counter: defaultdict = defaultdict(int)

  # Generate a unique name with prefix.
@@ -111,6 +115,7 @@ class CircleSubgraph(circle.SubGraph.SubGraphT):

  def _add_tensor(self, tensor: circle.Tensor.TensorT) -> None:
  self.tensors.append(tensor)
+ assert tensor.name not in self.name_to_tid
  self.name_to_tid[tensor.name] = len(self.tensors) - 1

  def add_operator(self, op: circle.Operator.OperatorT) -> None:
@@ -138,10 +143,12 @@ class CircleSubgraph(circle.SubGraph.SubGraphT):
  return name in self.name_to_tid

  def add_tensor_from_node(
- self, node: torch.fx.node.Node, data: Optional[np.ndarray] = None
+ self, node: torch.fx.Node, data: Optional[np.ndarray] = None
  ) -> None:
  tensor = circle.Tensor.TensorT()
  tensor.name = self._gen_unique_name_with_prefix(node.name)
+ assert tensor.name not in self.name_to_node
+ self.name_to_node[tensor.name] = node
  assert node.meta.get("val") is not None
  tensor.type = extract_circle_dtype(node)
  tensor.shape = list(extract_shape(node))
@@ -165,10 +172,15 @@ class CircleSubgraph(circle.SubGraph.SubGraphT):
  tensor.buffer = bid
  self._add_tensor(tensor)

- def add_const_tensor(self, data: ConstData) -> circle.Tensor.TensorT:
+ def add_const_tensor(
+ self, data: ConstData, source_node: Optional[torch.fx.Node] = None
+ ) -> circle.Tensor.TensorT:
  assert is_const(data)
  tensor = circle.Tensor.TensorT()
  tensor.name = self._gen_unique_name_with_prefix("const_tensor")
+ assert tensor.name not in self.name_to_node
+ if source_node is not None:
+ self.name_to_node[tensor.name] = source_node
  assert not self.has_tensor(tensor.name)
  torch_t = torch.as_tensor(data=data)
  torch_t_shape = list(torch_t.size())
@@ -189,10 +201,45 @@ class CircleSubgraph(circle.SubGraph.SubGraphT):
  shape: List[int],
  dtype: int,
  qparam: Optional[QuantParam] = None,
+ source_node: Optional[torch.fx.Node] = None,
  ) -> circle.Tensor.TensorT:
+ """
+ Create a new tensor and register it into the Circle subgraph from scratch.
+
+ This function is used to allocate tensors that are not directly derived from
+ values in the FX graph, such as those created by padding or shape-generating
+ operators.
+
+ If a `source_node` is provided, it is used to enrich the tensor's metadata
+ (e.g., by associating the tensor with the module hierarchy path stored in
+ the node's `nn_module_stack`). This enables better traceability and more
+ informative tensor names in the final Circle model.
+
+ Parameters
+ ----------
+ prefix : str
+ A name prefix used to generate a unique tensor name.
+ shape : List[int]
+ The shape of the tensor.
+ dtype : int
+ The Circle-compatible dtype of the tensor. Use `to_circle_dtype()` to convert.
+ qparam : Optional[QuantParam]
+ Optional quantization parameters to apply to the tensor.
+ source_node : Optional[torch.fx.Node]
+ If provided, the FX node from which this tensor originates. Used to generate
+ a richer name and track module origin.
+
+ Returns
+ -------
+ circle.Tensor.TensorT
+ The newly created and registered tensor.
+ """
  assert isinstance(dtype, int), f"{dtype} must be integer. Use to_circle_dtype."
  tensor = circle.Tensor.TensorT()
  tensor.name = self._gen_unique_name_with_prefix(prefix)
+ assert tensor.name not in self.name_to_node
+ if source_node is not None:
+ self.name_to_node[tensor.name] = source_node
  tensor.shape = shape
  if qparam is not None:
  tensor.quantization = to_circle_qparam(qparam)
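As a rough usage sketch of the extended signature (the names `graph`, `node`, and `padded_input_shape` below are placeholders rather than code copied from tico), a visitor that needs a scratch buffer now also passes the FX node it is lowering, which is what later allows the tensor to be renamed with its module path:

# Minimal usage sketch, assuming `graph` is the CircleSubgraph being built and
# `node` is the torch.fx.Node currently being lowered.
pad_output = graph.add_tensor_from_scratch(
    prefix=f"{node.name}_pad_output",      # a unique tensor name is derived from this prefix
    shape=padded_input_shape,              # e.g. [1, 3, 34, 34] after padding
    dtype=to_circle_dtype(torch.float32),  # Circle dtype, converted from a torch dtype
    qparam=None,                           # no quantization for this scratch tensor
    source_node=node,                      # recorded in name_to_node for the later rename pass
)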
@@ -255,7 +302,7 @@ class CircleSubgraph(circle.SubGraph.SubGraphT):

  # TODO Rename, it doesn't only get_tid but also possibly add a new const tensor
  def get_tid(
- self, node: Union[torch.fx.node.Node, circle.Tensor.TensorT, ConstData]
+ self, node: Union[torch.fx.Node, circle.Tensor.TensorT, ConstData]
  ) -> int:
  # return -1 if node is None. This is for generating CircleOutputExclude
  if node == None:
@@ -31,6 +31,7 @@ from tico.serialize.circle_graph import CircleModel, CircleSubgraph
  from tico.serialize.operators.hashable_opcode import OpCode
  from tico.serialize.operators.node_visitor import get_node_visitors
  from tico.utils import logging
+ from tico.utils.serialize import finalise_tensor_names


  multiple_output_ops = [
@@ -150,6 +151,7 @@ def build_circle(edge_program: ExportedProgram) -> bytes:
  prefix=node.name,
  shape=list(attr_tensor.shape),
  dtype=to_circle_dtype(attr_tensor.dtype),
+ source_node=node,
  )

  logger.debug(f"get_attr: {node.name} tensor exported.")
@@ -217,6 +219,7 @@ def build_circle(edge_program: ExportedProgram) -> bytes:
  logger.debug(f"call_function: {node.name} ({opcode}) Op exported.")

  # Register subgraph
+ finalise_tensor_names(graph)
  model.subgraphs.append(graph)

  # Encode operator codes
@@ -122,7 +122,10 @@ class AnyVisitor(NodeVisitor):
  dst_dtype_circle = circle.TensorType.TensorType.BOOL
  dst_dtype_torch = torch.bool
  ne_tensor: circle.Tensor.TensorT = self.graph.add_tensor_from_scratch(
- prefix=f"{input.name}_ne", shape=input_shape, dtype=dst_dtype_circle
+ prefix=f"{input.name}_ne",
+ shape=input_shape,
+ dtype=dst_dtype_circle,
+ source_node=input,
  )
  ne_node = self.define_ne_node(
  [input_tensor, torch.Tensor([0]).to(dtype_torch)], [ne_tensor]
@@ -151,6 +151,7 @@ class AvgPool2DVisitor(NodeVisitor):
  prefix=f"{input.name}_pad_output",
  shape=padded_input_shape,
  dtype=input_dtype,
+ source_node=node,
  )
  pad_operator = define_pad_node(
  self.graph, self._op_codes, [input, padding_vec], [padded_input_tensor]
@@ -107,7 +107,10 @@ class ClampVisitor(NodeVisitor):
  input_shape = extract_shape(input)
  input_dtype = extract_circle_dtype(input)
  minimum_tensor = self.graph.add_tensor_from_scratch(
- prefix=f"{input.name}_min", dtype=input_dtype, shape=list(input_shape)
+ prefix=f"{input.name}_min",
+ dtype=input_dtype,
+ shape=list(input_shape),
+ source_node=node,
  )
  minimum_opertor = self.define_minimum_node(
  [input, max_val], [minimum_tensor]
@@ -159,6 +159,7 @@ class Conv2dVisitor(NodeVisitor):
  shape=pad_output_shape,
  dtype=input_dtype,
  qparam=input_qparam,
+ source_node=node,
  )
  # CirclePad
  pad_operator = define_pad_node(
@@ -111,6 +111,7 @@ class CopyVisitor(NodeVisitor):
  prefix=f"{dst_name}_shape_output",
  shape=dst_shape_shape,
  dtype=circle.TensorType.TensorType.INT32,
+ source_node=node,
  )

  shape_operator = self.define_shape_node([dst], [shape_output])
@@ -131,6 +132,7 @@ class CopyVisitor(NodeVisitor):
  prefix=f"{src_name}_broadcast_to_output",
  shape=dst_shape,
  dtype=src_type,
+ source_node=node,
  )
  )

@@ -63,7 +63,10 @@ class CumsumVisitor(NodeVisitor):
  cast_name = f"{input.name}_cast"
  cast_dtype = circle.TensorType.TensorType.INT64
  cast_tensor = self.graph.add_tensor_from_scratch(
- prefix=cast_name, dtype=cast_dtype, shape=input_shape
+ prefix=cast_name,
+ dtype=cast_dtype,
+ shape=input_shape,
+ source_node=node,
  )
  cast_operator = create_builtin_operator(
  self.graph, cast_op_index, [input], [cast_tensor]
@@ -171,6 +171,7 @@ class DepthwiseConv2dVisitor(NodeVisitor):
  prefix=f"{node.name}_input_pad_output",
  shape=pad_output_shape,
  dtype=input_dtype,
+ source_node=node,
  )
  # CirclePad
  pad_operator = define_pad_node(
@@ -65,7 +65,10 @@ class Log1pVisitor(NodeVisitor):
  input_shape = list(extract_shape(input))
  dst_dtype_circle = extract_circle_dtype(input)
  add_tensor: circle.Tensor.TensorT = self.graph.add_tensor_from_scratch(
- prefix=f"{input.name}_add", shape=input_shape, dtype=dst_dtype_circle
+ prefix=f"{input.name}_add",
+ shape=input_shape,
+ dtype=dst_dtype_circle,
+ source_node=node,
  )
  const_one = torch.tensor([1]).to(extract_torch_dtype(input))

@@ -106,6 +106,7 @@ class MaxPool2DWithIndicesVisitor(NodeVisitor):
  shape=padded_input_shape,
  dtype=input_dtype,
  qparam=input_qparam,
+ source_node=node,
  )
  if input_qparam is not None:
  padding_value = get_integer_dtype_min(input_qparam.dtype)
@@ -111,7 +111,9 @@ class MatmulDefaultVisitor(NodeVisitor):
  fullyconnected( lhs[H, K], trs_output[W', K] ) -> output(H, W')
  """

- def define_fc_with_transpose(self, inputs, outputs) -> circle.Operator.OperatorT:
+ def define_fc_with_transpose(
+ self, node, inputs, outputs
+ ) -> circle.Operator.OperatorT:
  lhs, rhs = inputs

  # get transpose shape
@@ -128,8 +130,9 @@ class MatmulDefaultVisitor(NodeVisitor):
  prefix=f"{rhs_name}_transposed_output",
  shape=rhs_shape_transpose,
  dtype=rhs_type,
+ source_node=node,
  )
- trs_perm = self.graph.add_const_tensor(data=[1, 0])
+ trs_perm = self.graph.add_const_tensor(data=[1, 0], source_node=node)
  trs_operator = self.define_transpose_node([rhs, trs_perm], [trs_output])
  self.graph.add_operator(trs_operator)

@@ -138,7 +141,7 @@ class MatmulDefaultVisitor(NodeVisitor):
  fc_weight = trs_output
  fc_shape = [fc_weight.shape[0]]
  fc_bias = self.graph.add_const_tensor(
- data=[0.0] * fc_shape[0],
+ data=[0.0] * fc_shape[0], source_node=node
  )

  operator = self.define_fc_node([fc_input, fc_weight, fc_bias], outputs)
@@ -169,6 +172,6 @@ class MatmulDefaultVisitor(NodeVisitor):
  if not is_const(other) and prior_latency:
  operator = self.define_bmm_node(inputs, outputs)
  else:
- operator = self.define_fc_with_transpose(inputs, outputs)
+ operator = self.define_fc_with_transpose(node, inputs, outputs)

  return operator
@@ -42,7 +42,10 @@ class BasePowVisitor(NodeVisitor):
  cast_name = f"{node.name}_cast"
  cast_dtype = circle.TensorType.TensorType.FLOAT32
  cast_tensor = self.graph.add_tensor_from_scratch(
- prefix=cast_name, dtype=cast_dtype, shape=node_shape
+ prefix=cast_name,
+ dtype=cast_dtype,
+ shape=node_shape,
+ source_node=node,
  )
  cast_operator = create_builtin_operator(
  self.graph, op_index, [node], [cast_tensor]
@@ -47,7 +47,7 @@ class ReciprocalVisitor(NodeVisitor):
  input = args.input

  input_tensor = torch.tensor(1, dtype=extract_torch_dtype(input))
- x = self.graph.add_const_tensor(input_tensor)
+ x = self.graph.add_const_tensor(input_tensor, source_node=node)
  inputs = [x, input]
  outputs = [node]

@@ -76,6 +76,7 @@ class RepeatVisitor(NodeVisitor):
  prefix=f"{node.name}_concat_{idx}",
  shape=repeated_shape,
  dtype=tensor_dtype,
+ source_node=node,
  )
  inputs = [concat_input] * r
  if repeat_dim_cnt == 1:
@@ -82,7 +82,10 @@ class SplitWithSizesVisitor(NodeVisitor):
  shape = list(fake_tensor.size())
  dtype = to_circle_dtype(fake_tensor.dtype)
  tensor = self.graph.add_tensor_from_scratch(
- f"{node.name}_unused_{idx}", shape, dtype
+ f"{node.name}_unused_{idx}",
+ shape,
+ dtype,
+ source_node=node,
  )
  outputs.append(tensor)

tico/utils/graph.py CHANGED
@@ -198,3 +198,39 @@ def is_single_value_tensor(t: torch.Tensor):
  return True

  return False
+
+
+ def get_module_name_chain(node: Optional[torch.fx.Node]) -> str:
+ """
+ Returns a slash-separated string of module names representing the
+ hierarchical path of the FX node within the original model.
+
+ If the node has no `nn_module_stack` metadata, "unknown" is returned.
+
+ Example:
+ "encoder/layer1/linear"
+
+ Parameters
+ ----------
+ node: torch.fx.Node
+ A node from an ExportedProgram graph.
+
+ Returns
+ -------
+ str
+ A human-readable string that describes the full module path.
+ """
+ if node is None:
+ return "unknown"
+ # Let's prefix "tico" for graph inputs
+ if node.op == "placeholder" and "nn_module_stack" not in node.meta:
+ return "tico"
+
+ assert isinstance(node, torch.fx.Node)
+ stack = node.meta.get("nn_module_stack")
+ if stack:
+ assert isinstance(stack, dict)
+ # Retrieving the last element is enough.
+ return next(reversed(stack.values()))[1]
+ else:
+ return "unknown"
tico/utils/serialize.py ADDED
@@ -0,0 +1,42 @@
+ # Copyright (c) 2025 Samsung Electronics Co., Ltd. All Rights Reserved
+ #
+ # Licensed under the Apache License, Version 2.0 (the "License");
+ # you may not use this file except in compliance with the License.
+ # You may obtain a copy of the License at
+ #
+ # http://www.apache.org/licenses/LICENSE-2.0
+ #
+ # Unless required by applicable law or agreed to in writing, software
+ # distributed under the License is distributed on an "AS IS" BASIS,
+ # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ # See the License for the specific language governing permissions and
+ # limitations under the License.
+
+ from typing import Optional
+
+ import torch
+
+ from tico.serialize.circle_graph import CircleSubgraph
+ from tico.utils.graph import get_module_name_chain
+
+
+ def finalise_tensor_names(
+ graph: CircleSubgraph,
+ ) -> None:
+ """
+ Replace every `tensor.name` with the *readable* version
+ **after** the graph is fully built.
+
+ Why late?
+ ---------
+ - All intermediate steps (add_input, add_output, get_tid…) rely on the
+ original technical names in ExportedProgram.
+
+ The rewrite is *in-place* and touches **only** the `name` field of
+ each tensor.
+ """
+ assert hasattr(graph, "name_to_node")
+
+ for tensor in graph.tensors:
+ if tensor.name in graph.name_to_node:
+ tensor.name = f"{get_module_name_chain(graph.name_to_node[tensor.name])}::{tensor.name}"
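Downstream, build_circle now calls finalise_tensor_names(graph) just before registering the subgraph (see the serializer hunk above), so every tensor with a recorded source node ends up prefixed with its module chain. A minimal sketch of the rename, using made-up names:

# Sketch with illustrative names: how the rename composes for a recorded tensor.
original = "linear_pad_output"
module_chain = "encoder/layer1/linear"  # would come from get_module_name_chain(source_node)
finalised = f"{module_chain}::{original}"
assert finalised == "encoder/layer1/linear::linear_pad_output"
# Tensors with no entry in graph.name_to_node keep their original technical name.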
@@ -1,6 +1,6 @@
  Metadata-Version: 2.1
  Name: tico
- Version: 0.1.0.dev250604
+ Version: 0.1.0.dev250608
  Summary: Convert exported Torch module to circle
  Home-page: UNKNOWN
  License: UNKNOWN
@@ -1,4 +1,4 @@
- tico/__init__.py,sha256=X2eazANf-Y9gV1fcug-QytZyLbNFnJqqW658DamD_wI,1743
+ tico/__init__.py,sha256=FjyTxaQjfrgfUCPfMzx5WnfpSY0Vbksu3gx8m6Tc5xY,1743
  tico/pt2_to_circle.py,sha256=PPmFNw20jw2Z2VyM3ln9pX__jTzBOAZiv0gT5a-p-Y8,2666
  tico/config/__init__.py,sha256=xZzCXjZ84qE-CsBi-dfaL05bqpQ3stKKfTXhnrJRyVs,142
  tico/config/base.py,sha256=anwOiJFkUxUi7Cef573JgQcjk6S-FSi6O_TLjYASW-g,1244
@@ -87,15 +87,15 @@ tico/passes/remove_nop.py,sha256=5QE3inFsXgzyPT_t7pKeXNqD1LRf6ed_Mp7YMadA6AI,270
  tico/passes/remove_redundant_assert_nodes.py,sha256=3a2xEQ2iPY7Gqg8jZi8G5bfDDrK2kOO1OHCMv_gJGz0,1592
  tico/passes/remove_redundant_expand.py,sha256=7st92AbWOl7yzM0Y5seaZJQKMFHqkYpH3qYMOlAU5lk,2234
  tico/passes/remove_redundant_permute.py,sha256=sS53eTY4sSnpZWDaaHN8czUmzNwmqh1lF90nYamXzac,3566
- tico/passes/remove_redundant_reshape.py,sha256=eJ2GpYTx7qrXROjmZn4m9xBFIfjSWjlGjYqQrEnU5Qw,15677
+ tico/passes/remove_redundant_reshape.py,sha256=aPZcDR0kBExEsWCYfBbLulm_wcjJNnGjn4mgrUIPdpU,16810
  tico/passes/remove_redundant_slice.py,sha256=BAfSkA5jDIEhYx4nMnu6cJadQle3YTw5y39ZLiYfJJ8,2109
  tico/passes/remove_redundant_to_copy.py,sha256=uTIjAn3Eli_RvXC-QOqxBAkV_whDBkkNhu-mvNKAEhs,3136
  tico/passes/restore_linear.py,sha256=UMMHdLmRGq9bfJx_0L9lL2UQBd51PGNP0WywO8KdrDM,4066
  tico/passes/segment_index_select.py,sha256=ifXOIFC12lNwsB-s3k1cJcMHP3UEijPpkMAbwI7lZbQ,5097
  tico/serialize/__init__.py,sha256=IO6FP_xYbGy0dW0HL26GXD3ouxARaxCK7bz9dn4blPQ,26
- tico/serialize/circle_graph.py,sha256=l9fcvV4x3L5Bh2WmEseo--0KFjSVTrlSBaBrqbrmXgg,9498
+ tico/serialize/circle_graph.py,sha256=_u0vFDhPdOhEkucmaEhqILo13NKbjyVemPYFfC5YCZg,11619
  tico/serialize/circle_mapping.py,sha256=C9C3ORACQOdvBdnt5KRzlT8zao_TvzQklIxH794OhP0,5719
- tico/serialize/circle_serializer.py,sha256=gJP2QEY7hFrlP_I4JRnnMehThr9ay9SUyoOQ2ppJBl0,8859
+ tico/serialize/circle_serializer.py,sha256=KRx_Azx2Je9XNYe-pZuuiSMvbXEddd8M8qDATIt7XXk,8981
  tico/serialize/pack.py,sha256=5HZ9kX3x6C6CyT_FWS6FRmvx_P7Dx21orjUNQxJ2xlo,1297
  tico/serialize/quant_param.py,sha256=s97GJyDOZULnqFUWPakHais31G_qqPuO0awPHCkZDvI,1342
  tico/serialize/operators/__init__.py,sha256=LIvXsNnN4yUCS2CGNQ5XW8p8oXDTV_WHWuOEAw1t6WY,990
@@ -104,20 +104,20 @@ tico/serialize/operators/node_visitor.py,sha256=UYyCwXqSCeRyimThMShstHnt7vKM9tsu
  tico/serialize/operators/op_abs.py,sha256=Y-vy7rcqPT-qD3QS5R8zbApWWTPpjY6xuMMVnbIhYmQ,1827
  tico/serialize/operators/op_add.py,sha256=otm062DMHVAThWmOtSTZdPyP3P5-2cv5VL_UWBJeLms,2346
  tico/serialize/operators/op_alias_copy.py,sha256=Xu9OiILbGf8oddh8yTqovvLfgVs8XYV7Cg4n6CesWcg,2175
- tico/serialize/operators/op_any.py,sha256=WMsHLq7WIcl6rD2G3QqpWRSCR-a6UYX6y5AjB6BDS3U,5049
+ tico/serialize/operators/op_any.py,sha256=QqAYby1WPd4l97vX-tBjAsOb_pVb1wPjj7SbWQNToCQ,5117
  tico/serialize/operators/op_arange_start_step.py,sha256=0T5lWwh3TfsFStmVv0v5qG03KENRDBmMix08RXQ4D-U,2132
  tico/serialize/operators/op_argmax.py,sha256=ARyGHlmWVmzwCct93V5x1-VyKqhxMOvV8GuM8yQWXdo,2290
- tico/serialize/operators/op_avg_pool2d.py,sha256=ABxhfowDz7SXlWnW2iQuSA5X52xm0PGLs-N1l9vGXbo,7488
+ tico/serialize/operators/op_avg_pool2d.py,sha256=eVd5ngFSImAklvJFfBAaEUvAc-I60e43xCdLjd5zwYA,7522
  tico/serialize/operators/op_bmm.py,sha256=AELjHC9ISFPIzEEl5Kr1s4GSNLZElwZmVZJWkEyCEoA,2189
  tico/serialize/operators/op_cat.py,sha256=XDYOh0XAyrM0TlxVm6Sa0OFFGrKk7aSDcGXC-hYX4gs,2204
- tico/serialize/operators/op_clamp.py,sha256=V3rncHvUAuJ2nXOyywTnOGCvNBeCQGqQIW1_zxKlSsA,4231
+ tico/serialize/operators/op_clamp.py,sha256=ZRAsXLGsZqJEh4wXxESEpRJkRtUuJWTDgAem6lr9_5I,4298
  tico/serialize/operators/op_clone.py,sha256=vzDYJ8TS3tc2BAyd_z8nt5VqT1inpymSseMEhd9dva0,2394
  tico/serialize/operators/op_constant_pad_nd.py,sha256=OpP4AP-d1IFcWZolNa-o9ZxzXJQkMdG9WQ66soX3s-E,2675
- tico/serialize/operators/op_conv2d.py,sha256=a8aU0CwPgCYM37rDWmrCpxMoQZQdxzXaPtJAVB7dK0M,7283
- tico/serialize/operators/op_copy.py,sha256=W_ih1yqqMwSCO_l9l_LUn_G_IoTgDJcNUnAbX1ZITZI,6054
+ tico/serialize/operators/op_conv2d.py,sha256=nC_jqzjlrUJ0L_lux_wXBqxDfq67jyroXSgrl5WoNfk,7317
+ tico/serialize/operators/op_copy.py,sha256=vaianLQ19-2ZQZ-MdQ07YuOPeFeo_HAx2a0Qfn7I5Kk,6122
  tico/serialize/operators/op_cos.py,sha256=N12bNyuTQIxRnD0eHRPdFVzRQPMy1NFM4iM8oQ4lYzw,2034
- tico/serialize/operators/op_cumsum.py,sha256=HhPF2uKMamk8KIiV0j6P-grU8oZ9AhlcRsRVC28oYVw,3731
- tico/serialize/operators/op_depthwise_conv2d.py,sha256=Ed0aVW0UaRoTDPt6kcF4iDzpx_CqZ0oDjdiMSexVk78,7498
+ tico/serialize/operators/op_cumsum.py,sha256=3fmOf1mIeCX1uhTBcSJmRGXejzLtO8UwaI1eEQDC6nA,3798
+ tico/serialize/operators/op_depthwise_conv2d.py,sha256=PTos0tQoM8EZoB88s4Tjb7n6pJja5nbNQRDsucVzRwc,7532
  tico/serialize/operators/op_dequantize_per_channel.py,sha256=aPcVxjdgvfSFoLnv9NL-RxO5vZYj8ulqriMP5LHIWs0,3133
  tico/serialize/operators/op_dequantize_per_tensor.py,sha256=u9aK_Xle9rDN0EHLE0YrCTlXY4Q53Ch9Di4qmx7ynps,2304
  tico/serialize/operators/op_div.py,sha256=WjeM2Ux7TyGlSNx2aVC783JvcL0xnY6FBYo1Q_kdb5Q,2201
@@ -135,27 +135,27 @@ tico/serialize/operators/op_index_select.py,sha256=cw7IbvixooikGxzbpUmI9tHS4kjl4
  tico/serialize/operators/op_instance_norm.py,sha256=AhcVm71ChB16BlPNwqBh5tMHCqMShOXHPkE8Ag9jBfQ,3144
  tico/serialize/operators/op_linear.py,sha256=bw_mn2CiJy8CbpPevOV0PMPh0ZMWKAybLZ9cnIKJSsk,2527
  tico/serialize/operators/op_log.py,sha256=1TKvH2lttdAHE0P84vcxmOvGBBRUs6D71Jrei7SdZHE,1827
- tico/serialize/operators/op_log1p.py,sha256=lH0rLxpag7kGeM5UiE5b1Q4JluOE-yiQSsEcQBYv6ts,3066
+ tico/serialize/operators/op_log1p.py,sha256=gG7Fs4UDj_Nnp7U60UtPyz0fLv1lBpJVOGGCMm-42pY,3121
  tico/serialize/operators/op_logical_and.py,sha256=WhQ8knuq32BO-WhAqkOgpcUStPkjoPmRWuYNsKveF3w,2163
  tico/serialize/operators/op_logical_not.py,sha256=ugrVcRqR3IvUUaiRVW5cArCYJbzmkcXp88QM846jCww,2129
  tico/serialize/operators/op_lt.py,sha256=_vA7dWpV9wVBxB7JL9pLQT9BsV91NGQBq_0auAtHK5Y,2080
  tico/serialize/operators/op_max_dim.py,sha256=nS_TZl5uq4uv1LwgBD9Wddyac4atKqBiIWKIyeXse2s,2519
- tico/serialize/operators/op_max_pool2d_with_indices.py,sha256=GBeBNEyohVSGMIiY0Z1UUX-9N8OdVGitbGNAq_TJZd4,5631
+ tico/serialize/operators/op_max_pool2d_with_indices.py,sha256=SPXIdlrYgJfR982il_ajokNehFYJWOmccUoksS_0qos,5665
  tico/serialize/operators/op_maximum.py,sha256=JjBr6gWEnuakLuk1_feotTHfIIm3s5YqWmqhUMpSPI0,1873
  tico/serialize/operators/op_mean.py,sha256=rVQZOxCJkHFY4kQBAS1HVK0HkcqxgkSy6zvEDLX_WYQ,2267
  tico/serialize/operators/op_minimum.py,sha256=fASjQVcTPCin02umQwFPdq2ss-Ve7S5A33J3QmmQ_wQ,1873
- tico/serialize/operators/op_mm.py,sha256=fHggR9dmlwXw0DAyn__2JbG7e0q1Jhfmi5-2jDlpRDk,6730
+ tico/serialize/operators/op_mm.py,sha256=Fgq_HUUKuXOQY_t8lah3SOUqTsGet-KbVttCK4-fjAk,6821
  tico/serialize/operators/op_mul.py,sha256=42Guc0MWBGBCZoj9-4LcLtTMtUPwsmDSVmvkR8tqLhM,3165
  tico/serialize/operators/op_ne.py,sha256=xa2WJL2tYksxw7fIJic_D9ltLEseyCII8HpR32Oq8Do,1900
  tico/serialize/operators/op_neg.py,sha256=fkI3ExyD3QF-qtxBcXqQutPNDbNL8g7lZYE7CyD2wLk,2046
  tico/serialize/operators/op_permute.py,sha256=5DfX3pfZ5FDNmrSqx3-hRwPA7vm36z7BfG-nuyyBTsM,2282
- tico/serialize/operators/op_pow.py,sha256=cvW81yaW2hxsMPwRF_vJu4VXMy2AvdATqJ26eNRrGZk,5379
+ tico/serialize/operators/op_pow.py,sha256=z_4G_J1k_keeVE6ZYKSy-kqkdJ_i4p4kHkO0dJZnz-Y,5434
  tico/serialize/operators/op_prelu.py,sha256=0ZybL5pNvBrRvQGy4M6gELrjiEXEsb2wBDdU8x4D75I,1874
  tico/serialize/operators/op_quantize_per_tensor.py,sha256=w-vYxSPnN2gtx-pEkkcMGU0ZjiwaS4y1sxy56pKEq3E,3004
- tico/serialize/operators/op_reciprocal.py,sha256=cK2uddDbV32wPAhD8msUW7AXnLIGIs7v6SxTq9i5Bxc,2376
+ tico/serialize/operators/op_reciprocal.py,sha256=6b9_bxjg_0EvgAitSv1MgBi4PJSEgm-21s5qtWI1UR4,2394
  tico/serialize/operators/op_relu.py,sha256=WXCR_chwEUBqjFIQ_4E2avwk-Acy76pmX20rJQCBTQo,1832
  tico/serialize/operators/op_relu6.py,sha256=ZWqEolfAKjOdUC1ZCg0iuu4dBhkJRxVYR2tUzpbvKQM,1829
- tico/serialize/operators/op_repeat.py,sha256=nR-xKZYl4ZzEYj0Tw8N-oKh5wLyv3PxI80vmiNcAm_0,4193
+ tico/serialize/operators/op_repeat.py,sha256=0wTv1Mg7kg0eHz0CT6atyVAli4T4h5rYXq5opY6op20,4235
  tico/serialize/operators/op_reshape.py,sha256=PdYenXvfQxzYST3yNH6MTxUQ25TulNwiip6N2q76zfQ,2549
  tico/serialize/operators/op_resize_nearest_neighbor.py,sha256=dXaAnZ5M_ko_tH-HolxNpHFXkDUQ8x45myskojP5XZE,2771
  tico/serialize/operators/op_rsqrt.py,sha256=yl2vd8InjhLPbE0vHIrEera6DVXlY9dLgO7yZZCH3RI,1837
@@ -165,7 +165,7 @@ tico/serialize/operators/op_sigmoid.py,sha256=ZubbGG1yU5uvNkEmOmbjj3eq7d9mwEaJdC
  tico/serialize/operators/op_sin.py,sha256=MbttmHTVKhwKK6gH9Vbcbn5aAaxnQ71NdpmQAlTcojU,1827
  tico/serialize/operators/op_slice.py,sha256=g0r8lj5CIxpT6ixOKqUzwKiNhoiuIFwWjbpaiCoOg6w,5259
  tico/serialize/operators/op_softmax.py,sha256=8AwmsAVdSoIMKdfejrw9cy44TbOvvXsA0w3WQDVpI3A,3855
- tico/serialize/operators/op_split_with_sizes.py,sha256=rZ6WR-u_S-jOa6fgN5l4xxVq7ZLLuJLhTbaabOvY6Bs,4195
+ tico/serialize/operators/op_split_with_sizes.py,sha256=TgYg1cu-3BSz9SsXfAhoJbo4q5ZzFaoFArkH_obsYlU,4274
  tico/serialize/operators/op_sqrt.py,sha256=9Q5jkuEPrim11XfSQHGDGVTMYk1TQhOfVqMVYD_eIrI,1871
  tico/serialize/operators/op_squeeze.py,sha256=QnNwfAdTC1xBm04C9DkVs8VB5YRN-4fCsIWn189QaPg,2416
  tico/serialize/operators/op_sub.py,sha256=yZskQJF0ylXVk02Uid8djPNIWDJ-0uHJar4UYhlJVkk,2479
@@ -181,12 +181,13 @@ tico/utils/convert.py,sha256=KCllPnvQ8bjEYR1yI72s9aNBp7Py1CzIEEpYSYZcu60,11684
  tico/utils/define.py,sha256=Ypgp7YffM4pgPl4Zh6TmogSn1OxGBMRw_e09qYGflZk,1467
  tico/utils/diff_graph.py,sha256=_eDGGPDPYQD4b--MXX0DLoVgSt_wLfNPt47UlolLLR4,5272
  tico/utils/errors.py,sha256=f3csJjgbXG9W1aHhqEcou008Aor19W57X8oT5Hx8w1M,954
- tico/utils/graph.py,sha256=YTAMbnjxqtNWy8ckOi68WYLXUIIvGwsux4PHmIamVok,6654
+ tico/utils/graph.py,sha256=qQIz78XfJK1rrGyq00f-ArqxACItiS-oHbN2nE4xQrY,7653
  tico/utils/logging.py,sha256=IlbBWscsaHidI0dNqro1HEXAbIcbkR3BD5ukLy2m95k,1286
  tico/utils/model.py,sha256=Uqc92AnJXQ2pbvctS2z2F3Ku3yNrwXZ9O33hZVis7is,1250
  tico/utils/padding.py,sha256=GGO27VbaOvtaMYLDrSaKv7uxjeet566aMJD0PyYeMvQ,1484
  tico/utils/passes.py,sha256=kGmDe__5cPaO6i5EDAoXSVe6yXEoX9hAny4ROb3ZEmQ,2409
  tico/utils/register_custom_op.py,sha256=iRQvdqlBqrJxq_pNkvJyDIJD_SYtCUl88wwbbuvSwlk,22952
+ tico/utils/serialize.py,sha256=AQXMBOLu-Kg2Rn-qbqsAtHndjZAZIavlKA0QFgJREHM,1420
  tico/utils/trace_decorators.py,sha256=ddLIiKQfSaQrxgF1kNpwjFTQnXENzeSfcr1kuAW4jGI,3221
  tico/utils/utils.py,sha256=NAa3ZX5G-UCQwmz5WnFl0iCEra24PMY5wC0MyX7smUg,11156
  tico/utils/validate_args_kwargs.py,sha256=P4aMnr9EhNCtc_AgJPpuezfQbqFfDn0lhJSWqmumLZ8,25054
@@ -194,9 +195,9 @@ tico/utils/mx/__init__.py,sha256=IO6FP_xYbGy0dW0HL26GXD3ouxARaxCK7bz9dn4blPQ,26
  tico/utils/mx/elemwise_ops.py,sha256=V6glyAHsVR1joqpsgnNytatCD_ew92xNWZ19UFDoMTA,10281
  tico/utils/mx/formats.py,sha256=uzNWyu-1onUlwQfX5cZ6fZSUfHMRqorper7_T1k3jfk,3404
  tico/utils/mx/mx_ops.py,sha256=RcfUTYVi-wilGB2sC35OeARdwDqnixv7dG5iyZ-fQT8,8555
- tico-0.1.0.dev250604.dist-info/LICENSE,sha256=kp4JLII7bzRhPb0CPD5XTDZMh22BQ7h3k3B7t8TiSbw,12644
- tico-0.1.0.dev250604.dist-info/METADATA,sha256=gN7iYzRhOodrWqExhbNBTMQ5S_6iOpb-vte13nECcak,8633
- tico-0.1.0.dev250604.dist-info/WHEEL,sha256=G16H4A3IeoQmnOrYV4ueZGKSjhipXx8zc8nu9FGlvMA,92
- tico-0.1.0.dev250604.dist-info/entry_points.txt,sha256=kBKYSS_IYrSXmUYevmmepqIVPScq5vF8ulQRu3I_Zf0,59
- tico-0.1.0.dev250604.dist-info/top_level.txt,sha256=oqs7UPoNSKZEwqsX8B-KAWdQwfAa7i60pbxW_Jk7P3w,5
- tico-0.1.0.dev250604.dist-info/RECORD,,
+ tico-0.1.0.dev250608.dist-info/LICENSE,sha256=kp4JLII7bzRhPb0CPD5XTDZMh22BQ7h3k3B7t8TiSbw,12644
+ tico-0.1.0.dev250608.dist-info/METADATA,sha256=LJYAjDzOfuHcUGzJvc-Cgc_QKCnVUHFY2yr9IY8EkgU,8633
+ tico-0.1.0.dev250608.dist-info/WHEEL,sha256=G16H4A3IeoQmnOrYV4ueZGKSjhipXx8zc8nu9FGlvMA,92
+ tico-0.1.0.dev250608.dist-info/entry_points.txt,sha256=kBKYSS_IYrSXmUYevmmepqIVPScq5vF8ulQRu3I_Zf0,59
+ tico-0.1.0.dev250608.dist-info/top_level.txt,sha256=oqs7UPoNSKZEwqsX8B-KAWdQwfAa7i60pbxW_Jk7P3w,5
+ tico-0.1.0.dev250608.dist-info/RECORD,,