onnxslim 0.1.72__tar.gz → 0.1.74__tar.gz

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (75)
  1. {onnxslim-0.1.72/onnxslim.egg-info → onnxslim-0.1.74}/PKG-INFO +1 -1
  2. onnxslim-0.1.74/VERSION +1 -0
  3. {onnxslim-0.1.72 → onnxslim-0.1.74}/onnxslim/core/pattern/fusion/__init__.py +1 -0
  4. onnxslim-0.1.74/onnxslim/core/pattern/fusion/convmul.py +69 -0
  5. {onnxslim-0.1.72 → onnxslim-0.1.74}/onnxslim/core/pattern/fusion/gemm.py +26 -4
  6. {onnxslim-0.1.72 → onnxslim-0.1.74}/onnxslim/core/pattern/fusion/padconv.py +5 -3
  7. {onnxslim-0.1.72 → onnxslim-0.1.74}/onnxslim/misc/tabulate.py +1 -1
  8. {onnxslim-0.1.72 → onnxslim-0.1.74}/onnxslim/third_party/_sympy/functions.py +1 -1
  9. {onnxslim-0.1.72 → onnxslim-0.1.74}/onnxslim/third_party/symbolic_shape_infer.py +1 -0
  10. onnxslim-0.1.74/onnxslim/version.py +1 -0
  11. {onnxslim-0.1.72 → onnxslim-0.1.74/onnxslim.egg-info}/PKG-INFO +1 -1
  12. {onnxslim-0.1.72 → onnxslim-0.1.74}/onnxslim.egg-info/SOURCES.txt +1 -0
  13. onnxslim-0.1.72/VERSION +0 -1
  14. onnxslim-0.1.72/onnxslim/version.py +0 -1
  15. {onnxslim-0.1.72 → onnxslim-0.1.74}/LICENSE +0 -0
  16. {onnxslim-0.1.72 → onnxslim-0.1.74}/MANIFEST.in +0 -0
  17. {onnxslim-0.1.72 → onnxslim-0.1.74}/README.md +0 -0
  18. {onnxslim-0.1.72 → onnxslim-0.1.74}/onnxslim/__init__.py +0 -0
  19. {onnxslim-0.1.72 → onnxslim-0.1.74}/onnxslim/__main__.py +0 -0
  20. {onnxslim-0.1.72 → onnxslim-0.1.74}/onnxslim/argparser.py +0 -0
  21. {onnxslim-0.1.72 → onnxslim-0.1.74}/onnxslim/cli/__init__.py +0 -0
  22. {onnxslim-0.1.72 → onnxslim-0.1.74}/onnxslim/cli/_main.py +0 -0
  23. {onnxslim-0.1.72 → onnxslim-0.1.74}/onnxslim/core/__init__.py +0 -0
  24. {onnxslim-0.1.72 → onnxslim-0.1.74}/onnxslim/core/optimization/__init__.py +0 -0
  25. {onnxslim-0.1.72 → onnxslim-0.1.74}/onnxslim/core/optimization/dead_node_elimination.py +0 -0
  26. {onnxslim-0.1.72 → onnxslim-0.1.74}/onnxslim/core/optimization/subexpression_elimination.py +0 -0
  27. {onnxslim-0.1.72 → onnxslim-0.1.74}/onnxslim/core/optimization/weight_tying.py +0 -0
  28. {onnxslim-0.1.72 → onnxslim-0.1.74}/onnxslim/core/pattern/__init__.py +0 -0
  29. {onnxslim-0.1.72 → onnxslim-0.1.74}/onnxslim/core/pattern/elimination/__init__.py +0 -0
  30. {onnxslim-0.1.72 → onnxslim-0.1.74}/onnxslim/core/pattern/elimination/concat.py +0 -0
  31. {onnxslim-0.1.72 → onnxslim-0.1.74}/onnxslim/core/pattern/elimination/reshape.py +0 -0
  32. {onnxslim-0.1.72 → onnxslim-0.1.74}/onnxslim/core/pattern/elimination/reshape_as.py +0 -0
  33. {onnxslim-0.1.72 → onnxslim-0.1.74}/onnxslim/core/pattern/elimination/slice.py +0 -0
  34. {onnxslim-0.1.72 → onnxslim-0.1.74}/onnxslim/core/pattern/elimination/unsqueeze.py +0 -0
  35. {onnxslim-0.1.72 → onnxslim-0.1.74}/onnxslim/core/pattern/fusion/concat_reshape.py +0 -0
  36. {onnxslim-0.1.72 → onnxslim-0.1.74}/onnxslim/core/pattern/fusion/convadd.py +0 -0
  37. {onnxslim-0.1.72 → onnxslim-0.1.74}/onnxslim/core/pattern/fusion/convbn.py +0 -0
  38. {onnxslim-0.1.72 → onnxslim-0.1.74}/onnxslim/core/pattern/fusion/gelu.py +0 -0
  39. {onnxslim-0.1.72 → onnxslim-0.1.74}/onnxslim/core/pattern/fusion/reduce.py +0 -0
  40. {onnxslim-0.1.72 → onnxslim-0.1.74}/onnxslim/core/pattern/registry.py +0 -0
  41. {onnxslim-0.1.72 → onnxslim-0.1.74}/onnxslim/misc/__init__.py +0 -0
  42. {onnxslim-0.1.72 → onnxslim-0.1.74}/onnxslim/third_party/__init__.py +0 -0
  43. {onnxslim-0.1.72 → onnxslim-0.1.74}/onnxslim/third_party/_sympy/__init__.py +0 -0
  44. {onnxslim-0.1.72 → onnxslim-0.1.74}/onnxslim/third_party/_sympy/numbers.py +0 -0
  45. {onnxslim-0.1.72 → onnxslim-0.1.74}/onnxslim/third_party/_sympy/printers.py +0 -0
  46. {onnxslim-0.1.72 → onnxslim-0.1.74}/onnxslim/third_party/_sympy/solve.py +0 -0
  47. {onnxslim-0.1.72 → onnxslim-0.1.74}/onnxslim/third_party/_sympy/symbol.py +0 -0
  48. {onnxslim-0.1.72 → onnxslim-0.1.74}/onnxslim/third_party/onnx_graphsurgeon/__init__.py +0 -0
  49. {onnxslim-0.1.72 → onnxslim-0.1.74}/onnxslim/third_party/onnx_graphsurgeon/exporters/__init__.py +0 -0
  50. {onnxslim-0.1.72 → onnxslim-0.1.74}/onnxslim/third_party/onnx_graphsurgeon/exporters/base_exporter.py +0 -0
  51. {onnxslim-0.1.72 → onnxslim-0.1.74}/onnxslim/third_party/onnx_graphsurgeon/exporters/onnx_exporter.py +0 -0
  52. {onnxslim-0.1.72 → onnxslim-0.1.74}/onnxslim/third_party/onnx_graphsurgeon/graph_pattern/__init__.py +0 -0
  53. {onnxslim-0.1.72 → onnxslim-0.1.74}/onnxslim/third_party/onnx_graphsurgeon/graph_pattern/graph_pattern.py +0 -0
  54. {onnxslim-0.1.72 → onnxslim-0.1.74}/onnxslim/third_party/onnx_graphsurgeon/importers/__init__.py +0 -0
  55. {onnxslim-0.1.72 → onnxslim-0.1.74}/onnxslim/third_party/onnx_graphsurgeon/importers/base_importer.py +0 -0
  56. {onnxslim-0.1.72 → onnxslim-0.1.74}/onnxslim/third_party/onnx_graphsurgeon/importers/onnx_importer.py +0 -0
  57. {onnxslim-0.1.72 → onnxslim-0.1.74}/onnxslim/third_party/onnx_graphsurgeon/ir/__init__.py +0 -0
  58. {onnxslim-0.1.72 → onnxslim-0.1.74}/onnxslim/third_party/onnx_graphsurgeon/ir/function.py +0 -0
  59. {onnxslim-0.1.72 → onnxslim-0.1.74}/onnxslim/third_party/onnx_graphsurgeon/ir/graph.py +0 -0
  60. {onnxslim-0.1.72 → onnxslim-0.1.74}/onnxslim/third_party/onnx_graphsurgeon/ir/node.py +0 -0
  61. {onnxslim-0.1.72 → onnxslim-0.1.74}/onnxslim/third_party/onnx_graphsurgeon/ir/tensor.py +0 -0
  62. {onnxslim-0.1.72 → onnxslim-0.1.74}/onnxslim/third_party/onnx_graphsurgeon/logger/__init__.py +0 -0
  63. {onnxslim-0.1.72 → onnxslim-0.1.74}/onnxslim/third_party/onnx_graphsurgeon/logger/logger.py +0 -0
  64. {onnxslim-0.1.72 → onnxslim-0.1.74}/onnxslim/third_party/onnx_graphsurgeon/util/__init__.py +0 -0
  65. {onnxslim-0.1.72 → onnxslim-0.1.74}/onnxslim/third_party/onnx_graphsurgeon/util/exception.py +0 -0
  66. {onnxslim-0.1.72 → onnxslim-0.1.74}/onnxslim/third_party/onnx_graphsurgeon/util/misc.py +0 -0
  67. {onnxslim-0.1.72 → onnxslim-0.1.74}/onnxslim/utils.py +0 -0
  68. {onnxslim-0.1.72 → onnxslim-0.1.74}/onnxslim.egg-info/dependency_links.txt +0 -0
  69. {onnxslim-0.1.72 → onnxslim-0.1.74}/onnxslim.egg-info/entry_points.txt +0 -0
  70. {onnxslim-0.1.72 → onnxslim-0.1.74}/onnxslim.egg-info/requires.txt +0 -0
  71. {onnxslim-0.1.72 → onnxslim-0.1.74}/onnxslim.egg-info/top_level.txt +0 -0
  72. {onnxslim-0.1.72 → onnxslim-0.1.74}/onnxslim.egg-info/zip-safe +0 -0
  73. {onnxslim-0.1.72 → onnxslim-0.1.74}/pyproject.toml +0 -0
  74. {onnxslim-0.1.72 → onnxslim-0.1.74}/setup.cfg +0 -0
  75. {onnxslim-0.1.72 → onnxslim-0.1.74}/setup.py +0 -0
@@ -1,6 +1,6 @@
1
1
  Metadata-Version: 2.4
2
2
  Name: onnxslim
3
- Version: 0.1.72
3
+ Version: 0.1.74
4
4
  Summary: OnnxSlim: A Toolkit to Help Optimize Onnx Model
5
5
  Home-page: https://github.com/inisis/OnnxSlim
6
6
  Author: inisis
@@ -0,0 +1 @@
1
+ 0.1.74
@@ -1,6 +1,7 @@
1
1
  from .concat_reshape import *
2
2
  from .convadd import *
3
3
  from .convbn import *
4
+ from .convmul import *
4
5
  from .gelu import *
5
6
  from .gemm import *
6
7
  from .padconv import *
@@ -0,0 +1,69 @@
1
+ import onnxslim.third_party.onnx_graphsurgeon as gs
2
+ from onnxslim.core.pattern import Pattern, PatternMatcher
3
+ from onnxslim.core.pattern.registry import register_fusion_pattern
4
+
5
+
6
+ class ConvMulMatcher(PatternMatcher):
7
+ def __init__(self, priority):
8
+ """Initializes the ConvMulMatcher for fusing Conv and Mul layers in an ONNX graph."""
9
+ pattern = Pattern(
10
+ """
11
+ input input 0 1 conv_0
12
+ Conv conv_0 1+ 1 input mul_0
13
+ Mul mul_0 2 1 conv_0 ? output
14
+ output output 1 0 mul_0
15
+ """
16
+ )
17
+ super().__init__(pattern, priority)
18
+
19
+ @property
20
+ def name(self):
21
+ """Returns the name of the FusionConvMul pattern."""
22
+ return "FusionConvMul"
23
+
24
+ def rewrite(self, opset=11):
25
+ match_case = {}
26
+ conv_node = self.conv_0
27
+ mul_node = self.mul_0
28
+ conv_weight = list(conv_node.inputs)[1]
29
+ if len(conv_node.users) == 1 and conv_node.users[0] == mul_node and isinstance(mul_node.inputs[1], gs.Constant):
30
+ mul_constant = mul_node.inputs[1].values
31
+
32
+ if mul_constant.squeeze().ndim == 1 and mul_constant.squeeze().shape[0] == conv_weight.shape[0]:
33
+ weight_shape = conv_weight.values.shape
34
+ reshape_shape = [-1] + [1] * (len(weight_shape) - 1)
35
+
36
+ mul_scale_reshaped = mul_constant.squeeze().reshape(reshape_shape)
37
+ new_weight = conv_weight.values * mul_scale_reshaped
38
+
39
+ inputs = []
40
+ inputs.append(next(iter(conv_node.inputs)))
41
+
42
+ weight_name = list(conv_node.inputs)[1].name
43
+ inputs.append(gs.Constant(weight_name, values=new_weight))
44
+
45
+ if len(conv_node.inputs) == 3:
46
+ conv_bias = conv_node.inputs[2].values
47
+ new_bias = conv_bias * mul_constant.squeeze()
48
+ bias_name = list(conv_node.inputs)[2].name
49
+ inputs.append(gs.Constant(bias_name, values=new_bias))
50
+
51
+ outputs = list(mul_node.outputs)
52
+
53
+ conv_node.outputs.clear()
54
+ mul_node.inputs.clear()
55
+ mul_node.outputs.clear()
56
+
57
+ match_case[conv_node.name] = {
58
+ "op": conv_node.op,
59
+ "inputs": inputs,
60
+ "outputs": outputs,
61
+ "name": conv_node.name,
62
+ "attrs": conv_node.attrs,
63
+ "domain": None,
64
+ }
65
+
66
+ return match_case
67
+
68
+
69
+ register_fusion_pattern(ConvMulMatcher(1))
@@ -289,16 +289,38 @@ class GemmAddPatternMatcher(PatternMatcher):
289
289
  )
290
290
  and add_bias_variable
291
291
  and len(reshape_node.users) == 1
292
+ and gemm_node.outputs[0].shape
292
293
  ):
294
+
295
+ def can_broadcast_to(shape_from, shape_to):
296
+ """Return True if shape_from can broadcast to shape_to per NumPy rules."""
297
+ if shape_from is None or shape_to is None:
298
+ return False
299
+ try:
300
+ np.empty(shape_to, dtype=np.float32) + np.empty(shape_from, dtype=np.float32)
301
+ return True
302
+ except ValueError:
303
+ return False
304
+
293
305
  gemm_bias_constant = gemm_node.inputs[2] if len(gemm_node.inputs) == 3 else None
294
306
  if gemm_bias_constant:
295
307
  gemm_bias = gemm_bias_constant.values
296
308
  add_bias = add_bias_variable.values
297
- gemm_bias_fused = gemm_bias + add_bias
298
- gemm_bias_fused_constant = gs.Constant(gemm_bias_constant.name + "_fused", values=gemm_bias_fused)
299
- gemm_node.inputs[2] = gemm_bias_fused_constant
309
+ if (
310
+ can_broadcast_to(gemm_bias.shape, gemm_node.outputs[0].shape)
311
+ and can_broadcast_to(add_bias.shape, gemm_node.outputs[0].shape)
312
+ and add_bias.ndim <= 2
313
+ ):
314
+ gemm_bias_fused = gemm_bias + add_bias
315
+ gemm_bias_fused_constant = gs.Constant(gemm_bias_constant.name + "_fused", values=gemm_bias_fused)
316
+ gemm_node.inputs[2] = gemm_bias_fused_constant
317
+ else:
318
+ return match_case
300
319
  else:
301
- gemm_node.inputs[2] = add_bias_variable
320
+ if can_broadcast_to(add_bias_variable.values.shape, gemm_node.outputs[0].shape):
321
+ gemm_node.inputs[2] = add_bias_variable
322
+ else:
323
+ return match_case
302
324
 
303
325
  add_node.replace_all_uses_with(reshape_node)
304
326
 
@@ -24,6 +24,7 @@ class PadConvMatcher(PatternMatcher):
24
24
  def parameter_check(self) -> bool:
25
25
  """Validates if the padding parameter for a convolutional node is a constant."""
26
26
  pad_node = self.pad_0
27
+
27
28
  return isinstance(pad_node.inputs[1], gs.Constant)
28
29
 
29
30
  def rewrite(self, opset=11):
@@ -42,7 +43,7 @@ class PadConvMatcher(PatternMatcher):
42
43
  ):
43
44
  if (
44
45
  isinstance(pad_node.inputs[1], gs.Constant)
45
- and pad_node.attrs["mode"] == "constant"
46
+ and pad_node.attrs.get("mode", "constant") == "constant"
46
47
  and conv_node.inputs[1].shape
47
48
  ):
48
49
  conv_weight_dim = len(conv_node.inputs[1].shape)
@@ -67,9 +68,10 @@ class PadConvMatcher(PatternMatcher):
67
68
  pad_node.inputs.clear()
68
69
  pad_node.outputs.clear()
69
70
 
70
- conv_pads = attrs["pads"]
71
71
  pads = pad_value[2:conv_weight_dim] + pad_value[conv_weight_dim + 2 :]
72
- pads = [pad + conv_pad for pad, conv_pad in zip(pads, conv_pads)]
72
+ if hasattr(attrs, "pads"):
73
+ conv_pads = attrs["pads"]
74
+ pads = [pad + conv_pad for pad, conv_pad in zip(pads, conv_pads)]
73
75
 
74
76
  attrs["pads"] = pads
75
77
  match_case[conv_node.name] = {
@@ -1606,7 +1606,7 @@ def tabulate(
1606
1606
  given header. Possible values are: "global" (no override), "same"
1607
1607
  (follow column alignment), "right", "center", "left".
1608
1608
 
1609
- Note on intended behaviour: If there is no `tabular_data`, any column
1609
+ Note on intended behavior: If there is no `tabular_data`, any column
1610
1610
  alignment argument is ignored. Hence, in this case, header
1611
1611
  alignment cannot be inferred from column alignment.
1612
1612
 
@@ -52,7 +52,7 @@ def simple_floordiv_gcd(p: sympy.Basic, q: sympy.Basic) -> sympy.Basic:
52
52
 
53
53
  We try to rewrite p and q in the form n*e*p1 + n*e*p2 and n*e*q0, where n is the greatest common integer factor and
54
54
  e is the largest syntactic common factor (i.e., common sub-expression) in p and q. Then the gcd returned is n*e,
55
- cancelling which we would be left with p1 + p2 and q0.
55
+ canceling which we would be left with p1 + p2 and q0.
56
56
 
57
57
  Note that further factoring of p1 + p2 and q0 might be possible with sympy.factor (which uses domain-specific
58
58
  theories). E.g., we are unable to find that x*y + x + y + 1 is divisible by x + 1. More generally, when q is of the
@@ -532,6 +532,7 @@ class SymbolicShapeInference:
532
532
  initializers = []
533
533
  if (get_opset(self.out_mp_) >= 9) and (
534
534
  node.op_type == "Unsqueeze" or node.op_type == "ReduceMax" or node.op_type == "ReduceMean"
535
+ or node.op_type == "DFT" or node.op_type == "ReduceL2" or node.op_type == "ReduceMin"
535
536
  ):
536
537
  initializers = [
537
538
  self.initializers_[name]
@@ -0,0 +1 @@
1
+ __version__ = "0.1.74"
@@ -1,6 +1,6 @@
1
1
  Metadata-Version: 2.4
2
2
  Name: onnxslim
3
- Version: 0.1.72
3
+ Version: 0.1.74
4
4
  Summary: OnnxSlim: A Toolkit to Help Optimize Onnx Model
5
5
  Home-page: https://github.com/inisis/OnnxSlim
6
6
  Author: inisis
@@ -35,6 +35,7 @@ onnxslim/core/pattern/fusion/__init__.py
35
35
  onnxslim/core/pattern/fusion/concat_reshape.py
36
36
  onnxslim/core/pattern/fusion/convadd.py
37
37
  onnxslim/core/pattern/fusion/convbn.py
38
+ onnxslim/core/pattern/fusion/convmul.py
38
39
  onnxslim/core/pattern/fusion/gelu.py
39
40
  onnxslim/core/pattern/fusion/gemm.py
40
41
  onnxslim/core/pattern/fusion/padconv.py
onnxslim-0.1.72/VERSION DELETED
@@ -1 +0,0 @@
1
- 0.1.72
@@ -1 +0,0 @@
1
- __version__ = "0.1.72"
File without changes
File without changes
File without changes
File without changes
File without changes
File without changes
File without changes