angr 9.2.150__py3-none-macosx_11_0_arm64.whl → 9.2.153__py3-none-macosx_11_0_arm64.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.

Potentially problematic release.


This version of angr might be problematic; see the registry's advisory page for details.

Files changed (34)
  1. angr/__init__.py +1 -1
  2. angr/analyses/calling_convention/calling_convention.py +17 -9
  3. angr/analyses/cfg/cfg_base.py +1 -1
  4. angr/analyses/cfg/cfg_fast.py +39 -0
  5. angr/analyses/decompiler/ail_simplifier.py +0 -1
  6. angr/analyses/decompiler/ccall_rewriters/amd64_ccalls.py +39 -0
  7. angr/analyses/decompiler/clinic.py +118 -2
  8. angr/analyses/decompiler/dephication/rewriting_engine.py +38 -1
  9. angr/analyses/decompiler/optimization_passes/condition_constprop.py +6 -0
  10. angr/analyses/decompiler/optimization_passes/engine_base.py +5 -0
  11. angr/analyses/decompiler/peephole_optimizations/__init__.py +2 -0
  12. angr/analyses/decompiler/peephole_optimizations/cas_intrinsics.py +115 -0
  13. angr/analyses/decompiler/region_identifier.py +171 -119
  14. angr/analyses/decompiler/ssailification/rewriting_engine.py +37 -1
  15. angr/analyses/decompiler/ssailification/traversal_engine.py +10 -1
  16. angr/analyses/reaching_definitions/engine_ail.py +20 -0
  17. angr/analyses/s_propagator.py +28 -0
  18. angr/analyses/smc.py +3 -1
  19. angr/analyses/stack_pointer_tracker.py +2 -1
  20. angr/analyses/typehoon/simple_solver.py +143 -81
  21. angr/analyses/typehoon/typehoon.py +2 -1
  22. angr/analyses/variable_recovery/engine_ail.py +9 -0
  23. angr/engines/light/engine.py +7 -0
  24. angr/knowledge_plugins/functions/function.py +10 -4
  25. angr/lib/angr_native.dylib +0 -0
  26. angr/storage/memory_mixins/clouseau_mixin.py +7 -1
  27. angr/utils/graph.py +10 -12
  28. angr/utils/ssa/__init__.py +6 -1
  29. {angr-9.2.150.dist-info → angr-9.2.153.dist-info}/METADATA +6 -6
  30. {angr-9.2.150.dist-info → angr-9.2.153.dist-info}/RECORD +34 -33
  31. {angr-9.2.150.dist-info → angr-9.2.153.dist-info}/WHEEL +1 -1
  32. {angr-9.2.150.dist-info → angr-9.2.153.dist-info}/entry_points.txt +0 -0
  33. {angr-9.2.150.dist-info → angr-9.2.153.dist-info}/licenses/LICENSE +0 -0
  34. {angr-9.2.150.dist-info → angr-9.2.153.dist-info}/top_level.txt +0 -0
@@ -1,4 +1,5 @@
1
1
  from __future__ import annotations
2
+ from typing import Any
2
3
  from itertools import count
3
4
  from collections import defaultdict
4
5
  import logging
@@ -70,6 +71,9 @@ class RegionIdentifier(Analysis):
70
71
  self._largest_successor_tree_outside_loop = largest_successor_tree_outside_loop
71
72
  self._force_loop_single_exit = force_loop_single_exit
72
73
  self._complete_successors = complete_successors
74
+ # we keep a dictionary of node and their traversal order in a quasi-topological traversal and update this
75
+ # dictionary as we update the graph
76
+ self._node_order: dict[Any, tuple[int, int]] = {}
73
77
 
74
78
  self._analyze()
75
79
 
@@ -102,11 +106,30 @@ class RegionIdentifier(Analysis):
102
106
 
103
107
  self._start_node = self._get_start_node(graph)
104
108
 
109
+ self._node_order = self._compute_node_order(graph)
110
+
105
111
  self.region = self._make_regions(graph)
106
112
 
107
113
  # make regions into block address lists
108
114
  self.regions_by_block_addrs = self._make_regions_by_block_addrs()
109
115
 
116
+ @staticmethod
117
+ def _compute_node_order(graph: networkx.DiGraph) -> dict[Any, tuple[int, int]]:
118
+ sorted_nodes = GraphUtils.quasi_topological_sort_nodes(graph)
119
+ node_order = {}
120
+ for i, n in enumerate(sorted_nodes):
121
+ node_order[n] = i, 0
122
+ return node_order
123
+
124
+ def _sort_nodes(self, nodes: list | set) -> list:
125
+ """
126
+ Sorts the nodes in the order specified in self._node_order.
127
+
128
+ :param nodes: A list or set of nodes to be sorted.
129
+ :return: A sorted list of nodes.
130
+ """
131
+ return sorted(nodes, key=lambda n: self._node_order[n])
132
+
110
133
  def _make_regions_by_block_addrs(self) -> list[list[tuple[int, int | None]]]:
111
134
  """
112
135
  Creates a list of addr lists representing each region without recursion. A single region is defined
@@ -182,30 +205,6 @@ class RegionIdentifier(Analysis):
182
205
  None,
183
206
  )
184
207
 
185
- def _test_reducibility(self):
186
- # make a copy of the graph
187
- graph = networkx.DiGraph(self._graph)
188
-
189
- # preprocess: make it a super graph
190
- self._make_supergraph(graph)
191
-
192
- while True:
193
- changed = False
194
-
195
- # find a node with a back-edge, remove the edge (deleting the loop), and replace it with a MultiNode
196
- changed |= self._remove_self_loop(graph)
197
-
198
- # find a node that has only one predecessor, and merge it with its predecessor (replace them with a
199
- # MultiNode)
200
- changed |= self._merge_single_entry_node(graph)
201
-
202
- if not changed:
203
- # a fixed-point is reached
204
- break
205
-
206
- # Flow graph reducibility, Hecht and Ullman
207
- return len(graph.nodes) == 1
208
-
209
208
  def _make_supergraph(self, graph: networkx.DiGraph):
210
209
 
211
210
  entry_node = None
@@ -236,7 +235,7 @@ class RegionIdentifier(Analysis):
236
235
 
237
236
  def _find_loop_headers(self, graph: networkx.DiGraph) -> list:
238
237
  heads = list({t for _, t in dfs_back_edges(graph, self._start_node)})
239
- return GraphUtils.quasi_topological_sort_nodes(graph, heads)
238
+ return self._sort_nodes(heads)
240
239
 
241
240
  def _find_initial_loop_nodes(self, graph: networkx.DiGraph, head):
242
241
  # TODO optimize
@@ -290,7 +289,7 @@ class RegionIdentifier(Analysis):
290
289
  # node.
291
290
  subgraph = networkx.DiGraph()
292
291
 
293
- sorted_refined_exit_nodes = GraphUtils.quasi_topological_sort_nodes(graph, refined_exit_nodes)
292
+ sorted_refined_exit_nodes = self._sort_nodes(refined_exit_nodes)
294
293
  while len(sorted_refined_exit_nodes) > 1 and new_exit_nodes:
295
294
  # visit each node in refined_exit_nodes once and determine which nodes to consider as loop nodes
296
295
  candidate_nodes = {}
@@ -324,7 +323,7 @@ class RegionIdentifier(Analysis):
324
323
 
325
324
  sorted_refined_exit_nodes += list(new_exit_nodes)
326
325
  sorted_refined_exit_nodes = list(set(sorted_refined_exit_nodes))
327
- sorted_refined_exit_nodes = GraphUtils.quasi_topological_sort_nodes(graph, sorted_refined_exit_nodes)
326
+ sorted_refined_exit_nodes = self._sort_nodes(sorted_refined_exit_nodes)
328
327
 
329
328
  refined_exit_nodes = set(sorted_refined_exit_nodes)
330
329
  refined_loop_nodes = refined_loop_nodes - refined_exit_nodes
@@ -373,37 +372,6 @@ class RegionIdentifier(Analysis):
373
372
 
374
373
  return refined_loop_nodes, refined_exit_nodes
375
374
 
376
- def _remove_self_loop(self, graph: networkx.DiGraph):
377
- r = False
378
-
379
- while True:
380
- for node in graph.nodes():
381
- if node in graph[node]:
382
- # found a self loop
383
- self._remove_node(graph, node)
384
- r = True
385
- break
386
- else:
387
- break
388
-
389
- return r
390
-
391
- def _merge_single_entry_node(self, graph: networkx.DiGraph):
392
- r = False
393
-
394
- while True:
395
- for node in networkx.dfs_postorder_nodes(graph):
396
- preds = list(graph.predecessors(node))
397
- if len(preds) == 1:
398
- # merge the two nodes
399
- self._absorb_node(graph, preds[0], node)
400
- r = True
401
- break
402
- else:
403
- break
404
-
405
- return r
406
-
407
375
  def _make_regions(self, graph: networkx.DiGraph):
408
376
  structured_loop_headers = set()
409
377
  new_regions = []
@@ -535,7 +503,14 @@ class RegionIdentifier(Analysis):
535
503
  abnormal_exit_nodes = set()
536
504
 
537
505
  region = self._abstract_cyclic_region(
538
- graph, refined_loop_nodes, head, normal_entries, abnormal_entries, normal_exit_node, abnormal_exit_nodes
506
+ graph,
507
+ refined_loop_nodes,
508
+ head,
509
+ normal_entries,
510
+ abnormal_entries,
511
+ normal_exit_node,
512
+ abnormal_exit_nodes,
513
+ self._node_order,
539
514
  )
540
515
  if region.successors is not None and len(region.successors) > 1 and self._force_loop_single_exit:
541
516
  # multi-successor region. refinement is required
@@ -661,6 +636,10 @@ class RegionIdentifier(Analysis):
661
636
  graph.remove_edge(region, succ)
662
637
  graph.add_edge(cond, succ, **edge_data)
663
638
 
639
+ # compute the node order of newly created nodes
640
+ self._node_order[region] = region_node_order = min(self._node_order[node_] for node_ in region.graph)
641
+ self._node_order[cond] = region_node_order[0], region_node_order[1] + 1
642
+
664
643
  #
665
644
  # Acyclic regions
666
645
  #
@@ -733,6 +712,7 @@ class RegionIdentifier(Analysis):
733
712
  graph,
734
713
  GraphRegion(node, subgraph, None, None, False, None, cyclic_ancestor=cyclic),
735
714
  [],
715
+ self._node_order,
736
716
  secondary_graph=secondary_graph,
737
717
  )
738
718
  continue
@@ -780,7 +760,12 @@ class RegionIdentifier(Analysis):
780
760
  l.debug("Node %r, frontier %r.", node, frontier)
781
761
  # l.debug("Identified an acyclic region %s.", self._dbg_block_list(region.graph.nodes()))
782
762
  self._abstract_acyclic_region(
783
- graph, region, frontier, dummy_endnode=dummy_endnode, secondary_graph=secondary_graph
763
+ graph,
764
+ region,
765
+ frontier,
766
+ self._node_order,
767
+ dummy_endnode=dummy_endnode,
768
+ secondary_graph=secondary_graph,
784
769
  )
785
770
  # assert dummy_endnode not in graph
786
771
  region_created = True
@@ -909,11 +894,17 @@ class RegionIdentifier(Analysis):
909
894
  )
910
895
  return None
911
896
 
897
+ @staticmethod
912
898
  def _abstract_acyclic_region(
913
- self, graph: networkx.DiGraph, region, frontier, dummy_endnode=None, secondary_graph=None
899
+ graph: networkx.DiGraph,
900
+ region,
901
+ frontier,
902
+ node_order: dict[Any, tuple[int, int]],
903
+ dummy_endnode=None,
904
+ secondary_graph=None,
914
905
  ):
915
- in_edges = self._region_in_edges(graph, region, data=True)
916
- out_edges = self._region_out_edges(graph, region, data=True)
906
+ in_edges = RegionIdentifier._region_in_edges(graph, region, data=True)
907
+ out_edges = RegionIdentifier._region_out_edges(graph, region, data=True)
917
908
 
918
909
  nodes_set = set()
919
910
  for node_ in list(region.graph.nodes()):
@@ -922,6 +913,7 @@ class RegionIdentifier(Analysis):
922
913
  graph.remove_node(node_)
923
914
 
924
915
  graph.add_node(region)
916
+ node_order[region] = min(node_order[node_] for node_ in nodes_set)
925
917
 
926
918
  for src, _, data in in_edges:
927
919
  if src not in nodes_set:
@@ -937,7 +929,7 @@ class RegionIdentifier(Analysis):
937
929
  graph.add_edge(region, frontier_node)
938
930
 
939
931
  if secondary_graph is not None:
940
- self._abstract_acyclic_region(secondary_graph, region, {})
932
+ RegionIdentifier._abstract_acyclic_region(secondary_graph, region, {}, node_order)
941
933
 
942
934
  @staticmethod
943
935
  def _abstract_cyclic_region(
@@ -948,6 +940,7 @@ class RegionIdentifier(Analysis):
948
940
  abnormal_entries,
949
941
  normal_exit_node,
950
942
  abnormal_exit_nodes,
943
+ node_order: dict[Any, tuple[int, int]],
951
944
  ):
952
945
  region = GraphRegion(head, None, None, None, True, None)
953
946
 
@@ -1019,6 +1012,8 @@ class RegionIdentifier(Analysis):
1019
1012
  graph.add_node(region)
1020
1013
  for src, dst, data in delayed_edges:
1021
1014
  graph.add_edge(src, dst, **data)
1015
+ # update node order
1016
+ node_order[region] = node_order[head]
1022
1017
 
1023
1018
  region.full_graph = full_graph
1024
1019
 
@@ -1039,25 +1034,8 @@ class RegionIdentifier(Analysis):
1039
1034
  out_edges.append((region, dst, data_))
1040
1035
  return out_edges
1041
1036
 
1042
- def _remove_node(self, graph: networkx.DiGraph, node): # pylint:disable=no-self-use
1043
- in_edges = [(src, dst, data) for (src, dst, data) in graph.in_edges(node, data=True) if src is not node]
1044
- out_edges = [(src, dst, data) for (src, dst, data) in graph.out_edges(node, data=True) if dst is not node]
1045
-
1046
- # true case: it forms a region by itself :-)
1047
- new_node = None if len(in_edges) <= 1 and len(out_edges) <= 1 else MultiNode([node])
1048
-
1049
- graph.remove_node(node)
1050
-
1051
- if new_node is not None:
1052
- for src, _, data in in_edges:
1053
- graph.add_edge(src, new_node, **data)
1054
-
1055
- for _, dst, data in out_edges:
1056
- graph.add_edge(new_node, dst, **data)
1057
-
1058
- def _merge_nodes(
1059
- self, graph: networkx.DiGraph, node_a, node_b, force_multinode=False
1060
- ): # pylint:disable=no-self-use
1037
+ @staticmethod
1038
+ def _merge_nodes(graph: networkx.DiGraph, node_a, node_b, force_multinode=False):
1061
1039
  in_edges = list(graph.in_edges(node_a, data=True))
1062
1040
  out_edges = list(graph.out_edges(node_b, data=True))
1063
1041
 
@@ -1089,9 +1067,116 @@ class RegionIdentifier(Analysis):
1089
1067
 
1090
1068
  return new_node
1091
1069
 
1092
- def _absorb_node(
1093
- self, graph: networkx.DiGraph, node_mommy, node_kiddie, force_multinode=False
1094
- ): # pylint:disable=no-self-use
1070
+ def _ensure_jump_at_loop_exit_ends(self, node: Block | MultiNode) -> None:
1071
+ if isinstance(node, Block):
1072
+ if not node.statements:
1073
+ node.statements.append(
1074
+ Jump(
1075
+ None,
1076
+ Const(None, None, node.addr + node.original_size, self.project.arch.bits),
1077
+ ins_addr=node.addr,
1078
+ )
1079
+ )
1080
+ else:
1081
+ if not isinstance(first_nonlabel_nonphi_statement(node), ConditionalJump) and not isinstance(
1082
+ node.statements[-1],
1083
+ (
1084
+ Jump,
1085
+ ConditionalJump,
1086
+ IncompleteSwitchCaseHeadStatement,
1087
+ ),
1088
+ ):
1089
+ node.statements.append(
1090
+ Jump(
1091
+ None,
1092
+ Const(None, None, node.addr + node.original_size, self.project.arch.bits),
1093
+ ins_addr=node.addr,
1094
+ )
1095
+ )
1096
+ elif isinstance(node, MultiNode) and node.nodes:
1097
+ self._ensure_jump_at_loop_exit_ends(node.nodes[-1])
1098
+
1099
+ @staticmethod
1100
+ def _dbg_block_list(blocks):
1101
+ return [(hex(b.addr) if hasattr(b, "addr") else repr(b)) for b in blocks]
1102
+
1103
+ #
1104
+ # Reducibility
1105
+ #
1106
+
1107
+ def test_reducibility(self) -> bool:
1108
+ # make a copy of the graph
1109
+ graph = networkx.DiGraph(self._graph)
1110
+
1111
+ # preprocess: make it a super graph
1112
+ self._make_supergraph(graph)
1113
+
1114
+ while True:
1115
+ changed = False
1116
+
1117
+ # find a node with a back-edge, remove the edge (deleting the loop), and replace it with a MultiNode
1118
+ changed |= self._remove_self_loop(graph)
1119
+
1120
+ # find a node that has only one predecessor, and merge it with its predecessor (replace them with a
1121
+ # MultiNode)
1122
+ changed |= self._merge_single_entry_node(graph)
1123
+
1124
+ if not changed:
1125
+ # a fixed-point is reached
1126
+ break
1127
+
1128
+ # Flow graph reducibility, Hecht and Ullman
1129
+ return len(graph.nodes) == 1
1130
+
1131
+ def _remove_self_loop(self, graph: networkx.DiGraph) -> bool:
1132
+ r = False
1133
+
1134
+ while True:
1135
+ for node in graph.nodes():
1136
+ if node in graph[node]:
1137
+ # found a self loop
1138
+ self._remove_node(graph, node)
1139
+ r = True
1140
+ break
1141
+ else:
1142
+ break
1143
+
1144
+ return r
1145
+
1146
+ def _merge_single_entry_node(self, graph: networkx.DiGraph) -> bool:
1147
+ r = False
1148
+
1149
+ while True:
1150
+ for node in networkx.dfs_postorder_nodes(graph):
1151
+ preds = list(graph.predecessors(node))
1152
+ if len(preds) == 1:
1153
+ # merge the two nodes
1154
+ self._absorb_node(graph, preds[0], node)
1155
+ r = True
1156
+ break
1157
+ else:
1158
+ break
1159
+
1160
+ return r
1161
+
1162
+ def _remove_node(self, graph: networkx.DiGraph, node): # pylint:disable=no-self-use
1163
+ in_edges = [(src, dst, data) for (src, dst, data) in graph.in_edges(node, data=True) if src is not node]
1164
+ out_edges = [(src, dst, data) for (src, dst, data) in graph.out_edges(node, data=True) if dst is not node]
1165
+
1166
+ # true case: it forms a region by itself :-)
1167
+ new_node = None if len(in_edges) <= 1 and len(out_edges) <= 1 else MultiNode([node])
1168
+
1169
+ graph.remove_node(node)
1170
+
1171
+ if new_node is not None:
1172
+ for src, _, data in in_edges:
1173
+ graph.add_edge(src, new_node, **data)
1174
+
1175
+ for _, dst, data in out_edges:
1176
+ graph.add_edge(new_node, dst, **data)
1177
+
1178
+ @staticmethod
1179
+ def _absorb_node(graph: networkx.DiGraph, node_mommy, node_kiddie, force_multinode=False):
1095
1180
  in_edges_mommy = graph.in_edges(node_mommy, data=True)
1096
1181
  out_edges_mommy = graph.out_edges(node_mommy, data=True)
1097
1182
  out_edges_kiddie = graph.out_edges(node_kiddie, data=True)
@@ -1129,38 +1214,5 @@ class RegionIdentifier(Analysis):
1129
1214
  assert node_mommy not in graph
1130
1215
  assert node_kiddie not in graph
1131
1216
 
1132
- def _ensure_jump_at_loop_exit_ends(self, node: Block | MultiNode) -> None:
1133
- if isinstance(node, Block):
1134
- if not node.statements:
1135
- node.statements.append(
1136
- Jump(
1137
- None,
1138
- Const(None, None, node.addr + node.original_size, self.project.arch.bits),
1139
- ins_addr=node.addr,
1140
- )
1141
- )
1142
- else:
1143
- if not isinstance(first_nonlabel_nonphi_statement(node), ConditionalJump) and not isinstance(
1144
- node.statements[-1],
1145
- (
1146
- Jump,
1147
- ConditionalJump,
1148
- IncompleteSwitchCaseHeadStatement,
1149
- ),
1150
- ):
1151
- node.statements.append(
1152
- Jump(
1153
- None,
1154
- Const(None, None, node.addr + node.original_size, self.project.arch.bits),
1155
- ins_addr=node.addr,
1156
- )
1157
- )
1158
- elif isinstance(node, MultiNode) and node.nodes:
1159
- self._ensure_jump_at_loop_exit_ends(node.nodes[-1])
1160
-
1161
- @staticmethod
1162
- def _dbg_block_list(blocks):
1163
- return [(hex(b.addr) if hasattr(b, "addr") else repr(b)) for b in blocks]
1164
-
1165
1217
 
1166
1218
  register_analysis(RegionIdentifier, "RegionIdentifier")
@@ -1,4 +1,4 @@
1
- # pylint:disable=no-self-use,unused-argument
1
+ # pylint:disable=no-self-use,unused-argument,too-many-boolean-expressions
2
2
  from __future__ import annotations
3
3
  from typing import Literal
4
4
  import logging
@@ -9,6 +9,7 @@ from ailment.manager import Manager
9
9
  from ailment.statement import (
10
10
  Statement,
11
11
  Assignment,
12
+ CAS,
12
13
  Store,
13
14
  Call,
14
15
  Return,
@@ -18,6 +19,7 @@ from ailment.statement import (
18
19
  WeakAssignment,
19
20
  )
20
21
  from ailment.expression import (
22
+ Atom,
21
23
  Expression,
22
24
  Register,
23
25
  VirtualVariable,
@@ -204,6 +206,40 @@ class SimEngineSSARewriting(
204
206
  )
205
207
  return None
206
208
 
209
+ def _handle_stmt_CAS(self, stmt: CAS) -> CAS | None:
210
+ new_addr = self._expr(stmt.addr)
211
+ new_data_lo = self._expr(stmt.data_lo)
212
+ new_data_hi = self._expr(stmt.data_hi) if stmt.data_hi is not None else None
213
+ new_expd_lo = self._expr(stmt.expd_lo)
214
+ new_expd_hi = self._expr(stmt.expd_hi) if stmt.expd_hi is not None else None
215
+ new_old_lo = self._expr(stmt.old_lo)
216
+ new_old_hi = self._expr(stmt.old_hi) if stmt.old_hi is not None else None
217
+ assert new_old_lo is None or isinstance(new_old_lo, Atom)
218
+ assert new_old_hi is None or isinstance(new_old_hi, Atom)
219
+
220
+ if (
221
+ new_addr is not None
222
+ or new_old_lo is not None
223
+ or new_old_hi is not None
224
+ or new_data_lo is not None
225
+ or new_data_hi is not None
226
+ or new_expd_lo is not None
227
+ or new_expd_hi is not None
228
+ ):
229
+ return CAS(
230
+ stmt.idx,
231
+ stmt.addr if new_addr is None else new_addr,
232
+ stmt.data_lo if new_data_lo is None else new_data_lo,
233
+ stmt.data_hi if new_data_hi is None else new_data_hi,
234
+ stmt.expd_lo if new_expd_lo is None else new_expd_lo,
235
+ stmt.expd_hi if new_expd_hi is None else new_expd_hi,
236
+ stmt.old_lo if new_old_lo is None else new_old_lo,
237
+ stmt.old_hi if new_old_hi is None else new_old_hi,
238
+ stmt.endness,
239
+ **stmt.tags,
240
+ )
241
+ return None
242
+
207
243
  def _handle_stmt_Store(self, stmt: Store) -> Store | Assignment | tuple[Assignment, ...] | None:
208
244
  new_data = self._expr(stmt.data)
209
245
  if stmt.guard is None:
@@ -1,7 +1,7 @@
1
1
  from __future__ import annotations
2
2
  from collections import OrderedDict
3
3
 
4
- from ailment.statement import Call, Store, ConditionalJump
4
+ from ailment.statement import Call, Store, ConditionalJump, CAS
5
5
  from ailment.expression import Register, BinaryOp, StackBaseOffset, ITE, VEXCCallExpression, Tmp, DirtyExpression, Load
6
6
 
7
7
  from angr.engines.light import SimEngineLightAIL
@@ -64,6 +64,15 @@ class SimEngineSSATraversal(SimEngineLightAIL[TraversalState, None, None, None])
64
64
  self._expr(stmt.src)
65
65
  self._expr(stmt.dst)
66
66
 
67
+ def _handle_stmt_CAS(self, stmt: CAS):
68
+ self._expr(stmt.addr)
69
+ self._expr(stmt.data_lo)
70
+ if stmt.data_hi is not None:
71
+ self._expr(stmt.data_hi)
72
+ self._expr(stmt.expd_lo)
73
+ if stmt.expd_hi is not None:
74
+ self._expr(stmt.expd_hi)
75
+
67
76
  def _handle_stmt_Store(self, stmt: Store):
68
77
  self._expr(stmt.addr)
69
78
  self._expr(stmt.data)
@@ -143,6 +143,26 @@ class SimEngineRDAIL(
143
143
  else:
144
144
  l.warning("Unsupported type of Assignment dst %s.", type(dst).__name__)
145
145
 
146
+ def _handle_stmt_CAS(self, stmt: ailment.statement.CAS):
147
+ addr = self._expr(stmt.addr)
148
+ old_lo = stmt.old_lo
149
+ old_hi = stmt.old_hi
150
+
151
+ self._expr(stmt.data_lo)
152
+ if stmt.data_hi is not None:
153
+ self._expr(stmt.data_hi)
154
+ self._expr(stmt.expd_lo)
155
+ if stmt.expd_hi is not None:
156
+ self._expr(stmt.expd_hi)
157
+
158
+ if isinstance(old_lo, ailment.Tmp):
159
+ self.state.kill_and_add_definition(Tmp(old_lo.tmp_idx, old_lo.size), addr)
160
+ self.tmps[old_lo.tmp_idx] = self._top(old_lo.size)
161
+
162
+ if isinstance(old_hi, ailment.Tmp):
163
+ self.state.kill_and_add_definition(Tmp(old_hi.tmp_idx, old_hi.size), addr)
164
+ self.tmps[old_hi.tmp_idx] = self._top(old_hi.size)
165
+
146
166
  def _handle_stmt_Store(self, stmt: ailment.Stmt.Store) -> None:
147
167
  data = self._expr(stmt.data)
148
168
  addr = self._expr_bv(stmt.addr)
@@ -511,5 +511,33 @@ class SPropagatorAnalysis(Analysis):
511
511
  }
512
512
  return (block_1.addr, block_1.idx) in stmt_0_targets
513
513
 
514
+ @staticmethod
515
+ def vvar_dep_graph(blocks, vvar_def_locs, vvar_use_locs) -> networkx.DiGraph:
516
+ g = networkx.DiGraph()
517
+
518
+ for var_id in vvar_def_locs:
519
+ # where is it used?
520
+ for _, use_loc in vvar_use_locs[var_id]:
521
+ if isinstance(use_loc, ExternalCodeLocation):
522
+ g.add_edge(var_id, "ExternalCodeLocation")
523
+ continue
524
+ assert use_loc.block_addr is not None
525
+ assert use_loc.stmt_idx is not None
526
+ block = blocks[(use_loc.block_addr, use_loc.block_idx)]
527
+ stmt = block.statements[use_loc.stmt_idx]
528
+ if isinstance(stmt, Assignment):
529
+ if isinstance(stmt.dst, VirtualVariable):
530
+ g.add_edge(var_id, stmt.dst.varid)
531
+ else:
532
+ g.add_edge(var_id, f"Assignment@{stmt.ins_addr:#x}")
533
+ elif isinstance(stmt, Store):
534
+ # store to memory
535
+ g.add_edge(var_id, f"Store@{stmt.ins_addr:#x}")
536
+ else:
537
+ # other statements
538
+ g.add_edge(var_id, f"{stmt.__class__.__name__}@{stmt.ins_addr:#x}")
539
+
540
+ return g
541
+
514
542
 
515
543
  register_analysis(SPropagatorAnalysis, "SPropagator")
angr/analyses/smc.py CHANGED
@@ -42,6 +42,8 @@ class TraceClassifier:
42
42
  """
43
43
  addr = state.solver.eval(state.inspect.mem_write_address)
44
44
  length = state.inspect.mem_write_length
45
+ if length is None:
46
+ length = len(state.inspect.mem_write_expr) // state.arch.byte_width
45
47
  if not isinstance(length, int):
46
48
  length = state.solver.eval(length)
47
49
  self.map.add(addr, length, TraceActions.WRITE)
@@ -103,7 +105,7 @@ class SelfModifyingCodeAnalysis(Analysis):
103
105
  """
104
106
  :param subject: Subject of analysis
105
107
  :param max_bytes: Maximum number of bytes from subject address. 0 for no limit (default).
106
- :param state: State to begin executing from from.
108
+ :param state: State to begin executing from.
107
109
  """
108
110
  assert self.project.selfmodifying_code
109
111
 
@@ -791,7 +791,8 @@ class StackPointerTracker(Analysis, ForwardAnalysis):
791
791
  sp_adjusted = True
792
792
  sp_v = state.regs[self.project.arch.sp_offset]
793
793
  sp_v -= Constant(stmt.data.con.value)
794
- state.put(self.project.arch.sp_offset, sp_v, force=True)
794
+ state.put(self.project.arch.sp_offset, sp_v, force=True) # sp -= OFFSET
795
+ state.put(stmt.offset, Constant(0), force=True) # rax = 0
795
796
  break
796
797
 
797
798
  callee_cleanups = [