angr 9.2.95__py3-none-manylinux2014_x86_64.whl → 9.2.97__py3-none-manylinux2014_x86_64.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.


Files changed (55)
  1. angr/__init__.py +1 -1
  2. angr/analyses/cfg/cfg_fast.py +9 -6
  3. angr/analyses/cfg/indirect_jump_resolvers/const_resolver.py +6 -1
  4. angr/analyses/complete_calling_conventions.py +27 -11
  5. angr/analyses/decompiler/ail_simplifier.py +30 -8
  6. angr/analyses/decompiler/ccall_rewriters/amd64_ccalls.py +20 -7
  7. angr/analyses/decompiler/clinic.py +21 -5
  8. angr/analyses/decompiler/condition_processor.py +11 -0
  9. angr/analyses/decompiler/decompiler.py +58 -46
  10. angr/analyses/decompiler/optimization_passes/__init__.py +11 -5
  11. angr/analyses/decompiler/optimization_passes/flip_boolean_cmp.py +13 -7
  12. angr/analyses/decompiler/optimization_passes/optimization_pass.py +31 -11
  13. angr/analyses/decompiler/optimization_passes/{return_duplicator.py → return_duplicator_base.py} +54 -102
  14. angr/analyses/decompiler/optimization_passes/return_duplicator_high.py +57 -0
  15. angr/analyses/decompiler/optimization_passes/return_duplicator_low.py +121 -0
  16. angr/analyses/decompiler/region_identifier.py +13 -0
  17. angr/analyses/decompiler/seq_to_blocks.py +19 -0
  18. angr/analyses/decompiler/structured_codegen/c.py +21 -0
  19. angr/analyses/decompiler/structuring/phoenix.py +28 -4
  20. angr/analyses/decompiler/structuring/recursive_structurer.py +35 -1
  21. angr/analyses/decompiler/structuring/structurer_base.py +3 -0
  22. angr/analyses/decompiler/utils.py +41 -6
  23. angr/analyses/disassembly.py +4 -1
  24. angr/analyses/find_objects_static.py +15 -10
  25. angr/analyses/forward_analysis/forward_analysis.py +15 -1
  26. angr/analyses/propagator/engine_ail.py +40 -0
  27. angr/analyses/propagator/propagator.py +6 -3
  28. angr/analyses/reaching_definitions/engine_ail.py +16 -24
  29. angr/analyses/reaching_definitions/rd_state.py +14 -1
  30. angr/analyses/reaching_definitions/reaching_definitions.py +19 -2
  31. angr/analyses/variable_recovery/engine_ail.py +6 -6
  32. angr/analyses/variable_recovery/engine_base.py +22 -4
  33. angr/analyses/variable_recovery/variable_recovery_base.py +4 -1
  34. angr/engines/light/engine.py +8 -1
  35. angr/knowledge_plugins/key_definitions/atoms.py +4 -2
  36. angr/knowledge_plugins/key_definitions/environment.py +11 -0
  37. angr/knowledge_plugins/key_definitions/live_definitions.py +41 -8
  38. angr/knowledge_plugins/key_definitions/uses.py +18 -4
  39. angr/knowledge_plugins/propagations/states.py +22 -3
  40. angr/knowledge_plugins/types.py +6 -0
  41. angr/knowledge_plugins/variables/variable_manager.py +54 -5
  42. angr/simos/simos.py +2 -0
  43. angr/storage/memory_mixins/__init__.py +3 -0
  44. angr/storage/memory_mixins/multi_value_merger_mixin.py +22 -11
  45. angr/storage/memory_mixins/paged_memory/paged_memory_mixin.py +20 -2
  46. angr/storage/memory_mixins/paged_memory/pages/mv_list_page.py +81 -44
  47. angr/utils/cowdict.py +4 -2
  48. angr/utils/funcid.py +6 -0
  49. angr/utils/mp.py +1 -1
  50. {angr-9.2.95.dist-info → angr-9.2.97.dist-info}/METADATA +6 -6
  51. {angr-9.2.95.dist-info → angr-9.2.97.dist-info}/RECORD +55 -52
  52. {angr-9.2.95.dist-info → angr-9.2.97.dist-info}/LICENSE +0 -0
  53. {angr-9.2.95.dist-info → angr-9.2.97.dist-info}/WHEEL +0 -0
  54. {angr-9.2.95.dist-info → angr-9.2.97.dist-info}/entry_points.txt +0 -0
  55. {angr-9.2.95.dist-info → angr-9.2.97.dist-info}/top_level.txt +0 -0
@@ -1646,6 +1646,7 @@ class CUnaryOp(CExpression):
             "BitwiseNeg": self._c_repr_chunks_bitwiseneg,
             "Reference": self._c_repr_chunks_reference,
             "Dereference": self._c_repr_chunks_dereference,
+            "Clz": self._c_repr_chunks_clz,
         }

         handler = OP_MAP.get(self.op, None)
@@ -1690,6 +1691,13 @@ class CUnaryOp(CExpression):
         yield from CExpression._try_c_repr_chunks(self.operand)
         yield ")", paren

+    def _c_repr_chunks_clz(self):
+        paren = CClosingObject("(")
+        yield "Clz", self
+        yield "(", paren
+        yield from CExpression._try_c_repr_chunks(self.operand)
+        yield ")", paren
+

 class CBinaryOp(CExpression):
     """
@@ -2097,6 +2105,14 @@ class CConstant(CExpression):
     def fmt_float(self, v: bool):
         self._fmt_setter["float"] = v

+    @property
+    def fmt_double(self):
+        return self.fmt.get("double", False)
+
+    @fmt_double.setter
+    def fmt_double(self, v: bool):
+        self._fmt_setter["double"] = v
+
     @property
     def type(self):
         return self._type
@@ -2191,6 +2207,11 @@ class CConstant(CExpression):
             value &= 0xFF
             return repr(chr(value)) if value < 0x80 else f"'\\x{value:x}'"

+        if self.fmt_double:
+            if 0 < value <= 0xFFFF_FFFF_FFFF_FFFF:
+                str_value = str(struct.unpack("d", struct.pack("Q", value))[0])
+                return str_value
+
         if self.fmt_neg:
             if value > 0:
                 value -= 2**self._type.size
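For reference, the new fmt_double branch renders an integer constant by reinterpreting its 64-bit bit pattern as an IEEE-754 double. A minimal standalone sketch of that conversion in plain Python (the sample bit patterns are illustrative, not taken from the diff):

import struct

def reinterpret_u64_as_double(value: int) -> float:
    # repack the unsigned 64-bit pattern and read it back as a double, as the fmt_double path does
    assert 0 < value <= 0xFFFF_FFFF_FFFF_FFFF
    return struct.unpack("d", struct.pack("Q", value))[0]

print(reinterpret_u64_as_double(0x3FF0000000000000))  # 1.0
print(reinterpret_u64_as_double(0x400921FB54442D18))  # 3.141592653589793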
@@ -873,7 +873,6 @@ class PhoenixStructurer(StructurerBase):
     def _refine_cyclic_is_while_loop(
         self, graph, fullgraph, loop_head, head_succs
     ) -> Tuple[bool, Optional[Tuple[List, List, BaseNode, BaseNode]]]:
-
         if len(head_succs) == 2 and any(head_succ not in graph for head_succ in head_succs):
             # make sure the head_pred is not already structured
             _, _, head_block_0 = self._find_node_going_to_dst(loop_head, head_succs[0])
@@ -1081,6 +1080,7 @@ class PhoenixStructurer(StructurerBase):
                 node,
                 self.cond_proc.claripy_ast_from_ail_condition(last_stmt.switch_variable),
                 cases,
+                node_default_addr,
                 node_default,
                 last_stmt.ins_addr,
                 to_remove,
@@ -1191,7 +1191,16 @@ class PhoenixStructurer(StructurerBase):

         to_remove.add(node_a)  # add node_a
         self._make_switch_cases_core(
-            node, cmp_expr, cases, node_default, last_stmt.ins_addr, to_remove, graph, full_graph, node_a=node_a
+            node,
+            cmp_expr,
+            cases,
+            node_b_addr,
+            node_default,
+            last_stmt.ins_addr,
+            to_remove,
+            graph,
+            full_graph,
+            node_a=node_a,
         )

         self._switch_handle_gotos(cases, node_default, switch_end_addr)
@@ -1243,7 +1252,9 @@ class PhoenixStructurer(StructurerBase):
             # there must be a default case
             return False

-        self._make_switch_cases_core(node, cmp_expr, cases, node_default, node.addr, to_remove, graph, full_graph)
+        self._make_switch_cases_core(
+            node, cmp_expr, cases, default_addr, node_default, node.addr, to_remove, graph, full_graph
+        )

         return True

@@ -1260,7 +1271,11 @@ class PhoenixStructurer(StructurerBase):

         successors = list(graph.successors(node))

-        if successors and all(graph.in_degree[succ] == 1 for succ in successors):
+        if (
+            successors
+            and {succ.addr for succ in successors} == set(jump_tables[node.addr].jumptable_entries)
+            and all(graph.in_degree[succ] == 1 for succ in successors)
+        ):
             out_nodes = set()
             for succ in successors:
                 out_nodes |= set(full_graph.successors(succ))
@@ -1397,6 +1412,7 @@ class PhoenixStructurer(StructurerBase):
         head,
         cmp_expr,
         cases: ODict,
+        node_default_addr: int,
         node_default,
         addr,
         to_remove: Set,
@@ -1435,6 +1451,12 @@ class PhoenixStructurer(StructurerBase):
             # the head no longer goes to the default case
             graph.remove_edge(head, node_default)
             full_graph.remove_edge(head, node_default)
+        else:
+            # the default node is not in the current graph, but it might be in the full graph
+            node_default_in_full_graph = next(iter(nn for nn in full_graph if nn.addr == node_default_addr), None)
+            if node_default_in_full_graph is not None and full_graph.has_edge(head, node_default_in_full_graph):
+                # the head no longer jumps to the default node - the switch jumps to it
+                full_graph.remove_edge(head, node_default_in_full_graph)

         for nn in to_remove:
             graph.remove_node(nn)
@@ -2102,6 +2124,8 @@ class PhoenixStructurer(StructurerBase):
         for src, dst in acyclic_graph.edges:
             if src is dst:
                 continue
+            if src not in graph:
+                continue
             if not dominates(idoms, src, dst) and not dominates(idoms, dst, src):
                 if (src.addr, dst.addr) not in self.whitelist_edges:
                     all_edges_wo_dominance.append((src, dst))
@@ -1,7 +1,9 @@
 import itertools
 from typing import Optional, Type, Dict, TYPE_CHECKING
+import logging

 import networkx
+
 from ... import Analysis, register_analysis
 from ..condition_processor import ConditionProcessor
 from ..graph_region import GraphRegion
@@ -9,6 +11,7 @@ from ..jumptable_entry_condition_rewriter import JumpTableEntryConditionRewriter
 from ..empty_node_remover import EmptyNodeRemover
 from ..jump_target_collector import JumpTargetCollector
 from ..redundant_label_remover import RedundantLabelRemover
+from .structurer_nodes import BaseNode
 from .structurer_base import StructurerBase
 from .dream import DreamStructurer

@@ -17,6 +20,9 @@ if TYPE_CHECKING:
     from angr.knowledge_plugins.functions import Function


+_l = logging.getLogger(__name__)
+
+
 class RecursiveStructurer(Analysis):
     """
     Recursively structure a region and all of its subregions.
@@ -39,12 +45,14 @@ class RecursiveStructurer(Analysis):
         self.structurer_options = kwargs

         self.result = None
+        self.result_incomplete: bool = False

         self._analyze()

     def _analyze(self):
         region = self._region.recursive_copy()
         self._case_entry_to_switch_head: Dict[int, int] = self._get_switch_case_entries()
+        self.result_incomplete = False

         # visit the region in post-order DFS
         parent_map = {}
@@ -89,7 +97,16 @@ class RecursiveStructurer(Analysis):
             # replace this region with the resulting node in its parent region... if it's not an orphan
             if not parent_region:
                 # this is the top-level region. we are done!
-                self.result = st.result
+                if st.result is None:
+                    # take the partial result out of the graph
+                    _l.warning(
+                        "Structuring failed to complete (most likely due to bugs in structuring). The "
+                        "output will miss code blocks."
+                    )
+                    self.result = self._pick_incomplete_result_from_region(st._region)
+                    self.result_incomplete = True
+                else:
+                    self.result = st.result
                 break

             if st.result is None:
@@ -148,5 +165,22 @@ class RecursiveStructurer(Analysis):

         return entries

+    def _pick_incomplete_result_from_region(self, region):
+        """
+        Parse the region graph and get (a) the node with address equal to the function address, or (b) the node with
+        the lowest address.
+        """
+
+        min_node = None
+        for node in region.graph.nodes:
+            if not isinstance(node, BaseNode):
+                continue
+            if node.addr == self.function.addr:
+                return node
+            if min_node is None or min_node.addr < node.addr:
+                min_node = node
+
+        return min_node
+

 register_analysis(RecursiveStructurer, "RecursiveStructurer")
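A hedged sketch of how a caller might consume the new result_incomplete flag; `structurer` here stands for an already-constructed RecursiveStructurer instance, and how it is constructed is elided:

def get_structured_result(structurer):
    # result_incomplete is set in _analyze(); when it is True, `result` only holds the partial node
    # picked by _pick_incomplete_result_from_region(), so downstream output may miss code blocks
    if structurer.result_incomplete:
        print("warning: structuring did not complete; the output will miss code blocks")
    return structurer.result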
@@ -145,6 +145,9 @@ class StructurerBase(Analysis):
         if isinstance(stmt, ailment.Stmt.Jump):
             targets = extract_jump_targets(stmt)
             for t in targets:
+                if t in cases or default is not None and t == default.addr:
+                    # the node after switch cannot be one of the nodes in the switch-case construct
+                    continue
                 goto_addrs[t] += 1

         if switch_end_addr is None:
@@ -9,6 +9,7 @@ import ailment

 import angr
 from .call_counter import AILBlockCallCounter
+from .seq_to_blocks import SequenceToBlocks

 _l = logging.getLogger(__name__)

@@ -655,13 +656,16 @@ def decompile_functions(path, functions=None, structurer=None, catch_errors=Fals

     # collect all functions when None are provided
     if functions is None:
-        functions = cfg.functions.values()
+        functions = list(sorted(cfg.kb.functions))

     # normalize the functions that could be ints as names
-    normalized_functions = []
+    normalized_functions: List[Union[int, str]] = []
     for func in functions:
         try:
-            normalized_name = int(func, 0)
+            if isinstance(func, str):
+                normalized_name = int(func, 0)
+            else:
+                normalized_name = func
         except ValueError:
             normalized_name = func
         normalized_functions.append(normalized_name)
@@ -683,7 +687,7 @@ def decompile_functions(path, functions=None, structurer=None, catch_errors=Fals
     ]
     for func in functions:
         f = cfg.functions[func]
-        if f is None or f.is_plt:
+        if f is None or f.is_plt or f.is_syscall or f.is_alignment or f.is_simprocedure:
             continue

         exception_string = ""
@@ -700,14 +704,14 @@ def decompile_functions(path, functions=None, structurer=None, catch_errors=Fals

         # do sanity checks on decompilation, skip checks if we already errored
         if not exception_string:
             if dec is None or not dec.codegen or not dec.codegen.text:
-                exception_string = "Decompilation had no code output (failed in Dec)"
+                exception_string = "Decompilation had no code output (failed in decompilation)"
             elif "{\n}" in dec.codegen.text:
                 exception_string = "Decompilation outputted an empty function (failed in structuring)"
             elif structurer in ["dream", "combing"] and "goto" in dec.codegen.text:
                 exception_string = "Decompilation outputted a goto for a Gotoless algorithm (failed in structuring)"

         if exception_string:
-            _l.critical("Failed to decompile %s because %s", str(func), exception_string)
+            _l.critical("Failed to decompile %s because %s", repr(f), exception_string)
             decompilation += f"// [error: {func} | {exception_string}]\n"
         else:
             decompilation += dec.codegen.text + "\n"
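A hedged usage sketch of the decompile_functions() helper patched above: per these hunks it now accepts function addresses given as ints as well as strings, and it skips PLT stubs, syscalls, alignment functions, and SimProcedures. The binary path and addresses below are illustrative, and it is assumed the helper returns the accumulated pseudocode text:

from angr.analyses.decompiler.utils import decompile_functions

text = decompile_functions(
    "/path/to/binary",             # illustrative path
    functions=[0x401130, "main"],  # ints and strings are both normalized now
    catch_errors=True,
)
print(text)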
@@ -734,6 +738,37 @@ def find_block_by_addr(graph: networkx.DiGraph, addr: int):
     raise KeyError("The block is not in the graph!")


+def sequence_to_blocks(seq: "BaseNode") -> List[ailment.Block]:
+    """
+    Converts a sequence node (BaseNode) to a list of ailment blocks contained in it and all its children.
+    """
+    walker = SequenceToBlocks()
+    walker.walk(seq)
+    return walker.blocks
+
+
+def sequence_to_statements(
+    seq: "BaseNode", exclude=(ailment.statement.Jump, ailment.statement.Jump)
+) -> List[ailment.statement.Statement]:
+    """
+    Converts a sequence node (BaseNode) to a list of ailment Statements contained in it and all its children.
+    May exclude certain types of statements.
+    """
+    statements = []
+    blocks = sequence_to_blocks(seq)
+    block: ailment.Block
+    for block in blocks:
+        if not block.statements:
+            continue
+
+        for stmt in block.statements:
+            if isinstance(stmt, exclude):
+                continue
+            statements.append(stmt)
+
+    return statements
+
+
 # delayed import
 from .structuring.structurer_nodes import (
     MultiNode,
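A hedged sketch of using the two new helpers; `seq` is any structured node (for example the result of a RecursiveStructurer run), and the call-counting use case is illustrative:

import ailment
from angr.analyses.decompiler.utils import sequence_to_blocks, sequence_to_statements

def count_calls(seq) -> int:
    # sequence_to_statements() flattens the node into statements, dropping Jump statements by default
    return sum(1 for stmt in sequence_to_statements(seq) if isinstance(stmt, ailment.statement.Call))

def block_addrs(seq) -> set:
    return {block.addr for block in sequence_to_blocks(seq)}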
@@ -8,6 +8,7 @@ from angr.knowledge_plugins import Function

 from . import Analysis

+from ..errors import AngrTypeError
 from ..utils.library import get_cpp_function_name
 from ..utils.formatting import ansi_color_enabled, ansi_color, add_edge_to_buffer
 from ..block import DisassemblerInsn, CapstoneInsn, SootBlockNode
@@ -1141,7 +1142,9 @@ class Disassembly(Analysis):
                     self.raw_result_map["instructions"][stmt.addr] = stmt
                     self.block_to_insn_addrs[block.addr].append(stmt.addr)
             else:
-                raise TypeError("")
+                raise AngrTypeError(
+                    f"Cannot disassemble block with architecture {self.project.arch} for block type {type(block)}"
+                )

         if self._include_ir:
             b = self.project.factory.block(block.addr, size=block.size)
@@ -45,6 +45,7 @@ class NewFunctionHandler(FunctionHandler):
     """

     def __init__(self, max_addr=None, new_func_addr=None, project=None):
+        super().__init__()
         self.max_addr = max_addr

         # this is a map between an object addr outside the mapped binary and PossibleObject instance
@@ -104,16 +105,20 @@ class NewFunctionHandler(FunctionHandler):
             data.depends(memory_location, value=MultiValues(offset_to_values=offset_to_values))
             self.max_addr += size

-        elif "ctor" in self.project.kb.functions[function_address].demangled_name:
-            # check if rdi has a possible this pointer/ object address, if so then we can assign this object this class
-            # also if the func is a constructor(not stripped binaries)
-            for addr, possible_object in self.possible_objects_dict.items():
-                v1 = state.registers.load(72, state.arch.bits // state.arch.byte_width).one_value()
-                obj_addr = v1.concrete_value if v1 is not None and v1.concrete else None
-                if obj_addr is not None and addr == obj_addr:
-                    col_ind = self.project.kb.functions[function_address].demangled_name.rfind("::")
-                    class_name = self.project.kb.functions[function_address].demangled_name[:col_ind]
-                    possible_object.class_name = class_name
+        else:
+            if self.project.kb.functions.contains_addr(function_address):
+                func = self.project.kb.functions.get_by_addr(function_address)
+                if func is not None and "ctor" in func.demangled_name:
+                    # check if rdi has a possible this pointer/ object address, if so then we can assign this object
+                    # this class
+                    # also if the func is a constructor(not stripped binaries)
+                    for addr, possible_object in self.possible_objects_dict.items():
+                        v1 = state.registers.load(72, state.arch.bits // state.arch.byte_width).one_value()
+                        obj_addr = v1.concrete_value if v1 is not None and v1.concrete else None
+                        if obj_addr is not None and addr == obj_addr:
+                            col_ind = self.project.kb.functions[function_address].demangled_name.rfind("::")
+                            class_name = self.project.kb.functions[function_address].demangled_name[:col_ind]
+                            possible_object.class_name = class_name


 class StaticObjectFinder(Analysis):
@@ -209,6 +209,20 @@ class ForwardAnalysis(Generic[AnalysisState, NodeType, JobType, JobKey]):

         raise NotImplementedError("_merge_states() is not implemented.")

+    def _compare_states(self, node: NodeType, old_state: AnalysisState, new_state: AnalysisState) -> bool:
+        """
+        Determine if the analysis has reached fixed point at `node`.
+
+        You can override this method to implement a faster _compare_states() method.
+
+        :param node:        The node that has been analyzed.
+        :param old_state:   The original output state out of node.
+        :param new_state:   The new output state out of node.
+        :return:            True if the analysis has reached fixed at node. False otherwise.
+        """
+        _, has_no_changes = self._merge_states(node, old_state, new_state)
+        return has_no_changes
+
     def _widen_states(self, *states: AnalysisState) -> AnalysisState:
         raise NotImplementedError("_widen_states() is not implemented.")

@@ -288,7 +302,7 @@ class ForwardAnalysis(Generic[AnalysisState, NodeType, JobType, JobKey]):
                     reached_fixedpoint = False
                 else:
                     # is the output state the same as the old one?
-                    _, reached_fixedpoint = self._merge_states(n, self._output_state[self._node_key(n)], output_state)
+                    reached_fixedpoint = self._compare_states(n, self._output_state[self._node_key(n)], output_state)
                 self._output_state[self._node_key(n)] = output_state

                 if not reached_fixedpoint:
@@ -1221,6 +1221,8 @@ class SimEnginePropagatorAIL(
                 bits=expr.bits,
                 floating_point=expr.floating_point,
                 rounding_mode=expr.rounding_mode,
+                from_bits=expr.from_bits,
+                to_bits=expr.to_bits,
                 **expr.tags,
             )
         return PropValue.from_value_and_details(value, expr.size, new_expr, self._codeloc())
@@ -1443,6 +1445,44 @@ class SimEnginePropagatorAIL(
             )
         return PropValue.from_value_and_details(value, expr.size, new_expr, self._codeloc())

+    def _ail_handle_ExpCmpNE(self, expr):
+        o0_value = self._expr(expr.operands[0])
+        o1_value = self._expr(expr.operands[1])
+
+        value = self.state.top(expr.bits)
+        if o0_value is None or o1_value is None:
+            new_expr = expr
+        else:
+            o0_expr = o0_value.one_expr
+            o1_expr = o1_value.one_expr
+            new_expr = Expr.BinaryOp(
+                expr.idx,
+                "ExpCmpNE",
+                [
+                    o0_expr if o0_expr is not None else expr.operands[0],
+                    o1_expr if o1_expr is not None else expr.operands[1],
+                ],
+                expr.signed,
+                **expr.tags,
+            )
+        return PropValue.from_value_and_details(value, expr.size, new_expr, self._codeloc())
+
+    def _ail_handle_Clz(self, expr):
+        o0_value = self._expr(expr.operand)
+
+        value = self.state.top(expr.bits)
+        if o0_value is None:
+            new_expr = expr
+        else:
+            o0_expr = o0_value.one_expr
+            new_expr = Expr.UnaryOp(
+                expr.idx,
+                "Clz",
+                o0_expr if o0_expr is not None else expr.operand,
+                **expr.tags,
+            )
+        return PropValue.from_value_and_details(value, expr.size, new_expr, self._codeloc())
+
     #
     # Util methods
     #
@@ -53,7 +53,7 @@ class PropagatorAnalysis(ForwardAnalysis, Analysis):  # pylint:disable=abstract-
         block=None,
         func_graph=None,
         base_state=None,
-        max_iterations=3,
+        max_iterations=30,
         load_callback=None,
         stack_pointer_tracker=None,
         only_consts=False,
@@ -79,7 +79,10 @@ class PropagatorAnalysis(ForwardAnalysis, Analysis):  # pylint:disable=abstract-
         else:
             raise ValueError("Unsupported analysis target.")

-        start = time.perf_counter_ns() / 1000000
+        if profiling:
+            start = time.perf_counter_ns() / 1000000
+        else:
+            start = 0

         self._base_state = base_state
         self._function = func
@@ -320,7 +323,7 @@ class PropagatorAnalysis(ForwardAnalysis, Analysis):  # pylint:disable=abstract-
         # TODO: Clear registers according to calling conventions

         if self.model.node_iterations[block_key] < self._max_iterations:
-            return True, state
+            return None, state
         else:
             return False, state

@@ -596,7 +596,7 @@ class SimEngineRDAIL(
         operand_v = operand.one_value()

         if operand_v is not None and operand_v.concrete:
-            r = MultiValues(~operand_v)
+            r = MultiValues(~operand_v)  # pylint:disable=invalid-unary-operand-type
         else:
             r = MultiValues(self.state.top(bits))

@@ -612,7 +612,7 @@ class SimEngineRDAIL(
         operand_v = operand.one_value()

         if operand_v is not None and operand_v.concrete:
-            r = MultiValues(-operand_v)
+            r = MultiValues(-operand_v)  # pylint:disable=invalid-unary-operand-type
         else:
             r = MultiValues(self.state.top(bits))

@@ -626,7 +626,7 @@ class SimEngineRDAIL(
         operand_v = operand.one_value()

         if operand_v is not None and operand_v.concrete:
-            r = MultiValues(offset_to_values={0: {~operand_v}})
+            r = MultiValues(offset_to_values={0: {~operand_v}})  # pylint:disable=invalid-unary-operand-type
         else:
             r = MultiValues(offset_to_values={0: {self.state.top(bits)}})

@@ -766,27 +766,6 @@ class SimEngineRDAIL(

         return r

-    def _ail_handle_Div(self, expr: ailment.Expr.BinaryOp) -> MultiValues:
-        expr0: MultiValues = self._expr(expr.operands[0])
-        expr1: MultiValues = self._expr(expr.operands[1])
-        bits = expr.bits
-
-        expr0_v = expr0.one_value()
-        expr1_v = expr1.one_value()
-
-        if (
-            expr0_v is not None
-            and expr1_v is not None
-            and expr0_v.concrete
-            and expr1_v.concrete
-            and expr1_v.concrete_value != 0
-        ):
-            r = MultiValues(offset_to_values={0: {expr0_v / expr1_v}})
-        else:
-            r = MultiValues(offset_to_values={0: {self.state.top(bits)}})
-
-        return r
-
     def _ail_handle_Shr(self, expr: ailment.Expr.BinaryOp) -> MultiValues:
         expr0: MultiValues = self._expr(expr.operands[0])
         expr1: MultiValues = self._expr(expr.operands[1])
@@ -1143,6 +1122,19 @@ class SimEngineRDAIL(
         top = self.state.top(expr.bits)
         return MultiValues(offset_to_values={0: {top}})

+    def _ail_handle_ExpCmpNE(self, expr) -> MultiValues:
+        self._expr(expr.operands[0])
+        self._expr(expr.operands[1])
+
+        top = self.state.top(expr.bits)
+        return MultiValues(offset_to_values={0: {top}})
+
+    def _ail_handle_Clz(self, expr) -> MultiValues:
+        self._expr(expr.operand)
+
+        top = self.state.top(expr.bits)
+        return MultiValues(offset_to_values={0: {top}})
+
     def _ail_handle_Const(self, expr) -> MultiValues:
         self.state.mark_const(expr.value, expr.size)
         if isinstance(expr.value, float):
@@ -72,6 +72,7 @@ class ReachingDefinitionsState:
         "_track_consts",
         "_sp_adjusted",
         "exit_observed",
+        "_element_limit",
     )

     def __init__(
@@ -90,6 +91,7 @@ class ReachingDefinitionsState:
         sp_adjusted: bool = False,
         all_definitions: Optional[Set[Definition]] = None,
         initializer: Optional["RDAStateInitializer"] = None,
+        element_limit: int = 5,
     ):
         # handy short-hands
         self.codeloc = codeloc
@@ -100,6 +102,7 @@ class ReachingDefinitionsState:
         self.analysis = analysis
         self._canonical_size: int = canonical_size
         self._sp_adjusted: bool = sp_adjusted
+        self._element_limit: int = element_limit

         self.all_definitions: Set[Definition] = set() if all_definitions is None else all_definitions

@@ -122,7 +125,10 @@ class ReachingDefinitionsState:
         if live_definitions is None:
             # the first time this state is created. initialize it
             self.live_definitions = LiveDefinitions(
-                self.arch, track_tmps=self._track_tmps, canonical_size=canonical_size
+                self.arch,
+                track_tmps=self._track_tmps,
+                canonical_size=canonical_size,
+                element_limit=element_limit,
             )
             if self.analysis is not None:
                 self.live_definitions.project = self.analysis.project
@@ -310,6 +316,7 @@ class ReachingDefinitionsState:
             environment=self._environment,
             sp_adjusted=self._sp_adjusted,
             all_definitions=self.all_definitions.copy(),
+            element_limit=self._element_limit,
         )

         return rd
@@ -323,6 +330,12 @@ class ReachingDefinitionsState:

         return state, merged_0 or merged_1

+    def compare(self, other: "ReachingDefinitionsState") -> bool:
+        r0 = self.live_definitions.compare(other.live_definitions)
+        r1 = self.environment.compare(other.environment)
+
+        return r0 and r1
+
     def move_codelocs(self, new_codeloc: CodeLocation) -> None:
         if self.codeloc != new_codeloc:
             self.codeloc = new_codeloc
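A hedged sketch of how the new ReachingDefinitionsState.compare() pairs with the _compare_states() hook added to ForwardAnalysis in this release (see the forward_analysis.py hunk above). The subclass below is illustrative only and assumes compare() returns True when the two states are equivalent for fixed-point purposes:

from angr.analyses.forward_analysis import ForwardAnalysis

class MyRDALikeAnalysis(ForwardAnalysis):
    def _compare_states(self, node, old_state, new_state) -> bool:
        # cheaper than the default implementation, which calls _merge_states() and discards the merge
        return new_state.compare(old_state)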