angr 9.2.96__py3-none-manylinux2014_x86_64.whl → 9.2.97__py3-none-manylinux2014_x86_64.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.

Potentially problematic release.


This version of angr might be problematic. See the registry's release advisory for more details.

Files changed (33)
  1. angr/__init__.py +1 -1
  2. angr/analyses/complete_calling_conventions.py +27 -11
  3. angr/analyses/decompiler/ail_simplifier.py +20 -8
  4. angr/analyses/decompiler/condition_processor.py +2 -0
  5. angr/analyses/find_objects_static.py +15 -10
  6. angr/analyses/forward_analysis/forward_analysis.py +15 -1
  7. angr/analyses/propagator/engine_ail.py +2 -0
  8. angr/analyses/propagator/propagator.py +6 -3
  9. angr/analyses/reaching_definitions/rd_state.py +14 -1
  10. angr/analyses/reaching_definitions/reaching_definitions.py +19 -2
  11. angr/analyses/variable_recovery/engine_ail.py +6 -6
  12. angr/analyses/variable_recovery/variable_recovery_base.py +4 -1
  13. angr/engines/light/engine.py +8 -1
  14. angr/knowledge_plugins/key_definitions/environment.py +11 -0
  15. angr/knowledge_plugins/key_definitions/live_definitions.py +41 -8
  16. angr/knowledge_plugins/key_definitions/uses.py +18 -4
  17. angr/knowledge_plugins/propagations/states.py +22 -3
  18. angr/knowledge_plugins/types.py +6 -0
  19. angr/knowledge_plugins/variables/variable_manager.py +31 -5
  20. angr/simos/simos.py +2 -0
  21. angr/storage/memory_mixins/__init__.py +3 -0
  22. angr/storage/memory_mixins/multi_value_merger_mixin.py +22 -11
  23. angr/storage/memory_mixins/paged_memory/paged_memory_mixin.py +20 -2
  24. angr/storage/memory_mixins/paged_memory/pages/mv_list_page.py +81 -44
  25. angr/utils/cowdict.py +4 -2
  26. angr/utils/funcid.py +6 -0
  27. angr/utils/mp.py +1 -1
  28. {angr-9.2.96.dist-info → angr-9.2.97.dist-info}/METADATA +6 -6
  29. {angr-9.2.96.dist-info → angr-9.2.97.dist-info}/RECORD +33 -33
  30. {angr-9.2.96.dist-info → angr-9.2.97.dist-info}/LICENSE +0 -0
  31. {angr-9.2.96.dist-info → angr-9.2.97.dist-info}/WHEEL +0 -0
  32. {angr-9.2.96.dist-info → angr-9.2.97.dist-info}/entry_points.txt +0 -0
  33. {angr-9.2.96.dist-info → angr-9.2.97.dist-info}/top_level.txt +0 -0
angr/__init__.py CHANGED
@@ -1,7 +1,7 @@
1
1
  # pylint: disable=wildcard-import
2
2
  # pylint: disable=wrong-import-position
3
3
 
4
- __version__ = "9.2.96"
4
+ __version__ = "9.2.97"
5
5
 
6
6
  if bytes is str:
7
7
  raise Exception(
@@ -1,3 +1,4 @@
1
+ # pylint:disable=import-outside-toplevel
1
2
  from typing import Tuple, Optional, Callable, Iterable, Dict, Set, TYPE_CHECKING
2
3
  import queue
3
4
  import threading
@@ -10,6 +11,7 @@ import networkx
10
11
  import claripy
11
12
 
12
13
  from angr.utils.graph import GraphUtils
14
+ from angr.simos import SimWindows
13
15
  from ..utils.mp import mp_context, Initializer
14
16
  from ..knowledge_plugins.cfg import CFGModel
15
17
  from . import Analysis, register_analysis, VariableRecoveryFast, CallingConventionAnalysis
@@ -88,11 +90,13 @@ class CompleteCallingConventionsAnalysis(Analysis):
88
90
  self._results = []
89
91
  if workers > 0:
90
92
  self._remaining_funcs = _mp_context.Value("i", 0)
91
- self._func_queue = _mp_context.Queue()
92
93
  self._results = _mp_context.Queue()
94
+ self._results_lock = _mp_context.Lock()
95
+ self._func_queue = _mp_context.Queue()
93
96
  self._func_queue_lock = _mp_context.Lock()
94
97
  else:
95
98
  self._remaining_funcs = None # not needed
99
+ self._results_lock = None # not needed
96
100
  self._func_queue = None # not needed
97
101
  self._func_queue_lock = threading.Lock()
98
102
 
@@ -205,9 +209,7 @@ class CompleteCallingConventionsAnalysis(Analysis):
205
209
  dependents[callee].add(func_addr)
206
210
 
207
211
  # enqueue all leaf functions
208
- for func_addr in list(
209
- k for k in depends_on if not depends_on[k]
210
- ): # pylint:disable=consider-using-dict-items
212
+ for func_addr in [k for k in depends_on if not depends_on[k]]: # pylint:disable=consider-using-dict-items
211
213
  self._func_queue.put((func_addr, None))
212
214
  del depends_on[func_addr]
213
215
 
@@ -215,11 +217,17 @@ class CompleteCallingConventionsAnalysis(Analysis):
215
217
  cc_callback = self._cc_callback
216
218
  self._cc_callback = None
217
219
 
220
+ if self.project.simos is not None and isinstance(self.project.simos, SimWindows):
221
+ # delayed import
222
+ from angr.procedures.definitions import load_win32api_definitions
223
+
224
+ Initializer.get().register(load_win32api_definitions)
225
+
218
226
  # spawn workers to perform the analysis
219
227
  with self._func_queue_lock:
220
228
  procs = [
221
- _mp_context.Process(target=self._worker_routine, args=(Initializer.get(),), daemon=True)
222
- for _ in range(self._workers)
229
+ _mp_context.Process(target=self._worker_routine, args=(worker_id, Initializer.get()), daemon=True)
230
+ for worker_id in range(self._workers)
223
231
  ]
224
232
  for proc_idx, proc in enumerate(procs):
225
233
  self._update_progress(0, text=f"Spawning worker {proc_idx}...")
@@ -231,7 +239,13 @@ class CompleteCallingConventionsAnalysis(Analysis):
231
239
  self._update_progress(0)
232
240
  idx = 0
233
241
  while idx < total_funcs:
234
- func_addr, cc, proto, proto_libname, varman = self._results.get(True)
242
+ try:
243
+ with self._results_lock:
244
+ func_addr, cc, proto, proto_libname, varman = self._results.get(True, timeout=0.01)
245
+ except queue.Empty:
246
+ time.sleep(0.1)
247
+ continue
248
+
235
249
  func = self.kb.functions.get_by_addr(func_addr)
236
250
  if cc is not None or proto is not None:
237
251
  func.calling_convention = cc
@@ -260,13 +274,14 @@ class CompleteCallingConventionsAnalysis(Analysis):
260
274
  depends_on[dependent].discard(func_addr)
261
275
  if not depends_on[dependent]:
262
276
  callee_prototypes = self._get_callees_cc_prototypes(dependent)
263
- self._func_queue.put((dependent, callee_prototypes))
277
+ with self._func_queue_lock:
278
+ self._func_queue.put((dependent, callee_prototypes))
264
279
  del depends_on[dependent]
265
280
 
266
281
  for proc in procs:
267
282
  proc.join()
268
283
 
269
- def _worker_routine(self, initializer: Initializer):
284
+ def _worker_routine(self, worker_id: int, initializer: Initializer):
270
285
  initializer.initialize()
271
286
  idx = 0
272
287
  while self._remaining_funcs.value > 0:
@@ -293,9 +308,10 @@ class CompleteCallingConventionsAnalysis(Analysis):
293
308
  try:
294
309
  cc, proto, proto_libname, varman = self._analyze_core(func_addr)
295
310
  except Exception: # pylint:disable=broad-except
296
- _l.error("Exception occurred during _analyze_core().", exc_info=True)
311
+ _l.error("Worker %d: Exception occurred during _analyze_core().", worker_id, exc_info=True)
297
312
  cc, proto, proto_libname, varman = None, None, None, None
298
- self._results.put((func_addr, cc, proto, proto_libname, varman))
313
+ with self._results_lock:
314
+ self._results.put((func_addr, cc, proto, proto_libname, varman))
299
315
 
300
316
  def _analyze_core(
301
317
  self, func_addr: int
@@ -1,3 +1,4 @@
1
+ # pylint:disable=too-many-boolean-expressions
1
2
  from typing import Set, Dict, List, Tuple, Any, Optional, TYPE_CHECKING
2
3
  from collections import defaultdict
3
4
  import logging
@@ -183,6 +184,7 @@ class AILSimplifier(Analysis):
183
184
  observe_all=False,
184
185
  use_callee_saved_regs_at_return=self._use_callee_saved_regs_at_return,
185
186
  track_tmps=True,
187
+ element_limit=1,
186
188
  ).model
187
189
  self._reaching_definitions = rd
188
190
  return rd
@@ -504,7 +506,9 @@ class AILSimplifier(Analysis):
504
506
 
505
507
  first_op = walker.operations[0]
506
508
  if isinstance(first_op, Convert):
507
- return first_op.to_bits // self.project.arch.byte_width, ("convert", (first_op,))
509
+ if first_op.to_bits >= self.project.arch.byte_width:
510
+ # we need at least one byte!
511
+ return first_op.to_bits // self.project.arch.byte_width, ("convert", (first_op,))
508
512
  if isinstance(first_op, BinaryOp):
509
513
  second_op = None
510
514
  if len(walker.operations) >= 2:
@@ -526,6 +530,7 @@ class AILSimplifier(Analysis):
526
530
  and first_op.op not in {"Shr", "Sar"}
527
531
  and isinstance(second_op, Convert)
528
532
  and second_op.from_bits == expr.bits
533
+ and second_op.to_bits >= self.project.arch.byte_width # we need at least one byte!
529
534
  ):
530
535
  return min(expr.bits, second_op.to_bits) // self.project.arch.byte_width, (
531
536
  "binop-convert",
@@ -721,13 +726,13 @@ class AILSimplifier(Analysis):
721
726
  ):
722
727
  continue
723
728
 
724
- # Make sure the register is never updated across this function
725
- if any(
726
- (def_ != the_def and def_.atom == the_def.atom)
727
- for def_ in rd.all_definitions
728
- if isinstance(def_.atom, atoms.Register) and rd.all_uses.get_uses(def_)
729
- ):
730
- continue
729
+ # Make sure the register is never updated across this function
730
+ if any(
731
+ (def_ != the_def and def_.atom == the_def.atom)
732
+ for def_ in rd.all_definitions
733
+ if isinstance(def_.atom, atoms.Register) and rd.all_uses.get_uses(def_)
734
+ ):
735
+ continue
731
736
 
732
737
  # find all its uses
733
738
  all_arg_copy_var_uses: Set[Tuple[CodeLocation, Any]] = set(
@@ -1214,6 +1219,13 @@ class AILSimplifier(Analysis):
1214
1219
  continue
1215
1220
 
1216
1221
  uses = rd.all_uses.get_uses(def_)
1222
+ if (
1223
+ isinstance(def_.atom, atoms.Register)
1224
+ and def_.atom.reg_offset in self.project.arch.artificial_registers_offsets
1225
+ ):
1226
+ if len(uses) == 1 and next(iter(uses)) == def_.codeloc:
1227
+ # cc_ndep = amd64g_calculate_condition(..., cc_ndep)
1228
+ uses = set()
1217
1229
 
1218
1230
  if not uses:
1219
1231
  if not isinstance(def_.codeloc, ExternalCodeLocation):
@@ -766,6 +766,8 @@ class ConditionProcessor:
766
766
  var = claripy.BoolV(condition.value)
767
767
  else:
768
768
  var = claripy.BVV(condition.value, condition.bits)
769
+ if isinstance(var, claripy.Bits) and var.size() == 1:
770
+ var = claripy.true if var.concrete_value == 1 else claripy.false
769
771
  return var
770
772
  elif isinstance(condition, ailment.Expr.Tmp):
771
773
  l.warning("Left-over ailment.Tmp variable %s.", condition)
@@ -45,6 +45,7 @@ class NewFunctionHandler(FunctionHandler):
45
45
  """
46
46
 
47
47
  def __init__(self, max_addr=None, new_func_addr=None, project=None):
48
+ super().__init__()
48
49
  self.max_addr = max_addr
49
50
 
50
51
  # this is a map between an object addr outside the mapped binary and PossibleObject instance
@@ -104,16 +105,20 @@ class NewFunctionHandler(FunctionHandler):
104
105
  data.depends(memory_location, value=MultiValues(offset_to_values=offset_to_values))
105
106
  self.max_addr += size
106
107
 
107
- elif "ctor" in self.project.kb.functions[function_address].demangled_name:
108
- # check if rdi has a possible this pointer/ object address, if so then we can assign this object this class
109
- # also if the func is a constructor(not stripped binaries)
110
- for addr, possible_object in self.possible_objects_dict.items():
111
- v1 = state.registers.load(72, state.arch.bits // state.arch.byte_width).one_value()
112
- obj_addr = v1.concrete_value if v1 is not None and v1.concrete else None
113
- if obj_addr is not None and addr == obj_addr:
114
- col_ind = self.project.kb.functions[function_address].demangled_name.rfind("::")
115
- class_name = self.project.kb.functions[function_address].demangled_name[:col_ind]
116
- possible_object.class_name = class_name
108
+ else:
109
+ if self.project.kb.functions.contains_addr(function_address):
110
+ func = self.project.kb.functions.get_by_addr(function_address)
111
+ if func is not None and "ctor" in func.demangled_name:
112
+ # check if rdi has a possible this pointer/ object address, if so then we can assign this object
113
+ # this class
114
+ # also if the func is a constructor(not stripped binaries)
115
+ for addr, possible_object in self.possible_objects_dict.items():
116
+ v1 = state.registers.load(72, state.arch.bits // state.arch.byte_width).one_value()
117
+ obj_addr = v1.concrete_value if v1 is not None and v1.concrete else None
118
+ if obj_addr is not None and addr == obj_addr:
119
+ col_ind = self.project.kb.functions[function_address].demangled_name.rfind("::")
120
+ class_name = self.project.kb.functions[function_address].demangled_name[:col_ind]
121
+ possible_object.class_name = class_name
117
122
 
118
123
 
119
124
  class StaticObjectFinder(Analysis):
@@ -209,6 +209,20 @@ class ForwardAnalysis(Generic[AnalysisState, NodeType, JobType, JobKey]):
209
209
 
210
210
  raise NotImplementedError("_merge_states() is not implemented.")
211
211
 
212
+ def _compare_states(self, node: NodeType, old_state: AnalysisState, new_state: AnalysisState) -> bool:
213
+ """
214
+ Determine if the analysis has reached fixed point at `node`.
215
+
216
+ You can override this method to implement a faster _compare_states() method.
217
+
218
+ :param node: The node that has been analyzed.
219
+ :param old_state: The original output state out of node.
220
+ :param new_state: The new output state out of node.
221
+ :return: True if the analysis has reached fixed at node. False otherwise.
222
+ """
223
+ _, has_no_changes = self._merge_states(node, old_state, new_state)
224
+ return has_no_changes
225
+
212
226
  def _widen_states(self, *states: AnalysisState) -> AnalysisState:
213
227
  raise NotImplementedError("_widen_states() is not implemented.")
214
228
 
@@ -288,7 +302,7 @@ class ForwardAnalysis(Generic[AnalysisState, NodeType, JobType, JobKey]):
288
302
  reached_fixedpoint = False
289
303
  else:
290
304
  # is the output state the same as the old one?
291
- _, reached_fixedpoint = self._merge_states(n, self._output_state[self._node_key(n)], output_state)
305
+ reached_fixedpoint = self._compare_states(n, self._output_state[self._node_key(n)], output_state)
292
306
  self._output_state[self._node_key(n)] = output_state
293
307
 
294
308
  if not reached_fixedpoint:
@@ -1221,6 +1221,8 @@ class SimEnginePropagatorAIL(
1221
1221
  bits=expr.bits,
1222
1222
  floating_point=expr.floating_point,
1223
1223
  rounding_mode=expr.rounding_mode,
1224
+ from_bits=expr.from_bits,
1225
+ to_bits=expr.to_bits,
1224
1226
  **expr.tags,
1225
1227
  )
1226
1228
  return PropValue.from_value_and_details(value, expr.size, new_expr, self._codeloc())
@@ -53,7 +53,7 @@ class PropagatorAnalysis(ForwardAnalysis, Analysis): # pylint:disable=abstract-
53
53
  block=None,
54
54
  func_graph=None,
55
55
  base_state=None,
56
- max_iterations=3,
56
+ max_iterations=30,
57
57
  load_callback=None,
58
58
  stack_pointer_tracker=None,
59
59
  only_consts=False,
@@ -79,7 +79,10 @@ class PropagatorAnalysis(ForwardAnalysis, Analysis): # pylint:disable=abstract-
79
79
  else:
80
80
  raise ValueError("Unsupported analysis target.")
81
81
 
82
- start = time.perf_counter_ns() / 1000000
82
+ if profiling:
83
+ start = time.perf_counter_ns() / 1000000
84
+ else:
85
+ start = 0
83
86
 
84
87
  self._base_state = base_state
85
88
  self._function = func
@@ -320,7 +323,7 @@ class PropagatorAnalysis(ForwardAnalysis, Analysis): # pylint:disable=abstract-
320
323
  # TODO: Clear registers according to calling conventions
321
324
 
322
325
  if self.model.node_iterations[block_key] < self._max_iterations:
323
- return True, state
326
+ return None, state
324
327
  else:
325
328
  return False, state
326
329
 
@@ -72,6 +72,7 @@ class ReachingDefinitionsState:
72
72
  "_track_consts",
73
73
  "_sp_adjusted",
74
74
  "exit_observed",
75
+ "_element_limit",
75
76
  )
76
77
 
77
78
  def __init__(
@@ -90,6 +91,7 @@ class ReachingDefinitionsState:
90
91
  sp_adjusted: bool = False,
91
92
  all_definitions: Optional[Set[Definition]] = None,
92
93
  initializer: Optional["RDAStateInitializer"] = None,
94
+ element_limit: int = 5,
93
95
  ):
94
96
  # handy short-hands
95
97
  self.codeloc = codeloc
@@ -100,6 +102,7 @@ class ReachingDefinitionsState:
100
102
  self.analysis = analysis
101
103
  self._canonical_size: int = canonical_size
102
104
  self._sp_adjusted: bool = sp_adjusted
105
+ self._element_limit: int = element_limit
103
106
 
104
107
  self.all_definitions: Set[Definition] = set() if all_definitions is None else all_definitions
105
108
 
@@ -122,7 +125,10 @@ class ReachingDefinitionsState:
122
125
  if live_definitions is None:
123
126
  # the first time this state is created. initialize it
124
127
  self.live_definitions = LiveDefinitions(
125
- self.arch, track_tmps=self._track_tmps, canonical_size=canonical_size
128
+ self.arch,
129
+ track_tmps=self._track_tmps,
130
+ canonical_size=canonical_size,
131
+ element_limit=element_limit,
126
132
  )
127
133
  if self.analysis is not None:
128
134
  self.live_definitions.project = self.analysis.project
@@ -310,6 +316,7 @@ class ReachingDefinitionsState:
310
316
  environment=self._environment,
311
317
  sp_adjusted=self._sp_adjusted,
312
318
  all_definitions=self.all_definitions.copy(),
319
+ element_limit=self._element_limit,
313
320
  )
314
321
 
315
322
  return rd
@@ -323,6 +330,12 @@ class ReachingDefinitionsState:
323
330
 
324
331
  return state, merged_0 or merged_1
325
332
 
333
+ def compare(self, other: "ReachingDefinitionsState") -> bool:
334
+ r0 = self.live_definitions.compare(other.live_definitions)
335
+ r1 = self.environment.compare(other.environment)
336
+
337
+ return r0 and r1
338
+
326
339
  def move_codelocs(self, new_codeloc: CodeLocation) -> None:
327
340
  if self.codeloc != new_codeloc:
328
341
  self.codeloc = new_codeloc
@@ -55,7 +55,7 @@ class ReachingDefinitionsAnalysis(
55
55
  self,
56
56
  subject: Union[Subject, ailment.Block, Block, Function, str] = None,
57
57
  func_graph=None,
58
- max_iterations=3,
58
+ max_iterations=30,
59
59
  track_tmps=False,
60
60
  track_consts=True,
61
61
  observation_points: "Iterable[ObservationPoint]" = None,
@@ -74,6 +74,7 @@ class ReachingDefinitionsAnalysis(
74
74
  interfunction_level: int = 0,
75
75
  track_liveness: bool = True,
76
76
  func_addr: Optional[int] = None,
77
+ element_limit: int = 5,
77
78
  ):
78
79
  """
79
80
  :param subject: The subject of the analysis: a function, or a single basic block
@@ -131,6 +132,7 @@ class ReachingDefinitionsAnalysis(
131
132
  self._canonical_size = canonical_size
132
133
  self._use_callee_saved_regs_at_return = use_callee_saved_regs_at_return
133
134
  self._func_addr = func_addr
135
+ self._element_limit = element_limit
134
136
 
135
137
  if dep_graph is None or dep_graph is False:
136
138
  self._dep_graph = None
@@ -469,13 +471,28 @@ class ReachingDefinitionsAnalysis(
469
471
  analysis=self,
470
472
  canonical_size=self._canonical_size,
471
473
  initializer=self._state_initializer,
474
+ element_limit=self._element_limit,
472
475
  )
473
476
 
474
477
  # pylint: disable=no-self-use,arguments-differ
475
478
  def _merge_states(self, _node, *states: ReachingDefinitionsState):
479
+ assert len(states) >= 2
476
480
  merged_state, merge_occurred = states[0].merge(*states[1:])
477
481
  return merged_state, not merge_occurred
478
482
 
483
+ def _compare_states(self, node, old_state: ReachingDefinitionsState, new_state: ReachingDefinitionsState) -> bool:
484
+ """
485
+ Return True if new_state >= old_state in the lattice.
486
+
487
+ :param node:
488
+ :param old_state:
489
+ :param new_state:
490
+ :return:
491
+ """
492
+
493
+ reached_fixedpoint = new_state.compare(old_state)
494
+ return reached_fixedpoint
495
+
479
496
  def _run_on_node(self, node, state: ReachingDefinitionsState):
480
497
  """
481
498
 
@@ -550,7 +567,7 @@ class ReachingDefinitionsAnalysis(
550
567
  state.downsize()
551
568
 
552
569
  if self._node_iterations[block_key] < self._max_iterations:
553
- return True, state
570
+ return None, state
554
571
  else:
555
572
  return False, state
556
573
 
@@ -469,20 +469,20 @@ class SimEngineVRAIL(
469
469
 
470
470
  r0 = self._expr(arg0)
471
471
  r1 = self._expr(arg1)
472
- from_size = r1.bits
473
- to_size = r0.bits
472
+ from_size = expr.from_bits
473
+ to_size = expr.to_bits
474
474
 
475
475
  if expr.signed:
476
- quotient = r0.data.SDiv(claripy.SignExt(to_size - from_size, r1.data))
477
- remainder = r0.data.SMod(claripy.SignExt(to_size - from_size, r1.data))
476
+ quotient = r0.data.SDiv(claripy.SignExt(from_size - to_size, r1.data))
477
+ remainder = r0.data.SMod(claripy.SignExt(from_size - to_size, r1.data))
478
478
  quotient_size = to_size
479
479
  remainder_size = to_size
480
480
  r = claripy.Concat(
481
481
  claripy.Extract(remainder_size - 1, 0, remainder), claripy.Extract(quotient_size - 1, 0, quotient)
482
482
  )
483
483
  else:
484
- quotient = r0.data // claripy.ZeroExt(to_size - from_size, r1.data)
485
- remainder = r0.data % claripy.ZeroExt(to_size - from_size, r1.data)
484
+ quotient = r0.data // claripy.ZeroExt(from_size - to_size, r1.data)
485
+ remainder = r0.data % claripy.ZeroExt(from_size - to_size, r1.data)
486
486
  quotient_size = to_size
487
487
  remainder_size = to_size
488
488
  r = claripy.Concat(
@@ -175,6 +175,7 @@ class VariableRecoveryStateBase:
175
175
  self.stack_region: MultiValuedMemory = MultiValuedMemory(
176
176
  memory_id="mem",
177
177
  top_func=self.top,
178
+ is_top_func=self.is_top,
178
179
  phi_maker=self._make_phi_variable,
179
180
  skip_missing_values_during_merging=True,
180
181
  page_kwargs={"mo_cmp": self._mo_cmp},
@@ -188,6 +189,7 @@ class VariableRecoveryStateBase:
188
189
  self.register_region: MultiValuedMemory = MultiValuedMemory(
189
190
  memory_id="reg",
190
191
  top_func=self.top,
192
+ is_top_func=self.is_top,
191
193
  phi_maker=self._make_phi_variable,
192
194
  skip_missing_values_during_merging=True,
193
195
  page_kwargs={"mo_cmp": self._mo_cmp},
@@ -201,6 +203,7 @@ class VariableRecoveryStateBase:
201
203
  self.global_region: MultiValuedMemory = MultiValuedMemory(
202
204
  memory_id="mem",
203
205
  top_func=self.top,
206
+ is_top_func=self.is_top,
204
207
  phi_maker=self._make_phi_variable,
205
208
  skip_missing_values_during_merging=True,
206
209
  page_kwargs={"mo_cmp": self._mo_cmp},
@@ -215,7 +218,7 @@ class VariableRecoveryStateBase:
215
218
  self.type_constraints = defaultdict(set) if type_constraints is None else type_constraints
216
219
  self.func_typevar = func_typevar
217
220
  self.delayed_type_constraints = (
218
- DefaultChainMapCOW(set, collapse_threshold=25)
221
+ DefaultChainMapCOW(default_factory=set, collapse_threshold=25)
219
222
  if delayed_type_constraints is None
220
223
  else delayed_type_constraints
221
224
  )
@@ -1078,7 +1078,14 @@ class SimEngineLightAILMixin(SimEngineLightMixin):
1078
1078
  expr_1 = arg1
1079
1079
 
1080
1080
  return ailment.Expr.BinaryOp(
1081
- expr.idx, "DivMod", [expr_0, expr_1], expr.signed, bits=expr_0.bits * 2, **expr.tags
1081
+ expr.idx,
1082
+ "DivMod",
1083
+ [expr_0, expr_1],
1084
+ expr.signed,
1085
+ bits=expr.bits,
1086
+ from_bits=expr.from_bits,
1087
+ to_bits=expr.to_bits,
1088
+ **expr.tags,
1082
1089
  )
1083
1090
 
1084
1091
  def _ail_handle_Mod(self, expr):
@@ -14,6 +14,8 @@ class Environment:
14
14
  **Note**: The <Environment> object does not store the values associated with variables themselves.
15
15
  """
16
16
 
17
+ __slots__ = ("_environment",)
18
+
17
19
  def __init__(self, environment: Dict[Union[str, Undefined], Set[claripy.ast.Base]] = None):
18
20
  self._environment: Dict[Union[str, Undefined], Set[claripy.ast.Base]] = environment or {}
19
21
 
@@ -81,3 +83,12 @@ class Environment:
81
83
 
82
84
  merge_occurred = new_env != self._environment
83
85
  return Environment(environment=new_env), merge_occurred
86
+
87
+ def compare(self, other: "Environment") -> bool:
88
+ for k in set(self._environment.keys()).union(set(other._environment.keys())):
89
+ if k not in self._environment:
90
+ return False
91
+ if k in self._environment and k in other._environment:
92
+ if not self._environment[k].issuperset(other._environment[k]):
93
+ return False
94
+ return True
@@ -49,11 +49,12 @@ class DefinitionAnnotation(Annotation):
49
49
  An annotation that attaches a `Definition` to an AST.
50
50
  """
51
51
 
52
- __slots__ = ("definition",)
52
+ __slots__ = ("definition", "_hash")
53
53
 
54
54
  def __init__(self, definition):
55
55
  super().__init__()
56
56
  self.definition = definition
57
+ self._hash = hash((DefinitionAnnotation, self.definition))
57
58
 
58
59
  @property
59
60
  def relocatable(self):
@@ -64,15 +65,11 @@ class DefinitionAnnotation(Annotation):
64
65
  return False
65
66
 
66
67
  def __hash__(self):
67
- return hash((self.definition, self.relocatable, self.eliminatable))
68
+ return self._hash
68
69
 
69
70
  def __eq__(self, other: "object"):
70
- if isinstance(other, DefinitionAnnotation):
71
- return (
72
- self.definition == other.definition
73
- and self.relocatable == other.relocatable
74
- and self.eliminatable == other.eliminatable
75
- )
71
+ if type(other) is DefinitionAnnotation:
72
+ return self.definition == other.definition
76
73
  else:
77
74
  return False
78
75
 
@@ -129,6 +126,7 @@ class LiveDefinitions:
129
126
  memory_uses=None,
130
127
  tmp_uses=None,
131
128
  other_uses=None,
129
+ element_limit=5,
132
130
  ):
133
131
  self.project: Optional["Project"] = None
134
132
  self.arch = arch
@@ -139,9 +137,11 @@ class LiveDefinitions:
139
137
  MultiValuedMemory(
140
138
  memory_id="reg",
141
139
  top_func=self.top,
140
+ is_top_func=self.is_top,
142
141
  skip_missing_values_during_merging=False,
143
142
  page_kwargs={"mo_cmp": self._mo_cmp},
144
143
  endness=self.arch.register_endness,
144
+ element_limit=element_limit,
145
145
  )
146
146
  if registers is None
147
147
  else registers
@@ -150,8 +150,10 @@ class LiveDefinitions:
150
150
  MultiValuedMemory(
151
151
  memory_id="mem",
152
152
  top_func=self.top,
153
+ is_top_func=self.is_top,
153
154
  skip_missing_values_during_merging=False,
154
155
  page_kwargs={"mo_cmp": self._mo_cmp},
156
+ element_limit=element_limit,
155
157
  )
156
158
  if stack is None
157
159
  else stack
@@ -160,8 +162,10 @@ class LiveDefinitions:
160
162
  MultiValuedMemory(
161
163
  memory_id="mem",
162
164
  top_func=self.top,
165
+ is_top_func=self.is_top,
163
166
  skip_missing_values_during_merging=False,
164
167
  page_kwargs={"mo_cmp": self._mo_cmp},
168
+ element_limit=element_limit,
165
169
  )
166
170
  if memory is None
167
171
  else memory
@@ -170,8 +174,10 @@ class LiveDefinitions:
170
174
  MultiValuedMemory(
171
175
  memory_id="mem",
172
176
  top_func=self.top,
177
+ is_top_func=self.is_top,
173
178
  skip_missing_values_during_merging=False,
174
179
  page_kwargs={"mo_cmp": self._mo_cmp},
180
+ element_limit=element_limit,
175
181
  )
176
182
  if heap is None
177
183
  else heap
@@ -464,6 +470,33 @@ class LiveDefinitions:
464
470
 
465
471
  return state, merge_occurred
466
472
 
473
+ def compare(self, other: "LiveDefinitions") -> bool:
474
+ r0 = self.registers.compare(other.registers)
475
+ if r0 is False:
476
+ return False
477
+ r1 = self.heap.compare(other.heap)
478
+ if r1 is False:
479
+ return False
480
+ r2 = self.memory.compare(other.memory)
481
+ if r2 is False:
482
+ return False
483
+ r3 = self.stack.compare(other.stack)
484
+ if r3 is False:
485
+ return False
486
+
487
+ r4 = True
488
+ for k in other.others:
489
+ if k in self.others:
490
+ thing = self.others[k].merge(other.others[k])
491
+ if thing != self.others[k]:
492
+ r4 = False
493
+ break
494
+ else:
495
+ r4 = False
496
+ break
497
+
498
+ return r0 and r1 and r2 and r3 and r4
499
+
467
500
  def kill_definitions(self, atom: Atom) -> None:
468
501
  """
469
502
  Overwrite existing definitions w.r.t 'atom' with a dummy definition instance. A dummy definition will not be