angr 9.2.96__py3-none-manylinux2014_x86_64.whl → 9.2.97__py3-none-manylinux2014_x86_64.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.

Potentially problematic release.


This version of angr might be problematic. Click here for more details.

Files changed (33)
  1. angr/__init__.py +1 -1
  2. angr/analyses/complete_calling_conventions.py +27 -11
  3. angr/analyses/decompiler/ail_simplifier.py +20 -8
  4. angr/analyses/decompiler/condition_processor.py +2 -0
  5. angr/analyses/find_objects_static.py +15 -10
  6. angr/analyses/forward_analysis/forward_analysis.py +15 -1
  7. angr/analyses/propagator/engine_ail.py +2 -0
  8. angr/analyses/propagator/propagator.py +6 -3
  9. angr/analyses/reaching_definitions/rd_state.py +14 -1
  10. angr/analyses/reaching_definitions/reaching_definitions.py +19 -2
  11. angr/analyses/variable_recovery/engine_ail.py +6 -6
  12. angr/analyses/variable_recovery/variable_recovery_base.py +4 -1
  13. angr/engines/light/engine.py +8 -1
  14. angr/knowledge_plugins/key_definitions/environment.py +11 -0
  15. angr/knowledge_plugins/key_definitions/live_definitions.py +41 -8
  16. angr/knowledge_plugins/key_definitions/uses.py +18 -4
  17. angr/knowledge_plugins/propagations/states.py +22 -3
  18. angr/knowledge_plugins/types.py +6 -0
  19. angr/knowledge_plugins/variables/variable_manager.py +31 -5
  20. angr/simos/simos.py +2 -0
  21. angr/storage/memory_mixins/__init__.py +3 -0
  22. angr/storage/memory_mixins/multi_value_merger_mixin.py +22 -11
  23. angr/storage/memory_mixins/paged_memory/paged_memory_mixin.py +20 -2
  24. angr/storage/memory_mixins/paged_memory/pages/mv_list_page.py +81 -44
  25. angr/utils/cowdict.py +4 -2
  26. angr/utils/funcid.py +6 -0
  27. angr/utils/mp.py +1 -1
  28. {angr-9.2.96.dist-info → angr-9.2.97.dist-info}/METADATA +6 -6
  29. {angr-9.2.96.dist-info → angr-9.2.97.dist-info}/RECORD +33 -33
  30. {angr-9.2.96.dist-info → angr-9.2.97.dist-info}/LICENSE +0 -0
  31. {angr-9.2.96.dist-info → angr-9.2.97.dist-info}/WHEEL +0 -0
  32. {angr-9.2.96.dist-info → angr-9.2.97.dist-info}/entry_points.txt +0 -0
  33. {angr-9.2.96.dist-info → angr-9.2.97.dist-info}/top_level.txt +0 -0
@@ -21,10 +21,14 @@ class Uses:
21
21
  uses_by_location: Optional[DefaultChainMapCOW] = None,
22
22
  ):
23
23
  self._uses_by_definition: DefaultChainMapCOW["Definition", Set[Tuple[CodeLocation, Optional[Any]]]] = (
24
- DefaultChainMapCOW(set, collapse_threshold=25) if uses_by_definition is None else uses_by_definition
24
+ DefaultChainMapCOW(default_factory=set, collapse_threshold=25)
25
+ if uses_by_definition is None
26
+ else uses_by_definition
25
27
  )
26
28
  self._uses_by_location: DefaultChainMapCOW[CodeLocation, Set[Tuple["Definition", Optional[Any]]]] = (
27
- DefaultChainMapCOW(set, collapse_threshold=25) if uses_by_location is None else uses_by_location
29
+ DefaultChainMapCOW(default_factory=set, collapse_threshold=25)
30
+ if uses_by_location is None
31
+ else uses_by_location
28
32
  )
29
33
 
30
34
  def add_use(self, definition: "Definition", codeloc: CodeLocation, expr: Optional[Any] = None):
@@ -35,7 +39,9 @@ class Uses:
35
39
  :param codeloc: The code location where the use occurs.
36
40
  :param expr: The expression that uses the specified definition at this location.
37
41
  """
42
+ self._uses_by_definition = self._uses_by_definition.clean()
38
43
  self._uses_by_definition[definition].add((codeloc, expr))
44
+ self._uses_by_location = self._uses_by_location.clean()
39
45
  self._uses_by_location[codeloc].add((definition, expr))
40
46
 
41
47
  def get_uses(self, definition: "Definition") -> Set[CodeLocation]:
@@ -65,6 +71,7 @@ class Uses:
65
71
  """
66
72
  if definition in self._uses_by_definition:
67
73
  if codeloc in self._uses_by_definition[definition]:
74
+ self._uses_by_definition = self._uses_by_definition.clean()
68
75
  if expr is None:
69
76
  for codeloc_, expr_ in list(self._uses_by_definition[definition]):
70
77
  if codeloc_ == codeloc:
@@ -73,6 +80,7 @@ class Uses:
73
80
  self._uses_by_definition[definition].remove((codeloc, expr))
74
81
 
75
82
  if codeloc in self._uses_by_location:
83
+ self._uses_by_location = self._uses_by_location.clean()
76
84
  for item in list(self._uses_by_location[codeloc]):
77
85
  if item[0] == definition:
78
86
  self._uses_by_location[codeloc].remove(item)
@@ -85,9 +93,11 @@ class Uses:
85
93
  :return: None
86
94
  """
87
95
  if definition in self._uses_by_definition:
96
+ self._uses_by_definition = self._uses_by_definition.clean()
88
97
  codeloc_and_ids = self._uses_by_definition[definition]
89
98
  del self._uses_by_definition[definition]
90
99
 
100
+ self._uses_by_location = self._uses_by_location.clean()
91
101
  for codeloc, _ in codeloc_and_ids:
92
102
  for item in list(self._uses_by_location[codeloc]):
93
103
  if item[0] == definition:
@@ -149,18 +159,22 @@ class Uses:
149
159
 
150
160
  for k, v in other._uses_by_definition.items():
151
161
  if k not in self._uses_by_definition:
162
+ self._uses_by_definition = self._uses_by_definition.clean()
152
163
  self._uses_by_definition[k] = v
153
164
  merge_occurred = True
154
165
  elif not v.issubset(self._uses_by_definition[k]):
155
166
  merge_occurred = True
156
- self._uses_by_definition[k] |= v
167
+ self._uses_by_definition = self._uses_by_definition.clean()
168
+ self._uses_by_definition[k] = self._uses_by_definition[k] | v
157
169
 
158
170
  for k, v in other._uses_by_location.items():
159
171
  if k not in self._uses_by_location:
172
+ self._uses_by_location = self._uses_by_location.clean()
160
173
  self._uses_by_location[k] = v
161
174
  merge_occurred = True
162
175
  elif not v.issubset(self._uses_by_location[k]):
163
176
  merge_occurred = True
164
- self._uses_by_location[k] |= v
177
+ self._uses_by_location = self._uses_by_location.clean()
178
+ self._uses_by_location[k] = self._uses_by_location[k] | v
165
179
 
166
180
  return merge_occurred
@@ -221,9 +221,10 @@ class PropagatorState:
221
221
  merge_occurred = True
222
222
  else:
223
223
  if PropagatorState.is_top(repl) or PropagatorState.is_top(replacements_0[loc][var]):
224
- t = PropagatorState.top(_get_repl_size(repl))
225
- replacements_0[loc][var] = t
226
- merge_occurred = True
224
+ if not PropagatorState.is_top(replacements_0[loc][var]):
225
+ t = PropagatorState.top(_get_repl_size(repl))
226
+ replacements_0[loc][var] = t
227
+ merge_occurred = True
227
228
  elif (
228
229
  isinstance(replacements_0[loc][var], claripy.ast.Base) or isinstance(repl, claripy.ast.Base)
229
230
  ) and replacements_0[loc][var] is not repl:
@@ -316,6 +317,12 @@ class RegisterAnnotation(claripy.Annotation):
316
317
  def relocatable(self) -> bool:
317
318
  return True
318
319
 
320
+ def __hash__(self):
321
+ return hash((RegisterAnnotation, self.offset, self.size))
322
+
323
+ def __eq__(self, other):
324
+ return type(other) is RegisterAnnotation and self.offset == other.offset and self.size == other.size
325
+
319
326
 
320
327
  class RegisterComparisonAnnotation(claripy.Annotation):
321
328
  """
@@ -336,6 +343,18 @@ class RegisterComparisonAnnotation(claripy.Annotation):
336
343
  def relocatable(self) -> bool:
337
344
  return True
338
345
 
346
+ def __hash__(self):
347
+ return hash((RegisterComparisonAnnotation, self.offset, self.size, self.cmp_op, self.value))
348
+
349
+ def __eq__(self, other):
350
+ return (
351
+ type(other) is RegisterComparisonAnnotation
352
+ and self.offset == other.offset
353
+ and self.size == other.size
354
+ and self.cmp_op == other.cmp_op
355
+ and self.value == other.value
356
+ )
357
+
339
358
 
340
359
  class PropagatorVEXState(PropagatorState):
341
360
  """
@@ -56,6 +56,12 @@ class TypesStore(KnowledgeBasePlugin, UserDict):
56
56
  yield from super().__iter__()
57
57
  yield from iter(ALL_TYPES)
58
58
 
59
+ def __getstate__(self):
60
+ return self.data # do not pickle self.kb
61
+
62
+ def __setstate__(self, state):
63
+ self.data = state
64
+
59
65
  def iter_own(self):
60
66
  """
61
67
  Iterate over all the names which are stored in this object - i.e. ``values()`` without ``ALL_TYPES``
@@ -122,7 +122,32 @@ class VariableManagerInternal(Serializable):
122
122
  self.__dict__.update(state)
123
123
 
124
124
  def __getstate__(self):
125
- d = dict(self.__dict__)
125
+ attributes = [
126
+ "func_addr",
127
+ "_variables",
128
+ "_global_region",
129
+ "_stack_region",
130
+ "_register_region",
131
+ "_live_variables",
132
+ "_variable_accesses",
133
+ "_insn_to_variable",
134
+ "_stmt_to_variable",
135
+ "_variable_to_stmt",
136
+ "_atom_to_variable",
137
+ "_ident_to_variable",
138
+ "_variable_counters",
139
+ "_unified_variables",
140
+ "_variables_to_unified_variables",
141
+ "_phi_variables",
142
+ "_variables_to_phivars",
143
+ "_phi_variables_by_block",
144
+ "types",
145
+ "variable_to_types",
146
+ "variables_with_manual_types",
147
+ "_variables_without_writes",
148
+ "ret_val_size",
149
+ ]
150
+ d = {k: getattr(self, k) for k in attributes}
126
151
  d["manager"] = None
127
152
  d["types"].kb = None
128
153
  return d
@@ -759,10 +784,11 @@ class VariableManagerInternal(Serializable):
759
784
  if variable in self._phi_variables:
760
785
  # a phi variable is definitely not an input variable
761
786
  continue
762
- accesses = self._variable_accesses[variable]
763
- if has_read_access(accesses):
764
- if not exclude_specials or not variable.category:
765
- input_variables.append(variable)
787
+ if variable in self._variable_accesses:
788
+ accesses = self._variable_accesses[variable]
789
+ if has_read_access(accesses):
790
+ if not exclude_specials or not variable.category:
791
+ input_variables.append(variable)
766
792
 
767
793
  return input_variables
768
794
 
angr/simos/simos.py CHANGED
@@ -277,6 +277,8 @@ class SimOS:
277
277
 
278
278
  if state.arch.name == "PPC64" and toc is not None:
279
279
  state.regs.r2 = toc
280
+ elif state.arch.name in ("MIPS32", "MIPS64"):
281
+ state.regs.t9 = addr
280
282
 
281
283
  return state
282
284
 
@@ -67,6 +67,9 @@ class MemoryMixin(SimStatePlugin):
67
67
  def merge(self, others, merge_conditions, common_ancestor=None) -> bool:
68
68
  pass
69
69
 
70
+ def compare(self, other) -> bool:
71
+ pass
72
+
70
73
  def widen(self, others):
71
74
  pass
72
75
 
@@ -1,13 +1,17 @@
1
+ # pylint:disable=missing-class-docstring
1
2
  from typing import Iterable, Tuple, Any, Callable, Optional
2
3
 
3
4
  from . import MemoryMixin
4
5
 
5
6
 
6
7
  class MultiValueMergerMixin(MemoryMixin):
7
- def __init__(self, *args, element_limit=5, annotation_limit=256, top_func=None, phi_maker=None, **kwargs):
8
+ def __init__(
9
+ self, *args, element_limit=5, annotation_limit=256, top_func=None, is_top_func=None, phi_maker=None, **kwargs
10
+ ):
8
11
  self._element_limit = element_limit
9
12
  self._annotation_limit = annotation_limit
10
13
  self._top_func: Callable = top_func
14
+ self._is_top_func: Callable = is_top_func
11
15
  self._phi_maker: Optional[Callable] = phi_maker
12
16
 
13
17
  super().__init__(*args, **kwargs)
@@ -20,18 +24,23 @@ class MultiValueMergerMixin(MemoryMixin):
20
24
  return {phi_var}
21
25
 
22
26
  # try to merge it in the traditional way
23
- if len(values_set) > self._element_limit:
24
- # strip annotations from each value and see how many raw values there are in total
25
- # We have to use cache_key to determine uniqueness here, because if __hash__ collides,
26
- # python implicitly calls __eq__ to determine if the two objects are actually the same
27
- # and that just results in a new AST for a BV. Python then tries to convert that AST to a bool
28
- # which fails with the safeguard in claripy.ast.bool.Bool.__bool__.
29
- stripped_values_set = {v._apply_to_annotations(lambda alist: None).cache_key for v in values_set}
30
- if len(stripped_values_set) > 1:
27
+ has_top = any(self._is_top_func(v) for v in values_set)
28
+ if has_top or len(values_set) > self._element_limit:
29
+ if has_top:
31
30
  ret_val = self._top_func(merged_size * self.state.arch.byte_width)
32
31
  else:
33
- # Get the AST back from the cache_key
34
- ret_val = next(iter(stripped_values_set)).ast
32
+ # strip annotations from each value and see how many raw values there are in total
33
+ # We have to use cache_key to determine uniqueness here, because if __hash__ collides,
34
+ # python implicitly calls __eq__ to determine if the two objects are actually the same
35
+ # and that just results in a new AST for a BV. Python then tries to convert that AST to a bool
36
+ # which fails with the safeguard in claripy.ast.bool.Bool.__bool__.
37
+ stripped_values_set = {v._apply_to_annotations(lambda alist: None).cache_key for v in values_set}
38
+ if len(stripped_values_set) > 1:
39
+ ret_val = self._top_func(merged_size * self.state.arch.byte_width)
40
+ else:
41
+ # Get the AST back from the cache_key
42
+ ret_val = next(iter(stripped_values_set)).ast
43
+
35
44
  # migrate annotations
36
45
  annotations = []
37
46
  annotations_set = set()
@@ -41,6 +50,7 @@ class MultiValueMergerMixin(MemoryMixin):
41
50
  annotations.append(anno)
42
51
  annotations_set.add(anno)
43
52
  if annotations:
53
+ annotations = sorted(annotations, key=str)
44
54
  ret_val = ret_val.annotate(*annotations[: self._annotation_limit])
45
55
  merged_val = {ret_val}
46
56
  else:
@@ -52,5 +62,6 @@ class MultiValueMergerMixin(MemoryMixin):
52
62
  copied._element_limit = self._element_limit
53
63
  copied._annotation_limit = self._annotation_limit
54
64
  copied._top_func = self._top_func
65
+ copied._is_top_func = self._is_top_func
55
66
  copied._phi_maker = self._phi_maker
56
67
  return copied
@@ -294,6 +294,24 @@ class PagedMemoryMixin(MemoryMixin):
294
294
 
295
295
  return bool(merged_bytes)
296
296
 
297
+ def compare(self, other: "PagedMemoryMixin") -> bool:
298
+ changed_pages_and_offsets: Dict[int, Optional[Set[int]]] = dict(self.changed_pages(other))
299
+
300
+ for page_no in sorted(changed_pages_and_offsets):
301
+ page = self._get_page(page_no, False)
302
+ page_addr = page_no * self.page_size
303
+ if page_no in other._pages:
304
+ r = page.compare(
305
+ other._pages[page_no],
306
+ page_addr=page_addr,
307
+ memory=self,
308
+ changed_offsets=changed_pages_and_offsets[page_no],
309
+ )
310
+ if r is False:
311
+ return False
312
+
313
+ return True
314
+
297
315
  def permissions(self, addr, permissions=None, **kwargs):
298
316
  if type(addr) is not int:
299
317
  raise TypeError("addr must be an int in paged memory")
@@ -643,10 +661,10 @@ class LabeledPagesMixin(PagedMemoryMixin):
643
661
  if endness is None:
644
662
  endness = self.endness
645
663
 
646
- if type(size) is not int:
664
+ if not isinstance(size, int):
647
665
  raise TypeError("Need size to be resolved to an int by this point")
648
666
 
649
- if type(addr) is not int:
667
+ if not isinstance(addr, int):
650
668
  raise TypeError("Need addr to be resolved to an int by this point")
651
669
 
652
670
  pageno, pageoff = self._divide_addr(addr)
@@ -1,6 +1,6 @@
1
- # pylint:disable=abstract-method,arguments-differ
1
+ # pylint:disable=abstract-method,arguments-differ,assignment-from-no-return
2
2
  import logging
3
- from typing import Optional, List, Set, Tuple, Union, Callable
3
+ from typing import Optional, List, Set, Tuple, Union, Callable, Any, FrozenSet
4
4
 
5
5
  from angr.utils.dynamic_dictlist import DynamicDictList
6
6
  from .....storage.memory_object import SimMemoryObject, SimLabeledMemoryObject
@@ -166,40 +166,58 @@ class MVListPage(
166
166
  continue
167
167
  l.debug("... on byte 0x%x", b)
168
168
 
169
- memory_objects = []
169
+ memory_object_sets: Set[Tuple[FrozenSet[SimMemoryObject], Any]] = set()
170
170
  unconstrained_in = []
171
171
 
172
- # first get a list of all memory objects at that location, and
173
- # all memories that don't have those bytes
172
+ # first get a list of all memory objects at that location, and all memories that don't have those bytes
173
+ self_has_memory_object_set = False
174
174
  for sm, fv in zip(all_pages, merge_conditions):
175
175
  if sm._contains(b, page_addr):
176
176
  l.info("... present in %s", fv)
177
+ memory_objects = set()
177
178
  for mo in sm.content_gen(b):
178
179
  if mo.includes(page_addr + b):
179
- memory_objects.append((mo, fv))
180
+ memory_objects.add(mo)
181
+ memory_object_sets.add((frozenset(memory_objects), fv))
182
+ if sm is self:
183
+ self_has_memory_object_set = True
180
184
  else:
181
185
  l.info("... not present in %s", fv)
182
186
  unconstrained_in.append((sm, fv))
183
187
 
184
- if not memory_objects:
188
+ if not memory_object_sets:
189
+ continue
190
+ if self_has_memory_object_set and len(memory_object_sets) == 1:
185
191
  continue
186
192
 
187
- mos = {mo for mo, _ in memory_objects}
188
- mo_bases = {mo.base for mo, _ in memory_objects}
189
- mo_lengths = {mo.length for mo, _ in memory_objects}
190
- endnesses = {mo.endness for mo in mos}
191
-
192
- if not unconstrained_in and not (mos - merged_objects): # pylint:disable=superfluous-parens
193
+ mo_sets = {mo_set for mo_set, _ in memory_object_sets}
194
+ mo_bases = set()
195
+ mo_lengths = set()
196
+ endnesses = set()
197
+ for mo_set in mo_sets:
198
+ for mo in mo_set:
199
+ mo_bases.add(mo.base)
200
+ mo_lengths.add(mo.length)
201
+ endnesses.add(mo.endness)
202
+
203
+ if not unconstrained_in and not (mo_sets - merged_objects): # pylint:disable=superfluous-parens
193
204
  continue
194
205
 
195
206
  # first, optimize the case where we are dealing with the same-sized memory objects
196
207
  if len(mo_bases) == 1 and len(mo_lengths) == 1 and not unconstrained_in and len(endnesses) == 1:
208
+ if len(memory_object_sets) == 1:
209
+ # nothing to merge!
210
+ continue
211
+
197
212
  the_endness = next(iter(endnesses))
198
- to_merge = [(mo.object, fv) for mo, fv in memory_objects]
213
+ to_merge = []
214
+ for mo_set, fv in memory_object_sets:
215
+ for mo in mo_set:
216
+ to_merge.append((mo.object, fv))
199
217
 
200
218
  # Update `merged_to`
201
219
  mo_base = list(mo_bases)[0]
202
- mo_length = memory_objects[0][0].length
220
+ mo_length = next(iter(mo_lengths))
203
221
  size = min(mo_length - (page_addr + b - mo_base), len(self.content) - b)
204
222
  merged_to = b + size
205
223
 
@@ -212,25 +230,18 @@ class MVListPage(
212
230
  # TODO: Implement in-place replacement instead of calling store()
213
231
  # new_object = self._replace_memory_object(our_mo, merged_val, page_addr, memory.page_size)
214
232
 
215
- first_value = True
216
- for v in merged_val:
217
- self.store(
218
- b,
219
- {SimMemoryObject(v, mo_base, endness=the_endness)},
220
- size=size,
221
- cooperate=True,
222
- weak=not first_value,
223
- )
224
- first_value = False
233
+ new_mos = {SimMemoryObject(v, mo_base, endness=the_endness) for v in merged_val}
234
+ self.store(b, new_mos, size=size, cooperate=True, weak=False)
225
235
 
226
236
  merged_offsets.add(b)
227
237
 
228
238
  else:
229
- # get the size that we can merge easily. This is the minimum of
230
- # the size of all memory objects and unallocated spaces.
231
- min_size = min(
232
- [mo.length - (b + page_addr - mo.base) for mo, _ in memory_objects] + [len(self.content) - b]
233
- )
239
+ # get the size that we can merge easily. This is the minimum of the size of all memory objects and
240
+ # unallocated spaces.
241
+ min_size = len(self.content) - b
242
+ for mo_set in mo_sets:
243
+ for mo in mo_set:
244
+ min_size = min(min_size, mo.length - (b + page_addr - mo.base))
234
245
  for um, _ in unconstrained_in:
235
246
  for i in range(0, min_size):
236
247
  if um._contains(b + i, page_addr):
@@ -241,9 +252,11 @@ class MVListPage(
241
252
 
242
253
  # Now, we have the minimum size. We'll extract/create expressions of that
243
254
  # size and merge them
244
- extracted = (
245
- [(mo.bytes_at(page_addr + b, min_size), fv) for mo, fv in memory_objects] if min_size != 0 else []
246
- )
255
+ extracted = []
256
+ if min_size != 0:
257
+ for mo_set, fv in memory_object_sets:
258
+ for mo in mo_set:
259
+ extracted.append((mo.bytes_at(page_addr + b, min_size), fv))
247
260
  if not memory.skip_missing_values_during_merging:
248
261
  created = [
249
262
  (self._default_value(None, min_size, name=f"merge_uc_{uc.id}_{b:x}", memory=memory), fv)
@@ -257,22 +270,46 @@ class MVListPage(
257
270
  if merged_val is None:
258
271
  continue
259
272
 
260
- first_value = True
261
- for v in merged_val:
262
- self.store(
263
- b,
264
- {SimMemoryObject(v, page_addr + b, endness="Iend_BE")},
265
- size=min_size,
266
- endness="Iend_BE",
267
- cooperate=True,
268
- weak=not first_value,
269
- ) # do not convert endianness again
270
- first_value = False
273
+ new_mos = {SimMemoryObject(v, page_addr + b, endness="Iend_BE") for v in merged_val}
274
+ self.store(b, new_mos, size=min_size, cooperate=True, weak=False)
271
275
  merged_offsets.add(b)
272
276
 
273
277
  self.stored_offset |= merged_offsets
274
278
  return merged_offsets
275
279
 
280
+ def compare(
281
+ self, other: "MVListPage", page_addr: int = None, memory=None, changed_offsets=None
282
+ ) -> bool: # pylint: disable=unused-argument
283
+ compared_to = None
284
+ for b in sorted(changed_offsets):
285
+ if compared_to is not None and not b >= compared_to:
286
+ continue
287
+
288
+ unconstrained_in = []
289
+ self_has_memory_object_set = False
290
+ memory_object_sets: Set[FrozenSet[SimMemoryObject]] = set()
291
+ for sm in [self, other]:
292
+ if sm._contains(b, page_addr):
293
+ memory_objects = set()
294
+ for mo in sm.content_gen(b):
295
+ if mo.includes(page_addr + b):
296
+ memory_objects.add(mo)
297
+ memory_object_sets.add(frozenset(memory_objects))
298
+ if sm is self:
299
+ self_has_memory_object_set = True
300
+ else:
301
+ unconstrained_in.append(sm)
302
+
303
+ if not memory_object_sets:
304
+ continue
305
+ if self_has_memory_object_set and len(memory_object_sets) == 1:
306
+ continue
307
+
308
+ # TODO: compare_values even more?
309
+ return False
310
+
311
+ return True
312
+
276
313
  def changed_bytes(self, other: "MVListPage", page_addr: int = None):
277
314
  candidates: Set[int] = super().changed_bytes(other)
278
315
  if candidates is not None:
angr/utils/cowdict.py CHANGED
@@ -35,7 +35,7 @@ class DefaultChainMapCOW(ChainMapCOW):
35
35
  Implements a copy-on-write version of ChainMap with default values that supports auto-collapsing.
36
36
  """
37
37
 
38
- def __init__(self, default_factory, *args, collapse_threshold=None):
38
+ def __init__(self, *args, default_factory=None, collapse_threshold=None):
39
39
  super().__init__(*args, collapse_threshold=collapse_threshold)
40
40
  self.default_factory = default_factory
41
41
 
@@ -53,7 +53,9 @@ class DefaultChainMapCOW(ChainMapCOW):
53
53
  collapsed = {}
54
54
  for m in reversed(self.maps):
55
55
  collapsed.update(m)
56
- return DefaultChainMapCOW(collapsed, collapse_threshold=self.collapse_threshold)
56
+ return DefaultChainMapCOW(
57
+ collapsed, default_factory=self.default_factory, collapse_threshold=self.collapse_threshold
58
+ )
57
59
  r = self.new_child()
58
60
  r.default_factory = self.default_factory
59
61
  r.collapse_threshold = self.collapse_threshold
angr/utils/funcid.py CHANGED
@@ -13,6 +13,8 @@ def is_function_security_check_cookie(func, project, security_cookie_addr: int)
13
13
  block = project.factory.block(func.addr)
14
14
  if block.instructions != 2:
15
15
  return False
16
+ if not block.capstone.insns or len(block.capstone.insns) != 2:
17
+ return False
16
18
  ins0 = block.capstone.insns[0]
17
19
  if (
18
20
  ins0.mnemonic == "cmp"
@@ -57,6 +59,8 @@ def is_function_security_init_cookie(func: "Function", project, security_cookie_
57
59
  block = project.factory.block(node_addr, size=node_size)
58
60
  if not block.instructions:
59
61
  continue
62
+ if not block.capstone.insns:
63
+ continue
60
64
  last_insn = block.capstone.insns[-1]
61
65
  if (
62
66
  last_insn.mnemonic == "mov"
@@ -78,6 +82,8 @@ def is_function_security_init_cookie_win8(func: "Function", project, security_co
78
82
  block = project.factory.block(func.addr)
79
83
  if block.instructions != 3:
80
84
  return False
85
+ if not block.capstone.insns or len(block.capstone.insns) != 3:
86
+ return False
81
87
  ins0 = block.capstone.insns[0]
82
88
  if (
83
89
  ins0.mnemonic == "mov"
angr/utils/mp.py CHANGED
@@ -43,7 +43,7 @@ class Initializer:
43
43
  def initialize(self) -> None:
44
44
  """
45
45
  Initialize a multiprocessing.Process
46
- Set the current global initalizer to the same state as this initalizer, then calls each initalizer
46
+ Set the current global initializer to the same state as this initializer, then calls each initializer
47
47
  """
48
48
  self._single = self
49
49
  for i in self.initializers:
@@ -1,6 +1,6 @@
1
1
  Metadata-Version: 2.1
2
2
  Name: angr
3
- Version: 9.2.96
3
+ Version: 9.2.97
4
4
  Summary: A multi-architecture binary analysis toolkit, with the ability to perform dynamic symbolic execution and various static analyses on binaries
5
5
  Home-page: https://github.com/angr/angr
6
6
  License: BSD-2-Clause
@@ -17,13 +17,13 @@ Description-Content-Type: text/markdown
17
17
  License-File: LICENSE
18
18
  Requires-Dist: CppHeaderParser
19
19
  Requires-Dist: GitPython
20
- Requires-Dist: ailment ==9.2.96
21
- Requires-Dist: archinfo ==9.2.96
20
+ Requires-Dist: ailment ==9.2.97
21
+ Requires-Dist: archinfo ==9.2.97
22
22
  Requires-Dist: cachetools
23
23
  Requires-Dist: capstone ==5.0.0.post1
24
24
  Requires-Dist: cffi >=1.14.0
25
- Requires-Dist: claripy ==9.2.96
26
- Requires-Dist: cle ==9.2.96
25
+ Requires-Dist: claripy ==9.2.97
26
+ Requires-Dist: cle ==9.2.97
27
27
  Requires-Dist: dpkt
28
28
  Requires-Dist: itanium-demangler
29
29
  Requires-Dist: mulpyplexer
@@ -33,7 +33,7 @@ Requires-Dist: protobuf >=3.19.0
33
33
  Requires-Dist: psutil
34
34
  Requires-Dist: pycparser >=2.18
35
35
  Requires-Dist: pyformlang
36
- Requires-Dist: pyvex ==9.2.96
36
+ Requires-Dist: pyvex ==9.2.97
37
37
  Requires-Dist: rich >=13.1.0
38
38
  Requires-Dist: rpyc
39
39
  Requires-Dist: sortedcontainers