angr 9.2.176__cp310-abi3-macosx_11_0_arm64.whl → 9.2.178__cp310-abi3-macosx_11_0_arm64.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- angr/__init__.py +1 -1
- angr/analyses/cfg/cfg_fast.py +15 -0
- angr/analyses/decompiler/ail_simplifier.py +69 -1
- angr/analyses/decompiler/ccall_rewriters/amd64_ccalls.py +45 -7
- angr/analyses/decompiler/clinic.py +15 -7
- angr/analyses/decompiler/dirty_rewriters/__init__.py +7 -0
- angr/analyses/decompiler/dirty_rewriters/amd64_dirty.py +69 -0
- angr/analyses/decompiler/dirty_rewriters/rewriter_base.py +27 -0
- angr/analyses/decompiler/optimization_passes/__init__.py +3 -0
- angr/analyses/decompiler/optimization_passes/optimization_pass.py +10 -8
- angr/analyses/decompiler/optimization_passes/register_save_area_simplifier.py +44 -6
- angr/analyses/decompiler/optimization_passes/register_save_area_simplifier_adv.py +198 -0
- angr/analyses/decompiler/optimization_passes/win_stack_canary_simplifier.py +111 -55
- angr/analyses/decompiler/peephole_optimizations/cas_intrinsics.py +69 -12
- angr/analyses/decompiler/peephole_optimizations/inlined_wcscpy_consolidation.py +189 -6
- angr/analyses/decompiler/peephole_optimizations/remove_redundant_shifts_around_comparators.py +72 -1
- angr/analyses/decompiler/presets/basic.py +2 -0
- angr/analyses/decompiler/presets/fast.py +2 -0
- angr/analyses/decompiler/presets/full.py +2 -0
- angr/analyses/decompiler/utils.py +10 -3
- angr/analyses/flirt/flirt.py +5 -4
- angr/analyses/s_propagator.py +23 -21
- angr/analyses/smc.py +2 -3
- angr/analyses/variable_recovery/engine_ail.py +39 -0
- angr/emulator.py +2 -1
- angr/engines/hook.py +1 -1
- angr/engines/icicle.py +19 -3
- angr/knowledge_plugins/functions/function.py +2 -2
- angr/knowledge_plugins/labels.py +4 -4
- angr/procedures/definitions/__init__.py +9 -0
- angr/procedures/definitions/parse_win32json.py +11 -0
- angr/procedures/definitions/wdk/ntoskrnl.json +4 -0
- angr/rustylib.abi3.so +0 -0
- angr/unicornlib.dylib +0 -0
- angr/utils/funcid.py +85 -0
- angr/utils/ssa/__init__.py +2 -6
- {angr-9.2.176.dist-info → angr-9.2.178.dist-info}/METADATA +6 -5
- {angr-9.2.176.dist-info → angr-9.2.178.dist-info}/RECORD +42 -38
- {angr-9.2.176.dist-info → angr-9.2.178.dist-info}/WHEEL +0 -0
- {angr-9.2.176.dist-info → angr-9.2.178.dist-info}/entry_points.txt +0 -0
- {angr-9.2.176.dist-info → angr-9.2.178.dist-info}/licenses/LICENSE +0 -0
- {angr-9.2.176.dist-info → angr-9.2.178.dist-info}/top_level.txt +0 -0
angr/analyses/decompiler/optimization_passes/register_save_area_simplifier_adv.py (new file):

@@ -0,0 +1,198 @@
+# pylint:disable=too-many-boolean-expressions
+from __future__ import annotations
+import logging
+
+
+from angr.ailment.statement import Assignment
+from angr.ailment.expression import VirtualVariable
+from angr.code_location import CodeLocation, ExternalCodeLocation
+from angr.analyses.decompiler.stack_item import StackItem, StackItemType
+from angr.utils.ail import is_phi_assignment
+from .optimization_pass import OptimizationPass, OptimizationPassStage
+
+_l = logging.getLogger(name=__name__)
+
+
+class RegisterSaveAreaSimplifierAdvanced(OptimizationPass):
+    """
+    Optimizes away registers that are stored to or restored on the stack space.
+
+    This analysis is more complex than RegisterSaveAreaSimplifier because it handles:
+    (1) Registers that are stored in the stack shadow space (sp+N) according to the Windows x64 calling convention.
+    (2) Registers that are aliases of sp.
+    """
+
+    ARCHES = None
+    PLATFORMS = None
+    STAGE = OptimizationPassStage.AFTER_SSA_LEVEL1_TRANSFORMATION
+    NAME = "Simplify register save areas (advanced)"
+    DESCRIPTION = __doc__.strip()  # type:ignore
+
+    def __init__(self, func, **kwargs):
+        super().__init__(func, **kwargs)
+        self._srda = None
+
+        self.analyze()
+
+    def _check(self):
+
+        self._srda = self.project.analyses.SReachingDefinitions(
+            subject=self._func, func_graph=self._graph, func_args=self._arg_vvars
+        )
+        info = self._find_reg_store_and_restore_locations()
+        if not info:
+            return False, None
+
+        return True, {"info": info}
+
+    @staticmethod
+    def _modify_statement(
+        old_block, stmt_idx_: int, updated_blocks_, stack_offset: int | None = None
+    ):  # pylint:disable=unused-argument
+        if old_block not in updated_blocks_:
+            block = old_block.copy()
+            updated_blocks_[old_block] = block
+        else:
+            block = updated_blocks_[old_block]
+        block.statements[stmt_idx_] = None
+
+    def _analyze(self, cache=None):
+
+        if cache is None:
+            return
+
+        info: list[tuple[int, CodeLocation, int, CodeLocation, int]] = cache["info"]
+        updated_blocks = {}
+
+        for _regvar, regvar_loc, _stackvar, stackvar_loc, _ in info:
+            # remove storing statements
+            old_block = self._get_block(regvar_loc.block_addr, idx=regvar_loc.block_idx)
+            assert regvar_loc.stmt_idx is not None
+            self._modify_statement(old_block, regvar_loc.stmt_idx, updated_blocks)
+            old_block = self._get_block(stackvar_loc.block_addr, idx=stackvar_loc.block_idx)
+            assert stackvar_loc.stmt_idx is not None
+            self._modify_statement(old_block, stackvar_loc.stmt_idx, updated_blocks)
+
+        for old_block, new_block in updated_blocks.items():
+            # remove all statements that are None
+            new_block.statements = [stmt for stmt in new_block.statements if stmt is not None]
+            # update it
+            self._update_block(old_block, new_block)
+
+        if updated_blocks:
+            # update stack_items
+            for _, _, _, _, stack_offset in info:
+                self.stack_items[stack_offset] = StackItem(
+                    stack_offset, self.project.arch.bytes, "regs", StackItemType.SAVED_REGS
+                )
+
+    def _find_reg_store_and_restore_locations(self) -> list[tuple[int, CodeLocation, int, CodeLocation, int]]:
+        results = []
+
+        assert self._srda is not None
+        srda_model = self._srda.model
+        # find all registers that are defined externally and used exactly once
+        saved_vvars: set[tuple[int, CodeLocation]] = set()
+        for vvar_id, loc in srda_model.all_vvar_definitions.items():
+            if isinstance(loc, ExternalCodeLocation):
+                uses = srda_model.all_vvar_uses.get(vvar_id, [])
+                if len(uses) == 1:
+                    vvar, used_loc = next(iter(uses))
+                    if vvar is not None and vvar.was_reg:
+                        saved_vvars.add((vvar_id, used_loc))
+
+        if not saved_vvars:
+            return results
+
+        # for each candidate, we check to ensure:
+        # - it is stored onto the stack (into a stack virtual variable)
+        # - the stack virtual variable is only used once and restores the value to the same register
+        # - the restore location is in the dominance frontier of the store location
+        for vvar_id, used_loc in saved_vvars:
+            def_block = self._get_block(used_loc.block_addr, idx=used_loc.block_idx)
+            assert def_block is not None and used_loc.stmt_idx is not None
+            stmt = def_block.statements[used_loc.stmt_idx]
+            if not (
+                isinstance(stmt, Assignment)
+                and isinstance(stmt.dst, VirtualVariable)
+                and stmt.dst.was_stack
+                and isinstance(stmt.src, VirtualVariable)
+                and stmt.src.was_reg
+                and stmt.src.varid == vvar_id
+            ):
+                continue
+            stack_vvar = stmt.dst
+            all_stack_vvar_uses = srda_model.all_vvar_uses.get(stack_vvar.varid, [])
+            # eliminate the use location if it's a phi statement
+            stack_vvar_uses = set()
+            for vvar_, loc_ in all_stack_vvar_uses:
+                use_block = self._get_block(loc_.block_addr, idx=loc_.block_idx)
+                if use_block is None or loc_.stmt_idx is None:
+                    continue
+                use_stmt = use_block.statements[loc_.stmt_idx]
+                if is_phi_assignment(use_stmt):
+                    continue
+                stack_vvar_uses.add((vvar_, loc_))
+            if len(stack_vvar_uses) != 1:
+                continue
+            _, stack_vvar_use_loc = next(iter(stack_vvar_uses))
+            restore_block = self._get_block(stack_vvar_use_loc.block_addr, idx=stack_vvar_use_loc.block_idx)
+            assert restore_block is not None
+            restore_stmt = restore_block.statements[stack_vvar_use_loc.stmt_idx]
+
+            if not (
+                isinstance(restore_stmt, Assignment)
+                and isinstance(restore_stmt.src, VirtualVariable)
+                and restore_stmt.src.varid == stack_vvar.varid
+                and isinstance(restore_stmt.dst, VirtualVariable)
+                and restore_stmt.dst.was_reg
+                and restore_stmt.dst.reg_offset == stmt.src.reg_offset
+            ):
+                continue
+            # this is the dumb version of the dominance frontier check
+            if self._within_dominance_frontier(def_block, restore_block, True, True):
+                results.append(
+                    (stmt.src.varid, used_loc, stack_vvar.varid, stack_vvar_use_loc, stack_vvar.stack_offset)
+                )
+
+        return results
+
+    def _within_dominance_frontier(self, dom_node, node, use_preds: bool, use_succs: bool) -> bool:
+        if use_succs:
+            # scan forward
+            succs = [succ for succ in self._graph.successors(dom_node) if succ is not dom_node]
+            if len(succs) == 1:
+                succ = succs[0]
+                succ_preds = [pred for pred in self._graph.predecessors(succ) if pred is not succ]
+                if len(succ_preds) == 0:
+                    # the successor has no other predecessors
+                    r = self._within_dominance_frontier(succ, node, False, True)
+                    if r:
+                        return True
+
+                else:
+                    # the successor has other predecessors; gotta step back
+                    preds = [pred for pred in self._graph.predecessors(node) if pred is not node]
+                    if len(preds) == 1 and preds[0] is node:
+                        return True
+            elif len(succs) == 2:
+                return any(succ is node for succ in succs)
+
+        if use_preds:
+            # scan backward
+            preds = [pred for pred in self._graph.predecessors(dom_node) if pred is not dom_node]
+            if len(preds) == 1:
+                pred = preds[0]
+                pred_succs = [succ for succ in self._graph.successors(pred) if succ is not pred]
+                if len(pred_succs) == 0:
+                    # the predecessor has no other successors
+                    return self._within_dominance_frontier(pred, node, True, False)
+
+                # the predecessor has other successors; gotta step forward
+                succs = [succ for succ in self._graph.successors(node) if succ is not node]
+                if len(succs) == 1:
+                    return self._graph.has_edge(node, succs[0])
+            elif len(preds) == 2:
+                return False
+
+        return False
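The new pass leaves `ARCHES` and `PLATFORMS` as `None`, so it is not gated to a particular target and can be handed to the decompiler like any other optimization pass. Below is a minimal sketch of selecting it explicitly; the import path and the `optimization_passes` keyword follow how the existing passes under `optimization_passes/` are normally used, but both should be treated as assumptions rather than documented API for this exact release.

```python
import angr
from angr.analyses.decompiler.optimization_passes import get_default_optimization_passes
# assumed export location; the class is added in register_save_area_simplifier_adv.py
from angr.analyses.decompiler.optimization_passes.register_save_area_simplifier_adv import (
    RegisterSaveAreaSimplifierAdvanced,
)

proj = angr.Project("sample.exe", auto_load_libs=False)  # hypothetical Windows x64 binary
cfg = proj.analyses.CFGFast(normalize=True)

func = proj.kb.functions["main"]
passes = get_default_optimization_passes(proj.arch, proj.simos.name)
if RegisterSaveAreaSimplifierAdvanced not in passes:
    passes.append(RegisterSaveAreaSimplifierAdvanced)

# an explicit optimization_passes list overrides the preset's default selection for this run
dec = proj.analyses.Decompiler(func, cfg=cfg.model, optimization_passes=passes)
print(dec.codegen.text)
```

Since presets/basic.py, fast.py and full.py each gain two lines in this diff, the pass is presumably also registered in the default presets, in which case no explicit pass list is needed.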
angr/analyses/decompiler/optimization_passes/win_stack_canary_simplifier.py:

@@ -33,9 +33,12 @@ class WinStackCanarySimplifier(OptimizationPass):

     def __init__(self, func, **kwargs):
         super().__init__(func, **kwargs)
-        self._security_cookie_addr = None
+        self._security_cookie_addr: int | None = None
         if isinstance(self.project.loader.main_object, cle.PE):
             self._security_cookie_addr = self.project.loader.main_object.load_config.get("SecurityCookie", None)
+            if self._security_cookie_addr is None:
+                # maybe it's just not set - check labels
+                self._security_cookie_addr = self.project.kb.labels.lookup("_security_cookie")

         self.analyze()

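The fallback matters for PE files whose load config carries no SecurityCookie entry but where a `_security_cookie` symbol was recovered some other way (the FLIRT changes elsewhere in this diff are one plausible source). A rough standalone sketch of the same two-step lookup follows; whether `labels.lookup` returns `None` or raises for a missing name is not visible from this diff (labels.py itself changes by four lines), so the `KeyError` guard here is defensive. The remaining win_stack_canary_simplifier.py hunks continue below.

```python
import angr
import cle

proj = angr.Project("sample.exe", auto_load_libs=False)  # hypothetical PE binary
proj.analyses.CFGFast(normalize=True)  # populates functions and labels in proj.kb

cookie_addr = None
if isinstance(proj.loader.main_object, cle.PE):
    # primary source: the PE load config directory parsed by CLE
    cookie_addr = proj.loader.main_object.load_config.get("SecurityCookie", None)
if cookie_addr is None:
    try:
        # fallback used by the simplifier in this release
        cookie_addr = proj.kb.labels.lookup("_security_cookie")
    except KeyError:
        cookie_addr = None

print(hex(cookie_addr) if cookie_addr is not None else "no security cookie found")
```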
@@ -186,21 +189,16 @@ class WinStackCanarySimplifier(OptimizationPass):
         xor_stmt_idx = None
         xored_reg = None

+        assert self._security_cookie_addr is not None
+
         for idx, stmt in enumerate(block.statements):
             # if we are lucky and things get folded into one statement:
             if (
                 isinstance(stmt, ailment.Stmt.Store)
                 and isinstance(stmt.addr, ailment.Expr.StackBaseOffset)
-                and
-                and stmt.data.op == "Xor"
-                and isinstance(stmt.data.operands[1], ailment.Expr.StackBaseOffset)
-                and isinstance(stmt.data.operands[0], ailment.Expr.Load)
-                and isinstance(stmt.data.operands[0].addr, ailment.Expr.Const)
+                and self._is_expr_loading_stack_cookie(stmt.data, self._security_cookie_addr)
             ):
-
-                load_addr = stmt.data.operands[0].addr.value
-                if load_addr == self._security_cookie_addr:
-                    return [idx]
+                return [idx]
             # or if we are unlucky and the load and the xor are two different statements
             if (
                 isinstance(stmt, ailment.Stmt.Assignment)
@@ -213,25 +211,14 @@ class WinStackCanarySimplifier(OptimizationPass):
                     if load_addr == self._security_cookie_addr:
                         load_stmt_idx = idx
                         load_reg = stmt.dst.reg_offset
-            if load_stmt_idx is not None and xor_stmt_idx is None and idx >= load_stmt_idx + 1:
+            if load_stmt_idx is not None and load_reg is not None and xor_stmt_idx is None and idx >= load_stmt_idx + 1:
+                assert self.project.arch.bp_offset is not None
                 if (
                     isinstance(stmt, ailment.Stmt.Assignment)
                     and isinstance(stmt.dst, ailment.Expr.VirtualVariable)
                     and stmt.dst.was_reg
                     and not self.project.arch.is_artificial_register(stmt.dst.reg_offset, stmt.dst.size)
-                    and
-                    and stmt.src.op == "Xor"
-                    and isinstance(stmt.src.operands[0], ailment.Expr.VirtualVariable)
-                    and stmt.src.operands[0].was_reg
-                    and stmt.src.operands[0].reg_offset == load_reg
-                    and (
-                        isinstance(stmt.src.operands[1], ailment.Expr.StackBaseOffset)
-                        or (
-                            isinstance(stmt.src.operands[1], ailment.Expr.VirtualVariable)
-                            and stmt.src.operands[1].was_reg
-                            and stmt.src.operands[1].reg_offset == self.project.arch.registers["ebp"][0]
-                        )
-                    )
+                    and self._is_expr_xoring_stack_cookie_reg(stmt.src, load_reg, self.project.arch.bp_offset)
                 ):
                     xor_stmt_idx = idx
                     xored_reg = stmt.dst.reg_offset
@@ -255,6 +242,25 @@ class WinStackCanarySimplifier(OptimizationPass):
                 ):
                     return [load_stmt_idx, xor_stmt_idx, idx]
                 break
+            if load_stmt_idx is not None and xor_stmt_idx is None and idx >= load_stmt_idx + 1:  # noqa:SIM102
+                if isinstance(stmt, ailment.Stmt.Store) and (
+                    isinstance(stmt.addr, ailment.Expr.StackBaseOffset)
+                    or (
+                        isinstance(stmt.addr, ailment.Expr.BinaryOp)
+                        and stmt.addr.op == "Sub"
+                        and isinstance(stmt.addr.operands[0], ailment.Expr.VirtualVariable)
+                        and stmt.addr.operands[0].was_reg
+                        and stmt.addr.operands[0].reg_offset == self.project.arch.registers["ebp"][0]
+                        and isinstance(stmt.addr.operands[1], ailment.Expr.Const)
+                    )
+                ):
+                    if (
+                        isinstance(stmt.data, ailment.Expr.VirtualVariable)
+                        and stmt.data.was_reg
+                        and stmt.data.reg_offset == load_reg
+                    ):
+                        return [load_stmt_idx, idx]
+                    break
         return None

     def _find_amd64_canary_storing_stmt(self, block, canary_value_stack_offset):
@@ -263,23 +269,27 @@ class WinStackCanarySimplifier(OptimizationPass):
         for idx, stmt in enumerate(block.statements):
             # when we are lucky, we have one instruction
             if (
-                (
-
-
-
-                    and stmt.dst.reg_offset == self.project.arch.registers["rcx"][0]
-                )
-                and isinstance(stmt.src, ailment.Expr.BinaryOp)
-                and stmt.src.op == "Xor"
+                isinstance(stmt, ailment.Stmt.Assignment)
+                and isinstance(stmt.dst, ailment.Expr.VirtualVariable)
+                and stmt.dst.was_reg
+                and stmt.dst.reg_offset == self.project.arch.registers["rcx"][0]
             ):
-
-
-
-
-
-
-
-
+                if isinstance(stmt.src, ailment.Expr.BinaryOp) and stmt.src.op == "Xor":
+                    op0, op1 = stmt.src.operands
+                    if (
+                        isinstance(op0, ailment.Expr.Load)
+                        and isinstance(op0.addr, ailment.Expr.StackBaseOffset)
+                        and op0.addr.offset == canary_value_stack_offset
+                    ) and isinstance(op1, ailment.Expr.StackBaseOffset):
+                        # found it
+                        return idx
+                elif isinstance(stmt.src, ailment.Expr.Load):
+                    if (
+                        isinstance(stmt.src.addr, ailment.Expr.StackBaseOffset)
+                        and stmt.src.addr.offset == canary_value_stack_offset
+                    ):
+                        # found it
+                        return idx
             # or when we are unlucky, we have two instructions...
             if (
                 isinstance(stmt, ailment.Stmt.Assignment)
@@ -317,22 +327,26 @@ class WinStackCanarySimplifier(OptimizationPass):
         for idx, stmt in enumerate(block.statements):
             # when we are lucky, we have one instruction
             if (
-                (
-
-
-
-                    and not self.project.arch.is_artificial_register(stmt.dst.reg_offset, stmt.dst.size)
-                )
-                and isinstance(stmt.src, ailment.Expr.BinaryOp)
-                and stmt.src.op == "Xor"
+                isinstance(stmt, ailment.Stmt.Assignment)
+                and isinstance(stmt.dst, ailment.Expr.VirtualVariable)
+                and stmt.dst.was_reg
+                and not self.project.arch.is_artificial_register(stmt.dst.reg_offset, stmt.dst.size)
             ):
-
-
-
-
-
-
-
+                if isinstance(stmt.src, ailment.Expr.BinaryOp) and stmt.src.op == "Xor":
+                    op0, op1 = stmt.src.operands
+                    if (
+                        isinstance(op0, ailment.Expr.Load)
+                        and self._get_bp_offset(op0.addr, stmt.ins_addr) == canary_value_stack_offset
+                    ) and isinstance(op1, ailment.Expr.StackBaseOffset):
+                        # found it
+                        return idx
+                elif isinstance(stmt.src, ailment.Expr.Load):
+                    if (
+                        isinstance(stmt.src.addr, ailment.Expr.StackBaseOffset)
+                        and stmt.src.addr.offset == canary_value_stack_offset
+                    ):
+                        # found it
+                        return idx
             # or when we are unlucky, we have two instructions...
             if (
                 isinstance(stmt, ailment.Stmt.Assignment)
@@ -419,3 +433,45 @@ class WinStackCanarySimplifier(OptimizationPass):
                     return idx

         return None
+
+    @staticmethod
+    def _is_expr_loading_stack_cookie(expr: ailment.Expr.Expression, security_cookie_addr: int) -> bool:
+        if (
+            isinstance(expr, ailment.Expr.BinaryOp)
+            and expr.op == "Xor"
+            and isinstance(expr.operands[1], ailment.Expr.StackBaseOffset)
+            and isinstance(expr.operands[0], ailment.Expr.Load)
+            and isinstance(expr.operands[0].addr, ailment.Expr.Const)
+        ):
+            # Check addr: must be __security_cookie
+            load_addr = expr.operands[0].addr.value
+            if load_addr == security_cookie_addr:
+                return True
+
+        if isinstance(expr, ailment.Expr.Load) and isinstance(expr.addr, ailment.Expr.Const):
+            load_addr = expr.addr.value
+            # Check addr: must be __security_cookie
+            if load_addr == security_cookie_addr:
+                return True
+
+        return False
+
+    @staticmethod
+    def _is_expr_xoring_stack_cookie_reg(
+        expr: ailment.Expr.Expression, security_cookie_reg: int, bp_offset: int
+    ) -> bool:
+        return (
+            isinstance(expr, ailment.Expr.BinaryOp)
+            and expr.op == "Xor"
+            and isinstance(expr.operands[0], ailment.Expr.VirtualVariable)
+            and expr.operands[0].was_reg
+            and expr.operands[0].reg_offset == security_cookie_reg
+            and (
+                isinstance(expr.operands[1], ailment.Expr.StackBaseOffset)
+                or (
+                    isinstance(expr.operands[1], ailment.Expr.VirtualVariable)
+                    and expr.operands[1].was_reg
+                    and expr.operands[1].reg_offset == bp_offset
+                )
+            )
+        )
angr/analyses/decompiler/peephole_optimizations/cas_intrinsics.py:

@@ -1,6 +1,7 @@
 # pylint:disable=arguments-differ,too-many-boolean-expressions
 from __future__ import annotations

+from angr.ailment import Const
 from angr.ailment.expression import BinaryOp, Load, Expression, Tmp
 from angr.ailment.statement import CAS, ConditionalJump, Statement, Assignment, Call

@@ -8,8 +9,22 @@ from .base import PeepholeOptimizationMultiStmtBase


 _INTRINSICS_NAMES = {
-    "
-    "
+    "xchg8": {"Win32": "InterlockedExchange8", "Linux": "atomic_exchange"},
+    "xchg16": {"Win32": "InterlockedExchange16", "Linux": "atomic_exchange"},
+    "xchg32": {"Win32": "InterlockedExchange", "Linux": "atomic_exchange"},
+    "xchg64": {"Win32": "InterlockedExchange64", "Linux": "atomic_exchange"},
+    "cmpxchg16": {"Win32": "InterlockedCompareExchange16", "Linux": "atomic_compare_exchange"},
+    "cmpxchg32": {"Win32": "InterlockedCompareExchange", "Linux": "atomic_compare_exchange"},
+    "cmpxchg64": {"Win32": "InterlockedCompareExchange64", "Linux": "atomic_compare_exchange"},
+    "cmpxchg128": {"Win32": "InterlockedCompareExchange128", "Linux": "atomic_compare_exchange"},
+    "lock_inc16": {"Win32": "InterlockedIncrement16", "Linux": "atomic_fetch_add"},
+    "lock_inc32": {"Win32": "InterlockedIncrement", "Linux": "atomic_fetch_add"},
+    "lock_inc64": {"Win32": "InterlockedIncrement64", "Linux": "atomic_fetch_add"},
+    "lock_dec16": {"Win32": "InterlockedDecrement16", "Linux": "atomic_fetch_dec"},
+    "lock_dec32": {"Win32": "InterlockedDecrement", "Linux": "atomic_fetch_dec"},
+    "lock_dec64": {"Win32": "InterlockedDecrement64", "Linux": "atomic_fetch_dec"},
+    "lock_xadd32": {"Win32": "InterlockedExchangeAdd", "Linux": "atomic_exchange_add"},
+    "lock_xadd64": {"Win32": "InterlockedExchangeAdd64", "Linux": "atomic_exchange_add"},
 }

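Each key in the table encodes the operation plus its operand width in bits, and each value maps a target platform to the intrinsic name that should appear in the pseudocode. The real lookup happens in `_get_instrincs_name`, which is not shown in this hunk, so the helper in the following sketch is a hypothetical stand-in that only illustrates how such a table is consulted:

```python
# Two representative entries copied from the table added in cas_intrinsics.py.
INTRINSICS_NAMES = {
    "lock_inc32": {"Win32": "InterlockedIncrement", "Linux": "atomic_fetch_add"},
    "cmpxchg64": {"Win32": "InterlockedCompareExchange64", "Linux": "atomic_compare_exchange"},
}


def intrinsic_name(op: str, bits: int, platform: str) -> str:
    """Pick the platform-specific name for an atomic op, falling back to a generic one."""
    entry = INTRINSICS_NAMES.get(f"{op}{bits}", {})
    return entry.get(platform, f"__{op}{bits}")


print(intrinsic_name("lock_inc", 32, "Win32"))  # InterlockedIncrement
print(intrinsic_name("cmpxchg", 64, "Linux"))   # atomic_compare_exchange
```

The width-suffixed keys are also why a later hunk changes the call site from `self._get_instrincs_name("cmpxchg")` to `self._get_instrincs_name(f"cmpxchg{cas_stmt.bits}")`; the remaining cas_intrinsics.py hunks follow below.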
@@ -72,14 +87,54 @@ class CASIntrinsics(PeepholeOptimizationMultiStmtBase):
             ):
                 # TODO: Support cases where cas_stmt.old_hi is not None
                 # Case 1
-
-
-
-
-
-
-
-
+
+                call_expr = None
+                if isinstance(cas_stmt.data_lo, BinaryOp):
+                    if cas_stmt.data_lo.op == "Add" and cas_stmt.data_lo.operands[0].likes(cas_stmt.expd_lo):
+                        if isinstance(cas_stmt.data_lo.operands[1], Const) and cas_stmt.data_lo.operands[1].value == 1:
+                            # lock inc
+                            call_expr = Call(
+                                cas_stmt.idx,
+                                self._get_instrincs_name(f"lock_inc{cas_stmt.bits}"),
+                                args=[cas_stmt.addr],
+                                bits=cas_stmt.bits,
+                                ins_addr=cas_stmt.ins_addr,
+                            )
+                        else:
+                            # lock xadd
+                            call_expr = Call(
+                                cas_stmt.idx,
+                                self._get_instrincs_name(f"lock_xadd{cas_stmt.bits}"),
+                                args=[cas_stmt.addr, cas_stmt.data_lo.operands[1]],
+                                bits=cas_stmt.bits,
+                                ins_addr=cas_stmt.ins_addr,
+                            )
+                    elif (
+                        cas_stmt.data_lo.op == "Sub"
+                        and cas_stmt.data_lo.operands[0].likes(cas_stmt.expd_lo)
+                        and isinstance(cas_stmt.data_lo.operands[1], Const)
+                        and cas_stmt.data_lo.operands[1].value == 1
+                    ):
+                        # lock dec
+                        call_expr = Call(
+                            cas_stmt.idx,
+                            self._get_instrincs_name(f"lock_dec{cas_stmt.bits}"),
+                            args=[cas_stmt.addr],
+                            bits=cas_stmt.bits,
+                            ins_addr=cas_stmt.ins_addr,
+                        )
+
+                if call_expr is None:
+                    call_expr = Call(
+                        cas_stmt.idx,
+                        self._get_instrincs_name(f"xchg{cas_stmt.bits}"),
+                        args=[addr, cas_stmt.data_lo],
+                        bits=cas_stmt.bits,
+                        ins_addr=cas_stmt.ins_addr,
+                    )
+
+                assignment_dst = cas_stmt.expd_lo
+                stmt = Assignment(cas_stmt.idx, assignment_dst, call_expr, **cas_stmt.tags)  # type:ignore
                 return [stmt]

             if next_stmt.ins_addr <= cas_stmt.ins_addr:
@@ -88,9 +143,10 @@ class CASIntrinsics(PeepholeOptimizationMultiStmtBase):

         if cas_stmt.old_hi is None:
             # TODO: Support cases where cas_stmt.old_hi is not None
+            # Case 2
             call_expr = Call(
                 cas_stmt.idx,
-                self._get_instrincs_name("cmpxchg"),
+                self._get_instrincs_name(f"cmpxchg{cas_stmt.bits}"),
                 args=[
                     cas_stmt.addr,
                     cas_stmt.data_lo,
@@ -99,7 +155,8 @@ class CASIntrinsics(PeepholeOptimizationMultiStmtBase):
                 bits=cas_stmt.bits,
                 ins_addr=cas_stmt.ins_addr,
             )
-
+            assignment_dst = cas_stmt.expd_lo
+            stmt = Assignment(cas_stmt.idx, assignment_dst, call_expr, **cas_stmt.tags)  # type:ignore
             return [stmt, next_stmt]

         return None