white-hat-scanner 1.0.1
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/README.md +2 -0
- package/dist/analyzer.js +852 -0
- package/dist/contest.js +144 -0
- package/dist/disclosure.js +85 -0
- package/dist/discovery.js +260 -0
- package/dist/index.js +88 -0
- package/dist/notifier.js +51 -0
- package/dist/redis.js +36 -0
- package/dist/scorer.js +33 -0
- package/dist/submission.js +103 -0
- package/dist/test/smoke.js +511 -0
- package/package.json +23 -0
- package/research/bounty-economics.md +145 -0
- package/research/tooling-landscape.md +216 -0
- package/research/vuln-pattern-library.md +401 -0
- package/src/analyzer.ts +974 -0
- package/src/contest.ts +172 -0
- package/src/disclosure.ts +111 -0
- package/src/discovery.ts +297 -0
- package/src/index.ts +105 -0
- package/src/notifier.ts +58 -0
- package/src/redis.ts +31 -0
- package/src/scorer.ts +46 -0
- package/src/submission.ts +124 -0
- package/src/test/smoke.ts +457 -0
- package/system/architecture.md +488 -0
- package/system/scanner-mvp.md +305 -0
- package/targets/active-bounty-programs.md +111 -0
- package/tsconfig.json +15 -0
package/dist/analyzer.js
ADDED
|
@@ -0,0 +1,852 @@
|
|
|
1
|
+
"use strict";
|
|
2
|
+
Object.defineProperty(exports, "__esModule", { value: true });
|
|
3
|
+
exports.analyzeProtocol = exports.contractPriority = exports.installDeps = exports.resolveCloneUrl = exports.detectSolcVersion = void 0;
|
|
4
|
+
const child_process_1 = require("child_process");
|
|
5
|
+
const fs_1 = require("fs");
|
|
6
|
+
const path_1 = require("path");
|
|
7
|
+
const os_1 = require("os");
|
|
8
|
+
const redis_1 = require("./redis");
|
|
9
|
+
// ---------------------------------------------------------------------------
|
|
10
|
+
// solc-select helpers
|
|
11
|
+
// ---------------------------------------------------------------------------
|
|
12
|
+
// Locate the solc-select executable. Hard-coded install locations are probed
// first because launchd-spawned processes carry a minimal PATH; `which` is the
// fallback for anything else reachable on the user's PATH. Returns null when
// nothing is found.
function findSolcSelectBin() {
    const knownLocations = [
        '/opt/homebrew/bin/solc-select',
        '/usr/local/bin/solc-select',
        '/usr/bin/solc-select',
    ];
    const hit = knownLocations.find((candidate) => (0, fs_1.existsSync)(candidate));
    if (hit)
        return hit;
    try {
        const resolved = (0, child_process_1.execSync)('which solc-select 2>/dev/null', { encoding: 'utf8', stdio: 'pipe' }).trim();
        if (resolved)
            return resolved;
    }
    catch { }
    return null;
}
|
|
30
|
+
/**
 * Parse pragma solidity version constraints from .sol files in repoDir.
 * Scans every .sol file via grep and tallies the version string each
 * pragma line resolves to. Pinned (=x.y.z) and caret/tilde (^/~x.y.z)
 * forms are matched before the generic fallback, so the most specific
 * constraint on a line wins.
 * Returns the most commonly required version string (e.g. "0.7.6"),
 * or null if no pragmas are found or grep fails.
 */
function detectSolcVersion(repoDir) {
    try {
        const grep = (0, child_process_1.spawnSync)('grep', ['-rh', '--include=*.sol', 'pragma solidity', repoDir], { encoding: 'utf8', timeout: 10000 });
        if (grep.status !== 0 && !grep.stdout)
            return null;
        // Ordered: the first pattern that matches a line decides its version.
        const patterns = [
            /pragma solidity\s*=\s*(0\.\d+\.\d+)/,     // pinned: =0.7.6
            /pragma solidity\s*[\^~]\s*(0\.\d+\.\d+)/, // caret/tilde: ^0.8.17
            /pragma solidity[^;]*?(0\.\d+\.\d+)/,      // bare ranges: >=0.8.0 <0.9.0
        ];
        const tally = {};
        for (const line of grep.stdout.trim().split('\n').filter(Boolean)) {
            for (const re of patterns) {
                const m = line.match(re);
                if (m) {
                    tally[m[1]] = (tally[m[1]] ?? 0) + 1;
                    break;
                }
            }
        }
        const ranked = Object.entries(tally).sort((a, b) => b[1] - a[1]);
        return ranked.length > 0 ? ranked[0][0] : null;
    }
    catch {
        return null;
    }
}
exports.detectSolcVersion = detectSolcVersion;
|
|
71
|
+
// Known good fallback versions installed on this machine (by minor series).
// Maps a solc minor series ("0.7") to a concrete patch release that
// setSolcVersion can hand to `solc-select use`/`install`.
// NOTE(review): "installed on this machine" suggests these were chosen for a
// specific dev box — verify they are still the desired patch levels.
const FALLBACK_VERSIONS = {
    '0.4': '0.4.26',
    '0.5': '0.5.17',
    '0.6': '0.6.12',
    '0.7': '0.7.6',
    '0.8': '0.8.20',
};
|
|
79
|
+
/**
 * Ensure a specific solc version is installed and active via solc-select.
 * A fast `solc-select use` is tried first (version may already be installed);
 * on failure the version is installed and activated.
 *
 * BUG FIX: the original widened *every* version to the FALLBACK_VERSIONS
 * entry for its minor series (e.g. a detected "0.8.17" became "0.8.20"),
 * contradicting its own comment and breaking repos whose pragma pins an
 * exact version (`pragma solidity =0.8.17`). The fallback is now applied
 * only when no full x.y.z version was supplied.
 *
 * Returns the version that was set, or null on failure.
 */
function setSolcVersion(version) {
    const bin = findSolcSelectBin();
    if (!bin)
        return null;
    // launchd provides a minimal PATH, so prepend the usual tool dirs.
    const toolEnv = {
        ...process.env,
        PATH: ['/opt/homebrew/bin', '/usr/local/bin', '/usr/bin', '/bin', process.env.PATH].filter(Boolean).join(':'),
    };
    // Only resolve to a known stable patch version when the input lacks one.
    const isExact = /^0\.\d+\.\d+$/.test(version);
    const minor = version.match(/^(0\.\d+)/);
    const resolvedVersion = isExact
        ? version
        : (FALLBACK_VERSIONS[minor?.[1] ?? ''] ?? version);
    const tryUse = () => {
        const result = (0, child_process_1.spawnSync)(bin, ['use', resolvedVersion], {
            encoding: 'utf8', timeout: 15000, stdio: 'pipe', env: toolEnv,
        });
        return result.status === 0;
    };
    try {
        // Fast path: already installed.
        if (tryUse())
            return resolvedVersion;
    }
    catch { }
    try {
        // Not installed — install it, then activate again.
        (0, child_process_1.spawnSync)(bin, ['install', resolvedVersion], {
            encoding: 'utf8', timeout: 120000, stdio: 'pipe', env: toolEnv,
        });
        if (tryUse())
            return resolvedVersion;
    }
    catch { }
    return null;
}
|
|
117
|
+
/**
 * Detect Solidity import remappings for a checkout.
 * remappings.txt (one mapping per line) takes precedence; otherwise the
 * `remappings = [...]` array is extracted from foundry.toml with a cheap
 * regex (no TOML parser needed for this one key).
 * Returns an array of remapping strings like ["@openzeppelin/=lib/openzeppelin/"],
 * or [] when neither file yields anything.
 */
function detectRemappings(repoDir) {
    const remappingsTxt = (0, path_1.join)(repoDir, 'remappings.txt');
    if ((0, fs_1.existsSync)(remappingsTxt)) {
        try {
            const lines = (0, fs_1.readFileSync)(remappingsTxt, 'utf8').split('\n');
            return lines.map((entry) => entry.trim()).filter(Boolean);
        }
        catch { }
    }
    const foundryToml = (0, path_1.join)(repoDir, 'foundry.toml');
    if ((0, fs_1.existsSync)(foundryToml)) {
        try {
            const toml = (0, fs_1.readFileSync)(foundryToml, 'utf8');
            const arrayBody = toml.match(/remappings\s*=\s*\[([\s\S]*?)\]/);
            if (arrayBody) {
                // Strip quotes and trailing commas from each array element.
                return arrayBody[1]
                    .split('\n')
                    .map((entry) => entry.replace(/["',]/g, '').trim())
                    .filter(Boolean);
            }
        }
        catch { }
    }
    return [];
}
|
|
150
|
+
/**
 * Docker-based Slither fallback using trailofbits/eth-security-toolbox.
 * Runs only when the Docker daemon is reachable AND the image is already
 * present locally — the image is never pulled here because a pull is slow
 * and bandwidth-heavy. Every failure mode collapses to a
 * { findings: [], status: 'compilation_error' } result.
 */
function runSlitherDocker(repoDir, solcVersion) {
    const failure = () => ({ findings: [], status: 'compilation_error' });
    try {
        const daemon = (0, child_process_1.spawnSync)('docker', ['info'], { encoding: 'utf8', timeout: 5000, stdio: 'pipe' });
        if (daemon.status !== 0)
            return failure();
        const image = (0, child_process_1.spawnSync)('docker', ['image', 'inspect', 'trailofbits/eth-security-toolbox'], { encoding: 'utf8', timeout: 5000, stdio: 'pipe' });
        if (image.status !== 0)
            return failure();
        // Path inside the container; `cat` at the end relays it to stdout.
        const outputPath = '/tmp/slither-docker-out.json';
        const selectSolc = solcVersion
            ? `solc-select install ${solcVersion} 2>/dev/null; solc-select use ${solcVersion} 2>/dev/null; `
            : '';
        const run = (0, child_process_1.spawnSync)('docker', [
            'run', '--rm',
            '-v', `${repoDir}:/src`,
            'trailofbits/eth-security-toolbox',
            'bash', '-c',
            `cd /src && ${selectSolc}slither . --json ${outputPath} --no-fail-pedantic 2>/dev/null; cat ${outputPath} 2>/dev/null`,
        ], { encoding: 'utf8', timeout: 240000, stdio: 'pipe' });
        if (!run.stdout)
            return failure();
        // The container may print noise before the JSON document starts.
        const jsonStart = run.stdout.indexOf('{');
        if (jsonStart === -1)
            return failure();
        const report = JSON.parse(run.stdout.slice(jsonStart));
        const detectors = report?.results?.detectors || [];
        const findings = detectors.filter((d) => d.impact === 'High' || d.impact === 'Medium' || d.impact === 'Critical');
        return { findings, status: 'success' };
    }
    catch {
        return failure();
    }
}
|
|
190
|
+
// Locate the `claude` CLI binary. A successful `which` lookup takes priority,
// then CLAUDE_BIN and the usual install locations; if none of those exist on
// disk, the first candidate (or the bare command name) is returned and PATH
// resolution at spawn time gets the last word.
function findClaudeBin() {
    const candidates = [
        process.env.CLAUDE_BIN,
        '/Users/feral/.npm-global/bin/claude',
        '/usr/local/bin/claude',
        '/opt/homebrew/bin/claude',
    ].filter(Boolean);
    try {
        const resolved = (0, child_process_1.execSync)('which claude 2>/dev/null', { encoding: 'utf8' }).trim();
        if (resolved)
            candidates.unshift(resolved);
    }
    catch { }
    const existing = candidates.find((candidate) => (0, fs_1.existsSync)(candidate));
    if (existing)
        return existing;
    return candidates[0] || 'claude';
}
|
|
209
|
+
/**
 * Verify that Slither is available, installing it via pip3 as a last resort.
 * Explicit install paths are checked before `which` because launchd-spawned
 * processes carry a minimal PATH. Returns true when a slither binary can be
 * located after the check (and optional install).
 *
 * BUG FIX: the post-install check previously looked at only two hard-coded
 * paths (/opt/homebrew/bin, /usr/local/bin), so a pip install landing in the
 * user-site bin directory (common on Linux) was reported as a failure even
 * though slither was now on PATH. The full lookup — explicit paths plus
 * `which` — is now re-run after installing.
 */
function ensureSlither() {
    const slitherPaths = [
        '/opt/homebrew/bin/slither',
        '/usr/local/bin/slither',
        '/usr/bin/slither',
    ];
    // Shared lookup: explicit locations first, then `which`.
    const locate = () => {
        for (const p of slitherPaths) {
            if ((0, fs_1.existsSync)(p))
                return true;
        }
        try {
            const fromWhich = (0, child_process_1.execSync)('which slither 2>/dev/null', { encoding: 'utf8', stdio: 'pipe' }).trim();
            if (fromWhich)
                return true;
        }
        catch { }
        return false;
    };
    if (locate())
        return true;
    console.log('[whiteh] Slither not found, attempting pip3 install...');
    try {
        (0, child_process_1.execSync)('pip3 install slither-analyzer --break-system-packages 2>&1', { encoding: 'utf8', timeout: 120000, stdio: 'pipe' });
        return locate();
    }
    catch {
        return false;
    }
}
|
|
235
|
+
/**
 * DeFiLlama's `github` field is either an org name (e.g. "pendle-finance"),
 * a full repo URL (e.g. "https://github.com/pendle-finance/pendle-core"),
 * or an org URL (e.g. "https://github.com/BeltFi").
 * Org references are resolved via the GitHub API to the org's most relevant
 * public repo (preferring Solidity repos, then by score — see pickBestRepo).
 *
 * BUG FIX: repo URLs carrying extra path segments (e.g.
 * "https://github.com/org/repo/tree/main") were previously returned
 * unchanged, and `git clone` cannot clone such a URL. They are now
 * normalized down to the org/repo pair.
 *
 * Returns a clonable URL, or null when nothing can be determined.
 */
async function resolveCloneUrl(orgOrUrl) {
    if (orgOrUrl.startsWith('https://github.com/')) {
        // Distinguish org URL (1 path segment) from repo URL (2+ segments)
        const path = orgOrUrl.replace('https://github.com/', '').replace(/\/$/, '');
        const parts = path.split('/').filter(Boolean);
        if (parts.length >= 2) {
            // Repo URL — keep only org/repo so trailing segments
            // (tree/main, blob/..., etc.) don't break `git clone`.
            return `https://github.com/${parts[0]}/${parts[1]}`;
        }
        // Org URL — extract org name and resolve via API
        const org = parts[0];
        if (!org)
            return null;
        return resolveOrgToRepo(org);
    }
    if (orgOrUrl.startsWith('git@')) {
        // SSH remote — already clonable as-is.
        return orgOrUrl;
    }
    const org = orgOrUrl.trim();
    if (!org)
        return null;
    return resolveOrgToRepo(org);
}
exports.resolveCloneUrl = resolveCloneUrl;
|
|
266
|
+
/**
 * Query the GitHub API for `org`'s public repositories and pick the best
 * candidate via pickBestRepo. /orgs/:org/repos is tried first; on any
 * non-OK response the account is retried as a user (/users/:org/repos).
 * Returns a repo URL string or null on failure.
 *
 * BUG FIX: the original passed `sort=stars`, which is not a supported value
 * for these endpoints (valid: created, updated, pushed, full_name) and was
 * silently ignored by GitHub, yielding alphabetical order. `sort=pushed` is
 * used instead so the 20 returned repos skew towards actively maintained
 * code; star-based ranking already happens locally in pickBestRepo.
 */
async function resolveOrgToRepo(org) {
    // Shared fetch for the org and user variants of the list-repos endpoint.
    const listRepos = async (kind) => {
        const res = await fetch(`https://api.github.com/${kind}/${org}/repos?sort=pushed&per_page=20&type=public`, {
            headers: {
                'User-Agent': 'white-hat-scanner/1.0',
                Accept: 'application/vnd.github.v3+json',
            },
            signal: AbortSignal.timeout(15000),
        });
        if (!res.ok)
            return null;
        return res.json();
    };
    try {
        // Might be a user account rather than an org — fall back to /users.
        const repos = (await listRepos('orgs')) ?? (await listRepos('users'));
        if (!repos)
            return null;
        return pickBestRepo(org, repos);
    }
    catch {
        return null;
    }
}
|
|
296
|
+
// Repo names that are almost certainly upstream library copies, not the protocol's own code.
// Anchored patterns (^) match repo-name prefixes; the unanchored /war-room/
// matches anywhere in the name. Matching repos score -1000 in repoScore and
// are excluded by pickBestRepo.
const LIBRARY_REPO_PATTERNS = [
    /^openzeppelin/i, // OpenZeppelin contract library mirrors
    /^solmate/i,
    /^solady/i,
    /^forge-std/i,    // Foundry standard library
    /^hardhat/i,
    /^foundry/i,
    /war-room/i,      // presumably incident "war room" repos — verify intent
    /^ds-/i,          // dapphub ds-* libraries
    /^dapp-/i,
];
|
|
308
|
+
/** True when `name` matches any known upstream-library repo pattern. */
function isLibraryRepo(name) {
    for (const pattern of LIBRARY_REPO_PATTERNS) {
        if (pattern.test(name))
            return true;
    }
    return false;
}
|
|
311
|
+
/** Score a repo: higher = better candidate for the protocol's core contracts. */
function repoScore(org, r) {
    // Known library mirrors are never the protocol's own code.
    if (isLibraryRepo(r.name))
        return -1000;
    const name = r.name.toLowerCase();
    const orgLower = org.toLowerCase();
    // Org name appearing in the repo name (with or without dashes) is the
    // strongest signal, e.g. tornadocash/tornado-core.
    const echoesOrg = name.includes(orgLower)
        || name.replace(/-/g, '').includes(orgLower.replace(/-/g, ''));
    let score = echoesOrg ? 50 : 0;
    if (r.language === 'Solidity')
        score += 30; // the protocol's actual contract language
    if (/\b(core|protocol|vault|pool|main|primary)\b/.test(name))
        score += 20; // core-sounding repo name
    if (name.includes('contract'))
        score += 10;
    // Stars only break ties; the cap keeps inflated library-style star
    // counts from dominating the structural signals above.
    score += Math.min(r.stargazers_count, 200) * 0.01;
    return score;
}
|
|
335
|
+
/**
 * Choose the most protocol-like repository from a GitHub API listing.
 * Archived repos and forks are preferred away (but used if nothing else
 * qualifies); library mirrors (score -1000) are excluded outright.
 * Returns a repo URL string or null.
 */
function pickBestRepo(org, repos) {
    if (!repos || repos.length === 0)
        return null;
    const active = repos.filter((r) => !r.archived && !r.fork);
    const pool = active.length > 0 ? active : repos;
    let best = null;
    let bestScore = -Infinity;
    for (const r of pool) {
        const score = repoScore(org, r);
        if (score <= -1000)
            continue; // library copy — never pick
        if (best === null || score > bestScore) {
            best = r;
            bestScore = score;
        }
    }
    if (best === null)
        return null;
    return `https://github.com/${org}/${best.name}`;
}
|
|
349
|
+
/**
 * Shallow-clone `githubUrl` into destDir. History is irrelevant for static
 * analysis, so --depth 1 keeps the clone fast. Returns true on success.
 */
function cloneRepo(githubUrl, destDir) {
    try {
        const clone = (0, child_process_1.spawnSync)('git', ['clone', '--depth', '1', '--quiet', githubUrl, destDir], {
            timeout: 60000,
            encoding: 'utf8',
        });
        return clone.status === 0;
    }
    catch {
        return false;
    }
}
|
|
361
|
+
// Locate the slither executable; explicit install paths first because
// launchd-spawned processes carry a minimal PATH, then `which`. Falls back
// to the bare command name so PATH resolution at spawn time can still win.
function findSlitherBin() {
    const knownLocations = [
        '/opt/homebrew/bin/slither',
        '/usr/local/bin/slither',
        '/usr/bin/slither',
    ];
    const hit = knownLocations.find((candidate) => (0, fs_1.existsSync)(candidate));
    if (hit)
        return hit;
    try {
        const resolved = (0, child_process_1.execSync)('which slither 2>/dev/null', { encoding: 'utf8', stdio: 'pipe' }).trim();
        if (resolved)
            return resolved;
    }
    catch { }
    return 'slither';
}
|
|
379
|
+
// Locate the Foundry `forge` executable via explicit install paths, then
// `which`. Unlike findSlitherBin this returns null when nothing is found —
// callers treat a missing forge as "skip the Foundry steps".
function findForgeBin() {
    const knownLocations = [
        '/Users/feral/.foundry/bin/forge',
        '/opt/homebrew/bin/forge',
        '/usr/local/bin/forge',
    ];
    const hit = knownLocations.find((candidate) => (0, fs_1.existsSync)(candidate));
    if (hit)
        return hit;
    try {
        const resolved = (0, child_process_1.execSync)('which forge 2>/dev/null', { encoding: 'utf8', stdio: 'pipe' }).trim();
        if (resolved)
            return resolved;
    }
    catch { }
    return null;
}
|
|
397
|
+
/**
 * Install project dependencies so Slither can compile the contracts.
 * Detects the build system from marker files: package.json → npm install
 * (plus a best-effort `hardhat compile`), foundry.toml → forge install +
 * forge build. All child-process work is best-effort; failures of the
 * pre-compilation steps are ignored.
 *
 * @param repoDir absolute path of the cloned repository
 * @returns a brief status string for logging: 'npm', 'npm-failed', 'forge',
 *          'foundry-lib-present', or 'no-pkg-manager'
 */
function installDeps(repoDir) {
    // Hardhat / Truffle — npm install (most common for DeFi)
    if ((0, fs_1.existsSync)((0, path_1.join)(repoDir, 'package.json'))) {
        try {
            // --ignore-scripts: never run arbitrary lifecycle scripts from a
            // freshly cloned, untrusted repository.
            // NOTE(review): spawnSync only throws on spawn failure (e.g.
            // ENOENT), not on a non-zero exit, so npm errors are tolerated
            // here and 'npm-failed' is rarely reached — confirm intent.
            (0, child_process_1.spawnSync)('npm', ['install', '--ignore-scripts', '--prefer-offline', '--no-audit'], {
                cwd: repoDir,
                timeout: 120000,
                encoding: 'utf8',
                stdio: 'pipe',
            });
        }
        catch {
            return 'npm-failed';
        }
        // Best-effort pre-compilation: Hardhat downloads the right solc version and
        // caches compilation artifacts, which dramatically improves Slither's compile rate.
        try {
            const hardhatBin = (0, path_1.join)(repoDir, 'node_modules', '.bin', 'hardhat');
            if ((0, fs_1.existsSync)(hardhatBin)) {
                (0, child_process_1.spawnSync)('node', [hardhatBin, 'compile', '--quiet'], {
                    cwd: repoDir,
                    timeout: 120000,
                    encoding: 'utf8',
                    stdio: 'pipe',
                });
            }
        }
        catch { }
        return 'npm';
    }
    // Foundry — forge install + forge build
    if ((0, fs_1.existsSync)((0, path_1.join)(repoDir, 'foundry.toml'))) {
        const forgeBin = findForgeBin();
        // launchd provides a minimal PATH, so prepend the usual tool dirs.
        const toolEnv = {
            ...process.env,
            PATH: [
                '/opt/homebrew/bin',
                '/usr/local/bin',
                '/usr/bin',
                '/bin',
                '/Users/feral/.foundry/bin',
                process.env.PATH,
            ].filter(Boolean).join(':'),
        };
        const libDir = (0, path_1.join)(repoDir, 'lib');
        // A shallow git clone (--depth 1) may create lib/ as an empty directory when
        // submodules are registered but not populated. Check for actual content so we
        // don't skip forge install on a repo that still needs its dependencies fetched.
        let libHasContent = false;
        try {
            libHasContent = (0, fs_1.existsSync)(libDir) && (0, fs_1.readdirSync)(libDir).length > 0;
        }
        catch { }
        if (forgeBin) {
            if (!libHasContent) {
                // Fetch registered dependencies without cloning full git history
                (0, child_process_1.spawnSync)(forgeBin, ['install', '--no-git'], {
                    cwd: repoDir,
                    timeout: 120000,
                    encoding: 'utf8',
                    stdio: 'pipe',
                    env: toolEnv,
                });
            }
            // Pre-build so Slither can use Foundry's compilation artifacts (out/).
            // Best-effort: continue even if some contracts fail to compile.
            (0, child_process_1.spawnSync)(forgeBin, ['build', '--skip', 'test', 'script'], {
                cwd: repoDir,
                timeout: 180000,
                encoding: 'utf8',
                stdio: 'pipe',
                env: toolEnv,
            });
        }
        return libHasContent ? 'foundry-lib-present' : 'forge';
    }
    // Neither package.json nor foundry.toml — nothing we know how to install.
    return 'no-pkg-manager';
}
exports.installDeps = installDeps;
|
|
480
|
+
/**
 * Count .sol files under `dir` (recursive, skipping node_modules and .git)
 * by shelling out to `find`. Runs in milliseconds and acts as a fast
 * Slither gate — zero .sol files means there is nothing to analyze.
 */
function countSolFiles(dir) {
    try {
        const found = (0, child_process_1.spawnSync)('find', [dir, '-name', '*.sol', '-not', '-path', '*/node_modules/*', '-not', '-path', '*/.git/*'], { encoding: 'utf8', timeout: 10000 });
        if (found.status !== 0)
            return 0;
        const paths = found.stdout.trim().split('\n').filter(Boolean);
        return paths.length;
    }
    catch {
        return 0;
    }
}
|
|
496
|
+
/**
 * Run Slither over a cloned repository and return high-signal findings.
 * Pipeline: install deps (installDeps) → activate the detected solc version
 * (detectSolcVersion/setSolcVersion) → collect remappings → run the native
 * slither binary; if no JSON output file appears, fall back to the Docker
 * image (runSlitherDocker). Only High/Medium/Critical-impact detector
 * results are kept.
 *
 * @param repoDir absolute path of the cloned repository
 * @returns { findings, status } where status is 'success' or
 *          'compilation_error'
 */
function runSlither(repoDir) {
    // Timestamped temp file so concurrent runs don't clobber each other.
    const outputPath = (0, path_1.join)((0, os_1.tmpdir)(), `slither-${Date.now()}.json`);
    const slitherBin = findSlitherBin();
    // Install deps so Slither can compile — without this, Hardhat repos always return 0 findings
    installDeps(repoDir);
    // Detect the required solc version from pragma statements and switch to it
    const detectedVersion = detectSolcVersion(repoDir);
    if (detectedVersion) {
        setSolcVersion(detectedVersion);
    }
    // Detect import remappings (remappings.txt or foundry.toml)
    const remappings = detectRemappings(repoDir);
    // Build PATH that includes common tool locations (launchd has minimal PATH)
    const toolEnv = {
        ...process.env,
        PATH: [
            '/opt/homebrew/bin',
            '/usr/local/bin',
            '/usr/bin',
            '/bin',
            '/Users/feral/.foundry/bin',
            process.env.PATH,
        ].filter(Boolean).join(':'),
    };
    const slitherArgs = [
        '.',
        '--json', outputPath,
        '--disable-color',
        '--no-fail-pedantic', // continue analysis even if some files fail to compile
    ];
    if (remappings.length > 0) {
        slitherArgs.push('--solc-remaps', remappings.join(' '));
    }
    // For Foundry projects, pass --foundry-compile-all to get better coverage
    if ((0, fs_1.existsSync)((0, path_1.join)(repoDir, 'foundry.toml'))) {
        slitherArgs.push('--foundry-compile-all');
    }
    try {
        const result = (0, child_process_1.spawnSync)(slitherBin, slitherArgs, {
            cwd: repoDir,
            timeout: 180000,
            encoding: 'utf8',
            stdio: 'pipe',
            env: toolEnv,
        });
        // Log stderr for debugging (first 500 chars); only persisted when no
        // JSON output was produced, i.e. when the run actually failed.
        const errSnippet = (result.stderr ?? '').slice(0, 500);
        if (errSnippet && !(0, fs_1.existsSync)(outputPath)) {
            (0, fs_1.writeFileSync)((0, path_1.join)((0, os_1.tmpdir)(), `slither-err-${Date.now()}.txt`), errSnippet);
        }
        // Success is signalled by the JSON file existing, not the exit code —
        // slither exits non-zero whenever it reports findings.
        if (!(0, fs_1.existsSync)(outputPath)) {
            // Native Slither failed — try Docker fallback
            const dockerResult = runSlitherDocker(repoDir, detectedVersion);
            if (dockerResult.status === 'success')
                return dockerResult;
            return { findings: [], status: 'compilation_error' };
        }
        const raw = (0, fs_1.readFileSync)(outputPath, 'utf8');
        const parsed = JSON.parse(raw);
        const detectors = parsed?.results?.detectors || [];
        // Keep only impactful results; Low/Informational are noise here.
        const findings = detectors.filter((d) => d.impact === 'High' || d.impact === 'Medium' || d.impact === 'Critical');
        return { findings, status: 'success' };
    }
    catch {
        // Any throw (spawn failure, JSON parse error, fs error) counts as a
        // compilation error for the caller.
        return { findings: [], status: 'compilation_error' };
    }
    finally {
        // Always remove the temp JSON file, success or failure.
        try {
            if ((0, fs_1.existsSync)(outputPath))
                (0, fs_1.rmSync)(outputPath);
        }
        catch { }
    }
}
|
|
570
|
+
/**
 * Heuristic filter: returns true for Solidity files that are likely tests,
 * mocks, fixtures, stubs, or interfaces — files that shouldn't be
 * prioritized for security review.
 */
function isTestOrMockFile(filePath) {
    const base = filePath.split('/').pop() ?? '';
    const baseLower = base.toLowerCase();
    const pathLower = filePath.toLowerCase();
    const nonCoreDir = /\/(test|tests|mock|mocks|interface|interfaces|fixture|fixtures|stub|stubs|spec)\//;
    const nonCorePrefix = /^(test|mock|interface|fixture|stub|spec)/;
    const nonCoreSuffix = /(test|mock|interface|fixture|stub)\.sol$/;
    if (nonCoreDir.test(pathLower))
        return true; // lives in a test/mock/interface directory
    if (nonCorePrefix.test(baseLower))
        return true; // filename starts with test/mock/etc.
    if (/^I[A-Z]/.test(base))
        return true; // interface naming convention: IPool.sol, IERC20.sol
    if (baseLower.endsWith('.t.sol'))
        return true; // Foundry test suffix: Foo.t.sol
    if (nonCoreSuffix.test(baseLower))
        return true; // PoolTest.sol, TokenMock.sol, ...
    return false;
}
|
|
595
|
+
/**
 * Priority score for a Solidity file path; higher = more likely to be a
 * core protocol contract worth auditing. 0 = skip (test/mock/interface),
 * 3 = core-sounding name, 1 = support/library name, 2 = everything else.
 */
function contractPriority(filePath) {
    if (isTestOrMockFile(filePath))
        return 0;
    const base = filePath.split('/').pop() ?? '';
    const coreName = /^(Pool|Vault|Core|Main|Protocol|Manager|Controller|Factory|Router|Staking|Lending|Borrow|Exchange|Swap|Bridge|Token|Governor|Governance|Treasury|Strategy|Proxy|Upgradeable)/i;
    const supportName = /^(Base|Abstract|Lib|Library|Helper|Utils|Math)/i;
    if (coreName.test(base))
        return 3;
    if (supportName.test(base))
        return 1;
    return 2; // default: include, medium priority
}
exports.contractPriority = contractPriority;
|
|
612
|
+
/**
 * Build a bounded text summary of the repository's most audit-relevant
 * contracts: walk up to 4 directory levels, rank files via contractPriority
 * (tests/mocks/interfaces drop out at priority 0), then concatenate the head
 * of each top file under a global character budget.
 *
 * BUG FIX: the directory walk previously shelled out to `ls -1` and the
 * macOS-only `stat -f "%HT"`, interpolating the path into a shell string —
 * this failed entirely on Linux and broke (or worse, injected) on paths
 * containing quotes. It now uses fs.readdirSync with Dirent entries, which
 * is portable and takes no shell round-trips.
 *
 * @param repoDir absolute path of the cloned repository
 * @returns concatenated snippets, or 'No Solidity files found'
 */
function summarizeContracts(repoDir) {
    const solidityFiles = [];
    // Depth-limited recursive walk; hidden dirs and node_modules are skipped.
    function findSol(dir, depth = 0) {
        if (depth > 4)
            return;
        let entries;
        try {
            entries = (0, fs_1.readdirSync)(dir, { withFileTypes: true });
        }
        catch {
            return; // unreadable directory — skip silently, as before
        }
        for (const entry of entries) {
            const fullPath = (0, path_1.join)(dir, entry.name);
            if (entry.isFile() && entry.name.endsWith('.sol')) {
                solidityFiles.push(fullPath);
            }
            else if (entry.isDirectory() && !entry.name.startsWith('.') && !entry.name.includes('node_modules')) {
                findSol(fullPath, depth + 1);
            }
        }
    }
    findSol(repoDir);
    // Sort by priority: core contracts first, test/mock/interface files filtered out
    const prioritized = solidityFiles
        .map((f) => ({ path: f, priority: contractPriority(f) }))
        .filter((f) => f.priority > 0)
        .sort((a, b) => b.priority - a.priority)
        .map((f) => f.path);
    const snippets = [];
    let totalChars = 0;
    const MAX_CHARS = 16000; // overall character budget across all snippets
    const MAX_FILES = 15;    // hard cap on file count even if budget remains
    for (const f of prioritized.slice(0, MAX_FILES)) {
        if (totalChars >= MAX_CHARS)
            break;
        try {
            const content = (0, fs_1.readFileSync)(f, 'utf8');
            // Per-file budget: at most 1200 chars, shrinking as the global
            // budget is consumed.
            const budget = Math.min(1200, MAX_CHARS - totalChars);
            const snippet = content.slice(0, budget);
            snippets.push(`// File: ${f.replace(repoDir, '')}\n${snippet}`);
            totalChars += snippet.length;
        }
        catch { }
    }
    return snippets.join('\n\n---\n\n') || 'No Solidity files found';
}
|
|
664
|
+
/**
 * Run the `claude` CLI as a one-shot security reviewer and parse its
 * structured response.
 *
 * @param {object} protocol - Protocol record (reads `name`, `chain`, `tvl`).
 * @param {Array<object>} slitherFindings - Slither findings (reads `impact`,
 *   `confidence`, `check`, `description`); may be empty.
 * @param {string} contractCode - Concatenated contract source snippets, or a
 *   short note explaining why no code is available.
 * @param {boolean} sourceAvailable - Whether real source code was analyzed;
 *   when false the prompt forces an architecture-only review.
 * @returns {Promise<{review: string, riskLevel: string, bounty: number, summary: string}>}
 *   Parsed fields; on any failure returns a sentinel object with
 *   riskLevel 'UNKNOWN' and bounty 0 (never throws).
 */
async function runClaudeReview(protocol, slitherFindings, contractCode, sourceAvailable) {
    const claudeBin = findClaudeBin();
    const claudeToken = process.env.CLAUDE_CODE_OAUTH_TOKEN || process.env.ANTHROPIC_API_KEY || '';
    const slitherSummary = slitherFindings.length > 0
        ? slitherFindings
            .map((f) => `[${f.impact}/${f.confidence}] ${f.check}: ${f.description?.slice(0, 200)}`)
            .join('\n')
        : 'No Slither findings (Slither may not be installed or no Solidity files found)';
    const noCodeWarning = sourceAvailable
        ? ''
        : `
IMPORTANT: No contract source code is available for this protocol. Your review MUST be an architecture-level threat model only.
- Do NOT claim specific vulnerabilities exist without code evidence.
- Set RISK_LEVEL to MEDIUM at most unless there is documented public evidence of a critical flaw.
- Set BOUNTY_ESTIMATE_USD to 0 — speculative findings are not bounty-eligible.
- The SUMMARY must clearly state: "Architecture review only — no source code analyzed."
`;
    const prompt = `You are a smart contract security auditor. Review this protocol and its contracts for critical vulnerabilities.

Protocol: ${protocol.name}
Chain: ${protocol.chain}
TVL: $${(protocol.tvl / 1000000).toFixed(1)}M
Source code available: ${sourceAvailable ? 'YES' : 'NO'}
${noCodeWarning}
Focus on:
- Governance/voting manipulation
- Flash loan attack vectors
- Oracle price manipulation
- Reentrancy
- Access control issues
- Economic exploits

Slither findings:
${slitherSummary}

Contract code summary:
${contractCode}

Respond in this exact format:
RISK_LEVEL: <CRITICAL|HIGH|MEDIUM|LOW>
BOUNTY_ESTIMATE_USD: <number>
SUMMARY: <one paragraph disclosure summary>
FULL_REVIEW: <detailed analysis>`;
    try {
        // Select the auth variable the CLI expects: OAuth tokens are
        // prefixed "sk-ant-oat"; anything else is treated as an API key.
        const env = { ...process.env };
        if (claudeToken) {
            if (claudeToken.startsWith('sk-ant-oat')) {
                env.CLAUDE_CODE_OAUTH_TOKEN = claudeToken;
            }
            else {
                env.ANTHROPIC_API_KEY = claudeToken;
            }
        }
        // NOTE: the prompt is passed directly via `-p`. The previous version
        // also wrote it to a temp file that was never read by anything —
        // that dead write/cleanup has been removed.
        const result = (0, child_process_1.spawnSync)(claudeBin, ['--print', '--dangerously-skip-permissions', '-p', prompt], {
            encoding: 'utf8',
            timeout: 120000,
            env,
            stdio: 'pipe',
        });
        // BUG FIX: spawn-level failures (missing binary → ENOENT, or the
        // 120 s timeout) leave result.stdout empty; previously they were
        // silently parsed as an empty review. Surface them via the catch
        // path so the caller sees "Claude review failed: ...".
        if (result.error) {
            throw result.error;
        }
        const output = result.stdout || '';
        const riskMatch = output.match(/RISK_LEVEL:\s*(CRITICAL|HIGH|MEDIUM|LOW)/i);
        const bountyMatch = output.match(/BOUNTY_ESTIMATE_USD:\s*(\d+)/i);
        const summaryMatch = output.match(/SUMMARY:\s*(.+?)(?=\nFULL_REVIEW:|$)/is);
        return {
            review: output,
            riskLevel: riskMatch?.[1]?.toUpperCase() || 'UNKNOWN',
            bounty: bountyMatch ? parseInt(bountyMatch[1], 10) : 0,
            summary: summaryMatch?.[1]?.trim() || 'No summary generated',
        };
    }
    catch (err) {
        // Never propagate: analysis should degrade to an "UNKNOWN" result.
        return {
            review: `Claude review failed: ${err.message}`,
            riskLevel: 'UNKNOWN',
            bounty: 0,
            summary: 'Review failed',
        };
    }
}
|
|
752
|
+
/**
 * Analyze a single protocol: clone its repository (when a GitHub org is
 * known), run Slither static analysis over any Solidity sources, then run a
 * Claude-based security review, accumulating everything into one result
 * object.
 *
 * @param {object} protocol - Protocol record (reads `id`, `name`, `chain`,
 *   `tvl`, `github`).
 * @returns {Promise<object>} Analysis result; on internal failure the
 *   result carries an `error` field rather than throwing.
 */
async function analyzeProtocol(protocol) {
    await (0, redis_1.log)(`Analyzing protocol: ${protocol.name} (TVL: $${(protocol.tvl / 1000000).toFixed(1)}M)`);
    const result = {
        protocolId: protocol.id,
        protocolName: protocol.name,
        chain: protocol.chain,
        tvl: protocol.tvl,
        slitherFindings: [],
        slitherStatus: 'not_applicable',
        claudeReview: '',
        riskLevel: 'UNKNOWN',
        estimatedBounty: 0,
        disclosureSummary: '',
        scannedAt: Date.now(),
        sourceAvailable: false,
    };
    // Helper: fall back to an architecture-only Claude review (no source
    // code) and finalize `result`. Previously this five-line block was
    // copy-pasted verbatim in three branches.
    const claudeOnlyReview = async (contractNote) => {
        const { review, riskLevel, bounty, summary } = await runClaudeReview(protocol, [], contractNote, false);
        result.claudeReview = review;
        result.riskLevel = riskLevel;
        result.estimatedBounty = bounty;
        result.disclosureSummary = summary;
        result.sourceAvailable = false;
        return result;
    };
    if (!protocol.github) {
        await (0, redis_1.log)(`${protocol.name}: no GitHub URL, running Claude-only review`);
        return claudeOnlyReview('No repository available');
    }
    const tempDir = (0, path_1.join)((0, os_1.tmpdir)(), `whiteh-${protocol.id}-${Date.now()}`);
    try {
        (0, fs_1.mkdirSync)(tempDir, { recursive: true });
        const cloneUrl = await resolveCloneUrl(protocol.github);
        if (!cloneUrl) {
            await (0, redis_1.log)(`${protocol.name}: could not resolve GitHub org "${protocol.github}" to a clone URL, running Claude-only review`);
            return await claudeOnlyReview('Repository URL could not be resolved');
        }
        await (0, redis_1.log)(`${protocol.name}: cloning ${cloneUrl} (org: ${protocol.github})`);
        const cloned = cloneRepo(cloneUrl, tempDir);
        if (!cloned) {
            await (0, redis_1.log)(`${protocol.name}: clone failed, running Claude-only review`);
            return await claudeOnlyReview('Repository clone failed');
        }
        result.sourceAvailable = true;
        // Slither only applies to EVM repos that actually contain Solidity.
        const solCount = countSolFiles(tempDir);
        if (solCount === 0) {
            result.slitherStatus = 'not_applicable';
            await (0, redis_1.log)(`${protocol.name}: no .sol files found — skipping Slither (non-EVM or frontend repo)`);
        }
        else {
            await (0, redis_1.log)(`${protocol.name}: found ${solCount} .sol file(s)`);
            const slitherAvailable = ensureSlither();
            if (slitherAvailable) {
                await (0, redis_1.log)(`${protocol.name}: running Slither analysis`);
                const slitherResult = runSlither(tempDir);
                result.slitherFindings = slitherResult.findings;
                result.slitherStatus = slitherResult.status;
                if (slitherResult.status === 'compilation_error') {
                    await (0, redis_1.log)(`${protocol.name}: Slither compilation error — no static findings (contracts may use unsupported compiler or missing deps)`);
                }
                else {
                    await (0, redis_1.log)(`${protocol.name}: Slither found ${result.slitherFindings.length} HIGH/CRITICAL findings`);
                }
            }
            else {
                result.slitherStatus = 'unavailable';
                await (0, redis_1.log)(`${protocol.name}: Slither unavailable, skipping static analysis`);
            }
        }
        const contractCode = summarizeContracts(tempDir);
        await (0, redis_1.log)(`${protocol.name}: running Claude security review`);
        const { review, riskLevel, bounty, summary } = await runClaudeReview(protocol, result.slitherFindings, contractCode, true);
        result.claudeReview = review;
        result.riskLevel = riskLevel;
        result.estimatedBounty = bounty;
        result.disclosureSummary = summary;
        await (0, redis_1.log)(`${protocol.name}: analysis complete — risk=${riskLevel}, bounty=$${bounty}`);
    }
    catch (err) {
        // Degrade, never throw: record the error on the result instead.
        result.error = err.message;
        await (0, redis_1.log)(`${protocol.name}: analysis error: ${result.error}`);
    }
    finally {
        // Best-effort cleanup of the clone; a failed rm must not mask the result.
        try {
            if ((0, fs_1.existsSync)(tempDir))
                (0, fs_1.rmSync)(tempDir, { recursive: true, force: true });
        }
        catch { }
    }
    return result;
}
|
|
852
|
+
// CommonJS re-export (tsc-compiled output): public entry point of this module.
exports.analyzeProtocol = analyzeProtocol;
|