muaddib-scanner 2.9.7 → 2.9.9

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
package/package.json CHANGED
@@ -1,6 +1,6 @@
1
1
  {
2
2
  "name": "muaddib-scanner",
3
- "version": "2.9.7",
3
+ "version": "2.9.9",
4
4
  "description": "Supply-chain threat detection & response for npm & PyPI/Python",
5
5
  "main": "src/index.js",
6
6
  "bin": {
@@ -0,0 +1,339 @@
1
+ #!/usr/bin/env node
2
+ /**
3
+ * MUAD'DIB — npm Random Package Sampler
4
+ *
5
+ * Samples 200 packages from the npm registry by stratified random sampling.
6
+ * Used to measure FPR on a representative npm sample (not curated).
7
+ *
8
+ * Strata (by dependency count):
9
+ * small (<10 deps): 80 packages (40%)
10
+ * medium (10-50 deps): 60 packages (30%)
11
+ * large (51-100 deps): 40 packages (20%)
12
+ * vlarge (100+ deps): 20 packages (10%)
13
+ *
14
+ * Exclusions: @types/*, deprecated, already in packages-npm.txt
15
+ *
16
+ * Usage:
17
+ * node scripts/sample-npm-random.js [--seed N] [--output path]
18
+ */
19
+
20
+ const https = require('https');
21
+ const fs = require('fs');
22
+ const path = require('path');
23
+
24
// Repo root: this script lives in scripts/, so the project root is one level up.
const ROOT = path.join(__dirname, '..');
// Curated benign corpus — any package already listed here is excluded from the random sample.
const CURATED_FILE = path.join(ROOT, 'datasets', 'benign', 'packages-npm.txt');
// Default destination for the generated random-sample list (overridable via --output).
const DEFAULT_OUTPUT = path.join(ROOT, 'datasets', 'benign', 'packages-npm-random.txt');

// Stratified-sampling quotas keyed by total dependency count (deps + devDeps).
// Bounds are inclusive integers; quotas sum to 200 (80 + 60 + 40 + 20).
const STRATA = {
  small: { min: 0, max: 9, quota: 80 },
  medium: { min: 10, max: 50, quota: 60 },
  large: { min: 51, max: 100, quota: 40 },
  vlarge: { min: 101, max: Infinity, quota: 20 }
};

// Search keywords — diverse enough to sample across npm.
// Each keyword is queried once (with a random offset) during Phase 1 candidate collection.
const SEARCH_KEYWORDS = [
  'util', 'helper', 'config', 'server', 'client', 'api', 'data',
  'file', 'string', 'array', 'json', 'http', 'url', 'path', 'stream',
  'log', 'debug', 'test', 'mock', 'format', 'parse', 'transform',
  'crypto', 'hash', 'encode', 'decode', 'compress', 'cache', 'queue',
  'event', 'promise', 'async', 'callback', 'middleware', 'router',
  'database', 'mongo', 'redis', 'sql', 'orm', 'schema', 'validate',
  'cli', 'terminal', 'color', 'progress', 'spinner', 'prompt',
  'image', 'pdf', 'csv', 'xml', 'yaml', 'markdown', 'html',
  'email', 'auth', 'token', 'session', 'cookie', 'proxy',
  'date', 'time', 'math', 'random', 'uuid', 'id', 'slug',
  'webpack', 'babel', 'eslint', 'prettier', 'rollup', 'vite',
  'react', 'vue', 'angular', 'svelte', 'solid', 'preact',
  'express', 'koa', 'fastify', 'socket', 'graphql', 'rest',
  'aws', 'azure', 'gcp', 'docker', 'kubernetes', 'ci',
  'i18n', 'locale', 'charset', 'buffer', 'binary', 'hex',
  'retry', 'timeout', 'rate', 'limit', 'throttle', 'debounce',
  'merge', 'deep', 'clone', 'diff', 'patch', 'compare',
  'glob', 'pattern', 'regex', 'match', 'search', 'filter',
  'tree', 'graph', 'list', 'map', 'set', 'stack',
  'plugin', 'loader', 'adapter', 'wrapper', 'bridge', 'connector'
];
58
+
59
/**
 * mulberry32 — tiny seeded 32-bit PRNG so sampling runs are reproducible
 * for a given --seed value.
 * @param {number} seed - integer seed
 * @returns {() => number} generator yielding floats in [0, 1)
 */
function mulberry32(seed) {
  let state = seed | 0;
  return function () {
    state = (state + 0x6D2B79F5) | 0;
    let t = Math.imul(state ^ (state >>> 15), state | 1);
    t = (t + Math.imul(t ^ (t >>> 7), t | 61)) ^ t;
    return ((t ^ (t >>> 14)) >>> 0) / 4294967296;
  };
}
68
+
69
/**
 * In-place Fisher–Yates shuffle driven by the supplied RNG, so results
 * are reproducible under a seeded generator.
 * @param {Array} arr - array to shuffle (mutated in place)
 * @param {() => number} rng - returns floats in [0, 1)
 * @returns {Array} the same array, shuffled
 */
function shuffleArray(arr, rng) {
  let i = arr.length;
  while (--i > 0) {
    const pick = Math.floor(rng() * (i + 1));
    const held = arr[i];
    arr[i] = arr[pick];
    arr[pick] = held;
  }
  return arr;
}
76
+
77
/**
 * GET a URL and parse the response body as JSON.
 * Follows 3xx redirects up to a bounded depth and enforces a 15s socket timeout.
 *
 * Fixes over previous version:
 * - redirect recursion is now capped (a redirect loop previously recursed forever);
 * - the redirected response is drained with res.resume() so its socket is released.
 *
 * @param {string} url - URL to fetch
 * @param {number} [redirectsLeft=5] - remaining redirect hops before rejecting
 * @returns {Promise<object>} parsed JSON body
 */
function httpsGet(url, redirectsLeft = 5) {
  return new Promise((resolve, reject) => {
    const req = https.get(url, { timeout: 15000 }, (res) => {
      if (res.statusCode >= 300 && res.statusCode < 400 && res.headers.location) {
        res.resume(); // drain the redirect body so the socket is freed
        if (redirectsLeft <= 0) {
          reject(new Error(`Too many redirects for ${url}`));
          return;
        }
        httpsGet(res.headers.location, redirectsLeft - 1).then(resolve).catch(reject);
        return;
      }
      if (res.statusCode !== 200) {
        res.resume(); // drain non-200 body
        reject(new Error(`HTTP ${res.statusCode} for ${url}`));
        return;
      }
      let data = '';
      res.on('data', chunk => data += chunk);
      res.on('end', () => {
        try { resolve(JSON.parse(data)); }
        catch (e) { reject(new Error(`JSON parse error: ${e.message}`)); }
      });
    });
    req.on('error', reject);
    req.on('timeout', () => { req.destroy(); reject(new Error('timeout')); });
  });
}
100
+
101
/**
 * Query the npm registry search API for packages matching a keyword.
 * Failures are logged and swallowed (empty array returned) so one bad
 * keyword does not abort the whole sampling run.
 * @param {string} keyword - search text
 * @param {number} [from=0] - result offset
 * @param {number} [size=250] - page size
 * @returns {Promise<Array<{name: string, version: string, description: string, deprecated: boolean}>>}
 */
async function searchNpm(keyword, from = 0, size = 250) {
  const url = `https://registry.npmjs.org/-/v1/search?text=${encodeURIComponent(keyword)}&size=${size}&from=${from}`;
  try {
    const data = await httpsGet(url);
    const objects = data.objects || [];
    return objects.map((entry) => {
      const pkg = entry.package;
      return {
        name: pkg.name,
        version: pkg.version,
        description: pkg.description || '',
        deprecated: pkg.deprecated || false
      };
    });
  } catch (err) {
    console.error(` [WARN] npm search "${keyword}" failed: ${err.message}`);
    return [];
  }
}
120
+
121
/**
 * Fetch the latest manifest of a package and count its dependencies.
 * @param {string} pkgName - npm package name
 * @returns {Promise<{deps: number, devDeps: number, totalDeps: number}|null>} null on any failure
 */
async function getDepCount(pkgName) {
  const url = `https://registry.npmjs.org/${encodeURIComponent(pkgName)}/latest`;
  try {
    const manifest = await httpsGet(url);
    const deps = Object.keys(manifest.dependencies || {}).length;
    const devDeps = Object.keys(manifest.devDependencies || {}).length;
    return { deps, devDeps, totalDeps: deps + devDeps };
  } catch {
    // Best-effort: any network/parse failure simply skips this candidate.
    return null;
  }
}
136
+
137
/**
 * Map a total dependency count to its stratum name.
 * Generalized: the strata table is now a parameter (defaults to the
 * module-level STRATA) so the classifier is reusable and unit-testable.
 * Falls back to 'small' for values matching no range (e.g. NaN, negatives).
 * @param {number} depCount - total dependency count (deps + devDeps)
 * @param {Object<string, {min: number, max: number}>} [strata=STRATA] - inclusive ranges, first match wins
 * @returns {string} stratum key
 */
function classifyStratum(depCount, strata = STRATA) {
  for (const [name, { min, max }] of Object.entries(strata)) {
    if (depCount >= min && depCount <= max) return name;
  }
  return 'small';
}
143
+
144
/**
 * Load the curated benign package list as a Set of names.
 * Blank lines and '#' comment lines are skipped. A missing or unreadable
 * file yields an empty Set — the exclusion list is best-effort.
 * @returns {Set<string>} curated package names
 */
function loadCuratedPackages() {
  try {
    const names = fs.readFileSync(CURATED_FILE, 'utf8')
      .split(/\r?\n/)
      .map((line) => line.trim())
      .filter((line) => line.length > 0 && !line.startsWith('#'));
    return new Set(names);
  } catch {
    return new Set();
  }
}
156
+
157
/**
 * Entry point: build a 200-package stratified random npm sample in three phases.
 *   1. Collect candidates via keyword search (seeded shuffle, random offsets).
 *   2. Classify candidates by dependency count into STRATA buckets
 *      (over-collecting 2x quota per stratum to allow backfill).
 *   3. Write the sample file, backfilling unfilled large/vlarge quota from
 *      small/medium overflow, then verify no overlap with the curated corpus.
 * CLI: --seed N (default 42), --output path (default DEFAULT_OUTPUT).
 */
async function main() {
  const args = process.argv.slice(2);
  let seed = 42;
  let outputPath = DEFAULT_OUTPUT;

  // Minimal flag parsing; each flag consumes the following token.
  for (let i = 0; i < args.length; i++) {
    if (args[i] === '--seed' && args[i + 1]) { seed = parseInt(args[i + 1], 10); i++; }
    if (args[i] === '--output' && args[i + 1]) { outputPath = args[i + 1]; i++; }
  }

  const rng = mulberry32(seed);
  const curated = loadCuratedPackages();
  console.log(` Loaded ${curated.size} curated packages to exclude`);
  console.log(` Seed: ${seed}`);

  // Phase 1: Collect candidate packages from npm search
  console.log(`\n [1/3] Collecting candidates from npm search...`);
  const candidates = new Map(); // name -> { name, version, description }
  const shuffledKeywords = shuffleArray([...SEARCH_KEYWORDS], rng);

  for (let i = 0; i < shuffledKeywords.length; i++) {
    const keyword = shuffledKeywords[i];
    // Progress line only when attached to a terminal.
    if (process.stdout.isTTY) {
      process.stdout.write(`\r Searching "${keyword}" (${i + 1}/${shuffledKeywords.length})... `);
    }

    // Search with random offset for diversity
    const offset = Math.floor(rng() * 200);
    const results = await searchNpm(keyword, offset, 250);

    for (const pkg of results) {
      // Exclusion filters: dedupe, curated corpus, @types stubs,
      // deprecated packages, and underscore-prefixed names.
      if (candidates.has(pkg.name)) continue;
      if (curated.has(pkg.name)) continue;
      if (pkg.name.startsWith('@types/')) continue;
      if (pkg.deprecated) continue;
      if (pkg.name.startsWith('_')) continue;

      candidates.set(pkg.name, pkg);
    }

    // Stop early if we have enough candidates
    if (candidates.size >= 2000) break;

    // Rate limiting: ~100ms between requests
    await new Promise(r => setTimeout(r, 100));
  }

  // Clear the progress line.
  if (process.stdout.isTTY) {
    process.stdout.write('\r' + ''.padEnd(80) + '\r');
  }
  console.log(` Collected ${candidates.size} unique candidates`);

  // Phase 2: Classify by dependency count
  // Over-collect: allow 2x quota per stratum to enable backfill
  console.log(`\n [2/3] Classifying by dependency count...`);
  const buckets = { small: [], medium: [], large: [], vlarge: [] };
  const candidateList = shuffleArray([...candidates.keys()], rng);

  // NOTE(review): totalQuota and classified are computed/incremented but never
  // read below (the summary hardcodes 200) — candidates for cleanup.
  const totalQuota = Object.values(STRATA).reduce((s, v) => s + v.quota, 0);
  let classified = 0;
  let processed = 0;
  // Over-collect limit: 2x quota per stratum to provide backfill pool
  const OVER_COLLECT = 2;

  for (const pkgName of candidateList) {
    // Check if all buckets have enough for backfill
    const allOverCollected = Object.entries(STRATA).every(
      ([name, { quota }]) => buckets[name].length >= quota * OVER_COLLECT
    );
    if (allOverCollected) break;

    processed++;
    if (process.stdout.isTTY && processed % 10 === 0) {
      const bucketStatus = Object.entries(buckets).map(([k, v]) => `${k}:${v.length}/${STRATA[k].quota}`).join(' ');
      process.stdout.write(`\r Classifying [${processed}/${candidateList.length}] ${bucketStatus} `);
    }

    // One registry request per candidate; null means fetch failed → skip.
    const info = await getDepCount(pkgName);
    if (!info) continue;

    const stratum = classifyStratum(info.totalDeps);
    if (buckets[stratum].length < STRATA[stratum].quota * OVER_COLLECT) {
      buckets[stratum].push({ name: pkgName, deps: info.totalDeps, stratum });
      classified++;
    }

    // Rate limiting
    await new Promise(r => setTimeout(r, 50));
  }

  if (process.stdout.isTTY) {
    process.stdout.write('\r' + ''.padEnd(80) + '\r');
  }

  // Phase 3: Output with backfill
  // If large/vlarge strata can't meet quota, redistribute remaining slots
  // to small/medium proportionally (reflects real npm distribution).
  console.log(`\n [3/3] Writing results...`);
  const selected = [];
  let deficit = 0;
  for (const [name, { quota }] of Object.entries(STRATA)) {
    const actual = Math.min(buckets[name].length, quota);
    console.log(` ${name}: ${actual}/${quota} packages`);
    selected.push(...buckets[name].slice(0, actual));
    deficit += quota - actual;
  }

  // Backfill deficit from small/medium overflow (priority order, not
  // strictly proportional: small first, then medium).
  if (deficit > 0) {
    console.log(` Backfilling ${deficit} slots from small/medium overflow...`);
    const backfillSources = ['small', 'medium']; // priority order
    for (const src of backfillSources) {
      if (deficit <= 0) break;
      // Overflow = entries beyond the stratum's quota (over-collected pool).
      const overflow = buckets[src].slice(STRATA[src].quota);
      const take = Math.min(overflow.length, deficit);
      if (take > 0) {
        selected.push(...overflow.slice(0, take));
        deficit -= take;
        console.log(` +${take} from ${src} overflow`);
      }
    }
  }

  const totalSelected = selected.length;
  console.log(`\n Total: ${totalSelected}/200 packages`);

  if (totalSelected < 200) {
    console.warn(`\n [WARN] Only ${totalSelected} packages found. Re-run with different --seed or add more search keywords.`);
  }

  // Write output file
  // Use a Set to track already-written packages (avoid duplication from backfill)
  const writtenNames = new Set();
  const header = [
    '# MUAD\'DIB Benign Random Dataset — npm stratified random sample',
    `# Generated: ${new Date().toISOString()}`,
    `# Seed: ${seed}`,
    `# Total: ${totalSelected} packages`,
    '# Strata: small (<10 deps): 80, medium (10-50): 60, large (51-100): 40, vlarge (100+): 20',
    '# Backfill: unfilled large/vlarge slots redistributed to small/medium',
    '# Used by `muaddib evaluate` to measure FPR on representative npm sample',
    ''
  ];

  // Per-stratum sections: only up-to-quota entries; names recorded so the
  // backfill section below can list the remainder without duplicates.
  const lines = [];
  for (const [name, { quota }] of Object.entries(STRATA)) {
    const actual = Math.min(buckets[name].length, quota);
    lines.push(`# === ${name} (${actual}/${quota}) ===`);
    for (const pkg of buckets[name].slice(0, actual)) {
      lines.push(pkg.name);
      writtenNames.add(pkg.name);
    }
    lines.push('');
  }

  // Backfill section (additional packages from overflow)
  const backfillPkgs = selected.filter(p => !writtenNames.has(p.name));
  if (backfillPkgs.length > 0) {
    lines.push(`# === backfill (${backfillPkgs.length}) ===`);
    for (const pkg of backfillPkgs) {
      lines.push(pkg.name);
    }
    lines.push('');
  }

  fs.mkdirSync(path.dirname(outputPath), { recursive: true });
  fs.writeFileSync(outputPath, header.join('\n') + lines.join('\n'));
  console.log(` Written to: ${path.relative(ROOT, outputPath)}`);

  // Verify no overlap with curated
  const overlap = selected.filter(p => curated.has(p.name));
  if (overlap.length > 0) {
    console.error(`\n [ERROR] ${overlap.length} packages overlap with curated corpus: ${overlap.map(p => p.name).join(', ')}`);
  } else {
    console.log(' No overlap with curated corpus');
  }
}
335
+
336
// Run the sampler; surface any unhandled failure and exit non-zero so CI
// callers can detect a failed sampling run.
main().catch(err => {
  console.error(`[ERROR] ${err.message}`);
  process.exit(1);
});
package/src/config.js CHANGED
@@ -205,12 +205,23 @@ function validateConfig(raw) {
205
205
 
206
206
  /**
207
207
  * Resolve which config file to load.
208
- * Priority: --config <path> > .muaddibrc.json at targetPath root
208
+ * Priority: --config <path> > ~/.muaddibrc.json > CWD/.muaddibrc.json (if CWD ≠ targetPath)
209
+ *
210
+ * SECURITY: NEVER auto-detect config from targetPath (the scanned directory).
211
+ * An attacker can place .muaddibrc.json in their npm package with
212
+ * severityWeights: {critical:0, high:0, medium:0, low:0} to neutralize the scanner.
213
+ * Only load config from trusted locations:
214
+ * - Explicit --config <path>
215
+ * - User home directory (~/.muaddibrc.json)
216
+ * - CWD, but ONLY when CWD is different from the scan target
217
+ *
209
218
  * @param {string} targetPath - scan target directory
210
219
  * @param {string|null} configPath - explicit --config path (or null)
211
220
  * @returns {{ config: object|null, warnings: string[], errors: string[], source: string|null }}
212
221
  */
213
222
  function resolveConfig(targetPath, configPath) {
223
+ const warnings = [];
224
+
214
225
  // Explicit --config path
215
226
  if (configPath) {
216
227
  const absPath = path.isAbsolute(configPath) ? configPath : path.resolve(configPath);
@@ -229,22 +240,39 @@ function resolveConfig(targetPath, configPath) {
229
240
  return result;
230
241
  }
231
242
 
232
- // Auto-detect .muaddibrc.json at target root
233
- const rcPath = path.join(targetPath, '.muaddibrc.json');
234
- if (!fs.existsSync(rcPath)) {
235
- return { config: null, warnings: [], errors: [], source: null };
243
+ // SECURITY: Warn if .muaddibrc.json is found INSIDE the scanned package (informational only)
244
+ const targetRcPath = path.join(targetPath, '.muaddibrc.json');
245
+ if (fs.existsSync(targetRcPath)) {
246
+ warnings.push('[SECURITY] .muaddibrc.json found inside scanned package ignored (potential config neutralization attack)');
236
247
  }
237
- const { raw, error } = loadConfigFile(rcPath);
238
- if (error) {
239
- // Auto-detected config with errors is a warning, not a fatal error
240
- return { config: null, warnings: [`[CONFIG] ${error} — .muaddibrc.json ignored`], errors: [], source: null };
241
- }
242
- const result = validateConfig(raw);
243
- if (result.config) {
244
- result.warnings.unshift('Loaded custom thresholds from .muaddibrc.json');
248
+
249
+ // Auto-detect ONLY from safe locations (NOT from scan target)
250
+ const cwd = process.cwd();
251
+ const homedir = require('os').homedir();
252
+ const candidates = [
253
+ path.join(homedir, '.muaddibrc.json'),
254
+ // CWD config only if CWD is NOT the scan target (developer scanning an external package)
255
+ ...(path.resolve(cwd) !== path.resolve(targetPath) ? [path.join(cwd, '.muaddibrc.json')] : [])
256
+ ];
257
+
258
+ for (const rcPath of candidates) {
259
+ if (!fs.existsSync(rcPath)) continue;
260
+ const { raw, error } = loadConfigFile(rcPath);
261
+ if (error) {
262
+ warnings.push(`[CONFIG] ${error} — ${rcPath} ignored`);
263
+ continue;
264
+ }
265
+ const result = validateConfig(raw);
266
+ // Prepend any security warnings accumulated before this config was found
267
+ result.warnings = [...warnings, ...result.warnings];
268
+ if (result.config) {
269
+ result.warnings.unshift(`Loaded custom thresholds from ${rcPath}`);
270
+ }
271
+ result.source = rcPath;
272
+ return result;
245
273
  }
246
- result.source = rcPath;
247
- return result;
274
+
275
+ return { config: null, warnings, errors: [], source: null };
248
276
  }
249
277
 
250
278
  module.exports = { DEFAULTS, loadConfigFile, validateConfig, resolveConfig };
@@ -537,6 +537,12 @@ const PLAYBOOKS = {
537
537
  'Cout de rotation: 0.000005 SOL par changement d\'adresse C2 — censorship-resistant. ' +
538
538
  'Bloquer les connexions vers les RPC Solana. Supprimer le package.',
539
539
 
540
+ module_load_bypass:
541
+ 'CRITIQUE: Module._load() detecte — bypass du module loader interne de Node.js. ' +
542
+ 'Permet de charger dynamiquement des modules (child_process, fs, net) sans passer par require(), ' +
543
+ 'contournant les restrictions et les hooks de chargement. ' +
544
+ 'Supprimer le package immediatement. Auditer les modules charges dynamiquement.',
545
+
540
546
  blockchain_rpc_endpoint:
541
547
  'Endpoint RPC blockchain hardcode detecte (Solana mainnet, Infura Ethereum). ' +
542
548
  'Dans un package non-crypto, cela indique un potentiel canal C2 via blockchain. ' +
@@ -1595,6 +1595,19 @@ const RULES = {
1595
1595
  ],
1596
1596
  mitre: 'T1102'
1597
1597
  },
1598
+ module_load_bypass: {
1599
+ id: 'MUADDIB-AST-056',
1600
+ name: 'Module._load() Internal Loader Bypass',
1601
+ severity: 'CRITICAL',
1602
+ confidence: 'high',
1603
+ description: 'Module._load() detecte — bypass du module loader interne de Node.js pour charger dynamiquement des modules sans passer par require(). Technique d\'evasion contournant les restrictions de chargement de modules.',
1604
+ references: [
1605
+ 'https://nodejs.org/api/modules.html',
1606
+ 'https://attack.mitre.org/techniques/T1059/007/'
1607
+ ],
1608
+ mitre: 'T1059.007'
1609
+ },
1610
+
1598
1611
  blockchain_rpc_endpoint: {
1599
1612
  id: 'MUADDIB-AST-055',
1600
1613
  name: 'Hardcoded Blockchain RPC Endpoint',
@@ -240,18 +240,8 @@ async function runSingleSandbox(packageName, options = {}) {
240
240
  proc.on('close', (code) => {
241
241
  clearTimeout(timer);
242
242
 
243
- // Docker-level failure: log error and return clean result
244
- if (code !== 0 && !stdout.includes('---MUADDIB-REPORT-START---')) {
245
- const errLines = stderr.split(/\r?\n/).filter(l => l && !l.includes('[SANDBOX]'));
246
- if (errLines.length > 0) {
247
- console.log(`[SANDBOX] Docker error (exit ${code}): ${errLines[0]}`);
248
- } else {
249
- console.log(`[SANDBOX] Container exited with code ${code} (no output)`);
250
- }
251
- resolve(cleanResult);
252
- return;
253
- }
254
-
243
+ // TIMEOUT FIRST: docker kill causes non-zero exit (code 137/SIGKILL),
244
+ // must check before Docker error handler to avoid returning CLEAN on timeout
255
245
  if (timedOut) {
256
246
  const result = {
257
247
  score: 100,
@@ -269,6 +259,18 @@ async function runSingleSandbox(packageName, options = {}) {
269
259
  return;
270
260
  }
271
261
 
262
+ // Docker-level failure (non-timeout): log error and return clean result
263
+ if (code !== 0 && !stdout.includes('---MUADDIB-REPORT-START---')) {
264
+ const errLines = stderr.split(/\r?\n/).filter(l => l && !l.includes('[SANDBOX]'));
265
+ if (errLines.length > 0) {
266
+ console.log(`[SANDBOX] Docker error (exit ${code}): ${errLines[0]}`);
267
+ } else {
268
+ console.log(`[SANDBOX] Container exited with code ${code} (no output)`);
269
+ }
270
+ resolve(cleanResult);
271
+ return;
272
+ }
273
+
272
274
  // Parse JSON from container stdout using delimiter
273
275
  let report;
274
276
  try {
@@ -281,6 +281,40 @@ function resolveStringConcat(node) {
281
281
  return null;
282
282
  }
283
283
 
284
/**
 * Like resolveStringConcat, but additionally resolves Identifier nodes via
 * a stringVarValues Map (variable name → known string value).
 * Used for double-indirection patterns: var a='ev',b='al'; globalThis[a+b]()
 * @param {object|null} node - ESTree AST node
 * @param {Map<string,string>} stringVarValues - known string variable values
 * @returns {string|null} fully-resolved string, or null when any part is unknown
 */
function resolveStringConcatWithVars(node, stringVarValues) {
  if (!node) return null;
  switch (node.type) {
    case 'Literal':
      return typeof node.value === 'string' ? node.value : null;
    case 'Identifier':
      // Known string variable → its tracked value; unknown → unresolvable.
      return stringVarValues && stringVarValues.has(node.name)
        ? stringVarValues.get(node.name)
        : null;
    case 'TemplateLiteral': {
      // Interleave quasis and recursively-resolved expressions.
      const pieces = [];
      for (let i = 0; i < node.quasis.length; i++) {
        pieces.push(node.quasis[i].value.raw);
        if (i < node.expressions.length) {
          const part = resolveStringConcatWithVars(node.expressions[i], stringVarValues);
          if (part === null) return null;
          pieces.push(part);
        }
      }
      return pieces.join('');
    }
    case 'BinaryExpression': {
      if (node.operator !== '+') return null;
      const lhs = resolveStringConcatWithVars(node.left, stringVarValues);
      if (lhs === null) return null;
      const rhs = resolveStringConcatWithVars(node.right, stringVarValues);
      return rhs === null ? null : lhs + rhs;
    }
    default:
      return null;
  }
}
317
+
284
318
  /**
285
319
  * Extract string value from a node, including BinaryExpression resolution.
286
320
  * Falls back to extractStringValue if concat resolution fails.
@@ -383,7 +417,7 @@ function handleVariableDeclarator(node, ctx) {
383
417
  ctx.staticAssignments.add(node.id.name);
384
418
  }
385
419
 
386
- // Track dynamic require vars
420
+ // Track dynamic require vars + module aliases
387
421
  if (node.init?.type === 'CallExpression') {
388
422
  const initCallName = getCallName(node.init);
389
423
  if (initCallName === 'require' && node.init.arguments.length > 0) {
@@ -391,6 +425,12 @@ function handleVariableDeclarator(node, ctx) {
391
425
  if (arg.type !== 'Literal') {
392
426
  ctx.dynamicRequireVars.add(node.id.name);
393
427
  }
428
+ // Track require('module') or require('node:module') aliases for Module._load detection
429
+ const reqVal = extractStringValueDeep(arg);
430
+ if (reqVal === 'module') {
431
+ if (!ctx.moduleAliases) ctx.moduleAliases = new Set();
432
+ ctx.moduleAliases.add(node.id.name);
433
+ }
394
434
  }
395
435
  }
396
436
  // Track variables assigned dangerous command strings
@@ -717,6 +757,35 @@ function handleCallExpression(node, ctx) {
717
757
  }
718
758
  }
719
759
 
760
+ // Detect process.mainModule.require('child_process') — module system bypass
761
+ if (node.callee.type === 'MemberExpression' &&
762
+ node.callee.property?.type === 'Identifier' && node.callee.property.name === 'require' &&
763
+ node.callee.object?.type === 'MemberExpression' &&
764
+ node.callee.object.object?.type === 'Identifier' &&
765
+ node.callee.object.object.name === 'process' &&
766
+ node.callee.object.property?.type === 'Identifier' &&
767
+ node.callee.object.property.name === 'mainModule' &&
768
+ node.arguments.length > 0) {
769
+ const arg = node.arguments[0];
770
+ const modName = extractStringValueDeep(arg);
771
+ const DANGEROUS_MODS = ['child_process', 'fs', 'net', 'dns', 'http', 'https', 'tls'];
772
+ if (modName && DANGEROUS_MODS.includes(modName)) {
773
+ ctx.threats.push({
774
+ type: 'dynamic_require',
775
+ severity: 'CRITICAL',
776
+ message: `process.mainModule.require('${modName}') — bypasses module system restrictions.`,
777
+ file: ctx.relFile
778
+ });
779
+ } else {
780
+ ctx.threats.push({
781
+ type: 'dynamic_require',
782
+ severity: 'HIGH',
783
+ message: `process.mainModule.require() detected — module system bypass.`,
784
+ file: ctx.relFile
785
+ });
786
+ }
787
+ }
788
+
720
789
  // Detect exec/execSync with dangerous shell commands (direct or via MemberExpression)
721
790
  const execName = callName === 'exec' || callName === 'execSync' ? callName : null;
722
791
  const memberExec = !execName && node.callee.type === 'MemberExpression' &&
@@ -1490,22 +1559,45 @@ function handleCallExpression(node, ctx) {
1490
1559
  });
1491
1560
  }
1492
1561
  }
1493
- // Detect computed call on globalThis/global alias with variable property
1562
+ // Detect computed call on globalThis/global alias with variable or expression property
1494
1563
  const obj = node.callee.object;
1495
- if (prop.type === 'Identifier' && obj?.type === 'Identifier' &&
1564
+ if (obj?.type === 'Identifier' &&
1496
1565
  (ctx.globalThisAliases.has(obj.name) || obj.name === 'globalThis' || obj.name === 'global')) {
1497
- ctx.hasEvalInFile = true;
1498
- // Resolve variable value via stringVarValues (e.g., const f = 'eval'; globalThis[f]())
1499
- const resolvedValue = ctx.stringVarValues.get(prop.name);
1500
- const isEvalOrFunction = resolvedValue === 'eval' || resolvedValue === 'Function';
1501
- ctx.threats.push({
1502
- type: 'dangerous_call_eval',
1503
- severity: isEvalOrFunction ? 'CRITICAL' : 'HIGH',
1504
- message: isEvalOrFunction
1505
- ? `Resolved indirect ${resolvedValue}() via computed property (${obj.name}[${prop.name}="${resolvedValue}"]) — confirmed eval evasion.`
1506
- : `Dynamic global dispatch via computed property (${obj.name}[${prop.name}]) — likely indirect eval evasion.`,
1507
- file: ctx.relFile
1508
- });
1566
+ if (prop.type === 'Identifier') {
1567
+ ctx.hasEvalInFile = true;
1568
+ // Resolve variable value via stringVarValues (e.g., const f = 'eval'; globalThis[f]())
1569
+ const resolvedValue = ctx.stringVarValues.get(prop.name);
1570
+ const isEvalOrFunction = resolvedValue === 'eval' || resolvedValue === 'Function';
1571
+ ctx.threats.push({
1572
+ type: 'dangerous_call_eval',
1573
+ severity: isEvalOrFunction ? 'CRITICAL' : 'HIGH',
1574
+ message: isEvalOrFunction
1575
+ ? `Resolved indirect ${resolvedValue}() via computed property (${obj.name}[${prop.name}="${resolvedValue}"]) — confirmed eval evasion.`
1576
+ : `Dynamic global dispatch via computed property (${obj.name}[${prop.name}]) — likely indirect eval evasion.`,
1577
+ file: ctx.relFile
1578
+ });
1579
+ } else {
1580
+ // BinaryExpression, TemplateLiteral, or other computed expression
1581
+ // Try to resolve via stringVarValues (e.g., var a='ev',b='al'; globalThis[a+b]())
1582
+ const resolvedProp = resolveStringConcatWithVars(prop, ctx.stringVarValues);
1583
+ if (resolvedProp === 'eval' || resolvedProp === 'Function') {
1584
+ ctx.hasEvalInFile = true;
1585
+ ctx.threats.push({
1586
+ type: 'dangerous_call_eval',
1587
+ severity: 'CRITICAL',
1588
+ message: `Resolved indirect ${resolvedProp}() via computed expression (${obj.name}[...="${resolvedProp}"]) — concat evasion.`,
1589
+ file: ctx.relFile
1590
+ });
1591
+ } else if (resolvedProp !== null) {
1592
+ ctx.hasEvalInFile = true;
1593
+ ctx.threats.push({
1594
+ type: 'dangerous_call_eval',
1595
+ severity: 'HIGH',
1596
+ message: `Dynamic global dispatch via computed expression (${obj.name}[...="${resolvedProp}"]).`,
1597
+ file: ctx.relFile
1598
+ });
1599
+ }
1600
+ }
1509
1601
  }
1510
1602
  }
1511
1603
 
@@ -1609,6 +1701,24 @@ function handleCallExpression(node, ctx) {
1609
1701
  }
1610
1702
  }
1611
1703
 
1704
+ // Module._load() — internal module loader bypass (ANSSI audit v2)
1705
+ if (propName === '_load') {
1706
+ const calleeObj = node.callee.object;
1707
+ const isModuleIdentifier = calleeObj.type === 'Identifier' &&
1708
+ (calleeObj.name === 'Module' || calleeObj.name === 'module' ||
1709
+ (ctx.moduleAliases && ctx.moduleAliases.has(calleeObj.name)));
1710
+ const isMemberChain = calleeObj.type === 'MemberExpression';
1711
+ const isConstructed = calleeObj.type === 'NewExpression' || calleeObj.type === 'CallExpression';
1712
+ if (isModuleIdentifier || isMemberChain || isConstructed || ctx.hasModuleImport) {
1713
+ ctx.threats.push({
1714
+ type: 'module_load_bypass',
1715
+ severity: 'CRITICAL',
1716
+ message: 'Module._load() detected — internal module loader bypass for dynamic code loading.',
1717
+ file: ctx.relFile
1718
+ });
1719
+ }
1720
+ }
1721
+
1612
1722
  // SANDWORM_MODE: Track writeFileSync/writeFile to temp paths
1613
1723
  if (propName === 'writeFileSync' || propName === 'writeFile') {
1614
1724
  const arg = node.arguments && node.arguments[0];