muaddib-scanner 1.6.8 → 1.6.9

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
package/README.fr.md CHANGED
@@ -202,10 +202,12 @@ muaddib sandbox <nom-package>
202
202
  muaddib sandbox <nom-package> --strict
203
203
  ```
204
204
 
205
- Analyse un package dans un container Docker isolé. Capture :
206
- - Connexions réseau (détecte exfiltration vers hosts suspects)
207
- - Accès fichiers (détecte vol credentials : .npmrc, .ssh, .aws, .env)
208
- - Spawn de processus (détecte reverse shells, abus curl/wget)
205
+ Analyse un package dans un container Docker isolé avec monitoring multi-couches :
206
+ - **Traçage système** (strace) : accès fichiers, spawn de processus, monitoring syscalls
207
+ - **Capture réseau** (tcpdump) : résolutions DNS avec IPs résolues, requêtes HTTP (méthode, host, path, body), détection TLS SNI
208
+ - **Diff filesystem** : snapshot avant/après install, détecte les fichiers créés dans des emplacements suspects
209
+ - **Détection exfiltration de données** : 16 patterns sensibles (tokens, credentials, clés SSH, clés privées, .env)
210
+ - **Moteur de scoring** : score de risque 0-100 basé sur la sévérité des comportements
209
211
 
210
212
  Utilisez `--strict` pour bloquer tout trafic réseau sortant non essentiel via iptables.
211
213
 
package/README.md CHANGED
@@ -202,10 +202,12 @@ muaddib sandbox <package-name>
202
202
  muaddib sandbox <package-name> --strict
203
203
  ```
204
204
 
205
- Analyzes a package in an isolated Docker container. Captures:
206
- - Network connections (detects exfiltration to suspicious hosts)
207
- - File access (detects credential theft: .npmrc, .ssh, .aws, .env)
208
- - Process spawns (detects reverse shells, curl/wget abuse)
205
+ Analyzes a package in an isolated Docker container with multi-layer monitoring:
206
+ - **System tracing** (strace): file access, process spawns, syscall monitoring
207
+ - **Network capture** (tcpdump): DNS resolutions with resolved IPs, HTTP requests (method, host, path, body), TLS SNI detection
208
+ - **Filesystem diff**: snapshot before/after install, detects files created in suspicious locations
209
+ - **Data exfiltration detection**: 16 sensitive patterns (tokens, credentials, SSH keys, private keys, .env)
210
+ - **Scoring engine**: 0-100 risk score based on behavioral severity
209
211
 
210
212
  Use `--strict` to block all non-essential outbound network traffic via iptables.
211
213
 
package/bin/muaddib.js CHANGED
@@ -1,5 +1,5 @@
1
1
  #!/usr/bin/env node
2
- const { execSync } = require('child_process');
2
+ const { exec } = require('child_process');
3
3
  const { run } = require('../src/index.js');
4
4
  const { updateIOCs } = require('../src/ioc/updater.js');
5
5
  const { watch } = require('../src/watch.js');
@@ -23,6 +23,7 @@ let explainMode = false;
23
23
  let failLevel = 'high';
24
24
  let webhookUrl = null;
25
25
  let paranoidMode = false;
26
+ let excludeDirs = [];
26
27
 
27
28
  for (let i = 0; i < options.length; i++) {
28
29
  if (options[i] === '--json') {
@@ -41,6 +42,11 @@ for (let i = 0; i < options.length; i++) {
41
42
  } else if (options[i] === '--webhook') {
42
43
  webhookUrl = options[i + 1];
43
44
  i++;
45
+ } else if (options[i] === '--exclude') {
46
+ if (options[i + 1] && !options[i + 1].startsWith('-')) {
47
+ excludeDirs.push(options[i + 1]);
48
+ i++;
49
+ }
44
50
  } else if (options[i] === '--paranoid') {
45
51
  paranoidMode = true;
46
52
  } else if (options[i] === '--strict') {
@@ -50,17 +56,20 @@ for (let i = 0; i < options.length; i++) {
50
56
  }
51
57
  }
52
58
 
53
- // Version check (non-blocking, skip for machine-readable output)
59
+ // Version check (truly non-blocking, skip for machine-readable output)
54
60
  if (!jsonOutput && !sarifOutput) {
55
61
  try {
56
62
  const currentVersion = require('../package.json').version;
57
- const latest = execSync('npm view muaddib-scanner version', { timeout: 5000 }).toString().trim();
58
- if (latest !== currentVersion) {
59
- console.log(`\n[UPDATE] New version available: ${currentVersion} -> ${latest}`);
60
- console.log(` Run: npm install -g muaddib-scanner@latest\n`);
61
- }
63
+ exec('npm view muaddib-scanner version', { timeout: 5000 }, (err, stdout) => {
64
+ if (err) return; // No network or npm unavailable
65
+ const latest = (stdout || '').toString().trim();
66
+ if (latest && latest !== currentVersion) {
67
+ console.log(`\n[UPDATE] New version available: ${currentVersion} -> ${latest}`);
68
+ console.log(` Run: npm install -g muaddib-scanner@latest\n`);
69
+ }
70
+ });
62
71
  } catch {
63
- // No network or npm unavailable, skip silently
72
+ // Skip silently
64
73
  }
65
74
  }
66
75
 
@@ -273,6 +282,7 @@ const helpText = `
273
282
  --fail-on [level] Fail level (critical|high|medium|low)
274
283
  --webhook [url] Discord/Slack webhook
275
284
  --paranoid Ultra-strict mode
285
+ --exclude [dir] Exclude directory from scan (repeatable)
276
286
  --save-dev, -D Install as dev dependency
277
287
  -g, --global Install globally
278
288
  --force Force install despite threats
@@ -300,9 +310,13 @@ if (command === 'version' || command === '--version' || command === '-v') {
300
310
  explain: explainMode,
301
311
  failLevel: failLevel,
302
312
  webhook: webhookUrl,
303
- paranoid: paranoidMode
313
+ paranoid: paranoidMode,
314
+ exclude: excludeDirs
304
315
  }).then(exitCode => {
305
316
  process.exit(exitCode);
317
+ }).catch(err => {
318
+ console.error('[ERROR]', err.message);
319
+ process.exit(1);
306
320
  });
307
321
  } else if (command === 'watch') {
308
322
  watch(target);
@@ -436,7 +450,7 @@ if (command === 'version' || command === '--version' || command === '-v') {
436
450
  console.log(helpText);
437
451
  process.exit(0);
438
452
  } else {
439
- console.log(`Unknown command: ${command}`);
453
+ console.log(`Unknown command: ${String(command).replace(/[\x00-\x1f\x7f-\x9f]/g, '')}`);
440
454
  console.log('Type "muaddib help" to see available commands.');
441
455
  process.exit(1);
442
456
  }
package/package.json CHANGED
@@ -1,6 +1,6 @@
1
1
  {
2
2
  "name": "muaddib-scanner",
3
- "version": "1.6.8",
3
+ "version": "1.6.9",
4
4
  "description": "Supply-chain threat detection & response for npm & PyPI/Python",
5
5
  "main": "src/index.js",
6
6
  "bin": {
@@ -52,7 +52,7 @@
52
52
  },
53
53
  "devDependencies": {
54
54
  "@eslint/js": "9.39.2",
55
- "eslint": "9.39.2",
55
+ "eslint": "10.0.0",
56
56
  "eslint-plugin-security": "^3.0.1",
57
57
  "globals": "17.3.0"
58
58
  }
package/src/daemon.js CHANGED
@@ -3,11 +3,9 @@ const path = require('path');
3
3
  const { run } = require('./index.js');
4
4
 
5
5
  let webhookUrl = null;
6
- let isRunning = false;
7
6
 
8
7
  async function startDaemon(options = {}) {
9
8
  webhookUrl = options.webhook || null;
10
- isRunning = true;
11
9
 
12
10
  console.log(`
13
11
  ╔════════════════════════════════════════════╗
@@ -33,9 +31,8 @@ async function startDaemon(options = {}) {
33
31
 
34
32
  // Keep process alive until SIGINT
35
33
  await new Promise((resolve) => {
36
- process.on('SIGINT', () => {
34
+ process.once('SIGINT', () => {
37
35
  console.log('\n[DAEMON] Arret...');
38
- isRunning = false;
39
36
  cleanup();
40
37
  resolve();
41
38
  });
@@ -70,6 +67,10 @@ function watchDirectory(dir) {
70
67
  }
71
68
 
72
69
  // Surveille la creation de node_modules
70
+ if (process.platform === 'linux') {
71
+ console.log('[DAEMON] Note: recursive fs.watch may not work on Linux');
72
+ }
73
+
73
74
  const dirWatcher = fs.watch(dir, (eventType, filename) => {
74
75
  if (filename === 'node_modules' && eventType === 'rename') {
75
76
  const nmPath = path.join(dir, 'node_modules');
@@ -83,6 +84,9 @@ function watchDirectory(dir) {
83
84
  triggerScan(dir);
84
85
  }
85
86
  });
87
+ dirWatcher.on('error', (err) => {
88
+ console.log(`[DAEMON] Watcher error on ${dir}: ${err.message}`);
89
+ });
86
90
  watchers.push(dirWatcher);
87
91
 
88
92
  return watchers;
@@ -96,7 +100,7 @@ function watchFile(filePath, projectDir) {
96
100
  return null; // File deleted between existsSync and statSync
97
101
  }
98
102
 
99
- return fs.watch(filePath, (eventType) => {
103
+ const watcher = fs.watch(filePath, (eventType) => {
100
104
  if (eventType === 'change') {
101
105
  try {
102
106
  const currentMtime = fs.statSync(filePath).mtime.getTime();
@@ -110,36 +114,52 @@ function watchFile(filePath, projectDir) {
110
114
  }
111
115
  }
112
116
  });
117
+ watcher.on('error', (err) => {
118
+ console.log(`[DAEMON] Watcher error on ${filePath}: ${err.message}`);
119
+ });
120
+ return watcher;
113
121
  }
114
122
 
115
123
  function watchNodeModules(nodeModulesPath, projectDir) {
116
- return fs.watch(nodeModulesPath, { recursive: true }, (eventType, filename) => {
124
+ const watcher = fs.watch(nodeModulesPath, { recursive: true }, (eventType, filename) => {
117
125
  if (filename && filename.includes('package.json')) {
118
126
  console.log(`[DAEMON] Nouveau package detecte: ${filename}`);
119
127
  triggerScan(projectDir);
120
128
  }
121
129
  });
130
+ watcher.on('error', (err) => {
131
+ console.log(`[DAEMON] Watcher error on ${nodeModulesPath}: ${err.message}`);
132
+ });
133
+ return watcher;
122
134
  }
123
135
 
124
- let scanTimeout = null;
125
- let lastScanTime = 0;
136
+ // Per-directory scan state to prevent cross-directory scan suppression
137
+ const scanState = new Map();
138
+
139
+ function getScanState(dir) {
140
+ if (!scanState.has(dir)) {
141
+ scanState.set(dir, { timeout: null, lastScanTime: 0 });
142
+ }
143
+ return scanState.get(dir);
144
+ }
126
145
 
127
146
  function triggerScan(dir) {
128
147
  const now = Date.now();
129
-
148
+ const state = getScanState(dir);
149
+
130
150
  // Debounce: attend 3 secondes avant de scanner
131
- if (scanTimeout) {
132
- clearTimeout(scanTimeout);
151
+ if (state.timeout) {
152
+ clearTimeout(state.timeout);
133
153
  }
134
154
 
135
155
  // Evite les scans trop frequents (minimum 10 secondes entre chaque)
136
- if (now - lastScanTime < 10000) {
137
- scanTimeout = setTimeout(() => triggerScan(dir), 10000 - (now - lastScanTime));
156
+ if (now - state.lastScanTime < 10000) {
157
+ state.timeout = setTimeout(() => triggerScan(dir), 10000 - (now - state.lastScanTime));
138
158
  return;
139
159
  }
140
160
 
141
- scanTimeout = setTimeout(async () => {
142
- lastScanTime = Date.now();
161
+ state.timeout = setTimeout(async () => {
162
+ state.lastScanTime = Date.now();
143
163
  console.log(`\n[DAEMON] ========== SCAN AUTOMATIQUE ==========`);
144
164
  console.log(`[DAEMON] Cible: ${dir}`);
145
165
  console.log(`[DAEMON] Heure: ${new Date().toLocaleTimeString()}\n`);
package/src/diff.js CHANGED
@@ -1,4 +1,4 @@
1
- const { execSync } = require('child_process');
1
+ const { execSync, execFileSync } = require('child_process');
2
2
  const { run } = require('./index.js');
3
3
  const path = require('path');
4
4
  const fs = require('fs');
@@ -18,7 +18,7 @@ function getRecentRefs(targetPath, limit = 10) {
18
18
  stdio: ['pipe', 'pipe', 'pipe']
19
19
  }).trim().split('\n').filter(Boolean).slice(0, 5);
20
20
 
21
- const commits = execSync(`git log --oneline -${limit}`, {
21
+ const commits = execFileSync('git', ['log', '--oneline', `-${Number(limit) || 10}`], {
22
22
  cwd: targetPath,
23
23
  encoding: 'utf8',
24
24
  stdio: ['pipe', 'pipe', 'pipe']
@@ -68,7 +68,7 @@ function resolveRef(targetPath, ref) {
68
68
  return null;
69
69
  }
70
70
  try {
71
- return execSync(`git rev-parse ${ref}`, {
71
+ return execFileSync('git', ['rev-parse', ref], {
72
72
  cwd: targetPath,
73
73
  encoding: 'utf8',
74
74
  stdio: ['pipe', 'pipe', 'pipe']
@@ -106,13 +106,13 @@ function createTempCopyAtCommit(targetPath, commitHash) {
106
106
  const tempDir = fs.mkdtempSync(path.join(os.tmpdir(), 'muaddib-diff-'));
107
107
 
108
108
  try {
109
- // Clone the repo to temp directory (use -- to separate paths from options)
110
- execSync(`git clone --quiet -- "${targetPath}" "${tempDir}"`, {
109
+ // Clone the repo to temp directory (use execFileSync to prevent injection)
110
+ execFileSync('git', ['clone', '--quiet', '--', targetPath, tempDir], {
111
111
  stdio: ['pipe', 'pipe', 'pipe']
112
112
  });
113
113
 
114
114
  // Checkout the specific commit
115
- execSync(`git checkout --quiet ${commitHash}`, {
115
+ execFileSync('git', ['checkout', '--quiet', commitHash], {
116
116
  cwd: tempDir,
117
117
  stdio: ['pipe', 'pipe', 'pipe']
118
118
  });
package/src/hooks-init.js CHANGED
@@ -29,10 +29,12 @@ function detectHookSystem(targetPath) {
29
29
  /**
30
30
  * Initialize hooks for a project
31
31
  */
32
+ const VALID_MODES = ['scan', 'diff'];
33
+
32
34
  async function initHooks(targetPath, options = {}) {
33
35
  const resolvedPath = path.resolve(targetPath);
34
36
  const hookType = options.type || 'auto';
35
- const mode = options.mode || 'scan'; // 'scan' or 'diff'
37
+ const mode = VALID_MODES.includes(options.mode) ? options.mode : 'scan';
36
38
 
37
39
  console.log('\n[MUADDIB] Initializing git hooks...\n');
38
40
 
@@ -193,11 +195,23 @@ fi
193
195
  exit 0
194
196
  `;
195
197
 
196
- // Backup existing hook
198
+ // Backup existing hook (limit to 3 backups)
197
199
  if (fs.existsSync(preCommitPath)) {
198
200
  const backup = `${preCommitPath}.backup.${Date.now()}`;
199
201
  fs.copyFileSync(preCommitPath, backup);
200
202
  console.log(`[INFO] Backed up existing hook to ${backup}`);
203
+
204
+ // Cleanup old backups, keep only 3 most recent
205
+ try {
206
+ const hooksDir = path.dirname(preCommitPath);
207
+ const backups = fs.readdirSync(hooksDir)
208
+ .filter(f => f.startsWith('pre-commit.backup.'))
209
+ .sort()
210
+ .reverse();
211
+ for (const old of backups.slice(3)) {
212
+ fs.unlinkSync(path.join(hooksDir, old));
213
+ }
214
+ } catch { /* ignore cleanup errors */ }
201
215
  }
202
216
 
203
217
  fs.writeFileSync(preCommitPath, hookContent, { mode: 0o755 });
package/src/index.js CHANGED
@@ -16,6 +16,7 @@ const path = require('path');
16
16
  const { scanGitHubActions } = require('./scanner/github-actions.js');
17
17
  const { detectPythonProject, normalizePythonName } = require('./scanner/python.js');
18
18
  const { loadCachedIOCs } = require('./ioc/updater.js');
19
+ const { setExtraExcludes, getExtraExcludes } = require('./utils.js');
19
20
 
20
21
  // ============================================
21
22
  // SCORING CONSTANTS
@@ -52,12 +53,16 @@ const RISK_THRESHOLDS = {
52
53
  // Maximum score (capped)
53
54
  const MAX_RISK_SCORE = 100;
54
55
 
56
+ const MAX_FILE_SIZE = 10 * 1024 * 1024; // 10MB
57
+
55
58
  // Paranoid mode scanner
56
59
  function scanParanoid(targetPath) {
57
60
  const threats = [];
58
61
 
59
62
  function scanFile(filePath) {
60
63
  try {
64
+ const stat = fs.statSync(filePath);
65
+ if (stat.size > MAX_FILE_SIZE) return;
61
66
  const content = fs.readFileSync(filePath, 'utf8');
62
67
 
63
68
  // Ignore URLs (they often contain patterns like .git)
@@ -81,8 +86,9 @@ function scanParanoid(targetPath) {
81
86
  }
82
87
  }
83
88
 
84
- function walkDir(dir) {
85
- const excluded = ['node_modules', '.git', 'test', 'tests', 'src', 'vscode-extension', '.muaddib-cache', 'data', 'iocs', 'docker'];
89
+ function walkDir(dir, depth) {
90
+ if (depth > 50) return; // Max depth guard (IDX-06)
91
+ const excluded = ['node_modules', '.git', '.muaddib-cache', ...getExtraExcludes()];
86
92
  try {
87
93
  const files = fs.readdirSync(dir);
88
94
  for (const file of files) {
@@ -94,7 +100,7 @@ function scanParanoid(targetPath) {
94
100
 
95
101
  if (stat.isDirectory()) {
96
102
  if (!excluded.includes(file)) {
97
- walkDir(fullPath);
103
+ walkDir(fullPath, depth + 1);
98
104
  }
99
105
  } else if (file.endsWith('.js') || file.endsWith('.json') || file.endsWith('.sh')) {
100
106
  scanFile(fullPath);
@@ -105,7 +111,7 @@ function scanParanoid(targetPath) {
105
111
  }
106
112
  }
107
113
 
108
- walkDir(targetPath);
114
+ walkDir(targetPath, 0);
109
115
  return threats;
110
116
  }
111
117
 
@@ -187,6 +193,11 @@ function checkPyPITyposquatting(deps, targetPath) {
187
193
  }
188
194
 
189
195
  async function run(targetPath, options = {}) {
196
+ // Apply --exclude dirs for this scan
197
+ if (options.exclude && options.exclude.length > 0) {
198
+ setExtraExcludes(options.exclude);
199
+ }
200
+
190
201
  // Detect Python project (synchronous, fast file reads)
191
202
  const pythonDeps = detectPythonProject(targetPath);
192
203
 
@@ -240,7 +251,7 @@ async function run(targetPath, options = {}) {
240
251
 
241
252
  // Sandbox integration
242
253
  let sandboxData = null;
243
- if (options.sandboxResult && options.sandboxResult.findings) {
254
+ if (options.sandboxResult && Array.isArray(options.sandboxResult.findings)) {
244
255
  const sr = options.sandboxResult;
245
256
  const pkg = sr.raw_report?.package || 'unknown';
246
257
  sandboxData = {
@@ -451,7 +462,7 @@ async function run(targetPath, options = {}) {
451
462
  }
452
463
 
453
464
  // Send webhook if configured
454
- if (options.webhook && threats.length > 0) {
465
+ if (options.webhook && enrichedThreats.length > 0) {
455
466
  try {
456
467
  await sendWebhook(options.webhook, result);
457
468
  console.log(`[OK] Alert sent to webhook`);
@@ -472,7 +483,10 @@ async function run(targetPath, options = {}) {
472
483
  const levelsToCheck = severityLevels[failLevel] || severityLevels.high;
473
484
  const failingThreats = deduped.filter(t => levelsToCheck.includes(t.severity));
474
485
 
475
- return failingThreats.length;
486
+ // Clear runtime excludes
487
+ setExtraExcludes([]);
488
+
489
+ return Math.min(failingThreats.length, 125);
476
490
  }
477
491
 
478
492
  module.exports = { run };
@@ -118,6 +118,7 @@ function loadStaticIOCs() {
118
118
  }
119
119
 
120
120
  const MAX_REDIRECTS = 5;
121
+ const MAX_RESPONSE_SIZE = 200 * 1024 * 1024; // 200MB
121
122
 
122
123
  function fetchJSON(url, options = {}, redirectCount = 0) {
123
124
  return new Promise((resolve, reject) => {
@@ -135,7 +136,8 @@ function fetchJSON(url, options = {}, redirectCount = 0) {
135
136
 
136
137
  const req = https.request(reqOptions, (res) => {
137
138
  // Handle redirects (with security validation and limit)
138
- if (res.statusCode === 301 || res.statusCode === 302) {
139
+ if ([301, 302, 307, 308].includes(res.statusCode)) {
140
+ res.resume(); // Drain old response before following redirect
139
141
  if (redirectCount >= MAX_REDIRECTS) {
140
142
  reject(new Error('Too many redirects'));
141
143
  return;
@@ -150,7 +152,16 @@ function fetchJSON(url, options = {}, redirectCount = 0) {
150
152
  }
151
153
 
152
154
  let data = '';
153
- res.on('data', chunk => data += chunk);
155
+ let dataSize = 0;
156
+ res.on('data', chunk => {
157
+ dataSize += chunk.length;
158
+ if (dataSize > MAX_RESPONSE_SIZE) {
159
+ req.destroy();
160
+ reject(new Error('Response exceeded maximum size'));
161
+ return;
162
+ }
163
+ data += chunk;
164
+ });
154
165
  res.on('end', () => {
155
166
  try {
156
167
  resolve({ status: res.statusCode, data: JSON.parse(data) });
@@ -165,11 +176,11 @@ function fetchJSON(url, options = {}, redirectCount = 0) {
165
176
  req.destroy();
166
177
  reject(new Error('Timeout'));
167
178
  });
168
-
179
+
169
180
  if (options.body) {
170
181
  req.write(JSON.stringify(options.body));
171
182
  }
172
-
183
+
173
184
  req.end();
174
185
  });
175
186
  }
@@ -188,7 +199,8 @@ function fetchText(url, redirectCount = 0) {
188
199
 
189
200
  const req = https.request(reqOptions, (res) => {
190
201
  // Handle redirects (with security validation and limit)
191
- if (res.statusCode === 301 || res.statusCode === 302) {
202
+ if ([301, 302, 307, 308].includes(res.statusCode)) {
203
+ res.resume(); // Drain old response before following redirect
192
204
  if (redirectCount >= MAX_REDIRECTS) {
193
205
  reject(new Error('Too many redirects'));
194
206
  return;
@@ -203,7 +215,16 @@ function fetchText(url, redirectCount = 0) {
203
215
  }
204
216
 
205
217
  let data = '';
206
- res.on('data', chunk => data += chunk);
218
+ let dataSize = 0;
219
+ res.on('data', chunk => {
220
+ dataSize += chunk.length;
221
+ if (dataSize > MAX_RESPONSE_SIZE) {
222
+ req.destroy();
223
+ reject(new Error('Response exceeded maximum size'));
224
+ return;
225
+ }
226
+ data += chunk;
227
+ });
207
228
  res.on('end', () => {
208
229
  resolve({ status: res.statusCode, data: data });
209
230
  });
@@ -232,7 +253,8 @@ function fetchBuffer(url, redirectCount = 0) {
232
253
  };
233
254
 
234
255
  const req = https.request(reqOptions, (res) => {
235
- if (res.statusCode === 301 || res.statusCode === 302) {
256
+ if ([301, 302, 307, 308].includes(res.statusCode)) {
257
+ res.resume(); // Drain response body before following redirect
236
258
  if (redirectCount >= MAX_REDIRECTS) {
237
259
  reject(new Error('Too many redirects'));
238
260
  return;
@@ -247,12 +269,22 @@ function fetchBuffer(url, redirectCount = 0) {
247
269
  }
248
270
 
249
271
  if (res.statusCode !== 200) {
272
+ res.resume(); // Drain response body on error
250
273
  reject(new Error('HTTP ' + res.statusCode));
251
274
  return;
252
275
  }
253
276
 
254
277
  const chunks = [];
255
- res.on('data', chunk => chunks.push(chunk));
278
+ let received = 0;
279
+ res.on('data', chunk => {
280
+ received += chunk.length;
281
+ if (received > MAX_RESPONSE_SIZE) {
282
+ req.destroy();
283
+ reject(new Error('Response exceeded maximum size'));
284
+ return;
285
+ }
286
+ chunks.push(chunk);
287
+ });
256
288
  res.on('end', () => resolve(Buffer.concat(chunks)));
257
289
  });
258
290
 
@@ -283,7 +315,8 @@ function fetchBufferWithProgress(url, label, redirectCount = 0) {
283
315
  };
284
316
 
285
317
  const req = https.request(reqOptions, (res) => {
286
- if (res.statusCode === 301 || res.statusCode === 302) {
318
+ if ([301, 302, 307, 308].includes(res.statusCode)) {
319
+ res.resume(); // Drain response body before following redirect
287
320
  if (redirectCount >= MAX_REDIRECTS) {
288
321
  reject(new Error('Too many redirects'));
289
322
  return;
@@ -298,6 +331,7 @@ function fetchBufferWithProgress(url, label, redirectCount = 0) {
298
331
  }
299
332
 
300
333
  if (res.statusCode !== 200) {
334
+ res.resume(); // Drain response body on error
301
335
  reject(new Error('HTTP ' + res.statusCode));
302
336
  return;
303
337
  }
@@ -313,6 +347,12 @@ function fetchBufferWithProgress(url, label, redirectCount = 0) {
313
347
  res.on('data', (chunk) => {
314
348
  chunks.push(chunk);
315
349
  received += chunk.length;
350
+ if (received > MAX_RESPONSE_SIZE) {
351
+ req.destroy();
352
+ spinner.fail('Download exceeded maximum size');
353
+ reject(new Error('Response exceeded maximum size'));
354
+ return;
355
+ }
316
356
  const mb = Math.round(received / 1024 / 1024);
317
357
  if (totalMb) {
318
358
  spinner.update('Downloading ' + label + '... ' + mb + 'MB/' + totalMb + 'MB');
@@ -627,7 +667,7 @@ async function scrapeOSSFMaliciousPackages(knownIds) {
627
667
  for (const result of results) {
628
668
  if (!result || result.status !== 200 || !result.data) continue;
629
669
  const parsed = parseOSVEntry(result.data, 'ossf-malicious');
630
- packages.push(...parsed);
670
+ for (const p of parsed) packages.push(p);
631
671
  }
632
672
 
633
673
  // Progress
@@ -689,7 +729,7 @@ async function scrapeOSVDataDump() {
689
729
  const content = entry.getData().toString('utf8');
690
730
  const vuln = JSON.parse(content);
691
731
  const parsed = parseOSVEntry(vuln, 'osv-malicious');
692
- packages.push(...parsed);
732
+ for (const p of parsed) packages.push(p);
693
733
 
694
734
  // Track known IDs so OSSF can skip them
695
735
  knownIds.add(vuln.id || path.basename(name, '.json'));
@@ -745,7 +785,7 @@ async function scrapeOSVPyPIDataDump() {
745
785
  const content = entry.getData().toString('utf8');
746
786
  const vuln = JSON.parse(content);
747
787
  const parsed = parseOSVEntry(vuln, 'osv-malicious-pypi', 'PyPI');
748
- packages.push(...parsed);
788
+ for (const p of parsed) packages.push(p);
749
789
  malCount++;
750
790
  } catch {
751
791
  // Skip unparseable entries
@@ -1121,15 +1161,19 @@ async function runScraper() {
1121
1161
  'snyk-known'
1122
1162
  ];
1123
1163
 
1124
- // Save enriched (full) IOCs
1164
+ // Save enriched (full) IOCs — atomic write via .tmp + rename
1125
1165
  const saveSpinner = new Spinner();
1126
1166
  saveSpinner.start('Saving IOCs...');
1127
- fs.writeFileSync(IOC_FILE, JSON.stringify(existingIOCs, null, 2));
1167
+ const tmpIOCFile = IOC_FILE + '.tmp';
1168
+ fs.writeFileSync(tmpIOCFile, JSON.stringify(existingIOCs, null, 2));
1169
+ fs.renameSync(tmpIOCFile, IOC_FILE);
1128
1170
 
1129
- // Save compact IOCs (lightweight, shipped in npm)
1171
+ // Save compact IOCs (lightweight, shipped in npm) — atomic write
1130
1172
  saveSpinner.update('Generating compact IOCs...');
1131
1173
  const compactIOCs = generateCompactIOCs(existingIOCs);
1132
- fs.writeFileSync(COMPACT_IOC_FILE, JSON.stringify(compactIOCs));
1174
+ const tmpCompactFile = COMPACT_IOC_FILE + '.tmp';
1175
+ fs.writeFileSync(tmpCompactFile, JSON.stringify(compactIOCs));
1176
+ fs.renameSync(tmpCompactFile, COMPACT_IOC_FILE);
1133
1177
  saveSpinner.succeed('Saved IOCs + compact format');
1134
1178
 
1135
1179
  // Display summary
@@ -71,7 +71,10 @@ async function updateIOCs() {
71
71
  delete baseIOCs._markerSet;
72
72
  delete baseIOCs._fileSet;
73
73
 
74
- fs.writeFileSync(CACHE_IOC_FILE, JSON.stringify(baseIOCs));
74
+ // Atomic write: write to .tmp then rename (UP-001)
75
+ const tmpFile = CACHE_IOC_FILE + '.tmp';
76
+ fs.writeFileSync(tmpFile, JSON.stringify(baseIOCs));
77
+ fs.renameSync(tmpFile, CACHE_IOC_FILE);
75
78
 
76
79
  const totalNpm = baseIOCs.packages.length;
77
80
  const totalPyPI = (baseIOCs.pypi_packages || []).length;
@@ -148,7 +151,7 @@ function mergeIOCs(target, source) {
148
151
  // Cache to avoid reloading IOCs on each call
149
152
  let cachedIOCsResult = null;
150
153
  let cachedIOCsTime = 0;
151
- const CACHE_TTL = 60000; // 1 minute
154
+ const CACHE_TTL = 10000; // 10 seconds
152
155
 
153
156
  function loadCachedIOCs() {
154
157
  // Return cache if still valid
@@ -343,8 +346,11 @@ function expandCompactIOCs(compact) {
343
346
  const defaultSev = compact.defaultSeverity || 'critical';
344
347
  const overrides = compact.severityOverrides || {};
345
348
 
346
- // Expand npm wildcards
349
+ // Expand npm wildcards (deduplicate via Set)
350
+ const seenWildcards = new Set();
347
351
  for (const name of compact.wildcards || []) {
352
+ if (seenWildcards.has(name)) continue;
353
+ seenWildcards.add(name);
348
354
  const severity = (overrides[name] && overrides[name]['*']) || defaultSev;
349
355
  packages.push({ name: name, version: '*', severity: severity });
350
356
  }
@@ -381,4 +387,9 @@ function expandCompactIOCs(compact) {
381
387
  };
382
388
  }
383
389
 
384
- module.exports = { updateIOCs, loadCachedIOCs, generateCompactIOCs, expandCompactIOCs };
390
+ function invalidateCache() {
391
+ cachedIOCsResult = null;
392
+ cachedIOCsTime = 0;
393
+ }
394
+
395
+ module.exports = { updateIOCs, loadCachedIOCs, invalidateCache, generateCompactIOCs, expandCompactIOCs };