muaddib-scanner 2.3.2 → 2.3.3

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
package/src/monitor.js DELETED
@@ -1,1880 +0,0 @@
1
- const https = require('https');
2
- const fs = require('fs');
3
- const path = require('path');
4
- const os = require('os');
5
- const { run } = require('./index.js');
6
- const { runSandbox, isDockerAvailable } = require('./sandbox.js');
7
- const { sendWebhook } = require('./webhook.js');
8
- const { detectSuddenLifecycleChange } = require('./temporal-analysis.js');
9
- const { detectSuddenAstChanges } = require('./temporal-ast-diff.js');
10
- const { detectPublishAnomaly } = require('./publish-anomaly.js');
11
- const { detectMaintainerChange } = require('./maintainer-change.js');
12
- const { downloadToFile, extractTarGz, sanitizePackageName } = require('./shared/download.js');
13
- const { MAX_TARBALL_SIZE } = require('./shared/constants.js');
14
-
15
- // Prevent unhandled promise rejections from crashing the monitor process
16
- process.on('unhandledRejection', (reason, promise) => {
17
- console.error('[MONITOR] Unhandled rejection:', reason);
18
- });
19
-
20
- const STATE_FILE = path.join(__dirname, '..', 'data', 'monitor-state.json');
21
- const ALERTS_FILE = path.join(__dirname, '..', 'data', 'monitor-alerts.json');
22
- const DETECTIONS_FILE = path.join(__dirname, '..', 'data', 'detections.json');
23
- const SCAN_STATS_FILE = path.join(__dirname, '..', 'data', 'scan-stats.json');
24
- const POLL_INTERVAL = 60_000;
25
- const POLL_MAX_BACKOFF = 960_000; // 16 minutes max backoff
26
- const SCAN_TIMEOUT_MS = 180_000; // 3 minutes per package
27
-
28
- // --- Stats counters ---
29
-
30
- const stats = {
31
- scanned: 0,
32
- clean: 0,
33
- suspect: 0,
34
- errors: 0,
35
- totalTimeMs: 0,
36
- lastReportTime: Date.now(),
37
- lastDailyReportDate: null // YYYY-MM-DD (Paris) of last daily report sent
38
- };
39
-
40
- // Track daily suspects for the daily report (name, version, ecosystem, findingsCount)
41
- const dailyAlerts = [];
42
-
43
- // Deduplication: track recently scanned packages (cleared every 24h with daily report)
44
- const recentlyScanned = new Set();
45
-
46
- // Consecutive poll error tracking for exponential backoff
47
- let consecutivePollErrors = 0;
48
-
49
- // --- Scan queue (FIFO, sequential) ---
50
-
51
- const scanQueue = [];
52
-
53
- // --- Sandbox integration ---
54
-
55
- let sandboxAvailable = false;
56
-
57
- function isCanaryEnabled() {
58
- const env = process.env.MUADDIB_MONITOR_CANARY;
59
- if (env !== undefined && env.toLowerCase() === 'false') return false;
60
- return true;
61
- }
62
-
63
- function buildCanaryExfiltrationWebhookEmbed(packageName, version, exfiltrations) {
64
- const exfilLines = exfiltrations.map(e => {
65
- return `**${e.token}** — ${e.foundIn}`;
66
- }).join('\n');
67
-
68
- const npmLink = `https://www.npmjs.com/package/${packageName}`;
69
-
70
- return {
71
- embeds: [{
72
- title: '\uD83D\uDD34 CANARY EXFILTRATION \u2014 CRITICAL',
73
- color: 0xe74c3c,
74
- fields: [
75
- { name: 'Package', value: `[${packageName}](${npmLink})`, inline: true },
76
- { name: 'Version', value: version || 'N/A', inline: true },
77
- { name: 'Severity', value: 'CRITICAL', inline: true },
78
- { name: 'Exfiltrated Tokens', value: exfilLines || 'None', inline: false },
79
- { name: 'Action', value: 'CONFIRMED MALICIOUS \u2014 Do NOT install, report to npm', inline: false }
80
- ],
81
- footer: {
82
- text: `MUAD'DIB Canary Token Analysis | ${new Date().toISOString().replace('T', ' ').replace(/\.\d+Z$/, ' UTC')}`
83
- },
84
- timestamp: new Date().toISOString()
85
- }]
86
- };
87
- }
88
-
89
- function isSandboxEnabled() {
90
- const env = process.env.MUADDIB_MONITOR_SANDBOX;
91
- if (env !== undefined && env.toLowerCase() === 'false') return false;
92
- return true;
93
- }
94
-
95
- function isTemporalEnabled() {
96
- const env = process.env.MUADDIB_MONITOR_TEMPORAL;
97
- if (env !== undefined && env.toLowerCase() === 'false') return false;
98
- return true;
99
- }
100
-
101
- function hasHighOrCritical(result) {
102
- return result.summary.critical > 0 || result.summary.high > 0;
103
- }
104
-
105
- // --- Verbose mode (--verbose sends ALL alerts including temporal/publish/maintainer) ---
106
-
107
- let verboseMode = false;
108
-
109
- function isVerboseMode() {
110
- if (verboseMode) return true;
111
- const env = process.env.MUADDIB_MONITOR_VERBOSE;
112
- return env !== undefined && env.toLowerCase() === 'true';
113
- }
114
-
115
- function setVerboseMode(value) {
116
- verboseMode = !!value;
117
- }
118
-
119
- // --- IOC match types (these are the only static-analysis types that warrant a webhook) ---
120
-
121
- const IOC_MATCH_TYPES = new Set([
122
- 'known_malicious_package',
123
- 'known_malicious_hash',
124
- 'pypi_malicious_package',
125
- 'shai_hulud_marker',
126
- 'shai_hulud_backdoor'
127
- ]);
128
-
129
- function hasIOCMatch(result) {
130
- if (!result || !result.threats) return false;
131
- return result.threats.some(t => IOC_MATCH_TYPES.has(t.type));
132
- }
133
-
134
- // --- Webhook alerting ---
135
-
136
- function getWebhookUrl() {
137
- return process.env.MUADDIB_WEBHOOK_URL || null;
138
- }
139
-
140
- function shouldSendWebhook(result, sandboxResult) {
141
- if (!getWebhookUrl()) return false;
142
-
143
- // If sandbox ran, it is the final arbiter
144
- if (sandboxResult && sandboxResult.score !== undefined) {
145
- return sandboxResult.score > 0;
146
- }
147
-
148
- // No sandbox — only send webhook for confirmed IOC matches
149
- // (known_malicious_package, known_malicious_hash, pypi_malicious_package, etc.)
150
- if (hasIOCMatch(result)) return true;
151
-
152
- return false;
153
- }
154
-
155
- function buildMonitorWebhookPayload(name, version, ecosystem, result, sandboxResult) {
156
- const payload = {
157
- event: 'malicious_package',
158
- package: name,
159
- version,
160
- ecosystem,
161
- timestamp: new Date().toISOString(),
162
- findings: result.threats.map(t => ({
163
- rule: t.rule_id || t.type,
164
- severity: t.severity
165
- }))
166
- };
167
- if (sandboxResult && sandboxResult.score > 0) {
168
- payload.sandbox = {
169
- score: sandboxResult.score,
170
- severity: sandboxResult.severity
171
- };
172
- }
173
- return payload;
174
- }
175
-
176
- function computeRiskLevel(summary) {
177
- if (summary.critical > 0) return 'CRITICAL';
178
- if (summary.high > 0) return 'HIGH';
179
- if (summary.medium > 0) return 'MEDIUM';
180
- if (summary.low > 0) return 'LOW';
181
- return 'CLEAN';
182
- }
183
-
184
- function computeRiskScore(summary) {
185
- const raw = (summary.critical || 0) * 25
186
- + (summary.high || 0) * 15
187
- + (summary.medium || 0) * 5
188
- + (summary.low || 0) * 1;
189
- return Math.min(raw, 100);
190
- }
191
-
192
- async function trySendWebhook(name, version, ecosystem, result, sandboxResult) {
193
- if (!shouldSendWebhook(result, sandboxResult)) {
194
- if (sandboxResult && sandboxResult.score === 0) {
195
- console.log(`[MONITOR] FALSE POSITIVE (sandbox clean): ${name}@${version}`);
196
- }
197
- return;
198
- }
199
- const url = getWebhookUrl();
200
- const payload = buildMonitorWebhookPayload(name, version, ecosystem, result, sandboxResult);
201
- const webhookData = {
202
- target: `${ecosystem}/${name}@${version}`,
203
- timestamp: payload.timestamp,
204
- ecosystem,
205
- summary: {
206
- ...result.summary,
207
- riskLevel: computeRiskLevel(result.summary),
208
- riskScore: computeRiskScore(result.summary)
209
- },
210
- threats: result.threats
211
- };
212
- if (sandboxResult && sandboxResult.score > 0) {
213
- webhookData.sandbox = {
214
- score: sandboxResult.score,
215
- severity: sandboxResult.severity
216
- };
217
- }
218
- try {
219
- await sendWebhook(url, webhookData);
220
- console.log(`[MONITOR] Webhook sent for ${name}@${version}`);
221
- } catch (err) {
222
- console.error(`[MONITOR] Webhook failed for ${name}@${version}: ${err.message}`);
223
- }
224
- }
225
-
226
- // --- Temporal analysis integration ---
227
-
228
- function buildTemporalWebhookEmbed(temporalResult) {
229
- const findings = temporalResult.findings || [];
230
- const topFinding = findings[0] || {};
231
- const severity = topFinding.severity || 'HIGH';
232
- const color = severity === 'CRITICAL' ? 0xe74c3c : 0xe67e22;
233
- const emoji = severity === 'CRITICAL' ? '\uD83D\uDD34' : '\uD83D\uDFE0';
234
-
235
- const changeLines = findings.map(f => {
236
- const action = f.type === 'lifecycle_added' ? 'ADDED' : 'MODIFIED';
237
- const value = f.type === 'lifecycle_modified' ? f.newValue : f.value;
238
- return `**${f.script}** script ${action}: \`${value}\``;
239
- }).join('\n');
240
-
241
- const pkgName = temporalResult.packageName;
242
- const npmLink = `https://www.npmjs.com/package/${pkgName}`;
243
-
244
- return {
245
- embeds: [{
246
- title: `${emoji} TEMPORAL ANOMALY \u2014 ${severity}`,
247
- color: color,
248
- fields: [
249
- { name: 'Package', value: `[${pkgName}](${npmLink})`, inline: true },
250
- { name: 'Version Change', value: `${temporalResult.previousVersion} \u2192 ${temporalResult.latestVersion}`, inline: true },
251
- { name: 'Severity', value: severity, inline: true },
252
- { name: 'Changes Detected', value: changeLines || 'None', inline: false },
253
- { name: 'Published', value: temporalResult.metadata.latestPublishedAt || 'unknown', inline: true },
254
- { name: 'Action', value: 'DO NOT INSTALL \u2014 Verify changelog before upgrading', inline: false }
255
- ],
256
- footer: {
257
- text: `MUAD'DIB Temporal Analysis | ${new Date().toISOString().replace('T', ' ').replace(/\.\d+Z$/, ' UTC')}`
258
- },
259
- timestamp: new Date().toISOString()
260
- }]
261
- };
262
- }
263
-
264
- async function tryTemporalAlert(temporalResult, options) {
265
- const force = options && options.force;
266
- // Temporal anomalies are logged only — no webhook unless --verbose or forced
267
- if (!force) {
268
- console.log(`[MONITOR] ANOMALY (logged only): temporal lifecycle change for ${temporalResult.packageName}`);
269
- }
270
- if (!force && !isVerboseMode()) return;
271
-
272
- const url = getWebhookUrl();
273
- if (!url) return;
274
-
275
- const payload = buildTemporalWebhookEmbed(temporalResult);
276
- try {
277
- await sendWebhook(url, payload, { rawPayload: true });
278
- console.log(`[MONITOR] Temporal webhook sent for ${temporalResult.packageName} (verbose mode)`);
279
- } catch (err) {
280
- console.error(`[MONITOR] Temporal webhook failed for ${temporalResult.packageName}: ${err.message}`);
281
- }
282
- }
283
-
284
- function isTemporalAstEnabled() {
285
- const env = process.env.MUADDIB_MONITOR_TEMPORAL_AST;
286
- if (env !== undefined && env.toLowerCase() === 'false') return false;
287
- return true;
288
- }
289
-
290
- function buildTemporalAstWebhookEmbed(astResult) {
291
- const findings = astResult.findings || [];
292
- const topFinding = findings[0] || {};
293
- const severity = topFinding.severity || 'HIGH';
294
- const color = severity === 'CRITICAL' ? 0xe74c3c : severity === 'HIGH' ? 0xe67e22 : 0xf1c40f;
295
- const emoji = severity === 'CRITICAL' ? '\uD83D\uDD34' : severity === 'HIGH' ? '\uD83D\uDFE0' : '\uD83D\uDFE1';
296
-
297
- const changeLines = findings.map(f => {
298
- return `**${f.pattern}** — ${f.severity}: ${f.description}`;
299
- }).join('\n');
300
-
301
- const pkgName = astResult.packageName;
302
- const npmLink = `https://www.npmjs.com/package/${pkgName}`;
303
-
304
- return {
305
- embeds: [{
306
- title: `${emoji} AST ANOMALY \u2014 ${severity}`,
307
- color: color,
308
- fields: [
309
- { name: 'Package', value: `[${pkgName}](${npmLink})`, inline: true },
310
- { name: 'Version Change', value: `${astResult.previousVersion} \u2192 ${astResult.latestVersion}`, inline: true },
311
- { name: 'Severity', value: severity, inline: true },
312
- { name: 'New Dangerous APIs', value: changeLines || 'None', inline: false },
313
- { name: 'Published', value: astResult.metadata.latestPublishedAt || 'unknown', inline: true },
314
- { name: 'Action', value: 'DO NOT UPDATE \u2014 Compare sources: npm diff pkg@old pkg@new', inline: false }
315
- ],
316
- footer: {
317
- text: `MUAD'DIB Temporal AST Analysis | ${new Date().toISOString().replace('T', ' ').replace(/\.\d+Z$/, ' UTC')}`
318
- },
319
- timestamp: new Date().toISOString()
320
- }]
321
- };
322
- }
323
-
324
- async function tryTemporalAstAlert(astResult, options) {
325
- const force = options && options.force;
326
- // AST anomalies are logged only — no webhook unless --verbose or forced
327
- if (!force) {
328
- console.log(`[MONITOR] ANOMALY (logged only): AST change for ${astResult.packageName}`);
329
- }
330
- if (!force && !isVerboseMode()) return;
331
-
332
- const url = getWebhookUrl();
333
- if (!url) return;
334
-
335
- const payload = buildTemporalAstWebhookEmbed(astResult);
336
- try {
337
- await sendWebhook(url, payload, { rawPayload: true });
338
- console.log(`[MONITOR] Temporal AST webhook sent for ${astResult.packageName} (verbose mode)`);
339
- } catch (err) {
340
- console.error(`[MONITOR] Temporal AST webhook failed for ${astResult.packageName}: ${err.message}`);
341
- }
342
- }
343
-
344
- async function runTemporalAstCheck(packageName) {
345
- if (!isTemporalAstEnabled()) return null;
346
- try {
347
- const result = await detectSuddenAstChanges(packageName);
348
- if (result.suspicious) {
349
- const findingsStr = result.findings.map(f => {
350
- return `${f.pattern} (${f.severity})`;
351
- }).join(', ');
352
- console.log(`[MONITOR] AST ANOMALY: ${packageName} v${result.previousVersion} → v${result.latestVersion}: ${findingsStr}`);
353
-
354
- appendAlert({
355
- timestamp: new Date().toISOString(),
356
- name: packageName,
357
- version: result.latestVersion,
358
- ecosystem: 'npm',
359
- temporalAst: true,
360
- findings: result.findings.map(f => ({
361
- rule: f.severity === 'CRITICAL' ? 'MUADDIB-TEMPORAL-AST-001'
362
- : f.severity === 'HIGH' ? 'MUADDIB-TEMPORAL-AST-002'
363
- : 'MUADDIB-TEMPORAL-AST-003',
364
- severity: f.severity,
365
- pattern: f.pattern
366
- }))
367
- });
368
-
369
- dailyAlerts.push({
370
- name: packageName,
371
- version: result.latestVersion,
372
- ecosystem: 'npm',
373
- findingsCount: result.findings.length,
374
- temporalAst: true
375
- });
376
-
377
- // Webhook deferred — sent after sandbox confirms (see resolveTarballAndScan)
378
- }
379
- return result;
380
- } catch (err) {
381
- console.error(`[MONITOR] Temporal AST analysis error for ${packageName}: ${err.message}`);
382
- return null;
383
- }
384
- }
385
-
386
- function isTemporalPublishEnabled() {
387
- const env = process.env.MUADDIB_MONITOR_TEMPORAL_PUBLISH;
388
- if (env !== undefined && env.toLowerCase() === 'false') return false;
389
- return true;
390
- }
391
-
392
- function buildPublishAnomalyWebhookEmbed(publishResult) {
393
- const anomalies = publishResult.anomalies || [];
394
- const topAnomaly = anomalies[0] || {};
395
- const severity = topAnomaly.severity || 'HIGH';
396
- const color = severity === 'CRITICAL' ? 0xe74c3c : severity === 'HIGH' ? 0xe67e22 : 0xf1c40f;
397
- const emoji = severity === 'CRITICAL' ? '\uD83D\uDD34' : severity === 'HIGH' ? '\uD83D\uDFE0' : '\uD83D\uDFE1';
398
-
399
- const anomalyLines = anomalies.map(a => {
400
- return `**${a.type}** — ${a.severity}: ${a.description}`;
401
- }).join('\n');
402
-
403
- const pkgName = publishResult.packageName;
404
- const npmLink = `https://www.npmjs.com/package/${pkgName}`;
405
-
406
- return {
407
- embeds: [{
408
- title: `${emoji} PUBLISH ANOMALY \u2014 ${severity}`,
409
- color: color,
410
- fields: [
411
- { name: 'Package', value: `[${pkgName}](${npmLink})`, inline: true },
412
- { name: 'Versions Analyzed', value: `${publishResult.versionCount || 'N/A'}`, inline: true },
413
- { name: 'Severity', value: severity, inline: true },
414
- { name: 'Anomalies Detected', value: anomalyLines || 'None', inline: false },
415
- { name: 'Action', value: 'Verify maintainer activity on npm/GitHub. Check changelogs for each version.', inline: false }
416
- ],
417
- footer: {
418
- text: `MUAD'DIB Publish Frequency Analysis | ${new Date().toISOString().replace('T', ' ').replace(/\.\d+Z$/, ' UTC')}`
419
- },
420
- timestamp: new Date().toISOString()
421
- }]
422
- };
423
- }
424
-
425
- async function tryTemporalPublishAlert(publishResult, options) {
426
- const force = options && options.force;
427
- // Publish anomalies are logged only — no webhook unless --verbose or forced
428
- if (!force) {
429
- console.log(`[MONITOR] ANOMALY (logged only): publish frequency for ${publishResult.packageName}`);
430
- }
431
- if (!force && !isVerboseMode()) return;
432
-
433
- const url = getWebhookUrl();
434
- if (!url) return;
435
-
436
- const payload = buildPublishAnomalyWebhookEmbed(publishResult);
437
- try {
438
- await sendWebhook(url, payload, { rawPayload: true });
439
- console.log(`[MONITOR] Publish anomaly webhook sent for ${publishResult.packageName} (verbose mode)`);
440
- } catch (err) {
441
- console.error(`[MONITOR] Publish anomaly webhook failed for ${publishResult.packageName}: ${err.message}`);
442
- }
443
- }
444
-
445
- async function runTemporalPublishCheck(packageName) {
446
- if (!isTemporalPublishEnabled()) return null;
447
- try {
448
- const result = await detectPublishAnomaly(packageName);
449
- if (result.suspicious) {
450
- const anomalyStr = result.anomalies.map(a => {
451
- return `${a.type} (${a.severity})`;
452
- }).join(', ');
453
- console.log(`[MONITOR] PUBLISH ANOMALY: ${packageName}: ${anomalyStr}`);
454
-
455
- appendAlert({
456
- timestamp: new Date().toISOString(),
457
- name: packageName,
458
- version: 'N/A',
459
- ecosystem: 'npm',
460
- temporalPublish: true,
461
- findings: result.anomalies.map(a => ({
462
- rule: a.type === 'publish_burst' ? 'MUADDIB-PUBLISH-001'
463
- : a.type === 'dormant_spike' ? 'MUADDIB-PUBLISH-002'
464
- : 'MUADDIB-PUBLISH-003',
465
- severity: a.severity,
466
- type: a.type
467
- }))
468
- });
469
-
470
- dailyAlerts.push({
471
- name: packageName,
472
- version: 'N/A',
473
- ecosystem: 'npm',
474
- findingsCount: result.anomalies.length,
475
- temporalPublish: true
476
- });
477
-
478
- // Webhook deferred — sent after sandbox confirms (see resolveTarballAndScan)
479
- }
480
- return result;
481
- } catch (err) {
482
- console.error(`[MONITOR] Publish frequency analysis error for ${packageName}: ${err.message}`);
483
- return null;
484
- }
485
- }
486
-
487
- function isTemporalMaintainerEnabled() {
488
- const env = process.env.MUADDIB_MONITOR_TEMPORAL_MAINTAINER;
489
- if (env !== undefined && env.toLowerCase() === 'false') return false;
490
- return true;
491
- }
492
-
493
- function buildMaintainerChangeWebhookEmbed(maintainerResult) {
494
- const findings = maintainerResult.findings || [];
495
- const topFinding = findings[0] || {};
496
- const severity = topFinding.severity || 'HIGH';
497
- const color = severity === 'CRITICAL' ? 0xe74c3c : severity === 'HIGH' ? 0xe67e22 : 0xf1c40f;
498
- const emoji = severity === 'CRITICAL' ? '\uD83D\uDD34' : severity === 'HIGH' ? '\uD83D\uDFE0' : '\uD83D\uDFE1';
499
-
500
- const findingLines = findings.map(f => {
501
- let detail = `**${f.type}** — ${f.severity}: ${f.description}`;
502
- if (f.riskAssessment && f.riskAssessment.reasons.length > 0) {
503
- detail += `\nRisk: ${f.riskAssessment.reasons.join(', ')}`;
504
- }
505
- return detail;
506
- }).join('\n');
507
-
508
- const pkgName = maintainerResult.packageName;
509
- const npmLink = `https://www.npmjs.com/package/${pkgName}`;
510
-
511
- return {
512
- embeds: [{
513
- title: `${emoji} MAINTAINER CHANGE \u2014 ${severity}`,
514
- color: color,
515
- fields: [
516
- { name: 'Package', value: `[${pkgName}](${npmLink})`, inline: true },
517
- { name: 'Severity', value: severity, inline: true },
518
- { name: 'Findings', value: findingLines || 'None', inline: false },
519
- { name: 'Action', value: 'Verify legitimacy before installing', inline: false }
520
- ],
521
- footer: {
522
- text: `MUAD'DIB Maintainer Change Analysis | ${new Date().toISOString().replace('T', ' ').replace(/\.\d+Z$/, ' UTC')}`
523
- },
524
- timestamp: new Date().toISOString()
525
- }]
526
- };
527
- }
528
-
529
- async function tryTemporalMaintainerAlert(maintainerResult, options) {
530
- const force = options && options.force;
531
- // Maintainer changes are logged only — no webhook unless --verbose or forced
532
- if (!force) {
533
- console.log(`[MONITOR] ANOMALY (logged only): maintainer change for ${maintainerResult.packageName}`);
534
- }
535
- if (!force && !isVerboseMode()) return;
536
-
537
- const url = getWebhookUrl();
538
- if (!url) return;
539
-
540
- const payload = buildMaintainerChangeWebhookEmbed(maintainerResult);
541
- try {
542
- await sendWebhook(url, payload, { rawPayload: true });
543
- console.log(`[MONITOR] Maintainer change webhook sent for ${maintainerResult.packageName} (verbose mode)`);
544
- } catch (err) {
545
- console.error(`[MONITOR] Maintainer change webhook failed for ${maintainerResult.packageName}: ${err.message}`);
546
- }
547
- }
548
-
549
- async function runTemporalMaintainerCheck(packageName) {
550
- if (!isTemporalMaintainerEnabled()) return null;
551
- try {
552
- const result = await detectMaintainerChange(packageName);
553
- if (result.suspicious) {
554
- const findingsStr = result.findings.map(f => {
555
- return `${f.type} (${f.severity})`;
556
- }).join(', ');
557
- console.log(`[MONITOR] MAINTAINER CHANGE: ${packageName}: ${findingsStr}`);
558
-
559
- appendAlert({
560
- timestamp: new Date().toISOString(),
561
- name: packageName,
562
- version: 'N/A',
563
- ecosystem: 'npm',
564
- temporalMaintainer: true,
565
- findings: result.findings.map(f => ({
566
- rule: f.type === 'new_maintainer' ? 'MUADDIB-MAINTAINER-001'
567
- : f.type === 'suspicious_maintainer' ? 'MUADDIB-MAINTAINER-002'
568
- : f.type === 'sole_maintainer_change' ? 'MUADDIB-MAINTAINER-003'
569
- : 'MUADDIB-MAINTAINER-004',
570
- severity: f.severity,
571
- type: f.type
572
- }))
573
- });
574
-
575
- dailyAlerts.push({
576
- name: packageName,
577
- version: 'N/A',
578
- ecosystem: 'npm',
579
- findingsCount: result.findings.length,
580
- temporalMaintainer: true
581
- });
582
-
583
- // Webhook deferred — sent after sandbox confirms (see resolveTarballAndScan)
584
- }
585
- return result;
586
- } catch (err) {
587
- console.error(`[MONITOR] Maintainer change analysis error for ${packageName}: ${err.message}`);
588
- return null;
589
- }
590
- }
591
-
592
- async function runTemporalCheck(packageName) {
593
- if (!isTemporalEnabled()) return null;
594
- try {
595
- const result = await detectSuddenLifecycleChange(packageName);
596
- if (result.suspicious) {
597
- const findingsStr = result.findings.map(f => {
598
- const action = f.type === 'lifecycle_added' ? 'added' : 'modified';
599
- return `${f.script} ${action} (${f.severity})`;
600
- }).join(', ');
601
- console.log(`[MONITOR] TEMPORAL ANOMALY: ${packageName} v${result.previousVersion} → v${result.latestVersion}: ${findingsStr}`);
602
-
603
- appendAlert({
604
- timestamp: new Date().toISOString(),
605
- name: packageName,
606
- version: result.latestVersion,
607
- ecosystem: 'npm',
608
- temporal: true,
609
- findings: result.findings.map(f => ({
610
- rule: f.type === 'lifecycle_added' ? 'MUADDIB-TEMPORAL-001' : 'MUADDIB-TEMPORAL-003',
611
- severity: f.severity,
612
- script: f.script
613
- }))
614
- });
615
-
616
- dailyAlerts.push({
617
- name: packageName,
618
- version: result.latestVersion,
619
- ecosystem: 'npm',
620
- findingsCount: result.findings.length,
621
- temporal: true
622
- });
623
-
624
- // Webhook deferred — sent after sandbox confirms (see resolveTarballAndScan)
625
- }
626
- return result;
627
- } catch (err) {
628
- console.error(`[MONITOR] Temporal analysis error for ${packageName}: ${err.message}`);
629
- return null;
630
- }
631
- }
632
-
633
- // --- State persistence ---
634
-
635
- function loadState() {
636
- try {
637
- const raw = fs.readFileSync(STATE_FILE, 'utf8');
638
- const state = JSON.parse(raw);
639
- // Restore daily report date so it survives restarts (auto-update, crashes)
640
- if (typeof state.lastDailyReportDate === 'string') {
641
- stats.lastDailyReportDate = state.lastDailyReportDate;
642
- }
643
- return {
644
- npmLastPackage: typeof state.npmLastPackage === 'string' ? state.npmLastPackage : '',
645
- pypiLastPackage: typeof state.pypiLastPackage === 'string' ? state.pypiLastPackage : ''
646
- };
647
- } catch {
648
- return { npmLastPackage: '', pypiLastPackage: '' };
649
- }
650
- }
651
-
652
- function saveState(state) {
653
- try {
654
- const dir = path.dirname(STATE_FILE);
655
- if (!fs.existsSync(dir)) {
656
- fs.mkdirSync(dir, { recursive: true });
657
- }
658
- // Persist daily report date so it survives restarts
659
- const persistedState = {
660
- ...state,
661
- lastDailyReportDate: stats.lastDailyReportDate
662
- };
663
- // Atomic write: write to .tmp then rename (crash-safe)
664
- const tmpFile = STATE_FILE + '.tmp';
665
- fs.writeFileSync(tmpFile, JSON.stringify(persistedState, null, 2), 'utf8');
666
- fs.renameSync(tmpFile, STATE_FILE);
667
- } catch (err) {
668
- console.error(`[MONITOR] Failed to save state: ${err.message}`);
669
- }
670
- }
671
-
672
- // --- HTTP helpers ---
673
-
674
- function httpsGet(url, timeoutMs = 30_000) {
675
- return new Promise((resolve, reject) => {
676
- const req = https.get(url, { timeout: timeoutMs }, (res) => {
677
- if (res.statusCode === 301 || res.statusCode === 302) {
678
- res.resume();
679
- const location = res.headers.location;
680
- if (!location) return reject(new Error(`Redirect without Location for ${url}`));
681
- return httpsGet(location, timeoutMs).then(resolve, reject);
682
- }
683
- if (res.statusCode < 200 || res.statusCode >= 300) {
684
- res.resume();
685
- return reject(new Error(`HTTP ${res.statusCode} for ${url}`));
686
- }
687
- const chunks = [];
688
- res.on('data', (chunk) => chunks.push(chunk));
689
- res.on('end', () => resolve(Buffer.concat(chunks).toString('utf8')));
690
- res.on('error', reject);
691
- });
692
- req.on('error', reject);
693
- req.on('timeout', () => {
694
- req.destroy();
695
- reject(new Error(`Timeout for ${url}`));
696
- });
697
- });
698
- }
699
-
700
- // --- Tarball URL helpers ---
701
-
702
- function getNpmTarballUrl(pkgData) {
703
- return (pkgData.dist && pkgData.dist.tarball) || null;
704
- }
705
-
706
- async function getPyPITarballUrl(packageName) {
707
- const url = `https://pypi.org/pypi/${encodeURIComponent(packageName)}/json`;
708
- const body = await httpsGet(url);
709
- let data;
710
- try {
711
- data = JSON.parse(body);
712
- } catch (e) {
713
- throw new Error(`Invalid JSON from PyPI for ${packageName}: ${e.message}`);
714
- }
715
- const version = (data.info && data.info.version) || '';
716
- const urls = data.urls || [];
717
- // Prefer sdist (.tar.gz)
718
- const sdist = urls.find(u => u.packagetype === 'sdist' && u.url);
719
- if (sdist) return { url: sdist.url, version };
720
- // Fallback: any .tar.gz
721
- const tarGz = urls.find(u => u.url && u.url.endsWith('.tar.gz'));
722
- if (tarGz) return { url: tarGz.url, version };
723
- // Fallback: first available file
724
- if (urls.length > 0 && urls[0].url) return { url: urls[0].url, version };
725
- return { url: null, version };
726
- }
727
-
728
- // --- Alerts persistence ---
729
-
730
- function appendAlert(alert) {
731
- try {
732
- const dir = path.dirname(ALERTS_FILE);
733
- if (!fs.existsSync(dir)) {
734
- fs.mkdirSync(dir, { recursive: true });
735
- }
736
- let alerts = [];
737
- try {
738
- alerts = JSON.parse(fs.readFileSync(ALERTS_FILE, 'utf8'));
739
- } catch {}
740
- alerts.push(alert);
741
- fs.writeFileSync(ALERTS_FILE, JSON.stringify(alerts, null, 2), 'utf8');
742
- } catch (err) {
743
- console.error(`[MONITOR] Failed to save alert: ${err.message}`);
744
- }
745
- }
746
-
747
- // --- Detection time logging ---
748
-
749
- function loadDetections() {
750
- try {
751
- const raw = fs.readFileSync(DETECTIONS_FILE, 'utf8');
752
- const data = JSON.parse(raw);
753
- if (data && Array.isArray(data.detections)) return data;
754
- return { detections: [] };
755
- } catch {
756
- return { detections: [] };
757
- }
758
- }
759
-
760
- function appendDetection(name, version, ecosystem, findings, severity) {
761
- try {
762
- const dir = path.dirname(DETECTIONS_FILE);
763
- if (!fs.existsSync(dir)) {
764
- fs.mkdirSync(dir, { recursive: true });
765
- }
766
- const data = loadDetections();
767
- const key = `${name}@${version}`;
768
- if (data.detections.some(d => `${d.package}@${d.version}` === key)) {
769
- return; // dedup
770
- }
771
- data.detections.push({
772
- package: name,
773
- version,
774
- ecosystem,
775
- first_seen_at: new Date().toISOString(),
776
- findings,
777
- severity,
778
- advisory_at: null,
779
- lead_time_hours: null
780
- });
781
- fs.writeFileSync(DETECTIONS_FILE, JSON.stringify(data, null, 2), 'utf8');
782
- } catch (err) {
783
- console.error(`[MONITOR] Failed to save detection: ${err.message}`);
784
- }
785
- }
786
-
787
- function getDetectionStats() {
788
- const data = loadDetections();
789
- const detections = data.detections;
790
- const total = detections.length;
791
-
792
- const bySeverity = {};
793
- const byEcosystem = {};
794
- for (const d of detections) {
795
- bySeverity[d.severity] = (bySeverity[d.severity] || 0) + 1;
796
- byEcosystem[d.ecosystem] = (byEcosystem[d.ecosystem] || 0) + 1;
797
- }
798
-
799
- const withLeadTime = detections.filter(d => d.advisory_at && d.lead_time_hours != null);
800
- let leadTime = null;
801
- if (withLeadTime.length > 0) {
802
- const hours = withLeadTime.map(d => d.lead_time_hours);
803
- leadTime = {
804
- count: withLeadTime.length,
805
- avg: hours.reduce((a, b) => a + b, 0) / hours.length,
806
- min: Math.min(...hours),
807
- max: Math.max(...hours)
808
- };
809
- }
810
-
811
- return { total, bySeverity, byEcosystem, leadTime };
812
- }
813
-
814
- // --- Scan stats (FP rate tracking) ---
815
-
816
- function loadScanStats() {
817
- try {
818
- const raw = fs.readFileSync(SCAN_STATS_FILE, 'utf8');
819
- const data = JSON.parse(raw);
820
- if (data && data.stats && Array.isArray(data.daily)) return data;
821
- return { stats: { total_scanned: 0, clean: 0, suspect: 0, false_positive: 0, confirmed_malicious: 0 }, daily: [] };
822
- } catch {
823
- return { stats: { total_scanned: 0, clean: 0, suspect: 0, false_positive: 0, confirmed_malicious: 0 }, daily: [] };
824
- }
825
- }
826
-
827
- function updateScanStats(result) {
828
- const data = loadScanStats();
829
- data.stats.total_scanned++;
830
-
831
- if (result === 'clean') data.stats.clean++;
832
- else if (result === 'suspect') data.stats.suspect++;
833
- else if (result === 'false_positive') data.stats.false_positive++;
834
- else if (result === 'confirmed') data.stats.confirmed_malicious++;
835
-
836
- const today = new Date().toISOString().slice(0, 10);
837
- let dayEntry = data.daily.find(d => d.date === today);
838
- if (!dayEntry) {
839
- dayEntry = { date: today, scanned: 0, clean: 0, suspect: 0, false_positive: 0, confirmed: 0, fp_rate: 0 };
840
- data.daily.push(dayEntry);
841
- }
842
- dayEntry.scanned++;
843
-
844
- if (result === 'clean') dayEntry.clean++;
845
- else if (result === 'suspect') dayEntry.suspect++;
846
- else if (result === 'false_positive') dayEntry.false_positive++;
847
- else if (result === 'confirmed') dayEntry.confirmed++;
848
-
849
- const denom = dayEntry.false_positive + dayEntry.confirmed;
850
- dayEntry.fp_rate = denom > 0 ? dayEntry.false_positive / denom : 0;
851
-
852
- try {
853
- const dir = path.dirname(SCAN_STATS_FILE);
854
- if (!fs.existsSync(dir)) fs.mkdirSync(dir, { recursive: true });
855
- fs.writeFileSync(SCAN_STATS_FILE, JSON.stringify(data, null, 2), 'utf8');
856
- } catch (err) {
857
- console.error(`[MONITOR] Failed to save scan stats: ${err.message}`);
858
- }
859
- }
860
-
861
- // --- Bundled tooling false-positive filter ---
862
-
863
- const KNOWN_BUNDLED_FILES = ['yarn.js', 'webpack.js', 'terser.js', 'esbuild.js', 'polyfills.js'];
864
- const KNOWN_BUNDLED_PATHS = ['_next/static/chunks/', '.next/static/chunks/'];
865
-
866
- function isBundledToolingOnly(threats) {
867
- if (threats.length === 0) return false;
868
- return threats.every(t => {
869
- if (!t.file) return false;
870
- const basename = path.basename(t.file);
871
- if (KNOWN_BUNDLED_FILES.includes(basename)) return true;
872
- const normalized = t.file.replace(/\\/g, '/');
873
- return KNOWN_BUNDLED_PATHS.some(p => normalized.includes(p));
874
- });
875
- }
876
-
877
- // --- Package scanning ---
878
-
879
- async function scanPackage(name, version, ecosystem, tarballUrl) {
880
- const startTime = Date.now();
881
- const tmpBase = path.join(os.tmpdir(), 'muaddib-monitor');
882
- if (!fs.existsSync(tmpBase)) fs.mkdirSync(tmpBase, { recursive: true });
883
- const tmpDir = fs.mkdtempSync(path.join(tmpBase, `${sanitizePackageName(name)}-`));
884
-
885
- try {
886
- const tgzPath = path.join(tmpDir, 'package.tar.gz');
887
- await downloadToFile(tarballUrl, tgzPath);
888
-
889
- // Check downloaded size
890
- const fileSize = fs.statSync(tgzPath).size;
891
- if (fileSize > MAX_TARBALL_SIZE) {
892
- console.log(`[MONITOR] SKIP: ${name}@${version} — tarball too large (${(fileSize / 1024 / 1024).toFixed(1)}MB)`);
893
- stats.scanned++;
894
- return;
895
- }
896
-
897
- const extractedDir = extractTarGz(tgzPath, tmpDir);
898
- const result = await run(extractedDir, { _capture: true });
899
-
900
- if (result.summary.total === 0) {
901
- stats.scanned++;
902
- const elapsed = Date.now() - startTime;
903
- stats.totalTimeMs += elapsed;
904
- stats.clean++;
905
- console.log(`[MONITOR] CLEAN: ${name}@${version} (0 findings, ${(elapsed / 1000).toFixed(1)}s)`);
906
- updateScanStats('clean');
907
- return { sandboxResult: null, staticClean: true };
908
- } else {
909
- const counts = [];
910
- if (result.summary.critical > 0) counts.push(`${result.summary.critical} CRITICAL`);
911
- if (result.summary.high > 0) counts.push(`${result.summary.high} HIGH`);
912
- if (result.summary.medium > 0) counts.push(`${result.summary.medium} MEDIUM`);
913
- if (result.summary.low > 0) counts.push(`${result.summary.low} LOW`);
914
-
915
- // Check if all findings come from bundled tooling files
916
- if (isBundledToolingOnly(result.threats)) {
917
- stats.scanned++;
918
- const elapsed = Date.now() - startTime;
919
- stats.totalTimeMs += elapsed;
920
- stats.clean++;
921
- console.log(`[MONITOR] SKIPPED (bundled tooling): ${name}@${version} (${counts.join(', ')})`);
922
-
923
- const alert = {
924
- timestamp: new Date().toISOString(),
925
- name,
926
- version,
927
- ecosystem,
928
- skipped: true,
929
- findings: result.threats.map(t => ({
930
- rule: t.rule_id || t.type,
931
- severity: t.severity,
932
- file: t.file
933
- }))
934
- };
935
- appendAlert(alert);
936
- updateScanStats('clean');
937
- return { sandboxResult: null, staticClean: true };
938
- } else {
939
- stats.suspect++;
940
- console.log(`[MONITOR] SUSPECT: ${name}@${version} (${counts.join(', ')})`);
941
-
942
- // Sandbox: run dynamic analysis on HIGH/CRITICAL findings
943
- let sandboxResult = null;
944
- if (hasHighOrCritical(result) && isSandboxEnabled() && sandboxAvailable) {
945
- try {
946
- const canary = isCanaryEnabled();
947
- console.log(`[MONITOR] SANDBOX: launching for ${name}@${version}${canary ? ' (canary: on)' : ''}...`);
948
- sandboxResult = await runSandbox(name, { canary });
949
- console.log(`[MONITOR] SANDBOX: ${name}@${version} → score: ${sandboxResult.score}, severity: ${sandboxResult.severity}`);
950
-
951
- // Check for canary exfiltration findings and send dedicated alert
952
- const canaryFindings = (sandboxResult.findings || []).filter(f => f.type === 'canary_exfiltration');
953
- if (canaryFindings.length > 0) {
954
- console.log(`[MONITOR] CANARY EXFILTRATION: ${name}@${version} — ${canaryFindings.length} token(s) stolen!`);
955
- const url = getWebhookUrl();
956
- if (url) {
957
- const exfiltrations = canaryFindings.map(f => ({
958
- token: f.detail.match(/exfiltrate (\S+)/)?.[1] || 'UNKNOWN',
959
- foundIn: f.detail
960
- }));
961
- const payload = buildCanaryExfiltrationWebhookEmbed(name, version, exfiltrations);
962
- try {
963
- await sendWebhook(url, payload, { rawPayload: true });
964
- console.log(`[MONITOR] Canary exfiltration webhook sent for ${name}@${version}`);
965
- } catch (webhookErr) {
966
- console.error(`[MONITOR] Canary webhook failed for ${name}@${version}: ${webhookErr.message}`);
967
- }
968
- }
969
- }
970
- } catch (err) {
971
- console.error(`[MONITOR] SANDBOX error for ${name}@${version}: ${err.message}`);
972
- }
973
- }
974
-
975
- stats.scanned++;
976
- const elapsed = Date.now() - startTime;
977
- stats.totalTimeMs += elapsed;
978
- console.log(`[MONITOR] ${name}@${version} total time: ${(elapsed / 1000).toFixed(1)}s`);
979
-
980
- const alert = {
981
- timestamp: new Date().toISOString(),
982
- name,
983
- version,
984
- ecosystem,
985
- findings: result.threats.map(t => ({
986
- rule: t.rule_id || t.type,
987
- severity: t.severity,
988
- file: t.file
989
- }))
990
- };
991
-
992
- if (sandboxResult && sandboxResult.score > 0) {
993
- alert.sandbox = {
994
- score: sandboxResult.score,
995
- severity: sandboxResult.severity,
996
- findings: sandboxResult.findings
997
- };
998
- }
999
-
1000
- appendAlert(alert);
1001
-
1002
- const findingTypes = [...new Set(result.threats.map(t => t.type))];
1003
- const maxSeverity = result.summary.critical > 0 ? 'CRITICAL'
1004
- : result.summary.high > 0 ? 'HIGH'
1005
- : result.summary.medium > 0 ? 'MEDIUM' : 'LOW';
1006
- appendDetection(name, version, ecosystem, findingTypes, maxSeverity);
1007
-
1008
- dailyAlerts.push({ name, version, ecosystem, findingsCount: result.summary.total });
1009
- await trySendWebhook(name, version, ecosystem, result, sandboxResult);
1010
- return { sandboxResult, staticClean: false };
1011
- }
1012
- }
1013
- } catch (err) {
1014
- stats.errors++;
1015
- stats.scanned++;
1016
- stats.totalTimeMs += Date.now() - startTime;
1017
- console.error(`[MONITOR] ERROR scanning ${name}@${version}: ${err.message}`);
1018
- return { sandboxResult: null, staticClean: false };
1019
- } finally {
1020
- // Cleanup temp dir
1021
- try { fs.rmSync(tmpDir, { recursive: true, force: true }); } catch {}
1022
- }
1023
- }
1024
-
1025
- function timeoutPromise(ms) {
1026
- return new Promise((_, reject) => {
1027
- setTimeout(() => reject(new Error(`Scan timeout after ${ms / 1000}s`)), ms);
1028
- });
1029
- }
1030
-
1031
- async function processQueue() {
1032
- while (scanQueue.length > 0) {
1033
- const item = scanQueue.shift();
1034
- try {
1035
- await Promise.race([
1036
- resolveTarballAndScan(item),
1037
- timeoutPromise(SCAN_TIMEOUT_MS)
1038
- ]);
1039
- } catch (err) {
1040
- stats.errors++;
1041
- console.error(`[MONITOR] Queue error for ${item.name}: ${err.message}`);
1042
- }
1043
- }
1044
- }
1045
-
1046
- // --- Stats reporting ---
1047
-
1048
- function reportStats() {
1049
- const avg = stats.scanned > 0 ? (stats.totalTimeMs / stats.scanned / 1000).toFixed(1) : '0.0';
1050
- console.log(`[MONITOR] Stats: ${stats.scanned} scanned, ${stats.clean} clean, ${stats.suspect} suspect, ${stats.errors} error${stats.errors !== 1 ? 's' : ''}, avg ${avg}s/pkg`);
1051
- stats.lastReportTime = Date.now();
1052
- }
1053
-
1054
- const DAILY_REPORT_HOUR = 8; // 08:00 Paris time (Europe/Paris)
1055
-
1056
- /**
1057
- * Returns the current hour in Europe/Paris timezone (0-23).
1058
- */
1059
- function getParisHour() {
1060
- const formatter = new Intl.DateTimeFormat('en-GB', {
1061
- timeZone: 'Europe/Paris',
1062
- hour: 'numeric',
1063
- hour12: false
1064
- });
1065
- return parseInt(formatter.format(new Date()), 10);
1066
- }
1067
-
1068
- /**
1069
- * Returns today's date string in Europe/Paris timezone (YYYY-MM-DD).
1070
- */
1071
- function getParisDateString() {
1072
- const formatter = new Intl.DateTimeFormat('en-CA', { timeZone: 'Europe/Paris' });
1073
- return formatter.format(new Date());
1074
- }
1075
-
1076
- /**
1077
- * Check if the daily report is due: Paris hour matches DAILY_REPORT_HOUR
1078
- * and we haven't already sent one today.
1079
- */
1080
- function isDailyReportDue() {
1081
- const parisHour = getParisHour();
1082
- if (parisHour !== DAILY_REPORT_HOUR) return false;
1083
- const today = getParisDateString();
1084
- return stats.lastDailyReportDate !== today;
1085
- }
1086
-
1087
- function buildDailyReportEmbed() {
1088
- // Use disk-based daily entries filtered by lastDailyReportDate for accurate delta
1089
- const { agg, top3: diskTop3 } = buildReportFromDisk();
1090
-
1091
- // Prefer in-memory dailyAlerts for top suspects (richer data), fallback to disk
1092
- const top3 = dailyAlerts.length > 0
1093
- ? dailyAlerts.slice().sort((a, b) => b.findingsCount - a.findingsCount).slice(0, 3)
1094
- : diskTop3;
1095
-
1096
- const top3Text = top3.length > 0
1097
- ? top3.map((a, i) => {
1098
- const name = a.ecosystem ? `${a.ecosystem}/${a.name || a.package}` : (a.name || a.package);
1099
- const version = a.version || 'N/A';
1100
- const count = a.findingsCount || (a.findings ? a.findings.length : 0);
1101
- return `${i + 1}. **${name}@${version}** — ${count} finding(s)`;
1102
- }).join('\n')
1103
- : 'None';
1104
-
1105
- // Avg scan time from in-memory stats (not available on disk)
1106
- const avg = stats.scanned > 0 ? (stats.totalTimeMs / stats.scanned / 1000).toFixed(1) : '0.0';
1107
-
1108
- const now = new Date();
1109
- const readableTime = now.toISOString().replace('T', ' ').replace(/\.\d+Z$/, ' UTC');
1110
-
1111
- return {
1112
- embeds: [{
1113
- title: '\uD83D\uDCCA MUAD\'DIB Daily Report',
1114
- color: 0x3498db,
1115
- fields: [
1116
- { name: 'Packages Scanned', value: `${agg.scanned}`, inline: true },
1117
- { name: 'Clean', value: `${agg.clean}`, inline: true },
1118
- { name: 'Suspects', value: `${agg.suspect}`, inline: true },
1119
- { name: 'Errors', value: `${stats.errors}`, inline: true },
1120
- { name: 'Avg Scan Time', value: `${avg}s/pkg`, inline: true },
1121
- { name: 'Top Suspects', value: top3Text, inline: false }
1122
- ],
1123
- footer: {
1124
- text: `MUAD'DIB - Daily summary | ${readableTime}`
1125
- },
1126
- timestamp: now.toISOString()
1127
- }]
1128
- };
1129
- }
1130
-
1131
- async function sendDailyReport() {
1132
- const url = getWebhookUrl();
1133
- if (!url) return;
1134
-
1135
- const payload = buildDailyReportEmbed();
1136
- try {
1137
- await sendWebhook(url, payload, { rawPayload: true });
1138
- console.log('[MONITOR] Daily report sent');
1139
- } catch (err) {
1140
- console.error(`[MONITOR] Daily report webhook failed: ${err.message}`);
1141
- }
1142
-
1143
- // Reset daily counters
1144
- stats.scanned = 0;
1145
- stats.clean = 0;
1146
- stats.suspect = 0;
1147
- stats.errors = 0;
1148
- stats.totalTimeMs = 0;
1149
- dailyAlerts.length = 0;
1150
- recentlyScanned.clear();
1151
- stats.lastDailyReportDate = getParisDateString();
1152
- }
1153
-
1154
- // --- CLI report helpers (muaddib report --now / --status) ---
1155
-
1156
- /**
1157
- * Read raw state file (without restoring into stats).
1158
- */
1159
- function loadStateRaw() {
1160
- try {
1161
- const raw = fs.readFileSync(STATE_FILE, 'utf8');
1162
- return JSON.parse(raw);
1163
- } catch {
1164
- return {};
1165
- }
1166
- }
1167
-
1168
- /**
1169
- * Reconstruct daily report data from persisted files (no in-memory stats needed).
1170
- * Used by `muaddib report --now` to send a report from a separate CLI process.
1171
- */
1172
- function buildReportFromDisk() {
1173
- const scanData = loadScanStats();
1174
- const stateRaw = loadStateRaw();
1175
- const lastDate = stateRaw.lastDailyReportDate || null;
1176
-
1177
- // First report (null): show today only (>= today).
1178
- // Subsequent reports: show days after last report (> lastDate).
1179
- const today = getParisDateString();
1180
- const sinceDays = lastDate
1181
- ? scanData.daily.filter(d => d.date > lastDate)
1182
- : scanData.daily.filter(d => d.date >= today);
1183
-
1184
- // Aggregate counters
1185
- const agg = { scanned: 0, clean: 0, suspect: 0 };
1186
- for (const d of sinceDays) {
1187
- agg.scanned += d.scanned || 0;
1188
- agg.clean += d.clean || 0;
1189
- agg.suspect += d.suspect || 0;
1190
- }
1191
-
1192
- // Load detections since last report for top suspects
1193
- const detections = loadDetections();
1194
- const recentDetections = lastDate
1195
- ? detections.detections.filter(d => d.first_seen_at && d.first_seen_at.slice(0, 10) > lastDate)
1196
- : detections.detections.filter(d => d.first_seen_at && d.first_seen_at.slice(0, 10) >= today);
1197
-
1198
- const top3 = recentDetections
1199
- .slice()
1200
- .sort((a, b) => (b.findings ? b.findings.length : 0) - (a.findings ? a.findings.length : 0))
1201
- .slice(0, 3);
1202
-
1203
- return { agg, top3, hasData: agg.scanned > 0 };
1204
- }
1205
-
1206
- /**
1207
- * Build a Discord embed from disk data (same format as buildDailyReportEmbed).
1208
- */
1209
- function buildReportEmbedFromDisk() {
1210
- const { agg, top3, hasData } = buildReportFromDisk();
1211
- if (!hasData) return null;
1212
-
1213
- const top3Text = top3.length > 0
1214
- ? top3.map((a, i) => `${i + 1}. **${a.ecosystem}/${a.package}@${a.version}** — ${a.findings ? a.findings.length : 0} finding(s)`).join('\n')
1215
- : 'None';
1216
-
1217
- const now = new Date();
1218
- const readableTime = now.toISOString().replace('T', ' ').replace(/\.\d+Z$/, ' UTC');
1219
-
1220
- return {
1221
- embeds: [{
1222
- title: '\uD83D\uDCCA MUAD\'DIB Daily Report (manual)',
1223
- color: 0x3498db,
1224
- fields: [
1225
- { name: 'Packages Scanned', value: `${agg.scanned}`, inline: true },
1226
- { name: 'Clean', value: `${agg.clean}`, inline: true },
1227
- { name: 'Suspects', value: `${agg.suspect}`, inline: true },
1228
- { name: 'Top Suspects', value: top3Text, inline: false }
1229
- ],
1230
- footer: {
1231
- text: `MUAD'DIB - Manual report | ${readableTime}`
1232
- },
1233
- timestamp: now.toISOString()
1234
- }]
1235
- };
1236
- }
1237
-
1238
- /**
1239
- * Force send a daily report from persisted data.
1240
- * Returns { sent: boolean, message: string }.
1241
- */
1242
- async function sendReportNow() {
1243
- const url = getWebhookUrl();
1244
- if (!url) {
1245
- return { sent: false, message: 'MUADDIB_WEBHOOK_URL not configured' };
1246
- }
1247
-
1248
- const payload = buildReportEmbedFromDisk();
1249
- if (!payload) {
1250
- return { sent: false, message: 'No data to report' };
1251
- }
1252
-
1253
- try {
1254
- await sendWebhook(url, payload, { rawPayload: true });
1255
- } catch (err) {
1256
- return { sent: false, message: `Webhook failed: ${err.message}` };
1257
- }
1258
-
1259
- // Update lastDailyReportDate on disk
1260
- const stateRaw = loadStateRaw();
1261
- const state = {
1262
- npmLastPackage: stateRaw.npmLastPackage || '',
1263
- pypiLastPackage: stateRaw.pypiLastPackage || ''
1264
- };
1265
- stats.lastDailyReportDate = getParisDateString();
1266
- saveState(state);
1267
-
1268
- return { sent: true, message: 'Daily report sent' };
1269
- }
1270
-
1271
- /**
1272
- * Get report status for `muaddib report --status`.
1273
- */
1274
- function getReportStatus() {
1275
- const stateRaw = loadStateRaw();
1276
- const lastDate = stateRaw.lastDailyReportDate || null;
1277
-
1278
- // Count packages scanned since last report (today only if never sent)
1279
- const scanData = loadScanStats();
1280
- const today = getParisDateString();
1281
- const sinceDays = lastDate
1282
- ? scanData.daily.filter(d => d.date > lastDate)
1283
- : scanData.daily.filter(d => d.date >= today);
1284
-
1285
- let scannedSince = 0;
1286
- for (const d of sinceDays) {
1287
- scannedSince += d.scanned || 0;
1288
- }
1289
-
1290
- // Compute next report time
1291
- const parisHour = getParisHour();
1292
- let nextReport;
1293
- if (lastDate === today || (lastDate !== today && parisHour >= DAILY_REPORT_HOUR)) {
1294
- // Already sent today OR past 08:00 but not sent (will fire soon if monitor runs)
1295
- if (lastDate === today) {
1296
- nextReport = 'Tomorrow 08:00 (Europe/Paris)';
1297
- } else {
1298
- nextReport = 'Today 08:00 (Europe/Paris) — pending, monitor must be running';
1299
- }
1300
- } else {
1301
- nextReport = 'Today 08:00 (Europe/Paris)';
1302
- }
1303
-
1304
- return { lastDailyReportDate: lastDate, scannedSince, nextReport };
1305
- }
1306
-
1307
- // --- npm polling ---
1308
-
1309
- /**
1310
- * Parse npm RSS XML (same regex approach as parsePyPIRss).
1311
- * Returns array of package names from <title> tags inside <item>.
1312
- */
1313
- function parseNpmRss(xml) {
1314
- const packages = [];
1315
- const itemRegex = /<item>([\s\S]*?)<\/item>/g;
1316
- let match;
1317
- while ((match = itemRegex.exec(xml)) !== null) {
1318
- const itemContent = match[1];
1319
- const titleMatch = itemContent.match(/<title>(?:<!\[CDATA\[)?(.*?)(?:\]\]>)?<\/title>/);
1320
- if (titleMatch) {
1321
- const title = titleMatch[1].trim();
1322
- const name = title.split(/\s+/)[0];
1323
- if (name) {
1324
- packages.push(name);
1325
- }
1326
- }
1327
- }
1328
- return packages;
1329
- }
1330
-
1331
- /**
1332
- * Fetch latest version metadata for an npm package.
1333
- * Returns { version, tarball } or null on failure.
1334
- */
1335
- async function getNpmLatestTarball(packageName) {
1336
- const url = `https://registry.npmjs.org/${encodeURIComponent(packageName)}/latest`;
1337
- const body = await httpsGet(url);
1338
- let data;
1339
- try {
1340
- data = JSON.parse(body);
1341
- } catch (e) {
1342
- throw new Error(`Invalid JSON from npm registry for ${packageName}: ${e.message}`);
1343
- }
1344
- const version = data.version || '';
1345
- const tarball = (data.dist && data.dist.tarball) || null;
1346
- return { version, tarball };
1347
- }
1348
-
1349
- async function pollNpm(state) {
1350
- const url = 'https://registry.npmjs.org/-/rss?descending=true&limit=50';
1351
-
1352
- try {
1353
- const body = await httpsGet(url);
1354
- const packages = parseNpmRss(body);
1355
-
1356
- // Find new packages (those after the last seen one)
1357
- let newPackages;
1358
- if (!state.npmLastPackage) {
1359
- newPackages = packages;
1360
- } else {
1361
- const lastIdx = packages.indexOf(state.npmLastPackage);
1362
- if (lastIdx === -1) {
1363
- newPackages = packages;
1364
- } else {
1365
- newPackages = packages.slice(0, lastIdx);
1366
- }
1367
- }
1368
-
1369
- for (const name of newPackages) {
1370
- console.log(`[MONITOR] New npm: ${name}`);
1371
- // Queue npm packages — tarball URL resolved during scan
1372
- scanQueue.push({
1373
- name,
1374
- version: '',
1375
- ecosystem: 'npm',
1376
- tarballUrl: null // resolved lazily via resolveTarballAndScan
1377
- });
1378
- }
1379
-
1380
- // Remember the most recent package (first in RSS)
1381
- if (packages.length > 0) {
1382
- state.npmLastPackage = packages[0];
1383
- }
1384
-
1385
- return newPackages.length;
1386
- } catch (err) {
1387
- console.error(`[MONITOR] npm poll error: ${err.message}`);
1388
- return -1;
1389
- }
1390
- }
1391
-
1392
- // --- PyPI polling ---
1393
-
1394
- /**
1395
- * Parse PyPI RSS XML (simple regex, no deps).
1396
- * Returns array of package names from <title> tags inside <item>.
1397
- */
1398
- function parsePyPIRss(xml) {
1399
- const packages = [];
1400
- // Match each <item>...</item> block
1401
- const itemRegex = /<item>([\s\S]*?)<\/item>/g;
1402
- let match;
1403
- while ((match = itemRegex.exec(xml)) !== null) {
1404
- const itemContent = match[1];
1405
- // Extract <title>...</title> inside item (handles CDATA)
1406
- const titleMatch = itemContent.match(/<title>(?:<!\[CDATA\[)?(.*?)(?:\]\]>)?<\/title>/);
1407
- if (titleMatch) {
1408
- // Title format is usually "package-name 1.0.0"
1409
- const title = titleMatch[1].trim();
1410
- // Extract just the package name (first word before space or version)
1411
- const name = title.split(/\s+/)[0];
1412
- if (name) {
1413
- packages.push(name);
1414
- }
1415
- }
1416
- }
1417
- return packages;
1418
- }
1419
-
1420
- async function pollPyPI(state) {
1421
- const url = 'https://pypi.org/rss/packages.xml';
1422
-
1423
- try {
1424
- const body = await httpsGet(url);
1425
- const packages = parsePyPIRss(body);
1426
-
1427
- // Find new packages (those after the last seen one)
1428
- let newPackages;
1429
- if (!state.pypiLastPackage) {
1430
- // First run: log all and remember the first one
1431
- newPackages = packages;
1432
- } else {
1433
- const lastIdx = packages.indexOf(state.pypiLastPackage);
1434
- if (lastIdx === -1) {
1435
- // Last seen not in feed — all are new
1436
- newPackages = packages;
1437
- } else {
1438
- // Items before lastIdx are newer (RSS is newest-first)
1439
- newPackages = packages.slice(0, lastIdx);
1440
- }
1441
- }
1442
-
1443
- for (const name of newPackages) {
1444
- console.log(`[MONITOR] New pypi: ${name}`);
1445
- // Queue PyPI packages — tarball URL resolved during scan
1446
- scanQueue.push({
1447
- name,
1448
- version: '',
1449
- ecosystem: 'pypi',
1450
- tarballUrl: null // resolved lazily in scanPackage wrapper
1451
- });
1452
- }
1453
-
1454
- // Remember the most recent package (first in RSS)
1455
- if (packages.length > 0) {
1456
- state.pypiLastPackage = packages[0];
1457
- }
1458
-
1459
- return newPackages.length;
1460
- } catch (err) {
1461
- console.error(`[MONITOR] PyPI poll error: ${err.message}`);
1462
- return -1;
1463
- }
1464
- }
1465
-
1466
- // --- Main loop ---
1467
-
1468
- function cleanupOrphanTmpDirs() {
1469
- const tmpBase = path.join(os.tmpdir(), 'muaddib-monitor');
1470
- try {
1471
- if (!fs.existsSync(tmpBase)) return;
1472
- const entries = fs.readdirSync(tmpBase);
1473
- for (const entry of entries) {
1474
- const fullPath = path.join(tmpBase, entry);
1475
- try {
1476
- fs.rmSync(fullPath, { recursive: true, force: true });
1477
- } catch {}
1478
- }
1479
- if (entries.length > 0) {
1480
- console.log(`[MONITOR] Cleaned up ${entries.length} orphan temp dir(s)`);
1481
- }
1482
- } catch {}
1483
- }
1484
-
1485
- async function startMonitor(options) {
1486
- if (options && options.verbose) {
1487
- setVerboseMode(true);
1488
- }
1489
-
1490
- // Cleanup temp dirs from previous runs (SIGTERM/crash may leave orphans)
1491
- cleanupOrphanTmpDirs();
1492
-
1493
- console.log(`
1494
- ╔════════════════════════════════════════════╗
1495
- ║ MUAD'DIB - Registry Monitor ║
1496
- ║ Scanning npm + PyPI new packages ║
1497
- ╚════════════════════════════════════════════╝
1498
- `);
1499
-
1500
- // Check sandbox availability
1501
- if (isSandboxEnabled()) {
1502
- sandboxAvailable = isDockerAvailable();
1503
- if (sandboxAvailable) {
1504
- console.log('[MONITOR] Docker detected — sandbox enabled for HIGH/CRITICAL findings');
1505
- } else {
1506
- console.log('[MONITOR] WARNING: Docker not available — running static analysis only');
1507
- }
1508
- } else {
1509
- console.log('[MONITOR] Sandbox disabled (MUADDIB_MONITOR_SANDBOX=false)');
1510
- }
1511
-
1512
- // Canary tokens status
1513
- if (isCanaryEnabled()) {
1514
- console.log('[MONITOR] Canary tokens enabled — honey tokens injected in sandbox for exfiltration detection');
1515
- } else {
1516
- console.log('[MONITOR] Canary tokens disabled (MUADDIB_MONITOR_CANARY=false)');
1517
- }
1518
-
1519
- // Temporal analysis status
1520
- if (isTemporalEnabled()) {
1521
- console.log('[MONITOR] Temporal lifecycle analysis enabled — detecting sudden lifecycle script changes');
1522
- } else {
1523
- console.log('[MONITOR] Temporal lifecycle analysis disabled (MUADDIB_MONITOR_TEMPORAL=false)');
1524
- }
1525
-
1526
- if (isTemporalAstEnabled()) {
1527
- console.log('[MONITOR] Temporal AST analysis enabled — detecting sudden dangerous API additions');
1528
- } else {
1529
- console.log('[MONITOR] Temporal AST analysis disabled (MUADDIB_MONITOR_TEMPORAL_AST=false)');
1530
- }
1531
-
1532
- if (isTemporalPublishEnabled()) {
1533
- console.log('[MONITOR] Publish frequency analysis enabled — detecting publish bursts, dormant spikes');
1534
- } else {
1535
- console.log('[MONITOR] Publish frequency analysis disabled (MUADDIB_MONITOR_TEMPORAL_PUBLISH=false)');
1536
- }
1537
-
1538
- if (isTemporalMaintainerEnabled()) {
1539
- console.log('[MONITOR] Maintainer change analysis enabled — detecting maintainer changes, account takeovers');
1540
- } else {
1541
- console.log('[MONITOR] Maintainer change analysis disabled (MUADDIB_MONITOR_TEMPORAL_MAINTAINER=false)');
1542
- }
1543
-
1544
- // Webhook filtering mode
1545
- if (isVerboseMode()) {
1546
- console.log('[MONITOR] Verbose mode ON — ALL anomalies sent as webhooks (temporal, publish, maintainer, AST)');
1547
- } else {
1548
- console.log('[MONITOR] Strict webhook mode — only IOC matches, sandbox confirmations, and canary exfiltrations trigger webhooks');
1549
- console.log('[MONITOR] Temporal/publish/maintainer anomalies are logged but NOT sent as webhooks');
1550
- console.log('[MONITOR] Use --verbose to send all anomalies as webhooks');
1551
- }
1552
-
1553
- const state = loadState();
1554
- console.log(`[MONITOR] State loaded — npm last: ${state.npmLastPackage || 'none'}, pypi last: ${state.pypiLastPackage || 'none'}`);
1555
- console.log(`[MONITOR] Polling every ${POLL_INTERVAL / 1000}s. Ctrl+C to stop.\n`);
1556
-
1557
- let running = true;
1558
-
1559
- // Graceful shutdown handler (shared by SIGINT and SIGTERM)
1560
- async function gracefulShutdown(signal) {
1561
- console.log(`\n[MONITOR] Received ${signal} — sending pending daily report...`);
1562
- if (stats.scanned > 0) {
1563
- await sendDailyReport();
1564
- }
1565
- saveState(state);
1566
- reportStats();
1567
- console.log('[MONITOR] State saved. Bye!');
1568
- running = false;
1569
- process.exit(0);
1570
- }
1571
-
1572
- process.on('SIGINT', () => gracefulShutdown('SIGINT'));
1573
- process.on('SIGTERM', () => gracefulShutdown('SIGTERM'));
1574
-
1575
- // Initial poll + scan
1576
- await poll(state);
1577
- saveState(state);
1578
- await processQueue();
1579
-
1580
- // Interval polling
1581
- while (running) {
1582
- await sleep(POLL_INTERVAL);
1583
- if (!running) break;
1584
- await poll(state);
1585
- saveState(state);
1586
- await processQueue();
1587
-
1588
- // Hourly stats report
1589
- if (Date.now() - stats.lastReportTime >= 3600_000) {
1590
- reportStats();
1591
- }
1592
-
1593
- // Daily webhook report at 08:00 Paris time
1594
- if (isDailyReportDue()) {
1595
- await sendDailyReport();
1596
- }
1597
- }
1598
- }
1599
-
1600
- async function poll(state) {
1601
- const timestamp = new Date().toISOString().slice(0, 19).replace('T', ' ');
1602
- console.log(`[MONITOR] ${timestamp} — polling registries...`);
1603
-
1604
- const [npmCount, pypiCount] = await Promise.all([
1605
- pollNpm(state),
1606
- pollPyPI(state)
1607
- ]);
1608
-
1609
- // Track consecutive poll failures for backoff
1610
- if (npmCount === -1 && pypiCount === -1) {
1611
- consecutivePollErrors++;
1612
- if (consecutivePollErrors > 1) {
1613
- const backoff = Math.min(POLL_INTERVAL * Math.pow(2, consecutivePollErrors - 1), POLL_MAX_BACKOFF);
1614
- console.log(`[MONITOR] Both registries failed (${consecutivePollErrors}x) — backing off ${(backoff / 1000).toFixed(0)}s`);
1615
- await sleep(backoff);
1616
- }
1617
- } else {
1618
- consecutivePollErrors = 0;
1619
- }
1620
-
1621
- const npmDisplay = npmCount === -1 ? 'error' : npmCount;
1622
- const pypiDisplay = pypiCount === -1 ? 'error' : pypiCount;
1623
- console.log(`[MONITOR] Found ${npmDisplay} npm + ${pypiDisplay} PyPI new packages`);
1624
- }
1625
-
1626
- /**
1627
- * Returns the highest severity level from all suspicious temporal results.
1628
- * Used to decide whether a temporal alert can be downgraded to FALSE POSITIVE.
1629
- * Returns 'CRITICAL', 'HIGH', 'MEDIUM', 'LOW', or null if no findings.
1630
- */
1631
- function getTemporalMaxSeverity(temporalResult, astResult, publishResult, maintainerResult) {
1632
- const SEVERITY_ORDER = { CRITICAL: 4, HIGH: 3, MEDIUM: 2, LOW: 1 };
1633
- let maxLevel = 0;
1634
- let maxSeverity = null;
1635
-
1636
- const allFindings = [];
1637
- if (temporalResult && temporalResult.suspicious && temporalResult.findings) {
1638
- allFindings.push(...temporalResult.findings);
1639
- }
1640
- if (astResult && astResult.suspicious && astResult.findings) {
1641
- allFindings.push(...astResult.findings);
1642
- }
1643
- // publishResult deliberately excluded — publish anomalies alone (nightly builds,
1644
- // burst releases) should not trigger temporal preservation. They are handled
1645
- // separately by isPublishAnomalyOnly().
1646
- if (maintainerResult && maintainerResult.suspicious && maintainerResult.findings) {
1647
- allFindings.push(...maintainerResult.findings);
1648
- }
1649
-
1650
- for (const f of allFindings) {
1651
- const level = SEVERITY_ORDER[f.severity] || 0;
1652
- if (level > maxLevel) {
1653
- maxLevel = level;
1654
- maxSeverity = f.severity;
1655
- }
1656
- }
1657
-
1658
- return maxSeverity;
1659
- }
1660
-
1661
- /**
1662
- * Returns true if publish_anomaly is the ONLY suspicious temporal result.
1663
- * publish_anomaly alone is too noisy for webhooks — only alert when combined
1664
- * with another anomaly (lifecycle, AST, maintainer).
1665
- */
1666
- function isPublishAnomalyOnly(temporalResult, astResult, publishResult, maintainerResult) {
1667
- const hasLifecycle = temporalResult && temporalResult.suspicious;
1668
- const hasAst = astResult && astResult.suspicious;
1669
- const hasPublish = publishResult && publishResult.suspicious;
1670
- const hasMaintainer = maintainerResult && maintainerResult.suspicious;
1671
-
1672
- return !!(hasPublish && !hasLifecycle && !hasAst && !hasMaintainer);
1673
- }
1674
-
1675
/**
 * Resolve the package tarball URL (if missing), then run the full scan
 * pipeline for one queue item: dedupe, temporal checks, static/sandbox
 * scan, FP-rate bookkeeping, and conditional temporal webhooks.
 *
 * For npm packages, tarballUrl is usually already set from the registry
 * response; when absent it is fetched via getNpmLatestTarball. For PyPI
 * packages the JSON API is queried via getPyPITarballUrl.
 *
 * @param {object} item - Queue item; assumed shape { name, version,
 *   ecosystem: 'npm'|'pypi', tarballUrl? } — TODO confirm against the
 *   queue producer. Mutated in place (tarballUrl/version may be set).
 * @returns {Promise<void>} Resolves when the item is fully processed or
 *   skipped; resolution errors are logged and counted, never thrown.
 */
async function resolveTarballAndScan(item) {
  // npm: fill in the tarball URL (and refresh version) only when missing.
  if (item.ecosystem === 'npm' && !item.tarballUrl) {
    try {
      const npmInfo = await getNpmLatestTarball(item.name);
      if (!npmInfo.tarball) {
        console.log(`[MONITOR] SKIP: ${item.name} — no tarball URL found on npm`);
        return;
      }
      item.tarballUrl = npmInfo.tarball;
      if (npmInfo.version) item.version = npmInfo.version;
    } catch (err) {
      // Resolution failure is non-fatal for the monitor loop: count and skip.
      console.error(`[MONITOR] ERROR resolving npm tarball for ${item.name}: ${err.message}`);
      stats.errors++;
      return;
    }
  }
  // PyPI: same pattern, different registry API.
  if (item.ecosystem === 'pypi' && !item.tarballUrl) {
    try {
      const pypiInfo = await getPyPITarballUrl(item.name);
      if (!pypiInfo.url) {
        console.log(`[MONITOR] SKIP: ${item.name} — no tarball URL found on PyPI`);
        return;
      }
      item.tarballUrl = pypiInfo.url;
      if (pypiInfo.version) item.version = pypiInfo.version;
    } catch (err) {
      console.error(`[MONITOR] ERROR resolving PyPI tarball for ${item.name}: ${err.message}`);
      stats.errors++;
      return;
    }
  }
  // Deduplication: skip if already scanned in the last 24h
  // (key uses the possibly-updated version from resolution above).
  const dedupeKey = `${item.ecosystem}/${item.name}@${item.version}`;
  if (recentlyScanned.has(dedupeKey)) {
    console.log(`[MONITOR] SKIP (already scanned): ${item.name}@${item.version}`);
    return;
  }
  // NOTE(review): the key is added before the scan runs, so a failed scan
  // will not be retried within the dedupe window — confirm this is intended.
  recentlyScanned.add(dedupeKey);

  // Temporal analysis: check for sudden lifecycle script changes (npm only)
  // Webhooks are deferred until after sandbox confirms the threat
  let temporalResult = null;
  let astResult = null;
  let publishResult = null;
  let maintainerResult = null;

  if (item.ecosystem === 'npm') {
    temporalResult = await runTemporalCheck(item.name);
    astResult = await runTemporalAstCheck(item.name);
    publishResult = await runTemporalPublishCheck(item.name);
    maintainerResult = await runTemporalMaintainerCheck(item.name);
  }

  const scanResult = await scanPackage(item.name, item.version, item.ecosystem, item.tarballUrl);
  const sandboxResult = scanResult && scanResult.sandboxResult;
  const staticClean = scanResult && scanResult.staticClean;

  // FP rate tracking: classify non-clean static results by sandbox outcome.
  // sandbox score 0 → false positive; score > 0 → confirmed; no sandbox → suspect.
  if (scanResult) {
    if (!staticClean) {
      if (sandboxResult && sandboxResult.score === 0) {
        updateScanStats('false_positive');
      } else if (sandboxResult && sandboxResult.score > 0) {
        updateScanStats('confirmed');
      } else {
        updateScanStats('suspect');
      }
    }
  }

  // Send temporal webhooks only if the package is confirmed suspicious
  const hasSuspiciousTemporal = (temporalResult && temporalResult.suspicious)
    || (astResult && astResult.suspicious)
    || (publishResult && publishResult.suspicious)
    || (maintainerResult && maintainerResult.suspicious);

  if (hasSuspiciousTemporal) {
    // Sandbox ran and package is CLEAN → suppress temporal webhooks
    if (sandboxResult && sandboxResult.score === 0) {
      console.log(`[MONITOR] FALSE POSITIVE (sandbox clean, no alert): ${item.name}@${item.version}`);
      // Static scan is CLEAN (0 findings) and no sandbox ran
    } else if (staticClean && !sandboxResult) {
      // Temporal CRITICAL/HIGH cannot be downgraded — "static clean" may mean obfuscated payload
      const temporalMaxSev = getTemporalMaxSeverity(temporalResult, astResult, publishResult, maintainerResult);
      if (temporalMaxSev === 'CRITICAL' || temporalMaxSev === 'HIGH') {
        console.log(`[MONITOR] Temporal ${temporalMaxSev} preserved despite static clean scan: ${item.name}@${item.version}`);
        console.log(`[MONITOR] SUSPECT (temporal anomaly, possible obfuscated payload): ${item.name}@${item.version}`);
        // Reclassify: presumably scanPackage already counted this item as
        // clean, so the counter is moved from clean to suspect — TODO confirm.
        stats.suspect++;
        stats.clean--;
        updateScanStats('suspect');
        // Force-send temporal webhooks (bypass verbose mode check)
        if (temporalResult && temporalResult.suspicious) await tryTemporalAlert(temporalResult, { force: true });
        if (astResult && astResult.suspicious) await tryTemporalAstAlert(astResult, { force: true });
        if (publishResult && publishResult.suspicious) await tryTemporalPublishAlert(publishResult, { force: true });
        if (maintainerResult && maintainerResult.suspicious) await tryTemporalMaintainerAlert(maintainerResult, { force: true });
      } else {
        console.log(`[MONITOR] FALSE POSITIVE (static clean, no alert): ${item.name}@${item.version}`);
      }
      // publish_anomaly alone → no webhook (too noisy, not actionable alone)
    } else if (isPublishAnomalyOnly(temporalResult, astResult, publishResult, maintainerResult)) {
      console.log(`[MONITOR] PUBLISH ANOMALY (alone, no alert): ${item.name}@${item.version}`);
    } else {
      // Sandbox confirmed threat (score > 0) OR static scan found threats → send webhooks
      if (temporalResult && temporalResult.suspicious) await tryTemporalAlert(temporalResult);
      if (astResult && astResult.suspicious) await tryTemporalAstAlert(astResult);
      if (publishResult && publishResult.suspicious) await tryTemporalPublishAlert(publishResult);
      if (maintainerResult && maintainerResult.suspicious) await tryTemporalMaintainerAlert(maintainerResult);
    }
  }
}
1790
-
1791
/**
 * Pause for the given duration.
 * @param {number} ms - Delay in milliseconds.
 * @returns {Promise<void>} Resolves once the timer fires.
 */
function sleep(ms) {
  return new Promise((done) => {
    setTimeout(done, ms);
  });
}
1794
-
1795
// Public API of the monitor module. Most names are exported for unit
// testing of internals as well as for programmatic use.
module.exports = {
  // Main loop and RSS feed parsing
  startMonitor,
  parseNpmRss,
  parsePyPIRss,
  // Persistent state
  loadState,
  saveState,
  STATE_FILE,
  ALERTS_FILE,
  // Tarball download / extraction helpers (re-exported from shared/download)
  downloadToFile,
  extractTarGz,
  getNpmTarballUrl,
  getNpmLatestTarball,
  getPyPITarballUrl,
  // Scan pipeline
  scanPackage,
  scanQueue,
  processQueue,
  appendAlert,
  timeoutPromise,
  reportStats,
  stats,
  dailyAlerts,
  recentlyScanned,
  resolveTarballAndScan,
  MAX_TARBALL_SIZE,
  // Static-scan allowlists and helpers
  KNOWN_BUNDLED_FILES,
  KNOWN_BUNDLED_PATHS,
  isBundledToolingOnly,
  // Sandbox integration
  isSandboxEnabled,
  hasHighOrCritical,
  // Accessor pair so tests can read/override the module-level flag.
  get sandboxAvailable() { return sandboxAvailable; },
  set sandboxAvailable(v) { sandboxAvailable = v; },
  // Webhook dispatch
  getWebhookUrl,
  shouldSendWebhook,
  buildMonitorWebhookPayload,
  trySendWebhook,
  computeRiskLevel,
  computeRiskScore,
  // Daily report
  buildDailyReportEmbed,
  sendDailyReport,
  DAILY_REPORT_HOUR,
  isDailyReportDue,
  getParisHour,
  getParisDateString,
  // Temporal analysis (lifecycle / AST / publish / maintainer)
  isTemporalEnabled,
  buildTemporalWebhookEmbed,
  runTemporalCheck,
  isTemporalAstEnabled,
  buildTemporalAstWebhookEmbed,
  runTemporalAstCheck,
  isTemporalPublishEnabled,
  buildPublishAnomalyWebhookEmbed,
  runTemporalPublishCheck,
  isTemporalMaintainerEnabled,
  buildMaintainerChangeWebhookEmbed,
  runTemporalMaintainerCheck,
  // Canary exfiltration
  isCanaryEnabled,
  buildCanaryExfiltrationWebhookEmbed,
  getTemporalMaxSeverity,
  isPublishAnomalyOnly,
  // Verbosity toggles
  isVerboseMode,
  setVerboseMode,
  // IOC matching
  hasIOCMatch,
  IOC_MATCH_TYPES,
  // Detections persistence
  DETECTIONS_FILE,
  appendDetection,
  loadDetections,
  getDetectionStats,
  // Scan statistics persistence
  SCAN_STATS_FILE,
  loadScanStats,
  updateScanStats,
  // On-demand reporting
  buildReportFromDisk,
  buildReportEmbedFromDisk,
  sendReportNow,
  getReportStatus,
  cleanupOrphanTmpDirs,
  // NOTE(review): unlike sandboxAvailable above, this is a plain object with
  // get()/set() METHODS, not an accessor property — callers must write
  // monitor.consecutivePollErrors.get(). Confirm the asymmetry is intended.
  consecutivePollErrors: { get() { return consecutivePollErrors; }, set(v) { consecutivePollErrors = v; } },
  POLL_MAX_BACKOFF
};
1873
-
1874
// Standalone entry point: node src/monitor.js
if (require.main === module) {
  const fatal = (err) => {
    console.error('[MONITOR] Fatal error:', err.message);
    process.exit(1);
  };
  startMonitor().catch(fatal);
}