muaddib-scanner 2.10.101 → 2.11.1
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/package.json +3 -3
- package/src/integrations/api-ingest.js +222 -0
- package/src/ioc/scraper.js +148 -8
- package/src/ioc/updater.js +23 -1
- package/src/ioc/yaml-loader.js +49 -1
- package/src/monitor/webhook.js +7 -0
- package/src/pipeline/executor.js +16 -3
- package/src/pipeline/processor.js +27 -1
- package/src/response/playbooks.js +21 -0
- package/src/rules/index.js +83 -0
- package/src/scanner/anti-forensic.js +225 -0
- package/src/scanner/ioc-strings.js +109 -0
- package/src/scanner/stub-package.js +179 -0
- package/src/scoring.js +316 -29
package/package.json
CHANGED
|
@@ -1,6 +1,6 @@
|
|
|
1
1
|
{
|
|
2
2
|
"name": "muaddib-scanner",
|
|
3
|
-
"version": "2.
|
|
3
|
+
"version": "2.11.1",
|
|
4
4
|
"description": "Supply-chain threat detection & response for npm & PyPI/Python",
|
|
5
5
|
"main": "src/index.js",
|
|
6
6
|
"bin": {
|
|
@@ -55,8 +55,8 @@
|
|
|
55
55
|
},
|
|
56
56
|
"devDependencies": {
|
|
57
57
|
"@eslint/js": "10.0.1",
|
|
58
|
-
"eslint": "10.2.
|
|
58
|
+
"eslint": "10.2.1",
|
|
59
59
|
"eslint-plugin-security": "^4.0.0",
|
|
60
|
-
"globals": "17.
|
|
60
|
+
"globals": "17.5.0"
|
|
61
61
|
}
|
|
62
62
|
}
|
|
@@ -0,0 +1,222 @@
|
|
|
1
|
+
/**
|
|
2
|
+
* api-ingest.js — Real-time alert push from monitor to muad-api.
|
|
3
|
+
*
|
|
4
|
+
* The monitor calls sendIngest() whenever it decides to fire a Discord
|
|
5
|
+
* webhook. Fire-and-forget: errors are logged but never block the caller.
|
|
6
|
+
*
|
|
7
|
+
* Required env:
|
|
8
|
+
* MUADDIB_API_URL Base URL of muad-api (e.g. https://api.example.com)
|
|
9
|
+
* MUADDIB_INGEST_TOKEN Static shared secret matching the API's INGEST_TOKEN
|
|
10
|
+
*
|
|
11
|
+
* Both unset = ingest disabled silently (the monitor still works on its own).
|
|
12
|
+
*/
|
|
13
|
+
|
|
14
|
+
const https = require('https');
|
|
15
|
+
const http = require('http');
|
|
16
|
+
const dns = require('dns');
|
|
17
|
+
|
|
18
|
+
// IPv4/IPv6 ranges that must never be ingest targets (SSRF guard).
// Checked against the URL hostname (when it is an IP literal) and against
// every address the hostname resolves to (see resolveAndCheck).
const PRIVATE_IP_PATTERNS = [
  /^127\./,                          // IPv4 loopback
  /^10\./,                           // RFC 1918 private
  /^172\.(1[6-9]|2[0-9]|3[0-1])\./,  // RFC 1918 private (172.16.0.0/12)
  /^192\.168\./,                     // RFC 1918 private
  /^0\./,                            // "this network" (0.0.0.0/8)
  /^169\.254\./,                     // IPv4 link-local
  /^::1$/,                           // IPv6 loopback
  /^::ffff:127\./,                   // IPv4-mapped loopback
  // IPv6 unique-local is fc00::/7 (both fc00::/8 AND fd00::/8).
  // The previous /^fc00:/ pattern missed every fd00:: address, letting
  // unique-local targets slip past the SSRF filter.
  /^f[cd][0-9a-f]{2}:/i,
  /^fe80:/i                          // IPv6 link-local
];

// Per-request socket inactivity timeout for the ingest POST.
const REQUEST_TIMEOUT_MS = 5000;
// Cap on the number of findings serialized into one ingest payload.
const MAX_FINDINGS = 200;
// Cap on each individual finding string's length.
const MAX_FINDING_LENGTH = 500;
|
|
34
|
+
|
|
35
|
+
/**
 * Read the ingest API base URL from the environment.
 * @returns {string|null} MUADDIB_API_URL with a single trailing slash
 *   stripped, or null when unset/blank.
 */
function getApiUrl() {
  const raw = process.env.MUADDIB_API_URL;
  if (!raw || raw.trim() === '') {
    return null;
  }
  return raw.replace(/\/$/, '');
}
|
|
39
|
+
|
|
40
|
+
/**
 * Read the static ingest shared secret from the environment.
 * @returns {string|null} MUADDIB_INGEST_TOKEN, or null when unset/empty.
 */
function getIngestToken() {
  const token = process.env.MUADDIB_INGEST_TOKEN;
  return token ? token : null;
}
|
|
43
|
+
|
|
44
|
+
/**
 * True when both the API URL and the ingest token are present, i.e. the
 * monitor should attempt to push alerts to muad-api at all.
 * @returns {boolean}
 */
function isIngestConfigured() {
  return Boolean(getApiUrl()) && Boolean(getIngestToken());
}
|
|
47
|
+
|
|
48
|
+
/**
 * Whether a hostname refers to the local machine, which is exempt from
 * the HTTPS requirement and the private-IP denylist.
 * @param {string} hostname - Already-lowercased URL hostname.
 * @returns {boolean}
 */
function isLocalHostname(hostname) {
  const LOCAL_HOSTS = ['localhost', '127.0.0.1', '::1'];
  return LOCAL_HOSTS.includes(hostname);
}
|
|
51
|
+
|
|
52
|
+
/**
 * Validate MUADDIB_API_URL before any network use.
 *
 * Rules: must parse as a URL; only http/https schemes; HTTPS is mandatory
 * unless the host is local; non-local hosts must not be private-range IP
 * literals (DNS results are re-checked later in resolveAndCheck).
 *
 * @param {string} url - Candidate API base URL.
 * @returns {{valid: boolean, error?: string, urlObj?: URL, local?: boolean}}
 */
function validateApiUrl(url) {
  let urlObj;
  try {
    urlObj = new URL(url);
  } catch (e) {
    return { valid: false, error: `Invalid URL: ${e.message}` };
  }
  const hostname = urlObj.hostname.toLowerCase();
  const local = isLocalHostname(hostname);
  // Reject unknown schemes first so e.g. ftp://example.com reports
  // "Unsupported protocol" instead of the misleading "HTTPS required"
  // message the previous check order produced.
  if (urlObj.protocol !== 'https:' && urlObj.protocol !== 'http:') {
    return { valid: false, error: `Unsupported protocol: ${urlObj.protocol}` };
  }
  if (urlObj.protocol !== 'https:' && !local) {
    return { valid: false, error: 'HTTPS required for non-localhost API' };
  }
  if (!local && PRIVATE_IP_PATTERNS.some(p => p.test(hostname))) {
    return { valid: false, error: 'Private IP not allowed' };
  }
  return { valid: true, urlObj, local };
}
|
|
72
|
+
|
|
73
|
+
/**
 * Normalize a threat severity into the API's allowed enum.
 * API accepts only CRITICAL|HIGH|MEDIUM|LOW; CLEAN/unknown map to LOW.
 * @param {string|undefined} level - Raw severity label, any case.
 * @returns {'CRITICAL'|'HIGH'|'MEDIUM'|'LOW'}
 */
function normalizeSeverity(level) {
  const canonical = (level || '').toUpperCase();
  const passthrough = ['CRITICAL', 'HIGH', 'MEDIUM'];
  return passthrough.includes(canonical) ? canonical : 'LOW';
}
|
|
89
|
+
|
|
90
|
+
/**
 * Derive an API severity from a scan summary: prefer the explicit
 * riskLevel label, otherwise bucket the numeric riskScore.
 * @param {{riskLevel?: string, riskScore?: number}|null} summary
 * @returns {'CRITICAL'|'HIGH'|'MEDIUM'|'LOW'}
 */
function computeSeverityFromSummary(summary) {
  if (!summary) return 'LOW';
  if (summary.riskLevel) return normalizeSeverity(summary.riskLevel);
  const score = summary.riskScore || 0;
  // Score buckets mirror the dashboard's thresholds: 75/50/25.
  const thresholds = [[75, 'CRITICAL'], [50, 'HIGH'], [25, 'MEDIUM']];
  for (const [floor, severity] of thresholds) {
    if (score >= floor) return severity;
  }
  return 'LOW';
}
|
|
99
|
+
|
|
100
|
+
/**
 * Build the JSON body for POST /alerts/ingest from a scan result.
 * Findings are capped at MAX_FINDINGS entries of MAX_FINDING_LENGTH chars;
 * the score is clamped to [0, 100]; `breakdown` is attached only when the
 * summary carries an array.
 * @param {string} name - Package name.
 * @param {string|null} version - Package version ('unknown' when absent).
 * @param {{summary?: object, threats?: object[]}|null} result
 * @returns {object} Payload ready for JSON.stringify.
 */
function buildIngestPayload(name, version, result) {
  const summary = (result && result.summary) || {};
  const threatList = result && Array.isArray(result.threats) ? result.threats : [];

  // One short human-readable label per threat, best field available.
  const findings = threatList
    .slice(0, MAX_FINDINGS)
    .map(threat => String(threat.message || threat.rule_id || threat.type || 'unknown').slice(0, MAX_FINDING_LENGTH))
    .filter(Boolean);

  const payload = {
    package: name,
    version: version || 'unknown',
    score: Math.min(100, Math.max(0, summary.riskScore || 0)),
    severity: computeSeverityFromSummary(summary),
    findings
  };

  if (Array.isArray(summary.breakdown)) {
    payload.breakdown = summary.breakdown;
  }
  return payload;
}
|
|
124
|
+
|
|
125
|
+
/**
 * Resolve a hostname and reject it if any resolved address is private
 * (DNS-rebinding guard). Local targets skip the check entirely.
 *
 * @param {string} hostname - URL hostname (lowercased; IPv6 literals arrive
 *   bracket-stripped from the URL parser).
 * @param {boolean} allowPrivate - True for localhost targets pre-approved
 *   by validateApiUrl.
 * @returns {Promise<string|null>} A pinned address to connect to (first A
 *   record, or the literal itself), or null to connect by hostname.
 * @throws {Error} When resolution fails or any address is private.
 */
async function resolveAndCheck(hostname, allowPrivate) {
  if (allowPrivate) {
    return null;
  }
  // dns.resolve4/resolve6 issue real DNS queries and therefore FAIL on IP
  // literals, which previously made any IP-literal API URL error out with
  // "DNS resolution failed". Literals are checked directly instead.
  const isIpLiteral = /^\d{1,3}(\.\d{1,3}){3}$/.test(hostname) || hostname.includes(':');
  if (isIpLiteral) {
    if (PRIVATE_IP_PATTERNS.some(p => p.test(hostname))) {
      throw new Error(`Hostname ${hostname} resolves to private IP ${hostname}`);
    }
    return hostname;
  }
  const [v4, v6] = await Promise.all([
    dns.promises.resolve4(hostname).catch(() => []),
    dns.promises.resolve6(hostname).catch(() => [])
  ]);
  const all = [...v4, ...v6];
  if (all.length === 0) {
    throw new Error(`DNS resolution failed for ${hostname}`);
  }
  // Every record must be public; a single private answer aborts the send.
  for (const addr of all) {
    if (PRIVATE_IP_PATTERNS.some(p => p.test(addr))) {
      throw new Error(`Hostname ${hostname} resolves to private IP ${addr}`);
    }
  }
  return v4[0] || null;
}
|
|
144
|
+
|
|
145
|
+
/**
 * Perform one HTTP(S) POST of `body` to `targetUrl` with a bearer token.
 * When `resolvedAddress` is given, the TCP connection is pinned to that
 * address while Host/SNI keep the original hostname (DNS-rebinding guard).
 *
 * @param {string} targetUrl - Full URL including path.
 * @param {string} body - Pre-serialized JSON request body.
 * @param {string} token - Bearer token for the Authorization header.
 * @param {string|null} resolvedAddress - Pre-resolved IP to connect to.
 * @returns {Promise<{status: number}>} Resolves on 2xx, rejects otherwise.
 */
function postOnce(targetUrl, body, token, resolvedAddress) {
  return new Promise((resolve, reject) => {
    const urlObj = new URL(targetUrl);
    const proto = urlObj.protocol === 'https:' ? https : http;
    const MAX_RESPONSE_BYTES = 64 * 1024;
    const options = {
      hostname: resolvedAddress || urlObj.hostname,
      port: urlObj.port || (urlObj.protocol === 'https:' ? 443 : 80),
      path: urlObj.pathname + urlObj.search,
      method: 'POST',
      headers: {
        'Content-Type': 'application/json',
        'Content-Length': Buffer.byteLength(body),
        'Authorization': `Bearer ${token}`,
        // Host + servername carry the real hostname so virtual hosting
        // and TLS SNI still work when connecting by pinned IP.
        'Host': urlObj.hostname
      },
      servername: urlObj.hostname
    };

    const req = proto.request(options, (res) => {
      let size = 0;
      res.on('data', chunk => {
        size += chunk.length;
        if (size > MAX_RESPONSE_BYTES) {
          // Settle BEFORE destroying: the old code destroyed the stream
          // without rejecting, so 'end' never fired and the caller stalled
          // until the socket timeout with a misleading "Timeout" error.
          reject(new Error(`Response exceeded ${MAX_RESPONSE_BYTES} bytes`));
          res.destroy();
        }
      });
      res.on('end', () => {
        if (res.statusCode >= 200 && res.statusCode < 300) {
          resolve({ status: res.statusCode });
        } else {
          reject(new Error(`HTTP ${res.statusCode}`));
        }
      });
    });
    req.setTimeout(REQUEST_TIMEOUT_MS, () => {
      req.destroy();
      reject(new Error(`Timeout after ${REQUEST_TIMEOUT_MS}ms`));
    });
    req.on('error', reject);
    req.write(body);
    req.end();
  });
}
|
|
186
|
+
|
|
187
|
+
/**
 * Push one alert to muad-api. Fire-and-forget: never throws.
 * Returns { ok: true, status } on success, { ok: false, error } otherwise.
 * @param {string} name - Package name.
 * @param {string|null} version - Package version.
 * @param {object|null} result - Scan result with summary/threats.
 */
async function sendIngest(name, version, result) {
  if (!isIngestConfigured()) {
    return { ok: false, error: 'not_configured' };
  }

  const apiUrl = getApiUrl();
  const validation = validateApiUrl(apiUrl);
  if (!validation.valid) {
    console.error(`[INGEST] Invalid MUADDIB_API_URL: ${validation.error}`);
    return { ok: false, error: validation.error };
  }

  const body = JSON.stringify(buildIngestPayload(name, version, result));

  try {
    // Pin the connection to a vetted address before POSTing (SSRF guard).
    const pinnedAddress = await resolveAndCheck(validation.urlObj.hostname, validation.local);
    const response = await postOnce(`${apiUrl}/alerts/ingest`, body, getIngestToken(), pinnedAddress);
    return { ok: true, status: response.status };
  } catch (err) {
    console.error(`[INGEST] ${name}@${version}: ${err.message}`);
    return { ok: false, error: err.message };
  }
}
|
|
215
|
+
|
|
216
|
+
// Public surface. `sendIngest` and `isIngestConfigured` are consumed by
// src/monitor/webhook.js; the remaining exports appear intended for unit
// testing — confirm callers before removing any of them.
module.exports = {
  sendIngest,
  buildIngestPayload,
  computeSeverityFromSummary,
  isIngestConfigured,
  validateApiUrl
};
|
package/src/ioc/scraper.js
CHANGED
|
@@ -1037,7 +1037,8 @@ async function runScraper() {
|
|
|
1037
1037
|
scrapeDatadogIOCs(),
|
|
1038
1038
|
scrapeOSSFMaliciousPackages(osvResult.knownIds),
|
|
1039
1039
|
scrapeGitHubAdvisory(),
|
|
1040
|
-
scrapeOSVPyPIDataDump()
|
|
1040
|
+
scrapeOSVPyPIDataDump(),
|
|
1041
|
+
scrapeAikidoMalwareFeed()
|
|
1041
1042
|
]);
|
|
1042
1043
|
|
|
1043
1044
|
const shaiHuludResult = results[0];
|
|
@@ -1045,6 +1046,7 @@ async function runScraper() {
|
|
|
1045
1046
|
const ossfPackages = results[2];
|
|
1046
1047
|
const githubPackages = results[3];
|
|
1047
1048
|
const pypiPackages = results[4];
|
|
1049
|
+
const aikidoResult = results[5];
|
|
1048
1050
|
|
|
1049
1051
|
// Log aggregated warnings
|
|
1050
1052
|
if (_noVersionSkipCount > 0) {
|
|
@@ -1057,7 +1059,8 @@ async function runScraper() {
|
|
|
1057
1059
|
...shaiHuludResult.packages,
|
|
1058
1060
|
...datadogResult.packages,
|
|
1059
1061
|
...ossfPackages,
|
|
1060
|
-
...githubPackages
|
|
1062
|
+
...githubPackages,
|
|
1063
|
+
...aikidoResult.packages
|
|
1061
1064
|
];
|
|
1062
1065
|
|
|
1063
1066
|
// Merge all hashes
|
|
@@ -1069,7 +1072,7 @@ async function runScraper() {
|
|
|
1069
1072
|
// Smart deduplication: build map of best entry per key
|
|
1070
1073
|
// For duplicates, keep the one with highest confidence, then most recent date
|
|
1071
1074
|
const dedupSpinner = new Spinner();
|
|
1072
|
-
dedupSpinner.start('Deduplicating ' + allPackages.length + ' npm + ' + pypiPackages.length + ' PyPI entries...');
|
|
1075
|
+
dedupSpinner.start('Deduplicating ' + allPackages.length + ' npm + ' + (pypiPackages.length + (aikidoResult.pypi_packages || []).length) + ' PyPI entries...');
|
|
1073
1076
|
const dedupMap = new Map();
|
|
1074
1077
|
|
|
1075
1078
|
// Seed with existing IOCs (with sanitization of stale comma-in-version entries)
|
|
@@ -1091,11 +1094,34 @@ async function runScraper() {
|
|
|
1091
1094
|
dedupMap.set(key, pkg);
|
|
1092
1095
|
}
|
|
1093
1096
|
|
|
1094
|
-
// Merge new IOCs with smart replacement (with input validation)
|
|
1097
|
+
// Merge new IOCs with smart replacement (with input validation).
|
|
1098
|
+
// Source-aware: each entry accumulates a `sources: [{name, added_at}]` array
|
|
1099
|
+
// tracking every feed that reported this (name, version). A package
|
|
1100
|
+
// reported by >= 3 distinct sources is treated as confidence-max
|
|
1101
|
+
// (used by `getSourceConfidence` for webhook gating).
|
|
1095
1102
|
let addedPackages = 0;
|
|
1096
1103
|
let upgradedPackages = 0;
|
|
1097
1104
|
let skippedInvalid = 0;
|
|
1098
1105
|
let skippedNeverWildcard = 0;
|
|
1106
|
+
// Resolve which feed reported an IOC entry: explicit `source` field first,
// then freshness metadata, else 'unknown'.
function resolveEntrySource(pkg) {
  return pkg.source || (pkg.freshness && pkg.freshness.source) || 'unknown';
}
// Resolve the timestamp recorded for a source attribution.
function resolveEntryDate(pkg) {
  return (pkg.freshness && pkg.freshness.added_at) || pkg.published || new Date().toISOString();
}
/**
 * Record `pkg`'s feed in target's `sources` attribution list (deduped by
 * feed name). Used for multi-source confidence (see getSourceConfidence).
 */
function appendSource(target, pkg) {
  if (!Array.isArray(target.sources)) target.sources = [];
  const feedName = resolveEntrySource(pkg);
  if (!target.sources.some(s => s.name === feedName)) {
    target.sources.push({ name: feedName, added_at: resolveEntryDate(pkg) });
  }
}
/**
 * Ensure an entry carries a `sources` array, seeded with its own feed.
 * Delegates to appendSource so the source/date resolution logic lives in
 * exactly one place (previously duplicated between the two functions).
 */
function seedSources(pkg) {
  if (!Array.isArray(pkg.sources)) {
    pkg.sources = [];
    appendSource(pkg, pkg);
  }
}
|
|
1099
1125
|
for (const pkg of allPackages) {
|
|
1100
1126
|
if (!validateIOCEntry(pkg.name, pkg.version, 'npm')) {
|
|
1101
1127
|
skippedInvalid++;
|
|
@@ -1108,21 +1134,28 @@ async function runScraper() {
|
|
|
1108
1134
|
}
|
|
1109
1135
|
const key = pkg.name + '@' + pkg.version;
|
|
1110
1136
|
if (!dedupMap.has(key)) {
|
|
1137
|
+
seedSources(pkg);
|
|
1111
1138
|
dedupMap.set(key, pkg);
|
|
1112
1139
|
addedPackages++;
|
|
1113
1140
|
} else {
|
|
1114
1141
|
const existing = dedupMap.get(key);
|
|
1142
|
+
// Always accumulate source attribution before any replacement decision.
|
|
1143
|
+
seedSources(existing);
|
|
1144
|
+
appendSource(existing, pkg);
|
|
1115
1145
|
const existingConf = CONFIDENCE_ORDER[existing.confidence] || 0;
|
|
1116
1146
|
const newConf = CONFIDENCE_ORDER[pkg.confidence] || 0;
|
|
1117
1147
|
if (newConf > existingConf) {
|
|
1118
|
-
|
|
1148
|
+
// Replace with the higher-confidence entry but preserve the merged sources list
|
|
1149
|
+
const mergedSources = existing.sources;
|
|
1150
|
+
dedupMap.set(key, Object.assign({}, pkg, { sources: mergedSources }));
|
|
1119
1151
|
upgradedPackages++;
|
|
1120
1152
|
} else if (newConf === existingConf) {
|
|
1121
1153
|
// Same confidence: keep most recent
|
|
1122
1154
|
const existingDate = existing.published || (existing.freshness && existing.freshness.added_at) || '';
|
|
1123
1155
|
const newDate = pkg.published || (pkg.freshness && pkg.freshness.added_at) || '';
|
|
1124
1156
|
if (newDate > existingDate) {
|
|
1125
|
-
|
|
1157
|
+
const mergedSources = existing.sources;
|
|
1158
|
+
dedupMap.set(key, Object.assign({}, pkg, { sources: mergedSources }));
|
|
1126
1159
|
upgradedPackages++;
|
|
1127
1160
|
}
|
|
1128
1161
|
}
|
|
@@ -1139,21 +1172,27 @@ async function runScraper() {
|
|
|
1139
1172
|
pypiDedupMap.set(key, pkg);
|
|
1140
1173
|
}
|
|
1141
1174
|
let addedPyPIPackages = 0;
|
|
1142
|
-
|
|
1175
|
+
// Merge Aikido PyPI feed into the same loop
|
|
1176
|
+
const allPyPIPackages = pypiPackages.concat(aikidoResult.pypi_packages || []);
|
|
1177
|
+
for (const pkg of allPyPIPackages) {
|
|
1143
1178
|
if (!validateIOCEntry(pkg.name, pkg.version, 'pypi')) {
|
|
1144
1179
|
skippedInvalid++;
|
|
1145
1180
|
continue;
|
|
1146
1181
|
}
|
|
1147
1182
|
const key = pkg.name + '@' + pkg.version;
|
|
1148
1183
|
if (!pypiDedupMap.has(key)) {
|
|
1184
|
+
seedSources(pkg);
|
|
1149
1185
|
pypiDedupMap.set(key, pkg);
|
|
1150
1186
|
addedPyPIPackages++;
|
|
1151
1187
|
} else {
|
|
1152
1188
|
const existing = pypiDedupMap.get(key);
|
|
1189
|
+
seedSources(existing);
|
|
1190
|
+
appendSource(existing, pkg);
|
|
1153
1191
|
const existingConf = CONFIDENCE_ORDER[existing.confidence] || 0;
|
|
1154
1192
|
const newConf = CONFIDENCE_ORDER[pkg.confidence] || 0;
|
|
1155
1193
|
if (newConf > existingConf) {
|
|
1156
|
-
|
|
1194
|
+
const mergedSources = existing.sources;
|
|
1195
|
+
pypiDedupMap.set(key, Object.assign({}, pkg, { sources: mergedSources }));
|
|
1157
1196
|
}
|
|
1158
1197
|
}
|
|
1159
1198
|
}
|
|
@@ -1298,6 +1337,80 @@ async function runScraper() {
|
|
|
1298
1337
|
};
|
|
1299
1338
|
}
|
|
1300
1339
|
|
|
1340
|
+
// ============================================
// SOURCE 6: Aikido Open Source Malware Feed (npm + PyPI)
// Free flat JSON feed at malware-list.aikido.dev. Each entry:
// { package_name, version, reason: 'MALWARE'|'TELEMETRY'|'PROTESTWARE' }
// Source: https://github.com/AikidoSec/safe-chain (open-source consumer)
// ============================================

/**
 * Fetch one Aikido feed URL and convert MALWARE entries to IOC records.
 * Shared by the npm and PyPI branches of scrapeAikidoMalwareFeed (they
 * were previously copy-pasted). Best-effort: any error is logged and an
 * empty array returned so the scraper run continues.
 *
 * @param {string} url - Feed URL to fetch.
 * @param {string} idPrefix - Prefix for generated IOC ids.
 * @param {string} ecosystemLabel - 'npm' or 'PyPI', used in log lines.
 * @param {string[]} references - Reference URLs attached to each entry.
 * @returns {Promise<object[]>} IOC package entries.
 */
async function fetchAikidoMalwareEntries(url, idPrefix, ecosystemLabel, references) {
  const entries = [];
  try {
    const { status, data } = await fetchJSON(url);
    if (status === 200 && Array.isArray(data)) {
      for (const entry of data) {
        if (!entry || typeof entry.package_name !== 'string') continue;
        // Only keep MALWARE; TELEMETRY/PROTESTWARE are policy decisions, not security
        if (entry.reason !== 'MALWARE') continue;
        // Missing/empty version means the whole package is flagged.
        const ver = entry.version ? String(entry.version) : '*';
        entries.push({
          id: idPrefix + entry.package_name + '-' + ver,
          name: entry.package_name,
          version: ver,
          severity: 'critical',
          confidence: 'high',
          source: 'aikido',
          description: 'Flagged by Aikido Open Source Malware Feed',
          references,
          mitre: 'T1195.002',
          freshness: createFreshness('aikido', 'high')
        });
      }
      console.log('[SCRAPER] ' + entries.length + ' ' + ecosystemLabel + ' MALWARE entries from Aikido');
    } else {
      console.log('[SCRAPER] Aikido ' + ecosystemLabel + ' feed: HTTP ' + status);
    }
  } catch (e) {
    console.log('[SCRAPER] Aikido ' + ecosystemLabel + ' error: ' + e.message);
  }
  return entries;
}

/**
 * Scrape the Aikido Open Source Malware Feed for both ecosystems.
 * @returns {Promise<{packages: object[], pypi_packages: object[]}>}
 */
async function scrapeAikidoMalwareFeed() {
  console.log('[SCRAPER] Aikido Open Source Malware Feed...');
  const NPM_FEED = 'https://malware-list.aikido.dev/malware_predictions.json';
  const PYPI_FEED = 'https://malware-list.aikido.dev/malware_pypi.json';

  const npmPackages = await fetchAikidoMalwareEntries(
    NPM_FEED, 'AIKIDO-', 'npm',
    [NPM_FEED, 'https://www.aikido.dev/code/malware-detection-in-dependencies']
  );
  const pypiPackages = await fetchAikidoMalwareEntries(
    PYPI_FEED, 'AIKIDO-PYPI-', 'PyPI',
    [PYPI_FEED]
  );

  return { packages: npmPackages, pypi_packages: pypiPackages };
}
|
|
1413
|
+
|
|
1301
1414
|
// ============================================
|
|
1302
1415
|
// SOURCE 5: OSV.dev Lightweight API
|
|
1303
1416
|
// Used by `muaddib update` (fast, no zip download)
|
|
@@ -1388,9 +1501,36 @@ async function queryOSVBatch(packageNames) {
|
|
|
1388
1501
|
function getNoVersionSkipCount() { return _noVersionSkipCount; }
|
|
1389
1502
|
function resetNoVersionSkipCount() { _noVersionSkipCount = 0; }
|
|
1390
1503
|
|
|
1504
|
+
/**
 * Source-aware confidence: an IOC reported by N distinct feeds is more
 * trustworthy than one reported by a single source. Used by webhook gating
 * and the /diff command to prioritize multi-confirmed alerts.
 *
 * Tiers:
 *   N >= 3  → 'high'   (cross-confirmed, alert immediately)
 *   N === 2 → 'medium' (single-corroboration, alert with sandbox confirm)
 *   N <= 1  → 'low'    (single-feed only, log + sandbox before alert)
 *
 * @param {object} pkg - IOC package entry (with optional `sources` array)
 * @returns {{ tier: 'high'|'medium'|'low', count: number, sources: string[] }}
 */
function getSourceConfidence(pkg) {
  if (!pkg) {
    return { tier: 'low', count: 0, sources: [] };
  }
  let feedNames;
  if (Array.isArray(pkg.sources) && pkg.sources.length > 0) {
    feedNames = pkg.sources.map(s => s.name || 'unknown');
  } else {
    // Legacy entries without a sources array count as one feed.
    feedNames = [pkg.source || (pkg.freshness && pkg.freshness.source) || 'unknown'];
  }
  const unique = [...new Set(feedNames)];
  const tier = unique.length >= 3 ? 'high' : (unique.length === 2 ? 'medium' : 'low');
  return { tier, count: unique.length, sources: unique };
}
|
|
1528
|
+
|
|
1391
1529
|
module.exports = {
|
|
1392
1530
|
runScraper, scrapeShaiHuludDetector, scrapeDatadogIOCs,
|
|
1531
|
+
scrapeAikidoMalwareFeed,
|
|
1393
1532
|
scrapeOSVLightweightAPI, queryOSVBatch,
|
|
1533
|
+
getSourceConfidence,
|
|
1394
1534
|
// Pure utility functions (exported for testing)
|
|
1395
1535
|
parseCSVLine, parseCSV, extractVersions, parseOSVEntry,
|
|
1396
1536
|
createFreshness, isAllowedRedirect,
|
package/src/ioc/updater.js
CHANGED
|
@@ -136,7 +136,9 @@ function mergeIOCs(target, source) {
|
|
|
136
136
|
target._hashSet = new Set(target.hashes);
|
|
137
137
|
target._markerSet = new Set(target.markers);
|
|
138
138
|
target._fileSet = new Set(target.files);
|
|
139
|
+
target._stringIocSet = new Set((target.stringIocs || []).map(s => s.string));
|
|
139
140
|
}
|
|
141
|
+
if (!target.stringIocs) target.stringIocs = [];
|
|
140
142
|
|
|
141
143
|
let added = 0;
|
|
142
144
|
|
|
@@ -184,6 +186,16 @@ function mergeIOCs(target, source) {
|
|
|
184
186
|
}
|
|
185
187
|
}
|
|
186
188
|
|
|
189
|
+
// Merge string IOCs (YARA-style)
|
|
190
|
+
for (const sIoc of source.stringIocs || []) {
|
|
191
|
+
const literal = sIoc && typeof sIoc.string === 'string' ? sIoc.string : null;
|
|
192
|
+
if (!literal) continue;
|
|
193
|
+
if (!target._stringIocSet.has(literal)) {
|
|
194
|
+
target.stringIocs.push(sIoc);
|
|
195
|
+
target._stringIocSet.add(literal);
|
|
196
|
+
}
|
|
197
|
+
}
|
|
198
|
+
|
|
187
199
|
return added;
|
|
188
200
|
}
|
|
189
201
|
|
|
@@ -207,7 +219,9 @@ function loadCachedIOCs() {
|
|
|
207
219
|
pypi_packages: [],
|
|
208
220
|
hashes: yamlIOCs.hashes.map(function(h) { return h.sha256; }),
|
|
209
221
|
markers: yamlIOCs.markers.map(function(m) { return m.pattern; }),
|
|
210
|
-
files: yamlIOCs.files.map(function(f) { return f.name; })
|
|
222
|
+
files: yamlIOCs.files.map(function(f) { return f.name; }),
|
|
223
|
+
// string-IOCs from string-iocs.yaml (YARA-style high-precision artifacts)
|
|
224
|
+
stringIocs: Array.isArray(yamlIOCs.stringIocs) ? [...yamlIOCs.stringIocs] : []
|
|
211
225
|
};
|
|
212
226
|
|
|
213
227
|
// Priority 2a: Local scraped IOCs (full enriched file)
|
|
@@ -349,6 +363,11 @@ function createOptimizedIOCs(iocs) {
|
|
|
349
363
|
// Set for suspicious files
|
|
350
364
|
const filesSet = new Set(iocs.files);
|
|
351
365
|
|
|
366
|
+
// String IOCs (YARA-style): keep both array (for metadata) and Map keyed by string
|
|
367
|
+
// for O(1) campaign lookup once a substring match has been confirmed.
|
|
368
|
+
const stringIocsArr = Array.isArray(iocs.stringIocs) ? iocs.stringIocs : [];
|
|
369
|
+
const stringIocsMap = new Map(stringIocsArr.map(s => [s.string, s]));
|
|
370
|
+
|
|
352
371
|
return {
|
|
353
372
|
// Optimized structures (npm)
|
|
354
373
|
packagesMap,
|
|
@@ -360,6 +379,9 @@ function createOptimizedIOCs(iocs) {
|
|
|
360
379
|
hashesSet,
|
|
361
380
|
markersSet,
|
|
362
381
|
filesSet,
|
|
382
|
+
// String IOCs (YARA-style)
|
|
383
|
+
stringIocs: stringIocsArr,
|
|
384
|
+
stringIocsMap,
|
|
363
385
|
// Original arrays for compatibility
|
|
364
386
|
packages: iocs.packages,
|
|
365
387
|
pypi_packages: iocs.pypi_packages || [],
|
package/src/ioc/yaml-loader.js
CHANGED
|
@@ -34,7 +34,10 @@ function loadYAMLIOCs() {
|
|
|
34
34
|
packages: [],
|
|
35
35
|
hashes: [],
|
|
36
36
|
markers: [],
|
|
37
|
-
files: []
|
|
37
|
+
files: [],
|
|
38
|
+
// string-IOCs (YARA-style high-precision artifacts) — see iocs/string-iocs.yaml
|
|
39
|
+
// Each entry: { string, campaign, severity, source, description }
|
|
40
|
+
stringIocs: []
|
|
38
41
|
};
|
|
39
42
|
|
|
40
43
|
// Dedup sets for O(1) lookup during loading
|
|
@@ -42,6 +45,7 @@ function loadYAMLIOCs() {
|
|
|
42
45
|
const seenHashes = new Set();
|
|
43
46
|
const seenMarkers = new Set();
|
|
44
47
|
const seenFiles = new Set();
|
|
48
|
+
const seenStrings = new Set();
|
|
45
49
|
|
|
46
50
|
// Charger packages.yaml
|
|
47
51
|
loadPackagesYAML(path.join(IOCS_DIR, 'packages.yaml'), iocs, seenPkgs);
|
|
@@ -52,9 +56,53 @@ function loadYAMLIOCs() {
|
|
|
52
56
|
// Charger hashes.yaml
|
|
53
57
|
loadHashesYAML(path.join(IOCS_DIR, 'hashes.yaml'), iocs, seenHashes, seenMarkers, seenFiles);
|
|
54
58
|
|
|
59
|
+
// Charger string-iocs.yaml (YARA-style)
|
|
60
|
+
loadStringIocsYAML(path.join(IOCS_DIR, 'string-iocs.yaml'), iocs, seenStrings);
|
|
61
|
+
|
|
55
62
|
return iocs;
|
|
56
63
|
}
|
|
57
64
|
|
|
65
|
+
/**
 * Load YARA-style string IOCs from string-iocs.yaml.
 * Each entry must satisfy the inclusion criteria documented in that file:
 *   - length >= 6 chars
 *   - confirmed in >= 1 sample malware
 *   - absent from benign corpus
 *   - unique enough that substring match is decisive
 * The length floor is enforced here as defense-in-depth; invalid entries
 * are dropped and summarized in a single warning line.
 *
 * @param {string} filePath - Path to string-iocs.yaml.
 * @param {object} iocs - Accumulator; valid entries appended to iocs.stringIocs.
 * @param {Set<string>} seenStrings - Cross-file dedup set of string literals.
 */
function loadStringIocsYAML(filePath, iocs, seenStrings) {
  if (!fs.existsSync(filePath)) return;
  const MIN_STRING_LEN = 6;
  let dropped = 0;

  try {
    const data = yaml.load(readVerifiedYAML(filePath), { schema: yaml.JSON_SCHEMA });
    if (!data || !Array.isArray(data.strings)) return;

    for (const entry of data.strings) {
      const value = entry && typeof entry.string === 'string' ? entry.string : null;
      if (value === null || value.length < MIN_STRING_LEN) {
        dropped += 1;
        continue;
      }
      if (seenStrings.has(value)) continue;
      seenStrings.add(value);

      // Normalize optional fields; severity defaults to CRITICAL unless an
      // explicitly lower tier was declared.
      const severity = (entry.severity === 'HIGH' || entry.severity === 'MEDIUM') ? entry.severity : 'CRITICAL';
      iocs.stringIocs.push({
        string: value,
        campaign: typeof entry.campaign === 'string' ? entry.campaign : 'unknown',
        severity,
        source: typeof entry.source === 'string' ? entry.source : '',
        description: typeof entry.description === 'string' ? entry.description : ''
      });
    }
    if (dropped > 0) {
      console.error(`[WARN] string-iocs.yaml: ${dropped} entries dropped (missing string field or below ${MIN_STRING_LEN} chars)`);
    }
  } catch (e) {
    console.error('[WARN] Erreur parsing string-iocs.yaml:', e.message);
  }
}
|
|
105
|
+
|
|
58
106
|
function loadPackagesYAML(filePath, iocs, seenPkgs) {
|
|
59
107
|
if (!fs.existsSync(filePath)) return;
|
|
60
108
|
|
package/src/monitor/webhook.js
CHANGED
|
@@ -9,6 +9,7 @@ const fs = require('fs');
|
|
|
9
9
|
const path = require('path');
|
|
10
10
|
|
|
11
11
|
const { sendWebhook } = require('../webhook.js');
|
|
12
|
+
const { sendIngest, isIngestConfigured } = require('../integrations/api-ingest.js');
|
|
12
13
|
const {
|
|
13
14
|
atomicWriteFileSync,
|
|
14
15
|
ALERTS_LOG_DIR,
|
|
@@ -451,6 +452,12 @@ async function trySendWebhook(name, version, ecosystem, result, sandboxResult, m
|
|
|
451
452
|
alertedPackageRules.set(name, new Set(currentRules));
|
|
452
453
|
}
|
|
453
454
|
|
|
455
|
+
// Push to muad-api dashboard (fire-and-forget, fires once per unique package
|
|
456
|
+
// even when scope grouping batches the Discord webhook).
|
|
457
|
+
if (isIngestConfigured()) {
|
|
458
|
+
sendIngest(name, version, result).catch(() => {});
|
|
459
|
+
}
|
|
460
|
+
|
|
454
461
|
// Scope grouping: buffer scoped npm packages for grouped webhook
|
|
455
462
|
const scope = extractScope(name);
|
|
456
463
|
if (scope && ecosystem === 'npm') {
|