autoremediator 0.1.1 → 0.1.2
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/dist/chunk-H4ICCI3K.js +1847 -0
- package/dist/chunk-H4ICCI3K.js.map +1 -0
- package/dist/cli.d.ts +1 -0
- package/dist/cli.js +119 -0
- package/dist/cli.js.map +1 -0
- package/dist/index.d.ts +175 -0
- package/dist/index.js +19 -0
- package/dist/index.js.map +1 -0
- package/dist/mcp/server.d.ts +1 -0
- package/dist/mcp/server.js +80 -0
- package/dist/mcp/server.js.map +1 -0
- package/dist/openapi/server.d.ts +2 -0
- package/dist/openapi/server.js +236 -0
- package/dist/openapi/server.js.map +1 -0
- package/package.json +1 -1
|
@@ -0,0 +1,1847 @@
|
|
|
1
|
+
// src/remediation/pipeline.ts
|
|
2
|
+
import { generateText as generateText2 } from "ai";
|
|
3
|
+
import { existsSync as existsSync4, readFileSync as readFileSync4 } from "fs";
|
|
4
|
+
import { join as join7 } from "path";
|
|
5
|
+
import semver4 from "semver";
|
|
6
|
+
|
|
7
|
+
// src/platform/config.ts
|
|
8
|
+
/**
 * Resolve which LLM provider to use.
 * Precedence: explicit option > AUTOREMEDIATOR_LLM_PROVIDER env var > "openai".
 * @param {{llmProvider?: string}} [options]
 * @returns {"openai"|"anthropic"|"local"}
 * @throws {Error} when the resolved value is not a supported provider.
 */
function resolveProvider(options = {}) {
  const provider = options.llmProvider ?? process.env.AUTOREMEDIATOR_LLM_PROVIDER ?? "openai";
  const supported = ["openai", "anthropic", "local"];
  if (!supported.includes(provider)) {
    throw new Error(
      `Unsupported LLM provider "${provider}". Set AUTOREMEDIATOR_LLM_PROVIDER to "openai", "anthropic", or "local".`
    );
  }
  return provider;
}
|
|
17
|
+
/**
 * Resolve the model name for a provider.
 * Precedence: explicit option > AUTOREMEDIATOR_MODEL env var > per-provider default.
 * @param {"openai"|"anthropic"|"local"} provider
 * @param {{model?: string}} [options]
 * @returns {string|undefined} undefined for an unrecognized provider with no override.
 */
function resolveModelName(provider, options = {}) {
  if (options.model) return options.model;
  if (process.env.AUTOREMEDIATOR_MODEL) return process.env.AUTOREMEDIATOR_MODEL;
  switch (provider) {
    case "openai":
      return "gpt-4o";
    case "anthropic":
      return "claude-sonnet-4-5";
    case "local":
      return "local";
    default:
      return undefined;
  }
}
|
|
27
|
+
/**
 * Instantiate a provider-specific language model via the AI SDK.
 * The "local" provider never creates a model — it is served by the
 * deterministic pipeline path and this function throws for it.
 * @param {{llmProvider?: string, model?: string}} [options]
 * @returns {Promise<unknown>} the SDK model handle.
 * @throws {Error} for the local provider or when the required API key env var is unset.
 */
async function createModel(options = {}) {
  const provider = resolveProvider(options);
  if (provider === "local") {
    throw new Error(
      "Local provider does not create a language model. Use the deterministic pipeline path instead."
    );
  }
  const modelName = resolveModelName(provider, options);
  switch (provider) {
    case "openai": {
      const apiKey = process.env.OPENAI_API_KEY;
      if (!apiKey) {
        throw new Error(
          "OPENAI_API_KEY environment variable is required when using the openai provider."
        );
      }
      // SDK loaded lazily so the dependency is only paid when actually used.
      const { createOpenAI } = await import("@ai-sdk/openai");
      return createOpenAI({ apiKey })(modelName);
    }
    case "anthropic": {
      const apiKey = process.env.ANTHROPIC_API_KEY;
      if (!apiKey) {
        throw new Error(
          "ANTHROPIC_API_KEY environment variable is required when using the anthropic provider."
        );
      }
      const { createAnthropic } = await import("@ai-sdk/anthropic");
      return createAnthropic({ apiKey })(modelName);
    }
    default:
      // resolveProvider already validated the value; this is a defensive guard.
      throw new Error(`Unhandled provider: ${provider}`);
  }
}
|
|
59
|
+
/**
 * Read NVD API configuration from the environment.
 * The key is optional — buildNvdHeaders simply omits it when absent.
 * @returns {{apiKey: string|undefined}}
 */
function getNvdConfig() {
  const apiKey = process.env.AUTOREMEDIATOR_NVD_API_KEY;
  return { apiKey };
}
|
|
64
|
+
/**
 * Read the optional GitHub token from the environment.
 * Callers (buildHeaders) only attach Authorization when this is set.
 * @returns {string|undefined}
 */
function getGitHubToken() {
  const token = process.env.GITHUB_TOKEN;
  return token;
}
|
|
67
|
+
|
|
68
|
+
// src/platform/package-manager.ts
|
|
69
|
+
import { existsSync } from "fs";
|
|
70
|
+
import { join } from "path";
|
|
71
|
+
/**
 * Infer the package manager from which lockfile exists in the project root.
 * pnpm and yarn lockfiles are checked in that order; npm is the fallback.
 * @param {string} cwd - project root directory.
 * @returns {"pnpm"|"yarn"|"npm"}
 */
function detectPackageManager(cwd) {
  const lockfileToPm = [
    ["pnpm-lock.yaml", "pnpm"],
    ["yarn.lock", "yarn"],
  ];
  for (const [lockfile, pm] of lockfileToPm) {
    if (existsSync(join(cwd, lockfile))) return pm;
  }
  return "npm";
}
|
|
76
|
+
/**
 * Command table for a package manager: argv arrays for install / offline
 * install / dev-install / test / list, plus the lockfile name.
 * Unknown values fall back to the npm table (matching the original behavior).
 * Note the yarn offline entry is a plain install — no extra flag is passed.
 * @param {"pnpm"|"yarn"|"npm"|string} pm
 * @returns {{install: string[], installPreferOffline: string[], installDev: (pkg: string) => string[], test: string[], list: string[], lockfileName: string}}
 */
function getPackageManagerCommands(pm) {
  const tables = {
    pnpm: {
      install: ["pnpm", "install"],
      installPreferOffline: ["pnpm", "install", "--prefer-offline"],
      installDev: (pkg) => ["pnpm", "add", "-D", pkg],
      test: ["pnpm", "test"],
      list: ["pnpm", "list", "--json", "--depth=0"],
      lockfileName: "pnpm-lock.yaml"
    },
    yarn: {
      install: ["yarn", "install"],
      installPreferOffline: ["yarn", "install"],
      installDev: (pkg) => ["yarn", "add", "--dev", pkg],
      test: ["yarn", "test"],
      list: ["yarn", "list", "--json", "--depth=0"],
      lockfileName: "yarn.lock"
    },
    npm: {
      install: ["npm", "install"],
      installPreferOffline: ["npm", "install", "--prefer-offline"],
      installDev: (pkg) => ["npm", "install", "--save-dev", pkg],
      test: ["npm", "test"],
      list: ["npm", "list", "--json", "--depth=0"],
      lockfileName: "package-lock.json"
    }
  };
  return tables[pm] ?? tables.npm;
}
|
|
106
|
+
/**
 * Parse `pm list --json` output into a Map of package name -> exact version.
 * yarn emits newline-delimited JSON events (we take "tree" events); npm and
 * pnpm emit a single JSON document (pnpm's may be wrapped in an array).
 * All parse failures degrade to whatever was collected so far — never throws.
 * @param {"pnpm"|"yarn"|"npm"} pm
 * @param {string} stdout
 * @returns {Map<string, string>}
 */
function parseListOutput(pm, stdout) {
  const versions = new Map();
  if (!stdout.trim()) return versions;
  if (pm === "yarn") {
    for (const rawLine of stdout.split("\n")) {
      const line = rawLine.trim();
      if (!line) continue;
      try {
        const event = JSON.parse(line);
        if (event.type !== "tree") continue;
        for (const tree of event.data?.trees ?? []) {
          const label = tree.name ?? "";
          // Split on the LAST "@" so scoped names like "@scope/pkg@1.0.0" survive.
          const at = label.lastIndexOf("@");
          if (at <= 0) continue;
          const name = label.slice(0, at);
          const version = label.slice(at + 1);
          if (name && version) versions.set(name, version);
        }
      } catch {
        // Non-JSON or malformed lines are ignored.
      }
    }
    return versions;
  }
  let doc;
  try {
    doc = JSON.parse(stdout);
  } catch {
    return versions;
  }
  const root = Array.isArray(doc) ? doc[0] : doc;
  for (const [name, entry] of Object.entries(root?.dependencies ?? {})) {
    const version = entry?.version;
    if (typeof version === "string" && version) versions.set(name, version);
  }
  return versions;
}
|
|
146
|
+
|
|
147
|
+
// src/remediation/tools/lookup-cve.ts
|
|
148
|
+
import { tool } from "ai";
|
|
149
|
+
import { z } from "zod";
|
|
150
|
+
|
|
151
|
+
// src/intelligence/sources/osv.ts
|
|
152
|
+
var OSV_BASE = "https://api.osv.dev/v1";
|
|
153
|
+
/**
 * GET a single vulnerability record from the OSV API.
 * @param {string} cveId
 * @returns {Promise<object|null>} null when OSV has no record (404).
 * @throws {Error} on any other non-2xx response.
 */
async function fetchOsvVuln(cveId) {
  const res = await fetch(`${OSV_BASE}/vulns/${encodeURIComponent(cveId)}`, {
    headers: { Accept: "application/json" }
  });
  if (res.status === 404) return null;
  if (!res.ok) {
    throw new Error(`OSV API error ${res.status} for ${cveId}: ${await res.text()}`);
  }
  return res.json();
}
|
|
164
|
+
/**
 * Translate OSV range events into a node-semver range expression.
 * OSV uses "0" to mean "since the beginning"; semver needs "0.0.0".
 * An empty event list yields the match-everything range ">=0.0.0".
 * @param {Array<{introduced?: string, fixed?: string, last_affected?: string}>} events
 * @returns {string}
 */
function osvEventsToSemverRange(events) {
  const clauses = events.flatMap((event) => {
    const out = [];
    if (event.introduced !== undefined) {
      out.push(`>=${event.introduced === "0" ? "0.0.0" : event.introduced}`);
    }
    if (event.fixed !== undefined) {
      out.push(`<${event.fixed}`);
    }
    if (event.last_affected !== undefined) {
      out.push(`<=${event.last_affected}`);
    }
    return out;
  });
  return clauses.length ? clauses.join(" ") : ">=0.0.0";
}
|
|
180
|
+
/**
 * Convert a raw OSV record into internal CVE details, keeping only entries
 * for the npm ecosystem. The vulnerable range comes from the first SEMVER
 * range (full-range fallback when none), and the first patched version from
 * that range's first "fixed" event.
 * @param {object} vuln - raw OSV vulnerability document.
 * @returns {{id: string, summary: string, severity: string, references: string[], affectedPackages: object[]}}
 */
function parseOsvVuln(vuln) {
  const affectedPackages = [];
  for (const affected of vuln.affected ?? []) {
    const ecosystem = affected.package?.ecosystem;
    const packageName = affected.package?.name;
    if (typeof ecosystem !== "string" || !ecosystem) continue;
    if (typeof packageName !== "string" || !packageName) continue;
    if (ecosystem.toLowerCase() !== "npm") continue;
    const semverRange = affected.ranges?.find((r) => r.type === "SEMVER");
    const fixedEvent = semverRange?.events.find((e) => e.fixed !== undefined);
    affectedPackages.push({
      name: packageName,
      ecosystem: "npm",
      vulnerableRange: semverRange ? osvEventsToSemverRange(semverRange.events) : ">=0.0.0",
      firstPatchedVersion: fixedEvent?.fixed,
      source: "osv"
    });
  }
  return {
    id: vuln.id,
    summary: vuln.summary ?? vuln.details ?? "No summary available.",
    severity: deriveSeverity(vuln.severity),
    references: vuln.references?.map((r) => r.url) ?? [],
    affectedPackages
  };
}
|
|
208
|
+
/**
 * Bucket a CVSS base score into a severity label. The score is parsed from
 * the trailing "<digits>.<digits>" of the entry's score string, preferring a
 * CVSS_V3 entry. NOTE(review): OSV "score" fields are frequently full CVSS
 * vector strings with no trailing numeric score; those return "UNKNOWN".
 * @param {Array<{type: string, score: string}>|undefined} severityEntries
 * @returns {"CRITICAL"|"HIGH"|"MEDIUM"|"LOW"|"UNKNOWN"}
 */
function deriveSeverity(severityEntries) {
  if (!severityEntries?.length) return "UNKNOWN";
  const entry = severityEntries.find((s) => s.type === "CVSS_V3") ?? severityEntries[0];
  const match = entry.score.match(/(\d+\.\d+)$/);
  if (!match) return "UNKNOWN";
  const score = Number.parseFloat(match[1]);
  if (score >= 9) return "CRITICAL";
  if (score >= 7) return "HIGH";
  if (score >= 4) return "MEDIUM";
  return "LOW";
}
|
|
221
|
+
/**
 * Resolve a CVE through OSV.
 * @param {string} cveId
 * @returns {Promise<object|null>} parsed details, or null when OSV has no record.
 */
async function lookupCveOsv(cveId) {
  const record = await fetchOsvVuln(cveId);
  return record ? parseOsvVuln(record) : null;
}
|
|
226
|
+
|
|
227
|
+
// src/intelligence/sources/github-advisory.ts
|
|
228
|
+
var GH_ADVISORY_BASE = "https://api.github.com/advisories";
|
|
229
|
+
/**
 * Standard GitHub REST headers; Authorization is attached only when a token
 * is available from the environment.
 * @returns {Record<string, string>}
 */
function buildHeaders() {
  const token = getGitHubToken();
  return {
    Accept: "application/vnd.github+json",
    "X-GitHub-Api-Version": "2022-11-28",
    ...(token ? { Authorization: `Bearer ${token}` } : {})
  };
}
|
|
240
|
+
/**
 * Query reviewed npm advisories for a CVE from the GitHub Advisory API.
 * Non-OK responses degrade to an empty list (warning logged for non-404)
 * so the OSV-based path can still proceed.
 * @param {string} cveId
 * @returns {Promise<object[]>}
 */
async function fetchGhAdvisories(cveId) {
  const url = new URL(GH_ADVISORY_BASE);
  const query = {
    cve_id: cveId,
    ecosystem: "npm",
    type: "reviewed",
    per_page: "10"
  };
  for (const [key, value] of Object.entries(query)) {
    url.searchParams.set(key, value);
  }
  const res = await fetch(url.toString(), { headers: buildHeaders() });
  if (res.status === 404) return [];
  if (!res.ok) {
    console.warn(
      `[autoremediator] GitHub Advisory API returned ${res.status} for ${cveId} \u2014 skipping.`
    );
    return [];
  }
  return res.json();
}
|
|
256
|
+
/**
 * Flatten GitHub advisory records into per-package entries, keeping only the
 * npm ecosystem. Missing ranges default to the match-everything ">=0.0.0".
 * @param {object[]} advisories
 * @returns {object[]} affected-package entries tagged source "github-advisory".
 */
function parseGhAdvisories(advisories) {
  return advisories.flatMap((advisory) =>
    advisory.vulnerabilities
      .filter((vuln) => vuln.package.ecosystem.toLowerCase() === "npm")
      .map((vuln) => ({
        name: vuln.package.name,
        ecosystem: "npm",
        vulnerableRange: vuln.vulnerable_version_range ?? ">=0.0.0",
        firstPatchedVersion: vuln.first_patched_version ?? undefined,
        source: "github-advisory"
      }))
  );
}
|
|
272
|
+
/**
 * Merge GitHub Advisory package entries into CVE details (typically from OSV).
 * Existing entries only gain a firstPatchedVersion when they lack one;
 * packages unknown to the base details are appended.
 *
 * Fix: the original spread `{ ...details }` was shallow, so pushing into
 * `enriched.affectedPackages` and writing `existing.firstPatchedVersion`
 * mutated the caller's `details` object despite the copy signaling
 * non-mutation intent. The array and its entries are now copied first.
 * @param {{affectedPackages: object[]}} details - base CVE details (not mutated).
 * @param {object[]} ghPackages - entries from parseGhAdvisories.
 * @returns {object} a new details object with the merged affectedPackages.
 */
function mergeGhDataIntoCveDetails(details, ghPackages) {
  // Copy the array AND each entry so neither the caller's list nor its
  // elements are modified in place.
  const affectedPackages = details.affectedPackages.map((pkg) => ({ ...pkg }));
  for (const ghPkg of ghPackages) {
    const existing = affectedPackages.find((p) => p.name === ghPkg.name);
    if (existing) {
      if (!existing.firstPatchedVersion && ghPkg.firstPatchedVersion) {
        existing.firstPatchedVersion = ghPkg.firstPatchedVersion;
      }
    } else {
      affectedPackages.push(ghPkg);
    }
  }
  return { ...details, affectedPackages };
}
|
|
288
|
+
/**
 * Resolve a CVE's npm package entries from the GitHub Advisory Database.
 * Network-level failures already degrade to [] inside fetchGhAdvisories.
 * @param {string} cveId
 * @returns {Promise<object[]>}
 */
async function lookupCveGitHub(cveId) {
  return parseGhAdvisories(await fetchGhAdvisories(cveId));
}
|
|
292
|
+
|
|
293
|
+
// src/intelligence/sources/nvd.ts
|
|
294
|
+
var NVD_BASE = "https://services.nvd.nist.gov/rest/json/cves/2.0";
|
|
295
|
+
/**
 * Headers for NVD requests. NVD expects the key in an "apiKey" header;
 * it is omitted entirely when not configured.
 * @returns {Record<string, string>}
 */
function buildNvdHeaders() {
  const { apiKey } = getNvdConfig();
  return apiKey
    ? { Accept: "application/json", apiKey }
    : { Accept: "application/json" };
}
|
|
303
|
+
/**
 * Best-effort CVSS lookup from NVD for a CVE. Any failure — network error,
 * non-OK response, unexpected shape, or missing metrics — yields undefined
 * so enrichment never blocks the pipeline.
 * Metric preference: CVSS v3.1, then v3.0, then v2.
 * @param {string} cveId
 * @returns {Promise<{score: number, severity: string}|undefined>}
 */
async function fetchNvdCvss(cveId) {
  const severityMap = {
    CRITICAL: "CRITICAL",
    HIGH: "HIGH",
    MEDIUM: "MEDIUM",
    LOW: "LOW"
  };
  try {
    const res = await fetch(`${NVD_BASE}?cveId=${encodeURIComponent(cveId)}`, {
      headers: buildNvdHeaders()
    });
    if (!res.ok) return undefined;
    const data = await res.json();
    const vuln = data.vulnerabilities?.[0];
    if (!vuln) return undefined;
    const metrics = vuln.cve.metrics;
    const metric = metrics?.cvssMetricV31?.[0] ?? metrics?.cvssMetricV30?.[0] ?? metrics?.cvssMetricV2?.[0];
    if (!metric) return undefined;
    return {
      score: metric.cvssData.baseScore,
      severity: severityMap[metric.cvssData.baseSeverity.toUpperCase()] ?? "UNKNOWN"
    };
  } catch {
    return undefined;
  }
}
|
|
330
|
+
/**
 * Attach NVD CVSS data to CVE details. Mutates and returns the same object:
 * the score is always recorded when available, but the severity is only
 * overwritten when nothing better than "UNKNOWN" was derived earlier.
 * @param {{id: string, severity: string, cvssScore?: number}} details
 * @returns {Promise<object>} the same details object.
 */
async function enrichWithNvd(details) {
  const cvss = await fetchNvdCvss(details.id);
  if (!cvss) return details;
  details.cvssScore = cvss.score;
  if (details.severity === "UNKNOWN") {
    details.severity = cvss.severity;
  }
  return details;
}
|
|
340
|
+
|
|
341
|
+
// src/remediation/tools/lookup-cve.ts
|
|
342
|
+
// Agent tool: resolve a CVE ID into affected-npm-package data by combining
// OSV (primary record), GitHub Advisory (gap filler), and NVD (CVSS
// enrichment). Returns { success: false, error } when nothing usable exists.
var lookupCveTool = tool({
  description: "Look up a CVE ID and return the list of affected npm packages, their vulnerable version ranges, and the first patched version. Always call this first.",
  parameters: z.object({
    // Case-insensitive; normalized to upper case before querying.
    cveId: z.string().regex(/^CVE-\d{4}-\d+$/i, "Must be a valid CVE ID like CVE-2021-23337")
  }),
  execute: async ({ cveId }) => {
    const normalizedId = cveId.toUpperCase();
    // Query both sources in parallel; either may legitimately come back empty.
    const [osvDetails, ghPackages] = await Promise.all([
      lookupCveOsv(normalizedId),
      lookupCveGitHub(normalizedId)
    ]);
    if (!osvDetails && ghPackages.length === 0) {
      return {
        success: false,
        error: `CVE "${normalizedId}" was not found in OSV or GitHub Advisory databases. It may be too new, or not affect npm packages.`
      };
    }
    // When OSV has no record, start from a GitHub-only skeleton.
    let details = osvDetails ?? {
      id: normalizedId,
      summary: "Details sourced from GitHub Advisory Database.",
      severity: "UNKNOWN",
      references: [],
      affectedPackages: []
    };
    if (ghPackages.length > 0) {
      details = mergeGhDataIntoCveDetails(details, ghPackages);
    }
    // NVD adds a CVSS score and may upgrade an UNKNOWN severity.
    details = await enrichWithNvd(details);
    if (details.affectedPackages.length === 0) {
      return {
        success: false,
        error: `CVE "${normalizedId}" was found but has no npm-specific affected packages listed. It may affect a different ecosystem.`
      };
    }
    return { success: true, data: details };
  }
});
|
|
379
|
+
|
|
380
|
+
// src/remediation/tools/check-inventory.ts
|
|
381
|
+
import { tool as tool2 } from "ai";
|
|
382
|
+
import { z as z2 } from "zod";
|
|
383
|
+
import { readFileSync } from "fs";
|
|
384
|
+
import { join as join2 } from "path";
|
|
385
|
+
import { execa } from "execa";
|
|
386
|
+
// Agent tool: enumerate the target project's installed packages (name,
// version, direct/indirect) from the package manager's `list --json` output,
// falling back to package.json declared ranges when listing yields nothing.
var checkInventoryTool = tool2({
  description: "Read the project's package.json and installed dependencies to list packages and exact versions. Must be called before checking version matches.",
  parameters: z2.object({
    cwd: z2.string().describe("Absolute path to the consumer project's root directory"),
    packageManager: z2.enum(["npm", "pnpm", "yarn"]).optional().describe("Package manager used by the target project (auto-detected if omitted)")
  }),
  execute: async ({ cwd, packageManager }) => {
    let pkgJson;
    try {
      pkgJson = JSON.parse(readFileSync(join2(cwd, "package.json"), "utf8"));
    } catch {
      // Missing/unparseable package.json: report instead of throwing so the
      // agent can surface the problem.
      return {
        packages: [],
        error: `Could not read package.json in "${cwd}". Is this a Node.js project?`
      };
    }
    const pm = packageManager ?? detectPackageManager(cwd);
    const commands = getPackageManagerCommands(pm);
    let installedVersions = /* @__PURE__ */ new Map();
    try {
      const [cmd, ...args] = commands.list;
      // reject: false — a non-zero exit does not throw; whatever stdout was
      // produced is still parsed below.
      const listResult = await execa(cmd, args, {
        cwd,
        stdio: "pipe",
        reject: false
      });
      installedVersions = parseListOutput(pm, listResult.stdout || "");
    } catch {
      // Listing is best-effort; the package.json fallback below still applies.
    }
    const packages = [];
    for (const [name, version] of installedVersions.entries()) {
      // "direct" = declared anywhere in package.json; otherwise transitive.
      const isDirect = Boolean(pkgJson.dependencies?.[name]) || Boolean(pkgJson.devDependencies?.[name]) || Boolean(pkgJson.peerDependencies?.[name]);
      packages.push({
        name,
        version,
        type: isDirect ? "direct" : "indirect"
      });
    }
    if (packages.length === 0) {
      // Fallback when listing produced nothing (e.g. node_modules absent):
      // approximate versions by stripping range operators from declared ranges.
      const allDeps = {
        ...pkgJson.dependencies,
        ...pkgJson.devDependencies
      };
      for (const [name, version] of Object.entries(allDeps)) {
        const cleaned = version.replace(/^[\^~>=<]+/, "").trim();
        packages.push({ name, version: cleaned, type: "direct" });
      }
    }
    return { packages };
  }
});
|
|
437
|
+
|
|
438
|
+
// src/remediation/tools/check-version-match.ts
|
|
439
|
+
import { tool as tool3 } from "ai";
|
|
440
|
+
import { z as z3 } from "zod";
|
|
441
|
+
import semver from "semver";
|
|
442
|
+
// Zod schema mirroring the affected-package entries produced by lookup-cve.
var affectedPackageSchema = z3.object({
  name: z3.string(),
  ecosystem: z3.literal("npm"),
  vulnerableRange: z3.string(),
  firstPatchedVersion: z3.string().optional(),
  source: z3.enum(["osv", "github-advisory"])
});
// Zod schema mirroring the inventory entries produced by check-inventory.
var inventoryPackageSchema = z3.object({
  name: z3.string(),
  version: z3.string(),
  type: z3.enum(["direct", "indirect"])
});
|
|
454
|
+
// Agent tool: intersect the installed inventory with the CVE's vulnerable
// ranges via semver to find the packages that are actually exposed.
var checkVersionMatchTool = tool3({
  description: "Check which of the project's installed packages fall within the CVE's vulnerable version ranges. Returns only the packages that are actually vulnerable.",
  parameters: z3.object({
    installedPackages: z3.array(inventoryPackageSchema).describe("Output from the check-inventory tool"),
    affectedPackages: z3.array(affectedPackageSchema).describe("affectedPackages array from the lookup-cve tool result")
  }),
  execute: async ({ installedPackages, affectedPackages }) => {
    const vulnerable = [];
    for (const affected of affectedPackages) {
      const matches = installedPackages.filter(
        (p) => p.name === affected.name
      );
      for (const installed of matches) {
        // Skip entries without an exact semver version (e.g. leftover range
        // strings from the package.json fallback in check-inventory).
        if (!semver.valid(installed.version)) continue;
        let isVulnerable = false;
        try {
          isVulnerable = semver.satisfies(installed.version, affected.vulnerableRange, {
            includePrerelease: false
          });
        } catch {
          // Unparseable advisory range — treat as not matching.
          continue;
        }
        if (isVulnerable) {
          vulnerable.push({ installed, affected });
        }
      }
    }
    return {
      vulnerablePackages: vulnerable,
      checkedCount: installedPackages.length
    };
  }
});
|
|
487
|
+
|
|
488
|
+
// src/remediation/tools/find-fixed-version.ts
|
|
489
|
+
import { tool as tool4 } from "ai";
|
|
490
|
+
import { z as z4 } from "zod";
|
|
491
|
+
|
|
492
|
+
// src/intelligence/sources/registry.ts
|
|
493
|
+
import semver2 from "semver";
|
|
494
|
+
var NPM_REGISTRY = "https://registry.npmjs.org";
|
|
495
|
+
/**
 * List all published version strings for an npm package from the registry.
 * @param {string} packageName
 * @returns {Promise<string[]>} [] when the package does not exist (404).
 * @throws {Error} on any other non-2xx response.
 */
async function fetchPackageVersions(packageName) {
  const res = await fetch(`${NPM_REGISTRY}/${encodeURIComponent(packageName)}`, {
    headers: { Accept: "application/json" }
  });
  if (res.status === 404) return [];
  if (!res.ok) {
    throw new Error(
      `npm registry error ${res.status} for "${packageName}": ${await res.text()}`
    );
  }
  const body = await res.json();
  return Object.keys(body.versions);
}
|
|
509
|
+
/**
 * Find the lowest published version >= firstPatchedVersion, preferring one
 * that stays within the installed major line.
 * @param {string} packageName
 * @param {string} installedVersion - exact semver currently installed.
 * @param {string} firstPatchedVersion - floor (first non-vulnerable version).
 * @returns {Promise<string|undefined>} undefined when no version qualifies.
 */
async function findSafeUpgradeVersion(packageName, installedVersion, firstPatchedVersion) {
  const published = await fetchPackageVersions(packageName);
  if (published.length === 0) return undefined;
  const currentMajor = semver2.major(installedVersion);
  const candidates = published
    .filter((v) => semver2.valid(v) && semver2.gte(v, firstPatchedVersion))
    .sort(semver2.compare);
  if (candidates.length === 0) return undefined;
  // Same-major wins; otherwise fall back to the lowest qualifying version.
  return candidates.find((v) => semver2.major(v) === currentMajor) ?? candidates[0];
}
|
|
521
|
+
|
|
522
|
+
// src/remediation/tools/find-fixed-version.ts
|
|
523
|
+
// Agent tool: pick a concrete safe upgrade target from the npm registry and
// flag whether it crosses a major version boundary.
var findFixedVersionTool = tool4({
  description: "Query the npm registry to find the lowest published version of a package that is >= the first patched version. Prefer same-major upgrades. Returns undefined if no safe version exists.",
  parameters: z4.object({
    packageName: z4.string().describe("The npm package name"),
    installedVersion: z4.string().describe("The currently installed version (exact semver)"),
    firstPatchedVersion: z4.string().describe(
      "The first version that is NOT vulnerable (from lookup-cve). Use this as the floor."
    )
  }),
  execute: async ({
    packageName,
    installedVersion,
    firstPatchedVersion
  }) => {
    const safeVersion = await findSafeUpgradeVersion(
      packageName,
      installedVersion,
      firstPatchedVersion
    );
    if (!safeVersion) {
      // Nothing published clears the floor; the agent must fall back to the
      // patch-file strategy.
      return {
        isMajorBump: false,
        message: `No safe upgrade version found for "${packageName}". The patch-file path will be needed.`
      };
    }
    // Major components compared straight off the version strings.
    const installedMajor = parseInt(installedVersion.split(".")[0] ?? "0", 10);
    const safeMajor = parseInt(safeVersion.split(".")[0] ?? "0", 10);
    const isMajorBump = safeMajor > installedMajor;
    return {
      safeVersion,
      isMajorBump,
      message: isMajorBump ? `Found safe version ${safeVersion} for "${packageName}", but it is a major bump from ${installedVersion}. Applying anyway \u2014 consumer should review for breaking changes.` : `Found safe version ${safeVersion} for "${packageName}" (from ${installedVersion}).`
    };
  }
});
|
|
558
|
+
|
|
559
|
+
// src/remediation/tools/apply-version-bump.ts
|
|
560
|
+
import { tool as tool5 } from "ai";
|
|
561
|
+
import { z as z5 } from "zod";
|
|
562
|
+
import { join as join4 } from "path";
|
|
563
|
+
import { readFileSync as readFileSync3, writeFileSync } from "fs";
|
|
564
|
+
import { execa as execa2 } from "execa";
|
|
565
|
+
import semver3 from "semver";
|
|
566
|
+
|
|
567
|
+
// src/platform/policy.ts
|
|
568
|
+
import { existsSync as existsSync2, readFileSync as readFileSync2 } from "fs";
|
|
569
|
+
import { join as join3 } from "path";
|
|
570
|
+
// Policy used when no .autoremediator.json is found (or it is unreadable):
// apply fixes automatically, refuse major version bumps, and do no
// package-level allow/deny filtering.
var DEFAULT_POLICY = {
  allowMajorBumps: false,
  denyPackages: [],
  allowPackages: [],
  autoApply: true
};
|
|
576
|
+
/**
 * Load the remediation policy from an explicit path or
 * <cwd>/.autoremediator.json, filling missing fields from DEFAULT_POLICY.
 * Any read or parse failure silently falls back to the defaults.
 * @param {string} cwd
 * @param {string} [explicitPath]
 * @returns {object} a complete policy object.
 */
function loadPolicy(cwd, explicitPath) {
  const policyFile = explicitPath ?? join3(cwd, ".autoremediator.json");
  if (!existsSync2(policyFile)) return DEFAULT_POLICY;
  try {
    const parsed = JSON.parse(readFileSync2(policyFile, "utf8"));
    return {
      allowMajorBumps: parsed.allowMajorBumps ?? DEFAULT_POLICY.allowMajorBumps,
      denyPackages: parsed.denyPackages ?? DEFAULT_POLICY.denyPackages,
      allowPackages: parsed.allowPackages ?? DEFAULT_POLICY.allowPackages,
      autoApply: parsed.autoApply ?? DEFAULT_POLICY.autoApply
    };
  } catch {
    return DEFAULT_POLICY;
  }
}
|
|
591
|
+
/**
 * Decide whether policy permits touching a package.
 * The deny list always wins; a non-empty allow list acts as a whitelist.
 * @param {{denyPackages: string[], allowPackages: string[]}} policy
 * @param {string} packageName
 * @returns {boolean}
 */
function isPackageAllowed(policy, packageName) {
  if (policy.denyPackages.includes(packageName)) return false;
  const { allowPackages } = policy;
  return allowPackages.length === 0 || allowPackages.includes(packageName);
}
|
|
598
|
+
|
|
599
|
+
// src/remediation/tools/apply-version-bump.ts
|
|
600
|
+
// Agent tool: rewrite the vulnerable dependency's range in package.json,
// reinstall, optionally run the project's test script, and roll back on any
// failure. Policy gates (deny/allow list, major-bump refusal) run first.
var applyVersionBumpTool = tool5({
  description: "Update package.json to use the safe version of a vulnerable package and run the project's package manager install. In dry-run mode, only reports what would change.",
  parameters: z5.object({
    cwd: z5.string().describe("Absolute path to the consumer project root"),
    packageManager: z5.enum(["npm", "pnpm", "yarn"]).optional().describe("Package manager used by the target project (auto-detected if omitted)"),
    packageName: z5.string().describe("The npm package to upgrade"),
    fromVersion: z5.string().describe("The currently installed vulnerable version"),
    toVersion: z5.string().describe("The safe target version to upgrade to"),
    dryRun: z5.boolean().default(false).describe("If true, report changes but do not write"),
    policyPath: z5.string().optional().describe("Optional path to .autoremediator policy file"),
    skipTests: z5.boolean().default(true).describe("If true, skip test validation after applying the fix")
  }),
  execute: async ({
    cwd,
    packageManager,
    packageName,
    fromVersion,
    toVersion,
    dryRun,
    policyPath,
    skipTests
  }) => {
    const pm = packageManager ?? detectPackageManager(cwd);
    const commands = getPackageManagerCommands(pm);
    const pkgPath = join4(cwd, "package.json");
    const policy = loadPolicy(cwd, policyPath);
    // Policy gate 1: package-level allow/deny lists.
    if (!isPackageAllowed(policy, packageName)) {
      return {
        packageName,
        strategy: "none",
        fromVersion,
        toVersion,
        applied: false,
        dryRun,
        message: `Policy blocked changes for package "${packageName}".`
      };
    }
    // Policy gate 2: refuse a cross-major upgrade unless explicitly allowed.
    const isMajorBump = semver3.valid(fromVersion) && semver3.valid(toVersion) && semver3.major(toVersion) > semver3.major(fromVersion);
    if (isMajorBump && !policy.allowMajorBumps) {
      return {
        packageName,
        strategy: "none",
        fromVersion,
        toVersion,
        applied: false,
        dryRun,
        message: `Policy blocked major bump for "${packageName}" (${fromVersion} -> ${toVersion}).`
      };
    }
    let pkgJson;
    try {
      pkgJson = JSON.parse(readFileSync3(pkgPath, "utf8"));
    } catch {
      return {
        packageName,
        strategy: "none",
        fromVersion,
        applied: false,
        dryRun,
        message: `Could not read package.json at "${pkgPath}".`
      };
    }
    // Only direct dependencies (declared in one of these sections) can be
    // bumped this way; transitive dependencies are out of scope here.
    const depField = ["dependencies", "devDependencies", "peerDependencies"].find(
      (f) => pkgJson[f]?.[packageName] !== void 0
    );
    if (!depField) {
      return {
        packageName,
        strategy: "none",
        fromVersion,
        applied: false,
        dryRun,
        message: `"${packageName}" was not found in package.json dependencies (it may be a transitive dep). Cannot auto-bump.`
      };
    }
    const currentRange = pkgJson[depField][packageName];
    // Preserve the existing ^/~ range prefix on the new version string.
    const prefixMatch = currentRange.match(/^([~^]?)/);
    const prefix = prefixMatch?.[1] ?? "";
    const newRange = `${prefix}${toVersion}`;
    if (dryRun) {
      const installCmd = commands.installPreferOffline.join(" ");
      const testCmd = commands.test.join(" ");
      return {
        packageName,
        strategy: "version-bump",
        fromVersion,
        toVersion,
        applied: false,
        dryRun: true,
        message: `[DRY RUN] Would update ${depField}.${packageName}: "${currentRange}" \u2192 "${newRange}", then run ${installCmd}${skipTests ? "" : ` and ${testCmd}`}.`
      };
    }
    // Apply the bump, then install. If install fails, restore the previous
    // range in package.json before reporting the failure.
    pkgJson[depField][packageName] = newRange;
    writeFileSync(pkgPath, JSON.stringify(pkgJson, null, 2) + "\n", "utf8");
    try {
      const [installCmd, ...installArgs] = commands.installPreferOffline;
      await execa2(installCmd, installArgs, {
        cwd,
        stdio: "pipe"
      });
    } catch (err) {
      pkgJson[depField][packageName] = currentRange;
      writeFileSync(pkgPath, JSON.stringify(pkgJson, null, 2) + "\n", "utf8");
      const message = err instanceof Error ? err.message : String(err);
      return {
        packageName,
        strategy: "version-bump",
        fromVersion,
        toVersion,
        applied: false,
        dryRun: false,
        message: `${commands.installPreferOffline.join(" ")} failed after updating "${packageName}" to ${toVersion}. Reverted. Error: ${message}`
      };
    }
    // Optional validation: run the project's test script. On failure, revert
    // package.json and best-effort reinstall the previous dependency tree.
    if (!skipTests) {
      try {
        const [testCmd, ...testArgs] = commands.test;
        await execa2(testCmd, testArgs, {
          cwd,
          stdio: "pipe"
        });
      } catch (err) {
        pkgJson[depField][packageName] = currentRange;
        writeFileSync(pkgPath, JSON.stringify(pkgJson, null, 2) + "\n", "utf8");
        try {
          const [rollbackCmd, ...rollbackArgs] = commands.installPreferOffline;
          await execa2(rollbackCmd, rollbackArgs, {
            cwd,
            stdio: "pipe"
          });
        } catch {
          // Rollback install failure is swallowed; package.json is already restored.
        }
        const message = err instanceof Error ? err.message : String(err);
        return {
          packageName,
          strategy: "version-bump",
          fromVersion,
          toVersion,
          applied: false,
          dryRun: false,
          message: `${commands.test.join(" ")} failed after upgrading "${packageName}" to ${toVersion}. Rolled back to ${currentRange}. Error: ${message}`
        };
      }
    }
    return {
      packageName,
      strategy: "version-bump",
      fromVersion,
      toVersion,
      applied: true,
      dryRun: false,
      message: `Successfully upgraded "${packageName}" from ${fromVersion} to ${toVersion}, ran ${commands.installPreferOffline.join(" ")}${skipTests ? "" : `, and passed ${commands.test.join(" ")}`}.`
    };
  }
});
|
|
755
|
+
|
|
756
|
+
// src/remediation/tools/fetch-package-source.ts
|
|
757
|
+
import { tool as tool6 } from "ai";
|
|
758
|
+
import { z as z6 } from "zod";
|
|
759
|
+
import { mkdir, readdir, readFile } from "fs/promises";
|
|
760
|
+
import { join as join5 } from "path";
|
|
761
|
+
import { execa as execa3 } from "execa";
|
|
762
|
+
var fetchPackageSourceTool = tool6({
  description: "Download package tarball from npm and extract source files for CVE analysis. Supports custom file patterns (default: *.js, *.ts).",
  parameters: z6.object({
    packageName: z6.string().min(1).describe("The npm package name (e.g., 'lodash', '@scope/package')"),
    version: z6.string().regex(/^\d+\.\d+\.\d+/, "Must be a valid semver version").describe("Exact package version to download"),
    filePatterns: z6.array(z6.string()).optional().default(["*.js", "*.ts"]).describe(
      "File patterns to extract (glob patterns, default: *.js, *.ts)"
    )
  }),
  /**
   * Downloads `packageName@version` from the npm registry via curl, unpacks it
   * with tar, and returns `{ success, sourceCode, packageDir }` where
   * `sourceCode` maps tarball-relative paths to file contents for every file
   * whose basename matches one of `filePatterns`. Returns
   * `{ success: false, error }` on any failure.
   */
  execute: async ({ packageName, version, filePatterns }) => {
    // NOTE(review): hard-coded /tmp is not portable to Windows — consider os.tmpdir().
    const tempBaseDir = `/tmp/autoremediator-pkg-${Date.now()}`;
    const extractDir = join5(tempBaseDir, "out");
    // Compile the glob patterns ONCE, outside the per-file loop.
    // Escape literal dots BEFORE expanding "*": the reverse order also escapes
    // the dot introduced by ".*", yielding /^\.*\.js$/ for "*.js", which
    // rejects ordinary names like "index.js".
    const patternRegexes = filePatterns.map(
      (pattern) => new RegExp(`^${pattern.replace(/\./g, "\\.").replace(/\*/g, ".*")}$`)
    );
    try {
      // Scoped packages ("@scope/name") publish tarballs under the unscoped basename.
      const npmUrl = `https://registry.npmjs.org/${packageName}/-/${packageName.split("/").pop()}-${version}.tgz`;
      await mkdir(tempBaseDir, { recursive: true });
      const tarballPath = join5(tempBaseDir, "package.tgz");
      // -f makes curl exit non-zero on HTTP errors (e.g. 404) instead of
      // saving the error page for tar to fail on; the 404 then surfaces in
      // the error message handled below.
      await execa3("curl", ["-f", "-L", "-o", tarballPath, npmUrl]);
      await mkdir(extractDir, { recursive: true });
      await execa3("tar", ["-xzf", tarballPath, "-C", extractDir]);
      const extractedContents = await readdir(extractDir);
      // npm tarballs conventionally unpack into a top-level "package/" directory.
      const packageRootDir = extractedContents.includes("package") ? join5(extractDir, "package") : extractDir;
      const sourceCode = {};
      // Recursively collect matching files, skipping vendor/build directories.
      async function walkDir(dir, relativeBase) {
        try {
          const files = await readdir(dir, { withFileTypes: true });
          for (const file of files) {
            const fullPath = join5(dir, file.name);
            const relPath = join5(relativeBase, file.name);
            if (file.isDirectory()) {
              if (![
                "node_modules",
                ".git",
                "dist",
                "build",
                "coverage",
                ".next",
                "out"
              ].includes(file.name)) {
                await walkDir(fullPath, relPath);
              }
            } else if (file.isFile()) {
              if (patternRegexes.some((regex) => regex.test(file.name))) {
                try {
                  sourceCode[relPath] = await readFile(fullPath, "utf8");
                } catch {
                  // Unreadable file (permissions, encoding): skip it.
                }
              }
            }
          }
        } catch {
          // Unreadable directory: skip its subtree, keep walking the rest.
        }
      }
      await walkDir(packageRootDir, "");
      if (Object.keys(sourceCode).length === 0) {
        return {
          success: false,
          error: `No source files matching patterns [${filePatterns.join(", ")}] found in ${packageName}@${version}. Download succeeded but extraction yielded no matching files.`
        };
      }
      return {
        success: true,
        sourceCode,
        packageDir: packageRootDir
      };
    } catch (err) {
      const message = err instanceof Error ? err.message : String(err);
      // curl -f reports HTTP failures with the status code in its message.
      if (message.includes("404") || message.includes("not found")) {
        return {
          success: false,
          error: `Package ${packageName}@${version} not found on npm registry. It may not exist or the version may be incorrect.`
        };
      }
      return {
        success: false,
        error: `Failed to fetch and extract package ${packageName}@${version}: ${message}`
      };
    }
  }
});
|
|
852
|
+
|
|
853
|
+
// src/remediation/tools/generate-patch.ts
|
|
854
|
+
import { tool as tool7 } from "ai";
|
|
855
|
+
import { z as z7 } from "zod";
|
|
856
|
+
import { generateText } from "ai";
|
|
857
|
+
// Prompt-context snippets keyed by vulnerability category. The selected value
// is injected verbatim into the patch-generation prompt (see generatePatchTool)
// to steer the LLM toward the right class of fix; "unknown" is the fallback.
var VULNERABILITY_DESCRIPTIONS = {
  redos: "Regular Expression Denial of Service (ReDoS): The vulnerability is caused by poorly constructed regular expressions that cause excessive backtracking when processing certain inputs. The fix should optimize the regex to avoid catastrophic backtracking or replace it with a safer alternative.",
  "code-injection": "Code Injection: The vulnerability allows injected code to be executed. The fix must properly sanitize/validate inputs and prevent dynamic code execution, or use safe alternatives like template literals with proper escaping.",
  "path-traversal": "Path Traversal: The vulnerability allows access to files outside intended directories through path traversal sequences (../, etc.). The fix must validate and normalize file paths, use path.resolve() and path.relative() checks.",
  unknown: "Unknown vulnerability type: Analyze the CVE summary carefully and implement the most appropriate fix for the security issue described."
};
|
|
863
|
+
// LLM-backed patch generator: feeds the vulnerable source plus CVE context to
// the configured model, expects a strict-JSON reply, and converts the model's
// "complete fixed file" outputs into unified diffs via generateUnifiedDiff.
var generatePatchTool = tool7({
  description: "Generate a unified diff patch for a CVE using LLM analysis of vulnerable source code.",
  parameters: z7.object({
    packageName: z7.string().min(1).describe("The npm package name"),
    vulnerableVersion: z7.string().describe("The vulnerable version string"),
    cveId: z7.string().regex(/^CVE-\d{4}-\d+$/i).describe("CVE ID (e.g., CVE-2021-23337)"),
    cveSummary: z7.string().min(10).describe("CVE description and impact"),
    sourceFiles: z7.record(z7.string()).describe(
      "Map of file paths to source code contents from fetch-package-source"
    ),
    vulnerabilityCategory: z7.enum(["redos", "code-injection", "path-traversal", "unknown"]).optional().default("unknown").describe("Category of the vulnerability for better context"),
    dryRun: z7.boolean().optional().default(false).describe("If true, return analysis without generating patches")
  }),
  execute: async ({
    packageName,
    vulnerableVersion,
    cveId,
    cveSummary,
    sourceFiles,
    vulnerabilityCategory,
    dryRun
  }) => {
    try {
      const model = await createModel();
      const modelName = model.modelId || "unknown-model";
      // Render every source file as a fenced block for the prompt.
      const sourceContext = Object.entries(sourceFiles).map(([filePath, content]) => `
### File: ${filePath}
\`\`\`typescript
${content}
\`\`\``).join("\n");
      const vulnerabilityContext = VULNERABILITY_DESCRIPTIONS[vulnerabilityCategory] || VULNERABILITY_DESCRIPTIONS.unknown;
      const prompt = `You are a security expert tasked with analyzing a CVE vulnerability and generating a secure patch.

## CVE Information
- CVE ID: ${cveId}
- Package: ${packageName}@${vulnerableVersion}
- Category: ${vulnerabilityCategory}

## Vulnerability Summary
${cveSummary}

## Vulnerability Type Context
${vulnerabilityContext}

## Vulnerable Source Code
${sourceContext}

## Your Task
Analyze the source code to:
1. Identify the exact code location causing the vulnerability
2. Explain the root cause of the security issue
3. Propose a secure fix that addresses the vulnerability
4. Provide the complete fixed version of affected files

## Response Format
Respond ONLY with valid JSON (no markdown, no extra text):
{
"analysis": "Detailed explanation of the vulnerability root cause and why it's a security issue",
"fixedCode": {
"path/to/file.js": "Complete fixed source code for this file",
"path/to/other.ts": "Complete fixed source code for this file"
},
"confidence": 0.95,
"riskLevel": "medium"
}

Important:
- confidence: number between 0 and 1 indicating how confident you are in the fix
- riskLevel: "low", "medium", or "high" - assess the risk of the proposed fix breaking functionality
- fixedCode: must contain the COMPLETE file contents (not just diffs), with the vulnerability addressed
- Only include files that need modification`;
      const { text } = await generateText({
        model,
        prompt,
        temperature: 0.3
        // Lower temperature for more consistent code generation
      });
      let analysis;
      try {
        // Tolerate prose around the JSON: grab the outermost {...} span.
        const jsonMatch = text.match(/\{[\s\S]*\}/);
        if (!jsonMatch) {
          throw new Error("No JSON found in LLM response");
        }
        analysis = JSON.parse(jsonMatch[0]);
      } catch (err) {
        return {
          success: false,
          llmModel: modelName,
          confidence: 0,
          riskLevel: "high",
          error: `Failed to parse LLM response: ${err instanceof Error ? err.message : "unknown error"}`
        };
      }
      // Structural validation of the model's reply before trusting it.
      if (!analysis.analysis || !analysis.fixedCode || typeof analysis.confidence !== "number" || !["low", "medium", "high"].includes(analysis.riskLevel)) {
        return {
          success: false,
          llmModel: modelName,
          confidence: 0,
          riskLevel: "high",
          error: "LLM response missing required fields (analysis, fixedCode, confidence, riskLevel)"
        };
      }
      // Dry run: report confidence/risk only, without materializing patches.
      if (dryRun) {
        return {
          success: true,
          llmModel: modelName,
          confidence: analysis.confidence,
          riskLevel: analysis.riskLevel
        };
      }
      const patches = [];
      for (const [filePath, fixedCode] of Object.entries(
        analysis.fixedCode
      )) {
        const originalCode = sourceFiles[filePath];
        // Ignore files the model invented that were not in the input set.
        if (!originalCode) {
          continue;
        }
        const unifiedDiff = generateUnifiedDiff(
          originalCode,
          fixedCode,
          filePath
        );
        // null means the "fixed" file is byte-identical to the original.
        if (unifiedDiff) {
          patches.push({
            filePath,
            unifiedDiff
          });
        }
      }
      if (patches.length === 0) {
        return {
          success: false,
          llmModel: modelName,
          confidence: analysis.confidence,
          riskLevel: analysis.riskLevel,
          error: "No valid patches could be generated from LLM response"
        };
      }
      return {
        success: true,
        patches,
        llmModel: modelName,
        confidence: analysis.confidence,
        riskLevel: analysis.riskLevel
      };
    } catch (err) {
      const message = err instanceof Error ? err.message : String(err);
      return {
        success: false,
        llmModel: "unknown",
        confidence: 0,
        riskLevel: "high",
        error: `Patch generation failed: ${message}`
      };
    }
  }
});
|
|
1021
|
+
/**
 * Builds a single-hunk unified diff between two file contents.
 *
 * This is a naive positional diff: line i of `original` is compared to line i
 * of `fixed`, so an insertion or deletion misaligns everything after it and
 * produces a larger (but still valid) diff. Returns null when the contents
 * are identical.
 *
 * Fix over the previous version: empty lines were tested for truthiness
 * ("" is falsy) and silently dropped, so the emitted hunk body disagreed
 * with the declared `@@ -1,N +1,M @@` counts and `patch` would reject the
 * output. Presence is now decided by index bounds, and empty lines are
 * emitted as " ", "-", or "+" lines like any other.
 *
 * @param {string} original - Original file contents.
 * @param {string} fixed - Proposed file contents.
 * @param {string} filePath - Path used in the `--- a/` / `+++ b/` headers.
 * @returns {string|null} Unified diff text, or null if nothing changed.
 */
function generateUnifiedDiff(original, fixed, filePath) {
  if (original === fixed) {
    return null;
  }
  const originalLines = original.split("\n");
  const fixedLines = fixed.split("\n");
  const diff = [];
  diff.push(`--- a/${filePath}`);
  diff.push(`+++ b/${filePath}`);
  diff.push("@@ -1," + originalLines.length + " +1," + fixedLines.length + " @@");
  const maxLen = Math.max(originalLines.length, fixedLines.length);
  for (let i = 0; i < maxLen; i++) {
    const hasOrig = i < originalLines.length;
    const hasFixed = i < fixedLines.length;
    const origLine = hasOrig ? originalLines[i] : "";
    const fixedLine = hasFixed ? fixedLines[i] : "";
    if (hasOrig && hasFixed && origLine === fixedLine) {
      // Unchanged context line (including empty ones).
      diff.push(" " + origLine);
    } else {
      if (hasOrig) {
        diff.push("-" + origLine);
      }
      if (hasFixed) {
        diff.push("+" + fixedLine);
      }
    }
  }
  return diff.join("\n");
}
|
|
1048
|
+
|
|
1049
|
+
// src/remediation/tools/apply-patch-file.ts
|
|
1050
|
+
import { tool as tool8 } from "ai";
|
|
1051
|
+
import { z as z8 } from "zod";
|
|
1052
|
+
import { existsSync as existsSync3 } from "fs";
|
|
1053
|
+
import { mkdir as mkdir2, mkdtemp, readFile as readFile2, rm, writeFile } from "fs/promises";
|
|
1054
|
+
import { tmpdir } from "os";
|
|
1055
|
+
import { join as join6 } from "path";
|
|
1056
|
+
import { execa as execa4 } from "execa";
|
|
1057
|
+
// Persists a generated patch under patchesDir and applies it: pnpm/yarn v2+
// go through their native `patch`/`patch-commit` flow (applyNativePatch);
// npm and yarn classic fall back to wiring patch-package into postinstall.
var applyPatchFileTool = tool8({
  description: "Write generated patch file and apply it using package-manager-native patch flow when available, falling back to patch-package when needed.",
  parameters: z8.object({
    packageName: z8.string().min(1).describe("The npm package name"),
    vulnerableVersion: z8.string().describe("The vulnerable version string"),
    patchContent: z8.string().min(10).describe("Unified diff patch content from generate-patch"),
    patchesDir: z8.string().optional().default("./patches").describe("Directory to store patch files"),
    cwd: z8.string().describe("Project root directory (for package.json)"),
    packageManager: z8.enum(["npm", "pnpm", "yarn"]).optional().describe("Package manager used by the target project (auto-detected if omitted)"),
    validateWithTests: z8.boolean().optional().default(true).describe("Run package manager test command to validate patch doesn't break anything")
  }),
  execute: async ({
    packageName,
    vulnerableVersion,
    patchContent,
    patchesDir,
    cwd,
    packageManager,
    validateWithTests
  }) => {
    try {
      const pm = packageManager ?? detectPackageManager(cwd);
      const patchesDirPath = join6(cwd, patchesDir);
      await mkdir2(patchesDirPath, { recursive: true });
      // patch-package naming convention: <name>+<version>.patch.
      const patchFileName = `${packageName}+${vulnerableVersion}.patch`;
      const patchFilePath = join6(patchesDirPath, patchFileName);
      await writeFile(patchFilePath, patchContent, "utf8");
      let validationResult;
      const patchMode = await resolvePatchMode(pm, cwd);
      // patch-package mode only configures postinstall (the patch file above
      // is picked up on the next install); native modes apply immediately.
      const applyResult = patchMode === "patch-package" ? await configurePatchPackagePostinstall(cwd) : await applyNativePatch({
        cwd,
        packageName,
        vulnerableVersion,
        patchContent,
        patchMode
      });
      if (!applyResult.success) {
        return {
          success: false,
          patchPath: patchFilePath,
          patchMode,
          // Only meaningful in patch-package mode; undefined for native modes.
          postinstallConfigured: patchMode === "patch-package" ? false : void 0,
          error: applyResult.error
        };
      }
      if (validateWithTests) {
        validationResult = await validatePatchWithTests(cwd, pm);
      }
      // NOTE(review): a failing validation run still returns success: true;
      // callers must inspect `validation.passed` — confirm this is intended.
      return {
        success: true,
        patchPath: patchFilePath,
        patchMode,
        postinstallConfigured: patchMode === "patch-package",
        validation: validationResult
      };
    } catch (err) {
      const message = err instanceof Error ? err.message : String(err);
      return {
        success: false,
        error: `Failed to apply patch file: ${message}`
      };
    }
  }
});
|
|
1121
|
+
/**
 * Decides which patch-application flow to use for a project.
 *
 * @param {"npm"|"pnpm"|"yarn"} packageManager - Detected package manager.
 * @param {string} cwd - Project root (used when probing yarn's version).
 * @returns {Promise<"patch-package"|"native-pnpm"|"native-yarn">}
 */
async function resolvePatchMode(packageManager, cwd) {
  // npm has no native patch flow; pnpm always ships one.
  switch (packageManager) {
    case "npm":
      return "patch-package";
    case "pnpm":
      return "native-pnpm";
    default:
      break;
  }
  // yarn: only Berry (v2+) supports `yarn patch`; classic — or any failure
  // to run yarn at all — falls back to patch-package.
  try {
    const { stdout } = await execa4("yarn", ["--version"], {
      cwd,
      stdio: "pipe"
    });
    const majorText = stdout.trim().split(".")[0] || "0";
    const major = Number.parseInt(majorText, 10);
    return major >= 2 ? "native-yarn" : "patch-package";
  } catch {
    return "patch-package";
  }
}
|
|
1136
|
+
/**
 * Ensures the project's package.json runs patch-package on postinstall so
 * stored .patch files are re-applied after every install.
 *
 * Leaves an existing postinstall script that already mentions patch-package
 * untouched; otherwise appends (or creates) the hook.
 *
 * @param {string} cwd - Project root containing package.json.
 * @returns {Promise<{success: boolean, error?: string}>}
 */
async function configurePatchPackagePostinstall(cwd) {
  const manifestPath = join6(cwd, "package.json");
  let manifest;
  try {
    const raw = await readFile2(manifestPath, "utf8");
    manifest = JSON.parse(raw);
  } catch {
    return {
      success: false,
      error: `Could not read package.json at ${manifestPath}`
    };
  }
  if (!manifest.scripts) {
    manifest.scripts = {};
  }
  const existing = manifest.scripts.postinstall || "";
  if (!existing) {
    manifest.scripts.postinstall = "patch-package";
  } else if (!existing.includes("patch-package")) {
    // Preserve the user's hook and chain patch-package after it.
    manifest.scripts.postinstall = `${existing} && patch-package`;
  }
  await writeFile(manifestPath, JSON.stringify(manifest, null, 2) + "\n", "utf8");
  return { success: true };
}
|
|
1160
|
+
// Applies a unified diff through the package manager's native patch flow:
// `pnpm patch` / `yarn patch` materializes an editable copy of the package,
// the diff is applied there with the system `patch` tool, and
// `patch-commit` records the result in the project.
async function applyNativePatch(params) {
  const { cwd, packageName, vulnerableVersion, patchContent, patchMode } = params;
  const packageSpec = `${packageName}@${vulnerableVersion}`;
  // [command, args] pair for creating the editable patch workspace.
  const createArgs = patchMode === "native-pnpm" ? ["pnpm", ["patch", packageSpec]] : ["yarn", ["patch", packageSpec]];
  let patchDir;
  try {
    const createResult = await execa4(createArgs[0], createArgs[1], {
      cwd,
      stdio: "pipe"
    });
    // The workspace path is only reported in human-readable output, so it is
    // scraped from stdout+stderr (see extractPatchDirectory).
    patchDir = extractPatchDirectory(`${createResult.stdout}
${createResult.stderr}`);
  } catch (err) {
    return {
      success: false,
      error: `Failed to create native patch workspace for ${packageSpec}: ${err instanceof Error ? err.message : String(err)}`
    };
  }
  if (!patchDir) {
    return {
      success: false,
      error: `Could not determine native patch directory for ${packageSpec}.`
    };
  }
  // Write the diff to a temp file so the system `patch` tool can consume it.
  const tempPatchDir = await mkdtemp(join6(tmpdir(), "autoremediator-native-patch-"));
  const tempPatchFile = join6(tempPatchDir, "change.patch");
  try {
    await writeFile(tempPatchFile, patchContent, "utf8");
    // -p1 matches the "a/"//"b/" prefixes produced by generateUnifiedDiff.
    await execa4("patch", ["-p1", "-i", tempPatchFile], {
      cwd: patchDir,
      stdio: "pipe"
    });
    // yarn needs -s to also save the patch into .yarn/patches.
    const commitArgs = patchMode === "native-pnpm" ? ["pnpm", ["patch-commit", patchDir]] : ["yarn", ["patch-commit", "-s", patchDir]];
    await execa4(commitArgs[0], commitArgs[1], {
      cwd,
      stdio: "pipe"
    });
  } catch (err) {
    return {
      success: false,
      error: `Failed to apply native patch for ${packageSpec}: ${err instanceof Error ? err.message : String(err)}`
    };
  } finally {
    // Always clean up the temp diff, whether commit succeeded or not.
    await rm(tempPatchDir, { recursive: true, force: true });
  }
  return { success: true };
}
|
|
1207
|
+
/**
 * Scrapes the editable patch-workspace path out of `pnpm patch` / `yarn patch`
 * console output. Returns the first existing path found, or "" when none is.
 *
 * @param {string} output - Combined stdout/stderr of the patch command.
 * @returns {string} Absolute workspace path, or "" if not detected.
 */
function extractPatchDirectory(output) {
  const candidateLines = output
    .split(/\r?\n/)
    .map((rawLine) => rawLine.trim())
    .filter(Boolean);
  for (const candidate of candidateLines) {
    // The whole line may be the path...
    if (existsSync3(candidate)) {
      return candidate;
    }
    // ...or the path may be one whitespace-separated (possibly quoted) token.
    for (const rawToken of candidate.split(/\s+/)) {
      const token = rawToken.replace(/^['"]|['"]$/g, "");
      if (token.startsWith("/") && existsSync3(token)) {
        return token;
      }
    }
  }
  return "";
}
|
|
1222
|
+
// Runs the project's test command (per package manager) to sanity-check a
// freshly applied patch. Never throws: a failing or timed-out run is
// reported as { passed: false } with a best-effort list of failed tests.
async function validatePatchWithTests(cwd, packageManager) {
  try {
    const commands = getPackageManagerCommands(packageManager);
    const [cmd, ...args] = commands.test;
    const result = await execa4(cmd, args, {
      cwd,
      timeout: 6e4,
      // 60 second timeout
      stdio: "pipe"
    });
    return {
      passed: true,
      output: result.stdout
    };
  } catch (err) {
    // execa attaches the child's stdout to the error on non-zero exit.
    const errorOutput = err instanceof Error && "stdout" in err ? err.stdout : "";
    const failedTests = extractFailedTests(errorOutput);
    return {
      passed: false,
      output: errorOutput,
      failedTests
    };
  }
}
|
|
1246
|
+
/**
 * Pulls failed-test names out of raw test-runner output using
 * runner-specific markers. Results are grouped by marker style (all Mocha
 * hits, then Jest, then generic FAIL) and capped at five entries.
 *
 * @param {string} output - Captured stdout of the failed test run.
 * @returns {string[]} Up to five trimmed failure names.
 */
function extractFailedTests(output) {
  const failurePatterns = [
    /✖\s+(.+?)(?:\n|$)/g,
    // Mocha style
    /●\s+(.+)(?:\n|$)/g,
    // Jest style
    /FAIL.*?(.+?)(?:\n|$)/g
    // Generic FAIL
  ];
  const names = [];
  for (const failurePattern of failurePatterns) {
    for (const match of output.matchAll(failurePattern)) {
      if (match[1]) {
        names.push(match[1].trim());
      }
    }
  }
  // Keep the report short: first five failures only.
  return names.slice(0, 5);
}
|
|
1266
|
+
|
|
1267
|
+
// src/remediation/pipeline.ts
|
|
1268
|
+
// Top-level remediation entry point. With an LLM provider, runs a tool-calling
// agent loop (up to 25 steps) over the lookup/inventory/bump/patch tools and
// harvests results from each finished step; with provider "local", delegates
// to the deterministic runLocalHealPipeline instead.
async function runHealAgent(cveId, options = {}) {
  const provider = resolveProvider(options);
  if (provider === "local") {
    return runLocalHealPipeline(cveId, options);
  }
  const cwd = options.cwd ?? process.cwd();
  const packageManager = options.packageManager ?? detectPackageManager(cwd);
  const dryRun = options.dryRun ?? false;
  const skipTests = options.skipTests ?? true;
  const policyPath = options.policyPath ?? "";
  const patchesDir = options.patchesDir || "./patches";
  const model = await createModel(options);
  const systemPrompt = loadOrchestrationPrompt({
    cveId,
    cwd,
    dryRun,
    skipTests,
    policyPath,
    patchesDir,
    packageManager
  });
  const prompt = `Patch vulnerable dependencies affected by ${cveId} in the project at: ${cwd}. Package manager: ${packageManager}.`;
  // Accumulators filled in by onStepFinish as the agent calls tools.
  const collectedResults = [];
  const vulnerablePackages = [];
  let cveDetails = null;
  let agentSteps = 0;
  let lastGeneratedPatches = null;
  const result = await generateText2({
    model,
    system: systemPrompt,
    prompt,
    tools: {
      "lookup-cve": lookupCveTool,
      "check-inventory": checkInventoryTool,
      "check-version-match": checkVersionMatchTool,
      "find-fixed-version": findFixedVersionTool,
      "apply-version-bump": applyVersionBumpTool,
      "fetch-package-source": fetchPackageSourceTool,
      "generate-patch": generatePatchTool,
      "apply-patch-file": applyPatchFileTool
    },
    maxSteps: 25,
    // Harvest tool outputs into the accumulators after each agent step.
    onStepFinish(stepResult) {
      agentSteps += 1;
      const { toolResults } = stepResult;
      for (const tr of toolResults ?? []) {
        const toolResult = tr.result;
        if (tr.toolName === "lookup-cve" && toolResult?.data) {
          cveDetails = toolResult.data;
        }
        if (tr.toolName === "check-version-match" && toolResult?.vulnerablePackages) {
          vulnerablePackages.push(...toolResult.vulnerablePackages);
        }
        if (tr.toolName === "apply-version-bump") {
          collectedResults.push(toolResult);
        }
        if (tr.toolName === "fetch-package-source" && toolResult?.success) {
          // NOTE(review): sourceData is never read afterwards (dead local),
          // and it reads `toolResult.sourceFiles` while the tool returns a
          // `sourceCode` field — confirm which key is intended.
          const sourceData = {
            success: toolResult.success,
            sourceFiles: toolResult.sourceFiles,
            packageDir: toolResult.packageDir
          };
        }
        if (tr.toolName === "generate-patch" && toolResult?.success) {
          const patchData = {
            success: toolResult.success,
            patches: toolResult.patches,
            confidence: toolResult.confidence,
            riskLevel: toolResult.riskLevel
          };
          // NOTE(review): lastGeneratedPatches is written but never read in
          // this function — possibly leftover from an earlier design.
          lastGeneratedPatches = patchData.patches || null;
        }
        if (tr.toolName === "apply-patch-file" && toolResult) {
          const patchFileResult = {
            success: toolResult.success,
            patchPath: toolResult.patchPath,
            postinstallConfigured: toolResult.postinstallConfigured,
            validation: toolResult.validation
          };
          // Only successful patch applications are recorded as results.
          if (patchFileResult.success) {
            collectedResults.push({
              ...toolResult,
              strategy: "patch-file"
            });
          }
        }
      }
    }
  });
  return {
    cveId,
    cveDetails,
    vulnerablePackages,
    results: collectedResults,
    agentSteps,
    summary: result.text
  };
}
|
|
1366
|
+
// Deterministic, LLM-free remediation pipeline: look up the CVE (OSV, then
// GitHub advisories, then NVD enrichment), match installed packages against
// the affected ranges with semver, and version-bump each vulnerable package
// to a safe upgrade. agentSteps mirrors the agent-mode step counter.
async function runLocalHealPipeline(cveId, options = {}) {
  const cwd = options.cwd ?? process.cwd();
  const packageManager = options.packageManager ?? detectPackageManager(cwd);
  const dryRun = options.dryRun ?? false;
  const skipTests = options.skipTests ?? true;
  const policyPath = options.policyPath ?? "";
  const collectedResults = [];
  const vulnerablePackages = [];
  let cveDetails = null;
  let agentSteps = 0;
  const normalizedId = cveId.toUpperCase();
  // Query both advisory sources in parallel; a GitHub failure degrades to [].
  const [osvDetails, ghPackages] = await Promise.all([
    lookupCveOsv(normalizedId),
    lookupCveGitHub(normalizedId).catch(() => [])
  ]);
  agentSteps += 2;
  if (!osvDetails && ghPackages.length === 0) {
    return {
      cveId,
      cveDetails: null,
      vulnerablePackages,
      results: collectedResults,
      agentSteps,
      summary: `Local mode failed at lookup-cve: ${normalizedId} not found in OSV or GitHub advisory data.`
    };
  }
  // OSV is the primary source; fall back to a GitHub-only skeleton record.
  cveDetails = osvDetails ?? {
    id: normalizedId,
    summary: "Details sourced from GitHub Advisory Database.",
    severity: "UNKNOWN",
    references: [],
    affectedPackages: []
  };
  if (ghPackages.length > 0) {
    cveDetails = mergeGhDataIntoCveDetails(cveDetails, ghPackages);
  }
  cveDetails = await enrichWithNvd(cveDetails);
  if (cveDetails.affectedPackages.length === 0) {
    return {
      cveId,
      cveDetails,
      vulnerablePackages,
      results: collectedResults,
      agentSteps,
      summary: `Local mode lookup succeeded but no npm affected packages were found for ${normalizedId}.`
    };
  }
  const inventory = await checkInventoryTool.execute({ cwd, packageManager });
  agentSteps += 1;
  if (inventory?.error) {
    return {
      cveId,
      cveDetails,
      vulnerablePackages,
      results: collectedResults,
      agentSteps,
      summary: `Local mode failed at check-inventory: ${inventory.error}`
    };
  }
  const installedPackages = inventory.packages ?? [];
  // Cross-match every affected npm package against the installed inventory.
  for (const affected of cveDetails.affectedPackages) {
    if (!affected || typeof affected !== "object") continue;
    if (!affected.name || !affected.vulnerableRange) continue;
    if (affected.ecosystem !== "npm") continue;
    const matches = installedPackages.filter((p) => p.name === affected.name);
    for (const installed of matches) {
      if (!semver4.valid(installed.version)) continue;
      let isVulnerable = false;
      try {
        isVulnerable = semver4.satisfies(installed.version, affected.vulnerableRange, {
          includePrerelease: false
        });
      } catch {
        // Malformed advisory range: skip this install rather than abort.
        continue;
      }
      if (isVulnerable) {
        vulnerablePackages.push({ installed, affected });
      }
    }
  }
  agentSteps += 1;
  // Attempt a deterministic version bump for each vulnerable install.
  for (const vulnerable of vulnerablePackages) {
    const pkg = vulnerable.installed;
    const firstPatchedVersion = vulnerable.affected.firstPatchedVersion;
    if (!firstPatchedVersion) {
      collectedResults.push({
        packageName: pkg.name,
        strategy: "none",
        fromVersion: pkg.version,
        applied: false,
        dryRun,
        message: `No firstPatchedVersion available for ${pkg.name}; cannot resolve deterministic upgrade in local mode.`
      });
      continue;
    }
    const safeVersion = await findSafeUpgradeVersion(
      pkg.name,
      pkg.version,
      firstPatchedVersion
    );
    agentSteps += 1;
    if (!safeVersion) {
      collectedResults.push({
        packageName: pkg.name,
        strategy: "none",
        fromVersion: pkg.version,
        applied: false,
        dryRun,
        message: `No safe upgrade version found for ${pkg.name}.`
      });
      continue;
    }
    const applyResult = await applyVersionBumpTool.execute({
      cwd,
      packageManager,
      packageName: pkg.name,
      fromVersion: pkg.version,
      toVersion: safeVersion,
      dryRun,
      policyPath,
      skipTests
    });
    agentSteps += 1;
    collectedResults.push(applyResult);
  }
  const appliedCount = collectedResults.filter((r) => r.applied).length;
  const unresolvedCount = collectedResults.filter((r) => !r.applied && !r.dryRun).length;
  const dryRunCount = collectedResults.filter((r) => r.dryRun).length;
  return {
    cveId,
    cveDetails,
    vulnerablePackages,
    results: collectedResults,
    agentSteps,
    summary: `Local mode completed: vulnerable=${vulnerablePackages.length}, applied=${appliedCount}, dryRun=${dryRunCount}, unresolved=${unresolvedCount}`
  };
}
|
|
1503
|
+
// Builds the system prompt for the orchestration agent. Prefers a repo-local
// template at .github/instructions/orchestration.instructions.md (rendered via
// {{placeholder}} substitution); otherwise returns a built-in default prompt.
function loadOrchestrationPrompt(ctx) {
  const promptPath = join7(process.cwd(), ".github", "instructions", "orchestration.instructions.md");
  if (existsSync4(promptPath)) {
    // Placeholder -> value pairs, applied in this fixed order.
    const substitutions = [
      ["{{cveId}}", ctx.cveId],
      ["{{cwd}}", ctx.cwd],
      ["{{packageManager}}", ctx.packageManager],
      ["{{dryRun}}", String(ctx.dryRun)],
      ["{{skipTests}}", String(ctx.skipTests)],
      ["{{policyPath}}", ctx.policyPath || "undefined"],
      ["{{patchesDir}}", ctx.patchesDir]
    ];
    let rendered = readFileSync4(promptPath, "utf8");
    for (const [placeholder, value] of substitutions) {
      rendered = rendered.replaceAll(placeholder, value);
    }
    return rendered;
  }
  // Built-in fallback prompt (kept verbatim from the original).
  return `You are autoremediator, an agentic security remediation system for Node.js package dependencies.
Working directory: ${ctx.cwd}
Package manager: ${ctx.packageManager}
Dry run: ${ctx.dryRun}
Skip tests: ${ctx.skipTests}
Policy path: ${ctx.policyPath || "undefined"}
Patches dir: ${ctx.patchesDir}

Required sequence:
1. lookup-cve
2. check-inventory
3. check-version-match
4. find-fixed-version
5. apply-version-bump

Fallback sequence (when strategy="none"):
1. fetch-package-source
2. generate-patch
3. apply-patch-file

Always respect dryRun and policy constraints.`;
}
|
|
1531
|
+
|
|
1532
|
+
// src/scanner/index.ts
|
|
1533
|
+
import { extname } from "path";
|
|
1534
|
+
import { readFileSync as readFileSync8 } from "fs";
|
|
1535
|
+
|
|
1536
|
+
// src/scanner/adapters/npm-audit.ts
|
|
1537
|
+
import { readFileSync as readFileSync5 } from "fs";
|
|
1538
|
+
var CVE_REGEX = /CVE-\d{4}-\d+/gi;
// Maps an npm-audit severity string onto the normalized severity scale.
// npm audit emits "info" | "low" | "moderate" | "high" | "critical"; the
// original code never matched "moderate", silently degrading it to "UNKNOWN".
// Fix: translate "moderate" to "MEDIUM". Anything else unrecognized stays
// "UNKNOWN".
function normalizeSeverity(raw) {
  if (!raw) return "UNKNOWN";
  const up = raw.toUpperCase();
  if (up === "MODERATE") return "MEDIUM";
  if (up === "CRITICAL" || up === "HIGH" || up === "MEDIUM" || up === "LOW") {
    return up;
  }
  return "UNKNOWN";
}
// Parses the JSON produced by `npm audit --json`.
// Returns one finding per unique (CVE, package) pair found in the advisories'
// `via` entries; the CVE id is matched in either the via string itself or the
// advisory object's url/name fields.
function parseNpmAuditJsonFromString(content) {
  const report = JSON.parse(content);
  const findings = [];
  const seen = /* @__PURE__ */ new Set();
  for (const vuln of Object.values(report.vulnerabilities ?? {})) {
    for (const viaEntry of vuln.via ?? []) {
      // `via` entries are either plain strings or advisory objects.
      const text = typeof viaEntry === "string" ? viaEntry : `${viaEntry.url ?? ""} ${viaEntry.name ?? ""}`;
      const matches = text.match(CVE_REGEX) ?? [];
      for (const match of matches) {
        const cveId = match.toUpperCase();
        const key = `${cveId}:${vuln.name}`;
        if (seen.has(key)) continue;
        seen.add(key);
        findings.push({
          cveId,
          source: "npm-audit",
          packageName: vuln.name,
          severity: normalizeSeverity(vuln.severity)
        });
      }
    }
  }
  return findings;
}
// Reads an npm-audit JSON report from disk and parses it.
function parseNpmAuditJsonFile(filePath) {
  const content = readFileSync5(filePath, "utf8");
  return parseNpmAuditJsonFromString(content);
}
|
|
1575
|
+
|
|
1576
|
+
// src/scanner/adapters/yarn-audit.ts
|
|
1577
|
+
import { readFileSync as readFileSync6 } from "fs";
|
|
1578
|
+
var CVE_REGEX2 = /CVE-\d{4}-\d+/gi;
// Maps a yarn-audit severity string onto the normalized severity scale.
// yarn audit emits "info" | "low" | "moderate" | "high" | "critical"; the
// original code never matched "moderate", silently degrading it to "UNKNOWN".
// Fix: translate "moderate" to "MEDIUM". Anything else unrecognized stays
// "UNKNOWN".
function normalizeSeverity2(raw) {
  if (!raw) return "UNKNOWN";
  const up = raw.toUpperCase();
  if (up === "MODERATE") return "MEDIUM";
  if (up === "CRITICAL" || up === "HIGH" || up === "MEDIUM" || up === "LOW") {
    return up;
  }
  return "UNKNOWN";
}
// Parses `yarn audit --json` output (NDJSON: one JSON event per line).
// Only "auditAdvisory" events are considered; CVE ids are matched in the
// advisory url and the explicit `cves` list. One finding is emitted per
// unique (CVE, package) pair.
function parseYarnAuditJsonFromString(content) {
  const findings = [];
  const seen = /* @__PURE__ */ new Set();
  const lines = content.split("\n").map((line) => line.trim()).filter(Boolean);
  for (const line of lines) {
    let parsed;
    try {
      parsed = JSON.parse(line);
    } catch {
      // Skip non-JSON lines (yarn may interleave progress output).
      continue;
    }
    const event = parsed;
    if (event.type !== "auditAdvisory") continue;
    const advisory = event.data?.advisory;
    const packageName = advisory?.module_name;
    const severity = normalizeSeverity2(advisory?.severity);
    const text = `${advisory?.url ?? ""} ${(advisory?.cves ?? []).join(" ")}`;
    const matches = text.match(CVE_REGEX2) ?? [];
    for (const match of matches) {
      const cveId = match.toUpperCase();
      const key = `${cveId}:${packageName ?? ""}`;
      if (seen.has(key)) continue;
      seen.add(key);
      findings.push({
        cveId,
        source: "yarn-audit",
        packageName,
        severity
      });
    }
  }
  return findings;
}
// Reads a yarn-audit NDJSON report from disk and parses it.
function parseYarnAuditJsonFile(filePath) {
  const content = readFileSync6(filePath, "utf8");
  return parseYarnAuditJsonFromString(content);
}
|
|
1624
|
+
|
|
1625
|
+
// src/scanner/adapters/sarif.ts
|
|
1626
|
+
import { readFileSync as readFileSync7 } from "fs";
|
|
1627
|
+
var CVE_REGEX3 = /CVE-\d{4}-\d+/gi;
// Pulls the package name out of a SARIF result's property bag, if present
// and a string; otherwise undefined.
function extractPackageName(result) {
  const candidate = result.properties?.["packageName"];
  if (typeof candidate === "string") {
    return candidate;
  }
  return void 0;
}
// Scans every run/result in a SARIF document for CVE identifiers (looked up
// in the rule id and message text) and emits one finding per unique
// (CVE, package) pair. SARIF carries no normalized severity here, so findings
// are reported as "UNKNOWN".
function parseSarifFromString(content) {
  const document = JSON.parse(content);
  const findings = [];
  const seen = /* @__PURE__ */ new Set();
  const runs = document.runs ?? [];
  for (const run of runs) {
    const results = run.results ?? [];
    for (const result of results) {
      const haystack = `${result.ruleId ?? ""} ${result.message?.text ?? ""}`;
      for (const match of haystack.match(CVE_REGEX3) ?? []) {
        const cveId = match.toUpperCase();
        const packageName = extractPackageName(result);
        const dedupeKey = `${cveId}:${packageName ?? ""}`;
        if (seen.has(dedupeKey)) {
          continue;
        }
        seen.add(dedupeKey);
        findings.push({ cveId, source: "sarif", packageName, severity: "UNKNOWN" });
      }
    }
  }
  return findings;
}
// Reads a SARIF file from disk and parses it.
function parseSarifFile(filePath) {
  return parseSarifFromString(readFileSync7(filePath, "utf8"));
}
|
|
1661
|
+
|
|
1662
|
+
// src/scanner/index.ts
|
|
1663
|
+
// Dispatches a scan report file to the matching parser. When `format` is
// "auto" the format is detected from the file extension/content via
// inferFormat; unknown formats raise an error.
function parseScanInput(filePath, format) {
  const resolved = format === "auto" ? inferFormat(filePath) : format;
  switch (resolved) {
    case "npm-audit":
      return parseNpmAuditJsonFile(filePath);
    case "yarn-audit":
      return parseYarnAuditJsonFile(filePath);
    case "sarif":
      return parseSarifFile(filePath);
    default:
      throw new Error(`Unsupported input format: ${resolved}`);
  }
}
|
|
1676
|
+
// Guesses a scan report's format. A ".sarif" extension wins outright;
// otherwise the first JSON-looking line of the file is probed for yarn-audit
// NDJSON event types. npm-audit is the default when nothing else matches or
// the file cannot be read/parsed.
function inferFormat(filePath) {
  if (extname(filePath).toLowerCase() === ".sarif") {
    return "sarif";
  }
  try {
    const raw = readFileSync8(filePath, "utf8");
    const jsonLine = raw.split("\n").find((line) => line.trim().startsWith("{"));
    if (jsonLine) {
      const probe = JSON.parse(jsonLine);
      if (probe.type === "auditAdvisory" || probe.type === "auditSummary") {
        return "yarn-audit";
      }
    }
  } catch {
    // Unreadable or non-JSON content: fall through to the default (best-effort probe).
  }
  return "npm-audit";
}
|
|
1692
|
+
// Returns the distinct, upper-cased CVE ids across all findings,
// preserving first-seen order.
function uniqueCveIds(findings) {
  const ids = new Set();
  for (const finding of findings) {
    ids.add(finding.cveId.toUpperCase());
  }
  return Array.from(ids);
}
|
|
1695
|
+
|
|
1696
|
+
// src/platform/evidence.ts
|
|
1697
|
+
import { mkdirSync, writeFileSync as writeFileSync2 } from "fs";
|
|
1698
|
+
import { join as join8 } from "path";
|
|
1699
|
+
// Creates a fresh evidence log for a remediation run. The run id is the
// current epoch timestamp in milliseconds rendered as a string; steps start
// empty and are appended via addEvidenceStep.
function createEvidenceLog(cwd, cveIds) {
  const runId = String(Date.now());
  const startedAt = new Date().toISOString();
  return { runId, cveIds, cwd, startedAt, steps: [] };
}
|
|
1708
|
+
// Appends a timestamped step record to the evidence log (mutates `log`).
// `input`, `output` and `error` are optional and stored verbatim.
function addEvidenceStep(log, action, input, output, error) {
  const step = {
    at: new Date().toISOString(),
    action,
    input,
    output,
    error
  };
  log.steps.push(step);
}
|
|
1717
|
+
// Stamps the completion time on the log and returns the same object for
// convenient chaining.
function finalizeEvidence(log) {
  log.finishedAt = new Date().toISOString();
  return log;
}
|
|
1721
|
+
function writeEvidenceLog(cwd, log) {
|
|
1722
|
+
const dir = join8(cwd, ".autoremediator", "evidence");
|
|
1723
|
+
mkdirSync(dir, { recursive: true });
|
|
1724
|
+
const filePath = join8(dir, `${log.runId}.json`);
|
|
1725
|
+
writeFileSync2(filePath, JSON.stringify(log, null, 2) + "\n", "utf8");
|
|
1726
|
+
return filePath;
|
|
1727
|
+
}
|
|
1728
|
+
|
|
1729
|
+
// src/api.ts
|
|
1730
|
+
// Backwards-compatible alias: the pipeline entry point is runHealAgent.
var runRemediationPipeline = runHealAgent;
|
|
1731
|
+
// Validates that `cveId` looks like a CVE identifier (case-insensitive),
// then delegates to the heal agent with the id normalized to upper case.
// Throws an Error when the id does not match the expected format.
async function remediate(cveId, options = {}) {
  const cvePattern = /^CVE-\d{4}-\d+$/i;
  if (!cvePattern.test(cveId)) {
    const message = `Invalid CVE ID: "${cveId}". Expected format: CVE-YYYY-NNNNN (e.g. CVE-2021-23337).`;
    throw new Error(message);
  }
  return runHealAgent(cveId.toUpperCase(), options);
}
|
|
1739
|
+
// Runs the full scan-driven remediation pipeline:
// parse a scanner report -> extract unique CVE ids -> remediate each CVE ->
// aggregate results into a single report, recording an evidence log along
// the way.
// Params: inputPath - path to the scan report; options - cwd, format,
// patchesDir, policyPath, writeEvidence, plus anything `remediate` accepts.
// Returns the aggregate report object (schemaVersion "1.0").
async function remediateFromScan(inputPath, options = {}) {
  const cwd = options.cwd ?? process.cwd();
  const format = options.format ?? "auto";
  const patchesDir = options.patchesDir ?? "./patches";
  const findings = parseScanInput(inputPath, format);
  const cveIds = uniqueCveIds(findings);
  const policy = loadPolicy(cwd, options.policyPath);
  const evidence = createEvidenceLog(cwd, cveIds);
  addEvidenceStep(evidence, "scan.parse", { inputPath, format }, { findingCount: findings.length, cveCount: cveIds.length });
  const reports = [];
  const errors = [];
  const patchValidationFailures = [];
  let patchFileCount = 0;
  for (const cveId of cveIds) {
    try {
      addEvidenceStep(evidence, "heal.start", { cveId });
      const report = await remediate(cveId, {
        ...options,
        patchesDir
      });
      // Drop results for packages the policy disallows before any counting.
      report.results = report.results.filter((r) => isPackageAllowed(policy, r.packageName));
      for (const result of report.results) {
        if (result.strategy === "patch-file") {
          patchFileCount += 1;
        }
        // Record explicit patch-validation failures for the CI summary.
        if (result.validation?.passed === false && result.validation?.error) {
          patchValidationFailures.push({
            packageName: result.packageName,
            cveId,
            error: result.validation.error
          });
        }
      }
      reports.push(report);
      addEvidenceStep(evidence, "heal.finish", { cveId }, { results: report.results.length });
    } catch (error) {
      // A failure on one CVE must not abort the rest of the batch.
      const message = error instanceof Error ? error.message : String(error);
      errors.push({ cveId, message });
      addEvidenceStep(evidence, "heal.error", { cveId }, void 0, message);
    }
  }
  // A result counts as success when it was applied, or when this is a dry
  // run (nothing was expected to be applied).
  let successCount = 0;
  let failedCount = 0;
  for (const report of reports) {
    for (const result of report.results) {
      if (result.applied || result.dryRun) {
        successCount += 1;
      } else {
        failedCount += 1;
      }
    }
  }
  // Per-CVE hard errors also count as failures.
  failedCount += errors.length;
  let status = "ok";
  if (failedCount > 0 && successCount > 0) {
    status = "partial";
  } else if (failedCount > 0 && successCount === 0) {
    status = "failed";
  }
  finalizeEvidence(evidence);
  // Evidence is written unless explicitly disabled with writeEvidence: false.
  const evidenceFile = options.writeEvidence === false ? void 0 : writeEvidenceLog(cwd, evidence);
  return {
    schemaVersion: "1.0",
    status,
    generatedAt: (/* @__PURE__ */ new Date()).toISOString(),
    cveIds,
    reports,
    successCount,
    failedCount,
    errors,
    evidenceFile,
    patchFileCount,
    patchValidationFailures: patchValidationFailures.length > 0 ? patchValidationFailures : void 0,
    patchStorageDir: patchFileCount > 0 ? patchesDir : void 0
  };
}
|
|
1815
|
+
// Condenses a full remediation report into the compact summary shape used by
// CI: counts instead of per-result detail, plus pass-through status fields.
function toCiSummary(report) {
  const remediationCount = report.reports.reduce(
    (total, cveReport) => total + cveReport.results.length,
    0
  );
  return {
    schemaVersion: report.schemaVersion,
    status: report.status,
    generatedAt: report.generatedAt,
    cveCount: report.cveIds.length,
    remediationCount,
    successCount: report.successCount,
    failedCount: report.failedCount,
    errors: report.errors,
    evidenceFile: report.evidenceFile,
    patchFileCount: report.patchFileCount || 0,
    patchValidationFailures: report.patchValidationFailures,
    patchStorageDir: report.patchStorageDir
  };
}
|
|
1835
|
+
// Derives the process exit code for CI: non-zero iff any remediation failed.
function ciExitCode(summary) {
  if (summary.failedCount > 0) {
    return 1;
  }
  return 0;
}
|
|
1838
|
+
|
|
1839
|
+
export {
|
|
1840
|
+
runHealAgent,
|
|
1841
|
+
runRemediationPipeline,
|
|
1842
|
+
remediate,
|
|
1843
|
+
remediateFromScan,
|
|
1844
|
+
toCiSummary,
|
|
1845
|
+
ciExitCode
|
|
1846
|
+
};
|
|
1847
|
+
//# sourceMappingURL=chunk-H4ICCI3K.js.map
|