@triedotdev/mcp 1.0.110 → 1.0.111

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
@@ -0,0 +1,1923 @@
1
+ import {
2
+ ContextGraph
3
+ } from "./chunk-6QR6QZIX.js";
4
+ import {
5
+ scanForVibeCodeIssues
6
+ } from "./chunk-IXO4G4D3.js";
7
+ import {
8
+ storeIssues
9
+ } from "./chunk-6JPPYG7F.js";
10
+ import {
11
+ getTrieDirectory,
12
+ getWorkingDirectory
13
+ } from "./chunk-R4AAPFXC.js";
14
+ import {
15
+ scanForVulnerabilities
16
+ } from "./chunk-F4NJ4CBP.js";
17
+ import {
18
+ Trie
19
+ } from "./chunk-6NLHFIYA.js";
20
+ import {
21
+ isInteractiveMode
22
+ } from "./chunk-APMV77PU.js";
23
+
24
+ // src/cli/checkpoint.ts
25
+ import { existsSync } from "fs";
26
+ import { mkdir, writeFile, readFile } from "fs/promises";
27
+ import { join } from "path";
28
/**
 * Append a checkpoint to `.trie/checkpoints.json` and mirror it into AGENTS.md.
 * Returns the checkpoint record that was written.
 */
async function saveCheckpoint(options) {
  const workDir = options.workDir || getWorkingDirectory(undefined, true);
  const trieDir = getTrieDirectory(workDir);
  const checkpointPath = join(trieDir, "checkpoints.json");
  await mkdir(trieDir, { recursive: true });

  // Load the existing log; a missing, unreadable, or corrupt file resets it.
  let log = { checkpoints: [] };
  if (existsSync(checkpointPath)) {
    try {
      log = JSON.parse(await readFile(checkpointPath, "utf-8"));
    } catch {
      log = { checkpoints: [] };
    }
  }

  const checkpoint = {
    id: `cp-${Date.now().toString(36)}`,
    timestamp: new Date().toISOString(),
    files: options.files || [],
    createdBy: options.createdBy || "cli",
  };
  if (options.message) checkpoint.message = options.message;
  if (options.notes) checkpoint.notes = options.notes;

  log.checkpoints.push(checkpoint);
  log.lastCheckpoint = checkpoint.id;
  // Cap the log at the 50 most recent checkpoints.
  if (log.checkpoints.length > 50) {
    log.checkpoints = log.checkpoints.slice(-50);
  }

  await writeFile(checkpointPath, JSON.stringify(log, null, 2));
  await updateAgentsMdWithCheckpoint(checkpoint, workDir);
  return checkpoint;
}
58
/** Read all checkpoints from `.trie/checkpoints.json`; [] when absent/unreadable. */
async function listCheckpoints(workDir) {
  const dir = workDir || getWorkingDirectory(undefined, true);
  const checkpointPath = join(getTrieDirectory(dir), "checkpoints.json");
  if (!existsSync(checkpointPath)) {
    return [];
  }
  try {
    const raw = await readFile(checkpointPath, "utf-8");
    return JSON.parse(raw).checkpoints;
  } catch {
    return [];
  }
}
70
/** Most recent checkpoint (the log is append-only), or null if none exist. */
async function getLastCheckpoint(workDir) {
  const checkpoints = await listCheckpoints(workDir);
  return checkpoints.at(-1) ?? null;
}
75
/**
 * Rewrite (or append) the "## Last Checkpoint" section of `.trie/AGENTS.md`
 * with the given checkpoint's id, time, and optional message/files/notes.
 */
async function updateAgentsMdWithCheckpoint(checkpoint, workDir) {
  const agentsPath = join(getTrieDirectory(workDir), "AGENTS.md");
  let content = "";
  try {
    if (existsSync(agentsPath)) {
      content = await readFile(agentsPath, "utf-8");
    }
  } catch {
    content = "";
  }
  const checkpointSection = `
## Last Checkpoint

- **ID:** ${checkpoint.id}
- **Time:** ${checkpoint.timestamp}
${checkpoint.message ? `- **Message:** ${checkpoint.message}` : ""}
${checkpoint.files.length > 0 ? `- **Files:** ${checkpoint.files.length} files` : ""}
${checkpoint.notes ? `- **Notes:** ${checkpoint.notes}` : ""}
`;
  // BUG FIX: the lookahead previously ended with `\Z`, which is a PCRE
  // end-of-string anchor — in JavaScript regexes `\Z` matches a literal "Z",
  // so the section was only bounded correctly when a "Z" happened to follow.
  // `$` (non-multiline) matches only at end of input, which is the intent.
  const checkpointRegex = /## Last Checkpoint[\s\S]*?(?=\n## |\n---|$)/;
  if (checkpointRegex.test(content)) {
    // Replace the existing section in place.
    content = content.replace(checkpointRegex, checkpointSection.trim());
  } else {
    // First checkpoint: append the section at the end of the file.
    content = content.trim() + "\n\n" + checkpointSection.trim() + "\n";
  }
  await writeFile(agentsPath, content);
}
102
/**
 * CLI entry point for `trie checkpoint [save|list|last]`.
 * Defaults to `save` when no subcommand is given; unknown subcommands print usage.
 */
async function handleCheckpointCommand(args) {
  const subcommand = args[0] || "save";
  switch (subcommand) {
    case "save": {
      let message;
      let notes;
      const files = [];
      // Minimal flag parser: -m/--message, -n/--notes, -f/--file (repeatable).
      // A bare non-flag argument is treated as the message.
      for (let i = 1; i < args.length; i++) {
        const arg = args[i];
        if (arg === "-m" || arg === "--message") {
          message = args[++i] ?? "";
        } else if (arg === "-n" || arg === "--notes") {
          notes = args[++i] ?? "";
        } else if (arg === "-f" || arg === "--file") {
          const file = args[++i];
          if (file) files.push(file);
        } else if (arg && !arg.startsWith("-")) {
          message = arg;
        }
      }
      // Only include message/notes when non-empty so they are omitted from JSON.
      const opts = { files };
      if (message) opts.message = message;
      if (notes) opts.notes = notes;
      const checkpoint = await saveCheckpoint(opts);
      console.log("\n Checkpoint saved\n");
      console.log(` ID: ${checkpoint.id}`);
      console.log(` Time: ${checkpoint.timestamp}`);
      if (checkpoint.message) {
        console.log(` Message: ${checkpoint.message}`);
      }
      if (checkpoint.files.length > 0) {
        console.log(` Files: ${checkpoint.files.join(", ")}`);
      }
      console.log("\n Context saved to .trie/\n");
      break;
    }
    case "list": {
      const checkpoints = await listCheckpoints();
      if (checkpoints.length === 0) {
        console.log("\n No checkpoints yet. Run `trie checkpoint` to save one.\n");
        return;
      }
      console.log("\n Checkpoints:\n");
      // Show the 10 newest checkpoints, most recent first.
      for (const cp of checkpoints.slice(-10).reverse()) {
        const date = new Date(cp.timestamp).toLocaleString();
        console.log(` ${cp.id} ${date} ${cp.message || "(no message)"}`);
      }
      console.log("");
      break;
    }
    case "last": {
      const checkpoint = await getLastCheckpoint();
      if (!checkpoint) {
        console.log("\n No checkpoints yet. Run `trie checkpoint` to save one.\n");
        return;
      }
      console.log("\n Last Checkpoint:\n");
      console.log(` ID: ${checkpoint.id}`);
      console.log(` Time: ${new Date(checkpoint.timestamp).toLocaleString()}`);
      if (checkpoint.message) {
        console.log(` Message: ${checkpoint.message}`);
      }
      if (checkpoint.notes) {
        console.log(` Notes: ${checkpoint.notes}`);
      }
      if (checkpoint.files.length > 0) {
        console.log(` Files: ${checkpoint.files.join(", ")}`);
      }
      console.log("");
      break;
    }
    default:
      // Unknown subcommand: print usage/help text.
      console.log(`
Usage: trie checkpoint [command] [options]

Commands:
save [message] Save a checkpoint (default)
list List recent checkpoints
last Show the last checkpoint

Options:
-m, --message Checkpoint message
-n, --notes Additional notes
-f, --file File to include (can be repeated)

Examples:
trie checkpoint "finished auth flow"
trie checkpoint save -m "WIP: payment integration" -n "needs testing"
trie checkpoint list
`);
  }
}
194
+
195
+ // src/utils/trie-init.ts
196
+ import { existsSync as existsSync2 } from "fs";
197
+ import { join as join2 } from "path";
198
// Files whose presence in .trie/ marks the project as initialized.
var INIT_MARKERS = [
  "PROJECT.md",
  "RULES.md",
  "TEAM.md",
  "BOOTSTRAP.md",
  "AGENTS.md",
  "config.json"
];
/** True when any marker file exists inside the project's .trie directory. */
function isTrieInitialized(workDir) {
  const root = workDir || getWorkingDirectory(void 0, true);
  const trieDir = getTrieDirectory(root);
  for (const marker of INIT_MARKERS) {
    if (existsSync2(join2(trieDir, marker))) {
      return true;
    }
  }
  return false;
}
211
+
212
+ // src/context/sync.ts
213
+ import fs from "fs/promises";
214
+ import path from "path";
215
var DEFAULT_JSON_NAME = "context.json";
/**
 * Serialize the context graph snapshot to pretty-printed JSON, write it to
 * `targetPath` (default: <trie-dir>/context.json), and return the JSON string.
 */
async function exportToJson(graph, targetPath) {
  const snapshot = await graph.getSnapshot();
  const json = JSON.stringify(snapshot, null, 2);
  let outputPath = targetPath;
  if (outputPath == null) {
    outputPath = path.join(getTrieDirectory(graph.projectRoot), DEFAULT_JSON_NAME);
  }
  await fs.mkdir(path.dirname(outputPath), { recursive: true });
  await fs.writeFile(outputPath, json, "utf8");
  return json;
}
224
/**
 * Apply a snapshot to the graph. Uses the inline `json` string when it is a
 * non-empty string; otherwise reads the snapshot from `sourcePath` (default:
 * <trie-dir>/context.json).
 *
 * Robustness fix: the previous version called `json.trim()` unconditionally
 * and threw a TypeError when `json` was undefined/null, even though the
 * `sourcePath` fallback implies the inline payload is optional.
 */
async function importFromJson(graph, json, sourcePath) {
  const inline = typeof json === "string" ? json.trim() : "";
  const payload = inline.length > 0
    ? json
    : await fs.readFile(
        sourcePath ?? path.join(getTrieDirectory(graph.projectRoot), DEFAULT_JSON_NAME),
        "utf8"
      );
  const snapshot = JSON.parse(payload);
  await graph.applySnapshot(snapshot);
}
229
+
230
+ // src/context/incident-index.ts
231
+ import path3 from "path";
232
+
233
+ // src/context/file-trie.ts
234
+ import fs2 from "fs";
235
+ import path2 from "path";
236
+ import { performance } from "perf_hooks";
237
/** Normalize to forward slashes and strip a leading "./" so trie keys are stable. */
function normalizePath(filePath) {
  const slashed = filePath.replace(/\\/g, "/");
  if (slashed.startsWith("./")) {
    return slashed.slice(2);
  }
  return slashed;
}
241
// Prefix trie keyed by normalized file path; each key stores the array of
// incidents recorded against that path. Optionally persisted to disk as JSON.
var FilePathTrie = class {
  trie = new Trie();
  // Disk location for persistence; undefined -> in-memory only.
  persistPath;
  constructor(persistPath) {
    if (persistPath) this.persistPath = persistPath;
    // Best-effort load of a previously persisted trie; an empty or corrupt
    // file falls back to a fresh trie rather than throwing.
    if (persistPath && fs2.existsSync(persistPath)) {
      try {
        const raw = fs2.readFileSync(persistPath, "utf-8");
        if (raw.trim().length > 0) {
          const json = JSON.parse(raw);
          this.trie = Trie.fromJSON(json);
        }
      } catch {
        this.trie = new Trie();
      }
    }
  }
  // Append one incident to the list stored under filePath and persist.
  // NOTE(review): mutates the array returned by search() in place, then
  // re-inserts it — presumably Trie.search returns the stored reference.
  addIncident(filePath, incident) {
    const key = normalizePath(filePath);
    const existing = this.trie.search(key);
    const incidents = existing.found && Array.isArray(existing.value) ? existing.value : [];
    incidents.push(incident);
    this.trie.insert(key, incidents);
    this.persist();
  }
  // Incidents recorded for an exact path; [] when none.
  getIncidents(filePath) {
    const key = normalizePath(filePath);
    const result = this.trie.search(key);
    return result.found && Array.isArray(result.value) ? result.value : [];
  }
  // All incidents under a path prefix (e.g. a directory), flattened.
  getDirectoryIncidents(prefix) {
    const normalizedPrefix = normalizePath(prefix);
    const matches = this.trie.getWithPrefix(normalizedPrefix);
    return matches.flatMap((m) => Array.isArray(m.value) ? m.value : []).filter(Boolean);
  }
  // Paths with at least `threshold` incidents, sorted by count descending.
  getHotZones(threshold) {
    const matches = this.trie.getWithPrefix("");
    const zones = [];
    for (const match of matches) {
      const incidents = Array.isArray(match.value) ? match.value : [];
      if (incidents.length >= threshold) {
        zones.push({
          path: match.pattern,
          incidentCount: incidents.length,
          confidence: this.calculateConfidence(incidents.length)
        });
      }
    }
    return zones.sort((a, b) => b.incidentCount - a.incidentCount);
  }
  // Autocomplete: known paths starting with `partial`, ranked by incident count.
  suggestPaths(partial, limit = 5) {
    const normalized = normalizePath(partial);
    const results = this.trie.getWithPrefix(normalized);
    return results.map((r) => ({
      path: r.pattern,
      incidentCount: Array.isArray(r.value) ? r.value.length : 0
    })).sort((a, b) => b.incidentCount - a.incidentCount).slice(0, limit);
  }
  // Wall-clock milliseconds for a single lookup (diagnostic helper).
  timeLookup(path8) {
    const start = performance.now();
    this.getIncidents(path8);
    return performance.now() - start;
  }
  toJSON() {
    return this.trie.toJSON();
  }
  // Linear confidence ramp: 10+ incidents -> 1.0, rounded to 2 decimals.
  calculateConfidence(count) {
    const capped = Math.min(count, 10);
    return Math.round(capped / 10 * 100) / 100;
  }
  // Best-effort synchronous write-through; persistence failures are ignored
  // so an unwritable disk never breaks incident recording.
  persist() {
    if (!this.persistPath) return;
    try {
      const dir = path2.dirname(this.persistPath);
      if (!fs2.existsSync(dir)) {
        fs2.mkdirSync(dir, { recursive: true });
      }
      fs2.writeFileSync(this.persistPath, JSON.stringify(this.trie.toJSON()), "utf-8");
    } catch {
    }
  }
};
323
+
324
+ // src/context/incident-index.ts
325
// Index that maps files to incidents by walking the context graph's edges and
// mirroring the result into a FilePathTrie (persisted under .trie/).
var IncidentIndex = class _IncidentIndex {
  graph;
  trie;
  projectRoot;
  constructor(graph, projectRoot, options) {
    this.graph = graph;
    this.projectRoot = projectRoot;
    this.trie = new FilePathTrie(
      options?.persistPath ?? path3.join(getTrieDirectory(projectRoot), "incident-trie.json")
    );
  }
  // Async factory: construct and populate the trie from the graph.
  static async build(graph, projectRoot, options) {
    const index = new _IncidentIndex(graph, projectRoot, options);
    await index.rebuild();
    return index;
  }
  // Re-index every incident node currently in the graph.
  // NOTE(review): does not clear the existing trie first, so repeated calls
  // may duplicate entries — confirm against Trie semantics.
  async rebuild() {
    const nodes = await this.graph.listNodes();
    const incidents = nodes.filter((n) => n.type === "incident");
    for (const incident of incidents) {
      const files = await this.getFilesForIncident(incident.id);
      this.addIncidentToTrie(incident, files);
    }
  }
  // Record one incident against each of its files (one trie entry per file).
  addIncidentToTrie(incident, files) {
    const meta = {
      id: incident.id,
      file: "",
      description: incident.data.description,
      severity: incident.data.severity,
      timestamp: incident.data.timestamp
    };
    for (const file of files) {
      const normalized = this.normalizePath(file);
      this.trie.addIncident(normalized, { ...meta, file: normalized });
    }
  }
  getFileTrie() {
    return this.trie;
  }
  // Collect file paths linked to an incident via causedBy/leadTo change edges
  // (the change node's `files` array) or direct `affects` edges to file nodes.
  async getFilesForIncident(incidentId) {
    const files = /* @__PURE__ */ new Set();
    const edges = await this.graph.getEdges(incidentId, "both");
    for (const edge of edges) {
      if (edge.type === "causedBy" || edge.type === "leadTo") {
        const changeId = edge.type === "causedBy" ? edge.to_id : edge.from_id;
        const change = await this.graph.getNode("change", changeId);
        if (change?.data && "files" in change.data && Array.isArray(change.data.files)) {
          change.data.files.forEach((f) => files.add(f));
        }
      }
      if (edge.type === "affects") {
        // Edge direction is not known here, so try both endpoints.
        const fileNode = await this.graph.getNode("file", edge.to_id) || await this.graph.getNode("file", edge.from_id);
        if (fileNode && typeof fileNode.data?.path === "string") {
          files.add(fileNode.data.path);
        }
      }
    }
    return Array.from(files);
  }
  // Project-relative, forward-slash path used as the trie key.
  normalizePath(filePath) {
    const absolute = path3.isAbsolute(filePath) ? filePath : path3.join(this.projectRoot, filePath);
    const relative = path3.relative(this.projectRoot, absolute);
    return relative.replace(/\\/g, "/");
  }
};
391
+
392
+ // src/utils/errors.ts
393
/**
 * Domain error carrying a stable machine-readable `code`, a user-facing
 * `userMessage`, and a `recoverable` flag (default true).
 */
var TrieError = class extends Error {
  code;
  recoverable;
  userMessage;
  constructor(message, code, userMessage, recoverable = true) {
    super(message);
    this.code = code;
    this.userMessage = userMessage;
    this.recoverable = recoverable;
  }
};
/**
 * Map any thrown value to a `{ userMessage, code }` pair suitable for display.
 * TrieError instances supply their own text; everything else gets a generic
 * fallback with code "UNKNOWN".
 */
function formatFriendlyError(error) {
  if (error instanceof TrieError) {
    const { userMessage, code } = error;
    return { userMessage, code };
  }
  return {
    userMessage: "Something went wrong. Try again or run with --offline.",
    code: "UNKNOWN"
  };
}
413
+
414
+ // src/skills/audit-logger.ts
415
// Audit logging has been folded into the decision ledger. These helpers are
// kept for API compatibility; most return inert/empty values.
function formatAuditLog(_entry) {
  return "Audit logging has been integrated into the decision ledger";
}
/** All-zero statistics record (audit counters live in the ledger now). */
function getAuditStatistics() {
  const counters = [
    "totalScans",
    "totalIssues",
    "criticalCount",
    "seriousCount",
    "moderateCount",
    "lowCount",
    "totalExecutions",
    "successfulExecutions",
    "failedExecutions",
    "uniqueSkills",
    "totalCommands",
    "blockedCommands",
    "totalNetworkCalls",
    "blockedNetworkCalls"
  ];
  return Object.fromEntries(counters.map((key) => [key, 0]));
}
/** Start a new audit entry for one skill execution. */
function createAuditEntry(skillName, skillSource, triggeredBy, targetPath) {
  return {
    skillName,
    skillSource,
    triggeredBy,
    targetPath,
    startedAt: new Date().toISOString(),
    commands: []
  };
}
/** Close out an audit entry; `error` is only attached when provided. */
function completeAuditEntry(entry, success, error) {
  const result = {
    ...entry,
    completedAt: new Date().toISOString(),
    success
  };
  if (error !== undefined) {
    result.error = error;
  }
  return result;
}
// Intentional no-op: execution records now flow through the decision ledger.
async function logSkillExecution(_execution) {
}
async function getRecentAuditLogs(_limit = 10) {
  return [];
}
async function getSkillAuditLogs(_skillName) {
  return [];
}
465
+
466
+ // src/agent/git.ts
467
+ import { existsSync as existsSync3 } from "fs";
468
+ import path4 from "path";
469
+
470
+ // src/utils/command-runner.ts
471
+ import { exec, execFile, execSync } from "child_process";
472
+ import { promisify } from "util";
473
+ var execAsync = promisify(exec);
474
+ var execFileAsync = promisify(execFile);
475
/**
 * Scrub common credential shapes from command output before it is logged:
 * env-style secret assignments, Bearer tokens, GitHub PATs, Slack tokens,
 * and AWS access key IDs. Substitutions run in the same order as before.
 */
function redact(text) {
  const noEnvSecrets = text.replace(
    /\b(AWS|ANTHROPIC|OPENAI|GITHUB)_[A-Z0-9_]*\s*=\s*([^\s"'`]+)/gi,
    "$1_<REDACTED>=<REDACTED>"
  );
  const noBearer = noEnvSecrets.replace(/\bBearer\s+[A-Za-z0-9\-._~+/]+=*\b/g, "Bearer <REDACTED>");
  const noGithubPat = noBearer.replace(/\bghp_[A-Za-z0-9]{20,}\b/g, "ghp_<REDACTED>");
  const noSlack = noGithubPat.replace(/\b(?:xox[baprs]-)[A-Za-z0-9-]{10,}\b/g, "<REDACTED_SLACK_TOKEN>");
  return noSlack.replace(/\bAKIA[0-9A-Z]{16}\b/g, "AKIA<REDACTED>");
}
478
/** Truncate `text` to `maxChars`, appending a note with the dropped length. */
function clampOutput(text, maxChars) {
  if (text.length <= maxChars) {
    return text;
  }
  const dropped = text.length - maxChars;
  return `${text.slice(0, maxChars)}
\u2026(truncated ${dropped} chars)`;
}
483
/** New audit record for one shell command, stamped with the current time. */
function buildCommandRecord(command) {
  const timestamp = new Date().toISOString();
  return { command, timestamp };
}
489
/**
 * Finish one command's audit record: stamp duration/exit code, optionally
 * capture (clamped, redacted) output, then complete the entry and hand it to
 * logSkillExecution (currently a no-op — see audit-logger).
 */
async function finalizeAndWrite(entry, cmd, outcome, options) {
  const duration = Date.now() - outcome.startedAt;
  cmd.duration = duration;
  if (outcome.exitCode !== void 0) {
    cmd.exitCode = outcome.exitCode;
  }
  // Defaults: no output capture, redaction on, output clamped to 2000 chars.
  const captureOutput = options?.captureOutput ?? false;
  const redactOutput = options?.redactOutput ?? true;
  const maxOutputChars = options?.maxOutputChars ?? 2e3;
  if (captureOutput) {
    const out = outcome.stdout ?? "";
    const err = outcome.stderr ?? "";
    // Clamp before redacting so the redaction regexes run on bounded input.
    cmd.stdout = redactOutput ? redact(clampOutput(out, maxOutputChars)) : clampOutput(out, maxOutputChars);
    cmd.stderr = redactOutput ? redact(clampOutput(err, maxOutputChars)) : clampOutput(err, maxOutputChars);
  }
  const completed = completeAuditEntry(entry, outcome.success, outcome.error);
  await logSkillExecution(completed);
}
507
/**
 * Run a shell command synchronously via execSync, recording it in an audit
 * entry. Never throws: on failure it returns the captured stdout plus the
 * child's exit status (1 when unknown). The audit write is fire-and-forget
 * (`void finalizeAndWrite(...)`) so this function can stay synchronous.
 */
function runShellCommandSync(command, audit, options) {
  const startedAt = Date.now();
  const entry = createAuditEntry(audit.actor, audit.source ?? "trie", audit.triggeredBy, audit.targetPath);
  const cmd = buildCommandRecord(command);
  entry.commands?.push(cmd);
  try {
    const stdout = execSync(command, {
      cwd: options?.cwd,
      timeout: options?.timeoutMs,
      maxBuffer: options?.maxBuffer,
      encoding: "utf-8",
      stdio: ["pipe", "pipe", "pipe"]
    });
    void finalizeAndWrite(entry, cmd, { success: true, exitCode: 0, stdout, stderr: "", startedAt }, options);
    return { stdout: stdout ?? "", exitCode: 0 };
  } catch (e) {
    // execSync throws on non-zero exit; the error carries stdout/stderr/status.
    const err = e;
    const stdout = typeof err.stdout === "string" ? err.stdout : "";
    const stderr = typeof err.stderr === "string" ? err.stderr : "";
    const exitCode = typeof err.status === "number" ? err.status : 1;
    // On failure, default captureOutput to true so diagnostics reach the audit log.
    void finalizeAndWrite(
      entry,
      cmd,
      { success: false, exitCode, stdout, stderr, error: err.message, startedAt },
      { ...options, captureOutput: options?.captureOutput ?? true }
    );
    return { stdout, exitCode };
  }
}
536
/**
 * Run an executable (no shell interpolation) via execFile, with audit logging.
 * Never rejects: on failure it resolves with the captured stdout/stderr and
 * the child's exit code (1 when unknown) — callers must check `exitCode`.
 */
async function runExecFile(file, args, audit, options) {
  const startedAt = Date.now();
  // Joined only for the human-readable audit record, not for execution.
  const command = [file, ...args].join(" ");
  const entry = createAuditEntry(audit.actor, audit.source ?? "trie", audit.triggeredBy, audit.targetPath);
  const cmd = buildCommandRecord(command);
  entry.commands?.push(cmd);
  try {
    const { stdout, stderr } = await execFileAsync(file, args, {
      cwd: options?.cwd,
      timeout: options?.timeoutMs,
      maxBuffer: options?.maxBuffer
    });
    await finalizeAndWrite(entry, cmd, { success: true, exitCode: 0, stdout: String(stdout ?? ""), stderr: String(stderr ?? ""), startedAt }, options);
    return { stdout: String(stdout ?? ""), stderr: String(stderr ?? ""), exitCode: 0 };
  } catch (e) {
    // execFile rejects on non-zero exit; the error carries stdout/stderr/code.
    const err = e;
    const stdout = typeof err.stdout === "string" ? err.stdout : "";
    const stderr = typeof err.stderr === "string" ? err.stderr : "";
    const exitCode = typeof err.code === "number" ? err.code : 1;
    // On failure, default captureOutput to true so diagnostics reach the audit log.
    await finalizeAndWrite(
      entry,
      cmd,
      { success: false, exitCode, stdout, stderr, error: err.message, startedAt },
      { ...options, captureOutput: options?.captureOutput ?? true }
    );
    return { stdout, stderr, exitCode };
  }
}
564
+
565
+ // src/agent/git.ts
566
/**
 * Run `git -C <cwd> <args>` and return trimmed stdout, or null when the
 * directory is not a git repository / has no commits.
 *
 * BUG FIX: the previous version wrapped runExecFile in try/catch, but
 * runExecFile never rejects — it catches internally and resolves with
 * { stdout, stderr, exitCode }. The "not a git repository" detection was
 * therefore dead code and every git failure silently returned "". Inspect
 * the resolved exit code and stderr instead.
 */
async function execGit(args, cwd) {
  const { stdout, stderr, exitCode } = await runExecFile(
    "git",
    ["-C", cwd, ...args],
    { actor: "internal:git", triggeredBy: "manual", targetPath: cwd },
    { maxBuffer: 10 * 1024 * 1024, captureOutput: false }
  );
  if (exitCode !== 0) {
    // Expected benign failures: not a repo, or a repo with no commits yet.
    if (stderr?.includes("not a git repository") || stderr?.includes("does not have any commits")) {
      return null;
    }
    // Preserve the original contract of surfacing unexpected git failures.
    throw new Error(`git ${args.join(" ")} failed (exit ${exitCode}): ${stderr}`);
  }
  return stdout.trim();
}
583
/** True when projectPath is inside a git work tree. */
async function ensureRepo(projectPath) {
  // `git rev-parse --is-inside-work-tree` prints "true" inside a work tree.
  const output = await execGit(["rev-parse", "--is-inside-work-tree"], projectPath);
  return output === "true";
}
587
/**
 * Parse `git diff --name-status` output into { status, path, oldPath? } records.
 *
 * BUG FIX: git separates the status letter and path(s) with TABs
 * ("M\tpath", "R100\told\tnew"), but the previous implementation split on a
 * single space — parts[1] was always undefined, so the trailing
 * `path.length > 0` filter discarded every entry. Tab-separated lines are now
 * parsed correctly (space-separated input is still accepted as a fallback).
 * Additionally, rename/copy entries list the OLD path first and the NEW path
 * second; the previous code assigned them the wrong way around.
 */
function parseNameStatus(output) {
  return output
    .split("\n")
    .map((line) => line.trim())
    .filter(Boolean)
    .map((line) => {
      // Prefer TAB (git's real separator); fall back to single spaces.
      const parts = line.includes("\t")
        ? line.split("\t").map((p) => p.trim())
        : line.split(" ");
      const status = parts[0] ?? "";
      let filePath = parts[1] ?? "";
      let oldPath;
      if (/^[RC]/.test(status) && parts[2]) {
        // Rename/copy: "<status>\t<old>\t<new>" — path is the NEW location.
        oldPath = parts[1];
        filePath = parts[2];
      } else if (parts[2]) {
        oldPath = parts[2];
      }
      const change = { status, path: filePath };
      if (oldPath) change.oldPath = oldPath;
      return change;
    })
    .filter((entry) => entry.path.length > 0);
}
598
/**
 * Last `limit` commits as { hash, author, date, message }; [] outside a repo.
 *
 * BUG FIX: the pretty format uses %x09 (TAB) as the field separator, but the
 * previous code split each line on a single space — `hash` swallowed the whole
 * line and the other fields were garbage for any commit whose fields lacked
 * spaces in exactly the right places. Split on TAB, and re-join the remainder
 * so commit subjects containing TABs survive.
 */
async function getRecentCommits(projectPath, limit) {
  const isRepo = await ensureRepo(projectPath);
  if (!isRepo) return [];
  const output = await execGit(
    ["log", "-n", String(limit), "--pretty=format:%H%x09%an%x09%ad%x09%s", "--date=iso"],
    projectPath
  );
  if (!output) return [];
  return output.split("\n").map((line) => {
    const [hash, author, date, ...rest] = line.split("\t");
    return { hash, author, date, message: rest.join("\t") };
  });
}
611
/** Staged (index) changes as name-status records; [] outside a repo. */
async function getStagedChanges(projectPath) {
  if (!(await ensureRepo(projectPath))) {
    return [];
  }
  const output = await execGit(["diff", "--cached", "--name-status"], projectPath);
  return output ? parseNameStatus(output) : [];
}
618
/**
 * All uncommitted work: unstaged modifications to tracked files plus
 * untracked (non-ignored) files, the latter reported with status "??".
 */
async function getUncommittedChanges(projectPath) {
  if (!(await ensureRepo(projectPath))) {
    return [];
  }
  const changes = [];
  const unstaged = await execGit(["diff", "--name-status"], projectPath);
  if (unstaged) {
    changes.push(...parseNameStatus(unstaged));
  }
  const untracked = await execGit(["ls-files", "--others", "--exclude-standard"], projectPath);
  if (untracked) {
    for (const raw of untracked.split("\n")) {
      const p = raw.trim();
      if (p) {
        changes.push({ status: "??", path: p });
      }
    }
  }
  return changes;
}
634
/** Full diff of one commit (`git show`); "" outside a repo or on failure. */
async function getDiff(projectPath, commitHash) {
  if (!(await ensureRepo(projectPath))) {
    return "";
  }
  const diff = await execGit(["show", commitHash, "--unified=3", "--no-color"], projectPath);
  return diff ?? "";
}
640
/** Working-tree diff; pass stagedOnly=true for the index (--cached) diff. */
async function getWorkingTreeDiff(projectPath, stagedOnly = false) {
  if (!(await ensureRepo(projectPath))) {
    return "";
  }
  const args = ["diff", "--unified=3", "--no-color"];
  if (stagedOnly) {
    args.splice(1, 0, "--cached");
  }
  const diff = await execGit(args, projectPath);
  return diff ?? "";
}
647
+
648
+ // src/agent/confidence.ts
649
/**
 * Nudge a confidence score by `step`: up for "positive" feedback, down for
 * anything else. The result is clamped to [0, 1].
 */
function adjustConfidence(current, outcome, step = 0.1) {
  const next = outcome === "positive" ? current + step : current - step;
  return clamp(next);
}
/** Bound a value to [0, 1]; NaN collapses to the neutral midpoint 0.5. */
function clamp(value) {
  if (Number.isNaN(value)) {
    return 0.5;
  }
  return Math.max(0, Math.min(1, value));
}
657
+
658
+ // src/agent/pattern-discovery.ts
659
// Discovers failure patterns from the incident index: incident "hot zones"
// (paths with many incidents) and file pairs that repeatedly fail together.
var TriePatternDiscovery = class {
  constructor(graph, incidentIndex) {
    this.graph = graph;
    this.incidentIndex = incidentIndex;
  }
  // Paths with at least `threshold` incidents, with their related files.
  discoverHotPatterns(threshold = 3) {
    const trie = this.incidentIndex.getFileTrie();
    const hotZones = trie.getHotZones(threshold);
    return hotZones.map((zone) => ({
      // Trailing slash distinguishes directory zones from single-file zones.
      type: zone.path.endsWith("/") ? "directory" : "file",
      path: zone.path,
      incidentCount: zone.incidentCount,
      confidence: zone.confidence,
      relatedFiles: trie.getDirectoryIncidents(zone.path).map((i) => i.file)
    }));
  }
  // File pairs appearing together in >= minCount incidents, highest confidence
  // first. Confidence = co-occurrences / incident count of the rarer file.
  async discoverCoOccurrences(minCount = 3) {
    const incidents = await this.getAllIncidents();
    // Nested map: fileA -> (fileB -> co-occurrence count). Pairs are counted
    // once per incident, ordered (i < j), so (a,b) and (b,a) are not doubled.
    const coOccurrences = /* @__PURE__ */ new Map();
    for (const inc of incidents) {
      const files = await this.getFilesForIncident(inc);
      for (let i = 0; i < files.length; i++) {
        for (let j = i + 1; j < files.length; j++) {
          const a = files[i];
          const b = files[j];
          if (!coOccurrences.has(a)) coOccurrences.set(a, /* @__PURE__ */ new Map());
          const counts = coOccurrences.get(a);
          counts.set(b, (counts.get(b) || 0) + 1);
        }
      }
    }
    const patterns = [];
    for (const [a, map] of coOccurrences.entries()) {
      for (const [b, count] of map.entries()) {
        if (count >= minCount) {
          // `|| 1` guards against division by zero when a file has no
          // incidents recorded under its exact path.
          const denom = Math.min(
            this.incidentIndex.getFileTrie().getIncidents(a).length || 1,
            this.incidentIndex.getFileTrie().getIncidents(b).length || 1
          );
          patterns.push({
            files: [a, b],
            coOccurrences: count,
            confidence: Math.min(1, count / denom)
          });
        }
      }
    }
    return patterns.sort((x, y) => y.confidence - x.confidence);
  }
  // Every incident node currently in the graph.
  async getAllIncidents() {
    const nodes = await this.graph.listNodes();
    return nodes.filter((n) => n.type === "incident");
  }
  // Files linked to an incident through causedBy/leadTo change edges,
  // normalized to forward slashes. (Unlike IncidentIndex, `affects` edges
  // to file nodes are not considered here.)
  async getFilesForIncident(incident) {
    const files = /* @__PURE__ */ new Set();
    const edges = await this.graph.getEdges(incident.id, "both");
    for (const edge of edges) {
      if (edge.type === "causedBy" || edge.type === "leadTo") {
        const changeId = edge.type === "causedBy" ? edge.to_id : edge.from_id;
        const change = await this.graph.getNode("change", changeId);
        if (change?.data?.files && Array.isArray(change.data.files)) {
          change.data.files.forEach((f) => files.add(f));
        }
      }
    }
    return Array.from(files).map((f) => f.replace(/\\/g, "/"));
  }
};
727
+
728
+ // src/agent/learning.ts
729
// Feedback loop: records incidents into the trie index, adjusts pattern
// confidence from user feedback, and persists discovered hot-zone patterns
// back into the context graph as anti-pattern nodes.
var LearningSystem = class {
  constructor(graph, projectPath) {
    this.graph = graph;
    this.incidentIndex = new IncidentIndex(graph, projectPath);
    this.discovery = new TriePatternDiscovery(graph, this.incidentIndex);
  }
  incidentIndex;
  discovery;
  // User acted on a warning -> reinforce the patterns that produced it.
  async onWarningHeeded(files) {
    await this.adjustPatterns(files, "positive");
  }
  // User ignored a warning -> weaken the patterns that produced it.
  async onWarningIgnored(files) {
    await this.adjustPatterns(files, "negative");
  }
  // New incident: index it against its files, then re-run pattern discovery.
  async onIncidentReported(incidentId, files) {
    const incident = await this.graph.getNode("incident", incidentId);
    if (incident && incident.type === "incident") {
      this.incidentIndex.addIncidentToTrie(incident, files);
    }
    await this.discoverAndStorePatterns();
  }
  // Explicit thumbs-up/down feedback mapped onto confidence adjustment.
  async onFeedback(helpful, files = []) {
    await this.adjustPatterns(files, helpful ? "positive" : "negative");
  }
  // Shift confidence of every pattern attached to each file by a small step.
  async adjustPatterns(files, outcome) {
    if (!files.length) return;
    for (const file of files) {
      const patterns = await this.graph.getPatternsForFile(file);
      await Promise.all(patterns.map((p) => this.updatePatternConfidence(p, outcome)));
    }
  }
  // 0.05 step keeps learning gradual; missing confidence starts neutral (0.5).
  async updatePatternConfidence(pattern, outcome) {
    const current = pattern.data.confidence ?? 0.5;
    const updated = adjustConfidence(current, outcome, 0.05);
    await this.graph.updateNode("pattern", pattern.id, { confidence: updated, lastSeen: (/* @__PURE__ */ new Date()).toISOString() });
  }
  // Persist current hot zones as anti-pattern nodes in the graph.
  // NOTE(review): always adds nodes — presumably addNode dedupes, otherwise
  // repeated calls create duplicate pattern nodes; verify graph semantics.
  async discoverAndStorePatterns() {
    const hotPatterns = this.discovery.discoverHotPatterns();
    for (const hot of hotPatterns) {
      await this.graph.addNode("pattern", {
        description: `${hot.type === "directory" ? "Directory" : "File"} hot zone: ${hot.path}`,
        appliesTo: [hot.path],
        confidence: hot.confidence,
        occurrences: hot.incidentCount,
        firstSeen: (/* @__PURE__ */ new Date()).toISOString(),
        lastSeen: (/* @__PURE__ */ new Date()).toISOString(),
        isAntiPattern: true,
        source: "local"
      });
    }
  }
};
781
+
782
+ // src/guardian/learning-engine.ts
783
+ import path5 from "path";
784
// Orchestrates learning from two sources: implicit failure signals mined from
// git history (revert/fix commits) and explicit manual feedback, feeding both
// into the issue store / context graph.
var LearningEngine = class {
  projectPath;
  graph;
  learningSystem;
  constructor(projectPath, graph) {
    this.projectPath = projectPath;
    this.graph = graph || new ContextGraph(projectPath);
    this.learningSystem = new LearningSystem(this.graph, projectPath);
  }
  /**
   * Unified learning method: Scans history AND processes manual feedback
   */
  async learn(options = {}) {
    const results = [];
    // History scan runs only when no manual feedback is supplied.
    if (!options.manualFeedback) {
      const implicitCount = await this.learnFromHistory(options.limit || 20);
      results.push({ learned: implicitCount, source: "git-history" });
    }
    if (options.manualFeedback) {
      await this.recordManualFeedback(
        options.manualFeedback.helpful,
        options.manualFeedback.files,
        options.manualFeedback.note
      );
      // `|| 1` ensures feedback with no files still counts as one learning event.
      results.push({ learned: options.manualFeedback.files.length || 1, source: "manual-feedback" });
    }
    return results;
  }
  /**
   * Scan recent commits for implicit failure signals (reverts, fixes)
   */
  async learnFromHistory(limit = 20) {
    const commits = await getRecentCommits(this.projectPath, limit);
    const issuesToStore = [];
    for (const commit of commits) {
      // Heuristics: revert commits and "fix(es|ed) #NNN"/"bugfix" commits
      // indicate code that previously caused a failure.
      const isRevert = commit.message.toLowerCase().includes("revert") || commit.message.startsWith('Revert "');
      const isFix = /fix(es|ed)?\s+#\d+/i.test(commit.message) || commit.message.toLowerCase().includes("bugfix");
      if (isRevert || isFix) {
        const type = isRevert ? "revert" : "fix";
        const diff = await getDiff(this.projectPath, commit.hash);
        const files = this.extractFilesFromDiff(diff);
        for (const file of files) {
          const learnedIssues = await this.extractIssuesFromDiff(diff, file, type, commit.message);
          issuesToStore.push(...learnedIssues);
        }
      }
    }
    if (issuesToStore.length > 0) {
      const result = await storeIssues(issuesToStore, path5.basename(this.projectPath), this.projectPath);
      return result.stored;
    }
    return 0;
  }
  /**
   * Record manual feedback (trie ok/bad) and adjust pattern confidence
   */
  async recordManualFeedback(helpful, files, note) {
    const context = files[0] ?? "unspecified";
    // Persist the feedback itself as a decision node in the graph.
    const decision = await this.graph.addNode("decision", {
      context,
      decision: helpful ? "helpful" : "not helpful",
      reasoning: note ?? null,
      outcome: helpful ? "good" : "bad",
      timestamp: (/* @__PURE__ */ new Date()).toISOString()
    });
    if (files.length > 0) {
      // Link the decision to each known file node, then feed the learning loop.
      for (const file of files) {
        const fileNode = await this.graph.getNode("file", file);
        if (fileNode) {
          await this.graph.addEdge(decision.id, fileNode.id, "affects");
        }
      }
      await this.learningSystem.onFeedback(helpful, files);
    }
  }
  // Files touched by a diff, taken from the "+++ b/<path>" headers.
  extractFilesFromDiff(diff) {
    const files = /* @__PURE__ */ new Set();
    const lines = diff.split("\n");
    for (const line of lines) {
      if (line.startsWith("+++ b/")) {
        files.add(line.slice(6));
      }
    }
    return Array.from(files);
  }
  // Scan the "bad" side of a revert/fix diff for known vulnerability and
  // vibe-code patterns; falls back to a single generic moderate issue when
  // no specific pattern matches.
  async extractIssuesFromDiff(diff, file, type, message) {
    const issues = [];
    const badLines = this.getBadLinesFromDiff(diff, file, type);
    const content = badLines.join("\n");
    if (!content) return [];
    const vulnerabilities = await scanForVulnerabilities(content, file);
    const vibeIssues = await scanForVibeCodeIssues(content, file);
    const allMatches = [...vulnerabilities, ...vibeIssues];
    for (const match of allMatches) {
      issues.push({
        // Random suffix avoids id collisions when several issues share a timestamp.
        id: `implicit-${type}-${Date.now()}-${Math.random().toString(36).slice(2, 7)}`,
        severity: "serious",
        issue: `Implicit failure detected via ${type}: ${message}. Linked to pattern: ${match.category}`,
        fix: `Review the ${type} commit and avoid this pattern in ${file}.`,
        file,
        confidence: 0.7,
        autoFixable: false,
        agent: "implicit-learning",
        category: match.category
      });
    }
    if (issues.length === 0) {
      issues.push({
        id: `implicit-${type}-${Date.now()}`,
        severity: "moderate",
        issue: `Historical ${type} detected: ${message}`,
        fix: `Review the changes in ${file} from this commit to avoid regression.`,
        file,
        confidence: 0.5,
        autoFixable: false,
        agent: "implicit-learning"
      });
    }
    return issues;
  }
  // The "bad" lines for a commit: for a fix, the removed lines (the code that
  // was wrong); for a revert, the added lines (the reverted-to-bad code is
  // what the revert removes, so the '+' lines restore the pre-failure state).
  // Only lines belonging to `file`'s hunk are collected.
  getBadLinesFromDiff(diff, file, type) {
    const badLines = [];
    const lines = diff.split("\n");
    let inTargetFile = false;
    for (const line of lines) {
      if (line.startsWith("+++ b/") || line.startsWith("--- a/")) {
        inTargetFile = line.includes(file);
        continue;
      }
      if (!inTargetFile) continue;
      if (type === "fix" && line.startsWith("-") && !line.startsWith("---")) {
        badLines.push(line.slice(1));
      } else if (type === "revert" && line.startsWith("+") && !line.startsWith("+++")) {
        badLines.push(line.slice(1));
      }
    }
    return badLines;
  }
};
923
+
924
+ // src/agent/perceive.ts
925
+ import path6 from "path";
926
+
927
+ // src/agent/diff-analyzer.ts
928
+ var RISKY_PATTERNS = [/auth/i, /token/i, /password/i, /secret/i, /validate/i, /sanitize/i];
929
+ function analyzeDiff(diff) {
930
+ const files = [];
931
+ let current = null;
932
+ const lines = diff.split("\n");
933
+ for (const line of lines) {
934
+ if (line.startsWith("+++ b/")) {
935
+ const filePath = line.replace("+++ b/", "").trim();
936
+ current = {
937
+ filePath,
938
+ added: 0,
939
+ removed: 0,
940
+ functionsModified: [],
941
+ riskyPatterns: []
942
+ };
943
+ files.push(current);
944
+ continue;
945
+ }
946
+ if (!current) {
947
+ continue;
948
+ }
949
+ if (line.startsWith("@@")) {
950
+ const match = line.match(/@@.*?(function\s+([\w$]+)|class\s+([\w$]+)|([\w$]+\s*\())/i);
951
+ const fnName = match?.[2] || match?.[3] || match?.[4];
952
+ if (fnName) {
953
+ current.functionsModified.push(fnName.replace("(", "").trim());
954
+ }
955
+ continue;
956
+ }
957
+ if (line.startsWith("+") && !line.startsWith("+++")) {
958
+ current.added += 1;
959
+ markRisk(line, current);
960
+ } else if (line.startsWith("-") && !line.startsWith("---")) {
961
+ current.removed += 1;
962
+ markRisk(line, current);
963
+ }
964
+ }
965
+ const totalAdded = files.reduce((acc, f) => acc + f.added, 0);
966
+ const totalRemoved = files.reduce((acc, f) => acc + f.removed, 0);
967
+ const riskyFiles = files.filter((f) => f.riskyPatterns.length > 0).map((f) => f.filePath);
968
+ return {
969
+ files,
970
+ totalAdded,
971
+ totalRemoved,
972
+ riskyFiles
973
+ };
974
+ }
975
+ function markRisk(line, file) {
976
+ for (const pattern of RISKY_PATTERNS) {
977
+ if (pattern.test(line)) {
978
+ const label = pattern.toString();
979
+ if (!file.riskyPatterns.includes(label)) {
980
+ file.riskyPatterns.push(label);
981
+ }
982
+ }
983
+ }
984
+ }
985
+
986
+ // src/agent/perceive.ts
987
+ async function perceiveCurrentChanges(projectPath, graph) {
988
+ const ctxGraph = graph ?? new ContextGraph(projectPath);
989
+ const [staged, unstaged] = await Promise.all([
990
+ getStagedChanges(projectPath),
991
+ getUncommittedChanges(projectPath)
992
+ ]);
993
+ const stagedDiff = await getWorkingTreeDiff(projectPath, true);
994
+ const unstagedDiff = await getWorkingTreeDiff(projectPath, false);
995
+ const combinedDiff = [stagedDiff, unstagedDiff].filter(Boolean).join("\n");
996
+ const diffSummary = analyzeDiff(combinedDiff);
997
+ const filesTouched = /* @__PURE__ */ new Set();
998
+ staged.forEach((c) => filesTouched.add(c.path));
999
+ unstaged.forEach((c) => filesTouched.add(c.path));
1000
+ diffSummary.files.forEach((f) => filesTouched.add(f.filePath));
1001
+ const changeId = await upsertWorkingChange(ctxGraph, Array.from(filesTouched), projectPath);
1002
+ const result = {
1003
+ staged,
1004
+ unstaged,
1005
+ diffSummary
1006
+ };
1007
+ if (changeId) result.changeNodeId = changeId;
1008
+ return result;
1009
+ }
1010
+ async function upsertWorkingChange(graph, files, projectPath) {
1011
+ if (files.length === 0) return void 0;
1012
+ const now = (/* @__PURE__ */ new Date()).toISOString();
1013
+ const change = await graph.addNode("change", {
1014
+ commitHash: null,
1015
+ files,
1016
+ message: "workspace changes",
1017
+ diff: null,
1018
+ author: null,
1019
+ timestamp: now,
1020
+ outcome: "unknown"
1021
+ });
1022
+ for (const filePath of files) {
1023
+ const fileNode = await ensureFileNode(graph, filePath, projectPath);
1024
+ await graph.addEdge(change.id, fileNode.id, "affects");
1025
+ }
1026
+ return change.id;
1027
+ }
1028
+ async function ensureFileNode(graph, filePath, projectPath) {
1029
+ const normalized = path6.resolve(projectPath, filePath);
1030
+ const existing = await graph.getNode("file", normalized);
1031
+ const now = (/* @__PURE__ */ new Date()).toISOString();
1032
+ if (existing) {
1033
+ const data2 = existing.data;
1034
+ await graph.updateNode("file", existing.id, {
1035
+ changeCount: (data2.changeCount ?? 0) + 1,
1036
+ lastChanged: now
1037
+ });
1038
+ return await graph.getNode("file", existing.id);
1039
+ }
1040
+ const data = {
1041
+ path: filePath,
1042
+ extension: path6.extname(filePath),
1043
+ purpose: "",
1044
+ riskLevel: "medium",
1045
+ whyRisky: null,
1046
+ changeCount: 1,
1047
+ lastChanged: now,
1048
+ incidentCount: 0,
1049
+ createdAt: now
1050
+ };
1051
+ return await graph.addNode("file", data);
1052
+ }
1053
+
1054
+ // src/agent/risk-scorer.ts
1055
+ import path7 from "path";
1056
+ var BASE_RISK = {
1057
+ low: 10,
1058
+ medium: 35,
1059
+ high: 65,
1060
+ critical: 85
1061
+ };
1062
+ var SENSITIVE_PATHS = [
1063
+ { pattern: /auth|login|token|session/i, weight: 20, reason: "touches authentication" },
1064
+ { pattern: /payment|billing|stripe|paypal|checkout/i, weight: 25, reason: "touches payments" },
1065
+ { pattern: /secret|credential|env|config\/security/i, weight: 15, reason: "touches secrets/security config" }
1066
+ ];
1067
+ function levelFromScore(score) {
1068
+ if (score >= 90) return "critical";
1069
+ if (score >= 65) return "high";
1070
+ if (score >= 40) return "medium";
1071
+ return "low";
1072
+ }
1073
+ async function scoreFile(graph, filePath, matchedPatterns = []) {
1074
+ const reasons = [];
1075
+ const normalized = path7.resolve(graph.projectRoot, filePath);
1076
+ const node = await graph.getNode("file", normalized);
1077
+ const incidents = await graph.getIncidentsForFile(filePath);
1078
+ let score = 10;
1079
+ const data = node?.data;
1080
+ if (data) {
1081
+ score = BASE_RISK[data.riskLevel] ?? score;
1082
+ reasons.push(`baseline ${data.riskLevel}`);
1083
+ if (data.incidentCount > 0) {
1084
+ const incBoost = Math.min(data.incidentCount * 12, 36);
1085
+ score += incBoost;
1086
+ reasons.push(`historical incidents (+${incBoost})`);
1087
+ }
1088
+ if (data.changeCount > 5) {
1089
+ const changeBoost = Math.min((data.changeCount - 5) * 2, 12);
1090
+ score += changeBoost;
1091
+ reasons.push(`frequent changes (+${changeBoost})`);
1092
+ }
1093
+ if (data.lastChanged) {
1094
+ const lastChanged = new Date(data.lastChanged).getTime();
1095
+ const days = (Date.now() - lastChanged) / (1e3 * 60 * 60 * 24);
1096
+ if (days > 60 && data.incidentCount === 0) {
1097
+ score -= 5;
1098
+ reasons.push("stable for 60d (-5)");
1099
+ }
1100
+ }
1101
+ }
1102
+ for (const { pattern, weight, reason } of SENSITIVE_PATHS) {
1103
+ if (pattern.test(filePath)) {
1104
+ score += weight;
1105
+ reasons.push(reason);
1106
+ }
1107
+ }
1108
+ if (matchedPatterns.length > 0) {
1109
+ const patternBoost = Math.min(
1110
+ matchedPatterns.reduce((acc, p) => acc + (p.data.confidence ?? 50) / 10, 0),
1111
+ 20
1112
+ );
1113
+ score += patternBoost;
1114
+ reasons.push(`pattern match (+${Math.round(patternBoost)})`);
1115
+ }
1116
+ if (incidents.length > 0) {
1117
+ const timestamps = incidents.map((i) => new Date(i.data.timestamp).getTime()).sort((a, b) => b - a);
1118
+ const recent = timestamps[0];
1119
+ const daysSince = (Date.now() - recent) / (1e3 * 60 * 60 * 24);
1120
+ if (daysSince > 90) {
1121
+ score -= 5;
1122
+ reasons.push("no incidents in 90d (-5)");
1123
+ } else {
1124
+ score += 8;
1125
+ reasons.push("recent incident (+8)");
1126
+ }
1127
+ }
1128
+ const level = levelFromScore(score);
1129
+ return {
1130
+ file: filePath,
1131
+ score,
1132
+ level,
1133
+ reasons,
1134
+ incidents,
1135
+ matchedPatterns
1136
+ };
1137
+ }
1138
+ async function scoreChangeSet(graph, files, patternMatches = {}) {
1139
+ const fileResults = [];
1140
+ for (const file of files) {
1141
+ const patterns = patternMatches[file] ?? [];
1142
+ fileResults.push(await scoreFile(graph, file, patterns));
1143
+ }
1144
+ const maxScore = Math.max(...fileResults.map((f) => f.score), 10);
1145
+ const spreadBoost = files.length > 5 ? Math.min((files.length - 5) * 2, 10) : 0;
1146
+ const overallScore = maxScore + spreadBoost;
1147
+ const overall = levelFromScore(overallScore);
1148
+ const shouldEscalate = overall === "critical" || overall === "high";
1149
+ return {
1150
+ files: fileResults,
1151
+ overall,
1152
+ score: overallScore,
1153
+ shouldEscalate
1154
+ };
1155
+ }
1156
+
1157
+ // src/agent/pattern-matcher.ts
1158
+ async function matchPatternsForFiles(graph, files) {
1159
+ const matches = [];
1160
+ const byFile = {};
1161
+ for (const file of files) {
1162
+ const patterns = await graph.getPatternsForFile(file);
1163
+ if (patterns.length === 0) continue;
1164
+ byFile[file] = patterns;
1165
+ for (const pattern of patterns) {
1166
+ matches.push({
1167
+ file,
1168
+ pattern,
1169
+ confidence: pattern.data.confidence,
1170
+ isAntiPattern: pattern.data.isAntiPattern
1171
+ });
1172
+ }
1173
+ }
1174
+ return { matches, byFile };
1175
+ }
1176
+
1177
+ // src/orchestrator/triager.ts
1178
+ var Triager = class {
1179
+ constructor(_config) {
1180
+ }
1181
+ /**
1182
+ * Triage a change to select appropriate agents
1183
+ * Note: Skills/agents have been removed - Trie is now purely a decision ledger
1184
+ */
1185
+ async triage(_context, _forceAgents) {
1186
+ return [];
1187
+ }
1188
+ /**
1189
+ * Get all available agent names (deprecated - returns empty array)
1190
+ */
1191
+ getAvailableAgents() {
1192
+ return [];
1193
+ }
1194
+ };
1195
+
1196
+ // src/utils/parallel-executor.ts
1197
+ import { Worker } from "worker_threads";
1198
+ import { cpus } from "os";
1199
+ import { existsSync as existsSync4 } from "fs";
1200
+ import { fileURLToPath } from "url";
1201
+ var ParallelExecutor = class {
1202
+ maxWorkers;
1203
+ cache;
1204
+ streaming;
1205
+ activeWorkers = /* @__PURE__ */ new Set();
1206
+ cacheEnabled = true;
1207
+ useWorkerThreads = false;
1208
+ workerAvailable = null;
1209
+ warnedWorkerFallback = false;
1210
+ constructor(cacheManager, maxWorkers = Math.max(2, Math.min(cpus().length - 1, 8)), options) {
1211
+ this.maxWorkers = maxWorkers;
1212
+ this.cache = cacheManager;
1213
+ this.cacheEnabled = options?.cacheEnabled ?? true;
1214
+ this.useWorkerThreads = options?.useWorkerThreads ?? false;
1215
+ }
1216
+ /**
1217
+ * Set streaming manager for real-time updates
1218
+ */
1219
+ setStreaming(streaming) {
1220
+ this.streaming = streaming;
1221
+ }
1222
+ /**
1223
+ * Execute agents in parallel with intelligent scheduling
1224
+ */
1225
+ async executeAgents(agents, files, context) {
1226
+ if (agents.length === 0) {
1227
+ return /* @__PURE__ */ new Map();
1228
+ }
1229
+ if (this.streaming && this.streaming.getProgress().totalFiles === 0) {
1230
+ this.streaming.startScan(files.length);
1231
+ }
1232
+ const cacheResults = /* @__PURE__ */ new Map();
1233
+ const uncachedTasks = [];
1234
+ for (const agent of agents) {
1235
+ const cached = await this.checkAgentCache(agent, files);
1236
+ if (cached) {
1237
+ cacheResults.set(agent.name, cached);
1238
+ this.streaming?.completeAgent(agent.name, cached.issues);
1239
+ } else {
1240
+ uncachedTasks.push({
1241
+ agent,
1242
+ files,
1243
+ context,
1244
+ priority: agent.priority?.tier || 2,
1245
+ timeoutMs: context?.config?.timeoutMs || 12e4
1246
+ });
1247
+ }
1248
+ }
1249
+ uncachedTasks.sort((a, b) => a.priority - b.priority);
1250
+ const parallelResults = await this.executeTasksParallel(uncachedTasks);
1251
+ await this.cacheResults(parallelResults);
1252
+ const allResults = /* @__PURE__ */ new Map();
1253
+ for (const [agent, result] of cacheResults) {
1254
+ allResults.set(agent, result);
1255
+ }
1256
+ for (const result of parallelResults) {
1257
+ allResults.set(result.agent, result.result);
1258
+ }
1259
+ const allIssues = Array.from(allResults.values()).flatMap((r) => r.issues);
1260
+ this.streaming?.completeScan(allIssues);
1261
+ return allResults;
1262
+ }
1263
+ /**
1264
+ * Check if agent has cached results for given files
1265
+ */
1266
+ async checkAgentCache(agent, files) {
1267
+ if (!this.cacheEnabled || !this.cache) {
1268
+ return null;
1269
+ }
1270
+ const cachedIssues = await this.cache.getCachedBatch(files, agent.name);
1271
+ if (cachedIssues.size === files.length) {
1272
+ const allIssues = Array.from(cachedIssues.values()).flat();
1273
+ return {
1274
+ agent: agent.name,
1275
+ issues: allIssues,
1276
+ executionTime: 0,
1277
+ // Cached
1278
+ success: true,
1279
+ metadata: {
1280
+ filesAnalyzed: files.length,
1281
+ linesAnalyzed: 0
1282
+ }
1283
+ };
1284
+ }
1285
+ return null;
1286
+ }
1287
+ /**
1288
+ * Execute tasks in parallel batches
1289
+ */
1290
+ async executeTasksParallel(tasks) {
1291
+ if (tasks.length === 0) {
1292
+ return [];
1293
+ }
1294
+ const results = [];
1295
+ const batches = this.createBatches(tasks, this.maxWorkers);
1296
+ for (const batch of batches) {
1297
+ const batchResults = await Promise.all(
1298
+ batch.map((task) => this.executeTask(task))
1299
+ );
1300
+ results.push(...batchResults);
1301
+ }
1302
+ return results;
1303
+ }
1304
+ /**
1305
+ * Create batches for parallel execution
1306
+ */
1307
+ createBatches(tasks, batchSize) {
1308
+ const batches = [];
1309
+ for (let i = 0; i < tasks.length; i += batchSize) {
1310
+ batches.push(tasks.slice(i, i + batchSize));
1311
+ }
1312
+ return batches;
1313
+ }
1314
+ /**
1315
+ * Execute a single task
1316
+ */
1317
+ async executeTask(task) {
1318
+ const startTime = Date.now();
1319
+ this.streaming?.startAgent(task.agent.name);
1320
+ try {
1321
+ const result = this.canUseWorkers() ? await this.executeTaskInWorker(task) : await task.agent.scan(task.files, task.context);
1322
+ const executionTime = Date.now() - startTime;
1323
+ this.streaming?.completeAgent(task.agent.name, result.issues);
1324
+ return {
1325
+ agent: task.agent.name,
1326
+ result,
1327
+ fromCache: false,
1328
+ executionTime
1329
+ };
1330
+ } catch (error) {
1331
+ const executionTime = Date.now() - startTime;
1332
+ const errorMessage = error instanceof Error ? error.message : String(error);
1333
+ this.streaming?.reportError(new Error(errorMessage), `Agent: ${task.agent.name}`);
1334
+ return {
1335
+ agent: task.agent.name,
1336
+ result: {
1337
+ agent: task.agent.name,
1338
+ issues: [],
1339
+ executionTime,
1340
+ success: false,
1341
+ error: errorMessage
1342
+ },
1343
+ fromCache: false,
1344
+ executionTime
1345
+ };
1346
+ }
1347
+ }
1348
+ canUseWorkers() {
1349
+ if (!this.useWorkerThreads) {
1350
+ return false;
1351
+ }
1352
+ if (this.workerAvailable !== null) {
1353
+ return this.workerAvailable;
1354
+ }
1355
+ const workerUrl = this.getWorkerUrl();
1356
+ this.workerAvailable = existsSync4(fileURLToPath(workerUrl));
1357
+ if (!this.workerAvailable && !this.warnedWorkerFallback && !isInteractiveMode()) {
1358
+ console.error("Worker threads unavailable; falling back to in-process agents.");
1359
+ this.warnedWorkerFallback = true;
1360
+ }
1361
+ return this.workerAvailable;
1362
+ }
1363
+ getWorkerUrl() {
1364
+ const distDir = new URL(".", import.meta.url);
1365
+ return new URL("workers/agent-worker.js", distDir);
1366
+ }
1367
+ async executeTaskInWorker(task) {
1368
+ const workerUrl = this.getWorkerUrl();
1369
+ return new Promise((resolve, reject) => {
1370
+ const worker = new Worker(workerUrl, {
1371
+ workerData: {
1372
+ agentName: task.agent.name,
1373
+ files: task.files,
1374
+ context: task.context
1375
+ }
1376
+ });
1377
+ this.activeWorkers.add(worker);
1378
+ const timeout = setTimeout(() => {
1379
+ worker.terminate().catch(() => void 0);
1380
+ reject(new Error(`Agent ${task.agent.name} timed out after ${task.timeoutMs}ms`));
1381
+ }, task.timeoutMs);
1382
+ worker.on("message", (message) => {
1383
+ if (message?.type === "result") {
1384
+ clearTimeout(timeout);
1385
+ resolve(message.result);
1386
+ } else if (message?.type === "error") {
1387
+ clearTimeout(timeout);
1388
+ reject(new Error(message.error));
1389
+ }
1390
+ });
1391
+ worker.on("error", (error) => {
1392
+ clearTimeout(timeout);
1393
+ reject(error);
1394
+ });
1395
+ worker.on("exit", (code) => {
1396
+ this.activeWorkers.delete(worker);
1397
+ if (code !== 0) {
1398
+ clearTimeout(timeout);
1399
+ reject(new Error(`Worker stopped with exit code ${code}`));
1400
+ }
1401
+ });
1402
+ });
1403
+ }
1404
+ /**
1405
+ * Cache results for future use
1406
+ */
1407
+ async cacheResults(results) {
1408
+ if (!this.cacheEnabled || !this.cache) {
1409
+ return;
1410
+ }
1411
+ const cachePromises = results.filter((r) => r.result.success && !r.fromCache).map((r) => {
1412
+ const issuesByFile = this.groupIssuesByFile(r.result.issues);
1413
+ const perFilePromises = Object.entries(issuesByFile).map(
1414
+ ([file, issues]) => this.cache.setCached(file, r.agent, issues, r.executionTime)
1415
+ );
1416
+ return Promise.all(perFilePromises);
1417
+ });
1418
+ await Promise.allSettled(cachePromises);
1419
+ }
1420
+ /**
1421
+ * Cleanup resources
1422
+ */
1423
+ async cleanup() {
1424
+ const terminationPromises = Array.from(this.activeWorkers).map(
1425
+ (worker) => worker.terminate()
1426
+ );
1427
+ await Promise.allSettled(terminationPromises);
1428
+ this.activeWorkers.clear();
1429
+ }
1430
+ groupIssuesByFile(issues) {
1431
+ const grouped = {};
1432
+ for (const issue of issues) {
1433
+ if (!grouped[issue.file]) {
1434
+ grouped[issue.file] = [];
1435
+ }
1436
+ grouped[issue.file].push(issue);
1437
+ }
1438
+ return grouped;
1439
+ }
1440
+ };
1441
+ function calculateOptimalConcurrency() {
1442
+ const numCPUs = cpus().length;
1443
+ const availableMemoryGB = process.memoryUsage().rss / 1024 / 1024 / 1024;
1444
+ let optimal = Math.max(2, Math.min(numCPUs - 1, 8));
1445
+ if (availableMemoryGB < 2) {
1446
+ optimal = Math.max(2, Math.floor(optimal / 2));
1447
+ }
1448
+ if (numCPUs > 8) {
1449
+ optimal = Math.min(optimal + 2, 12);
1450
+ }
1451
+ return optimal;
1452
+ }
1453
+
1454
+ // src/utils/cache-manager.ts
1455
+ import { readFile as readFile2, writeFile as writeFile2, mkdir as mkdir2, stat } from "fs/promises";
1456
+ import { join as join3 } from "path";
1457
+ import { createHash } from "crypto";
1458
+ var CacheManager = class {
1459
+ cacheDir;
1460
+ indexPath;
1461
+ VERSION = "1.0.0";
1462
+ MAX_AGE_MS = 24 * 60 * 60 * 1e3;
1463
+ // 24 hours
1464
+ MAX_ENTRIES = 1e3;
1465
+ constructor(baseDir) {
1466
+ this.cacheDir = join3(getTrieDirectory(baseDir), "cache");
1467
+ this.indexPath = join3(this.cacheDir, "index.json");
1468
+ }
1469
+ /**
1470
+ * Generate cache key for a file and agent combination
1471
+ */
1472
+ generateCacheKey(filePath, agent, fileHash) {
1473
+ const key = `${filePath}:${agent}:${fileHash}`;
1474
+ return createHash("sha256").update(key).digest("hex").slice(0, 16);
1475
+ }
1476
+ /**
1477
+ * Get file hash for cache validation
1478
+ */
1479
+ async getFileHash(filePath) {
1480
+ try {
1481
+ const content = await readFile2(filePath, "utf-8");
1482
+ const stats = await stat(filePath);
1483
+ const hash = createHash("sha256").update(content).digest("hex").slice(0, 16);
1484
+ return {
1485
+ hash,
1486
+ size: stats.size,
1487
+ mtime: stats.mtime.getTime()
1488
+ };
1489
+ } catch {
1490
+ return { hash: "", size: 0, mtime: 0 };
1491
+ }
1492
+ }
1493
+ /**
1494
+ * Load cache index
1495
+ */
1496
+ async loadIndex() {
1497
+ try {
1498
+ const content = await readFile2(this.indexPath, "utf-8");
1499
+ return JSON.parse(content);
1500
+ } catch {
1501
+ return {
1502
+ version: this.VERSION,
1503
+ created: Date.now(),
1504
+ entries: {}
1505
+ };
1506
+ }
1507
+ }
1508
+ /**
1509
+ * Save cache index
1510
+ */
1511
+ async saveIndex(index) {
1512
+ try {
1513
+ await mkdir2(this.cacheDir, { recursive: true });
1514
+ await writeFile2(this.indexPath, JSON.stringify(index, null, 2));
1515
+ } catch (error) {
1516
+ if (!isInteractiveMode()) {
1517
+ console.warn("Failed to save cache index:", error);
1518
+ }
1519
+ }
1520
+ }
1521
+ /**
1522
+ * Clean up expired entries
1523
+ */
1524
+ cleanupExpired(index) {
1525
+ const now = Date.now();
1526
+ const validEntries = {};
1527
+ for (const [key, entry] of Object.entries(index.entries)) {
1528
+ if (now - entry.timestamp < this.MAX_AGE_MS) {
1529
+ validEntries[key] = entry;
1530
+ }
1531
+ }
1532
+ const entries = Object.entries(validEntries);
1533
+ if (entries.length > this.MAX_ENTRIES) {
1534
+ entries.sort((a, b) => b[1].timestamp - a[1].timestamp);
1535
+ const limited = entries.slice(0, this.MAX_ENTRIES);
1536
+ return {
1537
+ ...index,
1538
+ entries: Object.fromEntries(limited)
1539
+ };
1540
+ }
1541
+ return {
1542
+ ...index,
1543
+ entries: validEntries
1544
+ };
1545
+ }
1546
+ /**
1547
+ * Get cached result for a file and agent
1548
+ *
1549
+ * Cache automatically invalidates when files change:
1550
+ * - Cache key includes file hash: hash(filePath:agent:fileHash)
1551
+ * - When file changes, hash changes, so cache key changes
1552
+ * - Old cache entry won't be found (different key)
1553
+ * - File is automatically rescanned
1554
+ *
1555
+ * This means cache auto-updates when Claude fixes code - no manual invalidation needed!
1556
+ */
1557
+ async getCached(filePath, agent) {
1558
+ try {
1559
+ const { hash, size: _size, mtime: _mtime } = await this.getFileHash(filePath);
1560
+ if (!hash) return null;
1561
+ const index = await this.loadIndex();
1562
+ const cacheKey = this.generateCacheKey(filePath, agent, hash);
1563
+ const entry = index.entries[cacheKey];
1564
+ if (!entry) return null;
1565
+ const isValid = entry.fileHash === hash && entry.version === this.VERSION && Date.now() - entry.timestamp < this.MAX_AGE_MS;
1566
+ if (!isValid) {
1567
+ delete index.entries[cacheKey];
1568
+ await this.saveIndex(index);
1569
+ return null;
1570
+ }
1571
+ return entry.issues;
1572
+ } catch {
1573
+ return null;
1574
+ }
1575
+ }
1576
+ /**
1577
+ * Cache result for a file and agent
1578
+ */
1579
+ async setCached(filePath, agent, issues, executionTime) {
1580
+ try {
1581
+ const { hash, size } = await this.getFileHash(filePath);
1582
+ if (!hash) return;
1583
+ const index = await this.loadIndex();
1584
+ const cacheKey = this.generateCacheKey(filePath, agent, hash);
1585
+ index.entries[cacheKey] = {
1586
+ version: this.VERSION,
1587
+ timestamp: Date.now(),
1588
+ fileHash: hash,
1589
+ fileSize: size,
1590
+ agent,
1591
+ issues,
1592
+ executionTime
1593
+ };
1594
+ const cleanedIndex = this.cleanupExpired(index);
1595
+ await this.saveIndex(cleanedIndex);
1596
+ } catch (error) {
1597
+ if (!isInteractiveMode()) {
1598
+ console.warn("Failed to cache result:", error);
1599
+ }
1600
+ }
1601
+ }
1602
+ /**
1603
+ * Check if multiple files have cached results
1604
+ */
1605
+ async getCachedBatch(files, agent) {
1606
+ const results = /* @__PURE__ */ new Map();
1607
+ await Promise.all(
1608
+ files.map(async (file) => {
1609
+ const cached = await this.getCached(file, agent);
1610
+ if (cached) {
1611
+ results.set(file, cached);
1612
+ }
1613
+ })
1614
+ );
1615
+ return results;
1616
+ }
1617
+ /**
1618
+ * Get cache statistics
1619
+ */
1620
+ async getStats() {
1621
+ try {
1622
+ const index = await this.loadIndex();
1623
+ const entries = Object.values(index.entries);
1624
+ const totalSizeKB = entries.reduce((acc, entry) => acc + entry.fileSize, 0) / 1024;
1625
+ const timestamps = entries.map((e) => e.timestamp);
1626
+ const agents = [...new Set(entries.map((e) => e.agent))];
1627
+ return {
1628
+ totalEntries: entries.length,
1629
+ totalSizeKB: Math.round(totalSizeKB),
1630
+ oldestEntry: timestamps.length > 0 ? Math.min(...timestamps) : null,
1631
+ newestEntry: timestamps.length > 0 ? Math.max(...timestamps) : null,
1632
+ agents
1633
+ };
1634
+ } catch {
1635
+ return {
1636
+ totalEntries: 0,
1637
+ totalSizeKB: 0,
1638
+ oldestEntry: null,
1639
+ newestEntry: null,
1640
+ agents: []
1641
+ };
1642
+ }
1643
+ }
1644
+ /**
1645
+ * Clean up stale cache entries by verifying file hashes
1646
+ * This removes entries where files have changed or no longer exist
1647
+ * Called periodically to keep cache clean
1648
+ *
1649
+ * Note: Since cache keys are hashed, we can't easily reverse-engineer file paths.
1650
+ * However, when getCached() is called, it naturally invalidates stale entries
1651
+ * by checking if the current file hash matches the cached hash. This method
1652
+ * proactively cleans up entries for known changed files.
1653
+ */
1654
+ async cleanupStaleEntries(filePaths) {
1655
+ try {
1656
+ const index = await this.loadIndex();
1657
+ let removedCount = 0;
1658
+ const keysToRemove = [];
1659
+ if (filePaths && filePaths.length > 0) {
1660
+ const agents = /* @__PURE__ */ new Set();
1661
+ for (const entry of Object.values(index.entries)) {
1662
+ agents.add(entry.agent);
1663
+ }
1664
+ for (const filePath of filePaths) {
1665
+ try {
1666
+ const { hash: currentHash } = await this.getFileHash(filePath);
1667
+ if (!currentHash) {
1668
+ continue;
1669
+ }
1670
+ for (const agent of agents) {
1671
+ for (const [key, entry] of Object.entries(index.entries)) {
1672
+ if (entry.agent !== agent) continue;
1673
+ if (entry.fileHash !== currentHash) {
1674
+ const oldKey = this.generateCacheKey(filePath, agent, entry.fileHash);
1675
+ if (oldKey === key) {
1676
+ keysToRemove.push(key);
1677
+ removedCount++;
1678
+ }
1679
+ }
1680
+ }
1681
+ }
1682
+ } catch {
1683
+ continue;
1684
+ }
1685
+ }
1686
+ }
1687
+ const uniqueKeys = [...new Set(keysToRemove)];
1688
+ for (const key of uniqueKeys) {
1689
+ delete index.entries[key];
1690
+ }
1691
+ if (removedCount > 0) {
1692
+ await this.saveIndex(index);
1693
+ }
1694
+ return removedCount;
1695
+ } catch (error) {
1696
+ if (!isInteractiveMode()) {
1697
+ console.warn("Failed to cleanup stale cache entries:", error);
1698
+ }
1699
+ return 0;
1700
+ }
1701
+ }
1702
+ /**
1703
+ * Clear all cache
1704
+ */
1705
+ async clear() {
1706
+ try {
1707
+ const emptyIndex = {
1708
+ version: this.VERSION,
1709
+ created: Date.now(),
1710
+ entries: {}
1711
+ };
1712
+ await this.saveIndex(emptyIndex);
1713
+ } catch (error) {
1714
+ if (!isInteractiveMode()) {
1715
+ console.warn("Failed to clear cache:", error);
1716
+ }
1717
+ }
1718
+ }
1719
+ };
1720
+
1721
+ // src/orchestrator/executor.ts
1722
+ var Executor = class {
1723
+ async executeAgents(agents, files, context, options) {
1724
+ const parallel = options?.parallel ?? true;
1725
+ const cacheEnabled = options?.cacheEnabled ?? true;
1726
+ const maxConcurrency = options?.maxConcurrency ?? calculateOptimalConcurrency();
1727
+ const useWorkerThreads = options?.useWorkerThreads ?? false;
1728
+ if (!isInteractiveMode()) {
1729
+ console.error(`Executing ${agents.length} scouts ${parallel ? "in parallel" : "sequentially"}...`);
1730
+ }
1731
+ if (parallel) {
1732
+ const cacheManager = cacheEnabled ? new CacheManager(context.workingDir) : null;
1733
+ const executor = new ParallelExecutor(cacheManager, maxConcurrency, {
1734
+ cacheEnabled,
1735
+ useWorkerThreads
1736
+ });
1737
+ if (options?.streaming) {
1738
+ executor.setStreaming(options.streaming);
1739
+ }
1740
+ const results = await executor.executeAgents(agents, files, {
1741
+ ...context,
1742
+ config: { timeoutMs: options?.timeoutMs ?? 12e4 }
1743
+ });
1744
+ return agents.map((agent) => results.get(agent.name)).filter(Boolean);
1745
+ }
1746
+ const promises = agents.map(
1747
+ (agent) => this.executeAgentWithTimeout(agent, files, context, options?.timeoutMs ?? 3e4)
1748
+ );
1749
+ try {
1750
+ const results = await Promise.allSettled(promises);
1751
+ return results.map((result, index) => {
1752
+ if (result.status === "fulfilled") {
1753
+ if (!isInteractiveMode()) {
1754
+ console.error(`${agents[index].name} completed in ${result.value.executionTime}ms`);
1755
+ }
1756
+ return result.value;
1757
+ } else {
1758
+ if (!isInteractiveMode()) {
1759
+ console.error(`${agents[index].name} failed:`, result.reason);
1760
+ }
1761
+ return {
1762
+ agent: agents[index].name,
1763
+ issues: [],
1764
+ executionTime: 0,
1765
+ success: false,
1766
+ error: result.reason instanceof Error ? result.reason.message : String(result.reason)
1767
+ };
1768
+ }
1769
+ });
1770
+ } catch (error) {
1771
+ if (!isInteractiveMode()) {
1772
+ console.error("Executor error:", error);
1773
+ }
1774
+ return agents.map((agent) => ({
1775
+ agent: agent.name,
1776
+ issues: [],
1777
+ executionTime: 0,
1778
+ success: false,
1779
+ error: "Execution failed"
1780
+ }));
1781
+ }
1782
+ }
1783
+ async executeAgentWithTimeout(agent, files, context, timeoutMs = 3e4) {
1784
+ return new Promise(async (resolve, reject) => {
1785
+ const timeout = setTimeout(() => {
1786
+ reject(new Error(`Agent ${agent.name} timed out after ${timeoutMs}ms`));
1787
+ }, timeoutMs);
1788
+ try {
1789
+ const result = await agent.scan(files, context);
1790
+ clearTimeout(timeout);
1791
+ resolve(result);
1792
+ } catch (error) {
1793
+ clearTimeout(timeout);
1794
+ reject(error);
1795
+ }
1796
+ });
1797
+ }
1798
+ };
1799
+
1800
+ // src/agent/reason.ts
1801
+ function buildDefaultCodeContext() {
1802
+ return {
1803
+ changeType: "general",
1804
+ isNewFeature: false,
1805
+ touchesUserData: false,
1806
+ touchesAuth: false,
1807
+ touchesPayments: false,
1808
+ touchesDatabase: false,
1809
+ touchesAPI: false,
1810
+ touchesUI: false,
1811
+ touchesHealthData: false,
1812
+ touchesSecurityConfig: false,
1813
+ linesChanged: 50,
1814
+ filePatterns: [],
1815
+ framework: "unknown",
1816
+ language: "typescript",
1817
+ touchesCrypto: false,
1818
+ touchesFileSystem: false,
1819
+ touchesThirdPartyAPI: false,
1820
+ touchesLogging: false,
1821
+ touchesErrorHandling: false,
1822
+ hasTests: false,
1823
+ complexity: "medium",
1824
+ patterns: {
1825
+ hasAsyncCode: false,
1826
+ hasFormHandling: false,
1827
+ hasFileUploads: false,
1828
+ hasEmailHandling: false,
1829
+ hasRateLimiting: false,
1830
+ hasWebSockets: false,
1831
+ hasCaching: false,
1832
+ hasQueue: false
1833
+ }
1834
+ };
1835
+ }
1836
/**
 * Render a one-line explanation for a change-risk result, naming the
 * highest-scoring file and its reasons.
 *
 * @param {object} result - Risk result with `overall` and a `files` array of
 *   `{ file, score, reasons }` entries.
 * @returns {string} Human-readable explanation of the risk level.
 */
function buildExplanation(result) {
  // Single pass to find the riskiest file; on tied scores the earliest
  // entry wins, matching a stable descending sort of the array.
  let top;
  for (const entry of result.files) {
    if (top === undefined || entry.score > top.score) {
      top = entry;
    }
  }
  if (!top) return `Risk level ${result.overall} (no files provided)`;
  return `Risk level ${result.overall} because ${top.file} ${top.reasons.join(", ")}`;
}
1841
/**
 * Map a risk level (plus anti-pattern detection) to a one-line
 * recommendation for the reviewer.
 *
 * @param {string} risk - One of "critical" | "high" | "medium" | "low".
 * @param {boolean} hasAntiPattern - Whether any matched pattern is an anti-pattern.
 * @returns {string} Recommendation text.
 */
function buildRecommendation(risk, hasAntiPattern) {
  // Anti-patterns force the strictest advice regardless of the risk score.
  if (hasAntiPattern) {
    return "Block until reviewed: address anti-patterns and rerun targeted tests.";
  }
  switch (risk) {
    case "critical":
      return "Block until reviewed: address anti-patterns and rerun targeted tests.";
    case "high":
      return "Require senior review and run full test suite before merge.";
    case "medium":
      return "Proceed with caution; run impacted tests and sanity checks.";
    default:
      return "Low risk; proceed but keep an eye on recent changes.";
  }
}
1853
/**
 * Analyze a set of changed files against the project's context graph and
 * produce a risk assessment: risk level, block decision, explanation,
 * matched patterns, related incidents, and a recommendation.
 *
 * @param {string} projectPath - Root of the project to analyze.
 * @param {string[]} files - Changed file paths.
 * @param {object} [options] - `runAgents` to also execute scan agents;
 *   optional `codeContext` and `scanContext` overrides.
 * @returns {Promise<object>} The reasoning object (plus `agentResults`
 *   when `options.runAgents` is set).
 */
async function reasonAboutChanges(projectPath, files, options = {}) {
  const graph = new ContextGraph(projectPath);
  // Pattern matches across the change set plus a per-file breakdown used
  // for scoring. NOTE(review): matchPatternsForFiles/scoreChangeSet are
  // defined elsewhere in this chunk; scoring semantics not visible here.
  const { matches, byFile } = await matchPatternsForFiles(graph, files);
  const changeRisk = await scoreChangeSet(graph, files, byFile);
  // Gather historical incidents touching any changed file. Fetched
  // sequentially, so the flattened list preserves `files` order.
  const incidents = [];
  for (const file of files) {
    const fileIncidents = await graph.getIncidentsForFile(file);
    incidents.push(...fileIncidents);
  }
  // Any matched anti-pattern escalates the overall risk straight to critical.
  const hasAntiPattern = matches.some((m) => m.isAntiPattern);
  const riskLevel = hasAntiPattern ? "critical" : changeRisk.overall;
  // Block on anti-patterns or when the computed level is high/critical.
  const shouldBlock = hasAntiPattern || riskLevel === "critical" || riskLevel === "high";
  const reasoning = {
    riskLevel,
    shouldBlock,
    explanation: buildExplanation(changeRisk),
    relevantIncidents: incidents,
    matchedPatterns: matches.map((m) => m.pattern),
    recommendation: buildRecommendation(riskLevel, hasAntiPattern),
    files: changeRisk.files
  };
  // Optionally run the scan agents the triager selects for this kind of
  // change; their results are attached to the reasoning as `agentResults`.
  if (options.runAgents) {
    const codeContext = options.codeContext ?? buildDefaultCodeContext();
    const triager = new Triager();
    const agents = await triager.triage(codeContext);
    if (agents.length > 0) {
      const executor = new Executor();
      const scanContext = {
        workingDir: projectPath,
        ...options.scanContext
      };
      // Truthy framework/language from the code context override whatever
      // was spread in from options.scanContext above.
      if (codeContext.framework) scanContext.framework = codeContext.framework;
      if (codeContext.language) scanContext.language = codeContext.language;
      reasoning.agentResults = await executor.executeAgents(agents, files, scanContext, {
        parallel: true,
        // Per-config timeout; defaults to 60s (6e4 ms) for the whole run.
        timeoutMs: options.scanContext?.config?.timeoutMs ?? 6e4
      });
    } else {
      // Triager chose no agents — record an explicit empty result set.
      reasoning.agentResults = [];
    }
  }
  return reasoning;
}
1896
/**
 * Same analysis as reasonAboutChanges, rendered as human-readable text.
 *
 * @param {string} projectPath - Root of the project to analyze.
 * @param {string[]} files - Changed file paths.
 * @param {object} [options] - Forwarded to reasonAboutChanges.
 * @returns {Promise<*>} The humanized reasoning output.
 */
async function reasonAboutChangesHumanReadable(projectPath, files, options = {}) {
  const reasoning = await reasonAboutChanges(projectPath, files, options);
  // The comprehension chunk is loaded lazily, and only after the analysis
  // has succeeded, so its cost is paid solely on this code path.
  const comprehension = await import("./comprehension-46F7ZNKL.js");
  return comprehension.humanizeReasoning(reasoning);
}
1901
+
1902
+ export {
1903
+ exportToJson,
1904
+ importFromJson,
1905
+ IncidentIndex,
1906
+ formatFriendlyError,
1907
+ formatAuditLog,
1908
+ getAuditStatistics,
1909
+ getRecentAuditLogs,
1910
+ getSkillAuditLogs,
1911
+ runShellCommandSync,
1912
+ getStagedChanges,
1913
+ getUncommittedChanges,
1914
+ LearningEngine,
1915
+ perceiveCurrentChanges,
1916
+ reasonAboutChangesHumanReadable,
1917
+ saveCheckpoint,
1918
+ listCheckpoints,
1919
+ getLastCheckpoint,
1920
+ handleCheckpointCommand,
1921
+ isTrieInitialized
1922
+ };
1923
+ //# sourceMappingURL=chunk-QYOACM2C.js.map