@triedotdev/mcp 1.0.169 → 1.0.171

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (145) hide show
  1. package/README.md +62 -540
  2. package/dist/chunk-2YXOBNKW.js +619 -0
  3. package/dist/chunk-2YXOBNKW.js.map +1 -0
  4. package/dist/chunk-QR64Y5TI.js +363 -0
  5. package/dist/chunk-QR64Y5TI.js.map +1 -0
  6. package/dist/cli/main.d.ts +0 -15
  7. package/dist/cli/main.js +356 -3098
  8. package/dist/cli/main.js.map +1 -1
  9. package/dist/index.js +2 -34
  10. package/dist/index.js.map +1 -1
  11. package/dist/server/mcp-server.js +2 -34
  12. package/package.json +8 -31
  13. package/dist/autonomy-config-FSERX3O3.js +0 -30
  14. package/dist/autonomy-config-FSERX3O3.js.map +0 -1
  15. package/dist/chat-store-JNGNTDSN.js +0 -15
  16. package/dist/chat-store-JNGNTDSN.js.map +0 -1
  17. package/dist/chunk-2HF65EHQ.js +0 -311
  18. package/dist/chunk-2HF65EHQ.js.map +0 -1
  19. package/dist/chunk-3XR6WVAW.js +0 -4011
  20. package/dist/chunk-3XR6WVAW.js.map +0 -1
  21. package/dist/chunk-43X6JBEM.js +0 -36
  22. package/dist/chunk-43X6JBEM.js.map +0 -1
  23. package/dist/chunk-6NLHFIYA.js +0 -344
  24. package/dist/chunk-6NLHFIYA.js.map +0 -1
  25. package/dist/chunk-7IO4YUI3.js +0 -1827
  26. package/dist/chunk-7IO4YUI3.js.map +0 -1
  27. package/dist/chunk-AHD2CBQ7.js +0 -846
  28. package/dist/chunk-AHD2CBQ7.js.map +0 -1
  29. package/dist/chunk-BUTOP5EB.js +0 -931
  30. package/dist/chunk-BUTOP5EB.js.map +0 -1
  31. package/dist/chunk-DGUM43GV.js +0 -11
  32. package/dist/chunk-DGUM43GV.js.map +0 -1
  33. package/dist/chunk-EFWVF6TI.js +0 -267
  34. package/dist/chunk-EFWVF6TI.js.map +0 -1
  35. package/dist/chunk-F6WFNUAY.js +0 -216
  36. package/dist/chunk-F6WFNUAY.js.map +0 -1
  37. package/dist/chunk-FBNURWRY.js +0 -662
  38. package/dist/chunk-FBNURWRY.js.map +0 -1
  39. package/dist/chunk-FQ45QP5A.js +0 -361
  40. package/dist/chunk-FQ45QP5A.js.map +0 -1
  41. package/dist/chunk-FVRO5RN3.js +0 -1306
  42. package/dist/chunk-FVRO5RN3.js.map +0 -1
  43. package/dist/chunk-G2TGF6TR.js +0 -573
  44. package/dist/chunk-G2TGF6TR.js.map +0 -1
  45. package/dist/chunk-G3I7SZLW.js +0 -354
  46. package/dist/chunk-G3I7SZLW.js.map +0 -1
  47. package/dist/chunk-GTKYBOXL.js +0 -700
  48. package/dist/chunk-GTKYBOXL.js.map +0 -1
  49. package/dist/chunk-HVCDY3AK.js +0 -850
  50. package/dist/chunk-HVCDY3AK.js.map +0 -1
  51. package/dist/chunk-I2O5OYQT.js +0 -727
  52. package/dist/chunk-I2O5OYQT.js.map +0 -1
  53. package/dist/chunk-JVMBCWKS.js +0 -348
  54. package/dist/chunk-JVMBCWKS.js.map +0 -1
  55. package/dist/chunk-KCUOWRPX.js +0 -816
  56. package/dist/chunk-KCUOWRPX.js.map +0 -1
  57. package/dist/chunk-KDHN2ZQE.js +0 -313
  58. package/dist/chunk-KDHN2ZQE.js.map +0 -1
  59. package/dist/chunk-ME2OERF5.js +0 -345
  60. package/dist/chunk-ME2OERF5.js.map +0 -1
  61. package/dist/chunk-OBQ74FOU.js +0 -27
  62. package/dist/chunk-OBQ74FOU.js.map +0 -1
  63. package/dist/chunk-Q5EKA5YA.js +0 -254
  64. package/dist/chunk-Q5EKA5YA.js.map +0 -1
  65. package/dist/chunk-Q63FFI6D.js +0 -132
  66. package/dist/chunk-Q63FFI6D.js.map +0 -1
  67. package/dist/chunk-SASNMSB5.js +0 -12597
  68. package/dist/chunk-SASNMSB5.js.map +0 -1
  69. package/dist/chunk-T63OHG4Q.js +0 -440
  70. package/dist/chunk-T63OHG4Q.js.map +0 -1
  71. package/dist/chunk-TN5WEKWI.js +0 -173
  72. package/dist/chunk-TN5WEKWI.js.map +0 -1
  73. package/dist/chunk-VUL52BQL.js +0 -402
  74. package/dist/chunk-VUL52BQL.js.map +0 -1
  75. package/dist/chunk-VVITXIHN.js +0 -189
  76. package/dist/chunk-VVITXIHN.js.map +0 -1
  77. package/dist/chunk-WCN7S3EI.js +0 -14
  78. package/dist/chunk-WCN7S3EI.js.map +0 -1
  79. package/dist/chunk-XPZZFPBZ.js +0 -491
  80. package/dist/chunk-XPZZFPBZ.js.map +0 -1
  81. package/dist/chunk-ZJF5FTBX.js +0 -1396
  82. package/dist/chunk-ZJF5FTBX.js.map +0 -1
  83. package/dist/chunk-ZV2K6M7T.js +0 -74
  84. package/dist/chunk-ZV2K6M7T.js.map +0 -1
  85. package/dist/cli/create-agent.d.ts +0 -1
  86. package/dist/cli/create-agent.js +0 -1050
  87. package/dist/cli/create-agent.js.map +0 -1
  88. package/dist/cli/yolo-daemon.d.ts +0 -1
  89. package/dist/cli/yolo-daemon.js +0 -421
  90. package/dist/cli/yolo-daemon.js.map +0 -1
  91. package/dist/client-NJPZE5JT.js +0 -28
  92. package/dist/client-NJPZE5JT.js.map +0 -1
  93. package/dist/codebase-index-VAPF32XX.js +0 -12
  94. package/dist/codebase-index-VAPF32XX.js.map +0 -1
  95. package/dist/fast-analyzer-3GCCZMLK.js +0 -216
  96. package/dist/fast-analyzer-3GCCZMLK.js.map +0 -1
  97. package/dist/git-EO5SRFMN.js +0 -28
  98. package/dist/git-EO5SRFMN.js.map +0 -1
  99. package/dist/github-ingester-ZOKK6GRS.js +0 -11
  100. package/dist/github-ingester-ZOKK6GRS.js.map +0 -1
  101. package/dist/goal-manager-QUKX2W6C.js +0 -25
  102. package/dist/goal-manager-QUKX2W6C.js.map +0 -1
  103. package/dist/goal-validator-2SFSKKVU.js +0 -24
  104. package/dist/goal-validator-2SFSKKVU.js.map +0 -1
  105. package/dist/graph-B3NA4S7I.js +0 -10
  106. package/dist/graph-B3NA4S7I.js.map +0 -1
  107. package/dist/hypothesis-KCPBR652.js +0 -23
  108. package/dist/hypothesis-KCPBR652.js.map +0 -1
  109. package/dist/incident-index-EFNUSGWL.js +0 -11
  110. package/dist/incident-index-EFNUSGWL.js.map +0 -1
  111. package/dist/insight-store-EC4PLSAW.js +0 -22
  112. package/dist/insight-store-EC4PLSAW.js.map +0 -1
  113. package/dist/issue-store-YAXTNRRY.js +0 -36
  114. package/dist/issue-store-YAXTNRRY.js.map +0 -1
  115. package/dist/ledger-TWZTGDFA.js +0 -58
  116. package/dist/ledger-TWZTGDFA.js.map +0 -1
  117. package/dist/linear-ingester-XXPAZZRW.js +0 -11
  118. package/dist/linear-ingester-XXPAZZRW.js.map +0 -1
  119. package/dist/output-manager-RVJ37XKA.js +0 -13
  120. package/dist/output-manager-RVJ37XKA.js.map +0 -1
  121. package/dist/parse-goal-violation-SACGFG3C.js +0 -8
  122. package/dist/parse-goal-violation-SACGFG3C.js.map +0 -1
  123. package/dist/pattern-discovery-F7LU5K6E.js +0 -8
  124. package/dist/pattern-discovery-F7LU5K6E.js.map +0 -1
  125. package/dist/progress-SRQ2V3BP.js +0 -18
  126. package/dist/progress-SRQ2V3BP.js.map +0 -1
  127. package/dist/project-state-AHPA77SM.js +0 -28
  128. package/dist/project-state-AHPA77SM.js.map +0 -1
  129. package/dist/sync-M2FSWPBC.js +0 -12
  130. package/dist/sync-M2FSWPBC.js.map +0 -1
  131. package/dist/terminal-spawn-5YXDMUCF.js +0 -157
  132. package/dist/terminal-spawn-5YXDMUCF.js.map +0 -1
  133. package/dist/tiered-storage-DYNC5CQ6.js +0 -13
  134. package/dist/tiered-storage-DYNC5CQ6.js.map +0 -1
  135. package/dist/trie-agent-I3HAHY2G.js +0 -26
  136. package/dist/trie-agent-I3HAHY2G.js.map +0 -1
  137. package/dist/ui/chat.html +0 -1014
  138. package/dist/ui/goals.html +0 -967
  139. package/dist/ui/hypotheses.html +0 -1011
  140. package/dist/ui/ledger.html +0 -954
  141. package/dist/ui/nudges.html +0 -995
  142. package/dist/vibe-code-signatures-5ZULYP3D.js +0 -987
  143. package/dist/vibe-code-signatures-5ZULYP3D.js.map +0 -1
  144. package/dist/vulnerability-signatures-2URZSXAQ.js +0 -983
  145. package/dist/vulnerability-signatures-2URZSXAQ.js.map +0 -1
@@ -1,1306 +0,0 @@
1
- import {
2
- IncidentIndex
3
- } from "./chunk-TN5WEKWI.js";
4
- import {
5
- TriePatternDiscovery
6
- } from "./chunk-ZV2K6M7T.js";
7
- import {
8
- ContextGraph
9
- } from "./chunk-VUL52BQL.js";
10
- import {
11
- tryGetClient
12
- } from "./chunk-FQ45QP5A.js";
13
- import {
14
- storeIssues
15
- } from "./chunk-KCUOWRPX.js";
16
- import {
17
- getTrieDirectory,
18
- getWorkingDirectory
19
- } from "./chunk-VVITXIHN.js";
20
- import {
21
- isInteractiveMode
22
- } from "./chunk-KDHN2ZQE.js";
23
- import {
24
- getDiff,
25
- getRecentCommits,
26
- getStagedChanges,
27
- getUncommittedChanges,
28
- getWorkingTreeDiff
29
- } from "./chunk-2HF65EHQ.js";
30
-
31
- // src/utils/trie-init.ts
32
- import { existsSync } from "fs";
33
- import { join } from "path";
34
- var INIT_MARKERS = [
35
- "PROJECT.md",
36
- "RULES.md",
37
- "TEAM.md",
38
- "BOOTSTRAP.md",
39
- "AGENTS.md",
40
- "config.json"
41
- ];
42
- function isTrieInitialized(workDir) {
43
- const dir = workDir || getWorkingDirectory(void 0, true);
44
- const trieDir = getTrieDirectory(dir);
45
- return INIT_MARKERS.some((marker) => existsSync(join(trieDir, marker)));
46
- }
47
-
48
- // src/utils/errors.ts
49
- var TrieError = class extends Error {
50
- code;
51
- recoverable;
52
- userMessage;
53
- constructor(message, code, userMessage, recoverable = true) {
54
- super(message);
55
- this.code = code;
56
- this.recoverable = recoverable;
57
- this.userMessage = userMessage;
58
- }
59
- };
60
- function formatFriendlyError(error) {
61
- if (error instanceof TrieError) {
62
- return { userMessage: error.userMessage, code: error.code };
63
- }
64
- return {
65
- userMessage: "Something went wrong. Try again or run with --offline.",
66
- code: "UNKNOWN"
67
- };
68
- }
69
-
70
- // src/agent/confidence.ts
71
- function adjustConfidence(current, outcome, step = 0.1) {
72
- const delta = outcome === "positive" ? step : -step;
73
- return clamp(current + delta);
74
- }
75
- function clamp(value) {
76
- if (Number.isNaN(value)) return 0.5;
77
- return Math.min(1, Math.max(0, value));
78
- }
79
-
80
- // src/agent/learning.ts
81
- var LearningSystem = class {
82
- constructor(graph, projectPath) {
83
- this.graph = graph;
84
- this.incidentIndex = new IncidentIndex(graph, projectPath);
85
- this.discovery = new TriePatternDiscovery(graph, this.incidentIndex);
86
- }
87
- incidentIndex;
88
- discovery;
89
- async onWarningHeeded(files) {
90
- await this.adjustPatterns(files, "positive");
91
- }
92
- async onWarningIgnored(files) {
93
- await this.adjustPatterns(files, "negative");
94
- }
95
- async onIncidentReported(incidentId, files) {
96
- const incident = await this.graph.getNode("incident", incidentId);
97
- if (incident && incident.type === "incident") {
98
- this.incidentIndex.addIncidentToTrie(incident, files);
99
- }
100
- await this.discoverAndStorePatterns();
101
- }
102
- async onFeedback(helpful, files = []) {
103
- await this.adjustPatterns(files, helpful ? "positive" : "negative");
104
- }
105
- async adjustPatterns(files, outcome) {
106
- if (!files.length) return;
107
- for (const file of files) {
108
- const patterns = await this.graph.getPatternsForFile(file);
109
- await Promise.all(patterns.map((p) => this.updatePatternConfidence(p, outcome)));
110
- }
111
- }
112
- async updatePatternConfidence(pattern, outcome) {
113
- const current = pattern.data.confidence ?? 0.5;
114
- const updated = adjustConfidence(current, outcome, 0.05);
115
- await this.graph.updateNode("pattern", pattern.id, { confidence: updated, lastSeen: (/* @__PURE__ */ new Date()).toISOString() });
116
- }
117
- async discoverAndStorePatterns() {
118
- const hotPatterns = this.discovery.discoverHotPatterns();
119
- for (const hot of hotPatterns) {
120
- await this.graph.addNode("pattern", {
121
- description: `${hot.type === "directory" ? "Directory" : "File"} hot zone: ${hot.path}`,
122
- appliesTo: [hot.path],
123
- confidence: hot.confidence,
124
- occurrences: hot.incidentCount,
125
- firstSeen: (/* @__PURE__ */ new Date()).toISOString(),
126
- lastSeen: (/* @__PURE__ */ new Date()).toISOString(),
127
- isAntiPattern: true,
128
- source: "local"
129
- });
130
- }
131
- }
132
- };
133
-
134
- // src/agent/learning-engine.ts
135
- import path from "path";
136
- var LearningEngine = class {
137
- projectPath;
138
- graph;
139
- learningSystem;
140
- constructor(projectPath, graph) {
141
- this.projectPath = projectPath;
142
- this.graph = graph || new ContextGraph(projectPath);
143
- this.learningSystem = new LearningSystem(this.graph, projectPath);
144
- }
145
- /**
146
- * Unified learning method: Scans history AND processes manual feedback
147
- */
148
- async learn(options = {}) {
149
- const results = [];
150
- if (!options.manualFeedback) {
151
- const implicitCount = await this.learnFromHistory(options.limit || 20);
152
- results.push({ learned: implicitCount, source: "git-history" });
153
- }
154
- if (options.manualFeedback) {
155
- await this.recordManualFeedback(
156
- options.manualFeedback.helpful,
157
- options.manualFeedback.files,
158
- options.manualFeedback.note
159
- );
160
- results.push({ learned: options.manualFeedback.files.length || 1, source: "manual-feedback" });
161
- }
162
- return results;
163
- }
164
- /**
165
- * Scan recent commits for implicit failure signals (reverts, fixes)
166
- */
167
- async learnFromHistory(limit = 20) {
168
- const commits = await getRecentCommits(this.projectPath, limit);
169
- const issuesToStore = [];
170
- for (const commit of commits) {
171
- const isRevert = commit.message.toLowerCase().includes("revert") || commit.message.startsWith('Revert "');
172
- const isFix = /fix(es|ed)?\s+#\d+/i.test(commit.message) || commit.message.toLowerCase().includes("bugfix");
173
- if (isRevert || isFix) {
174
- const type = isRevert ? "revert" : "fix";
175
- const diff = await getDiff(this.projectPath, commit.hash);
176
- const files = this.extractFilesFromDiff(diff);
177
- for (const file of files) {
178
- const learnedIssues = await this.extractIssuesFromDiff(diff, file, type, commit.message);
179
- issuesToStore.push(...learnedIssues);
180
- }
181
- }
182
- }
183
- if (issuesToStore.length > 0) {
184
- const result = await storeIssues(issuesToStore, path.basename(this.projectPath), this.projectPath);
185
- return result.stored;
186
- }
187
- return 0;
188
- }
189
- /**
190
- * Record manual feedback (trie ok/bad) and adjust pattern confidence
191
- */
192
- async recordManualFeedback(helpful, files, note) {
193
- const context = files[0] ?? "unspecified";
194
- const decision = await this.graph.addNode("decision", {
195
- context,
196
- decision: helpful ? "helpful" : "not helpful",
197
- reasoning: note ?? null,
198
- outcome: helpful ? "good" : "bad",
199
- timestamp: (/* @__PURE__ */ new Date()).toISOString()
200
- });
201
- if (files.length > 0) {
202
- for (const file of files) {
203
- const fileNode = await this.graph.getNode("file", file);
204
- if (fileNode) {
205
- await this.graph.addEdge(decision.id, fileNode.id, "affects");
206
- }
207
- }
208
- await this.learningSystem.onFeedback(helpful, files);
209
- }
210
- }
211
- extractFilesFromDiff(diff) {
212
- const files = /* @__PURE__ */ new Set();
213
- const lines = diff.split("\n");
214
- for (const line of lines) {
215
- if (line.startsWith("+++ b/")) {
216
- files.add(line.slice(6));
217
- }
218
- }
219
- return Array.from(files);
220
- }
221
- async extractIssuesFromDiff(diff, file, type, message) {
222
- const issues = [];
223
- const client = tryGetClient();
224
- if (client) {
225
- try {
226
- const systemPrompt = `You are analyzing a git ${type} commit to understand what went wrong.
227
-
228
- A ${type} commit means:
229
- - If "fix": The removed lines (starting with -) were BUGGY and the added lines (starting with +) are the FIX
230
- - If "revert": The commit is undoing changes, so the removed lines were PROBLEMATIC
231
-
232
- Your job: Identify what patterns or issues led to this ${type}. Return a JSON array of issues:
233
- [{
234
- "issue": "Brief description of what was wrong",
235
- "category": "bug category (e.g., logic-error, null-check, race-condition)",
236
- "severity": "critical" | "serious" | "moderate",
237
- "fix": "What the fix does / how to avoid this"
238
- }]
239
-
240
- IMPORTANT: Only return JSON, no other text. Focus on the ROOT CAUSE, not just symptoms.`;
241
- const userPrompt = `File: ${file}
242
- Commit message: ${message}
243
- Type: ${type}
244
-
245
- Diff:
246
- \`\`\`diff
247
- ${diff.slice(0, 3e3)}
248
- \`\`\`
249
-
250
- Analyze what went wrong and return JSON array of issues.`;
251
- const response = await client.messages.create({
252
- model: "claude-sonnet-4-20250514",
253
- max_tokens: 2048,
254
- temperature: 0.2,
255
- system: systemPrompt,
256
- messages: [{ role: "user", content: userPrompt }]
257
- });
258
- const textContent = response.content.filter((block) => block.type === "text").map((block) => block.text).join("");
259
- const jsonMatch = textContent.match(/\[[\s\S]*\]/);
260
- if (jsonMatch) {
261
- const aiIssues = JSON.parse(jsonMatch[0]);
262
- for (const aiIssue of aiIssues) {
263
- issues.push({
264
- id: `implicit-${type}-ai-${Date.now()}-${Math.random().toString(36).slice(2, 7)}`,
265
- severity: aiIssue.severity || "moderate",
266
- issue: `[AI] ${aiIssue.issue} (from ${type} commit: ${message})`,
267
- fix: aiIssue.fix || `Review the ${type} commit and learn from this pattern.`,
268
- file,
269
- confidence: 0.8,
270
- autoFixable: false,
271
- agent: "implicit-learning-ai",
272
- category: aiIssue.category || type
273
- });
274
- }
275
- }
276
- } catch (error) {
277
- }
278
- }
279
- if (issues.length === 0) {
280
- issues.push({
281
- id: `implicit-${type}-${Date.now()}-${Math.random().toString(36).slice(2, 7)}`,
282
- severity: "moderate",
283
- issue: `Implicit failure detected via ${type}: ${message}`,
284
- fix: `Review the ${type} commit and learn from this pattern in ${file}.`,
285
- file,
286
- confidence: 0.7,
287
- autoFixable: false,
288
- agent: "implicit-learning",
289
- category: type
290
- });
291
- }
292
- return issues;
293
- }
294
- };
295
-
296
- // src/agent/perceive.ts
297
- import path2 from "path";
298
-
299
- // src/agent/diff-analyzer.ts
300
- var RISKY_PATTERNS = [/auth/i, /token/i, /password/i, /secret/i, /validate/i, /sanitize/i];
301
- function analyzeDiff(diff) {
302
- const files = [];
303
- let current = null;
304
- const lines = diff.split("\n");
305
- for (const line of lines) {
306
- if (line.startsWith("+++ b/")) {
307
- const filePath = line.replace("+++ b/", "").trim();
308
- current = {
309
- filePath,
310
- added: 0,
311
- removed: 0,
312
- functionsModified: [],
313
- riskyPatterns: []
314
- };
315
- files.push(current);
316
- continue;
317
- }
318
- if (!current) {
319
- continue;
320
- }
321
- if (line.startsWith("@@")) {
322
- const match = line.match(/@@.*?(function\s+([\w$]+)|class\s+([\w$]+)|([\w$]+\s*\())/i);
323
- const fnName = match?.[2] || match?.[3] || match?.[4];
324
- if (fnName) {
325
- current.functionsModified.push(fnName.replace("(", "").trim());
326
- }
327
- continue;
328
- }
329
- if (line.startsWith("+") && !line.startsWith("+++")) {
330
- current.added += 1;
331
- markRisk(line, current);
332
- } else if (line.startsWith("-") && !line.startsWith("---")) {
333
- current.removed += 1;
334
- markRisk(line, current);
335
- }
336
- }
337
- const totalAdded = files.reduce((acc, f) => acc + f.added, 0);
338
- const totalRemoved = files.reduce((acc, f) => acc + f.removed, 0);
339
- const riskyFiles = files.filter((f) => f.riskyPatterns.length > 0).map((f) => f.filePath);
340
- return {
341
- files,
342
- totalAdded,
343
- totalRemoved,
344
- riskyFiles
345
- };
346
- }
347
- function markRisk(line, file) {
348
- for (const pattern of RISKY_PATTERNS) {
349
- if (pattern.test(line)) {
350
- const label = pattern.toString();
351
- if (!file.riskyPatterns.includes(label)) {
352
- file.riskyPatterns.push(label);
353
- }
354
- }
355
- }
356
- }
357
-
358
- // src/agent/perceive.ts
359
- async function perceiveCurrentChanges(projectPath, graph) {
360
- const ctxGraph = graph ?? new ContextGraph(projectPath);
361
- const [staged, unstaged] = await Promise.all([
362
- getStagedChanges(projectPath),
363
- getUncommittedChanges(projectPath)
364
- ]);
365
- const stagedDiff = await getWorkingTreeDiff(projectPath, true);
366
- const unstagedDiff = await getWorkingTreeDiff(projectPath, false);
367
- const combinedDiff = [stagedDiff, unstagedDiff].filter(Boolean).join("\n");
368
- const diffSummary = analyzeDiff(combinedDiff);
369
- const filesTouched = /* @__PURE__ */ new Set();
370
- staged.forEach((c) => filesTouched.add(c.path));
371
- unstaged.forEach((c) => filesTouched.add(c.path));
372
- diffSummary.files.forEach((f) => filesTouched.add(f.filePath));
373
- const changeId = await upsertWorkingChange(ctxGraph, Array.from(filesTouched), projectPath);
374
- const result = {
375
- staged,
376
- unstaged,
377
- diffSummary
378
- };
379
- if (changeId) result.changeNodeId = changeId;
380
- return result;
381
- }
382
- async function upsertWorkingChange(graph, files, projectPath) {
383
- if (files.length === 0) return void 0;
384
- const now = (/* @__PURE__ */ new Date()).toISOString();
385
- const change = await graph.addNode("change", {
386
- commitHash: null,
387
- files,
388
- message: "workspace changes",
389
- diff: null,
390
- author: null,
391
- timestamp: now,
392
- outcome: "unknown"
393
- });
394
- for (const filePath of files) {
395
- const fileNode = await ensureFileNode(graph, filePath, projectPath);
396
- await graph.addEdge(change.id, fileNode.id, "affects");
397
- }
398
- return change.id;
399
- }
400
- async function ensureFileNode(graph, filePath, projectPath) {
401
- const normalized = path2.resolve(projectPath, filePath);
402
- const existing = await graph.getNode("file", normalized);
403
- const now = (/* @__PURE__ */ new Date()).toISOString();
404
- if (existing) {
405
- const data2 = existing.data;
406
- await graph.updateNode("file", existing.id, {
407
- changeCount: (data2.changeCount ?? 0) + 1,
408
- lastChanged: now
409
- });
410
- return await graph.getNode("file", existing.id);
411
- }
412
- const data = {
413
- path: filePath,
414
- extension: path2.extname(filePath),
415
- purpose: "",
416
- riskLevel: "medium",
417
- whyRisky: null,
418
- changeCount: 1,
419
- lastChanged: now,
420
- incidentCount: 0,
421
- createdAt: now
422
- };
423
- return await graph.addNode("file", data);
424
- }
425
-
426
- // src/agent/risk-scorer.ts
427
- import * as path3 from "path";
428
- var BASE_RISK = {
429
- low: 10,
430
- medium: 35,
431
- high: 65,
432
- critical: 85
433
- };
434
- var SENSITIVE_PATHS = [
435
- { pattern: /auth|login|token|session/i, weight: 20, reason: "touches authentication" },
436
- { pattern: /payment|billing|stripe|paypal|checkout/i, weight: 25, reason: "touches payments" },
437
- { pattern: /secret|credential|env|config\/security/i, weight: 15, reason: "touches secrets/security config" }
438
- ];
439
- function levelFromScore(score) {
440
- if (score >= 90) return "critical";
441
- if (score >= 65) return "high";
442
- if (score >= 40) return "medium";
443
- return "low";
444
- }
445
- async function scoreFile(graph, filePath, matchedPatterns = []) {
446
- const reasons = [];
447
- const normalized = path3.resolve(graph.projectRoot, filePath);
448
- const node = await graph.getNode("file", normalized);
449
- const incidents = await graph.getIncidentsForFile(filePath);
450
- let score = 10;
451
- const data = node?.data;
452
- if (data) {
453
- score = BASE_RISK[data.riskLevel] ?? score;
454
- reasons.push(`baseline ${data.riskLevel}`);
455
- if (data.incidentCount > 0) {
456
- const incBoost = Math.min(data.incidentCount * 12, 36);
457
- score += incBoost;
458
- reasons.push(`historical incidents (+${incBoost})`);
459
- }
460
- if (data.changeCount > 5) {
461
- const changeBoost = Math.min((data.changeCount - 5) * 2, 12);
462
- score += changeBoost;
463
- reasons.push(`frequent changes (+${changeBoost})`);
464
- }
465
- if (data.lastChanged) {
466
- const lastChanged = new Date(data.lastChanged).getTime();
467
- const days = (Date.now() - lastChanged) / (1e3 * 60 * 60 * 24);
468
- if (days > 60 && data.incidentCount === 0) {
469
- score -= 5;
470
- reasons.push("stable for 60d (-5)");
471
- }
472
- }
473
- }
474
- for (const { pattern, weight, reason } of SENSITIVE_PATHS) {
475
- if (pattern.test(filePath)) {
476
- score += weight;
477
- reasons.push(reason);
478
- }
479
- }
480
- if (matchedPatterns.length > 0) {
481
- const patternBoost = Math.min(
482
- matchedPatterns.reduce((acc, p) => acc + (p.data.confidence ?? 50) / 10, 0),
483
- 20
484
- );
485
- score += patternBoost;
486
- reasons.push(`pattern match (+${Math.round(patternBoost)})`);
487
- }
488
- if (incidents.length > 0) {
489
- const timestamps = incidents.map((i) => new Date(i.data.timestamp).getTime()).sort((a, b) => b - a);
490
- const recent = timestamps[0];
491
- const daysSince = (Date.now() - recent) / (1e3 * 60 * 60 * 24);
492
- if (daysSince > 90) {
493
- score -= 5;
494
- reasons.push("no incidents in 90d (-5)");
495
- } else {
496
- score += 8;
497
- reasons.push("recent incident (+8)");
498
- }
499
- }
500
- const level = levelFromScore(score);
501
- return {
502
- file: filePath,
503
- score,
504
- level,
505
- reasons,
506
- incidents,
507
- matchedPatterns
508
- };
509
- }
510
- async function scoreChangeSet(graph, files, patternMatches = {}) {
511
- const fileResults = [];
512
- for (const file of files) {
513
- const patterns = patternMatches[file] ?? [];
514
- fileResults.push(await scoreFile(graph, file, patterns));
515
- }
516
- const maxScore = Math.max(...fileResults.map((f) => f.score), 10);
517
- const spreadBoost = files.length > 5 ? Math.min((files.length - 5) * 2, 10) : 0;
518
- const overallScore = maxScore + spreadBoost;
519
- const overall = levelFromScore(overallScore);
520
- const shouldEscalate = overall === "critical" || overall === "high";
521
- return {
522
- files: fileResults,
523
- overall,
524
- score: overallScore,
525
- shouldEscalate
526
- };
527
- }
528
-
529
- // src/agent/pattern-matcher.ts
530
- async function matchPatternsForFiles(graph, files) {
531
- const matches = [];
532
- const byFile = {};
533
- for (const file of files) {
534
- const patterns = await graph.getPatternsForFile(file);
535
- if (patterns.length === 0) continue;
536
- byFile[file] = patterns;
537
- for (const pattern of patterns) {
538
- matches.push({
539
- file,
540
- pattern,
541
- confidence: pattern.data.confidence,
542
- isAntiPattern: pattern.data.isAntiPattern
543
- });
544
- }
545
- }
546
- return { matches, byFile };
547
- }
548
-
549
- // src/orchestrator/triager.ts
550
- var Triager = class {
551
- constructor(_config) {
552
- }
553
- /**
554
- * Triage a change to select appropriate agents
555
- * Note: Skills/agents have been removed - Trie is now purely a decision ledger
556
- */
557
- async triage(_context, _forceAgents) {
558
- return [];
559
- }
560
- /**
561
- * Get all available agent names (deprecated - returns empty array)
562
- */
563
- getAvailableAgents() {
564
- return [];
565
- }
566
- };
567
-
568
- // src/utils/parallel-executor.ts
569
- import { Worker } from "worker_threads";
570
- import { cpus } from "os";
571
- import { existsSync as existsSync2 } from "fs";
572
- import { fileURLToPath } from "url";
573
- var ParallelExecutor = class {
574
- maxWorkers;
575
- cache;
576
- streaming;
577
- activeWorkers = /* @__PURE__ */ new Set();
578
- cacheEnabled = true;
579
- useWorkerThreads = false;
580
- workerAvailable = null;
581
- warnedWorkerFallback = false;
582
- constructor(cacheManager, maxWorkers = Math.max(2, Math.min(cpus().length - 1, 8)), options) {
583
- this.maxWorkers = maxWorkers;
584
- this.cache = cacheManager;
585
- this.cacheEnabled = options?.cacheEnabled ?? true;
586
- this.useWorkerThreads = options?.useWorkerThreads ?? false;
587
- }
588
- /**
589
- * Set streaming manager for real-time updates
590
- */
591
- setStreaming(streaming) {
592
- this.streaming = streaming;
593
- }
594
- /**
595
- * Execute agents in parallel with intelligent scheduling
596
- */
597
- async executeAgents(agents, files, context) {
598
- if (agents.length === 0) {
599
- return /* @__PURE__ */ new Map();
600
- }
601
- if (this.streaming && this.streaming.getProgress().totalFiles === 0) {
602
- this.streaming.startScan(files.length);
603
- }
604
- const cacheResults = /* @__PURE__ */ new Map();
605
- const uncachedTasks = [];
606
- for (const agent of agents) {
607
- const cached = await this.checkAgentCache(agent, files);
608
- if (cached) {
609
- cacheResults.set(agent.name, cached);
610
- this.streaming?.completeAgent(agent.name, cached.issues);
611
- } else {
612
- uncachedTasks.push({
613
- agent,
614
- files,
615
- context,
616
- priority: agent.priority?.tier || 2,
617
- timeoutMs: context?.config?.timeoutMs || 12e4
618
- });
619
- }
620
- }
621
- uncachedTasks.sort((a, b) => a.priority - b.priority);
622
- const parallelResults = await this.executeTasksParallel(uncachedTasks);
623
- await this.cacheResults(parallelResults);
624
- const allResults = /* @__PURE__ */ new Map();
625
- for (const [agent, result] of Array.from(cacheResults.entries())) {
626
- allResults.set(agent, result);
627
- }
628
- for (const result of parallelResults) {
629
- allResults.set(result.agent, result.result);
630
- }
631
- const allIssues = Array.from(allResults.values()).flatMap((r) => r.issues);
632
- this.streaming?.completeScan(allIssues);
633
- return allResults;
634
- }
635
- /**
636
- * Check if agent has cached results for given files
637
- */
638
- async checkAgentCache(agent, files) {
639
- if (!this.cacheEnabled || !this.cache) {
640
- return null;
641
- }
642
- const cachedIssues = await this.cache.getCachedBatch(files, agent.name);
643
- if (cachedIssues.size === files.length) {
644
- const allIssues = Array.from(cachedIssues.values()).flat();
645
- return {
646
- agent: agent.name,
647
- issues: allIssues,
648
- executionTime: 0,
649
- // Cached
650
- success: true,
651
- metadata: {
652
- filesAnalyzed: files.length,
653
- linesAnalyzed: 0
654
- }
655
- };
656
- }
657
- return null;
658
- }
659
- /**
660
- * Execute tasks in parallel batches
661
- */
662
- async executeTasksParallel(tasks) {
663
- if (tasks.length === 0) {
664
- return [];
665
- }
666
- const results = [];
667
- const batches = this.createBatches(tasks, this.maxWorkers);
668
- for (const batch of batches) {
669
- const batchResults = await Promise.all(
670
- batch.map((task) => this.executeTask(task))
671
- );
672
- results.push(...batchResults);
673
- }
674
- return results;
675
- }
676
- /**
677
- * Create batches for parallel execution
678
- */
679
- createBatches(tasks, batchSize) {
680
- const batches = [];
681
- for (let i = 0; i < tasks.length; i += batchSize) {
682
- batches.push(tasks.slice(i, i + batchSize));
683
- }
684
- return batches;
685
- }
686
- /**
687
- * Execute a single task
688
- */
689
- async executeTask(task) {
690
- const startTime = Date.now();
691
- this.streaming?.startAgent(task.agent.name);
692
- try {
693
- const result = this.canUseWorkers() ? await this.executeTaskInWorker(task) : await task.agent.scan(task.files, task.context);
694
- const executionTime = Date.now() - startTime;
695
- this.streaming?.completeAgent(task.agent.name, result.issues);
696
- return {
697
- agent: task.agent.name,
698
- result,
699
- fromCache: false,
700
- executionTime
701
- };
702
- } catch (error) {
703
- const executionTime = Date.now() - startTime;
704
- const errorMessage = error instanceof Error ? error.message : String(error);
705
- this.streaming?.reportError(new Error(errorMessage), `Agent: ${task.agent.name}`);
706
- return {
707
- agent: task.agent.name,
708
- result: {
709
- agent: task.agent.name,
710
- issues: [],
711
- executionTime,
712
- success: false,
713
- error: errorMessage
714
- },
715
- fromCache: false,
716
- executionTime
717
- };
718
- }
719
- }
720
- canUseWorkers() {
721
- if (!this.useWorkerThreads) {
722
- return false;
723
- }
724
- if (this.workerAvailable !== null) {
725
- return this.workerAvailable;
726
- }
727
- const workerUrl = this.getWorkerUrl();
728
- this.workerAvailable = existsSync2(fileURLToPath(workerUrl));
729
- if (!this.workerAvailable && !this.warnedWorkerFallback && !isInteractiveMode()) {
730
- console.error("Worker threads unavailable; falling back to in-process agents.");
731
- this.warnedWorkerFallback = true;
732
- }
733
- return this.workerAvailable;
734
- }
735
- getWorkerUrl() {
736
- const distDir = new URL(".", import.meta.url);
737
- return new URL("workers/agent-worker.js", distDir);
738
- }
739
/**
 * Run one agent task inside a dedicated worker thread.
 *
 * The worker receives { agentName, files, context } via workerData and is
 * expected to post either { type: "result", result } or
 * { type: "error", error }. A per-task timeout terminates the worker and
 * rejects the promise. Because only the first settle of a Promise wins,
 * the overlapping reject calls from the timeout, "error", and non-zero
 * "exit" handlers are harmless.
 */
async executeTaskInWorker(task) {
  const workerUrl = this.getWorkerUrl();
  return new Promise((resolve2, reject) => {
    const worker = new Worker(workerUrl, {
      workerData: {
        agentName: task.agent.name,
        files: task.files,
        context: task.context
      }
    });
    // Track for cleanup(); removed again by the "exit" handler below.
    this.activeWorkers.add(worker);
    const timeout = setTimeout(() => {
      // Best-effort termination; ignore errors from an already-dead worker.
      worker.terminate().catch(() => void 0);
      reject(new Error(`Agent ${task.agent.name} timed out after ${task.timeoutMs}ms`));
    }, task.timeoutMs);
    worker.on("message", (message) => {
      if (message?.type === "result") {
        clearTimeout(timeout);
        resolve2(message.result);
      } else if (message?.type === "error") {
        clearTimeout(timeout);
        reject(new Error(message.error));
      }
    });
    worker.on("error", (error) => {
      clearTimeout(timeout);
      reject(error);
    });
    worker.on("exit", (code) => {
      this.activeWorkers.delete(worker);
      if (code !== 0) {
        clearTimeout(timeout);
        reject(new Error(`Worker stopped with exit code ${code}`));
      }
    });
  });
}
776
- /**
777
- * Cache results for future use
778
- */
779
- async cacheResults(results) {
780
- if (!this.cacheEnabled || !this.cache) {
781
- return;
782
- }
783
- const cachePromises = results.filter((r) => r.result.success && !r.fromCache).map((r) => {
784
- const issuesByFile = this.groupIssuesByFile(r.result.issues);
785
- const perFilePromises = Object.entries(issuesByFile).map(
786
- ([file, issues]) => this.cache.setCached(file, r.agent, issues, r.executionTime)
787
- );
788
- return Promise.all(perFilePromises);
789
- });
790
- await Promise.allSettled(cachePromises);
791
- }
792
- /**
793
- * Cleanup resources
794
- */
795
- async cleanup() {
796
- const terminationPromises = Array.from(this.activeWorkers).map(
797
- (worker) => worker.terminate()
798
- );
799
- await Promise.allSettled(terminationPromises);
800
- this.activeWorkers.clear();
801
- }
802
- groupIssuesByFile(issues) {
803
- const grouped = {};
804
- for (const issue of issues) {
805
- if (!grouped[issue.file]) {
806
- grouped[issue.file] = [];
807
- }
808
- grouped[issue.file].push(issue);
809
- }
810
- return grouped;
811
- }
812
- };
813
/**
 * Heuristic for how many agents to run concurrently.
 * Base value: clamped to [2, 8], one less than the CPU count; halved
 * (floor, min 2) when the measured memory figure is below 2 GB; bumped
 * by 2 (cap 12) on machines with more than 8 CPUs.
 *
 * NOTE(review): process.memoryUsage().rss is this process's resident set
 * size, not system-available memory — a small RSS (i.e. plenty of
 * headroom) currently triggers the low-memory throttle. Confirm whether
 * os.freemem() was intended here.
 */
function calculateOptimalConcurrency() {
  const cpuCount = cpus().length;
  const rssGB = process.memoryUsage().rss / 1024 / 1024 / 1024;
  let concurrency = Math.max(2, Math.min(cpuCount - 1, 8));
  if (rssGB < 2) {
    concurrency = Math.max(2, Math.floor(concurrency / 2));
  }
  if (cpuCount > 8) {
    concurrency = Math.min(concurrency + 2, 12);
  }
  return concurrency;
}
825
-
826
- // src/utils/cache-manager.ts
827
- import { readFile, writeFile, mkdir, stat } from "fs/promises";
828
- import { join as join2 } from "path";
829
- import { createHash } from "crypto";
830
/**
 * Disk-backed scan-result cache stored as a single JSON index file at
 * <trie-dir>/cache/index.json.
 *
 * Cache keys are hash(filePath:agent:fileHash), so any change to a file's
 * content changes the key and naturally invalidates the old entry.
 * Entries additionally expire after MAX_AGE_MS and the index is capped at
 * MAX_ENTRIES (newest kept). All I/O failures degrade gracefully — reads
 * behave as cache misses and writes become no-ops — instead of throwing.
 */
var CacheManager = class {
  cacheDir;
  indexPath;
  VERSION = "1.0.0";
  MAX_AGE_MS = 24 * 60 * 60 * 1e3;
  // 24 hours
  MAX_ENTRIES = 1e3;
  constructor(baseDir) {
    this.cacheDir = join2(getTrieDirectory(baseDir), "cache");
    this.indexPath = join2(this.cacheDir, "index.json");
  }
  /**
   * Generate cache key for a file and agent combination
   * (first 16 hex chars of sha256 over "filePath:agent:fileHash").
   */
  generateCacheKey(filePath, agent, fileHash) {
    const key = `${filePath}:${agent}:${fileHash}`;
    return createHash("sha256").update(key).digest("hex").slice(0, 16);
  }
  /**
   * Get file hash for cache validation.
   * Returns { hash: "", size: 0, mtime: 0 } when the file is unreadable,
   * which callers treat as "do not cache / cache miss".
   */
  async getFileHash(filePath) {
    try {
      const content = await readFile(filePath, "utf-8");
      const stats = await stat(filePath);
      const hash = createHash("sha256").update(content).digest("hex").slice(0, 16);
      return {
        hash,
        size: stats.size,
        mtime: stats.mtime.getTime()
      };
    } catch {
      return { hash: "", size: 0, mtime: 0 };
    }
  }
  /**
   * Load cache index; a missing or corrupt index file yields a fresh,
   * empty index rather than an error.
   */
  async loadIndex() {
    try {
      const content = await readFile(this.indexPath, "utf-8");
      return JSON.parse(content);
    } catch {
      return {
        version: this.VERSION,
        created: Date.now(),
        entries: {}
      };
    }
  }
  /**
   * Save cache index (creating the cache directory if needed).
   * Failures are logged outside interactive mode and otherwise ignored.
   */
  async saveIndex(index) {
    try {
      await mkdir(this.cacheDir, { recursive: true });
      await writeFile(this.indexPath, JSON.stringify(index, null, 2));
    } catch (error) {
      if (!isInteractiveMode()) {
        console.warn("Failed to save cache index:", error);
      }
    }
  }
  /**
   * Clean up expired entries, then — if still over MAX_ENTRIES — keep only
   * the newest MAX_ENTRIES by timestamp. Returns a new index object; the
   * input is not mutated.
   */
  cleanupExpired(index) {
    const now = Date.now();
    const validEntries = {};
    for (const [key, entry] of Object.entries(index.entries)) {
      if (now - entry.timestamp < this.MAX_AGE_MS) {
        validEntries[key] = entry;
      }
    }
    const entries = Object.entries(validEntries);
    if (entries.length > this.MAX_ENTRIES) {
      entries.sort((a, b) => b[1].timestamp - a[1].timestamp);
      const limited = entries.slice(0, this.MAX_ENTRIES);
      return {
        ...index,
        entries: Object.fromEntries(limited)
      };
    }
    return {
      ...index,
      entries: validEntries
    };
  }
  /**
   * Get cached result for a file and agent
   *
   * Cache automatically invalidates when files change:
   * - Cache key includes file hash: hash(filePath:agent:fileHash)
   * - When file changes, hash changes, so cache key changes
   * - Old cache entry won't be found (different key)
   * - File is automatically rescanned
   *
   * This means cache auto-updates when Claude fixes code - no manual invalidation needed!
   *
   * Returns the cached issue list, or null on miss/expiry/any error.
   * A stale-but-found entry is deleted and the index re-saved eagerly.
   */
  async getCached(filePath, agent) {
    try {
      const { hash, size: _size, mtime: _mtime } = await this.getFileHash(filePath);
      if (!hash) return null;
      const index = await this.loadIndex();
      const cacheKey = this.generateCacheKey(filePath, agent, hash);
      const entry = index.entries[cacheKey];
      if (!entry) return null;
      const isValid = entry.fileHash === hash && entry.version === this.VERSION && Date.now() - entry.timestamp < this.MAX_AGE_MS;
      if (!isValid) {
        delete index.entries[cacheKey];
        await this.saveIndex(index);
        return null;
      }
      return entry.issues;
    } catch {
      return null;
    }
  }
  /**
   * Cache result for a file and agent.
   * Re-hashes the file, inserts/overwrites the entry, prunes expired and
   * excess entries, and persists the index. Never throws.
   */
  async setCached(filePath, agent, issues, executionTime) {
    try {
      const { hash, size } = await this.getFileHash(filePath);
      if (!hash) return;
      const index = await this.loadIndex();
      const cacheKey = this.generateCacheKey(filePath, agent, hash);
      index.entries[cacheKey] = {
        version: this.VERSION,
        timestamp: Date.now(),
        fileHash: hash,
        fileSize: size,
        agent,
        issues,
        executionTime
      };
      const cleanedIndex = this.cleanupExpired(index);
      await this.saveIndex(cleanedIndex);
    } catch (error) {
      if (!isInteractiveMode()) {
        console.warn("Failed to cache result:", error);
      }
    }
  }
  /**
   * Check if multiple files have cached results.
   * Returns a Map of file -> issues containing only cache hits. Note that
   * a cached empty issue list ([]) is truthy and therefore included.
   */
  async getCachedBatch(files, agent) {
    const results = /* @__PURE__ */ new Map();
    await Promise.all(
      files.map(async (file) => {
        const cached = await this.getCached(file, agent);
        if (cached) {
          results.set(file, cached);
        }
      })
    );
    return results;
  }
  /**
   * Get cache statistics (entry count, summed source-file size in KB,
   * oldest/newest entry timestamps, distinct agent names). Returns a
   * zeroed summary on any error.
   */
  async getStats() {
    try {
      const index = await this.loadIndex();
      const entries = Object.values(index.entries);
      const totalSizeKB = entries.reduce((acc, entry) => acc + entry.fileSize, 0) / 1024;
      const timestamps = entries.map((e) => e.timestamp);
      const agents = Array.from(new Set(entries.map((e) => e.agent)));
      return {
        totalEntries: entries.length,
        totalSizeKB: Math.round(totalSizeKB),
        oldestEntry: timestamps.length > 0 ? Math.min(...timestamps) : null,
        newestEntry: timestamps.length > 0 ? Math.max(...timestamps) : null,
        agents
      };
    } catch {
      return {
        totalEntries: 0,
        totalSizeKB: 0,
        oldestEntry: null,
        newestEntry: null,
        agents: []
      };
    }
  }
  /**
   * Clean up stale cache entries by verifying file hashes
   * This removes entries where files have changed or no longer exist
   * Called periodically to keep cache clean
   *
   * Note: Since cache keys are hashed, we can't easily reverse-engineer file paths.
   * However, when getCached() is called, it naturally invalidates stale entries
   * by checking if the current file hash matches the cached hash. This method
   * proactively cleans up entries for known changed files.
   *
   * NOTE(review): this scan is O(filePaths x agents x entries) — each key is
   * re-derived per (file, agent, entry.fileHash) and compared to the stored
   * key. Returns the number of removed entries (0 on error).
   */
  async cleanupStaleEntries(filePaths) {
    try {
      const index = await this.loadIndex();
      let removedCount = 0;
      const keysToRemove = [];
      if (filePaths && filePaths.length > 0) {
        const agents = /* @__PURE__ */ new Set();
        for (const entry of Object.values(index.entries)) {
          agents.add(entry.agent);
        }
        for (const filePath of filePaths) {
          try {
            const { hash: currentHash } = await this.getFileHash(filePath);
            if (!currentHash) {
              continue;
            }
            for (const agent of Array.from(agents)) {
              for (const [key, entry] of Object.entries(index.entries)) {
                if (entry.agent !== agent) continue;
                if (entry.fileHash !== currentHash) {
                  const oldKey = this.generateCacheKey(filePath, agent, entry.fileHash);
                  if (oldKey === key) {
                    keysToRemove.push(key);
                    removedCount++;
                  }
                }
              }
            }
          } catch {
            continue;
          }
        }
      }
      const uniqueKeys = Array.from(new Set(keysToRemove));
      for (const key of uniqueKeys) {
        delete index.entries[key];
      }
      if (removedCount > 0) {
        await this.saveIndex(index);
      }
      return removedCount;
    } catch (error) {
      if (!isInteractiveMode()) {
        console.warn("Failed to cleanup stale cache entries:", error);
      }
      return 0;
    }
  }
  /**
   * Clear all cache by overwriting the index with an empty one.
   */
  async clear() {
    try {
      const emptyIndex = {
        version: this.VERSION,
        created: Date.now(),
        entries: {}
      };
      await this.saveIndex(emptyIndex);
    } catch (error) {
      if (!isInteractiveMode()) {
        console.warn("Failed to clear cache:", error);
      }
    }
  }
};
1092
-
1093
- // src/orchestrator/executor.ts
1094
var Executor = class {
  /**
   * Run the given agents over `files`.
   *
   * options:
   * - parallel (default true): delegate to ParallelExecutor with caching
   *   and optional worker-thread support; otherwise launch every agent at
   *   once and gather with Promise.allSettled under a per-agent timeout.
   * - cacheEnabled (default true), maxConcurrency, useWorkerThreads,
   *   streaming, timeoutMs (default 120s on the parallel path, 30s on the
   *   sequential path).
   *
   * Never rejects: a failed agent becomes an unsuccessful result with an
   * empty issue list and an error message.
   */
  async executeAgents(agents, files, context, options) {
    const parallel = options?.parallel ?? true;
    const cacheEnabled = options?.cacheEnabled ?? true;
    const maxConcurrency = options?.maxConcurrency ?? calculateOptimalConcurrency();
    const useWorkerThreads = options?.useWorkerThreads ?? false;
    if (!isInteractiveMode()) {
      console.error(`Executing ${agents.length} scouts ${parallel ? "in parallel" : "sequentially"}...`);
    }
    if (parallel) {
      const cacheManager = cacheEnabled ? new CacheManager(context.workingDir) : null;
      const executor = new ParallelExecutor(cacheManager, maxConcurrency, {
        cacheEnabled,
        useWorkerThreads
      });
      if (options?.streaming) {
        executor.setStreaming(options.streaming);
      }
      const results = await executor.executeAgents(agents, files, {
        ...context,
        config: { timeoutMs: options?.timeoutMs ?? 12e4 }
      });
      // Preserve the caller's agent order; drop agents with no result.
      return agents.map((agent) => results.get(agent.name)).filter(Boolean);
    }
    const promises = agents.map(
      (agent) => this.executeAgentWithTimeout(agent, files, context, options?.timeoutMs ?? 3e4)
    );
    try {
      const results = await Promise.allSettled(promises);
      return results.map((result, index) => {
        if (result.status === "fulfilled") {
          if (!isInteractiveMode()) {
            console.error(`${agents[index].name} completed in ${result.value.executionTime}ms`);
          }
          return result.value;
        } else {
          if (!isInteractiveMode()) {
            console.error(`${agents[index].name} failed:`, result.reason);
          }
          return {
            agent: agents[index].name,
            issues: [],
            executionTime: 0,
            success: false,
            error: result.reason instanceof Error ? result.reason.message : String(result.reason)
          };
        }
      });
    } catch (error) {
      // Defensive: Promise.allSettled itself should not reject, but keep
      // the historical fallback of one failed result per agent.
      if (!isInteractiveMode()) {
        console.error("Executor error:", error);
      }
      return agents.map((agent) => ({
        agent: agent.name,
        issues: [],
        executionTime: 0,
        success: false,
        error: "Execution failed"
      }));
    }
  }
  /**
   * Race a single agent's scan against a timeout.
   *
   * Fix: the previous implementation used an async Promise executor (an
   * anti-pattern), never cleared its timer when the timeout fired, and
   * could leave the scan's eventual rejection unhandled after a timeout.
   * This version races the scan against a timer promise, always clears
   * the timer in `finally`, and attaches a no-op catch so a late scan
   * failure cannot surface as an unhandled rejection.
   *
   * @returns the agent's scan result
   * @throws Error when the scan exceeds timeoutMs, or whatever scan throws
   */
  async executeAgentWithTimeout(agent, files, context, timeoutMs = 3e4) {
    let timer;
    const timeoutPromise = new Promise((_, reject) => {
      timer = setTimeout(() => {
        reject(new Error(`Agent ${agent.name} timed out after ${timeoutMs}ms`));
      }, timeoutMs);
    });
    const scanPromise = Promise.resolve(agent.scan(files, context));
    // Swallow a late rejection once the timeout has already won the race.
    scanPromise.catch(() => void 0);
    try {
      return await Promise.race([scanPromise, timeoutPromise]);
    } finally {
      clearTimeout(timer);
    }
  }
};
1171
-
1172
- // src/agent/reason.ts
1173
/**
 * Fallback CodeContext used when the caller does not supply one:
 * a generic, medium-complexity TypeScript change of ~50 lines with every
 * sensitivity flag and every detected pattern switched off.
 */
function buildDefaultCodeContext() {
  const patterns = {
    hasAsyncCode: false,
    hasFormHandling: false,
    hasFileUploads: false,
    hasEmailHandling: false,
    hasRateLimiting: false,
    hasWebSockets: false,
    hasCaching: false,
    hasQueue: false
  };
  return {
    changeType: "general",
    isNewFeature: false,
    touchesUserData: false,
    touchesAuth: false,
    touchesPayments: false,
    touchesDatabase: false,
    touchesAPI: false,
    touchesUI: false,
    touchesHealthData: false,
    touchesSecurityConfig: false,
    linesChanged: 50,
    filePatterns: [],
    framework: "unknown",
    language: "typescript",
    touchesCrypto: false,
    touchesFileSystem: false,
    touchesThirdPartyAPI: false,
    touchesLogging: false,
    touchesErrorHandling: false,
    hasTests: false,
    complexity: "medium",
    patterns
  };
}
1208
/**
 * One-sentence human explanation of a change-risk result, anchored on the
 * highest-scoring file (or a fallback message when no files were scored).
 */
function buildExplanation(result) {
  let riskiest;
  for (const fileRisk of result.files) {
    if (!riskiest || fileRisk.score > riskiest.score) {
      riskiest = fileRisk;
    }
  }
  if (!riskiest) return `Risk level ${result.overall} (no files provided)`;
  return `Risk level ${result.overall} because ${riskiest.file} ${riskiest.reasons.join(", ")}`;
}
1213
/**
 * Map a risk level (plus the anti-pattern flag) to an action
 * recommendation. Anti-pattern matches are treated like critical risk
 * and block the change.
 */
function buildRecommendation(risk, hasAntiPattern) {
  const effectiveRisk = hasAntiPattern ? "critical" : risk;
  switch (effectiveRisk) {
    case "critical":
      return "Block until reviewed: address anti-patterns and rerun targeted tests.";
    case "high":
      return "Require senior review and run full test suite before merge.";
    case "medium":
      return "Proceed with caution; run impacted tests and sanity checks.";
    default:
      return "Low risk; proceed but keep an eye on recent changes.";
  }
}
1225
/**
 * Analyze a set of changed files against the project's context graph.
 *
 * Combines pattern matching, per-file risk scoring, and prior-incident
 * lookups into a single reasoning object. Any anti-pattern match forces
 * riskLevel to "critical"; critical/high risk sets shouldBlock. When
 * options.runAgents is set, triaged scanner agents are also executed and
 * their results attached as reasoning.agentResults.
 *
 * @param projectPath - project root used to load the context graph
 * @param files - changed file paths to reason about
 * @param options - { runAgents?, codeContext?, scanContext? }
 * @returns reasoning object (riskLevel, shouldBlock, explanation,
 *          relevantIncidents, matchedPatterns, recommendation, files,
 *          and optionally agentResults)
 */
async function reasonAboutChanges(projectPath, files, options = {}) {
  const graph = new ContextGraph(projectPath);
  const { matches, byFile } = await matchPatternsForFiles(graph, files);
  const changeRisk = await scoreChangeSet(graph, files, byFile);
  // Incident lookups are independent per file, so fetch them in parallel
  // instead of awaiting one file at a time (input order is preserved).
  const incidentLists = await Promise.all(
    files.map((file) => graph.getIncidentsForFile(file))
  );
  const incidents = incidentLists.flat();
  const hasAntiPattern = matches.some((m) => m.isAntiPattern);
  const riskLevel = hasAntiPattern ? "critical" : changeRisk.overall;
  const shouldBlock = hasAntiPattern || riskLevel === "critical" || riskLevel === "high";
  const reasoning = {
    riskLevel,
    shouldBlock,
    explanation: buildExplanation(changeRisk),
    relevantIncidents: incidents,
    matchedPatterns: matches.map((m) => m.pattern),
    recommendation: buildRecommendation(riskLevel, hasAntiPattern),
    files: changeRisk.files
  };
  if (options.runAgents) {
    const codeContext = options.codeContext ?? buildDefaultCodeContext();
    const triager = new Triager();
    const agents = await triager.triage(codeContext);
    if (agents.length > 0) {
      const executor = new Executor();
      const scanContext = {
        workingDir: projectPath,
        ...options.scanContext
      };
      if (codeContext.framework) scanContext.framework = codeContext.framework;
      if (codeContext.language) scanContext.language = codeContext.language;
      reasoning.agentResults = await executor.executeAgents(agents, files, scanContext, {
        parallel: true,
        timeoutMs: options.scanContext?.config?.timeoutMs ?? 6e4
      });
    } else {
      reasoning.agentResults = [];
    }
  }
  return reasoning;
}
1268
/**
 * Run reasonAboutChanges and reshape the result into plain-English
 * summary / findings / severity / next-steps strings, keeping the raw
 * reasoning object available under `original`.
 */
async function reasonAboutChangesHumanReadable(projectPath, files, options = {}) {
  const reasoning = await reasonAboutChanges(projectPath, files, options);
  const rankedFiles = [...reasoning.files].sort((a, b) => b.score - a.score);
  const topFile = rankedFiles[0];
  const topReasons = topFile?.reasons?.slice(0, 3) ?? [];
  const patterns = reasoning.matchedPatterns;
  const incidents = reasoning.relevantIncidents;

  const summaryParts = [
    `Risk ${reasoning.riskLevel.toUpperCase()} (${reasoning.shouldBlock ? "block" : "allow"})`
  ];
  if (topFile) summaryParts.push(`Most sensitive file: ${topFile.file}`);
  if (topReasons.length) summaryParts.push(`Top reasons: ${topReasons.join("; ")}`);

  const firstPattern = patterns[0];
  const patternLine = firstPattern
    ? `Pattern match: ${firstPattern.data.description}`
    : "No known risky patterns matched.";
  const firstIncident = incidents[0];
  const incidentLine = firstIncident
    ? `Relevant prior incident: ${firstIncident.data.description} (${firstIncident.data.severity})`
    : "No prior incidents linked to these files.";

  const severitySentence = reasoning.shouldBlock
    ? "This is likely to break things or violate policy if pushed without review."
    : "This is unlikely to cause major issues if you follow normal review/testing.";
  const howBad = [`Overall risk is ${reasoning.riskLevel}.`, severitySentence, incidentLine].join(" ");

  const actionParts = [reasoning.recommendation];
  if (patterns.length) {
    actionParts.push(
      `Double-check the pattern(s): ${patterns.slice(0, 2).map((p) => p.data.description).join(" | ")}.`
    );
  }
  const whatToDo = actionParts.filter(Boolean).join(" ");

  return {
    summary: summaryParts.join(" \u2014 "),
    whatIFound: [patternLine, `Incidents considered: ${incidents.length}. Patterns considered: ${patterns.length}.`].join(" "),
    howBad,
    whatToDo,
    original: reasoning
  };
}
1298
-
1299
- export {
1300
- formatFriendlyError,
1301
- LearningEngine,
1302
- perceiveCurrentChanges,
1303
- reasonAboutChangesHumanReadable,
1304
- isTrieInitialized
1305
- };
1306
- //# sourceMappingURL=chunk-FVRO5RN3.js.map