@triedotdev/mcp 1.0.168 → 1.0.170
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/README.md +54 -500
- package/dist/chunk-2YXOBNKW.js +619 -0
- package/dist/chunk-2YXOBNKW.js.map +1 -0
- package/dist/chunk-QR64Y5TI.js +363 -0
- package/dist/chunk-QR64Y5TI.js.map +1 -0
- package/dist/cli/main.d.ts +0 -15
- package/dist/cli/main.js +356 -3100
- package/dist/cli/main.js.map +1 -1
- package/dist/index.js +2 -36
- package/dist/index.js.map +1 -1
- package/dist/server/mcp-server.js +2 -36
- package/package.json +8 -31
- package/dist/autonomy-config-FSERX3O3.js +0 -30
- package/dist/autonomy-config-FSERX3O3.js.map +0 -1
- package/dist/chat-store-JNGNTDSN.js +0 -15
- package/dist/chat-store-JNGNTDSN.js.map +0 -1
- package/dist/chunk-2HF65EHQ.js +0 -311
- package/dist/chunk-2HF65EHQ.js.map +0 -1
- package/dist/chunk-43X6JBEM.js +0 -36
- package/dist/chunk-43X6JBEM.js.map +0 -1
- package/dist/chunk-4MXH2ZPT.js +0 -1827
- package/dist/chunk-4MXH2ZPT.js.map +0 -1
- package/dist/chunk-575YT2SD.js +0 -737
- package/dist/chunk-575YT2SD.js.map +0 -1
- package/dist/chunk-5BRRRTN6.js +0 -354
- package/dist/chunk-5BRRRTN6.js.map +0 -1
- package/dist/chunk-6NLHFIYA.js +0 -344
- package/dist/chunk-6NLHFIYA.js.map +0 -1
- package/dist/chunk-7WITSO22.js +0 -824
- package/dist/chunk-7WITSO22.js.map +0 -1
- package/dist/chunk-DGUM43GV.js +0 -11
- package/dist/chunk-DGUM43GV.js.map +0 -1
- package/dist/chunk-EFWVF6TI.js +0 -267
- package/dist/chunk-EFWVF6TI.js.map +0 -1
- package/dist/chunk-F6WFNUAY.js +0 -216
- package/dist/chunk-F6WFNUAY.js.map +0 -1
- package/dist/chunk-FQ45QP5A.js +0 -361
- package/dist/chunk-FQ45QP5A.js.map +0 -1
- package/dist/chunk-G2TGF6TR.js +0 -573
- package/dist/chunk-G2TGF6TR.js.map +0 -1
- package/dist/chunk-GTKYBOXL.js +0 -700
- package/dist/chunk-GTKYBOXL.js.map +0 -1
- package/dist/chunk-HVCDY3AK.js +0 -850
- package/dist/chunk-HVCDY3AK.js.map +0 -1
- package/dist/chunk-JVMBCWKS.js +0 -348
- package/dist/chunk-JVMBCWKS.js.map +0 -1
- package/dist/chunk-KDHN2ZQE.js +0 -313
- package/dist/chunk-KDHN2ZQE.js.map +0 -1
- package/dist/chunk-LQIMKE3P.js +0 -12524
- package/dist/chunk-LQIMKE3P.js.map +0 -1
- package/dist/chunk-ME2OERF5.js +0 -345
- package/dist/chunk-ME2OERF5.js.map +0 -1
- package/dist/chunk-MRHKX5M5.js +0 -662
- package/dist/chunk-MRHKX5M5.js.map +0 -1
- package/dist/chunk-OBQ74FOU.js +0 -27
- package/dist/chunk-OBQ74FOU.js.map +0 -1
- package/dist/chunk-OMR4YCBS.js +0 -987
- package/dist/chunk-OMR4YCBS.js.map +0 -1
- package/dist/chunk-Q5EKA5YA.js +0 -254
- package/dist/chunk-Q5EKA5YA.js.map +0 -1
- package/dist/chunk-Q63FFI6D.js +0 -132
- package/dist/chunk-Q63FFI6D.js.map +0 -1
- package/dist/chunk-SY6KQG44.js +0 -983
- package/dist/chunk-SY6KQG44.js.map +0 -1
- package/dist/chunk-T63OHG4Q.js +0 -440
- package/dist/chunk-T63OHG4Q.js.map +0 -1
- package/dist/chunk-TN5WEKWI.js +0 -173
- package/dist/chunk-TN5WEKWI.js.map +0 -1
- package/dist/chunk-VUL52BQL.js +0 -402
- package/dist/chunk-VUL52BQL.js.map +0 -1
- package/dist/chunk-VVITXIHN.js +0 -189
- package/dist/chunk-VVITXIHN.js.map +0 -1
- package/dist/chunk-WCN7S3EI.js +0 -14
- package/dist/chunk-WCN7S3EI.js.map +0 -1
- package/dist/chunk-XE6KQRKZ.js +0 -816
- package/dist/chunk-XE6KQRKZ.js.map +0 -1
- package/dist/chunk-XPZZFPBZ.js +0 -491
- package/dist/chunk-XPZZFPBZ.js.map +0 -1
- package/dist/chunk-XTFWT2XM.js +0 -727
- package/dist/chunk-XTFWT2XM.js.map +0 -1
- package/dist/chunk-YDHUCDHM.js +0 -4011
- package/dist/chunk-YDHUCDHM.js.map +0 -1
- package/dist/chunk-YZ6Y2H3P.js +0 -1289
- package/dist/chunk-YZ6Y2H3P.js.map +0 -1
- package/dist/chunk-ZJF5FTBX.js +0 -1396
- package/dist/chunk-ZJF5FTBX.js.map +0 -1
- package/dist/chunk-ZV2K6M7T.js +0 -74
- package/dist/chunk-ZV2K6M7T.js.map +0 -1
- package/dist/cli/create-agent.d.ts +0 -1
- package/dist/cli/create-agent.js +0 -1050
- package/dist/cli/create-agent.js.map +0 -1
- package/dist/cli/yolo-daemon.d.ts +0 -1
- package/dist/cli/yolo-daemon.js +0 -423
- package/dist/cli/yolo-daemon.js.map +0 -1
- package/dist/client-NJPZE5JT.js +0 -28
- package/dist/client-NJPZE5JT.js.map +0 -1
- package/dist/codebase-index-VAPF32XX.js +0 -12
- package/dist/codebase-index-VAPF32XX.js.map +0 -1
- package/dist/fast-analyzer-XXYMOXRK.js +0 -216
- package/dist/fast-analyzer-XXYMOXRK.js.map +0 -1
- package/dist/git-EO5SRFMN.js +0 -28
- package/dist/git-EO5SRFMN.js.map +0 -1
- package/dist/github-ingester-ZOKK6GRS.js +0 -11
- package/dist/github-ingester-ZOKK6GRS.js.map +0 -1
- package/dist/goal-manager-YOB7VWK7.js +0 -25
- package/dist/goal-manager-YOB7VWK7.js.map +0 -1
- package/dist/goal-validator-ULKIBDPX.js +0 -24
- package/dist/goal-validator-ULKIBDPX.js.map +0 -1
- package/dist/graph-B3NA4S7I.js +0 -10
- package/dist/graph-B3NA4S7I.js.map +0 -1
- package/dist/hypothesis-7BFFT5JY.js +0 -23
- package/dist/hypothesis-7BFFT5JY.js.map +0 -1
- package/dist/incident-index-EFNUSGWL.js +0 -11
- package/dist/incident-index-EFNUSGWL.js.map +0 -1
- package/dist/insight-store-EC4PLSAW.js +0 -22
- package/dist/insight-store-EC4PLSAW.js.map +0 -1
- package/dist/issue-store-ZIRP23EP.js +0 -36
- package/dist/issue-store-ZIRP23EP.js.map +0 -1
- package/dist/ledger-TWZTGDFA.js +0 -58
- package/dist/ledger-TWZTGDFA.js.map +0 -1
- package/dist/linear-ingester-XXPAZZRW.js +0 -11
- package/dist/linear-ingester-XXPAZZRW.js.map +0 -1
- package/dist/output-manager-RVJ37XKA.js +0 -13
- package/dist/output-manager-RVJ37XKA.js.map +0 -1
- package/dist/parse-goal-violation-SACGFG3C.js +0 -8
- package/dist/parse-goal-violation-SACGFG3C.js.map +0 -1
- package/dist/pattern-discovery-F7LU5K6E.js +0 -8
- package/dist/pattern-discovery-F7LU5K6E.js.map +0 -1
- package/dist/progress-SRQ2V3BP.js +0 -18
- package/dist/progress-SRQ2V3BP.js.map +0 -1
- package/dist/project-state-AHPA77SM.js +0 -28
- package/dist/project-state-AHPA77SM.js.map +0 -1
- package/dist/sync-M2FSWPBC.js +0 -12
- package/dist/sync-M2FSWPBC.js.map +0 -1
- package/dist/terminal-spawn-5YXDMUCF.js +0 -157
- package/dist/terminal-spawn-5YXDMUCF.js.map +0 -1
- package/dist/tiered-storage-Z3YCR465.js +0 -12
- package/dist/tiered-storage-Z3YCR465.js.map +0 -1
- package/dist/trie-agent-3YDPEGHJ.js +0 -28
- package/dist/trie-agent-3YDPEGHJ.js.map +0 -1
- package/dist/ui/chat.html +0 -1014
- package/dist/ui/goals.html +0 -967
- package/dist/ui/hypotheses.html +0 -1011
- package/dist/ui/ledger.html +0 -954
- package/dist/ui/nudges.html +0 -995
- package/dist/vibe-code-signatures-F6URTBW3.js +0 -16
- package/dist/vibe-code-signatures-F6URTBW3.js.map +0 -1
- package/dist/vulnerability-signatures-T7SKHORW.js +0 -18
- package/dist/vulnerability-signatures-T7SKHORW.js.map +0 -1
package/dist/chunk-YZ6Y2H3P.js
DELETED
|
@@ -1,1289 +0,0 @@
|
|
|
1
|
-
import {
|
|
2
|
-
IncidentIndex
|
|
3
|
-
} from "./chunk-TN5WEKWI.js";
|
|
4
|
-
import {
|
|
5
|
-
TriePatternDiscovery
|
|
6
|
-
} from "./chunk-ZV2K6M7T.js";
|
|
7
|
-
import {
|
|
8
|
-
scanForVulnerabilities
|
|
9
|
-
} from "./chunk-SY6KQG44.js";
|
|
10
|
-
import {
|
|
11
|
-
scanForVibeCodeIssues
|
|
12
|
-
} from "./chunk-OMR4YCBS.js";
|
|
13
|
-
import {
|
|
14
|
-
ContextGraph
|
|
15
|
-
} from "./chunk-VUL52BQL.js";
|
|
16
|
-
import {
|
|
17
|
-
storeIssues
|
|
18
|
-
} from "./chunk-XE6KQRKZ.js";
|
|
19
|
-
import {
|
|
20
|
-
getTrieDirectory,
|
|
21
|
-
getWorkingDirectory
|
|
22
|
-
} from "./chunk-VVITXIHN.js";
|
|
23
|
-
import {
|
|
24
|
-
isInteractiveMode
|
|
25
|
-
} from "./chunk-KDHN2ZQE.js";
|
|
26
|
-
import {
|
|
27
|
-
getDiff,
|
|
28
|
-
getRecentCommits,
|
|
29
|
-
getStagedChanges,
|
|
30
|
-
getUncommittedChanges,
|
|
31
|
-
getWorkingTreeDiff
|
|
32
|
-
} from "./chunk-2HF65EHQ.js";
|
|
33
|
-
|
|
34
|
-
// src/utils/trie-init.ts
|
|
35
|
-
import { existsSync } from "fs";
|
|
36
|
-
import { join } from "path";
|
|
37
|
-
// Marker files whose presence in the trie directory indicates a completed init.
var INIT_MARKERS = [
  "PROJECT.md",
  "RULES.md",
  "TEAM.md",
  "BOOTSTRAP.md",
  "AGENTS.md",
  "config.json"
];
/**
 * Report whether the Trie workspace has been initialized.
 * @param {string} [workDir] - Project directory; when omitted, falls back to
 *   the resolved working directory (getWorkingDirectory(undefined, true)).
 * @returns {boolean} true when at least one known marker file exists in the
 *   trie directory for that project.
 */
function isTrieInitialized(workDir) {
  const baseDir = workDir || getWorkingDirectory(void 0, true);
  const markerRoot = getTrieDirectory(baseDir);
  for (const marker of INIT_MARKERS) {
    if (existsSync(join(markerRoot, marker))) {
      return true;
    }
  }
  return false;
}
|
|
50
|
-
|
|
51
|
-
// src/utils/errors.ts
|
|
52
|
-
/**
 * Error carrying a machine-readable code plus a user-facing message.
 * `recoverable` tells callers whether retrying makes sense (defaults to true).
 */
var TrieError = class extends Error {
  constructor(message, code, userMessage, recoverable = true) {
    super(message);
    this.code = code;
    this.recoverable = recoverable;
    this.userMessage = userMessage;
  }
};
/**
 * Map any thrown value to a `{ userMessage, code }` pair that is safe to show
 * to end users. TrieError instances carry their own message and code; every
 * other error collapses to a generic message with code "UNKNOWN".
 */
function formatFriendlyError(error) {
  const isTrieError = error instanceof TrieError;
  if (isTrieError) {
    return { userMessage: error.userMessage, code: error.code };
  }
  return {
    userMessage: "Something went wrong. Try again or run with --offline.",
    code: "UNKNOWN"
  };
}
|
|
72
|
-
|
|
73
|
-
// src/agent/confidence.ts
|
|
74
|
-
/**
 * Nudge a confidence value up or down by `step`, clamped into [0, 1].
 * @param {number} current - Current confidence value.
 * @param {"positive"|"negative"} outcome - Any value other than "positive"
 *   is treated as negative.
 * @param {number} [step=0.1] - Magnitude of the adjustment.
 * @returns {number} The adjusted, clamped confidence.
 */
function adjustConfidence(current, outcome, step = 0.1) {
  if (outcome === "positive") {
    return clamp(current + step);
  }
  return clamp(current - step);
}
/**
 * Clamp a number into [0, 1]; NaN collapses to the neutral value 0.5.
 */
function clamp(value) {
  if (Number.isNaN(value)) {
    return 0.5;
  }
  if (value < 0) return 0;
  if (value > 1) return 1;
  return value;
}
|
|
82
|
-
|
|
83
|
-
// src/agent/learning.ts
|
|
84
|
-
/**
 * Feedback-driven learning loop. Reacts to heeded/ignored warnings, reported
 * incidents, and manual feedback by adjusting pattern confidence in the
 * context graph, and promotes discovered "hot zones" to anti-pattern nodes.
 */
var LearningSystem = class {
  incidentIndex;
  discovery;
  constructor(graph, projectPath) {
    this.graph = graph;
    this.incidentIndex = new IncidentIndex(graph, projectPath);
    this.discovery = new TriePatternDiscovery(graph, this.incidentIndex);
  }
  // A heeded warning is positive reinforcement for the matching patterns.
  async onWarningHeeded(files) {
    await this.adjustPatterns(files, "positive");
  }
  // An ignored warning weakens the matching patterns.
  async onWarningIgnored(files) {
    await this.adjustPatterns(files, "negative");
  }
  /**
   * Index a reported incident against the affected files, then re-run
   * pattern discovery so new hot zones are persisted.
   */
  async onIncidentReported(incidentId, files) {
    const node = await this.graph.getNode("incident", incidentId);
    if (node && node.type === "incident") {
      this.incidentIndex.addIncidentToTrie(node, files);
    }
    await this.discoverAndStorePatterns();
  }
  // Manual thumbs-up/down feedback, optionally scoped to specific files.
  async onFeedback(helpful, files = []) {
    const outcome = helpful ? "positive" : "negative";
    await this.adjustPatterns(files, outcome);
  }
  /**
   * Adjust confidence of every pattern associated with each file.
   * Files are processed sequentially; each file's patterns update in parallel.
   */
  async adjustPatterns(files, outcome) {
    if (files.length === 0) return;
    for (const file of files) {
      const matched = await this.graph.getPatternsForFile(file);
      const updates = matched.map((pattern) => this.updatePatternConfidence(pattern, outcome));
      await Promise.all(updates);
    }
  }
  /**
   * Shift a single pattern's confidence by a small step (0.05) and refresh
   * its lastSeen timestamp. Missing confidence defaults to neutral 0.5.
   */
  async updatePatternConfidence(pattern, outcome) {
    const previous = pattern.data.confidence ?? 0.5;
    const next = adjustConfidence(previous, outcome, 0.05);
    const stamp = new Date().toISOString();
    await this.graph.updateNode("pattern", pattern.id, { confidence: next, lastSeen: stamp });
  }
  /**
   * Persist every discovered hot zone as an anti-pattern node in the graph.
   */
  async discoverAndStorePatterns() {
    for (const hot of this.discovery.discoverHotPatterns()) {
      const kind = hot.type === "directory" ? "Directory" : "File";
      await this.graph.addNode("pattern", {
        description: `${kind} hot zone: ${hot.path}`,
        appliesTo: [hot.path],
        confidence: hot.confidence,
        occurrences: hot.incidentCount,
        firstSeen: new Date().toISOString(),
        lastSeen: new Date().toISOString(),
        isAntiPattern: true,
        source: "local"
      });
    }
  }
};
|
|
136
|
-
|
|
137
|
-
// src/agent/learning-engine.ts
|
|
138
|
-
import path from "path";
|
|
139
|
-
/**
 * Drives both implicit learning (mining git history for reverts/fixes) and
 * explicit learning (manual feedback), persisting results into the project's
 * context graph and issue store.
 */
var LearningEngine = class {
  projectPath;
  graph;
  learningSystem;
  constructor(projectPath, graph) {
    this.projectPath = projectPath;
    // Reuse an injected graph when provided; otherwise open one for the project.
    this.graph = graph || new ContextGraph(projectPath);
    this.learningSystem = new LearningSystem(this.graph, projectPath);
  }
  /**
   * Unified learning method: Scans history AND processes manual feedback.
   * @param {{manualFeedback?: {helpful: boolean, files: string[], note?: string}, limit?: number}} [options]
   * @returns {Promise<Array<{learned: number, source: string}>>} one entry per
   *   learning source that ran.
   */
  async learn(options = {}) {
    const results = [];
    // Implicit (git-history) learning runs only when no explicit feedback was supplied.
    if (!options.manualFeedback) {
      const implicitCount = await this.learnFromHistory(options.limit || 20);
      results.push({ learned: implicitCount, source: "git-history" });
    }
    if (options.manualFeedback) {
      await this.recordManualFeedback(
        options.manualFeedback.helpful,
        options.manualFeedback.files,
        options.manualFeedback.note
      );
      // Count one learning event per file, or a single event when no files were given.
      results.push({ learned: options.manualFeedback.files.length || 1, source: "manual-feedback" });
    }
    return results;
  }
  /**
   * Scan recent commits for implicit failure signals (reverts, fixes).
   * @param {number} [limit=20] - How many recent commits to inspect.
   * @returns {Promise<number>} count of issues actually stored.
   */
  async learnFromHistory(limit = 20) {
    const commits = await getRecentCommits(this.projectPath, limit);
    const issuesToStore = [];
    for (const commit of commits) {
      // Heuristics: revert commits and "fixes #NNN"/"bugfix" commits mark past failures.
      const isRevert = commit.message.toLowerCase().includes("revert") || commit.message.startsWith('Revert "');
      const isFix = /fix(es|ed)?\s+#\d+/i.test(commit.message) || commit.message.toLowerCase().includes("bugfix");
      if (isRevert || isFix) {
        // Revert wins when both heuristics match.
        const type = isRevert ? "revert" : "fix";
        const diff = await getDiff(this.projectPath, commit.hash);
        const files = this.extractFilesFromDiff(diff);
        for (const file of files) {
          const learnedIssues = await this.extractIssuesFromDiff(diff, file, type, commit.message);
          issuesToStore.push(...learnedIssues);
        }
      }
    }
    if (issuesToStore.length > 0) {
      const result = await storeIssues(issuesToStore, path.basename(this.projectPath), this.projectPath);
      return result.stored;
    }
    return 0;
  }
  /**
   * Record manual feedback (trie ok/bad) and adjust pattern confidence.
   * Creates a "decision" node, links it to any known file nodes, then feeds
   * the outcome into the LearningSystem.
   */
  async recordManualFeedback(helpful, files, note) {
    const context = files[0] ?? "unspecified";
    const decision = await this.graph.addNode("decision", {
      context,
      decision: helpful ? "helpful" : "not helpful",
      reasoning: note ?? null,
      outcome: helpful ? "good" : "bad",
      timestamp: (/* @__PURE__ */ new Date()).toISOString()
    });
    if (files.length > 0) {
      for (const file of files) {
        // Only link files the graph already knows about; unknown paths are skipped.
        const fileNode = await this.graph.getNode("file", file);
        if (fileNode) {
          await this.graph.addEdge(decision.id, fileNode.id, "affects");
        }
      }
      await this.learningSystem.onFeedback(helpful, files);
    }
  }
  // Collect the unique set of target-file paths ("+++ b/<path>") from a unified diff.
  extractFilesFromDiff(diff) {
    const files = /* @__PURE__ */ new Set();
    const lines = diff.split("\n");
    for (const line of lines) {
      if (line.startsWith("+++ b/")) {
        files.add(line.slice(6));
      }
    }
    return Array.from(files);
  }
  /**
   * Build learned issues for one file of a failure commit. Lines the commit
   * removed (fix) or re-added (revert) are scanned for known vulnerability and
   * vibe-code signatures; if nothing matches, a generic "historical" issue is
   * recorded instead so the event is not lost.
   */
  async extractIssuesFromDiff(diff, file, type, message) {
    const issues = [];
    const badLines = this.getBadLinesFromDiff(diff, file, type);
    const content = badLines.join("\n");
    if (!content) return [];
    const vulnerabilities = await scanForVulnerabilities(content, file);
    const vibeIssues = await scanForVibeCodeIssues(content, file);
    const allMatches = [...vulnerabilities, ...vibeIssues];
    for (const match of allMatches) {
      issues.push({
        // Timestamp + random suffix keeps IDs unique within a scan.
        id: `implicit-${type}-${Date.now()}-${Math.random().toString(36).slice(2, 7)}`,
        severity: "serious",
        issue: `Implicit failure detected via ${type}: ${message}. Linked to pattern: ${match.category}`,
        fix: `Review the ${type} commit and avoid this pattern in ${file}.`,
        file,
        confidence: 0.7,
        autoFixable: false,
        agent: "implicit-learning",
        category: match.category
      });
    }
    if (issues.length === 0) {
      // Fallback: record the failure even without a matching signature.
      issues.push({
        id: `implicit-${type}-${Date.now()}`,
        severity: "moderate",
        issue: `Historical ${type} detected: ${message}`,
        fix: `Review the changes in ${file} from this commit to avoid regression.`,
        file,
        confidence: 0.5,
        autoFixable: false,
        agent: "implicit-learning"
      });
    }
    return issues;
  }
  /**
   * Extract the "bad" lines for one file from a unified diff:
   * for a fix, the removed lines (what the fix deleted); for a revert, the
   * added lines (what the revert restored/undid).
   * NOTE(review): file matching uses substring `includes`, so similarly named
   * paths could collide — confirm against callers.
   */
  getBadLinesFromDiff(diff, file, type) {
    const badLines = [];
    const lines = diff.split("\n");
    let inTargetFile = false;
    for (const line of lines) {
      if (line.startsWith("+++ b/") || line.startsWith("--- a/")) {
        inTargetFile = line.includes(file);
        continue;
      }
      if (!inTargetFile) continue;
      if (type === "fix" && line.startsWith("-") && !line.startsWith("---")) {
        badLines.push(line.slice(1));
      } else if (type === "revert" && line.startsWith("+") && !line.startsWith("+++")) {
        badLines.push(line.slice(1));
      }
    }
    return badLines;
  }
};
|
|
278
|
-
|
|
279
|
-
// src/agent/perceive.ts
|
|
280
|
-
import path2 from "path";
|
|
281
|
-
|
|
282
|
-
// src/agent/diff-analyzer.ts
|
|
283
|
-
// Patterns whose presence in a changed line flags it as security-sensitive.
var RISKY_PATTERNS = [/auth/i, /token/i, /password/i, /secret/i, /validate/i, /sanitize/i];
/**
 * Summarize a unified diff: per-file added/removed line counts, function or
 * class names pulled (best effort) from hunk headers, and risky-pattern hits.
 * @param {string} diff - Unified diff text.
 * @returns {{files: Array, totalAdded: number, totalRemoved: number, riskyFiles: string[]}}
 */
function analyzeDiff(diff) {
  const files = [];
  let active = null;
  for (const raw of diff.split("\n")) {
    if (raw.startsWith("+++ b/")) {
      // New file section: start a fresh per-file record.
      active = {
        filePath: raw.replace("+++ b/", "").trim(),
        added: 0,
        removed: 0,
        functionsModified: [],
        riskyPatterns: []
      };
      files.push(active);
    } else if (active && raw.startsWith("@@")) {
      // Hunk header context often names the enclosing function/class.
      const m = raw.match(/@@.*?(function\s+([\w$]+)|class\s+([\w$]+)|([\w$]+\s*\())/i);
      const name = m?.[2] || m?.[3] || m?.[4];
      if (name) {
        active.functionsModified.push(name.replace("(", "").trim());
      }
    } else if (active && raw.startsWith("+") && !raw.startsWith("+++")) {
      active.added += 1;
      markRisk(raw, active);
    } else if (active && raw.startsWith("-") && !raw.startsWith("---")) {
      active.removed += 1;
      markRisk(raw, active);
    }
  }
  let totalAdded = 0;
  let totalRemoved = 0;
  const riskyFiles = [];
  for (const entry of files) {
    totalAdded += entry.added;
    totalRemoved += entry.removed;
    if (entry.riskyPatterns.length > 0) {
      riskyFiles.push(entry.filePath);
    }
  }
  return { files, totalAdded, totalRemoved, riskyFiles };
}
/**
 * Record which risky patterns a changed line matches, de-duplicated per file
 * by the pattern's string form (e.g. "/token/i").
 */
function markRisk(line, file) {
  for (const pattern of RISKY_PATTERNS) {
    if (!pattern.test(line)) continue;
    const label = pattern.toString();
    if (!file.riskyPatterns.includes(label)) {
      file.riskyPatterns.push(label);
    }
  }
}
|
|
340
|
-
|
|
341
|
-
// src/agent/perceive.ts
|
|
342
|
-
/**
 * Snapshot the workspace's current (uncommitted) changes: staged + unstaged
 * file lists plus an analyzed combined diff. Also upserts a "change" node in
 * the context graph linking to every touched file.
 * @param {string} projectPath - Project root directory.
 * @param {object} [graph] - Optional existing ContextGraph; a new one is
 *   created when omitted.
 * @returns {Promise<object>} { staged, unstaged, diffSummary, changeNodeId? }
 */
async function perceiveCurrentChanges(projectPath, graph) {
  const ctxGraph = graph ?? new ContextGraph(projectPath);
  // Staged and unstaged change lists are independent — fetch in parallel.
  const [staged, unstaged] = await Promise.all([
    getStagedChanges(projectPath),
    getUncommittedChanges(projectPath)
  ]);
  const stagedDiff = await getWorkingTreeDiff(projectPath, true);
  const unstagedDiff = await getWorkingTreeDiff(projectPath, false);
  // filter(Boolean) drops empty diffs so we don't join stray newlines.
  const combinedDiff = [stagedDiff, unstagedDiff].filter(Boolean).join("\n");
  const diffSummary = analyzeDiff(combinedDiff);
  // Union of every path seen in either change list or the diff summary.
  const filesTouched = /* @__PURE__ */ new Set();
  staged.forEach((c) => filesTouched.add(c.path));
  unstaged.forEach((c) => filesTouched.add(c.path));
  diffSummary.files.forEach((f) => filesTouched.add(f.filePath));
  const changeId = await upsertWorkingChange(ctxGraph, Array.from(filesTouched), projectPath);
  const result = {
    staged,
    unstaged,
    diffSummary
  };
  // changeNodeId is only attached when a change node was actually created.
  if (changeId) result.changeNodeId = changeId;
  return result;
}
/**
 * Create a "change" node representing uncommitted workspace changes and link
 * it to a file node for every touched path.
 * @returns {Promise<string|undefined>} the change node id, or undefined when
 *   there are no files.
 */
async function upsertWorkingChange(graph, files, projectPath) {
  if (files.length === 0) return void 0;
  const now = (/* @__PURE__ */ new Date()).toISOString();
  // commitHash/diff/author are null: this change is not committed yet.
  const change = await graph.addNode("change", {
    commitHash: null,
    files,
    message: "workspace changes",
    diff: null,
    author: null,
    timestamp: now,
    outcome: "unknown"
  });
  for (const filePath of files) {
    const fileNode = await ensureFileNode(graph, filePath, projectPath);
    await graph.addEdge(change.id, fileNode.id, "affects");
  }
  return change.id;
}
/**
 * Fetch or create the graph node for a file. Existing nodes get their
 * changeCount bumped and lastChanged refreshed; new nodes start at
 * changeCount 1 with a "medium" default risk level.
 */
async function ensureFileNode(graph, filePath, projectPath) {
  // Lookups key on the absolute path resolved against the project root.
  const normalized = path2.resolve(projectPath, filePath);
  const existing = await graph.getNode("file", normalized);
  const now = (/* @__PURE__ */ new Date()).toISOString();
  if (existing) {
    const data2 = existing.data;
    await graph.updateNode("file", existing.id, {
      changeCount: (data2.changeCount ?? 0) + 1,
      lastChanged: now
    });
    // Re-read so callers see the updated node.
    return await graph.getNode("file", existing.id);
  }
  const data = {
    path: filePath,
    extension: path2.extname(filePath),
    purpose: "",
    riskLevel: "medium",
    whyRisky: null,
    changeCount: 1,
    lastChanged: now,
    incidentCount: 0,
    createdAt: now
  };
  return await graph.addNode("file", data);
}
|
|
408
|
-
|
|
409
|
-
// src/agent/risk-scorer.ts
|
|
410
|
-
import * as path3 from "path";
|
|
411
|
-
// Starting scores for each declared file risk level.
var BASE_RISK = {
  low: 10,
  medium: 35,
  high: 65,
  critical: 85
};
// Path patterns that raise a file's risk regardless of its history.
var SENSITIVE_PATHS = [
  { pattern: /auth|login|token|session/i, weight: 20, reason: "touches authentication" },
  { pattern: /payment|billing|stripe|paypal|checkout/i, weight: 25, reason: "touches payments" },
  { pattern: /secret|credential|env|config\/security/i, weight: 15, reason: "touches secrets/security config" }
];
/**
 * Bucket a numeric risk score into a level.
 * Thresholds: >=90 critical, >=65 high, >=40 medium, otherwise low.
 * @param {number} score
 * @returns {"critical"|"high"|"medium"|"low"}
 */
function levelFromScore(score) {
  const buckets = [
    [90, "critical"],
    [65, "high"],
    [40, "medium"]
  ];
  for (const [minScore, level] of buckets) {
    if (score >= minScore) {
      return level;
    }
  }
  return "low";
}
|
|
428
|
-
/**
 * Compute a risk score for a single file by combining its graph metadata
 * (declared risk level, incident/change history), sensitive-path heuristics,
 * matched patterns, and incident recency. Returns the score, its level, and
 * human-readable reasons for each adjustment.
 * @param {object} graph - Context graph (uses projectRoot, getNode, getIncidentsForFile).
 * @param {string} filePath - File path relative to the project root.
 * @param {Array} [matchedPatterns] - Pattern nodes already matched to this file.
 * @returns {Promise<object>} { file, score, level, reasons, incidents, matchedPatterns }
 */
async function scoreFile(graph, filePath, matchedPatterns = []) {
  const reasons = [];
  // Graph file nodes are keyed by absolute path.
  const normalized = path3.resolve(graph.projectRoot, filePath);
  const node = await graph.getNode("file", normalized);
  const incidents = await graph.getIncidentsForFile(filePath);
  // Default baseline when the file is unknown to the graph.
  let score = 10;
  const data = node?.data;
  if (data) {
    score = BASE_RISK[data.riskLevel] ?? score;
    reasons.push(`baseline ${data.riskLevel}`);
    if (data.incidentCount > 0) {
      // +12 per recorded incident, capped at +36.
      const incBoost = Math.min(data.incidentCount * 12, 36);
      score += incBoost;
      reasons.push(`historical incidents (+${incBoost})`);
    }
    if (data.changeCount > 5) {
      // Churn above 5 changes adds +2 per extra change, capped at +12.
      const changeBoost = Math.min((data.changeCount - 5) * 2, 12);
      score += changeBoost;
      reasons.push(`frequent changes (+${changeBoost})`);
    }
    if (data.lastChanged) {
      const lastChanged = new Date(data.lastChanged).getTime();
      const days = (Date.now() - lastChanged) / (1e3 * 60 * 60 * 24);
      // Long-stable, incident-free files earn a small discount.
      if (days > 60 && data.incidentCount === 0) {
        score -= 5;
        reasons.push("stable for 60d (-5)");
      }
    }
  }
  // Path-based heuristics apply whether or not the graph knows the file.
  for (const { pattern, weight, reason } of SENSITIVE_PATHS) {
    if (pattern.test(filePath)) {
      score += weight;
      reasons.push(reason);
    }
  }
  if (matchedPatterns.length > 0) {
    // NOTE(review): the `?? 50` default suggests a 0-100 confidence scale here,
    // while patterns elsewhere in this bundle use 0-1 — confirm which is intended.
    const patternBoost = Math.min(
      matchedPatterns.reduce((acc, p) => acc + (p.data.confidence ?? 50) / 10, 0),
      20
    );
    score += patternBoost;
    reasons.push(`pattern match (+${Math.round(patternBoost)})`);
  }
  if (incidents.length > 0) {
    // Sort timestamps descending; only the most recent incident matters.
    const timestamps = incidents.map((i) => new Date(i.data.timestamp).getTime()).sort((a, b) => b - a);
    const recent = timestamps[0];
    const daysSince = (Date.now() - recent) / (1e3 * 60 * 60 * 24);
    if (daysSince > 90) {
      score -= 5;
      reasons.push("no incidents in 90d (-5)");
    } else {
      score += 8;
      reasons.push("recent incident (+8)");
    }
  }
  const level = levelFromScore(score);
  return {
    file: filePath,
    score,
    level,
    reasons,
    incidents,
    matchedPatterns
  };
}
|
|
493
|
-
/**
 * Score a whole change set. The overall score is the riskiest file's score
 * (floored at 10) plus a small boost when the change spans many files.
 * @param {object} graph - Context graph passed through to scoreFile.
 * @param {string[]} files - Changed file paths.
 * @param {Object<string, Array>} [patternMatches] - Matched patterns per file.
 * @returns {Promise<object>} { files, overall, score, shouldEscalate }
 */
async function scoreChangeSet(graph, files, patternMatches = {}) {
  const fileResults = [];
  for (const file of files) {
    const matched = patternMatches[file] ?? [];
    const scored = await scoreFile(graph, file, matched);
    fileResults.push(scored);
  }
  // Riskiest single file dominates; 10 is the floor for an empty set.
  const maxScore = Math.max(10, ...fileResults.map((entry) => entry.score));
  // Changes touching >5 files gain +2 per extra file, capped at +10.
  let spreadBoost = 0;
  if (files.length > 5) {
    spreadBoost = Math.min((files.length - 5) * 2, 10);
  }
  const overallScore = maxScore + spreadBoost;
  const overall = levelFromScore(overallScore);
  return {
    files: fileResults,
    overall,
    score: overallScore,
    shouldEscalate: overall === "critical" || overall === "high"
  };
}
|
|
511
|
-
|
|
512
|
-
// src/agent/pattern-matcher.ts
|
|
513
|
-
/**
 * Collect the known patterns that apply to each given file.
 * @param {object} graph - Context graph exposing getPatternsForFile(file).
 * @param {string[]} files - File paths to look up.
 * @returns {Promise<{matches: Array, byFile: Object}>} a flat match list plus
 *   a per-file index (files with no patterns are omitted from byFile).
 */
async function matchPatternsForFiles(graph, files) {
  const matches = [];
  const byFile = {};
  for (const file of files) {
    const patterns = await graph.getPatternsForFile(file);
    if (!patterns.length) continue;
    byFile[file] = patterns;
    const fileMatches = patterns.map((pattern) => ({
      file,
      pattern,
      confidence: pattern.data.confidence,
      isAntiPattern: pattern.data.isAntiPattern
    }));
    matches.push(...fileMatches);
  }
  return { matches, byFile };
}
|
|
531
|
-
|
|
532
|
-
// src/orchestrator/triager.ts
|
|
533
|
-
/**
 * Legacy triage entry point kept for interface compatibility.
 * Skills/agents were removed — Trie is now purely a decision ledger — so this
 * stub never selects any agents.
 */
var Triager = class {
  constructor(_config) {
    // Config is accepted but unused; retained so existing call sites compile.
  }
  /**
   * Triage a change to select appropriate agents.
   * Note: Skills/agents have been removed - Trie is now purely a decision ledger
   * @returns {Promise<Array>} always an empty list.
   */
  async triage(_context, _forceAgents) {
    return [];
  }
  /**
   * Get all available agent names (deprecated - returns empty array)
   */
  getAvailableAgents() {
    return [];
  }
};
|
|
550
|
-
|
|
551
|
-
// src/utils/parallel-executor.ts
|
|
552
|
-
import { Worker } from "worker_threads";
|
|
553
|
-
import { cpus } from "os";
|
|
554
|
-
import { existsSync as existsSync2 } from "fs";
|
|
555
|
-
import { fileURLToPath } from "url";
|
|
556
|
-
var ParallelExecutor = class {
|
|
557
|
-
maxWorkers;
|
|
558
|
-
cache;
|
|
559
|
-
streaming;
|
|
560
|
-
activeWorkers = /* @__PURE__ */ new Set();
|
|
561
|
-
cacheEnabled = true;
|
|
562
|
-
useWorkerThreads = false;
|
|
563
|
-
workerAvailable = null;
|
|
564
|
-
warnedWorkerFallback = false;
|
|
565
|
-
constructor(cacheManager, maxWorkers = Math.max(2, Math.min(cpus().length - 1, 8)), options) {
|
|
566
|
-
this.maxWorkers = maxWorkers;
|
|
567
|
-
this.cache = cacheManager;
|
|
568
|
-
this.cacheEnabled = options?.cacheEnabled ?? true;
|
|
569
|
-
this.useWorkerThreads = options?.useWorkerThreads ?? false;
|
|
570
|
-
}
|
|
571
|
-
/**
|
|
572
|
-
* Set streaming manager for real-time updates
|
|
573
|
-
*/
|
|
574
|
-
setStreaming(streaming) {
|
|
575
|
-
this.streaming = streaming;
|
|
576
|
-
}
|
|
577
|
-
/**
|
|
578
|
-
* Execute agents in parallel with intelligent scheduling
|
|
579
|
-
*/
|
|
580
|
-
async executeAgents(agents, files, context) {
|
|
581
|
-
if (agents.length === 0) {
|
|
582
|
-
return /* @__PURE__ */ new Map();
|
|
583
|
-
}
|
|
584
|
-
if (this.streaming && this.streaming.getProgress().totalFiles === 0) {
|
|
585
|
-
this.streaming.startScan(files.length);
|
|
586
|
-
}
|
|
587
|
-
const cacheResults = /* @__PURE__ */ new Map();
|
|
588
|
-
const uncachedTasks = [];
|
|
589
|
-
for (const agent of agents) {
|
|
590
|
-
const cached = await this.checkAgentCache(agent, files);
|
|
591
|
-
if (cached) {
|
|
592
|
-
cacheResults.set(agent.name, cached);
|
|
593
|
-
this.streaming?.completeAgent(agent.name, cached.issues);
|
|
594
|
-
} else {
|
|
595
|
-
uncachedTasks.push({
|
|
596
|
-
agent,
|
|
597
|
-
files,
|
|
598
|
-
context,
|
|
599
|
-
priority: agent.priority?.tier || 2,
|
|
600
|
-
timeoutMs: context?.config?.timeoutMs || 12e4
|
|
601
|
-
});
|
|
602
|
-
}
|
|
603
|
-
}
|
|
604
|
-
uncachedTasks.sort((a, b) => a.priority - b.priority);
|
|
605
|
-
const parallelResults = await this.executeTasksParallel(uncachedTasks);
|
|
606
|
-
await this.cacheResults(parallelResults);
|
|
607
|
-
const allResults = /* @__PURE__ */ new Map();
|
|
608
|
-
for (const [agent, result] of Array.from(cacheResults.entries())) {
|
|
609
|
-
allResults.set(agent, result);
|
|
610
|
-
}
|
|
611
|
-
for (const result of parallelResults) {
|
|
612
|
-
allResults.set(result.agent, result.result);
|
|
613
|
-
}
|
|
614
|
-
const allIssues = Array.from(allResults.values()).flatMap((r) => r.issues);
|
|
615
|
-
this.streaming?.completeScan(allIssues);
|
|
616
|
-
return allResults;
|
|
617
|
-
}
|
|
618
|
-
/**
|
|
619
|
-
* Check if agent has cached results for given files
|
|
620
|
-
*/
|
|
621
|
-
async checkAgentCache(agent, files) {
|
|
622
|
-
if (!this.cacheEnabled || !this.cache) {
|
|
623
|
-
return null;
|
|
624
|
-
}
|
|
625
|
-
const cachedIssues = await this.cache.getCachedBatch(files, agent.name);
|
|
626
|
-
if (cachedIssues.size === files.length) {
|
|
627
|
-
const allIssues = Array.from(cachedIssues.values()).flat();
|
|
628
|
-
return {
|
|
629
|
-
agent: agent.name,
|
|
630
|
-
issues: allIssues,
|
|
631
|
-
executionTime: 0,
|
|
632
|
-
// Cached
|
|
633
|
-
success: true,
|
|
634
|
-
metadata: {
|
|
635
|
-
filesAnalyzed: files.length,
|
|
636
|
-
linesAnalyzed: 0
|
|
637
|
-
}
|
|
638
|
-
};
|
|
639
|
-
}
|
|
640
|
-
return null;
|
|
641
|
-
}
|
|
642
|
-
/**
|
|
643
|
-
* Execute tasks in parallel batches
|
|
644
|
-
*/
|
|
645
|
-
async executeTasksParallel(tasks) {
|
|
646
|
-
if (tasks.length === 0) {
|
|
647
|
-
return [];
|
|
648
|
-
}
|
|
649
|
-
const results = [];
|
|
650
|
-
const batches = this.createBatches(tasks, this.maxWorkers);
|
|
651
|
-
for (const batch of batches) {
|
|
652
|
-
const batchResults = await Promise.all(
|
|
653
|
-
batch.map((task) => this.executeTask(task))
|
|
654
|
-
);
|
|
655
|
-
results.push(...batchResults);
|
|
656
|
-
}
|
|
657
|
-
return results;
|
|
658
|
-
}
|
|
659
|
-
/**
|
|
660
|
-
* Create batches for parallel execution
|
|
661
|
-
*/
|
|
662
|
-
createBatches(tasks, batchSize) {
|
|
663
|
-
const batches = [];
|
|
664
|
-
for (let i = 0; i < tasks.length; i += batchSize) {
|
|
665
|
-
batches.push(tasks.slice(i, i + batchSize));
|
|
666
|
-
}
|
|
667
|
-
return batches;
|
|
668
|
-
}
|
|
669
|
-
/**
|
|
670
|
-
* Execute a single task
|
|
671
|
-
*/
|
|
672
|
-
async executeTask(task) {
|
|
673
|
-
const startTime = Date.now();
|
|
674
|
-
this.streaming?.startAgent(task.agent.name);
|
|
675
|
-
try {
|
|
676
|
-
const result = this.canUseWorkers() ? await this.executeTaskInWorker(task) : await task.agent.scan(task.files, task.context);
|
|
677
|
-
const executionTime = Date.now() - startTime;
|
|
678
|
-
this.streaming?.completeAgent(task.agent.name, result.issues);
|
|
679
|
-
return {
|
|
680
|
-
agent: task.agent.name,
|
|
681
|
-
result,
|
|
682
|
-
fromCache: false,
|
|
683
|
-
executionTime
|
|
684
|
-
};
|
|
685
|
-
} catch (error) {
|
|
686
|
-
const executionTime = Date.now() - startTime;
|
|
687
|
-
const errorMessage = error instanceof Error ? error.message : String(error);
|
|
688
|
-
this.streaming?.reportError(new Error(errorMessage), `Agent: ${task.agent.name}`);
|
|
689
|
-
return {
|
|
690
|
-
agent: task.agent.name,
|
|
691
|
-
result: {
|
|
692
|
-
agent: task.agent.name,
|
|
693
|
-
issues: [],
|
|
694
|
-
executionTime,
|
|
695
|
-
success: false,
|
|
696
|
-
error: errorMessage
|
|
697
|
-
},
|
|
698
|
-
fromCache: false,
|
|
699
|
-
executionTime
|
|
700
|
-
};
|
|
701
|
-
}
|
|
702
|
-
}
|
|
703
|
-
canUseWorkers() {
|
|
704
|
-
if (!this.useWorkerThreads) {
|
|
705
|
-
return false;
|
|
706
|
-
}
|
|
707
|
-
if (this.workerAvailable !== null) {
|
|
708
|
-
return this.workerAvailable;
|
|
709
|
-
}
|
|
710
|
-
const workerUrl = this.getWorkerUrl();
|
|
711
|
-
this.workerAvailable = existsSync2(fileURLToPath(workerUrl));
|
|
712
|
-
if (!this.workerAvailable && !this.warnedWorkerFallback && !isInteractiveMode()) {
|
|
713
|
-
console.error("Worker threads unavailable; falling back to in-process agents.");
|
|
714
|
-
this.warnedWorkerFallback = true;
|
|
715
|
-
}
|
|
716
|
-
return this.workerAvailable;
|
|
717
|
-
}
|
|
718
|
-
getWorkerUrl() {
|
|
719
|
-
const distDir = new URL(".", import.meta.url);
|
|
720
|
-
return new URL("workers/agent-worker.js", distDir);
|
|
721
|
-
}
|
|
722
|
-
async executeTaskInWorker(task) {
|
|
723
|
-
const workerUrl = this.getWorkerUrl();
|
|
724
|
-
return new Promise((resolve2, reject) => {
|
|
725
|
-
const worker = new Worker(workerUrl, {
|
|
726
|
-
workerData: {
|
|
727
|
-
agentName: task.agent.name,
|
|
728
|
-
files: task.files,
|
|
729
|
-
context: task.context
|
|
730
|
-
}
|
|
731
|
-
});
|
|
732
|
-
this.activeWorkers.add(worker);
|
|
733
|
-
const timeout = setTimeout(() => {
|
|
734
|
-
worker.terminate().catch(() => void 0);
|
|
735
|
-
reject(new Error(`Agent ${task.agent.name} timed out after ${task.timeoutMs}ms`));
|
|
736
|
-
}, task.timeoutMs);
|
|
737
|
-
worker.on("message", (message) => {
|
|
738
|
-
if (message?.type === "result") {
|
|
739
|
-
clearTimeout(timeout);
|
|
740
|
-
resolve2(message.result);
|
|
741
|
-
} else if (message?.type === "error") {
|
|
742
|
-
clearTimeout(timeout);
|
|
743
|
-
reject(new Error(message.error));
|
|
744
|
-
}
|
|
745
|
-
});
|
|
746
|
-
worker.on("error", (error) => {
|
|
747
|
-
clearTimeout(timeout);
|
|
748
|
-
reject(error);
|
|
749
|
-
});
|
|
750
|
-
worker.on("exit", (code) => {
|
|
751
|
-
this.activeWorkers.delete(worker);
|
|
752
|
-
if (code !== 0) {
|
|
753
|
-
clearTimeout(timeout);
|
|
754
|
-
reject(new Error(`Worker stopped with exit code ${code}`));
|
|
755
|
-
}
|
|
756
|
-
});
|
|
757
|
-
});
|
|
758
|
-
}
|
|
759
|
-
/**
|
|
760
|
-
* Cache results for future use
|
|
761
|
-
*/
|
|
762
|
-
async cacheResults(results) {
|
|
763
|
-
if (!this.cacheEnabled || !this.cache) {
|
|
764
|
-
return;
|
|
765
|
-
}
|
|
766
|
-
const cachePromises = results.filter((r) => r.result.success && !r.fromCache).map((r) => {
|
|
767
|
-
const issuesByFile = this.groupIssuesByFile(r.result.issues);
|
|
768
|
-
const perFilePromises = Object.entries(issuesByFile).map(
|
|
769
|
-
([file, issues]) => this.cache.setCached(file, r.agent, issues, r.executionTime)
|
|
770
|
-
);
|
|
771
|
-
return Promise.all(perFilePromises);
|
|
772
|
-
});
|
|
773
|
-
await Promise.allSettled(cachePromises);
|
|
774
|
-
}
|
|
775
|
-
/**
|
|
776
|
-
* Cleanup resources
|
|
777
|
-
*/
|
|
778
|
-
async cleanup() {
|
|
779
|
-
const terminationPromises = Array.from(this.activeWorkers).map(
|
|
780
|
-
(worker) => worker.terminate()
|
|
781
|
-
);
|
|
782
|
-
await Promise.allSettled(terminationPromises);
|
|
783
|
-
this.activeWorkers.clear();
|
|
784
|
-
}
|
|
785
|
-
groupIssuesByFile(issues) {
|
|
786
|
-
const grouped = {};
|
|
787
|
-
for (const issue of issues) {
|
|
788
|
-
if (!grouped[issue.file]) {
|
|
789
|
-
grouped[issue.file] = [];
|
|
790
|
-
}
|
|
791
|
-
grouped[issue.file].push(issue);
|
|
792
|
-
}
|
|
793
|
-
return grouped;
|
|
794
|
-
}
|
|
795
|
-
};
|
|
796
|
-
function calculateOptimalConcurrency() {
|
|
797
|
-
const numCPUs = cpus().length;
|
|
798
|
-
const availableMemoryGB = process.memoryUsage().rss / 1024 / 1024 / 1024;
|
|
799
|
-
let optimal = Math.max(2, Math.min(numCPUs - 1, 8));
|
|
800
|
-
if (availableMemoryGB < 2) {
|
|
801
|
-
optimal = Math.max(2, Math.floor(optimal / 2));
|
|
802
|
-
}
|
|
803
|
-
if (numCPUs > 8) {
|
|
804
|
-
optimal = Math.min(optimal + 2, 12);
|
|
805
|
-
}
|
|
806
|
-
return optimal;
|
|
807
|
-
}
|
|
808
|
-
|
|
809
|
-
// src/utils/cache-manager.ts
|
|
810
|
-
import { readFile, writeFile, mkdir, stat } from "fs/promises";
|
|
811
|
-
import { join as join2 } from "path";
|
|
812
|
-
import { createHash } from "crypto";
|
|
813
|
-
var CacheManager = class {
|
|
814
|
-
cacheDir;
|
|
815
|
-
indexPath;
|
|
816
|
-
VERSION = "1.0.0";
|
|
817
|
-
MAX_AGE_MS = 24 * 60 * 60 * 1e3;
|
|
818
|
-
// 24 hours
|
|
819
|
-
MAX_ENTRIES = 1e3;
|
|
820
|
-
constructor(baseDir) {
|
|
821
|
-
this.cacheDir = join2(getTrieDirectory(baseDir), "cache");
|
|
822
|
-
this.indexPath = join2(this.cacheDir, "index.json");
|
|
823
|
-
}
|
|
824
|
-
/**
|
|
825
|
-
* Generate cache key for a file and agent combination
|
|
826
|
-
*/
|
|
827
|
-
generateCacheKey(filePath, agent, fileHash) {
|
|
828
|
-
const key = `${filePath}:${agent}:${fileHash}`;
|
|
829
|
-
return createHash("sha256").update(key).digest("hex").slice(0, 16);
|
|
830
|
-
}
|
|
831
|
-
/**
|
|
832
|
-
* Get file hash for cache validation
|
|
833
|
-
*/
|
|
834
|
-
async getFileHash(filePath) {
|
|
835
|
-
try {
|
|
836
|
-
const content = await readFile(filePath, "utf-8");
|
|
837
|
-
const stats = await stat(filePath);
|
|
838
|
-
const hash = createHash("sha256").update(content).digest("hex").slice(0, 16);
|
|
839
|
-
return {
|
|
840
|
-
hash,
|
|
841
|
-
size: stats.size,
|
|
842
|
-
mtime: stats.mtime.getTime()
|
|
843
|
-
};
|
|
844
|
-
} catch {
|
|
845
|
-
return { hash: "", size: 0, mtime: 0 };
|
|
846
|
-
}
|
|
847
|
-
}
|
|
848
|
-
/**
|
|
849
|
-
* Load cache index
|
|
850
|
-
*/
|
|
851
|
-
async loadIndex() {
|
|
852
|
-
try {
|
|
853
|
-
const content = await readFile(this.indexPath, "utf-8");
|
|
854
|
-
return JSON.parse(content);
|
|
855
|
-
} catch {
|
|
856
|
-
return {
|
|
857
|
-
version: this.VERSION,
|
|
858
|
-
created: Date.now(),
|
|
859
|
-
entries: {}
|
|
860
|
-
};
|
|
861
|
-
}
|
|
862
|
-
}
|
|
863
|
-
/**
|
|
864
|
-
* Save cache index
|
|
865
|
-
*/
|
|
866
|
-
async saveIndex(index) {
|
|
867
|
-
try {
|
|
868
|
-
await mkdir(this.cacheDir, { recursive: true });
|
|
869
|
-
await writeFile(this.indexPath, JSON.stringify(index, null, 2));
|
|
870
|
-
} catch (error) {
|
|
871
|
-
if (!isInteractiveMode()) {
|
|
872
|
-
console.warn("Failed to save cache index:", error);
|
|
873
|
-
}
|
|
874
|
-
}
|
|
875
|
-
}
|
|
876
|
-
/**
|
|
877
|
-
* Clean up expired entries
|
|
878
|
-
*/
|
|
879
|
-
cleanupExpired(index) {
|
|
880
|
-
const now = Date.now();
|
|
881
|
-
const validEntries = {};
|
|
882
|
-
for (const [key, entry] of Object.entries(index.entries)) {
|
|
883
|
-
if (now - entry.timestamp < this.MAX_AGE_MS) {
|
|
884
|
-
validEntries[key] = entry;
|
|
885
|
-
}
|
|
886
|
-
}
|
|
887
|
-
const entries = Object.entries(validEntries);
|
|
888
|
-
if (entries.length > this.MAX_ENTRIES) {
|
|
889
|
-
entries.sort((a, b) => b[1].timestamp - a[1].timestamp);
|
|
890
|
-
const limited = entries.slice(0, this.MAX_ENTRIES);
|
|
891
|
-
return {
|
|
892
|
-
...index,
|
|
893
|
-
entries: Object.fromEntries(limited)
|
|
894
|
-
};
|
|
895
|
-
}
|
|
896
|
-
return {
|
|
897
|
-
...index,
|
|
898
|
-
entries: validEntries
|
|
899
|
-
};
|
|
900
|
-
}
|
|
901
|
-
/**
|
|
902
|
-
* Get cached result for a file and agent
|
|
903
|
-
*
|
|
904
|
-
* Cache automatically invalidates when files change:
|
|
905
|
-
* - Cache key includes file hash: hash(filePath:agent:fileHash)
|
|
906
|
-
* - When file changes, hash changes, so cache key changes
|
|
907
|
-
* - Old cache entry won't be found (different key)
|
|
908
|
-
* - File is automatically rescanned
|
|
909
|
-
*
|
|
910
|
-
* This means cache auto-updates when Claude fixes code - no manual invalidation needed!
|
|
911
|
-
*/
|
|
912
|
-
async getCached(filePath, agent) {
|
|
913
|
-
try {
|
|
914
|
-
const { hash, size: _size, mtime: _mtime } = await this.getFileHash(filePath);
|
|
915
|
-
if (!hash) return null;
|
|
916
|
-
const index = await this.loadIndex();
|
|
917
|
-
const cacheKey = this.generateCacheKey(filePath, agent, hash);
|
|
918
|
-
const entry = index.entries[cacheKey];
|
|
919
|
-
if (!entry) return null;
|
|
920
|
-
const isValid = entry.fileHash === hash && entry.version === this.VERSION && Date.now() - entry.timestamp < this.MAX_AGE_MS;
|
|
921
|
-
if (!isValid) {
|
|
922
|
-
delete index.entries[cacheKey];
|
|
923
|
-
await this.saveIndex(index);
|
|
924
|
-
return null;
|
|
925
|
-
}
|
|
926
|
-
return entry.issues;
|
|
927
|
-
} catch {
|
|
928
|
-
return null;
|
|
929
|
-
}
|
|
930
|
-
}
|
|
931
|
-
/**
|
|
932
|
-
* Cache result for a file and agent
|
|
933
|
-
*/
|
|
934
|
-
async setCached(filePath, agent, issues, executionTime) {
|
|
935
|
-
try {
|
|
936
|
-
const { hash, size } = await this.getFileHash(filePath);
|
|
937
|
-
if (!hash) return;
|
|
938
|
-
const index = await this.loadIndex();
|
|
939
|
-
const cacheKey = this.generateCacheKey(filePath, agent, hash);
|
|
940
|
-
index.entries[cacheKey] = {
|
|
941
|
-
version: this.VERSION,
|
|
942
|
-
timestamp: Date.now(),
|
|
943
|
-
fileHash: hash,
|
|
944
|
-
fileSize: size,
|
|
945
|
-
agent,
|
|
946
|
-
issues,
|
|
947
|
-
executionTime
|
|
948
|
-
};
|
|
949
|
-
const cleanedIndex = this.cleanupExpired(index);
|
|
950
|
-
await this.saveIndex(cleanedIndex);
|
|
951
|
-
} catch (error) {
|
|
952
|
-
if (!isInteractiveMode()) {
|
|
953
|
-
console.warn("Failed to cache result:", error);
|
|
954
|
-
}
|
|
955
|
-
}
|
|
956
|
-
}
|
|
957
|
-
/**
|
|
958
|
-
* Check if multiple files have cached results
|
|
959
|
-
*/
|
|
960
|
-
async getCachedBatch(files, agent) {
|
|
961
|
-
const results = /* @__PURE__ */ new Map();
|
|
962
|
-
await Promise.all(
|
|
963
|
-
files.map(async (file) => {
|
|
964
|
-
const cached = await this.getCached(file, agent);
|
|
965
|
-
if (cached) {
|
|
966
|
-
results.set(file, cached);
|
|
967
|
-
}
|
|
968
|
-
})
|
|
969
|
-
);
|
|
970
|
-
return results;
|
|
971
|
-
}
|
|
972
|
-
/**
|
|
973
|
-
* Get cache statistics
|
|
974
|
-
*/
|
|
975
|
-
async getStats() {
|
|
976
|
-
try {
|
|
977
|
-
const index = await this.loadIndex();
|
|
978
|
-
const entries = Object.values(index.entries);
|
|
979
|
-
const totalSizeKB = entries.reduce((acc, entry) => acc + entry.fileSize, 0) / 1024;
|
|
980
|
-
const timestamps = entries.map((e) => e.timestamp);
|
|
981
|
-
const agents = Array.from(new Set(entries.map((e) => e.agent)));
|
|
982
|
-
return {
|
|
983
|
-
totalEntries: entries.length,
|
|
984
|
-
totalSizeKB: Math.round(totalSizeKB),
|
|
985
|
-
oldestEntry: timestamps.length > 0 ? Math.min(...timestamps) : null,
|
|
986
|
-
newestEntry: timestamps.length > 0 ? Math.max(...timestamps) : null,
|
|
987
|
-
agents
|
|
988
|
-
};
|
|
989
|
-
} catch {
|
|
990
|
-
return {
|
|
991
|
-
totalEntries: 0,
|
|
992
|
-
totalSizeKB: 0,
|
|
993
|
-
oldestEntry: null,
|
|
994
|
-
newestEntry: null,
|
|
995
|
-
agents: []
|
|
996
|
-
};
|
|
997
|
-
}
|
|
998
|
-
}
|
|
999
|
-
/**
|
|
1000
|
-
* Clean up stale cache entries by verifying file hashes
|
|
1001
|
-
* This removes entries where files have changed or no longer exist
|
|
1002
|
-
* Called periodically to keep cache clean
|
|
1003
|
-
*
|
|
1004
|
-
* Note: Since cache keys are hashed, we can't easily reverse-engineer file paths.
|
|
1005
|
-
* However, when getCached() is called, it naturally invalidates stale entries
|
|
1006
|
-
* by checking if the current file hash matches the cached hash. This method
|
|
1007
|
-
* proactively cleans up entries for known changed files.
|
|
1008
|
-
*/
|
|
1009
|
-
async cleanupStaleEntries(filePaths) {
|
|
1010
|
-
try {
|
|
1011
|
-
const index = await this.loadIndex();
|
|
1012
|
-
let removedCount = 0;
|
|
1013
|
-
const keysToRemove = [];
|
|
1014
|
-
if (filePaths && filePaths.length > 0) {
|
|
1015
|
-
const agents = /* @__PURE__ */ new Set();
|
|
1016
|
-
for (const entry of Object.values(index.entries)) {
|
|
1017
|
-
agents.add(entry.agent);
|
|
1018
|
-
}
|
|
1019
|
-
for (const filePath of filePaths) {
|
|
1020
|
-
try {
|
|
1021
|
-
const { hash: currentHash } = await this.getFileHash(filePath);
|
|
1022
|
-
if (!currentHash) {
|
|
1023
|
-
continue;
|
|
1024
|
-
}
|
|
1025
|
-
for (const agent of Array.from(agents)) {
|
|
1026
|
-
for (const [key, entry] of Object.entries(index.entries)) {
|
|
1027
|
-
if (entry.agent !== agent) continue;
|
|
1028
|
-
if (entry.fileHash !== currentHash) {
|
|
1029
|
-
const oldKey = this.generateCacheKey(filePath, agent, entry.fileHash);
|
|
1030
|
-
if (oldKey === key) {
|
|
1031
|
-
keysToRemove.push(key);
|
|
1032
|
-
removedCount++;
|
|
1033
|
-
}
|
|
1034
|
-
}
|
|
1035
|
-
}
|
|
1036
|
-
}
|
|
1037
|
-
} catch {
|
|
1038
|
-
continue;
|
|
1039
|
-
}
|
|
1040
|
-
}
|
|
1041
|
-
}
|
|
1042
|
-
const uniqueKeys = Array.from(new Set(keysToRemove));
|
|
1043
|
-
for (const key of uniqueKeys) {
|
|
1044
|
-
delete index.entries[key];
|
|
1045
|
-
}
|
|
1046
|
-
if (removedCount > 0) {
|
|
1047
|
-
await this.saveIndex(index);
|
|
1048
|
-
}
|
|
1049
|
-
return removedCount;
|
|
1050
|
-
} catch (error) {
|
|
1051
|
-
if (!isInteractiveMode()) {
|
|
1052
|
-
console.warn("Failed to cleanup stale cache entries:", error);
|
|
1053
|
-
}
|
|
1054
|
-
return 0;
|
|
1055
|
-
}
|
|
1056
|
-
}
|
|
1057
|
-
/**
|
|
1058
|
-
* Clear all cache
|
|
1059
|
-
*/
|
|
1060
|
-
async clear() {
|
|
1061
|
-
try {
|
|
1062
|
-
const emptyIndex = {
|
|
1063
|
-
version: this.VERSION,
|
|
1064
|
-
created: Date.now(),
|
|
1065
|
-
entries: {}
|
|
1066
|
-
};
|
|
1067
|
-
await this.saveIndex(emptyIndex);
|
|
1068
|
-
} catch (error) {
|
|
1069
|
-
if (!isInteractiveMode()) {
|
|
1070
|
-
console.warn("Failed to clear cache:", error);
|
|
1071
|
-
}
|
|
1072
|
-
}
|
|
1073
|
-
}
|
|
1074
|
-
};
|
|
1075
|
-
|
|
1076
|
-
// src/orchestrator/executor.ts
|
|
1077
|
-
var Executor = class {
|
|
1078
|
-
async executeAgents(agents, files, context, options) {
|
|
1079
|
-
const parallel = options?.parallel ?? true;
|
|
1080
|
-
const cacheEnabled = options?.cacheEnabled ?? true;
|
|
1081
|
-
const maxConcurrency = options?.maxConcurrency ?? calculateOptimalConcurrency();
|
|
1082
|
-
const useWorkerThreads = options?.useWorkerThreads ?? false;
|
|
1083
|
-
if (!isInteractiveMode()) {
|
|
1084
|
-
console.error(`Executing ${agents.length} scouts ${parallel ? "in parallel" : "sequentially"}...`);
|
|
1085
|
-
}
|
|
1086
|
-
if (parallel) {
|
|
1087
|
-
const cacheManager = cacheEnabled ? new CacheManager(context.workingDir) : null;
|
|
1088
|
-
const executor = new ParallelExecutor(cacheManager, maxConcurrency, {
|
|
1089
|
-
cacheEnabled,
|
|
1090
|
-
useWorkerThreads
|
|
1091
|
-
});
|
|
1092
|
-
if (options?.streaming) {
|
|
1093
|
-
executor.setStreaming(options.streaming);
|
|
1094
|
-
}
|
|
1095
|
-
const results = await executor.executeAgents(agents, files, {
|
|
1096
|
-
...context,
|
|
1097
|
-
config: { timeoutMs: options?.timeoutMs ?? 12e4 }
|
|
1098
|
-
});
|
|
1099
|
-
return agents.map((agent) => results.get(agent.name)).filter(Boolean);
|
|
1100
|
-
}
|
|
1101
|
-
const promises = agents.map(
|
|
1102
|
-
(agent) => this.executeAgentWithTimeout(agent, files, context, options?.timeoutMs ?? 3e4)
|
|
1103
|
-
);
|
|
1104
|
-
try {
|
|
1105
|
-
const results = await Promise.allSettled(promises);
|
|
1106
|
-
return results.map((result, index) => {
|
|
1107
|
-
if (result.status === "fulfilled") {
|
|
1108
|
-
if (!isInteractiveMode()) {
|
|
1109
|
-
console.error(`${agents[index].name} completed in ${result.value.executionTime}ms`);
|
|
1110
|
-
}
|
|
1111
|
-
return result.value;
|
|
1112
|
-
} else {
|
|
1113
|
-
if (!isInteractiveMode()) {
|
|
1114
|
-
console.error(`${agents[index].name} failed:`, result.reason);
|
|
1115
|
-
}
|
|
1116
|
-
return {
|
|
1117
|
-
agent: agents[index].name,
|
|
1118
|
-
issues: [],
|
|
1119
|
-
executionTime: 0,
|
|
1120
|
-
success: false,
|
|
1121
|
-
error: result.reason instanceof Error ? result.reason.message : String(result.reason)
|
|
1122
|
-
};
|
|
1123
|
-
}
|
|
1124
|
-
});
|
|
1125
|
-
} catch (error) {
|
|
1126
|
-
if (!isInteractiveMode()) {
|
|
1127
|
-
console.error("Executor error:", error);
|
|
1128
|
-
}
|
|
1129
|
-
return agents.map((agent) => ({
|
|
1130
|
-
agent: agent.name,
|
|
1131
|
-
issues: [],
|
|
1132
|
-
executionTime: 0,
|
|
1133
|
-
success: false,
|
|
1134
|
-
error: "Execution failed"
|
|
1135
|
-
}));
|
|
1136
|
-
}
|
|
1137
|
-
}
|
|
1138
|
-
async executeAgentWithTimeout(agent, files, context, timeoutMs = 3e4) {
|
|
1139
|
-
return new Promise(async (resolve2, reject) => {
|
|
1140
|
-
const timeout = setTimeout(() => {
|
|
1141
|
-
reject(new Error(`Agent ${agent.name} timed out after ${timeoutMs}ms`));
|
|
1142
|
-
}, timeoutMs);
|
|
1143
|
-
try {
|
|
1144
|
-
const result = await agent.scan(files, context);
|
|
1145
|
-
clearTimeout(timeout);
|
|
1146
|
-
resolve2(result);
|
|
1147
|
-
} catch (error) {
|
|
1148
|
-
clearTimeout(timeout);
|
|
1149
|
-
reject(error);
|
|
1150
|
-
}
|
|
1151
|
-
});
|
|
1152
|
-
}
|
|
1153
|
-
};
|
|
1154
|
-
|
|
1155
|
-
// src/agent/reason.ts
|
|
1156
|
-
function buildDefaultCodeContext() {
|
|
1157
|
-
return {
|
|
1158
|
-
changeType: "general",
|
|
1159
|
-
isNewFeature: false,
|
|
1160
|
-
touchesUserData: false,
|
|
1161
|
-
touchesAuth: false,
|
|
1162
|
-
touchesPayments: false,
|
|
1163
|
-
touchesDatabase: false,
|
|
1164
|
-
touchesAPI: false,
|
|
1165
|
-
touchesUI: false,
|
|
1166
|
-
touchesHealthData: false,
|
|
1167
|
-
touchesSecurityConfig: false,
|
|
1168
|
-
linesChanged: 50,
|
|
1169
|
-
filePatterns: [],
|
|
1170
|
-
framework: "unknown",
|
|
1171
|
-
language: "typescript",
|
|
1172
|
-
touchesCrypto: false,
|
|
1173
|
-
touchesFileSystem: false,
|
|
1174
|
-
touchesThirdPartyAPI: false,
|
|
1175
|
-
touchesLogging: false,
|
|
1176
|
-
touchesErrorHandling: false,
|
|
1177
|
-
hasTests: false,
|
|
1178
|
-
complexity: "medium",
|
|
1179
|
-
patterns: {
|
|
1180
|
-
hasAsyncCode: false,
|
|
1181
|
-
hasFormHandling: false,
|
|
1182
|
-
hasFileUploads: false,
|
|
1183
|
-
hasEmailHandling: false,
|
|
1184
|
-
hasRateLimiting: false,
|
|
1185
|
-
hasWebSockets: false,
|
|
1186
|
-
hasCaching: false,
|
|
1187
|
-
hasQueue: false
|
|
1188
|
-
}
|
|
1189
|
-
};
|
|
1190
|
-
}
|
|
1191
|
-
function buildExplanation(result) {
|
|
1192
|
-
const top = [...result.files].sort((a, b) => b.score - a.score)[0];
|
|
1193
|
-
if (!top) return `Risk level ${result.overall} (no files provided)`;
|
|
1194
|
-
return `Risk level ${result.overall} because ${top.file} ${top.reasons.join(", ")}`;
|
|
1195
|
-
}
|
|
1196
|
-
function buildRecommendation(risk, hasAntiPattern) {
|
|
1197
|
-
if (hasAntiPattern || risk === "critical") {
|
|
1198
|
-
return "Block until reviewed: address anti-patterns and rerun targeted tests.";
|
|
1199
|
-
}
|
|
1200
|
-
if (risk === "high") {
|
|
1201
|
-
return "Require senior review and run full test suite before merge.";
|
|
1202
|
-
}
|
|
1203
|
-
if (risk === "medium") {
|
|
1204
|
-
return "Proceed with caution; run impacted tests and sanity checks.";
|
|
1205
|
-
}
|
|
1206
|
-
return "Low risk; proceed but keep an eye on recent changes.";
|
|
1207
|
-
}
|
|
1208
|
-
async function reasonAboutChanges(projectPath, files, options = {}) {
|
|
1209
|
-
const graph = new ContextGraph(projectPath);
|
|
1210
|
-
const { matches, byFile } = await matchPatternsForFiles(graph, files);
|
|
1211
|
-
const changeRisk = await scoreChangeSet(graph, files, byFile);
|
|
1212
|
-
const incidents = [];
|
|
1213
|
-
for (const file of files) {
|
|
1214
|
-
const fileIncidents = await graph.getIncidentsForFile(file);
|
|
1215
|
-
incidents.push(...fileIncidents);
|
|
1216
|
-
}
|
|
1217
|
-
const hasAntiPattern = matches.some((m) => m.isAntiPattern);
|
|
1218
|
-
const riskLevel = hasAntiPattern ? "critical" : changeRisk.overall;
|
|
1219
|
-
const shouldBlock = hasAntiPattern || riskLevel === "critical" || riskLevel === "high";
|
|
1220
|
-
const reasoning = {
|
|
1221
|
-
riskLevel,
|
|
1222
|
-
shouldBlock,
|
|
1223
|
-
explanation: buildExplanation(changeRisk),
|
|
1224
|
-
relevantIncidents: incidents,
|
|
1225
|
-
matchedPatterns: matches.map((m) => m.pattern),
|
|
1226
|
-
recommendation: buildRecommendation(riskLevel, hasAntiPattern),
|
|
1227
|
-
files: changeRisk.files
|
|
1228
|
-
};
|
|
1229
|
-
if (options.runAgents) {
|
|
1230
|
-
const codeContext = options.codeContext ?? buildDefaultCodeContext();
|
|
1231
|
-
const triager = new Triager();
|
|
1232
|
-
const agents = await triager.triage(codeContext);
|
|
1233
|
-
if (agents.length > 0) {
|
|
1234
|
-
const executor = new Executor();
|
|
1235
|
-
const scanContext = {
|
|
1236
|
-
workingDir: projectPath,
|
|
1237
|
-
...options.scanContext
|
|
1238
|
-
};
|
|
1239
|
-
if (codeContext.framework) scanContext.framework = codeContext.framework;
|
|
1240
|
-
if (codeContext.language) scanContext.language = codeContext.language;
|
|
1241
|
-
reasoning.agentResults = await executor.executeAgents(agents, files, scanContext, {
|
|
1242
|
-
parallel: true,
|
|
1243
|
-
timeoutMs: options.scanContext?.config?.timeoutMs ?? 6e4
|
|
1244
|
-
});
|
|
1245
|
-
} else {
|
|
1246
|
-
reasoning.agentResults = [];
|
|
1247
|
-
}
|
|
1248
|
-
}
|
|
1249
|
-
return reasoning;
|
|
1250
|
-
}
|
|
1251
|
-
async function reasonAboutChangesHumanReadable(projectPath, files, options = {}) {
|
|
1252
|
-
const reasoning = await reasonAboutChanges(projectPath, files, options);
|
|
1253
|
-
const topFile = [...reasoning.files].sort((a, b) => b.score - a.score)[0];
|
|
1254
|
-
const topReasons = topFile?.reasons?.slice(0, 3) ?? [];
|
|
1255
|
-
const summaryParts = [];
|
|
1256
|
-
summaryParts.push(`Risk ${reasoning.riskLevel.toUpperCase()} (${reasoning.shouldBlock ? "block" : "allow"})`);
|
|
1257
|
-
if (topFile) summaryParts.push(`Most sensitive file: ${topFile.file}`);
|
|
1258
|
-
if (topReasons.length) summaryParts.push(`Top reasons: ${topReasons.join("; ")}`);
|
|
1259
|
-
const patterns = reasoning.matchedPatterns;
|
|
1260
|
-
const incidents = reasoning.relevantIncidents;
|
|
1261
|
-
const firstPattern = patterns[0];
|
|
1262
|
-
const patternLine = firstPattern ? `Pattern match: ${firstPattern.data.description}` : "No known risky patterns matched.";
|
|
1263
|
-
const incidentLine = incidents[0] ? `Relevant prior incident: ${incidents[0].data.description} (${incidents[0].data.severity})` : "No prior incidents linked to these files.";
|
|
1264
|
-
const howBad = [
|
|
1265
|
-
`Overall risk is ${reasoning.riskLevel}.`,
|
|
1266
|
-
reasoning.shouldBlock ? "This is likely to break things or violate policy if pushed without review." : "This is unlikely to cause major issues if you follow normal review/testing.",
|
|
1267
|
-
incidentLine
|
|
1268
|
-
].join(" ");
|
|
1269
|
-
const whatToDo = [
|
|
1270
|
-
reasoning.recommendation,
|
|
1271
|
-
patterns.length ? `Double-check the pattern(s): ${patterns.slice(0, 2).map((p) => p.data.description).join(" | ")}.` : ""
|
|
1272
|
-
].filter(Boolean).join(" ");
|
|
1273
|
-
return {
|
|
1274
|
-
summary: summaryParts.join(" \u2014 "),
|
|
1275
|
-
whatIFound: [patternLine, `Incidents considered: ${incidents.length}. Patterns considered: ${patterns.length}.`].join(" "),
|
|
1276
|
-
howBad,
|
|
1277
|
-
whatToDo,
|
|
1278
|
-
original: reasoning
|
|
1279
|
-
};
|
|
1280
|
-
}
|
|
1281
|
-
|
|
1282
|
-
export {
|
|
1283
|
-
formatFriendlyError,
|
|
1284
|
-
LearningEngine,
|
|
1285
|
-
perceiveCurrentChanges,
|
|
1286
|
-
reasonAboutChangesHumanReadable,
|
|
1287
|
-
isTrieInitialized
|
|
1288
|
-
};
|
|
1289
|
-
//# sourceMappingURL=chunk-YZ6Y2H3P.js.map
|