brain-cache 0.4.2 → 3.0.0
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/.claude/skills/brain-cache/SKILL.md +52 -0
- package/README.md +49 -100
- package/dist/{askCodebase-BZIXS3EV.js → askCodebase-EE32B7BP.js} +9 -9
- package/dist/buildContext-GWVDAYH6.js +14 -0
- package/dist/{chunk-Y7BU7IYX.js → chunk-3HQRTLBH.js} +70 -6
- package/dist/{chunk-ZKVZTDND.js → chunk-4IOR54GU.js} +2 -1
- package/dist/chunk-6C2OYMKD.js +16 -0
- package/dist/{workflows-KYCBR7TC.js → chunk-CY34XQ2O.js} +115 -24
- package/dist/chunk-DFFMV3RR.js +171 -0
- package/dist/{chunk-PJQNHMQH.js → chunk-DPH5X5HL.js} +1 -1
- package/dist/{chunk-FQL4HV4R.js → chunk-HRJ3OT6Q.js} +1 -1
- package/dist/chunk-KMRPAVMM.js +967 -0
- package/dist/{chunk-KQZSBRRH.js → chunk-RKPICQU7.js} +1 -1
- package/dist/{chunk-EEC7KYPY.js → chunk-TXLCXXKY.js} +7 -8
- package/dist/claude-md-section-K47HUTE4.js +38 -0
- package/dist/cli.js +13 -9
- package/dist/{doctor-KRNLXE4R.js → doctor-FCET2MNJ.js} +3 -3
- package/dist/{embedder-ZLHAZZUI.js → embedder-HVEXDJAU.js} +2 -2
- package/dist/{init-QNN5H3DR.js → init-2E4JMZZC.js} +71 -6
- package/dist/mcp.js +1450 -130
- package/dist/{search-O4CFAH45.js → search-7ISZ7EXI.js} +16 -15
- package/dist/{status-7MT4IROA.js → status-VKTSG2SN.js} +3 -3
- package/dist/statusline-script-NFUDFOWK.js +95 -0
- package/dist/watch-QPMAB62P.js +128 -0
- package/dist/workflows-MWEY7OAI.js +14 -0
- package/package.json +5 -1
- package/dist/buildContext-APWOPZMJ.js +0 -14
- package/dist/chunk-JZQWPHAQ.js +0 -103
- package/dist/chunk-SBSMKI4B.js +0 -109
- package/dist/chunk-ZGYLHFHJ.js +0 -17
- package/dist/claude-md-section-6ZJ3TMO4.js +0 -34
|
@@ -0,0 +1,967 @@
|
|
|
1
|
+
#!/usr/bin/env node
|
|
2
|
+
import {
|
|
3
|
+
RETRIEVAL_STRATEGIES,
|
|
4
|
+
classifyRetrievalMode,
|
|
5
|
+
deduplicateChunks,
|
|
6
|
+
searchChunks
|
|
7
|
+
} from "./chunk-DFFMV3RR.js";
|
|
8
|
+
import {
|
|
9
|
+
assembleContext,
|
|
10
|
+
countChunkTokens,
|
|
11
|
+
formatChunk
|
|
12
|
+
} from "./chunk-4IOR54GU.js";
|
|
13
|
+
import {
|
|
14
|
+
openDatabase,
|
|
15
|
+
queryEdgesFrom,
|
|
16
|
+
readIndexState
|
|
17
|
+
} from "./chunk-3HQRTLBH.js";
|
|
18
|
+
import {
|
|
19
|
+
embedBatchWithRetry
|
|
20
|
+
} from "./chunk-RKPICQU7.js";
|
|
21
|
+
import {
|
|
22
|
+
isOllamaRunning
|
|
23
|
+
} from "./chunk-HRJ3OT6Q.js";
|
|
24
|
+
import {
|
|
25
|
+
readProfile
|
|
26
|
+
} from "./chunk-DPH5X5HL.js";
|
|
27
|
+
import {
|
|
28
|
+
COMPRESSION_HARD_LIMIT,
|
|
29
|
+
COMPRESSION_TOKEN_THRESHOLD,
|
|
30
|
+
CONFIG_PATH,
|
|
31
|
+
DEFAULT_TOKEN_BUDGET,
|
|
32
|
+
HIGH_RELEVANCE_SIMILARITY_THRESHOLD,
|
|
33
|
+
TOOL_CALL_OVERHEAD_TOKENS,
|
|
34
|
+
childLogger
|
|
35
|
+
} from "./chunk-TXLCXXKY.js";
|
|
36
|
+
|
|
37
|
+
// src/workflows/buildContext.ts
|
|
38
|
+
import { readFile as readFile4 } from "fs/promises";
|
|
39
|
+
import { resolve as resolve3 } from "path";
|
|
40
|
+
|
|
41
|
+
// src/services/cohesion.ts
|
|
42
|
+
import { dirname, relative, basename } from "path";
|
|
43
|
+
// Module-scoped logger for the cohesion service (child of the shared app logger).
var log = childLogger("cohesion");
|
|
44
|
+
/**
 * Buckets retrieved chunks by their source file, then orders each
 * bucket by starting line so a file's chunks read top-to-bottom.
 * @param {Array} chunks - retrieval results carrying filePath/startLine
 * @returns {Map<string, Array>} filePath -> chunks sorted by startLine
 */
function groupChunksByFile(chunks) {
  const byFile = new Map();
  for (const chunk of chunks) {
    const bucket = byFile.get(chunk.filePath);
    if (bucket) {
      bucket.push(chunk);
    } else {
      byFile.set(chunk.filePath, [chunk]);
    }
  }
  for (const bucket of byFile.values()) {
    bucket.sort((lhs, rhs) => lhs.startLine - rhs.startLine);
  }
  return byFile;
}
|
|
59
|
+
/**
 * For each retrieved "method" chunk, looks up the enclosing class chunk
 * (same file, name === method's scope, chunk_type === 'class') and inserts
 * it immediately before the method in the result list, so the model sees
 * methods in their class context.
 *
 * Parents are only added while they fit in the token budget; budget is
 * accumulated across all insertions in this call.
 *
 * @param {Array} chunks - retrieval results (may contain "method" chunks)
 * @param chunksTable - table with .query().where(...).toArray()
 *   (presumably a LanceDB table — single quotes are doubled to keep the
 *   SQL-ish filter string well-formed)
 * @param {{currentTokens: number, maxTokens: number}} opts - running token
 *   count of the assembled context and the hard budget
 * @returns {Promise<Array>} new array; input `chunks` is not mutated
 */
async function enrichWithParentClass(chunks, chunksTable, opts) {
  // Track ids already present so the same parent is never inserted twice.
  const existingIds = new Set(chunks.map((c) => c.id));
  const result = [...chunks];
  let { currentTokens } = opts;
  // Collect insertions first, then splice, so iteration order of `chunks`
  // (not of `result` being mutated) drives the budget accounting.
  const parentsToInsert = [];
  for (const chunk of chunks) {
    if (chunk.chunkType !== "method" || chunk.scope === null) {
      continue;
    }
    // Escape single quotes for the string-built filter expression.
    const escapedScope = chunk.scope.replace(/'/g, "''");
    const escapedFilePath = chunk.filePath.replace(/'/g, "''");
    log.debug({ scope: chunk.scope, filePath: chunk.filePath }, "Looking for parent class");
    const rows = await chunksTable.query().where(`name = '${escapedScope}' AND file_path = '${escapedFilePath}' AND chunk_type = 'class'`).toArray();
    if (rows.length === 0) {
      continue;
    }
    const row = rows[0];
    if (existingIds.has(row.id)) {
      continue;
    }
    // Re-shape the snake_case DB row into the camelCase chunk shape;
    // similarity 1 marks it as a structurally-required (not searched) chunk.
    const parentChunk = {
      id: row.id,
      filePath: row.file_path,
      chunkType: row.chunk_type,
      scope: row.scope,
      name: row.name,
      content: row.content,
      startLine: row.start_line,
      endLine: row.end_line,
      similarity: 1
    };
    // Cost is measured on the formatted representation actually sent out.
    const tokenCost = countChunkTokens(formatChunk(parentChunk));
    if (currentTokens + tokenCost > opts.maxTokens) {
      log.debug({ parentId: row.id, tokenCost, currentTokens, maxTokens: opts.maxTokens }, "Skipping parent class \u2014 token budget exceeded");
      continue;
    }
    existingIds.add(row.id);
    currentTokens += tokenCost;
    parentsToInsert.push({ parent: parentChunk, beforeId: chunk.id });
  }
  // Insert each parent directly before its first method occurrence.
  for (const { parent, beforeId } of parentsToInsert) {
    const idx = result.findIndex((c) => c.id === beforeId);
    if (idx !== -1) {
      result.splice(idx, 0, parent);
    }
  }
  return result;
}
|
|
107
|
+
/**
 * Renders file-grouped chunks as one section per file: a comment header
 * naming the file, the formatted chunks separated by blank lines, and
 * "---" dividers between files.
 * @param {Map<string, Array>} groups - filePath -> chunks
 * @returns {string} assembled context text
 */
function formatGroupedContext(groups) {
  const sections = [];
  for (const [filePath, chunks] of groups) {
    const body = chunks.map(formatChunk).join("\n\n");
    sections.push(`// \u2500\u2500 ${filePath} \u2500\u2500\n${body}`);
  }
  return sections.join("\n\n---\n\n");
}
|
|
117
|
+
/**
 * Pulls the first descriptive line out of a chunk's leading JSDoc block,
 * skipping compression markers. Returns null when no JSDoc is found or
 * the JSDoc carries no plain description (only tags/delimiters).
 * @param {string} content - chunk source text
 * @returns {string|null} first human-readable JSDoc line
 */
function extractBehavioralSummary(content) {
  const collected = [];
  let insideDoc = false;
  for (const rawLine of content.split("\n")) {
    const stripped = rawLine.trim();
    const isMarker =
      stripped.startsWith("// [compressed]") ||
      stripped.startsWith("// Signature:") ||
      stripped.startsWith("// [body stripped]");
    if (isMarker) continue;
    if (stripped.startsWith("/**")) {
      insideDoc = true;
      collected.push(rawLine);
      // Single-line JSDoc: opening and closing on the same line.
      if (stripped.endsWith("*/")) break;
      continue;
    }
    if (!insideDoc) continue;
    collected.push(rawLine);
    if (stripped.endsWith("*/")) break;
  }
  if (collected.length === 0) return null;
  // Strip comment scaffolding, then keep only real description lines
  // (non-empty, not @tags, not a leftover "/" from the "*/" delimiter).
  const cleaned = collected
    .map((l) => l.replace(/^\s*\/?\*+\s?/, "").replace(/\s*\*\/.*$/, "").trim())
    .filter((l) => l.length > 0 && !l.startsWith("@") && l !== "/");
  return cleaned[0] ?? null;
}
|
|
140
|
+
/**
 * Buckets chunks by the directory (relative to rootDir) containing their
 * source file; "." groups root-level files. Each bucket is sorted by
 * starting line.
 * @param {Array} chunks - retrieval results carrying filePath/startLine
 * @param {string} rootDir - project root used to relativize paths
 * @returns {Map<string, Array>} module dir -> chunks sorted by startLine
 */
function groupChunksByModule(chunks, rootDir) {
  const byModule = new Map();
  for (const chunk of chunks) {
    const moduleKey = dirname(relative(rootDir, chunk.filePath)) || ".";
    if (!byModule.has(moduleKey)) byModule.set(moduleKey, []);
    byModule.get(moduleKey).push(chunk);
  }
  for (const bucket of byModule.values()) {
    bucket.sort((lhs, rhs) => lhs.startLine - rhs.startLine);
  }
  return byModule;
}
|
|
154
|
+
/**
 * Collects the module "stems" (basenames, minus a .js extension) of all
 * relative imports appearing in the given chunks — a cheap proxy for how
 * files in a module are wired together. Non-relative imports are ignored.
 * @param {Array} chunks - chunks whose content is scanned
 * @returns {string[]} sorted, de-duplicated stems
 */
function extractWiringAnnotations(chunks) {
  const relativeImport = /from\s+['"](\.[^'"]+)['"]/g;
  const stems = new Set();
  for (const { content } of chunks) {
    for (const [, importPath] of content.matchAll(relativeImport)) {
      const stem = importPath.replace(/\.js$/, "").split("/").pop();
      // Ignore empty/1-char stems (e.g. "." from a bare "./" path).
      if (stem !== undefined && stem.length > 1) stems.add(stem);
    }
  }
  return [...stems].sort();
}
|
|
168
|
+
/**
 * Renders one section per module: a "### module:" header, then per file
 * a bolded filename, its behavioral summary (when one can be extracted
 * from leading JSDoc), and the file's relative-import wiring.
 * @param {Map<string, Array>} groups - module dir -> chunks
 * @returns {string} markdown-ish narrative text
 */
function formatModuleNarratives(groups) {
  const sections = [];
  for (const [moduleKey, chunks] of groups) {
    const out = [`### module: ${moduleKey}`];
    // Re-bucket the module's chunks by file, preserving encounter order.
    const chunksByFile = new Map();
    for (const chunk of chunks) {
      if (!chunksByFile.has(chunk.filePath)) chunksByFile.set(chunk.filePath, []);
      chunksByFile.get(chunk.filePath).push(chunk);
    }
    for (const [filePath, fileChunks] of chunksByFile) {
      const fileName = basename(filePath);
      // Summary is taken from the file's first chunk only.
      const summary = extractBehavioralSummary(fileChunks[0].content);
      out.push(summary ? `\n**${fileName}** -- ${summary}` : `\n**${fileName}**`);
      const wiring = extractWiringAnnotations(fileChunks);
      if (wiring.length > 0) {
        out.push(`  imports: ${wiring.join(", ")}`);
      }
    }
    sections.push(out.join("\n"));
  }
  return sections.join("\n\n");
}
|
|
198
|
+
|
|
199
|
+
// src/services/compression.ts
|
|
200
|
+
/**
 * Shrinks an oversized chunk to a "manifest": a [compressed] header, any
 * leading JSDoc, the first code line as the signature, and a
 * [body stripped] marker.
 *
 * Kept as-is (no compression) when the chunk is small enough, or when it
 * is highly relevant and still under the hard limit.
 *
 * @param chunk - chunk with content/similarity/name/startLine/endLine
 * @returns the same chunk object, or a shallow copy with replaced content
 */
function compressChunk(chunk) {
  const tokens = countChunkTokens(chunk.content);
  if (tokens <= COMPRESSION_TOKEN_THRESHOLD) return chunk;
  const isHighRelevance = chunk.similarity >= HIGH_RELEVANCE_SIMILARITY_THRESHOLD;
  // High-relevance chunks get a larger allowance before compression kicks in.
  if (tokens <= COMPRESSION_HARD_LIMIT && isHighRelevance) {
    return chunk;
  }
  const lines = chunk.content.split("\n");
  const jsDocLines = [];
  let signatureLine = "";
  let inJsDoc = false;
  let jsDocDone = false;
  // Single pass: capture an optional leading JSDoc block, then the first
  // non-empty line after it as the signature, then stop.
  for (const line of lines) {
    const trimmed = line.trim();
    if (!jsDocDone) {
      if (trimmed.startsWith("/**")) {
        inJsDoc = true;
        jsDocLines.push(line);
        // Single-line JSDoc opens and closes on the same line.
        if (trimmed.endsWith("*/")) {
          inJsDoc = false;
          jsDocDone = true;
        }
        continue;
      }
      if (inJsDoc) {
        jsDocLines.push(line);
        if (trimmed.endsWith("*/")) {
          inJsDoc = false;
          jsDocDone = true;
        }
        continue;
      }
    }
    if (trimmed.length > 0 && signatureLine === "") {
      signatureLine = line;
      break;
    }
  }
  // Fallback: no JSDoc and no signature found above (e.g. leading blanks
  // only) — take the first non-blank line anywhere in the chunk.
  if (signatureLine === "" && jsDocLines.length === 0) {
    signatureLine = lines.find((l) => l.trim().length > 0) ?? "";
  }
  const manifestParts = [
    `// [compressed] ${chunk.name ?? "unknown"} (lines ${chunk.startLine}-${chunk.endLine})`
  ];
  if (jsDocLines.length > 0) {
    manifestParts.push(...jsDocLines);
  }
  manifestParts.push(`// Signature: ${signatureLine}`);
  manifestParts.push("// [body stripped]");
  return { ...chunk, content: manifestParts.join("\n") };
}
|
|
251
|
+
|
|
252
|
+
// src/services/configLoader.ts
|
|
253
|
+
import { readFile } from "fs/promises";
|
|
254
|
+
// Module-scoped logger for the config loader.
var log2 = childLogger("configLoader");
|
|
255
|
+
/**
 * Loads the user's JSON config from CONFIG_PATH.
 * Best-effort: a missing or unparsable file logs at debug level and
 * yields an empty object (defaults apply).
 * @returns {Promise<object>} parsed config, or {} on any read/parse error
 */
async function loadUserConfig() {
  let parsed;
  try {
    parsed = JSON.parse(await readFile(CONFIG_PATH, "utf-8"));
  } catch {
    log2.debug({ configPath: CONFIG_PATH }, "Config file not found or invalid \u2014 using defaults");
    parsed = {};
  }
  return parsed;
}
|
|
264
|
+
/**
 * Builds the effective retrieval strategy for a mode by layering, in
 * increasing precedence: built-in defaults, the user's per-mode config,
 * and any per-call tool override.
 * @param {string} mode - retrieval mode key (e.g. "trace", "explore")
 * @param {object} userConfig - parsed user config (may lack .retrieval)
 * @param {object|undefined} toolOverride - per-call overrides, if any
 * @returns {object} merged strategy
 */
function resolveStrategy(mode, userConfig, toolOverride) {
  const layers = [
    RETRIEVAL_STRATEGIES[mode],
    userConfig.retrieval?.[mode] ?? {},
    toolOverride
  ];
  return Object.assign({}, ...layers);
}
|
|
269
|
+
|
|
270
|
+
// src/workflows/traceFlow.ts
|
|
271
|
+
import { resolve } from "path";
|
|
272
|
+
import { readFile as readFile2 } from "fs/promises";
|
|
273
|
+
|
|
274
|
+
// src/services/flowTracer.ts
|
|
275
|
+
// Module-scoped logger for the flow tracer.
var log3 = childLogger("flowTracer");
|
|
276
|
+
/**
 * Finds the chunk id for a symbol name, preferring a definition in the
 * same file as the call site when several files define the symbol.
 * Single quotes are doubled to keep the SQL-ish filter well-formed.
 * @param chunksTable - table exposing .query().where(...).toArray()
 * @param {string} toSymbol - symbol name to resolve
 * @param {string} fromFile - path of the calling file (tie-breaker)
 * @returns {Promise<string|null>} chunk id, or null when unknown
 */
async function resolveSymbolToChunkId(chunksTable, toSymbol, fromFile) {
  const escaped = toSymbol.replace(/'/g, "''");
  const rows = await chunksTable.query().where(`name = '${escaped}'`).toArray();
  if (rows.length === 0) return null;
  const preferred = rows.find((r) => r.file_path === fromFile) ?? rows[0];
  return preferred.id;
}
|
|
285
|
+
/**
 * Breadth-first walk of the call graph starting at a seed chunk.
 * Each visited chunk becomes a "hop" carrying its source and the
 * de-duplicated list of symbols it calls; outgoing "call" edges are
 * followed up to maxHops levels deep.
 *
 * @param edgesTable - edges table consumed by queryEdgesFrom
 * @param chunksTable - table exposing .query().where(...).toArray()
 * @param {string} seedChunkId - chunk id to start from
 * @param {{maxHops?: number}} [opts] - depth limit (default 3)
 * @returns {Promise<Array>} hops in BFS order
 */
async function traceFlow(edgesTable, chunksTable, seedChunkId, opts) {
  const maxHops = opts?.maxHops ?? 3;
  // Guards against cycles in the call graph.
  const visited = /* @__PURE__ */ new Set();
  const queue = [{ chunkId: seedChunkId, depth: 0 }];
  const hops = [];
  log3.debug({ seedChunkId, maxHops }, "Starting BFS flow trace");
  while (queue.length > 0) {
    const { chunkId, depth } = queue.shift();
    if (visited.has(chunkId)) {
      continue;
    }
    visited.add(chunkId);
    // Escape single quotes for the string-built filter expression.
    const escapedId = chunkId.replace(/'/g, "''");
    const chunkRows = await chunksTable.query().where(`id = '${escapedId}'`).toArray();
    if (chunkRows.length === 0) {
      log3.debug({ chunkId }, "Chunk not found \u2014 skipping hop");
      continue;
    }
    const row = chunkRows[0];
    const edges = await queryEdgesFrom(edgesTable, chunkId);
    // Only "call" edges drive the trace; other edge types are ignored.
    const callEdges = edges.filter((e) => e.edge_type === "call");
    hops.push({
      chunkId,
      filePath: row.file_path,
      name: row.name,
      startLine: row.start_line,
      endLine: row.end_line,
      content: row.content,
      hopDepth: depth,
      // De-duplicated callee names for display / further filtering.
      callsFound: [...new Set(callEdges.map((e) => e.to_symbol))]
    });
    // Record the hop, but do not expand past the depth limit.
    if (depth >= maxHops) {
      continue;
    }
    for (const edge of callEdges) {
      const nextChunkId = await resolveSymbolToChunkId(chunksTable, edge.to_symbol, edge.from_file);
      if (nextChunkId !== null && !visited.has(nextChunkId)) {
        queue.push({ chunkId: nextChunkId, depth: depth + 1 });
      }
    }
  }
  log3.debug({ seedChunkId, hopsFound: hops.length, maxDepthReached: hops.length > 0 ? Math.max(...hops.map((h) => h.hopDepth)) : 0 }, "BFS flow trace complete");
  return hops;
}
|
|
329
|
+
|
|
330
|
+
// src/workflows/traceFlow.ts
|
|
331
|
+
// Marker that compressChunk injects when a chunk body has been elided.
var BODY_STRIPPED_MARKER = "// [body stripped]";
// Path fragments identifying test files (excluded from trace output).
var TEST_FILE_PATTERNS = [".test.", ".spec.", "/__tests__/", "/tests/"];
/**
 * True when the path matches any known test-file pattern.
 * @param {string} filePath
 * @returns {boolean}
 */
function isTestFile(filePath) {
  for (const pattern of TEST_FILE_PATTERNS) {
    if (filePath.includes(pattern)) return true;
  }
  return false;
}
|
|
336
|
+
// Member names of common JavaScript built-ins. Call edges to these are
// noise (Array/Map/Promise/Object/String methods, plus .length) and get
// filtered out of trace output.
var STDLIB_SYMBOLS = new Set((
  // Array
  "map filter reduce forEach find findIndex some every includes push pop " +
  "shift unshift splice slice concat join sort reverse flat flatMap fill " +
  "indexOf lastIndexOf " +
  // Set/Map
  "keys values entries has get set delete add clear " +
  // Promise
  "resolve reject then catch finally all race allSettled " +
  // Object
  "toString valueOf hasOwnProperty assign freeze create " +
  // String
  "split replace replaceAll match matchAll trim trimStart trimEnd " +
  "startsWith endsWith padStart padEnd repeat charAt charCodeAt substring " +
  "toLowerCase toUpperCase " +
  // Property-like
  "length"
).split(" "));
// Seeds below this similarity trigger a confidence warning in trace output.
var LOW_CONFIDENCE_THRESHOLD = 0.5;
|
|
411
|
+
/**
 * Heuristic: does the query ask about the CLI layer? Used to bias seed
 * selection toward files under /cli/.
 *
 * Fix: the previous checks (" cli " infix, "cli " prefix, "command")
 * missed queries that END with "cli" — e.g. "trace the cli" — and the
 * bare query "cli". Both now match; all previously-true inputs still match.
 * @param {string} query - natural-language query
 * @returns {boolean}
 */
function isCLIQuery(query) {
  const lower = query.toLowerCase();
  return lower === "cli" || lower.includes(" cli ") || lower.startsWith("cli ") || lower.endsWith(" cli") || lower.includes("command");
}
|
|
415
|
+
/**
 * True when the path contains a "/cli/" directory segment.
 * @param {string} filePath
 * @returns {boolean}
 */
function isCLIFile(filePath) {
  return filePath.indexOf("/cli/") !== -1;
}
|
|
418
|
+
/**
 * Estimates the token savings of sending traced hops instead of whole
 * files: compares the tokens actually sent with what reading every
 * involved file (plus per-tool-call overhead) would have cost.
 *
 * Files whose every hop was body-stripped are excluded from the
 * "without brain-cache" estimate, since their full content was never
 * needed. File reads are best-effort: unreadable files contribute 0.
 *
 * @param {Array} hops - formatted hops (filePath/content)
 * @returns {Promise<{tokensSent: number, estimatedWithoutBraincache: number,
 *   reductionPct: number, filesInContext: number}>}
 */
async function computeHopSavings(hops) {
  if (hops.length === 0) {
    return { tokensSent: 0, estimatedWithoutBraincache: 0, reductionPct: 0, filesInContext: 0 };
  }
  const tokensSent = hops.reduce((sum, h) => sum + countChunkTokens(h.content), 0);
  const uniqueFiles = [...new Set(hops.map((h) => h.filePath))];
  const filesInContext = uniqueFiles.length;
  // Files where at least one hop kept its full body; only these count
  // toward the hypothetical full-file read cost.
  const filesWithUncompressedContent = new Set(
    hops.filter((h) => !h.content.includes(BODY_STRIPPED_MARKER)).map((h) => h.filePath)
  );
  let fileContentTokens = 0;
  for (const filePath of uniqueFiles) {
    if (!filesWithUncompressedContent.has(filePath)) continue;
    try {
      const fileContent = await readFile2(filePath, "utf-8");
      fileContentTokens += countChunkTokens(fileContent);
    } catch {
      // Deliberate best-effort: a missing/unreadable file just adds no tokens.
    }
  }
  // One tool call for the search itself plus one per file that would
  // otherwise have been read.
  const toolCalls = 1 + filesInContext;
  const toolCallOverhead = toolCalls * TOOL_CALL_OVERHEAD_TOKENS;
  const estimatedWithoutBraincache = fileContentTokens + toolCallOverhead;
  // Clamped to >= 0: compression can, in theory, send more than the estimate.
  const reductionPct = estimatedWithoutBraincache > 0 ? Math.max(0, Math.round((1 - tokensSent / estimatedWithoutBraincache) * 100)) : 0;
  return { tokensSent, estimatedWithoutBraincache, reductionPct, filesInContext };
}
|
|
443
|
+
/**
 * Guesses which identifier a natural-language query is about.
 * Preference order: the last camelCase token, then the last
 * non-stopword token of length >= 3, otherwise null.
 * @param {string} query - natural-language query
 * @returns {string|null} best-guess symbol name
 */
function extractSymbolCandidate(query) {
  // Identifier-shaped tokens, 3+ chars.
  const tokens = query.match(/\b[a-zA-Z_][a-zA-Z0-9_]{2,}\b/g);
  if (!tokens) return null;
  const stopWords = new Set(
    ("how does work the what where trace flow call path find show into " +
     "from this that with when which about explain describe").split(" ")
  );
  // camelCase tokens are near-certain symbol names; prefer the last one.
  const camelTokens = tokens.filter((t) => /[a-z][A-Z]/.test(t));
  if (camelTokens.length > 0) return camelTokens[camelTokens.length - 1];
  const meaningful = tokens.filter((t) => !stopWords.has(t.toLowerCase()));
  return meaningful.length > 0 ? meaningful[meaningful.length - 1] : null;
}
|
|
475
|
+
// TRACE-02: drop built-in member names from each hop's call list so the
// trace only surfaces project-level calls; compress oversized hop bodies.
function formatTraceHop(hop) {
  const asChunk = {
    id: hop.chunkId,
    filePath: hop.filePath,
    chunkType: "function",
    scope: null,
    name: hop.name,
    content: hop.content,
    startLine: hop.startLine,
    endLine: hop.endLine,
    similarity: 1
  };
  const compressed = compressChunk(asChunk);
  return {
    filePath: hop.filePath,
    name: hop.name,
    startLine: hop.startLine,
    content: compressed.content,
    callsFound: hop.callsFound.filter((s) => !STDLIB_SYMBOLS.has(s)),
    hopDepth: hop.hopDepth
  };
}

// BFS-traces from a seed chunk, drops hops that live in test files, and
// formats/compresses the remainder for output. Shared by both the
// exact-name and semantic-seed paths of runTraceFlow.
async function traceAndFormatHops(edgesTable, chunksTable, seedChunkId, maxHops) {
  const flowHops = await traceFlow(edgesTable, chunksTable, seedChunkId, { maxHops });
  return flowHops.filter((hop) => !isTestFile(hop.filePath)).map(formatTraceHop);
}

/**
 * Traces a call flow through the indexed codebase from a natural-language
 * entrypoint description. Strategy: try an exact symbol-name lookup first
 * (cheap, no embedding); fall back to an embedding search, biasing toward
 * CLI files for CLI-flavored queries and warning on low-similarity seeds.
 *
 * Refactor: the per-hop formatting block was duplicated verbatim in both
 * branches; it now lives in formatTraceHop/traceAndFormatHops. Behavior
 * and the returned shape are unchanged.
 *
 * @param {string} entrypoint - natural-language description or symbol name
 * @param {{path?: string, limit?: number, distanceThreshold?: number,
 *   maxHops?: number}} [opts]
 * @returns {Promise<{hops: Array, metadata: object}>}
 * @throws {Error} when profile/index/tables are missing or Ollama is down
 */
async function runTraceFlow(entrypoint, opts) {
  const profile = await readProfile();
  if (profile === null) {
    throw new Error("No profile found. Run 'brain-cache init' first.");
  }
  const running = await isOllamaRunning();
  if (!running) {
    throw new Error("Ollama is not running.");
  }
  const rootDir = resolve(opts?.path ?? ".");
  const indexState = await readIndexState(rootDir);
  if (indexState === null) {
    throw new Error(`No index found at ${rootDir}. Run 'brain-cache index' first.`);
  }
  const db = await openDatabase(rootDir);
  const tableNames = await db.tableNames();
  if (!tableNames.includes("chunks")) {
    throw new Error("No chunks table found. Run 'brain-cache index' first.");
  }
  const table = await db.openTable("chunks");
  if (!tableNames.includes("edges")) {
    throw new Error("No edges table found. Re-run 'brain-cache index' to build call edges.");
  }
  const edgesTable = await db.openTable("edges");
  const userConfig = await loadUserConfig();
  const toolOverride = {};
  if (opts?.limit !== void 0) toolOverride.limit = opts.limit;
  if (opts?.distanceThreshold !== void 0) toolOverride.distanceThreshold = opts.distanceThreshold;
  const strategy = resolveStrategy("trace", userConfig, Object.keys(toolOverride).length > 0 ? toolOverride : void 0);
  const maxHops = opts?.maxHops ?? 3;
  // Fast path: if the query contains a plausible symbol name that resolves
  // to an indexed chunk, trace from it directly — no embedding needed.
  const candidate = extractSymbolCandidate(entrypoint);
  let seedChunkId = null;
  if (candidate !== null) {
    seedChunkId = await resolveSymbolToChunkId(table, candidate, "");
  }
  if (seedChunkId !== null) {
    const hops = await traceAndFormatHops(edgesTable, table, seedChunkId, maxHops);
    const exactSavings = await computeHopSavings(hops);
    return {
      hops,
      metadata: {
        seedChunkId,
        totalHops: hops.length,
        localTasksPerformed: ["exact_name_lookup", "bfs_trace", "compress"],
        ...exactSavings,
        confidenceWarning: null
      }
    };
  }
  // Fallback: semantic search for a seed chunk.
  const { embeddings } = await embedBatchWithRetry(indexState.embeddingModel, [entrypoint]);
  const seedResults = await searchChunks(table, embeddings[0], strategy, entrypoint);
  const seeds = deduplicateChunks(seedResults);
  if (seeds.length === 0) {
    return {
      hops: [],
      metadata: {
        seedChunkId: null,
        totalHops: 0,
        localTasksPerformed: ["embed_query", "seed_search"],
        tokensSent: 0,
        estimatedWithoutBraincache: 0,
        reductionPct: 0,
        filesInContext: 0
      }
    };
  }
  // CLI-flavored queries prefer a seed under /cli/ when one is available.
  let selectedSeed = seeds[0];
  if (isCLIQuery(entrypoint)) {
    const cliSeed = seeds.find((s) => isCLIFile(s.filePath));
    if (cliSeed) selectedSeed = cliSeed;
  }
  let confidenceWarning = null;
  if (selectedSeed.similarity < LOW_CONFIDENCE_THRESHOLD) {
    const seedName = selectedSeed.name ?? "unknown";
    const seedFile = selectedSeed.filePath.split("/").pop() ?? selectedSeed.filePath;
    confidenceWarning = `No confident match for "${entrypoint}" \u2014 tracing nearest match: ${seedName} (${seedFile}:${selectedSeed.startLine}, similarity: ${selectedSeed.similarity.toFixed(2)})`;
  }
  const hops = await traceAndFormatHops(edgesTable, table, selectedSeed.id, maxHops);
  const savings = await computeHopSavings(hops);
  return {
    hops,
    metadata: {
      seedChunkId: selectedSeed.id,
      totalHops: hops.length,
      localTasksPerformed: ["embed_query", "seed_search", "bfs_trace", "compress"],
      ...savings,
      confidenceWarning
    }
  };
}
|
|
614
|
+
|
|
615
|
+
// src/workflows/explainCodebase.ts
|
|
616
|
+
import { readFile as readFile3 } from "fs/promises";
|
|
617
|
+
import { resolve as resolve2, relative as relative2, dirname as dirname2 } from "path";
|
|
618
|
+
/**
 * Decides whether a chunk represents exported surface area. File-level
 * chunks always count; otherwise the first "real" code line — after any
 * leading JSDoc, blank lines, and compression markers — must start with
 * "export ".
 * @param chunk - chunk with chunkType and content
 * @returns {boolean}
 */
function isExportedChunk(chunk) {
  if (chunk.chunkType === "file") return true;
  let insideDoc = false;
  for (const rawLine of chunk.content.split("\n")) {
    const stripped = rawLine.trim();
    if (stripped.startsWith("/**")) {
      // Single-line JSDoc opens and closes on the same line.
      insideDoc = !stripped.endsWith("*/");
      continue;
    }
    if (insideDoc) {
      if (stripped.endsWith("*/")) insideDoc = false;
      continue;
    }
    const isMarker =
      stripped.startsWith("// [compressed]") ||
      stripped.startsWith("// Signature:") ||
      stripped.startsWith("// [body stripped]");
    if (isMarker || stripped.length === 0) continue;
    return stripped.startsWith("export ");
  }
  return false;
}
|
|
641
|
+
// Generic architecture query, used alone when the caller supplies no
// custom question.
var FALLBACK_QUERY = "module structure and component responsibilities";
// Embedding queries used to sample architecturally significant chunks
// from different angles of the codebase.
var ARCHITECTURE_QUERIES = [
  FALLBACK_QUERY,
  "entry points, CLI commands, and main application flow",
  "core services, business logic, and data processing",
  "data models, types, schemas, and configuration"
];
|
|
648
|
+
/**
 * Renders an ASCII tree of the indexed files, grouped by directory
 * relative to rootDir. Root-level files ("." dir) print without a
 * directory header or indentation. Purely presentational.
 * @param {string[]} filePaths - absolute file paths
 * @param {string} rootDir - root used to relativize paths
 * @returns {string} tree text, one node per line
 */
function buildDirectoryTree(filePaths, rootDir) {
  const uniqueRelPaths = [...new Set(filePaths.map((fp) => relative2(rootDir, fp)))].sort();
  const filesByDir = new Map();
  for (const rel of uniqueRelPaths) {
    const dir = dirname2(rel);
    const bucket = filesByDir.get(dir);
    if (bucket) bucket.push(rel);
    else filesByDir.set(dir, [rel]);
  }
  const sortedDirs = [...filesByDir.keys()].sort();
  const lines = [];
  sortedDirs.forEach((dir, di) => {
    const files = filesByDir.get(dir);
    const dirIsLast = di === sortedDirs.length - 1;
    if (dir !== ".") {
      lines.push(`${dirIsLast ? "└──" : "├──"} ${dir}/`);
    }
    files.forEach((rel, fi) => {
      const fileIsLast = fi === files.length - 1;
      const fileName = rel.includes("/") ? rel.split("/").pop() : rel;
      const indent = dir === "." ? "" : "  ";
      // "└──" only on the very last leaf of the whole tree (or of the
      // root group), matching the original connector rule.
      const connectorIsLast = fileIsLast && (dirIsLast || dir === ".");
      lines.push(`${indent}${connectorIsLast ? "└──" : "├──"} ${fileName}`);
    });
  });
  return lines.join("\n");
}
|
|
677
|
+
async function runExplainCodebase(opts) {
|
|
678
|
+
const profile = await readProfile();
|
|
679
|
+
if (profile === null) {
|
|
680
|
+
throw new Error("No profile found. Run 'brain-cache init' first.");
|
|
681
|
+
}
|
|
682
|
+
const running = await isOllamaRunning();
|
|
683
|
+
if (!running) {
|
|
684
|
+
throw new Error("Ollama is not running.");
|
|
685
|
+
}
|
|
686
|
+
const rootDir = resolve2(opts?.path ?? ".");
|
|
687
|
+
const indexState = await readIndexState(rootDir);
|
|
688
|
+
if (indexState === null) {
|
|
689
|
+
throw new Error(
|
|
690
|
+
`No index found at ${rootDir}. Run 'brain-cache index' first.`
|
|
691
|
+
);
|
|
692
|
+
}
|
|
693
|
+
const db = await openDatabase(rootDir);
|
|
694
|
+
const tableNames = await db.tableNames();
|
|
695
|
+
if (!tableNames.includes("chunks")) {
|
|
696
|
+
throw new Error("No chunks table found. Run 'brain-cache index' first.");
|
|
697
|
+
}
|
|
698
|
+
const table = await db.openTable("chunks");
|
|
699
|
+
const userConfig = await loadUserConfig();
|
|
700
|
+
const toolOverride = {};
|
|
701
|
+
if (opts?.limit !== void 0) toolOverride.limit = opts.limit;
|
|
702
|
+
if (opts?.distanceThreshold !== void 0)
|
|
703
|
+
toolOverride.distanceThreshold = opts.distanceThreshold;
|
|
704
|
+
const strategy = resolveStrategy(
|
|
705
|
+
"explore",
|
|
706
|
+
userConfig,
|
|
707
|
+
Object.keys(toolOverride).length > 0 ? toolOverride : void 0
|
|
708
|
+
);
|
|
709
|
+
const maxTokens = opts?.maxTokens ?? DEFAULT_TOKEN_BUDGET * 2;
|
|
710
|
+
const customQuestion = opts?.question;
|
|
711
|
+
const queries = customQuestion ? [customQuestion] : ARCHITECTURE_QUERIES;
|
|
712
|
+
process.stderr.write(
|
|
713
|
+
`brain-cache: explaining codebase (budget=${maxTokens} tokens, queries=${queries.length})
|
|
714
|
+
`
|
|
715
|
+
);
|
|
716
|
+
const { embeddings } = await embedBatchWithRetry(indexState.embeddingModel, queries);
|
|
717
|
+
const allResults = await Promise.all(
|
|
718
|
+
embeddings.map((vec) => searchChunks(table, vec, strategy))
|
|
719
|
+
);
|
|
720
|
+
const merged = allResults.flat();
|
|
721
|
+
const deduped = deduplicateChunks(merged);
|
|
722
|
+
let allFilePaths = [];
|
|
723
|
+
try {
|
|
724
|
+
const allRows = await table.query().toArray();
|
|
725
|
+
allFilePaths = [...new Set(allRows.map((r) => r.file_path))].sort();
|
|
726
|
+
} catch {
|
|
727
|
+
allFilePaths = [];
|
|
728
|
+
}
|
|
729
|
+
const sorted = [...deduped].sort((a, b) => {
|
|
730
|
+
const aIsTest = /\/(tests?|__tests__|spec)\//i.test(a.filePath) || /\.(test|spec)\./i.test(a.filePath);
|
|
731
|
+
const bIsTest = /\/(tests?|__tests__|spec)\//i.test(b.filePath) || /\.(test|spec)\./i.test(b.filePath);
|
|
732
|
+
if (aIsTest === bIsTest) return 0;
|
|
733
|
+
return aIsTest ? 1 : -1;
|
|
734
|
+
});
|
|
735
|
+
const exportedOnly = sorted.filter(isExportedChunk);
|
|
736
|
+
const assembled = assembleContext(exportedOnly, { maxTokens });
|
|
737
|
+
const enriched = await enrichWithParentClass(assembled.chunks, table, {
|
|
738
|
+
maxTokens,
|
|
739
|
+
currentTokens: assembled.tokenCount
|
|
740
|
+
});
|
|
741
|
+
const compressed = enriched.map((c) => {
|
|
742
|
+
const tokens = countChunkTokens(c.content);
|
|
743
|
+
return tokens > 500 ? compressChunk(c) : c;
|
|
744
|
+
});
|
|
745
|
+
const moduleGroups = groupChunksByModule(compressed, rootDir);
|
|
746
|
+
const codeContent = formatModuleNarratives(moduleGroups);
|
|
747
|
+
const treeFilePaths = allFilePaths.length > 0 ? allFilePaths : [...new Set(compressed.map((c) => c.filePath))];
|
|
748
|
+
const nonTestPaths = treeFilePaths.filter(
|
|
749
|
+
(fp) => !(/\/(tests?|__tests__|spec)\//i.test(fp) || /\.(test|spec)\./i.test(fp))
|
|
750
|
+
);
|
|
751
|
+
const directoryTree = buildDirectoryTree(nonTestPaths, rootDir);
|
|
752
|
+
const content = [
|
|
753
|
+
"## Directory Structure\n\n```\n" + directoryTree + "\n```",
|
|
754
|
+
codeContent
|
|
755
|
+
].join("\n\n---\n\n");
|
|
756
|
+
const uniqueFiles = [...new Set(compressed.map((c) => c.filePath))];
|
|
757
|
+
let fileContentTokens = 0;
|
|
758
|
+
for (const filePath of uniqueFiles) {
|
|
759
|
+
try {
|
|
760
|
+
const fileContent = await readFile3(filePath, "utf-8");
|
|
761
|
+
fileContentTokens += countChunkTokens(fileContent);
|
|
762
|
+
} catch {
|
|
763
|
+
}
|
|
764
|
+
}
|
|
765
|
+
const toolCalls = 1 + uniqueFiles.length;
|
|
766
|
+
const estimatedWithoutBraincache = fileContentTokens + toolCalls * TOOL_CALL_OVERHEAD_TOKENS;
|
|
767
|
+
const tokensSent = assembled.tokenCount;
|
|
768
|
+
const reductionPct = estimatedWithoutBraincache > 0 ? Math.max(
|
|
769
|
+
0,
|
|
770
|
+
Math.round((1 - tokensSent / estimatedWithoutBraincache) * 100)
|
|
771
|
+
) : 0;
|
|
772
|
+
return {
|
|
773
|
+
content,
|
|
774
|
+
chunks: compressed,
|
|
775
|
+
metadata: {
|
|
776
|
+
tokensSent,
|
|
777
|
+
estimatedWithoutBraincache,
|
|
778
|
+
reductionPct,
|
|
779
|
+
filesInContext: uniqueFiles.length,
|
|
780
|
+
localTasksPerformed: [
|
|
781
|
+
"embed_query",
|
|
782
|
+
"vector_search",
|
|
783
|
+
"dedup",
|
|
784
|
+
"parent_enrich",
|
|
785
|
+
"compress",
|
|
786
|
+
"cohesion_group",
|
|
787
|
+
"token_budget",
|
|
788
|
+
"directory_tree"
|
|
789
|
+
],
|
|
790
|
+
cloudCallsMade: 0
|
|
791
|
+
}
|
|
792
|
+
};
|
|
793
|
+
}
|
|
794
|
+
|
|
795
|
+
// src/workflows/buildContext.ts
|
|
796
|
+
// Break a camelCase/PascalCase identifier into lowercase word tokens.
// Handles acronym runs ("parseHTTPResponse" -> ["parse", "http", "response"])
// and drops single-character fragments.
function splitCamelCase(name) {
  const spaced = name
    .replace(/([a-z])([A-Z])/g, "$1 $2")
    .replace(/([A-Z]+)([A-Z][a-z])/g, "$1 $2");
  const tokens = [];
  for (const word of spaced.toLowerCase().split(/\s+/)) {
    if (word.length >= 2) {
      tokens.push(word);
    }
  }
  return tokens;
}
|
|
799
|
+
// Tokenize a free-text query: lowercase, split on whitespace/punctuation,
// keep only tokens of 3+ characters (hyphenated words stay intact).
function extractQueryTokens(query) {
  const separators = /[\s.,;:!?'"()\[\]{}/\\]+/;
  const parts = query.toLowerCase().split(separators);
  return parts.filter((part) => part.length >= 3);
}
|
|
802
|
+
// Decide whether a chunk is a "primary" match for the query: its symbol name
// exactly equals a query token, every camelCase sub-word of its name overlaps
// a query token, or its file-name stem exactly equals a query token.
function isPrimaryMatch(chunk, queryTokens) {
  if (queryTokens.length === 0) {
    return false;
  }
  const baseName = chunk.filePath.split("/").pop()?.toLowerCase() ?? "";
  const baseStem = baseName.replace(/\.[^.]+$/, "");
  const rawName = chunk.name ?? "";
  const loweredName = rawName.toLowerCase();
  const equalsSomeToken = (value) => queryTokens.some((t) => t === value);
  if (loweredName.length > 0 && equalsSomeToken(loweredName)) {
    return true;
  }
  const words = rawName.length > 0 ? splitCamelCase(rawName) : [];
  if (
    words.length > 1 &&
    words.every((word) => queryTokens.some((t) => t.includes(word) || word.includes(t)))
  ) {
    return true;
  }
  if (baseStem.length > 0 && equalsSomeToken(baseStem)) {
    return true;
  }
  return false;
}
|
|
814
|
+
// Substrings whose presence in a path marks the file as a test file.
var TEST_FILE_PATTERNS2 = [".test.", ".spec.", "/__tests__/", "/tests/"];
// Returns true when the path contains any known test-file marker.
function isTestFile2(filePath) {
  for (const marker of TEST_FILE_PATTERNS2) {
    if (filePath.includes(marker)) {
      return true;
    }
  }
  return false;
}
|
|
818
|
+
// File-name patterns for build/test/lint configuration files.
var CONFIG_FILE_PATTERNS = [
  /vitest\.config\./,
  /tsup\.config\./,
  /tsconfig.*\.json$/,
  /jest\.config\./,
  /eslint\.config\./,
  /\.eslintrc/
];
// Returns true when the path's last segment matches a known config pattern.
function isConfigFile(filePath) {
  const segments = filePath.split("/");
  const fileName = segments.pop() ?? "";
  for (const pattern of CONFIG_FILE_PATTERNS) {
    if (pattern.test(fileName)) {
      return true;
    }
  }
  return false;
}
|
|
830
|
+
/**
 * Builds an LLM-ready context bundle for `query` from the local chunk index.
 *
 * Flow: validate prerequisites (profile, Ollama, index, "chunks" table),
 * classify the query's retrieval mode, then dispatch:
 *   - "trace" (when an "edges" table exists): call-graph walk via runTraceFlow;
 *   - "explore": architecture overview via runExplainCodebase;
 *   - otherwise (incl. "trace" without edges): embed the query, vector-search,
 *     dedup, enrich, drop test/config chunks, compress non-primary matches.
 * Finally estimates the token cost of the no-braincache alternative and logs
 * a summary to stderr.
 *
 * @param {string} query - Natural-language query to retrieve context for.
 * @param {{path?: string, maxTokens?: number, limit?: number}} [opts]
 * @returns {Promise<{content: string, chunks: Array, metadata: Object}>}
 * @throws {Error} if no profile exists, Ollama is down, or no index/table found.
 */
async function runBuildContext(query, opts) {
  // Prerequisite 1: an initialized profile must exist.
  const profile = await readProfile();
  if (profile === null) {
    throw new Error("No profile found. Run 'brain-cache init' first.");
  }
  // Prerequisite 2: the local embedding server must be reachable.
  const running = await isOllamaRunning();
  if (!running) {
    throw new Error("Ollama is not running. Start it with 'ollama serve' or run 'brain-cache init'.");
  }
  // Prerequisite 3: an index (with model metadata) for the target directory.
  const rootDir = resolve3(opts?.path ?? ".");
  const indexState = await readIndexState(rootDir);
  if (indexState === null) {
    throw new Error(`No index found at ${rootDir}. Run 'brain-cache index' first.`);
  }
  // Prerequisite 4: the vector DB must contain the "chunks" table.
  const db = await openDatabase(rootDir);
  const tableNames = await db.tableNames();
  if (!tableNames.includes("chunks")) {
    throw new Error("No chunks table found. Run 'brain-cache index' first.");
  }
  const table = await db.openTable("chunks");
  // "edges" is optional; its presence enables the call-graph trace path.
  const hasEdges = tableNames.includes("edges");
  const mode = classifyRetrievalMode(query);
  const maxTokens = opts?.maxTokens ?? DEFAULT_TOKEN_BUDGET;
  // Progress goes to stderr so stdout stays clean for the assembled context.
  process.stderr.write(
    `brain-cache: building context (intent=${mode}, budget=${maxTokens} tokens)
`
  );
  // User config may override per-mode search parameters; an explicit
  // opts.limit takes precedence as a tool-level override.
  const userConfig = await loadUserConfig();
  const strategy = resolveStrategy(
    mode,
    userConfig,
    opts?.limit !== void 0 ? { limit: opts.limit } : void 0
  );
  // Outputs of whichever retrieval branch runs below.
  let finalChunks;
  let finalContent;
  let finalTokenCount;
  let localTasksPerformed;
  if (mode === "trace" && hasEdges) {
    // Trace mode: follow call edges from the query's entry point.
    const traceResult = await runTraceFlow(query, {
      maxHops: 3,
      path: opts?.path,
      limit: strategy.limit,
      distanceThreshold: strategy.distanceThreshold
    });
    // Adapt trace hops to the chunk shape assembleContext expects.
    // endLine is unknown here (0); similarity decays 0.1 per hop so that
    // deeper hops are dropped first when the token budget is tight.
    const traceChunks = traceResult.hops.map((hop, i) => ({
      id: `trace-hop-${i}`,
      filePath: hop.filePath,
      chunkType: "function",
      scope: null,
      name: hop.name,
      content: hop.content,
      startLine: hop.startLine,
      endLine: 0,
      similarity: 1 - hop.hopDepth * 0.1
    }));
    const assembled = assembleContext(traceChunks, { maxTokens });
    const groups = groupChunksByFile(assembled.chunks);
    finalContent = formatGroupedContext(groups);
    finalChunks = assembled.chunks;
    finalTokenCount = assembled.tokenCount;
    localTasksPerformed = traceResult.metadata.localTasksPerformed;
  } else if (mode === "explore") {
    // Explore mode: delegate to the codebase-overview workflow.
    const exploreResult = await runExplainCodebase({
      question: query,
      maxTokens,
      path: opts?.path,
      limit: strategy.limit,
      distanceThreshold: strategy.distanceThreshold
    });
    finalContent = exploreResult.content;
    finalChunks = exploreResult.chunks;
    finalTokenCount = exploreResult.metadata.tokensSent;
    localTasksPerformed = exploreResult.metadata.localTasksPerformed;
  } else {
    // Default retrieval path (also the fallback for trace without edges).
    if (mode === "trace" && !hasEdges) {
      process.stderr.write(`brain-cache: No edges table found, falling back to explore mode
`);
    }
    // Single-query embed, then vector search with the resolved strategy.
    const { embeddings: vectors } = await embedBatchWithRetry(indexState.embeddingModel, [query]);
    const queryVector = vectors[0];
    const results = await searchChunks(table, queryVector, strategy, query);
    const deduped = deduplicateChunks(results);
    // Budget-trim first, then pull in enclosing class context if room remains.
    const assembled = assembleContext(deduped, { maxTokens });
    const enriched = await enrichWithParentClass(assembled.chunks, table, { maxTokens, currentTokens: assembled.tokenCount });
    // Drop test and config files — peripheral for most code questions.
    const withoutPeripheral = enriched.filter((chunk) => !isTestFile2(chunk.filePath) && !isConfigFile(chunk.filePath));
    const queryTokens = extractQueryTokens(query);
    // Keep full bodies only for chunks that directly match the query;
    // everything else is compressed (bodies stripped).
    const compressed = withoutPeripheral.map(
      (chunk) => isPrimaryMatch(chunk, queryTokens) ? chunk : compressChunk(chunk)
    );
    const groups = groupChunksByFile(compressed);
    finalContent = formatGroupedContext(groups);
    finalChunks = compressed;
    // NOTE(review): token count is taken pre-filter/pre-compression, so it
    // may overstate what is actually sent — presumably reports budget
    // consumed, matching the explore workflow's accounting; confirm intent.
    finalTokenCount = assembled.tokenCount;
    localTasksPerformed = ["embed_query", "vector_search", "dedup", "parent_enrich", "drop_peripheral", "compress", "cohesion_group", "token_budget"];
  }
  // --- Savings estimate: what reading whole files would have cost. ---
  // Marker inserted by compression; files where every chunk is compressed are
  // excluded from the baseline since their full content was never offered.
  const BODY_STRIPPED_MARKER2 = "// [body stripped]";
  const filesWithUncompressedContent = new Set(
    finalChunks.filter((c) => !c.content.includes(BODY_STRIPPED_MARKER2)).map((c) => c.filePath)
  );
  const uniqueFiles = [...new Set(finalChunks.map((c) => c.filePath))];
  const numFiles = uniqueFiles.length;
  let fileContentTokens = 0;
  for (const filePath of uniqueFiles) {
    if (!filesWithUncompressedContent.has(filePath)) {
      continue;
    }
    try {
      const fileContent = await readFile4(filePath, "utf-8");
      fileContentTokens += countChunkTokens(fileContent);
    } catch {
      // Best-effort: unreadable/deleted files simply don't count toward
      // the baseline estimate.
    }
  }
  // One search-equivalent call plus one read per file, each with fixed overhead.
  const toolCalls = 1 + numFiles;
  const toolCallOverhead = toolCalls * TOOL_CALL_OVERHEAD_TOKENS;
  const estimatedWithoutBraincache = fileContentTokens + toolCallOverhead;
  // Clamped to 0 so a worse-than-baseline result never shows negative savings.
  const reductionPct = estimatedWithoutBraincache > 0 ? Math.max(0, Math.round((1 - finalTokenCount / estimatedWithoutBraincache) * 100)) : 0;
  const result = {
    content: finalContent,
    chunks: finalChunks,
    metadata: {
      tokensSent: finalTokenCount,
      estimatedWithoutBraincache,
      reductionPct,
      filesInContext: numFiles,
      localTasksPerformed,
      cloudCallsMade: 0
    }
  };
  process.stderr.write(
    `brain-cache: context assembled (${finalTokenCount} tokens, ${reductionPct}% reduction, ${finalChunks.length} chunks)
`
  );
  return result;
}
|
|
964
|
+
|
|
965
|
+
export {
|
|
966
|
+
runBuildContext
|
|
967
|
+
};
|