brain-cache 0.4.2 → 3.0.0
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/.claude/skills/brain-cache/SKILL.md +52 -0
- package/README.md +49 -100
- package/dist/{askCodebase-BZIXS3EV.js → askCodebase-EE32B7BP.js} +9 -9
- package/dist/buildContext-GWVDAYH6.js +14 -0
- package/dist/{chunk-Y7BU7IYX.js → chunk-3HQRTLBH.js} +70 -6
- package/dist/{chunk-ZKVZTDND.js → chunk-4IOR54GU.js} +2 -1
- package/dist/chunk-6C2OYMKD.js +16 -0
- package/dist/{workflows-KYCBR7TC.js → chunk-CY34XQ2O.js} +115 -24
- package/dist/chunk-DFFMV3RR.js +171 -0
- package/dist/{chunk-PJQNHMQH.js → chunk-DPH5X5HL.js} +1 -1
- package/dist/{chunk-FQL4HV4R.js → chunk-HRJ3OT6Q.js} +1 -1
- package/dist/chunk-KMRPAVMM.js +967 -0
- package/dist/{chunk-KQZSBRRH.js → chunk-RKPICQU7.js} +1 -1
- package/dist/{chunk-EEC7KYPY.js → chunk-TXLCXXKY.js} +7 -8
- package/dist/claude-md-section-K47HUTE4.js +38 -0
- package/dist/cli.js +13 -9
- package/dist/{doctor-KRNLXE4R.js → doctor-FCET2MNJ.js} +3 -3
- package/dist/{embedder-ZLHAZZUI.js → embedder-HVEXDJAU.js} +2 -2
- package/dist/{init-QNN5H3DR.js → init-2E4JMZZC.js} +71 -6
- package/dist/mcp.js +1450 -130
- package/dist/{search-O4CFAH45.js → search-7ISZ7EXI.js} +16 -15
- package/dist/{status-7MT4IROA.js → status-VKTSG2SN.js} +3 -3
- package/dist/statusline-script-NFUDFOWK.js +95 -0
- package/dist/watch-QPMAB62P.js +128 -0
- package/dist/workflows-MWEY7OAI.js +14 -0
- package/package.json +5 -1
- package/dist/buildContext-APWOPZMJ.js +0 -14
- package/dist/chunk-JZQWPHAQ.js +0 -103
- package/dist/chunk-SBSMKI4B.js +0 -109
- package/dist/chunk-ZGYLHFHJ.js +0 -17
- package/dist/claude-md-section-6ZJ3TMO4.js +0 -34
package/dist/mcp.js
CHANGED
|
@@ -1,5 +1,5 @@
|
|
|
1
1
|
// src/mcp/index.ts
|
|
2
|
-
import { resolve as
|
|
2
|
+
import { resolve as resolve7 } from "path";
|
|
3
3
|
import { McpServer } from "@modelcontextprotocol/sdk/server/mcp.js";
|
|
4
4
|
import { StdioServerTransport } from "@modelcontextprotocol/sdk/server/stdio.js";
|
|
5
5
|
import { z as z2 } from "zod";
|
|
@@ -48,15 +48,92 @@ function setLogLevel(level) {
|
|
|
48
48
|
}
|
|
49
49
|
|
|
50
50
|
// src/lib/format.ts
|
|
51
|
+
import dedent from "dedent";
|
|
52
|
+
function formatToolResponse(summary, body) {
|
|
53
|
+
return `${summary}
|
|
54
|
+
|
|
55
|
+
${body}`;
|
|
56
|
+
}
|
|
57
|
+
function formatErrorEnvelope(message, suggestion) {
|
|
58
|
+
const lines = [`Error: ${message}`];
|
|
59
|
+
if (suggestion) lines.push(`Suggestion: ${suggestion}`);
|
|
60
|
+
return lines.join("\n");
|
|
61
|
+
}
|
|
51
62
|
function formatTokenSavings(input) {
|
|
52
|
-
const PAD = 27;
|
|
53
63
|
const fileSuffix = input.filesInContext !== 1 ? "s" : "";
|
|
54
|
-
|
|
55
|
-
|
|
56
|
-
|
|
57
|
-
|
|
58
|
-
];
|
|
59
|
-
|
|
64
|
+
return [
|
|
65
|
+
`Tokens sent to Claude: ${input.tokensSent.toLocaleString()}`,
|
|
66
|
+
`Estimated without: ~${input.estimatedWithout.toLocaleString()} (${input.filesInContext} file${fileSuffix} + overhead)`,
|
|
67
|
+
`Reduction: ${input.reductionPct}%`
|
|
68
|
+
].join("\n");
|
|
69
|
+
}
|
|
70
|
+
function formatDoctorOutput(health) {
|
|
71
|
+
const lines = [];
|
|
72
|
+
let ollamaLine = `Ollama: ${health.ollamaStatus}`;
|
|
73
|
+
if (health.ollamaStatus === "running" && health.ollamaVersion) {
|
|
74
|
+
ollamaLine += ` (v${health.ollamaVersion})`;
|
|
75
|
+
}
|
|
76
|
+
lines.push(ollamaLine);
|
|
77
|
+
if (health.indexFreshness.indexed) {
|
|
78
|
+
const { fileCount, chunkCount, indexedAt } = health.indexFreshness;
|
|
79
|
+
let indexLine = "Index: indexed";
|
|
80
|
+
if (fileCount !== null && chunkCount !== null) {
|
|
81
|
+
indexLine += ` \u2014 ${fileCount} files, ${chunkCount} chunks`;
|
|
82
|
+
}
|
|
83
|
+
if (indexedAt) {
|
|
84
|
+
indexLine += ` (at ${indexedAt})`;
|
|
85
|
+
}
|
|
86
|
+
lines.push(indexLine);
|
|
87
|
+
} else {
|
|
88
|
+
lines.push("Index: not indexed");
|
|
89
|
+
}
|
|
90
|
+
lines.push(`Embedding model: ${health.embeddingModel ?? "none"}`);
|
|
91
|
+
if (health.vramTier === "none") {
|
|
92
|
+
lines.push("VRAM: no GPU detected");
|
|
93
|
+
} else {
|
|
94
|
+
const vramVal = health.vramAvailable !== null ? `${health.vramAvailable} GiB` : "unknown";
|
|
95
|
+
lines.push(`VRAM: ${health.vramTier} (${vramVal})`);
|
|
96
|
+
}
|
|
97
|
+
return lines.join("\n");
|
|
98
|
+
}
|
|
99
|
+
function formatIndexResult(result) {
|
|
100
|
+
if (result.fileCount !== null && result.chunkCount !== null) {
|
|
101
|
+
return `Indexed ${result.path} \u2014 ${result.fileCount} files, ${result.chunkCount} chunks.`;
|
|
102
|
+
}
|
|
103
|
+
return `Indexed ${result.path}.`;
|
|
104
|
+
}
|
|
105
|
+
function formatSearchResults(chunks) {
|
|
106
|
+
if (chunks.length === 0) {
|
|
107
|
+
return "No results found for the given query.";
|
|
108
|
+
}
|
|
109
|
+
return chunks.map((chunk, i) => {
|
|
110
|
+
const name = chunk.name ?? "(anonymous)";
|
|
111
|
+
return dedent`
|
|
112
|
+
${i + 1}. ${name} (${chunk.chunkType})
|
|
113
|
+
${chunk.filePath}:${chunk.startLine}
|
|
114
|
+
Score: ${chunk.similarity.toFixed(3)}
|
|
115
|
+
`.trim();
|
|
116
|
+
}).join("\n\n");
|
|
117
|
+
}
|
|
118
|
+
function formatTraceFlow(result) {
|
|
119
|
+
if (result.hops.length === 0) {
|
|
120
|
+
return "No call hops found. The entrypoint may not be indexed \u2014 run index_repo first.";
|
|
121
|
+
}
|
|
122
|
+
return result.hops.map((hop, i) => {
|
|
123
|
+
const name = hop.name ?? "(anonymous)";
|
|
124
|
+
const calls = hop.callsFound.length > 0 ? hop.callsFound.join(", ") : "(none)";
|
|
125
|
+
return dedent`
|
|
126
|
+
${i + 1}. depth:${hop.hopDepth} ${name}
|
|
127
|
+
${hop.filePath}:${hop.startLine}
|
|
128
|
+
Calls: ${calls}
|
|
129
|
+
`.trim();
|
|
130
|
+
}).join("\n\n");
|
|
131
|
+
}
|
|
132
|
+
function formatContext(result) {
|
|
133
|
+
return result.content;
|
|
134
|
+
}
|
|
135
|
+
function formatPipelineLabel(tasks) {
|
|
136
|
+
return tasks.join(" -> ");
|
|
60
137
|
}
|
|
61
138
|
|
|
62
139
|
// src/services/capability.ts
|
|
@@ -103,6 +180,7 @@ var GLOBAL_CONFIG_DIR = join(homedir(), ".brain-cache");
|
|
|
103
180
|
var PROFILE_PATH = join(GLOBAL_CONFIG_DIR, "profile.json");
|
|
104
181
|
var CONFIG_PATH = join(GLOBAL_CONFIG_DIR, "config.json");
|
|
105
182
|
var PROJECT_DATA_DIR = ".brain-cache";
|
|
183
|
+
var SESSION_STATS_FILENAME = "session-stats.json";
|
|
106
184
|
var EMBEDDING_DIMENSIONS = {
|
|
107
185
|
"nomic-embed-text": 768,
|
|
108
186
|
"mxbai-embed-large": 1024
|
|
@@ -114,13 +192,12 @@ var VECTOR_INDEX_THRESHOLD = 256;
|
|
|
114
192
|
var EMBED_TIMEOUT_MS = 3e4;
|
|
115
193
|
var COLD_START_RETRY_DELAY_MS = 2e3;
|
|
116
194
|
var EMBED_MAX_TOKENS = 8192;
|
|
117
|
-
var DEFAULT_SEARCH_LIMIT = 10;
|
|
118
|
-
var DEFAULT_DISTANCE_THRESHOLD = 0.4;
|
|
119
|
-
var DIAGNOSTIC_DISTANCE_THRESHOLD = 0.45;
|
|
120
|
-
var DIAGNOSTIC_SEARCH_LIMIT = 20;
|
|
121
195
|
var DEFAULT_TOKEN_BUDGET = 4096;
|
|
122
196
|
var FILE_HASHES_FILENAME = "file-hashes.json";
|
|
123
197
|
var TOOL_CALL_OVERHEAD_TOKENS = 300;
|
|
198
|
+
var COMPRESSION_TOKEN_THRESHOLD = 500;
|
|
199
|
+
var HIGH_RELEVANCE_SIMILARITY_THRESHOLD = 0.85;
|
|
200
|
+
var COMPRESSION_HARD_LIMIT = 800;
|
|
124
201
|
|
|
125
202
|
// src/services/capability.ts
|
|
126
203
|
var execFileAsync = promisify(execFile);
|
|
@@ -272,6 +349,12 @@ import { Schema, Field, Utf8, Int32, Float32, FixedSizeList } from "apache-arrow
|
|
|
272
349
|
import { join as join2 } from "path";
|
|
273
350
|
import { readFile as readFile2, writeFile as writeFile2, mkdir as mkdir2 } from "fs/promises";
|
|
274
351
|
var log3 = childLogger("lancedb");
|
|
352
|
+
var _writeMutex = Promise.resolve();
|
|
353
|
+
function withWriteLock(fn) {
|
|
354
|
+
const next = _writeMutex.then(() => fn());
|
|
355
|
+
_writeMutex = next.then(() => void 0, () => void 0);
|
|
356
|
+
return next;
|
|
357
|
+
}
|
|
275
358
|
function chunkSchema(dim) {
|
|
276
359
|
return new Schema([
|
|
277
360
|
new Field("id", new Utf8(), false),
|
|
@@ -289,6 +372,16 @@ function chunkSchema(dim) {
|
|
|
289
372
|
)
|
|
290
373
|
]);
|
|
291
374
|
}
|
|
375
|
+
function edgeSchema() {
|
|
376
|
+
return new Schema([
|
|
377
|
+
new Field("from_chunk_id", new Utf8(), false),
|
|
378
|
+
new Field("from_file", new Utf8(), false),
|
|
379
|
+
new Field("from_symbol", new Utf8(), true),
|
|
380
|
+
new Field("to_symbol", new Utf8(), false),
|
|
381
|
+
new Field("to_file", new Utf8(), true),
|
|
382
|
+
new Field("edge_type", new Utf8(), false)
|
|
383
|
+
]);
|
|
384
|
+
}
|
|
292
385
|
async function openDatabase(projectRoot) {
|
|
293
386
|
const dataDir = join2(projectRoot, PROJECT_DATA_DIR);
|
|
294
387
|
await mkdir2(dataDir, { recursive: true });
|
|
@@ -306,6 +399,10 @@ async function openOrCreateChunkTable(db, projectRoot, model, dim) {
|
|
|
306
399
|
"Embedding model or dimension changed \u2014 dropping and recreating chunks table"
|
|
307
400
|
);
|
|
308
401
|
await db.dropTable("chunks");
|
|
402
|
+
if (tableNames.includes("edges")) {
|
|
403
|
+
await db.dropTable("edges");
|
|
404
|
+
log3.warn("Also dropped edges table (stale chunk IDs)");
|
|
405
|
+
}
|
|
309
406
|
} else {
|
|
310
407
|
log3.info({ model, dim }, "Opened existing chunks table");
|
|
311
408
|
return db.openTable("chunks");
|
|
@@ -321,8 +418,10 @@ async function insertChunks(table, rows) {
|
|
|
321
418
|
if (rows.length === 0) {
|
|
322
419
|
return;
|
|
323
420
|
}
|
|
324
|
-
await
|
|
325
|
-
|
|
421
|
+
await withWriteLock(async () => {
|
|
422
|
+
await table.add(rows);
|
|
423
|
+
log3.debug({ count: rows.length }, "Inserted chunk rows");
|
|
424
|
+
});
|
|
326
425
|
}
|
|
327
426
|
async function createVectorIndexIfNeeded(table, embeddingModel) {
|
|
328
427
|
const rowCount = await table.countRows();
|
|
@@ -389,12 +488,50 @@ async function writeFileHashes(projectRoot, hashes) {
|
|
|
389
488
|
}
|
|
390
489
|
async function deleteChunksByFilePath(table, filePath) {
|
|
391
490
|
const escaped = filePath.replace(/'/g, "''");
|
|
392
|
-
await
|
|
491
|
+
await withWriteLock(async () => {
|
|
492
|
+
await table.delete(`file_path = '${escaped}'`);
|
|
493
|
+
});
|
|
494
|
+
}
|
|
495
|
+
async function openOrCreateEdgesTable(db, opts) {
|
|
496
|
+
const tableNames = await db.tableNames();
|
|
497
|
+
if (tableNames.includes("edges")) {
|
|
498
|
+
if (opts?.shouldReset) {
|
|
499
|
+
log3.warn("Resetting edges table (chunks table was recreated)");
|
|
500
|
+
await db.dropTable("edges");
|
|
501
|
+
} else {
|
|
502
|
+
log3.info("Opened existing edges table");
|
|
503
|
+
return db.openTable("edges");
|
|
504
|
+
}
|
|
505
|
+
}
|
|
506
|
+
const schema = edgeSchema();
|
|
507
|
+
const emptyData = lancedb.makeArrowTable([], { schema });
|
|
508
|
+
const table = await db.createTable("edges", emptyData, { mode: "overwrite" });
|
|
509
|
+
log3.info("Created new edges table");
|
|
510
|
+
return table;
|
|
511
|
+
}
|
|
512
|
+
async function insertEdges(table, edges) {
|
|
513
|
+
if (edges.length === 0) return;
|
|
514
|
+
const rows = edges.map((e) => ({
|
|
515
|
+
from_chunk_id: e.fromChunkId,
|
|
516
|
+
from_file: e.fromFile,
|
|
517
|
+
from_symbol: e.fromSymbol,
|
|
518
|
+
to_symbol: e.toSymbol,
|
|
519
|
+
to_file: e.toFile,
|
|
520
|
+
edge_type: e.edgeType
|
|
521
|
+
}));
|
|
522
|
+
await withWriteLock(async () => {
|
|
523
|
+
await table.add(rows);
|
|
524
|
+
log3.debug({ count: rows.length }, "Inserted edge rows");
|
|
525
|
+
});
|
|
526
|
+
}
|
|
527
|
+
async function queryEdgesFrom(edgesTable, fromChunkId) {
|
|
528
|
+
const escaped = fromChunkId.replace(/'/g, "''");
|
|
529
|
+
return edgesTable.query().where(`from_chunk_id = '${escaped}'`).toArray();
|
|
393
530
|
}
|
|
394
531
|
|
|
395
532
|
// src/workflows/index.ts
|
|
396
|
-
import { resolve } from "path";
|
|
397
|
-
import { readFile as
|
|
533
|
+
import { resolve as resolve2 } from "path";
|
|
534
|
+
import { readFile as readFile5 } from "fs/promises";
|
|
398
535
|
import { createHash } from "crypto";
|
|
399
536
|
|
|
400
537
|
// src/services/crawler.ts
|
|
@@ -431,13 +568,16 @@ var ALWAYS_EXCLUDE_GLOBS = [
|
|
|
431
568
|
"**/Cargo.lock",
|
|
432
569
|
"**/*.min.js"
|
|
433
570
|
];
|
|
434
|
-
async function crawlSourceFiles(rootDir) {
|
|
571
|
+
async function crawlSourceFiles(rootDir, opts) {
|
|
435
572
|
const ig = ignore();
|
|
436
573
|
try {
|
|
437
574
|
const gitignoreContent = await readFile3(`${rootDir}/.gitignore`, "utf-8");
|
|
438
575
|
ig.add(gitignoreContent);
|
|
439
576
|
} catch {
|
|
440
577
|
}
|
|
578
|
+
if (opts?.extraIgnorePatterns?.length) {
|
|
579
|
+
ig.add(opts.extraIgnorePatterns);
|
|
580
|
+
}
|
|
441
581
|
const files = await fg("**/*", {
|
|
442
582
|
cwd: rootDir,
|
|
443
583
|
absolute: true,
|
|
@@ -456,7 +596,7 @@ async function crawlSourceFiles(rootDir) {
|
|
|
456
596
|
|
|
457
597
|
// src/services/chunker.ts
|
|
458
598
|
import { createRequire } from "module";
|
|
459
|
-
import { extname as extname2 } from "path";
|
|
599
|
+
import { extname as extname2, resolve, dirname } from "path";
|
|
460
600
|
var _require = createRequire(import.meta.url);
|
|
461
601
|
var Parser = _require("tree-sitter");
|
|
462
602
|
var { typescript: tsLang, tsx: tsxLang } = _require("tree-sitter-typescript");
|
|
@@ -561,7 +701,7 @@ function chunkFile(filePath, content) {
|
|
|
561
701
|
const ext = extname2(filePath);
|
|
562
702
|
const lang = LANGUAGE_MAP[ext];
|
|
563
703
|
if (!lang) {
|
|
564
|
-
return [];
|
|
704
|
+
return { chunks: [], edges: [] };
|
|
565
705
|
}
|
|
566
706
|
const category = getLanguageCategory(ext);
|
|
567
707
|
const nodeTypes = CHUNK_NODE_TYPES[category];
|
|
@@ -569,7 +709,50 @@ function chunkFile(filePath, content) {
|
|
|
569
709
|
parser.setLanguage(lang);
|
|
570
710
|
const tree = parser.parse(content);
|
|
571
711
|
const chunks = [];
|
|
712
|
+
const edges = [];
|
|
713
|
+
let currentChunkId = null;
|
|
714
|
+
let currentSymbol = null;
|
|
572
715
|
for (const node of walkNodes(tree.rootNode)) {
|
|
716
|
+
if (node.type === "call_expression") {
|
|
717
|
+
const funcNode = node.childForFieldName("function");
|
|
718
|
+
if (funcNode) {
|
|
719
|
+
let toSymbol = null;
|
|
720
|
+
if (funcNode.type === "identifier") {
|
|
721
|
+
toSymbol = funcNode.text;
|
|
722
|
+
} else if (funcNode.type === "member_expression" || funcNode.type === "optional_member_expression") {
|
|
723
|
+
toSymbol = funcNode.childForFieldName("property")?.text ?? null;
|
|
724
|
+
}
|
|
725
|
+
if (toSymbol) {
|
|
726
|
+
const chunkId = currentChunkId ?? `${filePath}:0`;
|
|
727
|
+
const symbol = currentSymbol;
|
|
728
|
+
edges.push({
|
|
729
|
+
fromChunkId: chunkId,
|
|
730
|
+
fromFile: filePath,
|
|
731
|
+
fromSymbol: symbol,
|
|
732
|
+
toSymbol,
|
|
733
|
+
toFile: null,
|
|
734
|
+
// Resolved at query time, not index time
|
|
735
|
+
edgeType: "call"
|
|
736
|
+
});
|
|
737
|
+
}
|
|
738
|
+
}
|
|
739
|
+
}
|
|
740
|
+
if (node.type === "import_statement") {
|
|
741
|
+
const source = node.childForFieldName("source");
|
|
742
|
+
if (source) {
|
|
743
|
+
const raw = source.text.replace(/['"]/g, "");
|
|
744
|
+
const isRelative = raw.startsWith("./") || raw.startsWith("../");
|
|
745
|
+
const toFile = isRelative ? resolve(dirname(filePath), raw) : null;
|
|
746
|
+
edges.push({
|
|
747
|
+
fromChunkId: `${filePath}:0`,
|
|
748
|
+
fromFile: filePath,
|
|
749
|
+
fromSymbol: null,
|
|
750
|
+
toSymbol: raw,
|
|
751
|
+
toFile,
|
|
752
|
+
edgeType: "import"
|
|
753
|
+
});
|
|
754
|
+
}
|
|
755
|
+
}
|
|
573
756
|
if (!nodeTypes.has(node.type)) {
|
|
574
757
|
continue;
|
|
575
758
|
}
|
|
@@ -595,6 +778,8 @@ function chunkFile(filePath, content) {
|
|
|
595
778
|
startLine: node.startPosition.row + 1,
|
|
596
779
|
endLine: node.endPosition.row + 1
|
|
597
780
|
});
|
|
781
|
+
currentChunkId = `${filePath}:${node.startPosition.row}`;
|
|
782
|
+
currentSymbol = extractName(node);
|
|
598
783
|
}
|
|
599
784
|
if (chunks.length === 0) {
|
|
600
785
|
chunks.push({
|
|
@@ -608,8 +793,8 @@ function chunkFile(filePath, content) {
|
|
|
608
793
|
endLine: content.split("\n").length
|
|
609
794
|
});
|
|
610
795
|
}
|
|
611
|
-
log5.debug({ filePath, chunkCount: chunks.length }, "File chunked");
|
|
612
|
-
return chunks;
|
|
796
|
+
log5.debug({ filePath, chunkCount: chunks.length, edgeCount: edges.length }, "File chunked");
|
|
797
|
+
return { chunks, edges };
|
|
613
798
|
}
|
|
614
799
|
|
|
615
800
|
// src/services/embedder.ts
|
|
@@ -677,6 +862,18 @@ async function embedBatchWithRetry(model, texts, dimension = DEFAULT_EMBEDDING_D
|
|
|
677
862
|
}
|
|
678
863
|
}
|
|
679
864
|
|
|
865
|
+
// src/services/ignorePatterns.ts
|
|
866
|
+
import { readFile as readFile4 } from "fs/promises";
|
|
867
|
+
import { join as join3 } from "path";
|
|
868
|
+
async function loadIgnorePatterns(rootDir) {
|
|
869
|
+
try {
|
|
870
|
+
const content = await readFile4(join3(rootDir, ".braincacheignore"), "utf-8");
|
|
871
|
+
return content.split("\n").filter((line) => line.trim() !== "" && !line.startsWith("#"));
|
|
872
|
+
} catch {
|
|
873
|
+
return [];
|
|
874
|
+
}
|
|
875
|
+
}
|
|
876
|
+
|
|
680
877
|
// src/services/tokenCounter.ts
|
|
681
878
|
import { countTokens } from "@anthropic-ai/tokenizer";
|
|
682
879
|
var log7 = childLogger("tokenCounter");
|
|
@@ -725,7 +922,12 @@ async function runIndex(targetPath, opts) {
|
|
|
725
922
|
return originalStderrWrite(chunk, ...args);
|
|
726
923
|
});
|
|
727
924
|
try {
|
|
728
|
-
const rootDir =
|
|
925
|
+
const rootDir = resolve2(targetPath ?? ".");
|
|
926
|
+
const ignorePatterns = await loadIgnorePatterns(rootDir);
|
|
927
|
+
if (ignorePatterns.length > 0) {
|
|
928
|
+
process.stderr.write(`brain-cache: loaded ${ignorePatterns.length} patterns from .braincacheignore
|
|
929
|
+
`);
|
|
930
|
+
}
|
|
729
931
|
const profile = await readProfile();
|
|
730
932
|
if (profile === null) {
|
|
731
933
|
throw new Error("No profile found. Run 'brain-cache init' first.");
|
|
@@ -743,7 +945,10 @@ async function runIndex(targetPath, opts) {
|
|
|
743
945
|
}
|
|
744
946
|
const db = await openDatabase(rootDir);
|
|
745
947
|
const table = await openOrCreateChunkTable(db, rootDir, profile.embeddingModel, dim);
|
|
746
|
-
const
|
|
948
|
+
const edgesTable = await openOrCreateEdgesTable(db);
|
|
949
|
+
const files = await crawlSourceFiles(rootDir, {
|
|
950
|
+
extraIgnorePatterns: ignorePatterns.length > 0 ? ignorePatterns : void 0
|
|
951
|
+
});
|
|
747
952
|
process.stderr.write(`brain-cache: found ${files.length} source files
|
|
748
953
|
`);
|
|
749
954
|
if (files.length === 0) {
|
|
@@ -757,7 +962,7 @@ async function runIndex(targetPath, opts) {
|
|
|
757
962
|
const group = files.slice(groupStart, groupStart + FILE_READ_CONCURRENCY);
|
|
758
963
|
const results = await Promise.all(
|
|
759
964
|
group.map(async (filePath) => {
|
|
760
|
-
const content = await
|
|
965
|
+
const content = await readFile5(filePath, "utf-8");
|
|
761
966
|
return { filePath, content, hash: hashContent(content) };
|
|
762
967
|
})
|
|
763
968
|
);
|
|
@@ -793,6 +998,10 @@ async function runIndex(targetPath, opts) {
|
|
|
793
998
|
);
|
|
794
999
|
for (const filePath of [...removedFiles, ...changedFiles]) {
|
|
795
1000
|
await deleteChunksByFilePath(table, filePath);
|
|
1001
|
+
await withWriteLock(async () => {
|
|
1002
|
+
const escaped = filePath.replace(/'/g, "''");
|
|
1003
|
+
await edgesTable.delete(`from_file = '${escaped}'`);
|
|
1004
|
+
});
|
|
796
1005
|
}
|
|
797
1006
|
const updatedHashes = { ...storedHashes };
|
|
798
1007
|
for (const filePath of removedFiles) {
|
|
@@ -840,11 +1049,13 @@ async function runIndex(targetPath, opts) {
|
|
|
840
1049
|
for (let groupStart = 0; groupStart < filesToProcess.length; groupStart += FILE_READ_CONCURRENCY) {
|
|
841
1050
|
const group = filesToProcess.slice(groupStart, groupStart + FILE_READ_CONCURRENCY);
|
|
842
1051
|
const groupChunks = [];
|
|
1052
|
+
const groupEdges = [];
|
|
843
1053
|
for (const filePath of group) {
|
|
844
1054
|
const content = contentMap.get(filePath);
|
|
845
1055
|
totalRawTokens += countChunkTokens(content);
|
|
846
|
-
const chunks = chunkFile(filePath, content);
|
|
1056
|
+
const { chunks, edges } = chunkFile(filePath, content);
|
|
847
1057
|
groupChunks.push(...chunks);
|
|
1058
|
+
groupEdges.push(...edges);
|
|
848
1059
|
}
|
|
849
1060
|
processedFiles += group.length;
|
|
850
1061
|
totalChunks += groupChunks.length;
|
|
@@ -885,6 +1096,9 @@ async function runIndex(targetPath, opts) {
|
|
|
885
1096
|
`
|
|
886
1097
|
);
|
|
887
1098
|
}
|
|
1099
|
+
if (groupEdges.length > 0) {
|
|
1100
|
+
await insertEdges(edgesTable, groupEdges);
|
|
1101
|
+
}
|
|
888
1102
|
}
|
|
889
1103
|
if (skippedChunks > 0) {
|
|
890
1104
|
process.stderr.write(`brain-cache: ${skippedChunks} chunks skipped (too large for model context)
|
|
@@ -894,6 +1108,14 @@ async function runIndex(targetPath, opts) {
|
|
|
894
1108
|
`brain-cache: ${totalChunks} chunks from ${filesToProcess.length} files
|
|
895
1109
|
`
|
|
896
1110
|
);
|
|
1111
|
+
const edgeCount = await edgesTable.countRows();
|
|
1112
|
+
if (edgeCount === 0) {
|
|
1113
|
+
process.stderr.write(`brain-cache: no call edges extracted \u2014 check source files
|
|
1114
|
+
`);
|
|
1115
|
+
} else {
|
|
1116
|
+
process.stderr.write(`brain-cache: ${edgeCount} call/import edges stored
|
|
1117
|
+
`);
|
|
1118
|
+
}
|
|
897
1119
|
await createVectorIndexIfNeeded(table, profile.embeddingModel);
|
|
898
1120
|
for (const filePath of filesToProcess) {
|
|
899
1121
|
updatedHashes[filePath] = currentHashes[filePath];
|
|
@@ -936,31 +1158,22 @@ ${savingsBlock}
|
|
|
936
1158
|
}
|
|
937
1159
|
|
|
938
1160
|
// src/workflows/search.ts
|
|
939
|
-
import { resolve as
|
|
1161
|
+
import { resolve as resolve3 } from "path";
|
|
940
1162
|
|
|
941
1163
|
// src/services/retriever.ts
|
|
942
1164
|
var log8 = childLogger("retriever");
|
|
943
|
-
var
|
|
944
|
-
"
|
|
945
|
-
"
|
|
946
|
-
"
|
|
947
|
-
"
|
|
948
|
-
"
|
|
949
|
-
"
|
|
950
|
-
"
|
|
951
|
-
"
|
|
952
|
-
"null",
|
|
953
|
-
"wrong",
|
|
954
|
-
"issue",
|
|
955
|
-
"problem",
|
|
956
|
-
"causes",
|
|
957
|
-
"caused",
|
|
958
|
-
"debug",
|
|
959
|
-
"fix",
|
|
960
|
-
"incorrect",
|
|
961
|
-
"unexpected"
|
|
1165
|
+
var TRACE_KEYWORDS = [
|
|
1166
|
+
"trace the",
|
|
1167
|
+
"trace flow",
|
|
1168
|
+
"call path",
|
|
1169
|
+
"flow of",
|
|
1170
|
+
"follows from",
|
|
1171
|
+
"calls into",
|
|
1172
|
+
"invokes",
|
|
1173
|
+
"trace from"
|
|
962
1174
|
];
|
|
963
|
-
var
|
|
1175
|
+
var TRACE_REGEX = /how does\b.*\bflow\b/i;
|
|
1176
|
+
var LOOKUP_BIGRAMS = [
|
|
964
1177
|
"stack trace",
|
|
965
1178
|
"null pointer",
|
|
966
1179
|
"not defined",
|
|
@@ -972,7 +1185,16 @@ var DIAGNOSTIC_BIGRAMS = [
|
|
|
972
1185
|
"not working",
|
|
973
1186
|
"throws exception"
|
|
974
1187
|
];
|
|
975
|
-
var
|
|
1188
|
+
var LOOKUP_KEYWORDS = [
|
|
1189
|
+
"where is",
|
|
1190
|
+
"find the",
|
|
1191
|
+
"definition of",
|
|
1192
|
+
"signature of",
|
|
1193
|
+
"show me the",
|
|
1194
|
+
"what does",
|
|
1195
|
+
"what is the type"
|
|
1196
|
+
];
|
|
1197
|
+
var EXPLORE_EXCLUSIONS = [
|
|
976
1198
|
"error handler",
|
|
977
1199
|
"error handling",
|
|
978
1200
|
"error boundary",
|
|
@@ -990,28 +1212,82 @@ var DIAGNOSTIC_EXCLUSIONS = [
|
|
|
990
1212
|
"fix the config",
|
|
991
1213
|
"fix the setup"
|
|
992
1214
|
];
|
|
993
|
-
function
|
|
1215
|
+
function classifyRetrievalMode(query) {
|
|
994
1216
|
const lower = query.toLowerCase();
|
|
995
|
-
if (
|
|
996
|
-
|
|
1217
|
+
if (TRACE_KEYWORDS.some((kw) => lower.includes(kw)) || TRACE_REGEX.test(lower)) {
|
|
1218
|
+
const broadTerms = ["architecture", "overview", "structure", "system", "design", "pipeline", "codebase"];
|
|
1219
|
+
const isBroad = broadTerms.some((t) => lower.includes(t));
|
|
1220
|
+
if (!isBroad) {
|
|
1221
|
+
return "trace";
|
|
1222
|
+
}
|
|
1223
|
+
}
|
|
1224
|
+
if (LOOKUP_BIGRAMS.some((bg) => lower.includes(bg))) {
|
|
1225
|
+
return "lookup";
|
|
997
1226
|
}
|
|
998
|
-
const
|
|
999
|
-
if (
|
|
1000
|
-
const isExcluded =
|
|
1227
|
+
const hasLookupKeyword = LOOKUP_KEYWORDS.some((kw) => lower.includes(kw));
|
|
1228
|
+
if (hasLookupKeyword) {
|
|
1229
|
+
const isExcluded = EXPLORE_EXCLUSIONS.some((ex) => lower.includes(ex));
|
|
1001
1230
|
if (!isExcluded) {
|
|
1002
|
-
return "
|
|
1231
|
+
return "lookup";
|
|
1003
1232
|
}
|
|
1004
1233
|
}
|
|
1005
|
-
return "
|
|
1234
|
+
return "explore";
|
|
1006
1235
|
}
|
|
1007
1236
|
var RETRIEVAL_STRATEGIES = {
|
|
1008
|
-
|
|
1009
|
-
|
|
1237
|
+
lookup: { limit: 5, distanceThreshold: 0.4, keywordBoostWeight: 0.4 },
|
|
1238
|
+
trace: { limit: 3, distanceThreshold: 0.5, keywordBoostWeight: 0.2 },
|
|
1239
|
+
explore: { limit: 20, distanceThreshold: 0.6, keywordBoostWeight: 0.1 }
|
|
1010
1240
|
};
|
|
1011
|
-
|
|
1241
|
+
function extractQueryTokens(query) {
|
|
1242
|
+
return query.toLowerCase().split(/[\s.,;:!?'"()\[\]{}/\\]+/).filter((t) => t.length >= 3);
|
|
1243
|
+
}
|
|
1244
|
+
function splitCamelCase(name) {
|
|
1245
|
+
return name.replace(/([a-z])([A-Z])/g, "$1 $2").replace(/([A-Z]+)([A-Z][a-z])/g, "$1 $2").toLowerCase().split(/\s+/).filter((t) => t.length >= 2);
|
|
1246
|
+
}
|
|
1247
|
+
function computeKeywordBoost(chunk, queryTokens) {
|
|
1248
|
+
if (queryTokens.length === 0) return 0;
|
|
1249
|
+
const fileName = chunk.filePath.split("/").pop()?.toLowerCase() ?? "";
|
|
1250
|
+
const fileNameStem = fileName.replace(/\.[^.]+$/, "");
|
|
1251
|
+
const chunkName = (chunk.name ?? "").toLowerCase();
|
|
1252
|
+
if (chunkName.length > 0 && queryTokens.some((t) => t === chunkName)) {
|
|
1253
|
+
return 1;
|
|
1254
|
+
}
|
|
1255
|
+
const subTokens = chunkName.length > 0 ? splitCamelCase(chunkName) : [];
|
|
1256
|
+
if (subTokens.length > 1 && subTokens.every((sub) => queryTokens.some((t) => t.includes(sub) || sub.includes(t)))) {
|
|
1257
|
+
return 1;
|
|
1258
|
+
}
|
|
1259
|
+
if (fileNameStem.length > 0 && queryTokens.some((t) => t === fileNameStem)) {
|
|
1260
|
+
return 0.6;
|
|
1261
|
+
}
|
|
1262
|
+
const target = `${fileName} ${chunkName}`;
|
|
1263
|
+
const matchCount = queryTokens.filter((t) => target.includes(t)).length;
|
|
1264
|
+
return matchCount / queryTokens.length;
|
|
1265
|
+
}
|
|
1266
|
+
var CONFIG_NOISE_PATTERNS = [
|
|
1267
|
+
{ pattern: /^vitest\.config\./, toolName: "vitest" },
|
|
1268
|
+
{ pattern: /^tsup\.config\./, toolName: "tsup" },
|
|
1269
|
+
{ pattern: /^tsconfig.*\.json$/, toolName: "tsconfig" },
|
|
1270
|
+
{ pattern: /^jest\.config\./, toolName: "jest" },
|
|
1271
|
+
{ pattern: /^eslint\.config\./, toolName: "eslint" },
|
|
1272
|
+
{ pattern: /^\.eslintrc/, toolName: "eslint" }
|
|
1273
|
+
];
|
|
1274
|
+
var CONFIG_FILE_NOISE_PENALTY = 0.15;
|
|
1275
|
+
function computeNoisePenalty(chunk, query) {
|
|
1276
|
+
const fileName = chunk.filePath.split("/").pop() ?? "";
|
|
1277
|
+
const lowerQuery = query.toLowerCase();
|
|
1278
|
+
for (const { pattern, toolName } of CONFIG_NOISE_PATTERNS) {
|
|
1279
|
+
if (pattern.test(fileName)) {
|
|
1280
|
+
if (lowerQuery.includes(toolName)) return 0;
|
|
1281
|
+
return CONFIG_FILE_NOISE_PENALTY;
|
|
1282
|
+
}
|
|
1283
|
+
}
|
|
1284
|
+
return 0;
|
|
1285
|
+
}
|
|
1286
|
+
async function searchChunks(table, queryVector, opts, query) {
|
|
1012
1287
|
log8.debug({ limit: opts.limit, distanceThreshold: opts.distanceThreshold }, "Searching chunks");
|
|
1013
1288
|
const rows = await table.query().nearestTo(queryVector).distanceType("cosine").limit(opts.limit).toArray();
|
|
1014
|
-
|
|
1289
|
+
const queryTokens = query ? extractQueryTokens(query) : [];
|
|
1290
|
+
const chunks = rows.filter((r) => r._distance <= opts.distanceThreshold).map((r) => ({
|
|
1015
1291
|
id: r.id,
|
|
1016
1292
|
filePath: r.file_path,
|
|
1017
1293
|
chunkType: r.chunk_type,
|
|
@@ -1021,7 +1297,18 @@ async function searchChunks(table, queryVector, opts) {
|
|
|
1021
1297
|
startLine: r.start_line,
|
|
1022
1298
|
endLine: r.end_line,
|
|
1023
1299
|
similarity: 1 - r._distance
|
|
1024
|
-
}))
|
|
1300
|
+
}));
|
|
1301
|
+
if (queryTokens.length > 0) {
|
|
1302
|
+
const boostWeight = opts.keywordBoostWeight ?? 0.1;
|
|
1303
|
+
const scored = chunks.map((chunk) => {
|
|
1304
|
+
const boost = computeKeywordBoost(chunk, queryTokens);
|
|
1305
|
+
const score = chunk.similarity * (1 - boostWeight) + boost * boostWeight - computeNoisePenalty(chunk, query);
|
|
1306
|
+
const promotedSimilarity = boost > 0 ? Math.max(chunk.similarity, HIGH_RELEVANCE_SIMILARITY_THRESHOLD) : chunk.similarity;
|
|
1307
|
+
return { chunk: { ...chunk, similarity: promotedSimilarity }, score };
|
|
1308
|
+
});
|
|
1309
|
+
return scored.sort((a, b) => b.score - a.score).map(({ chunk }) => chunk);
|
|
1310
|
+
}
|
|
1311
|
+
return chunks.sort((a, b) => b.similarity - a.similarity);
|
|
1025
1312
|
}
|
|
1026
1313
|
function deduplicateChunks(chunks) {
|
|
1027
1314
|
const seen = /* @__PURE__ */ new Set();
|
|
@@ -1044,7 +1331,7 @@ async function runSearch(query, opts) {
|
|
|
1044
1331
|
"Ollama is not running. Start it with 'ollama serve' or run 'brain-cache init'."
|
|
1045
1332
|
);
|
|
1046
1333
|
}
|
|
1047
|
-
const rootDir =
|
|
1334
|
+
const rootDir = resolve3(opts?.path ?? ".");
|
|
1048
1335
|
const indexState = await readIndexState(rootDir);
|
|
1049
1336
|
if (indexState === null) {
|
|
1050
1337
|
throw new Error(
|
|
@@ -1063,18 +1350,19 @@ async function runSearch(query, opts) {
|
|
|
1063
1350
|
`Index is empty at ${rootDir}. No source files were indexed.`
|
|
1064
1351
|
);
|
|
1065
1352
|
}
|
|
1066
|
-
const
|
|
1353
|
+
const mode = classifyRetrievalMode(query);
|
|
1067
1354
|
const strategy = {
|
|
1068
|
-
limit: opts?.limit ?? RETRIEVAL_STRATEGIES[
|
|
1069
|
-
distanceThreshold: RETRIEVAL_STRATEGIES[
|
|
1355
|
+
limit: opts?.limit ?? RETRIEVAL_STRATEGIES[mode].limit,
|
|
1356
|
+
distanceThreshold: RETRIEVAL_STRATEGIES[mode].distanceThreshold,
|
|
1357
|
+
keywordBoostWeight: RETRIEVAL_STRATEGIES[mode].keywordBoostWeight
|
|
1070
1358
|
};
|
|
1071
1359
|
process.stderr.write(
|
|
1072
|
-
`brain-cache: searching (
|
|
1360
|
+
`brain-cache: searching (mode=${mode}, limit=${strategy.limit})
|
|
1073
1361
|
`
|
|
1074
1362
|
);
|
|
1075
1363
|
const { embeddings: vectors } = await embedBatchWithRetry(indexState.embeddingModel, [query]);
|
|
1076
1364
|
const queryVector = vectors[0];
|
|
1077
|
-
const results = await searchChunks(table, queryVector, strategy);
|
|
1365
|
+
const results = await searchChunks(table, queryVector, strategy, query);
|
|
1078
1366
|
const deduped = deduplicateChunks(results);
|
|
1079
1367
|
process.stderr.write(
|
|
1080
1368
|
`brain-cache: found ${deduped.length} chunks (${results.length} before dedup)
|
|
@@ -1090,8 +1378,798 @@ async function runSearch(query, opts) {
|
|
|
1090
1378
|
}
|
|
1091
1379
|
|
|
1092
1380
|
// src/workflows/buildContext.ts
|
|
1093
|
-
import { readFile as
|
|
1094
|
-
import { resolve as
|
|
1381
|
+
import { readFile as readFile9 } from "fs/promises";
|
|
1382
|
+
import { resolve as resolve6 } from "path";
|
|
1383
|
+
|
|
1384
|
+
// src/services/cohesion.ts
|
|
1385
|
+
import { dirname as dirname2, relative as relative2, basename } from "path";
|
|
1386
|
+
var log9 = childLogger("cohesion");
|
|
1387
|
+
/**
 * Bucket retrieved chunks by their source file, ordering each bucket by
 * start line so per-file output reads top-to-bottom.
 * @param {Array<{filePath: string, startLine: number}>} chunks
 * @returns {Map<string, Array>} filePath -> chunks sorted by startLine
 */
function groupChunksByFile(chunks) {
  const byFile = /* @__PURE__ */ new Map();
  for (const chunk of chunks) {
    const bucket = byFile.get(chunk.filePath);
    if (bucket !== void 0) {
      bucket.push(chunk);
    } else {
      byFile.set(chunk.filePath, [chunk]);
    }
  }
  // Sort in place: buckets are owned by this function until returned.
  for (const bucket of byFile.values()) {
    bucket.sort((first, second) => first.startLine - second.startLine);
  }
  return byFile;
}
|
|
1402
|
+
// Insert the enclosing class chunk in front of each retrieved method chunk,
// so a method is never shown without its class context. Parents are fetched
// from the chunks table by (name == method.scope, same file, chunk_type
// 'class'), skipped when already present or when adding them would exceed
// opts.maxTokens. Returns a new array; the input array is not mutated.
// NOTE(review): single-quote doubling is the only escaping applied to the
// interpolated WHERE clause — assumes LanceDB-style SQL quoting; confirm.
async function enrichWithParentClass(chunks, chunksTable, opts) {
  const existingIds = new Set(chunks.map((c) => c.id));
  const result = [...chunks];
  let { currentTokens } = opts;
  const parentsToInsert = [];
  for (const chunk of chunks) {
    // Only method chunks have a meaningful parent class (scope holds its name).
    if (chunk.chunkType !== "method" || chunk.scope === null) {
      continue;
    }
    const escapedScope = chunk.scope.replace(/'/g, "''");
    const escapedFilePath = chunk.filePath.replace(/'/g, "''");
    log9.debug({ scope: chunk.scope, filePath: chunk.filePath }, "Looking for parent class");
    const rows = await chunksTable.query().where(`name = '${escapedScope}' AND file_path = '${escapedFilePath}' AND chunk_type = 'class'`).toArray();
    if (rows.length === 0) {
      continue;
    }
    // First match wins; duplicates (several methods of one class) are
    // filtered by existingIds below.
    const row = rows[0];
    if (existingIds.has(row.id)) {
      continue;
    }
    // Re-shape the snake_case DB row into the camelCase chunk shape used
    // everywhere else; similarity 1 marks it as a structural (not scored) add.
    const parentChunk = {
      id: row.id,
      filePath: row.file_path,
      chunkType: row.chunk_type,
      scope: row.scope,
      name: row.name,
      content: row.content,
      startLine: row.start_line,
      endLine: row.end_line,
      similarity: 1
    };
    // Budget check uses the formatted (rendered) size, not the raw content.
    const tokenCost = countChunkTokens(formatChunk(parentChunk));
    if (currentTokens + tokenCost > opts.maxTokens) {
      log9.debug({ parentId: row.id, tokenCost, currentTokens, maxTokens: opts.maxTokens }, "Skipping parent class \u2014 token budget exceeded");
      continue;
    }
    existingIds.add(row.id);
    currentTokens += tokenCost;
    parentsToInsert.push({ parent: parentChunk, beforeId: chunk.id });
  }
  // Second pass: splice each parent immediately before its first method so
  // insertion does not disturb the iteration above.
  for (const { parent, beforeId } of parentsToInsert) {
    const idx = result.findIndex((c) => c.id === beforeId);
    if (idx !== -1) {
      result.splice(idx, 0, parent);
    }
  }
  return result;
}
|
|
1450
|
+
/**
 * Render file-grouped chunks into one string: a `// ── path ──` header per
 * file, chunks separated by blank lines, files separated by `---` rules.
 * @param {Map<string, Array>} groups - output of groupChunksByFile
 * @returns {string}
 */
function formatGroupedContext(groups) {
  const rendered = [];
  for (const [filePath, fileChunks] of groups) {
    const heading = `// \u2500\u2500 ${filePath} \u2500\u2500`;
    const joined = fileChunks.map(formatChunk).join("\n\n");
    rendered.push(`${heading}\n${joined}`);
  }
  return rendered.join("\n\n---\n\n");
}
|
|
1460
|
+
/**
 * Pull a one-line behavioral summary out of a chunk: the first descriptive
 * line of the first JSDoc block found, ignoring compression-manifest marker
 * lines and JSDoc tag lines (@param etc.). Returns null when no JSDoc text
 * is present.
 * @param {string} content - raw (possibly compressed) chunk text
 * @returns {string | null}
 */
function extractBehavioralSummary(content) {
  const collected = [];
  let insideDoc = false;
  let finished = false;
  for (const rawLine of content.split("\n")) {
    if (finished) break;
    const stripped = rawLine.trim();
    // Skip manifest markers injected by compressChunk.
    const isMarker = stripped.startsWith("// [compressed]") || stripped.startsWith("// Signature:") || stripped.startsWith("// [body stripped]");
    if (isMarker) continue;
    if (!insideDoc && stripped.startsWith("/**")) {
      insideDoc = true;
      collected.push(rawLine);
      finished = stripped.endsWith("*/");
    } else if (insideDoc) {
      collected.push(rawLine);
      finished = stripped.endsWith("*/");
    }
  }
  if (collected.length === 0) return null;
  // Strip comment decoration and drop tag lines / leftover punctuation.
  const cleaned = collected
    .map((l) => l.replace(/^\s*\/?\*+\s?/, "").replace(/\s*\*\/.*$/, "").trim())
    .filter((l) => l.length > 0 && !l.startsWith("@") && l !== "/");
  return cleaned[0] ?? null;
}
|
|
1483
|
+
/**
 * Bucket chunks by module, where a module is the chunk file's directory
 * relative to rootDir ("." for files at the root). Each bucket is sorted
 * by start line.
 * @param {Array} chunks
 * @param {string} rootDir - indexed project root
 * @returns {Map<string, Array>} module key -> chunks sorted by startLine
 */
function groupChunksByModule(chunks, rootDir) {
  const byModule = /* @__PURE__ */ new Map();
  for (const chunk of chunks) {
    const relPath = relative2(rootDir, chunk.filePath);
    const key = dirname2(relPath) || ".";
    if (!byModule.has(key)) byModule.set(key, []);
    byModule.get(key).push(chunk);
  }
  for (const bucket of byModule.values()) {
    bucket.sort((x, y) => x.startLine - y.startLine);
  }
  return byModule;
}
|
|
1497
|
+
/**
 * Collect the internal (relative-path) import targets referenced by a set
 * of chunks. Each `from './x/y.js'` style import contributes its final
 * path segment (".js" stripped); package imports are ignored because the
 * pattern requires a leading dot. Result is deduplicated and sorted.
 * @param {Array<{content: string}>} chunks
 * @returns {string[]}
 */
function extractWiringAnnotations(chunks) {
  const relativeImport = /from\s+['"](\.[^'"]+)['"]/g;
  const stems = /* @__PURE__ */ new Set();
  for (const chunk of chunks) {
    for (const hit of chunk.content.matchAll(relativeImport)) {
      const target = hit[1];
      const lastSegment = target.replace(/\.js$/, "").split("/").pop();
      // Ignore degenerate one-character segments (e.g. "." leftovers).
      if (lastSegment && lastSegment.length > 1) {
        stems.add(lastSegment);
      }
    }
  }
  return [...stems].sort();
}
|
|
1511
|
+
/**
 * Render module-grouped chunks as markdown narratives: a `### module:` header
 * per module, a bold file name per file (with its one-line behavioral summary
 * when one exists), plus an `imports:` line listing internal dependencies.
 * @param {Map<string, Array>} groups - output of groupChunksByModule
 * @returns {string}
 */
function formatModuleNarratives(groups) {
  const sections = [];
  for (const [moduleKey, moduleChunks] of groups) {
    const out = [`### module: ${moduleKey}`];
    // Re-bucket this module's chunks by file, preserving encounter order.
    const perFile = /* @__PURE__ */ new Map();
    for (const chunk of moduleChunks) {
      if (!perFile.has(chunk.filePath)) perFile.set(chunk.filePath, []);
      perFile.get(chunk.filePath).push(chunk);
    }
    for (const [filePath, fileChunks] of perFile) {
      const fileName = basename(filePath);
      // Summary comes from the first chunk only (chunks are pre-sorted).
      const summary = extractBehavioralSummary(fileChunks[0].content);
      out.push(summary ? `\n**${fileName}** -- ${summary}` : `\n**${fileName}**`);
      const wiring = extractWiringAnnotations(fileChunks);
      if (wiring.length > 0) {
        out.push(` imports: ${wiring.join(", ")}`);
      }
    }
    sections.push(out.join("\n"));
  }
  return sections.join("\n\n");
}
|
|
1541
|
+
|
|
1542
|
+
// src/services/compression.ts
|
|
1543
|
+
// Replace an oversized chunk's body with a compact manifest (header line,
// leading JSDoc if any, first signature line, "[body stripped]" marker).
// Chunks under COMPRESSION_TOKEN_THRESHOLD pass through untouched, as do
// high-relevance chunks under COMPRESSION_HARD_LIMIT. Returns a new chunk
// object; the input is not mutated.
function compressChunk(chunk) {
  const tokens = countChunkTokens(chunk.content);
  if (tokens <= COMPRESSION_TOKEN_THRESHOLD) return chunk;
  const isHighRelevance = chunk.similarity >= HIGH_RELEVANCE_SIMILARITY_THRESHOLD;
  if (tokens <= COMPRESSION_HARD_LIMIT && isHighRelevance) {
    return chunk;
  }
  const lines = chunk.content.split("\n");
  const jsDocLines = [];
  let signatureLine = "";
  let inJsDoc = false;      // currently inside a /** ... */ block
  let jsDocDone = false;    // a complete JSDoc block has been captured
  // Single forward scan: capture the leading JSDoc (if any), then take the
  // first non-blank line after it as the signature and stop.
  for (const line of lines) {
    const trimmed = line.trim();
    if (!jsDocDone) {
      if (trimmed.startsWith("/**")) {
        inJsDoc = true;
        jsDocLines.push(line);
        if (trimmed.endsWith("*/")) {
          inJsDoc = false;
          jsDocDone = true;
        }
        continue;
      }
      if (inJsDoc) {
        jsDocLines.push(line);
        if (trimmed.endsWith("*/")) {
          inJsDoc = false;
          jsDocDone = true;
        }
        continue;
      }
    }
    if (trimmed.length > 0 && signatureLine === "") {
      signatureLine = line;
      break;
    }
  }
  // Fallback: content with neither JSDoc nor a post-JSDoc line — take the
  // first non-blank line anywhere (or empty string).
  if (signatureLine === "" && jsDocLines.length === 0) {
    signatureLine = lines.find((l) => l.trim().length > 0) ?? "";
  }
  const manifestParts = [
    `// [compressed] ${chunk.name ?? "unknown"} (lines ${chunk.startLine}-${chunk.endLine})`
  ];
  if (jsDocLines.length > 0) {
    manifestParts.push(...jsDocLines);
  }
  manifestParts.push(`// Signature: ${signatureLine}`);
  // Marker consumed elsewhere (BODY_STRIPPED_MARKER) to detect compression.
  manifestParts.push("// [body stripped]");
  return { ...chunk, content: manifestParts.join("\n") };
}
|
|
1594
|
+
|
|
1595
|
+
// src/services/configLoader.ts
import { readFile as readFile6 } from "fs/promises";
var log10 = childLogger("configLoader");
// Read the user config JSON from CONFIG_PATH. Best-effort by design: a
// missing, unreadable, or syntactically invalid file yields {} (defaults)
// with only a debug-level log — never an error surfaced to the caller.
async function loadUserConfig() {
  try {
    const raw = await readFile6(CONFIG_PATH, "utf-8");
    return JSON.parse(raw);
  } catch {
    log10.debug({ configPath: CONFIG_PATH }, "Config file not found or invalid \u2014 using defaults");
    return {};
  }
}
|
|
1607
|
+
/**
 * Merge the retrieval strategy for a mode, lowest to highest precedence:
 * built-in defaults (RETRIEVAL_STRATEGIES[mode]), then the user's config
 * override for that mode, then per-call tool overrides.
 * @param {string} mode - retrieval mode key
 * @param {object} userConfig - parsed user config (may lack `retrieval`)
 * @param {object} [toolOverride] - per-invocation overrides
 * @returns {object} merged strategy
 */
function resolveStrategy(mode, userConfig, toolOverride) {
  const defaults = RETRIEVAL_STRATEGIES[mode];
  const fromConfig = userConfig.retrieval?.[mode] ?? {};
  // Object.assign skips undefined sources, matching spread semantics.
  return Object.assign({}, defaults, fromConfig, toolOverride);
}
|
|
1612
|
+
|
|
1613
|
+
// src/workflows/traceFlow.ts
|
|
1614
|
+
import { resolve as resolve4 } from "path";
|
|
1615
|
+
import { readFile as readFile7 } from "fs/promises";
|
|
1616
|
+
|
|
1617
|
+
// src/services/flowTracer.ts
|
|
1618
|
+
var log11 = childLogger("flowTracer");
|
|
1619
|
+
/**
 * Map a symbol name to a chunk id via the chunks table. Prefers a match in
 * the caller's own file (fromFile); otherwise falls back to the first row.
 * Returns null when no chunk bears that name.
 * @param {object} chunksTable - table exposing query().where().toArray()
 * @param {string} toSymbol - symbol name to resolve
 * @param {string} fromFile - file the reference originates from
 * @returns {Promise<string | null>}
 */
async function resolveSymbolToChunkId(chunksTable, toSymbol, fromFile) {
  // Single quotes are doubled for SQL string-literal safety.
  const quoted = toSymbol.replace(/'/g, "''");
  const matches = await chunksTable.query().where(`name = '${quoted}'`).toArray();
  if (matches.length === 0) return null;
  const localMatch = matches.find((m) => m.file_path === fromFile);
  return (localMatch ?? matches[0]).id;
}
|
|
1628
|
+
// Breadth-first walk of the call graph starting at seedChunkId. For each
// reachable chunk (up to opts.maxHops levels deep, default 3) emit a hop
// record carrying the chunk's location/content, its BFS depth, and the
// deduplicated set of symbols it calls. Cycles are cut via `visited`;
// unresolvable chunk ids are skipped with a debug log.
async function traceFlow(edgesTable, chunksTable, seedChunkId, opts) {
  const maxHops = opts?.maxHops ?? 3;
  const visited = /* @__PURE__ */ new Set();
  const queue = [{ chunkId: seedChunkId, depth: 0 }];
  const hops = [];
  log11.debug({ seedChunkId, maxHops }, "Starting BFS flow trace");
  while (queue.length > 0) {
    // shift() keeps FIFO order (true BFS); queue sizes here are small.
    const { chunkId, depth } = queue.shift();
    if (visited.has(chunkId)) {
      continue;
    }
    visited.add(chunkId);
    const escapedId = chunkId.replace(/'/g, "''");
    const chunkRows = await chunksTable.query().where(`id = '${escapedId}'`).toArray();
    if (chunkRows.length === 0) {
      log11.debug({ chunkId }, "Chunk not found \u2014 skipping hop");
      continue;
    }
    const row = chunkRows[0];
    // Only "call" edges drive traversal; other edge types are ignored.
    const edges = await queryEdgesFrom(edgesTable, chunkId);
    const callEdges = edges.filter((e) => e.edge_type === "call");
    hops.push({
      chunkId,
      filePath: row.file_path,
      name: row.name,
      startLine: row.start_line,
      endLine: row.end_line,
      content: row.content,
      hopDepth: depth,
      callsFound: [...new Set(callEdges.map((e) => e.to_symbol))]
    });
    // Record the frontier node but do not expand beyond maxHops.
    if (depth >= maxHops) {
      continue;
    }
    for (const edge of callEdges) {
      const nextChunkId = await resolveSymbolToChunkId(chunksTable, edge.to_symbol, edge.from_file);
      if (nextChunkId !== null && !visited.has(nextChunkId)) {
        queue.push({ chunkId: nextChunkId, depth: depth + 1 });
      }
    }
  }
  log11.debug({ seedChunkId, hopsFound: hops.length, maxDepthReached: hops.length > 0 ? Math.max(...hops.map((h) => h.hopDepth)) : 0 }, "BFS flow trace complete");
  return hops;
}
|
|
1672
|
+
|
|
1673
|
+
// src/workflows/traceFlow.ts
|
|
1674
|
+
// Marker injected by compressChunk; its presence means a chunk's body was
// stripped, so the original file would have to be read for full content.
var BODY_STRIPPED_MARKER = "// [body stripped]";
// Path fragments that identify test files across common layouts.
var TEST_FILE_PATTERNS = [".test.", ".spec.", "/__tests__/", "/tests/"];
/**
 * True when a file path looks like a test file (substring match against
 * TEST_FILE_PATTERNS).
 * @param {string} filePath
 * @returns {boolean}
 */
function isTestFile(filePath) {
  for (const fragment of TEST_FILE_PATTERNS) {
    if (filePath.includes(fragment)) return true;
  }
  return false;
}
|
|
1679
|
+
// JavaScript built-in method/property names filtered out of traced call
// edges (TRACE-02): calls to these are noise, not project wiring.
var STDLIB_SYMBOLS = /* @__PURE__ */ new Set([
  // Array
  "map",
  "filter",
  "reduce",
  "forEach",
  "find",
  "findIndex",
  "some",
  "every",
  "includes",
  "push",
  "pop",
  "shift",
  "unshift",
  "splice",
  "slice",
  "concat",
  "join",
  "sort",
  "reverse",
  "flat",
  "flatMap",
  "fill",
  "indexOf",
  "lastIndexOf",
  // Set/Map
  "keys",
  "values",
  "entries",
  "has",
  "get",
  "set",
  "delete",
  "add",
  "clear",
  // Promise
  "resolve",
  "reject",
  "then",
  "catch",
  "finally",
  "all",
  "race",
  "allSettled",
  // Object
  "toString",
  "valueOf",
  "hasOwnProperty",
  "assign",
  "freeze",
  "create",
  // String
  "split",
  "replace",
  "replaceAll",
  "match",
  "matchAll",
  "trim",
  "trimStart",
  "trimEnd",
  "startsWith",
  "endsWith",
  "padStart",
  "padEnd",
  "repeat",
  "charAt",
  "charCodeAt",
  "substring",
  "toLowerCase",
  "toUpperCase",
  // Property-like
  "length"
]);
// Seeds scoring below this similarity trigger a confidence warning in
// runTraceFlow rather than being rejected outright.
var LOW_CONFIDENCE_THRESHOLD = 0.5;
|
|
1754
|
+
/**
 * Heuristic: does this natural-language query concern the CLI layer?
 * Matches "cli" as a standalone word anywhere in the query, or the word
 * fragment "command".
 *
 * Fix: the previous check (`" cli "` / leading `"cli "`) missed "cli" at
 * the END of a query ("trace the cli") and next to punctuation ("cli?").
 * `\bcli\b` is a strict superset of those cases and still rejects words
 * that merely contain the letters ("client", "decline").
 * @param {string} query
 * @returns {boolean}
 */
function isCLIQuery(query) {
  const lower = query.toLowerCase();
  return /\bcli\b/.test(lower) || lower.includes("command");
}
|
|
1758
|
+
/**
 * True when a file lives under a `/cli/` directory segment.
 * @param {string} filePath
 * @returns {boolean}
 */
function isCLIFile(filePath) {
  const cliSegment = "/cli/";
  return filePath.includes(cliSegment);
}
|
|
1761
|
+
// Estimate the token savings of a trace: tokensSent is what we actually
// emit (sum over hop contents); estimatedWithoutBraincache models an agent
// reading every involved file whole plus per-tool-call overhead. Files whose
// hops were ALL compressed are excluded from the baseline file reads —
// presumably because a reader could have skimmed them too. Unreadable files
// are silently skipped (best-effort estimate).
async function computeHopSavings(hops) {
  if (hops.length === 0) {
    return { tokensSent: 0, estimatedWithoutBraincache: 0, reductionPct: 0, filesInContext: 0 };
  }
  const tokensSent = hops.reduce((sum, h) => sum + countChunkTokens(h.content), 0);
  const uniqueFiles = [...new Set(hops.map((h) => h.filePath))];
  const filesInContext = uniqueFiles.length;
  // Files that contributed at least one full (uncompressed) hop body.
  const filesWithUncompressedContent = new Set(
    hops.filter((h) => !h.content.includes(BODY_STRIPPED_MARKER)).map((h) => h.filePath)
  );
  let fileContentTokens = 0;
  for (const filePath of uniqueFiles) {
    if (!filesWithUncompressedContent.has(filePath)) continue;
    try {
      const fileContent = await readFile7(filePath, "utf-8");
      fileContentTokens += countChunkTokens(fileContent);
    } catch {
      // Deliberate best-effort: a deleted/unreadable file just shrinks the
      // baseline estimate; it must not fail the whole trace.
    }
  }
  // One initial search call plus one file-read call per file.
  const toolCalls = 1 + filesInContext;
  const toolCallOverhead = toolCalls * TOOL_CALL_OVERHEAD_TOKENS;
  const estimatedWithoutBraincache = fileContentTokens + toolCallOverhead;
  // Clamped at 0 so a pathological case never reports negative savings.
  const reductionPct = estimatedWithoutBraincache > 0 ? Math.max(0, Math.round((1 - tokensSent / estimatedWithoutBraincache) * 100)) : 0;
  return { tokensSent, estimatedWithoutBraincache, reductionPct, filesInContext };
}
|
|
1786
|
+
/**
 * Guess which identifier a natural-language query is about. Preference
 * order: the last camelCase token (strong identifier signal), else the
 * last token that is not a common stop word, else null.
 * @param {string} query
 * @returns {string | null}
 */
function extractSymbolCandidate(query) {
  // Identifier-shaped tokens, 3+ chars, starting with a letter/underscore.
  const words = query.match(/\b[a-zA-Z_][a-zA-Z0-9_]{2,}\b/g);
  if (words === null) return null;
  const STOP = /* @__PURE__ */ new Set([
    "how",
    "does",
    "work",
    "the",
    "what",
    "where",
    "trace",
    "flow",
    "call",
    "path",
    "find",
    "show",
    "into",
    "from",
    "this",
    "that",
    "with",
    "when",
    "which",
    "about",
    "explain",
    "describe"
  ]);
  let lastCamel = null;
  let lastMeaningful = null;
  // Single pass, remembering the last occurrence in each category.
  for (const word of words) {
    if (/[a-z][A-Z]/.test(word)) lastCamel = word;
    if (!STOP.has(word.toLowerCase())) lastMeaningful = word;
  }
  if (lastCamel !== null) return lastCamel;
  return lastMeaningful;
}
|
|
1818
|
+
// Orchestrate a call-flow trace for a natural-language entrypoint query.
// Flow: validate profile/Ollama/index/tables, then try an EXACT symbol
// lookup (no embedding cost); if that fails, fall back to a semantic seed
// search with CLI-aware seed selection and a low-confidence warning. Both
// paths BFS-trace from the seed, drop test-file hops, compress oversized
// bodies, filter stdlib call names, and report token-savings metadata.
// NOTE(review): the hop-mapping block (asChunk/compress/return) is
// duplicated verbatim between the exact path and the semantic path —
// candidate for extraction into a shared helper.
async function runTraceFlow(entrypoint, opts) {
  const profile = await readProfile();
  if (profile === null) {
    throw new Error("No profile found. Run 'brain-cache init' first.");
  }
  const running = await isOllamaRunning();
  if (!running) {
    throw new Error("Ollama is not running.");
  }
  const rootDir = resolve4(opts?.path ?? ".");
  const indexState = await readIndexState(rootDir);
  if (indexState === null) {
    throw new Error(`No index found at ${rootDir}. Run 'brain-cache index' first.`);
  }
  const db = await openDatabase(rootDir);
  const tableNames = await db.tableNames();
  if (!tableNames.includes("chunks")) {
    throw new Error("No chunks table found. Run 'brain-cache index' first.");
  }
  const table = await db.openTable("chunks");
  // Edges are built by a newer indexer version; old indexes may lack them.
  if (!tableNames.includes("edges")) {
    throw new Error("No edges table found. Re-run 'brain-cache index' to build call edges.");
  }
  const edgesTable = await db.openTable("edges");
  const userConfig = await loadUserConfig();
  // Per-call overrides only when explicitly provided.
  const toolOverride = {};
  if (opts?.limit !== void 0) toolOverride.limit = opts.limit;
  if (opts?.distanceThreshold !== void 0) toolOverride.distanceThreshold = opts.distanceThreshold;
  const strategy = resolveStrategy("trace", userConfig, Object.keys(toolOverride).length > 0 ? toolOverride : void 0);
  // Fast path: if the query names a real symbol, resolve it directly and
  // skip embedding entirely.
  const candidate = extractSymbolCandidate(entrypoint);
  let seedChunkId = null;
  if (candidate !== null) {
    seedChunkId = await resolveSymbolToChunkId(table, candidate, "");
  }
  if (seedChunkId !== null) {
    const maxHops2 = opts?.maxHops ?? 3;
    const flowHops2 = await traceFlow(edgesTable, table, seedChunkId, { maxHops: maxHops2 });
    // Production-only view: test files add noise to a call flow.
    const productionHops2 = flowHops2.filter((hop) => !isTestFile(hop.filePath));
    const hops2 = productionHops2.map((hop) => {
      // Re-shape into the chunk shape compressChunk expects; similarity 1
      // marks the hop as structurally selected, not vector-scored.
      const asChunk = {
        id: hop.chunkId,
        filePath: hop.filePath,
        chunkType: "function",
        scope: null,
        name: hop.name,
        content: hop.content,
        startLine: hop.startLine,
        endLine: hop.endLine,
        similarity: 1
      };
      const compressed = compressChunk(asChunk);
      return {
        filePath: hop.filePath,
        name: hop.name,
        startLine: hop.startLine,
        content: compressed.content,
        callsFound: hop.callsFound.filter((s) => !STDLIB_SYMBOLS.has(s)),
        // TRACE-02
        hopDepth: hop.hopDepth
      };
    });
    const exactSavings = await computeHopSavings(hops2);
    return {
      hops: hops2,
      metadata: {
        seedChunkId,
        totalHops: hops2.length,
        localTasksPerformed: ["exact_name_lookup", "bfs_trace", "compress"],
        ...exactSavings,
        confidenceWarning: null
      }
    };
  }
  // Semantic fallback: embed the query and seed from vector search.
  const { embeddings } = await embedBatchWithRetry(indexState.embeddingModel, [entrypoint]);
  const seedResults = await searchChunks(table, embeddings[0], strategy, entrypoint);
  const seeds = deduplicateChunks(seedResults);
  if (seeds.length === 0) {
    return {
      hops: [],
      metadata: {
        seedChunkId: null,
        totalHops: 0,
        localTasksPerformed: ["embed_query", "seed_search"],
        tokensSent: 0,
        estimatedWithoutBraincache: 0,
        reductionPct: 0,
        filesInContext: 0
      }
    };
  }
  let selectedSeed = seeds[0];
  // CLI-flavored queries prefer a seed under /cli/ when one is available.
  if (isCLIQuery(entrypoint)) {
    const cliSeed = seeds.find((s) => isCLIFile(s.filePath));
    if (cliSeed) selectedSeed = cliSeed;
  }
  let confidenceWarning = null;
  if (selectedSeed.similarity < LOW_CONFIDENCE_THRESHOLD) {
    const seedName = selectedSeed.name ?? "unknown";
    const seedFile = selectedSeed.filePath.split("/").pop() ?? selectedSeed.filePath;
    confidenceWarning = `No confident match for "${entrypoint}" \u2014 tracing nearest match: ${seedName} (${seedFile}:${selectedSeed.startLine}, similarity: ${selectedSeed.similarity.toFixed(2)})`;
  }
  const maxHops = opts?.maxHops ?? 3;
  const flowHops = await traceFlow(edgesTable, table, selectedSeed.id, { maxHops });
  const productionHops = flowHops.filter((hop) => !isTestFile(hop.filePath));
  // Same mapping as the exact path above (see NOTE at top).
  const hops = productionHops.map((hop) => {
    const asChunk = {
      id: hop.chunkId,
      filePath: hop.filePath,
      chunkType: "function",
      scope: null,
      name: hop.name,
      content: hop.content,
      startLine: hop.startLine,
      endLine: hop.endLine,
      similarity: 1
    };
    const compressed = compressChunk(asChunk);
    return {
      filePath: hop.filePath,
      name: hop.name,
      startLine: hop.startLine,
      content: compressed.content,
      callsFound: hop.callsFound.filter((s) => !STDLIB_SYMBOLS.has(s)),
      // TRACE-02
      hopDepth: hop.hopDepth
    };
  });
  const savings = await computeHopSavings(hops);
  return {
    hops,
    metadata: {
      seedChunkId: selectedSeed.id,
      totalHops: hops.length,
      localTasksPerformed: ["embed_query", "seed_search", "bfs_trace", "compress"],
      ...savings,
      confidenceWarning
    }
  };
}
|
|
1957
|
+
|
|
1958
|
+
// src/workflows/explainCodebase.ts
|
|
1959
|
+
import { readFile as readFile8 } from "fs/promises";
|
|
1960
|
+
import { resolve as resolve5, relative as relative3, dirname as dirname3 } from "path";
|
|
1961
|
+
/**
 * Decide whether a chunk represents exported (public) code: file-level
 * chunks always qualify; otherwise the first meaningful code line — after
 * skipping JSDoc blocks, compression-manifest markers, and blank lines —
 * must begin with `export `.
 * @param {{chunkType: string, content: string}} chunk
 * @returns {boolean}
 */
function isExportedChunk(chunk) {
  if (chunk.chunkType === "file") return true;
  let withinJsDoc = false;
  for (const rawLine of chunk.content.split("\n")) {
    const line = rawLine.trim();
    if (line.startsWith("/**")) {
      // A one-line /** ... */ opens and closes immediately.
      withinJsDoc = !line.endsWith("*/");
      continue;
    }
    if (withinJsDoc) {
      if (line.endsWith("*/")) withinJsDoc = false;
      continue;
    }
    const isManifestMarker = line.startsWith("// [compressed]") || line.startsWith("// Signature:") || line.startsWith("// [body stripped]");
    if (isManifestMarker || line.length === 0) continue;
    // First real code line decides the answer.
    return line.startsWith("export ");
  }
  return false;
}
|
|
1984
|
+
// Default semantic query used when the caller supplies no question; also
// the first of the architecture sweep queries below.
var FALLBACK_QUERY = "module structure and component responsibilities";
// Fixed set of queries runExplainCodebase embeds to sample the codebase
// from several architectural angles (structure, entry points, core logic,
// data/config) in a single pass.
var ARCHITECTURE_QUERIES = [
  FALLBACK_QUERY,
  "entry points, CLI commands, and main application flow",
  "core services, business logic, and data processing",
  "data models, types, schemas, and configuration"
];
|
|
1991
|
+
// Render a flat ASCII directory tree (├── / └── connectors) of the given
// absolute file paths, shown relative to rootDir. Directories are listed
// once, sorted, with their files indented beneath; root-level files get no
// directory header. Output is display-only (one nesting level of indent).
function buildDirectoryTree(filePaths, rootDir) {
  // Relative, deduplicated, sorted paths drive the whole rendering.
  const relativePaths = [
    ...new Set(filePaths.map((fp) => relative3(rootDir, fp)))
  ].sort();
  const byDir = /* @__PURE__ */ new Map();
  for (const rel of relativePaths) {
    const dir = dirname3(rel);
    if (!byDir.has(dir)) byDir.set(dir, []);
    byDir.get(dir).push(rel);
  }
  const lines = [];
  const dirs = [...byDir.keys()].sort();
  for (let di = 0; di < dirs.length; di++) {
    const dir = dirs[di];
    const files = byDir.get(dir);
    const isLastDir = di === dirs.length - 1;
    // "." (root-level files) gets no directory header line.
    if (dir !== ".") {
      lines.push(`${isLastDir ? "\u2514\u2500\u2500" : "\u251C\u2500\u2500"} ${dir}/`);
    }
    for (let fi = 0; fi < files.length; fi++) {
      const isLastFile = fi === files.length - 1;
      const fileName = files[fi].includes("/") ? files[fi].split("/").pop() : files[fi];
      const indent = dir !== "." ? " " : "";
      // A file closes its branch only when it is the last file of the last
      // directory (or a root-level file ending its own run).
      const isLast = isLastFile && (isLastDir || dir === ".");
      lines.push(`${indent}${isLast ? "\u2514\u2500\u2500" : "\u251C\u2500\u2500"} ${fileName}`);
    }
  }
  return lines.join("\n");
}
|
|
2020
|
+
// Produce an architecture overview of the indexed codebase entirely
// locally (cloudCallsMade: 0): embed a fixed query sweep (or the caller's
// question), vector-search and merge, keep exported chunks with tests
// deprioritized, enrich methods with parent classes, compress big chunks,
// group by module into markdown narratives, and prepend a directory tree.
// Metadata estimates token savings versus reading whole files.
async function runExplainCodebase(opts) {
  const profile = await readProfile();
  if (profile === null) {
    throw new Error("No profile found. Run 'brain-cache init' first.");
  }
  const running = await isOllamaRunning();
  if (!running) {
    throw new Error("Ollama is not running.");
  }
  const rootDir = resolve5(opts?.path ?? ".");
  const indexState = await readIndexState(rootDir);
  if (indexState === null) {
    throw new Error(
      `No index found at ${rootDir}. Run 'brain-cache index' first.`
    );
  }
  const db = await openDatabase(rootDir);
  const tableNames = await db.tableNames();
  if (!tableNames.includes("chunks")) {
    throw new Error("No chunks table found. Run 'brain-cache index' first.");
  }
  const table = await db.openTable("chunks");
  const userConfig = await loadUserConfig();
  // Per-call strategy overrides only when explicitly supplied.
  const toolOverride = {};
  if (opts?.limit !== void 0) toolOverride.limit = opts.limit;
  if (opts?.distanceThreshold !== void 0)
    toolOverride.distanceThreshold = opts.distanceThreshold;
  const strategy = resolveStrategy(
    "explore",
    userConfig,
    Object.keys(toolOverride).length > 0 ? toolOverride : void 0
  );
  // Exploration gets double the normal budget by default.
  const maxTokens = opts?.maxTokens ?? DEFAULT_TOKEN_BUDGET * 2;
  const customQuestion = opts?.question;
  const queries = customQuestion ? [customQuestion] : ARCHITECTURE_QUERIES;
  process.stderr.write(
    `brain-cache: explaining codebase (budget=${maxTokens} tokens, queries=${queries.length})
`
  );
  // One batched embed call for all queries; searches run in parallel.
  const { embeddings } = await embedBatchWithRetry(indexState.embeddingModel, queries);
  const allResults = await Promise.all(
    embeddings.map((vec) => searchChunks(table, vec, strategy))
  );
  const merged = allResults.flat();
  const deduped = deduplicateChunks(merged);
  // Full file list feeds the directory tree; failure just degrades the
  // tree to the retrieved files (see treeFilePaths below).
  let allFilePaths = [];
  try {
    const allRows = await table.query().toArray();
    allFilePaths = [...new Set(allRows.map((r) => r.file_path))].sort();
  } catch {
    allFilePaths = [];
  }
  // Stable partition: production chunks before test chunks.
  const sorted = [...deduped].sort((a, b) => {
    const aIsTest = /\/(tests?|__tests__|spec)\//i.test(a.filePath) || /\.(test|spec)\./i.test(a.filePath);
    const bIsTest = /\/(tests?|__tests__|spec)\//i.test(b.filePath) || /\.(test|spec)\./i.test(b.filePath);
    if (aIsTest === bIsTest) return 0;
    return aIsTest ? 1 : -1;
  });
  // Public API surface only — exported chunks (and file-level chunks).
  const exportedOnly = sorted.filter(isExportedChunk);
  const assembled = assembleContext(exportedOnly, { maxTokens });
  const enriched = await enrichWithParentClass(assembled.chunks, table, {
    maxTokens,
    currentTokens: assembled.tokenCount
  });
  // Compress only chunks above 500 tokens; smaller ones stay verbatim.
  const compressed = enriched.map((c) => {
    const tokens = countChunkTokens(c.content);
    return tokens > 500 ? compressChunk(c) : c;
  });
  const moduleGroups = groupChunksByModule(compressed, rootDir);
  const codeContent = formatModuleNarratives(moduleGroups);
  const treeFilePaths = allFilePaths.length > 0 ? allFilePaths : [...new Set(compressed.map((c) => c.filePath))];
  const nonTestPaths = treeFilePaths.filter(
    (fp) => !(/\/(tests?|__tests__|spec)\//i.test(fp) || /\.(test|spec)\./i.test(fp))
  );
  const directoryTree = buildDirectoryTree(nonTestPaths, rootDir);
  const content = [
    "## Directory Structure\n\n```\n" + directoryTree + "\n```",
    codeContent
  ].join("\n\n---\n\n");
  // Savings baseline: reading each involved file whole, plus per-tool-call
  // overhead (1 search + 1 read per file). Unreadable files are skipped.
  const uniqueFiles = [...new Set(compressed.map((c) => c.filePath))];
  let fileContentTokens = 0;
  for (const filePath of uniqueFiles) {
    try {
      const fileContent = await readFile8(filePath, "utf-8");
      fileContentTokens += countChunkTokens(fileContent);
    } catch {
    }
  }
  const toolCalls = 1 + uniqueFiles.length;
  const estimatedWithoutBraincache = fileContentTokens + toolCalls * TOOL_CALL_OVERHEAD_TOKENS;
  const tokensSent = assembled.tokenCount;
  const reductionPct = estimatedWithoutBraincache > 0 ? Math.max(
    0,
    Math.round((1 - tokensSent / estimatedWithoutBraincache) * 100)
  ) : 0;
  return {
    content,
    chunks: compressed,
    metadata: {
      tokensSent,
      estimatedWithoutBraincache,
      reductionPct,
      filesInContext: uniqueFiles.length,
      localTasksPerformed: [
        "embed_query",
        "vector_search",
        "dedup",
        "parent_enrich",
        "compress",
        "cohesion_group",
        "token_budget",
        "directory_tree"
      ],
      cloudCallsMade: 0
    }
  };
}
|
|
2137
|
+
|
|
2138
|
+
// src/workflows/buildContext.ts
|
|
2139
|
+
/**
 * Split an identifier on camelCase boundaries (including acronym runs like
 * "HTTPServer") into lowercase sub-tokens of at least two characters.
 * @param {string} name
 * @returns {string[]}
 */
function splitCamelCase2(name) {
  const lowerUpper = name.replace(/([a-z])([A-Z])/g, "$1 $2");
  const acronymSplit = lowerUpper.replace(/([A-Z]+)([A-Z][a-z])/g, "$1 $2");
  const pieces = acronymSplit.toLowerCase().split(/\s+/);
  return pieces.filter((piece) => piece.length >= 2);
}
|
|
2142
|
+
/**
 * Lowercase a query and split it on whitespace/punctuation, keeping only
 * tokens of three or more characters.
 * @param {string} query
 * @returns {string[]}
 */
function extractQueryTokens2(query) {
  const separators = /[\s.,;:!?'"()\[\]{}/\\]+/;
  const parts = query.toLowerCase().split(separators);
  return parts.filter((part) => part.length >= 3);
}
|
|
2145
|
+
function isPrimaryMatch(chunk, queryTokens) {
|
|
2146
|
+
if (queryTokens.length === 0) return false;
|
|
2147
|
+
const fileName = chunk.filePath.split("/").pop()?.toLowerCase() ?? "";
|
|
2148
|
+
const fileNameStem = fileName.replace(/\.[^.]+$/, "");
|
|
2149
|
+
const originalName = chunk.name ?? "";
|
|
2150
|
+
const chunkName = originalName.toLowerCase();
|
|
2151
|
+
if (chunkName.length > 0 && queryTokens.some((t) => t === chunkName)) return true;
|
|
2152
|
+
const subTokens = originalName.length > 0 ? splitCamelCase2(originalName) : [];
|
|
2153
|
+
if (subTokens.length > 1 && subTokens.every((sub) => queryTokens.some((t) => t.includes(sub) || sub.includes(t)))) return true;
|
|
2154
|
+
if (fileNameStem.length > 0 && queryTokens.some((t) => t === fileNameStem)) return true;
|
|
2155
|
+
return false;
|
|
2156
|
+
}
|
|
2157
|
+
var TEST_FILE_PATTERNS2 = [".test.", ".spec.", "/__tests__/", "/tests/"];
|
|
2158
|
+
function isTestFile2(filePath) {
|
|
2159
|
+
return TEST_FILE_PATTERNS2.some((p) => filePath.includes(p));
|
|
2160
|
+
}
|
|
2161
|
+
var CONFIG_FILE_PATTERNS = [
|
|
2162
|
+
/vitest\.config\./,
|
|
2163
|
+
/tsup\.config\./,
|
|
2164
|
+
/tsconfig.*\.json$/,
|
|
2165
|
+
/jest\.config\./,
|
|
2166
|
+
/eslint\.config\./,
|
|
2167
|
+
/\.eslintrc/
|
|
2168
|
+
];
|
|
2169
|
+
function isConfigFile(filePath) {
|
|
2170
|
+
const fileName = filePath.split("/").pop() ?? "";
|
|
2171
|
+
return CONFIG_FILE_PATTERNS.some((p) => p.test(fileName));
|
|
2172
|
+
}
|
|
1095
2173
|
async function runBuildContext(query, opts) {
|
|
1096
2174
|
const profile = await readProfile();
|
|
1097
2175
|
if (profile === null) {
|
|
@@ -1101,7 +2179,7 @@ async function runBuildContext(query, opts) {
|
|
|
1101
2179
|
if (!running) {
|
|
1102
2180
|
throw new Error("Ollama is not running. Start it with 'ollama serve' or run 'brain-cache init'.");
|
|
1103
2181
|
}
|
|
1104
|
-
const rootDir =
|
|
2182
|
+
const rootDir = resolve6(opts?.path ?? ".");
|
|
1105
2183
|
const indexState = await readIndexState(rootDir);
|
|
1106
2184
|
if (indexState === null) {
|
|
1107
2185
|
throw new Error(`No index found at ${rootDir}. Run 'brain-cache index' first.`);
|
|
@@ -1112,27 +2190,94 @@ async function runBuildContext(query, opts) {
|
|
|
1112
2190
|
throw new Error("No chunks table found. Run 'brain-cache index' first.");
|
|
1113
2191
|
}
|
|
1114
2192
|
const table = await db.openTable("chunks");
|
|
1115
|
-
const
|
|
1116
|
-
const
|
|
1117
|
-
limit: opts?.limit ?? RETRIEVAL_STRATEGIES[intent].limit,
|
|
1118
|
-
distanceThreshold: RETRIEVAL_STRATEGIES[intent].distanceThreshold
|
|
1119
|
-
};
|
|
2193
|
+
const hasEdges = tableNames.includes("edges");
|
|
2194
|
+
const mode = classifyRetrievalMode(query);
|
|
1120
2195
|
const maxTokens = opts?.maxTokens ?? DEFAULT_TOKEN_BUDGET;
|
|
1121
2196
|
process.stderr.write(
|
|
1122
|
-
`brain-cache: building context (intent=${
|
|
2197
|
+
`brain-cache: building context (intent=${mode}, budget=${maxTokens} tokens)
|
|
1123
2198
|
`
|
|
1124
2199
|
);
|
|
1125
|
-
const
|
|
1126
|
-
const
|
|
1127
|
-
|
|
1128
|
-
|
|
1129
|
-
|
|
1130
|
-
|
|
2200
|
+
const userConfig = await loadUserConfig();
|
|
2201
|
+
const strategy = resolveStrategy(
|
|
2202
|
+
mode,
|
|
2203
|
+
userConfig,
|
|
2204
|
+
opts?.limit !== void 0 ? { limit: opts.limit } : void 0
|
|
2205
|
+
);
|
|
2206
|
+
let finalChunks;
|
|
2207
|
+
let finalContent;
|
|
2208
|
+
let finalTokenCount;
|
|
2209
|
+
let localTasksPerformed;
|
|
2210
|
+
if (mode === "trace" && hasEdges) {
|
|
2211
|
+
const traceResult = await runTraceFlow(query, {
|
|
2212
|
+
maxHops: 3,
|
|
2213
|
+
path: opts?.path,
|
|
2214
|
+
limit: strategy.limit,
|
|
2215
|
+
distanceThreshold: strategy.distanceThreshold
|
|
2216
|
+
});
|
|
2217
|
+
const traceChunks = traceResult.hops.map((hop, i) => ({
|
|
2218
|
+
id: `trace-hop-${i}`,
|
|
2219
|
+
filePath: hop.filePath,
|
|
2220
|
+
chunkType: "function",
|
|
2221
|
+
scope: null,
|
|
2222
|
+
name: hop.name,
|
|
2223
|
+
content: hop.content,
|
|
2224
|
+
startLine: hop.startLine,
|
|
2225
|
+
endLine: 0,
|
|
2226
|
+
similarity: 1 - hop.hopDepth * 0.1
|
|
2227
|
+
}));
|
|
2228
|
+
const assembled = assembleContext(traceChunks, { maxTokens });
|
|
2229
|
+
const groups = groupChunksByFile(assembled.chunks);
|
|
2230
|
+
finalContent = formatGroupedContext(groups);
|
|
2231
|
+
finalChunks = assembled.chunks;
|
|
2232
|
+
finalTokenCount = assembled.tokenCount;
|
|
2233
|
+
localTasksPerformed = traceResult.metadata.localTasksPerformed;
|
|
2234
|
+
} else if (mode === "explore") {
|
|
2235
|
+
const exploreResult = await runExplainCodebase({
|
|
2236
|
+
question: query,
|
|
2237
|
+
maxTokens,
|
|
2238
|
+
path: opts?.path,
|
|
2239
|
+
limit: strategy.limit,
|
|
2240
|
+
distanceThreshold: strategy.distanceThreshold
|
|
2241
|
+
});
|
|
2242
|
+
finalContent = exploreResult.content;
|
|
2243
|
+
finalChunks = exploreResult.chunks;
|
|
2244
|
+
finalTokenCount = exploreResult.metadata.tokensSent;
|
|
2245
|
+
localTasksPerformed = exploreResult.metadata.localTasksPerformed;
|
|
2246
|
+
} else {
|
|
2247
|
+
if (mode === "trace" && !hasEdges) {
|
|
2248
|
+
process.stderr.write(`brain-cache: No edges table found, falling back to explore mode
|
|
2249
|
+
`);
|
|
2250
|
+
}
|
|
2251
|
+
const { embeddings: vectors } = await embedBatchWithRetry(indexState.embeddingModel, [query]);
|
|
2252
|
+
const queryVector = vectors[0];
|
|
2253
|
+
const results = await searchChunks(table, queryVector, strategy, query);
|
|
2254
|
+
const deduped = deduplicateChunks(results);
|
|
2255
|
+
const assembled = assembleContext(deduped, { maxTokens });
|
|
2256
|
+
const enriched = await enrichWithParentClass(assembled.chunks, table, { maxTokens, currentTokens: assembled.tokenCount });
|
|
2257
|
+
const withoutPeripheral = enriched.filter((chunk) => !isTestFile2(chunk.filePath) && !isConfigFile(chunk.filePath));
|
|
2258
|
+
const queryTokens = extractQueryTokens2(query);
|
|
2259
|
+
const compressed = withoutPeripheral.map(
|
|
2260
|
+
(chunk) => isPrimaryMatch(chunk, queryTokens) ? chunk : compressChunk(chunk)
|
|
2261
|
+
);
|
|
2262
|
+
const groups = groupChunksByFile(compressed);
|
|
2263
|
+
finalContent = formatGroupedContext(groups);
|
|
2264
|
+
finalChunks = compressed;
|
|
2265
|
+
finalTokenCount = assembled.tokenCount;
|
|
2266
|
+
localTasksPerformed = ["embed_query", "vector_search", "dedup", "parent_enrich", "drop_peripheral", "compress", "cohesion_group", "token_budget"];
|
|
2267
|
+
}
|
|
2268
|
+
const BODY_STRIPPED_MARKER2 = "// [body stripped]";
|
|
2269
|
+
const filesWithUncompressedContent = new Set(
|
|
2270
|
+
finalChunks.filter((c) => !c.content.includes(BODY_STRIPPED_MARKER2)).map((c) => c.filePath)
|
|
2271
|
+
);
|
|
2272
|
+
const uniqueFiles = [...new Set(finalChunks.map((c) => c.filePath))];
|
|
1131
2273
|
const numFiles = uniqueFiles.length;
|
|
1132
2274
|
let fileContentTokens = 0;
|
|
1133
2275
|
for (const filePath of uniqueFiles) {
|
|
2276
|
+
if (!filesWithUncompressedContent.has(filePath)) {
|
|
2277
|
+
continue;
|
|
2278
|
+
}
|
|
1134
2279
|
try {
|
|
1135
|
-
const fileContent = await
|
|
2280
|
+
const fileContent = await readFile9(filePath, "utf-8");
|
|
1136
2281
|
fileContentTokens += countChunkTokens(fileContent);
|
|
1137
2282
|
} catch {
|
|
1138
2283
|
}
|
|
@@ -1140,29 +2285,82 @@ async function runBuildContext(query, opts) {
|
|
|
1140
2285
|
const toolCalls = 1 + numFiles;
|
|
1141
2286
|
const toolCallOverhead = toolCalls * TOOL_CALL_OVERHEAD_TOKENS;
|
|
1142
2287
|
const estimatedWithoutBraincache = fileContentTokens + toolCallOverhead;
|
|
1143
|
-
const reductionPct = estimatedWithoutBraincache > 0 ? Math.max(0, Math.round((1 -
|
|
2288
|
+
const reductionPct = estimatedWithoutBraincache > 0 ? Math.max(0, Math.round((1 - finalTokenCount / estimatedWithoutBraincache) * 100)) : 0;
|
|
1144
2289
|
const result = {
|
|
1145
|
-
content:
|
|
1146
|
-
chunks:
|
|
2290
|
+
content: finalContent,
|
|
2291
|
+
chunks: finalChunks,
|
|
1147
2292
|
metadata: {
|
|
1148
|
-
tokensSent:
|
|
2293
|
+
tokensSent: finalTokenCount,
|
|
1149
2294
|
estimatedWithoutBraincache,
|
|
1150
2295
|
reductionPct,
|
|
1151
2296
|
filesInContext: numFiles,
|
|
1152
|
-
localTasksPerformed
|
|
2297
|
+
localTasksPerformed,
|
|
1153
2298
|
cloudCallsMade: 0
|
|
1154
2299
|
}
|
|
1155
2300
|
};
|
|
1156
2301
|
process.stderr.write(
|
|
1157
|
-
`brain-cache: context assembled (${
|
|
2302
|
+
`brain-cache: context assembled (${finalTokenCount} tokens, ${reductionPct}% reduction, ${finalChunks.length} chunks)
|
|
1158
2303
|
`
|
|
1159
2304
|
);
|
|
1160
2305
|
return result;
|
|
1161
2306
|
}
|
|
1162
2307
|
|
|
2308
|
+
// src/services/sessionStats.ts
|
|
2309
|
+
import { readFile as readFile10, writeFile as writeFile3, rename, mkdir as mkdir3 } from "fs/promises";
|
|
2310
|
+
import { join as join4 } from "path";
|
|
2311
|
+
var log12 = childLogger("sessionStats");
|
|
2312
|
+
var SESSION_STATS_PATH = join4(GLOBAL_CONFIG_DIR, SESSION_STATS_FILENAME);
|
|
2313
|
+
var STATS_TTL_MS = 2 * 60 * 60 * 1e3;
|
|
2314
|
+
var _statsMutex = Promise.resolve();
|
|
2315
|
+
async function _readStats() {
|
|
2316
|
+
try {
|
|
2317
|
+
const raw = await readFile10(SESSION_STATS_PATH, "utf-8");
|
|
2318
|
+
return JSON.parse(raw);
|
|
2319
|
+
} catch {
|
|
2320
|
+
return null;
|
|
2321
|
+
}
|
|
2322
|
+
}
|
|
2323
|
+
async function _doAccumulate(delta, ttlMs) {
|
|
2324
|
+
await mkdir3(GLOBAL_CONFIG_DIR, { recursive: true });
|
|
2325
|
+
let effectiveTtlMs;
|
|
2326
|
+
if (ttlMs !== void 0) {
|
|
2327
|
+
effectiveTtlMs = ttlMs;
|
|
2328
|
+
} else {
|
|
2329
|
+
try {
|
|
2330
|
+
const config = await loadUserConfig();
|
|
2331
|
+
const ttlHours = config.stats?.ttlHours;
|
|
2332
|
+
effectiveTtlMs = ttlHours !== void 0 ? ttlHours * 60 * 60 * 1e3 : STATS_TTL_MS;
|
|
2333
|
+
} catch {
|
|
2334
|
+
effectiveTtlMs = STATS_TTL_MS;
|
|
2335
|
+
}
|
|
2336
|
+
}
|
|
2337
|
+
const existing = await _readStats();
|
|
2338
|
+
const now = Date.now();
|
|
2339
|
+
const isExpired = existing === null || now - Date.parse(existing.lastUpdatedAt) > effectiveTtlMs;
|
|
2340
|
+
const base = isExpired ? { tokensSent: 0, estimatedWithoutBraincache: 0, callCount: 0 } : existing;
|
|
2341
|
+
const updated = {
|
|
2342
|
+
tokensSent: base.tokensSent + delta.tokensSent,
|
|
2343
|
+
estimatedWithoutBraincache: base.estimatedWithoutBraincache + delta.estimatedWithoutBraincache,
|
|
2344
|
+
callCount: base.callCount + 1,
|
|
2345
|
+
lastUpdatedAt: new Date(now).toISOString()
|
|
2346
|
+
};
|
|
2347
|
+
const tmpPath = SESSION_STATS_PATH + ".tmp";
|
|
2348
|
+
await writeFile3(tmpPath, JSON.stringify(updated, null, 2), "utf-8");
|
|
2349
|
+
await rename(tmpPath, SESSION_STATS_PATH);
|
|
2350
|
+
}
|
|
2351
|
+
function accumulateStats(delta, ttlMs) {
|
|
2352
|
+
const next = _statsMutex.then(
|
|
2353
|
+
() => _doAccumulate(delta, ttlMs).catch((err) => {
|
|
2354
|
+
log12.warn({ err }, "stats accumulation failed");
|
|
2355
|
+
})
|
|
2356
|
+
);
|
|
2357
|
+
_statsMutex = next.then(() => void 0, () => void 0);
|
|
2358
|
+
return next;
|
|
2359
|
+
}
|
|
2360
|
+
|
|
1163
2361
|
// src/mcp/index.ts
|
|
1164
|
-
var version = "0.
|
|
1165
|
-
var
|
|
2362
|
+
var version = true ? "3.0.0" : "dev";
|
|
2363
|
+
var log13 = childLogger("mcp");
|
|
1166
2364
|
var server = new McpServer({ name: "brain-cache", version });
|
|
1167
2365
|
server.registerTool(
|
|
1168
2366
|
"index_repo",
|
|
@@ -1183,7 +2381,7 @@ server.registerTool(
|
|
|
1183
2381
|
content: [
|
|
1184
2382
|
{
|
|
1185
2383
|
type: "text",
|
|
1186
|
-
text: "No capability profile found. Run 'brain-cache init' first."
|
|
2384
|
+
text: formatErrorEnvelope("No capability profile found.", "Run 'brain-cache init' first.")
|
|
1187
2385
|
}
|
|
1188
2386
|
]
|
|
1189
2387
|
};
|
|
@@ -1195,14 +2393,14 @@ server.registerTool(
|
|
|
1195
2393
|
content: [
|
|
1196
2394
|
{
|
|
1197
2395
|
type: "text",
|
|
1198
|
-
text: "Ollama is not running. Start it with 'ollama serve'."
|
|
2396
|
+
text: formatErrorEnvelope("Ollama is not running.", "Start it with 'ollama serve'.")
|
|
1199
2397
|
}
|
|
1200
2398
|
]
|
|
1201
2399
|
};
|
|
1202
2400
|
}
|
|
1203
2401
|
try {
|
|
1204
2402
|
await runIndex(path, { force });
|
|
1205
|
-
const resolvedPath =
|
|
2403
|
+
const resolvedPath = resolve7(path);
|
|
1206
2404
|
const indexState = await readIndexState(resolvedPath);
|
|
1207
2405
|
const result = {
|
|
1208
2406
|
status: "ok",
|
|
@@ -1211,7 +2409,7 @@ server.registerTool(
|
|
|
1211
2409
|
chunkCount: indexState?.chunkCount ?? null
|
|
1212
2410
|
};
|
|
1213
2411
|
return {
|
|
1214
|
-
content: [{ type: "text", text:
|
|
2412
|
+
content: [{ type: "text", text: formatIndexResult(result) }]
|
|
1215
2413
|
};
|
|
1216
2414
|
} catch (err) {
|
|
1217
2415
|
return {
|
|
@@ -1219,17 +2417,48 @@ server.registerTool(
|
|
|
1219
2417
|
content: [
|
|
1220
2418
|
{
|
|
1221
2419
|
type: "text",
|
|
1222
|
-
text: `Indexing failed: ${err instanceof Error ? err.message : String(err)}`
|
|
2420
|
+
text: formatErrorEnvelope(`Indexing failed: ${err instanceof Error ? err.message : String(err)}`)
|
|
1223
2421
|
}
|
|
1224
2422
|
]
|
|
1225
2423
|
};
|
|
1226
2424
|
}
|
|
1227
2425
|
}
|
|
1228
2426
|
);
|
|
2427
|
+
function buildSearchResponse(chunks, query) {
|
|
2428
|
+
const filesInContext = new Set(chunks.map((c) => c.filePath)).size;
|
|
2429
|
+
const tokensSent = Math.round(chunks.reduce((sum, c) => sum + c.content.length, 0) / 4);
|
|
2430
|
+
const estimatedWithout = tokensSent * 3;
|
|
2431
|
+
const reductionPct = estimatedWithout > 0 ? Math.round((1 - tokensSent / estimatedWithout) * 100) : 0;
|
|
2432
|
+
const savings = formatTokenSavings({ tokensSent, estimatedWithout, reductionPct, filesInContext });
|
|
2433
|
+
const pipeline = formatPipelineLabel(["embed", "search", "dedup"]);
|
|
2434
|
+
const footer = `---
|
|
2435
|
+
${savings}
|
|
2436
|
+
Pipeline: ${pipeline}`;
|
|
2437
|
+
const summary = `Found ${chunks.length} result${chunks.length !== 1 ? "s" : ""} for "${query}".`;
|
|
2438
|
+
return {
|
|
2439
|
+
content: [{ type: "text", text: formatToolResponse(summary, `${formatSearchResults(chunks)}
|
|
2440
|
+
|
|
2441
|
+
${footer}`) }]
|
|
2442
|
+
};
|
|
2443
|
+
}
|
|
2444
|
+
function buildContextResponse(result, query) {
|
|
2445
|
+
const { tokensSent, estimatedWithoutBraincache, reductionPct, filesInContext, localTasksPerformed } = result.metadata;
|
|
2446
|
+
const savings = formatTokenSavings({ tokensSent, estimatedWithout: estimatedWithoutBraincache, reductionPct, filesInContext });
|
|
2447
|
+
const pipeline = formatPipelineLabel(localTasksPerformed);
|
|
2448
|
+
const footer = `---
|
|
2449
|
+
${savings}
|
|
2450
|
+
Pipeline: ${pipeline}`;
|
|
2451
|
+
const summary = `Context assembled for "${query}".`;
|
|
2452
|
+
return {
|
|
2453
|
+
content: [{ type: "text", text: formatToolResponse(summary, `${formatContext(result)}
|
|
2454
|
+
|
|
2455
|
+
${footer}`) }]
|
|
2456
|
+
};
|
|
2457
|
+
}
|
|
1229
2458
|
server.registerTool(
|
|
1230
2459
|
"search_codebase",
|
|
1231
2460
|
{
|
|
1232
|
-
description: "Locate specific code \u2014 functions, symbols, definitions, implementations, and type declarations \u2014 using semantic search that finds code by meaning, not just keyword match. This is a locator tool \u2014 it finds WHERE code lives. For understanding HOW code works or answering questions that span multiple files, use build_context instead. Requires index_repo to have been run first.",
|
|
2461
|
+
description: "Locate specific code \u2014 functions, symbols, definitions, implementations, and type declarations \u2014 using semantic search that finds code by meaning, not just keyword match. This is a locator tool \u2014 it finds WHERE code lives. For understanding HOW code works or answering questions that span multiple files, use build_context instead. Requires index_repo to have been run first. Do NOT use this tool to understand how code works or answer behavioral questions \u2014 use build_context once you have located the symbol.",
|
|
1233
2462
|
inputSchema: {
|
|
1234
2463
|
query: z2.string().describe("Natural language query string"),
|
|
1235
2464
|
limit: z2.number().int().min(1).max(50).optional().describe("Max results (default 10)"),
|
|
@@ -1244,7 +2473,7 @@ server.registerTool(
|
|
|
1244
2473
|
content: [
|
|
1245
2474
|
{
|
|
1246
2475
|
type: "text",
|
|
1247
|
-
text: "No capability profile found. Run 'brain-cache init' first."
|
|
2476
|
+
text: formatErrorEnvelope("No capability profile found.", "Run 'brain-cache init' first.")
|
|
1248
2477
|
}
|
|
1249
2478
|
]
|
|
1250
2479
|
};
|
|
@@ -1256,32 +2485,34 @@ server.registerTool(
|
|
|
1256
2485
|
content: [
|
|
1257
2486
|
{
|
|
1258
2487
|
type: "text",
|
|
1259
|
-
text: "Ollama is not running. Start it with 'ollama serve'."
|
|
2488
|
+
text: formatErrorEnvelope("Ollama is not running.", "Start it with 'ollama serve'.")
|
|
1260
2489
|
}
|
|
1261
2490
|
]
|
|
1262
2491
|
};
|
|
1263
2492
|
}
|
|
1264
2493
|
try {
|
|
1265
2494
|
const chunks = await runSearch(query, { limit, path });
|
|
1266
|
-
|
|
1267
|
-
|
|
1268
|
-
};
|
|
2495
|
+
const tokensSent = Math.round(chunks.reduce((sum, c) => sum + c.content.length, 0) / 4);
|
|
2496
|
+
const estimatedWithoutBraincache = tokensSent * 3;
|
|
2497
|
+
accumulateStats({ tokensSent, estimatedWithoutBraincache }).catch((err) => log13.warn({ err }, "stats accumulation failed"));
|
|
2498
|
+
return buildSearchResponse(chunks, query);
|
|
1269
2499
|
} catch (err) {
|
|
1270
2500
|
if (err instanceof Error && err.message.includes("No index found")) {
|
|
1271
|
-
const resolvedPath =
|
|
2501
|
+
const resolvedPath = resolve7(path ?? ".");
|
|
1272
2502
|
await runIndex(resolvedPath);
|
|
1273
2503
|
try {
|
|
1274
2504
|
const chunks = await runSearch(query, { limit, path });
|
|
1275
|
-
|
|
1276
|
-
|
|
1277
|
-
};
|
|
2505
|
+
const tokensSent = Math.round(chunks.reduce((sum, c) => sum + c.content.length, 0) / 4);
|
|
2506
|
+
const estimatedWithoutBraincache = tokensSent * 3;
|
|
2507
|
+
accumulateStats({ tokensSent, estimatedWithoutBraincache }).catch((err2) => log13.warn({ err: err2 }, "stats accumulation failed"));
|
|
2508
|
+
return buildSearchResponse(chunks, query);
|
|
1278
2509
|
} catch (retryErr) {
|
|
1279
2510
|
return {
|
|
1280
2511
|
isError: true,
|
|
1281
2512
|
content: [
|
|
1282
2513
|
{
|
|
1283
2514
|
type: "text",
|
|
1284
|
-
text: `Search failed after auto-index: ${retryErr instanceof Error ? retryErr.message : String(retryErr)}`
|
|
2515
|
+
text: formatErrorEnvelope(`Search failed after auto-index: ${retryErr instanceof Error ? retryErr.message : String(retryErr)}`)
|
|
1285
2516
|
}
|
|
1286
2517
|
]
|
|
1287
2518
|
};
|
|
@@ -1292,7 +2523,7 @@ server.registerTool(
|
|
|
1292
2523
|
content: [
|
|
1293
2524
|
{
|
|
1294
2525
|
type: "text",
|
|
1295
|
-
text: `Search failed: ${err instanceof Error ? err.message : String(err)}`
|
|
2526
|
+
text: formatErrorEnvelope(`Search failed: ${err instanceof Error ? err.message : String(err)}`)
|
|
1296
2527
|
}
|
|
1297
2528
|
]
|
|
1298
2529
|
};
|
|
@@ -1302,7 +2533,7 @@ server.registerTool(
|
|
|
1302
2533
|
server.registerTool(
|
|
1303
2534
|
"build_context",
|
|
1304
2535
|
{
|
|
1305
|
-
description: "
|
|
2536
|
+
description: "Use this tool when answering questions like 'how does X work', 'what does this function do', or any question requiring understanding of specific code behavior across multiple files. Retrieves semantically relevant code across the entire repo, deduplicates, and assembles a token-budgeted context block \u2014 more accurate and efficient than reading files individually or relying on memory. Use this before answering to ensure your response is grounded in actual code rather than assumptions. Ideal for explaining how systems work, understanding workflows and data flow, answering code behavior questions, multi-file reasoning, and debugging unfamiliar code paths. Do NOT use this tool when you need to trace a call path across files \u2014 use trace_flow instead. Do NOT use this tool for architecture overviews \u2014 use explain_codebase instead. Requires index_repo to have been run first.",
|
|
1306
2537
|
inputSchema: {
|
|
1307
2538
|
query: z2.string().describe("Natural language query or question"),
|
|
1308
2539
|
maxTokens: z2.number().int().min(100).max(1e5).optional().describe("Token budget for assembled context (default 4096)"),
|
|
@@ -1317,7 +2548,7 @@ server.registerTool(
|
|
|
1317
2548
|
content: [
|
|
1318
2549
|
{
|
|
1319
2550
|
type: "text",
|
|
1320
|
-
text: "No capability profile found. Run 'brain-cache init' first."
|
|
2551
|
+
text: formatErrorEnvelope("No capability profile found.", "Run 'brain-cache init' first.")
|
|
1321
2552
|
}
|
|
1322
2553
|
]
|
|
1323
2554
|
};
|
|
@@ -1329,38 +2560,36 @@ server.registerTool(
|
|
|
1329
2560
|
content: [
|
|
1330
2561
|
{
|
|
1331
2562
|
type: "text",
|
|
1332
|
-
text: "Ollama is not running. Start it with 'ollama serve'."
|
|
2563
|
+
text: formatErrorEnvelope("Ollama is not running.", "Start it with 'ollama serve'.")
|
|
1333
2564
|
}
|
|
1334
2565
|
]
|
|
1335
2566
|
};
|
|
1336
2567
|
}
|
|
1337
2568
|
try {
|
|
1338
2569
|
const result = await runBuildContext(query, { maxTokens, path });
|
|
1339
|
-
|
|
1340
|
-
|
|
1341
|
-
|
|
1342
|
-
|
|
1343
|
-
|
|
1344
|
-
};
|
|
2570
|
+
accumulateStats({
|
|
2571
|
+
tokensSent: result.metadata.tokensSent,
|
|
2572
|
+
estimatedWithoutBraincache: result.metadata.estimatedWithoutBraincache
|
|
2573
|
+
}).catch((err) => log13.warn({ err }, "stats accumulation failed"));
|
|
2574
|
+
return buildContextResponse(result, query);
|
|
1345
2575
|
} catch (err) {
|
|
1346
2576
|
if (err instanceof Error && err.message.includes("No index found")) {
|
|
1347
|
-
const resolvedPath =
|
|
2577
|
+
const resolvedPath = resolve7(path ?? ".");
|
|
1348
2578
|
await runIndex(resolvedPath);
|
|
1349
2579
|
try {
|
|
1350
2580
|
const result = await runBuildContext(query, { maxTokens, path });
|
|
1351
|
-
|
|
1352
|
-
|
|
1353
|
-
|
|
1354
|
-
|
|
1355
|
-
|
|
1356
|
-
};
|
|
2581
|
+
accumulateStats({
|
|
2582
|
+
tokensSent: result.metadata.tokensSent,
|
|
2583
|
+
estimatedWithoutBraincache: result.metadata.estimatedWithoutBraincache
|
|
2584
|
+
}).catch((err2) => log13.warn({ err: err2 }, "stats accumulation failed"));
|
|
2585
|
+
return buildContextResponse(result, query);
|
|
1357
2586
|
} catch (retryErr) {
|
|
1358
2587
|
return {
|
|
1359
2588
|
isError: true,
|
|
1360
2589
|
content: [
|
|
1361
2590
|
{
|
|
1362
2591
|
type: "text",
|
|
1363
|
-
text: `Context build failed after auto-index: ${retryErr instanceof Error ? retryErr.message : String(retryErr)}`
|
|
2592
|
+
text: formatErrorEnvelope(`Context build failed after auto-index: ${retryErr instanceof Error ? retryErr.message : String(retryErr)}`)
|
|
1364
2593
|
}
|
|
1365
2594
|
]
|
|
1366
2595
|
};
|
|
@@ -1371,7 +2600,7 @@ server.registerTool(
|
|
|
1371
2600
|
content: [
|
|
1372
2601
|
{
|
|
1373
2602
|
type: "text",
|
|
1374
|
-
text: `Context build failed: ${err instanceof Error ? err.message : String(err)}`
|
|
2603
|
+
text: formatErrorEnvelope(`Context build failed: ${err instanceof Error ? err.message : String(err)}`)
|
|
1375
2604
|
}
|
|
1376
2605
|
]
|
|
1377
2606
|
};
|
|
@@ -1390,7 +2619,7 @@ server.registerTool(
|
|
|
1390
2619
|
},
|
|
1391
2620
|
async ({ path: projectPath }) => {
|
|
1392
2621
|
try {
|
|
1393
|
-
const rootDir =
|
|
2622
|
+
const rootDir = resolve7(projectPath ?? ".");
|
|
1394
2623
|
const profile = await readProfile();
|
|
1395
2624
|
const installed = await isOllamaInstalled();
|
|
1396
2625
|
const running = installed ? await isOllamaRunning() : false;
|
|
@@ -1412,7 +2641,7 @@ server.registerTool(
|
|
|
1412
2641
|
vramTier: live.vramTier
|
|
1413
2642
|
};
|
|
1414
2643
|
return {
|
|
1415
|
-
content: [{ type: "text", text:
|
|
2644
|
+
content: [{ type: "text", text: formatDoctorOutput(health) }]
|
|
1416
2645
|
};
|
|
1417
2646
|
} catch (err) {
|
|
1418
2647
|
return {
|
|
@@ -1420,17 +2649,108 @@ server.registerTool(
|
|
|
1420
2649
|
content: [
|
|
1421
2650
|
{
|
|
1422
2651
|
type: "text",
|
|
1423
|
-
text: `Doctor failed: ${err instanceof Error ? err.message : String(err)}`
|
|
2652
|
+
text: formatErrorEnvelope(`Doctor failed: ${err instanceof Error ? err.message : String(err)}`)
|
|
1424
2653
|
}
|
|
1425
2654
|
]
|
|
1426
2655
|
};
|
|
1427
2656
|
}
|
|
1428
2657
|
}
|
|
1429
2658
|
);
|
|
2659
|
+
server.registerTool(
|
|
2660
|
+
"trace_flow",
|
|
2661
|
+
{
|
|
2662
|
+
description: 'Trace call paths from an entrypoint symbol. Returns a structured hops[] array showing which functions are called in sequence, their file locations, and what they call next. Use this instead of build_context when asked to trace how a function call propagates through the codebase, e.g. "how does indexing flow from CLI to LanceDB". Requires index_repo to have been run first. Do NOT use this tool when the question is about how code works or what a function does \u2014 use build_context instead.',
|
|
2663
|
+
inputSchema: {
|
|
2664
|
+
entrypoint: z2.string().describe('Natural language description of the starting function or entry point to trace from, e.g. "runBuildContext workflow" or "how does indexing work"'),
|
|
2665
|
+
maxHops: z2.number().int().min(1).max(10).optional().describe("Maximum call depth to follow (default 3)"),
|
|
2666
|
+
path: z2.string().optional().describe("Project root directory (default: current directory)")
|
|
2667
|
+
}
|
|
2668
|
+
},
|
|
2669
|
+
async ({ entrypoint, maxHops, path }) => {
|
|
2670
|
+
const profile = await readProfile();
|
|
2671
|
+
if (!profile) {
|
|
2672
|
+
return { isError: true, content: [{ type: "text", text: formatErrorEnvelope("No capability profile found.", "Run 'brain-cache init' first.") }] };
|
|
2673
|
+
}
|
|
2674
|
+
const running = await isOllamaRunning();
|
|
2675
|
+
if (!running) {
|
|
2676
|
+
return { isError: true, content: [{ type: "text", text: formatErrorEnvelope("Ollama is not running.", "Start it with 'ollama serve'.") }] };
|
|
2677
|
+
}
|
|
2678
|
+
try {
|
|
2679
|
+
const result = await runTraceFlow(entrypoint, { maxHops, path });
|
|
2680
|
+
accumulateStats({
|
|
2681
|
+
tokensSent: result.metadata.tokensSent,
|
|
2682
|
+
estimatedWithoutBraincache: result.metadata.estimatedWithoutBraincache
|
|
2683
|
+
}).catch((err) => log13.warn({ err }, "stats accumulation failed"));
|
|
2684
|
+
const { tokensSent, estimatedWithoutBraincache, reductionPct, filesInContext } = result.metadata;
|
|
2685
|
+
const savings = formatTokenSavings({
|
|
2686
|
+
tokensSent,
|
|
2687
|
+
estimatedWithout: estimatedWithoutBraincache,
|
|
2688
|
+
reductionPct,
|
|
2689
|
+
filesInContext
|
|
2690
|
+
});
|
|
2691
|
+
const pipeline = formatPipelineLabel(result.metadata.localTasksPerformed);
|
|
2692
|
+
const footer = `---
|
|
2693
|
+
${savings}
|
|
2694
|
+
Pipeline: ${pipeline}`;
|
|
2695
|
+
const summary = `Traced ${result.hops.length} hop${result.hops.length !== 1 ? "s" : ""} from "${entrypoint}".`;
|
|
2696
|
+
const warningLine = result.metadata.confidenceWarning ? `Warning: ${result.metadata.confidenceWarning}
|
|
2697
|
+
|
|
2698
|
+
` : "";
|
|
2699
|
+
return {
|
|
2700
|
+
content: [{ type: "text", text: formatToolResponse(summary, `${warningLine}${formatTraceFlow(result)}
|
|
2701
|
+
|
|
2702
|
+
${footer}`) }]
|
|
2703
|
+
};
|
|
2704
|
+
} catch (err) {
|
|
2705
|
+
return { isError: true, content: [{ type: "text", text: formatErrorEnvelope(`trace_flow failed: ${err instanceof Error ? err.message : String(err)}`) }] };
|
|
2706
|
+
}
|
|
2707
|
+
}
|
|
2708
|
+
);
|
|
2709
|
+
server.registerTool(
|
|
2710
|
+
"explain_codebase",
|
|
2711
|
+
{
|
|
2712
|
+
description: "Get a high-level architecture overview of the indexed codebase. Returns module-grouped summaries describing what each part of the repo does. Use this instead of build_context when asked to explain the project architecture, understand the overall structure, or get oriented in a new codebase. No follow-up question required \u2014 works with just a project path. Requires index_repo to have been run first. Do NOT use this tool for questions about specific code behavior or how a particular function works \u2014 use build_context instead.",
|
|
2713
|
+
inputSchema: {
|
|
2714
|
+
question: z2.string().optional().describe('Optional focus question, e.g. "how is authentication structured". Defaults to a broad architecture overview.'),
|
|
2715
|
+
maxTokens: z2.number().int().min(100).max(1e5).optional().describe("Token budget for assembled context (default 4096)"),
|
|
2716
|
+
path: z2.string().optional().describe("Project root directory (default: current directory)")
|
|
2717
|
+
}
|
|
2718
|
+
},
|
|
2719
|
+
async ({ question, maxTokens, path }) => {
|
|
2720
|
+
const profile = await readProfile();
|
|
2721
|
+
if (!profile) {
|
|
2722
|
+
return { isError: true, content: [{ type: "text", text: formatErrorEnvelope("No capability profile found.", "Run 'brain-cache init' first.") }] };
|
|
2723
|
+
}
|
|
2724
|
+
const running = await isOllamaRunning();
|
|
2725
|
+
if (!running) {
|
|
2726
|
+
return { isError: true, content: [{ type: "text", text: formatErrorEnvelope("Ollama is not running.", "Start it with 'ollama serve'.") }] };
|
|
2727
|
+
}
|
|
2728
|
+
try {
|
|
2729
|
+
const result = await runExplainCodebase({ question, maxTokens, path });
|
|
2730
|
+
accumulateStats({
|
|
2731
|
+
tokensSent: result.metadata.tokensSent,
|
|
2732
|
+
estimatedWithoutBraincache: result.metadata.estimatedWithoutBraincache
|
|
2733
|
+
}).catch((err) => log13.warn({ err }, "stats accumulation failed"));
|
|
2734
|
+
const { tokensSent, estimatedWithoutBraincache, reductionPct, filesInContext, localTasksPerformed } = result.metadata;
|
|
2735
|
+
const savings = formatTokenSavings({ tokensSent, estimatedWithout: estimatedWithoutBraincache, reductionPct, filesInContext });
|
|
2736
|
+
const pipeline = formatPipelineLabel(localTasksPerformed);
|
|
2737
|
+
const footer = `---
|
|
2738
|
+
${savings}
|
|
2739
|
+
Pipeline: ${pipeline}`;
|
|
2740
|
+
const summary = `Architecture overview for ${path ?? "."}.`;
|
|
2741
|
+
const text = formatToolResponse(summary, `${formatContext(result)}
|
|
2742
|
+
|
|
2743
|
+
${footer}`);
|
|
2744
|
+
return { content: [{ type: "text", text }] };
|
|
2745
|
+
} catch (err) {
|
|
2746
|
+
return { isError: true, content: [{ type: "text", text: formatErrorEnvelope(`explain_codebase failed: ${err instanceof Error ? err.message : String(err)}`) }] };
|
|
2747
|
+
}
|
|
2748
|
+
}
|
|
2749
|
+
);
|
|
1430
2750
|
async function main() {
|
|
1431
2751
|
const transport = new StdioServerTransport();
|
|
1432
2752
|
await server.connect(transport);
|
|
1433
|
-
|
|
2753
|
+
log13.info("brain-cache MCP server running on stdio");
|
|
1434
2754
|
}
|
|
1435
2755
|
main().catch((error) => {
|
|
1436
2756
|
process.stderr.write(`Fatal: ${String(error)}
|