@leanlabsinnov/codegraph 0.1.0

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
@@ -0,0 +1,2446 @@
1
+ import {
2
+ createLlmRouter,
3
+ namespaceLabel
4
+ } from "./chunk-B2TIVKUB.js";
5
+ import {
6
+ GraphDb,
7
+ defaultDbPath
8
+ } from "./chunk-F5QKPRNW.js";
9
+ import {
10
+ DEFAULT_CONFIG,
11
+ LLM_PRESETS,
12
+ makeFileId,
13
+ makeNodeId
14
+ } from "./chunk-XGPZDCQ4.js";
15
+
16
+ // src/config-store.ts
17
+ import { mkdir, readFile, writeFile } from "fs/promises";
18
+ import os from "os";
19
+ import path from "path";
20
// Codegraph keeps its user-level settings under ~/.codegraph.
var CONFIG_DIR = path.join(os.homedir(), ".codegraph");
var CONFIG_FILE = path.join(CONFIG_DIR, "config.json");

/** Returns the absolute path of the on-disk config file. */
function configPath() {
  return CONFIG_FILE;
}
25
/**
 * Loads the user config from disk, overlaying it onto DEFAULT_CONFIG.
 * Returns DEFAULT_CONFIG when the file does not exist yet.
 * Throws a descriptive error when the file parses to something other than a
 * JSON object (previously e.g. a file containing `null` surfaced as an
 * opaque TypeError inside mergeConfig).
 */
async function loadConfig() {
  let raw;
  try {
    raw = await readFile(CONFIG_FILE, "utf8");
  } catch (err) {
    // First run: no config file yet — fall back to the built-in defaults.
    if (err.code === "ENOENT") {
      return DEFAULT_CONFIG;
    }
    throw err;
  }
  const parsed = JSON.parse(raw);
  if (parsed === null || typeof parsed !== "object" || Array.isArray(parsed)) {
    throw new Error(`Invalid config at ${CONFIG_FILE}: expected a JSON object`);
  }
  return mergeConfig(DEFAULT_CONFIG, parsed);
}
37
/** Persists the config as pretty-printed JSON (plus trailing newline), creating ~/.codegraph if needed. */
async function saveConfig(config) {
  await mkdir(CONFIG_DIR, { recursive: true });
  const payload = `${JSON.stringify(config, null, 2)}\n`;
  await writeFile(CONFIG_FILE, payload, "utf8");
}
42
/**
 * Deep-merges a partial user config onto the built-in defaults.
 * Only the known sections (llm, server, data) are merged; within llm the
 * generation and embeddings sub-objects are merged key-by-key as well.
 */
function mergeConfig(base, override) {
  const llmOverride = override.llm ?? {};
  return {
    llm: {
      ...base.llm,
      ...llmOverride,
      generation: { ...base.llm.generation, ...llmOverride.generation ?? {} },
      embeddings: { ...base.llm.embeddings, ...llmOverride.embeddings ?? {} }
    },
    server: { ...base.server, ...override.server ?? {} },
    data: { ...base.data, ...override.data ?? {} }
  };
}
54
+
55
+ // src/ui.ts
56
+ import boxen from "boxen";
57
+ import Table from "cli-table3";
58
+ import kleur from "kleur";
59
+ import ora from "ora";
60
// Banner art + tagline rendered by printBanner (backslashes are doubled because this is a template literal).
+ var ASCII_LOGO = ` ___ _ ___ _
61
+ / __\\___ __| | ___ / _ \\_ __ __ _ _ __ | |__
62
+ / / / _ \\ / _\` |/ _ \\/ /_\\/ '__/ _\` | '_ \\| '_ \\
63
+ / /__| (_) | (_| | __/ /_\\\\| | | (_| | |_) | | | |
64
+ \\____/\\___/ \\__,_|\\___\\____/|_| \\__,_| .__/|_| |_|
65
+ |_|`;
66
+ var TAGLINE = "Live, queryable knowledge graph for your codebase.";
67
/** Prints the boxed codegraph banner; no-op when stdout is not a TTY. */
function printBanner() {
  if (!process.stdout.isTTY) return;
  const body = `${kleur.cyan(ASCII_LOGO)}\n\n${kleur.dim(TAGLINE)}`;
  const framed = boxen(body, {
    padding: { top: 0, bottom: 0, left: 2, right: 2 },
    margin: { top: 0, bottom: 1, left: 0, right: 0 },
    borderStyle: "round",
    borderColor: "cyan",
    title: kleur.bold("codegraph"),
    titleAlignment: "left"
  });
  process.stdout.write(`${framed}\n`);
}
84
/** Creates a cyan "dots" spinner that is disabled on non-TTY output. */
function makeSpinner(text) {
  const enabled = process.stdout.isTTY === true;
  return ora({ text, color: "cyan", spinner: "dots", isEnabled: enabled });
}
92
// Minimal single-line progress bar; falls back to periodic plain lines on non-TTY.
var ProgressBar = class {
  label;
  total;
  lastRender = 0;
  width;
  constructor(label, total, width = 28) {
    this.label = label;
    this.total = total;
    this.width = width;
  }
  /** Renders progress for `current` completed items (no-op when total <= 0). */
  update(current) {
    if (this.total <= 0) return;
    if (process.stdout.isTTY !== true) {
      // Non-interactive: print at most every 25 items, plus the final count.
      const shouldLog = current === this.total || current - this.lastRender >= 25;
      if (shouldLog) {
        process.stdout.write(`${kleur.dim(`${this.label}: ${current}/${this.total}`)}\n`);
        this.lastRender = current;
      }
      return;
    }
    const ratio = Math.min(1, current / this.total);
    const filled = Math.round(ratio * this.width);
    const bar = `${"\u2588".repeat(filled)}${"\u2591".repeat(this.width - filled)}`;
    const pct = `${(ratio * 100).toFixed(0).padStart(3)}%`;
    process.stdout.write(
      `\r${kleur.cyan(this.label.padEnd(10))} ${kleur.cyan(bar)} ${pct} ${current}/${this.total}`
    );
    if (current >= this.total) process.stdout.write("\n");
  }
  /** Forces the bar to 100%. */
  done() {
    this.update(this.total);
  }
};
125
/** Builds the `codegraph status` report: node/edge count tables plus embedding coverage. */
function renderStatusTables(repoId, stats) {
  const countTable = (title, counts) => {
    const table = new Table({
      head: [kleur.bold(title), kleur.bold("Count")],
      colAligns: ["left", "right"],
      style: { head: [], border: ["grey"] }
    });
    for (const [kind, count] of Object.entries(counts)) {
      table.push([kind, count]);
    }
    return table;
  };
  const nodeTable = countTable("Node kind", stats.nodes);
  const edgeTable = countTable("Edge kind", stats.edges);
  const coverage = `${kleur.bold("Embedding coverage:")} ${kleur.cyan(
    `${(stats.embeddingCoverage * 100).toFixed(1)}%`
  )}`;
  return [kleur.bold(`Status for ${repoId}`), "", nodeTable.toString(), edgeTable.toString(), coverage].join("\n");
}
147
/** Formats an error (plus an optional hint and stack trace) inside a red box. */
function renderError(err, opts = { verbose: false }) {
  const isError = err instanceof Error;
  const message = isError ? err.message : String(err);
  const lines = [kleur.red().bold("\u2717 codegraph hit an error"), "", kleur.red(message)];
  const hint = friendlyHint(message);
  if (hint) {
    lines.push("", kleur.yellow("hint: "), kleur.yellow(hint));
  }
  if (opts.verbose && isError && err.stack) {
    lines.push("", kleur.dim(err.stack));
  } else if (isError) {
    lines.push("", kleur.dim("(re-run with --verbose for a full stack trace)"));
  }
  return boxen(lines.join("\n"), {
    padding: 1,
    borderColor: "red",
    borderStyle: "round"
  });
}
165
/** Maps a raw error message to an actionable hint, or null when none applies. First match wins. */
function friendlyHint(message) {
  const rules = [
    {
      match: (m) => /OPENAI_API_KEY/i.test(m) || /api key/i.test(m),
      hint: "Set OPENAI_API_KEY in your shell (or switch to local-ollama with `codegraph config llm set local-ollama`)."
    },
    {
      match: (m) => /ANTHROPIC_API_KEY/i.test(m),
      hint: "Set ANTHROPIC_API_KEY, or switch presets via `codegraph config llm set <preset>`."
    },
    {
      match: (m) => /GOOGLE_GENERATIVE_AI_API_KEY/i.test(m),
      hint: "Set GOOGLE_GENERATIVE_AI_API_KEY, or switch presets via `codegraph config llm set <preset>`."
    },
    {
      match: (m) => /ECONNREFUSED/.test(m) && /11434/.test(m),
      hint: "Ollama isn't running on :11434. Start it with `ollama serve`, or switch to a hosted provider."
    },
    {
      match: (m) => /EACCES|permission denied/i.test(m),
      hint: "Codegraph stores data in ~/.codegraph. Check that the directory is writable."
    },
    {
      match: (m) => /database is locked/i.test(m) || /Lock file/i.test(m),
      hint: "Another `codegraph` process holds the Kuzu DB. Stop it (or remove ~/.codegraph/graph/.lock) before retrying."
    },
    {
      match: (m) => /Unknown preset/i.test(m),
      hint: "Run `codegraph config llm set` with no args for an interactive prompt."
    }
  ];
  for (const { match, hint } of rules) {
    if (match(message)) return hint;
  }
  return null;
}
189
/** Green "server is up" box shown by `codegraph serve`. */
function renderServeBanner(url, tokenHint) {
  const lines = [
    `${kleur.green("\u2713")} codegraph mcp listening on ${kleur.cyan(url)}`,
    "",
    kleur.dim(tokenHint),
    kleur.dim("Ctrl-C to stop.")
  ];
  return boxen(lines.join("\n"), {
    padding: { top: 0, bottom: 0, left: 2, right: 2 },
    margin: { top: 1, bottom: 1, left: 0, right: 0 },
    borderColor: "green",
    borderStyle: "round"
  });
}
203
+
204
+ // src/program.ts
205
+ import { Command } from "commander";
206
+
207
+ // src/commands/config.ts
208
+ import { password, select } from "@inquirer/prompts";
209
+ import kleur2 from "kleur";
210
/** `codegraph config show`: prints the config file location and its effective contents. */
async function runConfigShow() {
  const config = await loadConfig();
  console.log(kleur2.dim(`config file: ${configPath()}`));
  console.log(JSON.stringify(config, null, 2));
}
215
/**
 * `codegraph config llm set [preset]`: applies an LLM preset to the saved
 * config, prompting interactively when no preset argument is given.
 */
async function runConfigLlmSet(presetArg) {
  const presetKeys = Object.keys(LLM_PRESETS);
  let preset = presetArg;
  if (!preset) {
    preset = await select({
      message: "Which LLM preset?",
      choices: presetKeys.map((id) => ({
        name: id,
        value: id,
        description: describePreset(id)
      }))
    });
  }
  const lookup = LLM_PRESETS[preset];
  if (!lookup) {
    throw new Error(`Unknown preset "${preset}". Available: ${presetKeys.join(", ")}`);
  }
  const config = await loadConfig();
  config.llm = { ...config.llm, ...lookup };
  await saveConfig(config);
  console.log(kleur2.green(`\u2713 saved preset "${preset}" to ${configPath()}`));
  console.log(kleur2.dim(`embedding namespace: ${namespaceLabel(config.llm)}`));
  // Only offer to capture an API key in the interactive flow.
  if (!presetArg) {
    await maybePromptForApiKey(preset);
  }
}
241
/**
 * Interactive follow-up after choosing a preset: if the preset needs an API
 * key that is not exported, offer to capture it and print the export line.
 * The pasted key is only echoed for the user to copy — it is not persisted here.
 */
async function maybePromptForApiKey(preset) {
  const envVar = apiKeyEnvVarFor(preset);
  if (!envVar) return;
  if (process.env[envVar]) {
    console.log(kleur2.dim(`\u2713 ${envVar} already set in environment`));
    return;
  }
  const answer = await select({
    message: `${envVar} is not set. Provide it now?`,
    choices: [
      { name: "Yes (paste; I'll print the export line)", value: "yes" },
      { name: "No, I'll set it later", value: "no" }
    ]
  });
  if (answer !== "yes") {
    console.log(
      kleur2.yellow(`! Set ${envVar} in your shell before running \`codegraph index\``)
    );
    return;
  }
  const value = await password({ message: `${envVar} value (input hidden)`, mask: "*" });
  if (!value) return;
  console.log(kleur2.dim("Add this to your ~/.zshrc or ~/.bashrc:"));
  console.log(kleur2.cyan(` export ${envVar}=${value}`));
}
266
/** Env var that must hold the API key for a given preset (null = keyless preset). */
function apiKeyEnvVarFor(preset) {
  switch (preset) {
    case "byo-openai":
    case "managed-stub":
      return "OPENAI_API_KEY";
    case "byo-anthropic":
      return "ANTHROPIC_API_KEY";
    case "byo-google":
      return "GOOGLE_GENERATIVE_AI_API_KEY";
    default:
      return null;
  }
}
272
/** One-line human description for each LLM preset id ("" for unknown ids). */
function describePreset(id) {
  const descriptions = {
    "managed-stub": "Phase-2 placeholder - same as BYO OpenAI",
    "byo-openai": "Your own OpenAI key (gpt-4o-mini + text-embedding-3-small)",
    "byo-anthropic": "Anthropic for gen, OpenAI for embeddings",
    "byo-google": "Gemini for gen + Google embeddings",
    "local-ollama": "Fully local via Ollama (qwen2.5-coder + nomic-embed-text)"
  };
  // Object.hasOwn guards against inherited keys like "toString".
  return Object.hasOwn(descriptions, id) ? descriptions[id] : "";
}
288
/** `codegraph config llm test`: prints the active LLM config and runs a live round-trip. */
async function runConfigLlmTest() {
  const config = await loadConfig();
  const { generation, embeddings } = config.llm;
  console.log(kleur2.dim("testing llm config:"));
  console.log(kleur2.dim(` mode ${config.llm.mode}`));
  console.log(
    kleur2.dim(` generate ${generation.provider}:${generation.model}`)
  );
  console.log(
    kleur2.dim(` embed ${embeddings.provider}:${embeddings.model}`)
  );
  const spinner = makeSpinner("Calling provider").start();
  try {
    const router = await createLlmRouter({ config: config.llm });
    const result = await router.selfTest();
    spinner.succeed("LLM provider reachable");
    console.log(` embed dims: ${result.embedDims}`);
    console.log(` embed latency: ${result.embedLatencyMs}ms`);
    console.log(` generate latency: ${result.generateLatencyMs}ms`);
    console.log(` namespace: ${namespaceLabel(config.llm)}`);
  } catch (err) {
    spinner.fail("LLM provider unreachable");
    throw err;
  }
}
312
+
313
+ // src/commands/doctor.ts
314
+ import { access, constants, mkdir as mkdir2 } from "fs/promises";
315
+ import { dirname } from "path";
316
+ import kleur3 from "kleur";
317
/**
 * `codegraph doctor`: runs environment checks (node version, config file,
 * writable data dir, API keys, LLM and Kuzu round-trips) and prints a
 * ✓/!/✗ report. Sets process.exitCode = 1 when any check fails outright.
 */
async function runDoctorCommand() {
  const checks = [];
  checks.push(checkNodeVersion());
  let config = null;
  try {
    config = await loadConfig();
    checks.push({ name: "config file", status: "ok", detail: configPath() });
  } catch (err) {
    checks.push({
      name: "config file",
      status: "fail",
      detail: err instanceof Error ? err.message : String(err)
    });
  }
  const dbPath = config?.data.dbPath ?? defaultDbPath();
  checks.push(await checkWritable("graph dir", dbPath));
  // Config-dependent checks (merged from two separate `if (config)` blocks):
  // key checks for the generation provider, the embeddings provider when it
  // differs, then live round-trips against the LLM and the graph DB.
  if (config) {
    checks.push(checkApiKey(config.llm.generation.provider));
    if (config.llm.embeddings.provider !== config.llm.generation.provider) {
      checks.push(checkApiKey(config.llm.embeddings.provider));
    }
    checks.push(await selfTestLlm(config));
    checks.push(await selfTestKuzu(dbPath, config.llm.embeddings.dimension));
  }
  let failed = 0;
  let warned = 0;
  for (const c of checks) {
    const icon = c.status === "ok" ? kleur3.green("\u2713") : c.status === "warn" ? kleur3.yellow("!") : kleur3.red("\u2717");
    const name = c.name.padEnd(18);
    const detail = c.status === "ok" ? kleur3.dim(c.detail) : c.detail;
    console.log(`${icon} ${name} ${detail}`);
    if (c.status === "fail") failed++;
    if (c.status === "warn") warned++;
  }
  console.log();
  if (failed > 0) {
    console.log(kleur3.red().bold(`${failed} check(s) failed; codegraph won't work as-is.`));
    process.exitCode = 1;
    return;
  }
  if (warned > 0) {
    console.log(kleur3.yellow(`${warned} warning(s); core flows work, optional features may not.`));
    return;
  }
  console.log(kleur3.green().bold("All checks passed. You're ready to `codegraph index <repo>`."));
}
365
/** Checks that the running Node.js is v20 or newer. */
function checkNodeVersion() {
  const version = process.versions.node;
  const major = Number(version.split(".")[0] ?? 0);
  const ok = major >= 20;
  return ok
    ? { name: "node version", status: "ok", detail: `v${version}` }
    : { name: "node version", status: "fail", detail: `v${version} (codegraph requires >= 20)` };
}
377
/**
 * Verifies that the target path (or at least its parent directory) is
 * writable, creating the parent directory first if needed.
 */
async function checkWritable(name, targetPath) {
  try {
    const parent = dirname(targetPath);
    await mkdir2(parent, { recursive: true });
    try {
      await access(targetPath, constants.W_OK);
    } catch {
      // The path itself may not exist yet; a writable parent is good enough.
      await access(parent, constants.W_OK);
    }
    return { name, status: "ok", detail: targetPath };
  } catch (err) {
    const reason = err instanceof Error ? err.message : String(err);
    return { name, status: "fail", detail: `${targetPath} (${reason})` };
  }
}
394
/** Reports whether the API-key env var for `provider` is set (or not needed at all). */
function checkApiKey(provider) {
  const envVar = providerEnvVar(provider);
  if (envVar === null) {
    return { name: `${provider} key`, status: "ok", detail: "(no key needed)" };
  }
  return process.env[envVar]
    ? { name: envVar, status: "ok", detail: "set" }
    : { name: envVar, status: "fail", detail: `unset (export ${envVar}=... or switch presets)` };
}
408
/** API-key env var for each hosted provider; null for keyless providers (e.g. ollama). */
function providerEnvVar(provider) {
  const envVars = {
    openai: "OPENAI_API_KEY",
    anthropic: "ANTHROPIC_API_KEY",
    google: "GOOGLE_GENERATIVE_AI_API_KEY"
  };
  // Object.hasOwn guards against inherited keys like "toString".
  return Object.hasOwn(envVars, provider) ? envVars[provider] : null;
}
414
/** Round-trip test against the configured LLM, reporting embed + generate latencies. */
async function selfTestLlm(config) {
  try {
    const router = await createLlmRouter({ config: config.llm });
    const { embedLatencyMs, generateLatencyMs } = await router.selfTest();
    return {
      name: "llm round-trip",
      status: "ok",
      detail: `embed=${embedLatencyMs}ms gen=${generateLatencyMs}ms`
    };
  } catch (err) {
    return {
      name: "llm round-trip",
      status: "fail",
      detail: err instanceof Error ? err.message : String(err)
    };
  }
}
431
/**
 * Opens the Kuzu graph DB, runs migrations and a trivial query, and reports
 * whether the vector index (semantic search) is available.
 */
async function selfTestKuzu(dbPath, embeddingDimension) {
  const db = new GraphDb({ dbPath, embeddingDimension });
  try {
    await db.connect();
    await db.migrate();
    const result = await db.query("RETURN 1 AS result");
    await db.close();
    if (result.data[0]?.result !== 1) {
      return { name: "kuzu round-trip", status: "fail", detail: "unexpected result" };
    }
    const hasVector = db.hasVectorIndex();
    return {
      name: "kuzu round-trip",
      status: hasVector ? "ok" : "warn",
      detail: hasVector ? "ok (vector index ready)" : "ok (vector extension missing; semantic search disabled)"
    };
  } catch (err) {
    // Best-effort close; the original failure is what we report.
    await db.close().catch(() => {
    });
    return {
      name: "kuzu round-trip",
      status: "fail",
      detail: err instanceof Error ? err.message : String(err)
    };
  }
}
456
+
457
+ // src/commands/index.ts
458
+ import path5 from "path";
459
+
460
+ // ../ingestion/src/parser.ts
461
+ import { readFile as readFile2 } from "fs/promises";
462
+ import { createRequire } from "module";
463
+ import { extname, dirname as dirname2, join } from "path";
464
+ import Parser from "web-tree-sitter";
465
// CommonJS-style resolver so grammar wasm files can be located relative to installed packages.
var require2 = createRequire(import.meta.url);
// wasm grammar file shipped for each supported language id.
var GRAMMAR_FILE = {
  typescript: "tree-sitter-typescript.wasm",
  tsx: "tree-sitter-tsx.wasm",
  javascript: "tree-sitter-javascript.wasm",
  jsx: "tree-sitter-javascript.wasm"
};
// Parser.init() is memoized in initPromise; loaded grammars are cached per language.
var initPromise = null;
var languageCache = /* @__PURE__ */ new Map();
474
/** Initializes web-tree-sitter exactly once; concurrent callers share the same promise. */
async function ensureInit() {
  initPromise ??= Parser.init();
  await initPromise;
}
478
/**
 * Resolves the on-disk path of the wasm grammar for `lang`: prefer the
 * tree-sitter-wasms package, falling back to a sibling of web-tree-sitter.
 */
function grammarPath(lang) {
  let wasmsRoot;
  try {
    wasmsRoot = dirname2(require2.resolve("tree-sitter-wasms/package.json"));
  } catch {
    wasmsRoot = join(dirname2(require2.resolve("web-tree-sitter")), "..", "tree-sitter-wasms");
  }
  return join(wasmsRoot, "out", GRAMMAR_FILE[lang]);
}
487
/** Loads (and caches) the compiled tree-sitter grammar for `lang`. */
async function loadLanguage(lang) {
  await ensureInit();
  const hit = languageCache.get(lang);
  if (hit) return hit;
  const wasm = await readFile2(grammarPath(lang));
  const language = await Parser.Language.load(new Uint8Array(wasm));
  languageCache.set(lang, language);
  return language;
}
496
/** Maps a file extension to its tree-sitter language id (null = unsupported file type). */
function detectLanguage(filePath) {
  const byExt = {
    ".ts": "typescript",
    ".tsx": "tsx",
    ".js": "javascript",
    ".mjs": "javascript",
    ".cjs": "javascript",
    ".jsx": "jsx"
  };
  const ext = extname(filePath).toLowerCase();
  // Object.hasOwn guards against inherited keys on the lookup object.
  return Object.hasOwn(byExt, ext) ? byExt[ext] : null;
}
513
/** Parses `source` with the grammar for `language`; throws if parsing yields no tree. */
async function parseSource(source, language) {
  const grammar = await loadLanguage(language);
  const parser = new Parser();
  parser.setLanguage(grammar);
  const tree = parser.parse(source);
  if (!tree) {
    throw new Error(`tree-sitter failed to parse ${language} source`);
  }
  return { tree, rootNode: tree.rootNode, language, source };
}
528
+
529
+ // ../ingestion/src/extractors/extract.ts
530
+ import { createHash } from "crypto";
531
+ import { basename } from "path";
532
+
533
+ // ../ingestion/src/walker.ts
534
/** Depth-first pre-order traversal of all named descendants of `node` (excludes `node` itself). */
function* walk(node) {
  const pushNamedChildren = (stack, parent) => {
    // Push in reverse so popping yields children left-to-right.
    for (let i = parent.namedChildCount - 1; i >= 0; i--) {
      const child = parent.namedChild(i);
      if (child) stack.push(child);
    }
  };
  const stack = [];
  pushNamedChildren(stack, node);
  while (stack.length > 0) {
    const current = stack.pop();
    if (!current) continue;
    yield current;
    pushNamedChildren(stack, current);
  }
}
550
/** First named child of `node` with the given type, or null. */
function findChildByType(node, type) {
  for (let i = 0; i < node.namedChildCount; i++) {
    const candidate = node.namedChild(i);
    if (candidate?.type === type) return candidate;
  }
  return null;
}
557
/** All named children of `node` with the given type (possibly empty). */
function findChildrenByType(node, type) {
  const matches = [];
  for (let i = 0; i < node.namedChildCount; i++) {
    const candidate = node.namedChild(i);
    if (candidate?.type === type) matches.push(candidate);
  }
  return matches;
}
565
/** Source text covered by `node` (byte offsets into `source`). */
function nodeText(node, source) {
  const { startIndex, endIndex } = node;
  return source.slice(startIndex, endIndex);
}
568
/** 1-based start line of `node` (tree-sitter rows are 0-based). */
function startLine(node) {
  return 1 + node.startPosition.row;
}
571
/** 1-based end line of `node` (tree-sitter rows are 0-based). */
function endLine(node) {
  return 1 + node.endPosition.row;
}
574
/** Collects the contiguous run of comment siblings directly above `node`, markers stripped. */
function leadingCommentFor(node, source) {
  const parts = [];
  let sibling = node.previousNamedSibling;
  while (sibling && /comment/.test(sibling.type)) {
    parts.unshift(stripCommentMarkers(nodeText(sibling, source)));
    sibling = sibling.previousNamedSibling;
  }
  return parts.join("\n").trim();
}
583
// Strips block-comment delimiters, line-comment markers, and leading "*" gutters from comment text.
function stripCommentMarkers(text) {
  return text
    .replace(/^\/\*+/, "")
    .replace(/\*+\/$/, "")
    .replace(/^\/\//gm, "")
    .replace(/^\s*\*/gm, "")
    .trim();
}
586
// True for PascalCase identifiers — how JSX distinguishes components from host tags.
function isPascalCase(name) {
  const pascal = /^[A-Z][A-Za-z0-9]*$/;
  return pascal.test(name);
}
589
+
590
+ // ../ingestion/src/extractors/routes.ts
591
// Express router methods that can register routes (including `all`/`use` middleware hooks).
var EXPRESS_METHODS = /* @__PURE__ */ new Set([
  "get", "post", "put", "patch", "delete", "options", "head", "all", "use"
]);
// Upper-case handler names recognized by the Next.js app router.
var HTTP_VERBS = /* @__PURE__ */ new Set([
  "GET", "POST", "PUT", "PATCH", "DELETE", "OPTIONS", "HEAD"
]);
611
/** Runs all route detectors (Express, Next app router, Next pages API) over one parsed file. */
function detectRoutes(input) {
  const detectors = [detectExpressRoutes, detectNextAppRouterRoutes, detectNextPagesApiRoutes];
  return detectors.flatMap((detect) => detect(input));
}
618
/**
 * Finds Express-style route registrations: `<obj>.<verb>("/path", ...)` calls
 * whose first argument is a string literal starting with "/".
 */
function detectExpressRoutes(input) {
  const routes = [];
  for (const node of walk(input.rootNode)) {
    if (node.type !== "call_expression") continue;
    const callee = node.childForFieldName("function");
    if (callee?.type !== "member_expression") continue;
    const obj = callee.childForFieldName("object");
    const prop = callee.childForFieldName("property");
    if (!obj || !prop) continue;
    const method = nodeText(prop, input.source).toLowerCase();
    if (!EXPRESS_METHODS.has(method)) continue;
    const firstArg = node.childForFieldName("arguments")?.namedChild(0);
    if (firstArg?.type !== "string") continue;
    const routePath = stripQuotes(nodeText(firstArg, input.source));
    if (!routePath.startsWith("/")) continue;
    const line = startLine(node);
    const name = `${method.toUpperCase()} ${routePath}`;
    routes.push({
      id: makeNodeId({
        repoId: input.repoId,
        kind: "Route",
        path: input.relativePath,
        name,
        line
      }),
      kind: "Route",
      repoId: input.repoId,
      name,
      path: input.relativePath,
      lineStart: line,
      lineEnd: endLine(node),
      method: method.toUpperCase(),
      routePath,
      framework: "express"
    });
  }
  return routes;
}
658
/** Finds Next.js app-router handlers: exported HTTP-verb functions in app/**&#47;route.* files. */
function detectNextAppRouterRoutes(input) {
  if (!/(^|\/)app\/.+\/route\.(ts|tsx|js|jsx|mjs|cjs)$/.test(input.relativePath)) return [];
  const routePath = appRoutePathFor(input.relativePath);
  const routes = [];
  for (const node of walk(input.rootNode)) {
    if (node.type !== "export_statement") continue;
    const decl = findChildByType(node, "function_declaration") ?? findChildByType(node, "lexical_declaration");
    if (!decl) continue;
    const verb = extractTopLevelName(decl, input.source);
    if (!verb || !HTTP_VERBS.has(verb)) continue;
    const line = startLine(node);
    const name = `${verb} ${routePath}`;
    routes.push({
      id: makeNodeId({
        repoId: input.repoId,
        kind: "Route",
        path: input.relativePath,
        name,
        line
      }),
      kind: "Route",
      repoId: input.repoId,
      name,
      path: input.relativePath,
      lineStart: line,
      lineEnd: endLine(node),
      method: verb,
      routePath,
      framework: "next-app"
    });
  }
  return routes;
}
690
/**
 * Finds Next.js pages-API routes: the default export of any file under
 * pages/api/ handles every HTTP method, so a single "ANY" route is emitted.
 */
function detectNextPagesApiRoutes(input) {
  if (!/(^|\/)pages\/api\//.test(input.relativePath)) return [];
  const routePath = pagesApiPathFor(input.relativePath);
  for (const node of walk(input.rootNode)) {
    if (node.type !== "export_statement") continue;
    if (!/default/.test(nodeText(node, input.source))) continue;
    const line = startLine(node);
    const name = `ANY ${routePath}`;
    // At most one default export matters; stop at the first match.
    return [{
      id: makeNodeId({
        repoId: input.repoId,
        kind: "Route",
        path: input.relativePath,
        name,
        line
      }),
      kind: "Route",
      repoId: input.repoId,
      name,
      path: input.relativePath,
      lineStart: line,
      lineEnd: endLine(node),
      method: "ANY",
      routePath,
      framework: "next-pages"
    }];
  }
  return [];
}
721
// Removes one leading and one trailing quote character (", ', or `) if present.
function stripQuotes(s) {
  const leading = /^["'`]/;
  const trailing = /["'`]$/;
  return s.replace(leading, "").replace(trailing, "");
}
724
/** Name of an exported declaration: the function name, or the first variable declarator's name. */
function extractTopLevelName(decl, source) {
  if (decl.type === "function_declaration") {
    const nameNode = decl.childForFieldName("name");
    return nameNode ? nodeText(nameNode, source) : null;
  }
  if (decl.type !== "lexical_declaration") return null;
  const declarator = findChildByType(decl, "variable_declarator");
  const nameNode = declarator?.childForFieldName("name");
  return nameNode ? nodeText(nameNode, source) : null;
}
737
/**
 * Converts a Next.js app-router file path to its URL path: drops the leading
 * app/ prefix and route.* suffix, removes "(group)" segments, and rewrites
 * [param] -> :param and [...param] -> :param*.
 */
function appRoutePathFor(relativePath) {
  const idx = relativePath.indexOf("app/");
  const tail = idx >= 0 ? relativePath.slice(idx + "app/".length) : relativePath;
  const segments = tail
    .replace(/\/route\.[^/]+$/, "")
    .split("/")
    .filter((seg) => seg.length > 0 && !/^\(.*\)$/.test(seg))
    .map((seg) => seg.replace(/^\[\.\.\.(.+)\]$/, ":$1*").replace(/^\[(.+)\]$/, ":$1"));
  return `/${segments.join("/")}` || "/";
}
743
/**
 * Converts a pages/api file path to its URL path: strips the extension,
 * rewrites [param]/[...param] placeholders, and drops index segments.
 */
function pagesApiPathFor(relativePath) {
  const idx = relativePath.indexOf("pages/api/");
  const tail = idx >= 0 ? relativePath.slice(idx + "pages/api/".length) : relativePath;
  const withoutExt = tail.replace(/\.[^/.]+$/, "");
  const segments = withoutExt
    .split("/")
    .filter((seg) => seg.length > 0)
    .map(
      (seg) => seg.replace(/^\[\.\.\.(.+)\]$/, ":$1*").replace(/^\[(.+)\]$/, ":$1").replace(/^index$/, "")
    )
    .filter((seg) => seg.length > 0);
  return `/api/${segments.join("/")}`.replace(/\/+$/, "") || "/api";
}
752
+
753
+ // ../ingestion/src/extractors/extract.ts
754
/**
 * Parses one source file and extracts its graph contribution: a File node,
 * symbol/route nodes, and DEFINES/EXPORTS/INHERITS/IMPORTS/CALLS/RENDERS
 * edges. Cross-file edge targets are left unresolved (toId: "" plus
 * unresolvedTargetName) for a later resolution pass.
 *
 * Decomposed into one helper per extraction pass; pass order (symbols,
 * imports, calls, JSX renders, routes) matches the original so node/edge
 * array ordering is unchanged.
 */
async function extractFile(input) {
  const parsed = await parseSource(input.source, input.language);
  const fileId = makeFileId({ repoId: input.repoId, path: input.relativePath });
  const file = {
    id: fileId,
    kind: "File",
    repoId: input.repoId,
    name: basename(input.relativePath),
    path: input.relativePath,
    lineStart: 1,
    lineEnd: Math.max(1, parsed.rootNode.endPosition.row + 1),
    language: input.language,
    sizeBytes: Buffer.byteLength(input.source, "utf8"),
    contentHash: sha1(input.source)
  };
  const nodes = [];
  const edges = [];
  // name -> node id for symbols defined in this file (attributes CALLS/RENDERS edges).
  const localSymbols = new Map();
  collectSymbolNodes(parsed, input, fileId, nodes, edges, localSymbols);
  collectImportEdges(parsed, input, fileId, edges);
  collectCallEdges(parsed, input, edges, localSymbols);
  if (input.language === "tsx" || input.language === "jsx") {
    collectRenderEdges(parsed, input, edges, localSymbols);
  }
  collectRouteNodes(parsed, input, fileId, nodes, edges);
  return { file, nodes, edges };
}
// Pass 1: symbol definitions plus DEFINES/EXPORTS/INHERITS edges.
function collectSymbolNodes(parsed, input, fileId, nodes, edges, localSymbols) {
  for (const node of walk(parsed.rootNode)) {
    const symbol = extractSymbol(node, input, parsed.source);
    if (!symbol) continue;
    nodes.push(symbol.node);
    localSymbols.set(symbol.node.name, symbol.node.id);
    edges.push({ kind: "DEFINES", fromId: fileId, toId: symbol.node.id });
    if (symbol.node.isExported) {
      edges.push({ kind: "EXPORTS", fromId: fileId, toId: symbol.node.id });
    }
    if (symbol.parentClass) {
      edges.push({
        kind: "INHERITS",
        fromId: symbol.node.id,
        toId: "",
        unresolvedTargetName: symbol.parentClass
      });
    }
  }
}
// Pass 2: IMPORTS edges keyed by the raw module specifier.
function collectImportEdges(parsed, input, fileId, edges) {
  for (const node of walk(parsed.rootNode)) {
    if (node.type !== "import_statement") continue;
    const src = findChildByType(node, "string");
    if (!src) continue;
    const raw = nodeText(src, parsed.source);
    const target = raw.slice(1, -1); // drop surrounding quotes
    edges.push({
      kind: "IMPORTS",
      fromId: fileId,
      toId: "",
      line: startLine(node),
      fromPath: input.relativePath,
      toPath: target,
      unresolvedTargetName: target
    });
  }
}
// Pass 3: CALLS edges from the enclosing local symbol to each callee name.
function collectCallEdges(parsed, input, edges, localSymbols) {
  for (const node of walk(parsed.rootNode)) {
    if (node.type !== "call_expression") continue;
    const callee = node.childForFieldName("function");
    if (!callee) continue;
    const calleeName = extractCalleeName(callee, parsed.source);
    if (!calleeName) continue;
    const enclosing = findEnclosingSymbolId(node, input, parsed.source, localSymbols);
    if (!enclosing) continue;
    edges.push({
      kind: "CALLS",
      fromId: enclosing,
      toId: "",
      line: startLine(node),
      unresolvedTargetName: calleeName
    });
  }
}
// Pass 4 (JSX dialects only): RENDERS edges for PascalCase component tags.
function collectRenderEdges(parsed, input, edges, localSymbols) {
  for (const node of walk(parsed.rootNode)) {
    if (node.type !== "jsx_opening_element" && node.type !== "jsx_self_closing_element") continue;
    const ident = node.childForFieldName("name") ?? findChildByType(node, "identifier");
    if (!ident) continue;
    const tag = nodeText(ident, parsed.source);
    if (!isPascalCase(tag)) continue;
    const enclosing = findEnclosingSymbolId(node, input, parsed.source, localSymbols);
    if (!enclosing) continue;
    edges.push({
      kind: "RENDERS",
      fromId: enclosing,
      toId: "",
      line: startLine(node),
      unresolvedTargetName: tag
    });
  }
}
// Pass 5: framework route declarations (Express / Next.js) as Route nodes.
function collectRouteNodes(parsed, input, fileId, nodes, edges) {
  const routes = detectRoutes({
    repoId: input.repoId,
    relativePath: input.relativePath,
    absolutePath: input.absolutePath,
    fileId,
    rootNode: parsed.rootNode,
    source: parsed.source,
    language: input.language
  });
  for (const route of routes) {
    nodes.push(route);
    edges.push({ kind: "DEFINES", fromId: fileId, toId: route.id });
  }
}
856
/**
 * Dispatches a syntax node to the matching symbol extractor.
 * Returns { node, parentClass? } or null when the node is not a symbol definition.
 */
function extractSymbol(node, input, source) {
  const type = node.type;
  if (type === "function_declaration" || type === "generator_function_declaration") {
    return functionFromDeclaration(node, input, source);
  }
  if (type === "class_declaration") {
    return classFromDeclaration(node, input, source);
  }
  if (type === "interface_declaration" || type === "type_alias_declaration") {
    return interfaceFromDeclaration(node, input, source, "Interface");
  }
  if (type === "lexical_declaration" || type === "variable_declaration") {
    return variableOrArrowFromDeclaration(node, input, source);
  }
  return null;
}
874
/** Builds a Function symbol from a (generator) function declaration, or null when unnamed. */
function functionFromDeclaration(node, input, source) {
  const nameNode = node.childForFieldName("name");
  if (!nameNode) return null;
  const name = nodeText(nameNode, source);
  const lineStart = startLine(node);
  return {
    node: {
      id: makeNodeId({
        repoId: input.repoId,
        kind: "Function",
        path: input.relativePath,
        name,
        line: lineStart
      }),
      kind: "Function",
      repoId: input.repoId,
      name,
      path: input.relativePath,
      lineStart,
      lineEnd: endLine(node),
      signature: extractSignature(node, source),
      leadingComment: leadingCommentFor(parentForLeadingComment(node), source),
      isExported: isExported(node),
      isAsync: hasAsyncModifier(node, source)
    }
  };
}
902
/** Builds a Class symbol (or Component, when the class looks like a React component). */
function classFromDeclaration(node, input, source) {
  const nameNode = node.childForFieldName("name");
  if (!nameNode) return null;
  const name = nodeText(nameNode, source);
  const lineStart = startLine(node);
  const kind = looksLikeComponentClass(node, name, source) ? "Component" : "Class";
  const parentClass = extractExtendsName(node, source);
  const symbol = {
    id: makeNodeId({
      repoId: input.repoId,
      kind,
      path: input.relativePath,
      name,
      line: lineStart
    }),
    kind,
    repoId: input.repoId,
    name,
    path: input.relativePath,
    lineStart,
    lineEnd: endLine(node),
    signature: `class ${name}${parentClass ? ` extends ${parentClass}` : ""}`,
    leadingComment: leadingCommentFor(parentForLeadingComment(node), source),
    isExported: isExported(node)
  };
  const result = { node: symbol };
  // Attach parentClass only when the class actually extends something.
  if (parentClass !== void 0) result.parentClass = parentClass;
  return result;
}
933
// Build an Interface graph node from an interface or type-alias declaration.
// `kind` is passed by the caller (always "Interface" at current call sites).
function interfaceFromDeclaration(node, input, source, kind) {
  const nameNode = node.childForFieldName("name");
  if (!nameNode) return null;
  const name = nodeText(nameNode, source);
  const lineStart = startLine(node);
  // The first source line, capped at 200 chars, doubles as the signature.
  const firstLine = nodeText(node, source).split("\n")[0];
  return {
    node: {
      id: makeNodeId({
        repoId: input.repoId,
        kind,
        path: input.relativePath,
        name,
        line: lineStart
      }),
      kind,
      repoId: input.repoId,
      name,
      path: input.relativePath,
      lineStart,
      lineEnd: endLine(node),
      signature: firstLine?.slice(0, 200) ?? "",
      leadingComment: leadingCommentFor(parentForLeadingComment(node), source),
      isExported: isExported(node)
    }
  };
}
960
// Build a graph node from a lexical/variable declaration. Only the FIRST
// declarator is considered (`const a = 1, b = 2` ignores `b`).
// Arrow/function-expression initialisers become Function nodes — or
// Component nodes when the name is PascalCase and the body contains JSX.
// Everything else becomes a Variable node. Returns null when no declarator
// or no name can be found.
function variableOrArrowFromDeclaration(node, input, source) {
  const declarators = findChildrenByType(node, "variable_declarator");
  if (declarators.length === 0) return null;
  const decl = declarators[0];
  if (!decl) return null;
  const nameNode = decl.childForFieldName("name");
  const value = decl.childForFieldName("value");
  if (!nameNode) return null;
  const name = nodeText(nameNode, source);
  // Line is taken from the declarator, not the whole statement.
  const line = startLine(decl);
  const isArrow = value?.type === "arrow_function" || value?.type === "function_expression";
  if (isArrow) {
    // PascalCase name + JSX in the body => treat as a React component.
    const isComponent = isPascalCase(name) && containsJsx(value);
    const kind = isComponent ? "Component" : "Function";
    const id2 = makeNodeId({
      repoId: input.repoId,
      kind,
      path: input.relativePath,
      name,
      line
    });
    return {
      node: {
        id: id2,
        kind,
        repoId: input.repoId,
        name,
        path: input.relativePath,
        lineStart: line,
        lineEnd: endLine(decl),
        // Signature comes from the function value, not the declarator.
        signature: extractSignature(value, source),
        // Leading comment is read from the whole declaration statement.
        leadingComment: leadingCommentFor(node, source),
        isExported: isExported(node),
        // isArrow is only recorded for plain functions, not components.
        ...kind === "Function" ? { isArrow: true } : {}
      }
    };
  }
  const id = makeNodeId({
    repoId: input.repoId,
    kind: "Variable",
    path: input.relativePath,
    name,
    line
  });
  return {
    node: {
      id,
      kind: "Variable",
      repoId: input.repoId,
      name,
      path: input.relativePath,
      lineStart: line,
      lineEnd: endLine(decl),
      // First line of the declarator, capped at 200 chars.
      signature: nodeText(decl, source).split("\n")[0]?.slice(0, 200) ?? "",
      leadingComment: leadingCommentFor(node, source),
      isExported: isExported(node)
    }
  };
}
1019
// Compact "<name>(<params>) <return-type>" signature (max 200 chars) from a
// tree-sitter function-like node. Missing fields degrade gracefully: no
// params -> "()", no name/return-type -> omitted.
function extractSignature(node, source) {
  if (!node) return "";
  const nameNode = node.childForFieldName("name");
  const paramsNode = node.childForFieldName("parameters");
  const returnNode = node.childForFieldName("return_type");
  const pieces = [
    nameNode ? nodeText(nameNode, source) : "",
    paramsNode ? nodeText(paramsNode, source) : "()",
    returnNode ? ` ${nodeText(returnNode, source)}` : ""
  ];
  return pieces.join("").trim().slice(0, 200);
}
1029
// Best-effort callee name for a call expression: bare identifiers give their
// text, member expressions give the property name only (obj.method() ->
// "method"); anything else is unresolvable and yields null.
function extractCalleeName(callee, source) {
  switch (callee.type) {
    case "identifier":
      return nodeText(callee, source);
    case "member_expression": {
      const prop = callee.childForFieldName("property");
      return prop ? nodeText(prop, source) : null;
    }
    default:
      return null;
  }
}
1037
// Name of the superclass in `class X extends Y`, or undefined when there is
// none. Handles grammars that nest an extends_clause inside class_heritage
// as well as those that put the identifier directly under class_heritage.
function extractExtendsName(classNode, source) {
  const heritage = findChildByType(classNode, "class_heritage");
  if (!heritage) return void 0;
  const clause = findChildByType(heritage, "extends_clause");
  if (clause) {
    const target = findChildByType(clause, "identifier") ?? findChildByType(clause, "member_expression");
    return target ? nodeText(target, source) : void 0;
  }
  const direct = findChildByType(heritage, "identifier");
  return direct ? nodeText(direct, source) : void 0;
}
1049
// Heuristic: a PascalCase class extending React's Component/PureComponent
// (possibly dotted, e.g. `React.Component`) is treated as a UI component.
function looksLikeComponentClass(classNode, name, source) {
  if (!isPascalCase(name)) return false;
  const parent = extractExtendsName(classNode, source) ?? "";
  // Fix: the original `/(?:^|\.)Component$|PureComponent$/` bound the
  // start/dot boundary to the `Component` alternative only, so any name
  // merely ENDING in "PureComponent" (e.g. `AppPureComponent`) matched.
  // Anchor the boundary for both alternatives.
  return /(?:^|\.)(?:Pure)?Component$/.test(parent);
}
1054
// True when any node produced by walk(node) is a JSX element, self-closing
// element, or fragment — used to spot React components.
function containsJsx(node) {
  if (!node) return false;
  const jsxTypes = ["jsx_element", "jsx_self_closing_element", "jsx_fragment"];
  for (const descendant of walk(node)) {
    if (jsxTypes.includes(descendant.type)) return true;
  }
  return false;
}
1063
// Detect an `async` modifier by scanning the node's leading children.
// Scanning stops at the `function`/`class` keyword because modifiers
// always precede it — children after that belong to the body.
function hasAsyncModifier(node, source) {
  for (let i = 0; i < node.childCount; i++) {
    const child = node.child(i);
    if (child == null) continue;
    if (child.type === "async" || nodeText(child, source) === "async") {
      return true;
    }
    if (child.type === "function" || child.type === "class") break;
  }
  return false;
}
1072
// True when the node sits anywhere inside an `export_statement` ancestor;
// the walk stops at the program root.
function isExported(node) {
  for (let ancestor = node.parent; ancestor; ancestor = ancestor.parent) {
    if (ancestor.type === "export_statement") return true;
    if (ancestor.type === "program") return false;
  }
  return false;
}
1081
// Climb through wrapping export_statement ancestors so leading comments are
// looked up on the outermost export wrapper, where they actually attach.
function parentForLeadingComment(node) {
  let top = node;
  for (;;) {
    const p = top.parent;
    if (!p || p.type !== "export_statement") return top;
    top = p;
  }
}
1088
// Walk ancestors to find the nearest enclosing named function/class/method
// that is registered in localSymbols; fall back to the file node's id when
// nothing matches (top-level code).
function findEnclosingSymbolId(node, input, source, localSymbols) {
  const enclosingTypes = new Set([
    "function_declaration",
    "method_definition",
    "class_declaration",
    "arrow_function",
    "function_expression"
  ]);
  for (let ancestor = node.parent; ancestor; ancestor = ancestor.parent) {
    if (!enclosingTypes.has(ancestor.type)) continue;
    const name = enclosingDeclarationName(ancestor, source);
    if (!name) continue;
    const symbolId = localSymbols.get(name);
    if (symbolId) return symbolId;
  }
  return makeFileId({ repoId: input.repoId, path: input.relativePath });
}
1102
// Name for a function-like node: its own `name` field when present,
// otherwise the variable it is assigned to (const f = () => ...). The walk
// stops at another declaration boundary so names never leak across scopes.
function enclosingDeclarationName(node, source) {
  const own = node.childForFieldName("name");
  if (own) return nodeText(own, source);
  for (let ancestor = node.parent; ancestor; ancestor = ancestor.parent) {
    if (ancestor.type === "variable_declarator") {
      const bound = ancestor.childForFieldName("name");
      return bound ? nodeText(bound, source) : null;
    }
    if (ancestor.type === "function_declaration" || ancestor.type === "class_declaration") {
      return null;
    }
  }
  return null;
}
1117
// Hex-encoded SHA-1 digest of a string (used for stable, short ids — not
// for security).
function sha1(s) {
  const hasher = createHash("sha1");
  hasher.update(s);
  return hasher.digest("hex");
}
1120
+
1121
+ // ../ingestion/src/extractors/resolve.ts
1122
+ import path2 from "path";
1123
// Candidate extensions tried when resolving an extension-less import, in
// priority order (TS before JS).
var EXTENSIONS = [".ts", ".tsx", ".js", ".jsx", ".mjs", ".cjs"];
1124
// Resolve provisional edges produced during extraction.
// - Edges that already carry `toId` pass through untouched.
// - IMPORTS edges resolve their specifier to a known file path; targets
//   outside the indexed set (e.g. npm packages) are dropped.
// - All other edges resolve by global symbol name, first-definition-wins.
// Returns the resolved edges plus a count of edges that were dropped.
function resolveEdges(input) {
  // Symbol name -> node id; File nodes excluded; first definition wins.
  const byName = /* @__PURE__ */ new Map();
  for (const n of input.nodes) {
    if (n.kind === "File") continue;
    if (!byName.has(n.name)) byName.set(n.name, n.id);
  }
  const out = [];
  let dropped = 0;
  for (const edge of input.edges) {
    if (edge.toId) {
      // Already resolved by the extractor.
      out.push(edge);
      continue;
    }
    if (!edge.unresolvedTargetName) {
      // Nothing to resolve against — unrecoverable.
      dropped++;
      continue;
    }
    if (edge.kind === "IMPORTS") {
      const resolved = resolveImportPath({
        repoId: input.repoId,
        fromPath: edge.fromPath ?? "",
        spec: edge.unresolvedTargetName,
        known: input.knownFilePaths
      });
      if (!resolved) {
        dropped++;
        continue;
      }
      const targetId2 = makeFileId({ repoId: input.repoId, path: resolved });
      // Strip the provisional field before emitting the final edge.
      const { unresolvedTargetName: _unused2, ...rest2 } = edge;
      out.push({ ...rest2, toId: targetId2, toPath: resolved });
      continue;
    }
    const targetId = byName.get(edge.unresolvedTargetName);
    if (!targetId) {
      dropped++;
      continue;
    }
    const { unresolvedTargetName: _unused, ...rest } = edge;
    out.push({ ...rest, toId: targetId });
  }
  return { resolved: out, dropped };
}
1167
// Resolve a relative import specifier against the importing file. Candidates
// are tried in order: the literal path, each known extension appended to the
// stem, then index files. Bare (npm-package) specifiers yield null.
function resolveImportPath(input) {
  const { fromPath, spec, known } = input;
  const isRelative = spec.startsWith(".") || spec.startsWith("/");
  if (!isRelative) return null;
  const fromDir = path2.posix.dirname(toPosix(fromPath));
  const target = path2.posix.normalize(path2.posix.join(fromDir, toPosix(spec)));
  const ext = path2.posix.extname(target);
  const stem = ext ? target.slice(0, -ext.length) : target;
  const candidates = [
    target,
    ...EXTENSIONS.map((e) => `${stem}${e}`),
    ...EXTENSIONS.map((e) => `${stem}/index${e}`)
  ];
  return candidates.find((c) => known.has(c)) ?? null;
}
1182
// Normalise a platform-separated path to forward-slash (POSIX) form.
function toPosix(p) {
  return path2.sep === "/" ? p : p.replaceAll(path2.sep, "/");
}
1185
+
1186
+ // ../ingestion/src/orchestrator.ts
1187
+ import { readFile as readFile4, stat as stat2 } from "fs/promises";
1188
+ import { cpus } from "os";
1189
+ import { join as join2 } from "path";
1190
+ import { createHash as createHash2 } from "crypto";
1191
+ import ignore2 from "ignore";
1192
+
1193
+ // ../ingestion/src/embedder.ts
1194
// Compose the text embedded for a symbol node:
// "<kind> <name>\n<signature>\n<leading comment>", trimmed so missing
// signature/comment parts leave no trailing whitespace.
function buildEmbeddingInput(node) {
  const parts = [
    `${node.kind} ${node.name}`,
    node.signature ?? "",
    node.leadingComment ?? ""
  ];
  return parts.join("\n").trim();
}
1201
// Embed all non-File nodes in batches via the LLM router. Nodes whose
// vector comes back missing are silently skipped. Calls opts.onBatch after
// each batch with the running total of embedded nodes.
async function embedNodes(nodes, opts) {
  const batchSize = opts.batchSize ?? 100;
  const candidates = nodes.filter((n) => n.kind !== "File");
  const result = [];
  const total = Math.ceil(candidates.length / batchSize);
  // Namespace ties each vector to provider/model/dimension so embeddings
  // produced by a different model are never mixed at query time.
  const namespace = `${opts.router.config.embeddingNamespace.provider}:${opts.router.config.embeddingNamespace.model}:${opts.router.config.embeddingNamespace.dimension}`;
  for (let i = 0; i < candidates.length; i += batchSize) {
    const slice = candidates.slice(i, i + batchSize);
    const inputs = slice.map(buildEmbeddingInput);
    const vectors = await opts.router.embed(inputs);
    for (let j = 0; j < slice.length; j++) {
      const node = slice[j];
      const vec = vectors[j];
      // Skip pairs where the router returned fewer vectors than inputs.
      if (!node || !vec) continue;
      result.push({ id: node.id, embedding: vec, embeddingNamespace: namespace });
    }
    opts.onBatch?.({
      batchIndex: i / batchSize,
      total,
      embedded: result.length
    });
  }
  return result;
}
1225
+
1226
+ // ../ingestion/src/repo-walker.ts
1227
+ import { readFile as readFile3, readdir, stat } from "fs/promises";
1228
+ import path3 from "path";
1229
+ import ignore from "ignore";
1230
// Directories always skipped during the repo walk, regardless of .gitignore
// (VCS metadata, dependency and build output, editor/tool caches).
var ALWAYS_SKIP_DIRS = /* @__PURE__ */ new Set([
  ".git",
  "node_modules",
  ".next",
  "dist",
  "build",
  "out",
  ".turbo",
  ".cache",
  "coverage",
  ".idea",
  ".vscode"
]);
1243
// Recursively list repo files as sorted, repo-relative, forward-slash paths,
// honouring the root .gitignore plus the hard-coded ALWAYS_SKIP_DIRS list.
// Note: only the repo-root .gitignore is loaded, not nested ones.
async function walkRepo(root) {
  const base = path3.resolve(root);
  const matcher = ignore();
  await loadGitignoreInto(matcher, base);
  const out = [];
  await walkDir(base, "", matcher, out);
  return out.sort();
}
1251
// Depth-first directory walk appending repo-relative file paths to `out`.
// Unreadable directories are skipped silently; directories are tested
// against the ignore matcher with a trailing slash (gitignore dir syntax);
// files larger than 2 MB are excluded.
async function walkDir(absDir, relDir, ig, out) {
  let entries;
  try {
    entries = await readdir(absDir, { withFileTypes: true, encoding: "utf8" });
  } catch {
    // Permission errors / races: treat the directory as empty.
    return;
  }
  for (const entry of entries) {
    const name = entry.name;
    const rel = relDir ? `${relDir}/${name}` : name;
    const abs = path3.join(absDir, name);
    if (entry.isDirectory()) {
      if (ALWAYS_SKIP_DIRS.has(name)) continue;
      if (ig.ignores(`${rel}/`)) continue;
      await walkDir(abs, rel, ig, out);
    } else if (entry.isFile()) {
      if (ig.ignores(rel)) continue;
      const s = await safeStat(abs);
      // Skip very large files (generated bundles, fixtures, binaries).
      if (s && s.size > 2e6) continue;
      out.push(rel);
    }
    // Symlinks and other entry types are deliberately ignored.
  }
}
1274
// Feed the directory's .gitignore (if any) into the ignore matcher; a
// missing or unreadable file is silently treated as empty.
async function loadGitignoreInto(ig, absDir) {
  const gitignorePath = path3.join(absDir, ".gitignore");
  try {
    const patterns = await readFile3(gitignorePath, "utf8");
    ig.add(patterns);
  } catch {
    // No .gitignore here — nothing to add.
  }
}
1282
// stat() that resolves to null instead of rejecting (broken symlinks,
// deleted-during-walk races, permission errors).
async function safeStat(p) {
  return stat(p).catch(() => null);
}
1289
+
1290
+ // ../ingestion/src/orchestrator.ts
1291
// Full indexing pipeline: walk the repo, parse/extract files concurrently,
// resolve provisional edges, replace the repo's graph data, and optionally
// embed symbols. Emits progress events throughout; returns summary stats.
// NOTE(review): deleteByRepo + upsert below is not atomic — a crash between
// the two leaves the repo partially indexed; verify acceptable for callers.
async function indexRepo(opts) {
  const start = Date.now();
  const parallelism = Math.max(1, opts.parallelism ?? cpus().length);
  const files = await walkRepo(opts.repoPath);
  opts.onProgress?.({ type: "walk", files: files.length });
  // Keep only files whose extension maps to a supported language.
  const parsable = files.map((rel) => ({ rel, language: detectLanguage(rel) })).filter(
    (f) => f.language !== null
  );
  const knownFilePaths = new Set(parsable.map((f) => f.rel));
  const allNodes = [];
  const allEdges = [];
  let parsedCount = 0;
  let failed = 0;
  await runWithConcurrency(parsable, parallelism, async (entry) => {
    const abs = join2(opts.repoPath, entry.rel);
    try {
      const source = await readFile4(abs, "utf8");
      const result = await extractFile({
        repoId: opts.repoId,
        relativePath: entry.rel,
        absolutePath: abs,
        language: entry.language,
        source
      });
      allNodes.push(result.file, ...result.nodes);
      allEdges.push(...result.edges);
    } catch (_err) {
      // A single unparsable file must not abort the run; just count it.
      failed++;
    } finally {
      parsedCount++;
      opts.onProgress?.({ type: "parse", parsed: parsedCount, total: parsable.length });
    }
  });
  const { resolved, dropped } = resolveEdges({
    repoId: opts.repoId,
    nodes: allNodes,
    edges: allEdges,
    knownFilePaths
  });
  // Full re-index: wipe the repo's previous data, then write the new graph.
  await opts.graphDb.deleteByRepo(opts.repoId);
  await opts.graphDb.upsertNodes(allNodes);
  await opts.graphDb.upsertEdges(resolved);
  opts.onProgress?.({ type: "upsert", nodes: allNodes.length, edges: resolved.length });
  let embeddingCount = 0;
  if (!opts.skipEmbeddings && opts.router) {
    const embedded = await embedNodes(allNodes, {
      router: opts.router,
      onBatch: ({ embedded: embedded2, total }) => opts.onProgress?.({ type: "embed", embedded: embedded2, total })
    });
    await persistEmbeddings(opts.graphDb, embedded);
    embeddingCount = embedded.length;
  }
  return {
    durationMs: Date.now() - start,
    // parsedCount includes failures, so subtract them for the success count.
    parsedFiles: parsedCount - failed,
    failedFiles: failed,
    nodes: allNodes.length,
    edges: resolved.length,
    embeddings: embeddingCount,
    droppedEdges: dropped
  };
}
1353
// Persist embedding vectors onto their Symbol nodes in batches of 100,
// using a single UNWIND query per batch to limit round-trips.
async function persistEmbeddings(graphDb, embedded) {
  if (embedded.length === 0) return;
  const BATCH = 100;
  for (let i = 0; i < embedded.length; i += BATCH) {
    const batch = embedded.slice(i, i + BATCH);
    await graphDb.query(
      `
      UNWIND $batch AS e
      MATCH (n:Symbol { id: e.id })
      SET n.embedding = e.embedding,
          n.embeddingNamespace = e.embeddingNamespace
    `,
      { batch }
    );
  }
}
1369
// Minimal worker pool: up to `concurrency` async workers pull items off a
// shared cursor until the list is exhausted. The index claim (`next++`) is
// synchronous, so workers never double-process on the single-threaded
// event loop. `undefined` entries (sparse arrays) are skipped.
async function runWithConcurrency(items, concurrency, fn) {
  let next = 0;
  const worker = async () => {
    for (;;) {
      if (next >= items.length) return;
      const item = items[next++];
      if (item === void 0) continue;
      await fn(item);
    }
  };
  const workerCount = Math.min(concurrency, items.length);
  await Promise.all(Array.from({ length: workerCount }, worker));
}
1381
+
1382
+ // src/commands/index.ts
1383
+ import kleur4 from "kleur";
1384
+
1385
+ // src/repo-id.ts
1386
+ import { createHash as createHash3 } from "crypto";
1387
+ import path4 from "path";
1388
// Derive a stable, filesystem-safe repo id:
// "<sanitised-basename>-<first 8 hex chars of sha1(resolved path)>".
// The hash suffix disambiguates repos that share a directory name.
function repoIdFromPath(absPath) {
  const resolved = path4.resolve(absPath);
  const digest = createHash3("sha1").update(resolved).digest("hex");
  const label = path4.basename(resolved).replace(/[^A-Za-z0-9_-]/g, "_");
  return `${label}-${digest.slice(0, 8)}`;
}
1394
+
1395
+ // src/commands/index.ts
1396
// CLI `index` command: open/migrate the graph DB, optionally initialise the
// LLM router (degrading to a no-embedding run on failure), run indexRepo
// with progress bars, then print a summary. Throws when the graph cannot
// be opened; LLM failures are non-fatal.
async function runIndexCommand(opts) {
  const config = await loadConfig();
  const absolutePath = path5.resolve(opts.repoPath);
  const repoId = repoIdFromPath(absolutePath);
  const dbPath = config.data.dbPath ?? defaultDbPath();
  console.log(kleur4.dim(`repo: ${absolutePath}`));
  console.log(kleur4.dim(`id: ${repoId}`));
  console.log(kleur4.dim(`graph: ${dbPath}`));
  console.log(
    kleur4.dim(
      `llm: ${config.llm.mode} / gen=${config.llm.generation.provider}:${config.llm.generation.model} / embed=${config.llm.embeddings.provider}:${config.llm.embeddings.model}`
    )
  );
  console.log();
  const graphDb = new GraphDb({
    dbPath,
    embeddingDimension: config.llm.embeddings.dimension
  });
  const connectSpinner = makeSpinner("Opening graph").start();
  try {
    await graphDb.connect();
    await graphDb.migrate();
    connectSpinner.succeed(`Graph ready ${kleur4.dim(`(${dbPath})`)}`);
  } catch (err) {
    // Graph unavailability is fatal for indexing.
    connectSpinner.fail("Failed to open graph");
    throw err;
  }
  let router;
  if (!opts.noEmbed) {
    const llmSpinner = makeSpinner("Initialising LLM router").start();
    try {
      router = await createLlmRouter({ config: config.llm });
      llmSpinner.succeed("LLM router initialised");
    } catch (err) {
      // LLM failure downgrades to an embedding-free index instead of aborting.
      llmSpinner.warn(
        `LLM router init failed; continuing without embeddings (${err instanceof Error ? err.message : String(err)})`
      );
      router = void 0;
    }
  }
  // Progress bars are created lazily on the first matching event.
  let parseBar = null;
  let embedBar = null;
  const result = await indexRepo({
    repoId,
    repoPath: absolutePath,
    graphDb,
    ...router ? { router } : {},
    skipEmbeddings: opts.noEmbed === true || router === void 0,
    onProgress: (event) => {
      if (event.type === "walk") {
        console.log(kleur4.dim(`walked ${event.files} files`));
      }
      if (event.type === "parse") {
        if (!parseBar) parseBar = new ProgressBar("parsing", event.total);
        parseBar.update(event.parsed);
      }
      if (event.type === "upsert") {
        console.log(
          kleur4.dim(`upserted ${event.nodes} nodes, ${event.edges} edges`)
        );
      }
      if (event.type === "embed") {
        if (!embedBar) embedBar = new ProgressBar("embedding", event.total);
        embedBar.update(event.embedded);
      }
    }
  });
  parseBar?.done();
  embedBar?.done();
  await graphDb.close();
  console.log();
  console.log(
    kleur4.green().bold(
      `\u2713 Indexed ${result.nodes} nodes, ${result.edges} edges in ${(result.durationMs / 1e3).toFixed(2)}s`
    )
  );
  console.log(
    kleur4.dim(
      ` parsed=${result.parsedFiles} failed=${result.failedFiles} embeddings=${result.embeddings} dropped_edges=${result.droppedEdges}`
    )
  );
}
1478
// CLI `status` command: open the graph read-only, fetch per-repo stats, and
// render them as tables. Rethrows on any graph error after failing the
// spinner. Note: graphDb.close() is skipped on the error path.
async function runStatusCommand(opts) {
  const config = await loadConfig();
  const absolutePath = path5.resolve(opts.repoPath);
  const repoId = repoIdFromPath(absolutePath);
  const graphDb = new GraphDb({
    dbPath: config.data.dbPath ?? defaultDbPath(),
    embeddingDimension: config.llm.embeddings.dimension
  });
  const spinner = makeSpinner("Reading graph").start();
  try {
    await graphDb.connect();
    const stats = await graphDb.stats(repoId);
    await graphDb.close();
    spinner.stop();
    console.log(renderStatusTables(repoId, stats));
  } catch (err) {
    spinner.fail("Failed to read graph");
    throw err;
  }
}
1498
+
1499
+ // ../mcp-server/dist/index.js
1500
+ import { createHash as createHash4 } from "crypto";
1501
+ import { mkdir as mkdir3, readFile as readFile5, writeFile as writeFile2 } from "fs/promises";
1502
+ import { homedir } from "os";
1503
+ import { dirname as dirname3, resolve } from "path";
1504
+ import { randomUUID } from "crypto";
1505
+ import { McpServer } from "@modelcontextprotocol/sdk/server/mcp.js";
1506
+ import { SSEServerTransport } from "@modelcontextprotocol/sdk/server/sse.js";
1507
+ import express from "express";
1508
+ import { z } from "zod";
1509
+ import { z as z2 } from "zod";
1510
+ import { z as z3 } from "zod";
1511
+ import { z as z4 } from "zod";
1512
+ import { z as z5 } from "zod";
1513
+ import { z as z6 } from "zod";
1514
+ import { z as z7 } from "zod";
1515
+ import { z as z8 } from "zod";
1516
+ import { z as z9 } from "zod";
1517
+ import { z as z10 } from "zod";
1518
// Deterministic cache key for a tool invocation:
// "codegraph:<namespace>:tool:<name>:<sha1 of key-order-stable args JSON>".
// stableStringify makes logically equal args hash identically.
function buildCacheKey(toolName, args, namespace = "v1") {
  const digest = createHash4("sha1").update(stableStringify(args)).digest("hex");
  return `codegraph:${namespace}:tool:${toolName}:${digest}`;
}
1523
// JSON.stringify with plain-object keys emitted in sorted order so that
// logically equal values always serialise identically (stable cache keys).
// Arrays keep their order; the replacer visits every nested object.
function stableStringify(value) {
  const orderKeys = (_key, v) => {
    if (!v || typeof v !== "object" || Array.isArray(v)) return v;
    return Object.fromEntries(
      Object.keys(v).sort().map((k) => [k, v[k]])
    );
  };
  return JSON.stringify(value, orderKeys);
}
1535
// Process-local TTL cache with lazy expiry: entries are only pruned when a
// read finds them stale. API is async to stay interchangeable with remote
// cache backends.
var InMemoryCache = class {
  // key -> { value, expiresAt } (ms since epoch)
  store = new Map();
  // Return the cached value, or null when absent or expired.
  async get(key) {
    const hit = this.store.get(key);
    if (!hit) return null;
    if (Date.now() >= hit.expiresAt) {
      this.store.delete(key);
      return null;
    }
    return hit.value;
  }
  // Store value for ttlSeconds from now.
  async set(key, value, ttlSeconds) {
    const expiresAt = Date.now() + ttlSeconds * 1e3;
    this.store.set(key, { value, expiresAt });
  }
  // Drop all entries.
  async close() {
    this.store.clear();
  }
};
1553
// Read-through JSON memoisation: return the parsed cached value when
// present and parseable; otherwise run compute(), store its serialised
// result under the key, and return it. Corrupt cache entries fall through
// to recomputation instead of throwing.
async function memoizeJson(cache, key, ttlSeconds, compute) {
  const hit = await cache.get(key);
  if (hit !== null) {
    try {
      return JSON.parse(hit);
    } catch {
      // Unparseable entry — recompute below.
    }
  }
  const fresh = await compute();
  await cache.set(key, JSON.stringify(fresh), ttlSeconds);
  return fresh;
}
1565
// Default TTL (seconds) for cached MCP tool results.
var DEFAULT_TOOL_TTL_SECONDS = 30;
1566
// Wrap a JSON-serialisable payload as an MCP tool result: pretty-printed
// text content plus structuredContent. Non-object payloads are boxed under
// { result } so structuredContent is always an object. Pass
// { isError: true } in extra to flag failures.
function jsonResult(payload, extra) {
  const structured = isStructuredContent(payload) ? payload : { result: payload };
  const result = {
    content: [{ type: "text", text: JSON.stringify(payload, null, 2) }],
    structuredContent: structured
  };
  if (extra?.isError) return { ...result, isError: true };
  return result;
}
1574
// MCP structuredContent must be a plain-object map — not null, not an array,
// not a primitive.
function isStructuredContent(value) {
  if (value === null || Array.isArray(value)) return false;
  return typeof value === "object";
}
1577
// Run a tool computation through the shared cache (keyed by tool name +
// stable args hash) and wrap the payload as an MCP JSON result. TTL comes
// from deps, falling back to the module default.
async function cachedJsonResult(toolName, args, deps, compute) {
  const ttlSeconds = deps.cacheTtlSeconds ?? DEFAULT_TOOL_TTL_SECONDS;
  const cacheKey = buildCacheKey(toolName, args);
  const payload = await memoizeJson(deps.cache, cacheKey, ttlSeconds, compute);
  return jsonResult(payload);
}
1583
// Zod shape for affected_by: required symbol, optional path disambiguator,
// depth/limit bounded with defaults.
var inputSchema = {
  symbol: z.string().min(1).describe("Symbol name. Use `path` for the file when ambiguous."),
  path: z.string().optional().describe("Optional file path to disambiguate the symbol."),
  depth: z.number().int().positive().max(6).default(3),
  limit: z.number().int().positive().max(500).default(200)
};
// MCP tool: reverse reachability over CALLS/IMPORTS edges from the target
// symbol, with shortest distance per affected symbol. Results are cached.
// `depth` is interpolated into the variable-length pattern — acceptable only
// because zod validates it as an int <= 6 before it reaches the query.
var affectedByTool = {
  name: "affected_by",
  config: {
    title: "Affected By",
    description: "Reverse-BFS over CALLS and IMPORTS edges to surface every symbol or file whose behaviour could be affected by changing the given symbol. Depth-limited to keep query bounded.",
    inputSchema,
    annotations: { readOnlyHint: true, idempotentHint: true }
  },
  handler: async ({ symbol, path: path6, depth, limit }, deps) => cachedJsonResult("affected_by", { symbol, path: path6, depth, limit }, deps, async () => {
    // Narrow by path only when the caller provided one.
    const where = path6 ? "WHERE target.name = $symbol AND target.path = $path" : "WHERE target.name = $symbol";
    const rows = await deps.graph.query(
      `MATCH (target:Symbol) ${where}
       WITH target
       MATCH path = (affected:Symbol)-[:CALLS|IMPORTS*1..${depth}]->(target)
       WITH DISTINCT affected, min(length(path)) AS distance
       RETURN affected.id AS id, affected.name AS name, affected.kind AS kind,
              affected.path AS path, distance
       ORDER BY distance ASC, affected.path ASC LIMIT $limit`,
      { symbol, path: path6 ?? null, limit }
    );
    return { symbol, path: path6 ?? null, depth, count: rows.length, affected: rows };
  })
};
1612
// Zod shape for blast_radius: like affected_by but returns a count plus a
// small sample instead of the full list.
var inputSchema2 = {
  symbol: z2.string().min(1).describe("Symbol name to compute the blast radius for."),
  path: z2.string().optional().describe("Optional file path to disambiguate the symbol."),
  depth: z2.number().int().positive().max(6).default(4),
  sampleSize: z2.number().int().positive().max(50).default(10)
};
// MCP tool: count all upstream dependents over CALLS/IMPORTS/RENDERS within
// `depth`, then fetch the closest `sampleSize` of them for context. Two
// queries against the same zod-bounded depth; results are cached.
var blastRadiusTool = {
  name: "blast_radius",
  config: {
    title: "Blast Radius",
    description: "Reverse-BFS over CALLS, IMPORTS, and RENDERS edges to count the total number of upstream dependents, plus a small sample to give the agent context.",
    inputSchema: inputSchema2,
    annotations: { readOnlyHint: true, idempotentHint: true }
  },
  handler: async ({ symbol, path: path6, depth, sampleSize }, deps) => cachedJsonResult("blast_radius", { symbol, path: path6, depth, sampleSize }, deps, async () => {
    const where = path6 ? "WHERE target.name = $symbol AND target.path = $path" : "WHERE target.name = $symbol";
    // Query 1: total distinct dependent count.
    const [stats] = await deps.graph.query(
      `MATCH (target:Symbol) ${where}
       WITH target
       MATCH (dependent:Symbol)-[:CALLS|IMPORTS|RENDERS*1..${depth}]->(target)
       RETURN count(DISTINCT dependent) AS total`,
      { symbol, path: path6 ?? null }
    );
    // Query 2: nearest dependents as a bounded sample.
    const sample = await deps.graph.query(
      `MATCH (target:Symbol) ${where}
       WITH target
       MATCH p = (dependent:Symbol)-[:CALLS|IMPORTS|RENDERS*1..${depth}]->(target)
       WITH DISTINCT dependent, min(length(p)) AS distance
       RETURN dependent.id AS id, dependent.name AS name, dependent.kind AS kind,
              dependent.path AS path, distance
       ORDER BY distance ASC, dependent.path ASC LIMIT $sampleSize`,
      { symbol, path: path6 ?? null, sampleSize }
    );
    return {
      symbol,
      path: path6 ?? null,
      depth,
      // No matching target symbol yields no stats row -> 0 dependents.
      totalDependents: stats?.total ?? 0,
      sample
    };
  })
};
1654
// Zod shape for find_callers: exact symbol name plus a bounded limit.
var inputSchema3 = {
  name: z3.string().min(1).describe("Exact symbol name to find callers of."),
  limit: z3.number().int().positive().max(500).default(100)
};
// MCP tool: direct (depth-1) CALLS callers of the named symbol, ordered by
// file then line. Results are cached.
var findCallersTool = {
  name: "find_callers",
  config: {
    title: "Find Callers",
    description: "Return symbols that have a CALLS edge to the named target. Uses Phase-1 heuristic edges (identifier-position match).",
    inputSchema: inputSchema3,
    annotations: { readOnlyHint: true, idempotentHint: true }
  },
  handler: async ({ name, limit }, deps) => cachedJsonResult("find_callers", { name, limit }, deps, async () => {
    const rows = await deps.graph.query(
      `MATCH (caller:Symbol)-[:CALLS]->(callee:Symbol)
       WHERE callee.name = $name
       RETURN caller.id AS id, caller.name AS name, caller.kind AS kind,
              caller.path AS path, caller.lineStart AS line
       ORDER BY caller.path, caller.lineStart LIMIT $limit`,
      { name, limit }
    );
    return { name, count: rows.length, callers: rows };
  })
};
1678
// Zod shape for find_file: path substring plus a bounded limit.
var inputSchema4 = {
  fragment: z4.string().min(1).describe("Substring to match against file paths."),
  limit: z4.number().int().positive().max(200).default(50)
};
// MCP tool: case-insensitive substring search over indexed file paths.
// Results are cached.
var findFileTool = {
  name: "find_file",
  config: {
    title: "Find File",
    description: "Locate files in the indexed repo by a substring of their path.",
    inputSchema: inputSchema4,
    annotations: { readOnlyHint: true, idempotentHint: true }
  },
  handler: async ({ fragment, limit }, deps) => cachedJsonResult("find_file", { fragment, limit }, deps, async () => {
    const rows = await deps.graph.query(
      `MATCH (f:Symbol)
       WHERE f.kind = 'File' AND lower(f.path) CONTAINS lower($q)
       RETURN f.id AS id, f.path AS path, f.name AS name
       ORDER BY f.path LIMIT $limit`,
      { q: fragment, limit }
    );
    return { fragment, count: rows.length, matches: rows };
  })
};
1701
// Zod shape for get_component_tree: component name plus a bounded depth.
var inputSchema5 = {
  name: z5.string().min(1).describe("Component name (PascalCase)."),
  depth: z5.number().int().positive().max(8).default(4)
};
// MCP tool: collect the RENDERS descendants of a component up to `depth`.
// Results are cached.
// NOTE(review): when the OPTIONAL MATCH finds no children, `child` is null
// and the collect() may still emit one all-null entry — verify against the
// graph engine's collect semantics.
var getComponentTreeTool = {
  name: "get_component_tree",
  config: {
    title: "Get Component Tree",
    description: "Recursively walk RENDERS edges from the named component, returning the rendered descendants up to the requested depth.",
    inputSchema: inputSchema5,
    annotations: { readOnlyHint: true, idempotentHint: true }
  },
  handler: async ({ name, depth }, deps) => cachedJsonResult("get_component_tree", { name, depth }, deps, async () => {
    const rows = await deps.graph.query(
      `MATCH (root:Symbol {kind: 'Component', name: $name})
       OPTIONAL MATCH path = (root)-[:RENDERS*1..${depth}]->(child:Symbol)
       WHERE child.kind = 'Component'
       WITH root, collect(DISTINCT { name: child.name, id: child.id, path: child.path, depth: length(path) }) AS children
       RETURN root.id AS rootId, root.name AS rootName, root.path AS rootPath, children`,
      { name }
    );
    // No root row at all means the component was not found.
    return { name, depth, tree: rows[0] ?? null };
  })
};
1725
// Zod shape for get_dependencies: file path plus bounded depth/limit.
var inputSchema6 = {
  path: z6.string().min(1).describe("Repo-relative file path."),
  depth: z6.number().int().positive().max(8).default(4),
  limit: z6.number().int().positive().max(500).default(200)
};
// MCP tool: transitive IMPORTS closure of a file, with shortest distance
// per dependency. Results are cached.
var getDependenciesTool = {
  name: "get_dependencies",
  config: {
    title: "Get Dependencies",
    description: "Forward-BFS over IMPORTS edges, returning every file transitively imported by the given path, with depth.",
    inputSchema: inputSchema6,
    annotations: { readOnlyHint: true, idempotentHint: true }
  },
  handler: async ({ path: path6, depth, limit }, deps) => cachedJsonResult("get_dependencies", { path: path6, depth, limit }, deps, async () => {
    const rows = await deps.graph.query(
      `MATCH (root:Symbol)
       WHERE root.kind = 'File' AND root.path = $path
       OPTIONAL MATCH p = (root)-[:IMPORTS*1..${depth}]->(dep:Symbol)
       WHERE dep.kind = 'File'
       WITH DISTINCT dep, min(length(p)) AS distance
       WHERE dep IS NOT NULL
       RETURN dep.id AS id, dep.path AS path, distance
       ORDER BY distance ASC, dep.path ASC LIMIT $limit`,
      { path: path6, limit }
    );
    return { path: path6, depth, count: rows.length, dependencies: rows };
  })
};
1753
// Zod shape for get_file_context: file path plus a bounded symbol limit.
var inputSchema7 = {
  path: z7.string().min(1).describe("Repo-relative file path."),
  symbolLimit: z7.number().int().positive().max(500).default(100)
};
1757
// MCP tool: one-stop structural summary of a single file node.
var getFileContextTool = {
  name: "get_file_context",
  config: {
    title: "Get File Context",
    description: "Return the file's defined symbols, imported files, exported symbols, and the files that import it.",
    inputSchema: inputSchema7,
    annotations: { readOnlyHint: true, idempotentHint: true }
  },
  handler: async ({ path: path6, symbolLimit }, deps) => cachedJsonResult("get_file_context", { path: path6, symbolLimit }, deps, async () => {
    // Resolve the file node first so an unknown path short-circuits with found:false.
    const fileRows = await deps.graph.query(
      `MATCH (f:Symbol)
WHERE f.kind = 'File' AND f.path = $path
RETURN f.id AS id, f.path AS path, f.name AS name LIMIT 1`,
      { path: path6 }
    );
    if (fileRows.length === 0) {
      return { path: path6, found: false };
    }
    // The four facets are independent reads, so issue them in parallel.
    const [defined, imports, exports, importers] = await Promise.all([
      // Symbols DEFINEd in this file, in source-line order.
      deps.graph.query(
        `MATCH (f:Symbol)-[:DEFINES]->(s:Symbol)
WHERE f.kind = 'File' AND f.path = $path
RETURN s.id AS id, s.name AS name, s.kind AS kind, s.lineStart AS line, s.signature AS signature
ORDER BY s.lineStart LIMIT $limit`,
        { path: path6, limit: symbolLimit }
      ),
      // Files this file IMPORTS (direct edges only).
      deps.graph.query(
        `MATCH (f:Symbol)-[:IMPORTS]->(t:Symbol)
WHERE f.kind = 'File' AND t.kind = 'File' AND f.path = $path
RETURN DISTINCT t.path AS path ORDER BY t.path`,
        { path: path6 }
      ),
      // Symbols this file EXPORTS.
      deps.graph.query(
        `MATCH (f:Symbol)-[:EXPORTS]->(s:Symbol)
WHERE f.kind = 'File' AND f.path = $path
RETURN s.id AS id, s.name AS name, s.kind AS kind, s.lineStart AS line
ORDER BY s.name`,
        { path: path6 }
      ),
      // Reverse direction: files that IMPORT this one.
      deps.graph.query(
        `MATCH (other:Symbol)-[:IMPORTS]->(f:Symbol)
WHERE other.kind = 'File' AND f.kind = 'File' AND f.path = $path
RETURN DISTINCT other.path AS path ORDER BY other.path`,
        { path: path6 }
      )
    ]);
    return {
      path: path6,
      found: true,
      file: fileRows[0],
      definedSymbols: defined,
      importsFiles: imports,
      exportedSymbols: exports,
      importedBy: importers
    };
  })
};
1814
// Clauses a validated nl_query statement may start with (read-only surface).
var ALLOWED_LEADING_CLAUSES = ["MATCH", "OPTIONAL MATCH", "WITH", "UNWIND", "RETURN"];
// Mutating / side-effecting Cypher keywords rejected anywhere in a statement.
// "DELETE" alone already matches inside "DETACH DELETE"; both are listed for clarity.
var FORBIDDEN_KEYWORDS = [
  "CREATE",
  "MERGE",
  "DELETE",
  "DETACH DELETE",
  "SET",
  "REMOVE",
  "DROP",
  "FOREACH",
  "LOAD CSV"
];
1826
/**
 * Blank out comments before keyword scanning: block comments first, then
 * line comments, each replaced by a single space so token boundaries survive.
 */
function stripComments(src) {
  const withoutBlockComments = src.replace(/\/\*[\s\S]*?\*\//g, " ");
  const withoutLineComments = withoutBlockComments.replace(/\/\/[^\n]*/g, " ");
  return withoutLineComments;
}
1829
/**
 * Collapse every quoted literal to an empty pair of its own quote character
 * (handling backslash escapes) so quoted text can neither hide nor fake
 * keywords during validation.
 */
function stripStringLiterals(src) {
  let out = src.replace(/'(?:\\.|[^'\\])*'/g, "''");
  out = out.replace(/"(?:\\.|[^"\\])*"/g, '""');
  out = out.replace(/`(?:\\.|[^`\\])*`/g, "``");
  return out;
}
1832
/**
 * True when `upper` (the uppercased, whitespace-normalized statement) begins
 * with an allowlisted read-only clause.
 *
 * Fix: the previous check required a literal trailing space after the clause,
 * so valid Cypher like `MATCH(n)` (no space before the paren) was rejected.
 * A word-boundary match accepts that form while still rejecting prefixes
 * that merely start with the same letters (e.g. "MATCHBOX").
 */
function startsWithAllowedClause(upper) {
  for (const clause of ALLOWED_LEADING_CLAUSES) {
    // Multi-word clauses ("OPTIONAL MATCH") tolerate any internal whitespace.
    const lead = new RegExp(`^${clause.replace(/\s+/g, "\\s+")}\\b`);
    if (lead.test(upper)) {
      return true;
    }
  }
  return false;
}
1840
/**
 * Return the first forbidden (mutating) keyword found in `upper` as a whole
 * word, or null when none is present. Multi-word keywords ("DETACH DELETE",
 * "LOAD CSV") tolerate arbitrary internal whitespace.
 */
function findForbiddenKeyword(upper) {
  const hit = FORBIDDEN_KEYWORDS.find((kw) => {
    const re = new RegExp(`\\b${kw.replace(/\s+/g, "\\s+")}\\b`);
    return re.test(upper);
  });
  return hit ?? null;
}
1849
/**
 * Static guard that accepts only single, read-only Cypher statements.
 * Comments and string literals are stripped first so quoted text can neither
 * hide nor fake keywords. Returns { ok, reason?, normalized? }; `normalized`
 * is the sanitized statement (literals blanked) used for reporting/caching.
 */
function validateReadOnlyCypher(input) {
  if (typeof input !== "string" || input.trim().length === 0) {
    return { ok: false, reason: "Empty Cypher statement." };
  }
  const withoutComments = stripComments(input);
  const sanitized = stripStringLiterals(withoutComments).trim();
  // One trailing semicolon is tolerated; any remaining `;` means multiple statements.
  const withoutTrailingSemicolon = sanitized.replace(/;\s*$/, "");
  if (withoutTrailingSemicolon.includes(";")) {
    return {
      ok: false,
      reason: "Multiple statements are not allowed (found `;`).",
      normalized: sanitized
    };
  }
  const normalized = withoutTrailingSemicolon.trim();
  // Uppercase + collapse whitespace so keyword checks are case/layout-insensitive.
  const upper = normalized.toUpperCase().replace(/\s+/g, " ").trim();
  if (!startsWithAllowedClause(upper)) {
    return {
      ok: false,
      reason: `Statement must start with one of: ${ALLOWED_LEADING_CLAUSES.join(", ")}.`,
      normalized
    };
  }
  const forbidden = findForbiddenKeyword(upper);
  if (forbidden) {
    return {
      ok: false,
      reason: `Mutating keyword '${forbidden}' is not allowed in nl_query results.`,
      normalized
    };
  }
  // CALL subqueries and procedure calls are rejected wholesale: either form
  // could smuggle writes past the keyword scan above.
  if (/\bCALL\s*\{/.test(upper)) {
    return {
      ok: false,
      reason: "CALL { ... } subqueries are not allowed (they may hide writes).",
      normalized
    };
  }
  if (/\bCALL\s+[A-Z0-9_.]+\s*\(/.test(upper)) {
    return {
      ok: false,
      reason: "Procedure calls (CALL ns.proc(...)) are not allowed.",
      normalized
    };
  }
  return { ok: true, normalized };
}
1896
// Input schema (zod shape) for the `nl_query` tool.
var inputSchema8 = {
  question: z8.string().min(1).describe("Natural-language question about the codebase."),
  limit: z8.number().int().positive().max(200).default(50).describe("Row cap injected into the generated query as $limit.")
};
1900
// System prompt for NL->Cypher translation. This is a runtime string sent to
// the LLM verbatim; its constraints mirror validateReadOnlyCypher, which is
// still enforced on the model's output regardless of what the model does.
var SYSTEM_PROMPT = `You translate natural-language questions about a code graph into a single Cypher query.

Schema (Kuzu):
- Single node table: \`Symbol\`. Distinguish kinds via the \`kind\` STRING column.
Valid kinds: File, Function, Class, Interface, Component, Route, Variable.
- Edge tables (each FROM Symbol TO Symbol): IMPORTS, CALLS, RENDERS, INHERITS, DEFINES, EXPORTS.
- Useful Symbol columns: id, kind, repoId, name, path, lineStart, lineEnd, signature, leadingComment.

Examples:
// All files imported by app.ts:
MATCH (a:Symbol)-[:IMPORTS]->(b:Symbol)
WHERE a.kind = 'File' AND a.path = 'app.ts' AND b.kind = 'File'
RETURN b.path LIMIT $limit

// What calls useAuth?
MATCH (caller:Symbol)-[:CALLS]->(callee:Symbol)
WHERE callee.name = 'useAuth'
RETURN caller.name, caller.path, caller.kind LIMIT $limit

Constraints you MUST follow:
- Produce exactly ONE statement.
- It must be read-only: only MATCH / OPTIONAL MATCH / WITH / UNWIND / RETURN clauses.
- Do not use CREATE, MERGE, DELETE, SET, REMOVE, DROP, FOREACH, CALL { ... }, or procedure CALLs.
- Always include a LIMIT (use the $limit parameter that will be passed in).
- Use \`s.kind = 'File'\` style filters - do NOT use multi-label syntax like \`(s:File)\`.
- Return useful columns named clearly (id, name, kind, path, line, score, etc.).
- Output ONLY the Cypher query. No prose, no markdown fences.`;
1927
// MCP tool: NL -> Cypher via the LLM, guarded by validateReadOnlyCypher
// before anything touches the graph.
var nlQueryTool = {
  name: "nl_query",
  config: {
    title: "Natural-Language Cypher Query",
    description: "Translate a natural-language question into Cypher via the configured LLM, validate it against a read-only allowlist, then execute it on the graph.",
    inputSchema: inputSchema8,
    // openWorldHint: the handler reaches out to an external LLM.
    annotations: { readOnlyHint: true, openWorldHint: true }
  },
  handler: async ({ question, limit }, deps) => {
    // temperature 0 keeps the translation as deterministic as the model allows.
    const rawCypher = await deps.llm.generate({
      system: SYSTEM_PROMPT,
      messages: [{ role: "user", content: question }],
      temperature: 0,
      maxTokens: 400
    });
    const cypher = stripCodeFence(rawCypher).trim();
    const guard = validateReadOnlyCypher(cypher);
    if (!guard.ok) {
      // A rejected query is reported back with the reason (isError: false so
      // the client can inspect it) and is deliberately not cached.
      return jsonResult(
        {
          question,
          accepted: false,
          rejectedCypher: cypher,
          reason: guard.reason
        },
        { isError: false }
      );
    }
    // The cache key includes the generated Cypher, so different translations
    // of the same question do not collide.
    return cachedJsonResult(
      "nl_query",
      { question, limit, cypher: guard.normalized },
      deps,
      async () => {
        try {
          const rows = await deps.graph.query(guard.normalized ?? cypher, { limit });
          return {
            question,
            accepted: true,
            cypher: guard.normalized ?? cypher,
            rowCount: rows.length,
            rows
          };
        } catch (err) {
          // Execution failures are returned as data rather than thrown, so the
          // generated query (and the failure) stay visible to the client.
          return {
            question,
            accepted: true,
            cypher: guard.normalized ?? cypher,
            error: err instanceof Error ? err.message : String(err)
          };
        }
      }
    );
  }
};
1981
/**
 * Defensively unwrap a markdown code fence from LLM output (models are told
 * not to fence, but sometimes do anyway). Returns the fence body, or the
 * input unchanged when no fence is found.
 *
 * Generalized: the old pattern only matched a bare ``` or a literal
 * ```cypher tag followed by LF. Models also emit other tags (```sql) and
 * CRLF newlines; accept any alphabetic tag and an optional \r.
 */
function stripCodeFence(text) {
  const fenced = text.match(/```[a-z]*\r?\n([\s\S]*?)```/i);
  if (fenced?.[1]) return fenced[1];
  return text;
}
1986
// Input schema (zod shape) for the `search_semantic` tool.
var inputSchema9 = {
  description: z9.string().min(1).describe("Natural-language description of the symbol you are looking for."),
  k: z9.number().int().positive().max(50).default(10).describe("Number of nearest neighbours.")
};
1990
// MCP tool: vector KNN over pre-computed symbol embeddings.
var searchSemanticTool = {
  name: "search_semantic",
  config: {
    title: "Semantic Symbol Search",
    description: "Embed the supplied description via the configured LLM router, then run a vector KNN over indexed symbol embeddings.",
    inputSchema: inputSchema9,
    annotations: { readOnlyHint: true, idempotentHint: true }
  },
  handler: async ({ description, k }, deps) => {
    const [embedding] = await deps.llm.embed([description]);
    if (!embedding) {
      return jsonResult(
        { error: "LLM router returned no embedding for the query." },
        { isError: true }
      );
    }
    // Namespace = provider:model:dimension of the configured embedder; it is
    // part of the cache key so a config change misses the cache cleanly.
    const namespace = `${deps.llm.embeddingNamespace.provider}:${deps.llm.embeddingNamespace.model}:${deps.llm.embeddingNamespace.dimension}`;
    return cachedJsonResult("search_semantic", { description, k, namespace }, deps, async () => {
      // NOTE(review): the namespace filter is applied after the index returns
      // $k candidates, so fewer than k rows may survive - confirm acceptable.
      const rows = await deps.graph.query(
        `CALL QUERY_VECTOR_INDEX('Symbol', 'embedding_idx', $vec, $k)
WITH node, distance
WHERE node.embeddingNamespace = $ns
RETURN node.id AS id, node.name AS name, node.kind AS kind, node.path AS path,
node.lineStart AS line, node.signature AS signature, distance AS score
ORDER BY distance ASC`,
        { vec: embedding, k, ns: namespace }
      );
      return { description, k, namespace, count: rows.length, matches: rows };
    });
  }
};
2021
// Input schema (zod shape) for the `search_symbol` tool.
var inputSchema10 = {
  query: z10.string().min(1).describe("Substring to match against symbol names (case-insensitive)."),
  kind: z10.enum(["File", "Function", "Class", "Interface", "Component", "Route", "Variable"]).optional().describe("Optional filter by node kind."),
  limit: z10.number().int().positive().max(200).default(25)
};
2026
// MCP tool: case-insensitive substring search over symbol names.
var searchSymbolTool = {
  name: "search_symbol",
  config: {
    title: "Search Symbol",
    description: "Find functions, classes, components, routes, or variables by case-insensitive substring match on the symbol name.",
    inputSchema: inputSchema10,
    annotations: { readOnlyHint: true, idempotentHint: true }
  },
  handler: async ({ query, kind, limit }, deps) => cachedJsonResult("search_symbol", { query, kind, limit }, deps, async () => {
    // Two query texts: one with the kind filter, one name-only. $kind is
    // passed in both cases (null when absent) but only referenced by the first.
    const cypher = kind ? `MATCH (s:Symbol)
WHERE s.kind = $kind AND lower(s.name) CONTAINS lower($q)
RETURN s.id AS id, s.name AS name, s.kind AS kind, s.path AS path,
s.lineStart AS line, s.signature AS signature
ORDER BY s.name LIMIT $limit` : `MATCH (s:Symbol)
WHERE s.name IS NOT NULL AND lower(s.name) CONTAINS lower($q)
RETURN s.id AS id, s.name AS name, s.kind AS kind, s.path AS path,
s.lineStart AS line, s.signature AS signature
ORDER BY s.name LIMIT $limit`;
    const rows = await deps.graph.query(cypher, { q: query, limit, kind: kind ?? null });
    return { query, kind: kind ?? null, count: rows.length, matches: rows };
  })
};
2048
// Every tool exposed by the MCP server; registration order == listing order.
var ALL_TOOLS = [
  searchSymbolTool,
  findFileTool,
  searchSemanticTool,
  getFileContextTool,
  findCallersTool,
  getComponentTreeTool,
  affectedByTool,
  getDependenciesTool,
  blastRadiusTool,
  nlQueryTool
];
2060
/**
 * Register every tool on the MCP server, wrapping each handler so that an
 * uncaught exception is logged and returned as a structured isError result
 * instead of surfacing as a protocol fault.
 */
function registerAllTools(server, deps) {
  for (const tool of ALL_TOOLS) {
    server.registerTool(tool.name, tool.config, async (args) => {
      try {
        return await tool.handler(args, deps);
      } catch (err) {
        deps.logger?.error("tool handler threw", {
          tool: tool.name,
          error: err instanceof Error ? err.message : String(err)
        });
        return jsonResult(
          {
            tool: tool.name,
            error: err instanceof Error ? err.message : String(err)
          },
          { isError: true }
        );
      }
    });
  }
}
2081
/**
 * Build an MCP server advertising tool + logging capabilities, register all
 * tools against the supplied deps, and return it. Falls back to a generic
 * identity when the caller provides no serverInfo.
 */
function createMcpServer(opts) {
  const fallbackInfo = { name: "codegraph", version: "0.0.0" };
  const capabilities = { tools: {}, logging: {} };
  const server = new McpServer(opts.serverInfo ?? fallbackInfo, { capabilities });
  registerAllTools(server, opts.deps);
  return server;
}
2089
/**
 * Boot an Express app exposing the MCP server over SSE:
 *   GET  /healthz  - unauthenticated liveness probe
 *   GET  /mcp      - opens an SSE session (one McpServer per session)
 *   POST /messages - client->server messages, routed by ?sessionId=
 * Resolves to { httpServer, address, close } once the listener is bound.
 */
async function startSseServer(opts) {
  const { deps, config } = opts;
  const logger = deps.logger;
  const app = express();
  app.use(express.json({ limit: "2mb" }));
  // Health check is registered before auth so probes need no token.
  app.get("/healthz", (_req, res) => {
    res.status(200).json({ ok: true });
  });
  app.use(bearerAuthMiddleware(config.bearerToken, logger));
  // sessionId -> live SSE transport.
  const transports = /* @__PURE__ */ new Map();
  app.get("/mcp", async (_req, res) => {
    let transport;
    try {
      transport = new SSEServerTransport("/messages", res);
    } catch (err) {
      logger?.error("failed to create SSE transport", {
        error: err instanceof Error ? err.message : String(err)
      });
      if (!res.headersSent) res.status(500).send("Failed to establish SSE stream");
      return;
    }
    const sessionId = transport.sessionId;
    transports.set(sessionId, transport);
    transport.onclose = () => {
      transports.delete(sessionId);
      logger?.info("sse session closed", { sessionId });
    };
    // Each SSE session gets its own McpServer instance bound to its transport.
    const server = createMcpServer({ deps, serverInfo: opts.serverInfo });
    try {
      await server.connect(transport);
      logger?.info("sse session opened", { sessionId });
    } catch (err) {
      logger?.error("server.connect failed", {
        sessionId,
        error: err instanceof Error ? err.message : String(err)
      });
      transports.delete(sessionId);
      if (!res.headersSent) res.status(500).send("MCP connect failed");
    }
  });
  app.post("/messages", async (req, res) => {
    const sessionId = req.query.sessionId;
    if (!sessionId) {
      res.status(400).json({ error: "Missing sessionId query parameter" });
      return;
    }
    const transport = transports.get(sessionId);
    if (!transport) {
      res.status(404).json({ error: "No active session for sessionId" });
      return;
    }
    try {
      await transport.handlePostMessage(req, res, req.body);
    } catch (err) {
      logger?.error("handlePostMessage failed", {
        sessionId,
        error: err instanceof Error ? err.message : String(err)
      });
      if (!res.headersSent) res.status(500).json({ error: "Failed to handle message" });
    }
  });
  // Wrap listen() so startup errors (e.g. EADDRINUSE) reject instead of
  // escaping as an unhandled 'error' event.
  const httpServer = await new Promise((resolve2, reject) => {
    const srv = app.listen(config.port, config.host, () => resolve2(srv)).on("error", reject);
  });
  const address = httpServer.address();
  if (address === null || typeof address === "string") {
    throw new Error("HTTP server bound to unexpected address");
  }
  logger?.info("codegraph mcp server listening", {
    host: address.address,
    port: address.port,
    tools: 10
  });
  const started = {
    httpServer,
    address: { host: address.address, port: address.port },
    // Graceful shutdown: close every SSE session, then the HTTP listener,
    // then release cache/graph resources.
    async close() {
      for (const [id, t] of transports) {
        try {
          await t.close();
        } catch {
        }
        transports.delete(id);
      }
      await new Promise((resolve2, reject) => {
        httpServer.close((err) => err ? reject(err) : resolve2());
      });
      await deps.cache.close?.();
      await deps.graph.close?.();
    }
  };
  return started;
}
2182
/**
 * Express middleware enforcing a static bearer token on every request.
 * Missing/malformed header -> 401, wrong token -> 403, match -> next().
 */
function bearerAuthMiddleware(expectedToken, logger) {
  const PREFIX = "bearer ";
  return (req, res, next) => {
    const header = req.header("authorization") ?? req.header("Authorization");
    const hasBearer = typeof header === "string" && header.toLowerCase().startsWith(PREFIX);
    if (!hasBearer) {
      logger?.warn("auth missing", { path: req.path });
      res.status(401).json({ error: "Missing bearer token" });
      return;
    }
    const presented = header.slice(PREFIX.length).trim();
    if (safeEqual(presented, expectedToken)) {
      next();
      return;
    }
    logger?.warn("auth rejected", { path: req.path });
    res.status(403).json({ error: "Invalid bearer token" });
  };
}
2199
/**
 * Timing-safe string comparison for equal-length inputs: XOR every code-unit
 * pair and accumulate, so the position of a mismatch does not affect timing.
 * (Unequal lengths return false immediately, revealing only the length.)
 */
function safeEqual(a, b) {
  if (a.length !== b.length) return false;
  let mismatch = 0;
  for (let i = a.length - 1; i >= 0; i--) {
    mismatch |= a.charCodeAt(i) ^ b.charCodeAt(i);
  }
  return mismatch === 0;
}
2207
/**
 * Produce a 32-hex-char token from a v4 UUID (crypto-strong randomness),
 * dropping the dashes.
 */
function generateBearerToken() {
  return randomUUID().split("-").join("");
}
2210
// Server defaults: TCP port, loopback-only bind host, and tool-result cache TTL (seconds).
var MCP_PORT = 3748;
var DEFAULT_HOST = "127.0.0.1";
var DEFAULT_TTL = 30;
2213
/** Absolute path of the default config file: ~/.codegraph/config.json. */
function defaultConfigPath() {
  const segments = [".codegraph", "config.json"];
  return resolve(homedir(), ...segments);
}
2216
/**
 * Load the on-disk config as parsed JSON. A missing file is treated as an
 * empty config ({}); any other I/O error - and any JSON parse error -
 * propagates to the caller.
 */
async function readCodegraphConfig(path6 = defaultConfigPath()) {
  let raw;
  try {
    raw = await readFile5(path6, "utf8");
  } catch (err) {
    if (err.code === "ENOENT") return {};
    throw err;
  }
  return JSON.parse(raw);
}
2225
/**
 * Persist the config as pretty-printed JSON with a trailing newline,
 * creating the parent directory on first use.
 */
async function writeCodegraphConfig(config, path6 = defaultConfigPath()) {
  const payload = `${JSON.stringify(config, null, 2)}\n`;
  await mkdir3(dirname3(path6), { recursive: true });
  await writeFile2(path6, payload, "utf8");
}
2230
/**
 * Resolve the effective server config from (highest precedence first)
 * explicit overrides, the config file, then environment variables, falling
 * back to module defaults. When no bearer token exists anywhere, a fresh one
 * is generated and persisted to the config file under both the legacy
 * `mcpToken` key and `server.bearerToken`; `created` reports that case.
 */
async function resolveServerConfig(opts) {
  const path6 = opts.configPath ?? defaultConfigPath();
  const fileConfig = await readCodegraphConfig(path6);
  const env = process.env;
  let bearerToken = opts.overrides?.bearerToken ?? fileConfig.mcpToken ?? fileConfig.server?.bearerToken ?? env.CODEGRAPH_BEARER_TOKEN ?? "";
  let created = false;
  if (!bearerToken) {
    bearerToken = generateBearerToken();
    created = true;
    await writeCodegraphConfig(
      {
        ...fileConfig,
        mcpToken: bearerToken,
        server: { ...fileConfig.server ?? {}, bearerToken }
      },
      path6
    );
  }
  const config = {
    host: opts.overrides?.host ?? fileConfig.server?.host ?? env.CODEGRAPH_HOST ?? DEFAULT_HOST,
    // NOTE(review): Number(env.CODEGRAPH_PORT) yields NaN for non-numeric
    // values and is passed through unchecked - confirm upstream validation.
    port: opts.overrides?.port ?? fileConfig.server?.port ?? (env.CODEGRAPH_PORT ? Number(env.CODEGRAPH_PORT) : MCP_PORT),
    bearerToken,
    cacheTtlSeconds: opts.overrides?.cacheTtlSeconds ?? fileConfig.server?.cacheTtlSeconds ?? (env.CODEGRAPH_CACHE_TTL ? Number(env.CODEGRAPH_CACHE_TTL) : DEFAULT_TTL)
  };
  return { config, configPath: path6, created };
}
2256
/**
 * Database path precedence: config file's data.dbPath, then the
 * CODEGRAPH_DB_PATH env var, then undefined (caller falls back to its own
 * default).
 */
function resolveDbPath(fileConfig) {
  const fromFile = fileConfig.data?.dbPath;
  if (fromFile != null) return fromFile;
  const fromEnv = process.env.CODEGRAPH_DB_PATH;
  if (fromEnv != null) return fromEnv;
  return undefined;
}
2259
/**
 * Minimal JSON-lines logger writing to stderr (keeping stdout free for
 * command output). Messages below the configured level are dropped.
 */
function createConsoleLogger(level = "info") {
  const levels = { info: 10, warn: 20, error: 30 };
  const min = levels[level];
  const emit = (lvl, msg, meta) => {
    if (levels[lvl] < min) {
      return;
    }
    const record = { ts: new Date().toISOString(), level: lvl, msg, ...meta };
    process.stderr.write(`${JSON.stringify(record)}\n`);
  };
  return {
    info: (msg, meta) => emit("info", msg, meta),
    warn: (msg, meta) => emit("warn", msg, meta),
    error: (msg, meta) => emit("error", msg, meta)
  };
}
2274
/**
 * Assemble dependencies (graph client, LLM router, cache, logger) and start
 * the SSE MCP server. Accepts either a bare port number (legacy call style)
 * or an options object; any dependency may be injected via options.deps.
 */
async function startMcpServer(portOrOptions) {
  const options = typeof portOrOptions === "number" ? { port: portOrOptions } : portOrOptions ?? {};
  const logger = createConsoleLogger();
  const overrides = {};
  if (options.port !== void 0) overrides.port = options.port;
  if (options.host !== void 0) overrides.host = options.host;
  const { config, configPath: configPath2, created } = await resolveServerConfig({
    ...options.configPath !== void 0 ? { configPath: options.configPath } : {},
    overrides
  });
  if (created) logger.info("generated new bearer token", { configPath: configPath2 });
  // NOTE(review): the config file is read again here even though
  // resolveServerConfig already loaded it - a second disk read per startup.
  const fileConfig = await readCodegraphConfig(options.configPath ?? defaultConfigPath());
  const dbPath = options.dbPath ?? resolveDbPath(fileConfig);
  const cache = options.deps?.cache ?? new InMemoryCache();
  const graph = options.deps?.graph ?? await loadGraphClient(dbPath);
  const llm = options.deps?.llm ?? await loadLlmRouter(options.configPath ?? defaultConfigPath());
  const deps = {
    graph,
    llm,
    cache,
    cacheTtlSeconds: config.cacheTtlSeconds,
    logger
  };
  return startSseServer({ deps, config });
}
2299
/**
 * Dynamically import the graph-db package and return a minimal client
 * (query/close) over a connected, migrated GraphDb instance. Import failures
 * are wrapped with a build hint for the caller.
 */
async function loadGraphClient(dbPath) {
  let mod;
  try {
    mod = await import("./src-467W2KXC.js");
  } catch (err) {
    throw new Error(
      `Failed to import @codegraph/graph-db. Run \`pnpm -r build\` first. Underlying error: ${err instanceof Error ? err.message : String(err)}`
    );
  }
  const GraphDb2 = mod.GraphDb;
  if (typeof GraphDb2 !== "function") {
    throw new Error("@codegraph/graph-db has no `GraphDb` export.");
  }
  const db = new GraphDb2(dbPath !== void 0 ? { dbPath } : {});
  await db.connect();
  await db.migrate();
  return {
    // Adapter: tools expect plain row arrays, GraphDb returns { data }.
    async query(cypher, params) {
      const r = await db.query(cypher, params ?? {});
      return r.data;
    },
    async close() {
      await db.close();
    }
  };
}
2325
/**
 * Dynamically import the llm-router package, build a router from the file
 * config's `llm` section, and adapt it to the narrow interface the MCP tools
 * use. Import failures are wrapped with a build hint for the caller.
 */
async function loadLlmRouter(configPath2) {
  let mod;
  try {
    mod = await import("./src-UVET6JHH.js");
  } catch (err) {
    throw new Error(
      `Failed to import @codegraph/llm-router. Run \`pnpm -r build\` first. Underlying error: ${err instanceof Error ? err.message : String(err)}`
    );
  }
  const createLlmRouter2 = mod.createLlmRouter;
  if (typeof createLlmRouter2 !== "function") {
    throw new Error("@codegraph/llm-router has no `createLlmRouter` export.");
  }
  const fileConfig = await readCodegraphConfig(configPath2);
  const router = await createLlmRouter2({
    config: fileConfig.llm ?? void 0,
    configPath: configPath2
  });
  return adaptLlmRouter(router);
}
2345
/**
 * Wrap a raw llm-router instance in the narrow { embed, generate,
 * embeddingNamespace } interface used by the MCP tools. The namespace is
 * taken from config.embeddingNamespace when present, otherwise derived from
 * config.embeddings, otherwise a sentinel "unknown" namespace.
 */
function adaptLlmRouter(router) {
  const cfg = router.config;
  let namespace = cfg?.embeddingNamespace;
  if (namespace == null) {
    const emb = cfg?.embeddings;
    namespace = emb
      ? { provider: emb.provider, model: emb.model, dimension: emb.dimension }
      : { provider: "unknown", model: "unknown", dimension: 0 };
  }
  return {
    embed: (texts) => router.embed(texts),
    generate: (opts) => router.generate(opts),
    embeddingNamespace: namespace
  };
}
2357
+
2358
+ // src/commands/serve.ts
2359
/**
 * `codegraph serve` entry point: start the MCP server behind a spinner,
 * print the connection banner, then block forever until SIGINT/SIGTERM
 * triggers a graceful shutdown and process exit.
 */
async function runServeCommand(opts = {}) {
  const spinner = makeSpinner("Booting MCP server").start();
  let started;
  try {
    started = await startMcpServer({
      ...opts.port !== void 0 ? { port: opts.port } : {},
      ...opts.host !== void 0 ? { host: opts.host } : {},
      ...opts.dbPath !== void 0 ? { dbPath: opts.dbPath } : {}
    });
    spinner.stop();
  } catch (err) {
    spinner.fail("Server failed to start");
    throw err;
  }
  const url = `http://${started.address.host}:${started.address.port}/mcp`;
  const tokenHint = "bearer token at ~/.codegraph/config.json (codegraph config show to view)";
  process.stdout.write(`${renderServeBanner(url, tokenHint)}
`);
  // Close the server on the first SIGINT/SIGTERM, then exit; shutdown errors
  // are reported to stderr but still lead to exit(0).
  const shutdown = async (signal) => {
    process.stderr.write(`
shutting down (${signal})...
`);
    try {
      await started.close();
    } catch (err) {
      process.stderr.write(
        `shutdown failed: ${err instanceof Error ? err.message : String(err)}
`
      );
    }
    process.exit(0);
  };
  process.once("SIGINT", shutdown);
  process.once("SIGTERM", shutdown);
  // Park forever; termination happens via the signal handlers above.
  await new Promise(() => {
  });
}
2396
+
2397
+ // src/program.ts
2398
/**
 * Build the commander CLI: index / status / serve / doctor plus the `config`
 * sub-tree (show, llm set, llm test). Returns the unparsed Command instance.
 */
function buildProgram() {
  const program = new Command();
  program.name("codegraph").description("Live, queryable knowledge graph for your codebase").version("0.1.0").option("--verbose", "Print full stack traces on error").hook("preAction", (thisCommand) => {
    const opts = thisCommand.optsWithGlobals();
    if (opts.verbose) process.env.CODEGRAPH_VERBOSE = "1";
  });
  program.on("--help", () => {
    printBanner();
  });
  // commander exposes the negated --no-embed flag as opts.embed === false.
  program.command("index").description("Parse a JS/TS repo into the local embedded graph").argument("<path>", "Path to the repo root").option("--no-embed", "Skip the embedding pass (faster, no LLM calls)").action(async (repoPath, opts) => {
    await runIndexCommand({ repoPath, noEmbed: opts.embed === false });
  });
  program.command("status").description("Show node/edge counts and embedding coverage for a repo").argument("<path>", "Path to the repo root").action(async (repoPath) => {
    await runStatusCommand({ repoPath });
  });
  // Only explicitly-set options are forwarded, so runServeCommand's own
  // defaulting (config file / env) still applies.
  program.command("serve").description("Boot the local MCP server (default port 3748)").option("--port <port>", "TCP port to bind", (v) => Number(v)).option("--host <host>", "Host to bind (default 127.0.0.1)").option("--db-path <path>", "Override the embedded Kuzu graph directory").action(async (opts) => {
    await runServeCommand({
      ...opts.port !== void 0 ? { port: opts.port } : {},
      ...opts.host !== void 0 ? { host: opts.host } : {},
      ...opts.dbPath !== void 0 ? { dbPath: opts.dbPath } : {}
    });
  });
  program.command("doctor").description("Check environment, config, LLM credentials, and Kuzu DB health").action(async () => {
    await runDoctorCommand();
  });
  const configCmd = program.command("config").description("Manage ~/.codegraph/config.json");
  configCmd.command("show").description("Print the resolved config").action(async () => {
    await runConfigShow();
  });
  const llmCmd = configCmd.command("llm").description("Configure the LLM router");
  llmCmd.command("set [preset]").description(
    "Set LLM preset: managed-stub | byo-openai | byo-anthropic | byo-google | local-ollama. Omit the preset to pick interactively."
  ).action(async (preset) => {
    await runConfigLlmSet(preset);
  });
  llmCmd.command("test").description("Run a 5-token generation + 1 embedding call against the current config").action(async () => {
    await runConfigLlmTest();
  });
  return program;
}
2438
+
2439
+ export {
2440
+ configPath,
2441
+ loadConfig,
2442
+ saveConfig,
2443
+ renderError,
2444
+ buildProgram
2445
+ };
2446
+ //# sourceMappingURL=chunk-O4ZO6CP5.js.map