bluelens-mcp 0.1.0

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (3) hide show
  1. package/README.md +182 -0
  2. package/dist/index.js +3875 -0
  3. package/package.json +28 -0
package/dist/index.js ADDED
@@ -0,0 +1,3875 @@
1
+ #!/usr/bin/env node
2
+
3
+ // src/index.ts
4
+ import { readFileSync as readFileSync2 } from "fs";
5
+ import { join as join5, dirname } from "path";
6
+ import { fileURLToPath } from "url";
7
+ import { McpServer } from "@modelcontextprotocol/sdk/server/mcp.js";
8
+ import { StdioServerTransport } from "@modelcontextprotocol/sdk/server/stdio.js";
9
+
10
+ // src/tools/workspaces.ts
11
+ import { z } from "zod";
12
+
13
+ // src/storage.ts
14
+ import { readFile, writeFile, mkdir } from "fs/promises";
15
+ import { existsSync } from "fs";
16
+ import { join } from "path";
17
+ import { homedir } from "os";
18
// Root directory for all Bluelens data; overridable via BLUELENS_DATA_DIR.
function getDataDir() {
  const override = process.env.BLUELENS_DATA_DIR;
  if (override != null) return override;
  return join(homedir(), ".bluelens");
}
21
// Path of the single JSON document that backs workspaces/folders/diagrams.
function dbPath() {
  const dir = getDataDir();
  return join(dir, "db.json");
}
24
// Directory holding one JSON file per saved code graph.
function codegraphsDir() {
  const dir = getDataDir();
  return join(dir, "codegraphs");
}
27
// Path of the code-graph index (summary entries for every stored graph).
function codegraphIndexPath() {
  const dir = codegraphsDir();
  return join(dir, "index.json");
}
30
// Path of the JSON file storing the full graph with the given id.
function codegraphFilePath(id) {
  const filename = `${id}.json`;
  return join(codegraphsDir(), filename);
}
33
// Create `path` (and any missing parents) if it does not exist.
// mkdir with { recursive: true } is a no-op for an existing directory, so the
// previous existsSync pre-check was a needless stat and a TOCTOU race
// (the directory could appear/disappear between check and mkdir).
async function ensureDir(path) {
  await mkdir(path, { recursive: true });
}
36
// Load the whole database from disk; a fresh install gets an empty shape.
async function readDB() {
  await ensureDir(getDataDir());
  const file = dbPath();
  if (existsSync(file)) {
    const raw = await readFile(file, "utf-8");
    return JSON.parse(raw);
  }
  return { version: 1, workspaces: [], folders: [], diagrams: [] };
}
41
// Persist the whole database, pretty-printed for easy manual inspection.
async function writeDB(db) {
  await ensureDir(getDataDir());
  const payload = JSON.stringify(db, null, 2);
  await writeFile(dbPath(), payload, "utf-8");
}
45
// Load the code-graph index; missing index means no graphs saved yet.
async function readCodeGraphIndex() {
  await ensureDir(codegraphsDir());
  const indexFile = codegraphIndexPath();
  if (!existsSync(indexFile)) return [];
  const raw = await readFile(indexFile, "utf-8");
  return JSON.parse(raw);
}
50
// Persist the code-graph index, pretty-printed.
async function writeCodeGraphIndex(index) {
  await ensureDir(codegraphsDir());
  const serialized = JSON.stringify(index, null, 2);
  await writeFile(codegraphIndexPath(), serialized, "utf-8");
}
54
// Generate a 9-character lowercase base-36 id.
// The previous single-shot `Math.random().toString(36).slice(2, 11)` could
// return FEWER than 9 characters whenever the random fraction's base-36
// expansion was short (e.g. 0.5 -> "0.i"), producing variable-length ids.
// Accumulate random digits until at least 9 are available, then cut.
// NOTE: Math.random() is not cryptographically secure; these ids are only
// local identifiers, never secrets.
function uid() {
  let id = "";
  while (id.length < 9) {
    id += Math.random().toString(36).slice(2);
  }
  return id.slice(0, 9);
}
57
// Single data-access object over the JSON database and the per-graph files.
// Every method re-reads state from disk, mutates a fresh copy, and writes it
// back, so there is no in-memory cache to invalidate.
var storage = {
  // ---- workspaces ------------------------------------------------------
  async listWorkspaces() {
    const db = await readDB();
    return db.workspaces;
  },
  async createWorkspace(name) {
    const db = await readDB();
    const created = { id: uid(), name, createdAt: Date.now() };
    db.workspaces.push(created);
    await writeDB(db);
    return created;
  },
  async deleteWorkspace(id) {
    const db = await readDB();
    // Cascade: folders and diagrams belonging to the workspace go too.
    db.workspaces = db.workspaces.filter((w) => w.id !== id);
    db.folders = db.folders.filter((f) => f.workspaceId !== id);
    db.diagrams = db.diagrams.filter((d) => d.workspaceId !== id);
    await writeDB(db);
  },
  // ---- folders ---------------------------------------------------------
  async listFolders(workspaceId) {
    const db = await readDB();
    return db.folders.filter((f) => f.workspaceId === workspaceId);
  },
  async createFolder(workspaceId, name, parentId = null) {
    const db = await readDB();
    const created = { id: uid(), name, workspaceId, parentId };
    db.folders.push(created);
    await writeDB(db);
    return created;
  },
  async deleteFolder(id) {
    const db = await readDB();
    db.folders = db.folders.filter((f) => f.id !== id);
    // Diagrams inside the deleted folder drop back to the workspace root
    // instead of being removed.
    db.diagrams = db.diagrams.map((d) => d.folderId === id ? { ...d, folderId: null } : d);
    await writeDB(db);
  },
  // ---- diagrams --------------------------------------------------------
  async listDiagrams(workspaceId, folderId) {
    const db = await readDB();
    return db.diagrams.filter((d) => {
      if (d.workspaceId !== workspaceId) return false;
      // Omitted folderId means "all diagrams in the workspace".
      if (folderId === void 0) return true;
      return d.folderId === folderId;
    });
  },
  async getDiagram(id) {
    const db = await readDB();
    return db.diagrams.find((d) => d.id === id) ?? null;
  },
  async createDiagram(workspaceId, name, code, folderId = null, description) {
    const db = await readDB();
    const created = {
      id: uid(),
      name,
      code,
      description,
      lastModified: Date.now(),
      workspaceId,
      folderId,
      nodeLinks: []
    };
    db.diagrams.push(created);
    await writeDB(db);
    return created;
  },
  async updateDiagram(id, updates) {
    const db = await readDB();
    const idx = db.diagrams.findIndex((d) => d.id === id);
    if (idx === -1) return null;
    const merged = { ...db.diagrams[idx], ...updates, lastModified: Date.now() };
    db.diagrams[idx] = merged;
    await writeDB(db);
    return merged;
  },
  async deleteDiagram(id) {
    const db = await readDB();
    db.diagrams = db.diagrams.filter((d) => d.id !== id);
    await writeDB(db);
  },
  // ---- code graphs (one JSON file per graph + a summary index) ---------
  async listCodeGraphs(workspaceId) {
    const index = await readCodeGraphIndex();
    if (!workspaceId) return index;
    return index.filter((e) => e.workspaceId === workspaceId);
  },
  async getCodeGraph(id) {
    const file = codegraphFilePath(id);
    if (!existsSync(file)) return null;
    return JSON.parse(await readFile(file, "utf-8"));
  },
  async saveCodeGraph(graph) {
    await ensureDir(codegraphsDir());
    await writeFile(codegraphFilePath(graph.id), JSON.stringify(graph, null, 2), "utf-8");
    // Upsert the summary entry so list views stay in sync with the file.
    const index = await readCodeGraphIndex();
    const entry = {
      id: graph.id,
      name: graph.name,
      repoId: graph.repoId,
      workspaceId: graph.workspaceId,
      createdAt: graph.createdAt,
      nodeCount: Object.keys(graph.nodes).length,
      relationCount: graph.relations.length
    };
    const pos = index.findIndex((e) => e.id === graph.id);
    if (pos >= 0) index[pos] = entry;
    else index.push(entry);
    await writeCodeGraphIndex(index);
  },
  async deleteCodeGraph(id) {
    // unlink is not in the module's static fs/promises imports, so load it lazily.
    const { unlink } = await import("fs/promises");
    const file = codegraphFilePath(id);
    if (existsSync(file)) await unlink(file);
    const remaining = (await readCodeGraphIndex()).filter((e) => e.id !== id);
    await writeCodeGraphIndex(remaining);
  }
};
165
+
166
+ // src/tools/workspaces.ts
167
// Register workspace/folder management tools on the MCP server.
function registerWorkspaceTools(server) {
  // Shared response builders: an MCP tool result is a list of content parts.
  const ok = (text) => ({ content: [{ type: "text", text }] });
  const fail = (text) => ({ content: [{ type: "text", text }], isError: true });
  server.tool(
    "list_workspaces",
    "List all Bluelens workspaces",
    {},
    async () => {
      const workspaces = await storage.listWorkspaces();
      if (workspaces.length === 0) {
        return ok("No workspaces found. Use create_workspace to create one.");
      }
      return ok(JSON.stringify(workspaces, null, 2));
    }
  );
  server.tool(
    "create_workspace",
    "Create a new Bluelens workspace",
    { name: z.string().min(1).describe("Workspace name") },
    async ({ name }) => {
      const workspace = await storage.createWorkspace(name);
      return ok(JSON.stringify(workspace, null, 2));
    }
  );
  server.tool(
    "delete_workspace",
    "Delete a workspace and all its folders and diagrams",
    { workspace_id: z.string().describe("Workspace ID to delete") },
    async ({ workspace_id }) => {
      const workspaces = await storage.listWorkspaces();
      if (!workspaces.some((w) => w.id === workspace_id)) {
        return fail(`Workspace "${workspace_id}" not found`);
      }
      await storage.deleteWorkspace(workspace_id);
      return ok(`Workspace "${workspace_id}" deleted`);
    }
  );
  server.tool(
    "list_folders",
    "List all folders in a workspace",
    { workspace_id: z.string().describe("Workspace ID") },
    async ({ workspace_id }) => {
      const folders = await storage.listFolders(workspace_id);
      if (folders.length === 0) return ok("No folders in this workspace.");
      return ok(JSON.stringify(folders, null, 2));
    }
  );
  server.tool(
    "create_folder",
    "Create a folder inside a workspace",
    {
      workspace_id: z.string().describe("Workspace ID"),
      name: z.string().min(1).describe("Folder name"),
      parent_id: z.string().optional().describe("Parent folder ID (omit for root-level folder)")
    },
    async ({ workspace_id, name, parent_id }) => {
      const folder = await storage.createFolder(workspace_id, name, parent_id ?? null);
      return ok(JSON.stringify(folder, null, 2));
    }
  );
  server.tool(
    "delete_folder",
    "Delete a folder (diagrams inside are moved to workspace root)",
    { folder_id: z.string().describe("Folder ID to delete") },
    async ({ folder_id }) => {
      // No existence check is possible here: storage exposes folders only
      // per-workspace and this tool receives just the folder id.
      await storage.deleteFolder(folder_id);
      return ok(`Folder "${folder_id}" deleted`);
    }
  );
}
243
+
244
+ // src/tools/diagrams.ts
245
+ import { z as z2 } from "zod";
246
+ import { writeFile as writeFile2 } from "fs/promises";
247
+ import { homedir as homedir2 } from "os";
248
+ import { join as join2 } from "path";
249
// Starter Mermaid flowchart inserted by create_diagram when no code is given.
var DEFAULT_DIAGRAM = `flowchart TD
Start([Start]) --> Decision{Decision}
Decision -->|Yes| Action[Action]
Decision -->|No| End([End])
Action --> End`;
254
// Register all diagram-related MCP tools (CRUD, search, node links, export).
function registerDiagramTools(server2) {
  // list_diagrams: returns id/name/metadata summaries rather than full
  // Mermaid source, keeping tool responses compact.
  server2.tool(
    "list_diagrams",
    "List diagrams in a workspace, optionally filtered by folder",
    {
      workspace_id: z2.string().describe("Workspace ID"),
      folder_id: z2.string().optional().describe("Filter by folder ID. Omit to list all diagrams in the workspace")
    },
    async ({ workspace_id, folder_id }) => {
      const diagrams = await storage.listDiagrams(workspace_id, folder_id);
      if (diagrams.length === 0) {
        return { content: [{ type: "text", text: "No diagrams found." }] };
      }
      const summary = diagrams.map((d) => ({
        id: d.id,
        name: d.name,
        description: d.description,
        folderId: d.folderId,
        lastModified: new Date(d.lastModified).toISOString(),
        linesOfCode: d.code.split("\n").length,
        nodeLinks: d.nodeLinks.length
      }));
      return { content: [{ type: "text", text: JSON.stringify(summary, null, 2) }] };
    }
  );
  // get_diagram: full stored record, including Mermaid code and node links.
  server2.tool(
    "get_diagram",
    "Get a diagram with its full Mermaid code",
    { diagram_id: z2.string().describe("Diagram ID") },
    async ({ diagram_id }) => {
      const diagram = await storage.getDiagram(diagram_id);
      if (!diagram) {
        return { content: [{ type: "text", text: `Diagram "${diagram_id}" not found` }], isError: true };
      }
      return { content: [{ type: "text", text: JSON.stringify(diagram, null, 2) }] };
    }
  );
  // create_diagram: falls back to DEFAULT_DIAGRAM when no code is supplied.
  // NOTE(review): workspace_id is not validated against existing workspaces,
  // so an unknown id creates an orphan diagram — confirm intended.
  server2.tool(
    "create_diagram",
    "Create a new Mermaid diagram in a workspace",
    {
      workspace_id: z2.string().describe("Workspace ID"),
      name: z2.string().min(1).describe("Diagram name"),
      code: z2.string().optional().describe("Mermaid diagram code. Defaults to a flowchart template if omitted"),
      folder_id: z2.string().optional().describe("Folder ID to place the diagram in"),
      description: z2.string().optional().describe("Optional description")
    },
    async ({ workspace_id, name, code, folder_id, description }) => {
      const diagram = await storage.createDiagram(
        workspace_id,
        name,
        code ?? DEFAULT_DIAGRAM,
        folder_id ?? null,
        description
      );
      return { content: [{ type: "text", text: JSON.stringify(diagram, null, 2) }] };
    }
  );
  // update_diagram: partial update — only fields explicitly provided change
  // (folder_id additionally accepts null to move a diagram to workspace root).
  server2.tool(
    "update_diagram",
    "Update a diagram's name, code, or description",
    {
      diagram_id: z2.string().describe("Diagram ID"),
      name: z2.string().optional().describe("New name"),
      code: z2.string().optional().describe("New Mermaid code"),
      description: z2.string().optional().describe("New description"),
      folder_id: z2.string().nullable().optional().describe("Move to folder (null to move to workspace root)")
    },
    async ({ diagram_id, name, code, description, folder_id }) => {
      const updates = {};
      if (name !== void 0) updates.name = name;
      if (code !== void 0) updates.code = code;
      if (description !== void 0) updates.description = description;
      if (folder_id !== void 0) updates.folderId = folder_id;
      const diagram = await storage.updateDiagram(diagram_id, updates);
      if (!diagram) {
        return { content: [{ type: "text", text: `Diagram "${diagram_id}" not found` }], isError: true };
      }
      return { content: [{ type: "text", text: JSON.stringify(diagram, null, 2) }] };
    }
  );
  // delete_diagram: verifies existence first so the reply can echo the name.
  server2.tool(
    "delete_diagram",
    "Delete a diagram permanently",
    { diagram_id: z2.string().describe("Diagram ID to delete") },
    async ({ diagram_id }) => {
      const existing = await storage.getDiagram(diagram_id);
      if (!existing) {
        return { content: [{ type: "text", text: `Diagram "${diagram_id}" not found` }], isError: true };
      }
      await storage.deleteDiagram(diagram_id);
      return { content: [{ type: "text", text: `Diagram "${diagram_id}" ("${existing.name}") deleted` }] };
    }
  );
  // search_diagrams: case-insensitive substring search over name,
  // description, and Mermaid code.
  server2.tool(
    "search_diagrams",
    "Search diagrams by name or content across all workspaces",
    {
      query: z2.string().min(1).describe("Search query (matches diagram name, description, or Mermaid code)"),
      workspace_id: z2.string().optional().describe("Limit search to this workspace")
    },
    async ({ query, workspace_id }) => {
      // When scoped, fabricate a one-entry workspace list; listDiagrams only
      // needs the id.
      const workspaces = workspace_id ? [{ id: workspace_id }] : await storage.listWorkspaces();
      const results = [];
      const q = query.toLowerCase();
      for (const ws of workspaces) {
        const diagrams = await storage.listDiagrams(ws.id);
        for (const d of diagrams) {
          const matchIn = [];
          if (d.name.toLowerCase().includes(q)) matchIn.push("name");
          if (d.description?.toLowerCase().includes(q)) matchIn.push("description");
          if (d.code.toLowerCase().includes(q)) matchIn.push("code");
          if (matchIn.length > 0) {
            results.push({ id: d.id, name: d.name, workspaceId: d.workspaceId, matchIn: matchIn.join(", ") });
          }
        }
      }
      if (results.length === 0) {
        return { content: [{ type: "text", text: `No diagrams found matching "${query}"` }] };
      }
      return { content: [{ type: "text", text: JSON.stringify(results, null, 2) }] };
    }
  );
  // add_node_link: upsert — at most one link per node id.
  server2.tool(
    "add_node_link",
    'Link a Mermaid node in a diagram to another diagram (enables drill-down navigation in the Bluelens UI). node_id must match the exact node identifier used in the Mermaid code (e.g. "UserService", "A", "DB").',
    {
      diagram_id: z2.string().describe("Diagram containing the node"),
      node_id: z2.string().describe("Mermaid node identifier as written in the diagram code (case-sensitive)"),
      target_diagram_id: z2.string().describe("Diagram to navigate to when the node is clicked"),
      label: z2.string().optional().describe("Optional label for the link")
    },
    async ({ diagram_id, node_id, target_diagram_id, label }) => {
      const diagram = await storage.getDiagram(diagram_id);
      if (!diagram) return { content: [{ type: "text", text: `Diagram "${diagram_id}" not found` }], isError: true };
      const target = await storage.getDiagram(target_diagram_id);
      if (!target) return { content: [{ type: "text", text: `Target diagram "${target_diagram_id}" not found` }], isError: true };
      // Replace the existing link for this node, otherwise append a new one.
      const existing = diagram.nodeLinks.findIndex((l) => l.nodeId === node_id);
      const updatedLinks = existing !== -1 ? diagram.nodeLinks.map((l, i) => i === existing ? { nodeId: node_id, targetDiagramId: target_diagram_id, label } : l) : [...diagram.nodeLinks, { nodeId: node_id, targetDiagramId: target_diagram_id, label }];
      const updated = await storage.updateDiagram(diagram_id, { nodeLinks: updatedLinks });
      return {
        content: [{
          type: "text",
          text: JSON.stringify({
            message: existing !== -1 ? `Node link updated` : `Node link added`,
            nodeId: node_id,
            targetDiagram: { id: target.id, name: target.name },
            totalLinks: updated.nodeLinks.length
          }, null, 2)
        }]
      };
    }
  );
  // remove_node_link: errors when the node has no link to remove.
  server2.tool(
    "remove_node_link",
    "Remove the drill-down link from a Mermaid node in a diagram.",
    {
      diagram_id: z2.string().describe("Diagram containing the node"),
      node_id: z2.string().describe("Mermaid node identifier whose link should be removed")
    },
    async ({ diagram_id, node_id }) => {
      const diagram = await storage.getDiagram(diagram_id);
      if (!diagram) return { content: [{ type: "text", text: `Diagram "${diagram_id}" not found` }], isError: true };
      const before = diagram.nodeLinks.length;
      const updatedLinks = diagram.nodeLinks.filter((l) => l.nodeId !== node_id);
      if (updatedLinks.length === before) {
        return { content: [{ type: "text", text: `No link found for node "${node_id}" in diagram "${diagram_id}"` }], isError: true };
      }
      await storage.updateDiagram(diagram_id, { nodeLinks: updatedLinks });
      return { content: [{ type: "text", text: `Link removed from node "${node_id}"` }] };
    }
  );
  // export_for_bluelens: bundles workspaces/folders/diagrams/code graphs into
  // a BlueprintExport JSON file on disk and reports counts plus the path.
  server2.tool(
    "export_for_bluelens",
    "Export MCP data as a BlueprintExport JSON file importable directly into the Bluelens web app via the Sidebar import button. Saves the file to disk and returns the path.",
    {
      workspace_id: z2.string().optional().describe("Export only this workspace (omit to export everything)"),
      output_path: z2.string().optional().describe("Where to save the file (default: ~/bluelens-export.bluelens)")
    },
    async ({ workspace_id, output_path }) => {
      const allWorkspaces = await storage.listWorkspaces();
      const targetWorkspaces = workspace_id ? allWorkspaces.filter((w) => w.id === workspace_id) : allWorkspaces;
      if (workspace_id && targetWorkspaces.length === 0) {
        return { content: [{ type: "text", text: `Workspace "${workspace_id}" not found` }], isError: true };
      }
      // Collect children of every exported workspace; graphs are loaded in
      // full from their per-graph files via the index entries.
      const folders = (await Promise.all(targetWorkspaces.map((w) => storage.listFolders(w.id)))).flat();
      const diagrams = (await Promise.all(targetWorkspaces.map((w) => storage.listDiagrams(w.id)))).flat();
      const graphIndex = (await Promise.all(targetWorkspaces.map((w) => storage.listCodeGraphs(w.id)))).flat();
      const codeGraphs = (await Promise.all(graphIndex.map((e) => storage.getCodeGraph(e.id)))).filter((g) => g !== null);
      const blueprint = {
        version: 1,
        exportType: workspace_id ? "workspace" : "all",
        exportDate: (/* @__PURE__ */ new Date()).toISOString(),
        workspaces: targetWorkspaces,
        folders,
        diagrams,
        codeGraphs
      };
      const dest = output_path ?? join2(homedir2(), "bluelens-export.bluelens");
      await writeFile2(dest, JSON.stringify(blueprint, null, 2), "utf-8");
      return {
        content: [{
          type: "text",
          text: JSON.stringify({
            message: "Export ready. Open Bluelens \u2192 Sidebar \u2192 Import and select this file.",
            file: dest,
            workspaces: targetWorkspaces.length,
            folders: folders.length,
            diagrams: diagrams.length,
            codeGraphs: codeGraphs.length
          }, null, 2)
        }]
      };
    }
  );
}
470
+
471
+ // src/tools/codegraph.ts
472
+ import { z as z3 } from "zod";
473
+ import { existsSync as existsSync2 } from "fs";
474
+
475
+ // src/services/nodeFs.ts
476
+ import { readdir, readFile as readFile2 } from "fs/promises";
477
+ import { join as join3 } from "path";
478
+
479
+ // ../services/IFileSystemProvider.ts
480
// File extensions treated as parseable source code when scanning a repo.
var CODE_EXTENSIONS = new Set(
  ".ts .tsx .js .jsx .py .rs .go .java .kt .rb .php .cs .cpp .cc .c .h .hpp .swift .dart".split(" ")
);
501
// Maps a lowercase file extension to a language id; extensions missing here
// fall back to "plaintext" (see getLanguage).
var EXTENSION_LANGUAGE_MAP = {
  // compiled / scripting languages
  ".ts": "typescript",
  ".tsx": "typescript",
  ".js": "javascript",
  ".jsx": "javascript",
  ".py": "python",
  ".rs": "rust",
  ".go": "go",
  ".java": "java",
  ".kt": "kotlin",
  ".rb": "ruby",
  ".php": "php",
  ".cs": "csharp",
  ".cpp": "cpp",
  ".cc": "cpp",
  ".cxx": "cpp",
  // headers are classified as C++ even for C projects
  ".h": "cpp",
  ".hpp": "cpp",
  ".c": "c",
  ".swift": "swift",
  ".dart": "dart",
  // data / config formats
  ".json": "json",
  ".yaml": "yaml",
  ".yml": "yaml",
  ".toml": "toml",
  ".xml": "xml",
  // web assets
  ".html": "html",
  ".css": "css",
  ".scss": "scss",
  // misc
  ".sql": "sql",
  ".sh": "shell",
  ".bash": "shell",
  ".zsh": "shell",
  ".md": "markdown",
  ".dockerfile": "dockerfile"
};
537
// Resolve a filename to its language id; unknown extensions are "plaintext".
// Previously a filename with NO dot fed lastIndexOf's -1 into substring
// (treated as 0), looking the whole name up as an "extension"; that also made
// the ".dockerfile" map entry unreachable for files literally named
// "Dockerfile". Handle the dot-less case explicitly.
function getLanguage(filename) {
  const dot = filename.lastIndexOf(".");
  if (dot === -1) {
    // No extension: recognize a bare Dockerfile; anything else is plain text.
    return filename.toLowerCase() === "dockerfile" ? "dockerfile" : "plaintext";
  }
  const ext = filename.substring(dot).toLowerCase();
  return EXTENSION_LANGUAGE_MAP[ext] || "plaintext";
}
541
+
542
+ // src/services/nodeFs.ts
543
// Directory names skipped while walking a repository (dependency caches,
// build output, virtualenvs, coverage artifacts, ...).
var IGNORED_DIRS = new Set(
  "node_modules .git .svn dist build out .next .nuxt __pycache__ .pytest_cache .mypy_cache venv .venv env target vendor .cargo coverage .nyc_output .turbo".split(" ")
);
565
// Read-only filesystem provider rooted at a repository checkout.
var NodeFileSystemProvider = class {
  rootPath;
  constructor(rootPath) {
    this.rootPath = rootPath;
  }
  // List one directory level, skipping dotfiles (except .env) and the
  // well-known dependency/build directories in IGNORED_DIRS.
  async listDirectory(relPath) {
    const absPath = relPath ? join3(this.rootPath, relPath) : this.rootPath;
    const entries = await readdir(absPath, { withFileTypes: true });
    const visible = [];
    for (const entry of entries) {
      const hidden = entry.name.startsWith(".") && entry.name !== ".env";
      if (hidden) continue;
      const isDir = entry.isDirectory();
      if (isDir && IGNORED_DIRS.has(entry.name)) continue;
      visible.push({
        name: entry.name,
        kind: isDir ? "directory" : "file",
        path: relPath ? `${relPath}/${entry.name}` : entry.name
      });
    }
    return visible;
  }
  // Read one file as UTF-8 text, addressed relative to the root.
  async readFile(relPath) {
    return readFile2(join3(this.rootPath, relPath), "utf-8");
  }
};
583
+
584
+ // ../services/codeParserService.ts
585
// Regex-based symbol extractors for TypeScript/JavaScript.
// Group 1 of each regex captures the symbol name; all use /gm so they scan
// line starts across a whole file.
var TS_JS_PATTERNS = [
  { regex: /^(?:export\s+)?(?:abstract\s+)?class\s+(\w+)/gm, kind: "class" },
  { regex: /^(?:export\s+)?interface\s+(\w+)/gm, kind: "interface" },
  { regex: /^(?:export\s+)?(?:async\s+)?function\s+(\w+)/gm, kind: "function" },
  // const/let/var assigned a (possibly async) function expression: `x = (`
  { regex: /^(?:export\s+)?(?:const|let|var)\s+(\w+)\s*=\s*(?:async\s+)?\(/gm, kind: "function" },
  // const/let/var assigned an arrow fn: `x = (a, b) =>` or `x = a =>`
  { regex: /^(?:export\s+)?(?:const|let|var)\s+(\w+)\s*=\s*(?:async\s+)?(?:\([^)]*\)|[a-zA-Z_]\w*)\s*=>/gm, kind: "function" }
];
592
// Regex-based symbol extractors for Python (top-level defs/classes only,
// since the patterns are anchored to line starts).
var PYTHON_PATTERNS = [
  { regex: /^class\s+(\w+)/gm, kind: "class" },
  { regex: /^(?:async\s+)?def\s+(\w+)/gm, kind: "function" }
];
596
// Regex-based symbol extractors for C++. Heuristic, not a real parser:
// the negative lookaheads keep control-flow keywords from matching as
// function names.
var CPP_PATTERNS = [
  { regex: /^(?:template\s*<[^>]*>\s*)?(?:class|struct)\s+(\w+)/gm, kind: "class" },
  // Function definitions (ending with `{`)
  { regex: /^(?!\s*(?:if|for|while|switch|else|do|return|throw|delete|new)\b)[\w:*&<>~\t ]+?\b(\w+)\s*\([^)]*\)\s*(?:const\s*)?(?:noexcept\s*)?(?:override\s*)?\s*\{/gm, kind: "function" },
  // Function declarations in headers (ending with `;`)
  { regex: /^(?:(?:virtual|static|inline|explicit|constexpr)\s+)*(?:[\w:*&<>~]+\s+)+(\w+)\s*\([^)]*\)\s*(?:const\s*)?(?:noexcept\s*)?(?:= *0\s*)?\s*;/gm, kind: "function" }
];
603
// Regex-based symbol extractors for C: struct declarations and function
// definitions (a lookahead filters out control-flow keywords).
var C_PATTERNS = [
  { regex: /^(?:struct|typedef\s+struct)\s+(\w+)/gm, kind: "class" },
  { regex: /^(?!\s*(?:if|for|while|switch|else|do|return)\b)[\w*\t ]+?\b(\w+)\s*\([^)]*\)\s*\{/gm, kind: "function" }
];
607
// Select the symbol-extraction pattern set for a language id; unsupported
// languages get an empty list (no symbols extracted).
function getPatterns(language) {
  if (language === "typescript" || language === "javascript") return TS_JS_PATTERNS;
  if (language === "python") return PYTHON_PATTERNS;
  if (language === "cpp") return CPP_PATTERNS;
  if (language === "c") return C_PATTERNS;
  return [];
}
622
// Indentation-based heuristic for where the block opened at `startLine` ends:
// the first later non-blank line whose indent is <= the start line's indent
// (the line immediately after the start never terminates the block).
// Returns lines.length when no such line exists.
function estimateEnd(lines, startLine) {
  const anchor = lines[startLine];
  const startIndent = anchor === undefined ? 0 : anchor.search(/\S/);
  for (let idx = startLine + 1; idx < lines.length; idx++) {
    const text = lines[idx];
    if (text.trim() === "") continue;
    const indent = text.search(/\S/);
    if (indent <= startIndent && idx > startLine + 1) return idx;
  }
  return lines.length;
}
634
// An import source is "external" (a package) unless it is relative (`.`),
// absolute (`/`), or uses the project's `@/` path alias.
function isExternalImport(source) {
  const internalPrefixes = [".", "/", "@/"];
  return !internalPrefixes.some((prefix) => source.startsWith(prefix));
}
637
// Extract import records from TS/JS source via regex (default, named,
// namespace, and require() forms), in that order of appearance per form.
function extractTSJSImports(code) {
  const results = [];
  const record = (name, source, isDefault) => {
    results.push({ name, source, isDefault, isExternal: isExternalImport(source) });
  };
  let m;
  // `import Foo from "mod"`
  const defaultRe = /import\s+(\w+)\s+from\s+['"]([^'"]+)['"]/g;
  while ((m = defaultRe.exec(code)) !== null) {
    record(m[1], m[2], true);
  }
  // `import { a, b as c } from "mod"` — aliases are reduced to original names
  const namedRe = /import\s*\{([^}]+)\}\s*from\s*['"]([^'"]+)['"]/g;
  while ((m = namedRe.exec(code)) !== null) {
    const names = m[1].split(",").map((s) => s.trim().split(/\s+as\s+/)[0].trim()).filter(Boolean);
    for (const name of names) {
      record(name, m[2], false);
    }
  }
  // `import * as ns from "mod"`
  const starRe = /import\s+\*\s+as\s+(\w+)\s+from\s+['"]([^'"]+)['"]/g;
  while ((m = starRe.exec(code)) !== null) {
    record(m[1], m[2], true);
  }
  // `require("mod")` — name defaults to the last path segment
  const requireRe = /require\s*\(\s*['"]([^'"]+)['"]\s*\)/g;
  while ((m = requireRe.exec(code)) !== null) {
    record(m[1].split("/").pop() || m[1], m[1], true);
  }
  return results;
}
661
// Extract import records from Python source: `from X import a, b as c` and
// `import x, y`. Anything not starting with "." counts as external.
function extractPythonImports(code) {
  const results = [];
  // Split a comma list and strip `as` aliases back to the original names.
  const splitNames = (raw) => raw.split(",").map((s) => s.trim().split(/\s+as\s+/)[0].trim()).filter(Boolean);
  const fromRe = /^from\s+([\w.]+)\s+import\s+(.+)$/gm;
  for (let m = fromRe.exec(code); m !== null; m = fromRe.exec(code)) {
    const source = m[1];
    for (const name of splitNames(m[2])) {
      results.push({ name, source, isDefault: false, isExternal: !source.startsWith(".") });
    }
  }
  const importRe = /^import\s+([\w.]+(?:\s*,\s*[\w.]+)*)/gm;
  for (let m = importRe.exec(code); m !== null; m = importRe.exec(code)) {
    for (const moduleName of splitNames(m[1])) {
      results.push({ name: moduleName.split(".").pop() || moduleName, source: moduleName, isDefault: true, isExternal: !moduleName.startsWith(".") });
    }
  }
  return results;
}
681
// Extract #include records from C/C++ source. Angle-bracket includes are
// treated as external, quoted includes as project-local; the name is the
// basename with any file extension stripped.
function extractCppImports(code) {
  const results = [];
  const baseName = (include) => include.split("/").pop()?.replace(/\.\w+$/, "") || include;
  const systemRe = /^#include\s*<([^>]+)>/gm;
  for (let m = systemRe.exec(code); m !== null; m = systemRe.exec(code)) {
    results.push({ name: baseName(m[1]), source: m[1], isDefault: true, isExternal: true });
  }
  const localRe = /^#include\s*"([^"]+)"/gm;
  for (let m = localRe.exec(code); m !== null; m = localRe.exec(code)) {
    results.push({ name: baseName(m[1]), source: m[1], isDefault: true, isExternal: false });
  }
  return results;
}
696
// Extract class declarations with their extends/implements clauses from
// TS/JS source. Returns { name, lineStart (1-based), implements[], extends? }.
// Fix: the original computed an unused `lines` local (`code.split("\n")`)
// on every call; removed.
function extractTSJSClassHierarchy(code) {
  const results = [];
  const re = /^(?:export\s+)?(?:abstract\s+)?class\s+(\w+)(?:\s+extends\s+(\w+))?(?:\s+implements\s+([\w\s,]+))?\s*\{/gm;
  let m;
  while ((m = re.exec(code)) !== null) {
    // 1-based line number of the `class` keyword.
    const lineStart = code.substring(0, m.index).split("\n").length;
    const entry = { name: m[1], lineStart, implements: [] };
    if (m[2]) entry.extends = m[2];
    if (m[3]) entry.implements = m[3].split(",").map((s) => s.trim()).filter(Boolean);
    results.push(entry);
  }
  return results;
}
715
// Extract Python class declarations with their base classes. The first base
// becomes `extends`, remaining bases go to `implements`; `object` is dropped.
function extractPythonClassHierarchy(code) {
  const results = [];
  const re = /^class\s+(\w+)(?:\(([^)]*)\))?\s*:/gm;
  for (let m = re.exec(code); m !== null; m = re.exec(code)) {
    // 1-based line number of the `class` keyword.
    const lineStart = code.slice(0, m.index).split("\n").length;
    const entry = { name: m[1], lineStart, implements: [] };
    if (m[2]) {
      const bases = m[2].split(",").map((b) => b.trim()).filter((b) => b && b !== "object");
      const [primary, ...mixins] = bases;
      if (primary) entry.extends = primary;
      if (mixins.length > 0) entry.implements = mixins;
    }
    results.push(entry);
  }
  return results;
}
736
// Extract C++ class/struct declarations that have base classes. Access
// specifiers (public/private/protected) are stripped; the first base becomes
// `extends`, the rest go to `implements`.
function extractCppClassHierarchy(code) {
  const results = [];
  const re = /^(?:class|struct)\s+(\w+)\s*(?:final\s*)?:\s*([\w\s,]+?)(?:\s*\{|$)/gm;
  for (let m = re.exec(code); m !== null; m = re.exec(code)) {
    // 1-based line number of the declaration.
    const lineStart = code.slice(0, m.index).split("\n").length;
    const bases = m[2].split(",").map((b) => b.trim().replace(/^(?:public|private|protected)\s+/, "").trim()).filter(Boolean);
    const entry = { name: m[1], lineStart, implements: [] };
    const [primary, ...rest] = bases;
    if (primary) entry.extends = primary;
    if (rest.length > 0) entry.implements = rest;
    results.push(entry);
  }
  return results;
}
751
// Line-by-line heuristic caller→callee extraction: tracks the most recently
// opened scope (function/class) and records calls to any known symbol made
// inside it. Self-calls are ignored. Not a real parser — nested scopes are
// approximated by "last scope seen".
function extractCallRefsFromCode(code, language, knownSymbols) {
  if (knownSymbols.length === 0) return [];
  const calls = [];
  const symbolSet = new Set(knownSymbols);
  // One alternation matching any known symbol followed by `(`.
  const escaped = knownSymbols.map((s) => s.replace(/[.*+?^${}()|[\]\\]/g, "\\$&"));
  const callPattern = new RegExp(`\\b(${escaped.join("|")})\\s*\\(`, "g");
  // Scope-opening regex per language family.
  let scopeRe;
  if (language === "python") {
    scopeRe = /^(?:class|(?:async\s+)?def)\s+(\w+)/;
  } else if (language === "cpp" || language === "c") {
    scopeRe = /^(?!\s*(?:if|for|while|switch|else|do|return|throw)\b)(?:[\w:*&<>~\t ]+?\b)(\w+)\s*\([^)]*\)\s*(?:const\s*)?(?:noexcept\s*)?\s*\{/;
  } else {
    scopeRe = /^(?:export\s+)?(?:(?:async\s+)?function\s+(\w+)|(?:const|let|var)\s+(\w+)\s*=|(?:abstract\s+)?class\s+(\w+))/;
  }
  let currentScope = null;
  for (const line of code.split("\n")) {
    const scopeMatch = scopeRe.exec(line);
    if (scopeMatch) {
      currentScope = scopeMatch[1] || scopeMatch[2] || scopeMatch[3] || null;
    }
    if (!currentScope) continue;
    // callPattern carries /g state; rewind before scanning each line.
    callPattern.lastIndex = 0;
    let m;
    while ((m = callPattern.exec(line)) !== null) {
      const callee = m[1];
      if (callee !== currentScope && symbolSet.has(callee)) {
        calls.push({ caller: currentScope, callee });
      }
    }
  }
  return calls;
}
777
+ async function computeContentHash(content) {
778
+ const encoder = new TextEncoder();
779
+ const data = encoder.encode(content);
780
+ const hashBuffer = await crypto.subtle.digest("SHA-256", data);
781
+ const hashArray = Array.from(new Uint8Array(hashBuffer));
782
+ return hashArray.map((b) => b.toString(16).padStart(2, "0")).join("");
783
+ }
784
// Best-effort, regex-based source parser facade. Every method dispatches on a
// language tag ("typescript" | "javascript" | "python" | "cpp" | "c"); unknown
// languages yield empty results rather than throwing.
var codeParserService = {
  // Extracts import/include statements via a per-language helper.
  extractImports(code, language) {
    switch (language) {
      case "typescript":
      case "javascript":
        return extractTSJSImports(code);
      case "python":
        return extractPythonImports(code);
      case "cpp":
      case "c":
        return extractCppImports(code);
      default:
        return [];
    }
  },
  // Collects exported symbol names. TS/JS: `export <decl>`, `export default X;`
  // and `export { a, b as c }`. Python: only an explicit `__all__` list.
  // C/C++: function declarations (prototypes ending in `;`).
  extractExports(code, language) {
    const exports = [];
    let m;
    if (language === "typescript" || language === "javascript") {
      const namedRe = /^export\s+(?:default\s+)?(?:const|let|var|function|class|interface|type|enum|abstract\s+class)\s+(\w+)/gm;
      while ((m = namedRe.exec(code)) !== null) {
        exports.push(m[1]);
      }
      const defaultRe = /^export\s+default\s+(\w+)\s*;/gm;
      while ((m = defaultRe.exec(code)) !== null) {
        // Skip keyword captures so `export default function ...` (already
        // handled above) is not double-counted as a name.
        if (!["function", "class", "const", "let", "var", "interface", "type", "enum", "abstract"].includes(m[1])) {
          exports.push(m[1]);
        }
      }
      const braceRe = /^export\s*\{([^}]+)\}/gm;
      while ((m = braceRe.exec(code)) !== null) {
        // For `a as b`, the exported (public) name is the alias `b`.
        const names = m[1].split(",").map((s) => {
          const parts = s.trim().split(/\s+as\s+/);
          return (parts[1] || parts[0]).trim();
        }).filter(Boolean);
        exports.push(...names);
      }
    } else if (language === "python") {
      const allRe = /__all__\s*=\s*\[([^\]]+)\]/;
      const allMatch = allRe.exec(code);
      if (allMatch) {
        const names = allMatch[1].match(/['"](\w+)['"]/g);
        if (names) {
          exports.push(...names.map((n) => n.replace(/['"]/g, "")));
        }
      }
    } else if (language === "cpp" || language === "c") {
      const declRe = /^(?:(?:extern|static|inline)\s+)*(?:[\w:*&<>~]+\s+)+(\w+)\s*\([^)]*\)\s*(?:const\s*)?\s*;/gm;
      while ((m = declRe.exec(code)) !== null) {
        const name = m[1];
        // Filter out control-flow keywords the loose declaration regex can
        // capture by accident.
        if (!["if", "for", "while", "switch", "return", "throw", "delete", "new"].includes(name)) {
          exports.push(name);
        }
      }
    }
    return exports;
  },
  // Finds symbol declarations as { name, kind, lineStart, lineEnd } (1-based
  // lines), sorted by starting line. Patterns come from getPatterns(language).
  extractSymbols(code, language) {
    const patterns = getPatterns(language);
    if (patterns.length === 0) return [];
    const lines = code.split("\n");
    const symbols = [];
    for (const { regex, kind } of patterns) {
      // Shared /g regexes keep a cursor between calls — reset before scanning.
      regex.lastIndex = 0;
      let match;
      while ((match = regex.exec(code)) !== null) {
        const name = match[1];
        // 1-based start line = newline count before the match, plus one.
        const charsBefore = code.substring(0, match.index);
        const lineStart = charsBefore.split("\n").length;
        const lineEnd = estimateEnd(lines, lineStart - 1);
        symbols.push({ name, kind, lineStart, lineEnd });
      }
    }
    symbols.sort((a, b) => a.lineStart - b.lineStart);
    return symbols;
  },
  // Extracts class inheritance edges via a per-language helper.
  extractClassHierarchy(code, language) {
    switch (language) {
      case "typescript":
      case "javascript":
        return extractTSJSClassHierarchy(code);
      case "python":
        return extractPythonClassHierarchy(code);
      case "cpp":
      case "c":
        return extractCppClassHierarchy(code);
      default:
        return [];
    }
  },
  // Finds caller→callee pairs among the given known symbol names.
  extractCallReferences(code, language, knownSymbols) {
    return extractCallRefsFromCode(code, language, knownSymbols);
  },
  // SHA-256 content hashing (module-level async helper re-exported here).
  computeContentHash
};
879
+
880
+ // ../services/codebaseAnalyzerService.ts
881
// Basenames conventionally used as application/library entry points across
// the supported ecosystems (TS/JS, Python, Rust, C/C++). A scanned file whose
// basename appears here is reported in the analysis result's entryPoints.
var ENTRY_POINT_NAMES = /* @__PURE__ */ new Set([
  "index.ts",
  "index.tsx",
  "index.js",
  "index.jsx",
  "main.ts",
  "main.tsx",
  "main.js",
  "main.jsx",
  "main.py",
  "app.ts",
  "app.tsx",
  "app.js",
  "app.jsx",
  "app.py",
  "App.ts",
  "App.tsx",
  "App.js",
  "App.jsx",
  "server.ts",
  "server.js",
  "server.py",
  "mod.rs",
  "lib.rs",
  "main.rs",
  "setup.py",
  "__main__.py",
  "main.cpp",
  "main.cc",
  "main.c"
]);
912
/**
 * Tests a slash-separated path against a simple glob pattern.
 * Supports `*` (any run of non-separator characters) and `**` (any run of
 * characters, including `/`). Every other character matches literally.
 * @param {string} path - Relative path, e.g. "src/utils/io.ts".
 * @param {string} pattern - Glob, e.g. "src/**" or "*.test.ts".
 * @returns {boolean} True when the whole path matches.
 */
function matchesGlob(path, pattern) {
  // Escape every regex metacharacter except `*`, which carries glob meaning.
  // (Previously only `.` was escaped, so patterns containing `+`, `?`, `(`,
  // `{` etc. were silently interpreted as regex syntax.)
  const regexStr = pattern
    .replace(/[.+?^${}()|[\]\\]/g, "\\$&")
    .replace(/\*\*/g, "{{GLOBSTAR}}")
    .replace(/\*/g, "[^/]*")
    .replace(/\{\{GLOBSTAR\}\}/g, ".*");
  return new RegExp(`^${regexStr}$`).test(path);
}
916
/**
 * Applies a scan config's include/exclude glob filters to a file path.
 * Include rules (when any are present) must match first; exclude rules can
 * then veto. No config at all means everything is included.
 * @param {string} filePath
 * @param {{includePaths: string[], excludePaths: string[]}|undefined} config
 * @returns {boolean}
 */
function shouldIncludeFile(filePath, config) {
  if (!config) return true;
  const { includePaths, excludePaths } = config;
  const matchesAny = (patterns) => patterns.some((p) => matchesGlob(filePath, p));
  if (includePaths.length > 0 && !matchesAny(includePaths)) return false;
  if (excludePaths.length > 0 && matchesAny(excludePaths)) return false;
  return true;
}
928
/**
 * Returns true when a file name carries an extension listed in
 * CODE_EXTENSIONS (case-insensitive).
 * @param {string} name - Base file name, e.g. "util.ts".
 * @returns {boolean}
 */
function isCodeFile(name) {
  const dot = name.lastIndexOf(".");
  // Extensionless names (e.g. "Makefile", "LICENSE") can never match.
  // Previously lastIndexOf() === -1 made substring(-1) return the whole
  // name, which was then tested against the extension set by accident.
  if (dot === -1) return false;
  return CODE_EXTENSIONS.has(name.substring(dot).toLowerCase());
}
932
/**
 * Recursively walks a file-system provider and returns the paths of every
 * code file that passes the optional include/exclude scan filters.
 * @param {{listDirectory: (path: string) => Promise<Array>}} provider
 * @param {string} [basePath] - Directory to start from ("" = repository root).
 * @param {object} [config] - Optional scan config (includePaths/excludePaths).
 * @returns {Promise<string[]>} Flat list of matching file paths.
 */
async function listAllFiles(provider, basePath = "", config) {
  const collected = [];
  for (const entry of await provider.listDirectory(basePath)) {
    if (entry.kind === "directory") {
      collected.push(...await listAllFiles(provider, entry.path, config));
    } else if (isCodeFile(entry.name) && shouldIncludeFile(entry.path, config)) {
      collected.push(entry.path);
    }
  }
  return collected;
}
947
/**
 * Resolves a relative import specifier ("./x", "../y/z") to an actual file
 * in the scanned file list; bare/package imports resolve to null.
 * Direct extension matches are tried before directory index files.
 * @param {string} importSource - Import specifier as written in source.
 * @param {string} currentFile - Path of the importing file.
 * @param {string[]} allFiles - Every known file path in the codebase.
 * @returns {string|null} Resolved path, or null when nothing matches.
 */
function resolveImportPath(importSource, currentFile, allFiles) {
  if (!importSource.startsWith(".")) return null;
  let base = currentFile.substring(0, currentFile.lastIndexOf("/"));
  for (const segment of importSource.split("/")) {
    if (segment === ".") continue;
    if (segment === "..") {
      base = base.substring(0, base.lastIndexOf("/"));
    } else {
      base = base ? `${base}/${segment}` : segment;
    }
  }
  // "" lets an already-extensioned specifier match exactly.
  const candidates = [
    ...[".ts", ".tsx", ".js", ".jsx", ".py", ""].map((ext) => base + ext),
    ...["/index.ts", "/index.tsx", "/index.js", "/index.jsx"].map((idx) => base + idx)
  ];
  return candidates.find((candidate) => allFiles.includes(candidate)) ?? null;
}
972
/**
 * Derives a coarse module name from a file path: its top-level directory,
 * or "(root)" for files directly at the repository root.
 * @param {string} filePath
 * @returns {string}
 */
function getModuleName(filePath) {
  const firstSlash = filePath.indexOf("/");
  return firstSlash === -1 ? "(root)" : filePath.substring(0, firstSlash);
}
977
/**
 * Groups analyzed files by top-level directory into modules and derives
 * inter-module dependencies from each file's resolvable internal imports.
 * "(root)" sorts first; all other modules sort alphabetically.
 * @param {Array} files - Analyzed file records ({ filePath, imports, ... }).
 * @param {string[]} allFilePaths - Every file path, for import resolution.
 * @returns {Array} Modules: { name, path, files, dependencies }.
 */
function groupIntoModules(files, allFilePaths) {
  const byModule = new Map();
  for (const file of files) {
    const moduleName = getModuleName(file.filePath);
    const bucket = byModule.get(moduleName);
    if (bucket) {
      bucket.push(file);
    } else {
      byModule.set(moduleName, [file]);
    }
  }
  const modules = [];
  for (const [name, moduleFiles] of byModule) {
    // Cross-module edges only: imports that resolve inside the same module
    // are ignored.
    const deps = new Set();
    for (const file of moduleFiles) {
      for (const imp of file.imports) {
        if (imp.isExternal) continue;
        const resolved = resolveImportPath(imp.source, file.filePath, allFilePaths);
        if (!resolved) continue;
        const depModule = getModuleName(resolved);
        if (depModule !== name) deps.add(depModule);
      }
    }
    modules.push({
      name,
      path: name === "(root)" ? "" : name,
      files: moduleFiles,
      dependencies: [...deps]
    });
  }
  return modules.sort((a, b) => {
    if (a.name === "(root)") return -1;
    if (b.name === "(root)") return 1;
    return a.name.localeCompare(b.name);
  });
}
1015
var codebaseAnalyzerService = {
  /**
   * Analyzes a complete codebase through an IFileSystemProvider.
   *
   * @param provider File access — may be local (LocalFileSystemProvider)
   *                 or historical (GitFileSystemProvider) without changing this function.
   * @param scanConfig Optional include/exclude filters.
   * @param onProgress Progress callback (filesScanned, totalFiles).
   */
  async analyzeCodebase(provider, scanConfig, onProgress) {
    const allFilePaths = await listAllFiles(provider, "", scanConfig);
    const analyzedFiles = [];
    const externalDeps = /* @__PURE__ */ new Set();
    const entryPoints = [];
    let totalSymbols = 0;
    for (let i = 0; i < allFilePaths.length; i++) {
      const filePath = allFilePaths[i];
      onProgress?.(i + 1, allFilePaths.length);
      try {
        const content = await provider.readFile(filePath);
        const language = getLanguage(filePath);
        const symbols = codeParserService.extractSymbols(content, language);
        const imports = codeParserService.extractImports(content, language);
        const exportedSymbols = codeParserService.extractExports(content, language);
        for (const imp of imports) {
          if (imp.isExternal) {
            // Scoped packages ("@scope/pkg/deep") keep two path segments;
            // everything else keeps only the first segment as package name.
            const pkgName = imp.source.startsWith("@") ? imp.source.split("/").slice(0, 2).join("/") : imp.source.split("/")[0];
            externalDeps.add(pkgName);
          }
        }
        // Re-shape parser symbols into scan records (repoId filled in later
        // by the caller — TODO confirm; it is left empty here).
        const scannedSymbols = symbols.map((s) => ({
          name: s.name,
          kind: s.kind,
          filePath,
          lineStart: s.lineStart,
          lineEnd: s.lineEnd,
          repoId: ""
        }));
        totalSymbols += symbols.length;
        analyzedFiles.push({
          filePath,
          language,
          symbols: scannedSymbols || [],
          imports: imports || [],
          exportedSymbols: exportedSymbols || [],
          size: content.length
        });
        const fileName = filePath.split("/").pop() || "";
        if (ENTRY_POINT_NAMES.has(fileName)) {
          entryPoints.push(filePath);
        }
      } catch {
        // Best-effort scan: files that cannot be read or parsed are skipped
        // silently rather than aborting the whole analysis.
      }
    }
    const modules = groupIntoModules(analyzedFiles, allFilePaths);
    return {
      modules,
      externalDeps: Array.from(externalDeps).sort(),
      entryPoints,
      totalFiles: analyzedFiles.length,
      totalSymbols
    };
  }
};
1079
+
1080
+ // ../services/codeGraphModelService.ts
1081
// Short random id (up to 9 base-36 chars) for graph entities. Not
// cryptographically secure and not collision-proof — adequate only for
// local, human-scale graphs. (slice replaces the deprecated substr.)
var generateId = () => Math.random().toString(36).slice(2, 11);
1082
/**
 * Builds the three built-in lenses every new graph starts with:
 * Component (structure), Flow (runtime interactions), Domain (everything).
 * Each lens pairs node/relation filters with Mermaid style rules and a
 * preferred layout direction. The first lens is the default active one.
 * @returns {Array} Lens definitions.
 */
function getDefaultLenses() {
  return [
    {
      id: "lens-component",
      name: "Component",
      type: "component",
      nodeFilter: {
        kinds: ["system", "package", "module", "class", "interface", "function", "method", "variable", "field"],
        minDepth: 0,
        maxDepth: 3
      },
      relationFilter: { types: ["contains", "depends_on", "implements", "inherits"] },
      styleRules: [
        { match: { kind: ["system"] }, shape: "rounded", style: "fill:#1e3a5f,stroke:#3b82f6,color:#93c5fd" },
        { match: { kind: ["package"] }, shape: "rounded", style: "fill:#1e3a2f,stroke:#22c55e,color:#86efac" },
        { match: { kind: ["module"] }, shape: "default", style: "fill:#2d2d3d,stroke:#8b5cf6,color:#c4b5fd" },
        { match: { kind: ["class", "interface"] }, shape: "stadium", style: "fill:#3d2d2d,stroke:#f97316,color:#fdba74" }
      ],
      layoutHint: "TD"
    },
    {
      id: "lens-flow",
      name: "Flow",
      type: "flow",
      nodeFilter: { kinds: ["package", "module", "class", "function"], minDepth: 1, maxDepth: 3 },
      relationFilter: { types: ["calls", "emits", "subscribes", "depends_on"] },
      styleRules: [
        { match: { kind: ["function"] }, shape: "stadium", style: "fill:#1e3a5f,stroke:#3b82f6,color:#93c5fd" },
        { match: { kind: ["class"] }, shape: "rounded", style: "fill:#3d2d2d,stroke:#f97316,color:#fdba74" }
      ],
      layoutHint: "LR"
    },
    {
      id: "lens-domain",
      name: "Domain",
      type: "domain",
      nodeFilter: { minDepth: 0, maxDepth: 4 },
      relationFilter: {},
      styleRules: [
        { match: { kind: ["system"] }, shape: "rounded", style: "fill:#1e3a5f,stroke:#60a5fa,color:#93c5fd" }
      ],
      layoutHint: "TD"
    }
  ];
}
1137
/**
 * Creates a fresh code graph containing only a root "system" node, the
 * default lenses (Component active), and empty node/relation/flow maps.
 * @param {string} workspaceId
 * @param {string} repoId
 * @param {string} name - Used as both graph name and root node name.
 * @returns {object} A new CodeGraph.
 */
function createEmptyGraph(workspaceId, repoId, name) {
  const lenses = getDefaultLenses();
  const rootNodeId = generateId();
  const timestamp = Date.now();
  return {
    id: generateId(),
    name,
    workspaceId,
    repoId,
    createdAt: timestamp,
    updatedAt: timestamp,
    nodes: {
      [rootNodeId]: {
        id: rootNodeId,
        name,
        kind: "system",
        depth: 0,
        parentId: null,
        children: [],
        sourceRef: null,
        tags: [],
        lensConfig: {},
        domainProjections: []
      }
    },
    relations: {},
    domainNodes: {},
    domainRelations: {},
    flows: {},
    lenses,
    activeLensId: lenses[0].id,
    syncLock: {},
    rootNodeId
  };
}
1171
/**
 * Adds a node to the graph immutably, wiring it into its parent's children
 * list when that parent exists. Generates an id when the node has none.
 * @param {object} graph - Current graph (never mutated).
 * @param {object} node - Node payload; `id` is optional.
 * @returns {{graph: object, nodeId: string}} Updated graph plus the node id.
 */
function addNode(graph, node) {
  const nodeId = node.id || generateId();
  const nodes = { ...graph.nodes, [nodeId]: { ...node, id: nodeId } };
  const parentId = node.parentId;
  if (parentId && nodes[parentId]) {
    const parent = nodes[parentId];
    nodes[parentId] = { ...parent, children: [...parent.children, nodeId] };
  }
  return { graph: { ...graph, nodes, updatedAt: Date.now() }, nodeId };
}
1187
/**
 * Removes a node and its whole subtree immutably: all descendants, every
 * relation touching a removed node, and their sync-lock entries are dropped,
 * and the parent's children list is pruned. Unknown ids return the graph
 * object unchanged.
 * @param {object} graph
 * @param {string} nodeId
 * @returns {object} Updated graph (or the same graph when id is unknown).
 */
function removeNode(graph, nodeId) {
  const target = graph.nodes[nodeId];
  if (!target) return graph;
  // Collect the node plus every descendant (breadth-first).
  const doomed = new Set();
  const queue = [nodeId];
  while (queue.length > 0) {
    const id = queue.shift();
    doomed.add(id);
    const current = graph.nodes[id];
    if (current) queue.push(...current.children);
  }
  const nodes = {};
  for (const [id, n] of Object.entries(graph.nodes)) {
    if (!doomed.has(id)) nodes[id] = n;
  }
  if (target.parentId && nodes[target.parentId]) {
    const parent = nodes[target.parentId];
    nodes[target.parentId] = {
      ...parent,
      children: parent.children.filter((childId) => childId !== nodeId)
    };
  }
  const relations = {};
  for (const [relId, rel] of Object.entries(graph.relations)) {
    if (!doomed.has(rel.sourceId) && !doomed.has(rel.targetId)) relations[relId] = rel;
  }
  const syncLock = {};
  for (const [id, lock] of Object.entries(graph.syncLock)) {
    if (!doomed.has(id)) syncLock[id] = lock;
  }
  return { ...graph, nodes, relations, syncLock, updatedAt: Date.now() };
}
1220
/**
 * Adds a typed relation between two nodes immutably. The relation starts
 * visible in every lens currently defined on the graph.
 * @param {object} graph
 * @param {string} sourceId - Origin node id.
 * @param {string} targetId - Destination node id.
 * @param {string} type - Relation type (e.g. "depends_on", "calls").
 * @param {string} [label] - Optional display label.
 * @returns {{graph: object, relationId: string}}
 */
function addRelation(graph, sourceId, targetId, type, label) {
  const relationId = generateId();
  const lensVisibility = Object.fromEntries(graph.lenses.map((lens) => [lens.id, true]));
  const relation = { id: relationId, sourceId, targetId, type, label, lensVisibility };
  return {
    graph: {
      ...graph,
      relations: { ...graph.relations, [relationId]: relation },
      updatedAt: Date.now()
    },
    relationId
  };
}
1243
/**
 * Removes a relation by id immutably; an unknown id is a no-op aside from
 * refreshing updatedAt.
 * @param {object} graph
 * @param {string} relationId
 * @returns {object} Updated graph.
 */
function removeRelation(graph, relationId) {
  const { [relationId]: _removed, ...relations } = graph.relations;
  return { ...graph, relations, updatedAt: Date.now() };
}
1248
/**
 * Resolves a node's direct children to node objects, dropping dangling ids.
 * Unknown nodes yield an empty array.
 * @param {object} graph
 * @param {string} nodeId
 * @returns {Array} Child node objects.
 */
function getChildren(graph, nodeId) {
  const childIds = graph.nodes[nodeId]?.children ?? [];
  return childIds.map((id) => graph.nodes[id]).filter(Boolean);
}
1253
/**
 * Collects every descendant of a node in preorder (following each node's
 * children order), skipping child ids that do not resolve to nodes.
 * @param {object} graph
 * @param {string} nodeId
 * @returns {Array} Descendant node objects (excludes the node itself).
 */
function getDescendants(graph, nodeId) {
  const found = [];
  const walk = (id) => {
    for (const childId of graph.nodes[id]?.children ?? []) {
      const child = graph.nodes[childId];
      if (!child) continue;
      found.push(child);
      walk(childId);
    }
  };
  walk(nodeId);
  return found;
}
1269
/**
 * Walks parent links from a node up to the root, returning ancestors in
 * nearest-first order. Stops at the first missing parent reference.
 * @param {object} graph
 * @param {string} nodeId
 * @returns {Array} Ancestor node objects (excludes the node itself).
 */
function getAncestors(graph, nodeId) {
  const chain = [];
  let node = graph.nodes[nodeId];
  while (node?.parentId) {
    const parent = graph.nodes[node.parentId];
    if (!parent) break;
    chain.push(parent);
    node = parent;
  }
  return chain;
}
1280
/**
 * Returns the nodes a lens should display, honoring per-node lens overrides,
 * kind/depth/tag filters, and an optional focus node that restricts output to
 * the focus itself plus its ancestors and descendants.
 * The "domain" lens type shows every node unfiltered.
 * @param {object} graph
 * @param {object} lens - Active lens (type + nodeFilter).
 * @param {string} [focusNodeId] - Optional focus; the root id means no focus.
 * @param {{min?: number, max?: number}} [depth] - Optional depth override.
 * @returns {Array} Visible node objects.
 */
function getVisibleNodes(graph, lens, focusNodeId, depth) {
  const allNodes = Object.values(graph.nodes);
  if (lens.type === "domain") {
    return allNodes;
  }
  const minD = depth?.min ?? lens.nodeFilter.minDepth ?? 0;
  const maxD = depth?.max ?? lens.nodeFilter.maxDepth ?? 4;
  // Precompute the focus "spine" (focus + ancestors + descendants) ONCE.
  // The previous version recomputed getAncestors/getDescendants inside the
  // filter callback, i.e. O(n) work per node — accidental O(n^2).
  // NOTE: when a focus id is given but the node is missing, the original
  // admitted every node that passed the other filters; keep that behavior.
  let focusSet = null;
  if (focusNodeId && focusNodeId !== graph.rootNodeId && graph.nodes[focusNodeId]) {
    focusSet = new Set([focusNodeId]);
    for (const ancestor of getAncestors(graph, focusNodeId)) focusSet.add(ancestor.id);
    for (const descendant of getDescendants(graph, focusNodeId)) focusSet.add(descendant.id);
  }
  return allNodes.filter((node) => {
    const override = node.lensConfig[lens.id];
    if (override?.visible === false) return false;
    if (lens.nodeFilter.kinds && !lens.nodeFilter.kinds.includes(node.kind)) return false;
    if (node.depth < minD || node.depth > maxD) return false;
    if (lens.nodeFilter.tags && lens.nodeFilter.tags.length > 0) {
      if (!lens.nodeFilter.tags.some((tag) => node.tags.includes(tag))) return false;
    }
    if (focusSet) return focusSet.has(node.id);
    return true;
  });
}
1308
/**
 * Filters the graph's relations down to those whose endpoints are both
 * visible, that are not hidden in the given lens, and whose type the lens's
 * relation filter accepts (an absent type list accepts everything).
 * @param {object} graph
 * @param {object} lens
 * @param {Set<string>} visibleNodeIds
 * @returns {Array} Visible relation objects.
 */
function getVisibleRelations(graph, lens, visibleNodeIds) {
  const allowedTypes = lens.relationFilter.types;
  return Object.values(graph.relations).filter(
    (rel) =>
      visibleNodeIds.has(rel.sourceId) &&
      visibleNodeIds.has(rel.targetId) &&
      rel.lensVisibility[lens.id] !== false &&
      (!allowedTypes || allowedTypes.includes(rel.type))
  );
}
1316
// Structural health check over a graph. Emits anomaly records for:
// orphan nodes, relations pointing at missing nodes, circular dependencies
// (over non-"contains" edges), high fan-out (> 8), and high fan-in (> 10).
// Purely read-only; the graph is never modified.
function validateGraph(graph) {
  const anomalies = [];
  const nodes = Object.values(graph.nodes);
  const relations = Object.values(graph.relations);
  // Orphans: every node except the root must have a resolvable parent.
  for (const node of nodes) {
    if (node.id === graph.rootNodeId) continue;
    if (!node.parentId || !graph.nodes[node.parentId]) {
      anomalies.push({
        type: "orphan_node",
        severity: "warning",
        message: `Node "${node.name}" has no valid parent`,
        nodeIds: [node.id]
      });
    }
  }
  // Broken references: relations whose endpoints no longer exist.
  for (const rel of relations) {
    if (!graph.nodes[rel.sourceId] || !graph.nodes[rel.targetId]) {
      anomalies.push({
        type: "broken_reference",
        severity: "error",
        message: `Relation "${rel.type}" references missing node(s)`,
        nodeIds: [rel.sourceId, rel.targetId].filter((id) => !graph.nodes[id]),
        relationIds: [rel.id]
      });
    }
  }
  // Cycle detection runs only over dependency-like edges; "contains" edges
  // mirror the tree hierarchy and would trivially create cycles.
  const depEdges = relations.filter((r) => r.type !== "contains");
  const visited = /* @__PURE__ */ new Set();
  const inStack = /* @__PURE__ */ new Set();
  // Recursive DFS; `inStack` tracks the current path so a revisit inside the
  // same path means a cycle. NOTE(review): recursion depth is bounded by the
  // longest dependency chain — presumably fine for typical graph sizes.
  function dfs(nodeId, path) {
    if (inStack.has(nodeId)) {
      const cycleStart = path.indexOf(nodeId);
      const cycle = path.slice(cycleStart);
      anomalies.push({
        type: "circular_dependency",
        severity: "warning",
        message: `Circular dependency: ${cycle.map((id) => graph.nodes[id]?.name || id).join(" \u2192 ")}`,
        nodeIds: cycle
      });
      return;
    }
    if (visited.has(nodeId)) return;
    visited.add(nodeId);
    inStack.add(nodeId);
    const outgoing = depEdges.filter((e) => e.sourceId === nodeId);
    for (const edge of outgoing) {
      dfs(edge.targetId, [...path, nodeId]);
    }
    inStack.delete(nodeId);
  }
  for (const node of nodes) {
    if (!visited.has(node.id)) {
      dfs(node.id, []);
    }
  }
  // High fan-out: a node depending on many others is change-fragile.
  for (const node of nodes) {
    const fanOut = depEdges.filter((e) => e.sourceId === node.id).length;
    if (fanOut > 8) {
      anomalies.push({
        type: "high_coupling",
        severity: "warning",
        message: `Node "${node.name}" has high fan-out (${fanOut} dependencies)`,
        nodeIds: [node.id]
      });
    }
  }
  // High fan-in: many dependents concentrated on one node ("god node").
  for (const node of nodes) {
    const fanIn = depEdges.filter((e) => e.targetId === node.id).length;
    if (fanIn > 10) {
      anomalies.push({
        type: "god_node",
        severity: "warning",
        message: `Node "${node.name}" has high fan-in (${fanIn} dependents) \u2014 potential god node`,
        nodeIds: [node.id]
      });
    }
  }
  return anomalies;
}
1395
/**
 * Lists the direct children of the focus node (or root, when no focus) that
 * the lens does not filter out via visibility override, kind, or tags.
 * Depth filters are intentionally not applied — direct children are always
 * exactly one level down from the display context.
 * @param {object} graph
 * @param {object} lens
 * @param {string} [focusNodeId]
 * @returns {Array} Child node objects to display.
 */
function getDirectChildrenForDisplay(graph, lens, focusNodeId) {
  const context = graph.nodes[focusNodeId || graph.rootNodeId];
  if (!context) return [];
  const isWanted = (node) => {
    if (!node) return false;
    if (node.lensConfig[lens.id]?.visible === false) return false;
    if (lens.nodeFilter.kinds && !lens.nodeFilter.kinds.includes(node.kind)) return false;
    if (lens.nodeFilter.tags?.length && !lens.nodeFilter.tags.some((tag) => node.tags.includes(tag))) {
      return false;
    }
    return true;
  };
  return context.children.map((id) => graph.nodes[id]).filter(isWanted);
}
1410
// Public facade bundling the pure graph-model operations above. All mutating
// operations are immutable (they return new graph objects); the query
// operations are read-only.
var codeGraphModelService = {
  createEmptyGraph,
  addNode,
  removeNode,
  addRelation,
  removeRelation,
  getChildren,
  getDescendants,
  getAncestors,
  getVisibleNodes,
  getDirectChildrenForDisplay,
  getVisibleRelations,
  validateGraph,
  getDefaultLenses
};
1425
+
1426
+ // src/services/codeToGraph.ts
1427
// Extensions tried, in order, when resolving an import specifier to a file;
// the trailing "" lets an already-extensioned specifier match exactly.
var RESOLVE_EXTENSIONS = [".ts", ".tsx", ".js", ".jsx", ".py", ""];
// Directory index files tried after the direct-extension candidates fail.
var INDEX_SUFFIXES = ["/index.ts", "/index.tsx", "/index.js", "/index.jsx"];
1429
/**
 * Resolves a relative ("./x", "../x") or alias ("@/x") import specifier to a
 * known file, trying RESOLVE_EXTENSIONS first and INDEX_SUFFIXES second.
 * Bare package imports resolve to null.
 * @param {string} importSource
 * @param {string} currentFile - Path of the file containing the import.
 * @param {string[]} allFiles - Every known file path.
 * @returns {string|null}
 */
function resolveImportToFile(importSource, currentFile, allFiles) {
  const isRelative = importSource.startsWith(".");
  if (!isRelative && !importSource.startsWith("@/")) return null;
  let basePath;
  if (isRelative) {
    basePath = currentFile.substring(0, currentFile.lastIndexOf("/"));
    for (const segment of importSource.split("/")) {
      if (segment === ".") continue;
      if (segment === "..") {
        const cut = basePath.lastIndexOf("/");
        basePath = cut >= 0 ? basePath.substring(0, cut) : "";
      } else {
        basePath = basePath ? `${basePath}/${segment}` : segment;
      }
    }
  } else {
    // "@/foo" is an alias for "foo" relative to the repository root.
    basePath = importSource.slice(2);
  }
  for (const suffix of [...RESOLVE_EXTENSIONS, ...INDEX_SUFFIXES]) {
    const candidate = basePath + suffix;
    if (allFiles.includes(candidate)) return candidate;
  }
  return null;
}
1457
/**
 * Maps a parser symbol kind onto a graph node kind. Kinds without a
 * dedicated node type (method, enum, ...) fall back to "function".
 * @param {string} kind
 * @returns {string}
 */
function symbolKindToNodeKind(kind) {
  // Map (not a plain object) so inherited keys like "toString" cannot leak.
  const mapping = new Map([
    ["class", "class"],
    ["function", "function"],
    ["interface", "interface"],
    ["variable", "variable"]
  ]);
  return mapping.get(kind) ?? "function";
}
1471
// Converts a codebase analysis result into a hierarchical code graph:
// root (system) -> modules (package, depth 1) -> files (module, depth 2)
// -> symbols (depth 3), then adds "depends_on" relations between files
// (from resolved imports) and between modules (from analysis dependencies).
// When a provider is given, files are re-read to attach content hashes to
// source refs; read failures simply leave the ref null (best-effort).
async function parseCodebaseToGraph(analysis, repoId, repoName, workspaceId, provider, onProgress) {
  let graph = codeGraphModelService.createEmptyGraph(workspaceId, repoId, repoName);
  const rootId = graph.rootNodeId;
  const totalFiles = analysis.modules.reduce((sum, m) => sum + m.files.length, 0);
  const moduleIdMap = /* @__PURE__ */ new Map();
  const fileIdMap = /* @__PURE__ */ new Map();
  // Pass 1: one "package" node per module, under the root.
  for (const mod of analysis.modules) {
    const result = codeGraphModelService.addNode(graph, {
      name: mod.name,
      description: mod.description,
      kind: "package",
      depth: 1,
      parentId: rootId,
      children: [],
      sourceRef: null,
      tags: [],
      lensConfig: {},
      domainProjections: []
    });
    graph = result.graph;
    moduleIdMap.set(mod.name, result.nodeId);
  }
  // Pass 2: file and symbol nodes, with content-hash-backed source refs.
  let fileIdx = 0;
  for (const mod of analysis.modules) {
    const moduleNodeId = moduleIdMap.get(mod.name);
    for (const file of mod.files) {
      fileIdx++;
      onProgress?.(`Parsing ${file.filePath}`, fileIdx, totalFiles);
      let contentHash = "";
      if (provider) {
        try {
          const content = await provider.readFile(file.filePath);
          contentHash = await codeParserService.computeContentHash(content);
        } catch {
          // Unreadable file: keep contentHash empty so sourceRef stays null.
        }
      }
      const fileResult = codeGraphModelService.addNode(graph, {
        name: file.filePath.split("/").pop() ?? file.filePath,
        kind: "module",
        depth: 2,
        parentId: moduleNodeId,
        children: [],
        sourceRef: contentHash ? { filePath: file.filePath, lineStart: 1, lineEnd: Math.max(...file.symbols.map((s) => s.lineEnd), 1), contentHash } : null,
        tags: [file.language],
        lensConfig: {},
        domainProjections: []
      });
      graph = fileResult.graph;
      fileIdMap.set(file.filePath, fileResult.nodeId);
      for (const symbol of file.symbols) {
        const symResult = codeGraphModelService.addNode(graph, {
          name: symbol.name,
          kind: symbolKindToNodeKind(symbol.kind),
          depth: 3,
          parentId: fileResult.nodeId,
          children: [],
          sourceRef: contentHash ? { filePath: file.filePath, lineStart: symbol.lineStart, lineEnd: symbol.lineEnd, contentHash } : null,
          tags: [],
          lensConfig: {},
          domainProjections: []
        });
        graph = symResult.graph;
      }
    }
  }
  // Pass 3: file-level "depends_on" relations from resolvable imports.
  const allFiles = analysis.modules.flatMap((m) => m.files.map((f) => f.filePath));
  for (const mod of analysis.modules) {
    for (const file of mod.files) {
      const sourceFileId = fileIdMap.get(file.filePath);
      if (!sourceFileId) continue;
      for (const imp of file.imports) {
        if (imp.isExternal) continue;
        const resolved = resolveImportToFile(imp.source, file.filePath, allFiles);
        if (!resolved) continue;
        const targetFileId = fileIdMap.get(resolved);
        if (!targetFileId || targetFileId === sourceFileId) continue;
        const relResult = codeGraphModelService.addRelation(graph, sourceFileId, targetFileId, "depends_on");
        graph = relResult.graph;
      }
    }
  }
  // Pass 4: module-level "depends_on" relations from the analysis.
  for (const mod of analysis.modules) {
    const sourceModId = moduleIdMap.get(mod.name);
    if (!sourceModId) continue;
    for (const dep of mod.dependencies) {
      const targetModId = moduleIdMap.get(dep);
      if (!targetModId || targetModId === sourceModId) continue;
      const relResult = codeGraphModelService.addRelation(graph, sourceModId, targetModId, "depends_on");
      graph = relResult.graph;
    }
  }
  return graph;
}
1564
+
1565
+ // ../services/llmService.ts
1566
+ import { GoogleGenAI, Type as GeminiType } from "@google/genai";
1567
+ import OpenAI from "openai";
1568
// Safety cap on tool-call round-trips in the agent loop, so a model that
// keeps requesting tools cannot spin forever.
var MAX_AGENT_ITERATIONS = 20;
1569
/**
 * Raised when no usable AI API key is configured, or when a provider rejects
 * the key. Callers match on `instanceof LLMConfigError` or `err.name`.
 */
var LLMConfigError = class _LLMConfigError extends Error {
  constructor(message = "No AI API key configured. Open AI Settings to add one.") {
    super(message);
    this.name = "LLMConfigError";
    // Repair the prototype chain so `instanceof` keeps working even when
    // the class is transpiled to ES5-style constructors.
    Object.setPrototypeOf(this, _LLMConfigError.prototype);
  }
};
1576
/**
 * Raised when a provider reports quota exhaustion or rate limiting; the
 * message names the provider so the UI can show a targeted hint.
 */
var LLMRateLimitError = class _LLMRateLimitError extends Error {
  constructor(provider) {
    super(`${provider} quota or rate limit exceeded. Wait a moment then try again.`);
    this.name = "LLMRateLimitError";
    // Repair the prototype chain so `instanceof` keeps working even when
    // the class is transpiled to ES5-style constructors.
    Object.setPrototypeOf(this, _LLMRateLimitError.prototype);
  }
};
1583
// Fallback model per provider, used whenever the user config names none.
var DEFAULT_MODELS = {
  gemini: "gemini-3.1-flash-lite-preview",
  openai: "gpt-4o-mini",
  anthropic: "claude-sonnet-4-6"
};
1588
/**
 * Sends a chat completion request to Google Gemini.
 * Quota/availability failures are mapped to LLMRateLimitError and auth
 * failures to LLMConfigError so the UI reacts uniformly across providers.
 * @param {Array<{role: string, content: string}>} messages - Chat history.
 * @param {string} systemPrompt - System instruction text.
 * @param {{apiKey: string, model?: string}} config
 * @param {AbortSignal} [signal] - Optional cancellation signal.
 * @returns {Promise<object>} { content, provider: "gemini", model, usage? }
 */
async function sendGemini(messages, systemPrompt, config, signal) {
  const ai = new GoogleGenAI({ apiKey: config.apiKey });
  const model = config.model || DEFAULT_MODELS.gemini;
  // Gemini's chat format uses "model" where other APIs use "assistant".
  const contents = messages.map(({ role, content }) => ({
    role: role === "assistant" ? "model" : "user",
    parts: [{ text: content }]
  }));
  try {
    const response = await ai.models.generateContent({
      model,
      contents,
      config: {
        systemInstruction: systemPrompt,
        temperature: 0.3,
        abortSignal: signal
      }
    });
    const text = response.text;
    if (!text) throw new Error("No response from Gemini");
    const meta = response.usageMetadata;
    let usage;
    if (meta) {
      usage = {
        inputTokens: meta.promptTokenCount ?? 0,
        outputTokens: meta.candidatesTokenCount ?? 0,
        totalTokens: meta.totalTokenCount ?? 0
      };
    }
    return { content: text, provider: "gemini", model, usage };
  } catch (err) {
    // Already-classified errors pass straight through.
    if (err instanceof LLMConfigError || err instanceof LLMRateLimitError) throw err;
    // The SDK surfaces HTTP failures as message text; classify by substring.
    const msg = err instanceof Error ? err.message : String(err);
    const isQuotaOrDown = msg.includes("429") || msg.includes("RESOURCE_EXHAUSTED") || msg.includes("quota") || msg.includes("503") || msg.includes("UNAVAILABLE");
    if (isQuotaOrDown) {
      throw new LLMRateLimitError("Gemini");
    }
    const isAuthFailure = msg.includes("401") || msg.includes("403") || msg.includes("API_KEY_INVALID") || msg.includes("permission");
    if (isAuthFailure) {
      throw new LLMConfigError(`Gemini API key is invalid or missing permissions. Open AI Settings.`);
    }
    throw err;
  }
}
1626
/**
 * Sends a chat completion request to OpenAI.
 * HTTP 429 becomes LLMRateLimitError; 401/403 become LLMConfigError.
 * @param {Array<{role: string, content: string}>} messages - Chat history.
 * @param {string} systemPrompt - Prepended as the system message.
 * @param {{apiKey: string, model?: string}} config
 * @param {AbortSignal} [signal] - Optional cancellation signal.
 * @returns {Promise<object>} { content, provider: "openai", model, usage? }
 */
async function sendOpenAI(messages, systemPrompt, config, signal) {
  // dangerouslyAllowBrowser: this client may also run in a browser context.
  const client = new OpenAI({ apiKey: config.apiKey, dangerouslyAllowBrowser: true });
  const model = config.model || DEFAULT_MODELS.openai;
  const chatMessages = [
    { role: "system", content: systemPrompt },
    ...messages.map(({ role, content }) => ({ role, content }))
  ];
  try {
    const response = await client.chat.completions.create(
      { model, messages: chatMessages, temperature: 0.3 },
      { signal }
    );
    const text = response.choices[0]?.message?.content;
    if (!text) throw new Error("No response from OpenAI");
    let usage;
    if (response.usage) {
      usage = {
        inputTokens: response.usage.prompt_tokens,
        outputTokens: response.usage.completion_tokens,
        totalTokens: response.usage.total_tokens
      };
    }
    return { content: text, provider: "openai", model, usage };
  } catch (err) {
    if (err instanceof LLMConfigError || err instanceof LLMRateLimitError) throw err;
    const status = err instanceof OpenAI.APIError ? err.status : void 0;
    if (status === 429) throw new LLMRateLimitError("OpenAI");
    if (status === 401 || status === 403) throw new LLMConfigError(`OpenAI API key is invalid. Open AI Settings.`);
    throw err;
  }
}
1660
/**
 * Sends a chat completion request to the Anthropic Messages API via raw
 * fetch (optionally through a user-configured CORS proxy).
 * 429 -> LLMRateLimitError; 401/403 -> LLMConfigError; network/CORS
 * failures -> a descriptive Error suggesting a proxy.
 * @param {Array<{role: string, content: string}>} messages - Chat history.
 * @param {string} systemPrompt - Sent as the `system` field.
 * @param {{apiKey: string, model?: string, proxyUrl?: string}} config
 * @param {AbortSignal} [signal] - Optional cancellation signal.
 * @returns {Promise<object>} { content, provider: "anthropic", model, usage? }
 */
async function sendAnthropic(messages, systemPrompt, config, signal) {
  const model = config.model || DEFAULT_MODELS.anthropic;
  const baseUrl = config.proxyUrl || "https://api.anthropic.com";
  let response;
  try {
    response = await fetch(`${baseUrl}/v1/messages`, {
      method: "POST",
      headers: {
        "Content-Type": "application/json",
        "x-api-key": config.apiKey,
        "anthropic-version": "2023-06-01"
      },
      body: JSON.stringify({
        model,
        max_tokens: 4096,
        system: systemPrompt,
        messages: messages.map((m) => ({ role: m.role, content: m.content }))
      }),
      signal
    });
  } catch (err) {
    // fetch() REJECTS on network and CORS failures — it never resolves with
    // a status-0 response — so the friendly CORS guidance must be produced
    // here. (The previous status-0 check on `response` was unreachable.)
    if (err instanceof Error && err.name === "AbortError") throw err;
    const msg = err instanceof Error ? err.message : String(err);
    if (msg.includes("CORS") || msg.includes("Failed to fetch") || msg.includes("fetch failed")) {
      throw new Error(
        "CORS error: The Anthropic API does not allow direct browser requests. Please configure a CORS proxy URL in AI Settings."
      );
    }
    throw err;
  }
  if (!response.ok) {
    const errorText = await response.text().catch(() => "Unknown error");
    if (response.status === 429) throw new LLMRateLimitError("Anthropic");
    if (response.status === 401 || response.status === 403) {
      throw new LLMConfigError(`Anthropic API key is invalid. Open AI Settings.`);
    }
    // Kept for defense in depth (e.g. a proxy echoing CORS failures in its
    // response body).
    if (response.status === 0 || errorText.includes("CORS") || errorText.includes("Failed to fetch")) {
      throw new Error(
        "CORS error: The Anthropic API does not allow direct browser requests. Please configure a CORS proxy URL in AI Settings."
      );
    }
    throw new Error(`Anthropic API error (${response.status}): ${errorText}`);
  }
  const data = await response.json();
  const text = data.content?.[0]?.text;
  if (!text) throw new Error("No response from Anthropic");
  const usage = data.usage ? {
    inputTokens: data.usage.input_tokens,
    outputTokens: data.usage.output_tokens,
    totalTokens: data.usage.input_tokens + data.usage.output_tokens
  } : void 0;
  return { content: text, provider: "anthropic", model, usage };
}
1701
/**
 * Agentic tool-calling loop against the Gemini API.
 *
 * Runs up to MAX_AGENT_ITERATIONS rounds of generateContent. Each round either
 * yields final text (no function calls) or executes the requested tools via
 * `executor` and feeds their results back as functionResponse parts.
 *
 * @param messages            chat history ({role, content}); "assistant" maps to Gemini's "model" role
 * @param systemPrompt        system instruction passed on every request
 * @param tools               tool declarations ({name, description, parameters} JSON-schema style)
 * @param executor            async (name, args) => step; step.result (string) is returned to the model
 * @param config              provider config (apiKey, optional model override)
 * @param continuationContext prior Gemini `contents` array to resume an interrupted loop
 * @param signal              optional AbortSignal, checked before each round and passed to the SDK
 * @returns {content, toolSteps, usage} on completion, or
 *          {content:"", toolSteps, interrupted:true, continuationContext, usage}
 *          when the iteration cap is reached.
 */
async function runAgentLoopGemini(messages, systemPrompt, tools, executor, config, continuationContext, signal) {
  const ai = new GoogleGenAI({ apiKey: config.apiKey });
  const model = config.model || DEFAULT_MODELS.gemini;
  const toolSteps = [];
  // Resume from an interrupted loop, or seed the conversation from history.
  const contents = continuationContext ? [...continuationContext] : messages.map((m) => ({
    role: m.role === "assistant" ? "model" : "user",
    parts: [{ text: m.content }]
  }));
  let totalInput = 0, totalOutput = 0;
  // Translate JSON-schema primitive type names to the Gemini SDK's enum;
  // anything unrecognized falls back to STRING.
  const geminiTypeMap = {
    string: GeminiType.STRING,
    number: GeminiType.NUMBER,
    boolean: GeminiType.BOOLEAN
  };
  const functionDeclarations = tools.map((t) => ({
    name: t.name,
    description: t.description,
    parameters: {
      type: GeminiType.OBJECT,
      properties: Object.fromEntries(
        Object.entries(t.parameters.properties).map(([key, param]) => [
          key,
          { ...param, type: geminiTypeMap[param.type] ?? GeminiType.STRING }
        ])
      ),
      required: t.parameters.required
    }
  }));
  for (let i = 0; i < MAX_AGENT_ITERATIONS; i++) {
    signal?.throwIfAborted();
    const response = await ai.models.generateContent({
      model,
      contents,
      config: {
        systemInstruction: systemPrompt,
        temperature: 0.3,
        tools: [{ functionDeclarations }],
        abortSignal: signal
      }
    });
    const candidate = response.candidates?.[0];
    if (!candidate?.content?.parts) throw new Error("No response from Gemini");
    // Accumulate token usage across all rounds of the loop.
    const meta = response.usageMetadata;
    if (meta) {
      totalInput += meta.promptTokenCount ?? 0;
      totalOutput += meta.candidatesTokenCount ?? 0;
    }
    const parts = candidate.content.parts;
    const fnParts = parts.filter((p) => p.functionCall != null);
    const textParts = parts.filter((p) => p.text != null);
    // No function calls => the model produced its final answer.
    if (fnParts.length === 0) {
      const usage2 = { inputTokens: totalInput, outputTokens: totalOutput, totalTokens: totalInput + totalOutput };
      return { content: textParts.map((p) => p.text ?? "").join(""), toolSteps, usage: usage2 };
    }
    contents.push({ role: "model", parts });
    // Execute all requested tools concurrently.
    // NOTE(review): toolSteps is pushed inside the concurrent callbacks, so it
    // records steps in completion order, not necessarily call order — confirm
    // consumers don't rely on ordering.
    const fnResponses = await Promise.all(fnParts.map(async (part) => {
      const { name, args } = part.functionCall;
      if (!name) throw new Error("Gemini returned a function call with no name");
      const step = await executor(name, args ?? {});
      toolSteps.push(step);
      return { functionResponse: { name, response: { output: step.result } } };
    }));
    contents.push({ role: "user", parts: fnResponses });
  }
  // Iteration cap reached: hand back the conversation so the caller can resume.
  const usage = { inputTokens: totalInput, outputTokens: totalOutput, totalTokens: totalInput + totalOutput };
  return { content: "", toolSteps, interrupted: true, continuationContext: [...contents], usage };
}
1768
/**
 * Agentic tool-calling loop against the OpenAI Chat Completions API.
 *
 * Runs up to MAX_AGENT_ITERATIONS rounds; tool calls are executed via
 * `executor` and their results appended as role:"tool" messages.
 *
 * @returns {content, toolSteps, usage} on completion, or
 *          {content:"", toolSteps, interrupted:true, continuationContext, usage}
 *          when the iteration cap is reached.
 */
async function runAgentLoopOpenAI(messages, systemPrompt, tools, executor, config, continuationContext, signal) {
  // dangerouslyAllowBrowser: this bundle may execute in a browser context.
  const client = new OpenAI({ apiKey: config.apiKey, dangerouslyAllowBrowser: true });
  const model = config.model || DEFAULT_MODELS.openai;
  const toolSteps = [];
  const openAITools = tools.map((t) => ({
    type: "function",
    function: { name: t.name, description: t.description, parameters: t.parameters }
  }));
  // Resume a prior interrupted loop, or seed with system prompt + history.
  const conv = continuationContext ? [...continuationContext] : [
    { role: "system", content: systemPrompt },
    ...messages.map((m) => ({ role: m.role, content: m.content }))
  ];
  let totalInput = 0, totalOutput = 0;
  for (let i = 0; i < MAX_AGENT_ITERATIONS; i++) {
    signal?.throwIfAborted();
    const response = await client.chat.completions.create({
      model,
      messages: conv,
      tools: openAITools,
      tool_choice: "auto",
      temperature: 0.3
    }, { signal });
    const msg = response.choices[0]?.message;
    if (!msg) throw new Error("No response from OpenAI");
    // Accumulate token usage across all rounds.
    if (response.usage) {
      totalInput += response.usage.prompt_tokens;
      totalOutput += response.usage.completion_tokens;
    }
    conv.push(msg);
    // No tool calls => final answer.
    if (!msg.tool_calls?.length) {
      const usage2 = { inputTokens: totalInput, outputTokens: totalOutput, totalTokens: totalInput + totalOutput };
      return { content: msg.content ?? "", toolSteps, usage: usage2 };
    }
    // Execute requested tools concurrently; tool messages keep the model's
    // call order because Promise.all preserves the map order.
    // NOTE(review): toolSteps, however, records completion order — confirm
    // consumers don't rely on ordering.
    const toolMessages = await Promise.all(msg.tool_calls.filter((tc) => tc.type === "function").map(async (tc) => {
      const args = JSON.parse(tc.function.arguments || "{}");
      const step = await executor(tc.function.name, args);
      toolSteps.push(step);
      return { role: "tool", tool_call_id: tc.id, content: step.result };
    }));
    toolMessages.forEach((m) => conv.push(m));
  }
  // Iteration cap reached: hand back the conversation so the caller can resume.
  const usage = { inputTokens: totalInput, outputTokens: totalOutput, totalTokens: totalInput + totalOutput };
  return { content: "", toolSteps, interrupted: true, continuationContext: [...conv], usage };
}
1812
/**
 * Agentic tool-calling loop against the Anthropic Messages API (raw fetch,
 * optionally through config.proxyUrl to work around browser CORS).
 *
 * Runs up to MAX_AGENT_ITERATIONS rounds; while stop_reason is "tool_use",
 * tool_use blocks are executed via `executor` and returned as tool_result
 * content in a user message.
 *
 * @returns {content, toolSteps, usage} on completion, or
 *          {content:"", toolSteps, interrupted:true, continuationContext, usage}
 *          when the iteration cap is reached.
 * @throws LLMRateLimitError on HTTP 429, LLMConfigError on 401/403.
 */
async function runAgentLoopAnthropic(messages, systemPrompt, tools, executor, config, continuationContext, signal) {
  const model = config.model || DEFAULT_MODELS.anthropic;
  const baseUrl = config.proxyUrl || "https://api.anthropic.com";
  const toolSteps = [];
  // Anthropic uses `input_schema` where OpenAI uses `function.parameters`.
  const anthropicTools = tools.map((t) => ({
    name: t.name,
    description: t.description,
    input_schema: t.parameters
  }));
  // Resume a prior interrupted loop, or seed from history (system prompt is
  // carried separately in the request body, not in the messages array).
  const conv = continuationContext ? [...continuationContext] : messages.map((m) => ({ role: m.role, content: m.content }));
  let totalInput = 0, totalOutput = 0;
  for (let i = 0; i < MAX_AGENT_ITERATIONS; i++) {
    signal?.throwIfAborted();
    const res = await fetch(`${baseUrl}/v1/messages`, {
      method: "POST",
      headers: {
        "Content-Type": "application/json",
        "x-api-key": config.apiKey,
        "anthropic-version": "2023-06-01"
      },
      body: JSON.stringify({
        model,
        max_tokens: 4096,
        system: systemPrompt,
        tools: anthropicTools,
        messages: conv
      }),
      signal
    });
    if (!res.ok) {
      const text = await res.text().catch(() => "");
      if (res.status === 429) throw new LLMRateLimitError("Anthropic");
      if (res.status === 401 || res.status === 403) throw new LLMConfigError("Anthropic API key is invalid. Open AI Settings.");
      throw new Error(`Anthropic error (${res.status}): ${text}`);
    }
    const data = await res.json();
    const content = data.content ?? [];
    const stopReason = data.stop_reason;
    // Accumulate token usage across all rounds.
    if (data.usage) {
      totalInput += data.usage.input_tokens;
      totalOutput += data.usage.output_tokens;
    }
    conv.push({ role: "assistant", content });
    // Any stop reason other than "tool_use" means the model is done.
    if (stopReason !== "tool_use") {
      const text = content.filter((b) => b.type === "text").map((b) => b.text ?? "").join("");
      const usage2 = { inputTokens: totalInput, outputTokens: totalOutput, totalTokens: totalInput + totalOutput };
      return { content: text, toolSteps, usage: usage2 };
    }
    // Execute requested tools concurrently; tool_result order matches the
    // model's tool_use order (Promise.all preserves map order).
    // NOTE(review): toolSteps records completion order — confirm consumers
    // don't rely on ordering.
    const toolUseBlocks = content.filter((b) => b.type === "tool_use");
    const toolResults = await Promise.all(toolUseBlocks.map(async (block) => {
      const step = await executor(block.name, block.input ?? {});
      toolSteps.push(step);
      return { type: "tool_result", tool_use_id: block.id, content: step.result };
    }));
    conv.push({ role: "user", content: toolResults });
  }
  // Iteration cap reached: hand back the conversation so the caller can resume.
  const usage = { inputTokens: totalInput, outputTokens: totalOutput, totalTokens: totalInput + totalOutput };
  return { content: "", toolSteps, interrupted: true, continuationContext: [...conv], usage };
}
1871
// Usage listener registered elsewhere in this module; stays null until a
// subscriber attaches.
var _usageListener = null;
/**
 * Forward token-usage stats to the registered listener.
 * No-op when there is no listener, no usage object, or zero total tokens.
 *
 * @param usage    {inputTokens, outputTokens, totalTokens} or undefined
 * @param provider provider key ("gemini" | "openai" | "anthropic")
 * @param model    model name the call was made with
 * @param source   caller tag (e.g. "code-agent-analyste")
 */
function notifyUsage(usage, provider, model, source) {
  if (!usage) return;
  if (!(usage.totalTokens > 0)) return;
  if (_usageListener) {
    _usageListener(usage, provider, model, source);
  }
}
1877
/**
 * Provider-agnostic LLM facade. Dispatches on settings.activeProvider
 * ("gemini" | "openai" | "anthropic") and reports token usage to the
 * module-level usage listener after each call.
 */
var llmService = {
  /**
   * One-shot chat completion (no tools).
   *
   * @param options either an AbortSignal (legacy call shape) or
   *                {signal?, source?}; `source` tags the usage report.
   * @throws LLMConfigError when no API key is configured for the active provider.
   */
  async sendMessage(messages, systemPrompt, settings, options) {
    const config = settings.providers[settings.activeProvider];
    if (!config?.apiKey) {
      throw new LLMConfigError(
        `No API key configured for ${settings.activeProvider}. Open AI Settings to configure.`
      );
    }
    // Back-compat: older callers passed the AbortSignal directly as `options`.
    const signal = options instanceof AbortSignal ? options : options?.signal;
    const source = options instanceof AbortSignal ? "unknown" : options?.source ?? "unknown";
    let response;
    switch (settings.activeProvider) {
      case "gemini":
        response = await sendGemini(messages, systemPrompt, config, signal);
        break;
      case "openai":
        response = await sendOpenAI(messages, systemPrompt, config, signal);
        break;
      case "anthropic":
        response = await sendAnthropic(messages, systemPrompt, config, signal);
        break;
      default:
        throw new Error(`Unknown provider: ${settings.activeProvider}`);
    }
    // NOTE(review): reports config.model, which is undefined when the provider
    // default model was used; response.model carries the resolved name —
    // confirm which one the listener expects.
    notifyUsage(response.usage, settings.activeProvider, config.model, source);
    return response;
  },
  /**
   * Multi-turn tool-calling loop; see the per-provider runAgentLoop* functions
   * for semantics. `options`: {continuationContext?, signal?, source?}.
   * @throws LLMConfigError when no API key is configured for the active provider.
   */
  async runAgentLoop(messages, systemPrompt, tools, executor, settings, options) {
    const config = settings.providers[settings.activeProvider];
    if (!config?.apiKey) {
      throw new LLMConfigError(`No API key configured for ${settings.activeProvider}. Open AI Settings to configure.`);
    }
    const ctx = options?.continuationContext;
    const sig = options?.signal;
    const source = options?.source ?? "unknown";
    let result;
    switch (settings.activeProvider) {
      case "gemini":
        result = await runAgentLoopGemini(messages, systemPrompt, tools, executor, config, ctx, sig);
        break;
      case "openai":
        result = await runAgentLoopOpenAI(messages, systemPrompt, tools, executor, config, ctx, sig);
        break;
      case "anthropic":
        result = await runAgentLoopAnthropic(messages, systemPrompt, tools, executor, config, ctx, sig);
        break;
      default:
        throw new Error(`Unknown provider: ${settings.activeProvider}`);
    }
    notifyUsage(result.usage, settings.activeProvider, config.model, source);
    return result;
  },
  /**
   * Fires a minimal prompt at the given provider/config to verify credentials.
   * Resolves true on a non-empty response; otherwise the underlying error
   * (rate limit, bad key, network) propagates to the caller.
   */
  async testConnection(provider, config) {
    const testMessage = [{ role: "user", content: 'Say "OK"' }];
    // Build a throwaway settings object with only the provider under test.
    const testSettings = {
      activeProvider: provider,
      providers: { gemini: null, openai: null, anthropic: null, [provider]: config }
    };
    const response = await this.sendMessage(testMessage, 'Respond with just "OK".', testSettings);
    return !!response.content;
  }
};
1939
+
1940
+ // ../services/codeGraphOrchestrator.ts
1941
// Generates a short (up to 9 chars) base-36 random ID. Non-cryptographic:
// Math.random() is fine for in-memory node/step IDs but must not be used for
// security-sensitive identifiers.
// String.prototype.substr is deprecated (Annex B); slice(2, 11) covers the
// identical character range (9 chars starting at index 2).
var generateId2 = () => Math.random().toString(36).slice(2, 11);
1942
/**
 * Builds the shared orchestrator context from an AST analysis result.
 *
 * Resolves each file's internal imports to known file paths by suffix
 * matching (import specifiers like "./foo" or "@/foo" rarely carry the
 * extension), recording resolved edges as "src→target" strings.
 *
 * @param analysis AST analysis ({modules: [{files: [...]}, ...]})
 * @param provider optional filesystem provider with readFile(path)
 * @returns {analysis, astImportPairs, fileByPath, provider, fileCache}
 */
function buildContext(analysis, provider) {
  const files = analysis.modules.flatMap((mod) => mod.files);
  const fileByPath = new Map(files.map((f) => [f.filePath, f]));
  const knownPaths = new Set(files.map((f) => f.filePath));
  const astImportPairs = new Set();
  for (const f of files) {
    for (const imp of f.imports) {
      if (imp.isExternal) continue;
      // Strip alias/relative prefixes so "./b" can match ".../b.ts".
      const base = imp.source.replace(/^@\//, "").replace(/^\.\//, "");
      for (const candidate of knownPaths) {
        const withoutExt = candidate.replace(/\.[^/.]+$/, "");
        if (withoutExt.endsWith(base) || candidate === imp.source) {
          astImportPairs.add(`${f.filePath}\u2192${candidate}`);
          break; // first suffix match wins
        }
      }
    }
  }
  return { analysis, astImportPairs, fileByPath, provider, fileCache: new Map() };
}
1963
/**
 * Best-effort extraction of a JSON payload from an LLM response.
 *
 * Tries, in order: a fenced ```json code block (trimmed capture), the
 * outermost {...} object span, the outermost [...] array span. Falls back
 * to the trimmed raw text when nothing matches.
 *
 * @param text raw model output
 * @returns the most likely JSON substring (not validated)
 */
function extractJSON(text) {
  const strategies = [
    { pattern: /```(?:json)?\s*\n?([\s\S]*?)```/, pick: (m) => m[1].trim() },
    { pattern: /\{[\s\S]*\}/, pick: (m) => m[0] },
    { pattern: /\[[\s\S]*\]/, pick: (m) => m[0] }
  ];
  for (const { pattern, pick } of strategies) {
    const match = text.match(pattern);
    if (match) return pick(match);
  }
  return text.trim();
}
1972
/**
 * Reads a file through the context's filesystem provider, truncated to the
 * first `maxLines` lines, with memoization in ctx.fileCache (keyed by
 * "path:maxLines" so different truncations cache independently).
 *
 * Never throws: returns a placeholder string when no provider is configured
 * or when the read fails.
 *
 * @param ctx      orchestrator context ({provider, fileCache})
 * @param path     relative file path
 * @param maxLines truncation limit (default 150)
 * @returns the (possibly truncated) file content, or a placeholder message
 */
async function readFileCached(ctx, path, maxLines = 150) {
  const key = `${path}:${maxLines}`;
  if (ctx.fileCache.has(key)) {
    return ctx.fileCache.get(key);
  }
  if (!ctx.provider) {
    return "(file content not available \u2014 no filesystem provider)";
  }
  let truncated;
  try {
    const raw = await ctx.provider.readFile(path);
    truncated = raw.split("\n").slice(0, maxLines).join("\n");
  } catch {
    // Best-effort: a failed read is reported inline, and not cached.
    return "(could not read file)";
  }
  ctx.fileCache.set(key, truncated);
  return truncated;
}
1985
// System prompt for the "Analyste" agent (see runAnalysteAgent): instructs the
// model to group files into business-domain clusters using the three tools
// declared in buildAnalysteTools(), and to emit a strict JSON clusters object.
var ANALYSTE_SYSTEM = `You are a senior software architect specializing in domain-driven design.

Your task: analyze a codebase and group files into cohesive SEMANTIC DOMAINS \u2014 not technical layers.

You have tools:
- list_files_by_coupling(): list files sorted by import connections. Start here.
- get_file_info(path): AST-extracted symbols + imports for a file. Fast and cheap.
- read_file(path): actual source code (first 150 lines). Use selectively on pivotal/ambiguous files.

STRATEGY:
1. Call list_files_by_coupling() first to see the most connected files
2. Use get_file_info() broadly to understand roles without reading everything
3. Use read_file() only for files where you're unsure about their domain (max 8 reads)
4. Identify 3-10 semantic domains based on BUSINESS RESPONSIBILITY

DOMAIN RULES:
- Name by WHAT the code does: "Diagram Editor", "Code Intelligence", "Workspace Management"
- NEVER use: "Services", "Hooks", "Components", "Utils", "Core", "Lib"
- A hook + its service + its component \u2192 same domain if they serve the same feature
- Cross-directory grouping is expected
- Every file must appear in exactly one cluster
- Infrastructure/utility files (db adapters, config, shared in-memory stores) used by many domains \u2192 merge into the domain that uses them most. Do NOT isolate them as standalone clusters unless there are 3+ cohesive infrastructure files that belong together.
- Client-side entry points (app.js, index.html, main.ts) that orchestrate many features \u2192 group by their primary responsibility, or create a "Client Application" cluster if they span multiple domains.

When ready, output ONLY this JSON (no other text):
{
  "clusters": [
    {
      "name": "Domain Name",
      "description": "What this domain handles (1-2 sentences)",
      "files": ["exact/path/to/file.ts", ...]
    }
  ]
}`;
2019
/**
 * Tool declarations offered to the Analyste agent
 * (mirrors the tool list described in ANALYSTE_SYSTEM).
 *
 * @returns array of {name, description, parameters} in JSON-schema shape
 */
function buildAnalysteTools() {
  // Factory (not a shared constant) so each tool owns an independent schema object.
  const pathParameters = () => ({
    type: "object",
    properties: { path: { type: "string", description: "Relative file path from repo root" } },
    required: ["path"]
  });
  return [
    {
      name: "list_files_by_coupling",
      description: "List all files sorted by import connections (most-connected first). Good starting point for analysis.",
      parameters: { type: "object", properties: {}, required: [] }
    },
    {
      name: "get_file_info",
      description: "Get AST-extracted symbols and imports for a file. Cheaper than read_file \u2014 use broadly.",
      parameters: pathParameters()
    },
    {
      name: "read_file",
      description: "Read actual source code (first 150 lines). Use selectively for pivotal or ambiguous files only.",
      parameters: pathParameters()
    }
  ];
}
2046
/**
 * Builds the executor backing the Analyste agent's tools
 * (list_files_by_coupling / get_file_info / read_file).
 *
 * Pre-computes an incoming-import count so the coupling listing is cheap.
 *
 * @param ctx orchestrator context ({analysis, fileByPath, provider, fileCache})
 * @returns async (name, args) => {toolName, args, result, label}
 */
function buildAnalysteExecutor(ctx) {
  // NOTE(review): this map is keyed by the raw import specifier (imp.source,
  // e.g. "./foo"), but list_files_by_coupling looks entries up by f.filePath
  // (e.g. "src/foo.ts"). Unless file.imports carries pre-resolved paths,
  // importedByCount reads as 0 for most files — verify against the analyzer
  // that populates imports (buildContext resolves specifiers by suffix match,
  // suggesting they are NOT pre-resolved).
  const importedCount = /* @__PURE__ */ new Map();
  for (const file of ctx.analysis.modules.flatMap((m) => m.files)) {
    for (const imp of file.imports) {
      if (!imp.isExternal) {
        importedCount.set(imp.source, (importedCount.get(imp.source) || 0) + 1);
      }
    }
  }
  return async (name, args) => {
    switch (name) {
      case "list_files_by_coupling": {
        // Top 60 files ranked by combined incoming + outgoing internal imports.
        const allFiles = ctx.analysis.modules.flatMap((m) => m.files);
        const sorted = allFiles.map((f) => ({
          path: f.filePath,
          language: f.language,
          importedByCount: importedCount.get(f.filePath) || 0,
          importsCount: f.imports.filter((i) => !i.isExternal).length,
          symbolCount: f.symbols.length
        })).sort((a, b) => b.importedByCount + b.importsCount - (a.importedByCount + a.importsCount)).slice(0, 60);
        return {
          toolName: name,
          args,
          result: JSON.stringify(sorted, null, 2),
          label: "list_files_by_coupling()"
        };
      }
      case "get_file_info": {
        // AST summary only — no file I/O involved.
        const path = String(args.path || "");
        const file = ctx.fileByPath.get(path);
        if (!file) {
          return { toolName: name, args, result: `File not found: ${path}`, label: `get_file_info("${path}")` };
        }
        const info = {
          path: file.filePath,
          language: file.language,
          symbols: file.symbols.map((s) => `${s.name} (${s.kind})`),
          imports: file.imports.map((i) => `${i.name} from "${i.source}"${i.isExternal ? " [external]" : ""}`),
          exports: file.exportedSymbols
        };
        return {
          toolName: name,
          args,
          result: JSON.stringify(info, null, 2),
          label: `get_file_info("${path}")`
        };
      }
      case "read_file": {
        // Capped at 150 lines; memoized in ctx.fileCache by readFileCached.
        const path = String(args.path || "");
        const content = await readFileCached(ctx, path, 150);
        return { toolName: name, args, result: content, label: `read_file("${path}")` };
      }
      default:
        // Unknown tool names are reported back to the model rather than thrown.
        return { toolName: name, args, result: "Unknown tool", label: name };
    }
  };
}
2103
/**
 * Runs the "Analyste" agent: lets the LLM explore the codebase through the
 * Analyste tools, then parses and sanitizes its cluster proposal.
 *
 * @param ctx            orchestrator context from buildContext()
 * @param llmSettings    provider settings for llmService
 * @param onLog          optional (channel, message) progress logger
 * @param signal         optional AbortSignal for the agent loop
 * @param previousIssues optional evaluator issues from a prior attempt;
 *                       error-severity messages are replayed in the prompt
 * @param onAgentEvent   optional per-tool-call telemetry callback
 * @param onToolStart    optional callback fired before each tool call
 * @returns sanitized clusters [{name, description, files}] — every analyzed
 *          file appears in exactly one cluster (leftovers go to "Other")
 * @throws when the loop hits its iteration cap or the response lacks a
 *         valid clusters array (errors are logged before rethrowing)
 */
async function runAnalysteAgent(ctx, llmSettings, onLog, signal, previousIssues, onAgentEvent, onToolStart) {
  const allFiles = ctx.analysis.modules.flatMap((m) => m.files);
  const allFilePaths = new Set(allFiles.map((f) => f.filePath));
  let prompt = `Analyze this codebase and identify semantic domains.

FILE LIST (${allFiles.length} files):
`;
  prompt += allFiles.map((f) => `- ${f.filePath} (${f.language})`).join("\n");
  prompt += "\n\nUse your tools to explore key files, then output the JSON cluster mapping.";
  if (previousIssues && previousIssues.length > 0) {
    // Only error-severity issues from the previous attempt are replayed.
    const errorText = previousIssues.filter((i) => i.severity === "error").map((i) => `- ${i.message}${i.target ? ` [${i.target}]` : ""}`).join("\n");
    if (errorText) prompt += `

ISSUES FROM PREVIOUS ATTEMPT (fix these):
${errorText}`;
  }
  onLog?.("ai-cluster", "Analyste: exploring codebase structure...");
  try {
    const rawAnalysteExecutor = buildAnalysteExecutor(ctx);
    // Wrap the executor with telemetry only when someone is listening.
    const analysteExecutor = onAgentEvent || onToolStart ? async (name, args) => {
      const argsSummary = args.path || args.query || Object.values(args).join(", ") || "";
      onToolStart?.("analyste", name, argsSummary);
      const t0 = Date.now();
      const step = await rawAnalysteExecutor(name, args);
      onAgentEvent?.({
        agent: "analyste",
        toolName: name,
        argsSummary,
        resultSummary: step.result.slice(0, 300),
        durationMs: Date.now() - t0
      });
      return step;
    } : rawAnalysteExecutor;
    const result = await llmService.runAgentLoop(
      [{ role: "user", content: prompt }],
      ANALYSTE_SYSTEM,
      buildAnalysteTools(),
      analysteExecutor,
      llmSettings,
      { signal, source: "code-agent-analyste" }
    );
    if (result.interrupted) {
      throw new Error("Analyste: agent reached max iterations without producing output. Try again or switch to a more capable model.");
    }
    onLog?.("ai-cluster", `Analyste: ${result.toolSteps.length} tool calls \u2014 parsing clusters`);
    const jsonStr = extractJSON(result.content);
    const parsed = JSON.parse(jsonStr);
    if (!Array.isArray(parsed.clusters)) {
      throw new Error("Analyste: response did not include a valid clusters array. Try again or switch model.");
    }
    // Sanitize: drop malformed clusters, unknown paths, and duplicate
    // assignments (the first cluster to claim a file wins).
    const assigned = /* @__PURE__ */ new Set();
    const clusters = [];
    for (const c of parsed.clusters) {
      if (typeof c.name !== "string" || !Array.isArray(c.files)) continue;
      const validFiles = c.files.filter((f) => typeof f === "string" && allFilePaths.has(f) && !assigned.has(f));
      validFiles.forEach((f) => assigned.add(f));
      if (validFiles.length > 0) {
        clusters.push({ name: c.name, description: String(c.description || ""), files: validFiles });
      }
    }
    // A high rejection rate usually means the model invented file paths.
    const rejectedFileRate = 1 - assigned.size / Math.max(allFilePaths.size, 1);
    if (rejectedFileRate > 0.5) {
      onLog?.("ai-cluster", `Analyste warning: ${Math.round(rejectedFileRate * 100)}% of files had unresolvable paths \u2014 LLM may have hallucinated paths`);
    }
    // Guarantee total coverage: unplaced files land in a catch-all cluster.
    const unassigned = [...allFilePaths].filter((f) => !assigned.has(f));
    if (unassigned.length > 0) {
      onLog?.("ai-cluster", `Analyste: ${unassigned.length} unassigned files \u2192 "Other" cluster`);
      clusters.push({ name: "Other", description: "Files not assigned to a specific domain", files: unassigned });
    }
    onLog?.("ai-cluster", `Analyste: ${clusters.length} domains identified (${assigned.size}/${allFilePaths.size} files assigned)`);
    return clusters;
  } catch (err) {
    onLog?.("ai-cluster", `Analyste failed: ${err instanceof Error ? err.message : String(err)}`);
    throw err;
  }
}
2179
/**
 * "Évaluateur" pass: asks the LLM to adversarially review the Analyste's
 * clusters (misplaced files, type-based grouping, coverage gaps).
 *
 * Non-fatal by design: any LLM or parse failure yields an empty issue list.
 *
 * @param ctx          orchestrator context (uses astImportPairs for evidence)
 * @param clusters     clusters produced by runAnalysteAgent
 * @param llmSettings  provider settings for llmService
 * @param onLog        optional (channel, message) progress logger
 * @param onAgentEvent optional telemetry callback (start/result pseudo-events)
 * @param onBlackboard optional callback receiving {clusterIssues} for shared state
 * @returns issues [{type, severity, message, target?}]
 */
async function evaluateClusters(ctx, clusters, llmSettings, onLog, onAgentEvent, onBlackboard) {
  const allFiles = ctx.analysis.modules.flatMap((m) => m.files);
  const allFilePaths = new Set(allFiles.map((f) => f.filePath));
  const coveredFiles = new Set(clusters.flatMap((c) => c.files));
  const uncoveredRate = (allFilePaths.size - coveredFiles.size) / Math.max(allFilePaths.size, 1);
  const fileToCluster = /* @__PURE__ */ new Map();
  for (const c of clusters) {
    for (const f of c.files) fileToCluster.set(f, c.name);
  }
  // Up to 30 import edges whose endpoints live in different clusters — raw
  // evidence for the reviewer to judge boundary quality. Pairs are the
  // "src→target" strings built by buildContext.
  const crossBoundarySample = [...ctx.astImportPairs].filter((pair) => {
    const [src, tgt] = pair.split("\u2192");
    const srcCluster = fileToCluster.get(src);
    const tgtCluster = fileToCluster.get(tgt);
    return srcCluster && tgtCluster && srcCluster !== tgtCluster;
  }).slice(0, 30);
  // Keep the prompt small: counts plus a few sample files per cluster.
  const clusterSummary = clusters.map((c) => ({
    name: c.name,
    description: c.description,
    fileCount: c.files.length,
    sampleFiles: c.files.slice(0, 6)
  }));
  const prompt = `You are an adversarial architecture reviewer. Find real flaws in these semantic domain clusters.

CLUSTERS:
${JSON.stringify(clusterSummary, null, 2)}

UNCOVERED FILE RATE: ${(uncoveredRate * 100).toFixed(1)}%

CROSS-BOUNDARY IMPORT PAIRS (files in different clusters that import each other):
${crossBoundarySample.length > 0 ? crossBoundarySample.join("\n") : "(none detected)"}

REVIEW:
1. Flag files likely misplaced based on their imports crossing boundaries heavily
2. Flag clusters that appear to group by code type rather than business domain (e.g., a cluster named like "Services" or "Hooks")
3. Flag if uncovered file rate > 10%
4. Do NOT flag legitimate cross-boundary imports (e.g., a shared utility used by multiple domains)

Output ONLY this JSON:
{
  "issues": [
    {
      "type": "misplaced_file" | "missing_coverage" | "hallucinated_relation",
      "severity": "warning" | "error",
      "message": "specific actionable description",
      "target": "cluster or file name"
    }
  ]
}`;
  onLog?.("ai-eval", "\xC9valuateur: validating semantic clusters...");
  const evalStartMs = Date.now();
  // Synthetic start event so UIs can show progress for the evaluation phase.
  onAgentEvent?.({
    agent: "evaluateur",
    toolName: "__eval_start__",
    argsSummary: "clusters",
    resultSummary: "Validation en cours...",
    durationMs: 0
  });
  try {
    const response = await llmService.sendMessage(
      [{ role: "user", content: prompt }],
      "You are an adversarial architecture reviewer. Be rigorous and specific.",
      llmSettings,
      { source: "code-agent-evaluateur" }
    );
    const parsed = JSON.parse(extractJSON(response.content));
    if (!Array.isArray(parsed.issues)) return [];
    // Normalize issue objects, defaulting type/severity for sloppy output.
    const issues = parsed.issues.filter((i) => typeof i === "object" && i !== null).map((i) => ({
      type: i.type || "misplaced_file",
      severity: i.severity || "warning",
      message: String(i.message || ""),
      target: i.target ? String(i.target) : void 0
    }));
    const errors = issues.filter((i) => i.severity === "error").length;
    const warnings = issues.filter((i) => i.severity === "warning").length;
    onLog?.("ai-eval", `\xC9valuateur clusters: ${errors} errors, ${warnings} warnings`);
    onAgentEvent?.({
      agent: "evaluateur",
      toolName: "__eval_result__",
      argsSummary: `${errors} errors, ${warnings} warnings`,
      resultSummary: issues.map((i) => `[${i.severity}] ${i.message}`).join("\n"),
      durationMs: Date.now() - evalStartMs
    });
    onBlackboard?.({
      clusterIssues: issues.map((i) => ({
        severity: i.severity,
        message: i.message,
        target: i.target
      }))
    });
    return issues;
  } catch {
    // Evaluation is advisory; swallow failures and continue the pipeline.
    onLog?.("ai-eval", "\xC9valuateur cluster validation failed (non-fatal)");
    return [];
  }
}
2274
// System prompt for the "Synthéseur" agent: instructs the model to trace
// runtime flows across the code graph using the tools declared in
// buildSyntheseurTools(), and to emit a strict JSON flows object (including
// a Mermaid sequence diagram per flow).
var SYNTHESEUR_SYSTEM = `You are a senior software architect identifying RUNTIME FLOWS in a codebase.

A flow = a named sequence tracing how a user action or system event propagates at RUNTIME across multiple files.

TOOLS:
- find_entry_points(): all entry point files (no or few incoming imports). Call this first.
- read_file(path): actual source code. Use this to understand what a file DOES at runtime.
- get_node_relations(node_id): static import edges for a file. Use to discover dependencies.
- get_cluster_files(cluster_name): files in a semantic cluster.

APPROACH \u2014 reason step by step BEFORE generating flows:
STEP 1 \u2014 SURVEY: Call find_entry_points(). Call read_file() on the 2-3 most important entry points (server, app, router).
STEP 2 \u2014 ENUMERATE (do this in your reasoning before outputting anything): Mentally list ALL distinct user-facing operations this system handles. Think:
- What HTTP endpoints exist? (scan server/router/handler files)
- What UI interactions can a user perform?
- What background jobs or system events exist?
Aim for at least 5-10 candidates before selecting.
STEP 3 \u2014 SELECT: From your enumeration, pick 3-8 flows that are distinct, important, and span meaningful boundaries (client\u2192server, handler\u2192storage, etc).
STEP 4 \u2014 TRACE: For each selected flow, call read_file() on the files it crosses to confirm the runtime chain.
STEP 5 \u2014 OUTPUT: Generate the JSON.

The enumeration in STEP 2 is the key to completeness \u2014 if you skip it, important flows will be missed.

WHAT MAKES A GOOD FLOW:
- Spans multiple files across meaningful boundaries (client\u2192server, handler\u2192service\u2192database)
- Reflects what actually happens at runtime, not just what's imported
- Has a clear trigger (user action, HTTP request, system event) and outcome
- Steps say WHAT each file does in this specific flow

RULES:
- Use exact nodeIds from the FILE LIST in the user message.
- Sequence diagrams: descriptive participant aliases (not nodeIds), include return arrows.
- For HTTP boundaries: use "->>" with label "HTTP GET /route" or similar.
- scopeNodeId MUST be either "rootNodeId" (for cross-cluster end-to-end flows) or the nodeId of a D1 PACKAGE node. NEVER use a D2 file nodeId as scopeNodeId \u2014 file-level flows don't exist at this stage. All flows generated here are either cross-system (rootNodeId) or within one domain cluster (D1 package nodeId).

Output ONLY this JSON (no other text):
{
  "flows": [
    {
      "name": "Human-readable flow name",
      "description": "What happens end-to-end (1-2 sentences)",
      "scopeNodeId": "rootNodeId \u2190 use this for flows spanning multiple clusters; OR the D1 package nodeId for flows within one cluster",
      "steps": [
        { "nodeId": "exact-nodeId-from-FILE-LIST", "label": "What this file does in this flow", "order": 0 }
      ],
      "sequenceDiagram": "sequenceDiagram\\n  participant A as Name\\n  A->>B: action\\n  B-->>A: response"
    }
  ]
}`;
2323
/**
 * Tool declarations offered to the Synthéseur agent
 * (mirrors the tool list described in SYNTHESEUR_SYSTEM).
 *
 * @returns array of {name, description, parameters} in JSON-schema shape
 */
function buildSyntheseurTools() {
  // Factory for a one-required-string-argument schema; called per tool so
  // each declaration owns an independent schema object.
  const oneStringArg = (argName, argDescription) => ({
    type: "object",
    properties: { [argName]: { type: "string", description: argDescription } },
    required: [argName]
  });
  return [
    {
      name: "find_entry_points",
      description: "Returns all codebase entry points: files with no or few incoming dependencies (index, main, App, handlers). Start here to identify where flows begin.",
      parameters: { type: "object", properties: {}, required: [] }
    },
    {
      name: "get_node_relations",
      description: "Get all outgoing/incoming call/import relations for a graph node. Use to follow where calls go next and trace cross-file chains.",
      parameters: oneStringArg("node_id", "Graph node ID")
    },
    {
      name: "get_cluster_files",
      description: "List all files (with graph nodeIds) in a semantic cluster. Use to explore a specific domain.",
      parameters: oneStringArg("cluster_name", "Cluster/domain name")
    },
    {
      name: "read_file",
      description: "Read source code of a file (first 200 lines). Use for orchestrator files to confirm call sequences.",
      parameters: oneStringArg("path", "Relative file path from repo root")
    }
  ];
}
2359
// Build the tool executor for the Synthétiseur agent. Returns an async
// dispatcher (toolName, args) => ToolStep where a ToolStep is
// { toolName, args, result, label }. Tools operate read-only on `graph`
// and on `ctx.semanticClusters` / `ctx` file cache.
function buildSyntheseurExecutor(ctx, graph) {
  // Depth-2 (file-level) node paths -> node IDs, so cluster file lists can
  // be resolved back to graph nodes.
  const filePathToNodeId = new Map();
  for (const node of Object.values(graph.nodes)) {
    if (node.sourceRef && node.depth === 2) filePathToNodeId.set(node.sourceRef.filePath, node.id);
  }

  // find_entry_points: depth-2 nodes with zero incoming depends_on edges,
  // or whose file name matches a conventional entry-point pattern.
  const handleFindEntryPoints = (name, args) => {
    const fileNodes = Object.values(graph.nodes).filter((n) => n.depth === 2);
    const incomingCount = new Map();
    for (const n of fileNodes) incomingCount.set(n.id, 0);
    for (const rel of Object.values(graph.relations)) {
      if (rel.type === "depends_on" && incomingCount.has(rel.targetId)) {
        incomingCount.set(rel.targetId, (incomingCount.get(rel.targetId) || 0) + 1);
      }
    }
    const ENTRY_PATTERN = /^(index|main|app|server|cli|background|content|handler)\./i;
    const candidates = fileNodes.filter(
      (n) => (incomingCount.get(n.id) || 0) === 0 || ENTRY_PATTERN.test(n.name)
    );
    // Rank by outgoing (non-contains) relation count and cap at 20 entries.
    const entries = candidates
      .map((n) => {
        const cluster = ctx.semanticClusters?.find((c) => c.files.includes(n.sourceRef?.filePath || ""));
        return {
          nodeId: n.id,
          name: n.name,
          filePath: n.sourceRef?.filePath || null,
          cluster: cluster?.name || null,
          outgoingRelations: Object.values(graph.relations).filter((r) => r.sourceId === n.id && r.type !== "contains").length
        };
      })
      .sort((a, b) => b.outgoingRelations - a.outgoingRelations)
      .slice(0, 20);
    return { toolName: name, args, result: JSON.stringify(entries, null, 2), label: "find_entry_points()" };
  };

  // get_cluster_files: list the files of a named semantic cluster, with the
  // matching graph node (if any) per file.
  const handleGetClusterFiles = (name, args) => {
    const clusterName = String(args.cluster_name || "");
    const cluster = ctx.semanticClusters?.find((c) => c.name === clusterName);
    if (!cluster) {
      const names = ctx.semanticClusters?.map((c) => c.name).join(", ") || "none";
      return {
        toolName: name,
        args,
        result: `Cluster not found. Available: ${names}`,
        label: `get_cluster_files("${clusterName}")`
      };
    }
    const files = cluster.files.map((f) => {
      const nodeId = filePathToNodeId.get(f);
      const node = nodeId ? graph.nodes[nodeId] : null;
      return { path: f, nodeId: nodeId || null, nodeName: node?.name || null, depth: node?.depth ?? null };
    });
    return { toolName: name, args, result: JSON.stringify(files, null, 2), label: `get_cluster_files("${clusterName}")` };
  };

  // get_node_relations: both directions of non-contains relations touching
  // one node, with resolved names/files for the far end.
  const handleGetNodeRelations = (name, args) => {
    const nodeId = String(args.node_id || "");
    const node = graph.nodes[nodeId];
    if (!node) {
      return { toolName: name, args, result: `Node not found: ${nodeId}`, label: `get_node_relations("${nodeId}")` };
    }
    const allRelations = Object.values(graph.relations);
    const outgoing = allRelations
      .filter((r) => r.sourceId === nodeId && r.type !== "contains")
      .map((r) => ({
        direction: "outgoing",
        type: r.type,
        targetId: r.targetId,
        targetName: graph.nodes[r.targetId]?.name || r.targetId,
        targetFile: graph.nodes[r.targetId]?.sourceRef?.filePath || null
      }));
    const incoming = allRelations
      .filter((r) => r.targetId === nodeId && r.type !== "contains")
      .map((r) => ({
        direction: "incoming",
        type: r.type,
        sourceId: r.sourceId,
        sourceName: graph.nodes[r.sourceId]?.name || r.sourceId,
        sourceFile: graph.nodes[r.sourceId]?.sourceRef?.filePath || null
      }));
    return {
      toolName: name,
      args,
      result: JSON.stringify({ nodeId, name: node.name, file: node.sourceRef?.filePath, outgoing, incoming }, null, 2),
      label: `get_node_relations("${node.name}")`
    };
  };

  return async (name, args) => {
    if (name === "find_entry_points") return handleFindEntryPoints(name, args);
    if (name === "get_cluster_files") return handleGetClusterFiles(name, args);
    if (name === "get_node_relations") return handleGetNodeRelations(name, args);
    if (name === "read_file") {
      // Delegates to the shared capped reader (first 200 lines).
      const path = String(args.path || "");
      const content = await readFileCached(ctx, path, 200);
      return { toolName: name, args, result: content, label: `read_file("${path}")` };
    }
    return { toolName: name, args, result: "Unknown tool", label: name };
  };
}
2454
/**
 * Run the Synthétiseur agent: builds a prompt describing the depth-2 file
 * graph (files, dependency edges, semantic clusters) and drives an LLM tool
 * loop that outputs runtime-flow JSON, then validates it into Flow objects.
 *
 * @param ctx             agent context (semanticClusters, file cache, provider)
 * @param graph           the CodeGraph being analyzed
 * @param llmSettings     provider/model/key settings passed to llmService
 * @param onLog           optional (channel, message) log callback
 * @param signal          optional AbortSignal forwarded to the agent loop
 * @param previousIssues  evaluator issues from a prior round (round-2 repair mode)
 * @param onAgentEvent    optional per-tool-call telemetry callback
 * @param scopeCluster    optional cluster to focus flow generation on
 * @param frozenFlowNames flows already verified — excluded from regeneration
 * @param missingFlows    flow names the evaluator asked to add
 * @param onToolStart     optional callback fired before each tool executes
 * @returns a map of flow id -> flow, or null on failure/interrupt
 *   (rethrows LLMRateLimitError/LLMConfigError so callers can surface them)
 */
async function runSyntheseurAgent(ctx, graph, llmSettings, onLog, signal, previousIssues, onAgentEvent, scopeCluster, frozenFlowNames, missingFlows, onToolStart) {
  // Depth-2 nodes are file-level; they are the vocabulary of flow steps.
  const d2Nodes = Object.values(graph.nodes).filter((n) => n.depth === 2);
  const d2ById = new Map(d2Nodes.map((n) => [n.id, n]));
  // file path -> cluster name, for annotating each file line in the prompt.
  const fileToClusterName = /* @__PURE__ */ new Map();
  for (const cluster of ctx.semanticClusters ?? []) {
    for (const fp of cluster.files) fileToClusterName.set(fp, cluster.name);
  }
  const fileLines = d2Nodes.map((n) => {
    const cluster = n.sourceRef ? fileToClusterName.get(n.sourceRef.filePath) ?? "?" : "?";
    return ` ${n.id} ${n.sourceRef?.filePath ?? n.name} [${cluster}]`;
  }).join("\n");
  // Only depends_on edges between two depth-2 nodes are shown to the model.
  const edgeLines = Object.values(graph.relations).filter((r) => r.type === "depends_on").map((r) => {
    const src = d2ById.get(r.sourceId);
    const tgt = d2ById.get(r.targetId);
    if (!src || !tgt) return null;
    return ` ${src.sourceRef?.filePath ?? src.id} \u2192 ${tgt.sourceRef?.filePath ?? tgt.id}`;
  }).filter(Boolean).join("\n");
  const clusterSummary = (ctx.semanticClusters ?? []).map(
    (c) => ` "${c.name}": ${c.files.join(", ")}`
  ).join("\n");
  // --- Prompt assembly (template literals intentionally contain blank lines) ---
  let prompt = scopeCluster ? `Generate runtime flows FOCUSED ON the "${scopeCluster.name}" domain cluster.

` : `Generate runtime flows for this codebase.

`;
  prompt += `FILES (nodeId path [cluster]):
${fileLines || "(none)"}

`;
  prompt += `DEPENDENCY EDGES (A \u2192 B means A imports B):
${edgeLines || "(none)"}

`;
  prompt += `SEMANTIC CLUSTERS:
${clusterSummary || "(none)"}

`;
  prompt += `rootNodeId="${graph.rootNodeId}"

`;
  if (scopeCluster) {
    // Cluster-scoped generation: constrain the agent to the focus domain.
    prompt += `FOCUS: Generate 2-5 flows specifically about the "${scopeCluster.name}" domain.
`;
    prompt += `- Flows should involve at least one of these files: ${scopeCluster.files.join(", ")}
`;
    prompt += `- Flows may cross into other clusters (that's fine \u2014 show how this domain interacts with the rest)
`;
    prompt += `- Set scopeNodeId="${scopeCluster.nodeId}" on ALL generated flows

`;
  }
  prompt += "Use find_entry_points() to confirm entry points, read_file() to understand orchestration logic, then output the flows JSON.";
  if (previousIssues || frozenFlowNames || missingFlows) {
    // Round-2 repair mode: split evaluator errors into per-flow fixes and
    // untargeted general issues, then spell out exactly what to output.
    const flowErrors = /* @__PURE__ */ new Map();
    const untargetedErrors = [];
    for (const issue of (previousIssues ?? []).filter((i) => i.severity === "error")) {
      if (issue.target) {
        if (!flowErrors.has(issue.target)) flowErrors.set(issue.target, []);
        flowErrors.get(issue.target).push(issue.message);
      } else {
        untargetedErrors.push(issue.message);
      }
    }
    if (frozenFlowNames && frozenFlowNames.length > 0) {
      prompt += `

FROZEN FLOWS (already verified correct \u2014 do NOT regenerate, do not include in output):
${frozenFlowNames.map((n) => ` \u2713 "${n}"`).join("\n")}`;
    }
    const toFix = [...flowErrors.keys()];
    const toAdd = missingFlows ?? [];
    const totalExpected = toFix.length + toAdd.length;
    if (toFix.length > 0) {
      prompt += `

FLOWS TO REGENERATE WITH FIXES (include ALL of these in your output):`;
      for (const [name, errs] of flowErrors) {
        prompt += `
- "${name}"
 Fix: ${errs.join("; ")}`;
      }
    }
    if (toAdd.length > 0) {
      prompt += `

NEW FLOWS TO ADD (include ALL of these in your output):
${toAdd.map((n) => ` + "${n}"`).join("\n")}`;
    }
    if (untargetedErrors.length > 0) {
      prompt += `

GENERAL ISSUES TO AVOID:
${untargetedErrors.map((e) => `- ${e}`).join("\n")}`;
    }
    if (totalExpected > 0) {
      prompt += `

OUTPUT REQUIREMENT: your JSON must contain exactly ${totalExpected} flow(s) \u2014 one for each item listed above (flows to fix + new flows). Do not add or drop any.`;
    }
  }
  onLog?.("ai-synth", "Synth\xE9tiseur: tracing runtime flows...");
  try {
    const rawSyntheseurExecutor = buildSyntheseurExecutor(ctx, graph);
    // Wrap the executor with telemetry only when someone is listening.
    const syntheseurExecutor = onAgentEvent || onToolStart ? async (name, args) => {
      const argsSummary = args.path || args.query || Object.values(args).join(", ") || "";
      onToolStart?.("syntheseur", name, argsSummary);
      const t0 = Date.now();
      const step = await rawSyntheseurExecutor(name, args);
      onAgentEvent?.({
        agent: "syntheseur",
        toolName: name,
        argsSummary,
        resultSummary: step.result.slice(0, 300),
        durationMs: Date.now() - t0
      });
      return step;
    } : rawSyntheseurExecutor;
    const result = await llmService.runAgentLoop(
      [{ role: "user", content: prompt }],
      SYNTHESEUR_SYSTEM,
      buildSyntheseurTools(),
      syntheseurExecutor,
      llmSettings,
      { signal, source: "code-agent-syntheseur" }
    );
    if (result.interrupted) {
      // Loop hit its iteration cap before producing a final answer.
      onLog?.("ai-synth", "Synth\xE9tiseur: agent reached max iterations without producing output \u2014 no flows generated");
      return null;
    }
    onLog?.("ai-synth", `Synth\xE9tiseur: ${result.toolSteps.length} tool calls \u2014 parsing flows`);
    const jsonStr = extractJSON(result.content);
    const parsed = JSON.parse(jsonStr);
    // Resolve/validate the model's flow JSON against the graph.
    const flows = validateAndBuildFlows(parsed, graph);
    if (!flows) {
      onLog?.("ai-synth", "Synth\xE9tiseur: flow validation failed");
      return null;
    }
    onLog?.("ai-synth", `Synth\xE9tiseur: ${Object.keys(flows).length} valid flows generated`);
    return flows;
  } catch (err) {
    // Configuration and rate-limit errors must propagate to the caller;
    // everything else (bad JSON, transient failures) degrades to null.
    if (err instanceof LLMRateLimitError || err instanceof LLMConfigError) throw err;
    onLog?.("ai-synth", `Synth\xE9tiseur failed: ${err instanceof Error ? err.message : String(err)}`);
    return null;
  }
}
2599
// Validate the Synthétiseur's raw flow JSON against the graph and build the
// flow map. Unknown node IDs are resolved by (lowercased, extension-stripped)
// node name; flows with fewer than two resolvable steps are dropped. A
// Mermaid sequence diagram is synthesized when the model omitted one.
// Returns flowId -> flow, or null when nothing valid survives.
function validateAndBuildFlows(raw, graph) {
  if (typeof raw !== "object" || raw === null) return null;
  const payload = raw;
  const rawFlows = Array.isArray(payload.flows) ? payload.flows : null;
  if (rawFlows === null) return null;

  // Flow steps may only reference the root node or depth-1/depth-2 nodes.
  const allNodes = Object.values(graph.nodes);
  const allValidIds = new Set([graph.rootNodeId]);
  for (const n of allNodes) {
    if (n.depth === 1 || n.depth === 2) allValidIds.add(n.id);
  }

  // Fallback lookup: lowercase name (with and without extension) -> node id.
  // First writer wins on collisions.
  const nameToId = new Map();
  for (const node of allNodes) {
    const lower = node.name.toLowerCase();
    if (!nameToId.has(lower)) nameToId.set(lower, node.id);
    const noExt = node.name.replace(/\.[^.]+$/, "").toLowerCase();
    if (!nameToId.has(noExt)) nameToId.set(noExt, node.id);
  }
  const resolveId = (id) => {
    if (allValidIds.has(id)) return id;
    const byName = nameToId.get(id.toLowerCase());
    return byName && allValidIds.has(byName) ? byName : null;
  };

  const flows = {};
  for (const item of rawFlows) {
    if (typeof item !== "object" || item === null) continue;
    const { name, description, scopeNodeId, steps, sequenceDiagram } = item;
    if (typeof name !== "string" || !name) continue;
    if (!Array.isArray(steps) || steps.length < 2) continue;

    // A flow's scope must be the root or a depth-1 node; otherwise fall
    // back to the root node.
    let resolvedScope = typeof scopeNodeId === "string" ? resolveId(scopeNodeId) : null;
    if (resolvedScope !== null) {
      const scopeNode = graph.nodes[resolvedScope];
      if (scopeNode && scopeNode.depth > 1) resolvedScope = null;
    }
    if (resolvedScope === null) resolvedScope = graph.rootNodeId;

    // Keep only steps whose nodeId resolves; default order is the step's
    // position among the surviving steps.
    const validSteps = [];
    for (const rawStep of steps) {
      if (typeof rawStep !== "object" || rawStep === null) continue;
      const s = rawStep;
      if (typeof s.nodeId !== "string") continue;
      const resolved = resolveId(s.nodeId);
      if (resolved === null) continue;
      validSteps.push({
        nodeId: resolved,
        label: typeof s.label === "string" ? s.label : s.nodeId,
        order: typeof s.order === "number" ? s.order : validSteps.length
      });
    }
    if (validSteps.length < 2) continue;

    // Synthesize a Mermaid diagram if the model didn't provide one.
    let seqDiagram = typeof sequenceDiagram === "string" ? sequenceDiagram : "";
    if (!seqDiagram.startsWith("sequenceDiagram")) {
      const participantLines = [];
      const arrowLines = [];
      for (let i = 0; i < validSteps.length; i++) {
        const step = validSteps[i];
        participantLines.push(` participant ${step.nodeId} as ${step.label.replace(/\.[^.]+$/, "")}`);
        if (i + 1 < validSteps.length) {
          arrowLines.push(` ${step.nodeId}->>${validSteps[i + 1].nodeId}: calls`);
        }
      }
      seqDiagram = `sequenceDiagram
${participantLines.join("\n")}
${arrowLines.join("\n")}`;
    }

    const id = generateId2();
    flows[id] = {
      id,
      name,
      description: typeof description === "string" ? description : "",
      scopeNodeId: resolvedScope,
      steps: validSteps,
      sequenceDiagram: seqDiagram
    };
  }
  return Object.keys(flows).length > 0 ? flows : null;
}
2670
// System prompt for the Évaluateur (adversarial reviewer) agent that
// verifies generated runtime flows against the actual source. The expected
// model output format is the JSON object described at the end of the prompt
// ({ issues: [...], missing: [...] }), parsed by evaluateFlows().
var EVALUATEUR_FLOW_SYSTEM = `You are an adversarial code reviewer verifying that runtime flows accurately represent the source code.

You have one tool: read_file(path) \u2014 read source code to verify claims.

PROCESS:
1. Read the entry point file (step 0) of each flow.
2. Verify: does the code actually connect to the next step at runtime?
 - Static import \u2192 look for require() / import statements
 - HTTP call \u2192 look for fetch(), axios, XMLHttpRequest, http.request()
 - Event \u2192 look for emit(), addEventListener(), on()
3. Read subsequent files as needed to verify the chain.
4. Be STRICT \u2014 if a connection is not verifiable in the code, flag it as an error.
5. Be ACCURATE \u2014 if you read the code and the connection IS there, do NOT flag it.
6. IMPORTANT: read_file only returns the first 200 lines. If a step is NOT visible in the snippet but the entry point file is large and the function could plausibly exist further down, do NOT reject it \u2014 instead, mark it as a warning (cannot verify) rather than an error.

Flag as ERROR:
- A claimed connection (import, HTTP call, event) that does not exist in the source
- A step whose label is completely wrong about what the file does
- A file included in a flow that plays no role whatsoever

Flag as WARNING:
- A step label that is vague or partially inaccurate
- A flow that skips important intermediate files

Do NOT flag:
- HTTP calls between client and server (verify with read_file that fetch/axios/routes exist)
- Fan-out from a common parent (A imports B, A imports C, A imports D \u2014 all valid)
- Transitive import chains (A imports B imports C)

COMPLETENESS CHECK:
After verifying existing flows, ask yourself: based on the source files you read, are there important user journeys or system events that are clearly NOT represented? Be conservative \u2014 only flag things that are obviously important AND obviously absent (not minor variations, not error edge cases).

Output ONLY this JSON after your investigation:
{
 "issues": [
 {
 "type": "invalid_flow_step" | "trivial_flow" | "hallucinated_relation",
 "severity": "warning" | "error",
 "message": "specific description referencing what you found in the code",
 "target": "flow name"
 }
 ],
 "missing": ["Suggested missing flow name 1", "Suggested missing flow name 2"]
}
(If nothing important is missing, output "missing": [])`;
// Sentinel result returned for duplicate read_file calls; evaluateFlows uses
// it to skip telemetry for deduplicated reads.
var ALREADY_READ = "(already provided above)";
2716
// Build the single-tool executor for the Évaluateur agent. Only "read_file"
// is supported; each path is served at most once per session — repeats get
// the ALREADY_READ sentinel so the agent can't burn iterations re-reading.
function buildEvaluateurFlowExecutor(ctx) {
  const servedPaths = new Set();
  return async (toolName, toolArgs) => {
    if (toolName !== "read_file") {
      return { toolName, args: toolArgs, result: "(unknown tool)", label: toolName };
    }
    const filePath = String(toolArgs.path ?? "");
    if (servedPaths.has(filePath)) {
      // Duplicate request: answer with the sentinel instead of re-reading.
      return { toolName, args: toolArgs, result: ALREADY_READ, label: `read_file(${filePath}) [dup]` };
    }
    const content = await readFileCached(ctx, filePath, 200);
    servedPaths.add(filePath);
    return { toolName, args: toolArgs, result: content, label: `read_file(${filePath})` };
  };
}
2731
/**
 * Run the Évaluateur agent over generated flows: summarizes the flows,
 * lets the model verify each claimed connection via read_file, and parses
 * the resulting issues/missing lists.
 *
 * @returns { issues, missing } — empty on any failure (best-effort step).
 */
async function evaluateFlows(ctx, graph, flows, llmSettings, onLog, onAgentEvent, onBlackboard, onToolStart) {
  // Compact, file-path-based view of the flows for the prompt.
  const flowSummary = Object.values(flows).map((f) => ({
    name: f.name,
    description: f.description,
    steps: f.steps.map((s) => ({
      file: graph.nodes[s.nodeId]?.sourceRef?.filePath ?? s.nodeId,
      label: s.label
    }))
  }));
  const prompt = `Verify these runtime flows against the actual source code. Use read_file() to check each connection before judging.

FLOWS TO VERIFY:
${JSON.stringify(flowSummary, null, 2)}

For each flow, read the source files of the steps and verify the claimed connections exist in the code.`;
  onLog?.("ai-eval", "\xC9valuateur: validating flows against source code...");
  const evalFlowStartMs = Date.now();
  const rawExecutor = buildEvaluateurFlowExecutor(ctx);
  // Wrap with telemetry only when listeners exist; duplicate reads
  // (ALREADY_READ sentinel) are not reported as events.
  const executor = onAgentEvent || onToolStart ? async (name, args) => {
    const argsSummary = String(args.path ?? args.file ?? "");
    onToolStart?.("evaluateur", name, argsSummary);
    const t0 = Date.now();
    const step = await rawExecutor(name, args);
    if (step.result !== ALREADY_READ) {
      onAgentEvent?.({
        agent: "evaluateur",
        toolName: name,
        argsSummary,
        resultSummary: step.result.slice(0, 300),
        durationMs: Date.now() - t0
      });
    }
    return step;
  } : rawExecutor;
  // Single-tool schema advertised to the model.
  const evalTools = [{
    name: "read_file",
    description: "Read source code of a file to verify a flow step connection",
    parameters: {
      type: "object",
      properties: { path: { type: "string", description: "File path relative to project root" } },
      required: ["path"]
    }
  }];
  try {
    const result = await llmService.runAgentLoop(
      [{ role: "user", content: prompt }],
      EVALUATEUR_FLOW_SYSTEM,
      evalTools,
      executor,
      llmSettings,
      { source: "code-agent-evaluateur" }
    );
    const parsed = JSON.parse(extractJSON(result.content));
    if (!Array.isArray(parsed.issues)) return { issues: [], missing: [] };
    // Normalize issue objects: default type/severity, stringify message/target.
    const issues = parsed.issues.filter((i) => typeof i === "object" && i !== null).map((i) => ({
      type: i.type || "invalid_flow_step",
      severity: i.severity || "warning",
      message: String(i.message || ""),
      target: i.target ? String(i.target) : void 0
    }));
    const missing = Array.isArray(parsed.missing) ? parsed.missing.filter((s) => typeof s === "string") : [];
    const errors = issues.filter((i) => i.severity === "error").length;
    const warnings = issues.filter((i) => i.severity === "warning").length;
    onLog?.("ai-eval", `\xC9valuateur flows: ${errors} errors, ${warnings} warnings, ${missing.length} missing (${result.toolSteps.length} files read)`);
    // Synthetic event summarizing the whole evaluation pass.
    onAgentEvent?.({
      agent: "evaluateur",
      toolName: "__eval_result__",
      argsSummary: `${errors} errors, ${warnings} warnings`,
      resultSummary: issues.length === 0 && missing.length === 0 ? "\u2713 All flows verified" : [
        ...issues.map((i) => `[${i.severity}] ${i.message}`),
        ...missing.map((m) => `[missing] ${m}`)
      ].join("\n"),
      durationMs: Date.now() - evalFlowStartMs
    });
    onBlackboard?.({
      flowIssues: issues.map((i) => ({
        severity: i.severity,
        message: i.message,
        target: i.target
      }))
    });
    return { issues, missing };
  } catch {
    // NOTE(review): this bare catch also swallows rate-limit/config errors,
    // unlike runSyntheseurAgent which rethrows them — confirm intentional.
    onLog?.("ai-eval", "\xC9valuateur flow validation failed (non-fatal)");
    return { issues: [], missing: [] };
  }
}
2818
/**
 * Orchestrate the clustering phase: run the Analyste agent to produce
 * semantic clusters, validate them, optionally re-run once to fix errors,
 * then derive module objects (with cross-cluster dependencies resolved from
 * imports) and return the enriched analysis.
 *
 * Fix: import-candidate membership checks now use a Set instead of
 * Array.prototype.includes — the old code was O(files) per candidate with up
 * to 10 candidates per import, i.e. accidentally quadratic in repo size.
 *
 * @throws Error when no clusters could be produced at all.
 * @returns {{ analysis, clusters }}
 */
async function orchestrateCodebaseAnalysis(analysis, provider, llmSettings, onProgress, onLog, signal, onAgentEvent, onBlackboard, onToolStart) {
  const ctx = buildContext(analysis, provider);
  let clusters = null;
  let issues = [];
  onProgress?.("Semantic clustering", 1, 3);
  clusters = await runAnalysteAgent(ctx, llmSettings, onLog, signal, void 0, onAgentEvent, onToolStart);
  if (clusters && clusters.length > 0) {
    ctx.semanticClusters = clusters;
    onBlackboard?.({ clusters: clusters.map((c) => ({ name: c.name, fileCount: c.files.length, files: c.files })) });
    onProgress?.("Validating clusters", 2, 3);
    issues = await evaluateClusters(ctx, clusters, llmSettings, onLog, onAgentEvent, onBlackboard);
    const errorCount = issues.filter((i) => i.severity === "error").length;
    if (errorCount >= 1) {
      // One repair round: re-run the Analyste with the evaluator's errors.
      onLog?.("ai-cluster", `Analyste round 2 (${errorCount} errors to fix)...`);
      onProgress?.("Re-clustering (round 2)", 3, 3);
      const round2 = await runAnalysteAgent(ctx, llmSettings, onLog, signal, issues, onAgentEvent, onToolStart);
      if (round2 && round2.length > 0) {
        clusters = round2;
        ctx.semanticClusters = clusters;
        onBlackboard?.({
          clusters: clusters.map((c) => ({ name: c.name, fileCount: c.files.length, files: c.files })),
          clusterIssues: []
          // cleared — round 2 attempted to fix them
        });
      }
    }
  }
  onProgress?.("Building modules", 3, 3);
  if (!clusters || clusters.length === 0) {
    throw new Error("Analyste produced no clusters. The model may be overloaded \u2014 try again or switch to a different model in AI Settings.");
  }
  const fileByPath = ctx.fileByPath;
  // file path -> owning cluster name (for cross-cluster dependency edges).
  const fileToCluster = /* @__PURE__ */ new Map();
  for (const cluster of clusters) {
    for (const fp of cluster.files) fileToCluster.set(fp, cluster.name);
  }
  // Set gives O(1) candidate lookups in resolveImport (was an Array scanned
  // with .includes for every extension candidate of every import).
  const allFilePaths = new Set(fileByPath.keys());
  /**
   * Resolve a relative or "@/"-aliased import specifier to a known file
   * path, trying common TS/JS/Python extensions and index files.
   * Returns null for external/unresolvable imports.
   */
  function resolveImport(source, fromFile) {
    if (!source.startsWith(".") && !source.startsWith("@/")) return null;
    let base;
    if (source.startsWith("@/")) {
      // "@/x/y" aliases the repo root.
      base = source.slice(2);
    } else {
      // Manually normalize "./" and "../" segments against fromFile's dir.
      const dir = fromFile.substring(0, fromFile.lastIndexOf("/"));
      const parts = source.split("/");
      let cur = dir;
      for (const p of parts) {
        if (p === ".") continue;
        else if (p === "..") {
          const i = cur.lastIndexOf("/");
          cur = i >= 0 ? cur.substring(0, i) : "";
        } else {
          cur = cur ? `${cur}/${p}` : p;
        }
      }
      base = cur;
    }
    const exts = [".ts", ".tsx", ".js", ".jsx", ".py", ""];
    const idxs = ["/index.ts", "/index.tsx", "/index.js", "/index.jsx"];
    for (const ext of exts) {
      const candidate = base + ext;
      if (allFilePaths.has(candidate)) return candidate;
    }
    for (const idx of idxs) {
      const candidate = base + idx;
      if (allFilePaths.has(candidate)) return candidate;
    }
    return null;
  }
  // Turn each cluster into a module, computing its dependencies on other
  // clusters from the resolved internal imports of its files.
  const modules = clusters.map((cluster) => {
    const files = cluster.files.map((fp) => fileByPath.get(fp)).filter((f) => f !== void 0);
    const depSet = /* @__PURE__ */ new Set();
    for (const file of files) {
      for (const imp of file.imports) {
        if (imp.isExternal) continue;
        const resolved = resolveImport(imp.source, file.filePath);
        if (resolved) {
          const targetCluster = fileToCluster.get(resolved);
          if (targetCluster && targetCluster !== cluster.name) depSet.add(targetCluster);
        }
      }
    }
    return {
      name: cluster.name,
      description: cluster.description,
      path: cluster.name,
      files,
      dependencies: Array.from(depSet)
    };
  });
  const enrichedAnalysis = {
    ...analysis,
    modules
  };
  return { analysis: enrichedAnalysis, clusters };
}
2914
/**
 * Orchestrate the flow-generation phase: run the Synthétiseur to generate
 * flows, have the Évaluateur verify them against source, and optionally run
 * one repair round that regenerates only the flagged flows while keeping
 * verified ("frozen") ones.
 *
 * @param scopeCluster optional cluster to focus on; when given, every
 *   returned flow is re-scoped to that cluster's node.
 * @returns flowId -> flow map (empty object when nothing valid was produced)
 */
async function orchestrateFlowGeneration(graph, clusters, provider, llmSettings, onProgress, onLog, signal, onAgentEvent, onBlackboard, scopeCluster, onToolStart) {
  // Minimal context: this phase starts from the graph, not a fresh analysis.
  const ctx = {
    analysis: { modules: [], externalDeps: [], entryPoints: [], totalFiles: 0, totalSymbols: 0 },
    // "src→tgt" path pairs for every AST-derived depends_on edge.
    astImportPairs: new Set(
      Object.values(graph.relations).filter((r) => r.type === "depends_on").map((r) => {
        const src = graph.nodes[r.sourceId]?.sourceRef?.filePath;
        const tgt = graph.nodes[r.targetId]?.sourceRef?.filePath;
        return src && tgt ? `${src}\u2192${tgt}` : null;
      }).filter((p) => p !== null)
    ),
    fileByPath: /* @__PURE__ */ new Map(),
    provider,
    fileCache: /* @__PURE__ */ new Map(),
    semanticClusters: clusters
  };
  let flows = null;
  let issues = [];
  onProgress?.("Generating flows", 1, 3);
  flows = await runSyntheseurAgent(ctx, graph, llmSettings, onLog, signal, void 0, onAgentEvent, scopeCluster, void 0, void 0, onToolStart);
  if (flows && Object.keys(flows).length > 0) {
    onBlackboard?.({ flows: Object.values(flows).map((f) => ({ name: f.name, stepCount: f.steps.length })) });
    onProgress?.("Validating flows", 2, 3);
    const { issues: evalIssues, missing } = await evaluateFlows(ctx, graph, flows, llmSettings, onLog, onAgentEvent, onBlackboard, onToolStart);
    issues = evalIssues;
    const errorCount = issues.filter((i) => i.severity === "error").length;
    if (errorCount >= 1 || missing.length > 0) {
      // Repair round: regenerate flagged flows and add missing ones.
      onLog?.("ai-synth", `Synth\xE9tiseur round 2 (${errorCount} errors to fix, ${missing.length} to add)...`);
      onProgress?.("Re-generating flows (round 2)", 3, 3);
      const errorTargets = new Set(
        issues.filter((i) => i.severity === "error" && i.target).map((i) => i.target)
      );
      // An untargeted error taints everything — nothing can be frozen then.
      const hasUntargetedErrors = issues.some((i) => i.severity === "error" && !i.target);
      const frozenFlowNames = hasUntargetedErrors ? [] : Object.values(flows).filter((f) => !errorTargets.has(f.name)).map((f) => f.name);
      const round2 = await runSyntheseurAgent(
        ctx,
        graph,
        llmSettings,
        onLog,
        signal,
        issues,
        onAgentEvent,
        scopeCluster,
        frozenFlowNames.length > 0 ? frozenFlowNames : void 0,
        missing.length > 0 ? missing : void 0,
        onToolStart
      );
      if (round2 && Object.keys(round2).length > 0) {
        if (frozenFlowNames.length > 0) {
          // Merge: keep frozen flows from round 1, take the rest from round 2.
          const frozenMap = {};
          for (const f of Object.values(flows)) {
            if (frozenFlowNames.includes(f.name)) frozenMap[f.id] = f;
          }
          flows = { ...frozenMap, ...round2 };
        } else {
          flows = round2;
        }
        onBlackboard?.({ flows: Object.values(flows).map((f) => ({ name: f.name, stepCount: f.steps.length })) });
        // Keep only issues not addressed by the regeneration on the blackboard.
        const regenNames = /* @__PURE__ */ new Set([...errorTargets, ...missing]);
        onBlackboard?.({
          flowIssues: issues.filter((i) => i.severity !== "error" || !i.target || !regenNames.has(i.target)).map((i) => ({ severity: i.severity, message: i.message, target: i.target }))
        });
      }
    }
  }
  // When generation was cluster-focused, force every flow's scope to that
  // cluster's node regardless of what the model set.
  if (scopeCluster && flows) {
    const rescoped = {};
    for (const [id, flow] of Object.entries(flows)) {
      rescoped[id] = { ...flow, scopeNodeId: scopeCluster.nodeId };
    }
    flows = rescoped;
  }
  if (!flows || Object.keys(flows).length === 0) {
    onLog?.("ai-synth", "Synth\xE9tiseur produced no valid flows");
    return {};
  }
  onLog?.("ai-synth", `Flow generation complete: ${Object.keys(flows).length} flows`);
  return flows;
}
2992
+
2993
+ // src/tools/codegraph.ts
2994
// Produce a high-level summary of a CodeGraph: counts, per-kind/per-type
// tallies, and the depth-1 ("package") nodes with their child counts.
function graphSummary(graph) {
  // Tally `keyOf(item)` occurrences into a plain object.
  const tally = (items, keyOf) => {
    const counts = {};
    for (const item of items) {
      const key = keyOf(item);
      counts[key] = (counts[key] ?? 0) + 1;
    }
    return counts;
  };
  const nodeList = Object.values(graph.nodes);
  const relationList = Object.values(graph.relations);
  const packages = [];
  for (const node of nodeList) {
    if (node.depth === 1) {
      packages.push({ id: node.id, name: node.name, childCount: node.children.length });
    }
  }
  return {
    id: graph.id,
    name: graph.name,
    repoId: graph.repoId,
    workspaceId: graph.workspaceId,
    nodeCount: nodeList.length,
    relationCount: relationList.length,
    nodesByKind: tally(nodeList, (n) => n.kind),
    relationsByType: tally(relationList, (r) => r.type),
    packages,
    createdAt: graph.createdAt,
    updatedAt: graph.updatedAt
  };
}
3015
// Build an LLMSettings object with exactly one active provider configured.
// `model` overrides the per-provider default when supplied; the two inactive
// provider slots stay null.
function buildLLMSettings(provider, apiKey, model) {
  const defaultModels = {
    gemini: "gemini-2.0-flash",
    openai: "gpt-4o-mini",
    anthropic: "claude-sonnet-4-6"
  };
  const providers = { gemini: null, openai: null, anthropic: null };
  if (Object.hasOwn(providers, provider)) {
    providers[provider] = { provider, apiKey, model: model ?? defaultModels[provider] };
  }
  return { activeProvider: provider, providers };
}
3025
+ function registerCodeGraphTools(server2) {
3026
+ server2.tool(
3027
+ "scan_codebase",
3028
+ "Scan a local directory, build a CodeGraph with LLM semantic clustering and flow generation. API keys are read from environment variables only (GEMINI_API_KEY, OPENAI_API_KEY, ANTHROPIC_API_KEY) \u2014 never pass keys as parameters.",
3029
+ {
3030
+ directory: z3.string().describe("Absolute path to the directory to scan"),
3031
+ workspace_id: z3.string().describe("Workspace ID to store the CodeGraph in"),
3032
+ repo_name: z3.string().optional().describe("Display name for this repo (defaults to directory basename)"),
3033
+ llm_provider: z3.enum(["gemini", "openai", "anthropic"]).optional().describe("LLM provider \u2014 auto-detected from env vars if omitted"),
3034
+ llm_model: z3.string().optional().describe("Model override (e.g. gemini-2.5-flash, gpt-4o, claude-sonnet-4-6)")
3035
+ },
3036
+ async ({ directory, workspace_id, repo_name, llm_provider: rawProvider, llm_model }) => {
3037
+ const llm_provider = rawProvider ?? (process.env.GEMINI_API_KEY || process.env.API_KEY ? "gemini" : process.env.OPENAI_API_KEY ? "openai" : process.env.ANTHROPIC_API_KEY ? "anthropic" : void 0);
3038
+ const llm_api_key = llm_provider === "gemini" ? process.env.GEMINI_API_KEY ?? process.env.API_KEY : llm_provider === "openai" ? process.env.OPENAI_API_KEY : llm_provider === "anthropic" ? process.env.ANTHROPIC_API_KEY : void 0;
3039
+ if (!existsSync2(directory)) {
3040
+ return { content: [{ type: "text", text: `Directory not found: ${directory}` }], isError: true };
3041
+ }
3042
+ const workspaces = await storage.listWorkspaces();
3043
+ if (!workspaces.find((w) => w.id === workspace_id)) {
3044
+ return { content: [{ type: "text", text: `Workspace "${workspace_id}" not found` }], isError: true };
3045
+ }
3046
+ const name = repo_name ?? directory.split("/").pop() ?? directory;
3047
+ const repoId = `repo-${Date.now()}`;
3048
+ const provider = new NodeFileSystemProvider(directory);
3049
+ let analysisResult = await codebaseAnalyzerService.analyzeCodebase(provider);
3050
+ const logs = [];
3051
+ const onLog = (_type, msg) => {
3052
+ logs.push(msg);
3053
+ };
3054
+ let clusters = [];
3055
+ const usedLLM = !!(llm_provider && llm_api_key);
3056
+ if (usedLLM) {
3057
+ const llmSettings = buildLLMSettings(llm_provider, llm_api_key, llm_model);
3058
+ try {
3059
+ const orchestrated = await orchestrateCodebaseAnalysis(
3060
+ analysisResult,
3061
+ provider,
3062
+ llmSettings,
3063
+ void 0,
3064
+ onLog
3065
+ );
3066
+ analysisResult = orchestrated.analysis;
3067
+ clusters = orchestrated.clusters;
3068
+ } catch (err) {
3069
+ const msg = err instanceof Error ? err.message : String(err);
3070
+ return { content: [{ type: "text", text: `LLM clustering failed: ${msg}` }], isError: true };
3071
+ }
3072
+ }
3073
+ let graph = await parseCodebaseToGraph(analysisResult, repoId, name, workspace_id, provider);
3074
+ if (usedLLM && clusters.length > 0) {
3075
+ const llmSettings = buildLLMSettings(llm_provider, llm_api_key, llm_model);
3076
+ try {
3077
+ const flows = await orchestrateFlowGeneration(
3078
+ graph,
3079
+ clusters,
3080
+ provider,
3081
+ llmSettings,
3082
+ void 0,
3083
+ onLog
3084
+ );
3085
+ if (Object.keys(flows).length > 0) {
3086
+ graph = { ...graph, flows, updatedAt: Date.now() };
3087
+ }
3088
+ } catch (err) {
3089
+ const msg = err instanceof Error ? err.message : String(err);
3090
+ logs.push(`Flow generation failed (${msg})`);
3091
+ }
3092
+ }
3093
+ await storage.saveCodeGraph(graph);
3094
+ const anomalies = codeGraphModelService.validateGraph(graph);
3095
+ const summary = graphSummary(graph);
3096
+ const flowCount = Object.keys(graph.flows ?? {}).length;
3097
+ return {
3098
+ content: [{
3099
+ type: "text",
3100
+ text: JSON.stringify({
3101
+ message: `CodeGraph "${name}" created successfully`,
3102
+ graphId: graph.id,
3103
+ pipeline: usedLLM ? "agentic (LLM semantic clustering + flow generation)" : "static analysis only",
3104
+ filesScanned: analysisResult.totalFiles,
3105
+ totalSymbols: analysisResult.totalSymbols,
3106
+ modules: analysisResult.modules.length,
3107
+ semanticClusters: clusters.length,
3108
+ flows: flowCount,
3109
+ externalDeps: analysisResult.externalDeps.length,
3110
+ entryPoints: analysisResult.entryPoints,
3111
+ summary,
3112
+ anomalies: anomalies.slice(0, 10),
3113
+ logs: logs.slice(-20)
3114
+ }, null, 2)
3115
+ }]
3116
+ };
3117
+ }
3118
+ );
3119
// Tool: list_codegraphs — enumerate stored CodeGraphs, optionally scoped to one workspace.
server2.tool(
  "list_codegraphs",
  "List all stored CodeGraphs, optionally filtered by workspace",
  { workspace_id: z3.string().optional().describe("Filter by workspace ID") },
  async ({ workspace_id }) => {
    const graphs = await storage.listCodeGraphs(workspace_id);
    // When the store is empty, point the caller at scan_codebase instead of returning "[]".
    const text = graphs.length === 0
      ? "No CodeGraphs found. Use scan_codebase to create one."
      : JSON.stringify(graphs, null, 2);
    return { content: [{ type: "text", text }] };
  }
);
// Tool: get_codegraph_summary — return the precomputed high-level summary for one graph.
server2.tool(
  "get_codegraph_summary",
  "Get a high-level summary of a CodeGraph (modules, node counts, dependency stats)",
  { graph_id: z3.string().describe("CodeGraph ID") },
  async ({ graph_id }) => {
    const graph = await storage.getCodeGraph(graph_id);
    if (graph) {
      const summaryJson = JSON.stringify(graphSummary(graph), null, 2);
      return { content: [{ type: "text", text: summaryJson }] };
    }
    return { content: [{ type: "text", text: `CodeGraph "${graph_id}" not found` }], isError: true };
  }
);
// Tool: get_codegraph_nodes — list graph nodes, narrowed by any combination of
// depth, kind, and parent. Omitted filters are no-ops.
server2.tool(
  "get_codegraph_nodes",
  "Get nodes from a CodeGraph, optionally filtered by depth or kind",
  {
    graph_id: z3.string().describe("CodeGraph ID"),
    depth: z3.number().int().min(0).max(4).optional().describe("Filter by depth: 0=system, 1=package, 2=module, 3=class/function, 4=method/field"),
    kind: z3.enum(["system", "package", "module", "class", "function", "interface", "variable", "method", "field"]).optional().describe("Filter by node kind"),
    parent_id: z3.string().optional().describe("Filter by parent node ID (list direct children)")
  },
  async ({ graph_id, depth, kind, parent_id }) => {
    const graph = await storage.getCodeGraph(graph_id);
    if (!graph) {
      return { content: [{ type: "text", text: `CodeGraph "${graph_id}" not found` }], isError: true };
    }
    // Every supplied filter must agree for a node to be included.
    const matches = Object.values(graph.nodes).filter(
      (n) =>
        (depth === undefined || n.depth === depth) &&
        (kind === undefined || n.kind === kind) &&
        (parent_id === undefined || n.parentId === parent_id)
    );
    // Project each node into a compact, JSON-friendly shape for the client.
    const shaped = matches.map((n) => ({
      id: n.id,
      name: n.name,
      kind: n.kind,
      depth: n.depth,
      parentId: n.parentId,
      childCount: n.children.length,
      sourceRef: n.sourceRef ? { filePath: n.sourceRef.filePath, lines: `${n.sourceRef.lineStart}-${n.sourceRef.lineEnd}` } : null,
      tags: n.tags
    }));
    return { content: [{ type: "text", text: JSON.stringify(shaped, null, 2) }] };
  }
);
// Tool: detect_codegraph_anomalies — run the model-level validator and report findings.
server2.tool(
  "detect_codegraph_anomalies",
  "Detect anomalies in a CodeGraph: circular deps, god nodes (high fan-in), high coupling (high fan-out), orphan nodes, broken references",
  { graph_id: z3.string().describe("CodeGraph ID") },
  async ({ graph_id }) => {
    const graph = await storage.getCodeGraph(graph_id);
    if (!graph) {
      return { content: [{ type: "text", text: `CodeGraph "${graph_id}" not found` }], isError: true };
    }
    const anomalies = codeGraphModelService.validateGraph(graph);
    const text = anomalies.length === 0
      ? "No anomalies detected."
      : JSON.stringify(anomalies, null, 2);
    return { content: [{ type: "text", text }] };
  }
);
// Tool: generate_domain_diagrams — drill into one package/module node, generate
// scoped sequence diagrams for it via the LLM flow pipeline, persist them as
// workspace diagrams, and merge the new flows back into the stored graph.
//
// FIX: the repo-root probe previously tried a hard-coded developer path
// ("/home/nathan/Projects/bluelens") that is dead on every other machine, and
// re-ran `await import("fs")` on each loop iteration. The probe now tries the
// graph's recorded system root, the BLUELENS_PROJECT_ROOT env var, and the
// current working directory, importing fs once. Unused locals were removed.
server2.tool(
  "generate_domain_diagrams",
  "Drill into a specific package/domain node and generate scoped sequence diagrams for it. Call recursively on child nodes to build a multi-level diagram map. API keys are read from environment variables only.",
  {
    graph_id: z3.string().describe("CodeGraph ID"),
    node_id: z3.string().describe("Package or module node ID to scope the generation to"),
    workspace_id: z3.string().describe("Workspace ID where diagrams will be saved"),
    folder_id: z3.string().optional().describe("Folder ID to save diagrams into"),
    llm_provider: z3.enum(["gemini", "openai", "anthropic"]).optional().describe("LLM provider \u2014 auto-detected from env vars if omitted"),
    llm_model: z3.string().optional().describe("Model override")
  },
  async ({ graph_id, node_id, workspace_id, folder_id, llm_provider: rawProvider, llm_model }) => {
    const graph = await storage.getCodeGraph(graph_id);
    if (!graph) {
      return { content: [{ type: "text", text: `CodeGraph "${graph_id}" not found` }], isError: true };
    }
    const scopeNode = graph.nodes[node_id];
    if (!scopeNode) {
      return { content: [{ type: "text", text: `Node "${node_id}" not found in graph` }], isError: true };
    }
    // Provider and key are resolved strictly from environment variables.
    const llm_provider = rawProvider ?? (process.env.GEMINI_API_KEY || process.env.API_KEY ? "gemini" : process.env.OPENAI_API_KEY ? "openai" : process.env.ANTHROPIC_API_KEY ? "anthropic" : void 0);
    const llm_api_key = llm_provider === "gemini" ? process.env.GEMINI_API_KEY ?? process.env.API_KEY : llm_provider === "openai" ? process.env.OPENAI_API_KEY : llm_provider === "anthropic" ? process.env.ANTHROPIC_API_KEY : void 0;
    if (!llm_provider || !llm_api_key) {
      return { content: [{ type: "text", text: "No LLM API key found in environment variables" }], isError: true };
    }
    // Collect every module-level source file beneath the scope node (recursive).
    const getDescendantFiles = (nodeId) => {
      const node = graph.nodes[nodeId];
      if (!node) return [];
      if (node.sourceRef?.filePath && node.kind === "module") return [node.sourceRef.filePath];
      return node.children.flatMap((childId) => getDescendantFiles(childId));
    };
    const files = getDescendantFiles(node_id);
    if (files.length === 0) {
      return { content: [{ type: "text", text: `No source files found under node "${scopeNode.name}"` }], isError: true };
    }
    const llmSettings = buildLLMSettings(llm_provider, llm_api_key, llm_model);
    const logs = [];
    const onLog = (_type, msg) => {
      logs.push(msg);
    };
    // Best-effort: locate the repository on disk so flow generation can read
    // sources. Candidates: the system node's recorded root, an explicit env
    // override, then the current working directory. fsProvider stays undefined
    // when no candidate contains the sample file.
    let fsProvider;
    try {
      const { existsSync: exists } = await import("fs");
      const sampleFilePath = files[0];
      const systemRoot = Object.values(graph.nodes)
        .filter((n) => n.kind === "system")
        .map((n) => n.sourceRef?.filePath)
        .find(Boolean);
      const possibleRoots = [systemRoot, process.env.BLUELENS_PROJECT_ROOT, process.cwd()].filter(Boolean);
      for (const root of possibleRoots) {
        if (exists(`${root}/${sampleFilePath}`)) {
          fsProvider = new NodeFileSystemProvider(root);
          break;
        }
      }
    } catch {
      // Flow generation can proceed without direct file access.
    }
    const scopeCluster = { nodeId: node_id, name: scopeNode.name, files };
    let flows;
    try {
      flows = await orchestrateFlowGeneration(
        graph,
        [],
        fsProvider,
        llmSettings,
        void 0,
        onLog,
        void 0,
        void 0,
        void 0,
        scopeCluster
      );
    } catch (err) {
      const msg = err instanceof Error ? err.message : String(err);
      return { content: [{ type: "text", text: `Flow generation failed: ${msg}` }], isError: true };
    }
    if (Object.keys(flows).length === 0) {
      return { content: [{ type: "text", text: `No flows generated for "${scopeNode.name}"` }] };
    }
    // Persist each generated sequence diagram into the target workspace/folder.
    const created = [];
    for (const flow of Object.values(flows)) {
      const seq = flow.sequenceDiagram;
      if (!seq) continue;
      const diagram = await storage.createDiagram(
        workspace_id,
        flow.name,
        seq,
        folder_id ?? null,
        flow.description
      );
      created.push({ id: diagram.id, name: diagram.name });
    }
    // Merge new flows into the stored graph so later calls build on them.
    const updatedGraph = { ...graph, flows: { ...graph.flows, ...flows }, updatedAt: Date.now() };
    await storage.saveCodeGraph(updatedGraph);
    return {
      content: [{
        type: "text",
        text: JSON.stringify({
          scope: scopeNode.name,
          depth: scopeNode.depth,
          filesAnalyzed: files.length,
          diagramsCreated: created,
          logs: logs.slice(-15)
        }, null, 2)
      }]
    };
  }
);
// Tool: delete_codegraph — remove a stored graph, confirming it exists first
// so the response can echo its human-readable name.
server2.tool(
  "delete_codegraph",
  "Delete a stored CodeGraph",
  { graph_id: z3.string().describe("CodeGraph ID to delete") },
  async ({ graph_id }) => {
    const existing = await storage.getCodeGraph(graph_id);
    if (existing) {
      await storage.deleteCodeGraph(graph_id);
      return { content: [{ type: "text", text: `CodeGraph "${graph_id}" ("${existing.name}") deleted` }] };
    }
    return { content: [{ type: "text", text: `CodeGraph "${graph_id}" not found` }], isError: true };
  }
);
// Tool: push_to_cloud — POST a local CodeGraph to the cloud MCP server,
// authenticated with the caller's API key. Network and HTTP failures are
// reported as tool errors rather than thrown.
server2.tool(
  "push_to_cloud",
  "Upload a local CodeGraph to the BlueLens Cloud MCP server so AI agents can access it remotely. The cloud server stores it under your API key namespace.",
  {
    graph_id: z3.string().describe("Local CodeGraph ID to push"),
    cloud_url: z3.string().describe("BlueLens Cloud MCP server base URL (e.g. https://mcp.bluelens.dev or http://localhost:3003)"),
    api_key: z3.string().describe("Your BlueLens Cloud API key")
  },
  async ({ graph_id, cloud_url, api_key }) => {
    const graph = await storage.getCodeGraph(graph_id);
    if (!graph) {
      return { content: [{ type: "text", text: `CodeGraph "${graph_id}" not found` }], isError: true };
    }
    // Strip a single trailing slash so we never build ".../api//graphs".
    const endpoint = `${cloud_url.replace(/\/$/, "")}/api/graphs`;
    let response;
    try {
      response = await fetch(endpoint, {
        method: "POST",
        headers: {
          "Authorization": `Bearer ${api_key}`,
          "Content-Type": "application/json"
        },
        body: JSON.stringify(graph)
      });
    } catch (err) {
      const msg = err instanceof Error ? err.message : String(err);
      return { content: [{ type: "text", text: `Network error reaching "${endpoint}": ${msg}` }], isError: true };
    }
    // Servers may return non-JSON bodies; fall back to the bare status code.
    let payload;
    try {
      payload = await response.json();
    } catch {
      payload = { rawStatus: response.status };
    }
    if (!response.ok) {
      const failure = `Upload failed (HTTP ${response.status}): ${JSON.stringify(payload)}`;
      return { content: [{ type: "text", text: failure }], isError: true };
    }
    const report = {
      message: `CodeGraph "${graph.name}" pushed to cloud successfully`,
      endpoint,
      graphId: graph.id,
      nodeCount: Object.keys(graph.nodes).length,
      server_response: payload
    };
    return { content: [{ type: "text", text: JSON.stringify(report, null, 2) }] };
  }
);
+ }
3371
+
3372
+ // src/tools/analysis.ts
3373
+ import { z as z4 } from "zod";
3374
+ import { readFileSync } from "fs";
3375
+ import { join as join4 } from "path";
3376
+ import { execSync } from "child_process";
3377
// Return the "depends_on" relations that point AT nodeId (its direct dependents / fan-in).
function getDependentIds(graph, nodeId) {
  const dependents = [];
  for (const rel of Object.values(graph.relations)) {
    if (rel.type === "depends_on" && rel.targetId === nodeId) {
      dependents.push({ sourceId: rel.sourceId, type: rel.type });
    }
  }
  return dependents;
}
// Return the "depends_on" relations that originate FROM nodeId (its direct dependencies / fan-out).
function getDependencyIds(graph, nodeId) {
  const dependencies = [];
  for (const rel of Object.values(graph.relations)) {
    if (rel.type === "depends_on" && rel.sourceId === nodeId) {
      dependencies.push({ targetId: rel.targetId, type: rel.type });
    }
  }
  return dependencies;
}
// Breadth-first walk of "depends_on" edges from startId, up to maxDepth hops.
// direction "up" follows dependents (fan-in); anything else follows
// dependencies (fan-out). Returns discovered nodes annotated with their
// hop distance; the start node itself is excluded.
function collectTransitiveNodes(graph, startId, direction, maxDepth) {
  const seen = new Set([startId]);
  const found = [];
  // Index-based frontier avoids repeated Array#shift on large graphs.
  const frontier = [{ id: startId, distance: 0 }];
  for (let head = 0; head < frontier.length; head++) {
    const { id, distance } = frontier[head];
    if (distance >= maxDepth) continue;
    const neighborIds = direction === "up"
      ? getDependentIds(graph, id).map((r) => r.sourceId)
      : getDependencyIds(graph, id).map((r) => r.targetId);
    for (const neighborId of neighborIds) {
      if (seen.has(neighborId)) continue;
      seen.add(neighborId);
      const node = graph.nodes[neighborId];
      // Relations may reference nodes pruned from the graph; skip those.
      if (!node) continue;
      found.push({ id: neighborId, name: node.name, kind: node.kind, filePath: node.sourceRef?.filePath ?? null, distance: distance + 1 });
      frontier.push({ id: neighborId, distance: distance + 1 });
    }
  }
  return found;
}
// Project a graph node into the compact shape used in tool responses.
function nodeShape(node) {
  const { id, name, kind, depth } = node;
  return { id, name, kind, filePath: node.sourceRef?.filePath ?? null, depth };
}
// Walk up the parent chain from nodeId until a depth-1 (domain/package) node is
// reached; return its name. Returns null for missing nodes or broken chains.
function getNodeDomain(graph, nodeId) {
  for (let cur = graph.nodes[nodeId]; cur; cur = graph.nodes[cur.parentId]) {
    if (cur.depth === 1) return cur.name;
    if (!cur.parentId) return null;
  }
  return null;
}
// Build the LLM settings object for the chosen provider; the other provider
// slots are null. `model` overrides each provider's default model name.
function buildLLMSettings2(provider, apiKey, model) {
  const entryFor = (name, defaultModel) =>
    provider === name ? { provider: name, apiKey, model: model ?? defaultModel } : null;
  return {
    activeProvider: provider,
    providers: {
      gemini: entryFor("gemini", "gemini-2.5-flash"),
      openai: entryFor("openai", "gpt-4o-mini"),
      anthropic: entryFor("anthropic", "claude-sonnet-4-6")
    }
  };
}
// Auto-detect an LLM provider from environment variables, in priority order:
// gemini (GEMINI_API_KEY or the legacy API_KEY), then openai, then anthropic.
// Returns null when no key is present.
function detectProvider() {
  const { GEMINI_API_KEY, API_KEY, OPENAI_API_KEY, ANTHROPIC_API_KEY } = process.env;
  if (GEMINI_API_KEY || API_KEY) {
    return { provider: "gemini", apiKey: GEMINI_API_KEY ?? API_KEY };
  }
  if (OPENAI_API_KEY) {
    return { provider: "openai", apiKey: OPENAI_API_KEY };
  }
  if (ANTHROPIC_API_KEY) {
    return { provider: "anthropic", apiKey: ANTHROPIC_API_KEY };
  }
  return null;
}
// Evaluate deterministic architectural rules against a graph and collect
// violations. Supported rule types:
//   - forbidden_cross_domain: no module in from_domain may depend on a module
//     in to_domain (severity "error").
//   - fan_in_threshold: nodes matching file_query must not exceed `max`
//     incoming "depends_on" edges (severity "warning").
// Rules missing their required fields are silently skipped.
function runRules(graph, rules) {
  const violations = [];
  const allRelations = Object.values(graph.relations);
  const allNodes = Object.values(graph.nodes);

  // Flag module→module dependencies that cross the forbidden domain boundary.
  const checkCrossDomain = (rule) => {
    for (const rel of allRelations) {
      if (rel.type !== "depends_on") continue;
      const src = graph.nodes[rel.sourceId];
      const tgt = graph.nodes[rel.targetId];
      if (src?.kind !== "module" || tgt?.kind !== "module") continue;
      if (getNodeDomain(graph, rel.sourceId) !== rule.from_domain) continue;
      if (getNodeDomain(graph, rel.targetId) !== rule.to_domain) continue;
      violations.push({
        rule: rule.description,
        severity: "error",
        message: `Forbidden import: "${src?.name}" (${rule.from_domain}) \u2192 "${tgt?.name}" (${rule.to_domain})`,
        details: `${src?.sourceRef?.filePath ?? src?.name} \u2192 ${tgt?.sourceRef?.filePath ?? tgt?.name}`
      });
    }
  };

  // Flag nodes whose incoming "depends_on" count exceeds the configured cap.
  const checkFanIn = (rule) => {
    const q = rule.file_query.toLowerCase();
    const matched = allNodes.filter(
      (n) => n.name.toLowerCase().includes(q) || (n.sourceRef?.filePath?.toLowerCase().includes(q) ?? false)
    );
    for (const node of matched) {
      const fanIn = allRelations.filter((r) => r.targetId === node.id && r.type === "depends_on").length;
      if (fanIn <= rule.max) continue;
      violations.push({
        rule: rule.description,
        severity: "warning",
        message: `Fan-in exceeded: "${node.name}" has ${fanIn} dependents (max: ${rule.max})`,
        details: node.sourceRef?.filePath ?? node.name
      });
    }
  };

  for (const rule of rules) {
    if (rule.type === "forbidden_cross_domain" && rule.from_domain && rule.to_domain) checkCrossDomain(rule);
    if (rule.type === "fan_in_threshold" && rule.file_query && rule.max !== undefined) checkFanIn(rule);
  }
  return violations;
}
// Stable identity key for a violation, used to diff violation sets between graphs.
function violationKey(v) {
  return v.rule + "::" + v.details;
}
+ function registerAnalysisTools(server2) {
3477
// Tool: analyze_node_impact — blast-radius report for one node: direct fan-in,
// direct fan-out, transitive upstream reach, and a heuristic 1-10 risk score.
server2.tool(
  "analyze_node_impact",
  "Blast-radius analysis: shows direct dependents (fan-in), direct dependencies (fan-out), and transitive blast radius. Run before modifying any file to know the risk.",
  {
    graph_id: z4.string().describe("CodeGraph ID"),
    node_id: z4.string().optional().describe("Node ID to analyze"),
    file_path: z4.string().optional().describe("Partial file path \u2014 resolves to the matching module node (use if node_id is unknown)"),
    max_depth: z4.number().int().min(1).max(6).optional().describe("Transitive depth for blast radius (default: 3)")
  },
  async ({ graph_id, node_id, file_path, max_depth = 3 }) => {
    const graph = await storage.getCodeGraph(graph_id);
    if (!graph) return { content: [{ type: "text", text: `CodeGraph "${graph_id}" not found` }], isError: true };
    // Resolve the target either directly by ID or by fuzzy file-path match.
    const resolveNode = () => {
      if (node_id) return graph.nodes[node_id];
      if (!file_path) return undefined;
      const needle = file_path.toLowerCase();
      return Object.values(graph.nodes).find((n) => n.sourceRef?.filePath?.toLowerCase().includes(needle));
    };
    const node = resolveNode();
    if (!node) {
      const text = node_id ? `Node "${node_id}" not found` : `No node matches file path "${file_path}"`;
      return { content: [{ type: "text", text }], isError: true };
    }
    // Shape a related node for output, dropping dangling relation endpoints.
    const toShape = (relatedId, relationType) => {
      const related = graph.nodes[relatedId];
      return related ? { ...nodeShape(related), relationType } : null;
    };
    const directDependents = getDependentIds(graph, node.id).map((r) => toShape(r.sourceId, r.type)).filter(Boolean);
    const directDependencies = getDependencyIds(graph, node.id).map((r) => toShape(r.targetId, r.type)).filter(Boolean);
    const blastRadius = collectTransitiveNodes(graph, node.id, "up", max_depth);
    // Heuristic score: direct dependents weigh 2x, transitive nodes 0.4x, capped at 10.
    const riskScore = Math.min(10, Math.ceil(
      (directDependents.length * 2 + blastRadius.length * 0.4) / 2
    ));
    const report = {
      node: { ...nodeShape(node), tags: node.tags },
      impact: {
        directDependents: directDependents.length,
        directDependencies: directDependencies.length,
        transitiveBlastRadius: blastRadius.length,
        riskScore: `${riskScore}/10`
      },
      directDependents,
      directDependencies,
      blastRadius: blastRadius.sort((a, b) => a.distance - b.distance)
    };
    return { content: [{ type: "text", text: JSON.stringify(report, null, 2) }] };
  }
);
// Tool: find_dependency_path — BFS over "depends_on" edges; the first hit on
// the target is therefore a shortest path. The target is never marked visited
// early, so a direct hit is always reported.
server2.tool(
  "find_dependency_path",
  "Find the shortest dependency path between two nodes (how does A reach B?). Useful for understanding indirect coupling.",
  {
    graph_id: z4.string().describe("CodeGraph ID"),
    from_node_id: z4.string().describe("Source node ID"),
    to_node_id: z4.string().describe("Target node ID"),
    max_hops: z4.number().int().min(1).max(10).optional().describe("Maximum path length to search (default: 6)")
  },
  async ({ graph_id, from_node_id, to_node_id, max_hops = 6 }) => {
    const graph = await storage.getCodeGraph(graph_id);
    if (!graph) return { content: [{ type: "text", text: `CodeGraph "${graph_id}" not found` }], isError: true };
    if (!graph.nodes[from_node_id]) return { content: [{ type: "text", text: `Node "${from_node_id}" not found` }], isError: true };
    if (!graph.nodes[to_node_id]) return { content: [{ type: "text", text: `Node "${to_node_id}" not found` }], isError: true };
    // Render one step of the discovered path for the JSON response.
    const describeStep = (stepId, i, fullPath, fullRelPath) => {
      const n = graph.nodes[stepId];
      return { id: stepId, name: n?.name ?? stepId, kind: n?.kind, filePath: n?.sourceRef?.filePath ?? null, relationToNext: i < fullPath.length - 1 ? fullRelPath[i] : null };
    };
    const frontier = [{ id: from_node_id, path: [from_node_id], relationPath: [] }];
    const seen = new Set([from_node_id]);
    for (let head = 0; head < frontier.length; head++) {
      const { id, path, relationPath } = frontier[head];
      if (path.length > max_hops) continue;
      for (const rel of Object.values(graph.relations)) {
        if (rel.sourceId !== id || rel.type !== "depends_on") continue;
        const reachedTarget = rel.targetId === to_node_id;
        if (!reachedTarget && seen.has(rel.targetId)) continue;
        const fullPath = [...path, rel.targetId];
        const fullRelPath = [...relationPath, rel.type];
        if (reachedTarget) {
          return {
            content: [{
              type: "text",
              text: JSON.stringify({
                found: true,
                hops: fullPath.length - 1,
                path: fullPath.map((stepId, i) => describeStep(stepId, i, fullPath, fullRelPath))
              }, null, 2)
            }]
          };
        }
        seen.add(rel.targetId);
        frontier.push({ id: rel.targetId, path: fullPath, relationPath: fullRelPath });
      }
    }
    return {
      content: [{
        type: "text",
        text: JSON.stringify({
          found: false,
          message: `No dependency path from "${graph.nodes[from_node_id]?.name}" to "${graph.nodes[to_node_id]?.name}" within ${max_hops} hops`
        }, null, 2)
      }]
    };
  }
);
// Tool: diff_codegraphs — structural before/after comparison: module files
// added/removed, dependency edges added/dropped, and depth-1 domain drift.
server2.tool(
  "diff_codegraphs",
  "Structural diff between two CodeGraphs (before vs after a refactor). Reports added/removed files, new/dropped dependencies, and domain-level drift.",
  {
    base_graph_id: z4.string().describe('Base CodeGraph ID (the "before" state)'),
    target_graph_id: z4.string().describe('Target CodeGraph ID (the "after" state)')
  },
  async ({ base_graph_id, target_graph_id }) => {
    const [base, target] = await Promise.all([storage.getCodeGraph(base_graph_id), storage.getCodeGraph(target_graph_id)]);
    if (!base) return { content: [{ type: "text", text: `CodeGraph "${base_graph_id}" not found` }], isError: true };
    if (!target) return { content: [{ type: "text", text: `CodeGraph "${target_graph_id}" not found` }], isError: true };
    // Module inventory, keyed by source file path.
    const modulesOf = (g) => Object.values(g.nodes).filter((n) => n.kind === "module");
    const byPath = (mods) => new Map(mods.map((n) => [n.sourceRef?.filePath, n]));
    const baseModules = modulesOf(base);
    const targetModules = modulesOf(target);
    const baseByPath = byPath(baseModules);
    const targetByPath = byPath(targetModules);
    const addedModules = targetModules.filter((n) => n.sourceRef?.filePath && !baseByPath.has(n.sourceRef.filePath));
    const removedModules = baseModules.filter((n) => n.sourceRef?.filePath && !targetByPath.has(n.sourceRef.filePath));
    // Dependency edges rendered as "src → tgt" file-path pairs; edges whose
    // endpoints lack file paths are ignored.
    const depSet = (g) => new Set(
      Object.values(g.relations)
        .filter((r) => r.type === "depends_on")
        .map((r) => {
          const src = g.nodes[r.sourceId]?.sourceRef?.filePath;
          const tgt = g.nodes[r.targetId]?.sourceRef?.filePath;
          return src && tgt ? `${src} \u2192 ${tgt}` : null;
        })
        .filter(Boolean)
    );
    const baseDeps = depSet(base);
    const targetDeps = depSet(target);
    const addedDeps = [...targetDeps].filter((d) => !baseDeps.has(d));
    const removedDeps = [...baseDeps].filter((d) => !targetDeps.has(d));
    // Depth-1 nodes are domains/packages; compare their name sets.
    const domainsOf = (g) => new Set(Object.values(g.nodes).filter((n) => n.depth === 1).map((n) => n.name));
    const basePackages = domainsOf(base);
    const targetPackages = domainsOf(target);
    const addedDomains = [...targetPackages].filter((p) => !basePackages.has(p));
    const removedDomains = [...basePackages].filter((p) => !targetPackages.has(p));
    const report = {
      base: { id: base.id, name: base.name, scannedAt: new Date(base.createdAt).toISOString(), modules: baseModules.length, dependencies: baseDeps.size },
      target: { id: target.id, name: target.name, scannedAt: new Date(target.createdAt).toISOString(), modules: targetModules.length, dependencies: targetDeps.size },
      summary: {
        addedModules: addedModules.length,
        removedModules: removedModules.length,
        addedDependencies: addedDeps.length,
        removedDependencies: removedDeps.length,
        addedDomains: addedDomains.length,
        removedDomains: removedDomains.length
      },
      addedModules: addedModules.map((n) => ({ name: n.name, filePath: n.sourceRef?.filePath })),
      removedModules: removedModules.map((n) => ({ name: n.name, filePath: n.sourceRef?.filePath })),
      addedDependencies: addedDeps.slice(0, 40),
      removedDependencies: removedDeps.slice(0, 40),
      domainChanges: { added: addedDomains, removed: removedDomains }
    };
    return { content: [{ type: "text", text: JSON.stringify(report, null, 2) }] };
  }
);
// Tool: find_nodes — fuzzy node lookup by name/path substring, kind, and depth.
// Primary use: resolving node IDs for the impact/path tools.
server2.tool(
  "find_nodes",
  "Search for nodes by name, file path, kind, or depth. Use this to resolve a node ID before calling analyze_node_impact or find_dependency_path.",
  {
    graph_id: z4.string().describe("CodeGraph ID"),
    query: z4.string().optional().describe("Partial name or file path match (case-insensitive)"),
    kind: z4.enum(["system", "package", "module", "class", "function", "interface", "variable", "method", "field"]).optional().describe("Filter by node kind"),
    depth: z4.number().int().min(0).max(4).optional().describe("Filter by depth (0=system, 1=package/domain, 2=module/file, 3=class/function, 4=method)")
  },
  async ({ graph_id, query, kind, depth }) => {
    const graph = await storage.getCodeGraph(graph_id);
    if (!graph) return { content: [{ type: "text", text: `CodeGraph "${graph_id}" not found` }], isError: true };
    const needle = query ? query.toLowerCase() : undefined;
    // A node matches only when every supplied filter agrees.
    const hits = Object.values(graph.nodes).filter((n) => {
      if (kind !== undefined && n.kind !== kind) return false;
      if (depth !== undefined && n.depth !== depth) return false;
      if (needle === undefined) return true;
      return n.name.toLowerCase().includes(needle) || (n.sourceRef?.filePath?.toLowerCase().includes(needle) ?? false);
    });
    if (hits.length === 0) {
      return { content: [{ type: "text", text: "No nodes matched the query." }] };
    }
    const shaped = hits.map((n) => ({
      id: n.id,
      name: n.name,
      kind: n.kind,
      depth: n.depth,
      filePath: n.sourceRef?.filePath ?? null,
      parentId: n.parentId,
      childCount: n.children.length,
      tags: n.tags
    }));
    return { content: [{ type: "text", text: JSON.stringify(shaped, null, 2) }] };
  }
);
// Tool: check_architectural_rules — run deterministic rules (cross-domain
// import bans, fan-in caps) against a graph. With base_graph_id, the same
// rules run against the base graph so only NEW violations are highlighted.
server2.tool(
  "check_architectural_rules",
  "Run deterministic architectural rules against a CodeGraph: forbidden cross-domain imports, fan-in thresholds. Optionally compare against a base graph to surface only NEW violations.",
  {
    graph_id: z4.string().describe("CodeGraph to check"),
    rules: z4.array(z4.object({
      type: z4.enum(["forbidden_cross_domain", "fan_in_threshold"]),
      from_domain: z4.string().optional().describe("For forbidden_cross_domain: the importing domain name"),
      to_domain: z4.string().optional().describe("For forbidden_cross_domain: the imported domain name"),
      file_query: z4.string().optional().describe("For fan_in_threshold: partial file name to match"),
      max: z4.number().optional().describe("For fan_in_threshold: maximum allowed fan-in"),
      description: z4.string().describe("Human-readable rule description")
    })).optional().describe("Rules to enforce. Omit to use the default bluelens ruleset."),
    base_graph_id: z4.string().optional().describe("If provided, report only NEW violations not present in the base graph")
  },
  async ({ graph_id, rules: customRules, base_graph_id }) => {
    const graph = await storage.getCodeGraph(graph_id);
    if (!graph) return { content: [{ type: "text", text: `CodeGraph "${graph_id}" not found` }], isError: true };
    // Baseline ruleset for the bluelens codebase itself.
    const DEFAULT_RULES = [
      { type: "forbidden_cross_domain", from_domain: "Shared Utilities & Configuration", to_domain: "Code Graph & Analysis", description: "Shared utils must not import Code Graph layer" },
      { type: "forbidden_cross_domain", from_domain: "Shared Utilities & Configuration", to_domain: "AI Agent & LLM Integration", description: "Shared utils must not import LLM layer" },
      { type: "forbidden_cross_domain", from_domain: "Shared Utilities & Configuration", to_domain: "Application Shell & Core UI", description: "Shared utils must not import UI layer" },
      { type: "forbidden_cross_domain", from_domain: "Diagramming & Visualization", to_domain: "AI Agent & LLM Integration", description: "Diagramming must not directly import LLM agents" },
      { type: "fan_in_threshold", file_query: "types.ts", max: 310, description: "types.ts fan-in must not grow beyond 310 (currently ~297)" },
      { type: "fan_in_threshold", file_query: "llmService.ts", max: 30, description: "llmService.ts fan-in must not grow beyond 30 (currently ~23)" },
      { type: "fan_in_threshold", file_query: "App.tsx", max: 70, description: "App.tsx dependency count must not grow beyond 70 (currently ~56)" }
    ];
    const activeRules = customRules ?? DEFAULT_RULES;
    const violations = runRules(graph, activeRules);
    // Optionally subtract violations already present in the base graph.
    let newViolations;
    if (base_graph_id) {
      const baseGraph = await storage.getCodeGraph(base_graph_id);
      if (!baseGraph) return { content: [{ type: "text", text: `Base CodeGraph "${base_graph_id}" not found` }], isError: true };
      const knownKeys = new Set(runRules(baseGraph, activeRules).map(violationKey));
      newViolations = violations.filter((v) => !knownKeys.has(violationKey(v)));
    }
    const report = {
      graph: { id: graph.id, name: graph.name },
      rulesChecked: activeRules.length,
      totalViolations: violations.length,
      ...(newViolations !== undefined ? { newViolations: newViolations.length, newViolationDetails: newViolations } : {}),
      violations
    };
    return { content: [{ type: "text", text: JSON.stringify(report, null, 2) }] };
  }
);
// Registers the "review_architectural_drift" MCP tool: a hybrid
// deterministic + LLM review that diffs two CodeGraph snapshots and asks an
// LLM whether the structural changes match the project's stated
// architectural philosophy.
// NOTE(review): storage, detectProvider, runRules, violationKey,
// codeGraphModelService, llmService, buildLLMSettings2, readFileSync,
// join4 and execSync are bundle-level bindings defined elsewhere in this
// file — semantics assumed from call sites; confirm against their sources.
server2.tool(
  "review_architectural_drift",
  "LLM-powered review of structural changes between two CodeGraphs. Evaluates whether the changes align with the project architectural philosophy and flags risks. API keys read from env.",
  {
    // Input schema (zod): both graph IDs required; context and model optional.
    base_graph_id: z4.string().describe('Base CodeGraph ID (the "before")'),
    target_graph_id: z4.string().describe('Target CodeGraph ID (the "after")'),
    project_directory: z4.string().optional().describe("Path to the project root \u2014 used to read CLAUDE.md for architectural context"),
    architectural_context: z4.string().optional().describe("Architectural philosophy to evaluate against (used if project_directory is not provided)"),
    llm_model: z4.string().optional().describe("Model override (default: gemini-2.5-flash)")
  },
  async ({ base_graph_id, target_graph_id, project_directory, architectural_context, llm_model }) => {
    // Load both snapshots in parallel; unknown IDs return an MCP error
    // result ({ isError: true }) rather than throwing.
    const [base, target] = await Promise.all([storage.getCodeGraph(base_graph_id), storage.getCodeGraph(target_graph_id)]);
    if (!base) return { content: [{ type: "text", text: `CodeGraph "${base_graph_id}" not found` }], isError: true };
    if (!target) return { content: [{ type: "text", text: `CodeGraph "${target_graph_id}" not found` }], isError: true };
    // detectProvider() presumably scans env vars for a usable LLM API key.
    const detected = detectProvider();
    if (!detected) return { content: [{ type: "text", text: "No LLM API key found in environment variables" }], isError: true };
    // --- Module diff, keyed by sourceRef.filePath. Modules without a file
    // path are excluded from the added/removed lists by the truthiness guard.
    const baseModules = Object.values(base.nodes).filter((n) => n.kind === "module");
    const targetModules = Object.values(target.nodes).filter((n) => n.kind === "module");
    const baseByPath = new Map(baseModules.map((n) => [n.sourceRef?.filePath, n]));
    const targetByPath = new Map(targetModules.map((n) => [n.sourceRef?.filePath, n]));
    const addedModules = targetModules.filter((n) => n.sourceRef?.filePath && !baseByPath.has(n.sourceRef.filePath)).map((n) => n.sourceRef.filePath);
    const removedModules = baseModules.filter((n) => n.sourceRef?.filePath && !targetByPath.has(n.sourceRef.filePath)).map((n) => n.sourceRef.filePath);
    // --- Dependency diff: each depends_on relation becomes a
    // "sourcePath -> targetPath" string key; null (an endpoint lacks a
    // file path) is dropped by filter(Boolean) before the Set is built.
    const depKey = (graph, r) => {
      const src = graph.nodes[r.sourceId]?.sourceRef?.filePath;
      const tgt = graph.nodes[r.targetId]?.sourceRef?.filePath;
      return src && tgt ? `${src} \u2192 ${tgt}` : null;
    };
    const baseDeps = new Set(Object.values(base.relations).filter((r) => r.type === "depends_on").map((r) => depKey(base, r)).filter(Boolean));
    const targetDeps = new Set(Object.values(target.relations).filter((r) => r.type === "depends_on").map((r) => depKey(target, r)).filter(Boolean));
    const addedDeps = [...targetDeps].filter((d) => !baseDeps.has(d));
    const removedDeps = [...baseDeps].filter((d) => !targetDeps.has(d));
    // --- Anomaly diff: only anomalies new in the target are reported,
    // keyed on "type::message".
    const baseAnomalies = codeGraphModelService.validateGraph(base);
    const targetAnomalies = codeGraphModelService.validateGraph(target);
    const baseAnomalyKeys = new Set(baseAnomalies.map((a) => `${a.type}::${a.message}`));
    const newAnomalies = targetAnomalies.filter((a) => !baseAnomalyKeys.has(`${a.type}::${a.message}`));
    // Deterministic architecture rules. NOTE(review): these domain names and
    // fan-in thresholds look tuned to this project's own architecture rather
    // than an arbitrary scanned project -- confirm intent.
    const DEFAULT_RULES = [
      { type: "forbidden_cross_domain", from_domain: "Shared Utilities & Configuration", to_domain: "Code Graph & Analysis", description: "Shared utils must not import Code Graph layer" },
      { type: "forbidden_cross_domain", from_domain: "Shared Utilities & Configuration", to_domain: "AI Agent & LLM Integration", description: "Shared utils must not import LLM layer" },
      { type: "forbidden_cross_domain", from_domain: "Diagramming & Visualization", to_domain: "AI Agent & LLM Integration", description: "Diagramming must not directly import LLM agents" },
      { type: "fan_in_threshold", file_query: "types.ts", max: 310, description: "types.ts fan-in threshold" },
      { type: "fan_in_threshold", file_query: "llmService.ts", max: 30, description: "llmService.ts fan-in threshold" }
    ];
    // Rule-violation diff: only violations introduced by the target count.
    const baseRuleViolations = new Set(runRules(base, DEFAULT_RULES).map(violationKey));
    const newRuleViolations = runRules(target, DEFAULT_RULES).filter((v) => !baseRuleViolations.has(violationKey(v)));
    // Architectural context: explicit argument wins; otherwise best-effort
    // reads of CLAUDE.md, then the first 3000 chars of a file named "PRD".
    // Read failures are deliberately swallowed -- context is optional.
    let context = architectural_context ?? "";
    if (!context && project_directory) {
      try {
        context = readFileSync(join4(project_directory, "CLAUDE.md"), "utf-8");
      } catch {
      }
      if (!context) {
        try {
          context = readFileSync(join4(project_directory, "PRD"), "utf-8").slice(0, 3e3);
        } catch {
        }
      }
    }
    // Commit messages between the two scan timestamps give the LLM the
    // *intent* behind the structural changes. createdAt is treated as a
    // millisecond timestamp; +1000ms slack on the upper bound. Failures
    // (git missing, not a repo) are swallowed and gitLog stays "".
    let gitLog = "";
    if (project_directory) {
      try {
        const since = new Date(base.createdAt).toISOString();
        const until = new Date(target.createdAt + 1e3).toISOString();
        gitLog = execSync(
          `git log --after="${since}" --before="${until}" --format="commit %h%n%s%n%b%n---" --no-merges`,
          { cwd: project_directory, encoding: "utf-8", stdio: ["ignore", "pipe", "ignore"] }
        ).trim();
      } catch {
      }
    }
    // Depth-1 nodes are the top-level domain packages of each snapshot.
    const basePackages = Object.values(base.nodes).filter((n) => n.depth === 1).map((n) => n.name);
    const targetPackages = Object.values(target.nodes).filter((n) => n.depth === 1).map((n) => n.name);
    // Prompt assembly. Lists are truncated to 20 entries to bound prompt
    // size; full counts are still reported in the headings.
    const systemPrompt = `You are a senior software architect reviewing codebase evolution.
Your job: evaluate whether structural changes between two CodeGraph snapshots are consistent with the project's architectural philosophy, and flag concrete risks.
Be direct, concise, and specific. Do not summarize what you were given \u2014 focus on what matters architecturally.`;
    const userMessage = `## Architectural Philosophy
${context || "(not provided \u2014 use general software architecture principles)"}

## Commits Between Scans
${gitLog || "(no git history available \u2014 structural changes cannot be explained by commit context)"}

## Structural Changes
- Added modules (${addedModules.length}): ${addedModules.slice(0, 20).join(", ") || "none"}
- Removed modules (${removedModules.length}): ${removedModules.slice(0, 20).join(", ") || "none"}
- New dependencies (${addedDeps.length}): ${addedDeps.slice(0, 20).join("; ") || "none"}
- Removed dependencies (${removedDeps.length}): ${removedDeps.slice(0, 20).join("; ") || "none"}
- Domain structure changed: base=[${basePackages.join(", ")}] \u2192 target=[${targetPackages.join(", ")}]

## New Structural Anomalies Introduced (${newAnomalies.length})
${newAnomalies.map((a) => `- [${a.severity}] ${a.type}: ${a.message}`).join("\n") || "none"}

## New Deterministic Rule Violations (${newRuleViolations.length})
${newRuleViolations.map((v) => `- [${v.severity}] ${v.message}`).join("\n") || "none"}

Answer these three questions:
1. **Alignment**: Use the commit messages to understand intent. Do the structural changes match what the commits describe? Flag any structural change NOT explained by a commit.
2. **Risks**: What are the concrete architectural risks introduced? Distinguish between intentional changes (explained by commits) and unexplained drift.
3. **Recommendations**: What should be fixed or watched? If a commit lacks an architectural explanation for a significant structural change, flag that too.`;
    const messages = [{ role: "user", content: userMessage }];
    const llmSettings = buildLLMSettings2(detected.provider, detected.apiKey, llm_model);
    try {
      // Single-turn LLM call; response.content is the free-text review,
      // wrapped in a JSON summary alongside the deterministic diff stats.
      const response = await llmService.sendMessage(messages, systemPrompt, llmSettings, { source: "architectural-review" });
      return {
        content: [{
          type: "text",
          text: JSON.stringify({
            base: { id: base.id, name: base.name, scannedAt: new Date(base.createdAt).toISOString() },
            target: { id: target.id, name: target.name, scannedAt: new Date(target.createdAt).toISOString() },
            structuralDiff: { addedModules: addedModules.length, removedModules: removedModules.length, addedDeps: addedDeps.length, removedDeps: removedDeps.length },
            newAnomalies: newAnomalies.length,
            newRuleViolations: newRuleViolations.length,
            tokensUsed: response.usage?.totalTokens,
            review: response.content
          }, null, 2)
        }]
      };
    } catch (err) {
      // Surface provider/network failures as an MCP error result rather
      // than letting the exception escape the tool handler.
      const msg = err instanceof Error ? err.message : String(err);
      return { content: [{ type: "text", text: `LLM review failed: ${msg}` }], isError: true };
    }
  }
);
3850
+ }
3851
+
3852
+ // src/index.ts
3853
+ var projectRoot = join5(dirname(fileURLToPath(import.meta.url)), "..", "..");
3854
+ for (const envFile of [join5(projectRoot, ".env"), join5(projectRoot, "mcp", ".env")]) {
3855
+ try {
3856
+ const lines = readFileSync2(envFile, "utf-8").split("\n");
3857
+ for (const line of lines) {
3858
+ const match = line.match(/^\s*([A-Z_][A-Z0-9_]*)\s*=\s*(.*)$/);
3859
+ if (match && !process.env[match[1]]) {
3860
+ process.env[match[1]] = match[2].replace(/^["']|["']$/g, "").trim();
3861
+ }
3862
+ }
3863
+ } catch {
3864
+ }
3865
+ }
3866
// Bootstrap the MCP server: construct it, register every tool group in
// order, and serve over stdio. Top-level await keeps the process alive on
// the transport connection.
var server = new McpServer({ name: "bluelens", version: "0.1.0" });

for (const registerTools of [
  registerWorkspaceTools,
  registerDiagramTools,
  registerCodeGraphTools,
  registerAnalysisTools
]) {
  registerTools(server);
}

var transport = new StdioServerTransport();
await server.connect(transport);