aidex-graphra 1.0.0

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (59)
  1. package/LICENSE +21 -0
  2. package/README.md +463 -0
  3. package/dist/chunker.d.ts +3 -0
  4. package/dist/chunker.d.ts.map +1 -0
  5. package/dist/chunker.js +116 -0
  6. package/dist/chunker.js.map +1 -0
  7. package/dist/cli.d.ts +3 -0
  8. package/dist/cli.d.ts.map +1 -0
  9. package/dist/cli.js +821 -0
  10. package/dist/cli.js.map +1 -0
  11. package/dist/graph.d.ts +9 -0
  12. package/dist/graph.d.ts.map +1 -0
  13. package/dist/graph.js +97 -0
  14. package/dist/graph.js.map +1 -0
  15. package/dist/init.d.ts +27 -0
  16. package/dist/init.d.ts.map +1 -0
  17. package/dist/init.js +306 -0
  18. package/dist/init.js.map +1 -0
  19. package/dist/mcp.d.ts +13 -0
  20. package/dist/mcp.d.ts.map +1 -0
  21. package/dist/mcp.js +19 -0
  22. package/dist/mcp.js.map +1 -0
  23. package/dist/mcpServer.d.ts +14 -0
  24. package/dist/mcpServer.d.ts.map +1 -0
  25. package/dist/mcpServer.js +373 -0
  26. package/dist/mcpServer.js.map +1 -0
  27. package/dist/neuralEmbedder.d.ts +21 -0
  28. package/dist/neuralEmbedder.d.ts.map +1 -0
  29. package/dist/neuralEmbedder.js +98 -0
  30. package/dist/neuralEmbedder.js.map +1 -0
  31. package/dist/scanner.d.ts +3 -0
  32. package/dist/scanner.d.ts.map +1 -0
  33. package/dist/scanner.js +43 -0
  34. package/dist/scanner.js.map +1 -0
  35. package/dist/search.d.ts +37 -0
  36. package/dist/search.d.ts.map +1 -0
  37. package/dist/search.js +252 -0
  38. package/dist/search.js.map +1 -0
  39. package/dist/signatureExtractor.d.ts +25 -0
  40. package/dist/signatureExtractor.d.ts.map +1 -0
  41. package/dist/signatureExtractor.js +173 -0
  42. package/dist/signatureExtractor.js.map +1 -0
  43. package/dist/storage.d.ts +59 -0
  44. package/dist/storage.d.ts.map +1 -0
  45. package/dist/storage.js +322 -0
  46. package/dist/storage.js.map +1 -0
  47. package/dist/tokenBudget.d.ts +52 -0
  48. package/dist/tokenBudget.d.ts.map +1 -0
  49. package/dist/tokenBudget.js +175 -0
  50. package/dist/tokenBudget.js.map +1 -0
  51. package/dist/types.d.ts +62 -0
  52. package/dist/types.d.ts.map +1 -0
  53. package/dist/types.js +6 -0
  54. package/dist/types.js.map +1 -0
  55. package/dist/utils/hash.d.ts +6 -0
  56. package/dist/utils/hash.d.ts.map +1 -0
  57. package/dist/utils/hash.js +45 -0
  58. package/dist/utils/hash.js.map +1 -0
  59. package/package.json +69 -0
@@ -0,0 +1,373 @@
1
+ "use strict";
2
+ /**
3
+ * Graphra MCP Server — proper Model Context Protocol implementation.
4
+ *
5
+ * Runs via stdio transport (standard for Claude Desktop, Cursor, VS Code).
6
+ * When an AI tool chats, it automatically calls these tools to get context.
7
+ *
8
+ * Tools:
9
+ * Graphra_search — Hybrid search across the codebase
10
+ * Graphra_context — Get relevant context for a file + task
11
+ * Graphra_explain — Get architecture overview
12
+ * Graphra_stats — Get database statistics
13
+ */
14
+ var __createBinding = (this && this.__createBinding) || (Object.create ? (function(o, m, k, k2) {
15
+ if (k2 === undefined) k2 = k;
16
+ var desc = Object.getOwnPropertyDescriptor(m, k);
17
+ if (!desc || ("get" in desc ? !m.__esModule : desc.writable || desc.configurable)) {
18
+ desc = { enumerable: true, get: function() { return m[k]; } };
19
+ }
20
+ Object.defineProperty(o, k2, desc);
21
+ }) : (function(o, m, k, k2) {
22
+ if (k2 === undefined) k2 = k;
23
+ o[k2] = m[k];
24
+ }));
25
+ var __setModuleDefault = (this && this.__setModuleDefault) || (Object.create ? (function(o, v) {
26
+ Object.defineProperty(o, "default", { enumerable: true, value: v });
27
+ }) : function(o, v) {
28
+ o["default"] = v;
29
+ });
30
+ var __importStar = (this && this.__importStar) || (function () {
31
+ var ownKeys = function(o) {
32
+ ownKeys = Object.getOwnPropertyNames || function (o) {
33
+ var ar = [];
34
+ for (var k in o) if (Object.prototype.hasOwnProperty.call(o, k)) ar[ar.length] = k;
35
+ return ar;
36
+ };
37
+ return ownKeys(o);
38
+ };
39
+ return function (mod) {
40
+ if (mod && mod.__esModule) return mod;
41
+ var result = {};
42
+ if (mod != null) for (var k = ownKeys(mod), i = 0; i < k.length; i++) if (k[i] !== "default") __createBinding(result, mod, k[i]);
43
+ __setModuleDefault(result, mod);
44
+ return result;
45
+ };
46
+ })();
47
+ Object.defineProperty(exports, "__esModule", { value: true });
48
+ exports.startMcpServer = startMcpServer;
49
const index_js_1 = require("@modelcontextprotocol/sdk/server/index.js");
const stdio_js_1 = require("@modelcontextprotocol/sdk/server/stdio.js");
const types_js_1 = require("@modelcontextprotocol/sdk/types.js");
const path = __importStar(require("path"));
const neuralEmbedder_1 = require("./neuralEmbedder");
const search_1 = require("./search");
const storage_1 = require("./storage");
const tokenBudget_1 = require("./tokenBudget");
56
+ // Use console.error for logging (stdout is reserved for MCP protocol)
57
+ const log = (...args) => console.error("[Graphra]", ...args);
58
+ // Default MCP context budget — conservative to leave room for conversation
59
+ const MCP_DEFAULT_BUDGET = 4000;
60
+ async function startMcpServer() {
61
+ const server = new index_js_1.Server({ name: "graphra", version: "1.0.0" }, {
62
+ capabilities: {
63
+ tools: {},
64
+ },
65
+ });
66
+ // ============================================
67
+ // Handle tools/list
68
+ // ============================================
69
+ server.setRequestHandler({ method: "tools/list" }, async () => ({
70
+ tools: [
71
+ {
72
+ name: "Graphra_search",
73
+ description: "Search the codebase for relevant functions, classes, and types using hybrid search " +
74
+ "(BM25 + neural embeddings + PageRank + git recency). Returns actual code signatures. " +
75
+ "Results are automatically packed within a token budget.",
76
+ inputSchema: {
77
+ type: "object",
78
+ properties: {
79
+ query: { type: "string", description: "Search query" },
80
+ topK: { type: "number", description: "Number of results", default: 10 },
81
+ maxTokens: { type: "number", description: "Max tokens for response (default: auto based on model)" },
82
+ model: { type: "string", description: "AI model name for auto token budget (e.g. gpt-4o, claude-3-sonnet)" },
83
+ },
84
+ required: ["query"],
85
+ },
86
+ },
87
+ {
88
+ name: "Graphra_context",
89
+ description: "Get relevant codebase context for a task. Returns architecture (dependencies) " +
90
+ "and semantically related code signatures. Token-budget aware.",
91
+ inputSchema: {
92
+ type: "object",
93
+ properties: {
94
+ task: { type: "string", description: "What you're trying to do" },
95
+ file: { type: "string", description: "Target file path (optional)" },
96
+ topK: { type: "number", description: "Max results", default: 15 },
97
+ maxTokens: { type: "number", description: "Max tokens for response" },
98
+ model: { type: "string", description: "AI model name for auto token budget" },
99
+ },
100
+ required: ["task"],
101
+ },
102
+ },
103
+ {
104
+ name: "Graphra_explain",
105
+ description: "Get architecture overview: layers, important files, code composition.",
106
+ inputSchema: {
107
+ type: "object",
108
+ properties: {},
109
+ },
110
+ },
111
+ {
112
+ name: "Graphra_stats",
113
+ description: "Get index statistics: files, chunks, embeddings, graph edges.",
114
+ inputSchema: {
115
+ type: "object",
116
+ properties: {},
117
+ },
118
+ },
119
+ {
120
+ name: "Graphra_auto",
121
+ description: "IMPORTANT: Call this tool BEFORE answering ANY coding question, code review, or code modification request. " +
122
+ "It returns the relevant codebase context (existing functions, types, patterns) so you can write code that " +
123
+ "fits the existing codebase instead of reinventing the wheel. Pass the user's message and the active file path. " +
124
+ "Returns ~2K tokens of compact context: current file signatures, dependencies, and related code.",
125
+ inputSchema: {
126
+ type: "object",
127
+ properties: {
128
+ message: { type: "string", description: "The user's full chat message / question" },
129
+ activeFile: { type: "string", description: "Path of the file currently open in the editor (if known)" },
130
+ },
131
+ required: ["message"],
132
+ },
133
+ },
134
+ ],
135
+ }));
136
+ // ============================================
137
+ // Handle tools/call
138
+ // ============================================
139
+ server.setRequestHandler({ method: "tools/call" }, async (request) => {
140
+ const { name, arguments: args } = request.params;
141
+ log(`tool call: ${name}`, args);
142
+ // --- Graphra_search ---
143
+ if (name === "Graphra_search") {
144
+ if ((0, storage_1.getChunkCount)() === 0) {
145
+ return { content: [{ type: "text", text: "No index. Run `Graphra generate` first." }] };
146
+ }
147
+ const graph = (0, storage_1.loadGraph)();
148
+ const queryEmbedding = await (0, neuralEmbedder_1.embed)(args.query);
149
+ const budget = args.maxTokens || (0, tokenBudget_1.getTokenBudget)(args.model) || MCP_DEFAULT_BUDGET;
150
+ const results = (0, search_1.hybridSearch)(args.query, queryEmbedding, graph, { topK: args.topK || 10 });
151
+ if (results.length === 0) {
152
+ return { content: [{ type: "text", text: "No results found." }] };
153
+ }
154
+ // Token-aware packing — stop adding results when budget is hit
155
+ const lines = [];
156
+ let tokenCount = 0;
157
+ for (const r of results) {
158
+ const short = r.chunk.file.split(/[/\\]/).slice(-2).join("/");
159
+ const sig = r.chunk.signature || r.chunk.name;
160
+ const line = `${short}: ${sig}`;
161
+ const lineTokens = (0, tokenBudget_1.estimateTokens)(line);
162
+ if (tokenCount + lineTokens > budget)
163
+ break;
164
+ lines.push(line);
165
+ tokenCount += lineTokens;
166
+ }
167
+ const text = lines.join("\n");
168
+ log(`search: returned ${lines.length} results, ~${tokenCount} tokens (budget: ${budget})`);
169
+ return { content: [{ type: "text", text }] };
170
+ }
171
+ // --- Graphra_context ---
172
+ if (name === "Graphra_context") {
173
+ if ((0, storage_1.getChunkCount)() === 0) {
174
+ return { content: [{ type: "text", text: "No index. Run `Graphra generate` first." }] };
175
+ }
176
+ const graph = (0, storage_1.loadGraph)();
177
+ const resolvedFile = args.file ? path.resolve(args.file) : "";
178
+ const queryEmbedding = await (0, neuralEmbedder_1.embed)(args.task);
179
+ const neighbors = new Set();
180
+ if (resolvedFile) {
181
+ for (const [src, targets] of Object.entries(graph)) {
182
+ if (src === resolvedFile)
183
+ targets.forEach((t) => neighbors.add(t));
184
+ if (targets.includes(resolvedFile))
185
+ neighbors.add(src);
186
+ }
187
+ neighbors.add(resolvedFile);
188
+ }
189
+ const allChunks = (0, storage_1.getAllChunks)();
190
+ const neighborChunks = resolvedFile ? allChunks.filter((c) => neighbors.has(c.file)) : [];
191
+ const searchResults = (0, search_1.hybridSearch)(args.task, queryEmbedding, graph, { topK: args.topK || 15 });
192
+ // Token-aware packing
193
+ const budget = args.maxTokens || (0, tokenBudget_1.getTokenBudget)(args.model) || MCP_DEFAULT_BUDGET;
194
+ let text = "";
195
+ let tokenCount = 0;
196
+ if (neighborChunks.length > 0) {
197
+ text += "ARCHITECTURE:\n";
198
+ for (const c of neighborChunks.slice(0, 10)) {
199
+ const line = ` ${c.file.split(/[/\\]/).slice(-2).join("/")}: ${c.signature}\n`;
200
+ const lineTokens = (0, tokenBudget_1.estimateTokens)(line);
201
+ if (tokenCount + lineTokens > budget * 0.4)
202
+ break; // Reserve 40% for architecture
203
+ text += line;
204
+ tokenCount += lineTokens;
205
+ }
206
+ text += "\n";
207
+ }
208
+ text += "RELATED CODE:\n";
209
+ const seen = new Set(neighborChunks.map((c) => c.id));
210
+ for (const r of searchResults) {
211
+ if (seen.has(r.chunk.id))
212
+ continue;
213
+ seen.add(r.chunk.id);
214
+ const line = ` ${r.chunk.file.split(/[/\\]/).slice(-2).join("/")}: ${r.chunk.signature || r.chunk.name}\n`;
215
+ const lineTokens = (0, tokenBudget_1.estimateTokens)(line);
216
+ if (tokenCount + lineTokens > budget)
217
+ break;
218
+ text += line;
219
+ tokenCount += lineTokens;
220
+ }
221
+ log(`context: ~${tokenCount} tokens (budget: ${budget})`);
222
+ return { content: [{ type: "text", text }] };
223
+ }
224
+ // --- Graphra_explain ---
225
+ if (name === "Graphra_explain") {
226
+ if ((0, storage_1.getChunkCount)() === 0) {
227
+ return { content: [{ type: "text", text: "No index. Run `Graphra generate` first." }] };
228
+ }
229
+ const db = (0, storage_1.getDb)();
230
+ const graph = (0, storage_1.loadGraph)();
231
+ const allChunks = (0, storage_1.getAllChunks)();
232
+ const fileMap = new Map();
233
+ for (const c of allChunks) {
234
+ const short = c.file.split(/[/\\]/).slice(-2).join("/");
235
+ if (!fileMap.has(short))
236
+ fileMap.set(short, []);
237
+ fileMap.get(short).push(c.name);
238
+ }
239
+ const layers = {};
240
+ for (const [file] of fileMap) {
241
+ const lower = file.toLowerCase();
242
+ let layer = "other";
243
+ if (lower.includes("controller"))
244
+ layer = "controllers";
245
+ else if (lower.includes("service"))
246
+ layer = "services";
247
+ else if (lower.includes("dal") || lower.includes("model"))
248
+ layer = "data-access";
249
+ else if (lower.includes("route"))
250
+ layer = "routes";
251
+ else if (lower.includes("util") || lower.includes("helper"))
252
+ layer = "utilities";
253
+ if (!layers[layer])
254
+ layers[layer] = [];
255
+ layers[layer].push(file);
256
+ }
257
+ const pageRank = (0, search_1.computePageRank)(graph);
258
+ const topFiles = Array.from(pageRank.entries()).sort((a, b) => b[1] - a[1]).slice(0, 8);
259
+ const fileCount = db.prepare("SELECT COUNT(DISTINCT file) as c FROM chunks").get().c;
260
+ let text = `CODEBASE: ${fileCount} files, ${allChunks.length} elements\n\n`;
261
+ text += "LAYERS:\n";
262
+ for (const [layer, files] of Object.entries(layers)) {
263
+ if (files.length === 0)
264
+ continue;
265
+ text += ` ${layer}: ${files.length} files\n`;
266
+ }
267
+ text += "\nTOP FILES:\n";
268
+ for (const [file, rank] of topFiles) {
269
+ text += ` ${file.split(/[/\\]/).slice(-2).join("/")}: ${rank.toFixed(4)}\n`;
270
+ }
271
+ // Pack to budget
272
+ const packed = (0, tokenBudget_1.packText)(text, MCP_DEFAULT_BUDGET);
273
+ log(`explain: ~${packed.tokens} tokens${packed.truncated ? " (truncated)" : ""}`);
274
+ return { content: [{ type: "text", text: packed.text }] };
275
+ }
276
+ // --- Graphra_auto (the key tool — called on every message) ---
277
+ if (name === "Graphra_auto") {
278
+ if ((0, storage_1.getChunkCount)() === 0) {
279
+ return { content: [{ type: "text", text: "No index. Run `Graphra generate` first." }] };
280
+ }
281
+ const message = args.message || "";
282
+ const activeFile = args.activeFile || "";
283
+ const AUTO_BUDGET = 2000; // Compact — leaves room for conversation
284
+ log(`auto: "${message.slice(0, 80)}..." file=${activeFile || "none"}`);
285
+ const graph = (0, storage_1.loadGraph)();
286
+ const resolvedFile = activeFile ? path.resolve(activeFile) : "";
287
+ const queryEmbedding = await (0, neuralEmbedder_1.embed)(message);
288
+ // 1. Get neighbors of active file (architecture)
289
+ const neighbors = new Set();
290
+ if (resolvedFile) {
291
+ for (const [src, targets] of Object.entries(graph)) {
292
+ if (src === resolvedFile)
293
+ targets.forEach((t) => neighbors.add(t));
294
+ if (targets.includes(resolvedFile))
295
+ neighbors.add(src);
296
+ }
297
+ neighbors.add(resolvedFile);
298
+ }
299
+ const allChunks = (0, storage_1.getAllChunks)();
300
+ const neighborChunks = resolvedFile ? allChunks.filter((c) => neighbors.has(c.file)) : [];
301
+ // 2. Hybrid search for the message
302
+ const searchResults = (0, search_1.hybridSearch)(message, queryEmbedding, graph, { topK: 10 });
303
+ // 3. Build compact output within budget
304
+ let text = "";
305
+ let tokenCount = 0;
306
+ // Active file's signatures first (most relevant)
307
+ if (neighborChunks.length > 0) {
308
+ const fileChunks = resolvedFile
309
+ ? neighborChunks.filter((c) => c.file === resolvedFile)
310
+ : [];
311
+ const depChunks = neighborChunks.filter((c) => c.file !== resolvedFile);
312
+ if (fileChunks.length > 0) {
313
+ text += "CURRENT FILE:\n";
314
+ for (const c of fileChunks.slice(0, 8)) {
315
+ const line = ` ${c.name}: ${c.signature}\n`;
316
+ const lt = (0, tokenBudget_1.estimateTokens)(line);
317
+ if (tokenCount + lt > AUTO_BUDGET * 0.3)
318
+ break;
319
+ text += line;
320
+ tokenCount += lt;
321
+ }
322
+ }
323
+ if (depChunks.length > 0) {
324
+ text += "\nDEPENDENCIES:\n";
325
+ for (const c of depChunks.slice(0, 5)) {
326
+ const short = c.file.split(/[/\\]/).slice(-2).join("/");
327
+ const line = ` ${short}: ${c.signature}\n`;
328
+ const lt = (0, tokenBudget_1.estimateTokens)(line);
329
+ if (tokenCount + lt > AUTO_BUDGET * 0.5)
330
+ break;
331
+ text += line;
332
+ tokenCount += lt;
333
+ }
334
+ }
335
+ }
336
+ // Search results
337
+ text += "\nRELATED:\n";
338
+ const seen = new Set(neighborChunks.map((c) => c.id));
339
+ for (const r of searchResults) {
340
+ if (seen.has(r.chunk.id))
341
+ continue;
342
+ seen.add(r.chunk.id);
343
+ const short = r.chunk.file.split(/[/\\]/).slice(-2).join("/");
344
+ const sig = r.chunk.signature || r.chunk.name;
345
+ const line = ` ${short}: ${sig}\n`;
346
+ const lt = (0, tokenBudget_1.estimateTokens)(line);
347
+ if (tokenCount + lt > AUTO_BUDGET)
348
+ break;
349
+ text += line;
350
+ tokenCount += lt;
351
+ }
352
+ log(`auto: ~${tokenCount} tokens`);
353
+ return { content: [{ type: "text", text }] };
354
+ }
355
+ // --- Graphra_stats ---
356
+ if (name === "Graphra_stats") {
357
+ const db = (0, storage_1.getDb)();
358
+ const chunks = db.prepare("SELECT COUNT(*) as c FROM chunks").get().c;
359
+ const embs = db.prepare("SELECT COUNT(*) as c FROM embeddings").get().c;
360
+ const edges = db.prepare("SELECT COUNT(*) as c FROM graph").get().c;
361
+ const files = db.prepare("SELECT COUNT(DISTINCT file) as c FROM chunks").get().c;
362
+ return { content: [{ type: "text", text: `Files: ${files}, Chunks: ${chunks}, Embeddings: ${embs}, Graph: ${edges} edges` }] };
363
+ }
364
+ return { content: [{ type: "text", text: `Unknown tool: ${name}` }], isError: true };
365
+ });
366
+ // ============================================
367
+ // Connect via stdio transport
368
+ // ============================================
369
+ const transport = new stdio_js_1.StdioServerTransport();
370
+ await server.connect(transport);
371
+ log("Graphra MCP server running on stdio");
372
+ }
373
+ //# sourceMappingURL=mcpServer.js.map
@@ -0,0 +1 @@
1
+ {"version":3,"file":"mcpServer.js","sourceRoot":"","sources":["../src/mcpServer.ts"],"names":[],"mappings":";AAAA;;;;;;;;;;;GAWG;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;AAkBH,wCAqVC;AArWD,wEAAmE;AACnE,wEAAiF;AACjF,2CAA6B;AAC7B,qDAAyC;AACzC,qCAAyD;AACzD,uCAEmB;AACnB,+CAAyE;AAEzE,sEAAsE;AACtE,MAAM,GAAG,GAAG,CAAC,GAAG,IAAW,EAAE,EAAE,CAAC,OAAO,CAAC,KAAK,CAAC,WAAW,EAAE,GAAG,IAAI,CAAC,CAAC;AAEpE,2EAA2E;AAC3E,MAAM,kBAAkB,GAAG,IAAI,CAAC;AAEzB,KAAK,UAAU,cAAc;IAClC,MAAM,MAAM,GAAG,IAAI,iBAAM,CACvB,EAAE,IAAI,EAAE,SAAS,EAAE,OAAO,EAAE,OAAO,EAAE,EACrC;QACE,YAAY,EAAE;YACZ,KAAK,EAAE,EAAE;SACV;KACF,CACF,CAAC;IAEF,+CAA+C;IAC/C,oBAAoB;IACpB,+CAA+C;IAC/C,MAAM,CAAC,iBAAiB,CACtB,EAAE,MAAM,EAAE,YAAY,EAAS,EAC/B,KAAK,IAAI,EAAE,CAAC,CAAC;QACX,KAAK,EAAE;YACL;gBACE,IAAI,EAAE,gBAAgB;gBACtB,WAAW,EACT,qFAAqF;oBACrF,uFAAuF;oBACvF,yDAAyD;gBAC3D,WAAW,EAAE;oBACX,IAAI,EAAE,QAAiB;oBACvB,UAAU,EAAE;wBACV,KAAK,EAAE,EAAE,IAAI,EAAE,QAAQ,EAAE,WAAW,EAAE,cAAc,EAAE;wBACtD,IAAI,EAAE,EAAE,IAAI,EAAE,QAAQ,EAAE,WAAW,EAAE,mBAAmB,EAAE,OAAO,EAAE,EAAE,EAAE;wBACvE,SAAS,EAAE,EAAE,IAAI,EAAE,QAAQ,EAAE,WAAW,EAAE,wDAAwD,EAAE;wBACpG,KAAK,EAAE,EAAE,IAAI,EAAE,QAAQ,EAAE,WAAW,EAAE,oEAAoE,EAAE;qBAC7G;oBACD,QAAQ,EAAE,CAAC,OAAO,CAAC;iBACpB;aACF;YACD;gBACE,IAAI,EAAE,iBAAiB;gBACvB,WAAW,EACT,gFAAgF;oBAChF,+DAA+D;gBACjE,WAAW,EAAE;oBACX,IAAI,EAAE,QAAiB;oBACvB,UAAU,EAAE;wBACV,IAAI,EAAE,EAAE,IAAI,EAAE,QAAQ,EAAE,WAAW,EAAE,0BAA0B,EAAE;wBACjE,IAAI,EAAE,EAAE,IAAI,EAAE,QAAQ,EAAE,WAAW,EAAE,6BAA6B,EAAE;wBACpE,IAAI,EAAE,EAAE,IAAI,EAAE,QAAQ,EAAE,WAAW,EAAE,aAAa,EAAE,OAAO,EAAE,EAAE,EAAE;wBACjE,SAAS,EAAE,EAAE,IAAI,EAAE,QAAQ,EAAE,WAAW,EAAE,yBAAyB,EAAE;wBACrE,KAAK,EAAE,EAAE,IAAI,EAAE,QAAQ,EAAE,WAAW,EAAE,qCAAqC,EAAE;qBAC9E;oBACD,QAAQ,EAAE,CAAC,MAAM,CAAC;iBACnB;aACF;YACD;gBACE,IAAI,EAAE,iBAAiB;gBACvB,WAAW,EACT,uEAAuE;gBACzE,WAAW,EAAE;oBACX,IAAI,EAAE,QAAiB;oBACvB,UAAU,EAAE,EAAE;iBACf;aACF;YACD;gBACE,IAAI,EAAE,eAAe;gBACrB,WAAW,EAAE,+DAA+D;gBAC5E,WAAW,EAAE;oBACX,IAAI,EAAE,QAAiB;oBACvB,UAAU,EAAE,EAAE;iBACf;aACF;YACD;gBACE,IAAI,EAAE,cAAc;gBACpB,WAAW,EACT,6GAA6G;oBAC
7G,4GAA4G;oBAC5G,iHAAiH;oBACjH,iGAAiG;gBACnG,WAAW,EAAE;oBACX,IAAI,EAAE,QAAiB;oBACvB,UAAU,EAAE;wBACV,OAAO,EAAE,EAAE,IAAI,EAAE,QAAQ,EAAE,WAAW,EAAE,yCAAyC,EAAE;wBACnF,UAAU,EAAE,EAAE,IAAI,EAAE,QAAQ,EAAE,WAAW,EAAE,0DAA0D,EAAE;qBACxG;oBACD,QAAQ,EAAE,CAAC,SAAS,CAAC;iBACtB;aACF;SACF;KACF,CAAC,CACH,CAAC;IAEF,+CAA+C;IAC/C,oBAAoB;IACpB,+CAA+C;IAC/C,MAAM,CAAC,iBAAiB,CACtB,EAAE,MAAM,EAAE,YAAY,EAAS,EAC/B,KAAK,EAAE,OAAY,EAAE,EAAE;QACrB,MAAM,EAAE,IAAI,EAAE,SAAS,EAAE,IAAI,EAAE,GAAG,OAAO,CAAC,MAAM,CAAC;QACjD,GAAG,CAAC,cAAc,IAAI,EAAE,EAAE,IAAI,CAAC,CAAC;QAEhC,yBAAyB;QACzB,IAAI,IAAI,KAAK,gBAAgB,EAAE,CAAC;YAC9B,IAAI,IAAA,uBAAa,GAAE,KAAK,CAAC,EAAE,CAAC;gBAC1B,OAAO,EAAE,OAAO,EAAE,CAAC,EAAE,IAAI,EAAE,MAAM,EAAE,IAAI,EAAE,yCAAyC,EAAE,CAAC,EAAE,CAAC;YAC1F,CAAC;YACD,MAAM,KAAK,GAAG,IAAA,mBAAS,GAAE,CAAC;YAC1B,MAAM,cAAc,GAAG,MAAM,IAAA,sBAAK,EAAC,IAAI,CAAC,KAAK,CAAC,CAAC;YAC/C,MAAM,MAAM,GAAG,IAAI,CAAC,SAAS,IAAI,IAAA,4BAAc,EAAC,IAAI,CAAC,KAAK,CAAC,IAAI,kBAAkB,CAAC;YAClF,MAAM,OAAO,GAAG,IAAA,qBAAY,EAAC,IAAI,CAAC,KAAK,EAAE,cAAc,EAAE,KAAK,EAAE,EAAE,IAAI,EAAE,IAAI,CAAC,IAAI,IAAI,EAAE,EAAE,CAAC,CAAC;YAE3F,IAAI,OAAO,CAAC,MAAM,KAAK,CAAC,EAAE,CAAC;gBACzB,OAAO,EAAE,OAAO,EAAE,CAAC,EAAE,IAAI,EAAE,MAAM,EAAE,IAAI,EAAE,mBAAmB,EAAE,CAAC,EAAE,CAAC;YACpE,CAAC;YAED,+DAA+D;YAC/D,MAAM,KAAK,GAAa,EAAE,CAAC;YAC3B,IAAI,UAAU,GAAG,CAAC,CAAC;YACnB,KAAK,MAAM,CAAC,IAAI,OAAO,EAAE,CAAC;gBACxB,MAAM,KAAK,GAAG,CAAC,CAAC,KAAK,CAAC,IAAI,CAAC,KAAK,CAAC,OAAO,CAAC,CAAC,KAAK,CAAC,CAAC,CAAC,CAAC,CAAC,IAAI,CAAC,GAAG,CAAC,CAAC;gBAC9D,MAAM,GAAG,GAAI,CAAC,CAAC,KAAa,CAAC,SAAS,IAAI,CAAC,CAAC,KAAK,CAAC,IAAI,CAAC;gBACvD,MAAM,IAAI,GAAG,GAAG,KAAK,KAAK,GAAG,EAAE,CAAC;gBAChC,MAAM,UAAU,GAAG,IAAA,4BAAc,EAAC,IAAI,CAAC,CAAC;gBACxC,IAAI,UAAU,GAAG,UAAU,GAAG,MAAM;oBAAE,MAAM;gBAC5C,KAAK,CAAC,IAAI,CAAC,IAAI,CAAC,CAAC;gBACjB,UAAU,IAAI,UAAU,CAAC;YAC3B,CAAC;YAED,MAAM,IAAI,GAAG,KAAK,CAAC,IAAI,CAAC,IAAI,CAAC,CAAC;YAC9B,GAAG,CAAC,oBAAoB,KAAK,CAAC,MAAM,cAAc,UAAU,oBAAoB,MAAM,GAAG,CAAC,CAAC;YAC3F,OAAO,EAAE,OAAO,EAAE,CAAC,EAAE,IAAI,EAAE,MAAM,EAAE,IAAI,EAAE,CAAC,EAAE,CAA
C;QAC/C,CAAC;QAED,0BAA0B;QAC1B,IAAI,IAAI,KAAK,iBAAiB,EAAE,CAAC;YAC/B,IAAI,IAAA,uBAAa,GAAE,KAAK,CAAC,EAAE,CAAC;gBAC1B,OAAO,EAAE,OAAO,EAAE,CAAC,EAAE,IAAI,EAAE,MAAM,EAAE,IAAI,EAAE,yCAAyC,EAAE,CAAC,EAAE,CAAC;YAC1F,CAAC;YACD,MAAM,KAAK,GAAG,IAAA,mBAAS,GAAE,CAAC;YAC1B,MAAM,YAAY,GAAG,IAAI,CAAC,IAAI,CAAC,CAAC,CAAC,IAAI,CAAC,OAAO,CAAC,IAAI,CAAC,IAAI,CAAC,CAAC,CAAC,CAAC,EAAE,CAAC;YAC9D,MAAM,cAAc,GAAG,MAAM,IAAA,sBAAK,EAAC,IAAI,CAAC,IAAI,CAAC,CAAC;YAE9C,MAAM,SAAS,GAAG,IAAI,GAAG,EAAU,CAAC;YACpC,IAAI,YAAY,EAAE,CAAC;gBACjB,KAAK,MAAM,CAAC,GAAG,EAAE,OAAO,CAAC,IAAI,MAAM,CAAC,OAAO,CAAC,KAAK,CAAC,EAAE,CAAC;oBACnD,IAAI,GAAG,KAAK,YAAY;wBAAE,OAAO,CAAC,OAAO,CAAC,CAAC,CAAS,EAAE,EAAE,CAAC,SAAS,CAAC,GAAG,CAAC,CAAC,CAAC,CAAC,CAAC;oBAC3E,IAAI,OAAO,CAAC,QAAQ,CAAC,YAAY,CAAC;wBAAE,SAAS,CAAC,GAAG,CAAC,GAAG,CAAC,CAAC;gBACzD,CAAC;gBACD,SAAS,CAAC,GAAG,CAAC,YAAY,CAAC,CAAC;YAC9B,CAAC;YAED,MAAM,SAAS,GAAG,IAAA,sBAAY,GAAE,CAAC;YACjC,MAAM,cAAc,GAAG,YAAY,CAAC,CAAC,CAAC,SAAS,CAAC,MAAM,CAAC,CAAC,CAAC,EAAE,EAAE,CAAC,SAAS,CAAC,GAAG,CAAC,CAAC,CAAC,IAAI,CAAC,CAAC,CAAC,CAAC,CAAC,EAAE,CAAC;YAC1F,MAAM,aAAa,GAAG,IAAA,qBAAY,EAAC,IAAI,CAAC,IAAI,EAAE,cAAc,EAAE,KAAK,EAAE,EAAE,IAAI,EAAE,IAAI,CAAC,IAAI,IAAI,EAAE,EAAE,CAAC,CAAC;YAEhG,sBAAsB;YACtB,MAAM,MAAM,GAAG,IAAI,CAAC,SAAS,IAAI,IAAA,4BAAc,EAAC,IAAI,CAAC,KAAK,CAAC,IAAI,kBAAkB,CAAC;YAClF,IAAI,IAAI,GAAG,EAAE,CAAC;YACd,IAAI,UAAU,GAAG,CAAC,CAAC;YAEnB,IAAI,cAAc,CAAC,MAAM,GAAG,CAAC,EAAE,CAAC;gBAC9B,IAAI,IAAI,iBAAiB,CAAC;gBAC1B,KAAK,MAAM,CAAC,IAAI,cAAc,CAAC,KAAK,CAAC,CAAC,EAAE,EAAE,CAAC,EAAE,CAAC;oBAC5C,MAAM,IAAI,GAAG,KAAK,CAAC,CAAC,IAAI,CAAC,KAAK,CAAC,OAAO,CAAC,CAAC,KAAK,CAAC,CAAC,CAAC,CAAC,CAAC,IAAI,CAAC,GAAG,CAAC,KAAK,CAAC,CAAC,SAAS,IAAI,CAAC;oBAChF,MAAM,UAAU,GAAG,IAAA,4BAAc,EAAC,IAAI,CAAC,CAAC;oBACxC,IAAI,UAAU,GAAG,UAAU,GAAG,MAAM,GAAG,GAAG;wBAAE,MAAM,CAAC,+BAA+B;oBAClF,IAAI,IAAI,IAAI,CAAC;oBACb,UAAU,IAAI,UAAU,CAAC;gBAC3B,CAAC;gBACD,IAAI,IAAI,IAAI,CAAC;YACf,CAAC;YAED,IAAI,IAAI,iBAAiB,CAAC;YAC1B,MAAM,IAAI,GAAG,IAAI,GAAG,CAAC,cAAc,CAAC,GAAG,CAAC,CAAC,CAAC,EAAE,EAAE,CAAC,CAAC,CAAC
,EAAE,CAAC,CAAC,CAAC;YACtD,KAAK,MAAM,CAAC,IAAI,aAAa,EAAE,CAAC;gBAC9B,IAAI,IAAI,CAAC,GAAG,CAAC,CAAC,CAAC,KAAK,CAAC,EAAE,CAAC;oBAAE,SAAS;gBACnC,IAAI,CAAC,GAAG,CAAC,CAAC,CAAC,KAAK,CAAC,EAAE,CAAC,CAAC;gBACrB,MAAM,IAAI,GAAG,KAAK,CAAC,CAAC,KAAK,CAAC,IAAI,CAAC,KAAK,CAAC,OAAO,CAAC,CAAC,KAAK,CAAC,CAAC,CAAC,CAAC,CAAC,IAAI,CAAC,GAAG,CAAC,KAAM,CAAC,CAAC,KAAa,CAAC,SAAS,IAAI,CAAC,CAAC,KAAK,CAAC,IAAI,IAAI,CAAC;gBACrH,MAAM,UAAU,GAAG,IAAA,4BAAc,EAAC,IAAI,CAAC,CAAC;gBACxC,IAAI,UAAU,GAAG,UAAU,GAAG,MAAM;oBAAE,MAAM;gBAC5C,IAAI,IAAI,IAAI,CAAC;gBACb,UAAU,IAAI,UAAU,CAAC;YAC3B,CAAC;YAED,GAAG,CAAC,aAAa,UAAU,oBAAoB,MAAM,GAAG,CAAC,CAAC;YAC1D,OAAO,EAAE,OAAO,EAAE,CAAC,EAAE,IAAI,EAAE,MAAM,EAAE,IAAI,EAAE,CAAC,EAAE,CAAC;QAC/C,CAAC;QAED,0BAA0B;QAC1B,IAAI,IAAI,KAAK,iBAAiB,EAAE,CAAC;YAC/B,IAAI,IAAA,uBAAa,GAAE,KAAK,CAAC,EAAE,CAAC;gBAC1B,OAAO,EAAE,OAAO,EAAE,CAAC,EAAE,IAAI,EAAE,MAAM,EAAE,IAAI,EAAE,yCAAyC,EAAE,CAAC,EAAE,CAAC;YAC1F,CAAC;YAED,MAAM,EAAE,GAAG,IAAA,eAAK,GAAE,CAAC;YACnB,MAAM,KAAK,GAAG,IAAA,mBAAS,GAAE,CAAC;YAC1B,MAAM,SAAS,GAAG,IAAA,sBAAY,GAAE,CAAC;YAEjC,MAAM,OAAO,GAAG,IAAI,GAAG,EAAoB,CAAC;YAC5C,KAAK,MAAM,CAAC,IAAI,SAAS,EAAE,CAAC;gBAC1B,MAAM,KAAK,GAAG,CAAC,CAAC,IAAI,CAAC,KAAK,CAAC,OAAO,CAAC,CAAC,KAAK,CAAC,CAAC,CAAC,CAAC,CAAC,IAAI,CAAC,GAAG,CAAC,CAAC;gBACxD,IAAI,CAAC,OAAO,CAAC,GAAG,CAAC,KAAK,CAAC;oBAAE,OAAO,CAAC,GAAG,CAAC,KAAK,EAAE,EAAE,CAAC,CAAC;gBAChD,OAAO,CAAC,GAAG,CAAC,KAAK,CAAE,CAAC,IAAI,CAAC,CAAC,CAAC,IAAI,CAAC,CAAC;YACnC,CAAC;YAED,MAAM,MAAM,GAA6B,EAAE,CAAC;YAC5C,KAAK,MAAM,CAAC,IAAI,CAAC,IAAI,OAAO,EAAE,CAAC;gBAC7B,MAAM,KAAK,GAAG,IAAI,CAAC,WAAW,EAAE,CAAC;gBACjC,IAAI,KAAK,GAAG,OAAO,CAAC;gBACpB,IAAI,KAAK,CAAC,QAAQ,CAAC,YAAY,CAAC;oBAAE,KAAK,GAAG,aAAa,CAAC;qBACnD,IAAI,KAAK,CAAC,QAAQ,CAAC,SAAS,CAAC;oBAAE,KAAK,GAAG,UAAU,CAAC;qBAClD,IAAI,KAAK,CAAC,QAAQ,CAAC,KAAK,CAAC,IAAI,KAAK,CAAC,QAAQ,CAAC,OAAO,CAAC;oBAAE,KAAK,GAAG,aAAa,CAAC;qBAC5E,IAAI,KAAK,CAAC,QAAQ,CAAC,OAAO,CAAC;oBAAE,KAAK,GAAG,QAAQ,CAAC;qBAC9C,IAAI,KAAK,CAAC,QAAQ,CAAC,MAAM,CAAC,IAAI,KAAK,CAAC,QAAQ,CAAC,QAAQ,CAAC;oBAAE,KAAK,GAAG,WAAW,CAAC;gBAC
jF,IAAI,CAAC,MAAM,CAAC,KAAK,CAAC;oBAAE,MAAM,CAAC,KAAK,CAAC,GAAG,EAAE,CAAC;gBACvC,MAAM,CAAC,KAAK,CAAC,CAAC,IAAI,CAAC,IAAI,CAAC,CAAC;YAC3B,CAAC;YAED,MAAM,QAAQ,GAAG,IAAA,wBAAe,EAAC,KAAK,CAAC,CAAC;YACxC,MAAM,QAAQ,GAAG,KAAK,CAAC,IAAI,CAAC,QAAQ,CAAC,OAAO,EAAE,CAAC,CAAC,IAAI,CAAC,CAAC,CAAC,EAAE,CAAC,EAAE,EAAE,CAAC,CAAC,CAAC,CAAC,CAAC,GAAG,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,KAAK,CAAC,CAAC,EAAE,CAAC,CAAC,CAAC;YAExF,MAAM,SAAS,GAAI,EAAE,CAAC,OAAO,CAAC,8CAA8C,CAAC,CAAC,GAAG,EAAU,CAAC,CAAC,CAAC;YAC9F,IAAI,IAAI,GAAG,aAAa,SAAS,WAAW,SAAS,CAAC,MAAM,eAAe,CAAC;YAC5E,IAAI,IAAI,WAAW,CAAC;YACpB,KAAK,MAAM,CAAC,KAAK,EAAE,KAAK,CAAC,IAAI,MAAM,CAAC,OAAO,CAAC,MAAM,CAAC,EAAE,CAAC;gBACpD,IAAI,KAAK,CAAC,MAAM,KAAK,CAAC;oBAAE,SAAS;gBACjC,IAAI,IAAI,KAAK,KAAK,KAAK,KAAK,CAAC,MAAM,UAAU,CAAC;YAChD,CAAC;YACD,IAAI,IAAI,gBAAgB,CAAC;YACzB,KAAK,MAAM,CAAC,IAAI,EAAE,IAAI,CAAC,IAAI,QAAQ,EAAE,CAAC;gBACpC,IAAI,IAAI,KAAK,IAAI,CAAC,KAAK,CAAC,OAAO,CAAC,CAAC,KAAK,CAAC,CAAC,CAAC,CAAC,CAAC,IAAI,CAAC,GAAG,CAAC,KAAK,IAAI,CAAC,OAAO,CAAC,CAAC,CAAC,IAAI,CAAC;YAC/E,CAAC;YAED,iBAAiB;YACjB,MAAM,MAAM,GAAG,IAAA,sBAAQ,EAAC,IAAI,EAAE,kBAAkB,CAAC,CAAC;YAClD,GAAG,CAAC,aAAa,MAAM,CAAC,MAAM,UAAU,MAAM,CAAC,SAAS,CAAC,CAAC,CAAC,cAAc,CAAC,CAAC,CAAC,EAAE,EAAE,CAAC,CAAC;YAClF,OAAO,EAAE,OAAO,EAAE,CAAC,EAAE,IAAI,EAAE,MAAM,EAAE,IAAI,EAAE,MAAM,CAAC,IAAI,EAAE,CAAC,EAAE,CAAC;QAC5D,CAAC;QAED,gEAAgE;QAChE,IAAI,IAAI,KAAK,cAAc,EAAE,CAAC;YAC5B,IAAI,IAAA,uBAAa,GAAE,KAAK,CAAC,EAAE,CAAC;gBAC1B,OAAO,EAAE,OAAO,EAAE,CAAC,EAAE,IAAI,EAAE,MAAM,EAAE,IAAI,EAAE,yCAAyC,EAAE,CAAC,EAAE,CAAC;YAC1F,CAAC;YAED,MAAM,OAAO,GAAG,IAAI,CAAC,OAAO,IAAI,EAAE,CAAC;YACnC,MAAM,UAAU,GAAG,IAAI,CAAC,UAAU,IAAI,EAAE,CAAC;YACzC,MAAM,WAAW,GAAG,IAAI,CAAC,CAAC,yCAAyC;YAEnE,GAAG,CAAC,UAAU,OAAO,CAAC,KAAK,CAAC,CAAC,EAAE,EAAE,CAAC,aAAa,UAAU,IAAI,MAAM,EAAE,CAAC,CAAC;YAEvE,MAAM,KAAK,GAAG,IAAA,mBAAS,GAAE,CAAC;YAC1B,MAAM,YAAY,GAAG,UAAU,CAAC,CAAC,CAAC,IAAI,CAAC,OAAO,CAAC,UAAU,CAAC,CAAC,CAAC,CAAC,EAAE,CAAC;YAChE,MAAM,cAAc,GAAG,MAAM,IAAA,sBAAK,EAAC,OAAO,CAAC,CAAC;YAE5C,iDAAiD;YACjD,MAAM,SAAS,GAAG,IAAI,GAAG,
EAAU,CAAC;YACpC,IAAI,YAAY,EAAE,CAAC;gBACjB,KAAK,MAAM,CAAC,GAAG,EAAE,OAAO,CAAC,IAAI,MAAM,CAAC,OAAO,CAAC,KAAK,CAAC,EAAE,CAAC;oBACnD,IAAI,GAAG,KAAK,YAAY;wBAAE,OAAO,CAAC,OAAO,CAAC,CAAC,CAAS,EAAE,EAAE,CAAC,SAAS,CAAC,GAAG,CAAC,CAAC,CAAC,CAAC,CAAC;oBAC3E,IAAI,OAAO,CAAC,QAAQ,CAAC,YAAY,CAAC;wBAAE,SAAS,CAAC,GAAG,CAAC,GAAG,CAAC,CAAC;gBACzD,CAAC;gBACD,SAAS,CAAC,GAAG,CAAC,YAAY,CAAC,CAAC;YAC9B,CAAC;YAED,MAAM,SAAS,GAAG,IAAA,sBAAY,GAAE,CAAC;YACjC,MAAM,cAAc,GAAG,YAAY,CAAC,CAAC,CAAC,SAAS,CAAC,MAAM,CAAC,CAAC,CAAC,EAAE,EAAE,CAAC,SAAS,CAAC,GAAG,CAAC,CAAC,CAAC,IAAI,CAAC,CAAC,CAAC,CAAC,CAAC,EAAE,CAAC;YAE1F,mCAAmC;YACnC,MAAM,aAAa,GAAG,IAAA,qBAAY,EAAC,OAAO,EAAE,cAAc,EAAE,KAAK,EAAE,EAAE,IAAI,EAAE,EAAE,EAAE,CAAC,CAAC;YAEjF,wCAAwC;YACxC,IAAI,IAAI,GAAG,EAAE,CAAC;YACd,IAAI,UAAU,GAAG,CAAC,CAAC;YAEnB,iDAAiD;YACjD,IAAI,cAAc,CAAC,MAAM,GAAG,CAAC,EAAE,CAAC;gBAC9B,MAAM,UAAU,GAAG,YAAY;oBAC7B,CAAC,CAAC,cAAc,CAAC,MAAM,CAAC,CAAC,CAAC,EAAE,EAAE,CAAC,CAAC,CAAC,IAAI,KAAK,YAAY,CAAC;oBACvD,CAAC,CAAC,EAAE,CAAC;gBACP,MAAM,SAAS,GAAG,cAAc,CAAC,MAAM,CAAC,CAAC,CAAC,EAAE,EAAE,CAAC,CAAC,CAAC,IAAI,KAAK,YAAY,CAAC,CAAC;gBAExE,IAAI,UAAU,CAAC,MAAM,GAAG,CAAC,EAAE,CAAC;oBAC1B,IAAI,IAAI,iBAAiB,CAAC;oBAC1B,KAAK,MAAM,CAAC,IAAI,UAAU,CAAC,KAAK,CAAC,CAAC,EAAE,CAAC,CAAC,EAAE,CAAC;wBACvC,MAAM,IAAI,GAAG,KAAK,CAAC,CAAC,IAAI,KAAK,CAAC,CAAC,SAAS,IAAI,CAAC;wBAC7C,MAAM,EAAE,GAAG,IAAA,4BAAc,EAAC,IAAI,CAAC,CAAC;wBAChC,IAAI,UAAU,GAAG,EAAE,GAAG,WAAW,GAAG,GAAG;4BAAE,MAAM;wBAC/C,IAAI,IAAI,IAAI,CAAC;wBACb,UAAU,IAAI,EAAE,CAAC;oBACnB,CAAC;gBACH,CAAC;gBAED,IAAI,SAAS,CAAC,MAAM,GAAG,CAAC,EAAE,CAAC;oBACzB,IAAI,IAAI,mBAAmB,CAAC;oBAC5B,KAAK,MAAM,CAAC,IAAI,SAAS,CAAC,KAAK,CAAC,CAAC,EAAE,CAAC,CAAC,EAAE,CAAC;wBACtC,MAAM,KAAK,GAAG,CAAC,CAAC,IAAI,CAAC,KAAK,CAAC,OAAO,CAAC,CAAC,KAAK,CAAC,CAAC,CAAC,CAAC,CAAC,IAAI,CAAC,GAAG,CAAC,CAAC;wBACxD,MAAM,IAAI,GAAG,KAAK,KAAK,KAAK,CAAC,CAAC,SAAS,IAAI,CAAC;wBAC5C,MAAM,EAAE,GAAG,IAAA,4BAAc,EAAC,IAAI,CAAC,CAAC;wBAChC,IAAI,UAAU,GAAG,EAAE,GAAG,WAAW,GAAG,GAAG;4BAAE,MAAM;wBAC/C,IAAI,IAAI,IAAI,CAAC;wBACb,UAAU,IAAI,EAAE,CAAC;oBACnB,C
AAC;gBACH,CAAC;YACH,CAAC;YAED,iBAAiB;YACjB,IAAI,IAAI,cAAc,CAAC;YACvB,MAAM,IAAI,GAAG,IAAI,GAAG,CAAC,cAAc,CAAC,GAAG,CAAC,CAAC,CAAC,EAAE,EAAE,CAAC,CAAC,CAAC,EAAE,CAAC,CAAC,CAAC;YACtD,KAAK,MAAM,CAAC,IAAI,aAAa,EAAE,CAAC;gBAC9B,IAAI,IAAI,CAAC,GAAG,CAAC,CAAC,CAAC,KAAK,CAAC,EAAE,CAAC;oBAAE,SAAS;gBACnC,IAAI,CAAC,GAAG,CAAC,CAAC,CAAC,KAAK,CAAC,EAAE,CAAC,CAAC;gBACrB,MAAM,KAAK,GAAG,CAAC,CAAC,KAAK,CAAC,IAAI,CAAC,KAAK,CAAC,OAAO,CAAC,CAAC,KAAK,CAAC,CAAC,CAAC,CAAC,CAAC,IAAI,CAAC,GAAG,CAAC,CAAC;gBAC9D,MAAM,GAAG,GAAI,CAAC,CAAC,KAAa,CAAC,SAAS,IAAI,CAAC,CAAC,KAAK,CAAC,IAAI,CAAC;gBACvD,MAAM,IAAI,GAAG,KAAK,KAAK,KAAK,GAAG,IAAI,CAAC;gBACpC,MAAM,EAAE,GAAG,IAAA,4BAAc,EAAC,IAAI,CAAC,CAAC;gBAChC,IAAI,UAAU,GAAG,EAAE,GAAG,WAAW;oBAAE,MAAM;gBACzC,IAAI,IAAI,IAAI,CAAC;gBACb,UAAU,IAAI,EAAE,CAAC;YACnB,CAAC;YAED,GAAG,CAAC,UAAU,UAAU,SAAS,CAAC,CAAC;YACnC,OAAO,EAAE,OAAO,EAAE,CAAC,EAAE,IAAI,EAAE,MAAM,EAAE,IAAI,EAAE,CAAC,EAAE,CAAC;QAC/C,CAAC;QAED,wBAAwB;QACxB,IAAI,IAAI,KAAK,eAAe,EAAE,CAAC;YAC7B,MAAM,EAAE,GAAG,IAAA,eAAK,GAAE,CAAC;YACnB,MAAM,MAAM,GAAI,EAAE,CAAC,OAAO,CAAC,kCAAkC,CAAC,CAAC,GAAG,EAAU,CAAC,CAAC,CAAC;YAC/E,MAAM,IAAI,GAAI,EAAE,CAAC,OAAO,CAAC,sCAAsC,CAAC,CAAC,GAAG,EAAU,CAAC,CAAC,CAAC;YACjF,MAAM,KAAK,GAAI,EAAE,CAAC,OAAO,CAAC,iCAAiC,CAAC,CAAC,GAAG,EAAU,CAAC,CAAC,CAAC;YAC7E,MAAM,KAAK,GAAI,EAAE,CAAC,OAAO,CAAC,8CAA8C,CAAC,CAAC,GAAG,EAAU,CAAC,CAAC,CAAC;YAC1F,OAAO,EAAE,OAAO,EAAE,CAAC,EAAE,IAAI,EAAE,MAAM,EAAE,IAAI,EAAE,UAAU,KAAK,aAAa,MAAM,iBAAiB,IAAI,YAAY,KAAK,QAAQ,EAAE,CAAC,EAAE,CAAC;QACjI,CAAC;QAED,OAAO,EAAE,OAAO,EAAE,CAAC,EAAE,IAAI,EAAE,MAAM,EAAE,IAAI,EAAE,iBAAiB,IAAI,EAAE,EAAE,CAAC,EAAE,OAAO,EAAE,IAAI,EAAE,CAAC;IACvF,CAAC,CACF,CAAC;IAEF,+CAA+C;IAC/C,8BAA8B;IAC9B,+CAA+C;IAC/C,MAAM,SAAS,GAAG,IAAI,+BAAoB,EAAE,CAAC;IAC7C,MAAM,MAAM,CAAC,OAAO,CAAC,SAAS,CAAC,CAAC;IAChC,GAAG,CAAC,qCAAqC,CAAC,CAAC;AAC7C,CAAC"}
@@ -0,0 +1,21 @@
1
/**
 * Neural Embedder — uses TransformersJS (all-MiniLM-L6-v2) for local embeddings.
 *
 * Produces 384-dimensional vectors that understand semantic meaning:
 *    "authentication" ≈ "login" ≈ "sign in"
 *
 * 100% local, no API keys, runs in Node.js.
 * First call downloads the model (~23MB), subsequent calls use the cache.
 */
/**
 * Generate a 384-dim embedding vector for a text string.
 * Uses all-MiniLM-L6-v2 — same model as Continue.dev.
 *
 * @param text Text to embed.
 * @returns Mean-pooled, normalized embedding as a number array.
 */
export declare function embed(text: string): Promise<number[]>;
/**
 * Embed multiple texts, one embedding per input, in input order.
 *
 * @param texts     Strings to embed.
 * @param batchSize Optional batch size (implementation default: 32).
 */
export declare function embedBatch(texts: string[], batchSize?: number): Promise<number[][]>;
/** Check if the model is already cached (best-effort cache-directory probe). */
export declare function isModelCached(): Promise<boolean>;
//# sourceMappingURL=neuralEmbedder.d.ts.map
@@ -0,0 +1 @@
1
+ {"version":3,"file":"neuralEmbedder.d.ts","sourceRoot":"","sources":["../src/neuralEmbedder.ts"],"names":[],"mappings":"AAAA;;;;;;;;GAQG;AAkBH;;;GAGG;AACH,wBAAsB,KAAK,CAAC,IAAI,EAAE,MAAM,GAAG,OAAO,CAAC,MAAM,EAAE,CAAC,CAI3D;AAED;;GAEG;AACH,wBAAsB,UAAU,CAAC,KAAK,EAAE,MAAM,EAAE,EAAE,SAAS,GAAE,MAAW,GAAG,OAAO,CAAC,MAAM,EAAE,EAAE,CAAC,CAa7F;AAED,2CAA2C;AAC3C,wBAAsB,aAAa,IAAI,OAAO,CAAC,OAAO,CAAC,CAUtD"}
@@ -0,0 +1,98 @@
1
+ "use strict";
2
+ /**
3
+ * Neural Embedder — uses TransformersJS (all-MiniLM-L6-v2) for local embeddings.
4
+ *
5
+ * Produces 384-dimensional vectors that understand semantic meaning:
6
+ * "authentication" ≈ "login" ≈ "sign in"
7
+ *
8
+ * 100% local, no API keys, runs in Node.js.
9
+ * First call downloads the model (~23MB), subsequent calls use the cache.
10
+ */
11
// ---------------------------------------------------------------------------
// tsc-generated CommonJS interop helpers (esModuleInterop). These support the
// `Promise.resolve().then(() => __importStar(require(...)))` pattern below,
// which is how the compiler lowers `await import(...)` for a CommonJS target.
// Generated code — do not edit by hand; regenerate from src/neuralEmbedder.ts.
// ---------------------------------------------------------------------------
var __createBinding = (this && this.__createBinding) || (Object.create ? (function(o, m, k, k2) {
    if (k2 === undefined) k2 = k;
    var desc = Object.getOwnPropertyDescriptor(m, k);
    if (!desc || ("get" in desc ? !m.__esModule : desc.writable || desc.configurable)) {
      desc = { enumerable: true, get: function() { return m[k]; } };
    }
    Object.defineProperty(o, k2, desc);
}) : (function(o, m, k, k2) {
    if (k2 === undefined) k2 = k;
    o[k2] = m[k];
}));
var __setModuleDefault = (this && this.__setModuleDefault) || (Object.create ? (function(o, v) {
    Object.defineProperty(o, "default", { enumerable: true, value: v });
}) : function(o, v) {
    o["default"] = v;
});
var __importStar = (this && this.__importStar) || (function () {
    var ownKeys = function(o) {
        ownKeys = Object.getOwnPropertyNames || function (o) {
            var ar = [];
            for (var k in o) if (Object.prototype.hasOwnProperty.call(o, k)) ar[ar.length] = k;
            return ar;
        };
        return ownKeys(o);
    };
    return function (mod) {
        if (mod && mod.__esModule) return mod;
        var result = {};
        if (mod != null) for (var k = ownKeys(mod), i = 0; i < k.length; i++) if (k[i] !== "default") __createBinding(result, mod, k[i]);
        __setModuleDefault(result, mod);
        return result;
    };
})();
44
Object.defineProperty(exports, "__esModule", { value: true });
exports.embed = embed;
exports.embedBatch = embedBatch;
exports.isModelCached = isModelCached;
// Module-level singleton: the loaded feature-extraction pipeline, plus the
// in-flight load promise so concurrent first calls share a single download.
// (The previous unused `pipeline` variable has been removed.)
let extractor = null;
let extractorPromise = null;
/**
 * Initialize the embedding model (lazy; the first call downloads it).
 * Subsequent calls return the cached pipeline. Concurrent callers during the
 * initial load all await the same promise instead of each starting their own
 * model download.
 */
async function getExtractor() {
    if (extractor)
        return extractor;
    if (!extractorPromise) {
        // Dynamic import (CJS-lowered form) for ESM compatibility.
        extractorPromise = Promise.resolve()
            .then(() => __importStar(require("@xenova/transformers")))
            .then(async ({ pipeline: createPipeline }) => {
                extractor = await createPipeline("feature-extraction", "Xenova/all-MiniLM-L6-v2", {
                    quantized: true, // Use quantized model for speed
                });
                return extractor;
            });
    }
    return extractorPromise;
}
61
/**
 * Generate a 384-dim embedding vector for a text string.
 * Uses all-MiniLM-L6-v2 — same model as Continue.dev.
 */
async function embed(text) {
    const model = await getExtractor();
    const tensor = await model(text, { pooling: "mean", normalize: true });
    // tensor.data is a typed array; spread it into a plain number[].
    return [...tensor.data];
}
70
/**
 * Embed multiple texts, returning one 384-dim vector per input, in order.
 *
 * NOTE(review): the previous implementation claimed batching was "more
 * efficient than one-by-one", but it awaited the model once per text — the
 * batch slicing had no observable effect. The slicing has been removed;
 * behavior (outputs, ordering) is unchanged. True batched inference would
 * require passing the whole array to the pipeline and splitting the
 * returned tensor.
 *
 * @param texts     Strings to embed.
 * @param batchSize Retained for backward compatibility; currently unused.
 * @returns One embedding array per input text.
 */
async function embedBatch(texts, batchSize = 32) {
    const ext = await getExtractor();
    const results = [];
    for (const text of texts) {
        const output = await ext(text, { pooling: "mean", normalize: true });
        results.push(Array.from(output.data));
    }
    return results;
}
85
/**
 * Check if the model is already cached.
 *
 * NOTE(review): this is a best-effort probe — it only tests that the
 * Hugging Face cache directory exists, not that all-MiniLM-L6-v2 itself
 * has been downloaded, so any prior HF download makes it return true.
 * Also, @xenova/transformers may cache under its own directory rather
 * than ~/.cache/huggingface — verify against the library's cache config.
 */
async function isModelCached() {
    try {
        // fs/path/os loaded lazily via the CJS-lowered dynamic-import pattern.
        const fs = await Promise.resolve().then(() => __importStar(require("fs")));
        const path = await Promise.resolve().then(() => __importStar(require("path")));
        const os = await Promise.resolve().then(() => __importStar(require("os")));
        const cacheDir = path.join(os.homedir(), ".cache", "huggingface");
        return fs.existsSync(cacheDir);
    }
    catch {
        // Any failure (restricted fs, missing homedir) reports "not cached".
        return false;
    }
}
98
+ //# sourceMappingURL=neuralEmbedder.js.map
@@ -0,0 +1 @@
1
+ {"version":3,"file":"neuralEmbedder.js","sourceRoot":"","sources":["../src/neuralEmbedder.ts"],"names":[],"mappings":";AAAA;;;;;;;;GAQG;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;AAsBH,sBAIC;AAKD,gCAaC;AAGD,sCAUC;AAvDD,IAAI,QAAQ,GAAQ,IAAI,CAAC;AACzB,IAAI,SAAS,GAAQ,IAAI,CAAC;AAE1B,kEAAkE;AAClE,KAAK,UAAU,YAAY;IACzB,IAAI,SAAS;QAAE,OAAO,SAAS,CAAC;IAEhC,uCAAuC;IACvC,MAAM,EAAE,QAAQ,EAAE,cAAc,EAAE,GAAG,wDAAa,sBAAsB,GAAC,CAAC;IAC1E,SAAS,GAAG,MAAM,cAAc,CAAC,oBAAoB,EAAE,yBAAyB,EAAE;QAChF,SAAS,EAAE,IAAI,EAAE,gCAAgC;KAClD,CAAC,CAAC;IAEH,OAAO,SAAS,CAAC;AACnB,CAAC;AAED;;;GAGG;AACI,KAAK,UAAU,KAAK,CAAC,IAAY;IACtC,MAAM,GAAG,GAAG,MAAM,YAAY,EAAE,CAAC;IACjC,MAAM,MAAM,GAAG,MAAM,GAAG,CAAC,IAAI,EAAE,EAAE,OAAO,EAAE,MAAM,EAAE,SAAS,EAAE,IAAI,EAAE,CAAC,CAAC;IACrE,OAAO,KAAK,CAAC,IAAI,CAAC,MAAM,CAAC,IAAoB,CAAC,CAAC;AACjD,CAAC;AAED;;GAEG;AACI,KAAK,UAAU,UAAU,CAAC,KAAe,EAAE,YAAoB,EAAE;IACtE,MAAM,GAAG,GAAG,MAAM,YAAY,EAAE,CAAC;IACjC,MAAM,OAAO,GAAe,EAAE,CAAC;IAE/B,KAAK,IAAI,CAAC,GAAG,CAAC,EAAE,CAAC,GAAG,KAAK,CAAC,MAAM,EAAE,CAAC,IAAI,SAAS,EAAE,CAAC;QACjD,MAAM,KAAK,GAAG,KAAK,CAAC,KAAK,CAAC,CAAC,EAAE,CAAC,GAAG,SAAS,CAAC,CAAC;QAC5C,KAAK,MAAM,IAAI,IAAI,KAAK,EAAE,CAAC;YACzB,MAAM,MAAM,GAAG,MAAM,GAAG,CAAC,IAAI,EAAE,EAAE,OAAO,EAAE,MAAM,EAAE,SAAS,EAAE,IAAI,EAAE,CAAC,CAAC;YACrE,OAAO,CAAC,IAAI,CAAC,KAAK,CAAC,IAAI,CAAC,MAAM,CAAC,IAAoB,CAAC,CAAC,CAAC;QACxD,CAAC;IACH,CAAC;IAED,OAAO,OAAO,CAAC;AACjB,CAAC;AAED,2CAA2C;AACpC,KAAK,UAAU,aAAa;IACjC,IAAI,CAAC;QACH,MAAM,EAAE,GAAG,wDAAa,IAAI,GAAC,CAAC;QAC9B,MAAM,IAAI,GAAG,wDAAa,MAAM,GAAC,CAAC;QAClC,MAAM,EAAE,GAAG,wDAAa,IAAI,GAAC,CAAC;QAC9B,MAAM,QAAQ,GAAG,IAAI,CAAC,IAAI,CAAC,EAAE,CAAC,OAAO,EAAE,EAAE,QAAQ,EAAE,aAAa,CAAC,CAAC;QAClE,OAAO,EAAE,CAAC,UAAU,CAAC,QAAQ,CAAC,CAAC;IACjC,CAAC;IAAC,MAAM,CAAC;QACP,OAAO,KAAK,CAAC;IACf,CAAC;AACH,CAAC"}
@@ -0,0 +1,3 @@
1
import { ScanConfig } from "./types";
/**
 * Scan the workspace and return absolute paths of matching .ts/.js files.
 * Unspecified config fields fall back to built-in defaults.
 */
export declare function scanFiles(config?: Partial<ScanConfig>): Promise<string[]>;
//# sourceMappingURL=scanner.d.ts.map
@@ -0,0 +1 @@
1
+ {"version":3,"file":"scanner.d.ts","sourceRoot":"","sources":["../src/scanner.ts"],"names":[],"mappings":"AACA,OAAO,EAAE,UAAU,EAAE,MAAM,SAAS,CAAC;AAwBrC,wBAAsB,SAAS,CAC7B,MAAM,GAAE,OAAO,CAAC,UAAU,CAAM,GAC/B,OAAO,CAAC,MAAM,EAAE,CAAC,CAqBnB"}
@@ -0,0 +1,43 @@
1
+ "use strict";
2
+ Object.defineProperty(exports, "__esModule", { value: true });
3
+ exports.scanFiles = scanFiles;
4
+ const globby_1 = require("globby");
5
/**
 * ES-1.1 — File Scanner
 * Scans the workspace and returns relevant source file paths.
 *
 * - Only returns .ts/.js files
 * - Respects include/ignore patterns
 * - Works on large repos
 */
// Defaults used when the caller's config omits include/ignore:
// TypeScript/JavaScript under src/, skipping build output and graphra state.
const DEFAULT_CONFIG = {
    include: ["src/**/*.ts", "src/**/*.js"],
    ignore: ["node_modules/**", "dist/**", ".graphra/**"],
};
/** Patterns that are ALWAYS ignored regardless of user config */
const ALWAYS_IGNORE = [
    "**/node_modules/**",
    "**/dist/**",
    "**/.git/**",
    "**/.graphra/**",
];
24
/**
 * Scan the workspace and return absolute paths of matching source files.
 * Caller-supplied include/ignore patterns fall back to DEFAULT_CONFIG;
 * ALWAYS_IGNORE patterns can never be overridden.
 */
async function scanFiles(config = {}) {
    const includePatterns = config.include ?? DEFAULT_CONFIG.include;
    // Hard-coded ignores always win; a Set collapses duplicate patterns.
    const ignorePatterns = [
        ...new Set([...ALWAYS_IGNORE, ...(config.ignore ?? DEFAULT_CONFIG.ignore)]),
    ];
    const matches = await (0, globby_1.globby)(includePatterns, {
        ignore: ignorePatterns,
        absolute: true,
        onlyFiles: true,
        gitignore: true, // Also respect .gitignore
    });
    // Safety net: keep only .ts/.js even if the glob patterns matched more.
    return matches.filter((file) => [".ts", ".js"].some((ext) => file.endsWith(ext)));
}
43
+ //# sourceMappingURL=scanner.js.map
@@ -0,0 +1 @@
1
+ {"version":3,"file":"scanner.js","sourceRoot":"","sources":["../src/scanner.ts"],"names":[],"mappings":";;AAyBA,8BAuBC;AAhDD,mCAAgC;AAGhC;;;;;;;GAOG;AAEH,MAAM,cAAc,GAAe;IACjC,OAAO,EAAE,CAAC,aAAa,EAAE,aAAa,CAAC;IACvC,MAAM,EAAE,CAAC,iBAAiB,EAAE,SAAS,EAAE,aAAa,CAAC;CACtD,CAAC;AAEF,iEAAiE;AACjE,MAAM,aAAa,GAAG;IACpB,oBAAoB;IACpB,YAAY;IACZ,YAAY;IACZ,gBAAgB;CACjB,CAAC;AAEK,KAAK,UAAU,SAAS,CAC7B,SAA8B,EAAE;IAEhC,MAAM,MAAM,GAAe;QACzB,OAAO,EAAE,MAAM,CAAC,OAAO,IAAI,cAAc,CAAC,OAAO;QACjD,MAAM,EAAE;YACN,GAAG,aAAa;YAChB,GAAG,CAAC,MAAM,CAAC,MAAM,IAAI,cAAc,CAAC,MAAM,CAAC;SAC5C;KACF,CAAC;IAEF,8BAA8B;IAC9B,MAAM,CAAC,MAAM,GAAG,CAAC,GAAG,IAAI,GAAG,CAAC,MAAM,CAAC,MAAM,CAAC,CAAC,CAAC;IAE5C,MAAM,KAAK,GAAG,MAAM,IAAA,eAAM,EAAC,MAAM,CAAC,OAAO,EAAE;QACzC,MAAM,EAAE,MAAM,CAAC,MAAM;QACrB,QAAQ,EAAE,IAAI;QACd,SAAS,EAAE,IAAI;QACf,SAAS,EAAE,IAAI,EAAE,0BAA0B;KAC5C,CAAC,CAAC;IAEH,gDAAgD;IAChD,OAAO,KAAK,CAAC,MAAM,CAAC,CAAC,CAAC,EAAE,EAAE,CAAC,CAAC,CAAC,QAAQ,CAAC,KAAK,CAAC,IAAI,CAAC,CAAC,QAAQ,CAAC,KAAK,CAAC,CAAC,CAAC;AACrE,CAAC"}
@@ -0,0 +1,37 @@
1
/**
 * Hybrid Search Engine — combines:
 *  1. Neural embeddings (TransformersJS all-MiniLM-L6-v2) for semantic search
 *  2. BM25 full-text search for exact keyword matching
 *  3. PageRank importance scores from the dependency graph
 *
 * All 100% local, zero API keys.
 */
import { DependencyGraph, SearchResult } from "./types";
/**
 * Compute PageRank scores for files in the dependency graph.
 * Files that are imported by many other files get higher scores.
 *
 * @param graph      Dependency graph to rank.
 * @param damping    Optional damping factor (implementation supplies a default).
 * @param iterations Optional iteration count (implementation supplies a default).
 * @returns Map of file path → PageRank score.
 */
export declare function computePageRank(graph: DependencyGraph, damping?: number, iterations?: number): Map<string, number>;
/** Tuning knobs for hybridSearch; all optional, with implementation defaults. */
export interface HybridSearchOptions {
    /** Maximum number of results to return. */
    topK?: number;
    /** Relative weight of the BM25 keyword-match score. */
    bm25Weight?: number;
    /** Relative weight of the embedding-similarity score. */
    embeddingWeight?: number;
    /** Relative weight of the PageRank importance score. */
    pageRankWeight?: number;
    /** Presumably a boost for file/symbol-name matches — confirm in search.js. */
    nameBoost?: number;
    /** Relative weight of git recency (recently modified files rank higher). */
    gitRecencyWeight?: number;
}
/**
 * Get git recency scores for files — recently modified files rank higher.
 * Uses `git log` to get last commit timestamp per file.
 */
export declare function getGitRecencyScores(files: string[]): Map<string, number>;
/**
 * Hybrid search combining BM25 + neural embeddings + PageRank.
 *
 * @param query - The search query
 * @param queryEmbedding - Pre-computed embedding for the query (or null to skip)
 * @param graph - Dependency graph for PageRank
 * @param options - Tuning parameters
 */
export declare function hybridSearch(query: string, queryEmbedding: number[] | null, graph: DependencyGraph, options?: HybridSearchOptions): SearchResult[];
//# sourceMappingURL=search.d.ts.map
@@ -0,0 +1 @@
1
+ {"version":3,"file":"search.d.ts","sourceRoot":"","sources":["../src/search.ts"],"names":[],"mappings":"AAAA;;;;;;;GAOG;AAEH,OAAO,EAAS,eAAe,EAAE,YAAY,EAAE,MAAM,SAAS,CAAC;AA2F/D;;;GAGG;AACH,wBAAgB,eAAe,CAC7B,KAAK,EAAE,eAAe,EACtB,OAAO,GAAE,MAAa,EACtB,UAAU,GAAE,MAAW,GACtB,GAAG,CAAC,MAAM,EAAE,MAAM,CAAC,CA2CrB;AAsBD,MAAM,WAAW,mBAAmB;IAClC,IAAI,CAAC,EAAE,MAAM,CAAC;IACd,UAAU,CAAC,EAAE,MAAM,CAAC;IACpB,eAAe,CAAC,EAAE,MAAM,CAAC;IACzB,cAAc,CAAC,EAAE,MAAM,CAAC;IACxB,SAAS,CAAC,EAAE,MAAM,CAAC;IACnB,gBAAgB,CAAC,EAAE,MAAM,CAAC;CAC3B;AAMD;;;GAGG;AACH,wBAAgB,mBAAmB,CAAC,KAAK,EAAE,MAAM,EAAE,GAAG,GAAG,CAAC,MAAM,EAAE,MAAM,CAAC,CAiCxE;AAED;;;;;;;GAOG;AACH,wBAAgB,YAAY,CAC1B,KAAK,EAAE,MAAM,EACb,cAAc,EAAE,MAAM,EAAE,GAAG,IAAI,EAC/B,KAAK,EAAE,eAAe,EACtB,OAAO,GAAE,mBAAwB,GAChC,YAAY,EAAE,CAkFhB"}