src-mcp 1.0.1 → 1.0.3

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (250) hide show
  1. package/README.md +8 -18
  2. package/dist/bin.d.mts +1 -0
  3. package/dist/bin.mjs +205 -0
  4. package/dist/bin.mjs.map +1 -0
  5. package/dist/index.d.mts +1 -0
  6. package/dist/index.mjs +8 -0
  7. package/dist/index.mjs.map +1 -0
  8. package/dist/server-DL8hfycz.mjs +3791 -0
  9. package/dist/server-DL8hfycz.mjs.map +1 -0
  10. package/package.json +10 -9
  11. package/dist/bin.d.ts +0 -3
  12. package/dist/bin.d.ts.map +0 -1
  13. package/dist/bin.js +0 -4
  14. package/dist/bin.js.map +0 -1
  15. package/dist/cli/adapter.d.ts +0 -7
  16. package/dist/cli/adapter.d.ts.map +0 -1
  17. package/dist/cli/adapter.js +0 -39
  18. package/dist/cli/adapter.js.map +0 -1
  19. package/dist/cli/commands/index.d.ts +0 -24
  20. package/dist/cli/commands/index.d.ts.map +0 -1
  21. package/dist/cli/commands/index.js +0 -13
  22. package/dist/cli/commands/index.js.map +0 -1
  23. package/dist/cli/commands/serve.command.d.ts +0 -21
  24. package/dist/cli/commands/serve.command.d.ts.map +0 -1
  25. package/dist/cli/commands/serve.command.js +0 -62
  26. package/dist/cli/commands/serve.command.js.map +0 -1
  27. package/dist/cli/commands/version.command.d.ts +0 -2
  28. package/dist/cli/commands/version.command.d.ts.map +0 -1
  29. package/dist/cli/commands/version.command.js +0 -12
  30. package/dist/cli/commands/version.command.js.map +0 -1
  31. package/dist/cli/index.d.ts +0 -2
  32. package/dist/cli/index.d.ts.map +0 -1
  33. package/dist/cli/index.js +0 -15
  34. package/dist/cli/index.js.map +0 -1
  35. package/dist/cli/parser.d.ts +0 -7
  36. package/dist/cli/parser.d.ts.map +0 -1
  37. package/dist/cli/parser.js +0 -99
  38. package/dist/cli/parser.js.map +0 -1
  39. package/dist/config/index.d.ts +0 -24
  40. package/dist/config/index.d.ts.map +0 -1
  41. package/dist/config/index.js +0 -38
  42. package/dist/config/index.js.map +0 -1
  43. package/dist/core/ast/index.d.ts +0 -82
  44. package/dist/core/ast/index.d.ts.map +0 -1
  45. package/dist/core/ast/index.js +0 -204
  46. package/dist/core/ast/index.js.map +0 -1
  47. package/dist/core/ast/types.d.ts +0 -152
  48. package/dist/core/ast/types.d.ts.map +0 -1
  49. package/dist/core/ast/types.js +0 -5
  50. package/dist/core/ast/types.js.map +0 -1
  51. package/dist/core/constants.d.ts +0 -17
  52. package/dist/core/constants.d.ts.map +0 -1
  53. package/dist/core/constants.js +0 -49
  54. package/dist/core/constants.js.map +0 -1
  55. package/dist/core/embeddings/callgraph.d.ts +0 -98
  56. package/dist/core/embeddings/callgraph.d.ts.map +0 -1
  57. package/dist/core/embeddings/callgraph.js +0 -415
  58. package/dist/core/embeddings/callgraph.js.map +0 -1
  59. package/dist/core/embeddings/chunker.d.ts +0 -37
  60. package/dist/core/embeddings/chunker.d.ts.map +0 -1
  61. package/dist/core/embeddings/chunker.js +0 -298
  62. package/dist/core/embeddings/chunker.js.map +0 -1
  63. package/dist/core/embeddings/client.d.ts +0 -30
  64. package/dist/core/embeddings/client.d.ts.map +0 -1
  65. package/dist/core/embeddings/client.js +0 -65
  66. package/dist/core/embeddings/client.js.map +0 -1
  67. package/dist/core/embeddings/crossfile.d.ts +0 -58
  68. package/dist/core/embeddings/crossfile.d.ts.map +0 -1
  69. package/dist/core/embeddings/crossfile.js +0 -202
  70. package/dist/core/embeddings/crossfile.js.map +0 -1
  71. package/dist/core/embeddings/enricher.d.ts +0 -53
  72. package/dist/core/embeddings/enricher.d.ts.map +0 -1
  73. package/dist/core/embeddings/enricher.js +0 -308
  74. package/dist/core/embeddings/enricher.js.map +0 -1
  75. package/dist/core/embeddings/index.d.ts +0 -13
  76. package/dist/core/embeddings/index.d.ts.map +0 -1
  77. package/dist/core/embeddings/index.js +0 -20
  78. package/dist/core/embeddings/index.js.map +0 -1
  79. package/dist/core/embeddings/reranker.d.ts +0 -41
  80. package/dist/core/embeddings/reranker.d.ts.map +0 -1
  81. package/dist/core/embeddings/reranker.js +0 -117
  82. package/dist/core/embeddings/reranker.js.map +0 -1
  83. package/dist/core/embeddings/store.d.ts +0 -93
  84. package/dist/core/embeddings/store.d.ts.map +0 -1
  85. package/dist/core/embeddings/store.js +0 -304
  86. package/dist/core/embeddings/store.js.map +0 -1
  87. package/dist/core/embeddings/types.d.ts +0 -77
  88. package/dist/core/embeddings/types.d.ts.map +0 -1
  89. package/dist/core/embeddings/types.js +0 -5
  90. package/dist/core/embeddings/types.js.map +0 -1
  91. package/dist/core/embeddings/watcher.d.ts +0 -130
  92. package/dist/core/embeddings/watcher.d.ts.map +0 -1
  93. package/dist/core/embeddings/watcher.js +0 -448
  94. package/dist/core/embeddings/watcher.js.map +0 -1
  95. package/dist/core/fallback/index.d.ts +0 -26
  96. package/dist/core/fallback/index.d.ts.map +0 -1
  97. package/dist/core/fallback/index.js +0 -76
  98. package/dist/core/fallback/index.js.map +0 -1
  99. package/dist/core/parser/index.d.ts +0 -64
  100. package/dist/core/parser/index.d.ts.map +0 -1
  101. package/dist/core/parser/index.js +0 -205
  102. package/dist/core/parser/index.js.map +0 -1
  103. package/dist/core/parser/languages.d.ts +0 -26
  104. package/dist/core/parser/languages.d.ts.map +0 -1
  105. package/dist/core/parser/languages.js +0 -101
  106. package/dist/core/parser/languages.js.map +0 -1
  107. package/dist/core/queries/helpers.d.ts +0 -72
  108. package/dist/core/queries/helpers.d.ts.map +0 -1
  109. package/dist/core/queries/helpers.js +0 -101
  110. package/dist/core/queries/helpers.js.map +0 -1
  111. package/dist/core/queries/index.d.ts +0 -144
  112. package/dist/core/queries/index.d.ts.map +0 -1
  113. package/dist/core/queries/index.js +0 -396
  114. package/dist/core/queries/index.js.map +0 -1
  115. package/dist/core/queries/loader.d.ts +0 -46
  116. package/dist/core/queries/loader.d.ts.map +0 -1
  117. package/dist/core/queries/loader.js +0 -216
  118. package/dist/core/queries/loader.js.map +0 -1
  119. package/dist/core/queries/patterns.d.ts +0 -10
  120. package/dist/core/queries/patterns.d.ts.map +0 -1
  121. package/dist/core/queries/patterns.js +0 -112
  122. package/dist/core/queries/patterns.js.map +0 -1
  123. package/dist/core/symbols/index.d.ts +0 -70
  124. package/dist/core/symbols/index.d.ts.map +0 -1
  125. package/dist/core/symbols/index.js +0 -359
  126. package/dist/core/symbols/index.js.map +0 -1
  127. package/dist/core/unified/index.d.ts +0 -118
  128. package/dist/core/unified/index.d.ts.map +0 -1
  129. package/dist/core/unified/index.js +0 -428
  130. package/dist/core/unified/index.js.map +0 -1
  131. package/dist/core/utils/assets.d.ts +0 -34
  132. package/dist/core/utils/assets.d.ts.map +0 -1
  133. package/dist/core/utils/assets.js +0 -85
  134. package/dist/core/utils/assets.js.map +0 -1
  135. package/dist/core/utils/cache.d.ts +0 -43
  136. package/dist/core/utils/cache.d.ts.map +0 -1
  137. package/dist/core/utils/cache.js +0 -60
  138. package/dist/core/utils/cache.js.map +0 -1
  139. package/dist/core/utils/index.d.ts +0 -7
  140. package/dist/core/utils/index.d.ts.map +0 -1
  141. package/dist/core/utils/index.js +0 -10
  142. package/dist/core/utils/index.js.map +0 -1
  143. package/dist/core/utils/tsconfig.d.ts +0 -34
  144. package/dist/core/utils/tsconfig.d.ts.map +0 -1
  145. package/dist/core/utils/tsconfig.js +0 -173
  146. package/dist/core/utils/tsconfig.js.map +0 -1
  147. package/dist/features/analyze-file/index.d.ts +0 -15
  148. package/dist/features/analyze-file/index.d.ts.map +0 -1
  149. package/dist/features/analyze-file/index.js +0 -164
  150. package/dist/features/analyze-file/index.js.map +0 -1
  151. package/dist/features/get-call-graph/index.d.ts +0 -24
  152. package/dist/features/get-call-graph/index.d.ts.map +0 -1
  153. package/dist/features/get-call-graph/index.js +0 -246
  154. package/dist/features/get-call-graph/index.js.map +0 -1
  155. package/dist/features/get-index-status/index.d.ts +0 -20
  156. package/dist/features/get-index-status/index.d.ts.map +0 -1
  157. package/dist/features/get-index-status/index.js +0 -90
  158. package/dist/features/get-index-status/index.js.map +0 -1
  159. package/dist/features/index-codebase/index.d.ts +0 -24
  160. package/dist/features/index-codebase/index.d.ts.map +0 -1
  161. package/dist/features/index-codebase/index.js +0 -283
  162. package/dist/features/index-codebase/index.js.map +0 -1
  163. package/dist/features/index.d.ts +0 -15
  164. package/dist/features/index.d.ts.map +0 -1
  165. package/dist/features/index.js +0 -28
  166. package/dist/features/index.js.map +0 -1
  167. package/dist/features/info/index.d.ts +0 -19
  168. package/dist/features/info/index.d.ts.map +0 -1
  169. package/dist/features/info/index.js +0 -41
  170. package/dist/features/info/index.js.map +0 -1
  171. package/dist/features/list-symbols/index.d.ts +0 -22
  172. package/dist/features/list-symbols/index.d.ts.map +0 -1
  173. package/dist/features/list-symbols/index.js +0 -74
  174. package/dist/features/list-symbols/index.js.map +0 -1
  175. package/dist/features/parse-ast/index.d.ts +0 -12
  176. package/dist/features/parse-ast/index.d.ts.map +0 -1
  177. package/dist/features/parse-ast/index.js +0 -71
  178. package/dist/features/parse-ast/index.js.map +0 -1
  179. package/dist/features/query-code/index.d.ts +0 -23
  180. package/dist/features/query-code/index.d.ts.map +0 -1
  181. package/dist/features/query-code/index.js +0 -96
  182. package/dist/features/query-code/index.js.map +0 -1
  183. package/dist/features/search-code/index.d.ts +0 -39
  184. package/dist/features/search-code/index.d.ts.map +0 -1
  185. package/dist/features/search-code/index.js +0 -258
  186. package/dist/features/search-code/index.js.map +0 -1
  187. package/dist/features/types.d.ts +0 -14
  188. package/dist/features/types.d.ts.map +0 -1
  189. package/dist/features/types.js +0 -2
  190. package/dist/features/types.js.map +0 -1
  191. package/dist/features/update-index/index.d.ts +0 -24
  192. package/dist/features/update-index/index.d.ts.map +0 -1
  193. package/dist/features/update-index/index.js +0 -358
  194. package/dist/features/update-index/index.js.map +0 -1
  195. package/dist/features/utils/content.d.ts +0 -30
  196. package/dist/features/utils/content.d.ts.map +0 -1
  197. package/dist/features/utils/content.js +0 -49
  198. package/dist/features/utils/content.js.map +0 -1
  199. package/dist/features/utils/index.d.ts +0 -6
  200. package/dist/features/utils/index.d.ts.map +0 -1
  201. package/dist/features/utils/index.js +0 -8
  202. package/dist/features/utils/index.js.map +0 -1
  203. package/dist/features/utils/result.d.ts +0 -37
  204. package/dist/features/utils/result.d.ts.map +0 -1
  205. package/dist/features/utils/result.js +0 -53
  206. package/dist/features/utils/result.js.map +0 -1
  207. package/dist/index.d.ts +0 -2
  208. package/dist/index.d.ts.map +0 -1
  209. package/dist/index.js +0 -4
  210. package/dist/index.js.map +0 -1
  211. package/dist/prompts/index.d.ts +0 -9
  212. package/dist/prompts/index.d.ts.map +0 -1
  213. package/dist/prompts/index.js +0 -188
  214. package/dist/prompts/index.js.map +0 -1
  215. package/dist/resources/index.d.ts +0 -3
  216. package/dist/resources/index.d.ts.map +0 -1
  217. package/dist/resources/index.js +0 -17
  218. package/dist/resources/index.js.map +0 -1
  219. package/dist/server.d.ts +0 -4
  220. package/dist/server.d.ts.map +0 -1
  221. package/dist/server.js +0 -24
  222. package/dist/server.js.map +0 -1
  223. package/dist/tools/adapter.d.ts +0 -4
  224. package/dist/tools/adapter.d.ts.map +0 -1
  225. package/dist/tools/adapter.js +0 -28
  226. package/dist/tools/adapter.js.map +0 -1
  227. package/dist/tools/index.d.ts +0 -5
  228. package/dist/tools/index.d.ts.map +0 -1
  229. package/dist/tools/index.js +0 -9
  230. package/dist/tools/index.js.map +0 -1
  231. package/dist/types/index.d.ts +0 -20
  232. package/dist/types/index.d.ts.map +0 -1
  233. package/dist/types/index.js +0 -2
  234. package/dist/types/index.js.map +0 -1
  235. package/dist/utils/colors.d.ts +0 -24
  236. package/dist/utils/colors.d.ts.map +0 -1
  237. package/dist/utils/colors.js +0 -30
  238. package/dist/utils/colors.js.map +0 -1
  239. package/dist/utils/index.d.ts +0 -4
  240. package/dist/utils/index.d.ts.map +0 -1
  241. package/dist/utils/index.js +0 -4
  242. package/dist/utils/index.js.map +0 -1
  243. package/dist/utils/logger.d.ts +0 -8
  244. package/dist/utils/logger.d.ts.map +0 -1
  245. package/dist/utils/logger.js +0 -57
  246. package/dist/utils/logger.js.map +0 -1
  247. package/dist/utils/spinner.d.ts +0 -11
  248. package/dist/utils/spinner.d.ts.map +0 -1
  249. package/dist/utils/spinner.js +0 -36
  250. package/dist/utils/spinner.js.map +0 -1
@@ -0,0 +1,3791 @@
1
+ import { McpServer } from "@modelcontextprotocol/sdk/server/mcp.js";
2
+ import { StdioServerTransport } from "@modelcontextprotocol/sdk/server/stdio.js";
3
+ import { z } from "zod";
4
+ import * as fs from "node:fs";
5
+ import * as path from "node:path";
6
+ import ignore from "ignore";
7
+ import { Ollama } from "ollama";
8
+ import * as lancedb from "@lancedb/lancedb";
9
+ import pc from "picocolors";
10
+ import "ora";
11
+ import * as crypto from "node:crypto";
12
+ import { existsSync, readFileSync } from "fs";
13
+ import { dirname, join } from "path";
14
+ import { Language, Parser, Query } from "web-tree-sitter";
15
+ import { fileURLToPath } from "url";
16
+ import { watch } from "chokidar";
17
+ import fg from "fast-glob";
18
+ import "@langchain/textsplitters";
19
+
20
//#region src/config/index.ts
/** Static server identity reported through the info tool. */
const config = {
	name: "src-mcp",
	fullName: "SRC (Structured Repo Context)",
	version: "1.0.3",
	description: "MCP server for codebase analysis with Treesitter (SCM queries), AST parsing, and embedding-based indexing"
};
const nodeEnv = process.env.NODE_ENV;
const logLevelEnv = process.env.LOG_LEVEL;
/** Runtime flags derived from NODE_ENV / LOG_LEVEL. */
const ENV = {
	isDev: nodeEnv === "development",
	isProd: nodeEnv === "production",
	logLevel: logLevelEnv ?? "info"
};
/** Parse a numeric env var; fall back when unset, empty, zero, or non-numeric. */
const numFromEnv = (raw, fallback) => Number(raw) || fallback;
/**
 * Embedding configuration with environment variable overrides
 */
const EMBEDDING_CONFIG = {
	ollamaBaseUrl: process.env.OLLAMA_BASE_URL ?? "http://localhost:11434",
	embeddingModel: process.env.EMBEDDING_MODEL ?? "nomic-embed-text",
	embeddingDimensions: numFromEnv(process.env.EMBEDDING_DIMENSIONS, 768),
	defaultChunkSize: numFromEnv(process.env.CHUNK_SIZE, 1000),
	defaultChunkOverlap: numFromEnv(process.env.CHUNK_OVERLAP, 200),
	batchSize: numFromEnv(process.env.EMBEDDING_BATCH_SIZE, 10)
};
/**
 * Enrichment configuration for cross-file context
 */
const ENRICHMENT_CONFIG = {
	includeCrossFileContext: process.env.ENRICHMENT_CROSS_FILE !== "false",
	maxImportsToResolve: numFromEnv(process.env.ENRICHMENT_MAX_IMPORTS, 10),
	maxSymbolsPerImport: numFromEnv(process.env.ENRICHMENT_MAX_SYMBOLS_PER_IMPORT, 5)
};
//#endregion
55
//#region src/features/info/index.ts
/** Input schema: optional output format, defaulting to plain text. */
const infoSchema = z.object({ format: z.enum(["json", "text"]).optional().default("text").describe("Output format") });
/** Collect the server's identifying metadata from the static config. */
function getServerInfo() {
	const { name, fullName, version, description } = config;
	return { name, fullName, version, description };
}
/** Render server info either as pretty-printed JSON or a short text banner. */
function execute$4(input) {
	const info = getServerInfo();
	const message = input.format === "json"
		? JSON.stringify(info, null, 2)
		: `${info.fullName} (${info.name}) v${info.version}\n${info.description ?? ""}`.trim();
	return {
		success: true,
		data: info,
		message
	};
}
/** MCP tool definition for the server-info feature. */
const infoFeature = {
	name: "get_server_info",
	description: "Get SRC server version and capabilities. Use to verify the MCP server is running correctly.",
	schema: infoSchema,
	execute: execute$4
};
//#endregion
87
//#region src/core/embeddings/client.ts
/**
 * Ollama client for generating embeddings
 * Uses the official ollama library
 */
var OllamaClient = class {
	client;
	model;
	constructor(config$1) {
		this.client = new Ollama({ host: config$1.ollamaBaseUrl });
		this.model = config$1.embeddingModel;
	}
	/**
	 * Generate embeddings for a single text
	 */
	async embed(text) {
		const response = await this.client.embed({
			model: this.model,
			input: text
		});
		const [vector] = response.embeddings;
		if (!vector) throw new Error("No embedding returned from Ollama");
		return vector;
	}
	/**
	 * Generate embeddings for multiple texts in a single request
	 */
	async embedBatch(texts) {
		const response = await this.client.embed({
			model: this.model,
			input: texts
		});
		return response.embeddings;
	}
	/**
	 * Check if Ollama is reachable and the model is available
	 */
	async healthCheck() {
		try {
			const { models } = await this.client.list();
			// Accept both exact names and tagged variants ("model:latest").
			const found = models.some((m) => m.name === this.model || m.name.startsWith(`${this.model}:`));
			if (!found) return {
				ok: false,
				error: `Model "${this.model}" not found. Run: ollama pull ${this.model}`
			};
			return { ok: true };
		} catch (error) {
			const reason = error instanceof Error ? error.message : String(error);
			return {
				ok: false,
				error: `Cannot connect to Ollama: ${reason}`
			};
		}
	}
};
/**
 * Create a new Ollama client with default config
 */
function createOllamaClient(config$1) {
	return new OllamaClient(config$1);
}
//#endregion
145
//#region src/utils/logger.ts
/** Numeric severity per level; larger numbers are more severe. */
const LOG_LEVELS = {
	debug: 0,
	info: 1,
	warn: 2,
	error: 3
};
/** Picocolors formatter applied to each level tag. */
const LEVEL_COLORS = {
	debug: pc.dim,
	info: pc.blue,
	warn: pc.yellow,
	error: pc.red
};
/** True when `level` names a known log level. */
function isValidLogLevel(level) {
	return level in LOG_LEVELS;
}
/** Compare a message's level against the configured threshold (default "info"). */
function shouldLog(level) {
	const configured = ENV.logLevel;
	const threshold = isValidLogLevel(configured) ? LOG_LEVELS[configured] : LOG_LEVELS.info;
	return LOG_LEVELS[level] >= threshold;
}
/** Prefix a message with a dimmed ISO timestamp and a colored, padded level tag. */
function formatMessage(level, message) {
	const stamp = pc.dim(new Date().toISOString());
	const tag = LEVEL_COLORS[level](level.toUpperCase().padEnd(5));
	return `${stamp} ${tag} ${message}`;
}
/** Leveled logger; messages go to console.error/console.warn (stderr). */
const logger = {
	debug(message, ...args) {
		if (shouldLog("debug")) console.error(formatMessage("debug", message), ...args);
	},
	info(message, ...args) {
		if (shouldLog("info")) console.error(formatMessage("info", message), ...args);
	},
	warn(message, ...args) {
		if (shouldLog("warn")) console.warn(formatMessage("warn", message), ...args);
	},
	error(message, ...args) {
		if (shouldLog("error")) console.error(formatMessage("error", message), ...args);
	},
	success(message, ...args) {
		console.error(pc.green("✓ ") + message, ...args);
	}
};
//#endregion
188
//#region src/utils/colors.ts
/**
 * Color utilities for CLI output
 */
const colors = {
	// Bare picocolors passthroughs.
	success: pc.green,
	error: pc.red,
	warn: pc.yellow,
	info: pc.blue,
	dim: pc.dim,
	bold: pc.bold,
	cyan: pc.cyan,
	magenta: pc.magenta,
	// Bold + colored variants for emphasis.
	successBold: (text) => pc.bold(pc.green(text)),
	errorBold: (text) => pc.bold(pc.red(text)),
	infoBold: (text) => pc.bold(pc.blue(text)),
	// Status-prefixed message formatters.
	formatSuccess: (msg) => `${pc.green("✓")} ${msg}`,
	formatError: (msg) => `${pc.red("✗")} ${msg}`,
	formatInfo: (msg) => `${pc.blue("ℹ")} ${msg}`,
	formatWarn: (msg) => `${pc.yellow("⚠")} ${msg}`,
	// Semantic one-off formatters.
	formatCommand: (cmd) => pc.cyan(cmd),
	formatValue: (val) => pc.magenta(val),
	formatPath: (path$1) => pc.dim(path$1)
};
//#endregion
214
//#region src/core/embeddings/store.ts
/**
 * LanceDB vector store for code embeddings
 *
 * Supports:
 * - Vector similarity search (embeddings)
 * - Full-text search (BM25)
 * - Hybrid search with RRF (Reciprocal Rank Fusion)
 */
// Single LanceDB table holding every embedded chunk.
const TABLE_NAME = "code_chunks";
// On-disk index directory created inside the indexed project.
const INDEX_DIR_NAME = ".src-index";
225
/**
 * Reciprocal Rank Fusion (RRF) to combine ranked lists
 *
 * RRF score = sum(1 / (k + rank_i)) for each list
 * where k is a constant (typically 60) and rank_i is the 1-based rank in list i
 *
 * @param {Array} vectorResults - Results ranked by vector similarity.
 * @param {Array} ftsResults - Results ranked by full-text relevance.
 * @param {number} [k=60] - RRF damping constant; larger values flatten rank differences.
 * @returns {Array} Results deduplicated by chunk id, highest fused score first.
 */
function rrfFusion(vectorResults, ftsResults, k = 60) {
	const scores = new Map();
	// Fold one ranked list into the accumulator (ranks are 1-based).
	// Extracted because the original duplicated this loop verbatim per list.
	const accumulate = (results) => {
		results.forEach((result, index) => {
			const rrfScore = 1 / (k + (index + 1));
			const existing = scores.get(result.chunk.id);
			if (existing) existing.score += rrfScore;
			else scores.set(result.chunk.id, {
				score: rrfScore,
				result
			});
		});
	};
	accumulate(vectorResults);
	accumulate(ftsResults);
	return Array.from(scores.values()).sort((a, b) => b.score - a.score).map(({ score, result }) => ({
		...result,
		score
	}));
}
256
/**
 * LanceDB vector store wrapper
 */
var VectorStore = class {
	db = null;
	table = null;
	indexPath;
	// Whether an FTS index has been created for the CURRENT table.
	ftsIndexCreated = false;
	constructor(directory, _config) {
		this.indexPath = path.join(directory, INDEX_DIR_NAME);
	}
	/** Convert a raw LanceDB row back into a chunk record (shared by both searches). */
	#rowToChunk(row) {
		return {
			id: row.id,
			content: row.content,
			filePath: row.filePath,
			language: row.language,
			startLine: row.startLine,
			endLine: row.endLine,
			symbolName: row.symbolName || void 0,
			symbolType: row.symbolType || void 0
		};
	}
	/**
	 * Initialize the database connection
	 */
	async connect() {
		this.db = await lancedb.connect(this.indexPath);
		if ((await this.db.tableNames()).includes(TABLE_NAME)) this.table = await this.db.openTable(TABLE_NAME);
	}
	/**
	 * Close the database connection
	 */
	close() {
		this.db = null;
		this.table = null;
		// Fix: reset the flag so a later connect() + createFtsIndex() is not
		// skipped because of stale state from a previous session.
		this.ftsIndexCreated = false;
	}
	/**
	 * Check if the index exists
	 */
	exists() {
		return fs.existsSync(this.indexPath);
	}
	/**
	 * Add embedded chunks to the store
	 */
	async addChunks(chunks) {
		if (!this.db) throw new Error("Database not connected. Call connect() first.");
		const records = chunks.map((chunk) => ({
			id: chunk.id,
			content: chunk.content,
			filePath: chunk.filePath,
			language: chunk.language,
			startLine: chunk.startLine,
			endLine: chunk.endLine,
			symbolName: chunk.symbolName ?? "",
			symbolType: chunk.symbolType ?? "",
			vector: chunk.vector
		}));
		if (!this.table) this.table = await this.db.createTable(TABLE_NAME, records);
		else await this.table.add(records);
	}
	/**
	 * Create FTS (Full-Text Search) index on content column
	 * This enables BM25-based text search
	 */
	async createFtsIndex() {
		if (!this.table || this.ftsIndexCreated) return;
		try {
			await this.table.createIndex("content", { config: lancedb.Index.fts() });
			this.ftsIndexCreated = true;
			logger.debug("FTS index created on content column");
		} catch (error) {
			if (error instanceof Error && error.message.includes("already exists")) {
				this.ftsIndexCreated = true;
				logger.debug("FTS index already exists");
			} else logger.warn(`Failed to create FTS index: ${error instanceof Error ? error.message : String(error)}`);
		}
	}
	/**
	 * Search for similar chunks using vector similarity
	 */
	async search(queryVector, limit = 10) {
		if (!this.table) return [];
		return (await this.table.vectorSearch(queryVector).limit(limit).toArray()).map((row) => ({
			chunk: this.#rowToChunk(row),
			score: row._distance ?? 0
		}));
	}
	/**
	 * Full-text search using BM25
	 */
	async searchFts(queryText, limit = 10) {
		if (!this.table) return [];
		await this.createFtsIndex();
		try {
			return (await this.table.query().nearestToText(queryText).limit(limit).toArray()).map((row, index) => ({
				chunk: this.#rowToChunk(row),
				// Rank-based score (1, 1/2, 1/3, ...) so FTS results fuse cleanly in RRF.
				score: 1 / (index + 1)
			}));
		} catch (error) {
			logger.warn(`FTS search failed, falling back to empty results: ${error instanceof Error ? error.message : String(error)}`);
			return [];
		}
	}
	/**
	 * Hybrid search combining vector similarity and full-text search
	 * Uses Reciprocal Rank Fusion (RRF) to combine results
	 */
	async searchHybrid(queryVector, queryText, limit = 10, options = {}) {
		const { mode = "hybrid", rrfK = 60 } = options;
		if (!this.table) return [];
		if (mode === "vector") return this.search(queryVector, limit);
		if (mode === "fts") return this.searchFts(queryText, limit);
		// Over-fetch each list so fusion has enough candidates after dedupe.
		const [vectorResults, ftsResults] = await Promise.all([this.search(queryVector, limit * 2), this.searchFts(queryText, limit * 2)]);
		return rrfFusion(vectorResults, ftsResults, rrfK).slice(0, limit);
	}
	/**
	 * Delete chunks by file path
	 */
	async deleteByFilePath(filePath) {
		if (!this.table) return;
		// Escape single quotes to keep the SQL-style predicate well-formed.
		await this.table.delete(`"filePath" = '${filePath.replace(/'/g, "''")}'`);
	}
	/**
	 * Clear all data from the store
	 */
	async clear() {
		if (this.db && this.table) {
			await this.db.dropTable(TABLE_NAME);
			this.table = null;
			// Fix: the dropped table took its FTS index with it; without this
			// reset, createFtsIndex() would skip creation on the next table and
			// searchFts/searchHybrid would silently degrade.
			this.ftsIndexCreated = false;
		}
	}
	/**
	 * Get index status
	 */
	async getStatus(directory) {
		const status = {
			directory,
			indexPath: this.indexPath,
			exists: this.exists(),
			totalChunks: 0,
			totalFiles: 0,
			languages: {}
		};
		if (!this.table) return status;
		const allRows = await this.table.query().toArray();
		status.totalChunks = allRows.length;
		const uniqueFiles = new Set();
		const languageCounts = {};
		for (const row of allRows) {
			uniqueFiles.add(row.filePath);
			const lang = row.language;
			languageCounts[lang] = (languageCounts[lang] ?? 0) + 1;
		}
		status.totalFiles = uniqueFiles.size;
		status.languages = languageCounts;
		return status;
	}
	/**
	 * Get all indexed file paths
	 */
	async getIndexedFiles() {
		if (!this.table) return [];
		const rows = await this.table.query().select(["filePath"]).toArray();
		const uniqueFiles = new Set();
		for (const row of rows) uniqueFiles.add(row.filePath);
		return Array.from(uniqueFiles);
	}
};
432
/**
 * Create a vector store for a directory
 *
 * @param directory - Project root to index.
 * @param config$1 - Embedding configuration forwarded to the store.
 */
function createVectorStore(directory, config$1) {
	const store = new VectorStore(directory, config$1);
	return store;
}
/**
 * Get the index path for a directory
 *
 * @param directory - Project root; the index lives in `<directory>/.src-index`.
 */
function getIndexPath(directory) {
	const resolved = path.join(directory, INDEX_DIR_NAME);
	return resolved;
}
444
+
445
+ //#endregion
446
//#region src/core/utils/assets.ts
/**
 * Centralized asset directory utilities
 *
 * Provides consistent access to the assets directory and JSON config loading
 * across all core modules.
 */
/**
 * Cached assets directory path (resolved once per process)
 */
let assetsDirCache = null;
/**
 * Get the assets directory path
 *
 * Handles both ESM and CJS contexts by trying multiple possible paths
 * relative to the current module location.
 */
function getAssetsDir() {
	if (assetsDirCache) return assetsDirCache;
	// __dirname only exists in CJS; ESM derives the directory from import.meta.url.
	const currentDir = typeof __dirname !== "undefined" ? __dirname : dirname(fileURLToPath(import.meta.url));
	const candidates = [
		join(currentDir, "..", "..", "..", "assets"),
		join(currentDir, "..", "..", "assets"),
		join(process.cwd(), "assets")
	];
	const found = candidates.find((candidate) => existsSync(candidate));
	// Fall back to cwd/assets even if it does not exist, matching prior behavior.
	assetsDirCache = found ?? join(process.cwd(), "assets");
	return assetsDirCache;
}
/**
 * Load and parse a JSON config file from the assets directory
 *
 * @param filename - Name of the JSON file in assets directory
 * @param defaultValue - Default value to return if file cannot be loaded
 * @returns Parsed JSON content or default value
 */
function loadJsonConfig(filename, defaultValue) {
	try {
		const raw = readFileSync(join(getAssetsDir(), filename), "utf-8");
		return JSON.parse(raw);
	} catch {
		// Missing or malformed file: caller supplied the fallback on purpose.
		return defaultValue;
	}
}
/**
 * Get the path to a file within the assets directory
 *
 * @param segments - Path segments relative to assets directory
 * @returns Full path to the asset file
 */
function getAssetPath(...segments) {
	return join(getAssetsDir(), ...segments);
}
/**
 * Check if an asset file exists
 *
 * @param segments - Path segments relative to assets directory
 * @returns True if the file exists
 */
function assetExists(...segments) {
	return existsSync(getAssetPath(...segments));
}
512
+
513
+ //#endregion
514
//#region src/core/utils/cache.ts
/**
 * Registry of cache clear functions, keyed by cache name
 */
const cacheRegistry = new Map();
/**
 * Register a cache clear function
 *
 * Re-registering under an existing name replaces the previous entry.
 *
 * @param name - Unique name for this cache (for debugging/identification)
 * @param clearFn - Function that clears the cache
 */
function registerCache(name, clearFn) {
	cacheRegistry.set(name, clearFn);
}
528
+
529
+ //#endregion
530
+ //#region src/core/utils/tsconfig.ts
531
+ /**
532
+ * TSConfig utilities for reading path aliases
533
+ *
534
+ * Reads and parses tsconfig.json to extract path aliases
535
+ * in a format usable by the cross-file resolution system.
536
+ */
537
/**
 * Strip JSON comments (single-line // and multi-line)
 *
 * Walks the text character by character so comment markers inside string
 * literals are preserved. Escapes are tracked with an explicit state flag;
 * the previous look-behind check (`charAt(i - 1) !== "\\"`) misread an
 * escaped backslash before a closing quote (e.g. `"a\\"`) as an escaped
 * quote, leaving the scanner "in string" and comments after it unstripped.
 *
 * @param {string} json - JSONC text.
 * @returns {string} Text with comments removed (the newline ending a // comment is kept).
 */
function stripJsonComments(json) {
	let result = "";
	let inString = false;
	let escaped = false;
	let inSingleLineComment = false;
	let inMultiLineComment = false;
	for (let i = 0; i < json.length; i++) {
		const char = json.charAt(i);
		const nextChar = json.charAt(i + 1);
		if (inSingleLineComment) {
			if (char === "\n") {
				inSingleLineComment = false;
				result += char;
			}
			continue;
		}
		if (inMultiLineComment) {
			if (char === "*" && nextChar === "/") {
				inMultiLineComment = false;
				i++;
			}
			continue;
		}
		if (inString) {
			result += char;
			if (escaped) {
				// This character was escaped (\" or \\ etc.); it cannot close the string.
				escaped = false;
			} else if (char === "\\") {
				escaped = true;
			} else if (char === "\"") {
				inString = false;
			}
			continue;
		}
		if (char === "\"") {
			inString = true;
			result += char;
		} else if (char === "/" && nextChar === "/") {
			inSingleLineComment = true;
			i++;
		} else if (char === "/" && nextChar === "*") {
			inMultiLineComment = true;
			i++;
		} else result += char;
	}
	return result;
}
580
/**
 * Parse tsconfig.json content
 *
 * Comments are stripped first (tsconfig allows JSONC); any parse failure is
 * logged at debug level and null is returned instead of throwing.
 */
function parseTsConfig(content) {
	try {
		return JSON.parse(stripJsonComments(content));
	} catch (error) {
		const reason = error instanceof Error ? error.message : String(error);
		logger.debug(`Failed to parse tsconfig.json: ${reason}`);
		return null;
	}
}
592
/**
 * Convert tsconfig paths to simple path aliases format
 *
 * TSConfig format:
 *   "@core": ["src/core"]
 *   "@core/*": ["src/core/*"]
 *
 * Output format:
 *   "@core": "src/core"
 *   "@core/": "src/core/"
 *
 * Only the first target of each pattern is used; entries with no
 * targets are skipped. Results are normalized to forward slashes and
 * expressed relative to the project root.
 */
function convertPaths(paths, baseUrl, projectRoot) {
	// Resolve a target against baseUrl, then re-express it relative to
	// the project root with POSIX-style separators.
	const toProjectRelative = (target) =>
		path.relative(projectRoot, path.join(projectRoot, baseUrl, target)).replace(/\\/g, "/");
	const aliases = {};
	for (const [pattern, targets] of Object.entries(paths)) {
		const [target] = targets;
		if (!target) continue;
		if (pattern.endsWith("/*") && target.endsWith("/*")) {
			// Wildcard pair: keep trailing "/" on both sides of the alias.
			const aliasPrefix = pattern.slice(0, -2) + "/";
			aliases[aliasPrefix] = toProjectRelative(target.slice(0, -2) + "/") + "/";
		} else {
			aliases[pattern] = toProjectRelative(target);
		}
	}
	return aliases;
}
620
/**
 * Read tsconfig.json and extract path aliases
 *
 * Handles:
 * - Comments in tsconfig (// and /* *\/)
 * - baseUrl relative paths
 * - Wildcard patterns (@core/* -> src/core/*)
 * - Exact patterns (@core -> src/core)
 *
 * Any failure (missing file, unparsable JSON, read error) degrades to
 * an empty object with a debug log rather than throwing.
 *
 * @param projectRoot - The project root directory containing tsconfig.json
 * @returns Path aliases in simple format, or empty object if not found/invalid
 */
function readPathAliases(projectRoot) {
	const tsconfigPath = path.join(projectRoot, "tsconfig.json");
	if (!fs.existsSync(tsconfigPath)) {
		logger.debug(`No tsconfig.json found at ${tsconfigPath}`);
		return {};
	}
	try {
		const raw = fs.readFileSync(tsconfigPath, "utf-8");
		const tsconfig = parseTsConfig(raw);
		if (!tsconfig) return {};
		const compilerOptions = tsconfig.compilerOptions;
		const paths = compilerOptions?.paths;
		if (!paths || Object.keys(paths).length === 0) {
			logger.debug("No paths defined in tsconfig.json");
			return {};
		}
		const aliases = convertPaths(paths, compilerOptions?.baseUrl ?? ".", projectRoot);
		logger.debug(`Loaded ${String(Object.keys(aliases).length)} path aliases from tsconfig.json`);
		return aliases;
	} catch (error) {
		const reason = error instanceof Error ? error.message : String(error);
		logger.debug(`Failed to read tsconfig.json: ${reason}`);
		return {};
	}
}
655
/**
 * Cache of path aliases keyed by normalized project root.
 * NOTE(review): this cache is not registered with registerCache,
 * unlike the other module caches — confirm whether that is intended.
 */
const pathAliasCache = /* @__PURE__ */ new Map();
/**
 * Read path aliases with caching
 *
 * The project root is normalized so equivalent spellings of the same
 * directory share one cache entry.
 */
function readPathAliasesCached(projectRoot) {
	const cacheKey = path.normalize(projectRoot);
	const existing = pathAliasCache.get(cacheKey);
	if (existing !== void 0) return existing;
	const aliases = readPathAliases(projectRoot);
	pathAliasCache.set(cacheKey, aliases);
	return aliases;
}
670
+
671
+ //#endregion
672
+ //#region src/core/parser/languages.ts
673
+ /**
674
+ * Language configuration and mapping for Tree-sitter parsers
675
+ * Reads from centralized assets/languages.json
676
+ */
677
// Memoized languages.json contents.
let configCache$1 = null;
// Memoized language registry (canonical names + aliases).
let languagesCache = null;
// Memoized extension -> language-config lookup.
let extensionMapCache = null;
/**
 * Load and memoize assets/languages.json.
 * `{ treesitter: {} }` is passed as the default to loadJsonConfig —
 * presumably used when the asset is missing; confirm against its impl.
 */
function loadConfig() {
	if (!configCache$1) {
		configCache$1 = loadJsonConfig("languages.json", { treesitter: {} });
	}
	return configCache$1;
}
685
/**
 * Build the language registry from languages.json, memoized.
 * Each language is registered under its canonical name and once more
 * per alias; every entry carries the canonical name so alias lookups
 * still resolve to the right wasm/query assets.
 */
function buildLanguages() {
	if (languagesCache) return languagesCache;
	const config$1 = loadConfig();
	languagesCache = {};
	for (const [name, lang] of Object.entries(config$1.treesitter)) {
		// Fresh object per key, matching one-entry-per-name semantics.
		const makeEntry = () => ({
			name,
			wasm: lang.wasm,
			queries: lang.queries,
			extensions: lang.extensions,
			aliases: lang.aliases
		});
		languagesCache[name] = makeEntry();
		for (const alias of lang.aliases || []) {
			languagesCache[alias] = makeEntry();
		}
	}
	return languagesCache;
}
707
/**
 * Build (and memoize) the file-extension -> language-config map.
 * Later languages overwrite earlier ones on extension collisions.
 */
function buildExtensionMap() {
	if (extensionMapCache) return extensionMapCache;
	extensionMapCache = {};
	for (const languageConfig of Object.values(buildLanguages())) {
		for (const ext of languageConfig.extensions) {
			extensionMapCache[ext] = languageConfig;
		}
	}
	return extensionMapCache;
}
714
/** Get language configuration from file path (undefined when unknown). */
function getLanguageFromPath(filePath) {
	// NOTE(review): when the path has no ".", lastIndexOf returns -1 and
	// slice(-1) yields just the last character; assuming the extension
	// map is keyed with a leading dot (confirm in languages.json), such
	// lookups miss and return undefined.
	const dotIndex = filePath.lastIndexOf(".");
	const ext = filePath.slice(dotIndex).toLowerCase();
	return buildExtensionMap()[ext];
}
719
/** Get language configuration by (case-insensitive) name or alias. */
function getLanguageByName(name) {
	const registry = buildLanguages();
	return registry[name.toLowerCase()];
}
723
/** Clear caches (for testing) — resets all three language-module memos. */
function clearLanguageCache$1() {
	configCache$1 = null;
	languagesCache = null;
	extensionMapCache = null;
}
// Eagerly built at module load: first access reads languages.json.
const LANGUAGES = buildLanguages();
const EXTENSION_MAP = buildExtensionMap();
// Make the language config caches flushable via the central registry.
registerCache("languages:config", clearLanguageCache$1);
732
+
733
+ //#endregion
734
+ //#region src/core/parser/index.ts
735
+ /**
736
+ * Tree-sitter parser module
737
+ *
738
+ * Provides code parsing functionality using web-tree-sitter
739
+ * WASM files are loaded from local assets directory for minimal bundle size
740
+ */
741
/**
 * Parser initialization state
 */
// True once Parser.init() has completed and `parser` is usable.
let isInitialized = false;
// In-flight initialization promise, shared so concurrent callers of
// initializeParser() await a single Parser.init() call.
let initPromise = null;
/**
 * Cache for loaded languages
 * Keyed by language name; values are Language instances from Language.load().
 */
const languageCache = /* @__PURE__ */ new Map();
/**
 * Parser instance (reused)
 * Single shared instance; setLanguage() is called per parse.
 */
let parser = null;
754
/**
 * Initialize the Tree-sitter WASM module
 * Must be called before any parsing operations
 *
 * Idempotent and concurrency-safe: a single init promise is shared by
 * all callers; once resolved, subsequent calls return immediately.
 */
async function initializeParser() {
	if (isInitialized) return;
	if (!initPromise) {
		initPromise = (async () => {
			await Parser.init();
			parser = new Parser();
			isInitialized = true;
		})();
	}
	return initPromise;
}
768
/**
 * Get or create a parser instance
 *
 * Ensures initialization has completed; throws if the shared parser is
 * still missing afterwards (should not happen in practice).
 */
async function getParser() {
	await initializeParser();
	if (parser === null) {
		throw new Error("Parser not initialized");
	}
	return parser;
}
776
/**
 * Load a language grammar from local assets
 *
 * Memoizes Language instances per language name.
 * @throws when the expected .wasm asset is missing on disk
 */
async function loadLanguage(grammarConfig) {
	const cacheKey = grammarConfig.name;
	const existing = languageCache.get(cacheKey);
	if (existing) return existing;
	await initializeParser();
	const wasmPath = join(getAssetsDir(), "wasm", `tree-sitter-${grammarConfig.name}.wasm`);
	if (!existsSync(wasmPath)) {
		throw new Error(`WASM file not found for language ${grammarConfig.name}: ${wasmPath}`);
	}
	const language = await Language.load(wasmPath);
	languageCache.set(cacheKey, language);
	return language;
}
790
/**
 * Parse code content
 *
 * Language is resolved either explicitly (options.language) or from
 * the file extension (options.filePath); one of the two is required.
 *
 * @returns { tree, language, parser, languageInstance }
 * @throws on unknown language, undetectable file type, missing options,
 *         or when Tree-sitter yields no tree
 */
async function parseCode(content, options = {}) {
	const { language, filePath } = options;
	let languageConfig;
	if (language) {
		languageConfig = getLanguageByName(language);
		if (!languageConfig) throw new Error(`Unsupported language: ${language}`);
	} else if (filePath) {
		languageConfig = getLanguageFromPath(filePath);
		if (!languageConfig) throw new Error(`Could not detect language for file: ${filePath}`);
	} else {
		throw new Error("Either language or filePath must be provided");
	}
	const languageInstance = await loadLanguage(languageConfig);
	const parserInstance = await getParser();
	parserInstance.setLanguage(languageInstance);
	const tree = parserInstance.parse(content);
	if (!tree) throw new Error("Failed to parse content");
	return {
		tree,
		language: languageConfig.name,
		parser: parserInstance,
		languageInstance
	};
}
815
/**
 * Convert Tree-sitter position to our Position type
 *
 * Tree-sitter rows are 0-based; our lines are 1-based. Columns and the
 * byte offset pass through unchanged.
 */
function toPosition(point, offset) {
	const { row, column } = point;
	return { line: row + 1, column, offset };
}
825
/**
 * Convert Tree-sitter node to ASTNode
 *
 * Recurses over NAMED children only, and additionally attaches any
 * field-named children under `fields`. Recursion stops once
 * currentDepth reaches maxDepth (when provided); children/fields are
 * simply omitted beyond that point.
 */
function toASTNode(node, maxDepth, currentDepth = 0) {
	const astNode = {
		type: node.type,
		text: node.text,
		start: toPosition(node.startPosition, node.startIndex),
		end: toPosition(node.endPosition, node.endIndex),
		isNamed: node.isNamed
	};
	if (maxDepth !== void 0 && currentDepth >= maxDepth) return astNode;
	const recurse = (child) => toASTNode(child, maxDepth, currentDepth + 1);
	if (node.childCount > 0) {
		const namedChildren = node.namedChildren;
		if (namedChildren.length > 0) astNode.children = namedChildren.map(recurse);
	}
	// Collect field-named children declared by the grammar.
	const fields = {};
	for (const fieldName of node.tree.language.fields) {
		if (!fieldName) continue;
		const fieldNode = node.childForFieldName(fieldName);
		if (fieldNode) fields[fieldName] = recurse(fieldNode);
	}
	if (Object.keys(fields).length > 0) astNode.fields = fields;
	return astNode;
}
851
/**
 * Clear the language cache (useful for testing)
 * Leaves the parser and init state intact.
 */
function clearLanguageCache() {
	languageCache.clear();
}
/**
 * Reset the parser state (useful for testing)
 * Clears the language cache AND drops the parser/init state so the
 * next parse re-runs Parser.init().
 */
function resetParser() {
	languageCache.clear();
	parser = null;
	isInitialized = false;
	initPromise = null;
}
// Expose both resets through the central cache registry.
registerCache("parser:languageCache", clearLanguageCache);
registerCache("parser:state", resetParser);
868
+
869
+ //#endregion
870
+ //#region src/core/queries/helpers.ts
871
/**
 * Find a capture by exact name
 *
 * @param captures - Array of captures from a query match
 * @param name - Exact capture name to find
 * @returns The first matching capture or undefined
 */
function findCapture(captures, name) {
	for (const capture of captures) {
		if (capture.name === name) return capture;
	}
	return undefined;
}
881
/**
 * Find a capture matching any of the given names
 *
 * @param captures - Array of captures from a query match
 * @param names - Array of capture names to search for
 * @returns The first matching capture or undefined
 */
function findCaptureByNames(captures, names) {
	const wanted = new Set(names);
	return captures.find((capture) => wanted.has(capture.name));
}
891
/**
 * Find a capture by name prefix
 *
 * @param captures - Array of captures from a query match
 * @param prefix - Prefix to match (e.g., "definition." matches "definition.function")
 * @returns The first matching capture or undefined
 */
function findCaptureByPrefix(captures, prefix) {
	for (const capture of captures) {
		if (capture.name.startsWith(prefix)) return capture;
	}
	return undefined;
}
901
/**
 * Extract the suffix from a capture name after the prefix
 *
 * @param captureName - Full capture name (e.g., "definition.function")
 * @param prefix - Prefix to remove (e.g., "definition.")
 * @returns The suffix (e.g., "function") or the original name if prefix not found
 */
function getCaptureKind(captureName, prefix) {
	if (!captureName.startsWith(prefix)) return captureName;
	return captureName.slice(prefix.length);
}
911
/**
 * Create a deduplication set from node offsets
 *
 * Nodes are identified by their start offset, so two nodes beginning
 * at the same byte are considered the same.
 *
 * @returns Object with add and has methods for tracking seen offsets
 */
function createOffsetTracker() {
	const seenOffsets = /* @__PURE__ */ new Set();
	return {
		add(node) {
			return seenOffsets.add(node.start.offset);
		},
		has(node) {
			return seenOffsets.has(node.start.offset);
		}
	};
}
923
+
924
+ //#endregion
925
+ //#region src/core/queries/loader.ts
926
/**
 * SCM Query file loader
 *
 * Loads official Tree-sitter .scm query files from local assets directory
 * Supports inheritance via `; inherits: lang1,lang2` directives
 */
/**
 * Cache for loaded SCM queries (with inheritance resolved)
 * Keyed by `${language}:${queryType}`; cleared via clearSCMCache().
 */
const scmCache = /* @__PURE__ */ new Map();
936
/**
 * Normalize language name for directory lookup
 *
 * Maps user-facing names to the asset directory names used on disk;
 * every other name passes through unchanged.
 */
function normalizeLanguageName(language) {
	switch (language) {
		case "csharp":
			return "c_sharp";
		case "tsx":
			return "typescript";
		default:
			return language;
	}
}
944
/**
 * Get the path to a .scm query file
 *
 * @returns Absolute asset path, or undefined when the asset is missing
 */
function getSCMPath(language, queryType) {
	const relativePath = join("queries", normalizeLanguageName(language), `${queryType}.scm`);
	return assetExists(relativePath) ? join(getAssetsDir(), relativePath) : undefined;
}
951
/**
 * Parse inherit directives from SCM content
 * Supports: `; inherits: lang1,lang2` and `; inherits lang1`
 *
 * Line endings are normalized (CRLF/CR -> LF) before scanning; every
 * matching line contributes its comma-separated, trimmed languages.
 */
function parseInherits(content) {
	const directives = [];
	const normalized = content.replace(/\r\n/g, "\n").replace(/\r/g, "\n");
	for (const line of normalized.split("\n")) {
		const match = /^;\s*inherits:?\s+([^\s].*)$/.exec(line);
		if (!match?.[1]) continue;
		for (const lang of match[1].split(",")) {
			directives.push(lang.trim());
		}
	}
	return directives;
}
967
/**
 * Remove inherit directives from SCM content
 *
 * Normalizes line endings to LF and drops every `; inherits ...` line.
 */
function removeInheritDirectives(content) {
	const lines = content.replace(/\r\n/g, "\n").replace(/\r/g, "\n").split("\n");
	const kept = lines.filter((line) => !/^;\s*inherits:?\s+/.exec(line));
	return kept.join("\n");
}
973
/**
 * Load a raw .scm file without resolving inheritance
 *
 * @returns File contents, or undefined when missing or unreadable
 */
function loadRawSCM(language, queryType) {
	const scmPath = join(getAssetsDir(), "queries", language, `${queryType}.scm`);
	try {
		return existsSync(scmPath) ? readFileSync(scmPath, "utf-8") : undefined;
	} catch {
		// Read failures (permissions, races) degrade to "not found".
		return undefined;
	}
}
985
/**
 * Load a .scm query file for a language with inheritance resolved
 *
 * Inherited queries are prepended (in directive order) ahead of the
 * language's own content; the merged result is cached per
 * language/queryType pair. Circular `inherits` chains are cut via the
 * `visited` set.
 *
 * @param language - Language name (e.g., "javascript", "python")
 * @param queryType - Type of query (e.g., "tags", "highlights")
 * @param visited - Set of visited languages to prevent circular inheritance
 * @returns Query string or undefined if not found
 */
function loadSCMQuery(language, queryType, visited = /* @__PURE__ */ new Set()) {
	const cacheKey = `${language}:${queryType}`;
	if (scmCache.has(cacheKey)) return scmCache.get(cacheKey);
	if (visited.has(language)) return undefined;
	visited.add(language);
	const rawContent = loadRawSCM(normalizeLanguageName(language), queryType);
	if (!rawContent) return undefined;
	const parts = [];
	for (const parentLang of parseInherits(rawContent)) {
		const inherited = loadSCMQuery(parentLang, queryType, visited);
		if (inherited) parts.push(inherited);
	}
	parts.push(removeInheritDirectives(rawContent).trim());
	const finalContent = parts.filter(Boolean).join("\n\n");
	if (finalContent) scmCache.set(cacheKey, finalContent);
	return finalContent || undefined;
}
1011
/**
 * Load tags.scm for symbol extraction
 */
function loadTagsQuery(language) {
	return loadSCMQuery(language, "tags");
}
/**
 * Load highlights.scm for syntax highlighting
 */
function loadHighlightsQuery(language) {
	return loadSCMQuery(language, "highlights");
}
/**
 * Load locals.scm for local variable scoping
 */
function loadLocalsQuery(language) {
	return loadSCMQuery(language, "locals");
}
/**
 * Check if a language has official tags.scm
 *
 * True when the tags.scm asset exists for the (normalized) language;
 * used to prefer official queries over fallback patterns.
 */
function hasOfficialTags(language) {
	return getSCMPath(language, "tags") !== void 0;
}
/**
 * Clear the SCM cache
 */
function clearSCMCache() {
	scmCache.clear();
}
// Make the SCM query cache flushable through the central registry.
registerCache("queries:scm", clearSCMCache);
1042
+
1043
+ //#endregion
1044
+ //#region src/core/queries/patterns.ts
1045
// Generic preset query patterns using JS/TS-flavored node names.
// Used when a language has no entry in FALLBACK_PATTERNS for the
// requested preset (see getQueryPattern).
const GENERIC_PATTERNS = {
	comments: `[(comment) @comment]`,
	strings: `[(string) @string (template_string) @string]`,
	imports: `(import_statement) @import.statement`,
	exports: `(export_statement) @export.statement`,
	variables: `[
(variable_declaration (variable_declarator name: (identifier) @variable.name) @variable.declaration)
(lexical_declaration (variable_declarator name: (identifier) @variable.name) @variable.declaration)
]`,
	types: `[
(type_alias_declaration name: (type_identifier) @type.alias) @type.definition
(enum_declaration name: (identifier) @enum.name) @enum.definition
]`
};
// Per-language preset overrides, used when tags.scm is unavailable or
// when the generic JS/TS node names do not match the grammar.
const FALLBACK_PATTERNS = {
	typescript: { functions: `[
(function_declaration name: (identifier) @function.name) @function.definition
(method_definition name: (property_identifier) @function.name) @function.definition
(lexical_declaration (variable_declarator name: (identifier) @function.name value: [(arrow_function) (function_expression)]) @function.definition)
]` },
	json: { strings: `[(string) @string]` },
	yaml: {
		strings: `[(string_scalar) @string (double_quote_scalar) @string (single_quote_scalar) @string]`,
		comments: `[(comment) @comment]`
	},
	toml: {
		strings: `[(string) @string]`,
		comments: `[(comment) @comment]`
	},
	bash: {
		functions: `(function_definition name: (word) @function.name) @function.definition`,
		comments: `[(comment) @comment]`,
		strings: `[(string) @string (raw_string) @string]`,
		variables: `(variable_assignment name: (variable_name) @variable.name) @variable.declaration`
	},
	html: {
		strings: `[(attribute_value) @string (quoted_attribute_value) @string]`,
		comments: `[(comment) @comment]`
	},
	css: {
		comments: `[(comment) @comment]`,
		strings: `[(string_value) @string]`
	},
	scala: {
		functions: `(function_definition (identifier) @function.name) @function.definition`,
		classes: `[
(class_definition (identifier) @class.name) @class.definition
(object_definition (identifier) @class.name) @class.definition
(trait_definition (identifier) @class.name) @class.definition
]`,
		comments: `[(comment) @comment]`,
		strings: `[(string) @string]`
	},
	swift: {
		functions: `[
(function_declaration (simple_identifier) @function.name) @function.definition
(init_declaration) @function.definition
]`,
		classes: `[
(class_declaration (type_identifier) @class.name) @class.definition
(protocol_declaration (type_identifier) @class.name) @class.definition
]`,
		comments: `[(comment) @comment (multiline_comment) @comment]`,
		strings: `[(line_string_literal) @string]`
	},
	ocaml: {
		functions: `(value_definition (let_binding (value_name) @function.name)) @function.definition`,
		classes: `[
(type_definition (type_binding (type_constructor) @class.name)) @class.definition
(module_definition (module_binding (module_name) @class.name)) @class.definition
]`,
		comments: `[(comment) @comment]`,
		strings: `[(string) @string]`
	},
	svelte: {
		comments: `[(comment) @comment]`,
		strings: `[(attribute_value) @string (quoted_attribute_value) @string]`
	}
};
1124
/**
 * Resolve the query pattern for a language/preset pair.
 * A language-specific fallback pattern wins; otherwise the generic
 * (JS/TS-shaped) pattern is used. Returns undefined when neither exists.
 */
function getQueryPattern(language, preset) {
	const languagePatterns = FALLBACK_PATTERNS[language];
	return (languagePatterns && languagePatterns[preset]) ?? GENERIC_PATTERNS[preset];
}
1127
+
1128
+ //#endregion
1129
+ //#region src/core/queries/index.ts
1130
+ /**
1131
+ * SCM Query engine for Tree-sitter
1132
+ *
1133
+ * Supports both official .scm query files and custom preset patterns
1134
+ */
1135
/**
 * Execute a SCM query on parsed code
 *
 * Compiles the query against the given language, runs it over the
 * whole tree (optionally restricted to a byte range), and converts
 * every capture node into a plain ASTNode.
 *
 * @throws Error("Invalid query: ...") when the query fails to compile
 */
function executeQuery(tree, languageInstance, queryString, language, options = {}) {
	const { maxMatches, startIndex, endIndex } = options;
	let query;
	try {
		query = new Query(languageInstance, queryString);
	} catch (error) {
		const message = error instanceof Error ? error.message : String(error);
		throw new Error(`Invalid query: ${message}`);
	}
	const rawMatches = query.matches(tree.rootNode, { startIndex, endIndex });
	const matches = [];
	for (const match of rawMatches) {
		// Stop once the optional match limit is reached.
		if (maxMatches !== void 0 && matches.length >= maxMatches) break;
		matches.push({
			pattern: match.patternIndex,
			captures: match.captures.map(({ name, node }) => ({
				name,
				node: toASTNode(node)
			}))
		});
	}
	return {
		matches,
		count: matches.length,
		query: queryString,
		language,
		source: "preset"
	};
}
1173
/**
 * Execute an official .scm query file
 *
 * Only tags/highlights/locals are supported; injections, indents and
 * folds (and any unknown type) return undefined. Also returns
 * undefined when no query file exists or the query fails to compile.
 *
 * @param tree - Parsed tree
 * @param languageInstance - Tree-sitter language instance
 * @param language - Language name
 * @param queryType - Type of query (tags, highlights, locals, etc.)
 * @param options - Query options
 */
function executeOfficialQuery(tree, languageInstance, language, queryType, options = {}) {
	const loaders = {
		tags: loadTagsQuery,
		highlights: loadHighlightsQuery,
		locals: loadLocalsQuery
	};
	const loader = loaders[queryType];
	if (!loader) return undefined;
	const queryString = loader(language);
	if (!queryString) return undefined;
	try {
		const result = executeQuery(tree, languageInstance, queryString, language, options);
		return { ...result, source: "official" };
	} catch {
		// Invalid official query: behave as if none existed.
		return undefined;
	}
}
1208
/**
 * Execute tags.scm for comprehensive symbol extraction
 *
 * This uses the official Tree-sitter tags.scm file which provides:
 * - Function definitions with documentation
 * - Class definitions
 * - Method definitions
 * - Module/interface definitions
 * - Reference tracking (calls, types)
 *
 * Returns undefined when the language has no tags.scm or the query
 * fails to compile (see executeOfficialQuery).
 */
function executeTagsQuery(tree, languageInstance, language, options = {}) {
	return executeOfficialQuery(tree, languageInstance, language, "tags", options);
}
1221
/**
 * Execute a preset query
 * Uses official tags.scm for functions/classes when available,
 * otherwise falls back to preset patterns
 *
 * Control flow for preset "functions"/"classes":
 * - with tags.scm: return tag-derived matches; if tags yield nothing,
 *   fall back to the pattern, else return an empty "official" result;
 * - without tags.scm: run the fallback pattern directly.
 * Any other preset (or functions/classes with no pattern at all)
 * reaches the bottom: throws if no pattern exists, else runs it.
 */
function executePresetQuery(tree, languageInstance, language, preset, options = {}) {
	const { maxMatches } = options;
	const fallbackPattern = getQueryPattern(language, preset);
	if (preset === "functions" || preset === "classes") {
		if (hasOfficialTags(language)) {
			const { definitions } = extractSymbolsFromTags(tree, languageInstance, language);
			// Map tag kinds onto the preset's categories.
			let filteredDefs = preset === "functions" ? definitions.filter((d) => d.kind === "function" || d.kind === "method") : definitions.filter((d) => d.kind === "class" || d.kind === "interface" || d.kind === "module");
			if (filteredDefs.length > 0) {
				if (maxMatches !== void 0 && filteredDefs.length > maxMatches) filteredDefs = filteredDefs.slice(0, maxMatches);
				// Re-shape definitions into query-match form so callers see a
				// uniform result regardless of source.
				const matches = filteredDefs.map((def) => ({
					pattern: 0,
					captures: [{
						name: preset === "functions" ? "function.definition" : "class.definition",
						node: def.node
					}, {
						name: `${preset.slice(0, -1)}.name`,
						node: def.nameNode
					}]
				}));
				return {
					matches,
					count: matches.length,
					query: `[tags.scm ${preset}]`,
					language,
					source: "official"
				};
			}
			// tags.scm produced nothing: try the fallback pattern before
			// returning an empty official result.
			if (fallbackPattern) return executeQuery(tree, languageInstance, fallbackPattern, language, options);
			return {
				matches: [],
				count: 0,
				query: `[tags.scm ${preset}]`,
				language,
				source: "official"
			};
		}
		if (fallbackPattern) return executeQuery(tree, languageInstance, fallbackPattern, language, options);
	}
	if (!fallbackPattern) throw new Error(`No '${preset}' query pattern available for ${language}`);
	return executeQuery(tree, languageInstance, fallbackPattern, language, options);
}
1267
/**
 * Extract symbols using official tags.scm when available
 *
 * This is the recommended method for symbol extraction as it uses
 * the official Tree-sitter query files for better accuracy.
 *
 * Matches without a "name" capture are skipped. A "definition.*"
 * capture wins over a "reference.*" capture within the same match;
 * definitions also pick up an optional "doc" capture as documentation.
 *
 * @returns { definitions, references } (both empty when no tags.scm)
 */
function extractSymbolsFromTags(tree, languageInstance, language) {
	const definitions = [];
	const references = [];
	const result = executeTagsQuery(tree, languageInstance, language);
	if (result) {
		for (const { captures } of result.matches) {
			const nameCapture = findCapture(captures, "name");
			if (!nameCapture) continue;
			const defCapture = findCaptureByPrefix(captures, "definition.");
			if (defCapture) {
				definitions.push({
					name: nameCapture.node.text,
					kind: getCaptureKind(defCapture.name, "definition."),
					node: defCapture.node,
					nameNode: nameCapture.node,
					documentation: findCapture(captures, "doc")?.node.text
				});
				continue;
			}
			const refCapture = findCaptureByPrefix(captures, "reference.");
			if (refCapture) {
				references.push({
					name: nameCapture.node.text,
					kind: getCaptureKind(refCapture.name, "reference."),
					node: refCapture.node,
					nameNode: nameCapture.node
				});
			}
		}
	}
	return { definitions, references };
}
1311
/**
 * Get function name from a function node
 *
 * Prefers the grammar's `name` field (when it is a single node); then
 * scans direct children for an identifier-like node, recursing through
 * C-style `function_declarator` wrappers. Returns undefined when no
 * name can be found.
 */
function getFunctionName(funcNode) {
	const nameField = funcNode.fields?.name;
	if (nameField && !Array.isArray(nameField)) return nameField.text;
	for (const child of funcNode.children || []) {
		switch (child.type) {
			case "identifier":
			case "property_identifier":
			case "field_identifier":
				return child.text;
			case "function_declarator":
				return getFunctionName(child);
		}
	}
	return undefined;
}
1324
/**
 * Get class name from a class node
 *
 * Prefers the grammar's `name` field (when it is a single node), then
 * falls back to the first identifier/type_identifier child. Returns
 * undefined when no name is found.
 */
function getClassName(classNode) {
	const nameField = classNode.fields?.name;
	if (nameField && !Array.isArray(nameField)) return nameField.text;
	for (const child of classNode.children || []) {
		if (child.type === "identifier" || child.type === "type_identifier") {
			return child.text;
		}
	}
	return undefined;
}
1336
+
1337
+ //#endregion
1338
+ //#region src/core/symbols/index.ts
1339
/**
 * Extract symbols from parsed code
 *
 * Runs up to four preset queries (functions, classes, variables,
 * types) and flattens the results into a single symbol list plus a
 * per-type count summary. The optional filter narrows which symbol
 * types are collected: `types` is an allow-list, `excludeTypes` a
 * deny-list (deny wins). The variables and types passes are
 * best-effort: query failures there are swallowed, while failures in
 * the functions/classes passes propagate.
 */
function extractSymbols(tree, languageInstance, language, filter = {}) {
	const symbols = [];
	const { types, excludeTypes } = filter;
	const shouldInclude = (type) => {
		if (types && !types.includes(type)) return false;
		if (excludeTypes?.includes(type)) return false;
		return true;
	};
	// Pass 1: functions and methods via the "functions" preset.
	if (shouldInclude("function") || shouldInclude("method") || shouldInclude("class") || shouldInclude("interface")) {
		const tagsResult = executePresetQuery(tree, languageInstance, language, "functions");
		if (shouldInclude("function") || shouldInclude("method")) for (const match of tagsResult.matches) {
			const defCapture = findCapture(match.captures, "function.definition");
			const nameCapture = findCapture(match.captures, "function.name");
			if (defCapture) {
				// Fall back to AST-based name extraction when the query
				// did not capture an explicit name node.
				const name = nameCapture?.node.text ?? getFunctionName(defCapture.node);
				if (name) {
					const type = defCapture.node.type.includes("method") || defCapture.node.type === "method_definition" ? "method" : "function";
					if (shouldInclude(type)) symbols.push({
						name,
						type,
						start: defCapture.node.start,
						end: defCapture.node.end,
						signature: extractFunctionSignature(defCapture.node),
						modifiers: extractModifiers(defCapture.node)
					});
				}
			}
		}
	}
	// Pass 2: classes and interfaces via the "classes" preset.
	if (shouldInclude("class") || shouldInclude("interface")) {
		const classResult = executePresetQuery(tree, languageInstance, language, "classes");
		for (const match of classResult.matches) {
			const defCapture = findCapture(match.captures, "class.definition");
			const nameCapture = findCapture(match.captures, "class.name");
			if (defCapture) {
				const name = nameCapture?.node.text ?? getClassName(defCapture.node);
				if (name) {
					const nodeType = defCapture.node.type;
					let symbolType = "class";
					// Structs are reported as interfaces alongside real interfaces.
					if (nodeType.includes("interface") || nodeType === "interface_declaration") symbolType = "interface";
					else if (nodeType.includes("struct")) symbolType = "interface";
					if (shouldInclude(symbolType)) symbols.push({
						name,
						type: symbolType,
						start: defCapture.node.start,
						end: defCapture.node.end,
						modifiers: extractModifiers(defCapture.node)
					});
				}
			}
		}
	}
	// Pass 3 (best-effort): variables and constants.
	if (shouldInclude("variable") || shouldInclude("constant")) try {
		const varResult = executePresetQuery(tree, languageInstance, language, "variables");
		for (const match of varResult.matches) {
			const nameCapture = findCaptureByNames(match.captures, [
				"variable.name",
				"constant.name",
				"field.name"
			]);
			const declCapture = findCaptureByNames(match.captures, [
				"variable.declaration",
				"constant.declaration",
				"field.declaration"
			]);
			if (nameCapture && declCapture) {
				// "const " prefix or an explicit constant capture marks a constant.
				const type = declCapture.node.text.startsWith("const ") || findCapture(match.captures, "constant.name") !== void 0 ? "constant" : "variable";
				if (shouldInclude(type)) symbols.push({
					name: nameCapture.node.text,
					type,
					start: declCapture.node.start,
					end: declCapture.node.end,
					modifiers: extractModifiers(declCapture.node)
				});
			}
		}
	} catch {}
	// Pass 4 (best-effort): type aliases, interfaces and enums.
	if (shouldInclude("interface") || shouldInclude("type") || shouldInclude("enum")) try {
		const typeResult = executePresetQuery(tree, languageInstance, language, "types");
		for (const match of typeResult.matches) {
			const nameCapture = findCaptureByNames(match.captures, [
				"type.name",
				"interface.name",
				"enum.name",
				"type.alias"
			]);
			const defCapture = findCaptureByNames(match.captures, [
				"type.definition",
				"interface.definition",
				"enum.definition"
			]);
			if (nameCapture && defCapture) {
				let type = "type";
				if (nameCapture.name === "interface.name") type = "interface";
				else if (nameCapture.name === "enum.name") type = "enum";
				if (shouldInclude(type)) symbols.push({
					name: nameCapture.node.text,
					type,
					start: defCapture.node.start,
					end: defCapture.node.end,
					modifiers: extractModifiers(defCapture.node)
				});
			}
		}
	} catch {}
	return {
		symbols,
		summary: {
			functions: symbols.filter((s) => s.type === "function").length,
			classes: symbols.filter((s) => s.type === "class").length,
			variables: symbols.filter((s) => s.type === "variable").length,
			constants: symbols.filter((s) => s.type === "constant").length,
			interfaces: symbols.filter((s) => s.type === "interface").length,
			types: symbols.filter((s) => s.type === "type").length,
			enums: symbols.filter((s) => s.type === "enum").length,
			methods: symbols.filter((s) => s.type === "method").length,
			properties: symbols.filter((s) => s.type === "property").length,
			total: symbols.length
		}
	};
}
1463
/**
 * Heuristically extract a human-readable function signature from an AST
 * node's source text. Tries JS/TS declarations, arrow functions, Python
 * `def`, and Go `func` in that order; falls back to the text up to the
 * first `{` or `:`. Returns `undefined` when nothing usable remains.
 */
function extractFunctionSignature(node) {
	const source = node.text;
	// Ordered matchers; only the Python form needs its trailing ":" removed.
	const candidates = [
		{ pattern: /^(async\s+)?function\s*\*?\s*(\w*)\s*(<[^>]*>)?\s*\([^)]*\)(\s*:\s*[^{]+)?/, stripColon: false },
		{ pattern: /^\([^)]*\)\s*(:\s*[^=]+)?\s*=>/, stripColon: false },
		{ pattern: /^def\s+(\w+)\s*\([^)]*\)(\s*->\s*[^:]+)?:/, stripColon: true },
		{ pattern: /^func\s*(\([^)]*\)\s*)?(\w+)\s*\([^)]*\)/, stripColon: false }
	];
	for (const { pattern, stripColon } of candidates) {
		const match = pattern.exec(source);
		if (!match) continue;
		const signature = stripColon ? match[0].replace(/:$/, "") : match[0];
		return signature.trim();
	}
	const head = source.split(/[{:]/)[0];
	return head ? head.trim() : undefined;
}
1479
/**
 * Collect declaration modifiers (export/async/static/visibility/etc.)
 * that appear as whole words within the first 100 characters of the
 * node's text. Returns the matched keywords in canonical order, or
 * `undefined` when none are present.
 */
function extractModifiers(node) {
	const keywords = [
		"export",
		"default",
		"async",
		"static",
		"public",
		"private",
		"protected",
		"readonly",
		"abstract",
		"const",
		"let",
		"var",
		"final",
		"override",
		"pub",
		"mut"
	];
	// Only scan the head of the declaration so body text cannot produce false hits.
	const head = node.text.slice(0, 100);
	const found = keywords.filter((keyword) => new RegExp(`\\b${keyword}\\b`).test(head));
	return found.length > 0 ? found : undefined;
}
1505
/**
 * Extract import statements from a parsed tree via the "imports" preset
 * query. Deduplicates statements by node offset, strips quotes from the
 * module source, and records default plus named bindings. Best effort:
 * a failing query yields an empty list.
 */
function extractImports(tree, languageInstance, language) {
	const found = [];
	try {
		const queryResult = executePresetQuery(tree, languageInstance, language, "imports");
		const seen = createOffsetTracker();
		for (const match of queryResult.matches) {
			const statement = findCaptureByNames(match.captures, ["import.statement", "include.statement"]);
			if (!statement) continue;
			if (seen.has(statement.node)) continue;
			seen.add(statement.node);
			const sourceCap = findCaptureByNames(match.captures, [
				"import.source",
				"import.path",
				"include.path"
			]);
			const defaultCap = findCapture(match.captures, "import.default");
			const namedCaps = match.captures.filter((c) => c.name === "import.name");
			const names = [];
			if (defaultCap) names.push({ name: defaultCap.node.text });
			names.push(...namedCaps.map((c) => ({ name: c.node.text })));
			found.push({
				source: sourceCap ? sourceCap.node.text.replace(/['"]/g, "") : "",
				names,
				// "default" only when no named bindings accompany it.
				isDefault: Boolean(defaultCap) && namedCaps.length === 0,
				start: statement.node.start,
				end: statement.node.end
			});
		}
	} catch {
		// Import query unavailable for this language — return what we have.
	}
	return found;
}
1539
/**
 * Extract export statements from a parsed tree via the "exports" preset
 * query. Falls back to a regex over the statement text when no explicit
 * name capture exists; unnamed exports are reported as "default".
 * Best effort: a failing query yields an empty list.
 */
function extractExports(tree, languageInstance, language) {
	const exportList = [];
	try {
		const queryResult = executePresetQuery(tree, languageInstance, language, "exports");
		const seen = createOffsetTracker();
		for (const match of queryResult.matches) {
			const statement = findCaptureByNames(match.captures, [
				"export.statement",
				"export.function",
				"export.class",
				"export.type"
			]);
			if (!statement) continue;
			if (seen.has(statement.node)) continue;
			seen.add(statement.node);
			const statementText = statement.node.text;
			let name = findCapture(match.captures, "export.name")?.node.text;
			if (!name) {
				// No name capture: recover the identifier from the statement source.
				const fromText = /export\s+(?:default\s+)?(?:function|class|const|let|var|interface|type|enum)\s+(\w+)/.exec(statementText)?.[1];
				if (fromText) name = fromText;
			}
			exportList.push({
				name: name ?? "default",
				isDefault: statementText.includes("export default"),
				start: statement.node.start,
				end: statement.node.end
			});
		}
	} catch {
		// Export query unavailable for this language — return what we have.
	}
	return exportList;
}
1574
+
1575
+ //#endregion
1576
+ //#region src/core/embeddings/chunker.ts
1577
+ /**
1578
+ * Code chunker for splitting source files into embeddable chunks
1579
+ *
1580
+ * Uses tree-sitter for semantic chunking based on symbols (functions, classes, etc.)
1581
+ * This produces much better embeddings than character-based splitting.
1582
+ */
1583
/**
 * Derive a stable 12-hex-char chunk ID from file path, start line, and
 * content (MD5 is used for cheap fingerprinting, not security).
 */
function generateChunkId(filePath, content, startLine) {
	const fingerprint = `${filePath}:${String(startLine)}:${content}`;
	const digest = crypto.createHash("md5").update(fingerprint).digest("hex");
	return `chunk_${digest.slice(0, 12)}`;
}
1589
/**
 * Lookup table from lowercase file extension to language identifier.
 * Hoisted to module scope so the table is built once instead of on
 * every detectLanguage call.
 */
const LANGUAGE_BY_EXTENSION = Object.freeze({
	ts: "typescript",
	tsx: "typescript",
	js: "javascript",
	jsx: "javascript",
	mjs: "javascript",
	cjs: "javascript",
	py: "python",
	rs: "rust",
	go: "go",
	java: "java",
	kt: "kotlin",
	rb: "ruby",
	php: "php",
	c: "c",
	cpp: "cpp",
	h: "c",
	hpp: "cpp",
	cs: "csharp",
	swift: "swift",
	scala: "scala",
	vue: "vue",
	svelte: "svelte",
	md: "markdown",
	json: "json",
	yaml: "yaml",
	yml: "yaml",
	toml: "toml",
	xml: "xml",
	html: "html",
	css: "css",
	scss: "scss",
	less: "less",
	sql: "sql",
	sh: "bash",
	bash: "bash",
	zsh: "bash"
});
/**
 * Detect the language of a file from its extension (case-insensitive).
 * Returns "unknown" for unrecognized or missing extensions.
 */
function detectLanguage(filePath) {
	const ext = filePath.split(".").pop()?.toLowerCase() ?? "";
	return LANGUAGE_BY_EXTENSION[ext] ?? "unknown";
}
1632
/**
 * Convert a character offset into a 1-based line number by counting
 * newlines that precede the offset.
 */
function getLineFromOffset(content, offset) {
	let line = 1;
	const limit = Math.min(offset, content.length);
	for (let i = 0; i < limit; i++) {
		if (content[i] === "\n") line++;
	}
	return line;
}
1638
/**
 * Slice the source text covered by a symbol's byte-offset range.
 */
function getSymbolContent(content, symbol) {
	const from = symbol.start.offset;
	const to = symbol.end.offset;
	return content.slice(from, to);
}
1644
/**
 * Split content larger than `maxSize` into chunks at line boundaries,
 * carrying roughly `overlap` characters of trailing lines into the next
 * chunk. Line endings are normalized to "\n" first; content at or under
 * the limit is returned as a single chunk.
 */
function splitLargeContent(content, maxSize, overlap) {
	const normalized = content.replace(/\r\n/g, "\n").replace(/\r/g, "\n");
	if (normalized.length <= maxSize) return [normalized];
	const out = [];
	let buffer = [];
	let bufferSize = 0;
	// Keep the tail lines of the emitted chunk so neighbors share context.
	const carryOverlap = () => {
		const kept = [];
		let keptSize = 0;
		for (let i = buffer.length - 1; i >= 0 && keptSize < overlap; i--) {
			const line = buffer[i];
			if (line === undefined) continue;
			kept.unshift(line);
			keptSize += line.length + 1;
		}
		buffer = kept;
		bufferSize = keptSize;
	};
	for (const line of normalized.split("\n")) {
		const cost = line.length + 1; // +1 for the joining newline
		if (bufferSize + cost > maxSize && buffer.length > 0) {
			out.push(buffer.join("\n"));
			carryOverlap();
		}
		buffer.push(line);
		bufferSize += cost;
	}
	if (buffer.length > 0) out.push(buffer.join("\n"));
	return out;
}
1676
/**
 * Assemble a chunk record for one region of a file; the ID is derived
 * from path, content, and start line so identical regions dedupe.
 */
function createChunk(filePath, language, content, startLine, endLine, symbolName, symbolType) {
	const id = generateChunkId(filePath, content, startLine);
	return {
		id,
		content,
		filePath,
		language,
		startLine,
		endLine,
		symbolName,
		symbolType
	};
}
1691
/**
 * Chunk a source file using tree-sitter for semantic boundaries.
 *
 * Strategy:
 * 1. Parse the file and extract symbols (functions, classes, ...).
 * 2. Each symbol becomes its own region; code between symbols (imports,
 *    top-level statements) becomes gap regions.
 * 3. Regions larger than the configured chunk size are split at line
 *    boundaries with overlap.
 * Falls back to plain line-based chunking when parsing fails or finds
 * no symbols.
 */
async function chunkFile(filePath, content, config$1) {
	const language = detectLanguage(filePath);
	const maxSize = config$1.defaultChunkSize;
	const overlap = config$1.defaultChunkOverlap;
	let symbols = [];
	try {
		const parsed = await parseCode(content, { filePath });
		symbols = extractSymbols(parsed.tree, parsed.languageInstance, parsed.language).symbols;
	} catch (error) {
		logger.debug(`Tree-sitter parsing failed for ${filePath}, using fallback chunking: ${error instanceof Error ? error.message : String(error)}`);
		return fallbackChunk(filePath, content, language, maxSize, overlap);
	}
	if (symbols.length === 0) return fallbackChunk(filePath, content, language, maxSize, overlap);
	const ordered = [...symbols].sort((a, b) => a.start.offset - b.start.offset);
	const regions = [];
	let cursor = 0;
	// Record the code between two offsets as a region, skipping pure whitespace.
	const pushGap = (from, to) => {
		const gap = content.slice(from, to);
		if (gap.trim().length > 0) regions.push({
			content: gap,
			startOffset: from,
			endOffset: to,
			startLine: getLineFromOffset(content, from),
			endLine: getLineFromOffset(content, to)
		});
	};
	for (const symbol of ordered) {
		if (symbol.start.offset > cursor) pushGap(cursor, symbol.start.offset);
		regions.push({
			content: getSymbolContent(content, symbol),
			startOffset: symbol.start.offset,
			endOffset: symbol.end.offset,
			startLine: symbol.start.line,
			endLine: symbol.end.line,
			symbolName: symbol.name,
			symbolType: symbol.type
		});
		// max() guards against nested/overlapping symbols moving the cursor backwards.
		cursor = Math.max(cursor, symbol.end.offset);
	}
	if (cursor < content.length) pushGap(cursor, content.length);
	const chunks = [];
	for (const region of regions) {
		const trimmed = region.content.trim();
		if (trimmed.length === 0) continue;
		if (trimmed.length <= maxSize) {
			chunks.push(createChunk(filePath, language, trimmed, region.startLine, region.endLine, region.symbolName, region.symbolType));
			continue;
		}
		let lineCursor = region.startLine;
		for (const part of splitLargeContent(trimmed, maxSize, overlap)) {
			const partLineCount = (part.match(/\n/g) ?? []).length + 1;
			chunks.push(createChunk(filePath, language, part, lineCursor, lineCursor + partLineCount - 1, region.symbolName, region.symbolType));
			// Rough correction for the overlap lines repeated at the next part's head.
			lineCursor += partLineCount - Math.floor(overlap / 50);
		}
	}
	return chunks;
}
1764
/**
 * Fallback chunking when tree-sitter fails or finds no symbols:
 * simple line-based splitting with overlap.
 */
function fallbackChunk(filePath, content, language, maxSize, overlap) {
	if (content.trim().length === 0) return [];
	const result = [];
	let lineCursor = 1;
	for (const piece of splitLargeContent(content, maxSize, overlap)) {
		const pieceLineCount = (piece.match(/\n/g) ?? []).length + 1;
		result.push(createChunk(filePath, language, piece, lineCursor, lineCursor + pieceLineCount - 1));
		// Rough correction for the overlap lines repeated at the next piece's head.
		lineCursor += pieceLineCount - Math.floor(overlap / 50);
	}
	return result;
}
1780
/**
 * File extensions eligible for indexing (code + markdown).
 */
const SUPPORTED_EXTENSIONS = [
	".ts",
	".tsx",
	".js",
	".jsx",
	".mjs",
	".cjs",
	".py",
	".rs",
	".go",
	".java",
	".kt",
	".rb",
	".php",
	".c",
	".cpp",
	".h",
	".hpp",
	".cs",
	".swift",
	".scala",
	".vue",
	".svelte",
	".md"
];
/**
 * Whether a file path carries a supported extension (case-insensitive).
 */
function shouldIndexFile(filePath) {
	const lastSegment = filePath.split(".").pop() ?? "";
	return SUPPORTED_EXTENSIONS.includes(`.${lastSegment.toLowerCase()}`);
}
1815
+
1816
+ //#endregion
1817
+ //#region src/core/embeddings/crossfile.ts
1818
+ /**
1819
+ * Cross-file context resolution for enriched embeddings
1820
+ *
1821
+ * Resolves imports and includes relevant symbol definitions from
1822
+ * imported files to provide better context for semantic search.
1823
+ */
1824
/** Memoized per-file analysis results keyed by resolved absolute path; a `null` value marks a file that previously failed to parse. */
const resolvedFileCache = /* @__PURE__ */ new Map();
/**
 * Clear the resolved file cache
 */
function clearCrossFileCache() {
	resolvedFileCache.clear();
}
// Register with the shared cache registry so this cache is flushed together with the others.
registerCache("embeddings:crossFileCache", clearCrossFileCache);
1832
/**
 * Extensions tried, in order, when an import specifier omits one.
 */
const EXTENSIONS = [
	".ts",
	".tsx",
	".js",
	".jsx",
	".mjs",
	".cjs"
];
/**
 * Resolve an import specifier to an absolute file path on disk.
 * Handles configured path aliases and relative imports; bare package
 * names (and unmatched "@scope" specifiers) resolve to null. Tries the
 * path with each extension, then as-is, then as a directory index file.
 */
function resolveImportPath(importSource, currentFilePath, options) {
	const { projectRoot, pathAliases = {} } = options;
	const matchesAlias = Object.keys(pathAliases).some((alias) => importSource.startsWith(alias));
	if (!importSource.startsWith(".") && !importSource.startsWith("@") && !matchesAlias) return null;
	let candidate;
	for (const [alias, target] of Object.entries(pathAliases)) {
		if (importSource.startsWith(alias)) {
			candidate = path.join(projectRoot, target, importSource.slice(alias.length));
			break;
		}
	}
	if (candidate === undefined) {
		// Not alias-mapped: only relative specifiers can be resolved from here.
		if (!importSource.startsWith(".")) return null;
		candidate = path.resolve(path.dirname(currentFilePath), importSource);
	}
	const isFile = (p) => fs.existsSync(p) && fs.statSync(p).isFile();
	for (const ext of EXTENSIONS) {
		const withExt = candidate + ext;
		if (isFile(withExt)) return withExt;
	}
	if (isFile(candidate)) return candidate;
	for (const ext of EXTENSIONS) {
		const indexPath = path.join(candidate, `index${ext}`);
		if (isFile(indexPath)) return indexPath;
	}
	return null;
}
1870
/**
 * Parse a resolved file and extract its symbols and exports, memoizing
 * the result (including `null` for unparseable files) so each file is
 * analyzed at most once per cache lifetime.
 */
async function analyzeResolvedFile(filePath) {
	if (resolvedFileCache.has(filePath)) return resolvedFileCache.get(filePath);
	try {
		const source = fs.readFileSync(filePath, "utf-8");
		const parsed = await parseCode(source, { filePath });
		const analysis = {
			symbols: extractSymbols(parsed.tree, parsed.languageInstance, parsed.language).symbols,
			exports: extractExports(parsed.tree, parsed.languageInstance, parsed.language)
		};
		resolvedFileCache.set(filePath, analysis);
		return analysis;
	} catch (error) {
		logger.debug(`Failed to analyze ${filePath}: ${error instanceof Error ? error.message : String(error)}`);
		// Cache the failure so we do not retry this file on every import.
		resolvedFileCache.set(filePath, null);
		return null;
	}
}
1891
/**
 * Select the symbols from a target file that an import statement brings
 * into scope: everything exported for namespace imports, the default
 * export's symbol for default imports, plus all explicitly named ones.
 */
function findImportedSymbols(importStatement, symbols, exports) {
	if (importStatement.isNamespace) {
		// Namespace import: every exported symbol is reachable.
		const exportedNames = new Set(exports.map((e) => e.name));
		return symbols.filter((s) => exportedNames.has(s.name));
	}
	const wanted = new Set(importStatement.names.map((n) => n.name));
	if (importStatement.isDefault) {
		const defaultExport = exports.find((e) => e.isDefault);
		if (defaultExport) wanted.add(defaultExport.name);
	}
	return symbols.filter((s) => wanted.has(s.name));
}
1907
/**
 * Resolve a file's imports on disk and gather the symbols each import
 * brings in, capped by maxImports/maxSymbolsPerFile. Unresolvable or
 * unanalyzable imports are still recorded, with empty symbol lists.
 */
async function resolveCrossFileContext(imports, currentFilePath, options) {
	const maxImports = options.maxImports ?? 10;
	const maxSymbolsPerFile = options.maxSymbolsPerFile ?? 5;
	const resolvedImports = [];
	const record = (imp, resolvedPath, symbols, exports) => {
		resolvedImports.push({
			import: imp,
			resolvedPath,
			symbols,
			exports
		});
	};
	for (const imp of imports.slice(0, maxImports)) {
		const resolvedPath = resolveImportPath(imp.source, currentFilePath, options);
		if (!resolvedPath) {
			record(imp, null, [], []);
			continue;
		}
		const analysis = await analyzeResolvedFile(resolvedPath);
		if (!analysis) {
			record(imp, resolvedPath, [], []);
			continue;
		}
		const importedSymbols = findImportedSymbols(imp, analysis.symbols, analysis.exports).slice(0, maxSymbolsPerFile);
		record(imp, resolvedPath, importedSymbols, analysis.exports);
	}
	return {
		resolvedImports,
		importedSymbolsSummary: buildImportedSymbolsSummary(resolvedImports)
	};
}
1948
/**
 * Render the resolved imports as one line per source module, preferring
 * a symbol's signature over its "(type)" form. Imports that contributed
 * no symbols are omitted.
 */
function buildImportedSymbolsSummary(resolvedImports) {
	return resolvedImports
		.filter((resolved) => resolved.symbols.length > 0)
		.map((resolved) => {
			const described = resolved.symbols.map((s) => (s.signature ? `${s.name}: ${s.signature}` : `${s.name} (${s.type})`));
			return `From ${resolved.import.source}: ${described.join("; ")}`;
		})
		.join("\n");
}
1963
+
1964
+ //#endregion
1965
+ //#region src/core/embeddings/enricher.ts
1966
/**
 * AST cache per file path to avoid re-parsing
 * (holds parse result, symbols, imports, exports per file).
 */
const astCache = /* @__PURE__ */ new Map();
/**
 * Clear the AST cache
 */
function clearASTCache() {
	astCache.clear();
}
// Registered so the shared cache registry can flush this cache with the rest.
registerCache("embeddings:astCache", clearASTCache);
/** Maximum number of imports to include in enriched content */
const MAX_IMPORTS = 10;
/** Maximum number of exports to include in enriched content */
const MAX_EXPORTS = 10;
1981
/**
 * Parse a file (or reuse a cached analysis) and gather its symbols,
 * imports, and exports; optionally resolves cross-file context for the
 * imports when enabled and a project root is supplied. Returns null
 * when parsing fails.
 */
async function getFileAnalysis(filePath, content, options) {
	const cached = astCache.get(filePath);
	if (cached) return cached;
	try {
		const parseResult = await parseCode(content, { filePath });
		const { symbols } = extractSymbols(parseResult.tree, parseResult.languageInstance, parseResult.language);
		const imports = extractImports(parseResult.tree, parseResult.languageInstance, parseResult.language);
		const exports = extractExports(parseResult.tree, parseResult.languageInstance, parseResult.language);
		const analysis = {
			parseResult,
			symbols,
			imports,
			exports
		};
		// Per-call option overrides the global enrichment default.
		const wantCrossFile = options?.includeCrossFileContext ?? ENRICHMENT_CONFIG.includeCrossFileContext;
		if (wantCrossFile && imports.length > 0 && options?.projectRoot) {
			try {
				const crossFileContext = await resolveCrossFileContext(imports, filePath, {
					projectRoot: options.projectRoot,
					pathAliases: options.pathAliases,
					maxImports: ENRICHMENT_CONFIG.maxImportsToResolve,
					maxSymbolsPerFile: ENRICHMENT_CONFIG.maxSymbolsPerImport
				});
				analysis.crossFileContext = crossFileContext;
				logger.debug(`Resolved cross-file context for ${filePath}: ${String(crossFileContext.resolvedImports.length)} imports`);
			} catch (error) {
				// Cross-file context is optional enrichment: log and continue without it.
				logger.debug(`Failed to resolve cross-file context for ${filePath}: ${error instanceof Error ? error.message : String(error)}`);
			}
		}
		astCache.set(filePath, analysis);
		return analysis;
	} catch (error) {
		logger.debug(`Failed to parse ${filePath}: ${error instanceof Error ? error.message : String(error)}`);
		return null;
	}
}
2016
/**
 * Return the (name, type, signature) of every symbol whose line span
 * overlaps the [startLine, endLine] range.
 */
function findSymbolsInRange(symbols, startLine, endLine) {
	return symbols
		.filter((symbol) => symbol.start.line <= endLine && symbol.end.line >= startLine)
		.map((symbol) => ({
			name: symbol.name,
			type: symbol.type,
			signature: symbol.signature
		}));
}
2032
/**
 * Join up to MAX_IMPORTS non-empty import sources for the enrichment
 * header.
 */
function formatImportSources(imports) {
	const sources = [];
	for (const imp of imports.slice(0, MAX_IMPORTS)) {
		if (imp.source.length > 0) sources.push(imp.source);
	}
	return sources.join(", ");
}
2038
/**
 * Join up to MAX_EXPORTS meaningful export names (non-empty, not the
 * placeholder "default") for the enrichment header.
 */
function formatExportNames(exports) {
	const names = [];
	for (const exp of exports.slice(0, MAX_EXPORTS)) {
		if (exp.name.length > 0 && exp.name !== "default") names.push(exp.name);
	}
	return names.join(", ");
}
2044
/**
 * Render symbols as "name (type)" pairs for the enrichment header.
 */
function formatSymbols(symbols) {
	const parts = [];
	for (const symbol of symbols) parts.push(`${symbol.name} (${symbol.type})`);
	return parts.join(", ");
}
2050
/**
 * Prepend a metadata header (file, language, symbols, imports, exports,
 * imported definitions) to a chunk's content, separated by "---".
 * Empty sections are omitted.
 */
function buildEnrichedContent(chunk, symbols, imports, exports, crossFileContext) {
	const header = [`File: ${chunk.filePath}`, `Language: ${chunk.language}`];
	if (symbols.length > 0) header.push(`Symbols: ${formatSymbols(symbols)}`);
	if (imports.length > 0) {
		const importStr = formatImportSources(imports);
		if (importStr.length > 0) header.push(`Imports: ${importStr}`);
	}
	if (exports.length > 0) {
		const exportStr = formatExportNames(exports);
		if (exportStr.length > 0) header.push(`Exports: ${exportStr}`);
	}
	if (crossFileContext && crossFileContext.importedSymbolsSummary.length > 0) {
		header.push(`Imported definitions:\n${crossFileContext.importedSymbolsSummary}`);
	}
	return header.join("\n") + "\n\n---\n" + chunk.content;
}
2069
/**
 * Enrich all chunks from a single file (optimized — the file is parsed
 * once and the analysis shared across chunks). When the file path is
 * missing or analysis fails, each chunk falls back to a minimal
 * File/Language header and is marked wasEnriched=false.
 *
 * Note: the original implementation duplicated the fallback mapping
 * verbatim in both failure paths; it is factored into one local helper.
 */
async function enrichChunksFromFile(chunks, content, options) {
	if (chunks.length === 0) return [];
	// Minimal enrichment used when the file cannot be analyzed.
	const withBasicHeader = (chunk) => {
		const basicHeader = `File: ${chunk.filePath}\nLanguage: ${chunk.language}\n\n---\n`;
		return {
			...chunk,
			enrichedContent: basicHeader + chunk.content,
			containedSymbols: [],
			wasEnriched: false
		};
	};
	const filePath = chunks[0]?.filePath;
	if (!filePath) return chunks.map(withBasicHeader);
	const analysis = await getFileAnalysis(filePath, content, options);
	if (!analysis) return chunks.map(withBasicHeader);
	return chunks.map((chunk) => {
		const chunkSymbols = findSymbolsInRange(analysis.symbols, chunk.startLine, chunk.endLine);
		const enrichedContent = buildEnrichedContent(chunk, chunkSymbols, analysis.imports, analysis.exports, analysis.crossFileContext);
		return {
			...chunk,
			enrichedContent,
			containedSymbols: chunkSymbols,
			wasEnriched: true
		};
	});
}
2105
+
2106
+ //#endregion
2107
+ //#region src/core/embeddings/watcher.ts
2108
+ /**
2109
+ * File watcher for automatic index updates
2110
+ *
2111
+ * Features:
2112
+ * - SHA-256 hash comparison to detect real content changes
2113
+ * - Debounce (5s default) to handle rapid changes
2114
+ * - Persistent hash cache to avoid unnecessary re-indexing
2115
+ * - fast-glob for efficient file scanning
2116
+ */
2117
/** Default debounce delay in milliseconds (5 s) before a file event is processed */
const DEFAULT_DEBOUNCE_MS = 5e3;
/** Cache file name for storing hashes (written under <directory>/.src-index/) */
const HASH_CACHE_FILE$1 = ".src-index-hashes.json";
2121
var IndexWatcher = class {
	/** Absolute path of the watched directory. */
	directory;
	/** Embedding/indexing configuration. */
	config;
	/** Debounce delay (ms) applied to file-change events. */
	debounceMs;
	/** Client used to compute embeddings for chunk texts. */
	ollamaClient;
	/** Vector store holding the embedded chunks for this directory. */
	vectorStore;
	/** Active watcher handle, or null when not started. */
	watcher = null;
	/** Ignore filter built from the directory's .gitignore. */
	ig;
	/** True while the operation queue is being drained. */
	isProcessing = false;
	/** filePath -> hash of the last content that was indexed. */
	hashCache = {};
	/** Debounced pending change events, keyed by file path. */
	pendingChanges = /* @__PURE__ */ new Map();
	/** Async operations awaiting sequential execution. */
	operationQueue = [];
	/** Optional lifecycle callbacks supplied by the caller. */
	onReady;
	onError;
	onIndexed;
	onRemoved;
	constructor(options) {
		this.directory = path.resolve(options.directory);
		this.config = options.config;
		this.debounceMs = options.debounceMs ?? DEFAULT_DEBOUNCE_MS;
		this.ollamaClient = new OllamaClient(options.config);
		this.vectorStore = new VectorStore(this.directory, options.config);
		this.ig = this.createIgnoreFilter();
		this.onReady = options.onReady;
		this.onError = options.onError;
		this.onIndexed = options.onIndexed;
		this.onRemoved = options.onRemoved;
		// Warm the hash cache so unchanged files are skipped on the first scan.
		this.loadHashCache();
	}
2150
+ /**
2151
+ * Compute SHA-256 hash of content
2152
+ */
2153
+ computeHash(content) {
2154
+ return crypto.createHash("sha256").update(content, "utf8").digest("hex");
2155
+ }
2156
+ /**
2157
+ * Get hash cache file path
2158
+ */
2159
+ getHashCachePath() {
2160
+ return path.join(this.directory, ".src-index", HASH_CACHE_FILE$1);
2161
+ }
2162
+ /**
2163
+ * Load hash cache from disk
2164
+ */
2165
+ loadHashCache() {
2166
+ const cachePath = this.getHashCachePath();
2167
+ if (fs.existsSync(cachePath)) try {
2168
+ const content = fs.readFileSync(cachePath, "utf-8");
2169
+ this.hashCache = JSON.parse(content);
2170
+ logger.debug(`Loaded ${String(Object.keys(this.hashCache).length)} cached hashes`);
2171
+ } catch {
2172
+ this.hashCache = {};
2173
+ }
2174
+ }
2175
+ /**
2176
+ * Save hash cache to disk
2177
+ */
2178
+ saveHashCache() {
2179
+ const cachePath = this.getHashCachePath();
2180
+ const cacheDir = path.dirname(cachePath);
2181
+ try {
2182
+ if (!fs.existsSync(cacheDir)) fs.mkdirSync(cacheDir, { recursive: true });
2183
+ fs.writeFileSync(cachePath, JSON.stringify(this.hashCache, null, 2));
2184
+ } catch (err) {
2185
+ const error = err instanceof Error ? err : new Error(String(err));
2186
+ logger.debug(`Failed to save hash cache: ${error.message}`);
2187
+ }
2188
+ }
2189
+ /**
2190
+ * Check if file content has changed by comparing hashes
2191
+ */
2192
+ hasContentChanged(filePath, content) {
2193
+ const newHash = this.computeHash(content);
2194
+ if (this.hashCache[filePath] === newHash) return false;
2195
+ this.hashCache[filePath] = newHash;
2196
+ return true;
2197
+ }
2198
+ /**
2199
+ * Remove file from hash cache
2200
+ */
2201
+ removeFromHashCache(filePath) {
2202
+ const { [filePath]: _, ...rest } = this.hashCache;
2203
+ this.hashCache = rest;
2204
+ }
2205
+ /**
2206
+ * Create ignore filter from .gitignore
2207
+ */
2208
+ createIgnoreFilter() {
2209
+ const ig = ignore();
2210
+ const gitignorePath = path.join(this.directory, ".gitignore");
2211
+ if (fs.existsSync(gitignorePath)) try {
2212
+ const content = fs.readFileSync(gitignorePath, "utf-8");
2213
+ ig.add(content);
2214
+ } catch {}
2215
+ return ig;
2216
+ }
2217
+ /**
2218
+ * Check if a file should be indexed
2219
+ */
2220
+ shouldIndex(filePath) {
2221
+ const relativePath = path.relative(this.directory, filePath).replace(/\\/g, "/");
2222
+ if (relativePath.split("/").some((part) => part.startsWith("."))) return false;
2223
+ if (this.ig.ignores(relativePath)) return false;
2224
+ return shouldIndexFile(filePath);
2225
+ }
2226
+ /**
2227
+ * Schedule a file change with debouncing
2228
+ */
2229
+ scheduleChange(type, filePath) {
2230
+ const existing = this.pendingChanges.get(filePath);
2231
+ if (existing) clearTimeout(existing.timer);
2232
+ const timer = setTimeout(() => {
2233
+ this.pendingChanges.delete(filePath);
2234
+ this.queueOperation(async () => this.processChange(type, filePath));
2235
+ }, this.debounceMs);
2236
+ this.pendingChanges.set(filePath, {
2237
+ type,
2238
+ filePath,
2239
+ timer
2240
+ });
2241
+ logger.debug(`Scheduled ${type}: ${path.basename(filePath)} (${String(this.debounceMs)}ms)`);
2242
+ }
2243
+ /**
2244
+ * Process a file change after debounce
2245
+ */
2246
+ async processChange(type, filePath) {
2247
+ if (type === "unlink") await this.removeFile(filePath);
2248
+ else await this.indexFile(filePath);
2249
+ }
2250
+ /**
2251
+ * Index a single file
2252
+ */
2253
+ async indexFile(filePath) {
2254
+ if (!this.shouldIndex(filePath)) return;
2255
+ try {
2256
+ const content = fs.readFileSync(filePath, "utf-8");
2257
+ if (!this.hasContentChanged(filePath, content)) {
2258
+ logger.debug(`Skipped (unchanged): ${path.basename(filePath)}`);
2259
+ return;
2260
+ }
2261
+ const chunks = await chunkFile(filePath, content, this.config);
2262
+ if (chunks.length === 0) return;
2263
+ const enrichedChunks = await enrichChunksFromFile(chunks, content);
2264
+ const texts = enrichedChunks.map((c) => c.enrichedContent);
2265
+ const embeddings = await this.ollamaClient.embedBatch(texts);
2266
+ const embeddedChunks = enrichedChunks.map((chunk, i) => ({
2267
+ id: chunk.id,
2268
+ content: chunk.content,
2269
+ filePath: chunk.filePath,
2270
+ language: chunk.language,
2271
+ startLine: chunk.startLine,
2272
+ endLine: chunk.endLine,
2273
+ symbolName: chunk.symbolName,
2274
+ symbolType: chunk.symbolType,
2275
+ vector: embeddings[i] ?? []
2276
+ }));
2277
+ await this.vectorStore.deleteByFilePath(filePath);
2278
+ await this.vectorStore.addChunks(embeddedChunks);
2279
+ this.saveHashCache();
2280
+ logger.debug(`Indexed: ${path.relative(this.directory, filePath)}`);
2281
+ this.onIndexed?.(filePath);
2282
+ } catch (err) {
2283
+ const error = err instanceof Error ? err : new Error(String(err));
2284
+ logger.error(`Failed to index ${filePath}: ${error.message}`);
2285
+ this.onError?.(error);
2286
+ }
2287
+ }
2288
/**
 * Remove a file from the index
 */
async removeFile(filePath) {
	try {
		await this.vectorStore.deleteByFilePath(filePath);
		this.removeFromHashCache(filePath);
		// Persist the cache so the deletion survives a restart.
		this.saveHashCache();
		logger.debug(`Removed: ${path.relative(this.directory, filePath)}`);
		this.onRemoved?.(filePath);
	} catch (err) {
		// Normalize non-Error throwables before logging/reporting.
		const error = err instanceof Error ? err : new Error(String(err));
		logger.error(`Failed to remove ${filePath}: ${error.message}`);
		this.onError?.(error);
	}
}
2304
	/**
	 * Queue an operation to prevent concurrent modifications
	 *
	 * Operations are appended and drained strictly in FIFO order by
	 * processQueue, which serializes all index mutations.
	 */
	queueOperation(operation) {
		this.operationQueue.push(operation);
		// Fire-and-forget on purpose: processQueue is a no-op if a drain loop
		// is already running, and it catches each operation's errors itself.
		this.processQueue();
	}
2311
	/**
	 * Process queued operations sequentially
	 *
	 * The isProcessing flag is a re-entrancy guard ensuring only one drain
	 * loop runs at a time; each queued async operation is awaited to
	 * completion before the next starts. A failing operation is logged and
	 * skipped so the queue keeps draining.
	 */
	async processQueue() {
		if (this.isProcessing) return;
		this.isProcessing = true;
		while (this.operationQueue.length > 0) {
			const operation = this.operationQueue.shift();
			if (operation) try {
				await operation();
			} catch (err) {
				const error = err instanceof Error ? err : new Error(String(err));
				logger.error(`Operation failed: ${error.message}`);
			}
		}
		this.isProcessing = false;
	}
2328
	/**
	 * Collect files using fast-glob
	 *
	 * Globs every supported extension under the watched directory, excluding
	 * dotfiles/dot-directories and symlinks, then applies the gitignore-based
	 * filter (this.ig) to each result.
	 *
	 * @returns {Promise<string[]>} absolute paths of indexable files
	 */
	async collectFilesWithGlob() {
		return (await fg(`**/*.{${SUPPORTED_EXTENSIONS.map((ext) => ext.slice(1)).join(",")}}`, {
			cwd: this.directory,
			absolute: true,
			ignore: ["**/.*", "**/.*/**"],
			dot: false,
			onlyFiles: true,
			followSymbolicLinks: false
		})).filter((file) => {
			// ignore() expects forward-slash relative paths, even on Windows.
			const relativePath = path.relative(this.directory, file).replace(/\\/g, "/");
			return !this.ig.ignores(relativePath);
		});
	}
2344
	/**
	 * Perform full initial indexing
	 *
	 * Walks every indexable file; files whose content hash matches the cache
	 * are skipped as unchanged. The rest go through the pipeline
	 * chunk -> enrich -> embed -> store. Per-file failures are logged at
	 * debug level and do not abort the run; the hash cache is persisted once
	 * at the end. Note: unlike the incremental path, no deleteByFilePath is
	 * done first — start() only invokes this when the vector store does not
	 * exist yet, so there are no stale chunks to replace.
	 */
	async fullIndex() {
		logger.info("Starting full index...");
		const files = await this.collectFilesWithGlob();
		let indexed = 0;
		let skipped = 0;
		for (const filePath of files) try {
			const content = fs.readFileSync(filePath, "utf-8");
			if (!this.hasContentChanged(filePath, content)) {
				skipped++;
				continue;
			}
			const chunks = await chunkFile(filePath, content, this.config);
			if (chunks.length === 0) continue;
			const enrichedChunks = await enrichChunksFromFile(chunks, content);
			const texts = enrichedChunks.map((c) => c.enrichedContent);
			const embeddings = await this.ollamaClient.embedBatch(texts);
			// Pair each chunk with its embedding; a missing embedding becomes
			// an empty vector rather than failing the whole file.
			const embeddedChunks = enrichedChunks.map((chunk, i) => ({
				id: chunk.id,
				content: chunk.content,
				filePath: chunk.filePath,
				language: chunk.language,
				startLine: chunk.startLine,
				endLine: chunk.endLine,
				symbolName: chunk.symbolName,
				symbolType: chunk.symbolType,
				vector: embeddings[i] ?? []
			}));
			await this.vectorStore.addChunks(embeddedChunks);
			indexed++;
		} catch (err) {
			const error = err instanceof Error ? err : new Error(String(err));
			logger.debug(`Error indexing ${filePath}: ${error.message}`);
		}
		this.saveHashCache();
		logger.info(`Full index: ${String(indexed)} indexed, ${String(skipped)} skipped`);
	}
2383
	/**
	 * Start watching for file changes
	 *
	 * Verifies Ollama is reachable (throws otherwise), connects the vector
	 * store, runs a full index only when no store exists yet, then wires a
	 * chokidar watcher. add/change/unlink events are debounced through
	 * scheduleChange; awaitWriteFinish delays events until the file size is
	 * stable for 500ms to avoid indexing half-written files.
	 *
	 * @throws {Error} when the Ollama health check fails
	 */
	async start() {
		const health = await this.ollamaClient.healthCheck();
		if (!health.ok) throw new Error(health.error ?? "Ollama is not available");
		// Decide before connect(): connecting may create the store on disk.
		const needsFullIndex = !this.vectorStore.exists();
		await this.vectorStore.connect();
		if (needsFullIndex) await this.fullIndex();
		this.watcher = watch(this.directory, {
			ignored: (filePath) => {
				const relativePath = path.relative(this.directory, filePath).replace(/\\/g, "/");
				if (!relativePath) return false;
				// Skip any path with a hidden (dot-prefixed) component.
				if (relativePath.split("/").some((part) => part.startsWith("."))) return true;
				return this.ig.ignores(relativePath);
			},
			persistent: true,
			ignoreInitial: true,
			awaitWriteFinish: {
				stabilityThreshold: 500,
				pollInterval: 100
			}
		});
		this.watcher.on("add", (filePath) => {
			if (shouldIndexFile(filePath)) this.scheduleChange("add", filePath);
		});
		this.watcher.on("change", (filePath) => {
			if (shouldIndexFile(filePath)) this.scheduleChange("change", filePath);
		});
		this.watcher.on("unlink", (filePath) => {
			if (shouldIndexFile(filePath)) this.scheduleChange("unlink", filePath);
		});
		this.watcher.on("ready", () => {
			logger.info(`Watching: ${this.directory} (${String(this.debounceMs)}ms debounce)`);
			this.onReady?.();
		});
		this.watcher.on("error", (err) => {
			const error = err instanceof Error ? err : new Error(String(err));
			logger.error(`Watcher error: ${error.message}`);
			this.onError?.(error);
		});
	}
2425
	/**
	 * Stop watching and cleanup
	 *
	 * Cancels pending debounce timers (those changes are dropped, not
	 * flushed), persists the hash cache, closes the chokidar watcher and the
	 * vector store. Safe to call when the watcher never started.
	 */
	async stop() {
		for (const pending of this.pendingChanges.values()) clearTimeout(pending.timer);
		this.pendingChanges.clear();
		this.saveHashCache();
		if (this.watcher) {
			await this.watcher.close();
			// null signals "not running" to isRunning().
			this.watcher = null;
		}
		this.vectorStore.close();
		logger.info("Watcher stopped");
	}
2439
+ /**
2440
+ * Check if watcher is running
2441
+ */
2442
+ isRunning() {
2443
+ return this.watcher !== null;
2444
+ }
2445
+ /**
2446
+ * Clear the hash cache
2447
+ */
2448
+ clearCache() {
2449
+ this.hashCache = {};
2450
+ const cachePath = this.getHashCachePath();
2451
+ if (fs.existsSync(cachePath)) fs.unlinkSync(cachePath);
2452
+ logger.info("Hash cache cleared");
2453
+ }
2454
+ /**
2455
+ * Get cache statistics
2456
+ */
2457
+ getCacheStats() {
2458
+ return {
2459
+ cachedFiles: Object.keys(this.hashCache).length,
2460
+ cacheSize: JSON.stringify(this.hashCache).length
2461
+ };
2462
+ }
2463
+ };
2464
/**
 * Create a new index watcher
 *
 * Thin factory over the IndexWatcher class; `options` is forwarded verbatim
 * to the constructor.
 */
function createIndexWatcher(options) {
	return new IndexWatcher(options);
}
2470
+
2471
+ //#endregion
2472
+ //#region src/core/embeddings/callgraph.ts
2473
+ /**
2474
+ * Call graph extraction and storage
2475
+ *
2476
+ * Extracts function call relationships from code using tree-sitter
2477
+ * to build a graph showing which functions call which.
2478
+ *
2479
+ * Features:
2480
+ * - Persistent caching in .src-index/call-graph.json
2481
+ * - Hash-based invalidation for changed files
2482
+ */
2483
/**
 * Compute a short content fingerprint: the first 16 hex characters of the
 * SHA-256 digest of `content`. Used for cheap change detection, not security.
 */
function computeHash$1(content) {
	const fullDigest = crypto.createHash("sha256").update(content).digest("hex");
	return fullDigest.slice(0, 16);
}
2489
/**
 * Resolve the on-disk location of the persisted call graph for `directory`:
 * <directory>/.src-index/call-graph.json
 */
function getCachePath(directory) {
	const cacheDir = path.join(directory, ".src-index");
	return path.join(cacheDir, "call-graph.json");
}
2495
/**
 * Save call graph to persistent cache
 *
 * Serializes the node map (Map -> plain object), the source file list, the
 * edge count and the per-file content hashes used for invalidation to
 * <directory>/.src-index/call-graph.json. Best-effort: any failure is
 * swallowed and logged at debug level — the message blames an unwritable
 * directory, though other I/O or serialization errors land here too.
 */
function saveCallGraphCache(directory, graph, fileHashes) {
	try {
		const cachePath = getCachePath(directory);
		const cacheDir = path.dirname(cachePath);
		if (!fs.existsSync(cacheDir)) fs.mkdirSync(cacheDir, { recursive: true });
		const serialized = {
			nodes: Object.fromEntries(graph.nodes),
			files: graph.files,
			edgeCount: graph.edgeCount,
			fileHashes,
			// timestamp is informational; validation relies on fileHashes.
			timestamp: Date.now()
		};
		fs.writeFileSync(cachePath, JSON.stringify(serialized), "utf-8");
		logger.debug(`Call graph cache saved: ${String(graph.nodes.size)} nodes`);
	} catch {
		logger.debug("Call graph cache save skipped: directory not writable");
	}
}
2516
/**
 * Load call graph from persistent cache if valid
 *
 * The cache is valid only when the tracked file set has the same size AND
 * every current file's content hash matches the cached one. An added or
 * removed file is caught by the size check or by the per-file lookup
 * (missing key !== hash). Returns null on any mismatch or read/parse
 * failure, forcing a rebuild.
 */
function loadCallGraphCache(directory, currentHashes) {
	const cachePath = getCachePath(directory);
	if (!fs.existsSync(cachePath)) return null;
	try {
		const content = fs.readFileSync(cachePath, "utf-8");
		const cached = JSON.parse(content);
		const cachedFiles = new Set(Object.keys(cached.fileHashes));
		const currentFiles = new Set(Object.keys(currentHashes));
		if (cachedFiles.size !== currentFiles.size) {
			logger.debug("Call graph cache invalid: file count changed");
			return null;
		}
		for (const [filePath, hash] of Object.entries(currentHashes)) if (cached.fileHashes[filePath] !== hash) {
			logger.debug(`Call graph cache invalid: ${filePath} changed`);
			return null;
		}
		// Rehydrate the node map from its plain-object serialization.
		const nodes = new Map(Object.entries(cached.nodes));
		logger.debug(`Call graph cache loaded: ${String(nodes.size)} nodes`);
		return {
			nodes,
			files: cached.files,
			edgeCount: cached.edgeCount
		};
	} catch (error) {
		logger.debug(`Failed to load call graph cache: ${error instanceof Error ? error.message : String(error)}`);
		return null;
	}
}
2547
/**
 * Per-process memoization of per-file call-graph analysis, keyed by file
 * path. NOTE(review): the key ignores content, so a long-running process
 * may serve stale analysis after a file changes — confirm callers clear
 * this cache (or restart) on change.
 */
const callGraphCache = /* @__PURE__ */ new Map();
/**
 * Clear the call graph cache
 */
function clearCallGraphCache() {
	callGraphCache.clear();
}
// Register the clearer with the shared cache registry, presumably so a
// global reset hook can flush all module caches at once.
registerCache("embeddings:callGraphCache", clearCallGraphCache);
2555
/**
 * Extract function calls from a tree-sitter node
 *
 * Runs a per-language tree-sitter query capturing each call site's callee
 * identifier (plain calls and member/attribute/selector calls). Despite the
 * map return type, ALL captures are stored under the single "__global__"
 * key here — attribution to the enclosing symbol happens later in
 * associateCallsWithSymbols. Unsupported languages and query failures yield
 * an empty map (failures are logged at debug level).
 */
function extractCallsFromTree(tree, languageInstance, language) {
	const callsBySymbol = /* @__PURE__ */ new Map();
	// Query text is tree-sitter S-expression syntax; whitespace is not
	// significant to the query engine.
	const pattern = {
		typescript: `
		(call_expression
			function: [(identifier) @callee
				(member_expression property: (property_identifier) @callee)]
			arguments: (arguments) @args)
	`,
		javascript: `
		(call_expression
			function: [(identifier) @callee
				(member_expression property: (property_identifier) @callee)]
			arguments: (arguments) @args)
	`,
		python: `
		(call
			function: [(identifier) @callee
				(attribute attribute: (identifier) @callee)]
			arguments: (argument_list) @args)
	`,
		go: `
		(call_expression
			function: [(identifier) @callee
				(selector_expression field: (field_identifier) @callee)]
			arguments: (argument_list) @args)
	`
	}[language];
	if (!pattern) return callsBySymbol;
	try {
		const matches = new Query(languageInstance, pattern).matches(tree.rootNode);
		const callCaptures = [];
		// Keep only the @callee captures; record 1-based line positions.
		for (const match of matches) for (const capture of match.captures) if (capture.name === "callee") callCaptures.push({
			callee: capture.node.text,
			position: {
				line: capture.node.startPosition.row + 1,
				column: capture.node.startPosition.column,
				offset: capture.node.startIndex
			}
		});
		if (callCaptures.length > 0) callsBySymbol.set("__global__", callCaptures.map((c) => ({
			callee: c.callee,
			position: c.position
		})));
	} catch (error) {
		logger.debug(`Failed to extract calls for ${language}: ${error instanceof Error ? error.message : String(error)}`);
	}
	return callsBySymbol;
}
2607
/**
 * Group call sites by the function/method symbol whose span contains them.
 * Calls outside every callable land under the "__global__" key. When
 * callables nest, the innermost (latest-starting) container wins because it
 * is visited last in start-offset order.
 */
function associateCallsWithSymbols(symbols, allCalls) {
	const callables = symbols
		.filter((s) => s.type === "function" || s.type === "method")
		.sort((a, b) => a.start.offset - b.start.offset);
	const grouped = /* @__PURE__ */ new Map();
	for (const call of allCalls) {
		const offset = call.position.offset;
		let owner = null;
		for (const candidate of callables) {
			// callables are sorted by start; once a candidate begins past
			// this call, no later one can contain it.
			if (offset < candidate.start.offset) break;
			if (offset <= candidate.end.offset) owner = candidate;
		}
		const key = owner?.name ?? "__global__";
		const bucket = grouped.get(key);
		if (bucket) bucket.push(call);
		else grouped.set(key, [call]);
	}
	return grouped;
}
2624
/**
 * Analyze a file and extract call graph data
 *
 * Parses the file with tree-sitter, extracts its symbols, and attributes
 * the flat "__global__" call list from extractCallsFromTree to the symbols
 * that contain each call site. Results are memoized in callGraphCache by
 * file path — NOTE(review): the memo key ignores `content`, so a changed
 * file returns stale data within one process; confirm callers invalidate.
 * Returns null when parsing fails (logged at debug level).
 */
async function analyzeFileForCallGraph(filePath, content) {
	const cached = callGraphCache.get(filePath);
	if (cached) return cached;
	try {
		const parseResult = await parseCode(content, { filePath });
		const { symbols } = extractSymbols(parseResult.tree, parseResult.languageInstance, parseResult.language);
		const data = {
			symbols,
			calls: associateCallsWithSymbols(symbols, extractCallsFromTree(parseResult.tree, parseResult.languageInstance, parseResult.language).get("__global__") ?? [])
		};
		callGraphCache.set(filePath, data);
		return data;
	} catch (error) {
		logger.debug(`Failed to analyze ${filePath} for call graph: ${error instanceof Error ? error.message : String(error)}`);
		return null;
	}
}
2644
/**
 * Build a call graph from multiple files
 *
 * Uses persistent caching with hash-based invalidation for performance.
 *
 * Two-pass construction: pass 1 declares a node for every function/method
 * symbol ("path:name" keys); pass 2 wires edges by matching each recorded
 * call's callee NAME against all nodes — so a call links to every
 * same-named function across files (may over-connect; there is no
 * import-aware resolution here). Calls attributed to "__global__" add
 * calledBy edges but have no caller node.
 */
async function buildCallGraph(files) {
	if (files.length === 0) return {
		nodes: /* @__PURE__ */ new Map(),
		files: [],
		edgeCount: 0
	};
	const fileHashes = {};
	for (const file of files) fileHashes[file.path] = computeHash$1(file.content);
	const baseDir = findCommonDirectory(files.map((f) => f.path));
	const cached = loadCallGraphCache(baseDir, fileHashes);
	if (cached) return cached;
	const nodes = /* @__PURE__ */ new Map();
	const filePaths = [];
	let edgeCount = 0;
	// Pass 1: create a node per callable symbol.
	for (const file of files) {
		filePaths.push(file.path);
		const data = await analyzeFileForCallGraph(file.path, file.content);
		if (!data) continue;
		for (const symbol of data.symbols) if (symbol.type === "function" || symbol.type === "method") {
			const qualifiedName = `${file.path}:${symbol.name}`;
			nodes.set(qualifiedName, {
				name: symbol.name,
				qualifiedName,
				filePath: file.path,
				type: symbol.type,
				start: symbol.start,
				end: symbol.end,
				calls: [],
				calledBy: []
			});
		}
	}
	// Pass 2: wire edges; relies on pass 1 having populated callGraphCache.
	for (const file of files) {
		const data = callGraphCache.get(file.path);
		if (!data) continue;
		for (const [symbolName, calls] of data.calls) {
			const callerKey = `${file.path}:${symbolName}`;
			const callerNode = nodes.get(callerKey);
			if (!callerNode && symbolName !== "__global__") continue;
			// O(calls x nodes) name scan — every same-named node matches.
			for (const call of calls) for (const [nodeKey, node] of nodes) if (node.name === call.callee) {
				if (callerNode) callerNode.calls.push(nodeKey);
				node.calledBy.push(callerKey);
				edgeCount++;
			}
		}
	}
	const graph = {
		nodes,
		files: filePaths,
		edgeCount
	};
	saveCallGraphCache(baseDir, graph, fileHashes);
	return graph;
}
2703
/**
 * Find common directory from a list of file paths
 *
 * Returns "." for an empty list (or when the paths share no leading
 * segments), the containing directory for a single path, and otherwise the
 * longest shared leading segment prefix. If that prefix names an existing
 * *file* (e.g. duplicate inputs), its parent directory is returned instead.
 */
function findCommonDirectory(paths) {
	if (paths.length === 0) return ".";
	const firstPathStr = paths[0];
	if (!firstPathStr) return ".";
	if (paths.length === 1) return path.dirname(firstPathStr);
	const segments = paths.map((p) => path.normalize(p).split(path.sep));
	const firstPath = segments[0];
	if (!firstPath) return ".";
	let commonLength = 0;
	for (let i = 0; i < firstPath.length; i++) {
		const segment = firstPath[i];
		// Bug fix: POSIX absolute paths split into a leading "" segment
		// ("/a/b" -> ["", "a", "b"]). The previous truthiness check broke
		// out of the loop immediately, collapsing every absolute-path input
		// to "." (and sending the call-graph cache to the CWD instead of the
		// project root). Only running past the array end stops the scan now.
		if (segment === void 0) break;
		if (segments.every((s) => s[i] === segment)) commonLength = i + 1;
		else break;
	}
	const commonDir = firstPath.slice(0, commonLength).join(path.sep);
	if (commonDir && fs.existsSync(commonDir) && fs.statSync(commonDir).isFile()) return path.dirname(commonDir);
	return commonDir || ".";
}
2724
/**
 * Look up the immediate callers and callees of `functionName` defined in
 * `filePath`. Returns null when the function is not a node of the graph;
 * edges pointing at keys with no node are silently dropped.
 */
function getCallContext(graph, filePath, functionName) {
	const node = graph.nodes.get(`${filePath}:${functionName}`);
	if (!node) return null;
	// Resolve a list of node keys to the nodes that actually exist.
	const resolve = (keys) => {
		const found = [];
		for (const key of keys) {
			const candidate = graph.nodes.get(key);
			if (candidate) found.push(candidate);
		}
		return found;
	};
	return {
		callers: resolve(node.calledBy),
		callees: resolve(node.calls)
	};
}
2746
+
2747
+ //#endregion
2748
+ //#region src/features/index-codebase/index.ts
2749
+ /**
2750
+ * Index Codebase Feature
2751
+ *
2752
+ * Indexes a directory by:
2753
+ * 1. Scanning for supported files
2754
+ * 2. Chunking each file
2755
+ * 3. Generating embeddings via Ollama
2756
+ * 4. Storing in LanceDB
2757
+ */
2758
/** Default concurrency for parallel file processing */
const DEFAULT_CONCURRENCY = 4;
/**
 * Map an async `processor` over `items` with at most `concurrency` tasks in
 * flight (worker-pool pattern). Results keep input order; slots whose item
 * or result is undefined are dropped from the returned array.
 */
async function parallelMap(items, processor, concurrency) {
	const slots = new Array(items.length);
	let cursor = 0;
	const drain = async () => {
		// Claiming an index and advancing the cursor happens synchronously
		// (no await in between), so workers never process the same slot.
		while (cursor < items.length) {
			const i = cursor;
			cursor += 1;
			const item = items[i];
			if (item === void 0) continue;
			slots[i] = await processor(item);
		}
	};
	const poolSize = Math.min(concurrency, items.length);
	const pool = Array.from({ length: poolSize }, () => drain());
	await Promise.all(pool);
	return slots.filter((r) => r !== void 0);
}
2777
/**
 * Input schema for index_codebase. `exclude` patterns are layered on top of
 * the project's .gitignore; `concurrency` bounds parallel file processing.
 */
const indexCodebaseSchema = z.object({
	directory: z.string().optional().default(".").describe("Path to the directory to index (defaults to current directory)"),
	force: z.boolean().optional().default(false).describe("Force re-indexing even if index exists"),
	exclude: z.array(z.string()).optional().default([]).describe("Additional glob patterns to exclude"),
	concurrency: z.number().int().positive().optional().default(DEFAULT_CONCURRENCY).describe("Number of files to process in parallel (default: 4)")
});
2783
/**
 * Create an ignore instance with gitignore patterns and additional exclusions
 *
 * The .gitignore read is best-effort: a file that exists but cannot be read
 * is silently skipped so indexing still proceeds with just
 * `additionalExclusions`.
 */
function createIgnoreFilter$2(baseDir, additionalExclusions) {
	const ig = ignore();
	const gitignorePath = path.join(baseDir, ".gitignore");
	if (fs.existsSync(gitignorePath)) try {
		const content = fs.readFileSync(gitignorePath, "utf-8");
		ig.add(content);
	} catch {}
	if (additionalExclusions.length > 0) ig.add(additionalExclusions);
	return ig;
}
2796
/**
 * True when `name` denotes a hidden (dot-prefixed) file or directory.
 */
function isHidden$2(name) {
	return name.charAt(0) === ".";
}
2802
/**
 * Recursively gather indexable files under `dir`.
 *
 * Hidden entries and anything matched by the ignore filter are pruned;
 * files are kept only when shouldIndexFile accepts their name. Paths are
 * compared against the filter as forward-slash relatives of `baseDir`.
 */
function collectFiles$2(dir, ig, baseDir) {
	const collected = [];
	for (const entry of fs.readdirSync(dir, { withFileTypes: true })) {
		if (isHidden$2(entry.name)) continue;
		const fullPath = path.join(dir, entry.name);
		const relativePath = path.relative(baseDir, fullPath).replace(/\\/g, "/");
		if (ig.ignores(relativePath)) continue;
		if (entry.isDirectory()) {
			collected.push(...collectFiles$2(fullPath, ig, baseDir));
		} else if (entry.isFile() && shouldIndexFile(entry.name)) {
			collected.push(fullPath);
		}
	}
	return collected;
}
2818
/**
 * Execute the index_codebase feature
 *
 * Pipeline: validate directory and Ollama health -> collect files (respecting
 * .gitignore + extra excludes) -> chunk & enrich in parallel -> embed in
 * batches -> store vectors. Per-file and per-batch errors are accumulated in
 * result.errors instead of aborting.
 *
 * Fix: the early "no indexable files" return happened after
 * vectorStore.connect() but never closed the store, unlike every other
 * post-connect exit path; it now closes before returning.
 */
async function execute$3(input) {
	const { directory, force, exclude, concurrency } = input;
	if (!fs.existsSync(directory)) return {
		success: false,
		error: `Directory not found: ${directory}`
	};
	const absoluteDir = path.resolve(directory);
	const ollamaClient = createOllamaClient(EMBEDDING_CONFIG);
	const vectorStore = createVectorStore(absoluteDir, EMBEDDING_CONFIG);
	const health = await ollamaClient.healthCheck();
	if (!health.ok) return {
		success: false,
		error: health.error ?? "Ollama is not available"
	};
	if (vectorStore.exists() && !force) return {
		success: false,
		error: "Index already exists. Use force=true to re-index or search_code to query."
	};
	const result = {
		directory: absoluteDir,
		filesIndexed: 0,
		chunksCreated: 0,
		languages: {},
		errors: []
	};
	try {
		await vectorStore.connect();
		if (force && vectorStore.exists()) await vectorStore.clear();
		const files = collectFiles$2(absoluteDir, createIgnoreFilter$2(absoluteDir, exclude), absoluteDir);
		if (files.length === 0) {
			// Close the store we just connected — this early return previously
			// leaked the connection.
			vectorStore.close();
			return {
				success: true,
				message: "No indexable files found in directory",
				data: result
			};
		}
		const pathAliases = readPathAliasesCached(absoluteDir);
		const aliasCount = Object.keys(pathAliases).length;
		const enrichmentOptions = {
			projectRoot: absoluteDir,
			pathAliases,
			includeCrossFileContext: true
		};
		logger.debug(`Indexing ${String(files.length)} files with concurrency=${String(concurrency)} (projectRoot: ${absoluteDir}, ${String(aliasCount)} path aliases)`);
		// Chunk + enrich one file; errors are captured per file, not thrown.
		const processFile = async (filePath) => {
			try {
				const content = fs.readFileSync(filePath, "utf-8");
				return { chunks: await enrichChunksFromFile(await chunkFile(filePath, content, EMBEDDING_CONFIG), content, enrichmentOptions) };
			} catch (err) {
				return {
					chunks: [],
					error: `Error processing ${filePath}: ${err instanceof Error ? err.message : String(err)}`
				};
			}
		};
		const fileResults = await parallelMap(files, processFile, concurrency);
		const allEnrichedChunks = [];
		for (const fileResult of fileResults) if (fileResult.error) result.errors.push(fileResult.error);
		else {
			allEnrichedChunks.push(...fileResult.chunks);
			result.filesIndexed++;
			for (const chunk of fileResult.chunks) result.languages[chunk.language] = (result.languages[chunk.language] ?? 0) + 1;
		}
		// Embed in fixed-size batches; a failed batch loses only its chunks.
		const { batchSize } = EMBEDDING_CONFIG;
		const embeddedChunks = [];
		for (let i = 0; i < allEnrichedChunks.length; i += batchSize) {
			const batch = allEnrichedChunks.slice(i, i + batchSize);
			const texts = batch.map((c) => c.enrichedContent);
			try {
				const embeddings = await ollamaClient.embedBatch(texts);
				for (let j = 0; j < batch.length; j++) {
					const chunk = batch[j];
					const vector = embeddings[j];
					if (chunk && vector) embeddedChunks.push({
						id: chunk.id,
						content: chunk.content,
						filePath: chunk.filePath,
						language: chunk.language,
						startLine: chunk.startLine,
						endLine: chunk.endLine,
						symbolName: chunk.symbolName,
						symbolType: chunk.symbolType,
						vector
					});
				}
			} catch (err) {
				const errorMsg = err instanceof Error ? err.message : String(err);
				result.errors.push(`Embedding batch error: ${errorMsg}`);
			}
		}
		if (embeddedChunks.length > 0) {
			await vectorStore.addChunks(embeddedChunks);
			result.chunksCreated = embeddedChunks.length;
		}
		vectorStore.close();
		return {
			success: true,
			message: result.errors.length > 0 ? `Indexed ${String(result.filesIndexed)} files (${String(result.chunksCreated)} chunks) with ${String(result.errors.length)} errors` : `Successfully indexed ${String(result.filesIndexed)} files (${String(result.chunksCreated)} chunks)`,
			data: result
		};
	} catch (err) {
		vectorStore.close();
		return {
			success: false,
			error: `Indexing failed: ${err instanceof Error ? err.message : String(err)}`,
			data: result
		};
	}
}
2928
/**
 * MCP tool definition for index_codebase: wires the input schema to the
 * executor. The description string is surfaced to the model/client verbatim.
 */
const indexCodebaseFeature = {
	name: "index_codebase",
	description: "Index a codebase for semantic code search. USE THIS FIRST before search_code. Required once per project - creates vector embeddings for 50+ languages. After initial indexing, use update_index for incremental updates.",
	schema: indexCodebaseSchema,
	execute: execute$3
};
2934
+
2935
+ //#endregion
2936
+ //#region src/features/search-code/index.ts
2937
+ /**
2938
+ * Search Code Feature
2939
+ *
2940
+ * Performs hybrid search on indexed codebase combining:
2941
+ * 1. Vector similarity search (semantic embeddings via Ollama)
2942
+ * 2. Full-text search (BM25 keyword matching)
2943
+ * 3. RRF (Reciprocal Rank Fusion) to combine results
2944
+ *
2945
+ * Supports three search modes:
2946
+ * - 'hybrid' (default): Best of both vector and keyword search
2947
+ * - 'vector': Semantic search only
2948
+ * - 'fts': Keyword search only
2949
+ *
2950
+ * Optional features:
2951
+ * - LLM re-ranking for improved relevance
2952
+ * - Call context to show callers/callees for each result
2953
+ */
2954
/**
 * Input schema for search_code.
 * Note: `threshold` is only applied when mode === "vector" (see the
 * executor); hybrid/fts results carry fusion scores, not raw distances.
 */
const searchCodeSchema = z.object({
	query: z.string().min(1).describe("Natural language search query"),
	directory: z.string().optional().default(".").describe("Path to the indexed directory (defaults to current directory)"),
	limit: z.number().int().positive().optional().default(10).describe("Maximum number of results to return"),
	threshold: z.number().min(0).max(2).optional().describe("Maximum distance threshold for results (lower = more similar)"),
	mode: z.enum([
		"vector",
		"fts",
		"hybrid"
	]).optional().default("hybrid").describe("Search mode: 'vector' (semantic only), 'fts' (keyword only), 'hybrid' (combined with RRF fusion)"),
	includeCallContext: z.boolean().optional().default(true).describe("Include caller/callee information for each result (uses cached call graph)")
});
2966
/**
 * Build the gitignore-style filter used when collecting files for the call
 * graph. Always excludes common build/VCS directories, then layers the
 * project's .gitignore on top when present.
 *
 * Fix: the .gitignore read is now best-effort (try/catch), matching
 * createIgnoreFilter$2 — previously an existing-but-unreadable .gitignore
 * threw and aborted the whole search.
 */
function createIgnoreFilter$1(directory) {
	const ig = ignore();
	ig.add([
		"node_modules",
		".git",
		"dist",
		"build",
		".src-index"
	]);
	const gitignorePath = path.join(directory, ".gitignore");
	if (fs.existsSync(gitignorePath)) try {
		const content = fs.readFileSync(gitignorePath, "utf-8");
		ig.add(content);
	} catch {
		// Best-effort: fall back to the built-in excludes above.
	}
	return ig;
}
2985
/**
 * True when the entry name is a hidden (dot-prefixed) file or folder.
 */
function isHidden$1(name) {
	return name.length > 0 && name[0] === ".";
}
2991
/**
 * Recursively collect the source files that feed call-graph construction.
 * Hidden entries and ignore-filter matches are pruned; only files accepted
 * by shouldIndexFile are kept. Relative paths are POSIX-normalized for the
 * ignore filter.
 */
function collectFiles$1(dir, ig, baseDir) {
	const found = [];
	const entries = fs.readdirSync(dir, { withFileTypes: true });
	for (const entry of entries) {
		if (isHidden$1(entry.name)) continue;
		const fullPath = path.join(dir, entry.name);
		const relativePath = path.relative(baseDir, fullPath).replace(/\\/g, "/");
		if (ig.ignores(relativePath)) continue;
		if (entry.isDirectory()) {
			const nested = collectFiles$1(fullPath, ig, baseDir);
			found.push(...nested);
		} else if (entry.isFile() && shouldIndexFile(entry.name)) {
			found.push(fullPath);
		}
	}
	return found;
}
3007
/**
 * Shape raw vector-store hits into the public search-result records, with
 * file paths rewritten relative to `baseDir`.
 */
function formatResults(results, baseDir) {
	return results.map(({ chunk, score }) => ({
		filePath: path.relative(baseDir, chunk.filePath),
		language: chunk.language,
		startLine: chunk.startLine,
		endLine: chunk.endLine,
		content: chunk.content,
		score,
		symbolName: chunk.symbolName,
		symbolType: chunk.symbolType
	}));
}
3022
/**
 * Execute the search_code feature
 *
 * Pipeline: validate directory/index/Ollama -> embed the query -> hybrid
 * search -> (vector mode only) distance-threshold filter -> optionally
 * decorate each result with caller/callee names from the call graph ->
 * format a human-readable summary.
 *
 * NOTE(review): on an error after the happy-path vectorStore.close(), the
 * catch block calls close() a second time — confirm the store tolerates a
 * double close.
 */
async function execute$2(input) {
	const { query, directory, limit, threshold, mode, includeCallContext } = input;
	if (!fs.existsSync(directory)) return {
		success: false,
		error: `Directory not found: ${directory}`
	};
	const absoluteDir = path.resolve(directory);
	const ollamaClient = createOllamaClient(EMBEDDING_CONFIG);
	const vectorStore = createVectorStore(absoluteDir, EMBEDDING_CONFIG);
	if (!vectorStore.exists()) return {
		success: false,
		error: `No index found for directory. Run index_codebase first: ${absoluteDir}`
	};
	try {
		const health = await ollamaClient.healthCheck();
		if (!health.ok) return {
			success: false,
			error: health.error ?? "Ollama is not available"
		};
		await vectorStore.connect();
		const queryVector = await ollamaClient.embed(query);
		let results = await vectorStore.searchHybrid(queryVector, query, limit, { mode });
		// threshold is a vector-distance bound; fusion scores in other modes
		// are not comparable, so the filter only applies to vector mode.
		if (threshold !== void 0 && mode === "vector") results = results.filter((r) => r.score <= threshold);
		vectorStore.close();
		let formattedResults = formatResults(results, absoluteDir);
		if (includeCallContext && formattedResults.length > 0) {
			// Builds (or loads from cache) the call graph for the whole
			// project — reads every indexable file's content.
			const callGraph = await buildCallGraph(collectFiles$1(absoluteDir, createIgnoreFilter$1(absoluteDir), absoluteDir).map((f) => ({
				path: f,
				content: fs.readFileSync(f, "utf-8")
			})));
			formattedResults = formattedResults.map((result) => {
				if (!result.symbolName) return result;
				const context = getCallContext(callGraph, path.join(absoluteDir, result.filePath), result.symbolName);
				if (context) return {
					...result,
					callContext: {
						callers: context.callers.map((c) => c.name),
						callees: context.callees.map((c) => c.name)
					}
				};
				return result;
			});
		}
		const output = {
			query,
			directory: absoluteDir,
			resultsCount: formattedResults.length,
			results: formattedResults
		};
		if (formattedResults.length === 0) return {
			success: true,
			message: "No matching code found",
			data: output
		};
		// Human-readable digest: location, symbol, 100-char preview, and up
		// to three caller/callee names per result.
		const resultLines = formattedResults.map((r, i) => {
			const location = `${r.filePath}:${String(r.startLine)}-${String(r.endLine)}`;
			const symbol = r.symbolName ? ` (${r.symbolType ?? "symbol"}: ${r.symbolName})` : "";
			const preview = r.content.slice(0, 100).replace(/\n/g, " ");
			let callInfo = "";
			if (r.callContext) {
				const callers = r.callContext.callers.length > 0 ? `Called by: ${r.callContext.callers.slice(0, 3).join(", ")}${r.callContext.callers.length > 3 ? "..." : ""}` : "";
				const callees = r.callContext.callees.length > 0 ? `Calls: ${r.callContext.callees.slice(0, 3).join(", ")}${r.callContext.callees.length > 3 ? "..." : ""}` : "";
				if (callers || callees) callInfo = `\n   ${[callers, callees].filter(Boolean).join(" | ")}`;
			}
			return `${String(i + 1)}. [${r.language}] ${location}${symbol}\n   ${preview}...${callInfo}`;
		});
		return {
			success: true,
			message: `Found ${String(formattedResults.length)} results for "${query}":\n\n${resultLines.join("\n\n")}`,
			data: output
		};
	} catch (err) {
		vectorStore.close();
		return {
			success: false,
			error: `Search failed: ${err instanceof Error ? err.message : String(err)}`
		};
	}
}
3104
/**
 * MCP tool definition for search_code: wires the input schema to the
 * executor. The description string is surfaced to the model/client verbatim.
 */
const searchCodeFeature = {
	name: "search_code",
	description: "Search code semantically using natural language queries. USE THIS to find code by concept/meaning (e.g., 'authentication logic', 'error handling'). Requires index_codebase first. Returns relevant code chunks with file locations, function names, and call relationships (who calls what).",
	schema: searchCodeSchema,
	execute: execute$2
};
3110
+
3111
+ //#endregion
3112
+ //#region src/features/get-index-status/index.ts
3113
+ /**
3114
+ * Get Index Status Feature
3115
+ *
3116
+ * Returns information about the embedding index for a directory:
3117
+ * - Whether an index exists
3118
+ * - Total chunks and files indexed
3119
+ * - Language breakdown
3120
+ */
3121
/** Input schema for get_index_status: just the directory to inspect. */
const getIndexStatusSchema = z.object({ directory: z.string().optional().default(".").describe("Path to the directory to check (defaults to current directory)") });
3122
/**
 * Execute the get_index_status feature
 *
 * Validates the directory, short-circuits with an empty status when no
 * index directory exists, otherwise connects to the vector store, reads its
 * aggregate status (files, chunks, language breakdown) and renders a
 * human-readable summary.
 *
 * Fix: `vectorStore` was declared inside the try block, so when connect()
 * or getStatus() threw, the catch block could not close the store and the
 * connection leaked. The store is now hoisted and closed on the error path
 * as well (consistent with the index/search executors).
 */
async function execute$1(input) {
	const { directory } = input;
	if (!fs.existsSync(directory)) return {
		success: false,
		error: `Directory not found: ${directory}`
	};
	const absoluteDir = path.resolve(directory);
	const indexPath = getIndexPath(absoluteDir);
	if (!fs.existsSync(indexPath)) {
		const status = {
			directory: absoluteDir,
			indexPath,
			exists: false,
			totalChunks: 0,
			totalFiles: 0,
			languages: {}
		};
		return {
			success: true,
			message: `No index found for ${absoluteDir}. Run index_codebase to create one.`,
			data: status
		};
	}
	let vectorStore = null;
	try {
		vectorStore = createVectorStore(absoluteDir, EMBEDDING_CONFIG);
		await vectorStore.connect();
		const status = await vectorStore.getStatus(absoluteDir);
		vectorStore.close();
		// Largest language first for the summary.
		const languageLines = Object.entries(status.languages).sort(([, a], [, b]) => b - a).map(([lang, count]) => `  - ${lang}: ${String(count)} chunks`);
		return {
			success: true,
			message: [
				`Index Status for ${absoluteDir}`,
				``,
				`Index Path: ${status.indexPath}`,
				`Total Files: ${String(status.totalFiles)}`,
				`Total Chunks: ${String(status.totalChunks)}`,
				``,
				`Languages:`,
				...languageLines
			].join("\n"),
			data: status
		};
	} catch (err) {
		// Release the store on failure too; it may never have connected.
		vectorStore?.close();
		return {
			success: false,
			error: `Failed to read index status: ${err instanceof Error ? err.message : String(err)}`
		};
	}
}
3175
// Feature descriptor for the get_index_status MCP tool.
const getIndexStatusFeature = {
	name: "get_index_status",
	description: "Check if a codebase is indexed and ready for search. USE THIS to verify index exists before searching. Returns file count, chunk count, and indexed languages.",
	schema: getIndexStatusSchema,
	execute: execute$1
};
3181
+
3182
+ //#endregion
3183
+ //#region src/features/get-call-graph/index.ts
3184
+ /**
3185
+ * Get Call Graph Feature
3186
+ *
3187
+ * Analyzes function call relationships in a codebase.
3188
+ * Can either:
3189
+ * 1. Build a full call graph for a directory
3190
+ * 2. Query callers/callees for a specific function
3191
+ */
3192
// Input schema for call-graph analysis (directory-wide graph, or a single
// function's callers/callees when functionName is given).
// NOTE(review): only this schema appears in this bundle chunk; no matching
// feature is registered in the `features` array below — confirm whether the
// tool is registered elsewhere or intentionally omitted from this build.
const getCallGraphSchema = z.object({
	directory: z.string().optional().default(".").describe("Path to the directory to analyze"),
	functionName: z.string().optional().describe("Optional: specific function name to query callers/callees for"),
	filePath: z.string().optional().describe("Optional: file path to narrow down function search (used with functionName)"),
	maxDepth: z.number().int().positive().optional().default(2).describe("Maximum depth for call chain traversal (default: 2)"),
	exclude: z.array(z.string()).optional().default([]).describe("Glob patterns to exclude from analysis")
});
3199
+
3200
+ //#endregion
3201
+ //#region src/features/update-index/index.ts
3202
+ /**
3203
+ * Update Index Feature
3204
+ *
3205
+ * Incrementally updates the codebase index by:
3206
+ * 1. Detecting files that have changed since last indexing
3207
+ * 2. Re-indexing only the changed files
3208
+ * 3. Removing deleted files from the index
3209
+ *
3210
+ * Uses SHA-256 hash comparison to detect real content changes.
3211
+ */
3212
/** Cache file name for storing hashes */
const HASH_CACHE_FILE = ".src-index-hashes.json";
// Input schema for update_index: target directory plus dry-run/force flags.
const updateIndexSchema = z.object({
	directory: z.string().optional().default(".").describe("Path to the indexed directory"),
	dryRun: z.boolean().optional().default(false).describe("Only report changes without updating the index"),
	force: z.boolean().optional().default(false).describe("Force re-index of all files (ignore hash cache)")
});
3219
+ /**
3220
+ * Compute SHA-256 hash of content
3221
+ */
3222
/**
 * Compute the SHA-256 fingerprint of a file's content.
 * @param {string} content - UTF-8 text to hash.
 * @returns {string} 64-character lowercase hex digest.
 */
function computeHash(content) {
	const hasher = crypto.createHash("sha256");
	hasher.update(content, "utf8");
	return hasher.digest("hex");
}
3225
+ /**
3226
+ * Get hash cache file path
3227
+ */
3228
/**
 * Resolve the hash-cache file location for a directory.
 * The cache lives inside the directory's `.src-index` folder.
 * @param {string} directory - Indexed project root.
 * @returns {string} Absolute path of the JSON hash-cache file.
 */
function getHashCachePath(directory) {
	const indexDir = path.join(directory, ".src-index");
	return path.join(indexDir, HASH_CACHE_FILE);
}
3231
+ /**
3232
+ * Load hash cache from disk
3233
+ */
3234
/**
 * Load the persisted file-hash cache for a directory.
 * Returns an empty cache when the file is missing or unparseable.
 * @param {string} directory - Indexed project root.
 * @returns {Record<string, string>} Map of file path -> SHA-256 hex digest.
 */
function loadHashCache(directory) {
	const cachePath = getHashCachePath(directory);
	if (!fs.existsSync(cachePath)) return {};
	try {
		const raw = fs.readFileSync(cachePath, "utf-8");
		return JSON.parse(raw);
	} catch {
		// A corrupt cache is not fatal: every file simply re-hashes as changed.
		return {};
	}
}
3244
+ /**
3245
+ * Save hash cache to disk
3246
+ */
3247
/**
 * Persist the file-hash cache as pretty-printed JSON, creating the
 * `.src-index` folder if it does not exist yet.
 * @param {string} directory - Indexed project root.
 * @param {Record<string, string>} cache - Map of file path -> SHA-256 hex.
 */
function saveHashCache(directory, cache) {
	const cachePath = getHashCachePath(directory);
	const parentDir = path.dirname(cachePath);
	if (!fs.existsSync(parentDir)) {
		fs.mkdirSync(parentDir, { recursive: true });
	}
	fs.writeFileSync(cachePath, JSON.stringify(cache, null, 2));
}
3253
+ /**
3254
+ * Create gitignore filter
3255
+ */
3256
/**
 * Build an ignore matcher seeded with common build/VCS folders plus the
 * project's .gitignore rules when a .gitignore file is present.
 * @param {string} directory - Project root to read .gitignore from.
 * @returns {object} Configured `ignore` matcher.
 */
function createIgnoreFilter(directory) {
	const matcher = ignore();
	// Always skip dependencies, VCS metadata, build output, and our own index.
	matcher.add([
		"node_modules",
		".git",
		"dist",
		"build",
		".src-index"
	]);
	const gitignorePath = path.join(directory, ".gitignore");
	if (fs.existsSync(gitignorePath)) {
		matcher.add(fs.readFileSync(gitignorePath, "utf-8"));
	}
	return matcher;
}
3272
+ /**
3273
+ * Check if a name starts with a dot (hidden)
3274
+ */
3275
/**
 * Whether a file or directory base name is hidden (dot-prefixed).
 * @param {string} name - Base name, not a full path.
 * @returns {boolean}
 */
function isHidden(name) {
	return name.charAt(0) === ".";
}
3278
+ /**
3279
+ * Recursively collect files
3280
+ */
3281
/**
 * Recursively gather indexable files under `dir`.
 * Hidden entries, ignore-matched paths, and files rejected by
 * shouldIndexFile are skipped.
 * @param {string} dir - Directory currently being walked.
 * @param {object} ig - Ignore matcher from createIgnoreFilter.
 * @param {string} baseDir - Root used to compute ignore-relative paths.
 * @returns {string[]} Absolute paths of files to index.
 */
function collectFiles(dir, ig, baseDir) {
	const found = [];
	for (const entry of fs.readdirSync(dir, { withFileTypes: true })) {
		if (isHidden(entry.name)) continue;
		const fullPath = path.join(dir, entry.name);
		// Ignore rules match against forward-slash relative paths.
		const relativePath = path.relative(baseDir, fullPath).replace(/\\/g, "/");
		if (ig.ignores(relativePath)) continue;
		if (entry.isDirectory()) {
			found.push(...collectFiles(fullPath, ig, baseDir));
		} else if (entry.isFile() && shouldIndexFile(entry.name)) {
			found.push(fullPath);
		}
	}
	return found;
}
3294
+ /**
3295
+ * Execute the update_index feature
3296
+ */
3297
/**
 * Execute the update_index feature.
 *
 * Incrementally refreshes an existing index: new files are embedded and
 * added, files whose SHA-256 content hash changed are re-embedded, and files
 * that disappeared from disk are removed from the vector store. With
 * dryRun=true the detected changes are only reported, not applied.
 *
 * @param {{ directory: string, dryRun: boolean, force: boolean }} input - Validated updateIndexSchema input.
 * @returns {Promise<{ success: boolean, message?: string, data?: object, error?: string }>}
 */
async function execute(input) {
	const { directory, dryRun, force } = input;
	if (!fs.existsSync(directory)) return {
		success: false,
		error: `Directory not found: ${directory}`
	};
	const absoluteDir = path.resolve(directory);
	const ollamaClient = createOllamaClient(EMBEDDING_CONFIG);
	const vectorStore = createVectorStore(absoluteDir, EMBEDDING_CONFIG);
	if (!vectorStore.exists()) return {
		success: false,
		error: `No index found for directory. Run index_codebase first: ${absoluteDir}`
	};
	const result = {
		directory: absoluteDir,
		dryRun,
		added: [],
		modified: [],
		removed: [],
		unchanged: 0,
		errors: []
	};
	try {
		// Embeddings are only needed when actually applying changes.
		if (!dryRun) {
			const health = await ollamaClient.healthCheck();
			if (!health.ok) return {
				success: false,
				error: health.error ?? "Ollama is not available"
			};
		}
		await vectorStore.connect();
		// force discards the cached hashes so every existing file re-indexes.
		const hashCache = force ? {} : loadHashCache(absoluteDir);
		const newHashCache = {};
		const ig = createIgnoreFilter(absoluteDir);
		const currentFiles = new Set(collectFiles(absoluteDir, ig, absoluteDir));
		const indexedFiles = new Set(await vectorStore.getIndexedFiles());
		const filesToProcess = [];
		// Classify every on-disk file as added, modified, or unchanged.
		for (const filePath of currentFiles) {
			const hash = computeHash(fs.readFileSync(filePath, "utf-8"));
			newHashCache[filePath] = hash;
			if (!indexedFiles.has(filePath)) {
				result.added.push(path.relative(absoluteDir, filePath));
				filesToProcess.push({
					path: filePath,
					type: "add"
				});
			} else if (hashCache[filePath] !== hash) {
				result.modified.push(path.relative(absoluteDir, filePath));
				filesToProcess.push({
					path: filePath,
					type: "modify"
				});
			} else result.unchanged++;
		}
		// Anything indexed but no longer on disk is scheduled for removal.
		for (const filePath of indexedFiles) {
			if (!currentFiles.has(filePath)) result.removed.push(path.relative(absoluteDir, filePath));
		}
		if (dryRun) {
			vectorStore.close();
			return {
				success: true,
				message: buildDryRunMessage(result),
				data: result
			};
		}
		const enrichmentOptions = {
			projectRoot: absoluteDir,
			pathAliases: readPathAliasesCached(absoluteDir),
			includeCrossFileContext: true
		};
		const embeddedChunks = [];
		for (const { path: filePath, type } of filesToProcess) {
			try {
				// Modified files drop their stale chunks before re-embedding.
				if (type === "modify") await vectorStore.deleteByFilePath(filePath);
				const content = fs.readFileSync(filePath, "utf-8");
				const chunks = await chunkFile(filePath, content, EMBEDDING_CONFIG);
				if (chunks.length === 0) continue;
				const enrichedChunks = await enrichChunksFromFile(chunks, content, enrichmentOptions);
				const texts = enrichedChunks.map((c) => c.enrichedContent);
				const embeddings = await ollamaClient.embedBatch(texts);
				for (let i = 0; i < enrichedChunks.length; i++) {
					const chunk = enrichedChunks[i];
					const vector = embeddings[i];
					if (chunk && vector) embeddedChunks.push({
						id: chunk.id,
						content: chunk.content,
						filePath: chunk.filePath,
						language: chunk.language,
						startLine: chunk.startLine,
						endLine: chunk.endLine,
						symbolName: chunk.symbolName,
						symbolType: chunk.symbolType,
						vector
					});
				}
			} catch (err) {
				const errorMsg = err instanceof Error ? err.message : String(err);
				result.errors.push(`Error processing ${filePath}: ${errorMsg}`);
				// Fix: drop the freshly computed hash for a file that failed.
				// Previously the new hash was persisted anyway, so a failed
				// "modify" (whose old chunks were already deleted) looked
				// unchanged on the next run and was never retried.
				delete newHashCache[filePath];
			}
		}
		if (embeddedChunks.length > 0) await vectorStore.addChunks(embeddedChunks);
		for (const relativePath of result.removed) {
			const filePath = path.join(absoluteDir, relativePath);
			await vectorStore.deleteByFilePath(filePath);
		}
		saveHashCache(absoluteDir, newHashCache);
		vectorStore.close();
		return {
			success: true,
			message: buildResultMessage(result),
			data: result
		};
	} catch (err) {
		vectorStore.close();
		return {
			success: false,
			error: `Update failed: ${err instanceof Error ? err.message : String(err)}`
		};
	}
}
3413
+ /**
3414
+ * Build message for dry run
3415
+ */
3416
/**
 * Render a human-readable report of the changes a real update would apply.
 * @param {object} result - Update result with added/modified/removed/unchanged.
 * @returns {string} Multi-line dry-run summary.
 */
function buildDryRunMessage(result) {
	const lines = ["Dry run - changes detected:"];
	// Each section lists at most 10 files, then a "... and N more" footer.
	const appendSection = (label, marker, files) => {
		if (files.length === 0) return;
		lines.push(`\nFiles to ${label} (${String(files.length)}):`);
		for (const f of files.slice(0, 10)) lines.push(` ${marker} ${f}`);
		if (files.length > 10) lines.push(` ... and ${String(files.length - 10)} more`);
	};
	appendSection("add", "+", result.added);
	appendSection("update", "~", result.modified);
	appendSection("remove", "-", result.removed);
	lines.push(`\nUnchanged: ${String(result.unchanged)} files`);
	const totalChanges = result.added.length + result.modified.length + result.removed.length;
	if (totalChanges === 0) return "Index is up to date - no changes detected.";
	lines.push("\nRun without --dryRun to apply changes.");
	return lines.join("\n");
}
3438
+ /**
3439
+ * Build message for actual update
3440
+ */
3441
/**
 * Render a summary of an applied index update.
 * @param {object} result - Update result with counts and any per-file errors.
 * @returns {string} Multi-line summary, or an "up to date" notice.
 */
function buildResultMessage(result) {
	const totalChanges = result.added.length + result.modified.length + result.removed.length;
	if (totalChanges === 0) return "Index is up to date - no changes needed.";
	const lines = ["Index updated successfully:"];
	const sections = [
		["Added", result.added],
		["Modified", result.modified],
		["Removed", result.removed]
	];
	for (const [label, files] of sections) {
		if (files.length > 0) lines.push(` ${label}: ${String(files.length)} files`);
	}
	lines.push(` Unchanged: ${String(result.unchanged)} files`);
	if (result.errors.length > 0) {
		// Cap the error listing at five entries to keep the message short.
		lines.push(`\nErrors (${String(result.errors.length)}):`);
		for (const err of result.errors.slice(0, 5)) lines.push(` - ${err}`);
	}
	return lines.join("\n");
}
3454
// Feature descriptor for the update_index MCP tool.
const updateIndexFeature = {
	name: "update_index",
	description: "Refresh the search index after code changes. USE THIS instead of re-indexing - it's fast because it only processes changed files (SHA-256 hash detection). Use dryRun=true to preview changes first.",
	schema: updateIndexSchema,
	execute
};
3460
+
3461
+ //#endregion
3462
+ //#region src/features/parse-ast/index.ts
3463
// Input schema for parse_ast: accepts either a file path or inline content,
// with optional language override and AST depth limit. The refine() enforces
// the either/or requirement at validation time.
const parseAstSchema = z.object({
	file_path: z.string().optional().describe("Path to the file to parse (either file_path or content required)"),
	content: z.string().optional().describe("Code content to parse directly (either file_path or content required)"),
	language: z.string().optional().describe("Language name (auto-detected from file path if not provided)"),
	max_depth: z.number().int().positive().optional().describe("Maximum depth of AST to return (default: unlimited)")
}).refine((data) => data.file_path ?? data.content, { message: "Either file_path or content must be provided" });
3469
+
3470
+ //#endregion
3471
+ //#region src/features/query-code/index.ts
3472
// Preset query names accepted by query_code; each maps to a built-in
// tree-sitter (SCM) query pattern.
const presetValues = [
	"functions",
	"classes",
	"imports",
	"exports",
	"comments",
	"strings",
	"variables",
	"types"
];
// Input schema for query_code: run a raw SCM query or a named preset against
// a file path or inline content. The two refine() calls enforce both
// either/or requirements at validation time.
const queryCodeSchema = z.object({
	file_path: z.string().optional().describe("Path to the file to query (either file_path or content required)"),
	content: z.string().optional().describe("Code content to query directly (either file_path or content required)"),
	language: z.string().optional().describe("Language name (auto-detected from file path if not provided)"),
	query: z.string().optional().describe("SCM query pattern (either query or preset required)"),
	preset: z.enum(presetValues).optional().describe("Preset query name: functions, classes, imports, exports, comments, strings, variables, types"),
	max_matches: z.number().int().positive().optional().describe("Maximum number of matches to return")
}).refine((data) => data.file_path ?? data.content, { message: "Either file_path or content must be provided" }).refine((data) => data.query ?? data.preset, { message: "Either query or preset must be provided" });
3490
+
3491
+ //#endregion
3492
+ //#region src/features/list-symbols/index.ts
3493
// Symbol categories that list_symbols can filter on.
const symbolTypeValues = [
	"function",
	"class",
	"variable",
	"constant",
	"interface",
	"type",
	"enum",
	"method",
	"property"
];
// Input schema for list_symbols: extract symbols from a file path or inline
// content, optionally filtered by symbol type.
const listSymbolsSchema = z.object({
	file_path: z.string().optional().describe("Path to the file to analyze (either file_path or content required)"),
	content: z.string().optional().describe("Code content to analyze directly (either file_path or content required)"),
	language: z.string().optional().describe("Language name (auto-detected from file path if not provided)"),
	types: z.array(z.enum(symbolTypeValues)).optional().describe("Filter by symbol types: function, class, variable, constant, interface, type, enum, method, property")
}).refine((data) => data.file_path ?? data.content, { message: "Either file_path or content must be provided" });
3510
+
3511
+ //#endregion
3512
+ //#region src/core/fallback/index.ts
3513
// Lazily-initialized LangChain fallback configuration.
// NOTE(review): langchainConfig is never read within this bundle chunk —
// presumably consumed by code outside this view; verify before removing.
let langchainConfig = null;
// Reset the cached fallback config (invoked through the cache registry).
function clearConfigCache() {
	langchainConfig = null;
}
// Make the cache resettable via the central cache registry.
registerCache("fallback:config", clearConfigCache);
3518
+
3519
+ //#endregion
3520
+ //#region src/core/unified/index.ts
3521
// Lazily-initialized caches for the unified language-detection configuration.
let configCache = null;
let binaryExtensionsCache = null;
let extensionToLanguageCache = null;
let specialFilenamesCache = null;
/** Clear caches (for testing) */
function clearUnifiedCache() {
	configCache = null;
	binaryExtensionsCache = null;
	extensionToLanguageCache = null;
	specialFilenamesCache = null;
}
// Make the caches resettable via the central cache registry.
registerCache("unified:config", clearUnifiedCache);
3533
+
3534
+ //#endregion
3535
+ //#region src/features/analyze-file/index.ts
3536
// Input schema for analyze_file: one required file path plus toggles for
// which analysis sections (AST, symbols, imports, exports, chunks) to include.
const analyzeFileSchema = z.object({
	file_path: z.string().describe("Path to the file to analyze"),
	include_ast: z.boolean().default(false).describe("Include full AST in response (default: false, can be verbose)"),
	include_symbols: z.boolean().default(true).describe("Include extracted symbols (default: true)"),
	include_imports: z.boolean().default(true).describe("Include import statements (default: true)"),
	include_exports: z.boolean().default(true).describe("Include export statements (default: true)"),
	ast_max_depth: z.number().int().positive().optional().describe("Maximum depth for AST if included"),
	include_chunks: z.boolean().default(false).describe("Include text chunks for fallback parsing (default: false)")
});
3545
+
3546
+ //#endregion
3547
+ //#region src/features/index.ts
3548
// Features registered as MCP tools at startup.
// NOTE(review): schemas for get_call_graph, parse_ast, query_code,
// list_symbols, and analyze_file exist above but their features are not in
// this list — confirm whether they are registered elsewhere or intentionally
// excluded from this build.
const features = [
	infoFeature,
	indexCodebaseFeature,
	searchCodeFeature,
	getIndexStatusFeature,
	updateIndexFeature
];
3555
+
3556
+ //#endregion
3557
+ //#region src/tools/adapter.ts
3558
/**
 * Convert a zod schema into the shape McpServer.tool() expects.
 * ZodObject schemas expose their raw shape directly; any other schema is
 * wrapped under a single "input" key.
 * @param {object} schema - A zod schema.
 * @returns {object} Raw shape for tool registration.
 */
function zodToMcpSchema(schema) {
	return schema instanceof z.ZodObject ? schema.shape : { input: schema };
}
3562
/**
 * Register a feature as an MCP tool on the server.
 * The tool handler runs the feature's execute() and converts its result
 * into MCP content: the human-readable message when present, otherwise the
 * JSON-encoded data payload; failures set isError.
 * @param {object} server - McpServer instance.
 * @param {object} feature - Feature descriptor (name, description, schema, execute).
 */
function registerFeatureAsTool(server, feature) {
	const mcpSchema = zodToMcpSchema(feature.schema);
	server.tool(feature.name, feature.description, mcpSchema, async (params) => {
		// `await` is a no-op on plain values, so synchronous and async
		// execute() implementations are handled uniformly (the previous code
		// special-cased Promises with instanceof and mixed await with .then()).
		const result = await feature.execute(params);
		return {
			content: [{
				type: "text",
				text: result.message ?? JSON.stringify(result.data, null, 2)
			}],
			isError: !result.success
		};
	});
}
3577
+
3578
+ //#endregion
3579
+ //#region src/tools/index.ts
3580
/**
 * Register every built-in feature as an MCP tool.
 * @param {object} server - McpServer instance.
 */
function registerTools(server) {
	features.forEach((feature) => {
		registerFeatureAsTool(server, feature);
	});
}
3583
+
3584
+ //#endregion
3585
+ //#region src/resources/index.ts
3586
/**
 * Register MCP resources. Currently exposes a single JSON resource with
 * server information at src://server/info.
 * @param {object} server - McpServer instance.
 */
function registerResources(server) {
	server.resource("server_info", "src://server/info", (uri) => ({
		contents: [{
			uri: uri.href,
			mimeType: "application/json",
			text: JSON.stringify(getServerInfo(), null, 2)
		}]
	}));
}
3596
+
3597
+ //#endregion
3598
+ //#region src/prompts/index.ts
3599
+ /**
3600
+ * Register MCP prompts
3601
+ *
3602
+ * Prompts are reusable templates that help AI assistants understand
3603
+ * how to use SRC effectively for code search and analysis.
3604
+ */
3605
function registerPrompts(server) {
	// High-level capability overview: what SRC is, when (not) to use it.
	server.registerPrompt("src-overview", {
		title: "SRC Overview",
		description: "Learn about SRC capabilities and when to use it for code search and analysis"
	}, () => ({ messages: [{
		role: "user",
		content: {
			type: "text",
			text: `# SRC (Structured Repo Context) - Overview

## What is SRC?
SRC is a semantic code search MCP server. It indexes codebases and provides intelligent search using:
- **Vector embeddings** for semantic similarity (understands meaning, not just keywords)
- **BM25 keyword search** for exact matches
- **Hybrid search** combining both with RRF fusion
- **Call graph analysis** showing function relationships

## When to use SRC?

**USE SRC when the user wants to:**
- Find code by meaning/concept ("find authentication logic", "where is error handling")
- Understand code relationships ("what calls this function", "what does this function call")
- Search across a large codebase
- Find similar code patterns
- Explore unfamiliar code

**DON'T USE SRC for:**
- Reading a specific file (use file read tools instead)
- Simple text search in a single file (use grep/search)
- Non-code queries

## Typical Workflow

1. **Check status**: Use \`get_index_status\` to see if index exists
2. **Index if needed**: Use \`index_codebase\` (only once per project)
3. **Search**: Use \`search_code\` with natural language queries

Note: When using \`serve\` mode, the server auto-indexes on startup and watches for file changes.

## Supported Languages
- **Full AST support (18)**: JavaScript, TypeScript, Python, Rust, Go, Java, C, C++, C#, Ruby, PHP, Kotlin, Scala, Swift, HTML, Svelte, OCaml
- **Text splitting (16+)**: Markdown, LaTeX, Solidity, Haskell, Elixir, and more
- **Generic (30+)**: Config files, shell scripts, SQL, and more

## Tips
- Use natural language queries: "authentication middleware" not "auth*"
- The hybrid search mode (default) works best for most queries
- Call context is included by default - shows who calls what`
		}
	}] }));
	// Step-by-step usage guide for the status -> index -> search workflow.
	server.registerPrompt("code-search-workflow", {
		title: "Code Search Workflow",
		description: "Step-by-step guide for searching code with SRC"
	}, () => ({ messages: [{
		role: "user",
		content: {
			type: "text",
			text: `# Code Search Workflow with SRC

## Step 1: Check Index
\`\`\`
get_index_status()
\`\`\`

## Step 2: Index if Needed
If no index exists:
\`\`\`
index_codebase()
\`\`\`

## Step 3: Search
\`\`\`
search_code(query: "your search query here")
\`\`\`

## search_code Arguments

| Argument | Type | Default | Description |
|----------|------|---------|-------------|
| query | string | required | Natural language search query |
| limit | number | 10 | Max results to return |
| mode | "hybrid" / "vector" / "fts" | "hybrid" | Search mode |
| includeCallContext | boolean | true | Include caller/callee info |
| threshold | number | - | Distance threshold (vector mode only) |

## Search Modes
- **hybrid** (default): Vector + BM25 + RRF fusion - best overall
- **vector**: Semantic similarity only - good for conceptual queries
- **fts**: Keyword search only - good for exact identifiers

## Examples
\`\`\`
// Find authentication code
search_code(query: "user authentication and login")

// More results
search_code(query: "error handling", limit: 20)

// Exact identifier search
search_code(query: "UserAuthService", mode: "fts")

// Without call context (faster)
search_code(query: "database queries", includeCallContext: false)
\`\`\``
		}
	}] }));
	// Query-writing guidance: good vs bad query phrasing and mode selection.
	server.registerPrompt("search-tips", {
		title: "Search Tips",
		description: "Tips for writing effective code search queries"
	}, () => ({ messages: [{
		role: "user",
		content: {
			type: "text",
			text: `# Effective Code Search Tips

## Good Query Examples

| Goal | Good Query | Why |
|------|------------|-----|
| Find auth code | "user authentication and login validation" | Describes the concept |
| Find error handling | "error handling and exception catching" | Uses related terms |
| Find API endpoints | "REST API route handlers" | Specifies the pattern |
| Find database code | "database query and data persistence" | Covers the domain |
| Find a function | "calculateTotalPrice function" | Includes the name |

## Query Writing Tips

1. **Be descriptive, not literal**
- Good: "user password validation and hashing"
- Bad: "validatePassword"

2. **Include context**
- Good: "authentication middleware for Express routes"
- Bad: "auth middleware"

3. **Use domain language**
- Good: "shopping cart checkout process"
- Bad: "cart function"

4. **Combine concepts**
- Good: "file upload with size validation and error handling"
- Bad: "upload"

## Search Mode Selection

| Mode | Use When |
|------|----------|
| **hybrid** | Default choice, works for most queries |
| **vector** | Conceptual searches like "code that handles retries" |
| **fts** | Exact identifiers like "UserAuthService" |

## Understanding Results

Each result includes:
- **content**: The matching code chunk
- **filePath**: Source file location
- **startLine/endLine**: Line numbers
- **symbolName/Type**: Function or class name if detected
- **score**: Relevance score (higher = better match)
- **callers**: Functions that call this code
- **callees**: Functions this code calls`
		}
	}] }));
}
3769
+
3770
+ //#endregion
3771
+ //#region src/server.ts
3772
/**
 * Build the MCP server instance and wire up all tools, resources, and
 * prompts.
 * @returns {object} Fully configured McpServer.
 */
function createServer() {
	const server = new McpServer({
		name: config.name,
		version: config.version
	});
	for (const register of [registerTools, registerResources, registerPrompts]) {
		register(server);
	}
	return server;
}
3782
/**
 * Start the MCP server on a stdio transport and log readiness.
 * @returns {Promise<void>}
 */
async function startServer() {
	const transport = new StdioServerTransport();
	const server = createServer();
	await server.connect(transport);
	logger.info(`${config.name} v${config.version} started`);
}
3788
+
3789
+ //#endregion
3790
+ export { logger as a, colors as i, features as n, EMBEDDING_CONFIG as o, createIndexWatcher as r, config as s, startServer as t };
3791
+ //# sourceMappingURL=server-DL8hfycz.mjs.map