src-mcp 1.0.1 → 1.0.2

This diff shows the changes between publicly released versions of the package, as they appear in its public registry. It is provided for informational purposes only.
Files changed (249)
  1. package/dist/bin.d.mts +1 -0
  2. package/dist/bin.mjs +205 -0
  3. package/dist/bin.mjs.map +1 -0
  4. package/dist/index.d.mts +1 -0
  5. package/dist/index.mjs +8 -0
  6. package/dist/index.mjs.map +1 -0
  7. package/dist/server-B2Ms4jQx.mjs +3889 -0
  8. package/dist/server-B2Ms4jQx.mjs.map +1 -0
  9. package/package.json +10 -9
  10. package/dist/bin.d.ts +0 -3
  11. package/dist/bin.d.ts.map +0 -1
  12. package/dist/bin.js +0 -4
  13. package/dist/bin.js.map +0 -1
  14. package/dist/cli/adapter.d.ts +0 -7
  15. package/dist/cli/adapter.d.ts.map +0 -1
  16. package/dist/cli/adapter.js +0 -39
  17. package/dist/cli/adapter.js.map +0 -1
  18. package/dist/cli/commands/index.d.ts +0 -24
  19. package/dist/cli/commands/index.d.ts.map +0 -1
  20. package/dist/cli/commands/index.js +0 -13
  21. package/dist/cli/commands/index.js.map +0 -1
  22. package/dist/cli/commands/serve.command.d.ts +0 -21
  23. package/dist/cli/commands/serve.command.d.ts.map +0 -1
  24. package/dist/cli/commands/serve.command.js +0 -62
  25. package/dist/cli/commands/serve.command.js.map +0 -1
  26. package/dist/cli/commands/version.command.d.ts +0 -2
  27. package/dist/cli/commands/version.command.d.ts.map +0 -1
  28. package/dist/cli/commands/version.command.js +0 -12
  29. package/dist/cli/commands/version.command.js.map +0 -1
  30. package/dist/cli/index.d.ts +0 -2
  31. package/dist/cli/index.d.ts.map +0 -1
  32. package/dist/cli/index.js +0 -15
  33. package/dist/cli/index.js.map +0 -1
  34. package/dist/cli/parser.d.ts +0 -7
  35. package/dist/cli/parser.d.ts.map +0 -1
  36. package/dist/cli/parser.js +0 -99
  37. package/dist/cli/parser.js.map +0 -1
  38. package/dist/config/index.d.ts +0 -24
  39. package/dist/config/index.d.ts.map +0 -1
  40. package/dist/config/index.js +0 -38
  41. package/dist/config/index.js.map +0 -1
  42. package/dist/core/ast/index.d.ts +0 -82
  43. package/dist/core/ast/index.d.ts.map +0 -1
  44. package/dist/core/ast/index.js +0 -204
  45. package/dist/core/ast/index.js.map +0 -1
  46. package/dist/core/ast/types.d.ts +0 -152
  47. package/dist/core/ast/types.d.ts.map +0 -1
  48. package/dist/core/ast/types.js +0 -5
  49. package/dist/core/ast/types.js.map +0 -1
  50. package/dist/core/constants.d.ts +0 -17
  51. package/dist/core/constants.d.ts.map +0 -1
  52. package/dist/core/constants.js +0 -49
  53. package/dist/core/constants.js.map +0 -1
  54. package/dist/core/embeddings/callgraph.d.ts +0 -98
  55. package/dist/core/embeddings/callgraph.d.ts.map +0 -1
  56. package/dist/core/embeddings/callgraph.js +0 -415
  57. package/dist/core/embeddings/callgraph.js.map +0 -1
  58. package/dist/core/embeddings/chunker.d.ts +0 -37
  59. package/dist/core/embeddings/chunker.d.ts.map +0 -1
  60. package/dist/core/embeddings/chunker.js +0 -298
  61. package/dist/core/embeddings/chunker.js.map +0 -1
  62. package/dist/core/embeddings/client.d.ts +0 -30
  63. package/dist/core/embeddings/client.d.ts.map +0 -1
  64. package/dist/core/embeddings/client.js +0 -65
  65. package/dist/core/embeddings/client.js.map +0 -1
  66. package/dist/core/embeddings/crossfile.d.ts +0 -58
  67. package/dist/core/embeddings/crossfile.d.ts.map +0 -1
  68. package/dist/core/embeddings/crossfile.js +0 -202
  69. package/dist/core/embeddings/crossfile.js.map +0 -1
  70. package/dist/core/embeddings/enricher.d.ts +0 -53
  71. package/dist/core/embeddings/enricher.d.ts.map +0 -1
  72. package/dist/core/embeddings/enricher.js +0 -308
  73. package/dist/core/embeddings/enricher.js.map +0 -1
  74. package/dist/core/embeddings/index.d.ts +0 -13
  75. package/dist/core/embeddings/index.d.ts.map +0 -1
  76. package/dist/core/embeddings/index.js +0 -20
  77. package/dist/core/embeddings/index.js.map +0 -1
  78. package/dist/core/embeddings/reranker.d.ts +0 -41
  79. package/dist/core/embeddings/reranker.d.ts.map +0 -1
  80. package/dist/core/embeddings/reranker.js +0 -117
  81. package/dist/core/embeddings/reranker.js.map +0 -1
  82. package/dist/core/embeddings/store.d.ts +0 -93
  83. package/dist/core/embeddings/store.d.ts.map +0 -1
  84. package/dist/core/embeddings/store.js +0 -304
  85. package/dist/core/embeddings/store.js.map +0 -1
  86. package/dist/core/embeddings/types.d.ts +0 -77
  87. package/dist/core/embeddings/types.d.ts.map +0 -1
  88. package/dist/core/embeddings/types.js +0 -5
  89. package/dist/core/embeddings/types.js.map +0 -1
  90. package/dist/core/embeddings/watcher.d.ts +0 -130
  91. package/dist/core/embeddings/watcher.d.ts.map +0 -1
  92. package/dist/core/embeddings/watcher.js +0 -448
  93. package/dist/core/embeddings/watcher.js.map +0 -1
  94. package/dist/core/fallback/index.d.ts +0 -26
  95. package/dist/core/fallback/index.d.ts.map +0 -1
  96. package/dist/core/fallback/index.js +0 -76
  97. package/dist/core/fallback/index.js.map +0 -1
  98. package/dist/core/parser/index.d.ts +0 -64
  99. package/dist/core/parser/index.d.ts.map +0 -1
  100. package/dist/core/parser/index.js +0 -205
  101. package/dist/core/parser/index.js.map +0 -1
  102. package/dist/core/parser/languages.d.ts +0 -26
  103. package/dist/core/parser/languages.d.ts.map +0 -1
  104. package/dist/core/parser/languages.js +0 -101
  105. package/dist/core/parser/languages.js.map +0 -1
  106. package/dist/core/queries/helpers.d.ts +0 -72
  107. package/dist/core/queries/helpers.d.ts.map +0 -1
  108. package/dist/core/queries/helpers.js +0 -101
  109. package/dist/core/queries/helpers.js.map +0 -1
  110. package/dist/core/queries/index.d.ts +0 -144
  111. package/dist/core/queries/index.d.ts.map +0 -1
  112. package/dist/core/queries/index.js +0 -396
  113. package/dist/core/queries/index.js.map +0 -1
  114. package/dist/core/queries/loader.d.ts +0 -46
  115. package/dist/core/queries/loader.d.ts.map +0 -1
  116. package/dist/core/queries/loader.js +0 -216
  117. package/dist/core/queries/loader.js.map +0 -1
  118. package/dist/core/queries/patterns.d.ts +0 -10
  119. package/dist/core/queries/patterns.d.ts.map +0 -1
  120. package/dist/core/queries/patterns.js +0 -112
  121. package/dist/core/queries/patterns.js.map +0 -1
  122. package/dist/core/symbols/index.d.ts +0 -70
  123. package/dist/core/symbols/index.d.ts.map +0 -1
  124. package/dist/core/symbols/index.js +0 -359
  125. package/dist/core/symbols/index.js.map +0 -1
  126. package/dist/core/unified/index.d.ts +0 -118
  127. package/dist/core/unified/index.d.ts.map +0 -1
  128. package/dist/core/unified/index.js +0 -428
  129. package/dist/core/unified/index.js.map +0 -1
  130. package/dist/core/utils/assets.d.ts +0 -34
  131. package/dist/core/utils/assets.d.ts.map +0 -1
  132. package/dist/core/utils/assets.js +0 -85
  133. package/dist/core/utils/assets.js.map +0 -1
  134. package/dist/core/utils/cache.d.ts +0 -43
  135. package/dist/core/utils/cache.d.ts.map +0 -1
  136. package/dist/core/utils/cache.js +0 -60
  137. package/dist/core/utils/cache.js.map +0 -1
  138. package/dist/core/utils/index.d.ts +0 -7
  139. package/dist/core/utils/index.d.ts.map +0 -1
  140. package/dist/core/utils/index.js +0 -10
  141. package/dist/core/utils/index.js.map +0 -1
  142. package/dist/core/utils/tsconfig.d.ts +0 -34
  143. package/dist/core/utils/tsconfig.d.ts.map +0 -1
  144. package/dist/core/utils/tsconfig.js +0 -173
  145. package/dist/core/utils/tsconfig.js.map +0 -1
  146. package/dist/features/analyze-file/index.d.ts +0 -15
  147. package/dist/features/analyze-file/index.d.ts.map +0 -1
  148. package/dist/features/analyze-file/index.js +0 -164
  149. package/dist/features/analyze-file/index.js.map +0 -1
  150. package/dist/features/get-call-graph/index.d.ts +0 -24
  151. package/dist/features/get-call-graph/index.d.ts.map +0 -1
  152. package/dist/features/get-call-graph/index.js +0 -246
  153. package/dist/features/get-call-graph/index.js.map +0 -1
  154. package/dist/features/get-index-status/index.d.ts +0 -20
  155. package/dist/features/get-index-status/index.d.ts.map +0 -1
  156. package/dist/features/get-index-status/index.js +0 -90
  157. package/dist/features/get-index-status/index.js.map +0 -1
  158. package/dist/features/index-codebase/index.d.ts +0 -24
  159. package/dist/features/index-codebase/index.d.ts.map +0 -1
  160. package/dist/features/index-codebase/index.js +0 -283
  161. package/dist/features/index-codebase/index.js.map +0 -1
  162. package/dist/features/index.d.ts +0 -15
  163. package/dist/features/index.d.ts.map +0 -1
  164. package/dist/features/index.js +0 -28
  165. package/dist/features/index.js.map +0 -1
  166. package/dist/features/info/index.d.ts +0 -19
  167. package/dist/features/info/index.d.ts.map +0 -1
  168. package/dist/features/info/index.js +0 -41
  169. package/dist/features/info/index.js.map +0 -1
  170. package/dist/features/list-symbols/index.d.ts +0 -22
  171. package/dist/features/list-symbols/index.d.ts.map +0 -1
  172. package/dist/features/list-symbols/index.js +0 -74
  173. package/dist/features/list-symbols/index.js.map +0 -1
  174. package/dist/features/parse-ast/index.d.ts +0 -12
  175. package/dist/features/parse-ast/index.d.ts.map +0 -1
  176. package/dist/features/parse-ast/index.js +0 -71
  177. package/dist/features/parse-ast/index.js.map +0 -1
  178. package/dist/features/query-code/index.d.ts +0 -23
  179. package/dist/features/query-code/index.d.ts.map +0 -1
  180. package/dist/features/query-code/index.js +0 -96
  181. package/dist/features/query-code/index.js.map +0 -1
  182. package/dist/features/search-code/index.d.ts +0 -39
  183. package/dist/features/search-code/index.d.ts.map +0 -1
  184. package/dist/features/search-code/index.js +0 -258
  185. package/dist/features/search-code/index.js.map +0 -1
  186. package/dist/features/types.d.ts +0 -14
  187. package/dist/features/types.d.ts.map +0 -1
  188. package/dist/features/types.js +0 -2
  189. package/dist/features/types.js.map +0 -1
  190. package/dist/features/update-index/index.d.ts +0 -24
  191. package/dist/features/update-index/index.d.ts.map +0 -1
  192. package/dist/features/update-index/index.js +0 -358
  193. package/dist/features/update-index/index.js.map +0 -1
  194. package/dist/features/utils/content.d.ts +0 -30
  195. package/dist/features/utils/content.d.ts.map +0 -1
  196. package/dist/features/utils/content.js +0 -49
  197. package/dist/features/utils/content.js.map +0 -1
  198. package/dist/features/utils/index.d.ts +0 -6
  199. package/dist/features/utils/index.d.ts.map +0 -1
  200. package/dist/features/utils/index.js +0 -8
  201. package/dist/features/utils/index.js.map +0 -1
  202. package/dist/features/utils/result.d.ts +0 -37
  203. package/dist/features/utils/result.d.ts.map +0 -1
  204. package/dist/features/utils/result.js +0 -53
  205. package/dist/features/utils/result.js.map +0 -1
  206. package/dist/index.d.ts +0 -2
  207. package/dist/index.d.ts.map +0 -1
  208. package/dist/index.js +0 -4
  209. package/dist/index.js.map +0 -1
  210. package/dist/prompts/index.d.ts +0 -9
  211. package/dist/prompts/index.d.ts.map +0 -1
  212. package/dist/prompts/index.js +0 -188
  213. package/dist/prompts/index.js.map +0 -1
  214. package/dist/resources/index.d.ts +0 -3
  215. package/dist/resources/index.d.ts.map +0 -1
  216. package/dist/resources/index.js +0 -17
  217. package/dist/resources/index.js.map +0 -1
  218. package/dist/server.d.ts +0 -4
  219. package/dist/server.d.ts.map +0 -1
  220. package/dist/server.js +0 -24
  221. package/dist/server.js.map +0 -1
  222. package/dist/tools/adapter.d.ts +0 -4
  223. package/dist/tools/adapter.d.ts.map +0 -1
  224. package/dist/tools/adapter.js +0 -28
  225. package/dist/tools/adapter.js.map +0 -1
  226. package/dist/tools/index.d.ts +0 -5
  227. package/dist/tools/index.d.ts.map +0 -1
  228. package/dist/tools/index.js +0 -9
  229. package/dist/tools/index.js.map +0 -1
  230. package/dist/types/index.d.ts +0 -20
  231. package/dist/types/index.d.ts.map +0 -1
  232. package/dist/types/index.js +0 -2
  233. package/dist/types/index.js.map +0 -1
  234. package/dist/utils/colors.d.ts +0 -24
  235. package/dist/utils/colors.d.ts.map +0 -1
  236. package/dist/utils/colors.js +0 -30
  237. package/dist/utils/colors.js.map +0 -1
  238. package/dist/utils/index.d.ts +0 -4
  239. package/dist/utils/index.d.ts.map +0 -1
  240. package/dist/utils/index.js +0 -4
  241. package/dist/utils/index.js.map +0 -1
  242. package/dist/utils/logger.d.ts +0 -8
  243. package/dist/utils/logger.d.ts.map +0 -1
  244. package/dist/utils/logger.js +0 -57
  245. package/dist/utils/logger.js.map +0 -1
  246. package/dist/utils/spinner.d.ts +0 -11
  247. package/dist/utils/spinner.d.ts.map +0 -1
  248. package/dist/utils/spinner.js +0 -36
  249. package/dist/utils/spinner.js.map +0 -1
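
Read as a whole, the list shows a packaging change rather than a source change: the per-module CommonJS output (dist/**/*.js with matching .d.ts files and source maps) is removed, and three bundled ESM artifacts take its place (bin.mjs, index.mjs, and the shared server-B2Ms4jQx.mjs chunk, each with .mjs.map and .d.mts companions). The +10 -9 edit to package.json is consistent with repointing the entry fields at the new bundles. A hypothetical sketch of the relevant fields, taking only the new dist/ file names from the list above as given (the actual package.json diff is not rendered in this view):

```jsonc
{
  // Hypothetical — the real 1.0.2 package.json is not shown in this view;
  // only the bundled file names above are taken from the diff.
  "type": "module",
  "bin": { "src-mcp": "./dist/bin.mjs" },
  "exports": {
    ".": {
      "types": "./dist/index.d.mts",
      "import": "./dist/index.mjs"
    }
  }
}
```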
@@ -0,0 +1 @@
1
+ {"version":3,"file":"server-B2Ms4jQx.mjs","names":["execute","config","path","config","configCache","config","clearLanguageCache","config","config","HASH_CACHE_FILE","computeHash","createIgnoreFilter","isHidden","collectFiles","execute","createIgnoreFilter","isHidden","collectFiles","execute","execute"],"sources":["../src/config/index.ts","../src/features/info/index.ts","../src/core/embeddings/client.ts","../src/utils/logger.ts","../src/utils/colors.ts","../src/core/embeddings/store.ts","../src/core/utils/assets.ts","../src/core/utils/cache.ts","../src/core/utils/tsconfig.ts","../src/core/parser/languages.ts","../src/core/parser/index.ts","../src/core/queries/helpers.ts","../src/core/queries/loader.ts","../src/core/queries/patterns.ts","../src/core/queries/index.ts","../src/core/symbols/index.ts","../src/core/embeddings/chunker.ts","../src/core/embeddings/crossfile.ts","../src/core/embeddings/enricher.ts","../src/core/embeddings/watcher.ts","../src/core/embeddings/reranker.ts","../src/core/embeddings/callgraph.ts","../src/features/index-codebase/index.ts","../src/features/search-code/index.ts","../src/features/get-index-status/index.ts","../src/features/get-call-graph/index.ts","../src/features/update-index/index.ts","../src/features/parse-ast/index.ts","../src/features/query-code/index.ts","../src/features/list-symbols/index.ts","../src/core/fallback/index.ts","../src/core/unified/index.ts","../src/features/analyze-file/index.ts","../src/features/index.ts","../src/tools/adapter.ts","../src/tools/index.ts","../src/resources/index.ts","../src/prompts/index.ts","../src/server.ts"],"sourcesContent":["import type { EmbeddingConfig } from \"@core/embeddings/types\";\nimport type { ServerConfig } from \"@types\";\n\nexport const config: ServerConfig = {\n name: \"src-mcp\",\n fullName: \"SRC (Structured Repo Context)\",\n version: \"1.0.2\",\n description:\n \"MCP server for codebase analysis with Treesitter (SCM queries), AST parsing, and embedding-based indexing\",\n};\n\nconst nodeEnv = process.env.NODE_ENV;\nconst logLevelEnv = process.env.LOG_LEVEL;\n\nexport const ENV = {\n isDev: nodeEnv === \"development\",\n isProd: nodeEnv === \"production\",\n logLevel: logLevelEnv ?? \"info\",\n};\n\n/**\n * Embedding configuration with environment variable overrides\n */\nexport const EMBEDDING_CONFIG: EmbeddingConfig = {\n ollamaBaseUrl: process.env.OLLAMA_BASE_URL ?? \"http://localhost:11434\",\n embeddingModel: process.env.EMBEDDING_MODEL ?? \"nomic-embed-text\",\n embeddingDimensions: Number(process.env.EMBEDDING_DIMENSIONS) || 768,\n defaultChunkSize: Number(process.env.CHUNK_SIZE) || 1000,\n defaultChunkOverlap: Number(process.env.CHUNK_OVERLAP) || 200,\n batchSize: Number(process.env.EMBEDDING_BATCH_SIZE) || 10,\n /** Model for re-ranking (lightweight model recommended) */\n rerankModel: process.env.RERANK_MODEL ?? 
\"qwen2.5:1.5b\",\n};\n\n/**\n * Enrichment configuration for cross-file context\n */\nexport const ENRICHMENT_CONFIG = {\n /** Include cross-file import definitions in enrichment */\n includeCrossFileContext: process.env.ENRICHMENT_CROSS_FILE !== \"false\",\n /** Maximum number of imports to resolve per file */\n maxImportsToResolve: Number(process.env.ENRICHMENT_MAX_IMPORTS) || 10,\n /** Maximum symbols per imported file to include */\n maxSymbolsPerImport:\n Number(process.env.ENRICHMENT_MAX_SYMBOLS_PER_IMPORT) || 5,\n};\n","import { z } from \"zod\";\nimport type { Feature, FeatureResult } from \"@features/types\";\nimport { config } from \"@config\";\n\nexport const infoSchema = z.object({\n format: z\n .enum([\"json\", \"text\"])\n .optional()\n .default(\"text\")\n .describe(\"Output format\"),\n});\n\nexport type InfoInput = z.infer<typeof infoSchema>;\n\nexport interface ServerInfo {\n name: string;\n fullName: string;\n version: string;\n description: string | undefined;\n}\n\nexport function getServerInfo(): ServerInfo {\n return {\n name: config.name,\n fullName: config.fullName,\n version: config.version,\n description: config.description,\n };\n}\n\nexport function execute(input: InfoInput): FeatureResult {\n const info = getServerInfo();\n\n if (input.format === \"json\") {\n return {\n success: true,\n data: info,\n message: JSON.stringify(info, null, 2),\n };\n }\n\n const description = info.description ?? \"\";\n const text =\n `${info.fullName} (${info.name}) v${info.version}\\n${description}`.trim();\n\n return {\n success: true,\n data: info,\n message: text,\n };\n}\n\nexport const infoFeature: Feature<typeof infoSchema> = {\n name: \"get_server_info\",\n description:\n \"Get SRC server version and capabilities. Use to verify the MCP server is running correctly.\",\n schema: infoSchema,\n execute,\n};\n","/**\n * Ollama client for generating embeddings\n * Uses the official ollama library\n */\n\nimport { Ollama } from \"ollama\";\nimport type { EmbeddingConfig } from \"@core/embeddings/types\";\n\nexport class OllamaClient {\n private readonly client: Ollama;\n private readonly model: string;\n\n constructor(\n config: Pick<EmbeddingConfig, \"ollamaBaseUrl\" | \"embeddingModel\">,\n ) {\n this.client = new Ollama({ host: config.ollamaBaseUrl });\n this.model = config.embeddingModel;\n }\n\n /**\n * Generate embeddings for a single text\n */\n async embed(text: string): Promise<number[]> {\n const response = await this.client.embed({\n model: this.model,\n input: text,\n });\n\n const result = response.embeddings[0];\n if (!result) {\n throw new Error(\"No embedding returned from Ollama\");\n }\n return result;\n }\n\n /**\n * Generate embeddings for multiple texts in a single request\n */\n async embedBatch(texts: string[]): Promise<number[][]> {\n const response = await this.client.embed({\n model: this.model,\n input: texts,\n });\n\n return response.embeddings;\n }\n\n /**\n * Check if Ollama is reachable and the model is available\n */\n async healthCheck(): Promise<{ ok: boolean; error?: string }> {\n try {\n const response = await this.client.list();\n const models = response.models;\n const modelExists = models.some(\n (m) => m.name === this.model || m.name.startsWith(`${this.model}:`),\n );\n\n if (!modelExists) {\n return {\n ok: false,\n error: `Model \"${this.model}\" not found. Run: ollama pull ${this.model}`,\n };\n }\n\n return { ok: true };\n } catch (error) {\n const message = error instanceof Error ? 
error.message : String(error);\n return { ok: false, error: `Cannot connect to Ollama: ${message}` };\n }\n }\n}\n\n/**\n * Create a new Ollama client with default config\n */\nexport function createOllamaClient(\n config: Pick<EmbeddingConfig, \"ollamaBaseUrl\" | \"embeddingModel\">,\n): OllamaClient {\n return new OllamaClient(config);\n}\n","import { ENV } from \"@config\";\nimport pc from \"picocolors\";\n\ntype LogLevel = \"debug\" | \"info\" | \"warn\" | \"error\";\n\nconst LOG_LEVELS: Record<LogLevel, number> = {\n debug: 0,\n info: 1,\n warn: 2,\n error: 3,\n};\n\nconst LEVEL_COLORS: Record<LogLevel, (s: string) => string> = {\n debug: pc.dim,\n info: pc.blue,\n warn: pc.yellow,\n error: pc.red,\n};\n\nfunction isValidLogLevel(level: string): level is LogLevel {\n return level in LOG_LEVELS;\n}\n\nfunction shouldLog(level: LogLevel): boolean {\n const configLevel = ENV.logLevel;\n const currentLevel = isValidLogLevel(configLevel)\n ? LOG_LEVELS[configLevel]\n : LOG_LEVELS.info;\n return LOG_LEVELS[level] >= currentLevel;\n}\n\nfunction formatMessage(level: LogLevel, message: string): string {\n const timestamp = pc.dim(new Date().toISOString());\n const levelTag = LEVEL_COLORS[level](level.toUpperCase().padEnd(5));\n return `${timestamp} ${levelTag} ${message}`;\n}\n\nexport const logger = {\n debug(message: string, ...args: unknown[]): void {\n if (shouldLog(\"debug\")) {\n console.error(formatMessage(\"debug\", message), ...args);\n }\n },\n\n info(message: string, ...args: unknown[]): void {\n if (shouldLog(\"info\")) {\n console.error(formatMessage(\"info\", message), ...args);\n }\n },\n\n warn(message: string, ...args: unknown[]): void {\n if (shouldLog(\"warn\")) {\n console.warn(formatMessage(\"warn\", message), ...args);\n }\n },\n\n error(message: string, ...args: unknown[]): void {\n if (shouldLog(\"error\")) {\n console.error(formatMessage(\"error\", message), ...args);\n }\n },\n\n success(message: string, ...args: unknown[]): void {\n console.error(pc.green(\"✓ \") + message, ...args);\n },\n};\n","import pc from \"picocolors\";\n\n/**\n * Color utilities for CLI output\n */\nexport const colors = {\n // Status colors\n success: pc.green,\n error: pc.red,\n warn: pc.yellow,\n info: pc.blue,\n\n // UI elements\n dim: pc.dim,\n bold: pc.bold,\n cyan: pc.cyan,\n magenta: pc.magenta,\n\n // Composite helpers\n successBold: (text: string): string => pc.bold(pc.green(text)),\n errorBold: (text: string): string => pc.bold(pc.red(text)),\n infoBold: (text: string): string => pc.bold(pc.blue(text)),\n\n // Format messages with icons\n formatSuccess: (msg: string): string => `${pc.green(\"✓\")} ${msg}`,\n formatError: (msg: string): string => `${pc.red(\"✗\")} ${msg}`,\n formatInfo: (msg: string): string => `${pc.blue(\"ℹ\")} ${msg}`,\n formatWarn: (msg: string): string => `${pc.yellow(\"⚠\")} ${msg}`,\n\n // Format for CLI output\n formatCommand: (cmd: string): string => pc.cyan(cmd),\n formatValue: (val: string): string => pc.magenta(val),\n formatPath: (path: string): string => pc.dim(path),\n};\n","/**\n * LanceDB vector store for code embeddings\n *\n * Supports:\n * - Vector similarity search (embeddings)\n * - Full-text search (BM25)\n * - Hybrid search with RRF (Reciprocal Rank Fusion)\n */\n\nimport * as lancedb from \"@lancedb/lancedb\";\nimport * as fs from \"node:fs\";\nimport * as path from \"node:path\";\nimport type {\n EmbeddedChunk,\n EmbeddingConfig,\n IndexStatus,\n SearchResult,\n} from \"@core/embeddings/types\";\nimport { logger } from \"@utils\";\n\nconst 
TABLE_NAME = \"code_chunks\";\nconst INDEX_DIR_NAME = \".src-index\";\n\n/**\n * Search mode for queries\n */\nexport type SearchMode = \"vector\" | \"fts\" | \"hybrid\";\n\n/**\n * Options for hybrid search\n */\nexport interface HybridSearchOptions {\n /** Search mode: vector only, fts only, or hybrid (default: hybrid) */\n mode?: SearchMode;\n /** Weight for vector search in hybrid mode (0-1, default: 0.5) */\n vectorWeight?: number;\n /** RRF constant k for rank fusion (default: 60) */\n rrfK?: number;\n}\n\n/**\n * Reciprocal Rank Fusion (RRF) to combine ranked lists\n *\n * RRF score = sum(1 / (k + rank_i)) for each list\n * where k is a constant (typically 60) and rank_i is the 1-based rank in list i\n */\nfunction rrfFusion(\n vectorResults: SearchResult[],\n ftsResults: SearchResult[],\n k = 60,\n): SearchResult[] {\n const scores = new Map<string, { score: number; result: SearchResult }>();\n\n // Add vector results with RRF scoring\n vectorResults.forEach((result, index) => {\n const rank = index + 1;\n const rrfScore = 1 / (k + rank);\n const existing = scores.get(result.chunk.id);\n if (existing) {\n existing.score += rrfScore;\n } else {\n scores.set(result.chunk.id, { score: rrfScore, result });\n }\n });\n\n // Add FTS results with RRF scoring\n ftsResults.forEach((result, index) => {\n const rank = index + 1;\n const rrfScore = 1 / (k + rank);\n const existing = scores.get(result.chunk.id);\n if (existing) {\n existing.score += rrfScore;\n } else {\n scores.set(result.chunk.id, { score: rrfScore, result });\n }\n });\n\n // Sort by combined RRF score (higher is better)\n const combined = Array.from(scores.values())\n .sort((a, b) => b.score - a.score)\n .map(({ score, result }) => ({\n ...result,\n score, // Replace distance with RRF score\n }));\n\n return combined;\n}\n\n/**\n * Type for LanceDB row results\n */\ninterface LanceDBRow {\n id: string;\n content: string;\n filePath: string;\n language: string;\n startLine: number;\n endLine: number;\n symbolName: string;\n symbolType: string;\n vector: number[];\n _distance?: number;\n}\n\n/**\n * LanceDB vector store wrapper\n */\nexport class VectorStore {\n private db: lancedb.Connection | null = null;\n private table: lancedb.Table | null = null;\n private readonly indexPath: string;\n private ftsIndexCreated = false;\n\n constructor(\n directory: string,\n _config: Pick<EmbeddingConfig, \"embeddingDimensions\">,\n ) {\n this.indexPath = path.join(directory, INDEX_DIR_NAME);\n }\n\n /**\n * Initialize the database connection\n */\n async connect(): Promise<void> {\n this.db = await lancedb.connect(this.indexPath);\n\n const tableNames = await this.db.tableNames();\n if (tableNames.includes(TABLE_NAME)) {\n this.table = await this.db.openTable(TABLE_NAME);\n }\n }\n\n /**\n * Close the database connection\n */\n close(): void {\n this.db = null;\n this.table = null;\n }\n\n /**\n * Check if the index exists\n */\n exists(): boolean {\n return fs.existsSync(this.indexPath);\n }\n\n /**\n * Add embedded chunks to the store\n */\n async addChunks(chunks: EmbeddedChunk[]): Promise<void> {\n if (!this.db) {\n throw new Error(\"Database not connected. Call connect() first.\");\n }\n\n const records = chunks.map((chunk) => ({\n id: chunk.id,\n content: chunk.content,\n filePath: chunk.filePath,\n language: chunk.language,\n startLine: chunk.startLine,\n endLine: chunk.endLine,\n symbolName: chunk.symbolName ?? \"\",\n symbolType: chunk.symbolType ?? 
\"\",\n vector: chunk.vector,\n }));\n\n if (!this.table) {\n this.table = await this.db.createTable(TABLE_NAME, records);\n } else {\n await this.table.add(records);\n }\n }\n\n /**\n * Create FTS (Full-Text Search) index on content column\n * This enables BM25-based text search\n */\n async createFtsIndex(): Promise<void> {\n if (!this.table || this.ftsIndexCreated) {\n return;\n }\n\n try {\n await this.table.createIndex(\"content\", {\n config: lancedb.Index.fts(),\n });\n this.ftsIndexCreated = true;\n logger.debug(\"FTS index created on content column\");\n } catch (error) {\n // Index may already exist\n if (error instanceof Error && error.message.includes(\"already exists\")) {\n this.ftsIndexCreated = true;\n logger.debug(\"FTS index already exists\");\n } else {\n logger.warn(\n `Failed to create FTS index: ${error instanceof Error ? error.message : String(error)}`,\n );\n }\n }\n }\n\n /**\n * Search for similar chunks using vector similarity\n */\n async search(queryVector: number[], limit = 10): Promise<SearchResult[]> {\n if (!this.table) {\n return [];\n }\n\n const results = (await this.table\n .vectorSearch(queryVector)\n .limit(limit)\n .toArray()) as LanceDBRow[];\n\n return results.map((row) => ({\n chunk: {\n id: row.id,\n content: row.content,\n filePath: row.filePath,\n language: row.language,\n startLine: row.startLine,\n endLine: row.endLine,\n symbolName: row.symbolName || undefined,\n symbolType: row.symbolType || undefined,\n },\n score: row._distance ?? 0,\n }));\n }\n\n /**\n * Full-text search using BM25\n */\n async searchFts(queryText: string, limit = 10): Promise<SearchResult[]> {\n if (!this.table) {\n return [];\n }\n\n // Ensure FTS index exists\n await this.createFtsIndex();\n\n try {\n const results = (await this.table\n .query()\n .nearestToText(queryText)\n .limit(limit)\n .toArray()) as LanceDBRow[];\n\n return results.map((row, index) => ({\n chunk: {\n id: row.id,\n content: row.content,\n filePath: row.filePath,\n language: row.language,\n startLine: row.startLine,\n endLine: row.endLine,\n symbolName: row.symbolName || undefined,\n symbolType: row.symbolType || undefined,\n },\n // FTS doesn't return distance, use rank-based score\n score: 1 / (index + 1),\n }));\n } catch (error) {\n logger.warn(\n `FTS search failed, falling back to empty results: ${error instanceof Error ? 
error.message : String(error)}`,\n );\n return [];\n }\n }\n\n /**\n * Hybrid search combining vector similarity and full-text search\n * Uses Reciprocal Rank Fusion (RRF) to combine results\n */\n async searchHybrid(\n queryVector: number[],\n queryText: string,\n limit = 10,\n options: HybridSearchOptions = {},\n ): Promise<SearchResult[]> {\n const { mode = \"hybrid\", rrfK = 60 } = options;\n\n if (!this.table) {\n return [];\n }\n\n // Vector-only search\n if (mode === \"vector\") {\n return this.search(queryVector, limit);\n }\n\n // FTS-only search\n if (mode === \"fts\") {\n return this.searchFts(queryText, limit);\n }\n\n // Hybrid search: run both searches in parallel\n const [vectorResults, ftsResults] = await Promise.all([\n this.search(queryVector, limit * 2), // Get more results for fusion\n this.searchFts(queryText, limit * 2),\n ]);\n\n // Fuse results using RRF\n const fusedResults = rrfFusion(vectorResults, ftsResults, rrfK);\n\n // Return top N results\n return fusedResults.slice(0, limit);\n }\n\n /**\n * Delete chunks by file path\n */\n async deleteByFilePath(filePath: string): Promise<void> {\n if (!this.table) {\n return;\n }\n\n await this.table.delete(`\"filePath\" = '${filePath.replace(/'/g, \"''\")}'`);\n }\n\n /**\n * Clear all data from the store\n */\n async clear(): Promise<void> {\n if (this.db && this.table) {\n await this.db.dropTable(TABLE_NAME);\n this.table = null;\n }\n }\n\n /**\n * Get index status\n */\n async getStatus(directory: string): Promise<IndexStatus> {\n const status: IndexStatus = {\n directory,\n indexPath: this.indexPath,\n exists: this.exists(),\n totalChunks: 0,\n totalFiles: 0,\n languages: {},\n };\n\n if (!this.table) {\n return status;\n }\n\n const allRows = (await this.table.query().toArray()) as LanceDBRow[];\n\n status.totalChunks = allRows.length;\n\n const uniqueFiles = new Set<string>();\n const languageCounts: Record<string, number> = {};\n\n for (const row of allRows) {\n uniqueFiles.add(row.filePath);\n const lang = row.language;\n languageCounts[lang] = (languageCounts[lang] ?? 
0) + 1;\n }\n\n status.totalFiles = uniqueFiles.size;\n status.languages = languageCounts;\n\n return status;\n }\n\n /**\n * Get all indexed file paths\n */\n async getIndexedFiles(): Promise<string[]> {\n if (!this.table) {\n return [];\n }\n\n const rows = (await this.table\n .query()\n .select([\"filePath\"])\n .toArray()) as Pick<LanceDBRow, \"filePath\">[];\n const uniqueFiles = new Set<string>();\n\n for (const row of rows) {\n uniqueFiles.add(row.filePath);\n }\n\n return Array.from(uniqueFiles);\n }\n}\n\n/**\n * Create a vector store for a directory\n */\nexport function createVectorStore(\n directory: string,\n config: Pick<EmbeddingConfig, \"embeddingDimensions\">,\n): VectorStore {\n return new VectorStore(directory, config);\n}\n\n/**\n * Get the index path for a directory\n */\nexport function getIndexPath(directory: string): string {\n return path.join(directory, INDEX_DIR_NAME);\n}\n","/**\n * Centralized asset directory utilities\n *\n * Provides consistent access to the assets directory and JSON config loading\n * across all core modules.\n */\nimport { existsSync, readFileSync } from \"fs\";\nimport { dirname, join } from \"path\";\nimport { fileURLToPath } from \"url\";\n\n/**\n * Cached assets directory path\n */\nlet assetsDirCache: string | null = null;\n\n/**\n * Get the assets directory path\n *\n * Handles both ESM and CJS contexts by trying multiple possible paths\n * relative to the current module location.\n */\nexport function getAssetsDir(): string {\n if (assetsDirCache) {\n return assetsDirCache;\n }\n\n // Handle both ESM and CJS contexts\n const currentDir =\n typeof __dirname !== \"undefined\"\n ? __dirname\n : dirname(fileURLToPath(import.meta.url));\n\n // Try various paths relative to current file location\n const possiblePaths = [\n join(currentDir, \"..\", \"..\", \"..\", \"assets\"), // From dist/core/utils\n join(currentDir, \"..\", \"..\", \"assets\"), // From src/core/utils (dev)\n join(process.cwd(), \"assets\"), // From project root\n ];\n\n for (const p of possiblePaths) {\n if (existsSync(p)) {\n assetsDirCache = p;\n return p;\n }\n }\n\n // Default fallback\n assetsDirCache = join(process.cwd(), \"assets\");\n return assetsDirCache;\n}\n\n/**\n * Load and parse a JSON config file from the assets directory\n *\n * @param filename - Name of the JSON file in assets directory\n * @param defaultValue - Default value to return if file cannot be loaded\n * @returns Parsed JSON content or default value\n */\nexport function loadJsonConfig<T>(filename: string, defaultValue: T): T {\n const configPath = join(getAssetsDir(), filename);\n\n try {\n const content = readFileSync(configPath, \"utf-8\");\n return JSON.parse(content) as T;\n } catch {\n return defaultValue;\n }\n}\n\n/**\n * Get the path to a file within the assets directory\n *\n * @param segments - Path segments relative to assets directory\n * @returns Full path to the asset file\n */\nexport function getAssetPath(...segments: string[]): string {\n return join(getAssetsDir(), ...segments);\n}\n\n/**\n * Check if an asset file exists\n *\n * @param segments - Path segments relative to assets directory\n * @returns True if the file exists\n */\nexport function assetExists(...segments: string[]): boolean {\n return existsSync(getAssetPath(...segments));\n}\n\n/**\n * Clear the assets directory cache (for testing)\n */\nexport function clearAssetsDirCache(): void {\n assetsDirCache = null;\n}\n","/**\n * Centralized cache management\n *\n * Provides a registry for cache clear functions 
across all modules.\n * Allows clearing all caches at once (useful for testing).\n */\n\n/**\n * Cache clear function type\n */\nexport type CacheClearFn = () => void;\n\n/**\n * Registry of cache clear functions\n */\nconst cacheRegistry = new Map<string, CacheClearFn>();\n\n/**\n * Register a cache clear function\n *\n * @param name - Unique name for this cache (for debugging/identification)\n * @param clearFn - Function that clears the cache\n */\nexport function registerCache(name: string, clearFn: CacheClearFn): void {\n cacheRegistry.set(name, clearFn);\n}\n\n/**\n * Unregister a cache clear function\n *\n * @param name - Name of the cache to unregister\n */\nexport function unregisterCache(name: string): void {\n cacheRegistry.delete(name);\n}\n\n/**\n * Clear all registered caches\n *\n * Useful for testing to ensure a clean state between tests.\n */\nexport function clearAllCaches(): void {\n for (const clearFn of cacheRegistry.values()) {\n clearFn();\n }\n}\n\n/**\n * Get names of all registered caches\n *\n * @returns Array of cache names\n */\nexport function getRegisteredCaches(): string[] {\n return Array.from(cacheRegistry.keys());\n}\n\n/**\n * Clear a specific cache by name\n *\n * @param name - Name of the cache to clear\n * @returns True if cache was found and cleared\n */\nexport function clearCache(name: string): boolean {\n const clearFn = cacheRegistry.get(name);\n if (clearFn) {\n clearFn();\n return true;\n }\n return false;\n}\n","/**\n * TSConfig utilities for reading path aliases\n *\n * Reads and parses tsconfig.json to extract path aliases\n * in a format usable by the cross-file resolution system.\n */\n\nimport * as fs from \"node:fs\";\nimport * as path from \"node:path\";\nimport { logger } from \"@utils\";\n\n/**\n * TSConfig paths as defined in compilerOptions.paths\n */\ntype TsConfigPaths = Record<string, string[]>;\n\n/**\n * Partial TSConfig structure\n */\ninterface TsConfig {\n compilerOptions?: {\n baseUrl?: string;\n paths?: TsConfigPaths;\n };\n extends?: string;\n}\n\n/**\n * Converted path aliases in simple format\n * Key is the alias prefix (e.g., \"@core\", \"@/\")\n * Value is the resolved directory path relative to project root\n */\nexport type PathAliases = Record<string, string>;\n\n/**\n * Strip JSON comments (single-line // and multi-line)\n */\nfunction stripJsonComments(json: string): string {\n let result = \"\";\n let inString = false;\n let inSingleLineComment = false;\n let inMultiLineComment = false;\n\n for (let i = 0; i < json.length; i++) {\n const char = json.charAt(i);\n const nextChar = json.charAt(i + 1);\n\n if (inSingleLineComment) {\n if (char === \"\\n\") {\n inSingleLineComment = false;\n result += char;\n }\n continue;\n }\n\n if (inMultiLineComment) {\n if (char === \"*\" && nextChar === \"/\") {\n inMultiLineComment = false;\n i++; // Skip the '/'\n }\n continue;\n }\n\n if (inString) {\n result += char;\n if (char === '\"' && json.charAt(i - 1) !== \"\\\\\") {\n inString = false;\n }\n continue;\n }\n\n // Not in string or comment\n if (char === '\"') {\n inString = true;\n result += char;\n } else if (char === \"/\" && nextChar === \"/\") {\n inSingleLineComment = true;\n i++; // Skip the second '/'\n } else if (char === \"/\" && nextChar === \"*\") {\n inMultiLineComment = true;\n i++; // Skip the '*'\n } else {\n result += char;\n }\n }\n\n return result;\n}\n\n/**\n * Parse tsconfig.json content\n */\nfunction parseTsConfig(content: string): TsConfig | null {\n try {\n const strippedContent = 
stripJsonComments(content);\n return JSON.parse(strippedContent) as TsConfig;\n } catch (error) {\n logger.debug(\n `Failed to parse tsconfig.json: ${error instanceof Error ? error.message : String(error)}`,\n );\n return null;\n }\n}\n\n/**\n * Convert tsconfig paths to simple path aliases format\n *\n * TSConfig format:\n * \"@core\": [\"src/core\"]\n * \"@core/*\": [\"src/core/*\"]\n *\n * Output format:\n * \"@core\": \"src/core\"\n * \"@core/\": \"src/core/\"\n */\nfunction convertPaths(\n paths: TsConfigPaths,\n baseUrl: string,\n projectRoot: string,\n): PathAliases {\n const aliases: PathAliases = {};\n\n for (const [pattern, targets] of Object.entries(paths)) {\n const target = targets[0];\n if (!target) {\n continue;\n }\n\n // Handle wildcard patterns like \"@core/*\" -> [\"src/core/*\"]\n if (pattern.endsWith(\"/*\") && target.endsWith(\"/*\")) {\n // Remove the /* from both pattern and target\n const aliasPrefix = pattern.slice(0, -2) + \"/\";\n const targetPath = target.slice(0, -2) + \"/\";\n\n // Resolve relative to baseUrl\n const resolvedTarget = path.join(projectRoot, baseUrl, targetPath);\n const relativeTarget = path.relative(projectRoot, resolvedTarget);\n\n aliases[aliasPrefix] = relativeTarget.replace(/\\\\/g, \"/\") + \"/\";\n } else {\n // Handle exact matches like \"@core\" -> [\"src/core\"]\n const resolvedTarget = path.join(projectRoot, baseUrl, target);\n const relativeTarget = path.relative(projectRoot, resolvedTarget);\n\n aliases[pattern] = relativeTarget.replace(/\\\\/g, \"/\");\n }\n }\n\n return aliases;\n}\n\n/**\n * Read tsconfig.json and extract path aliases\n *\n * Handles:\n * - Comments in tsconfig (// and /* *\\/)\n * - baseUrl relative paths\n * - Wildcard patterns (@core/* -> src/core/*)\n * - Exact patterns (@core -> src/core)\n *\n * @param projectRoot - The project root directory containing tsconfig.json\n * @returns Path aliases in simple format, or empty object if not found/invalid\n */\nexport function readPathAliases(projectRoot: string): PathAliases {\n const tsconfigPath = path.join(projectRoot, \"tsconfig.json\");\n\n if (!fs.existsSync(tsconfigPath)) {\n logger.debug(`No tsconfig.json found at ${tsconfigPath}`);\n return {};\n }\n\n try {\n const content = fs.readFileSync(tsconfigPath, \"utf-8\");\n const tsconfig = parseTsConfig(content);\n\n if (!tsconfig) {\n return {};\n }\n\n const paths = tsconfig.compilerOptions?.paths;\n const baseUrl = tsconfig.compilerOptions?.baseUrl ?? \".\";\n\n if (!paths || Object.keys(paths).length === 0) {\n logger.debug(\"No paths defined in tsconfig.json\");\n return {};\n }\n\n const aliases = convertPaths(paths, baseUrl, projectRoot);\n logger.debug(\n `Loaded ${String(Object.keys(aliases).length)} path aliases from tsconfig.json`,\n );\n\n return aliases;\n } catch (error) {\n logger.debug(\n `Failed to read tsconfig.json: ${error instanceof Error ? 
error.message : String(error)}`,\n );\n return {};\n }\n}\n\n/**\n * Get cache key for memoization\n */\nconst pathAliasCache = new Map<string, PathAliases>();\n\n/**\n * Read path aliases with caching\n */\nexport function readPathAliasesCached(projectRoot: string): PathAliases {\n const normalizedRoot = path.normalize(projectRoot);\n const cached = pathAliasCache.get(normalizedRoot);\n\n if (cached !== undefined) {\n return cached;\n }\n\n const aliases = readPathAliases(projectRoot);\n pathAliasCache.set(normalizedRoot, aliases);\n return aliases;\n}\n\n/**\n * Clear the path aliases cache\n */\nexport function clearPathAliasCache(): void {\n pathAliasCache.clear();\n}\n","/**\n * Language configuration and mapping for Tree-sitter parsers\n * Reads from centralized assets/languages.json\n */\nimport { loadJsonConfig, registerCache } from \"@core/utils\";\n\nexport interface LanguageConfig {\n name: string;\n wasm: string;\n queries: string;\n extensions: string[];\n aliases?: string[];\n}\n\ninterface LanguagesConfig {\n treesitter: Record<\n string,\n {\n wasm: string;\n queries: string;\n extensions: string[];\n aliases?: string[];\n }\n >;\n}\n\nlet configCache: LanguagesConfig | null = null;\nlet languagesCache: Record<string, LanguageConfig> | null = null;\nlet extensionMapCache: Record<string, LanguageConfig> | null = null;\n\nfunction loadConfig(): LanguagesConfig {\n if (configCache) {\n return configCache;\n }\n\n configCache = loadJsonConfig<LanguagesConfig>(\"languages.json\", {\n treesitter: {},\n });\n return configCache;\n}\n\nfunction buildLanguages(): Record<string, LanguageConfig> {\n if (languagesCache) {\n return languagesCache;\n }\n\n const config = loadConfig();\n languagesCache = {};\n\n for (const [name, lang] of Object.entries(config.treesitter)) {\n languagesCache[name] = {\n name,\n wasm: lang.wasm,\n queries: lang.queries,\n extensions: lang.extensions,\n aliases: lang.aliases,\n };\n\n // Also register aliases\n if (lang.aliases) {\n for (const alias of lang.aliases) {\n languagesCache[alias] = {\n name,\n wasm: lang.wasm,\n queries: lang.queries,\n extensions: lang.extensions,\n aliases: lang.aliases,\n };\n }\n }\n }\n\n return languagesCache;\n}\n\nfunction buildExtensionMap(): Record<string, LanguageConfig> {\n if (extensionMapCache) {\n return extensionMapCache;\n }\n\n const languages = buildLanguages();\n extensionMapCache = {};\n\n for (const config of Object.values(languages)) {\n for (const ext of config.extensions) {\n extensionMapCache[ext] = config;\n }\n }\n\n return extensionMapCache;\n}\n\n/** Get all Tree-sitter supported languages */\nexport function getLanguages(): Record<string, LanguageConfig> {\n return buildLanguages();\n}\n\n/** Get language configuration from file extension */\nexport function getLanguageFromExtension(\n extension: string,\n): LanguageConfig | undefined {\n const ext = extension.startsWith(\".\") ? 
extension : `.${extension}`;\n return buildExtensionMap()[ext.toLowerCase()];\n}\n\n/** Get language configuration from file path */\nexport function getLanguageFromPath(\n filePath: string,\n): LanguageConfig | undefined {\n const ext = filePath.slice(filePath.lastIndexOf(\".\")).toLowerCase();\n return buildExtensionMap()[ext];\n}\n\n/** Get language configuration by name */\nexport function getLanguageByName(name: string): LanguageConfig | undefined {\n return buildLanguages()[name.toLowerCase()];\n}\n\n/** Check if a language is supported by Tree-sitter */\nexport function isLanguageSupported(language: string): boolean {\n return language.toLowerCase() in buildLanguages();\n}\n\n/** Get all supported language names */\nexport function getSupportedLanguages(): string[] {\n return Object.keys(loadConfig().treesitter);\n}\n\n/** Get all supported file extensions */\nexport function getSupportedExtensions(): string[] {\n return Object.keys(buildExtensionMap());\n}\n\n/** Clear caches (for testing) */\nexport function clearLanguageCache(): void {\n configCache = null;\n languagesCache = null;\n extensionMapCache = null;\n}\n\n// Legacy exports for backward compatibility\nexport const LANGUAGES = buildLanguages();\nexport const EXTENSION_MAP = buildExtensionMap();\n\n// Register cache for centralized clearing\nregisterCache(\"languages:config\", clearLanguageCache);\n","/**\n * Tree-sitter parser module\n *\n * Provides code parsing functionality using web-tree-sitter\n * WASM files are loaded from local assets directory for minimal bundle size\n */\nimport { existsSync } from \"fs\";\nimport { join } from \"path\";\n\nimport {\n Language,\n type Node,\n Parser,\n type Point,\n type Tree,\n} from \"web-tree-sitter\";\n\nimport type { ASTNode, Position } from \"@core/ast/types\";\nimport { getAssetsDir, registerCache } from \"@core/utils\";\n\nimport {\n getLanguageByName,\n getLanguageFromPath,\n type LanguageConfig,\n} from \"./languages\";\n\n// Re-export language utilities\nexport * from \"./languages\";\n\n// Re-export types for external use\nexport type { Language, Node, Point, Tree };\n\n/**\n * Parser initialization state\n */\nlet isInitialized = false;\nlet initPromise: Promise<void> | null = null;\n\n/**\n * Cache for loaded languages\n */\nconst languageCache = new Map<string, Language>();\n\n/**\n * Parser instance (reused)\n */\nlet parser: Parser | null = null;\n\n/**\n * Initialize the Tree-sitter WASM module\n * Must be called before any parsing operations\n */\nexport async function initializeParser(): Promise<void> {\n if (isInitialized) {\n return;\n }\n\n if (initPromise) {\n return initPromise;\n }\n\n initPromise = (async () => {\n // web-tree-sitter loads its WASM from node_modules automatically\n await Parser.init();\n parser = new Parser();\n isInitialized = true;\n })();\n\n return initPromise;\n}\n\n/**\n * Check if the parser is initialized\n */\nexport function isParserInitialized(): boolean {\n return isInitialized;\n}\n\n/**\n * Get or create a parser instance\n */\nasync function getParser(): Promise<Parser> {\n await initializeParser();\n if (!parser) {\n throw new Error(\"Parser not initialized\");\n }\n return parser;\n}\n\n/**\n * Load a language grammar from local assets\n */\nasync function loadLanguage(config: LanguageConfig): Promise<Language> {\n const cacheKey = config.name;\n\n // Check cache first\n const cached = languageCache.get(cacheKey);\n if (cached) {\n return cached;\n }\n\n await initializeParser();\n\n // Load WASM from local assets 
directory\n const assetsDir = getAssetsDir();\n const wasmPath = join(assetsDir, \"wasm\", `tree-sitter-${config.name}.wasm`);\n\n if (!existsSync(wasmPath)) {\n throw new Error(\n `WASM file not found for language ${config.name}: ${wasmPath}`,\n );\n }\n\n const language = await Language.load(wasmPath);\n languageCache.set(cacheKey, language);\n\n return language;\n}\n\n/**\n * Parse result\n */\nexport interface ParseResult {\n /** The Tree-sitter tree */\n tree: Tree;\n /** Language that was used */\n language: string;\n /** The parser instance (for queries) */\n parser: Parser;\n /** The language instance (for queries) */\n languageInstance: Language;\n}\n\n/**\n * Parse options\n */\nexport interface ParseOptions {\n /** Language name (auto-detected from file path if not provided) */\n language?: string;\n /** File path (for language detection) */\n filePath?: string;\n}\n\n/**\n * Parse code content\n */\nexport async function parseCode(\n content: string,\n options: ParseOptions = {},\n): Promise<ParseResult> {\n const { language, filePath } = options;\n\n // Determine language config\n let config: LanguageConfig | undefined;\n\n if (language) {\n config = getLanguageByName(language);\n if (!config) {\n throw new Error(`Unsupported language: ${language}`);\n }\n } else if (filePath) {\n config = getLanguageFromPath(filePath);\n if (!config) {\n throw new Error(`Could not detect language for file: ${filePath}`);\n }\n } else {\n throw new Error(\"Either language or filePath must be provided\");\n }\n\n // Load the language grammar\n const languageInstance = await loadLanguage(config);\n\n // Get parser and set language\n const parserInstance = await getParser();\n parserInstance.setLanguage(languageInstance);\n\n // Parse the content\n const tree = parserInstance.parse(content);\n\n if (!tree) {\n throw new Error(\"Failed to parse content\");\n }\n\n return {\n tree,\n language: config.name,\n parser: parserInstance,\n languageInstance,\n };\n}\n\n/**\n * Convert Tree-sitter position to our Position type\n */\nexport function toPosition(point: Point, offset: number): Position {\n return {\n line: point.row + 1, // Convert 0-based to 1-based\n column: point.column,\n offset,\n };\n}\n\n/**\n * Convert Tree-sitter node to ASTNode\n */\nexport function toASTNode(\n node: Node,\n maxDepth?: number,\n currentDepth = 0,\n): ASTNode {\n const astNode: ASTNode = {\n type: node.type,\n text: node.text,\n start: toPosition(node.startPosition, node.startIndex),\n end: toPosition(node.endPosition, node.endIndex),\n isNamed: node.isNamed,\n };\n\n // Check depth limit\n if (maxDepth !== undefined && currentDepth >= maxDepth) {\n return astNode;\n }\n\n // Add children if present\n if (node.childCount > 0) {\n const namedChildren = node.namedChildren;\n if (namedChildren.length > 0) {\n astNode.children = namedChildren.map((child) =>\n toASTNode(child, maxDepth, currentDepth + 1),\n );\n }\n }\n\n // Add named fields using the language's field names\n const treeLang = node.tree.language;\n const fields: Record<string, ASTNode | ASTNode[]> = {};\n const langFields = treeLang.fields;\n\n for (const fieldName of langFields) {\n if (fieldName) {\n const fieldNode = node.childForFieldName(fieldName);\n if (fieldNode) {\n fields[fieldName] = toASTNode(fieldNode, maxDepth, currentDepth + 1);\n }\n }\n }\n\n if (Object.keys(fields).length > 0) {\n astNode.fields = fields;\n }\n\n return astNode;\n}\n\n/**\n * Get the root ASTNode from a parse result\n */\nexport function getASTRoot(\n parseResult: 
ParseResult,\n maxDepth?: number,\n): ASTNode {\n return toASTNode(parseResult.tree.rootNode, maxDepth);\n}\n\n/**\n * Count nodes in the tree\n */\nexport function countNodes(node: Node): number {\n let count = 1;\n for (const child of node.namedChildren) {\n count += countNodes(child);\n }\n return count;\n}\n\n/**\n * Clear the language cache (useful for testing)\n */\nexport function clearLanguageCache(): void {\n languageCache.clear();\n}\n\n/**\n * Reset the parser state (useful for testing)\n */\nexport function resetParser(): void {\n languageCache.clear();\n parser = null;\n isInitialized = false;\n initPromise = null;\n}\n\n// Register caches for centralized clearing\nregisterCache(\"parser:languageCache\", clearLanguageCache);\nregisterCache(\"parser:state\", resetParser);\n","/**\n * Query helper utilities\n *\n * Provides common patterns for working with Tree-sitter query captures\n */\nimport type { ASTNode, QueryCapture, QueryMatch } from \"@core/ast/types\";\n\n/**\n * Find a capture by exact name\n *\n * @param captures - Array of captures from a query match\n * @param name - Exact capture name to find\n * @returns The matching capture or undefined\n */\nexport function findCapture(\n captures: QueryCapture[],\n name: string,\n): QueryCapture | undefined {\n return captures.find((c) => c.name === name);\n}\n\n/**\n * Find a capture matching any of the given names\n *\n * @param captures - Array of captures from a query match\n * @param names - Array of capture names to search for\n * @returns The first matching capture or undefined\n */\nexport function findCaptureByNames(\n captures: QueryCapture[],\n names: string[],\n): QueryCapture | undefined {\n return captures.find((c) => names.includes(c.name));\n}\n\n/**\n * Find a capture by name prefix\n *\n * @param captures - Array of captures from a query match\n * @param prefix - Prefix to match (e.g., \"definition.\" matches \"definition.function\")\n * @returns The first matching capture or undefined\n */\nexport function findCaptureByPrefix(\n captures: QueryCapture[],\n prefix: string,\n): QueryCapture | undefined {\n return captures.find((c) => c.name.startsWith(prefix));\n}\n\n/**\n * Get all captures matching a prefix\n *\n * @param captures - Array of captures from a query match\n * @param prefix - Prefix to match\n * @returns Array of matching captures\n */\nexport function filterCapturesByPrefix(\n captures: QueryCapture[],\n prefix: string,\n): QueryCapture[] {\n return captures.filter((c) => c.name.startsWith(prefix));\n}\n\n/**\n * Extract the suffix from a capture name after the prefix\n *\n * @param captureName - Full capture name (e.g., \"definition.function\")\n * @param prefix - Prefix to remove (e.g., \"definition.\")\n * @returns The suffix (e.g., \"function\") or the original name if prefix not found\n */\nexport function getCaptureKind(captureName: string, prefix: string): string {\n return captureName.startsWith(prefix)\n ? 
captureName.slice(prefix.length)\n : captureName;\n}\n\n/**\n * Deduplicate nodes from query matches based on their position\n *\n * @param matches - Array of query matches\n * @param captureNames - Capture names to extract nodes from\n * @returns Array of unique ASTNode objects\n */\nexport function deduplicateNodes(\n matches: QueryMatch[],\n captureNames: string[],\n): ASTNode[] {\n const nodes: ASTNode[] = [];\n const seen = new Set<number>();\n\n for (const match of matches) {\n const capture = findCaptureByNames(match.captures, captureNames);\n if (capture && !seen.has(capture.node.start.offset)) {\n nodes.push(capture.node);\n seen.add(capture.node.start.offset);\n }\n }\n\n return nodes;\n}\n\n/**\n * Extract nodes from matches without deduplication\n *\n * @param matches - Array of query matches\n * @param captureNames - Capture names to extract nodes from\n * @returns Array of ASTNode objects\n */\nexport function extractNodes(\n matches: QueryMatch[],\n captureNames: string[],\n): ASTNode[] {\n const nodes: ASTNode[] = [];\n\n for (const match of matches) {\n const capture = findCaptureByNames(match.captures, captureNames);\n if (capture) {\n nodes.push(capture.node);\n }\n }\n\n return nodes;\n}\n\n/**\n * Create a deduplication set from node offsets\n *\n * @returns Object with add and has methods for tracking seen offsets\n */\nexport function createOffsetTracker(): {\n add: (node: ASTNode) => void;\n has: (node: ASTNode) => boolean;\n} {\n const seen = new Set<number>();\n return {\n add: (node: ASTNode) => seen.add(node.start.offset),\n has: (node: ASTNode) => seen.has(node.start.offset),\n };\n}\n","/**\n * SCM Query file loader\n *\n * Loads official Tree-sitter .scm query files from local assets directory\n * Supports inheritance via `; inherits: lang1,lang2` directives\n */\nimport { existsSync, readFileSync } from \"fs\";\nimport { join } from \"path\";\n\nimport { assetExists, getAssetsDir, registerCache } from \"@core/utils\";\n\n/**\n * Query type available in Tree-sitter grammars\n */\nexport type SCMQueryType =\n | \"tags\"\n | \"highlights\"\n | \"locals\"\n | \"injections\"\n | \"indents\"\n | \"folds\";\n\n/**\n * Cache for loaded SCM queries (with inheritance resolved)\n */\nconst scmCache = new Map<string, string>();\n\n/**\n * Supported languages with SCM query files\n */\nconst SUPPORTED_LANGUAGES = new Set([\n // Web & Frontend\n \"javascript\",\n \"typescript\",\n \"tsx\",\n \"svelte\",\n \"html\",\n // Systems Programming\n \"c\",\n \"cpp\",\n \"rust\",\n \"go\",\n \"swift\",\n // JVM Languages\n \"java\",\n \"scala\",\n \"kotlin\",\n // Scripting Languages\n \"python\",\n \"ruby\",\n \"php\",\n // .NET\n \"csharp\",\n \"c_sharp\",\n // Functional\n \"ocaml\",\n // Base/dependency languages (for inheritance)\n \"_javascript\",\n \"_jsx\",\n \"_typescript\",\n \"ecma\",\n \"jsx\",\n]);\n\n/**\n * Normalize language name for directory lookup\n */\nfunction normalizeLanguageName(language: string): string {\n // Map aliases to directory names\n if (language === \"csharp\") {\n return \"c_sharp\";\n }\n if (language === \"tsx\") {\n return \"typescript\"; // TSX uses typescript queries\n }\n return language;\n}\n\n/**\n * Get the path to a .scm query file\n */\nexport function getSCMPath(\n language: string,\n queryType: SCMQueryType,\n): string | undefined {\n const langDir = normalizeLanguageName(language);\n const relativePath = join(\"queries\", langDir, `${queryType}.scm`);\n\n if (assetExists(relativePath)) {\n return join(getAssetsDir(), 
relativePath);\n }\n\n return undefined;\n}\n\n/**\n * Parse inherit directives from SCM content\n * Supports: `; inherits: lang1,lang2` and `; inherits lang1`\n */\nfunction parseInherits(content: string): string[] {\n const inherits: string[] = [];\n // Normalize line endings (handle CRLF and CR)\n const normalizedContent = content.replace(/\\r\\n/g, \"\\n\").replace(/\\r/g, \"\\n\");\n const lines = normalizedContent.split(\"\\n\");\n\n for (const line of lines) {\n const match = /^;\\s*inherits:?\\s+([^\\s].*)$/.exec(line);\n if (match?.[1]) {\n const langs = match[1].split(\",\").map((l) => l.trim());\n inherits.push(...langs);\n }\n }\n\n return inherits;\n}\n\n/**\n * Remove inherit directives from SCM content\n */\nfunction removeInheritDirectives(content: string): string {\n // Normalize line endings (handle CRLF and CR)\n const normalizedContent = content.replace(/\\r\\n/g, \"\\n\").replace(/\\r/g, \"\\n\");\n return normalizedContent\n .split(\"\\n\")\n .filter((line) => !/^;\\s*inherits:?\\s+/.exec(line))\n .join(\"\\n\");\n}\n\n/**\n * Load a raw .scm file without resolving inheritance\n */\nfunction loadRawSCM(\n language: string,\n queryType: SCMQueryType,\n): string | undefined {\n const assetsDir = getAssetsDir();\n const scmPath = join(assetsDir, \"queries\", language, `${queryType}.scm`);\n\n if (!existsSync(scmPath)) {\n return undefined;\n }\n\n try {\n return readFileSync(scmPath, \"utf-8\");\n } catch {\n return undefined;\n }\n}\n\n/**\n * Load a .scm query file for a language with inheritance resolved\n *\n * @param language - Language name (e.g., \"javascript\", \"python\")\n * @param queryType - Type of query (e.g., \"tags\", \"highlights\")\n * @param visited - Set of visited languages to prevent circular inheritance\n * @returns Query string or undefined if not found\n */\nexport function loadSCMQuery(\n language: string,\n queryType: SCMQueryType,\n visited = new Set<string>(),\n): string | undefined {\n const cacheKey = `${language}:${queryType}`;\n\n // Check cache first\n if (scmCache.has(cacheKey)) {\n return scmCache.get(cacheKey);\n }\n\n // Prevent circular inheritance\n if (visited.has(language)) {\n return undefined;\n }\n visited.add(language);\n\n // Normalize language name for file lookup\n const langDir = normalizeLanguageName(language);\n const rawContent = loadRawSCM(langDir, queryType);\n\n if (!rawContent) {\n return undefined;\n }\n\n // Parse inherits directives\n const inherits = parseInherits(rawContent);\n const ownContent = removeInheritDirectives(rawContent).trim();\n\n // Load inherited content\n const inheritedParts: string[] = [];\n for (const inheritLang of inherits) {\n const inheritedContent = loadSCMQuery(inheritLang, queryType, visited);\n if (inheritedContent) {\n inheritedParts.push(inheritedContent);\n }\n }\n\n // Combine inherited content with own content\n const finalContent = [...inheritedParts, ownContent]\n .filter(Boolean)\n .join(\"\\n\\n\");\n\n if (finalContent) {\n scmCache.set(cacheKey, finalContent);\n }\n\n return finalContent || undefined;\n}\n\n/**\n * Load tags.scm for symbol extraction\n */\nexport function loadTagsQuery(language: string): string | undefined {\n return loadSCMQuery(language, \"tags\");\n}\n\n/**\n * Load highlights.scm for syntax highlighting\n */\nexport function loadHighlightsQuery(language: string): string | undefined {\n return loadSCMQuery(language, \"highlights\");\n}\n\n/**\n * Load locals.scm for local variable scoping\n */\nexport function loadLocalsQuery(language: string): string | 
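/* Inheritance sketch (illustrative): a tags.scm beginning with \"; inherits: ecma\" yields the resolved ecma query followed by the language's own patterns; the visited set stops circular chains. */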
undefined {\n return loadSCMQuery(language, \"locals\");\n}\n\n/**\n * Check which query types are available for a language\n */\nexport function getAvailableQueryTypes(language: string): SCMQueryType[] {\n const types: SCMQueryType[] = [\n \"tags\",\n \"highlights\",\n \"locals\",\n \"injections\",\n \"indents\",\n \"folds\",\n ];\n\n return types.filter((type) => getSCMPath(language, type) !== undefined);\n}\n\n/**\n * Check if a language has official tags.scm\n */\nexport function hasOfficialTags(language: string): boolean {\n return getSCMPath(language, \"tags\") !== undefined;\n}\n\n/**\n * Get all languages with official tags.scm\n */\nexport function getLanguagesWithTags(): string[] {\n return Array.from(SUPPORTED_LANGUAGES).filter(hasOfficialTags);\n}\n\n/**\n * Clear the SCM cache\n */\nexport function clearSCMCache(): void {\n scmCache.clear();\n}\n\n// Register cache for centralized clearing\nregisterCache(\"queries:scm\", clearSCMCache);\n","/**\n * Fallback SCM query patterns for languages without official tags.scm\n */\n\nexport type QueryPreset =\n | \"functions\"\n | \"classes\"\n | \"imports\"\n | \"exports\"\n | \"comments\"\n | \"strings\"\n | \"variables\"\n | \"types\";\n\nconst GENERIC_PATTERNS: Partial<Record<QueryPreset, string>> = {\n comments: `[(comment) @comment]`,\n strings: `[(string) @string (template_string) @string]`,\n imports: `(import_statement) @import.statement`,\n exports: `(export_statement) @export.statement`,\n variables: `[\n (variable_declaration (variable_declarator name: (identifier) @variable.name) @variable.declaration)\n (lexical_declaration (variable_declarator name: (identifier) @variable.name) @variable.declaration)\n ]`,\n types: `[\n (type_alias_declaration name: (type_identifier) @type.alias) @type.definition\n (enum_declaration name: (identifier) @enum.name) @enum.definition\n ]`,\n};\n\nexport const FALLBACK_PATTERNS: Record<\n string,\n Partial<Record<QueryPreset, string>>\n> = {\n typescript: {\n functions: `[\n (function_declaration name: (identifier) @function.name) @function.definition\n (method_definition name: (property_identifier) @function.name) @function.definition\n (lexical_declaration (variable_declarator name: (identifier) @function.name value: [(arrow_function) (function_expression)]) @function.definition)\n ]`,\n },\n json: { strings: `[(string) @string]` },\n yaml: {\n strings: `[(string_scalar) @string (double_quote_scalar) @string (single_quote_scalar) @string]`,\n comments: `[(comment) @comment]`,\n },\n toml: { strings: `[(string) @string]`, comments: `[(comment) @comment]` },\n bash: {\n functions: `(function_definition name: (word) @function.name) @function.definition`,\n comments: `[(comment) @comment]`,\n strings: `[(string) @string (raw_string) @string]`,\n variables: `(variable_assignment name: (variable_name) @variable.name) @variable.declaration`,\n },\n html: {\n strings: `[(attribute_value) @string (quoted_attribute_value) @string]`,\n comments: `[(comment) @comment]`,\n },\n css: {\n comments: `[(comment) @comment]`,\n strings: `[(string_value) @string]`,\n },\n scala: {\n functions: `(function_definition (identifier) @function.name) @function.definition`,\n classes: `[\n (class_definition (identifier) @class.name) @class.definition\n (object_definition (identifier) @class.name) @class.definition\n (trait_definition (identifier) @class.name) @class.definition\n ]`,\n comments: `[(comment) @comment]`,\n strings: `[(string) @string]`,\n },\n swift: {\n functions: `[\n (function_declaration 
(simple_identifier) @function.name) @function.definition\n (init_declaration) @function.definition\n ]`,\n classes: `[\n (class_declaration (type_identifier) @class.name) @class.definition\n (protocol_declaration (type_identifier) @class.name) @class.definition\n ]`,\n comments: `[(comment) @comment (multiline_comment) @comment]`,\n strings: `[(line_string_literal) @string]`,\n },\n ocaml: {\n functions: `(value_definition (let_binding (value_name) @function.name)) @function.definition`,\n classes: `[\n (type_definition (type_binding (type_constructor) @class.name)) @class.definition\n (module_definition (module_binding (module_name) @class.name)) @class.definition\n ]`,\n comments: `[(comment) @comment]`,\n strings: `[(string) @string]`,\n },\n svelte: {\n comments: `[(comment) @comment]`,\n strings: `[(attribute_value) @string (quoted_attribute_value) @string]`,\n },\n};\n\nexport function getQueryPattern(\n language: string,\n preset: QueryPreset,\n): string | undefined {\n return FALLBACK_PATTERNS[language]?.[preset] ?? GENERIC_PATTERNS[preset];\n}\n\nexport function isPresetAvailable(\n language: string,\n preset: QueryPreset,\n): boolean {\n return getQueryPattern(language, preset) !== undefined;\n}\n\nexport function getAvailablePresets(\n language: string,\n hasOfficialTagsFile = false,\n): QueryPreset[] {\n const presets: QueryPreset[] = hasOfficialTagsFile\n ? [\"functions\", \"classes\"]\n : [];\n const allPresets: QueryPreset[] = [\n \"functions\",\n \"classes\",\n \"imports\",\n \"exports\",\n \"comments\",\n \"strings\",\n \"variables\",\n \"types\",\n ];\n\n for (const preset of allPresets) {\n if (!presets.includes(preset) && isPresetAvailable(language, preset)) {\n presets.push(preset);\n }\n }\n return presets;\n}\n\nexport function getQuerySupportedLanguages(): string[] {\n return Object.keys(FALLBACK_PATTERNS);\n}\n","/**\n * SCM Query engine for Tree-sitter\n *\n * Supports both official .scm query files and custom preset patterns\n */\nimport {\n type Language,\n Query,\n type QueryMatch as TSQueryMatch,\n type Tree,\n} from \"web-tree-sitter\";\n\nimport type { ASTNode, QueryCapture, QueryMatch } from \"@core/ast/types\";\nimport { toASTNode } from \"@core/parser\";\n\nimport {\n deduplicateNodes,\n extractNodes,\n findCapture,\n findCaptureByPrefix,\n getCaptureKind,\n} from \"./helpers\";\nimport {\n hasOfficialTags,\n loadHighlightsQuery,\n loadLocalsQuery,\n loadTagsQuery,\n type SCMQueryType,\n} from \"./loader\";\nimport {\n getAvailablePresets as getAvailablePresetsBase,\n getQueryPattern,\n type QueryPreset,\n} from \"./patterns\";\n\n// Re-export helpers for external use\nexport * from \"./helpers\";\n\n// Re-export patterns and loader (except getAvailablePresets which we override)\nexport * from \"./loader\";\nexport {\n getQueryPattern,\n getQuerySupportedLanguages,\n isPresetAvailable,\n FALLBACK_PATTERNS,\n type QueryPreset,\n} from \"./patterns\";\n\n/**\n * Get all available presets for a language\n * Automatically checks for official tags.scm support\n */\nexport function getAvailablePresets(language: string): QueryPreset[] {\n return getAvailablePresetsBase(language, hasOfficialTags(language));\n}\n\n/**\n * Query execution result\n */\nexport interface QueryResult {\n /** Query matches */\n matches: QueryMatch[];\n /** Total match count */\n count: number;\n /** Query that was executed */\n query: string;\n /** Language used */\n language: string;\n /** Source of the query (official .scm or custom preset) */\n source: \"official\" | 
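/* Result shape sketch (illustrative): { matches: [...], count: 2, query: \"(comment) @comment\", language: \"typescript\", source: \"preset\" } */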
\"preset\";\n}\n\n/**\n * Query options\n */\nexport interface QueryOptions {\n /** Maximum number of matches to return */\n maxMatches?: number;\n /** Start index (byte offset) */\n startIndex?: number;\n /** End index (byte offset) */\n endIndex?: number;\n /** Prefer official .scm files when available */\n preferOfficial?: boolean;\n}\n\n/**\n * Execute a SCM query on parsed code\n */\nexport function executeQuery(\n tree: Tree,\n languageInstance: Language,\n queryString: string,\n language: string,\n options: QueryOptions = {},\n): QueryResult {\n const { maxMatches, startIndex, endIndex } = options;\n\n // Create query\n let query: Query;\n try {\n query = new Query(languageInstance, queryString);\n } catch (error) {\n const message = error instanceof Error ? error.message : String(error);\n throw new Error(`Invalid query: ${message}`);\n }\n\n // Get matches\n const queryMatches: TSQueryMatch[] = query.matches(tree.rootNode, {\n startIndex,\n endIndex,\n });\n\n const matches: QueryMatch[] = [];\n let count = 0;\n\n for (const match of queryMatches) {\n // Check max matches limit\n if (maxMatches !== undefined && count >= maxMatches) {\n break;\n }\n\n const captures: QueryCapture[] = match.captures.map((capture) => ({\n name: capture.name,\n node: toASTNode(capture.node),\n }));\n\n matches.push({\n pattern: match.patternIndex,\n captures,\n });\n\n count++;\n }\n\n return {\n matches,\n count,\n query: queryString,\n language,\n source: \"preset\",\n };\n}\n\n/**\n * Execute an official .scm query file\n *\n * @param tree - Parsed tree\n * @param languageInstance - Tree-sitter language instance\n * @param language - Language name\n * @param queryType - Type of query (tags, highlights, locals, etc.)\n * @param options - Query options\n */\nexport function executeOfficialQuery(\n tree: Tree,\n languageInstance: Language,\n language: string,\n queryType: SCMQueryType,\n options: QueryOptions = {},\n): QueryResult | undefined {\n let queryString: string | undefined;\n\n switch (queryType) {\n case \"tags\":\n queryString = loadTagsQuery(language);\n break;\n case \"highlights\":\n queryString = loadHighlightsQuery(language);\n break;\n case \"locals\":\n queryString = loadLocalsQuery(language);\n break;\n case \"injections\":\n case \"indents\":\n case \"folds\":\n // These query types are not yet implemented\n return undefined;\n }\n\n if (!queryString) {\n return undefined;\n }\n\n try {\n const result = executeQuery(\n tree,\n languageInstance,\n queryString,\n language,\n options,\n );\n return {\n ...result,\n source: \"official\",\n };\n } catch {\n // Official query might have incompatible patterns\n return undefined;\n }\n}\n\n/**\n * Execute tags.scm for comprehensive symbol extraction\n *\n * This uses the official Tree-sitter tags.scm file which provides:\n * - Function definitions with documentation\n * - Class definitions\n * - Method definitions\n * - Module/interface definitions\n * - Reference tracking (calls, types)\n */\nexport function executeTagsQuery(\n tree: Tree,\n languageInstance: Language,\n language: string,\n options: QueryOptions = {},\n): QueryResult | undefined {\n return executeOfficialQuery(\n tree,\n languageInstance,\n language,\n \"tags\",\n options,\n );\n}\n\n/**\n * Execute a preset query\n * Uses official tags.scm for functions/classes when available,\n * otherwise falls back to preset patterns\n */\nexport function executePresetQuery(\n tree: Tree,\n languageInstance: Language,\n language: string,\n preset: QueryPreset,\n options: QueryOptions 
= {},\n): QueryResult {\n const { maxMatches } = options;\n\n // Check if we have a language-specific fallback pattern for this preset\n // This is needed for languages like TypeScript where official tags.scm is incomplete\n const fallbackPattern = getQueryPattern(language, preset);\n\n // For functions and classes, try official tags.scm first, then fallback patterns\n if (preset === \"functions\" || preset === \"classes\") {\n // First try official tags.scm\n if (hasOfficialTags(language)) {\n const { definitions } = extractSymbolsFromTags(\n tree,\n languageInstance,\n language,\n );\n\n let filteredDefs =\n preset === \"functions\"\n ? definitions.filter(\n (d) => d.kind === \"function\" || d.kind === \"method\",\n )\n : definitions.filter(\n (d) =>\n d.kind === \"class\" ||\n d.kind === \"interface\" ||\n d.kind === \"module\",\n );\n\n // If official tags.scm found results, use them\n if (filteredDefs.length > 0) {\n // Apply maxMatches limit\n if (maxMatches !== undefined && filteredDefs.length > maxMatches) {\n filteredDefs = filteredDefs.slice(0, maxMatches);\n }\n\n // Convert to QueryResult format\n const matches: QueryMatch[] = filteredDefs.map((def) => ({\n pattern: 0,\n captures: [\n {\n name:\n preset === \"functions\"\n ? \"function.definition\"\n : \"class.definition\",\n node: def.node,\n },\n { name: `${preset.slice(0, -1)}.name`, node: def.nameNode },\n ],\n }));\n\n return {\n matches,\n count: matches.length,\n query: `[tags.scm ${preset}]`,\n language,\n source: \"official\",\n };\n }\n\n // Official tags.scm found 0 results - try fallback pattern or return empty\n if (fallbackPattern) {\n return executeQuery(\n tree,\n languageInstance,\n fallbackPattern,\n language,\n options,\n );\n }\n\n // No fallback pattern and 0 results from official - return empty result\n return {\n matches: [],\n count: 0,\n query: `[tags.scm ${preset}]`,\n language,\n source: \"official\",\n };\n }\n\n // No official tags.scm - must use fallback pattern\n if (fallbackPattern) {\n return executeQuery(\n tree,\n languageInstance,\n fallbackPattern,\n language,\n options,\n );\n }\n }\n\n // For other presets, use fallback patterns\n if (!fallbackPattern) {\n throw new Error(`No '${preset}' query pattern available for ${language}`);\n }\n\n return executeQuery(\n tree,\n languageInstance,\n fallbackPattern,\n language,\n options,\n );\n}\n\n/**\n * Extract symbols using official tags.scm when available\n *\n * This is the recommended method for symbol extraction as it uses\n * the official Tree-sitter query files for better accuracy.\n */\nexport function extractSymbolsFromTags(\n tree: Tree,\n languageInstance: Language,\n language: string,\n): {\n definitions: TagDefinition[];\n references: TagReference[];\n} {\n const result = executeTagsQuery(tree, languageInstance, language);\n\n if (!result) {\n return { definitions: [], references: [] };\n }\n\n const definitions: TagDefinition[] = [];\n const references: TagReference[] = [];\n\n for (const match of result.matches) {\n // Extract name capture\n const nameCapture = findCapture(match.captures, \"name\");\n if (!nameCapture) {\n continue;\n }\n\n // Check if it's a definition or reference\n const defCapture = findCaptureByPrefix(match.captures, \"definition.\");\n const refCapture = findCaptureByPrefix(match.captures, \"reference.\");\n const docCapture = findCapture(match.captures, \"doc\");\n\n if (defCapture) {\n const kind = getCaptureKind(defCapture.name, \"definition.\") as TagKind;\n definitions.push({\n name: 
nameCapture.node.text,\n kind,\n node: defCapture.node,\n nameNode: nameCapture.node,\n documentation: docCapture?.node.text,\n });\n } else if (refCapture) {\n const kind = getCaptureKind(refCapture.name, \"reference.\") as TagKind;\n references.push({\n name: nameCapture.node.text,\n kind,\n node: refCapture.node,\n nameNode: nameCapture.node,\n });\n }\n }\n\n return { definitions, references };\n}\n\n/**\n * Tag definition kinds from official tags.scm\n */\nexport type TagKind =\n | \"function\"\n | \"method\"\n | \"class\"\n | \"module\"\n | \"interface\"\n | \"constant\"\n | \"type\"\n | \"call\";\n\n/**\n * A symbol definition extracted from tags.scm\n */\nexport interface TagDefinition {\n name: string;\n kind: TagKind;\n node: ASTNode;\n nameNode: ASTNode;\n documentation?: string;\n}\n\n/**\n * A symbol reference extracted from tags.scm\n */\nexport interface TagReference {\n name: string;\n kind: TagKind;\n node: ASTNode;\n nameNode: ASTNode;\n}\n\n/**\n * Find all functions in the code\n * Prefers official tags.scm when available\n */\nexport function findFunctions(\n tree: Tree,\n languageInstance: Language,\n language: string,\n options: QueryOptions = {},\n): ASTNode[] {\n // Try official tags.scm first if preferOfficial is true or default\n if (options.preferOfficial !== false && hasOfficialTags(language)) {\n const { definitions } = extractSymbolsFromTags(\n tree,\n languageInstance,\n language,\n );\n return definitions\n .filter((d) => d.kind === \"function\" || d.kind === \"method\")\n .map((d) => d.node);\n }\n\n // Fall back to preset patterns\n try {\n const result = executePresetQuery(\n tree,\n languageInstance,\n language,\n \"functions\",\n options,\n );\n\n return extractNodes(result.matches, [\n \"function.definition\",\n \"method.definition\",\n \"function.declaration\",\n ]);\n } catch {\n return [];\n }\n}\n\n/**\n * Find all classes/structs in the code\n * Prefers official tags.scm when available\n */\nexport function findClasses(\n tree: Tree,\n languageInstance: Language,\n language: string,\n options: QueryOptions = {},\n): ASTNode[] {\n // Try official tags.scm first\n if (options.preferOfficial !== false && hasOfficialTags(language)) {\n const { definitions } = extractSymbolsFromTags(\n tree,\n languageInstance,\n language,\n );\n return definitions\n .filter(\n (d) =>\n d.kind === \"class\" || d.kind === \"interface\" || d.kind === \"module\",\n )\n .map((d) => d.node);\n }\n\n // Fall back to preset patterns\n try {\n const result = executePresetQuery(\n tree,\n languageInstance,\n language,\n \"classes\",\n options,\n );\n\n return extractNodes(result.matches, [\n \"class.definition\",\n \"struct.definition\",\n \"impl.definition\",\n ]);\n } catch {\n return [];\n }\n}\n\n/**\n * Find all imports in the code\n */\nexport function findImports(\n tree: Tree,\n languageInstance: Language,\n language: string,\n options: QueryOptions = {},\n): ASTNode[] {\n try {\n const result = executePresetQuery(\n tree,\n languageInstance,\n language,\n \"imports\",\n options,\n );\n\n return deduplicateNodes(result.matches, [\n \"import.statement\",\n \"include.statement\",\n ]);\n } catch {\n return [];\n }\n}\n\n/**\n * Find all exports in the code\n */\nexport function findExports(\n tree: Tree,\n languageInstance: Language,\n language: string,\n options: QueryOptions = {},\n): ASTNode[] {\n try {\n const result = executePresetQuery(\n tree,\n languageInstance,\n language,\n \"exports\",\n options,\n );\n\n return deduplicateNodes(result.matches, [\n 
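/* Note: several capture names may hit the same export statement, so results are collapsed by start offset via deduplicateNodes (see helpers above). */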
\"export.statement\",\n \"export.function\",\n \"export.class\",\n \"export.type\",\n ]);\n } catch {\n return [];\n }\n}\n\n/**\n * Find all comments in the code\n */\nexport function findComments(\n tree: Tree,\n languageInstance: Language,\n language: string,\n options: QueryOptions = {},\n): ASTNode[] {\n try {\n const result = executePresetQuery(\n tree,\n languageInstance,\n language,\n \"comments\",\n options,\n );\n\n return extractNodes(result.matches, [\"comment\", \"comment.block\"]);\n } catch {\n return [];\n }\n}\n\n/**\n * Find all string literals in the code\n */\nexport function findStrings(\n tree: Tree,\n languageInstance: Language,\n language: string,\n options: QueryOptions = {},\n): ASTNode[] {\n try {\n const result = executePresetQuery(\n tree,\n languageInstance,\n language,\n \"strings\",\n options,\n );\n\n return extractNodes(result.matches, [\n \"string\",\n \"string.template\",\n \"string.raw\",\n ]);\n } catch {\n return [];\n }\n}\n\n/**\n * Get function name from a function node\n */\nexport function getFunctionName(funcNode: ASTNode): string | undefined {\n // Look in fields first\n if (funcNode.fields?.name) {\n const nameNode = funcNode.fields.name;\n if (!Array.isArray(nameNode)) {\n return nameNode.text;\n }\n }\n\n // Look in children for identifier\n if (funcNode.children) {\n for (const child of funcNode.children) {\n if (\n child.type === \"identifier\" ||\n child.type === \"property_identifier\" ||\n child.type === \"field_identifier\"\n ) {\n return child.text;\n }\n // Recursive for function_declarator\n if (child.type === \"function_declarator\") {\n return getFunctionName(child);\n }\n }\n }\n\n return undefined;\n}\n\n/**\n * Get class name from a class node\n */\nexport function getClassName(classNode: ASTNode): string | undefined {\n // Look in fields first\n if (classNode.fields?.name) {\n const nameNode = classNode.fields.name;\n if (!Array.isArray(nameNode)) {\n return nameNode.text;\n }\n }\n\n // Look in children for identifier\n if (classNode.children) {\n for (const child of classNode.children) {\n if (child.type === \"identifier\" || child.type === \"type_identifier\") {\n return child.text;\n }\n }\n }\n\n return undefined;\n}\n","/**\n * Symbol extraction from parsed code\n */\nimport type { Language, Tree } from \"web-tree-sitter\";\n\nimport type {\n Export,\n Import,\n ImportedName,\n Symbol,\n SymbolType,\n} from \"@core/ast/types\";\nimport {\n createOffsetTracker,\n executePresetQuery,\n findCapture,\n findCaptureByNames,\n getClassName,\n getFunctionName,\n} from \"@core/queries\";\n\n/**\n * Symbol filter options\n */\nexport interface SymbolFilter {\n /** Include only specific types */\n types?: SymbolType[];\n /** Exclude specific types */\n excludeTypes?: SymbolType[];\n /** Include only exported symbols */\n exportedOnly?: boolean;\n}\n\n/**\n * Symbol extraction result\n */\nexport interface SymbolsResult {\n /** Extracted symbols */\n symbols: Symbol[];\n /** Summary counts */\n summary: {\n functions: number;\n classes: number;\n variables: number;\n constants: number;\n interfaces: number;\n types: number;\n enums: number;\n methods: number;\n properties: number;\n total: number;\n };\n}\n\n/**\n * Extract symbols from parsed code\n */\nexport function extractSymbols(\n tree: Tree,\n languageInstance: Language,\n language: string,\n filter: SymbolFilter = {},\n): SymbolsResult {\n const symbols: Symbol[] = [];\n const { types, excludeTypes } = filter;\n\n const shouldInclude = (type: SymbolType): boolean => {\n if 
(types && !types.includes(type)) {\n return false;\n }\n if (excludeTypes?.includes(type)) {\n return false;\n }\n return true;\n };\n\n // Extract functions and classes using official tags.scm when available\n if (\n shouldInclude(\"function\") ||\n shouldInclude(\"method\") ||\n shouldInclude(\"class\") ||\n shouldInclude(\"interface\")\n ) {\n const tagsResult = executePresetQuery(\n tree,\n languageInstance,\n language,\n \"functions\",\n );\n\n // Process function definitions\n if (shouldInclude(\"function\") || shouldInclude(\"method\")) {\n for (const match of tagsResult.matches) {\n const defCapture = findCapture(match.captures, \"function.definition\");\n const nameCapture = findCapture(match.captures, \"function.name\");\n\n if (defCapture) {\n const name =\n nameCapture?.node.text ?? getFunctionName(defCapture.node);\n if (name) {\n const isMethod =\n defCapture.node.type.includes(\"method\") ||\n defCapture.node.type === \"method_definition\";\n const type: SymbolType = isMethod ? \"method\" : \"function\";\n if (shouldInclude(type)) {\n symbols.push({\n name,\n type,\n start: defCapture.node.start,\n end: defCapture.node.end,\n signature: extractFunctionSignature(defCapture.node),\n modifiers: extractModifiers(defCapture.node),\n });\n }\n }\n }\n }\n }\n }\n\n // Extract classes\n if (shouldInclude(\"class\") || shouldInclude(\"interface\")) {\n const classResult = executePresetQuery(\n tree,\n languageInstance,\n language,\n \"classes\",\n );\n\n for (const match of classResult.matches) {\n const defCapture = findCapture(match.captures, \"class.definition\");\n const nameCapture = findCapture(match.captures, \"class.name\");\n\n if (defCapture) {\n const name = nameCapture?.node.text ?? getClassName(defCapture.node);\n if (name) {\n // Determine type based on AST node type\n const nodeType = defCapture.node.type;\n let symbolType: SymbolType = \"class\";\n if (\n nodeType.includes(\"interface\") ||\n nodeType === \"interface_declaration\"\n ) {\n symbolType = \"interface\";\n } else if (nodeType.includes(\"struct\")) {\n symbolType = \"interface\";\n }\n\n if (shouldInclude(symbolType)) {\n symbols.push({\n name,\n type: symbolType,\n start: defCapture.node.start,\n end: defCapture.node.end,\n modifiers: extractModifiers(defCapture.node),\n });\n }\n }\n }\n }\n }\n\n // Extract variables and constants\n if (shouldInclude(\"variable\") || shouldInclude(\"constant\")) {\n try {\n const varResult = executePresetQuery(\n tree,\n languageInstance,\n language,\n \"variables\",\n );\n\n for (const match of varResult.matches) {\n const nameCapture = findCaptureByNames(match.captures, [\n \"variable.name\",\n \"constant.name\",\n \"field.name\",\n ]);\n const declCapture = findCaptureByNames(match.captures, [\n \"variable.declaration\",\n \"constant.declaration\",\n \"field.declaration\",\n ]);\n\n if (nameCapture && declCapture) {\n const isConstant =\n declCapture.node.text.startsWith(\"const \") ||\n findCapture(match.captures, \"constant.name\") !== undefined;\n\n const type: SymbolType = isConstant ? 
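/* Heuristic example (illustrative): \"const MAX = 10\" -> constant, \"let count = 0\" -> variable */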
\"constant\" : \"variable\";\n if (shouldInclude(type)) {\n symbols.push({\n name: nameCapture.node.text,\n type,\n start: declCapture.node.start,\n end: declCapture.node.end,\n modifiers: extractModifiers(declCapture.node),\n });\n }\n }\n }\n } catch {\n // Query not available for this language\n }\n }\n\n // Extract types (interfaces, type aliases, enums)\n if (\n shouldInclude(\"interface\") ||\n shouldInclude(\"type\") ||\n shouldInclude(\"enum\")\n ) {\n try {\n const typeResult = executePresetQuery(\n tree,\n languageInstance,\n language,\n \"types\",\n );\n\n for (const match of typeResult.matches) {\n const nameCapture = findCaptureByNames(match.captures, [\n \"type.name\",\n \"interface.name\",\n \"enum.name\",\n \"type.alias\",\n ]);\n const defCapture = findCaptureByNames(match.captures, [\n \"type.definition\",\n \"interface.definition\",\n \"enum.definition\",\n ]);\n\n if (nameCapture && defCapture) {\n let type: SymbolType = \"type\";\n if (nameCapture.name === \"interface.name\") {\n type = \"interface\";\n } else if (nameCapture.name === \"enum.name\") {\n type = \"enum\";\n }\n\n if (shouldInclude(type)) {\n symbols.push({\n name: nameCapture.node.text,\n type,\n start: defCapture.node.start,\n end: defCapture.node.end,\n modifiers: extractModifiers(defCapture.node),\n });\n }\n }\n }\n } catch {\n // Query not available for this language\n }\n }\n\n // Calculate summary\n const summary = {\n functions: symbols.filter((s) => s.type === \"function\").length,\n classes: symbols.filter((s) => s.type === \"class\").length,\n variables: symbols.filter((s) => s.type === \"variable\").length,\n constants: symbols.filter((s) => s.type === \"constant\").length,\n interfaces: symbols.filter((s) => s.type === \"interface\").length,\n types: symbols.filter((s) => s.type === \"type\").length,\n enums: symbols.filter((s) => s.type === \"enum\").length,\n methods: symbols.filter((s) => s.type === \"method\").length,\n properties: symbols.filter((s) => s.type === \"property\").length,\n total: symbols.length,\n };\n\n return { symbols, summary };\n}\n\n/**\n * Extract function signature from AST node\n */\nfunction extractFunctionSignature(node: {\n text: string;\n children?: { type: string; text: string }[];\n}): string | undefined {\n // Try to extract just the signature (function name + params)\n const text = node.text;\n\n // For JavaScript/TypeScript-like syntax\n const jsMatch =\n /^(async\\s+)?function\\s*\\*?\\s*(\\w*)\\s*(<[^>]*>)?\\s*\\([^)]*\\)(\\s*:\\s*[^{]+)?/.exec(\n text,\n );\n if (jsMatch) {\n return jsMatch[0].trim();\n }\n\n // For arrow functions\n const arrowMatch = /^\\([^)]*\\)\\s*(:\\s*[^=]+)?\\s*=>/.exec(text);\n if (arrowMatch) {\n return arrowMatch[0].trim();\n }\n\n // For Python\n const pyMatch = /^def\\s+(\\w+)\\s*\\([^)]*\\)(\\s*->\\s*[^:]+)?:/.exec(text);\n if (pyMatch) {\n return pyMatch[0].replace(/:$/, \"\").trim();\n }\n\n // For Go\n const goMatch = /^func\\s*(\\([^)]*\\)\\s*)?(\\w+)\\s*\\([^)]*\\)/.exec(text);\n if (goMatch) {\n return goMatch[0].trim();\n }\n\n // Fallback: return first line up to opening brace\n const firstLine = text.split(/[{:]/)[0];\n return firstLine ? 
firstLine.trim() : undefined;\n}\n\n/**\n * Extract modifiers from AST node\n */\nfunction extractModifiers(node: { text: string }): string[] | undefined {\n const modifiers: string[] = [];\n const text = node.text;\n\n // Common modifiers\n const modifierPatterns = [\n \"export\",\n \"default\",\n \"async\",\n \"static\",\n \"public\",\n \"private\",\n \"protected\",\n \"readonly\",\n \"abstract\",\n \"const\",\n \"let\",\n \"var\",\n \"final\",\n \"override\",\n \"pub\",\n \"mut\",\n ];\n\n for (const mod of modifierPatterns) {\n const pattern = new RegExp(`\\\\b${mod}\\\\b`);\n if (pattern.test(text.slice(0, 100))) {\n // Only check start of node\n modifiers.push(mod);\n }\n }\n\n return modifiers.length > 0 ? modifiers : undefined;\n}\n\n/**\n * Extract imports from parsed code\n */\nexport function extractImports(\n tree: Tree,\n languageInstance: Language,\n language: string,\n): Import[] {\n const imports: Import[] = [];\n\n try {\n const result = executePresetQuery(\n tree,\n languageInstance,\n language,\n \"imports\",\n );\n\n const tracker = createOffsetTracker();\n\n for (const match of result.matches) {\n const stmtCapture = findCaptureByNames(match.captures, [\n \"import.statement\",\n \"include.statement\",\n ]);\n\n if (!stmtCapture || tracker.has(stmtCapture.node)) {\n continue;\n }\n tracker.add(stmtCapture.node);\n\n const sourceCapture = findCaptureByNames(match.captures, [\n \"import.source\",\n \"import.path\",\n \"include.path\",\n ]);\n const defaultCapture = findCapture(match.captures, \"import.default\");\n const nameCaptures = match.captures.filter(\n (c) => c.name === \"import.name\",\n );\n\n const source = sourceCapture\n ? sourceCapture.node.text.replace(/['\"]/g, \"\")\n : \"\";\n\n const names: ImportedName[] = [];\n\n if (defaultCapture) {\n names.push({ name: defaultCapture.node.text });\n }\n\n for (const nameCapture of nameCaptures) {\n names.push({ name: nameCapture.node.text });\n }\n\n imports.push({\n source,\n names,\n isDefault: !!defaultCapture && nameCaptures.length === 0,\n start: stmtCapture.node.start,\n end: stmtCapture.node.end,\n });\n }\n } catch {\n // Query not available\n }\n\n return imports;\n}\n\n/**\n * Extract exports from parsed code\n */\nexport function extractExports(\n tree: Tree,\n languageInstance: Language,\n language: string,\n): Export[] {\n const exportList: Export[] = [];\n\n try {\n const result = executePresetQuery(\n tree,\n languageInstance,\n language,\n \"exports\",\n );\n\n const tracker = createOffsetTracker();\n\n for (const match of result.matches) {\n const stmtCapture = findCaptureByNames(match.captures, [\n \"export.statement\",\n \"export.function\",\n \"export.class\",\n \"export.type\",\n ]);\n\n if (!stmtCapture || tracker.has(stmtCapture.node)) {\n continue;\n }\n tracker.add(stmtCapture.node);\n\n const nameCapture = findCapture(match.captures, \"export.name\");\n\n const text = stmtCapture.node.text;\n const isDefault = text.includes(\"export default\");\n\n // Try to extract name from declaration if not captured\n let name = nameCapture?.node.text;\n if (!name) {\n // Try to extract from export statement\n const nameMatch =\n /export\\s+(?:default\\s+)?(?:function|class|const|let|var|interface|type|enum)\\s+(\\w+)/.exec(\n text,\n );\n if (nameMatch?.[1]) {\n name = nameMatch[1];\n }\n }\n\n exportList.push({\n name: name ?? 
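/* e.g. \"export default function render() {}\" -> { name: \"render\", isDefault: true } (illustrative); unnamed defaults fall back to */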
\"default\",\n isDefault,\n start: stmtCapture.node.start,\n end: stmtCapture.node.end,\n });\n }\n } catch {\n // Query not available\n }\n\n return exportList;\n}\n\n/**\n * Get symbols by type\n */\nexport function getSymbolsByType(\n symbols: Symbol[],\n type: SymbolType,\n): Symbol[] {\n return symbols.filter((s) => s.type === type);\n}\n\n/**\n * Find symbol by name\n */\nexport function findSymbolByName(\n symbols: Symbol[],\n name: string,\n): Symbol | undefined {\n return symbols.find((s) => s.name === name);\n}\n\n/**\n * Get symbol at position\n */\nexport function getSymbolAtPosition(\n symbols: Symbol[],\n line: number,\n column: number,\n): Symbol | undefined {\n return symbols.find((s) => {\n const afterStart =\n line > s.start.line ||\n (line === s.start.line && column >= s.start.column);\n const beforeEnd =\n line < s.end.line || (line === s.end.line && column <= s.end.column);\n return afterStart && beforeEnd;\n });\n}\n\n/**\n * Extract all code information\n */\nexport interface CodeInfo {\n symbols: SymbolsResult;\n imports: Import[];\n exports: Export[];\n}\n\nexport function extractCodeInfo(\n tree: Tree,\n languageInstance: Language,\n language: string,\n filter: SymbolFilter = {},\n): CodeInfo {\n return {\n symbols: extractSymbols(tree, languageInstance, language, filter),\n imports: extractImports(tree, languageInstance, language),\n exports: extractExports(tree, languageInstance, language),\n };\n}\n","/**\n * Code chunker for splitting source files into embeddable chunks\n *\n * Uses tree-sitter for semantic chunking based on symbols (functions, classes, etc.)\n * This produces much better embeddings than character-based splitting.\n */\n\nimport * as crypto from \"node:crypto\";\nimport type { Symbol } from \"@core/ast/types\";\nimport { parseCode } from \"@core/parser\";\nimport { extractSymbols } from \"@core/symbols\";\nimport { logger } from \"@utils\";\n\nimport type { CodeChunk, EmbeddingConfig } from \"./types\";\n\n/**\n * Generate a unique ID for a chunk\n */\nfunction generateChunkId(\n filePath: string,\n content: string,\n startLine: number,\n): string {\n const hash = crypto\n .createHash(\"md5\")\n .update(`${filePath}:${String(startLine)}:${content}`)\n .digest(\"hex\")\n .slice(0, 12);\n return `chunk_${hash}`;\n}\n\n/**\n * Detect language from file extension\n */\nexport function detectLanguage(filePath: string): string {\n const ext = filePath.split(\".\").pop()?.toLowerCase() ?? \"\";\n\n const extensionMap: Record<string, string> = {\n ts: \"typescript\",\n tsx: \"typescript\",\n js: \"javascript\",\n jsx: \"javascript\",\n mjs: \"javascript\",\n cjs: \"javascript\",\n py: \"python\",\n rs: \"rust\",\n go: \"go\",\n java: \"java\",\n kt: \"kotlin\",\n rb: \"ruby\",\n php: \"php\",\n c: \"c\",\n cpp: \"cpp\",\n h: \"c\",\n hpp: \"cpp\",\n cs: \"csharp\",\n swift: \"swift\",\n scala: \"scala\",\n vue: \"vue\",\n svelte: \"svelte\",\n md: \"markdown\",\n json: \"json\",\n yaml: \"yaml\",\n yml: \"yaml\",\n toml: \"toml\",\n xml: \"xml\",\n html: \"html\",\n css: \"css\",\n scss: \"scss\",\n less: \"less\",\n sql: \"sql\",\n sh: \"bash\",\n bash: \"bash\",\n zsh: \"bash\",\n };\n\n return extensionMap[ext] ?? \"unknown\";\n}\n\n/**\n * Get line number from byte offset\n */\nfunction getLineFromOffset(content: string, offset: number): number {\n const before = content.slice(0, offset);\n return (before.match(/\\n/g) ?? 
[]).length + 1;\n}\n\n/**\n * Extract code content for a symbol using its offsets\n */\nfunction getSymbolContent(content: string, symbol: Symbol): string {\n return content.slice(symbol.start.offset, symbol.end.offset);\n}\n\n/**\n * Split large content into smaller chunks while respecting line boundaries\n */\nfunction splitLargeContent(\n content: string,\n maxSize: number,\n overlap: number,\n): string[] {\n // Normalize line endings (handle CRLF and CR)\n const normalizedContent = content.replace(/\\r\\n/g, \"\\n\").replace(/\\r/g, \"\\n\");\n\n if (normalizedContent.length <= maxSize) {\n return [normalizedContent];\n }\n\n const chunks: string[] = [];\n const lines = normalizedContent.split(\"\\n\");\n let currentChunk: string[] = [];\n let currentSize = 0;\n\n for (const line of lines) {\n const lineSize = line.length + 1; // +1 for newline\n\n if (currentSize + lineSize > maxSize && currentChunk.length > 0) {\n chunks.push(currentChunk.join(\"\\n\"));\n\n // Keep overlap lines\n const overlapLines: string[] = [];\n let overlapSize = 0;\n for (\n let i = currentChunk.length - 1;\n i >= 0 && overlapSize < overlap;\n i--\n ) {\n const l = currentChunk[i];\n if (l !== undefined) {\n overlapLines.unshift(l);\n overlapSize += l.length + 1;\n }\n }\n currentChunk = overlapLines;\n currentSize = overlapSize;\n }\n\n currentChunk.push(line);\n currentSize += lineSize;\n }\n\n if (currentChunk.length > 0) {\n chunks.push(currentChunk.join(\"\\n\"));\n }\n\n return chunks;\n}\n\n/**\n * Create a chunk from content\n */\nfunction createChunk(\n filePath: string,\n language: string,\n content: string,\n startLine: number,\n endLine: number,\n symbolName?: string,\n symbolType?: string,\n): CodeChunk {\n return {\n id: generateChunkId(filePath, content, startLine),\n content,\n filePath,\n language,\n startLine,\n endLine,\n symbolName,\n symbolType,\n };\n}\n\n/**\n * Group consecutive small items (imports, constants, types) into a single chunk\n */\ninterface ContentRegion {\n content: string;\n startOffset: number;\n endOffset: number;\n startLine: number;\n endLine: number;\n symbolName?: string;\n symbolType?: string;\n}\n\n/**\n * Chunk a source file using tree-sitter for semantic boundaries\n *\n * Strategy:\n * 1. Parse file with tree-sitter and extract symbols\n * 2. Each function/class/method becomes its own chunk\n * 3. Code between symbols (imports, top-level code) is grouped together\n * 4. Large symbols are split at line boundaries if they exceed maxSize\n */\nexport async function chunkFile(\n filePath: string,\n content: string,\n config: Pick<EmbeddingConfig, \"defaultChunkSize\" | \"defaultChunkOverlap\">,\n): Promise<CodeChunk[]> {\n const language = detectLanguage(filePath);\n const maxSize = config.defaultChunkSize;\n const overlap = config.defaultChunkOverlap;\n\n // Try to parse with tree-sitter\n let symbols: Symbol[] = [];\n try {\n const parseResult = await parseCode(content, { filePath });\n const result = extractSymbols(\n parseResult.tree,\n parseResult.languageInstance,\n parseResult.language,\n );\n symbols = result.symbols;\n } catch (error) {\n logger.debug(\n `Tree-sitter parsing failed for ${filePath}, using fallback chunking: ${\n error instanceof Error ? 
error.message : String(error)\n }`,\n );\n // Fallback to simple line-based chunking\n return fallbackChunk(filePath, content, language, maxSize, overlap);\n }\n\n // If no symbols found, use fallback\n if (symbols.length === 0) {\n return fallbackChunk(filePath, content, language, maxSize, overlap);\n }\n\n // Sort symbols by start offset\n const sortedSymbols = [...symbols].sort(\n (a, b) => a.start.offset - b.start.offset,\n );\n\n // Build regions: symbols + gaps between them\n const regions: ContentRegion[] = [];\n let lastEndOffset = 0;\n\n for (const symbol of sortedSymbols) {\n // Add gap before this symbol (if any significant content)\n if (symbol.start.offset > lastEndOffset) {\n const gapContent = content\n .slice(lastEndOffset, symbol.start.offset)\n .trim();\n if (gapContent.length > 0) {\n regions.push({\n content: content.slice(lastEndOffset, symbol.start.offset),\n startOffset: lastEndOffset,\n endOffset: symbol.start.offset,\n startLine: getLineFromOffset(content, lastEndOffset),\n endLine: getLineFromOffset(content, symbol.start.offset),\n });\n }\n }\n\n // Add symbol region\n const symbolContent = getSymbolContent(content, symbol);\n regions.push({\n content: symbolContent,\n startOffset: symbol.start.offset,\n endOffset: symbol.end.offset,\n startLine: symbol.start.line,\n endLine: symbol.end.line,\n symbolName: symbol.name,\n symbolType: symbol.type,\n });\n\n lastEndOffset = Math.max(lastEndOffset, symbol.end.offset);\n }\n\n // Add trailing content after last symbol\n if (lastEndOffset < content.length) {\n const trailingContent = content.slice(lastEndOffset).trim();\n if (trailingContent.length > 0) {\n regions.push({\n content: content.slice(lastEndOffset),\n startOffset: lastEndOffset,\n endOffset: content.length,\n startLine: getLineFromOffset(content, lastEndOffset),\n endLine: getLineFromOffset(content, content.length),\n });\n }\n }\n\n // Convert regions to chunks, splitting large ones\n const chunks: CodeChunk[] = [];\n\n for (const region of regions) {\n const regionContent = region.content.trim();\n if (regionContent.length === 0) {\n continue;\n }\n\n if (regionContent.length <= maxSize) {\n // Small enough, create single chunk\n chunks.push(\n createChunk(\n filePath,\n language,\n regionContent,\n region.startLine,\n region.endLine,\n region.symbolName,\n region.symbolType,\n ),\n );\n } else {\n // Too large, split it\n const parts = splitLargeContent(regionContent, maxSize, overlap);\n let currentLine = region.startLine;\n\n for (const part of parts) {\n const partLines = (part.match(/\\n/g) ?? []).length + 1;\n chunks.push(\n createChunk(\n filePath,\n language,\n part,\n currentLine,\n currentLine + partLines - 1,\n region.symbolName,\n region.symbolType,\n ),\n );\n currentLine += partLines - Math.floor(overlap / 50); // Approximate line overlap\n }\n }\n }\n\n return chunks;\n}\n\n/**\n * Fallback chunking when tree-sitter fails or finds no symbols\n * Uses simple line-based splitting\n */\nfunction fallbackChunk(\n filePath: string,\n content: string,\n language: string,\n maxSize: number,\n overlap: number,\n): CodeChunk[] {\n // Handle empty content\n if (content.trim().length === 0) {\n return [];\n }\n\n const chunks: CodeChunk[] = [];\n const parts = splitLargeContent(content, maxSize, overlap);\n\n let currentLine = 1;\n for (const part of parts) {\n const partLines = (part.match(/\\n/g) ?? 
[]).length + 1;\n chunks.push(\n createChunk(\n filePath,\n language,\n part,\n currentLine,\n currentLine + partLines - 1,\n ),\n );\n currentLine += partLines - Math.floor(overlap / 50);\n }\n\n return chunks;\n}\n\n/**\n * Chunk multiple files\n */\nexport async function chunkFiles(\n files: { path: string; content: string }[],\n config: Pick<EmbeddingConfig, \"defaultChunkSize\" | \"defaultChunkOverlap\">,\n): Promise<CodeChunk[]> {\n const allChunks: CodeChunk[] = [];\n\n for (const file of files) {\n const chunks = await chunkFile(file.path, file.content, config);\n allChunks.push(...chunks);\n }\n\n return allChunks;\n}\n\n/**\n * Supported file extensions for indexing\n */\nexport const SUPPORTED_EXTENSIONS = [\n \".ts\",\n \".tsx\",\n \".js\",\n \".jsx\",\n \".mjs\",\n \".cjs\",\n \".py\",\n \".rs\",\n \".go\",\n \".java\",\n \".kt\",\n \".rb\",\n \".php\",\n \".c\",\n \".cpp\",\n \".h\",\n \".hpp\",\n \".cs\",\n \".swift\",\n \".scala\",\n \".vue\",\n \".svelte\",\n \".md\",\n];\n\n/**\n * Check if a file should be indexed\n */\nexport function shouldIndexFile(filePath: string): boolean {\n const ext = \".\" + (filePath.split(\".\").pop()?.toLowerCase() ?? \"\");\n return SUPPORTED_EXTENSIONS.includes(ext);\n}\n","/**\n * Cross-file context resolution for enriched embeddings\n *\n * Resolves imports and includes relevant symbol definitions from\n * imported files to provide better context for semantic search.\n */\n\nimport * as fs from \"node:fs\";\nimport * as path from \"node:path\";\nimport type { Export, Import, Symbol } from \"@core/ast/types\";\nimport { parseCode } from \"@core/parser\";\nimport { extractExports, extractSymbols } from \"@core/symbols\";\nimport { registerCache } from \"@core/utils\";\nimport { logger } from \"@utils\";\n\n/**\n * Resolved import with source file information\n */\nexport interface ResolvedImport {\n /** Original import statement */\n import: Import;\n /** Resolved absolute file path (null if unresolved) */\n resolvedPath: string | null;\n /** Exported symbols from the resolved file */\n symbols: Symbol[];\n /** Exports from the resolved file */\n exports: Export[];\n}\n\n/**\n * Cross-file context for a chunk\n */\nexport interface CrossFileContext {\n /** Resolved imports with their definitions */\n resolvedImports: ResolvedImport[];\n /** Summary of imported symbols used */\n importedSymbolsSummary: string;\n}\n\n/**\n * Options for cross-file resolution\n */\nexport interface CrossFileOptions {\n /** Project root directory */\n projectRoot: string;\n /** Path aliases (e.g., {\"@core\": \"src/core\"}) */\n pathAliases?: Record<string, string>;\n /** Maximum number of imports to resolve */\n maxImports?: number;\n /** Maximum symbols per imported file */\n maxSymbolsPerFile?: number;\n}\n\n/**\n * Cache for resolved file analysis\n */\ninterface ResolvedFileCache {\n symbols: Symbol[];\n exports: Export[];\n}\n\nconst resolvedFileCache = new Map<string, ResolvedFileCache | null>();\n\n/**\n * Clear the resolved file cache\n */\nexport function clearCrossFileCache(): void {\n resolvedFileCache.clear();\n}\n\n// Register cache for centralized clearing\nregisterCache(\"embeddings:crossFileCache\", clearCrossFileCache);\n\n/**\n * Common file extensions to try when resolving imports\n */\nconst EXTENSIONS = [\".ts\", \".tsx\", \".js\", \".jsx\", \".mjs\", \".cjs\"];\n\n/**\n * Resolve an import source to an absolute file path\n */\nfunction resolveImportPath(\n importSource: string,\n currentFilePath: string,\n options: CrossFileOptions,\n): 
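/* Resolution sketch (illustrative): with pathAliases {\"@core\": \"src/core\"}, \"@core/ast/types\" maps to projectRoot/src/core/ast/types.ts when present; \"./helpers\" resolves against the importing file's directory, then index.* is tried for directories. */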
string | null {\n const { projectRoot, pathAliases = {} } = options;\n\n // Skip external packages (node_modules)\n if (\n !importSource.startsWith(\".\") &&\n !importSource.startsWith(\"@\") &&\n !Object.keys(pathAliases).some((alias) => importSource.startsWith(alias))\n ) {\n return null;\n }\n\n let resolvedPath: string | undefined;\n\n // Handle path aliases\n for (const [alias, target] of Object.entries(pathAliases)) {\n if (importSource.startsWith(alias)) {\n const relativePart = importSource.slice(alias.length);\n resolvedPath = path.join(projectRoot, target, relativePart);\n break;\n }\n }\n\n // Handle relative imports\n if (resolvedPath === undefined) {\n if (importSource.startsWith(\".\")) {\n const currentDir = path.dirname(currentFilePath);\n resolvedPath = path.resolve(currentDir, importSource);\n } else {\n // Unresolved alias or external package\n return null;\n }\n }\n\n // Try to find the actual file\n // First, check if it's a direct file with extension\n for (const ext of EXTENSIONS) {\n const withExt = resolvedPath + ext;\n if (fs.existsSync(withExt) && fs.statSync(withExt).isFile()) {\n return withExt;\n }\n }\n\n // Check if the path itself exists and is a file (already has extension)\n if (fs.existsSync(resolvedPath) && fs.statSync(resolvedPath).isFile()) {\n return resolvedPath;\n }\n\n // Check for index file in directory\n for (const ext of EXTENSIONS) {\n const indexPath = path.join(resolvedPath, `index${ext}`);\n if (fs.existsSync(indexPath) && fs.statSync(indexPath).isFile()) {\n return indexPath;\n }\n }\n\n return null;\n}\n\n/**\n * Analyze a resolved file and extract its symbols\n */\nasync function analyzeResolvedFile(\n filePath: string,\n): Promise<ResolvedFileCache | null> {\n // Check cache\n const cached = resolvedFileCache.get(filePath);\n if (cached !== undefined) {\n return cached;\n }\n\n try {\n const content = fs.readFileSync(filePath, \"utf-8\");\n const parseResult = await parseCode(content, { filePath });\n\n const { symbols } = extractSymbols(\n parseResult.tree,\n parseResult.languageInstance,\n parseResult.language,\n );\n\n const exports = extractExports(\n parseResult.tree,\n parseResult.languageInstance,\n parseResult.language,\n );\n\n const result: ResolvedFileCache = { symbols, exports };\n resolvedFileCache.set(filePath, result);\n return result;\n } catch (error) {\n logger.debug(\n `Failed to analyze ${filePath}: ${error instanceof Error ? 
error.message : String(error)}`,\n );\n resolvedFileCache.set(filePath, null);\n return null;\n }\n}\n\n/**\n * Find symbols that match imported names\n */\nfunction findImportedSymbols(\n importStatement: Import,\n symbols: Symbol[],\n exports: Export[],\n): Symbol[] {\n const importedNames = new Set<string>();\n\n // Collect all imported names\n for (const name of importStatement.names) {\n importedNames.add(name.name);\n }\n\n // If namespace import, include all exported symbols\n if (importStatement.isNamespace) {\n const exportedNames = new Set(exports.map((e) => e.name));\n return symbols.filter((s) => exportedNames.has(s.name));\n }\n\n // If default import, look for default export\n if (importStatement.isDefault) {\n const defaultExport = exports.find((e) => e.isDefault);\n if (defaultExport) {\n importedNames.add(defaultExport.name);\n }\n }\n\n // Find matching symbols\n return symbols.filter((s) => importedNames.has(s.name));\n}\n\n/**\n * Resolve imports and get cross-file context\n */\nexport async function resolveCrossFileContext(\n imports: Import[],\n currentFilePath: string,\n options: CrossFileOptions,\n): Promise<CrossFileContext> {\n const maxImports = options.maxImports ?? 10;\n const maxSymbolsPerFile = options.maxSymbolsPerFile ?? 5;\n\n const resolvedImports: ResolvedImport[] = [];\n\n // Process imports (limited to maxImports)\n for (const imp of imports.slice(0, maxImports)) {\n const resolvedPath = resolveImportPath(\n imp.source,\n currentFilePath,\n options,\n );\n\n if (!resolvedPath) {\n resolvedImports.push({\n import: imp,\n resolvedPath: null,\n symbols: [],\n exports: [],\n });\n continue;\n }\n\n const analysis = await analyzeResolvedFile(resolvedPath);\n\n if (!analysis) {\n resolvedImports.push({\n import: imp,\n resolvedPath,\n symbols: [],\n exports: [],\n });\n continue;\n }\n\n // Find symbols that match the imported names\n const importedSymbols = findImportedSymbols(\n imp,\n analysis.symbols,\n analysis.exports,\n ).slice(0, maxSymbolsPerFile);\n\n resolvedImports.push({\n import: imp,\n resolvedPath,\n symbols: importedSymbols,\n exports: analysis.exports,\n });\n }\n\n // Build summary of imported symbols\n const summary = buildImportedSymbolsSummary(resolvedImports);\n\n return {\n resolvedImports,\n importedSymbolsSummary: summary,\n };\n}\n\n/**\n * Build a summary string of imported symbols for enrichment\n */\nfunction buildImportedSymbolsSummary(\n resolvedImports: ResolvedImport[],\n): string {\n const lines: string[] = [];\n\n for (const resolved of resolvedImports) {\n if (resolved.symbols.length === 0) {\n continue;\n }\n\n // Group by import source\n const symbolDescriptions = resolved.symbols.map((s) => {\n if (s.signature) {\n return `${s.name}: ${s.signature}`;\n }\n return `${s.name} (${s.type})`;\n });\n\n if (symbolDescriptions.length > 0) {\n lines.push(\n `From ${resolved.import.source}: ${symbolDescriptions.join(\"; \")}`,\n );\n }\n }\n\n return lines.join(\"\\n\");\n}\n\n/**\n * Get cross-file cache statistics\n */\nexport function getCrossFileCacheStats(): {\n files: number;\n entries: string[];\n} {\n return {\n files: resolvedFileCache.size,\n entries: Array.from(resolvedFileCache.keys()),\n };\n}\n","/**\n * Chunk enrichment module for better embeddings\n *\n * Enriches code chunks with semantic metadata (symbols, imports, exports)\n * and cross-file context (resolved import definitions) to improve\n * embedding quality for semantic search.\n *\n * Pipeline:\n * File → chunkFile() → CodeChunk[] → enrichChunks() → 
EnrichedChunk[]\n * ↓\n * embedBatch(enrichedContent)\n */\n\nimport type { Export, Import, Symbol } from \"@core/ast/types\";\nimport { parseCode, type ParseResult } from \"@core/parser\";\nimport { extractExports, extractImports, extractSymbols } from \"@core/symbols\";\nimport { registerCache } from \"@core/utils\";\nimport { logger } from \"@utils\";\nimport { ENRICHMENT_CONFIG } from \"@config\";\n\nimport type { ChunkSymbol, CodeChunk, EnrichedChunk } from \"./types\";\nimport {\n resolveCrossFileContext,\n type CrossFileContext,\n type CrossFileOptions,\n} from \"./crossfile\";\n\n/**\n * Cached file analysis result\n */\ninterface FileAnalysisCache {\n parseResult: ParseResult;\n symbols: Symbol[];\n imports: Import[];\n exports: Export[];\n crossFileContext?: CrossFileContext;\n}\n\n/**\n * Options for enrichment\n */\nexport interface EnrichmentOptions {\n /** Project root directory for cross-file resolution */\n projectRoot?: string;\n /** Path aliases (e.g., {\"@core\": \"src/core\"}) */\n pathAliases?: Record<string, string>;\n /** Whether to include cross-file context (default: from config) */\n includeCrossFileContext?: boolean;\n}\n\n/**\n * AST cache per file path to avoid re-parsing\n */\nconst astCache = new Map<string, FileAnalysisCache>();\n\n/**\n * Clear the AST cache\n */\nexport function clearASTCache(): void {\n astCache.clear();\n}\n\n// Register cache for centralized clearing\nregisterCache(\"embeddings:astCache\", clearASTCache);\n\n/** Maximum number of imports to include in enriched content */\nconst MAX_IMPORTS = 10;\n\n/** Maximum number of exports to include in enriched content */\nconst MAX_EXPORTS = 10;\n\n/**\n * Get or create file analysis from cache\n */\nasync function getFileAnalysis(\n filePath: string,\n content: string,\n options?: EnrichmentOptions,\n): Promise<FileAnalysisCache | null> {\n // Check cache first\n const cached = astCache.get(filePath);\n if (cached) {\n return cached;\n }\n\n try {\n // Parse the file\n const parseResult = await parseCode(content, { filePath });\n\n // Extract symbols, imports, and exports\n const { symbols } = extractSymbols(\n parseResult.tree,\n parseResult.languageInstance,\n parseResult.language,\n );\n const imports = extractImports(\n parseResult.tree,\n parseResult.languageInstance,\n parseResult.language,\n );\n const exports = extractExports(\n parseResult.tree,\n parseResult.languageInstance,\n parseResult.language,\n );\n\n const analysis: FileAnalysisCache = {\n parseResult,\n symbols,\n imports,\n exports,\n };\n\n // Resolve cross-file context if enabled\n const shouldIncludeCrossFile =\n options?.includeCrossFileContext ??\n ENRICHMENT_CONFIG.includeCrossFileContext;\n\n if (shouldIncludeCrossFile && imports.length > 0 && options?.projectRoot) {\n try {\n const crossFileOptions: CrossFileOptions = {\n projectRoot: options.projectRoot,\n pathAliases: options.pathAliases,\n maxImports: ENRICHMENT_CONFIG.maxImportsToResolve,\n maxSymbolsPerFile: ENRICHMENT_CONFIG.maxSymbolsPerImport,\n };\n\n const crossFileContext = await resolveCrossFileContext(\n imports,\n filePath,\n crossFileOptions,\n );\n\n analysis.crossFileContext = crossFileContext;\n logger.debug(\n `Resolved cross-file context for ${filePath}: ${String(crossFileContext.resolvedImports.length)} imports`,\n );\n } catch (error) {\n logger.debug(\n `Failed to resolve cross-file context for ${filePath}: ${error instanceof Error ? 
error.message : String(error)}`,\n );\n }\n }\n\n // Cache for future chunks from same file\n astCache.set(filePath, analysis);\n\n return analysis;\n } catch (error) {\n logger.debug(\n `Failed to parse ${filePath}: ${error instanceof Error ? error.message : String(error)}`,\n );\n return null;\n }\n}\n\n/**\n * Find symbols that overlap with a chunk's line range\n */\nfunction findSymbolsInRange(\n symbols: Symbol[],\n startLine: number,\n endLine: number,\n): ChunkSymbol[] {\n const chunkSymbols: ChunkSymbol[] = [];\n\n for (const symbol of symbols) {\n // Check if symbol overlaps with chunk's line range\n const symbolStart = symbol.start.line;\n const symbolEnd = symbol.end.line;\n\n // Symbol overlaps if it starts before chunk ends AND ends after chunk starts\n if (symbolStart <= endLine && symbolEnd >= startLine) {\n chunkSymbols.push({\n name: symbol.name,\n type: symbol.type,\n signature: symbol.signature,\n });\n }\n }\n\n return chunkSymbols;\n}\n\n/**\n * Format import sources for enrichment header\n */\nfunction formatImportSources(imports: Import[]): string {\n const sources = imports\n .slice(0, MAX_IMPORTS)\n .map((imp) => imp.source)\n .filter((source) => source.length > 0);\n\n return sources.join(\", \");\n}\n\n/**\n * Format export names for enrichment header\n */\nfunction formatExportNames(exports: Export[]): string {\n const names = exports\n .slice(0, MAX_EXPORTS)\n .map((exp) => exp.name)\n .filter((name) => name.length > 0 && name !== \"default\");\n\n return names.join(\", \");\n}\n\n/**\n * Format symbols for enrichment header\n */\nfunction formatSymbols(symbols: ChunkSymbol[]): string {\n return symbols.map((s) => `${s.name} (${s.type})`).join(\", \");\n}\n\n/**\n * Build enriched content with metadata header\n */\nfunction buildEnrichedContent(\n chunk: CodeChunk,\n symbols: ChunkSymbol[],\n imports: Import[],\n exports: Export[],\n crossFileContext?: CrossFileContext,\n): string {\n const headerLines: string[] = [];\n\n // Always include file path\n headerLines.push(`File: ${chunk.filePath}`);\n\n // Always include language\n headerLines.push(`Language: ${chunk.language}`);\n\n // Include symbols if present\n if (symbols.length > 0) {\n headerLines.push(`Symbols: ${formatSymbols(symbols)}`);\n }\n\n // Include imports if present\n if (imports.length > 0) {\n const importStr = formatImportSources(imports);\n if (importStr.length > 0) {\n headerLines.push(`Imports: ${importStr}`);\n }\n }\n\n // Include exports if present\n if (exports.length > 0) {\n const exportStr = formatExportNames(exports);\n if (exportStr.length > 0) {\n headerLines.push(`Exports: ${exportStr}`);\n }\n }\n\n // Include cross-file context (resolved import definitions)\n if (crossFileContext && crossFileContext.importedSymbolsSummary.length > 0) {\n headerLines.push(\n `Imported definitions:\\n${crossFileContext.importedSymbolsSummary}`,\n );\n }\n\n // Always have header with at least file path and language\n return headerLines.join(\"\\n\") + \"\\n\\n---\\n\" + chunk.content;\n}\n\n/**\n * Enrich a single chunk with semantic metadata\n *\n * Note: For multiple chunks from the same file, use `enrichChunksFromFile`\n * which is more efficient as it parses the file only once.\n */\nexport async function enrichChunk(\n chunk: CodeChunk,\n content: string,\n options?: EnrichmentOptions,\n): Promise<EnrichedChunk> {\n const analysis = await getFileAnalysis(chunk.filePath, content, options);\n\n if (!analysis) {\n // Fallback: return with basic enrichment (file path and language only)\n const 
basicHeader = `File: ${chunk.filePath}\\nLanguage: ${chunk.language}\\n\\n---\\n`;\n return {\n ...chunk,\n enrichedContent: basicHeader + chunk.content,\n containedSymbols: [],\n wasEnriched: false,\n };\n }\n\n // Find symbols in this chunk's range\n const chunkSymbols = findSymbolsInRange(\n analysis.symbols,\n chunk.startLine,\n chunk.endLine,\n );\n\n // Build enriched content with cross-file context\n const enrichedContent = buildEnrichedContent(\n chunk,\n chunkSymbols,\n analysis.imports,\n analysis.exports,\n analysis.crossFileContext,\n );\n\n return {\n ...chunk,\n enrichedContent,\n containedSymbols: chunkSymbols,\n wasEnriched: true,\n };\n}\n\n/**\n * Enrich all chunks from a single file (optimized - parses once)\n */\nexport async function enrichChunksFromFile(\n chunks: CodeChunk[],\n content: string,\n options?: EnrichmentOptions,\n): Promise<EnrichedChunk[]> {\n if (chunks.length === 0) {\n return [];\n }\n\n // All chunks should be from the same file\n const filePath = chunks[0]?.filePath;\n if (!filePath) {\n return chunks.map((chunk) => {\n const basicHeader = `File: ${chunk.filePath}\\nLanguage: ${chunk.language}\\n\\n---\\n`;\n return {\n ...chunk,\n enrichedContent: basicHeader + chunk.content,\n containedSymbols: [],\n wasEnriched: false,\n };\n });\n }\n\n // Parse once for all chunks\n const analysis = await getFileAnalysis(filePath, content, options);\n\n if (!analysis) {\n // Fallback: return with basic enrichment\n return chunks.map((chunk) => {\n const basicHeader = `File: ${chunk.filePath}\\nLanguage: ${chunk.language}\\n\\n---\\n`;\n return {\n ...chunk,\n enrichedContent: basicHeader + chunk.content,\n containedSymbols: [],\n wasEnriched: false,\n };\n });\n }\n\n // Enrich each chunk using the cached analysis with cross-file context\n return chunks.map((chunk) => {\n const chunkSymbols = findSymbolsInRange(\n analysis.symbols,\n chunk.startLine,\n chunk.endLine,\n );\n\n const enrichedContent = buildEnrichedContent(\n chunk,\n chunkSymbols,\n analysis.imports,\n analysis.exports,\n analysis.crossFileContext,\n );\n\n return {\n ...chunk,\n enrichedContent,\n containedSymbols: chunkSymbols,\n wasEnriched: true,\n };\n });\n}\n\n/**\n * Enrich chunks from multiple files\n *\n * Groups chunks by file path for efficient processing.\n */\nexport async function enrichChunks(\n chunks: CodeChunk[],\n fileContents: Map<string, string>,\n options?: EnrichmentOptions,\n): Promise<EnrichedChunk[]> {\n // Group chunks by file path\n const chunksByFile = new Map<string, CodeChunk[]>();\n for (const chunk of chunks) {\n const existing = chunksByFile.get(chunk.filePath) ?? 
[];\n existing.push(chunk);\n chunksByFile.set(chunk.filePath, existing);\n }\n\n // Process each file's chunks\n const enrichedByFile = new Map<string, EnrichedChunk[]>();\n\n for (const [filePath, fileChunks] of chunksByFile) {\n const content = fileContents.get(filePath);\n if (!content) {\n // No content available, return with basic enrichment\n enrichedByFile.set(\n filePath,\n fileChunks.map((chunk) => {\n const basicHeader = `File: ${chunk.filePath}\\nLanguage: ${chunk.language}\\n\\n---\\n`;\n return {\n ...chunk,\n enrichedContent: basicHeader + chunk.content,\n containedSymbols: [],\n wasEnriched: false,\n };\n }),\n );\n continue;\n }\n\n const enriched = await enrichChunksFromFile(fileChunks, content, options);\n enrichedByFile.set(filePath, enriched);\n }\n\n // Reconstruct in original order\n return chunks.map((chunk) => {\n const fileEnriched = enrichedByFile.get(chunk.filePath);\n if (!fileEnriched) {\n const basicHeader = `File: ${chunk.filePath}\\nLanguage: ${chunk.language}\\n\\n---\\n`;\n return {\n ...chunk,\n enrichedContent: basicHeader + chunk.content,\n containedSymbols: [],\n wasEnriched: false,\n };\n }\n\n // Find matching enriched chunk by id\n const enriched = fileEnriched.find((e) => e.id === chunk.id);\n if (enriched) {\n return enriched;\n }\n\n const basicHeader = `File: ${chunk.filePath}\\nLanguage: ${chunk.language}\\n\\n---\\n`;\n return {\n ...chunk,\n enrichedContent: basicHeader + chunk.content,\n containedSymbols: [],\n wasEnriched: false,\n };\n });\n}\n\n/**\n * Get cache statistics\n */\nexport function getASTCacheStats(): { files: number; entries: string[] } {\n return {\n files: astCache.size,\n entries: Array.from(astCache.keys()),\n };\n}\n","/**\n * File watcher for automatic index updates\n *\n * Features:\n * - SHA-256 hash comparison to detect real content changes\n * - Debounce (5s default) to handle rapid changes\n * - Persistent hash cache to avoid unnecessary re-indexing\n * - fast-glob for efficient file scanning\n */\n\nimport * as fs from \"node:fs\";\nimport * as path from \"node:path\";\nimport * as crypto from \"node:crypto\";\nimport { watch, type FSWatcher } from \"chokidar\";\nimport fg from \"fast-glob\";\nimport ignore, { type Ignore } from \"ignore\";\nimport type { EmbeddingConfig } from \"@core/embeddings/types\";\nimport { OllamaClient } from \"@core/embeddings/client\";\nimport { VectorStore } from \"@core/embeddings/store\";\nimport {\n chunkFile,\n shouldIndexFile,\n SUPPORTED_EXTENSIONS,\n} from \"@core/embeddings/chunker\";\nimport { enrichChunksFromFile } from \"@core/embeddings/enricher\";\nimport { logger } from \"@utils\";\n\n/** Default debounce delay in milliseconds */\nconst DEFAULT_DEBOUNCE_MS = 5000;\n\n/** Cache file name for storing hashes */\nconst HASH_CACHE_FILE = \".src-index-hashes.json\";\n\nexport interface WatcherOptions {\n directory: string;\n config: EmbeddingConfig;\n /** Debounce delay in ms (default: 5000) */\n debounceMs?: number;\n onReady?: () => void;\n onError?: (error: Error) => void;\n onIndexed?: (filePath: string) => void;\n onRemoved?: (filePath: string) => void;\n}\n\ntype HashCache = Record<string, string>;\n\ninterface PendingChange {\n type: \"add\" | \"change\" | \"unlink\";\n filePath: string;\n timer: ReturnType<typeof setTimeout>;\n}\n\nexport class IndexWatcher {\n private readonly directory: string;\n private readonly config: EmbeddingConfig;\n private readonly debounceMs: number;\n private readonly ollamaClient: OllamaClient;\n private readonly vectorStore: 
VectorStore;\n private watcher: FSWatcher | null = null;\n private ig: Ignore;\n private isProcessing = false;\n private hashCache: HashCache = {};\n private pendingChanges = new Map<string, PendingChange>();\n private operationQueue: (() => Promise<void>)[] = [];\n\n private readonly onReady?: () => void;\n private readonly onError?: (error: Error) => void;\n private readonly onIndexed?: (filePath: string) => void;\n private readonly onRemoved?: (filePath: string) => void;\n\n constructor(options: WatcherOptions) {\n this.directory = path.resolve(options.directory);\n this.config = options.config;\n this.debounceMs = options.debounceMs ?? DEFAULT_DEBOUNCE_MS;\n this.ollamaClient = new OllamaClient(options.config);\n this.vectorStore = new VectorStore(this.directory, options.config);\n this.ig = this.createIgnoreFilter();\n\n this.onReady = options.onReady;\n this.onError = options.onError;\n this.onIndexed = options.onIndexed;\n this.onRemoved = options.onRemoved;\n\n this.loadHashCache();\n }\n\n /**\n * Compute SHA-256 hash of content\n */\n private computeHash(content: string): string {\n return crypto.createHash(\"sha256\").update(content, \"utf8\").digest(\"hex\");\n }\n\n /**\n * Get hash cache file path\n */\n private getHashCachePath(): string {\n return path.join(this.directory, \".src-index\", HASH_CACHE_FILE);\n }\n\n /**\n * Load hash cache from disk\n */\n private loadHashCache(): void {\n const cachePath = this.getHashCachePath();\n\n if (fs.existsSync(cachePath)) {\n try {\n const content = fs.readFileSync(cachePath, \"utf-8\");\n this.hashCache = JSON.parse(content) as HashCache;\n logger.debug(\n `Loaded ${String(Object.keys(this.hashCache).length)} cached hashes`,\n );\n } catch {\n this.hashCache = {};\n }\n }\n }\n\n /**\n * Save hash cache to disk\n */\n private saveHashCache(): void {\n const cachePath = this.getHashCachePath();\n const cacheDir = path.dirname(cachePath);\n\n try {\n if (!fs.existsSync(cacheDir)) {\n fs.mkdirSync(cacheDir, { recursive: true });\n }\n fs.writeFileSync(cachePath, JSON.stringify(this.hashCache, null, 2));\n } catch (err) {\n const error = err instanceof Error ? 
err : new Error(String(err));\n logger.debug(`Failed to save hash cache: ${error.message}`);\n }\n }\n\n /**\n * Check if file content has changed by comparing hashes\n */\n private hasContentChanged(filePath: string, content: string): boolean {\n const newHash = this.computeHash(content);\n const oldHash = this.hashCache[filePath];\n\n if (oldHash === newHash) {\n return false;\n }\n\n this.hashCache[filePath] = newHash;\n return true;\n }\n\n /**\n * Remove file from hash cache\n */\n private removeFromHashCache(filePath: string): void {\n const { [filePath]: _, ...rest } = this.hashCache;\n this.hashCache = rest;\n }\n\n /**\n * Create ignore filter from .gitignore\n */\n private createIgnoreFilter(): Ignore {\n const ig = ignore();\n const gitignorePath = path.join(this.directory, \".gitignore\");\n\n if (fs.existsSync(gitignorePath)) {\n try {\n const content = fs.readFileSync(gitignorePath, \"utf-8\");\n ig.add(content);\n } catch {\n // Ignore read errors\n }\n }\n\n return ig;\n }\n\n /**\n * Check if a file should be indexed\n */\n private shouldIndex(filePath: string): boolean {\n const relativePath = path\n .relative(this.directory, filePath)\n .replace(/\\\\/g, \"/\");\n\n // Skip hidden files/folders\n if (relativePath.split(\"/\").some((part) => part.startsWith(\".\"))) {\n return false;\n }\n\n // Skip gitignore patterns\n if (this.ig.ignores(relativePath)) {\n return false;\n }\n\n return shouldIndexFile(filePath);\n }\n\n /**\n * Schedule a file change with debouncing\n */\n private scheduleChange(\n type: \"add\" | \"change\" | \"unlink\",\n filePath: string,\n ): void {\n const existing = this.pendingChanges.get(filePath);\n if (existing) {\n clearTimeout(existing.timer);\n }\n\n const timer = setTimeout(() => {\n this.pendingChanges.delete(filePath);\n this.queueOperation(async () => this.processChange(type, filePath));\n }, this.debounceMs);\n\n this.pendingChanges.set(filePath, { type, filePath, timer });\n\n logger.debug(\n `Scheduled ${type}: ${path.basename(filePath)} (${String(this.debounceMs)}ms)`,\n );\n }\n\n /**\n * Process a file change after debounce\n */\n private async processChange(\n type: \"add\" | \"change\" | \"unlink\",\n filePath: string,\n ): Promise<void> {\n if (type === \"unlink\") {\n await this.removeFile(filePath);\n } else {\n await this.indexFile(filePath);\n }\n }\n\n /**\n * Index a single file\n */\n private async indexFile(filePath: string): Promise<void> {\n if (!this.shouldIndex(filePath)) {\n return;\n }\n\n try {\n const content = fs.readFileSync(filePath, \"utf-8\");\n\n // Skip if content unchanged\n if (!this.hasContentChanged(filePath, content)) {\n logger.debug(`Skipped (unchanged): ${path.basename(filePath)}`);\n return;\n }\n\n const chunks = await chunkFile(filePath, content, this.config);\n\n if (chunks.length === 0) {\n return;\n }\n\n // Enrich chunks with semantic metadata\n const enrichedChunks = await enrichChunksFromFile(chunks, content);\n\n // Use enrichedContent for embedding\n const texts = enrichedChunks.map((c) => c.enrichedContent);\n const embeddings = await this.ollamaClient.embedBatch(texts);\n\n // Store original chunk data (without enrichedContent)\n const embeddedChunks = enrichedChunks.map((chunk, i) => ({\n id: chunk.id,\n content: chunk.content,\n filePath: chunk.filePath,\n language: chunk.language,\n startLine: chunk.startLine,\n endLine: chunk.endLine,\n symbolName: chunk.symbolName,\n symbolType: chunk.symbolType,\n vector: embeddings[i] ?? 
[],\n }));\n\n await this.vectorStore.deleteByFilePath(filePath);\n await this.vectorStore.addChunks(embeddedChunks);\n\n this.saveHashCache();\n\n logger.debug(`Indexed: ${path.relative(this.directory, filePath)}`);\n this.onIndexed?.(filePath);\n } catch (err) {\n const error = err instanceof Error ? err : new Error(String(err));\n logger.error(`Failed to index ${filePath}: ${error.message}`);\n this.onError?.(error);\n }\n }\n\n /**\n * Remove a file from the index\n */\n private async removeFile(filePath: string): Promise<void> {\n try {\n await this.vectorStore.deleteByFilePath(filePath);\n this.removeFromHashCache(filePath);\n this.saveHashCache();\n\n logger.debug(`Removed: ${path.relative(this.directory, filePath)}`);\n this.onRemoved?.(filePath);\n } catch (err) {\n const error = err instanceof Error ? err : new Error(String(err));\n logger.error(`Failed to remove ${filePath}: ${error.message}`);\n this.onError?.(error);\n }\n }\n\n /**\n * Queue an operation to prevent concurrent modifications\n */\n private queueOperation(operation: () => Promise<void>): void {\n this.operationQueue.push(operation);\n void this.processQueue();\n }\n\n /**\n * Process queued operations sequentially\n */\n private async processQueue(): Promise<void> {\n if (this.isProcessing) {\n return;\n }\n\n this.isProcessing = true;\n\n while (this.operationQueue.length > 0) {\n const operation = this.operationQueue.shift();\n if (operation) {\n try {\n await operation();\n } catch (err) {\n const error = err instanceof Error ? err : new Error(String(err));\n logger.error(`Operation failed: ${error.message}`);\n }\n }\n }\n\n this.isProcessing = false;\n }\n\n /**\n * Collect files using fast-glob\n */\n private async collectFilesWithGlob(): Promise<string[]> {\n const extensions = SUPPORTED_EXTENSIONS.map((ext) => ext.slice(1));\n const pattern = `**/*.{${extensions.join(\",\")}}`;\n\n const files = await fg(pattern, {\n cwd: this.directory,\n absolute: true,\n ignore: [\"**/.*\", \"**/.*/**\"],\n dot: false,\n onlyFiles: true,\n followSymbolicLinks: false,\n });\n\n // Filter by gitignore\n return files.filter((file) => {\n const relativePath = path\n .relative(this.directory, file)\n .replace(/\\\\/g, \"/\");\n return !this.ig.ignores(relativePath);\n });\n }\n\n /**\n * Perform full initial indexing\n */\n private async fullIndex(): Promise<void> {\n logger.info(\"Starting full index...\");\n\n const files = await this.collectFilesWithGlob();\n let indexed = 0;\n let skipped = 0;\n\n for (const filePath of files) {\n try {\n const content = fs.readFileSync(filePath, \"utf-8\");\n\n if (!this.hasContentChanged(filePath, content)) {\n skipped++;\n continue;\n }\n\n const chunks = await chunkFile(filePath, content, this.config);\n\n if (chunks.length === 0) {\n continue;\n }\n\n // Enrich chunks with semantic metadata\n const enrichedChunks = await enrichChunksFromFile(chunks, content);\n\n // Use enrichedContent for embedding\n const texts = enrichedChunks.map((c) => c.enrichedContent);\n const embeddings = await this.ollamaClient.embedBatch(texts);\n\n // Store original chunk data (without enrichedContent)\n const embeddedChunks = enrichedChunks.map((chunk, i) => ({\n id: chunk.id,\n content: chunk.content,\n filePath: chunk.filePath,\n language: chunk.language,\n startLine: chunk.startLine,\n endLine: chunk.endLine,\n symbolName: chunk.symbolName,\n symbolType: chunk.symbolType,\n vector: embeddings[i] ?? 
[],\n }));\n\n await this.vectorStore.addChunks(embeddedChunks);\n indexed++;\n } catch (err) {\n const error = err instanceof Error ? err : new Error(String(err));\n logger.debug(`Error indexing ${filePath}: ${error.message}`);\n }\n }\n\n this.saveHashCache();\n\n logger.info(\n `Full index: ${String(indexed)} indexed, ${String(skipped)} skipped`,\n );\n }\n\n /**\n * Start watching for file changes\n */\n async start(): Promise<void> {\n const health = await this.ollamaClient.healthCheck();\n if (!health.ok) {\n throw new Error(health.error ?? \"Ollama is not available\");\n }\n\n await this.vectorStore.connect();\n\n if (!this.vectorStore.exists()) {\n await this.fullIndex();\n }\n\n this.watcher = watch(this.directory, {\n ignored: (filePath: string) => {\n const relativePath = path\n .relative(this.directory, filePath)\n .replace(/\\\\/g, \"/\");\n // Skip empty paths or root directory\n if (!relativePath) {\n return false;\n }\n // Skip hidden files/folders\n if (relativePath.split(\"/\").some((part) => part.startsWith(\".\"))) {\n return true;\n }\n return this.ig.ignores(relativePath);\n },\n persistent: true,\n ignoreInitial: true,\n awaitWriteFinish: {\n stabilityThreshold: 500,\n pollInterval: 100,\n },\n });\n\n this.watcher.on(\"add\", (filePath: string) => {\n if (shouldIndexFile(filePath)) {\n this.scheduleChange(\"add\", filePath);\n }\n });\n\n this.watcher.on(\"change\", (filePath: string) => {\n if (shouldIndexFile(filePath)) {\n this.scheduleChange(\"change\", filePath);\n }\n });\n\n this.watcher.on(\"unlink\", (filePath: string) => {\n if (shouldIndexFile(filePath)) {\n this.scheduleChange(\"unlink\", filePath);\n }\n });\n\n this.watcher.on(\"ready\", () => {\n logger.info(\n `Watching: ${this.directory} (${String(this.debounceMs)}ms debounce)`,\n );\n this.onReady?.();\n });\n\n this.watcher.on(\"error\", (err: unknown) => {\n const error = err instanceof Error ? 
err : new Error(String(err));\n logger.error(`Watcher error: ${error.message}`);\n this.onError?.(error);\n });\n }\n\n /**\n * Stop watching and cleanup\n */\n async stop(): Promise<void> {\n for (const pending of this.pendingChanges.values()) {\n clearTimeout(pending.timer);\n }\n this.pendingChanges.clear();\n\n this.saveHashCache();\n\n if (this.watcher) {\n await this.watcher.close();\n this.watcher = null;\n }\n this.vectorStore.close();\n logger.info(\"Watcher stopped\");\n }\n\n /**\n * Check if watcher is running\n */\n isRunning(): boolean {\n return this.watcher !== null;\n }\n\n /**\n * Clear the hash cache\n */\n clearCache(): void {\n this.hashCache = {};\n const cachePath = this.getHashCachePath();\n if (fs.existsSync(cachePath)) {\n fs.unlinkSync(cachePath);\n }\n logger.info(\"Hash cache cleared\");\n }\n\n /**\n * Get cache statistics\n */\n getCacheStats(): { cachedFiles: number; cacheSize: number } {\n return {\n cachedFiles: Object.keys(this.hashCache).length,\n cacheSize: JSON.stringify(this.hashCache).length,\n };\n }\n}\n\n/**\n * Create a new index watcher\n */\nexport function createIndexWatcher(options: WatcherOptions): IndexWatcher {\n return new IndexWatcher(options);\n}\n","/**\n * Re-ranking module for improving search result relevance\n *\n * Uses Ollama LLM to score query-document pairs and reorder results\n * based on semantic relevance rather than just vector similarity.\n */\n\nimport type { SearchResult } from \"@core/embeddings/types\";\nimport { logger } from \"@utils\";\n\n/**\n * Re-ranking options\n */\nexport interface RerankerOptions {\n /** Ollama base URL */\n ollamaBaseUrl: string;\n /** Model to use for re-ranking (default: llama3.2) */\n model?: string;\n /** Maximum number of results to re-rank (default: 20) */\n maxResults?: number;\n /** Timeout in milliseconds (default: 30000) */\n timeout?: number;\n}\n\n/**\n * Re-ranked result with LLM score\n */\nexport interface RerankedResult extends SearchResult {\n /** Original search score */\n originalScore: number;\n /** LLM relevance score (0-10) */\n rerankScore: number;\n}\n\n/**\n * Parse LLM response to extract relevance score\n */\nfunction parseScore(response: string): number {\n // Try to extract a number from the response\n const match = /\\b(\\d+(?:\\.\\d+)?)\\b/.exec(response);\n if (match?.[1]) {\n const score = parseFloat(match[1]);\n // Normalize to 0-10 range\n if (score >= 0 && score <= 10) {\n return score;\n }\n if (score > 10 && score <= 100) {\n return score / 10;\n }\n }\n // Default to middle score if parsing fails\n return 5;\n}\n\n/**\n * Score a single query-document pair using Ollama\n */\nasync function scoreResult(\n query: string,\n content: string,\n options: RerankerOptions,\n): Promise<number> {\n const model = options.model ?? \"llama3.2\";\n const timeout = options.timeout ?? 
30000;\n\n const prompt = `Rate the relevance of the following code snippet to the search query on a scale of 0-10.\n0 = completely irrelevant\n5 = somewhat relevant\n10 = highly relevant and directly answers the query\n\nQuery: \"${query}\"\n\nCode:\n\\`\\`\\`\n${content.slice(0, 1000)}\n\\`\\`\\`\n\nRespond with ONLY a number between 0 and 10.`;\n\n try {\n const response = await fetch(`${options.ollamaBaseUrl}/api/generate`, {\n method: \"POST\",\n headers: { \"Content-Type\": \"application/json\" },\n body: JSON.stringify({\n model,\n prompt,\n stream: false,\n options: {\n temperature: 0,\n num_predict: 10, // We only need a short response\n },\n }),\n signal: AbortSignal.timeout(timeout),\n });\n\n if (!response.ok) {\n logger.warn(`Re-ranking request failed: ${response.statusText}`);\n return 5; // Default score\n }\n\n const data = (await response.json()) as { response?: string };\n return parseScore(data.response ?? \"5\");\n } catch (error) {\n logger.warn(\n `Re-ranking error: ${error instanceof Error ? error.message : String(error)}`,\n );\n return 5; // Default score on error\n }\n}\n\n/**\n * Re-rank search results using LLM scoring\n *\n * Takes initial search results and re-scores them based on\n * semantic relevance to the query using an LLM.\n */\nexport async function rerank(\n query: string,\n results: SearchResult[],\n options: RerankerOptions,\n): Promise<RerankedResult[]> {\n const maxResults = options.maxResults ?? 20;\n\n // Limit results to re-rank for performance\n const toRerank = results.slice(0, maxResults);\n\n if (toRerank.length === 0) {\n return [];\n }\n\n logger.debug(`Re-ranking ${String(toRerank.length)} results for: ${query}`);\n\n // Score all results in parallel (with some concurrency limit)\n const batchSize = 5;\n const rerankedResults: RerankedResult[] = [];\n\n for (let i = 0; i < toRerank.length; i += batchSize) {\n const batch = toRerank.slice(i, i + batchSize);\n const scores = await Promise.all(\n batch.map(async (result) =>\n scoreResult(query, result.chunk.content, options),\n ),\n );\n\n for (let j = 0; j < batch.length; j++) {\n const result = batch[j];\n const score = scores[j];\n if (result !== undefined && score !== undefined) {\n rerankedResults.push({\n ...result,\n originalScore: result.score,\n rerankScore: score,\n score: score, // Use rerank score as the new score\n });\n }\n }\n }\n\n // Sort by rerank score (higher is better)\n rerankedResults.sort((a, b) => b.rerankScore - a.rerankScore);\n\n logger.debug(\n `Re-ranking complete, top score: ${String(rerankedResults[0]?.rerankScore ?? 
0)}`,\n );\n\n return rerankedResults;\n}\n\n/**\n * Create a reranker function with preset options\n */\nexport function createReranker(\n options: RerankerOptions,\n): (query: string, results: SearchResult[]) => Promise<RerankedResult[]> {\n return async (query: string, results: SearchResult[]) =>\n rerank(query, results, options);\n}\n","/**\n * Call graph extraction and storage\n *\n * Extracts function call relationships from code using tree-sitter\n * to build a graph showing which functions call which.\n *\n * Features:\n * - Persistent caching in .src-index/call-graph.json\n * - Hash-based invalidation for changed files\n */\n\nimport { Query } from \"web-tree-sitter\";\nimport * as fs from \"node:fs\";\nimport * as path from \"node:path\";\nimport * as crypto from \"node:crypto\";\nimport type { Position, Symbol } from \"@core/ast/types\";\nimport { parseCode, type ParseResult } from \"@core/parser\";\nimport { extractSymbols } from \"@core/symbols\";\nimport { registerCache } from \"@core/utils\";\nimport { logger } from \"@utils\";\n\n/**\n * A function call found in code\n */\nexport interface FunctionCall {\n /** Name of the called function */\n callee: string;\n /** Position of the call */\n position: Position;\n /** Arguments passed (if extractable) */\n arguments?: string[];\n}\n\n/**\n * A node in the call graph\n */\nexport interface CallGraphNode {\n /** Function/method name */\n name: string;\n /** Full qualified name (file:function) */\n qualifiedName: string;\n /** File path */\n filePath: string;\n /** Function type */\n type: string;\n /** Start position */\n start: Position;\n /** End position */\n end: Position;\n /** Functions this node calls */\n calls: string[];\n /** Functions that call this node */\n calledBy: string[];\n}\n\n/**\n * The complete call graph for a codebase\n */\nexport interface CallGraph {\n /** All nodes in the graph */\n nodes: Map<string, CallGraphNode>;\n /** File paths included in the graph */\n files: string[];\n /** Total number of call edges */\n edgeCount: number;\n}\n\n/**\n * Serializable call graph for persistent cache\n */\ninterface SerializedCallGraph {\n nodes: Record<string, CallGraphNode>;\n files: string[];\n edgeCount: number;\n fileHashes: Record<string, string>;\n timestamp: number;\n}\n\n/**\n * Compute SHA-256 hash of content\n */\nfunction computeHash(content: string): string {\n return crypto.createHash(\"sha256\").update(content).digest(\"hex\").slice(0, 16);\n}\n\n/**\n * Get call graph cache path for a directory\n */\nfunction getCachePath(directory: string): string {\n return path.join(directory, \".src-index\", \"call-graph.json\");\n}\n\n/**\n * Save call graph to persistent cache\n */\nfunction saveCallGraphCache(\n directory: string,\n graph: CallGraph,\n fileHashes: Record<string, string>,\n): void {\n try {\n const cachePath = getCachePath(directory);\n const cacheDir = path.dirname(cachePath);\n\n // Ensure cache directory exists\n if (!fs.existsSync(cacheDir)) {\n fs.mkdirSync(cacheDir, { recursive: true });\n }\n\n const serialized: SerializedCallGraph = {\n nodes: Object.fromEntries(graph.nodes),\n files: graph.files,\n edgeCount: graph.edgeCount,\n fileHashes,\n timestamp: Date.now(),\n };\n\n fs.writeFileSync(cachePath, JSON.stringify(serialized), \"utf-8\");\n logger.debug(`Call graph cache saved: ${String(graph.nodes.size)} nodes`);\n } catch {\n // Silently ignore cache save errors (directory not writable, etc.)\n logger.debug(\"Call graph cache save skipped: directory not writable\");\n 
}\n}\n\n/**\n * Load call graph from persistent cache if valid\n */\nfunction loadCallGraphCache(\n directory: string,\n currentHashes: Record<string, string>,\n): CallGraph | null {\n const cachePath = getCachePath(directory);\n\n if (!fs.existsSync(cachePath)) {\n return null;\n }\n\n try {\n const content = fs.readFileSync(cachePath, \"utf-8\");\n const cached = JSON.parse(content) as SerializedCallGraph;\n\n // Validate hashes - check if any file has changed\n const cachedFiles = new Set(Object.keys(cached.fileHashes));\n const currentFiles = new Set(Object.keys(currentHashes));\n\n // Check for added or removed files\n if (cachedFiles.size !== currentFiles.size) {\n logger.debug(\"Call graph cache invalid: file count changed\");\n return null;\n }\n\n // Check for modified files\n for (const [filePath, hash] of Object.entries(currentHashes)) {\n if (cached.fileHashes[filePath] !== hash) {\n logger.debug(`Call graph cache invalid: ${filePath} changed`);\n return null;\n }\n }\n\n // Cache is valid - restore the Map\n const nodes = new Map<string, CallGraphNode>(Object.entries(cached.nodes));\n\n logger.debug(`Call graph cache loaded: ${String(nodes.size)} nodes`);\n\n return {\n nodes,\n files: cached.files,\n edgeCount: cached.edgeCount,\n };\n } catch (error) {\n logger.debug(\n `Failed to load call graph cache: ${error instanceof Error ? error.message : String(error)}`,\n );\n return null;\n }\n}\n\n/**\n * Call graph cache per file\n */\ninterface FileCallData {\n symbols: Symbol[];\n calls: Map<string, FunctionCall[]>; // symbol name -> calls made\n}\n\nconst callGraphCache = new Map<string, FileCallData>();\n\n/**\n * Clear the call graph cache\n */\nexport function clearCallGraphCache(): void {\n callGraphCache.clear();\n}\n\n// Register cache for centralized clearing\nregisterCache(\"embeddings:callGraphCache\", clearCallGraphCache);\n\n/**\n * Extract function calls from a tree-sitter node\n */\nfunction extractCallsFromTree(\n tree: ParseResult[\"tree\"],\n languageInstance: ParseResult[\"languageInstance\"],\n language: string,\n): Map<string, FunctionCall[]> {\n const callsBySymbol = new Map<string, FunctionCall[]>();\n\n // Query patterns for function calls in different languages\n const callPatterns: Record<string, string> = {\n typescript: `\n (call_expression\n function: [(identifier) @callee\n (member_expression property: (property_identifier) @callee)]\n arguments: (arguments) @args)\n `,\n javascript: `\n (call_expression\n function: [(identifier) @callee\n (member_expression property: (property_identifier) @callee)]\n arguments: (arguments) @args)\n `,\n python: `\n (call\n function: [(identifier) @callee\n (attribute attribute: (identifier) @callee)]\n arguments: (argument_list) @args)\n `,\n go: `\n (call_expression\n function: [(identifier) @callee\n (selector_expression field: (field_identifier) @callee)]\n arguments: (argument_list) @args)\n `,\n };\n\n const pattern = callPatterns[language];\n if (!pattern) {\n return callsBySymbol;\n }\n\n try {\n const query = new Query(languageInstance, pattern);\n const matches = query.matches(tree.rootNode);\n\n // Extract callee names from matches\n const callCaptures: { callee: string; position: Position }[] = [];\n\n for (const match of matches) {\n for (const capture of match.captures) {\n if (capture.name === \"callee\") {\n callCaptures.push({\n callee: capture.node.text,\n position: {\n line: capture.node.startPosition.row + 1,\n column: capture.node.startPosition.column,\n offset: capture.node.startIndex,\n },\n 
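// tree-sitter rows are 0-based, hence the +1 above; columns remain 0-based.\n 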
});\n }\n }\n\n // For now, store all calls without symbol association\n // A more sophisticated approach would track which symbol contains each call\n if (callCaptures.length > 0) {\n callsBySymbol.set(\n \"__global__\",\n callCaptures.map((c) => ({\n callee: c.callee,\n position: c.position,\n })),\n );\n }\n } catch (error) {\n logger.debug(\n `Failed to extract calls for ${language}: ${error instanceof Error ? error.message : String(error)}`,\n );\n }\n\n return callsBySymbol;\n}\n\n/**\n * Associate calls with their containing symbols\n */\nfunction associateCallsWithSymbols(\n symbols: Symbol[],\n allCalls: FunctionCall[],\n): Map<string, FunctionCall[]> {\n const callsBySymbol = new Map<string, FunctionCall[]>();\n\n // Sort symbols by start offset for efficient lookup\n const sortedSymbols = [...symbols]\n .filter((s) => s.type === \"function\" || s.type === \"method\")\n .sort((a, b) => a.start.offset - b.start.offset);\n\n for (const call of allCalls) {\n // Find the symbol that contains this call\n let containingSymbol: Symbol | null = null;\n\n for (const symbol of sortedSymbols) {\n if (\n call.position.offset >= symbol.start.offset &&\n call.position.offset <= symbol.end.offset\n ) {\n containingSymbol = symbol;\n } else if (call.position.offset < symbol.start.offset) {\n // Symbols are sorted by start offset, so no later symbol can contain this call\n break;\n }\n }\n\n const symbolName = containingSymbol?.name ?? \"__global__\";\n const existing = callsBySymbol.get(symbolName) ?? [];\n existing.push(call);\n callsBySymbol.set(symbolName, existing);\n }\n\n return callsBySymbol;\n}\n\n/**\n * Analyze a file and extract call graph data\n */\nexport async function analyzeFileForCallGraph(\n filePath: string,\n content: string,\n): Promise<FileCallData | null> {\n // Check cache\n const cached = callGraphCache.get(filePath);\n if (cached) {\n return cached;\n }\n\n try {\n const parseResult = await parseCode(content, { filePath });\n\n const { symbols } = extractSymbols(\n parseResult.tree,\n parseResult.languageInstance,\n parseResult.language,\n );\n\n const callsMap = extractCallsFromTree(\n parseResult.tree,\n parseResult.languageInstance,\n parseResult.language,\n );\n\n // Get all calls and associate with symbols\n const allCalls = callsMap.get(\"__global__\") ?? [];\n const callsBySymbol = associateCallsWithSymbols(symbols, allCalls);\n\n const data: FileCallData = {\n symbols,\n calls: callsBySymbol,\n };\n\n callGraphCache.set(filePath, data);\n return data;\n } catch (error) {\n logger.debug(\n `Failed to analyze ${filePath} for call graph: ${error instanceof Error ? 
error.message : String(error)}`,\n );\n return null;\n }\n}\n\n/**\n * Build a call graph from multiple files\n *\n * Uses persistent caching with hash-based invalidation for performance.\n */\nexport async function buildCallGraph(\n files: { path: string; content: string }[],\n): Promise<CallGraph> {\n if (files.length === 0) {\n return { nodes: new Map(), files: [], edgeCount: 0 };\n }\n\n // Compute hashes for all files\n const fileHashes: Record<string, string> = {};\n for (const file of files) {\n fileHashes[file.path] = computeHash(file.content);\n }\n\n // Determine base directory from common path prefix\n const baseDir = findCommonDirectory(files.map((f) => f.path));\n\n // Try to load from persistent cache\n const cached = loadCallGraphCache(baseDir, fileHashes);\n if (cached) {\n return cached;\n }\n\n // Build the call graph\n const nodes = new Map<string, CallGraphNode>();\n const filePaths: string[] = [];\n let edgeCount = 0;\n\n // First pass: collect all symbols\n for (const file of files) {\n filePaths.push(file.path);\n const data = await analyzeFileForCallGraph(file.path, file.content);\n\n if (!data) {\n continue;\n }\n\n // Create nodes for all functions/methods\n for (const symbol of data.symbols) {\n if (symbol.type === \"function\" || symbol.type === \"method\") {\n const qualifiedName = `${file.path}:${symbol.name}`;\n nodes.set(qualifiedName, {\n name: symbol.name,\n qualifiedName,\n filePath: file.path,\n type: symbol.type,\n start: symbol.start,\n end: symbol.end,\n calls: [],\n calledBy: [],\n });\n }\n }\n }\n\n // Second pass: build edges\n for (const file of files) {\n const data = callGraphCache.get(file.path);\n if (!data) {\n continue;\n }\n\n for (const [symbolName, calls] of data.calls) {\n const callerKey = `${file.path}:${symbolName}`;\n const callerNode = nodes.get(callerKey);\n\n if (!callerNode && symbolName !== \"__global__\") {\n continue;\n }\n\n for (const call of calls) {\n // Try to find the callee in our nodes\n // This is a simplified approach - in reality we'd need to resolve imports\n for (const [nodeKey, node] of nodes) {\n if (node.name === call.callee) {\n // Add edge\n if (callerNode) {\n callerNode.calls.push(nodeKey);\n }\n node.calledBy.push(callerKey);\n edgeCount++;\n }\n }\n }\n }\n }\n\n const graph: CallGraph = {\n nodes,\n files: filePaths,\n edgeCount,\n };\n\n // Save to persistent cache\n saveCallGraphCache(baseDir, graph, fileHashes);\n\n return graph;\n}\n\n/**\n * Find common directory from a list of file paths\n */\nfunction findCommonDirectory(paths: string[]): string {\n if (paths.length === 0) {\n return \".\";\n }\n\n const firstPathStr = paths[0];\n if (!firstPathStr) {\n return \".\";\n }\n\n if (paths.length === 1) {\n return path.dirname(firstPathStr);\n }\n\n // Normalize paths and split into segments\n const segments = paths.map((p) => path.normalize(p).split(path.sep));\n const firstPath = segments[0];\n\n if (!firstPath) {\n return \".\";\n }\n\n // Find common prefix\n let commonLength = 0;\n\n for (let i = 0; i < firstPath.length; i++) {\n const segment = firstPath[i];\n if (segment && segments.every((s) => s[i] === segment)) {\n commonLength = i + 1;\n } else {\n break;\n }\n }\n\n // Build common directory path\n const commonSegments = firstPath.slice(0, commonLength);\n const commonDir = commonSegments.join(path.sep);\n\n // If the common path is a file, return its directory\n if (\n commonDir &&\n fs.existsSync(commonDir) &&\n fs.statSync(commonDir).isFile()\n ) {\n return path.dirname(commonDir);\n 
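// Typically reached when the input list contains duplicates of a single\n // file: the longest common prefix is then the file itself, not a directory.\n 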
}\n\n return commonDir || \".\";\n}\n\n/**\n * Get callers and callees for a specific function\n */\nexport function getCallContext(\n graph: CallGraph,\n filePath: string,\n functionName: string,\n): {\n callers: CallGraphNode[];\n callees: CallGraphNode[];\n} | null {\n const qualifiedName = `${filePath}:${functionName}`;\n const node = graph.nodes.get(qualifiedName);\n\n if (!node) {\n return null;\n }\n\n const callers: CallGraphNode[] = [];\n const callees: CallGraphNode[] = [];\n\n for (const callerKey of node.calledBy) {\n const caller = graph.nodes.get(callerKey);\n if (caller) {\n callers.push(caller);\n }\n }\n\n for (const calleeKey of node.calls) {\n const callee = graph.nodes.get(calleeKey);\n if (callee) {\n callees.push(callee);\n }\n }\n\n return { callers, callees };\n}\n\n/**\n * Format call context as a string for enrichment\n */\nexport function formatCallContext(\n callers: CallGraphNode[],\n callees: CallGraphNode[],\n maxItems = 5,\n): string {\n const lines: string[] = [];\n\n if (callers.length > 0) {\n const callerNames = callers\n .slice(0, maxItems)\n .map((c) => c.name)\n .join(\", \");\n lines.push(`Called by: ${callerNames}`);\n }\n\n if (callees.length > 0) {\n const calleeNames = callees\n .slice(0, maxItems)\n .map((c) => c.name)\n .join(\", \");\n lines.push(`Calls: ${calleeNames}`);\n }\n\n return lines.join(\"\\n\");\n}\n\n/**\n * Get call graph cache statistics\n */\nexport function getCallGraphCacheStats(): {\n files: number;\n entries: string[];\n} {\n return {\n files: callGraphCache.size,\n entries: Array.from(callGraphCache.keys()),\n };\n}\n","/**\n * Index Codebase Feature\n *\n * Indexes a directory by:\n * 1. Scanning for supported files\n * 2. Chunking each file\n * 3. Generating embeddings via Ollama\n * 4. 
Storing in LanceDB\n */\n\nimport { z } from \"zod\";\nimport * as fs from \"node:fs\";\nimport * as path from \"node:path\";\nimport ignore, { type Ignore } from \"ignore\";\nimport type { Feature, FeatureResult } from \"@features/types\";\nimport { EMBEDDING_CONFIG } from \"@config\";\nimport {\n chunkFile,\n createOllamaClient,\n createVectorStore,\n enrichChunksFromFile,\n shouldIndexFile,\n type EmbeddedChunk,\n type EnrichedChunk,\n type EnrichmentOptions,\n} from \"@core/embeddings\";\nimport { logger } from \"@utils\";\nimport { readPathAliasesCached } from \"@core/utils\";\n\n/** Default concurrency for parallel file processing */\nconst DEFAULT_CONCURRENCY = 4;\n\n/**\n * Process items in parallel with concurrency limit using worker pool pattern\n */\nasync function parallelMap<T, R>(\n items: T[],\n processor: (item: T) => Promise<R>,\n concurrency: number,\n): Promise<R[]> {\n const results: (R | undefined)[] = new Array<R | undefined>(items.length);\n let currentIndex = 0;\n\n const worker = async (): Promise<void> => {\n while (currentIndex < items.length) {\n const index = currentIndex++;\n const item = items[index];\n if (item !== undefined) {\n results[index] = await processor(item);\n }\n }\n };\n\n const workers = Array.from(\n { length: Math.min(concurrency, items.length) },\n async () => worker(),\n );\n await Promise.all(workers);\n\n // Filter out undefined values (shouldn't happen but TypeScript needs this)\n return results.filter((r): r is R => r !== undefined);\n}\n\nexport const indexCodebaseSchema = z.object({\n directory: z\n .string()\n .optional()\n .default(\".\")\n .describe(\"Path to the directory to index (defaults to current directory)\"),\n force: z\n .boolean()\n .optional()\n .default(false)\n .describe(\"Force re-indexing even if index exists\"),\n exclude: z\n .array(z.string())\n .optional()\n .default([])\n .describe(\"Additional glob patterns to exclude\"),\n concurrency: z\n .number()\n .int()\n .positive()\n .optional()\n .default(DEFAULT_CONCURRENCY)\n .describe(\"Number of files to process in parallel (default: 4)\"),\n});\n\nexport type IndexCodebaseInput = z.infer<typeof indexCodebaseSchema>;\n\ninterface IndexResult {\n directory: string;\n filesIndexed: number;\n chunksCreated: number;\n languages: Record<string, number>;\n errors: string[];\n}\n\n/**\n * Create an ignore instance with gitignore patterns and additional exclusions\n */\nfunction createIgnoreFilter(\n baseDir: string,\n additionalExclusions: string[],\n): Ignore {\n const ig = ignore();\n\n // Read .gitignore if it exists\n const gitignorePath = path.join(baseDir, \".gitignore\");\n if (fs.existsSync(gitignorePath)) {\n try {\n const content = fs.readFileSync(gitignorePath, \"utf-8\");\n ig.add(content);\n } catch {\n // Ignore read errors\n }\n }\n\n // Add additional user exclusions\n if (additionalExclusions.length > 0) {\n ig.add(additionalExclusions);\n }\n\n return ig;\n}\n\n/**\n * Check if a name starts with a dot (hidden file/folder)\n */\nfunction isHidden(name: string): boolean {\n return name.startsWith(\".\");\n}\n\n/**\n * Recursively collect files from a directory\n */\nfunction collectFiles(dir: string, ig: Ignore, baseDir: string): string[] {\n const files: string[] = [];\n\n const entries = fs.readdirSync(dir, { withFileTypes: true });\n\n for (const entry of entries) {\n // Skip hidden files/folders (starting with .)\n if (isHidden(entry.name)) {\n continue;\n }\n\n const fullPath = path.join(dir, entry.name);\n const relativePath = path.relative(baseDir, 
fullPath).replace(/\\\\/g, \"/\");\n\n // Check if ignored by gitignore patterns\n if (ig.ignores(relativePath)) {\n continue;\n }\n\n if (entry.isDirectory()) {\n files.push(...collectFiles(fullPath, ig, baseDir));\n } else if (entry.isFile() && shouldIndexFile(entry.name)) {\n files.push(fullPath);\n }\n }\n\n return files;\n}\n\n/**\n * Execute the index_codebase feature\n */\nexport async function execute(\n input: IndexCodebaseInput,\n): Promise<FeatureResult> {\n const { directory, force, exclude, concurrency } = input;\n\n // Validate directory exists\n if (!fs.existsSync(directory)) {\n return {\n success: false,\n error: `Directory not found: ${directory}`,\n };\n }\n\n const absoluteDir = path.resolve(directory);\n\n // Initialize components\n const ollamaClient = createOllamaClient(EMBEDDING_CONFIG);\n const vectorStore = createVectorStore(absoluteDir, EMBEDDING_CONFIG);\n\n // Check Ollama health\n const health = await ollamaClient.healthCheck();\n if (!health.ok) {\n return {\n success: false,\n error: health.error ?? \"Ollama is not available\",\n };\n }\n\n // Check if index exists and force is not set\n if (vectorStore.exists() && !force) {\n return {\n success: false,\n error:\n \"Index already exists. Use force=true to re-index or search_code to query.\",\n };\n }\n\n const result: IndexResult = {\n directory: absoluteDir,\n filesIndexed: 0,\n chunksCreated: 0,\n languages: {},\n errors: [],\n };\n\n try {\n // Connect to vector store\n await vectorStore.connect();\n\n // Clear existing data if force re-indexing\n if (force && vectorStore.exists()) {\n await vectorStore.clear();\n }\n\n // Create ignore filter from .gitignore and user exclusions\n const ig = createIgnoreFilter(absoluteDir, exclude);\n\n // Collect files\n const files = collectFiles(absoluteDir, ig, absoluteDir);\n\n if (files.length === 0) {\n return {\n success: true,\n message: \"No indexable files found in directory\",\n data: result,\n };\n }\n\n // Read path aliases from tsconfig.json if present\n const pathAliases = readPathAliasesCached(absoluteDir);\n const aliasCount = Object.keys(pathAliases).length;\n\n // Enrichment options with cross-file context enabled\n const enrichmentOptions: EnrichmentOptions = {\n projectRoot: absoluteDir,\n pathAliases,\n includeCrossFileContext: true,\n };\n\n logger.debug(\n `Indexing ${String(files.length)} files with concurrency=${String(concurrency)} (projectRoot: ${absoluteDir}, ${String(aliasCount)} path aliases)`,\n );\n\n // Process files in parallel: chunk and enrich\n interface FileProcessResult {\n chunks: EnrichedChunk[];\n error?: string;\n }\n\n const processFile = async (\n filePath: string,\n ): Promise<FileProcessResult> => {\n try {\n const content = fs.readFileSync(filePath, \"utf-8\");\n const chunks = await chunkFile(filePath, content, EMBEDDING_CONFIG);\n\n // Enrich chunks with semantic metadata including cross-file context\n const enrichedChunks = await enrichChunksFromFile(\n chunks,\n content,\n enrichmentOptions,\n );\n\n return { chunks: enrichedChunks };\n } catch (err) {\n const errorMsg = err instanceof Error ? 
err.message : String(err);\n return {\n chunks: [],\n error: `Error processing ${filePath}: ${errorMsg}`,\n };\n }\n };\n\n // Process all files in parallel with concurrency limit\n const fileResults = await parallelMap(files, processFile, concurrency);\n\n // Aggregate results\n const allEnrichedChunks: EnrichedChunk[] = [];\n\n for (const fileResult of fileResults) {\n if (fileResult.error) {\n result.errors.push(fileResult.error);\n } else {\n allEnrichedChunks.push(...fileResult.chunks);\n result.filesIndexed++;\n\n // Track language stats\n for (const chunk of fileResult.chunks) {\n result.languages[chunk.language] =\n (result.languages[chunk.language] ?? 0) + 1;\n }\n }\n }\n\n // Generate embeddings in batches using enriched content\n const { batchSize } = EMBEDDING_CONFIG;\n const embeddedChunks: EmbeddedChunk[] = [];\n\n for (let i = 0; i < allEnrichedChunks.length; i += batchSize) {\n const batch = allEnrichedChunks.slice(i, i + batchSize);\n // Use enrichedContent for embedding (contains metadata header + original code)\n const texts = batch.map((c) => c.enrichedContent);\n\n try {\n const embeddings = await ollamaClient.embedBatch(texts);\n\n for (let j = 0; j < batch.length; j++) {\n const chunk = batch[j];\n const vector = embeddings[j];\n\n if (chunk && vector) {\n // Store original chunk data (without enrichedContent to save space)\n embeddedChunks.push({\n id: chunk.id,\n content: chunk.content,\n filePath: chunk.filePath,\n language: chunk.language,\n startLine: chunk.startLine,\n endLine: chunk.endLine,\n symbolName: chunk.symbolName,\n symbolType: chunk.symbolType,\n vector,\n });\n }\n }\n } catch (err) {\n const errorMsg = err instanceof Error ? err.message : String(err);\n result.errors.push(`Embedding batch error: ${errorMsg}`);\n }\n }\n\n // Store embeddings\n if (embeddedChunks.length > 0) {\n await vectorStore.addChunks(embeddedChunks);\n result.chunksCreated = embeddedChunks.length;\n }\n\n vectorStore.close();\n\n const hasErrors = result.errors.length > 0;\n const message = hasErrors\n ? `Indexed ${String(result.filesIndexed)} files (${String(result.chunksCreated)} chunks) with ${String(result.errors.length)} errors`\n : `Successfully indexed ${String(result.filesIndexed)} files (${String(result.chunksCreated)} chunks)`;\n\n return {\n success: true,\n message,\n data: result,\n };\n } catch (err) {\n vectorStore.close();\n const errorMsg = err instanceof Error ? err.message : String(err);\n return {\n success: false,\n error: `Indexing failed: ${errorMsg}`,\n data: result,\n };\n }\n}\n\nexport const indexCodebaseFeature: Feature<typeof indexCodebaseSchema> = {\n name: \"index_codebase\",\n description:\n \"Index a codebase for semantic code search. USE THIS FIRST before search_code. Required once per project - creates vector embeddings for 50+ languages. After initial indexing, use update_index for incremental updates.\",\n schema: indexCodebaseSchema,\n execute,\n};\n","/**\n * Search Code Feature\n *\n * Performs hybrid search on indexed codebase combining:\n * 1. Vector similarity search (semantic embeddings via Ollama)\n * 2. Full-text search (BM25 keyword matching)\n * 3. 
RRF (Reciprocal Rank Fusion) to combine results\n *\n * Supports three search modes:\n * - 'hybrid' (default): Best of both vector and keyword search\n * - 'vector': Semantic search only\n * - 'fts': Keyword search only\n *\n * Optional features:\n * - LLM re-ranking for improved relevance\n * - Call context to show callers/callees for each result\n */\n\nimport { z } from \"zod\";\nimport * as fs from \"node:fs\";\nimport * as path from \"node:path\";\nimport ignore, { type Ignore } from \"ignore\";\nimport type { Feature, FeatureResult } from \"@features/types\";\nimport { EMBEDDING_CONFIG } from \"@config\";\nimport {\n createOllamaClient,\n createVectorStore,\n rerank,\n buildCallGraph,\n getCallContext,\n shouldIndexFile,\n type SearchResult,\n type SearchMode,\n} from \"@core/embeddings\";\n\nexport const searchCodeSchema = z.object({\n query: z.string().min(1).describe(\"Natural language search query\"),\n directory: z\n .string()\n .optional()\n .default(\".\")\n .describe(\"Path to the indexed directory (defaults to current directory)\"),\n limit: z\n .number()\n .int()\n .positive()\n .optional()\n .default(10)\n .describe(\"Maximum number of results to return\"),\n threshold: z\n .number()\n .min(0)\n .max(2)\n .optional()\n .describe(\"Maximum distance threshold for results (lower = more similar)\"),\n mode: z\n .enum([\"vector\", \"fts\", \"hybrid\"])\n .optional()\n .default(\"hybrid\")\n .describe(\n \"Search mode: 'vector' (semantic only), 'fts' (keyword only), 'hybrid' (combined with RRF fusion)\",\n ),\n rerank: z\n .boolean()\n .optional()\n .default(true)\n .describe(\n \"Enable LLM re-ranking for improved relevance (enabled by default)\",\n ),\n includeCallContext: z\n .boolean()\n .optional()\n .default(true)\n .describe(\n \"Include caller/callee information for each result (uses cached call graph)\",\n ),\n});\n\nexport type SearchCodeInput = z.infer<typeof searchCodeSchema>;\n\ninterface CallContextInfo {\n callers: string[];\n callees: string[];\n}\n\ninterface FormattedResult {\n filePath: string;\n language: string;\n startLine: number;\n endLine: number;\n content: string;\n score: number;\n symbolName?: string;\n symbolType?: string;\n callContext?: CallContextInfo;\n}\n\ninterface SearchOutput {\n query: string;\n directory: string;\n resultsCount: number;\n results: FormattedResult[];\n}\n\n/**\n * Create gitignore filter\n */\nfunction createIgnoreFilter(directory: string): Ignore {\n const ig = ignore();\n ig.add([\"node_modules\", \".git\", \"dist\", \"build\", \".src-index\"]);\n\n const gitignorePath = path.join(directory, \".gitignore\");\n if (fs.existsSync(gitignorePath)) {\n const content = fs.readFileSync(gitignorePath, \"utf-8\");\n ig.add(content);\n }\n\n return ig;\n}\n\n/**\n * Check if hidden file/folder\n */\nfunction isHidden(name: string): boolean {\n return name.startsWith(\".\");\n}\n\n/**\n * Recursively collect files for call graph\n */\nfunction collectFiles(dir: string, ig: Ignore, baseDir: string): string[] {\n const files: string[] = [];\n\n const entries = fs.readdirSync(dir, { withFileTypes: true });\n\n for (const entry of entries) {\n if (isHidden(entry.name)) {\n continue;\n }\n\n const fullPath = path.join(dir, entry.name);\n const relativePath = path.relative(baseDir, fullPath).replace(/\\\\/g, \"/\");\n\n if (ig.ignores(relativePath)) {\n continue;\n }\n\n if (entry.isDirectory()) {\n files.push(...collectFiles(fullPath, ig, baseDir));\n } else if (entry.isFile() && shouldIndexFile(entry.name)) {\n files.push(fullPath);\n 
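// Note: Dirent.isFile()/isDirectory() are false for symlinks, so linked\n // files are skipped and linked directories cannot introduce cycles here.\n 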
}\n }\n\n return files;\n}\n\n/**\n * Format search results for output\n */\nfunction formatResults(\n results: SearchResult[],\n baseDir: string,\n): FormattedResult[] {\n return results.map((r) => ({\n filePath: path.relative(baseDir, r.chunk.filePath),\n language: r.chunk.language,\n startLine: r.chunk.startLine,\n endLine: r.chunk.endLine,\n content: r.chunk.content,\n score: r.score,\n symbolName: r.chunk.symbolName,\n symbolType: r.chunk.symbolType,\n }));\n}\n\n/**\n * Execute the search_code feature\n */\nexport async function execute(input: SearchCodeInput): Promise<FeatureResult> {\n const {\n query,\n directory,\n limit,\n threshold,\n mode,\n rerank: enableRerank,\n includeCallContext,\n } = input;\n\n // Validate directory exists\n if (!fs.existsSync(directory)) {\n return {\n success: false,\n error: `Directory not found: ${directory}`,\n };\n }\n\n const absoluteDir = path.resolve(directory);\n\n // Initialize components\n const ollamaClient = createOllamaClient(EMBEDDING_CONFIG);\n const vectorStore = createVectorStore(absoluteDir, EMBEDDING_CONFIG);\n\n // Check if index exists\n if (!vectorStore.exists()) {\n return {\n success: false,\n error: `No index found for directory. Run index_codebase first: ${absoluteDir}`,\n };\n }\n\n try {\n // Check Ollama health\n const health = await ollamaClient.healthCheck();\n if (!health.ok) {\n return {\n success: false,\n error: health.error ?? \"Ollama is not available\",\n };\n }\n\n // Connect to vector store\n await vectorStore.connect();\n\n // Generate query embedding\n const queryVector = await ollamaClient.embed(query);\n\n // Search for similar chunks using hybrid search (vector + BM25 + RRF)\n let results = await vectorStore.searchHybrid(queryVector, query, limit, {\n mode: mode as SearchMode,\n });\n\n // Apply threshold filter if specified (only for vector mode where lower = better)\n // For hybrid/fts modes, RRF scores are higher = better, so threshold is ignored\n if (threshold !== undefined && mode === \"vector\") {\n results = results.filter((r) => r.score <= threshold);\n }\n\n // Apply LLM re-ranking if enabled (default: true)\n if (enableRerank && results.length > 0) {\n results = await rerank(query, results, {\n ollamaBaseUrl: EMBEDDING_CONFIG.ollamaBaseUrl,\n model: EMBEDDING_CONFIG.rerankModel,\n maxResults: limit,\n });\n }\n\n vectorStore.close();\n\n let formattedResults = formatResults(results, absoluteDir);\n\n // Add call context if requested\n if (includeCallContext && formattedResults.length > 0) {\n // Build call graph for the directory\n const ig = createIgnoreFilter(absoluteDir);\n const files = collectFiles(absoluteDir, ig, absoluteDir);\n const fileContents = files.map((f) => ({\n path: f,\n content: fs.readFileSync(f, \"utf-8\"),\n }));\n\n const callGraph = await buildCallGraph(fileContents);\n\n // Add call context to each result that has a symbol name\n formattedResults = formattedResults.map((result) => {\n if (!result.symbolName) {\n return result;\n }\n\n const fullPath = path.join(absoluteDir, result.filePath);\n const context = getCallContext(callGraph, fullPath, result.symbolName);\n\n if (context) {\n return {\n ...result,\n callContext: {\n callers: context.callers.map((c) => c.name),\n callees: context.callees.map((c) => c.name),\n },\n };\n }\n\n return result;\n });\n }\n\n const output: SearchOutput = {\n query,\n directory: absoluteDir,\n resultsCount: formattedResults.length,\n results: formattedResults,\n };\n\n if (formattedResults.length === 0) {\n return {\n success: true,\n 
message: \"No matching code found\",\n data: output,\n };\n }\n\n // Build text message with results\n const resultLines = formattedResults.map((r, i) => {\n const location = `${r.filePath}:${String(r.startLine)}-${String(r.endLine)}`;\n const symbol = r.symbolName\n ? ` (${r.symbolType ?? \"symbol\"}: ${r.symbolName})`\n : \"\";\n const preview = r.content.slice(0, 100).replace(/\\n/g, \" \");\n\n let callInfo = \"\";\n if (r.callContext) {\n const callers =\n r.callContext.callers.length > 0\n ? `Called by: ${r.callContext.callers.slice(0, 3).join(\", \")}${r.callContext.callers.length > 3 ? \"...\" : \"\"}`\n : \"\";\n const callees =\n r.callContext.callees.length > 0\n ? `Calls: ${r.callContext.callees.slice(0, 3).join(\", \")}${r.callContext.callees.length > 3 ? \"...\" : \"\"}`\n : \"\";\n if (callers || callees) {\n callInfo = `\\n ${[callers, callees].filter(Boolean).join(\" | \")}`;\n }\n }\n\n return `${String(i + 1)}. [${r.language}] ${location}${symbol}\\n ${preview}...${callInfo}`;\n });\n\n const message = `Found ${String(formattedResults.length)} results for \"${query}\":\\n\\n${resultLines.join(\"\\n\\n\")}`;\n\n return {\n success: true,\n message,\n data: output,\n };\n } catch (err) {\n vectorStore.close();\n const errorMsg = err instanceof Error ? err.message : String(err);\n return {\n success: false,\n error: `Search failed: ${errorMsg}`,\n };\n }\n}\n\nexport const searchCodeFeature: Feature<typeof searchCodeSchema> = {\n name: \"search_code\",\n description:\n \"Search code semantically using natural language queries. USE THIS to find code by concept/meaning (e.g., 'authentication logic', 'error handling'). Requires index_codebase first. Returns relevant code chunks with file locations, function names, and call relationships (who calls what).\",\n schema: searchCodeSchema,\n execute,\n};\n","/**\n * Get Index Status Feature\n *\n * Returns information about the embedding index for a directory:\n * - Whether an index exists\n * - Total chunks and files indexed\n * - Language breakdown\n */\n\nimport { z } from \"zod\";\nimport * as fs from \"node:fs\";\nimport * as path from \"node:path\";\nimport type { Feature, FeatureResult } from \"@features/types\";\nimport { EMBEDDING_CONFIG } from \"@config\";\nimport {\n createVectorStore,\n getIndexPath,\n type IndexStatus,\n} from \"@core/embeddings\";\n\nexport const getIndexStatusSchema = z.object({\n directory: z\n .string()\n .optional()\n .default(\".\")\n .describe(\"Path to the directory to check (defaults to current directory)\"),\n});\n\nexport type GetIndexStatusInput = z.infer<typeof getIndexStatusSchema>;\n\n/**\n * Execute the get_index_status feature\n */\nexport async function execute(\n input: GetIndexStatusInput,\n): Promise<FeatureResult> {\n const { directory } = input;\n\n // Validate directory exists\n if (!fs.existsSync(directory)) {\n return {\n success: false,\n error: `Directory not found: ${directory}`,\n };\n }\n\n const absoluteDir = path.resolve(directory);\n const indexPath = getIndexPath(absoluteDir);\n\n // Check if index exists\n if (!fs.existsSync(indexPath)) {\n const status: IndexStatus = {\n directory: absoluteDir,\n indexPath,\n exists: false,\n totalChunks: 0,\n totalFiles: 0,\n languages: {},\n };\n\n return {\n success: true,\n message: `No index found for ${absoluteDir}. 
Run index_codebase to create one.`,\n data: status,\n };\n }\n\n try {\n const vectorStore = createVectorStore(absoluteDir, EMBEDDING_CONFIG);\n await vectorStore.connect();\n\n const status = await vectorStore.getStatus(absoluteDir);\n vectorStore.close();\n\n // Format language breakdown\n const languageLines = Object.entries(status.languages)\n .sort(([, a], [, b]) => b - a)\n .map(([lang, count]) => ` - ${lang}: ${String(count)} chunks`);\n\n const message = [\n `Index Status for ${absoluteDir}`,\n ``,\n `Index Path: ${status.indexPath}`,\n `Total Files: ${String(status.totalFiles)}`,\n `Total Chunks: ${String(status.totalChunks)}`,\n ``,\n `Languages:`,\n ...languageLines,\n ].join(\"\\n\");\n\n return {\n success: true,\n message,\n data: status,\n };\n } catch (err) {\n const errorMsg = err instanceof Error ? err.message : String(err);\n return {\n success: false,\n error: `Failed to read index status: ${errorMsg}`,\n };\n }\n}\n\nexport const getIndexStatusFeature: Feature<typeof getIndexStatusSchema> = {\n name: \"get_index_status\",\n description:\n \"Check if a codebase is indexed and ready for search. USE THIS to verify index exists before searching. Returns file count, chunk count, and indexed languages.\",\n schema: getIndexStatusSchema,\n execute,\n};\n","/**\n * Get Call Graph Feature\n *\n * Analyzes function call relationships in a codebase.\n * Can either:\n * 1. Build a full call graph for a directory\n * 2. Query callers/callees for a specific function\n */\n\nimport { z } from \"zod\";\nimport * as fs from \"node:fs\";\nimport * as path from \"node:path\";\nimport ignore, { type Ignore } from \"ignore\";\nimport type { Feature, FeatureResult } from \"@features/types\";\nimport {\n buildCallGraph,\n getCallContext,\n formatCallContext,\n shouldIndexFile,\n type CallGraphNode,\n} from \"@core/embeddings\";\nimport { logger } from \"@utils\";\n\nexport const getCallGraphSchema = z.object({\n directory: z\n .string()\n .optional()\n .default(\".\")\n .describe(\"Path to the directory to analyze\"),\n functionName: z\n .string()\n .optional()\n .describe(\"Optional: specific function name to query callers/callees for\"),\n filePath: z\n .string()\n .optional()\n .describe(\n \"Optional: file path to narrow down function search (used with functionName)\",\n ),\n maxDepth: z\n .number()\n .int()\n .positive()\n .optional()\n .default(2)\n .describe(\"Maximum depth for call chain traversal (default: 2)\"),\n exclude: z\n .array(z.string())\n .optional()\n .default([])\n .describe(\"Glob patterns to exclude from analysis\"),\n});\n\nexport type GetCallGraphInput = z.infer<typeof getCallGraphSchema>;\n\ninterface CallGraphResult {\n directory: string;\n mode: \"full\" | \"query\";\n totalFunctions: number;\n totalCalls: number;\n filesAnalyzed: number;\n query?: {\n functionName: string;\n filePath?: string;\n callers: CallGraphNode[];\n callees: CallGraphNode[];\n formattedContext: string;\n };\n graph?: {\n nodes: Record<string, CallGraphNode>;\n topCallers: { name: string; callCount: number }[];\n topCallees: { name: string; calledByCount: number }[];\n };\n}\n\n/**\n * Create gitignore filter from .gitignore file\n */\nfunction createIgnoreFilter(\n directory: string,\n extraPatterns: string[],\n): Ignore {\n const ig = ignore();\n\n // Add default ignores\n ig.add([\"node_modules\", \".git\", \"dist\", \"build\", \".src-index\"]);\n\n // Add extra patterns\n if (extraPatterns.length > 0) {\n ig.add(extraPatterns);\n }\n\n // Read .gitignore if exists\n const gitignorePath 
= path.join(directory, \".gitignore\");\n if (fs.existsSync(gitignorePath)) {\n const content = fs.readFileSync(gitignorePath, \"utf-8\");\n ig.add(content);\n }\n\n return ig;\n}\n\n/**\n * Check if a name starts with a dot (hidden file/folder)\n */\nfunction isHidden(name: string): boolean {\n return name.startsWith(\".\");\n}\n\n/**\n * Recursively collect files from a directory\n */\nfunction collectFiles(dir: string, ig: Ignore, baseDir: string): string[] {\n const files: string[] = [];\n\n const entries = fs.readdirSync(dir, { withFileTypes: true });\n\n for (const entry of entries) {\n if (isHidden(entry.name)) {\n continue;\n }\n\n const fullPath = path.join(dir, entry.name);\n const relativePath = path.relative(baseDir, fullPath).replace(/\\\\/g, \"/\");\n\n if (ig.ignores(relativePath)) {\n continue;\n }\n\n if (entry.isDirectory()) {\n files.push(...collectFiles(fullPath, ig, baseDir));\n } else if (entry.isFile() && shouldIndexFile(entry.name)) {\n files.push(fullPath);\n }\n }\n\n return files;\n}\n\n/**\n * Execute the get_call_graph feature\n */\nexport async function execute(\n input: GetCallGraphInput,\n): Promise<FeatureResult> {\n const { directory, functionName, filePath, maxDepth, exclude } = input;\n\n // Validate directory exists\n if (!fs.existsSync(directory)) {\n return {\n success: false,\n error: `Directory not found: ${directory}`,\n };\n }\n\n const absoluteDir = path.resolve(directory);\n\n try {\n // Create ignore filter\n const ig = createIgnoreFilter(absoluteDir, exclude);\n\n // Collect files\n const files = collectFiles(absoluteDir, ig, absoluteDir);\n\n if (files.length === 0) {\n return {\n success: true,\n message: \"No analyzable files found in directory\",\n data: {\n directory: absoluteDir,\n mode: \"full\",\n totalFunctions: 0,\n totalCalls: 0,\n filesAnalyzed: 0,\n } satisfies CallGraphResult,\n };\n }\n\n logger.debug(`Analyzing call graph for ${String(files.length)} files`);\n\n // Read file contents and build call graph\n const fileContents = files.map((f) => ({\n path: f,\n content: fs.readFileSync(f, \"utf-8\"),\n }));\n\n const graph = await buildCallGraph(fileContents);\n\n const result: CallGraphResult = {\n directory: absoluteDir,\n mode: functionName ? \"query\" : \"full\",\n totalFunctions: graph.nodes.size,\n totalCalls: Array.from(graph.nodes.values()).reduce(\n (sum, node) => sum + node.calls.length,\n 0,\n ),\n filesAnalyzed: files.length,\n };\n\n // If querying for a specific function\n if (functionName) {\n const targetFilePath = filePath\n ? path.resolve(directory, filePath)\n : undefined;\n\n const callContext = getCallContext(\n graph,\n targetFilePath ?? 
\"\",\n functionName,\n );\n\n if (!callContext) {\n // Try to find function in any file\n let foundContext: {\n callers: CallGraphNode[];\n callees: CallGraphNode[];\n } | null = null;\n let foundFilePath = \"\";\n\n for (const node of graph.nodes.values()) {\n if (node.name === functionName) {\n foundFilePath = node.filePath;\n foundContext = getCallContext(graph, node.filePath, functionName);\n if (foundContext) {\n break;\n }\n }\n }\n\n if (foundContext) {\n result.query = {\n functionName,\n filePath: foundFilePath,\n callers: foundContext.callers,\n callees: foundContext.callees,\n formattedContext: formatCallContext(\n foundContext.callers,\n foundContext.callees,\n maxDepth,\n ),\n };\n } else {\n return {\n success: false,\n error: `Function '${functionName}' not found in the codebase`,\n };\n }\n } else {\n result.query = {\n functionName,\n filePath: targetFilePath,\n callers: callContext.callers,\n callees: callContext.callees,\n formattedContext: formatCallContext(\n callContext.callers,\n callContext.callees,\n maxDepth,\n ),\n };\n }\n\n const message = `Call graph for '${functionName}':\\n\\n${result.query.formattedContext}`;\n\n return {\n success: true,\n message,\n data: result,\n };\n }\n\n // Full graph mode - compute top callers and callees\n const callerCounts = new Map<string, number>();\n const calleeCounts = new Map<string, number>();\n\n for (const node of graph.nodes.values()) {\n callerCounts.set(node.name, node.calls.length);\n calleeCounts.set(node.name, node.calledBy.length);\n }\n\n const topCallers = Array.from(callerCounts.entries())\n .sort((a, b) => b[1] - a[1])\n .slice(0, 10)\n .map(([name, callCount]) => ({ name, callCount }));\n\n const topCallees = Array.from(calleeCounts.entries())\n .sort((a, b) => b[1] - a[1])\n .slice(0, 10)\n .map(([name, calledByCount]) => ({ name, calledByCount }));\n\n result.graph = {\n nodes: Object.fromEntries(graph.nodes),\n topCallers,\n topCallees,\n };\n\n // Build summary message\n const topCallersStr = topCallers\n .map((c) => ` - ${c.name}: ${String(c.callCount)} calls`)\n .join(\"\\n\");\n const topCalleesStr = topCallees\n .map(\n (c) => ` - ${c.name}: called by ${String(c.calledByCount)} functions`,\n )\n .join(\"\\n\");\n\n const message = `Call graph analysis complete:\n- Files analyzed: ${String(files.length)}\n- Functions found: ${String(result.totalFunctions)}\n- Total calls: ${String(result.totalCalls)}\n\nTop callers (functions that call the most):\n${topCallersStr}\n\nMost called (functions called by the most):\n${topCalleesStr}\n\nUse functionName parameter to query specific function relationships.`;\n\n return {\n success: true,\n message,\n data: result,\n };\n } catch (err) {\n const errorMsg = err instanceof Error ? err.message : String(err);\n return {\n success: false,\n error: `Call graph analysis failed: ${errorMsg}`,\n };\n }\n}\n\nexport const getCallGraphFeature: Feature<typeof getCallGraphSchema> = {\n name: \"get_call_graph\",\n description:\n \"Analyze function call relationships in a codebase. Query callers/callees for a specific function or get full call graph statistics.\",\n schema: getCallGraphSchema,\n execute,\n};\n","/**\n * Update Index Feature\n *\n * Incrementally updates the codebase index by:\n * 1. Detecting files that have changed since last indexing\n * 2. Re-indexing only the changed files\n * 3. 
Removing deleted files from the index\n *\n * Uses SHA-256 hash comparison to detect real content changes.\n */\n\nimport { z } from \"zod\";\nimport * as fs from \"node:fs\";\nimport * as path from \"node:path\";\nimport * as crypto from \"node:crypto\";\nimport ignore, { type Ignore } from \"ignore\";\nimport type { Feature, FeatureResult } from \"@features/types\";\nimport { EMBEDDING_CONFIG } from \"@config\";\nimport {\n chunkFile,\n createOllamaClient,\n createVectorStore,\n enrichChunksFromFile,\n shouldIndexFile,\n type EmbeddedChunk,\n type EnrichmentOptions,\n} from \"@core/embeddings\";\nimport { readPathAliasesCached } from \"@core/utils\";\n\n/** Cache file name for storing hashes */\nconst HASH_CACHE_FILE = \".src-index-hashes.json\";\n\nexport const updateIndexSchema = z.object({\n directory: z\n .string()\n .optional()\n .default(\".\")\n .describe(\"Path to the indexed directory\"),\n dryRun: z\n .boolean()\n .optional()\n .default(false)\n .describe(\"Only report changes without updating the index\"),\n force: z\n .boolean()\n .optional()\n .default(false)\n .describe(\"Force re-index of all files (ignore hash cache)\"),\n});\n\nexport type UpdateIndexInput = z.infer<typeof updateIndexSchema>;\n\ntype HashCache = Record<string, string>;\n\ninterface UpdateResult {\n directory: string;\n dryRun: boolean;\n added: string[];\n modified: string[];\n removed: string[];\n unchanged: number;\n errors: string[];\n}\n\n/**\n * Compute SHA-256 hash of content\n */\nfunction computeHash(content: string): string {\n return crypto.createHash(\"sha256\").update(content, \"utf8\").digest(\"hex\");\n}\n\n/**\n * Get hash cache file path\n */\nfunction getHashCachePath(directory: string): string {\n return path.join(directory, \".src-index\", HASH_CACHE_FILE);\n}\n\n/**\n * Load hash cache from disk\n */\nfunction loadHashCache(directory: string): HashCache {\n const cachePath = getHashCachePath(directory);\n if (fs.existsSync(cachePath)) {\n try {\n const content = fs.readFileSync(cachePath, \"utf-8\");\n return JSON.parse(content) as HashCache;\n } catch {\n return {};\n }\n }\n return {};\n}\n\n/**\n * Save hash cache to disk\n */\nfunction saveHashCache(directory: string, cache: HashCache): void {\n const cachePath = getHashCachePath(directory);\n const cacheDir = path.dirname(cachePath);\n if (!fs.existsSync(cacheDir)) {\n fs.mkdirSync(cacheDir, { recursive: true });\n }\n fs.writeFileSync(cachePath, JSON.stringify(cache, null, 2));\n}\n\n/**\n * Create gitignore filter\n */\nfunction createIgnoreFilter(directory: string): Ignore {\n const ig = ignore();\n ig.add([\"node_modules\", \".git\", \"dist\", \"build\", \".src-index\"]);\n\n const gitignorePath = path.join(directory, \".gitignore\");\n if (fs.existsSync(gitignorePath)) {\n const content = fs.readFileSync(gitignorePath, \"utf-8\");\n ig.add(content);\n }\n\n return ig;\n}\n\n/**\n * Check if a name starts with a dot (hidden)\n */\nfunction isHidden(name: string): boolean {\n return name.startsWith(\".\");\n}\n\n/**\n * Recursively collect files\n */\nfunction collectFiles(dir: string, ig: Ignore, baseDir: string): string[] {\n const files: string[] = [];\n\n const entries = fs.readdirSync(dir, { withFileTypes: true });\n\n for (const entry of entries) {\n if (isHidden(entry.name)) {\n continue;\n }\n\n const fullPath = path.join(dir, entry.name);\n const relativePath = path.relative(baseDir, fullPath).replace(/\\\\/g, \"/\");\n\n if (ig.ignores(relativePath)) {\n continue;\n }\n\n if (entry.isDirectory()) {\n 
files.push(...collectFiles(fullPath, ig, baseDir));\n } else if (entry.isFile() && shouldIndexFile(entry.name)) {\n files.push(fullPath);\n }\n }\n\n return files;\n}\n\n/**\n * Execute the update_index feature\n */\nexport async function execute(input: UpdateIndexInput): Promise<FeatureResult> {\n const { directory, dryRun, force } = input;\n\n // Validate directory\n if (!fs.existsSync(directory)) {\n return {\n success: false,\n error: `Directory not found: ${directory}`,\n };\n }\n\n const absoluteDir = path.resolve(directory);\n\n // Initialize components\n const ollamaClient = createOllamaClient(EMBEDDING_CONFIG);\n const vectorStore = createVectorStore(absoluteDir, EMBEDDING_CONFIG);\n\n // Check if index exists\n if (!vectorStore.exists()) {\n return {\n success: false,\n error: `No index found for directory. Run index_codebase first: ${absoluteDir}`,\n };\n }\n\n const result: UpdateResult = {\n directory: absoluteDir,\n dryRun,\n added: [],\n modified: [],\n removed: [],\n unchanged: 0,\n errors: [],\n };\n\n try {\n // Check Ollama health (only if not dry run)\n if (!dryRun) {\n const health = await ollamaClient.healthCheck();\n if (!health.ok) {\n return {\n success: false,\n error: health.error ?? \"Ollama is not available\",\n };\n }\n }\n\n // Connect to vector store\n await vectorStore.connect();\n\n // Load hash cache\n const hashCache = force ? {} : loadHashCache(absoluteDir);\n const newHashCache: HashCache = {};\n\n // Collect current files\n const ig = createIgnoreFilter(absoluteDir);\n const currentFiles = new Set(collectFiles(absoluteDir, ig, absoluteDir));\n\n // Get indexed files from vector store\n const indexedFiles = new Set(await vectorStore.getIndexedFiles());\n\n // Find files to add/modify/remove\n const filesToProcess: { path: string; type: \"add\" | \"modify\" }[] = [];\n\n for (const filePath of currentFiles) {\n const content = fs.readFileSync(filePath, \"utf-8\");\n const hash = computeHash(content);\n newHashCache[filePath] = hash;\n\n if (!indexedFiles.has(filePath)) {\n // New file\n result.added.push(path.relative(absoluteDir, filePath));\n filesToProcess.push({ path: filePath, type: \"add\" });\n } else if (hashCache[filePath] !== hash) {\n // Modified file\n result.modified.push(path.relative(absoluteDir, filePath));\n filesToProcess.push({ path: filePath, type: \"modify\" });\n } else {\n result.unchanged++;\n }\n }\n\n // Find removed files\n for (const filePath of indexedFiles) {\n if (!currentFiles.has(filePath)) {\n result.removed.push(path.relative(absoluteDir, filePath));\n }\n }\n\n // If dry run, just report what would be done\n if (dryRun) {\n vectorStore.close();\n\n const message = buildDryRunMessage(result);\n return {\n success: true,\n message,\n data: result,\n };\n }\n\n // Read path aliases from tsconfig.json if present\n const pathAliases = readPathAliasesCached(absoluteDir);\n\n // Enrichment options\n const enrichmentOptions: EnrichmentOptions = {\n projectRoot: absoluteDir,\n pathAliases,\n includeCrossFileContext: true,\n };\n\n // Process files\n const embeddedChunks: EmbeddedChunk[] = [];\n\n for (const { path: filePath, type } of filesToProcess) {\n try {\n // Delete existing chunks if modifying\n if (type === \"modify\") {\n await vectorStore.deleteByFilePath(filePath);\n }\n\n const content = fs.readFileSync(filePath, \"utf-8\");\n const chunks = await chunkFile(filePath, content, EMBEDDING_CONFIG);\n\n if (chunks.length === 0) {\n continue;\n }\n\n const enrichedChunks = await enrichChunksFromFile(\n chunks,\n 
content,\n enrichmentOptions,\n );\n\n const texts = enrichedChunks.map((c) => c.enrichedContent);\n const embeddings = await ollamaClient.embedBatch(texts);\n\n for (let i = 0; i < enrichedChunks.length; i++) {\n const chunk = enrichedChunks[i];\n const vector = embeddings[i];\n if (chunk && vector) {\n embeddedChunks.push({\n id: chunk.id,\n content: chunk.content,\n filePath: chunk.filePath,\n language: chunk.language,\n startLine: chunk.startLine,\n endLine: chunk.endLine,\n symbolName: chunk.symbolName,\n symbolType: chunk.symbolType,\n vector,\n });\n }\n }\n } catch (err) {\n const errorMsg = err instanceof Error ? err.message : String(err);\n result.errors.push(`Error processing ${filePath}: ${errorMsg}`);\n }\n }\n\n // Add new chunks\n if (embeddedChunks.length > 0) {\n await vectorStore.addChunks(embeddedChunks);\n }\n\n // Remove deleted files\n for (const relativePath of result.removed) {\n const filePath = path.join(absoluteDir, relativePath);\n await vectorStore.deleteByFilePath(filePath);\n }\n\n // Save new hash cache\n saveHashCache(absoluteDir, newHashCache);\n\n vectorStore.close();\n\n const message = buildResultMessage(result);\n\n return {\n success: true,\n message,\n data: result,\n };\n } catch (err) {\n vectorStore.close();\n const errorMsg = err instanceof Error ? err.message : String(err);\n return {\n success: false,\n error: `Update failed: ${errorMsg}`,\n };\n }\n}\n\n/**\n * Build message for dry run\n */\nfunction buildDryRunMessage(result: UpdateResult): string {\n const lines: string[] = [\"Dry run - changes detected:\"];\n\n if (result.added.length > 0) {\n lines.push(`\\nFiles to add (${String(result.added.length)}):`);\n for (const f of result.added.slice(0, 10)) {\n lines.push(` + ${f}`);\n }\n if (result.added.length > 10) {\n lines.push(` ... and ${String(result.added.length - 10)} more`);\n }\n }\n\n if (result.modified.length > 0) {\n lines.push(`\\nFiles to update (${String(result.modified.length)}):`);\n for (const f of result.modified.slice(0, 10)) {\n lines.push(` ~ ${f}`);\n }\n if (result.modified.length > 10) {\n lines.push(` ... and ${String(result.modified.length - 10)} more`);\n }\n }\n\n if (result.removed.length > 0) {\n lines.push(`\\nFiles to remove (${String(result.removed.length)}):`);\n for (const f of result.removed.slice(0, 10)) {\n lines.push(` - ${f}`);\n }\n if (result.removed.length > 10) {\n lines.push(` ... 
and ${String(result.removed.length - 10)} more`);\n }\n }\n\n lines.push(`\\nUnchanged: ${String(result.unchanged)} files`);\n\n if (\n result.added.length === 0 &&\n result.modified.length === 0 &&\n result.removed.length === 0\n ) {\n return \"Index is up to date - no changes detected.\";\n }\n\n lines.push(\"\\nRun without --dryRun to apply changes.\");\n\n return lines.join(\"\\n\");\n}\n\n/**\n * Build message for actual update\n */\nfunction buildResultMessage(result: UpdateResult): string {\n const changes =\n result.added.length + result.modified.length + result.removed.length;\n\n if (changes === 0) {\n return \"Index is up to date - no changes needed.\";\n }\n\n const lines: string[] = [\"Index updated successfully:\"];\n\n if (result.added.length > 0) {\n lines.push(` Added: ${String(result.added.length)} files`);\n }\n if (result.modified.length > 0) {\n lines.push(` Modified: ${String(result.modified.length)} files`);\n }\n if (result.removed.length > 0) {\n lines.push(` Removed: ${String(result.removed.length)} files`);\n }\n lines.push(` Unchanged: ${String(result.unchanged)} files`);\n\n if (result.errors.length > 0) {\n lines.push(`\\nErrors (${String(result.errors.length)}):`);\n for (const err of result.errors.slice(0, 5)) {\n lines.push(` - ${err}`);\n }\n }\n\n return lines.join(\"\\n\");\n}\n\nexport const updateIndexFeature: Feature<typeof updateIndexSchema> = {\n name: \"update_index\",\n description:\n \"Refresh the search index after code changes. USE THIS instead of re-indexing - it's fast because it only processes changed files (SHA-256 hash detection). Use dryRun=true to preview changes first.\",\n schema: updateIndexSchema,\n execute,\n};\n","import { z } from \"zod\";\n\nimport { getASTRoot, parseCode } from \"@core/parser\";\n\nimport type { Feature, FeatureResult } from \"@features/types\";\nimport { errorResult, readContent, successResult } from \"@features/utils\";\n\nexport const parseAstSchema = z\n .object({\n file_path: z\n .string()\n .optional()\n .describe(\n \"Path to the file to parse (either file_path or content required)\",\n ),\n content: z\n .string()\n .optional()\n .describe(\n \"Code content to parse directly (either file_path or content required)\",\n ),\n language: z\n .string()\n .optional()\n .describe(\"Language name (auto-detected from file path if not provided)\"),\n max_depth: z\n .number()\n .int()\n .positive()\n .optional()\n .describe(\"Maximum depth of AST to return (default: unlimited)\"),\n })\n .refine((data) => data.file_path ?? 
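// The add/modify/remove classification above reduces to a three-way diff of
// the on-disk file set against the vector store and the previous hash cache.
// A condensed sketch; diffFiles is a hypothetical name, but the hashing
// matches computeHash above.
import { createHash } from "node:crypto";

function diffFiles(
  current: Map<string, string>,      // absolute path -> content on disk
  indexed: Set<string>,              // paths already in the vector store
  oldHashes: Record<string, string>, // path -> SHA-256 from the last run
) {
  const added: string[] = [];
  const modified: string[] = [];
  for (const [path, content] of current) {
    const hash = createHash("sha256").update(content, "utf8").digest("hex");
    if (!indexed.has(path)) added.push(path); // never indexed before
    else if (oldHashes[path] !== hash) modified.push(path); // content changed
  }
  // Indexed but no longer on disk -> its chunks get deleted from the store.
  const removed = [...indexed].filter((p) => !current.has(p));
  return { added, modified, removed };
}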
data.content, {\n message: \"Either file_path or content must be provided\",\n });\n\nexport type ParseAstInput = z.infer<typeof parseAstSchema>;\n\nexport async function execute(input: ParseAstInput): Promise<FeatureResult> {\n const { file_path, content: inputContent, language, max_depth } = input;\n\n // Get content\n const contentResult = readContent(file_path, inputContent);\n if (!contentResult.success) {\n return { success: false, error: contentResult.error };\n }\n\n try {\n // Parse the code\n const parseResult = await parseCode(contentResult.content, {\n language,\n filePath: file_path,\n });\n\n // Get AST root with optional depth limit\n const root = getASTRoot(parseResult, max_depth);\n\n // Count nodes (without depth limit for accurate count)\n const fullRoot = getASTRoot(parseResult);\n const countNodesRecursive = (node: typeof fullRoot): number => {\n let count = 1;\n if (node.children) {\n for (const child of node.children) {\n count += countNodesRecursive(child);\n }\n }\n return count;\n };\n const nodeCount = countNodesRecursive(fullRoot);\n\n return successResult(\n {\n language: parseResult.language,\n root,\n node_count: nodeCount,\n },\n `Parsed ${parseResult.language} code with ${String(nodeCount)} nodes`,\n );\n } catch (error) {\n return errorResult(\"parse\", error);\n }\n}\n\nexport const parseAstFeature: Feature<typeof parseAstSchema> = {\n name: \"parse_ast\",\n description:\n \"Parse code and return the Abstract Syntax Tree (AST). Supports multiple languages including JavaScript, TypeScript, Python, Go, Rust, Java, C, C++, and more.\",\n schema: parseAstSchema,\n execute,\n};\n","import { z } from \"zod\";\n\nimport { parseCode } from \"@core/parser\";\nimport {\n executePresetQuery,\n executeQuery,\n getAvailablePresets,\n type QueryPreset,\n} from \"@core/queries\";\n\nimport type { Feature, FeatureResult } from \"@features/types\";\nimport {\n errorMessage,\n errorResult,\n readContent,\n successResult,\n} from \"@features/utils\";\n\nconst presetValues = [\n \"functions\",\n \"classes\",\n \"imports\",\n \"exports\",\n \"comments\",\n \"strings\",\n \"variables\",\n \"types\",\n] as const;\n\nexport const queryCodeSchema = z\n .object({\n file_path: z\n .string()\n .optional()\n .describe(\n \"Path to the file to query (either file_path or content required)\",\n ),\n content: z\n .string()\n .optional()\n .describe(\n \"Code content to query directly (either file_path or content required)\",\n ),\n language: z\n .string()\n .optional()\n .describe(\"Language name (auto-detected from file path if not provided)\"),\n query: z\n .string()\n .optional()\n .describe(\"SCM query pattern (either query or preset required)\"),\n preset: z\n .enum(presetValues)\n .optional()\n .describe(\n \"Preset query name: functions, classes, imports, exports, comments, strings, variables, types\",\n ),\n max_matches: z\n .number()\n .int()\n .positive()\n .optional()\n .describe(\"Maximum number of matches to return\"),\n })\n .refine((data) => data.file_path ?? data.content, {\n message: \"Either file_path or content must be provided\",\n })\n .refine((data) => data.query ?? 
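// A note on the either/or refines used by parseAstSchema and queryCodeSchema
// above: zod evaluates the refine's return value by truthiness, and ?? only
// skips null/undefined — so an explicit empty-string file_path
// short-circuits to "" and fails validation even when content is present.
// Illustrative safeParse calls (inputs are made up):
const okInput = parseAstSchema.safeParse({ content: "const x = 1;" });        // success: true
const neither = parseAstSchema.safeParse({});                                 // success: false
const emptyPath = parseAstSchema.safeParse({ file_path: "", content: "x" }); // also false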
data.preset, {\n message: \"Either query or preset must be provided\",\n });\n\nexport type QueryCodeInput = z.infer<typeof queryCodeSchema>;\n\nexport async function execute(input: QueryCodeInput): Promise<FeatureResult> {\n const {\n file_path,\n content: inputContent,\n language,\n query,\n preset,\n max_matches,\n } = input;\n\n // Get content\n const contentResult = readContent(file_path, inputContent);\n if (!contentResult.success) {\n return { success: false, error: contentResult.error };\n }\n\n try {\n // Parse the code\n const parseResult = await parseCode(contentResult.content, {\n language,\n filePath: file_path,\n });\n\n // Execute query\n let result;\n if (preset) {\n // Check if preset is available for this language\n const availablePresets = getAvailablePresets(parseResult.language);\n if (!availablePresets.includes(preset)) {\n return errorMessage(\n `Preset '${preset}' is not available for ${parseResult.language}. Available presets: ${availablePresets.join(\", \")}`,\n );\n }\n\n result = executePresetQuery(\n parseResult.tree,\n parseResult.languageInstance,\n parseResult.language,\n preset as QueryPreset,\n { maxMatches: max_matches },\n );\n } else if (query) {\n result = executeQuery(\n parseResult.tree,\n parseResult.languageInstance,\n query,\n parseResult.language,\n { maxMatches: max_matches },\n );\n } else {\n return errorMessage(\"Either query or preset must be provided\");\n }\n\n return successResult(\n {\n matches: result.matches,\n count: result.count,\n language: result.language,\n query: result.query,\n },\n `Found ${String(result.count)} match${result.count === 1 ? \"\" : \"es\"} in ${parseResult.language} code`,\n );\n } catch (error) {\n return errorResult(\"query\", error);\n }\n}\n\nexport const queryCodeFeature: Feature<typeof queryCodeSchema> = {\n name: \"query_code\",\n description:\n \"Execute Tree-sitter SCM queries on code to find patterns. Use preset queries (functions, classes, imports, exports, comments, strings, variables, types) or custom SCM query patterns.\",\n schema: queryCodeSchema,\n execute,\n};\n","import { z } from \"zod\";\n\nimport { parseCode } from \"@core/parser\";\nimport { extractSymbols, type SymbolFilter } from \"@core/symbols\";\n\nimport type { Feature, FeatureResult } from \"@features/types\";\nimport { errorResult, readContent, successResult } from \"@features/utils\";\n\nconst symbolTypeValues = [\n \"function\",\n \"class\",\n \"variable\",\n \"constant\",\n \"interface\",\n \"type\",\n \"enum\",\n \"method\",\n \"property\",\n] as const;\n\nexport const listSymbolsSchema = z\n .object({\n file_path: z\n .string()\n .optional()\n .describe(\n \"Path to the file to analyze (either file_path or content required)\",\n ),\n content: z\n .string()\n .optional()\n .describe(\n \"Code content to analyze directly (either file_path or content required)\",\n ),\n language: z\n .string()\n .optional()\n .describe(\"Language name (auto-detected from file path if not provided)\"),\n types: z\n .array(z.enum(symbolTypeValues))\n .optional()\n .describe(\n \"Filter by symbol types: function, class, variable, constant, interface, type, enum, method, property\",\n ),\n })\n .refine((data) => data.file_path ?? 
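// Usage sketch for query_code above, showing both invocation styles. The SCM
// string is ordinary Tree-sitter query syntax for the TypeScript grammar;
// the snippet being queried and the option values are illustrative.
async function demoQueryCode() {
  const src = "export function greet(name: string) { return name; }";

  // 1) Preset query — checked against the presets available for the language.
  await queryCodeFeature.execute({ content: src, language: "typescript", preset: "functions" });

  // 2) Raw SCM pattern — captures each function declaration and its name.
  await queryCodeFeature.execute({
    content: src,
    language: "typescript",
    query: "(function_declaration name: (identifier) @name) @fn",
    max_matches: 10,
  });
}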
data.content, {\n message: \"Either file_path or content must be provided\",\n });\n\nexport type ListSymbolsInput = z.infer<typeof listSymbolsSchema>;\n\nexport async function execute(input: ListSymbolsInput): Promise<FeatureResult> {\n const { file_path, content: inputContent, language, types } = input;\n\n // Get content using shared helper\n const contentResult = readContent(file_path, inputContent);\n if (!contentResult.success) {\n return { success: false, error: contentResult.error };\n }\n\n try {\n // Parse the code\n const parseResult = await parseCode(contentResult.content, {\n language,\n filePath: file_path,\n });\n\n // Build filter\n const filter: SymbolFilter = {};\n if (types && types.length > 0) {\n filter.types = types;\n }\n\n // Extract symbols\n const { symbols, summary } = extractSymbols(\n parseResult.tree,\n parseResult.languageInstance,\n parseResult.language,\n filter,\n );\n\n return successResult(\n {\n symbols,\n summary,\n language: parseResult.language,\n },\n `Found ${String(summary.total)} symbol${summary.total === 1 ? \"\" : \"s\"} in ${parseResult.language} code`,\n );\n } catch (error) {\n return errorResult(\"extract symbols\", error);\n }\n}\n\nexport const listSymbolsFeature: Feature<typeof listSymbolsSchema> = {\n name: \"list_symbols\",\n description:\n \"Extract all code symbols (functions, classes, variables, etc.) from a file. Returns structured information including name, type, location, and signature for each symbol.\",\n schema: listSymbolsSchema,\n execute,\n};\n","/**\n * LangChain text splitter fallback for languages without Tree-sitter support\n */\nimport {\n RecursiveCharacterTextSplitter,\n type SupportedTextSplitterLanguage,\n} from \"@langchain/textsplitters\";\n\nimport { loadJsonConfig, registerCache } from \"@core/utils\";\n\ninterface LangChainConfig {\n mapping: Record<string, string>;\n supported: string[];\n generic: string[];\n}\n\ninterface FullConfig {\n langchain: LangChainConfig;\n}\n\nlet langchainConfig: LangChainConfig | null = null;\n\nfunction loadConfig(): LangChainConfig {\n if (langchainConfig) {\n return langchainConfig;\n }\n\n const defaultConfig: FullConfig = {\n langchain: { mapping: {}, supported: [], generic: [] },\n };\n const fullConfig = loadJsonConfig<FullConfig>(\n \"languages.json\",\n defaultConfig,\n );\n langchainConfig = fullConfig.langchain;\n return langchainConfig;\n}\n\nexport interface TextChunk {\n content: string;\n startLine: number;\n endLine: number;\n index: number;\n}\n\nexport interface TextSplitResult {\n chunks: TextChunk[];\n count: number;\n language: string;\n method: \"text-splitter\";\n}\n\nexport interface TextSplitOptions {\n chunkSize?: number;\n chunkOverlap?: number;\n}\n\nexport function isTextSplitterLanguage(language: string): boolean {\n const config = loadConfig();\n return (\n language in config.mapping ||\n config.supported.includes(language) ||\n config.generic.includes(language)\n );\n}\n\nexport function getTextSplitterLanguage(\n language: string,\n): SupportedTextSplitterLanguage | undefined {\n const config = loadConfig();\n\n if (language in config.mapping) {\n return config.mapping[language] as SupportedTextSplitterLanguage;\n }\n if (config.supported.includes(language)) {\n return language as SupportedTextSplitterLanguage;\n }\n return undefined;\n}\n\nfunction calculateLineNumbers(\n fullContent: string,\n chunkContent: string,\n startSearchIndex: number,\n): { startLine: number; endLine: number; foundIndex: number } {\n const chunkIndex = 
fullContent.indexOf(chunkContent, startSearchIndex);\n const actualIndex = chunkIndex >= 0 ? chunkIndex : startSearchIndex;\n const beforeChunk = fullContent.slice(0, actualIndex);\n const startLine = (beforeChunk.match(/\\n/g) ?? []).length + 1;\n const chunkLines = (chunkContent.match(/\\n/g) ?? []).length;\n return {\n startLine,\n endLine: startLine + chunkLines,\n foundIndex: actualIndex + chunkContent.length,\n };\n}\n\nexport async function splitCode(\n content: string,\n language: string,\n options: TextSplitOptions = {},\n): Promise<TextSplitResult> {\n const { chunkSize = 1000, chunkOverlap = 200 } = options;\n const splitterLanguage = getTextSplitterLanguage(language);\n\n const splitter = splitterLanguage\n ? RecursiveCharacterTextSplitter.fromLanguage(splitterLanguage, {\n chunkSize,\n chunkOverlap,\n })\n : new RecursiveCharacterTextSplitter({ chunkSize, chunkOverlap });\n\n const docs = await splitter.createDocuments([content]);\n const chunks: TextChunk[] = [];\n let searchIndex = 0;\n\n for (const [i, doc] of docs.entries()) {\n const { startLine, endLine, foundIndex } = calculateLineNumbers(\n content,\n doc.pageContent,\n searchIndex,\n );\n searchIndex = foundIndex;\n chunks.push({ content: doc.pageContent, startLine, endLine, index: i });\n }\n\n return { chunks, count: chunks.length, language, method: \"text-splitter\" };\n}\n\nexport function getSeparators(language: string): string[] {\n const splitterLanguage = getTextSplitterLanguage(language);\n return splitterLanguage\n ? RecursiveCharacterTextSplitter.getSeparatorsForLanguage(splitterLanguage)\n : [];\n}\n\nexport function clearConfigCache(): void {\n langchainConfig = null;\n}\n\n// Register cache for centralized clearing\nregisterCache(\"fallback:config\", clearConfigCache);\n","/**\n * Unified Parser Module\n *\n * Provides a single interface for code parsing with automatic fallback:\n * 1. Tree-sitter (AST parsing) - for supported languages\n * 2. LangChain text splitter - for unsupported languages but known file types\n * 3. 
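// Usage sketch for the fallback splitter above. Whether "sql" gets
// language-aware separators depends on languages.json; for an unknown
// language splitCode quietly uses the generic recursive splitter, so the
// call succeeds either way. Content and sizes are illustrative.
async function demoSplit() {
  const sql = ["SELECT id, name", "FROM users", "WHERE active = 1;"].join("\n");
  const { chunks, count, method } = await splitCode(sql, "sql", {
    chunkSize: 64,
    chunkOverlap: 16,
  });
  // method === "text-splitter"; each chunk carries 1-based startLine/endLine
  // recovered by calculateLineNumbers' forward indexOf scan, which keeps
  // duplicated chunk text (from overlap) mapped to the right occurrence.
  console.log(count, method, chunks[0]?.startLine);
}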
Generic text splitting - for any other text files\n */\nimport { readFileSync } from \"fs\";\nimport { extname } from \"path\";\n\nimport type { Language, Tree } from \"web-tree-sitter\";\n\nimport type { ASTNode } from \"@core/ast/types\";\nimport {\n isTextSplitterLanguage,\n splitCode,\n type TextChunk,\n} from \"@core/fallback\";\nimport {\n getASTRoot,\n getLanguageFromPath,\n isLanguageSupported,\n parseCode,\n} from \"@core/parser\";\nimport {\n extractSymbolsFromTags,\n findClasses,\n findFunctions,\n} from \"@core/queries\";\nimport {\n DEFAULT_CHUNK_OVERLAP,\n DEFAULT_CHUNK_SIZE,\n SKIP_KEYWORDS,\n} from \"@core/constants\";\nimport { loadJsonConfig, registerCache } from \"@core/utils\";\n\n// Centralized config types\ninterface LanguagesConfig {\n treesitter: Record<\n string,\n {\n wasm: string;\n queries: string;\n extensions: string[];\n aliases?: string[];\n }\n >;\n fallbackExtensions: Record<string, string>;\n specialFilenames: Record<string, string>;\n binaryExtensions: string[];\n}\n\n// Config cache\nlet configCache: LanguagesConfig | null = null;\nlet binaryExtensionsCache: Set<string> | null = null;\nlet extensionToLanguageCache: Record<string, string> | null = null;\nlet specialFilenamesCache: Record<string, string> | null = null;\n\nfunction loadConfig(): LanguagesConfig {\n if (configCache) {\n return configCache;\n }\n\n configCache = loadJsonConfig<LanguagesConfig>(\"languages.json\", {\n treesitter: {},\n fallbackExtensions: {},\n specialFilenames: {},\n binaryExtensions: [],\n });\n return configCache;\n}\n\nfunction getBinaryExtensions(): Set<string> {\n if (binaryExtensionsCache) {\n return binaryExtensionsCache;\n }\n const config = loadConfig();\n binaryExtensionsCache = new Set(config.binaryExtensions);\n return binaryExtensionsCache;\n}\n\nfunction getExtensionToLanguage(): Record<string, string> {\n if (extensionToLanguageCache) {\n return extensionToLanguageCache;\n }\n const config = loadConfig();\n extensionToLanguageCache = config.fallbackExtensions;\n return extensionToLanguageCache;\n}\n\nfunction getSpecialFilenames(): Record<string, string> {\n if (specialFilenamesCache) {\n return specialFilenamesCache;\n }\n const config = loadConfig();\n specialFilenamesCache = config.specialFilenames;\n return specialFilenamesCache;\n}\n\n/** Clear caches (for testing) */\nexport function clearUnifiedCache(): void {\n configCache = null;\n binaryExtensionsCache = null;\n extensionToLanguageCache = null;\n specialFilenamesCache = null;\n}\n\n/**\n * Unified parse result - works for both Tree-sitter and fallback\n */\nexport interface UnifiedParseResult {\n /** Parsing method used */\n method: \"tree-sitter\" | \"langchain\" | \"generic\";\n /** Language detected */\n language: string;\n /** File path */\n filePath: string;\n /** Original content */\n content: string;\n /** Line count */\n lineCount: number;\n\n // Tree-sitter specific (only when method === \"tree-sitter\")\n /** Tree-sitter tree (if available) */\n tree?: Tree;\n /** Language instance (if available) */\n languageInstance?: Language;\n /** AST root node (if available) */\n ast?: ASTNode;\n\n // Fallback specific (only when method !== \"tree-sitter\")\n /** Text chunks (if using fallback) */\n chunks?: TextChunk[];\n}\n\n/**\n * Unified symbol extraction result\n */\nexport interface UnifiedSymbols {\n /** Extraction method used */\n method: \"tree-sitter\" | \"regex\";\n /** Functions found */\n functions: UnifiedSymbol[];\n /** Classes found */\n classes: UnifiedSymbol[];\n /** All symbols 
*/\n all: UnifiedSymbol[];\n}\n\n/**\n * Unified symbol representation\n */\nexport interface UnifiedSymbol {\n name: string;\n type: \"function\" | \"method\" | \"class\" | \"interface\" | \"module\" | \"variable\";\n line: number;\n endLine?: number;\n signature?: string;\n documentation?: string;\n}\n\n/**\n * Parse options\n */\nexport interface UnifiedParseOptions {\n /** Force a specific language (skip auto-detection) */\n language?: string;\n /** Include AST in result (Tree-sitter only, can be verbose) */\n includeAst?: boolean;\n /** Max AST depth (Tree-sitter only) */\n astMaxDepth?: number;\n /** Chunk size for fallback splitting */\n chunkSize?: number;\n /** Chunk overlap for fallback splitting */\n chunkOverlap?: number;\n}\n\n/**\n * Extract a meaningful name from AST node text, skipping keywords\n */\nfunction extractNameFromNode(text: string): string {\n // Split by common delimiters\n const parts = text.split(/[(\\s{<:=[\\]]/);\n\n // Find first non-keyword identifier\n for (const part of parts) {\n const trimmed = part.trim();\n if (\n trimmed &&\n !SKIP_KEYWORDS.has(trimmed.toLowerCase()) &&\n /^[a-zA-Z_]/.test(trimmed)\n ) {\n return trimmed;\n }\n }\n\n return \"anonymous\";\n}\n\n/**\n * Check if a file is binary based on extension\n */\nexport function isBinaryFile(filePath: string): boolean {\n const ext = extname(filePath).toLowerCase();\n return getBinaryExtensions().has(ext);\n}\n\n/**\n * Detect language from file path (extended detection)\n */\nexport function detectLanguage(filePath: string): string {\n // First try Tree-sitter supported languages\n const tsConfig = getLanguageFromPath(filePath);\n if (tsConfig) {\n return tsConfig.name;\n }\n\n // Then try extension mapping from config\n const ext = extname(filePath).toLowerCase();\n const extensionMap = getExtensionToLanguage();\n const mappedLang = extensionMap[ext];\n if (mappedLang) {\n return mappedLang;\n }\n\n // Check for special filenames from config\n const filename = filePath.split(/[/\\\\]/).pop()?.toLowerCase() ?? \"\";\n const specialFilenames = getSpecialFilenames();\n const specialLang = specialFilenames[filename];\n if (specialLang) {\n return specialLang;\n }\n\n // Check for patterns like .env.local, dockerfile.prod\n if (filename.startsWith(\".env.\") || filename === \".env\") {\n return \"env\";\n }\n if (filename.startsWith(\"dockerfile.\") || filename === \"dockerfile\") {\n return \"dockerfile\";\n }\n\n return \"text\";\n}\n\n/**\n * Parse a file with automatic fallback\n *\n * 1. If Tree-sitter supports the language → full AST parsing\n * 2. If LangChain supports the language → text splitting with language separators\n * 3. LangChain generic → text splitting with default separators\n * 4. If all fail → returns undefined (file is ignored)\n */\nexport async function parseFile(\n filePath: string,\n options: UnifiedParseOptions = {},\n): Promise<UnifiedParseResult | undefined> {\n // Check for binary files - ignore them\n if (isBinaryFile(filePath)) {\n return undefined;\n }\n\n // Read file content\n let content: string;\n try {\n const rawContent = readFileSync(filePath, \"utf-8\");\n // Normalize line endings (handle CRLF and CR)\n content = rawContent.replace(/\\r\\n/g, \"\\n\").replace(/\\r/g, \"\\n\");\n } catch {\n // Cannot read file - ignore\n return undefined;\n }\n\n const lineCount = content.split(\"\\n\").length;\n\n // Detect language\n const language = options.language ?? 
detectLanguage(filePath);\n\n // Try Tree-sitter first\n if (isLanguageSupported(language)) {\n try {\n const parseResult = await parseCode(content, { language });\n\n const result: UnifiedParseResult = {\n method: \"tree-sitter\",\n language: parseResult.language,\n filePath,\n content,\n lineCount,\n tree: parseResult.tree,\n languageInstance: parseResult.languageInstance,\n };\n\n // Include AST if requested\n if (options.includeAst) {\n result.ast = getASTRoot(parseResult, options.astMaxDepth);\n }\n\n return result;\n } catch {\n // Tree-sitter failed, fall through to LangChain\n }\n }\n\n // Try LangChain with detected language\n const {\n chunkSize = DEFAULT_CHUNK_SIZE,\n chunkOverlap = DEFAULT_CHUNK_OVERLAP,\n } = options;\n\n if (isTextSplitterLanguage(language)) {\n try {\n const splitResult = await splitCode(content, language, {\n chunkSize,\n chunkOverlap,\n });\n\n return {\n method: \"langchain\",\n language,\n filePath,\n content,\n lineCount,\n chunks: splitResult.chunks,\n };\n } catch {\n // LangChain with language failed, try generic\n }\n }\n\n // Try LangChain generic\n try {\n const splitResult = await splitCode(content, \"generic\", {\n chunkSize,\n chunkOverlap,\n });\n\n return {\n method: \"generic\",\n language,\n filePath,\n content,\n lineCount,\n chunks: splitResult.chunks,\n };\n } catch {\n // All methods failed - ignore file\n return undefined;\n }\n}\n\n/**\n * Parse content directly (without file)\n *\n * Returns undefined if content cannot be parsed\n */\nexport async function parseContent(\n content: string,\n language: string,\n options: Omit<UnifiedParseOptions, \"language\"> = {},\n): Promise<Omit<UnifiedParseResult, \"filePath\"> | undefined> {\n // Normalize line endings (handle CRLF and CR)\n const normalizedContent = content.replace(/\\r\\n/g, \"\\n\").replace(/\\r/g, \"\\n\");\n const lineCount = normalizedContent.split(\"\\n\").length;\n\n // Try Tree-sitter first\n if (isLanguageSupported(language)) {\n try {\n const parseResult = await parseCode(content, { language });\n\n const result: Omit<UnifiedParseResult, \"filePath\"> = {\n method: \"tree-sitter\",\n language: parseResult.language,\n content,\n lineCount,\n tree: parseResult.tree,\n languageInstance: parseResult.languageInstance,\n };\n\n if (options.includeAst) {\n result.ast = getASTRoot(parseResult, options.astMaxDepth);\n }\n\n return result;\n } catch {\n // Tree-sitter failed, fall through to LangChain\n }\n }\n\n // Try LangChain with detected language\n const {\n chunkSize = DEFAULT_CHUNK_SIZE,\n chunkOverlap = DEFAULT_CHUNK_OVERLAP,\n } = options;\n\n if (isTextSplitterLanguage(language)) {\n try {\n const splitResult = await splitCode(content, language, {\n chunkSize,\n chunkOverlap,\n });\n\n return {\n method: \"langchain\",\n language,\n content,\n lineCount,\n chunks: splitResult.chunks,\n };\n } catch {\n // LangChain with language failed, try generic\n }\n }\n\n // Try LangChain generic\n try {\n const splitResult = await splitCode(content, \"generic\", {\n chunkSize,\n chunkOverlap,\n });\n\n return {\n method: \"generic\",\n language,\n content,\n lineCount,\n chunks: splitResult.chunks,\n };\n } catch {\n // All methods failed - ignore\n return undefined;\n }\n}\n\n/**\n * Extract symbols with automatic fallback\n *\n * 1. Tree-sitter → accurate AST-based extraction\n * 2. 
Regex fallback → best-effort pattern matching\n */\nexport function extractSymbols(result: UnifiedParseResult): UnifiedSymbols {\n // Tree-sitter path\n if (\n result.method === \"tree-sitter\" &&\n result.tree &&\n result.languageInstance\n ) {\n const { definitions } = extractSymbolsFromTags(\n result.tree,\n result.languageInstance,\n result.language,\n );\n\n const functions: UnifiedSymbol[] = [];\n const classes: UnifiedSymbol[] = [];\n const all: UnifiedSymbol[] = [];\n\n for (const def of definitions) {\n const symbol: UnifiedSymbol = {\n name: def.name,\n type: def.kind as UnifiedSymbol[\"type\"],\n line: def.node.start.line,\n endLine: def.node.end.line,\n documentation: def.documentation,\n };\n\n all.push(symbol);\n\n if (def.kind === \"function\" || def.kind === \"method\") {\n functions.push(symbol);\n } else if (\n def.kind === \"class\" ||\n def.kind === \"interface\" ||\n def.kind === \"module\"\n ) {\n classes.push(symbol);\n }\n }\n\n // If tags.scm didn't find anything, try direct AST queries\n if (functions.length === 0) {\n const funcNodes = findFunctions(\n result.tree,\n result.languageInstance,\n result.language,\n );\n for (const node of funcNodes) {\n const symbol: UnifiedSymbol = {\n name: extractNameFromNode(node.text),\n type: \"function\",\n line: node.start.line,\n endLine: node.end.line,\n };\n functions.push(symbol);\n all.push(symbol);\n }\n }\n\n if (classes.length === 0) {\n const classNodes = findClasses(\n result.tree,\n result.languageInstance,\n result.language,\n );\n for (const node of classNodes) {\n const symbol: UnifiedSymbol = {\n name: extractNameFromNode(node.text),\n type: \"class\",\n line: node.start.line,\n endLine: node.end.line,\n };\n classes.push(symbol);\n all.push(symbol);\n }\n }\n\n return { method: \"tree-sitter\", functions, classes, all };\n }\n\n // LangChain fallback - no symbol extraction (text splitting only)\n return { method: \"regex\", functions: [], classes: [], all: [] };\n}\n\n/**\n * Get a summary of parsing capabilities for a file\n */\nexport function getParsingCapabilities(filePath: string): {\n language: string;\n method: \"tree-sitter\" | \"langchain\" | \"generic\";\n features: string[];\n} {\n if (isBinaryFile(filePath)) {\n return {\n language: \"binary\",\n method: \"generic\",\n features: [],\n };\n }\n\n const language = detectLanguage(filePath);\n\n if (isLanguageSupported(language)) {\n return {\n language,\n method: \"tree-sitter\",\n features: [\n \"Full AST parsing\",\n \"Accurate symbol extraction\",\n \"Syntax highlighting queries\",\n \"Code navigation\",\n \"Semantic analysis\",\n ],\n };\n }\n\n if (isTextSplitterLanguage(language)) {\n return {\n language,\n method: \"langchain\",\n features: [\n \"Intelligent text splitting\",\n \"Language-aware chunking\",\n \"Basic symbol extraction (regex)\",\n ],\n };\n }\n\n return {\n language,\n method: \"generic\",\n features: [\"Generic text splitting\", \"Basic symbol extraction (regex)\"],\n };\n}\n\n/**\n * Check if a file can be parsed (not binary)\n */\nexport function canParse(filePath: string): boolean {\n return !isBinaryFile(filePath);\n}\n\n/**\n * Get all supported languages with their parsing method\n */\nexport function getSupportedLanguagesInfo(): {\n language: string;\n method: \"tree-sitter\" | \"langchain\";\n extensions: string[];\n}[] {\n const result: {\n language: string;\n method: \"tree-sitter\" | \"langchain\";\n extensions: string[];\n }[] = [];\n\n const config = loadConfig();\n\n // Tree-sitter languages from config\n for 
(const [name, langConfig] of Object.entries(config.treesitter)) {\n result.push({\n language: name,\n method: \"tree-sitter\",\n extensions: langConfig.extensions,\n });\n }\n\n // LangChain languages - group extensions by language from fallbackExtensions\n const langchainExtensions: Record<string, string[]> = {};\n for (const [ext, lang] of Object.entries(config.fallbackExtensions)) {\n // Skip languages already covered by Tree-sitter\n if (config.treesitter[lang]) {\n continue;\n }\n langchainExtensions[lang] ??= [];\n langchainExtensions[lang].push(ext);\n }\n\n for (const [lang, extensions] of Object.entries(langchainExtensions)) {\n result.push({\n language: lang,\n method: \"langchain\",\n extensions,\n });\n }\n\n return result;\n}\n\n// Register cache for centralized clearing\nregisterCache(\"unified:config\", clearUnifiedCache);\n","import { z } from \"zod\";\n\nimport { getASTRoot } from \"@core/parser\";\nimport { extractCodeInfo } from \"@core/symbols\";\nimport {\n canParse,\n extractSymbols,\n getParsingCapabilities,\n parseFile,\n} from \"@core/unified\";\n\nimport type { Feature, FeatureResult } from \"@features/types\";\n\nexport const analyzeFileSchema = z.object({\n file_path: z.string().describe(\"Path to the file to analyze\"),\n include_ast: z\n .boolean()\n .default(false)\n .describe(\"Include full AST in response (default: false, can be verbose)\"),\n include_symbols: z\n .boolean()\n .default(true)\n .describe(\"Include extracted symbols (default: true)\"),\n include_imports: z\n .boolean()\n .default(true)\n .describe(\"Include import statements (default: true)\"),\n include_exports: z\n .boolean()\n .default(true)\n .describe(\"Include export statements (default: true)\"),\n ast_max_depth: z\n .number()\n .int()\n .positive()\n .optional()\n .describe(\"Maximum depth for AST if included\"),\n include_chunks: z\n .boolean()\n .default(false)\n .describe(\"Include text chunks for fallback parsing (default: false)\"),\n});\n\nexport type AnalyzeFileInput = z.input<typeof analyzeFileSchema>;\n\nexport async function execute(\n rawInput: AnalyzeFileInput,\n): Promise<FeatureResult> {\n // Parse with defaults applied\n const input = analyzeFileSchema.parse(rawInput);\n const {\n file_path,\n include_ast,\n include_symbols,\n include_imports,\n include_exports,\n ast_max_depth,\n include_chunks,\n } = input;\n\n try {\n // Check if file can be parsed\n if (!canParse(file_path)) {\n return {\n success: false,\n error: `Cannot parse binary file: ${file_path}`,\n };\n }\n\n // Get parsing capabilities for this file\n const capabilities = getParsingCapabilities(file_path);\n\n // Parse the file with automatic fallback\n const parseResult = await parseFile(file_path, {\n includeAst: include_ast,\n astMaxDepth: ast_max_depth,\n });\n\n // If parsing failed (unsupported file type), return error\n if (!parseResult) {\n return {\n success: false,\n error: `Cannot parse file: ${file_path}. 
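// End-to-end sketch of the three-tier fallback above. Which tier a given file
// lands on is driven entirely by languages.json, so the per-call comments are
// expectations, not guarantees; paths are illustrative.
async function demoUnified() {
  console.log(getParsingCapabilities("src/app.ts")); // expect method: "tree-sitter"
  console.log(getParsingCapabilities("notes.txt"));  // expect "langchain" or "generic"

  const parsed = await parseFile("src/app.ts");
  if (!parsed) return; // binary or unreadable — the file is skipped outright
  if (parsed.method === "tree-sitter") {
    const { functions, classes } = extractSymbols(parsed);
    console.log(functions.length, classes.length);
  } else {
    console.log(parsed.chunks?.length); // fallback tiers yield text chunks instead
  }
}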
Unsupported or unreadable file type.`,\n };\n }\n\n // Build response based on parsing method\n const response: Record<string, unknown> = {\n file_path,\n language: parseResult.language,\n parsing_method: parseResult.method,\n capabilities: capabilities.features,\n metrics: {\n lines: parseResult.lineCount,\n functions: 0,\n classes: 0,\n imports: 0,\n exports: 0,\n },\n };\n\n // Tree-sitter path - full analysis\n if (\n parseResult.method === \"tree-sitter\" &&\n parseResult.tree &&\n parseResult.languageInstance\n ) {\n const codeInfo = extractCodeInfo(\n parseResult.tree,\n parseResult.languageInstance,\n parseResult.language,\n );\n\n response.metrics = {\n lines: parseResult.lineCount,\n functions: codeInfo.symbols.summary.functions,\n classes: codeInfo.symbols.summary.classes,\n imports: codeInfo.imports.length,\n exports: codeInfo.exports.length,\n };\n\n if (include_symbols) {\n response.symbols = codeInfo.symbols.symbols;\n }\n\n if (include_imports) {\n response.imports = codeInfo.imports;\n }\n\n if (include_exports) {\n response.exports = codeInfo.exports;\n }\n\n if (include_ast) {\n response.ast = getASTRoot(\n {\n tree: parseResult.tree,\n language: parseResult.language,\n parser: null as never,\n languageInstance: parseResult.languageInstance,\n },\n ast_max_depth,\n );\n }\n } else {\n // Fallback path - limited analysis\n const symbols = extractSymbols(parseResult);\n\n response.metrics = {\n lines: parseResult.lineCount,\n functions: symbols.functions.length,\n classes: symbols.classes.length,\n imports: 0, // Not available in fallback\n exports: 0, // Not available in fallback\n };\n\n if (include_symbols) {\n response.symbols = symbols.all;\n response.symbol_extraction_method = symbols.method;\n }\n\n if (include_chunks && parseResult.chunks) {\n response.chunks = parseResult.chunks.map((chunk) => ({\n index: chunk.index,\n startLine: chunk.startLine,\n endLine: chunk.endLine,\n preview:\n chunk.content.slice(0, 100) +\n (chunk.content.length > 100 ? \"...\" : \"\"),\n }));\n response.chunk_count = parseResult.chunks.length;\n }\n\n // Note about limited analysis\n response.note = `File parsed using ${parseResult.method} fallback. Some features (imports, exports, full AST) are not available.`;\n }\n\n // Build summary message\n const metrics = response.metrics as Record<string, number>;\n const methodNote =\n parseResult.method !== \"tree-sitter\" ? ` [${parseResult.method}]` : \"\";\n const summary = [\n `${parseResult.language} file${methodNote}`,\n `${String(metrics.lines)} lines`,\n `${String(metrics.functions)} functions`,\n `${String(metrics.classes)} classes`,\n ];\n\n if (parseResult.method === \"tree-sitter\") {\n summary.push(`${String(metrics.imports)} imports`);\n summary.push(`${String(metrics.exports)} exports`);\n }\n\n return {\n success: true,\n data: response,\n message: `Analyzed ${file_path}: ${summary.join(\", \")}`,\n };\n } catch (error) {\n const message = error instanceof Error ? error.message : String(error);\n return {\n success: false,\n error: `Failed to analyze file: ${message}`,\n };\n }\n}\n\nexport const analyzeFileFeature: Feature<typeof analyzeFileSchema> = {\n name: \"analyze_file\",\n description:\n \"Perform a comprehensive analysis of a source code file. Returns symbols, imports, exports, and code metrics. 
Optionally includes the full AST.\",\n schema: analyzeFileSchema,\n execute,\n};\n","export type * from \"@features/types\";\nexport { infoFeature, getServerInfo } from \"@features/info\";\nexport { indexCodebaseFeature } from \"@features/index-codebase\";\nexport { searchCodeFeature } from \"@features/search-code\";\nexport { getIndexStatusFeature } from \"@features/get-index-status\";\nexport { getCallGraphFeature } from \"@features/get-call-graph\";\nexport { updateIndexFeature } from \"@features/update-index\";\n\n// Internal features (used by other features, not exposed via CLI/MCP)\nexport { parseAstFeature } from \"@features/parse-ast\";\nexport { queryCodeFeature } from \"@features/query-code\";\nexport { listSymbolsFeature } from \"@features/list-symbols\";\nexport { analyzeFileFeature } from \"@features/analyze-file\";\n\nimport type { Feature } from \"@features/types\";\nimport { getIndexStatusFeature } from \"@features/get-index-status\";\nimport { indexCodebaseFeature } from \"@features/index-codebase\";\nimport { infoFeature } from \"@features/info\";\nimport { searchCodeFeature } from \"@features/search-code\";\nimport { updateIndexFeature } from \"@features/update-index\";\n\n// Registry of features exposed via CLI and MCP\nexport const features: Feature[] = [\n infoFeature,\n indexCodebaseFeature,\n searchCodeFeature,\n getIndexStatusFeature,\n updateIndexFeature,\n];\n\nexport function getFeature(name: string): Feature | undefined {\n return features.find((f) => f.name === name);\n}\n","import type { McpServer } from \"@modelcontextprotocol/sdk/server/mcp.js\";\nimport type { Feature } from \"@features/types\";\nimport { z } from \"zod\";\n\nfunction zodToMcpSchema(schema: z.ZodType): Record<string, z.ZodType> {\n if (schema instanceof z.ZodObject) {\n return schema.shape as Record<string, z.ZodType>;\n }\n return { input: schema };\n}\n\nexport function registerFeatureAsTool(\n server: McpServer,\n feature: Feature,\n): void {\n const mcpSchema = zodToMcpSchema(feature.schema);\n\n // eslint-disable-next-line @typescript-eslint/no-deprecated\n server.tool(feature.name, feature.description, mcpSchema, async (params) => {\n const result = feature.execute(params);\n\n const formatResult = (\n res: Awaited<ReturnType<typeof feature.execute>>,\n ): {\n content: { type: \"text\"; text: string }[];\n isError: boolean;\n } => ({\n content: [\n {\n type: \"text\" as const,\n text: res.message ?? 
JSON.stringify(res.data, null, 2),\n },\n ],\n isError: !res.success,\n });\n\n if (result instanceof Promise) {\n return await result.then(formatResult);\n }\n return formatResult(result);\n });\n}\n","import type { McpServer } from \"@modelcontextprotocol/sdk/server/mcp.js\";\nimport { features } from \"@features\";\nimport { registerFeatureAsTool } from \"@tools/adapter\";\n\nexport function registerTools(server: McpServer): void {\n for (const feature of features) {\n registerFeatureAsTool(server, feature);\n }\n}\n\nexport { features as tools };\n","import type { McpServer } from \"@modelcontextprotocol/sdk/server/mcp.js\";\nimport { getServerInfo } from \"@features\";\n\nexport function registerResources(server: McpServer): void {\n // eslint-disable-next-line @typescript-eslint/no-deprecated\n server.resource(\"server_info\", \"src://server/info\", (uri) => {\n const info = getServerInfo();\n return {\n contents: [\n {\n uri: uri.href,\n mimeType: \"application/json\",\n text: JSON.stringify(info, null, 2),\n },\n ],\n };\n });\n}\n","import type { McpServer } from \"@modelcontextprotocol/sdk/server/mcp.js\";\n\n/**\n * Register MCP prompts\n *\n * Prompts are reusable templates that help AI assistants understand\n * how to use SRC effectively for code search and analysis.\n */\nexport function registerPrompts(server: McpServer): void {\n // Main overview prompt - helps AI understand when to use SRC\n server.registerPrompt(\n \"src-overview\",\n {\n title: \"SRC Overview\",\n description:\n \"Learn about SRC capabilities and when to use it for code search and analysis\",\n },\n () => ({\n messages: [\n {\n role: \"user\",\n content: {\n type: \"text\",\n text: `# SRC (Structured Repo Context) - Overview\n\n## What is SRC?\nSRC is a semantic code search MCP server. It indexes codebases and provides intelligent search using:\n- **Vector embeddings** for semantic similarity (understands meaning, not just keywords)\n- **BM25 keyword search** for exact matches\n- **Hybrid search** combining both with RRF fusion\n- **LLM re-ranking** for optimal relevance\n- **Call graph analysis** showing function relationships\n\n## When to use SRC?\n\n**USE SRC when the user wants to:**\n- Find code by meaning/concept (\"find authentication logic\", \"where is error handling\")\n- Understand code relationships (\"what calls this function\", \"what does this function call\")\n- Search across a large codebase\n- Find similar code patterns\n- Explore unfamiliar code\n\n**DON'T USE SRC for:**\n- Reading a specific file (use file read tools instead)\n- Simple text search in a single file (use grep/search)\n- Non-code queries\n\n## Typical Workflow\n\n1. **Check status**: Use \\`get_index_status\\` to see if index exists\n2. **Index if needed**: Use \\`index_codebase\\` (only once per project)\n3. 
**Search**: Use \\`search_code\\` with natural language queries\n\nNote: When using \\`serve\\` mode, the server auto-indexes on startup and watches for file changes.\n\n## Supported Languages\n- **Full AST support (18)**: JavaScript, TypeScript, Python, Rust, Go, Java, C, C++, C#, Ruby, PHP, Kotlin, Scala, Swift, HTML, Svelte, OCaml\n- **Text splitting (16+)**: Markdown, LaTeX, Solidity, Haskell, Elixir, and more\n- **Generic (30+)**: Config files, shell scripts, SQL, and more\n\n## Tips\n- Use natural language queries: \"authentication middleware\" not \"auth*\"\n- The hybrid search mode (default) works best for most queries\n- Call context is included by default - shows who calls what`,\n },\n },\n ],\n }),\n );\n\n // Workflow prompt - step by step guide\n server.registerPrompt(\n \"code-search-workflow\",\n {\n title: \"Code Search Workflow\",\n description: \"Step-by-step guide for searching code with SRC\",\n },\n () => ({\n messages: [\n {\n role: \"user\",\n content: {\n type: \"text\",\n text: `# Code Search Workflow with SRC\n\n## Step 1: Check Index\n\\`\\`\\`\nget_index_status()\n\\`\\`\\`\n\n## Step 2: Index if Needed\nIf no index exists:\n\\`\\`\\`\nindex_codebase()\n\\`\\`\\`\n\n## Step 3: Search\n\\`\\`\\`\nsearch_code(query: \"your search query here\")\n\\`\\`\\`\n\n## search_code Arguments\n\n| Argument | Type | Default | Description |\n|----------|------|---------|-------------|\n| query | string | required | Natural language search query |\n| limit | number | 10 | Max results to return |\n| mode | \"hybrid\" / \"vector\" / \"fts\" | \"hybrid\" | Search mode |\n| rerank | boolean | true | LLM re-ranking for better relevance |\n| includeCallContext | boolean | true | Include caller/callee info |\n| threshold | number | - | Distance threshold (vector mode only) |\n\n## Search Modes\n- **hybrid** (default): Vector + BM25 + RRF fusion - best overall\n- **vector**: Semantic similarity only - good for conceptual queries\n- **fts**: Keyword search only - good for exact identifiers\n\n## Examples\n\\`\\`\\`\n// Find authentication code\nsearch_code(query: \"user authentication and login\")\n\n// More results\nsearch_code(query: \"error handling\", limit: 20)\n\n// Exact identifier search\nsearch_code(query: \"UserAuthService\", mode: \"fts\")\n\n// Without call context (faster)\nsearch_code(query: \"database queries\", includeCallContext: false)\n\\`\\`\\``,\n },\n },\n ],\n }),\n );\n\n // Search tips prompt\n server.registerPrompt(\n \"search-tips\",\n {\n title: \"Search Tips\",\n description: \"Tips for writing effective code search queries\",\n },\n () => ({\n messages: [\n {\n role: \"user\",\n content: {\n type: \"text\",\n text: `# Effective Code Search Tips\n\n## Good Query Examples\n\n| Goal | Good Query | Why |\n|------|------------|-----|\n| Find auth code | \"user authentication and login validation\" | Describes the concept |\n| Find error handling | \"error handling and exception catching\" | Uses related terms |\n| Find API endpoints | \"REST API route handlers\" | Specifies the pattern |\n| Find database code | \"database query and data persistence\" | Covers the domain |\n| Find a function | \"calculateTotalPrice function\" | Includes the name |\n\n## Query Writing Tips\n\n1. **Be descriptive, not literal**\n - Good: \"user password validation and hashing\"\n - Bad: \"validatePassword\"\n\n2. **Include context**\n - Good: \"authentication middleware for Express routes\"\n - Bad: \"auth middleware\"\n\n3. 
**Use domain language**\n - Good: \"shopping cart checkout process\"\n - Bad: \"cart function\"\n\n4. **Combine concepts**\n - Good: \"file upload with size validation and error handling\"\n - Bad: \"upload\"\n\n## Search Mode Selection\n\n| Mode | Use When |\n|------|----------|\n| **hybrid** | Default choice, works for most queries |\n| **vector** | Conceptual searches like \"code that handles retries\" |\n| **fts** | Exact identifiers like \"UserAuthService\" |\n\n## Understanding Results\n\nEach result includes:\n- **content**: The matching code chunk\n- **filePath**: Source file location\n- **startLine/endLine**: Line numbers\n- **symbolName/Type**: Function or class name if detected\n- **score**: Relevance score (higher = better match)\n- **callers**: Functions that call this code\n- **callees**: Functions this code calls`,\n },\n },\n ],\n }),\n );\n}\n","import { McpServer } from \"@modelcontextprotocol/sdk/server/mcp.js\";\nimport { StdioServerTransport } from \"@modelcontextprotocol/sdk/server/stdio.js\";\nimport { config } from \"@config\";\nimport { registerTools } from \"@tools\";\nimport { registerResources } from \"@resources\";\nimport { registerPrompts } from \"@prompts\";\nimport { logger } from \"@utils\";\n\nexport function createServer(): McpServer {\n const server = new McpServer({\n name: config.name,\n version: config.version,\n });\n\n registerTools(server);\n registerResources(server);\n registerPrompts(server);\n\n return server;\n}\n\nexport async function startServer(): Promise<void> {\n const server = createServer();\n const transport = new StdioServerTransport();\n\n await server.connect(transport);\n logger.info(`${config.name} v${config.version} started`);\n}\n"],"mappings":";;;;;;;;;;;;;;;;;;;;AAGA,MAAa,SAAuB;CAClC,MAAM;CACN,UAAU;CACV,SAAS;CACT,aACE;CACH;AAED,MAAM,UAAU,QAAQ,IAAI;AAC5B,MAAM,cAAc,QAAQ,IAAI;AAEhC,MAAa,MAAM;CACjB,OAAO,YAAY;CACnB,QAAQ,YAAY;CACpB,UAAU,eAAe;CAC1B;;;;AAKD,MAAa,mBAAoC;CAC/C,eAAe,QAAQ,IAAI,mBAAmB;CAC9C,gBAAgB,QAAQ,IAAI,mBAAmB;CAC/C,qBAAqB,OAAO,QAAQ,IAAI,qBAAqB,IAAI;CACjE,kBAAkB,OAAO,QAAQ,IAAI,WAAW,IAAI;CACpD,qBAAqB,OAAO,QAAQ,IAAI,cAAc,IAAI;CAC1D,WAAW,OAAO,QAAQ,IAAI,qBAAqB,IAAI;CAEvD,aAAa,QAAQ,IAAI,gBAAgB;CAC1C;;;;AAKD,MAAa,oBAAoB;CAE/B,yBAAyB,QAAQ,IAAI,0BAA0B;CAE/D,qBAAqB,OAAO,QAAQ,IAAI,uBAAuB,IAAI;CAEnE,qBACE,OAAO,QAAQ,IAAI,kCAAkC,IAAI;CAC5D;;;;ACzCD,MAAa,aAAa,EAAE,OAAO,EACjC,QAAQ,EACL,KAAK,CAAC,QAAQ,OAAO,CAAC,CACtB,UAAU,CACV,QAAQ,OAAO,CACf,SAAS,gBAAgB,EAC7B,CAAC;AAWF,SAAgB,gBAA4B;AAC1C,QAAO;EACL,MAAM,OAAO;EACb,UAAU,OAAO;EACjB,SAAS,OAAO;EAChB,aAAa,OAAO;EACrB;;AAGH,SAAgBA,UAAQ,OAAiC;CACvD,MAAM,OAAO,eAAe;AAE5B,KAAI,MAAM,WAAW,OACnB,QAAO;EACL,SAAS;EACT,MAAM;EACN,SAAS,KAAK,UAAU,MAAM,MAAM,EAAE;EACvC;CAGH,MAAM,cAAc,KAAK,eAAe;AAIxC,QAAO;EACL,SAAS;EACT,MAAM;EACN,SALA,GAAG,KAAK,SAAS,IAAI,KAAK,KAAK,KAAK,KAAK,QAAQ,IAAI,cAAc,MAAM;EAM1E;;AAGH,MAAa,cAA0C;CACrD,MAAM;CACN,aACE;CACF,QAAQ;CACR;CACD;;;;;;;;AClDD,IAAa,eAAb,MAA0B;CACxB,AAAiB;CACjB,AAAiB;CAEjB,YACE,UACA;AACA,OAAK,SAAS,IAAI,OAAO,EAAE,MAAMC,SAAO,eAAe,CAAC;AACxD,OAAK,QAAQA,SAAO;;;;;CAMtB,MAAM,MAAM,MAAiC;EAM3C,MAAM,UALW,MAAM,KAAK,OAAO,MAAM;GACvC,OAAO,KAAK;GACZ,OAAO;GACR,CAAC,EAEsB,WAAW;AACnC,MAAI,CAAC,OACH,OAAM,IAAI,MAAM,oCAAoC;AAEtD,SAAO;;;;;CAMT,MAAM,WAAW,OAAsC;AAMrD,UALiB,MAAM,KAAK,OAAO,MAAM;GACvC,OAAO,KAAK;GACZ,OAAO;GACR,CAAC,EAEc;;;;;CAMlB,MAAM,cAAwD;AAC5D,MAAI;AAOF,OAAI,EANa,MAAM,KAAK,OAAO,MAAM,EACjB,OACG,MACxB,MAAM,EAAE,SAAS,KAAK,SAAS,EAAE,KAAK,WAAW,GAAG,KAAK,MAAM,GAAG,CACpE,CAGC,QAAO;IACL,IAAI;IACJ,OAAO,UAAU,KAAK,MAAM,gCAAgC,KAAK;IAClE;AAGH,UAAO,EAAE,IAAI,MAAM;WACZ,OAAO;AAEd,UAAO;IAAE
KAAK,QAAQ;AACnB,IAAG,IAAI;EAAC;EAAgB;EAAQ;EAAQ;EAAS;EAAa,CAAC;CAE/D,MAAM,gBAAgB,KAAK,KAAK,WAAW,aAAa;AACxD,KAAI,GAAG,WAAW,cAAc,EAAE;EAChC,MAAM,UAAU,GAAG,aAAa,eAAe,QAAQ;AACvD,KAAG,IAAI,QAAQ;;AAGjB,QAAO;;;;;AAMT,SAASC,WAAS,MAAuB;AACvC,QAAO,KAAK,WAAW,IAAI;;;;;AAM7B,SAASC,eAAa,KAAa,IAAY,SAA2B;CACxE,MAAM,QAAkB,EAAE;CAE1B,MAAM,UAAU,GAAG,YAAY,KAAK,EAAE,eAAe,MAAM,CAAC;AAE5D,MAAK,MAAM,SAAS,SAAS;AAC3B,MAAID,WAAS,MAAM,KAAK,CACtB;EAGF,MAAM,WAAW,KAAK,KAAK,KAAK,MAAM,KAAK;EAC3C,MAAM,eAAe,KAAK,SAAS,SAAS,SAAS,CAAC,QAAQ,OAAO,IAAI;AAEzE,MAAI,GAAG,QAAQ,aAAa,CAC1B;AAGF,MAAI,MAAM,aAAa,CACrB,OAAM,KAAK,GAAGC,eAAa,UAAU,IAAI,QAAQ,CAAC;WACzC,MAAM,QAAQ,IAAI,gBAAgB,MAAM,KAAK,CACtD,OAAM,KAAK,SAAS;;AAIxB,QAAO;;;;;AAMT,SAAS,cACP,SACA,SACmB;AACnB,QAAO,QAAQ,KAAK,OAAO;EACzB,UAAU,KAAK,SAAS,SAAS,EAAE,MAAM,SAAS;EAClD,UAAU,EAAE,MAAM;EAClB,WAAW,EAAE,MAAM;EACnB,SAAS,EAAE,MAAM;EACjB,SAAS,EAAE,MAAM;EACjB,OAAO,EAAE;EACT,YAAY,EAAE,MAAM;EACpB,YAAY,EAAE,MAAM;EACrB,EAAE;;;;;AAML,eAAsBC,UAAQ,OAAgD;CAC5E,MAAM,EACJ,OACA,WACA,OACA,WACA,MACA,QAAQ,cACR,uBACE;AAGJ,KAAI,CAAC,GAAG,WAAW,UAAU,CAC3B,QAAO;EACL,SAAS;EACT,OAAO,wBAAwB;EAChC;CAGH,MAAM,cAAc,KAAK,QAAQ,UAAU;CAG3C,MAAM,eAAe,mBAAmB,iBAAiB;CACzD,MAAM,cAAc,kBAAkB,aAAa,iBAAiB;AAGpE,KAAI,CAAC,YAAY,QAAQ,CACvB,QAAO;EACL,SAAS;EACT,OAAO,2DAA2D;EACnE;AAGH,KAAI;EAEF,MAAM,SAAS,MAAM,aAAa,aAAa;AAC/C,MAAI,CAAC,OAAO,GACV,QAAO;GACL,SAAS;GACT,OAAO,OAAO,SAAS;GACxB;AAIH,QAAM,YAAY,SAAS;EAG3B,MAAM,cAAc,MAAM,aAAa,MAAM,MAAM;EAGnD,IAAI,UAAU,MAAM,YAAY,aAAa,aAAa,OAAO,OAAO,EAChE,MACP,CAAC;AAIF,MAAI,cAAc,UAAa,SAAS,SACtC,WAAU,QAAQ,QAAQ,MAAM,EAAE,SAAS,UAAU;AAIvD,MAAI,gBAAgB,QAAQ,SAAS,EACnC,WAAU,MAAM,OAAO,OAAO,SAAS;GACrC,eAAe,iBAAiB;GAChC,OAAO,iBAAiB;GACxB,YAAY;GACb,CAAC;AAGJ,cAAY,OAAO;EAEnB,IAAI,mBAAmB,cAAc,SAAS,YAAY;AAG1D,MAAI,sBAAsB,iBAAiB,SAAS,GAAG;GASrD,MAAM,YAAY,MAAM,eANVD,eAAa,aADhBF,qBAAmB,YAAY,EACE,YAAY,CAC7B,KAAK,OAAO;IACrC,MAAM;IACN,SAAS,GAAG,aAAa,GAAG,QAAQ;IACrC,EAAE,CAEiD;AAGpD,sBAAmB,iBAAiB,KAAK,WAAW;AAClD,QAAI,CAAC,OAAO,WACV,QAAO;IAIT,MAAM,UAAU,eAAe,WADd,KAAK,KAAK,aAAa,OAAO,SAAS,EACJ,OAAO,WAAW;AAEtE,QAAI,QACF,QAAO;KACL,GAAG;KACH,aAAa;MACX,SAAS,QAAQ,QAAQ,KAAK,MAAM,EAAE,KAAK;MAC3C,SAAS,QAAQ,QAAQ,KAAK,MAAM,EAAE,KAAK;MAC5C;KACF;AAGH,WAAO;KACP;;EAGJ,MAAM,SAAuB;GAC3B;GACA,WAAW;GACX,cAAc,iBAAiB;GAC/B,SAAS;GACV;AAED,MAAI,iBAAiB,WAAW,EAC9B,QAAO;GACL,SAAS;GACT,SAAS;GACT,MAAM;GACP;EAIH,MAAM,cAAc,iBAAiB,KAAK,GAAG,MAAM;GACjD,MAAM,WAAW,GAAG,EAAE,SAAS,GAAG,OAAO,EAAE,UAAU,CAAC,GAAG,OAAO,EAAE,QAAQ;GAC1E,MAAM,SAAS,EAAE,aACb,KAAK,EAAE,cAAc,SAAS,IAAI,EAAE,WAAW,KAC/C;GACJ,MAAM,UAAU,EAAE,QAAQ,MAAM,GAAG,IAAI,CAAC,QAAQ,OAAO,IAAI;GAE3D,IAAI,WAAW;AACf,OAAI,EAAE,aAAa;IACjB,MAAM,UACJ,EAAE,YAAY,QAAQ,SAAS,IAC3B,cAAc,EAAE,YAAY,QAAQ,MAAM,GAAG,EAAE,CAAC,KAAK,KAAK,GAAG,EAAE,YAAY,QAAQ,SAAS,IAAI,QAAQ,OACxG;IACN,MAAM,UACJ,EAAE,YAAY,QAAQ,SAAS,IAC3B,UAAU,EAAE,YAAY,QAAQ,MAAM,GAAG,EAAE,CAAC,KAAK,KAAK,GAAG,EAAE,YAAY,QAAQ,SAAS,IAAI,QAAQ,OACpG;AACN,QAAI,WAAW,QACb,YAAW,QAAQ,CAAC,SAAS,QAAQ,CAAC,OAAO,QAAQ,CAAC,KAAK,MAAM;;AAIrE,UAAO,GAAG,OAAO,IAAI,EAAE,CAAC,KAAK,EAAE,SAAS,IAAI,WAAW,OAAO,OAAO,QAAQ,KAAK;IAClF;AAIF,SAAO;GACL,SAAS;GACT,SAJc,SAAS,OAAO,iBAAiB,OAAO,CAAC,gBAAgB,MAAM,QAAQ,YAAY,KAAK,OAAO;GAK7G,MAAM;GACP;UACM,KAAK;AACZ,cAAY,OAAO;AAEnB,SAAO;GACL,SAAS;GACT,OAAO,kBAHQ,eAAe,QAAQ,IAAI,UAAU,OAAO,IAAI;GAIhE;;;AAIL,MAAa,oBAAsD;CACjE,MAAM;CACN,aACE;CACF,QAAQ;CACR;CACD;;;;;;;;;;;;AC3UD,MAAa,uBAAuB,EAAE,OAAO,EAC3C,WAAW,EACR,QAAQ,CACR,UAAU,CACV,QAAQ,IAAI,CACZ,SAAS,iEAAiE,EAC9E,CAAC;;;;AAOF,eAAsBI,UACpB,OACwB;CACxB,MAAM,EAAE,cAAc;AAGtB,KAAI,CAAC,GAAG,WAAW,UAAU,CAC3B,QAAO;EACL,SAAS;EACT,OAAO,wBAAwB;EAChC;CAGH,MAAM,cAAc,KAAK,QAAQ,UAAU;CAC3C,MAAM,YAAY,aAAa,YAAY;AAG3
C,KAAI,CAAC,GAAG,WAAW,UAAU,EAAE;EAC7B,MAAM,SAAsB;GAC1B,WAAW;GACX;GACA,QAAQ;GACR,aAAa;GACb,YAAY;GACZ,WAAW,EAAE;GACd;AAED,SAAO;GACL,SAAS;GACT,SAAS,sBAAsB,YAAY;GAC3C,MAAM;GACP;;AAGH,KAAI;EACF,MAAM,cAAc,kBAAkB,aAAa,iBAAiB;AACpE,QAAM,YAAY,SAAS;EAE3B,MAAM,SAAS,MAAM,YAAY,UAAU,YAAY;AACvD,cAAY,OAAO;EAGnB,MAAM,gBAAgB,OAAO,QAAQ,OAAO,UAAU,CACnD,MAAM,GAAG,IAAI,GAAG,OAAO,IAAI,EAAE,CAC7B,KAAK,CAAC,MAAM,WAAW,OAAO,KAAK,IAAI,OAAO,MAAM,CAAC,SAAS;AAajE,SAAO;GACL,SAAS;GACT,SAbc;IACd,oBAAoB;IACpB;IACA,eAAe,OAAO;IACtB,gBAAgB,OAAO,OAAO,WAAW;IACzC,iBAAiB,OAAO,OAAO,YAAY;IAC3C;IACA;IACA,GAAG;IACJ,CAAC,KAAK,KAAK;GAKV,MAAM;GACP;UACM,KAAK;AAEZ,SAAO;GACL,SAAS;GACT,OAAO,gCAHQ,eAAe,QAAQ,IAAI,UAAU,OAAO,IAAI;GAIhE;;;AAIL,MAAa,wBAA8D;CACzE,MAAM;CACN,aACE;CACF,QAAQ;CACR;CACD;;;;;;;;;;;;ACvFD,MAAa,qBAAqB,EAAE,OAAO;CACzC,WAAW,EACR,QAAQ,CACR,UAAU,CACV,QAAQ,IAAI,CACZ,SAAS,mCAAmC;CAC/C,cAAc,EACX,QAAQ,CACR,UAAU,CACV,SAAS,gEAAgE;CAC5E,UAAU,EACP,QAAQ,CACR,UAAU,CACV,SACC,8EACD;CACH,UAAU,EACP,QAAQ,CACR,KAAK,CACL,UAAU,CACV,UAAU,CACV,QAAQ,EAAE,CACV,SAAS,sDAAsD;CAClE,SAAS,EACN,MAAM,EAAE,QAAQ,CAAC,CACjB,UAAU,CACV,QAAQ,EAAE,CAAC,CACX,SAAS,yCAAyC;CACtD,CAAC;;;;;;;;;;;;;;;ACrBF,MAAM,kBAAkB;AAExB,MAAa,oBAAoB,EAAE,OAAO;CACxC,WAAW,EACR,QAAQ,CACR,UAAU,CACV,QAAQ,IAAI,CACZ,SAAS,gCAAgC;CAC5C,QAAQ,EACL,SAAS,CACT,UAAU,CACV,QAAQ,MAAM,CACd,SAAS,iDAAiD;CAC7D,OAAO,EACJ,SAAS,CACT,UAAU,CACV,QAAQ,MAAM,CACd,SAAS,kDAAkD;CAC/D,CAAC;;;;AAmBF,SAAS,YAAY,SAAyB;AAC5C,QAAO,OAAO,WAAW,SAAS,CAAC,OAAO,SAAS,OAAO,CAAC,OAAO,MAAM;;;;;AAM1E,SAAS,iBAAiB,WAA2B;AACnD,QAAO,KAAK,KAAK,WAAW,cAAc,gBAAgB;;;;;AAM5D,SAAS,cAAc,WAA8B;CACnD,MAAM,YAAY,iBAAiB,UAAU;AAC7C,KAAI,GAAG,WAAW,UAAU,CAC1B,KAAI;EACF,MAAM,UAAU,GAAG,aAAa,WAAW,QAAQ;AACnD,SAAO,KAAK,MAAM,QAAQ;SACpB;AACN,SAAO,EAAE;;AAGb,QAAO,EAAE;;;;;AAMX,SAAS,cAAc,WAAmB,OAAwB;CAChE,MAAM,YAAY,iBAAiB,UAAU;CAC7C,MAAM,WAAW,KAAK,QAAQ,UAAU;AACxC,KAAI,CAAC,GAAG,WAAW,SAAS,CAC1B,IAAG,UAAU,UAAU,EAAE,WAAW,MAAM,CAAC;AAE7C,IAAG,cAAc,WAAW,KAAK,UAAU,OAAO,MAAM,EAAE,CAAC;;;;;AAM7D,SAAS,mBAAmB,WAA2B;CACrD,MAAM,KAAK,QAAQ;AACnB,IAAG,IAAI;EAAC;EAAgB;EAAQ;EAAQ;EAAS;EAAa,CAAC;CAE/D,MAAM,gBAAgB,KAAK,KAAK,WAAW,aAAa;AACxD,KAAI,GAAG,WAAW,cAAc,EAAE;EAChC,MAAM,UAAU,GAAG,aAAa,eAAe,QAAQ;AACvD,KAAG,IAAI,QAAQ;;AAGjB,QAAO;;;;;AAMT,SAAS,SAAS,MAAuB;AACvC,QAAO,KAAK,WAAW,IAAI;;;;;AAM7B,SAAS,aAAa,KAAa,IAAY,SAA2B;CACxE,MAAM,QAAkB,EAAE;CAE1B,MAAM,UAAU,GAAG,YAAY,KAAK,EAAE,eAAe,MAAM,CAAC;AAE5D,MAAK,MAAM,SAAS,SAAS;AAC3B,MAAI,SAAS,MAAM,KAAK,CACtB;EAGF,MAAM,WAAW,KAAK,KAAK,KAAK,MAAM,KAAK;EAC3C,MAAM,eAAe,KAAK,SAAS,SAAS,SAAS,CAAC,QAAQ,OAAO,IAAI;AAEzE,MAAI,GAAG,QAAQ,aAAa,CAC1B;AAGF,MAAI,MAAM,aAAa,CACrB,OAAM,KAAK,GAAG,aAAa,UAAU,IAAI,QAAQ,CAAC;WACzC,MAAM,QAAQ,IAAI,gBAAgB,MAAM,KAAK,CACtD,OAAM,KAAK,SAAS;;AAIxB,QAAO;;;;;AAMT,eAAsB,QAAQ,OAAiD;CAC7E,MAAM,EAAE,WAAW,QAAQ,UAAU;AAGrC,KAAI,CAAC,GAAG,WAAW,UAAU,CAC3B,QAAO;EACL,SAAS;EACT,OAAO,wBAAwB;EAChC;CAGH,MAAM,cAAc,KAAK,QAAQ,UAAU;CAG3C,MAAM,eAAe,mBAAmB,iBAAiB;CACzD,MAAM,cAAc,kBAAkB,aAAa,iBAAiB;AAGpE,KAAI,CAAC,YAAY,QAAQ,CACvB,QAAO;EACL,SAAS;EACT,OAAO,2DAA2D;EACnE;CAGH,MAAM,SAAuB;EAC3B,WAAW;EACX;EACA,OAAO,EAAE;EACT,UAAU,EAAE;EACZ,SAAS,EAAE;EACX,WAAW;EACX,QAAQ,EAAE;EACX;AAED,KAAI;AAEF,MAAI,CAAC,QAAQ;GACX,MAAM,SAAS,MAAM,aAAa,aAAa;AAC/C,OAAI,CAAC,OAAO,GACV,QAAO;IACL,SAAS;IACT,OAAO,OAAO,SAAS;IACxB;;AAKL,QAAM,YAAY,SAAS;EAG3B,MAAM,YAAY,QAAQ,EAAE,GAAG,cAAc,YAAY;EACzD,MAAM,eAA0B,EAAE;EAGlC,MAAM,KAAK,mBAAmB,YAAY;EAC1C,MAAM,eAAe,IAAI,IAAI,aAAa,aAAa,IAAI,YAAY,CAAC;EAGxE,MAAM,eAAe,IAAI,IAAI,MAAM,YAAY,iBAAiB,CAAC;EAGjE,MAAM,iBAA6D,EAAE;AAErE,OAAK,MAAM,YAAY,cAAc;GAEnC,MAAM,OAAO,YADG,GAAG,aAAa,UAAU,QAAQ,CACjB;AACjC,gBAAa,YAAY;AAEzB,OAAI,CAAC,aAAa,IAAI,SAAS,EAAE;AAE/B
,WAAO,MAAM,KAAK,KAAK,SAAS,aAAa,SAAS,CAAC;AACvD,mBAAe,KAAK;KAAE,MAAM;KAAU,MAAM;KAAO,CAAC;cAC3C,UAAU,cAAc,MAAM;AAEvC,WAAO,SAAS,KAAK,KAAK,SAAS,aAAa,SAAS,CAAC;AAC1D,mBAAe,KAAK;KAAE,MAAM;KAAU,MAAM;KAAU,CAAC;SAEvD,QAAO;;AAKX,OAAK,MAAM,YAAY,aACrB,KAAI,CAAC,aAAa,IAAI,SAAS,CAC7B,QAAO,QAAQ,KAAK,KAAK,SAAS,aAAa,SAAS,CAAC;AAK7D,MAAI,QAAQ;AACV,eAAY,OAAO;AAGnB,UAAO;IACL,SAAS;IACT,SAHc,mBAAmB,OAAO;IAIxC,MAAM;IACP;;EAOH,MAAM,oBAAuC;GAC3C,aAAa;GACb,aALkB,sBAAsB,YAAY;GAMpD,yBAAyB;GAC1B;EAGD,MAAM,iBAAkC,EAAE;AAE1C,OAAK,MAAM,EAAE,MAAM,UAAU,UAAU,eACrC,KAAI;AAEF,OAAI,SAAS,SACX,OAAM,YAAY,iBAAiB,SAAS;GAG9C,MAAM,UAAU,GAAG,aAAa,UAAU,QAAQ;GAClD,MAAM,SAAS,MAAM,UAAU,UAAU,SAAS,iBAAiB;AAEnE,OAAI,OAAO,WAAW,EACpB;GAGF,MAAM,iBAAiB,MAAM,qBAC3B,QACA,SACA,kBACD;GAED,MAAM,QAAQ,eAAe,KAAK,MAAM,EAAE,gBAAgB;GAC1D,MAAM,aAAa,MAAM,aAAa,WAAW,MAAM;AAEvD,QAAK,IAAI,IAAI,GAAG,IAAI,eAAe,QAAQ,KAAK;IAC9C,MAAM,QAAQ,eAAe;IAC7B,MAAM,SAAS,WAAW;AAC1B,QAAI,SAAS,OACX,gBAAe,KAAK;KAClB,IAAI,MAAM;KACV,SAAS,MAAM;KACf,UAAU,MAAM;KAChB,UAAU,MAAM;KAChB,WAAW,MAAM;KACjB,SAAS,MAAM;KACf,YAAY,MAAM;KAClB,YAAY,MAAM;KAClB;KACD,CAAC;;WAGC,KAAK;GACZ,MAAM,WAAW,eAAe,QAAQ,IAAI,UAAU,OAAO,IAAI;AACjE,UAAO,OAAO,KAAK,oBAAoB,SAAS,IAAI,WAAW;;AAKnE,MAAI,eAAe,SAAS,EAC1B,OAAM,YAAY,UAAU,eAAe;AAI7C,OAAK,MAAM,gBAAgB,OAAO,SAAS;GACzC,MAAM,WAAW,KAAK,KAAK,aAAa,aAAa;AACrD,SAAM,YAAY,iBAAiB,SAAS;;AAI9C,gBAAc,aAAa,aAAa;AAExC,cAAY,OAAO;AAInB,SAAO;GACL,SAAS;GACT,SAJc,mBAAmB,OAAO;GAKxC,MAAM;GACP;UACM,KAAK;AACZ,cAAY,OAAO;AAEnB,SAAO;GACL,SAAS;GACT,OAAO,kBAHQ,eAAe,QAAQ,IAAI,UAAU,OAAO,IAAI;GAIhE;;;;;;AAOL,SAAS,mBAAmB,QAA8B;CACxD,MAAM,QAAkB,CAAC,8BAA8B;AAEvD,KAAI,OAAO,MAAM,SAAS,GAAG;AAC3B,QAAM,KAAK,mBAAmB,OAAO,OAAO,MAAM,OAAO,CAAC,IAAI;AAC9D,OAAK,MAAM,KAAK,OAAO,MAAM,MAAM,GAAG,GAAG,CACvC,OAAM,KAAK,OAAO,IAAI;AAExB,MAAI,OAAO,MAAM,SAAS,GACxB,OAAM,KAAK,aAAa,OAAO,OAAO,MAAM,SAAS,GAAG,CAAC,OAAO;;AAIpE,KAAI,OAAO,SAAS,SAAS,GAAG;AAC9B,QAAM,KAAK,sBAAsB,OAAO,OAAO,SAAS,OAAO,CAAC,IAAI;AACpE,OAAK,MAAM,KAAK,OAAO,SAAS,MAAM,GAAG,GAAG,CAC1C,OAAM,KAAK,OAAO,IAAI;AAExB,MAAI,OAAO,SAAS,SAAS,GAC3B,OAAM,KAAK,aAAa,OAAO,OAAO,SAAS,SAAS,GAAG,CAAC,OAAO;;AAIvE,KAAI,OAAO,QAAQ,SAAS,GAAG;AAC7B,QAAM,KAAK,sBAAsB,OAAO,OAAO,QAAQ,OAAO,CAAC,IAAI;AACnE,OAAK,MAAM,KAAK,OAAO,QAAQ,MAAM,GAAG,GAAG,CACzC,OAAM,KAAK,OAAO,IAAI;AAExB,MAAI,OAAO,QAAQ,SAAS,GAC1B,OAAM,KAAK,aAAa,OAAO,OAAO,QAAQ,SAAS,GAAG,CAAC,OAAO;;AAItE,OAAM,KAAK,gBAAgB,OAAO,OAAO,UAAU,CAAC,QAAQ;AAE5D,KACE,OAAO,MAAM,WAAW,KACxB,OAAO,SAAS,WAAW,KAC3B,OAAO,QAAQ,WAAW,EAE1B,QAAO;AAGT,OAAM,KAAK,2CAA2C;AAEtD,QAAO,MAAM,KAAK,KAAK;;;;;AAMzB,SAAS,mBAAmB,QAA8B;AAIxD,KAFE,OAAO,MAAM,SAAS,OAAO,SAAS,SAAS,OAAO,QAAQ,WAEhD,EACd,QAAO;CAGT,MAAM,QAAkB,CAAC,8BAA8B;AAEvD,KAAI,OAAO,MAAM,SAAS,EACxB,OAAM,KAAK,YAAY,OAAO,OAAO,MAAM,OAAO,CAAC,QAAQ;AAE7D,KAAI,OAAO,SAAS,SAAS,EAC3B,OAAM,KAAK,eAAe,OAAO,OAAO,SAAS,OAAO,CAAC,QAAQ;AAEnE,KAAI,OAAO,QAAQ,SAAS,EAC1B,OAAM,KAAK,cAAc,OAAO,OAAO,QAAQ,OAAO,CAAC,QAAQ;AAEjE,OAAM,KAAK,gBAAgB,OAAO,OAAO,UAAU,CAAC,QAAQ;AAE5D,KAAI,OAAO,OAAO,SAAS,GAAG;AAC5B,QAAM,KAAK,aAAa,OAAO,OAAO,OAAO,OAAO,CAAC,IAAI;AACzD,OAAK,MAAM,OAAO,OAAO,OAAO,MAAM,GAAG,EAAE,CACzC,OAAM,KAAK,OAAO,MAAM;;AAI5B,QAAO,MAAM,KAAK,KAAK;;AAGzB,MAAa,qBAAwD;CACnE,MAAM;CACN,aACE;CACF,QAAQ;CACR;CACD;;;;ACvbD,MAAa,iBAAiB,EAC3B,OAAO;CACN,WAAW,EACR,QAAQ,CACR,UAAU,CACV,SACC,mEACD;CACH,SAAS,EACN,QAAQ,CACR,UAAU,CACV,SACC,wEACD;CACH,UAAU,EACP,QAAQ,CACR,UAAU,CACV,SAAS,+DAA+D;CAC3E,WAAW,EACR,QAAQ,CACR,KAAK,CACL,UAAU,CACV,UAAU,CACV,SAAS,sDAAsD;CACnE,CAAC,CACD,QAAQ,SAAS,KAAK,aAAa,KAAK,SAAS,EAChD,SAAS,gDACV,CAAC;;;;AChBJ,MAAM,eAAe;CACnB;CACA;CACA;CACA;CACA;CACA;CACA;CACA;CACD;AAED,MAAa,kBAAkB,EAC5B,OAAO;CACN,WAAW,EACR,QAAQ,CACR,UAAU,CACV,SACC,mEACD;CACH
,SAAS,EACN,QAAQ,CACR,UAAU,CACV,SACC,wEACD;CACH,UAAU,EACP,QAAQ,CACR,UAAU,CACV,SAAS,+DAA+D;CAC3E,OAAO,EACJ,QAAQ,CACR,UAAU,CACV,SAAS,sDAAsD;CAClE,QAAQ,EACL,KAAK,aAAa,CAClB,UAAU,CACV,SACC,+FACD;CACH,aAAa,EACV,QAAQ,CACR,KAAK,CACL,UAAU,CACV,UAAU,CACV,SAAS,sCAAsC;CACnD,CAAC,CACD,QAAQ,SAAS,KAAK,aAAa,KAAK,SAAS,EAChD,SAAS,gDACV,CAAC,CACD,QAAQ,SAAS,KAAK,SAAS,KAAK,QAAQ,EAC3C,SAAS,2CACV,CAAC;;;;AC7DJ,MAAM,mBAAmB;CACvB;CACA;CACA;CACA;CACA;CACA;CACA;CACA;CACA;CACD;AAED,MAAa,oBAAoB,EAC9B,OAAO;CACN,WAAW,EACR,QAAQ,CACR,UAAU,CACV,SACC,qEACD;CACH,SAAS,EACN,QAAQ,CACR,UAAU,CACV,SACC,0EACD;CACH,UAAU,EACP,QAAQ,CACR,UAAU,CACV,SAAS,+DAA+D;CAC3E,OAAO,EACJ,MAAM,EAAE,KAAK,iBAAiB,CAAC,CAC/B,UAAU,CACV,SACC,uGACD;CACJ,CAAC,CACD,QAAQ,SAAS,KAAK,aAAa,KAAK,SAAS,EAChD,SAAS,gDACV,CAAC;;;;AC3BJ,IAAI,kBAA0C;AAoH9C,SAAgB,mBAAyB;AACvC,mBAAkB;;AAIpB,cAAc,mBAAmB,iBAAiB;;;;ACvFlD,IAAI,cAAsC;AAC1C,IAAI,wBAA4C;AAChD,IAAI,2BAA0D;AAC9D,IAAI,wBAAuD;;AA4C3D,SAAgB,oBAA0B;AACxC,eAAc;AACd,yBAAwB;AACxB,4BAA2B;AAC3B,yBAAwB;;AA4gB1B,cAAc,kBAAkB,kBAAkB;;;;ACxmBlD,MAAa,oBAAoB,EAAE,OAAO;CACxC,WAAW,EAAE,QAAQ,CAAC,SAAS,8BAA8B;CAC7D,aAAa,EACV,SAAS,CACT,QAAQ,MAAM,CACd,SAAS,gEAAgE;CAC5E,iBAAiB,EACd,SAAS,CACT,QAAQ,KAAK,CACb,SAAS,4CAA4C;CACxD,iBAAiB,EACd,SAAS,CACT,QAAQ,KAAK,CACb,SAAS,4CAA4C;CACxD,iBAAiB,EACd,SAAS,CACT,QAAQ,KAAK,CACb,SAAS,4CAA4C;CACxD,eAAe,EACZ,QAAQ,CACR,KAAK,CACL,UAAU,CACV,UAAU,CACV,SAAS,oCAAoC;CAChD,gBAAgB,EACb,SAAS,CACT,QAAQ,MAAM,CACd,SAAS,4DAA4D;CACzE,CAAC;;;;ACnBF,MAAa,WAAsB;CACjC;CACA;CACA;CACA;CACA;CACD;;;;ACxBD,SAAS,eAAe,QAA8C;AACpE,KAAI,kBAAkB,EAAE,UACtB,QAAO,OAAO;AAEhB,QAAO,EAAE,OAAO,QAAQ;;AAG1B,SAAgB,sBACd,QACA,SACM;CACN,MAAM,YAAY,eAAe,QAAQ,OAAO;AAGhD,QAAO,KAAK,QAAQ,MAAM,QAAQ,aAAa,WAAW,OAAO,WAAW;EAC1E,MAAM,SAAS,QAAQ,QAAQ,OAAO;EAEtC,MAAM,gBACJ,SAII;GACJ,SAAS,CACP;IACE,MAAM;IACN,MAAM,IAAI,WAAW,KAAK,UAAU,IAAI,MAAM,MAAM,EAAE;IACvD,CACF;GACD,SAAS,CAAC,IAAI;GACf;AAED,MAAI,kBAAkB,QACpB,QAAO,MAAM,OAAO,KAAK,aAAa;AAExC,SAAO,aAAa,OAAO;GAC3B;;;;;ACpCJ,SAAgB,cAAc,QAAyB;AACrD,MAAK,MAAM,WAAW,SACpB,uBAAsB,QAAQ,QAAQ;;;;;ACH1C,SAAgB,kBAAkB,QAAyB;AAEzD,QAAO,SAAS,eAAe,sBAAsB,QAAQ;EAC3D,MAAM,OAAO,eAAe;AAC5B,SAAO,EACL,UAAU,CACR;GACE,KAAK,IAAI;GACT,UAAU;GACV,MAAM,KAAK,UAAU,MAAM,MAAM,EAAE;GACpC,CACF,EACF;GACD;;;;;;;;;;;ACRJ,SAAgB,gBAAgB,QAAyB;AAEvD,QAAO,eACL,gBACA;EACE,OAAO;EACP,aACE;EACH,SACM,EACL,UAAU,CACR;EACE,MAAM;EACN,SAAS;GACP,MAAM;GACN,MAAM;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;GAyCP;EACF,CACF,EACF,EACF;AAGD,QAAO,eACL,wBACA;EACE,OAAO;EACP,aAAa;EACd,SACM,EACL,UAAU,CACR;EACE,MAAM;EACN,SAAS;GACP,MAAM;GACN,MAAM;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;GAgDP;EACF,CACF,EACF,EACF;AAGD,QAAO,eACL,eACA;EACE,OAAO;EACP,aAAa;EACd,SACM,EACL,UAAU,CACR;EACE,MAAM;EACN,SAAS;GACP,MAAM;GACN,MAAM;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;GAgDP;EACF,CACF,EACF,EACF;;;;;AClMH,SAAgB,eAA0B;CACxC,MAAM,SAAS,IAAI,UAAU;EAC3B,MAAM,OAAO;EACb,SAAS,OAAO;EACjB,CAAC;AAEF,eAAc,OAAO;AACrB,mBAAkB,OAAO;AACzB,iBAAgB,OAAO;AAEvB,QAAO;;AAGT,eAAsB,cAA6B;CACjD,MAAM,SAAS,cAAc;CAC7B,MAAM,YAAY,IAAI,sBAAsB;AAE5C,OAAM,OAAO,QAAQ,UAAU;AAC/B,QAAO,KAAK,GAAG,OAAO,KAAK,IAAI,OAAO,QAAQ,UAAU"}