@gzoo/cortex 0.5.9

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (377)
  1. package/CHANGELOG.md +21 -0
  2. package/CLAUDE.md +180 -0
  3. package/CONTRIBUTING.md +52 -0
  4. package/LICENSE +21 -0
  5. package/README.md +259 -0
  6. package/dist/cortex-mcp.mjs +3153 -0
  7. package/dist/cortex.mjs +8213 -0
  8. package/icon.png +0 -0
  9. package/logo.png +0 -0
  10. package/package.json +86 -0
  11. package/packages/cli/dist/commands/config.d.ts +4 -0
  12. package/packages/cli/dist/commands/config.d.ts.map +1 -0
  13. package/packages/cli/dist/commands/config.js +419 -0
  14. package/packages/cli/dist/commands/config.js.map +1 -0
  15. package/packages/cli/dist/commands/contradictions.d.ts +3 -0
  16. package/packages/cli/dist/commands/contradictions.d.ts.map +1 -0
  17. package/packages/cli/dist/commands/contradictions.js +74 -0
  18. package/packages/cli/dist/commands/contradictions.js.map +1 -0
  19. package/packages/cli/dist/commands/costs.d.ts +3 -0
  20. package/packages/cli/dist/commands/costs.d.ts.map +1 -0
  21. package/packages/cli/dist/commands/costs.js +168 -0
  22. package/packages/cli/dist/commands/costs.js.map +1 -0
  23. package/packages/cli/dist/commands/db.d.ts +3 -0
  24. package/packages/cli/dist/commands/db.d.ts.map +1 -0
  25. package/packages/cli/dist/commands/db.js +139 -0
  26. package/packages/cli/dist/commands/db.js.map +1 -0
  27. package/packages/cli/dist/commands/find.d.ts +3 -0
  28. package/packages/cli/dist/commands/find.d.ts.map +1 -0
  29. package/packages/cli/dist/commands/find.js +139 -0
  30. package/packages/cli/dist/commands/find.js.map +1 -0
  31. package/packages/cli/dist/commands/ingest.d.ts +3 -0
  32. package/packages/cli/dist/commands/ingest.d.ts.map +1 -0
  33. package/packages/cli/dist/commands/ingest.js +179 -0
  34. package/packages/cli/dist/commands/ingest.js.map +1 -0
  35. package/packages/cli/dist/commands/init.d.ts +3 -0
  36. package/packages/cli/dist/commands/init.d.ts.map +1 -0
  37. package/packages/cli/dist/commands/init.js +285 -0
  38. package/packages/cli/dist/commands/init.js.map +1 -0
  39. package/packages/cli/dist/commands/mcp.d.ts +3 -0
  40. package/packages/cli/dist/commands/mcp.d.ts.map +1 -0
  41. package/packages/cli/dist/commands/mcp.js +65 -0
  42. package/packages/cli/dist/commands/mcp.js.map +1 -0
  43. package/packages/cli/dist/commands/models.d.ts +3 -0
  44. package/packages/cli/dist/commands/models.d.ts.map +1 -0
  45. package/packages/cli/dist/commands/models.js +245 -0
  46. package/packages/cli/dist/commands/models.js.map +1 -0
  47. package/packages/cli/dist/commands/privacy.d.ts +3 -0
  48. package/packages/cli/dist/commands/privacy.d.ts.map +1 -0
  49. package/packages/cli/dist/commands/privacy.js +140 -0
  50. package/packages/cli/dist/commands/privacy.js.map +1 -0
  51. package/packages/cli/dist/commands/projects.d.ts +3 -0
  52. package/packages/cli/dist/commands/projects.d.ts.map +1 -0
  53. package/packages/cli/dist/commands/projects.js +142 -0
  54. package/packages/cli/dist/commands/projects.js.map +1 -0
  55. package/packages/cli/dist/commands/query.d.ts +3 -0
  56. package/packages/cli/dist/commands/query.d.ts.map +1 -0
  57. package/packages/cli/dist/commands/query.js +153 -0
  58. package/packages/cli/dist/commands/query.js.map +1 -0
  59. package/packages/cli/dist/commands/report.d.ts +3 -0
  60. package/packages/cli/dist/commands/report.d.ts.map +1 -0
  61. package/packages/cli/dist/commands/report.js +144 -0
  62. package/packages/cli/dist/commands/report.js.map +1 -0
  63. package/packages/cli/dist/commands/resolve.d.ts +3 -0
  64. package/packages/cli/dist/commands/resolve.d.ts.map +1 -0
  65. package/packages/cli/dist/commands/resolve.js +119 -0
  66. package/packages/cli/dist/commands/resolve.js.map +1 -0
  67. package/packages/cli/dist/commands/serve.d.ts +3 -0
  68. package/packages/cli/dist/commands/serve.d.ts.map +1 -0
  69. package/packages/cli/dist/commands/serve.js +108 -0
  70. package/packages/cli/dist/commands/serve.js.map +1 -0
  71. package/packages/cli/dist/commands/status.d.ts +3 -0
  72. package/packages/cli/dist/commands/status.d.ts.map +1 -0
  73. package/packages/cli/dist/commands/status.js +230 -0
  74. package/packages/cli/dist/commands/status.js.map +1 -0
  75. package/packages/cli/dist/commands/stop.d.ts +5 -0
  76. package/packages/cli/dist/commands/stop.d.ts.map +1 -0
  77. package/packages/cli/dist/commands/stop.js +80 -0
  78. package/packages/cli/dist/commands/stop.js.map +1 -0
  79. package/packages/cli/dist/commands/watch.d.ts +3 -0
  80. package/packages/cli/dist/commands/watch.d.ts.map +1 -0
  81. package/packages/cli/dist/commands/watch.js +235 -0
  82. package/packages/cli/dist/commands/watch.js.map +1 -0
  83. package/packages/cli/dist/index.d.ts +9 -0
  84. package/packages/cli/dist/index.d.ts.map +1 -0
  85. package/packages/cli/dist/index.js +68 -0
  86. package/packages/cli/dist/index.js.map +1 -0
  87. package/packages/cli/package.json +37 -0
  88. package/packages/cli/tsconfig.json +16 -0
  89. package/packages/core/dist/config/loader.d.ts +11 -0
  90. package/packages/core/dist/config/loader.d.ts.map +1 -0
  91. package/packages/core/dist/config/loader.js +133 -0
  92. package/packages/core/dist/config/loader.js.map +1 -0
  93. package/packages/core/dist/config/project-registry.d.ts +71 -0
  94. package/packages/core/dist/config/project-registry.d.ts.map +1 -0
  95. package/packages/core/dist/config/project-registry.js +89 -0
  96. package/packages/core/dist/config/project-registry.js.map +1 -0
  97. package/packages/core/dist/config/schema.d.ts +909 -0
  98. package/packages/core/dist/config/schema.d.ts.map +1 -0
  99. package/packages/core/dist/config/schema.js +125 -0
  100. package/packages/core/dist/config/schema.js.map +1 -0
  101. package/packages/core/dist/errors/cortex-error.d.ts +58 -0
  102. package/packages/core/dist/errors/cortex-error.d.ts.map +1 -0
  103. package/packages/core/dist/errors/cortex-error.js +68 -0
  104. package/packages/core/dist/errors/cortex-error.js.map +1 -0
  105. package/packages/core/dist/events/event-bus.d.ts +10 -0
  106. package/packages/core/dist/events/event-bus.d.ts.map +1 -0
  107. package/packages/core/dist/events/event-bus.js +42 -0
  108. package/packages/core/dist/events/event-bus.js.map +1 -0
  109. package/packages/core/dist/index.d.ts +8 -0
  110. package/packages/core/dist/index.d.ts.map +1 -0
  111. package/packages/core/dist/index.js +22 -0
  112. package/packages/core/dist/index.js.map +1 -0
  113. package/packages/core/dist/logger.d.ts +16 -0
  114. package/packages/core/dist/logger.d.ts.map +1 -0
  115. package/packages/core/dist/logger.js +57 -0
  116. package/packages/core/dist/logger.js.map +1 -0
  117. package/packages/core/dist/types/config.d.ts +107 -0
  118. package/packages/core/dist/types/config.d.ts.map +1 -0
  119. package/packages/core/dist/types/config.js +2 -0
  120. package/packages/core/dist/types/config.js.map +1 -0
  121. package/packages/core/dist/types/entity.d.ts +35 -0
  122. package/packages/core/dist/types/entity.d.ts.map +1 -0
  123. package/packages/core/dist/types/entity.js +2 -0
  124. package/packages/core/dist/types/entity.js.map +1 -0
  125. package/packages/core/dist/types/events.d.ts +76 -0
  126. package/packages/core/dist/types/events.d.ts.map +1 -0
  127. package/packages/core/dist/types/events.js +2 -0
  128. package/packages/core/dist/types/events.js.map +1 -0
  129. package/packages/core/dist/types/file.d.ts +15 -0
  130. package/packages/core/dist/types/file.d.ts.map +1 -0
  131. package/packages/core/dist/types/file.js +2 -0
  132. package/packages/core/dist/types/file.js.map +1 -0
  133. package/packages/core/dist/types/graph.d.ts +93 -0
  134. package/packages/core/dist/types/graph.d.ts.map +1 -0
  135. package/packages/core/dist/types/graph.js +2 -0
  136. package/packages/core/dist/types/graph.js.map +1 -0
  137. package/packages/core/dist/types/index.d.ts +10 -0
  138. package/packages/core/dist/types/index.d.ts.map +1 -0
  139. package/packages/core/dist/types/index.js +2 -0
  140. package/packages/core/dist/types/index.js.map +1 -0
  141. package/packages/core/dist/types/llm.d.ts +95 -0
  142. package/packages/core/dist/types/llm.d.ts.map +1 -0
  143. package/packages/core/dist/types/llm.js +10 -0
  144. package/packages/core/dist/types/llm.js.map +1 -0
  145. package/packages/core/dist/types/project.d.ts +11 -0
  146. package/packages/core/dist/types/project.d.ts.map +1 -0
  147. package/packages/core/dist/types/project.js +2 -0
  148. package/packages/core/dist/types/project.js.map +1 -0
  149. package/packages/core/dist/types/relationship.d.ts +26 -0
  150. package/packages/core/dist/types/relationship.d.ts.map +1 -0
  151. package/packages/core/dist/types/relationship.js +2 -0
  152. package/packages/core/dist/types/relationship.js.map +1 -0
  153. package/packages/core/package.json +22 -0
  154. package/packages/core/tsconfig.json +9 -0
  155. package/packages/graph/dist/index.d.ts +4 -0
  156. package/packages/graph/dist/index.d.ts.map +1 -0
  157. package/packages/graph/dist/index.js +4 -0
  158. package/packages/graph/dist/index.js.map +1 -0
  159. package/packages/graph/dist/migrations/001-initial.d.ts +4 -0
  160. package/packages/graph/dist/migrations/001-initial.d.ts.map +1 -0
  161. package/packages/graph/dist/migrations/001-initial.js +134 -0
  162. package/packages/graph/dist/migrations/001-initial.js.map +1 -0
  163. package/packages/graph/dist/query-engine.d.ts +35 -0
  164. package/packages/graph/dist/query-engine.d.ts.map +1 -0
  165. package/packages/graph/dist/query-engine.js +185 -0
  166. package/packages/graph/dist/query-engine.js.map +1 -0
  167. package/packages/graph/dist/sqlite-store.d.ts +125 -0
  168. package/packages/graph/dist/sqlite-store.d.ts.map +1 -0
  169. package/packages/graph/dist/sqlite-store.js +632 -0
  170. package/packages/graph/dist/sqlite-store.js.map +1 -0
  171. package/packages/graph/dist/vector-store.d.ts +27 -0
  172. package/packages/graph/dist/vector-store.d.ts.map +1 -0
  173. package/packages/graph/dist/vector-store.js +85 -0
  174. package/packages/graph/dist/vector-store.js.map +1 -0
  175. package/packages/graph/package.json +27 -0
  176. package/packages/graph/tsconfig.json +12 -0
  177. package/packages/ingest/dist/chunker.d.ts +21 -0
  178. package/packages/ingest/dist/chunker.d.ts.map +1 -0
  179. package/packages/ingest/dist/chunker.js +118 -0
  180. package/packages/ingest/dist/chunker.js.map +1 -0
  181. package/packages/ingest/dist/index.d.ts +11 -0
  182. package/packages/ingest/dist/index.d.ts.map +1 -0
  183. package/packages/ingest/dist/index.js +14 -0
  184. package/packages/ingest/dist/index.js.map +1 -0
  185. package/packages/ingest/dist/parsers/conversation.d.ts +10 -0
  186. package/packages/ingest/dist/parsers/conversation.d.ts.map +1 -0
  187. package/packages/ingest/dist/parsers/conversation.js +150 -0
  188. package/packages/ingest/dist/parsers/conversation.js.map +1 -0
  189. package/packages/ingest/dist/parsers/index.d.ts +11 -0
  190. package/packages/ingest/dist/parsers/index.d.ts.map +1 -0
  191. package/packages/ingest/dist/parsers/index.js +42 -0
  192. package/packages/ingest/dist/parsers/index.js.map +1 -0
  193. package/packages/ingest/dist/parsers/json-parser.d.ts +6 -0
  194. package/packages/ingest/dist/parsers/json-parser.d.ts.map +1 -0
  195. package/packages/ingest/dist/parsers/json-parser.js +114 -0
  196. package/packages/ingest/dist/parsers/json-parser.js.map +1 -0
  197. package/packages/ingest/dist/parsers/markdown.d.ts +6 -0
  198. package/packages/ingest/dist/parsers/markdown.d.ts.map +1 -0
  199. package/packages/ingest/dist/parsers/markdown.js +116 -0
  200. package/packages/ingest/dist/parsers/markdown.js.map +1 -0
  201. package/packages/ingest/dist/parsers/types.d.ts +18 -0
  202. package/packages/ingest/dist/parsers/types.d.ts.map +1 -0
  203. package/packages/ingest/dist/parsers/types.js +2 -0
  204. package/packages/ingest/dist/parsers/types.js.map +1 -0
  205. package/packages/ingest/dist/parsers/typescript.d.ts +11 -0
  206. package/packages/ingest/dist/parsers/typescript.d.ts.map +1 -0
  207. package/packages/ingest/dist/parsers/typescript.js +197 -0
  208. package/packages/ingest/dist/parsers/typescript.js.map +1 -0
  209. package/packages/ingest/dist/parsers/yaml-parser.d.ts +6 -0
  210. package/packages/ingest/dist/parsers/yaml-parser.d.ts.map +1 -0
  211. package/packages/ingest/dist/parsers/yaml-parser.js +52 -0
  212. package/packages/ingest/dist/parsers/yaml-parser.js.map +1 -0
  213. package/packages/ingest/dist/pipeline.d.ts +30 -0
  214. package/packages/ingest/dist/pipeline.d.ts.map +1 -0
  215. package/packages/ingest/dist/pipeline.js +311 -0
  216. package/packages/ingest/dist/pipeline.js.map +1 -0
  217. package/packages/ingest/dist/post-ingest.d.ts +25 -0
  218. package/packages/ingest/dist/post-ingest.d.ts.map +1 -0
  219. package/packages/ingest/dist/post-ingest.js +171 -0
  220. package/packages/ingest/dist/post-ingest.js.map +1 -0
  221. package/packages/ingest/dist/watcher.d.ts +26 -0
  222. package/packages/ingest/dist/watcher.d.ts.map +1 -0
  223. package/packages/ingest/dist/watcher.js +142 -0
  224. package/packages/ingest/dist/watcher.js.map +1 -0
  225. package/packages/ingest/package.json +30 -0
  226. package/packages/ingest/tsconfig.json +14 -0
  227. package/packages/llm/dist/cache.d.ts +26 -0
  228. package/packages/llm/dist/cache.d.ts.map +1 -0
  229. package/packages/llm/dist/cache.js +60 -0
  230. package/packages/llm/dist/cache.js.map +1 -0
  231. package/packages/llm/dist/index.d.ts +15 -0
  232. package/packages/llm/dist/index.d.ts.map +1 -0
  233. package/packages/llm/dist/index.js +19 -0
  234. package/packages/llm/dist/index.js.map +1 -0
  235. package/packages/llm/dist/output-parser.d.ts +4 -0
  236. package/packages/llm/dist/output-parser.d.ts.map +1 -0
  237. package/packages/llm/dist/output-parser.js +207 -0
  238. package/packages/llm/dist/output-parser.js.map +1 -0
  239. package/packages/llm/dist/prompts/context-ranking.d.ts +33 -0
  240. package/packages/llm/dist/prompts/context-ranking.d.ts.map +1 -0
  241. package/packages/llm/dist/prompts/context-ranking.js +30 -0
  242. package/packages/llm/dist/prompts/context-ranking.js.map +1 -0
  243. package/packages/llm/dist/prompts/contradiction-detection.d.ts +46 -0
  244. package/packages/llm/dist/prompts/contradiction-detection.d.ts.map +1 -0
  245. package/packages/llm/dist/prompts/contradiction-detection.js +45 -0
  246. package/packages/llm/dist/prompts/contradiction-detection.js.map +1 -0
  247. package/packages/llm/dist/prompts/conversational-query.d.ts +29 -0
  248. package/packages/llm/dist/prompts/conversational-query.d.ts.map +1 -0
  249. package/packages/llm/dist/prompts/conversational-query.js +34 -0
  250. package/packages/llm/dist/prompts/conversational-query.js.map +1 -0
  251. package/packages/llm/dist/prompts/entity-extraction.d.ts +67 -0
  252. package/packages/llm/dist/prompts/entity-extraction.d.ts.map +1 -0
  253. package/packages/llm/dist/prompts/entity-extraction.js +76 -0
  254. package/packages/llm/dist/prompts/entity-extraction.js.map +1 -0
  255. package/packages/llm/dist/prompts/follow-up-generation.d.ts +25 -0
  256. package/packages/llm/dist/prompts/follow-up-generation.d.ts.map +1 -0
  257. package/packages/llm/dist/prompts/follow-up-generation.js +25 -0
  258. package/packages/llm/dist/prompts/follow-up-generation.js.map +1 -0
  259. package/packages/llm/dist/prompts/merge-detection.d.ts +41 -0
  260. package/packages/llm/dist/prompts/merge-detection.d.ts.map +1 -0
  261. package/packages/llm/dist/prompts/merge-detection.js +31 -0
  262. package/packages/llm/dist/prompts/merge-detection.js.map +1 -0
  263. package/packages/llm/dist/prompts/relationship-inference.d.ts +60 -0
  264. package/packages/llm/dist/prompts/relationship-inference.d.ts.map +1 -0
  265. package/packages/llm/dist/prompts/relationship-inference.js +66 -0
  266. package/packages/llm/dist/prompts/relationship-inference.js.map +1 -0
  267. package/packages/llm/dist/providers/anthropic.d.ts +39 -0
  268. package/packages/llm/dist/providers/anthropic.d.ts.map +1 -0
  269. package/packages/llm/dist/providers/anthropic.js +157 -0
  270. package/packages/llm/dist/providers/anthropic.js.map +1 -0
  271. package/packages/llm/dist/providers/ollama.d.ts +52 -0
  272. package/packages/llm/dist/providers/ollama.d.ts.map +1 -0
  273. package/packages/llm/dist/providers/ollama.js +297 -0
  274. package/packages/llm/dist/providers/ollama.js.map +1 -0
  275. package/packages/llm/dist/providers/openai-compatible.d.ts +40 -0
  276. package/packages/llm/dist/providers/openai-compatible.d.ts.map +1 -0
  277. package/packages/llm/dist/providers/openai-compatible.js +164 -0
  278. package/packages/llm/dist/providers/openai-compatible.js.map +1 -0
  279. package/packages/llm/dist/router.d.ts +87 -0
  280. package/packages/llm/dist/router.d.ts.map +1 -0
  281. package/packages/llm/dist/router.js +399 -0
  282. package/packages/llm/dist/router.js.map +1 -0
  283. package/packages/llm/dist/token-tracker.d.ts +24 -0
  284. package/packages/llm/dist/token-tracker.d.ts.map +1 -0
  285. package/packages/llm/dist/token-tracker.js +114 -0
  286. package/packages/llm/dist/token-tracker.js.map +1 -0
  287. package/packages/llm/package.json +25 -0
  288. package/packages/llm/tsconfig.json +12 -0
  289. package/packages/mcp/dist/index.d.ts +10 -0
  290. package/packages/mcp/dist/index.d.ts.map +1 -0
  291. package/packages/mcp/dist/index.js +33 -0
  292. package/packages/mcp/dist/index.js.map +1 -0
  293. package/packages/mcp/dist/server.d.ts +5 -0
  294. package/packages/mcp/dist/server.d.ts.map +1 -0
  295. package/packages/mcp/dist/server.js +87 -0
  296. package/packages/mcp/dist/server.js.map +1 -0
  297. package/packages/mcp/dist/store-factory.d.ts +8 -0
  298. package/packages/mcp/dist/store-factory.d.ts.map +1 -0
  299. package/packages/mcp/dist/store-factory.js +24 -0
  300. package/packages/mcp/dist/store-factory.js.map +1 -0
  301. package/packages/mcp/dist/tools/contradictions.d.ts +14 -0
  302. package/packages/mcp/dist/tools/contradictions.d.ts.map +1 -0
  303. package/packages/mcp/dist/tools/contradictions.js +33 -0
  304. package/packages/mcp/dist/tools/contradictions.js.map +1 -0
  305. package/packages/mcp/dist/tools/find.d.ts +37 -0
  306. package/packages/mcp/dist/tools/find.d.ts.map +1 -0
  307. package/packages/mcp/dist/tools/find.js +52 -0
  308. package/packages/mcp/dist/tools/find.js.map +1 -0
  309. package/packages/mcp/dist/tools/projects.d.ts +16 -0
  310. package/packages/mcp/dist/tools/projects.d.ts.map +1 -0
  311. package/packages/mcp/dist/tools/projects.js +17 -0
  312. package/packages/mcp/dist/tools/projects.js.map +1 -0
  313. package/packages/mcp/dist/tools/query.d.ts +20 -0
  314. package/packages/mcp/dist/tools/query.d.ts.map +1 -0
  315. package/packages/mcp/dist/tools/query.js +65 -0
  316. package/packages/mcp/dist/tools/query.js.map +1 -0
  317. package/packages/mcp/dist/tools/status.d.ts +15 -0
  318. package/packages/mcp/dist/tools/status.d.ts.map +1 -0
  319. package/packages/mcp/dist/tools/status.js +17 -0
  320. package/packages/mcp/dist/tools/status.js.map +1 -0
  321. package/packages/mcp/package.json +27 -0
  322. package/packages/mcp/tsconfig.json +14 -0
  323. package/packages/server/dist/index.d.ts +17 -0
  324. package/packages/server/dist/index.d.ts.map +1 -0
  325. package/packages/server/dist/index.js +170 -0
  326. package/packages/server/dist/index.js.map +1 -0
  327. package/packages/server/dist/middleware/auth.d.ts +9 -0
  328. package/packages/server/dist/middleware/auth.d.ts.map +1 -0
  329. package/packages/server/dist/middleware/auth.js +94 -0
  330. package/packages/server/dist/middleware/auth.js.map +1 -0
  331. package/packages/server/dist/routes/contradictions.d.ts +4 -0
  332. package/packages/server/dist/routes/contradictions.d.ts.map +1 -0
  333. package/packages/server/dist/routes/contradictions.js +63 -0
  334. package/packages/server/dist/routes/contradictions.js.map +1 -0
  335. package/packages/server/dist/routes/entities.d.ts +4 -0
  336. package/packages/server/dist/routes/entities.d.ts.map +1 -0
  337. package/packages/server/dist/routes/entities.js +61 -0
  338. package/packages/server/dist/routes/entities.js.map +1 -0
  339. package/packages/server/dist/routes/projects.d.ts +4 -0
  340. package/packages/server/dist/routes/projects.d.ts.map +1 -0
  341. package/packages/server/dist/routes/projects.js +35 -0
  342. package/packages/server/dist/routes/projects.js.map +1 -0
  343. package/packages/server/dist/routes/query.d.ts +4 -0
  344. package/packages/server/dist/routes/query.d.ts.map +1 -0
  345. package/packages/server/dist/routes/query.js +93 -0
  346. package/packages/server/dist/routes/query.js.map +1 -0
  347. package/packages/server/dist/routes/relationships.d.ts +4 -0
  348. package/packages/server/dist/routes/relationships.d.ts.map +1 -0
  349. package/packages/server/dist/routes/relationships.js +52 -0
  350. package/packages/server/dist/routes/relationships.js.map +1 -0
  351. package/packages/server/dist/routes/status.d.ts +4 -0
  352. package/packages/server/dist/routes/status.d.ts.map +1 -0
  353. package/packages/server/dist/routes/status.js +85 -0
  354. package/packages/server/dist/routes/status.js.map +1 -0
  355. package/packages/server/dist/ws/event-relay.d.ts +10 -0
  356. package/packages/server/dist/ws/event-relay.d.ts.map +1 -0
  357. package/packages/server/dist/ws/event-relay.js +83 -0
  358. package/packages/server/dist/ws/event-relay.js.map +1 -0
  359. package/packages/server/package.json +32 -0
  360. package/packages/server/tsconfig.json +15 -0
  361. package/packages/web/dist/assets/index-Bxjfq4I0.css +1 -0
  362. package/packages/web/dist/assets/index-MqwNS5FD.js +248 -0
  363. package/packages/web/dist/assets/index-MqwNS5FD.js.map +1 -0
  364. package/packages/web/dist/cortex-icon.svg +9 -0
  365. package/packages/web/dist/icon.png +0 -0
  366. package/packages/web/dist/index.html +14 -0
  367. package/packages/web/dist/logo.png +0 -0
  368. package/packages/web/index.html +13 -0
  369. package/packages/web/package.json +34 -0
  370. package/packages/web/public/cortex-icon.svg +9 -0
  371. package/packages/web/public/icon.png +0 -0
  372. package/packages/web/public/logo.png +0 -0
  373. package/packages/web/tsconfig.json +22 -0
  374. package/packages/web/vite.config.ts +20 -0
  375. package/tsconfig.base.json +18 -0
  376. package/tsconfig.json +12 -0
  377. package/vitest.config.ts +14 -0
@@ -0,0 +1,3153 @@
1
+ #!/usr/bin/env node
2
+ var __defProp = Object.defineProperty;
3
+ var __export = (target, all) => {
4
+ for (var name in all)
5
+ __defProp(target, name, { get: all[name], enumerable: true });
6
+ };
7
+
8
+ // packages/mcp/dist/index.js
9
+ import { StdioServerTransport } from "@modelcontextprotocol/sdk/server/stdio.js";
10
+
11
+ // packages/core/dist/types/llm.js
12
// Runtime "enum" of LLM task identifiers (compiled from a TypeScript
// enum in packages/core/dist/types/llm.js). LLMTask ends up as a plain
// object whose keys are the member names and whose values are the
// snake_case task ids used for routing.
var LLMTask;
(function (tasks) {
  // Every member's string value is simply its own name lower-cased.
  for (const member of [
    "ENTITY_EXTRACTION",
    "RELATIONSHIP_INFERENCE",
    "EMBEDDING_GENERATION",
    "CONVERSATIONAL_QUERY",
    "CONTRADICTION_DETECTION",
    "CONTEXT_RANKING",
  ]) {
    tasks[member] = member.toLowerCase();
  }
})(LLMTask || (LLMTask = {}));
21
+
22
+ // packages/core/dist/errors/cortex-error.js
23
// Structured application error carrying routing metadata (code,
// severity, layer) plus optional recovery hints for callers.
var CortexError = class extends Error {
  code;
  severity;
  layer;
  context;
  recoveryAction;
  retryable;
  httpStatus;
  name = "CortexError";
  /**
   * @param {string} code - machine-readable error-code constant
   * @param {string} severity - e.g. "critical"
   * @param {string} layer - subsystem that raised the error (e.g. "config")
   * @param {string} message - human-readable description
   * @param {object} [context] - extra structured detail for logging
   * @param {string} [recoveryAction] - suggested remediation text
   * @param {boolean} [retryable=false] - whether retrying may succeed
   * @param {number} [httpStatus] - HTTP status to surface, if any
   */
  constructor(code, severity, layer, message, context, recoveryAction, retryable = false, httpStatus) {
    super(message);
    Object.assign(this, {
      code,
      severity,
      layer,
      context,
      recoveryAction,
      retryable,
      httpStatus,
    });
  }
};
43
// Error-code string constants. Each code's value equals its own
// identifier, so logs and comparisons read the same either way.

// LLM-layer failures:
var LLM_PROVIDER_UNAVAILABLE = "LLM_PROVIDER_UNAVAILABLE";
var LLM_EXTRACTION_FAILED = "LLM_EXTRACTION_FAILED";
var LLM_RATE_LIMITED = "LLM_RATE_LIMITED";
var LLM_BUDGET_EXHAUSTED = "LLM_BUDGET_EXHAUSTED";
var LLM_AUTH_FAILED = "LLM_AUTH_FAILED";
var LLM_TIMEOUT = "LLM_TIMEOUT";

// Graph-store failures:
var GRAPH_DB_ERROR = "GRAPH_DB_ERROR";
var GRAPH_ENTITY_NOT_FOUND = "GRAPH_ENTITY_NOT_FOUND";

// Configuration failures:
var CONFIG_INVALID = "CONFIG_INVALID";
var CONFIG_MISSING = "CONFIG_MISSING";
53
+
54
+ // packages/core/dist/events/event-bus.js
55
// Minimal synchronous pub/sub bus. Handlers are stored per event type
// in a Map<string, Set<fn>>; delivery order follows registration order.
var EventBus = class {
  handlers = /* @__PURE__ */ new Map();

  /**
   * Dispatch `event` to every handler registered for `event.type`.
   * A throwing handler is silently ignored so one bad listener cannot
   * prevent delivery to the remaining listeners.
   */
  emit(event) {
    const listeners = this.handlers.get(event.type);
    if (!listeners) return;
    for (const listener of listeners) {
      try {
        listener(event);
      } catch {
        // deliberately swallowed — see note above
      }
    }
  }

  /**
   * Register `handler` for events of `type`.
   * @returns {() => void} an unsubscribe function.
   */
  on(type, handler) {
    let listeners = this.handlers.get(type);
    if (!listeners) {
      listeners = /* @__PURE__ */ new Set();
      this.handlers.set(type, listeners);
    }
    listeners.add(handler);
    return () => this.off(type, handler);
  }

  /** Remove `handler` for `type`; drops the Set once it is empty. */
  off(type, handler) {
    const listeners = this.handlers.get(type);
    if (!listeners) return;
    listeners.delete(handler);
    if (listeners.size === 0) this.handlers.delete(type);
  }

  /** Register `handler` to fire at most once for `type`. */
  once(type, handler) {
    const fireOnce = (event) => {
      this.off(type, fireOnce);
      handler(event);
    };
    this.on(type, fireOnce);
  }
};
// Shared process-wide bus instance.
var eventBus = new EventBus();
93
+
94
+ // packages/core/dist/config/schema.js
95
+ import { z } from "zod";
96
// ---------------------------------------------------------------------------
// Zod schemas for cortex.config.json. Every field carries a default, so
// cortexConfigSchema.parse({}) yields a fully-populated configuration.
// ---------------------------------------------------------------------------

// File-ingestion settings: what to watch, what to skip, and safety limits.
var ingestConfigSchema = z.object({
  watchDirs: z.array(z.string()).default(["."]),
  // Glob-style exclusions for build output, lockfiles, and OS cruft.
  exclude: z.array(z.string()).default([
    "node_modules",
    "dist",
    "build",
    "out",
    ".git",
    "*.min.js",
    "*.min.css",
    "*.map",
    "package-lock.json",
    "yarn.lock",
    "pnpm-lock.yaml",
    "__pycache__",
    "*.pyc",
    ".DS_Store",
    "Thumbs.db"
  ]),
  fileTypes: z.array(z.string()).default(["md", "ts", "tsx", "js", "jsx", "json", "yaml", "yml"]),
  maxFileSize: z.number().positive().default(10485760),
  maxFilesPerDir: z.number().positive().default(1e4),
  maxTotalFiles: z.number().positive().default(5e4),
  debounceMs: z.number().nonnegative().default(500),
  parseTimeoutMs: z.number().positive().default(3e4),
  batchSize: z.number().positive().default(5),
  followSymlinks: z.boolean().default(false),
  confirmCost: z.boolean().default(true)
});

// Graph/vector store settings (SQLite + LanceDB paths, maintenance knobs).
var graphConfigSchema = z.object({
  dbPath: z.string().default("~/.cortex/cortex.db"),
  vectorDbPath: z.string().default("~/.cortex/vector.lance"),
  walMode: z.boolean().default(true),
  backupOnStartup: z.boolean().default(true),
  integrityCheckInterval: z.enum(["daily", "weekly", "monthly", "never"]).default("weekly"),
  softDelete: z.boolean().default(true),
  mergeConfidenceThreshold: z.number().min(0).max(1).default(0.95)
});

// Monthly spend budget and what to do as it is approached/exceeded.
var llmBudgetSchema = z.object({
  monthlyLimitUsd: z.number().nonnegative().default(25),
  warningThresholds: z.array(z.number().min(0).max(1)).default([0.5, 0.8, 0.9]),
  enforcementAction: z.enum(["warn", "fallback-local", "stop"]).default("fallback-local")
});

// LLM response cache settings.
var llmCacheSchema = z.object({
  enabled: z.boolean().default(true),
  ttlDays: z.number().positive().default(7),
  maxSizeMb: z.number().positive().default(500)
});

// Local (Ollama) provider settings.
var llmLocalSchema = z.object({
  provider: z.string().default("ollama"),
  host: z.string().url().default("http://localhost:11434"),
  model: z.string().default("mistral:7b-instruct-q5_K_M"),
  embeddingModel: z.string().default("nomic-embed-text"),
  numCtx: z.number().positive().default(8192),
  numGpu: z.number().default(-1),
  timeoutMs: z.number().positive().default(9e4),
  // 90 seconds
  keepAlive: z.string().default("5m")
});

// Cloud (Anthropic-style) provider settings.
var llmCloudSchema = z.object({
  provider: z.string().default("anthropic"),
  baseUrl: z.string().url().optional(),
  apiKeySource: z.string().default("env:CORTEX_ANTHROPIC_API_KEY"),
  models: z.object({
    primary: z.string().default("claude-sonnet-4-5-20250929"),
    fast: z.string().default("claude-haiku-4-5-20251001")
  }).default({}),
  timeoutMs: z.number().positive().default(6e4),
  maxRetries: z.number().nonnegative().default(3),
  promptCaching: z.boolean().default(true)
});

// Top-level LLM settings: routing mode, per-task routing, temperatures,
// plus the cache/budget/local/cloud sub-schemas above.
var llmConfigSchema = z.object({
  mode: z.enum(["cloud-first", "hybrid", "local-first", "local-only"]).default("cloud-first"),
  taskRouting: z.record(z.string(), z.enum(["auto", "local", "cloud"])).default({
    entity_extraction: "auto",
    relationship_inference: "auto",
    contradiction_detection: "local",
    conversational_query: "auto",
    context_ranking: "auto",
    embedding_generation: "local"
  }),
  temperature: z.record(z.string(), z.number().min(0).max(2)).default({
    extraction: 0.1,
    chat: 0.7,
    ranking: 0.1,
    proactive: 0.5
  }),
  maxContextTokens: z.number().positive().default(5e4),
  cache: llmCacheSchema.default({}),
  budget: llmBudgetSchema.default({}),
  local: llmLocalSchema.default({}),
  cloud: llmCloudSchema.default({})
});

// Privacy controls: classification levels and secret-detection regexes
// applied before content may be transmitted to a cloud provider.
var privacyConfigSchema = z.object({
  defaultLevel: z.enum(["standard", "sensitive", "restricted"]).default("standard"),
  directoryOverrides: z.record(z.string(), z.enum(["standard", "sensitive", "restricted"])).default({}),
  autoClassify: z.boolean().default(true),
  logTransmissions: z.boolean().default(true),
  showTransmissionIndicator: z.boolean().default(true),
  secretPatterns: z.array(z.string()).default([
    "(?i)(api[_-]?key|secret[_-]?key|access[_-]?token)\\s*[:=]\\s*[\\w\\-]{20,}",
    "AKIA[0-9A-Z]{16}",
    "sk-ant-[a-zA-Z0-9\\-]{40,}",
    "ghp_[a-zA-Z0-9]{36}",
    "(?i)password\\s*[:=]\\s*\\S{8,}"
  ])
});

// Local HTTP server settings (loopback by default).
var serverConfigSchema = z.object({
  port: z.number().int().min(1).max(65535).default(3710),
  host: z.string().default("127.0.0.1"),
  cors: z.array(z.string()).default(["http://localhost:5173"])
});

// Logging settings: level, rotation, and prompt redaction.
var loggingConfigSchema = z.object({
  level: z.enum(["debug", "info", "warn", "error"]).default("info"),
  file: z.string().default("~/.cortex/logs/cortex.log"),
  structured: z.boolean().default(true),
  maxSizeMb: z.number().positive().default(10),
  maxFiles: z.number().positive().default(5),
  redactPrompts: z.boolean().default(false)
});

// Root schema tying all sections together.
var cortexConfigSchema = z.object({
  $schema: z.string().optional(),
  version: z.string().default("1.0"),
  ingest: ingestConfigSchema.default({}),
  graph: graphConfigSchema.default({}),
  llm: llmConfigSchema.default({}),
  privacy: privacyConfigSchema.default({}),
  server: serverConfigSchema.default({}),
  logging: loggingConfigSchema.default({})
});
226
+
227
+ // packages/core/dist/config/loader.js
228
+ import { readFileSync, existsSync } from "node:fs";
229
+ import { resolve, join } from "node:path";
230
+ import { homedir } from "node:os";
231
/**
 * Best-effort loader for ~/.cortex/.env. Each non-comment KEY=VALUE
 * line is copied into process.env unless the variable is already set;
 * matching single or double quotes around the value are stripped.
 * Read/parse failures are silently ignored (missing file is normal).
 */
function loadDotEnv() {
  const envPath = join(homedir(), ".cortex", ".env");
  if (!existsSync(envPath)) return;
  try {
    for (const rawLine of readFileSync(envPath, "utf-8").split(/\r?\n/)) {
      const line = rawLine.trim();
      if (!line || line.startsWith("#")) continue;
      const eq = line.indexOf("=");
      if (eq === -1) continue;
      const key = line.slice(0, eq).trim();
      let value = line.slice(eq + 1).trim();
      const doubleQuoted = value.startsWith('"') && value.endsWith('"');
      const singleQuoted = value.startsWith("'") && value.endsWith("'");
      if (doubleQuoted || singleQuoted) {
        value = value.slice(1, -1);
      }
      // Existing environment variables always win over the .env file.
      if (process.env[key] === void 0) {
        process.env[key] = value;
      }
    }
  } catch {
    // unreadable .env is non-fatal
  }
}
256
// Name of the per-project / global configuration file.
var CONFIG_FILENAME = "cortex.config.json";
/**
 * Locate the nearest cortex.config.json.
 * Search order: CORTEX_CONFIG_PATH environment variable (when set), then
 * `startDir` (when provided), then the current working directory, then
 * ~/.cortex/. Returns the first existing path, or null when none exists.
 */
function findConfigFile(startDir) {
  const candidates = [];
  const envPath = process.env["CORTEX_CONFIG_PATH"];
  if (envPath) {
    candidates.push(resolve(envPath));
  }
  if (startDir) {
    candidates.push(resolve(startDir, CONFIG_FILENAME));
  }
  candidates.push(resolve(process.cwd(), CONFIG_FILENAME));
  candidates.push(join(homedir(), ".cortex", CONFIG_FILENAME));
  return candidates.find((candidate) => existsSync(candidate)) ?? null;
}
273
/**
 * Read and JSON-parse a configuration file, converting any I/O or syntax
 * error into a critical CortexError that carries the offending file path.
 */
function readConfigFile(filePath) {
  try {
    const raw = readFileSync(filePath, "utf-8");
    return JSON.parse(raw);
  } catch (err) {
    const reason = err instanceof Error ? err.message : String(err);
    throw new CortexError(CONFIG_INVALID, "critical", "config", `Failed to read config file: ${filePath}: ${reason}`, { filePath });
  }
}
281
/**
 * Overlay the well-known CORTEX_* environment variables onto a raw
 * (pre-validation) config object, mutating and returning it. Each touched
 * section is shallow-copied so nested objects from the file are not
 * mutated in place. Numeric variables are parsed here; range and type
 * validation happens later via the zod schema.
 */
function applyEnvOverrides(config8) {
  const env = process.env;
  const llmMode = env["CORTEX_LLM_MODE"];
  if (llmMode) {
    config8.llm = { ...config8.llm, mode: llmMode };
  }
  const serverPort = env["CORTEX_SERVER_PORT"];
  if (serverPort) {
    config8.server = { ...config8.server, port: parseInt(serverPort, 10) };
  }
  const dbPath = env["CORTEX_DB_PATH"];
  if (dbPath) {
    config8.graph = { ...config8.graph, dbPath };
  }
  const logLevel = env["CORTEX_LOG_LEVEL"];
  if (logLevel) {
    config8.logging = { ...config8.logging, level: logLevel };
  }
  const budgetLimit = env["CORTEX_BUDGET_LIMIT"];
  if (budgetLimit) {
    const budget = { ...config8.llm?.budget, monthlyLimitUsd: parseFloat(budgetLimit) };
    config8.llm = { ...config8.llm, budget };
  }
  const ollamaHost = env["CORTEX_OLLAMA_HOST"];
  if (ollamaHost) {
    const local = { ...config8.llm?.local, host: ollamaHost };
    config8.llm = { ...config8.llm, local };
  }
  return config8;
}
305
/**
 * Load, merge and validate the Cortex configuration.
 *
 * Precedence (lowest to highest): config file, CORTEX_* environment
 * variables, programmatic `overrides`. The merged object is validated by
 * cortexConfigSchema, which also fills in every default.
 *
 * @param {object} [options]
 * @param {string} [options.configDir] - extra directory searched first.
 * @param {object} [options.overrides] - programmatic overrides; merged
 *   shallowly, so an override replaces a whole section.
 * @param {boolean} [options.requireFile=false] - throw when no file found.
 * @throws CortexError when the file is required but missing, or when the
 *   merged configuration fails schema validation.
 */
function loadConfig(options = {}) {
  loadDotEnv();
  const { configDir, overrides, requireFile = false } = options;
  const configPath = findConfigFile(configDir);
  if (!configPath && requireFile) {
    throw new CortexError(CONFIG_MISSING, "critical", "config", "No cortex.config.json found. Run `cortex init` to create one.", void 0, "Run `cortex init` to create a configuration file.");
  }
  const fileConfig = configPath ? readConfigFile(configPath) : {};
  let merged = applyEnvOverrides({ ...fileConfig });
  if (overrides) {
    merged = { ...merged, ...overrides };
  }
  const result = cortexConfigSchema.safeParse(merged);
  if (result.success) {
    return result.data;
  }
  const messages = result.error.issues.map((issue) => `  ${issue.path.join(".")}: ${issue.message}`);
  throw new CortexError(CONFIG_INVALID, "critical", "config", `Invalid configuration:\n${messages.join("\n")}`, { issues: result.error.issues });
}
328
+
329
+ // packages/core/dist/config/project-registry.js
330
+ import { join as join2 } from "node:path";
331
+ import { homedir as homedir2 } from "node:os";
332
+ import { z as z2 } from "zod";
333
// Absolute path of the global project registry (~/.cortex/projects.json).
var REGISTRY_PATH = join2(homedir2(), ".cortex", "projects.json");
// One registered project: where it lives, which config it uses, and
// bookkeeping timestamps (ISO-8601 strings, per addedAt/lastWatched usage).
var projectEntrySchema = z2.object({
  name: z2.string(),
  path: z2.string(),
  // Optional explicit config file path; presumably falls back to
  // discovery via findConfigFile when absent — TODO confirm against callers.
  configPath: z2.string().optional(),
  addedAt: z2.string(),
  lastWatched: z2.string().optional()
});
// Registry file format: a fixed "1.0" version tag plus a map of entries
// keyed by project identifier.
var projectRegistrySchema = z2.object({
  version: z2.literal("1.0"),
  projects: z2.record(z2.string(), projectEntrySchema)
});
345
+
346
+ // packages/core/dist/logger.js
347
// Numeric severity ranks; higher means more severe.
var LOG_LEVELS = {
  debug: 0,
  info: 1,
  warn: 2,
  error: 3
};
// When set (a numeric rank), overrides every Logger instance's own level.
var globalLogLevel = null;
/**
 * Minimal structured JSON logger. Each entry is one JSON line carrying
 * level, message, ISO timestamp, source tag and an optional context
 * object. warn/error entries go to stderr; debug/info go to stdout.
 */
var Logger = class _Logger {
  level;
  source;
  constructor(source, level = "info") {
    this.source = source;
    this.level = LOG_LEVELS[level];
  }
  /** Change this instance's minimum level at runtime. */
  setLevel(level) {
    this.level = LOG_LEVELS[level];
  }
  debug(message, context) {
    this.log("debug", message, context);
  }
  info(message, context) {
    this.log("info", message, context);
  }
  warn(message, context) {
    this.log("warn", message, context);
  }
  error(message, context) {
    this.log("error", message, context);
  }
  /** Create a logger namespaced under this one, inheriting its level. */
  child(source) {
    const nested = new _Logger(`${this.source}:${source}`);
    nested.level = this.level;
    return nested;
  }
  // Core sink: filter by effective level, build the entry, write one line.
  log(level, message, context) {
    const threshold = globalLogLevel ?? this.level;
    if (LOG_LEVELS[level] < threshold) {
      return;
    }
    const record = {
      level,
      message,
      timestamp: new Date().toISOString(),
      source: this.source
    };
    if (context) {
      record.context = context;
    }
    const sink = level === "error" || level === "warn" ? process.stderr : process.stdout;
    sink.write(`${JSON.stringify(record)}\n`);
  }
};
396
/**
 * Build a Logger for `source`. Level precedence: explicit argument, then
 * the CORTEX_LOG_LEVEL environment variable, then "info".
 */
function createLogger(source, level) {
  return new Logger(source, level ?? process.env["CORTEX_LOG_LEVEL"] ?? "info");
}
400
+
401
+ // packages/llm/dist/providers/anthropic.js
402
+ import Anthropic from "@anthropic-ai/sdk";
403
+ var logger = createLogger("llm:anthropic");
404
// Cloud LLM provider backed by the Anthropic Messages API (official SDK).
var AnthropicProvider = class {
  name = "anthropic";
  type = "cloud";
  // Configured Anthropic SDK client.
  client;
  // Model used for the "primary" (quality-sensitive) preference.
  primaryModel;
  // Cheaper/faster model for the "fast" preference and health checks.
  fastModel;
  // When true, system prompts carry cache_control so Anthropic can reuse
  // cached prompt prefixes across requests.
  promptCaching;
  // Advertised capabilities consumed by the provider selection logic.
  capabilities = {
    supportedTasks: [
      LLMTask.ENTITY_EXTRACTION,
      LLMTask.RELATIONSHIP_INFERENCE,
      LLMTask.CONTRADICTION_DETECTION,
      LLMTask.CONVERSATIONAL_QUERY,
      LLMTask.CONTEXT_RANKING
    ],
    maxContextTokens: 2e5,
    supportsStructuredOutput: true,
    supportsStreaming: true,
    estimatedTokensPerSecond: 80,
    // USD per million tokens; matches the sonnet entry in MODEL_COSTS.
    costPerMillionInputTokens: 3,
    costPerMillionOutputTokens: 15
  };
  /**
   * @param {object} [options] - { apiKey, timeoutMs, maxRetries,
   *   primaryModel, fastModel, promptCaching }.
   * @throws CortexError (LLM_AUTH_FAILED) when no API key is configured
   *   either via options or CORTEX_ANTHROPIC_API_KEY.
   */
  constructor(options = {}) {
    const apiKey = options.apiKey ?? process.env["CORTEX_ANTHROPIC_API_KEY"];
    if (!apiKey) {
      throw new CortexError(LLM_AUTH_FAILED, "critical", "llm", "Anthropic API key not found. Set CORTEX_ANTHROPIC_API_KEY environment variable.", void 0, "Set CORTEX_ANTHROPIC_API_KEY or run `cortex init`.", false, 401);
    }
    this.client = new Anthropic({
      apiKey,
      timeout: options.timeoutMs ?? 3e4,
      maxRetries: options.maxRetries ?? 3
    });
    this.primaryModel = options.primaryModel ?? "claude-sonnet-4-5-20250929";
    this.fastModel = options.fastModel ?? "claude-haiku-4-5-20251001";
    this.promptCaching = options.promptCaching ?? true;
  }
  /** Resolve a model preference ("primary" | "fast") to a model id. */
  getModel(preference = "primary") {
    return preference === "fast" ? this.fastModel : this.primaryModel;
  }
  /** Completion without a system prompt; returns just the text. */
  async complete(prompt, options) {
    const result = await this.completeWithSystem(void 0, prompt, options);
    return result.content;
  }
  /**
   * Non-streaming completion. Returns { content, inputTokens,
   * outputTokens, model }; content is the first text block of the
   * response, or "" when none is present.
   * @throws CortexError translated via mapError().
   */
  async completeWithSystem(systemPrompt2, userPrompt, options, modelPreference = "primary") {
    const model = this.getModel(modelPreference);
    try {
      const systemMessages = systemPrompt2 ? this.buildSystemMessages(systemPrompt2) : void 0;
      const response = await this.client.messages.create({
        model,
        max_tokens: options?.maxTokens ?? 4096,
        temperature: options?.temperature ?? 0.7,
        ...systemMessages && { system: systemMessages },
        messages: [{ role: "user", content: userPrompt }],
        ...options?.stopSequences?.length && { stop_sequences: options.stopSequences }
      });
      const textBlock = response.content.find((b) => b.type === "text");
      const content = textBlock?.text ?? "";
      return {
        content,
        inputTokens: response.usage.input_tokens,
        outputTokens: response.usage.output_tokens,
        model
      };
    } catch (err) {
      throw this.mapError(err);
    }
  }
  /**
   * Completion whose output is expected to be JSON.
   * NOTE(review): the schema argument is unused and the raw output is
   * JSON.parse'd directly — a non-JSON reply throws SyntaxError rather
   * than a CortexError; confirm callers handle that.
   */
  async completeStructured(prompt, _schema, options) {
    const result = await this.complete(prompt, options);
    return JSON.parse(result);
  }
  /** Streaming variant of complete(); yields text deltas. */
  async *stream(prompt, options) {
    yield* this.streamWithSystem(void 0, prompt, options);
  }
  /**
   * Streaming completion: yields text deltas as they arrive and returns
   * { inputTokens, outputTokens, model } from the final message once the
   * stream has finished.
   */
  async *streamWithSystem(systemPrompt2, userPrompt, options, modelPreference = "primary") {
    const model = this.getModel(modelPreference);
    try {
      const systemMessages = systemPrompt2 ? this.buildSystemMessages(systemPrompt2) : void 0;
      const stream = this.client.messages.stream({
        model,
        max_tokens: options?.maxTokens ?? 4096,
        temperature: options?.temperature ?? 0.7,
        ...systemMessages && { system: systemMessages },
        messages: [{ role: "user", content: userPrompt }],
        ...options?.stopSequences?.length && { stop_sequences: options.stopSequences }
      });
      for await (const event of stream) {
        // Only text deltas are surfaced; other event types are ignored.
        if (event.type === "content_block_delta" && event.delta.type === "text_delta") {
          yield event.delta.text;
        }
      }
      // finalMessage() resolves after the stream completes and carries
      // the authoritative token usage numbers.
      const finalMessage = await stream.finalMessage();
      return {
        inputTokens: finalMessage.usage.input_tokens,
        outputTokens: finalMessage.usage.output_tokens,
        model
      };
    } catch (err) {
      throw this.mapError(err);
    }
  }
  /** Anthropic has no embeddings endpoint; always throws. */
  async embed(_texts) {
    throw new CortexError(LLM_PROVIDER_UNAVAILABLE, "medium", "llm", "Anthropic does not support embeddings. Use local embedding model.");
  }
  /**
   * Liveness probe: issues a minimal 1-token request against the fast
   * model. NOTE(review): this is a real, billed API call.
   */
  async isAvailable() {
    try {
      await this.client.messages.create({
        model: this.fastModel,
        max_tokens: 1,
        messages: [{ role: "user", content: "ping" }]
      });
      return true;
    } catch {
      return false;
    }
  }
  // Wrap the system prompt as a content block, tagged with ephemeral
  // cache_control when prompt caching is enabled.
  buildSystemMessages(systemPrompt2) {
    if (this.promptCaching) {
      return [{
        type: "text",
        text: systemPrompt2,
        cache_control: { type: "ephemeral" }
      }];
    }
    return [{ type: "text", text: systemPrompt2 }];
  }
  // Translate SDK errors into CortexError. Order matters: the specific
  // subclasses (auth, rate-limit, timeout) must be checked before the
  // generic APIError base class.
  mapError(err) {
    if (err instanceof Anthropic.AuthenticationError) {
      return new CortexError(LLM_AUTH_FAILED, "critical", "llm", "Anthropic API authentication failed. Check your API key.", void 0, "Verify CORTEX_ANTHROPIC_API_KEY is correct.", false, 401);
    }
    if (err instanceof Anthropic.RateLimitError) {
      return new CortexError(LLM_RATE_LIMITED, "medium", "llm", "Anthropic rate limit exceeded.", void 0, "Wait and retry with backoff.", true, 429);
    }
    if (err instanceof Anthropic.APIConnectionTimeoutError) {
      return new CortexError(LLM_TIMEOUT, "medium", "llm", "Anthropic API request timed out.", void 0, "Retry the request.", true, 504);
    }
    if (err instanceof Anthropic.APIError) {
      return new CortexError(LLM_PROVIDER_UNAVAILABLE, "high", "llm", `Anthropic API error: ${err.message}`, { status: err.status }, "Retry or check Anthropic status page.", true, err.status);
    }
    const message = err instanceof Error ? err.message : String(err);
    return new CortexError(LLM_PROVIDER_UNAVAILABLE, "high", "llm", `Anthropic provider error: ${message}`, void 0, "Check network connectivity.", true);
  }
};
547
+
548
+ // packages/llm/dist/providers/ollama.js
549
+ var logger2 = createLogger("llm:ollama");
550
// Local LLM provider that talks to an Ollama server's HTTP API.
//
// Fixes over the previous version:
//  - Every AbortController timeout timer is now cleared in a `finally`
//    block. Previously, clearTimeout() was only reached when fetch
//    resolved, so a rejected fetch left a live timer (up to `timeoutMs`,
//    5 minutes by default) keeping the Node event loop alive.
//  - The NDJSON stream parser buffers partial lines: a JSON object split
//    across network chunks was previously dropped, silently losing
//    response tokens.
var OllamaProvider = class {
  name = "ollama";
  type = "local";
  host;           // base URL of the Ollama server
  model;          // completion model tag
  embeddingModel; // model used by embed()
  numCtx;         // context window (tokens) requested per call
  numGpu;         // GPU layer count; -1 lets Ollama decide
  timeoutMs;      // per-request timeout in milliseconds
  keepAlive;      // how long Ollama keeps the model resident after a call
  // Capability advertisement; maxContextTokens is synced to numCtx in the
  // constructor. Local inference is free, hence zero costs.
  capabilities = {
    supportedTasks: [
      LLMTask.ENTITY_EXTRACTION,
      LLMTask.RELATIONSHIP_INFERENCE,
      LLMTask.CONTRADICTION_DETECTION,
      LLMTask.CONVERSATIONAL_QUERY,
      LLMTask.CONTEXT_RANKING,
      LLMTask.EMBEDDING_GENERATION
    ],
    maxContextTokens: 8192,
    supportsStructuredOutput: true,
    supportsStreaming: true,
    estimatedTokensPerSecond: 30,
    costPerMillionInputTokens: 0,
    costPerMillionOutputTokens: 0
  };
  /**
   * @param {object} [options] - { host, model, embeddingModel, numCtx,
   *   numGpu, timeoutMs, keepAlive }.
   */
  constructor(options = {}) {
    this.host = options.host ?? process.env["CORTEX_OLLAMA_HOST"] ?? "http://localhost:11434";
    this.model = options.model ?? "mistral:7b-instruct-q5_K_M";
    this.embeddingModel = options.embeddingModel ?? "nomic-embed-text";
    this.numCtx = options.numCtx ?? 8192;
    this.numGpu = options.numGpu ?? -1;
    this.timeoutMs = options.timeoutMs ?? 3e5;
    this.keepAlive = options.keepAlive ?? "5m";
    this.capabilities.maxContextTokens = this.numCtx;
  }
  getModel() {
    return this.model;
  }
  /** Completion without a system prompt; returns just the text. */
  async complete(prompt, options) {
    const result = await this.completeWithSystem(void 0, prompt, options);
    return result.content;
  }
  /**
   * Non-streaming completion via POST /api/generate.
   * Returns { content, inputTokens, outputTokens, model }.
   * @throws CortexError translated via mapError().
   */
  async completeWithSystem(systemPrompt2, userPrompt, options, _modelPreference = "primary") {
    // Cap num_predict at half the context window so the prompt always has room.
    const numPredict = options?.maxTokens ? Math.min(options.maxTokens, Math.floor(this.numCtx / 2)) : void 0;
    const requestBody = {
      model: this.model,
      prompt: userPrompt,
      stream: false,
      options: {
        temperature: options?.temperature ?? 0.7,
        num_ctx: this.numCtx,
        num_gpu: this.numGpu,
        ...numPredict !== void 0 && { num_predict: numPredict },
        ...options?.stopSequences?.length && { stop: options.stopSequences }
      },
      keep_alive: this.keepAlive
    };
    if (systemPrompt2) {
      requestBody.system = systemPrompt2;
    }
    const controller = new AbortController();
    const timeoutId = setTimeout(() => controller.abort(), this.timeoutMs);
    try {
      const response = await fetch(`${this.host}/api/generate`, {
        method: "POST",
        headers: { "Content-Type": "application/json" },
        body: JSON.stringify(requestBody),
        signal: controller.signal
      });
      if (!response.ok) {
        const errorText = await response.text();
        throw new Error(`Ollama API error (${response.status}): ${errorText}`);
      }
      const result = await response.json();
      const inputTokens = result.prompt_eval_count ?? 0;
      const outputTokens = result.eval_count ?? 0;
      logger2.debug("Ollama completion", {
        model: this.model,
        inputTokens,
        outputTokens,
        // total_duration is reported in nanoseconds.
        durationMs: result.total_duration ? Math.round(result.total_duration / 1e6) : void 0
      });
      return {
        content: result.response,
        inputTokens,
        outputTokens,
        model: this.model
      };
    } catch (err) {
      throw this.mapError(err);
    } finally {
      // Fix: always release the abort timer, including when fetch rejects.
      clearTimeout(timeoutId);
    }
  }
  /**
   * Completion whose output is expected to be JSON.
   * NOTE(review): the schema argument is unused; a non-JSON reply throws
   * SyntaxError rather than a CortexError.
   */
  async completeStructured(prompt, _schema, options) {
    const result = await this.complete(prompt, options);
    return JSON.parse(result);
  }
  /** Streaming variant of complete(); yields text deltas. */
  async *stream(prompt, options) {
    yield* this.streamWithSystem(void 0, prompt, options);
  }
  /**
   * Streaming completion via POST /api/generate with stream: true.
   * Yields response fragments and returns { inputTokens, outputTokens,
   * model } from the terminal "done" NDJSON record.
   */
  async *streamWithSystem(systemPrompt2, userPrompt, options, _modelPreference = "primary") {
    const streamNumPredict = options?.maxTokens ? Math.min(options.maxTokens, Math.floor(this.numCtx / 2)) : void 0;
    const requestBody = {
      model: this.model,
      prompt: userPrompt,
      stream: true,
      options: {
        temperature: options?.temperature ?? 0.7,
        num_ctx: this.numCtx,
        num_gpu: this.numGpu,
        ...streamNumPredict !== void 0 && { num_predict: streamNumPredict },
        ...options?.stopSequences?.length && { stop: options.stopSequences }
      },
      keep_alive: this.keepAlive
    };
    if (systemPrompt2) {
      requestBody.system = systemPrompt2;
    }
    const controller = new AbortController();
    const timeoutId = setTimeout(() => controller.abort(), this.timeoutMs);
    try {
      const response = await fetch(`${this.host}/api/generate`, {
        method: "POST",
        headers: { "Content-Type": "application/json" },
        body: JSON.stringify(requestBody),
        signal: controller.signal
      });
      // The timeout guards connection/headers only; token streaming itself
      // is intentionally unbounded (generation may legitimately run long).
      clearTimeout(timeoutId);
      if (!response.ok) {
        const errorText = await response.text();
        throw new Error(`Ollama API error (${response.status}): ${errorText}`);
      }
      if (!response.body) {
        throw new Error("No response body from Ollama");
      }
      const reader = response.body.getReader();
      const decoder = new TextDecoder();
      let buffered = "";
      let inputTokens = 0;
      let outputTokens = 0;
      while (true) {
        const { done, value } = await reader.read();
        if (done)
          break;
        buffered += decoder.decode(value, { stream: true });
        // NDJSON: keep the trailing partial line in the buffer — a JSON
        // object can be split across network chunks (the old code parsed
        // per-chunk and dropped such fragments).
        const lines = buffered.split("\n");
        buffered = lines.pop() ?? "";
        for (const line of lines) {
          const trimmedLine = line.trim();
          if (!trimmedLine)
            continue;
          let data;
          try {
            data = JSON.parse(trimmedLine);
          } catch {
            continue; // tolerate malformed lines (best-effort)
          }
          if (data.response) {
            yield data.response;
          }
          if (data.done) {
            inputTokens = data.prompt_eval_count ?? 0;
            outputTokens = data.eval_count ?? 0;
          }
        }
      }
      // Flush any final buffered line after the stream ends.
      buffered += decoder.decode();
      const tail = buffered.trim();
      if (tail) {
        try {
          const data = JSON.parse(tail);
          if (data.response) {
            yield data.response;
          }
          if (data.done) {
            inputTokens = data.prompt_eval_count ?? 0;
            outputTokens = data.eval_count ?? 0;
          }
        } catch {
          // ignore a malformed trailing fragment
        }
      }
      return {
        inputTokens,
        outputTokens,
        model: this.model
      };
    } catch (err) {
      throw this.mapError(err);
    } finally {
      // Safe even when already cleared; fixes the leak on early failure.
      clearTimeout(timeoutId);
    }
  }
  /**
   * Generate embeddings via POST /api/embed.
   * Returns one Float32Array per input text.
   */
  async embed(texts) {
    const controller = new AbortController();
    const timeoutId = setTimeout(() => controller.abort(), this.timeoutMs);
    try {
      const requestBody = {
        model: this.embeddingModel,
        input: texts
      };
      const response = await fetch(`${this.host}/api/embed`, {
        method: "POST",
        headers: { "Content-Type": "application/json" },
        body: JSON.stringify(requestBody),
        signal: controller.signal
      });
      if (!response.ok) {
        const errorText = await response.text();
        throw new Error(`Ollama embed API error (${response.status}): ${errorText}`);
      }
      const result = await response.json();
      logger2.debug("Ollama embeddings", {
        model: this.embeddingModel,
        count: texts.length,
        dimensions: result.embeddings[0]?.length
      });
      return result.embeddings.map((emb) => new Float32Array(emb));
    } catch (err) {
      throw this.mapError(err);
    } finally {
      clearTimeout(timeoutId);
    }
  }
  /**
   * Liveness probe against GET /api/tags with a short 5s timeout.
   * Returns true when the server responds, even if the configured model
   * is missing — that case is only logged as a warning.
   */
  async isAvailable() {
    const controller = new AbortController();
    const timeoutId = setTimeout(() => controller.abort(), 5e3);
    try {
      const response = await fetch(`${this.host}/api/tags`, {
        method: "GET",
        signal: controller.signal
      });
      if (!response.ok) {
        return false;
      }
      const result = await response.json();
      const hasModel = result.models.some((m) => m.name === this.model || m.model === this.model);
      if (!hasModel) {
        logger2.warn("Ollama model not found", { model: this.model, available: result.models.map((m) => m.name) });
      }
      return true;
    } catch {
      return false;
    } finally {
      clearTimeout(timeoutId);
    }
  }
  /**
   * Verify the server is reachable and the configured model is installed;
   * throws a CortexError with remediation advice otherwise.
   * NOTE(review): the /api/tags fetch below has no timeout, unlike every
   * other call in this class — confirm whether that is intentional.
   */
  async ensureModel() {
    const available = await this.isAvailable();
    if (!available) {
      throw new CortexError(LLM_PROVIDER_UNAVAILABLE, "high", "llm", `Ollama is not running or not reachable at ${this.host}`, void 0, "Start Ollama with `ollama serve` or check the host configuration.", false);
    }
    const response = await fetch(`${this.host}/api/tags`);
    const result = await response.json();
    const hasModel = result.models.some((m) => m.name === this.model || m.model === this.model);
    if (!hasModel) {
      logger2.info("Pulling Ollama model", { model: this.model });
      throw new CortexError(LLM_PROVIDER_UNAVAILABLE, "high", "llm", `Ollama model "${this.model}" is not installed.`, { model: this.model }, `Run: ollama pull ${this.model}`, false);
    }
  }
  /**
   * List installed models via GET /api/tags (5s timeout).
   * Best-effort: returns [] on any failure.
   */
  async listModels() {
    const controller = new AbortController();
    const timeoutId = setTimeout(() => controller.abort(), 5e3);
    try {
      const response = await fetch(`${this.host}/api/tags`, { signal: controller.signal });
      if (!response.ok)
        return [];
      const result = await response.json();
      return result.models.map((m) => ({ name: m.name, sizeBytes: m.size, modifiedAt: m.modified_at }));
    } catch {
      return [];
    } finally {
      clearTimeout(timeoutId);
    }
  }
  getHost() {
    return this.host;
  }
  getNumCtx() {
    return this.numCtx;
  }
  getNumGpu() {
    return this.numGpu;
  }
  getEmbeddingModel() {
    return this.embeddingModel;
  }
  // Translate fetch/runtime errors into CortexError. AbortError maps to a
  // timeout; connection failures get a "start Ollama" hint.
  mapError(err) {
    if (err instanceof CortexError) {
      return err;
    }
    if (err instanceof Error) {
      if (err.name === "AbortError") {
        return new CortexError(LLM_TIMEOUT, "medium", "llm", `Ollama request timed out after ${this.timeoutMs}ms`, { timeoutMs: this.timeoutMs }, "Increase timeout or check if model is loaded.", true, 504);
      }
      if (err.message.includes("ECONNREFUSED") || err.message.includes("fetch failed")) {
        return new CortexError(LLM_PROVIDER_UNAVAILABLE, "high", "llm", `Cannot connect to Ollama at ${this.host}`, { host: this.host }, "Start Ollama with `ollama serve` or check the host configuration.", false);
      }
      return new CortexError(LLM_PROVIDER_UNAVAILABLE, "high", "llm", `Ollama provider error: ${err.message}`, void 0, "Check Ollama logs for details.", true);
    }
    return new CortexError(LLM_PROVIDER_UNAVAILABLE, "high", "llm", `Ollama provider error: ${String(err)}`, void 0, "Check Ollama logs for details.", true);
  }
};
827
+
828
+ // packages/llm/dist/providers/openai-compatible.js
829
+ import OpenAI from "openai";
830
+ var logger3 = createLogger("llm:openai-compatible");
831
// Cloud provider for any OpenAI-compatible chat-completions endpoint
// (OpenAI itself, Gemini's compatibility layer, etc.), using the OpenAI SDK
// pointed at a configurable baseURL.
var OpenAICompatibleProvider = class {
  name = "openai-compatible";
  type = "cloud";
  // Configured OpenAI SDK client.
  client;
  // Model for the "primary" (quality) preference.
  primaryModel;
  // Model for the "fast" preference and health checks.
  fastModel;
  // True when baseUrl points at Google's Gemini compatibility endpoint;
  // affects the token-limit parameter name (see tokenLimitParams).
  isGemini;
  capabilities = {
    supportedTasks: [
      LLMTask.ENTITY_EXTRACTION,
      LLMTask.RELATIONSHIP_INFERENCE,
      LLMTask.CONTRADICTION_DETECTION,
      LLMTask.CONVERSATIONAL_QUERY,
      LLMTask.CONTEXT_RANKING
    ],
    maxContextTokens: 128e3,
    supportsStructuredOutput: true,
    supportsStreaming: true,
    estimatedTokensPerSecond: 80,
    // Set to 0 — pricing varies by provider; budget tracking is approximate
    costPerMillionInputTokens: 0,
    costPerMillionOutputTokens: 0
  };
  /**
   * @param {object} options - { apiKey, baseUrl, timeoutMs, maxRetries,
   *   primaryModel, fastModel }. baseUrl and apiKey are required.
   * @throws CortexError (LLM_AUTH_FAILED) when apiKey is missing.
   */
  constructor(options) {
    if (!options.apiKey) {
      throw new CortexError(LLM_AUTH_FAILED, "critical", "llm", "OpenAI-compatible API key not found. Check llm.cloud.apiKeySource in your config.", void 0, "Set the environment variable specified in llm.cloud.apiKeySource.", false, 401);
    }
    this.client = new OpenAI({
      apiKey: options.apiKey,
      baseURL: options.baseUrl,
      timeout: options.timeoutMs ?? 6e4,
      maxRetries: options.maxRetries ?? 3
    });
    this.primaryModel = options.primaryModel ?? "gpt-4o";
    this.fastModel = options.fastModel ?? "gpt-4o-mini";
    this.isGemini = options.baseUrl.includes("generativelanguage.googleapis.com");
    logger3.info("OpenAI-compatible provider initialized", {
      baseUrl: options.baseUrl,
      primaryModel: this.primaryModel,
      fastModel: this.fastModel
    });
  }
  /** Resolve a model preference ("primary" | "fast") to a model id. */
  getModel(preference = "primary") {
    return preference === "fast" ? this.fastModel : this.primaryModel;
  }
  /** Gemini uses max_completion_tokens; others use max_tokens */
  tokenLimitParams(maxTokens) {
    return this.isGemini ? { max_completion_tokens: maxTokens } : { max_tokens: maxTokens };
  }
  /** Completion without a system prompt; returns just the text. */
  async complete(prompt, options) {
    const result = await this.completeWithSystem(void 0, prompt, options);
    return result.content;
  }
  /**
   * Non-streaming chat completion. Returns { content, inputTokens,
   * outputTokens, model }; missing usage fields fall back to 0.
   * @throws CortexError translated via mapError().
   */
  async completeWithSystem(systemPrompt2, userPrompt, options, modelPreference = "primary") {
    const model = this.getModel(modelPreference);
    try {
      const messages = [];
      if (systemPrompt2) {
        messages.push({ role: "system", content: systemPrompt2 });
      }
      messages.push({ role: "user", content: userPrompt });
      const response = await this.client.chat.completions.create({
        model,
        ...this.tokenLimitParams(options?.maxTokens ?? 4096),
        temperature: options?.temperature ?? 0.7,
        messages,
        ...options?.stopSequences?.length && { stop: options.stopSequences }
      });
      const content = response.choices[0]?.message?.content ?? "";
      return {
        content,
        inputTokens: response.usage?.prompt_tokens ?? 0,
        outputTokens: response.usage?.completion_tokens ?? 0,
        model
      };
    } catch (err) {
      throw this.mapError(err);
    }
  }
  /**
   * Completion whose output is expected to be JSON.
   * NOTE(review): the schema argument is unused; a non-JSON reply throws
   * SyntaxError rather than a CortexError.
   */
  async completeStructured(prompt, _schema, options) {
    const result = await this.complete(prompt, options);
    return JSON.parse(result);
  }
  /** Streaming variant of complete(); yields text deltas. */
  async *stream(prompt, options) {
    yield* this.streamWithSystem(void 0, prompt, options);
  }
  /**
   * Streaming chat completion. Yields content deltas and returns
   * { inputTokens, outputTokens, model }; usage arrives in the final
   * chunk because stream_options.include_usage is requested.
   */
  async *streamWithSystem(systemPrompt2, userPrompt, options, modelPreference = "primary") {
    const model = this.getModel(modelPreference);
    try {
      const messages = [];
      if (systemPrompt2) {
        messages.push({ role: "system", content: systemPrompt2 });
      }
      messages.push({ role: "user", content: userPrompt });
      const stream = await this.client.chat.completions.create({
        model,
        ...this.tokenLimitParams(options?.maxTokens ?? 4096),
        temperature: options?.temperature ?? 0.7,
        messages,
        stream: true,
        stream_options: { include_usage: true },
        ...options?.stopSequences?.length && { stop: options.stopSequences }
      });
      let inputTokens = 0;
      let outputTokens = 0;
      for await (const chunk of stream) {
        const delta = chunk.choices[0]?.delta?.content;
        if (delta) {
          yield delta;
        }
        if (chunk.usage) {
          inputTokens = chunk.usage.prompt_tokens;
          outputTokens = chunk.usage.completion_tokens;
        }
      }
      return { inputTokens, outputTokens, model };
    } catch (err) {
      throw this.mapError(err);
    }
  }
  /** Embeddings are intentionally not routed through this provider. */
  async embed(_texts) {
    throw new CortexError(LLM_PROVIDER_UNAVAILABLE, "medium", "llm", "OpenAI-compatible provider does not handle embeddings. Use local embedding model.");
  }
  /**
   * Liveness probe: issues a minimal 1-token request against the fast
   * model. NOTE(review): this is a real, billed API call.
   */
  async isAvailable() {
    try {
      await this.client.chat.completions.create({
        model: this.fastModel,
        ...this.tokenLimitParams(1),
        messages: [{ role: "user", content: "ping" }]
      });
      return true;
    } catch {
      return false;
    }
  }
  // Translate SDK errors into CortexError. Order matters: the specific
  // subclasses (auth, rate-limit, timeout) must be checked before the
  // generic APIError base class.
  mapError(err) {
    if (err instanceof OpenAI.AuthenticationError) {
      return new CortexError(LLM_AUTH_FAILED, "critical", "llm", "OpenAI-compatible API authentication failed. Check your API key.", void 0, "Verify the environment variable in llm.cloud.apiKeySource is correct.", false, 401);
    }
    if (err instanceof OpenAI.RateLimitError) {
      return new CortexError(LLM_RATE_LIMITED, "medium", "llm", "OpenAI-compatible API rate limit exceeded.", void 0, "Wait and retry with backoff.", true, 429);
    }
    if (err instanceof OpenAI.APIConnectionTimeoutError) {
      return new CortexError(LLM_TIMEOUT, "medium", "llm", "OpenAI-compatible API request timed out.", void 0, "Retry the request or increase llm.cloud.timeoutMs.", true, 504);
    }
    if (err instanceof OpenAI.APIError) {
      // Serialize the provider's error payload for diagnostics.
      const body = typeof err.error === "object" ? JSON.stringify(err.error) : String(err.error ?? "");
      logger3.debug("API error details", { status: err.status, body, headers: err.headers });
      return new CortexError(LLM_PROVIDER_UNAVAILABLE, "high", "llm", `OpenAI-compatible API error: ${err.status} ${err.message}${body ? ` \u2014 ${body}` : ""}`, { status: err.status }, "Retry or check your provider status page.", true, err.status);
    }
    const message = err instanceof Error ? err.message : String(err);
    return new CortexError(LLM_PROVIDER_UNAVAILABLE, "high", "llm", `OpenAI-compatible provider error: ${message}`, void 0, "Check network connectivity and llm.cloud.baseUrl configuration.", true);
  }
};
985
+
986
+ // packages/llm/dist/token-tracker.js
987
+ var logger4 = createLogger("llm:token-tracker");
988
// Known per-million-token prices in USD, keyed by model id.
var MODEL_COSTS = {
  "claude-sonnet-4-5-20250929": { input: 3, output: 15 },
  "claude-haiku-4-5-20251001": { input: 0.8, output: 4 }
};
// Fallback pricing applied to models without a table entry.
var DEFAULT_COST = { input: 3, output: 15 };
/**
 * Estimate the USD cost of one request for the given model and token
 * counts. Unknown models fall back to DEFAULT_COST.
 */
function estimateCost(model, inputTokens, outputTokens) {
  const { input, output } = MODEL_COSTS[model] ?? DEFAULT_COST;
  return inputTokens / 1e6 * input + outputTokens / 1e6 * output;
}
/**
 * In-memory accounting of LLM usage against a monthly USD budget.
 * Emits budget.warning / budget.exhausted events (once per threshold)
 * as the current month's spend crosses the configured fractions.
 */
var TokenTracker = class {
  records = [];
  monthlyBudgetUsd;
  warningThresholds;
  // Thresholds already announced this lifetime; prevents duplicate events.
  warningsFired = /* @__PURE__ */ new Set();
  constructor(monthlyBudgetUsd = 25, warningThresholds = [0.5, 0.8, 0.9]) {
    this.monthlyBudgetUsd = monthlyBudgetUsd;
    this.warningThresholds = warningThresholds;
  }
  /**
   * Record one completed request, re-evaluate the budget, and return the
   * stored usage record (including its estimated cost and timestamp).
   */
  record(requestId, task, provider, model, inputTokens, outputTokens, latencyMs) {
    const usage = {
      id: crypto.randomUUID(),
      requestId,
      task,
      provider,
      model,
      inputTokens,
      outputTokens,
      estimatedCostUsd: estimateCost(model, inputTokens, outputTokens),
      latencyMs,
      timestamp: new Date().toISOString()
    };
    this.records.push(usage);
    this.checkBudget();
    return usage;
  }
  // Fire each warning threshold at most once, plus a final "exhausted"
  // event when the full budget is spent.
  checkBudget() {
    const spent = this.getCurrentMonthSpend();
    const usedPercent = this.monthlyBudgetUsd > 0 ? spent / this.monthlyBudgetUsd : 0;
    const ts = new Date().toISOString();
    for (const threshold of this.warningThresholds) {
      if (usedPercent < threshold || this.warningsFired.has(threshold)) {
        continue;
      }
      this.warningsFired.add(threshold);
      const remaining = this.monthlyBudgetUsd - spent;
      logger4.warn(`Budget warning: ${(usedPercent * 100).toFixed(1)}% used`, {
        spent,
        budget: this.monthlyBudgetUsd,
        remaining
      });
      eventBus.emit({
        type: "budget.warning",
        payload: { usedPercent: Math.round(usedPercent * 100), remainingUsd: remaining },
        timestamp: ts,
        source: "llm:token-tracker"
      });
    }
    if (this.monthlyBudgetUsd > 0 && usedPercent >= 1 && !this.warningsFired.has(1)) {
      this.warningsFired.add(1);
      logger4.warn("Budget exhausted", { spent, budget: this.monthlyBudgetUsd });
      eventBus.emit({
        type: "budget.exhausted",
        payload: { totalSpentUsd: spent },
        timestamp: ts,
        source: "llm:token-tracker"
      });
    }
  }
  /** Total estimated cost of records whose timestamp is in this month. */
  getCurrentMonthSpend() {
    const monthPrefix = new Date().toISOString().slice(0, 7);
    let total = 0;
    for (const r of this.records) {
      if (r.timestamp.startsWith(monthPrefix)) {
        total += r.estimatedCostUsd;
      }
    }
    return total;
  }
  isBudgetExhausted() {
    return this.getCurrentMonthSpend() >= this.monthlyBudgetUsd;
  }
  getBudgetRemaining() {
    return Math.max(0, this.monthlyBudgetUsd - this.getCurrentMonthSpend());
  }
  /** Defensive copy of all usage records. */
  getRecords() {
    return [...this.records];
  }
  /** Aggregate totals for the current month, broken down by task/provider. */
  getSummary() {
    const monthPrefix = new Date().toISOString().slice(0, 7);
    const summary = {
      totalCostUsd: 0,
      totalInputTokens: 0,
      totalOutputTokens: 0,
      requestCount: 0,
      costByTask: {},
      costByProvider: {}
    };
    for (const r of this.records) {
      if (!r.timestamp.startsWith(monthPrefix)) {
        continue;
      }
      summary.totalInputTokens += r.inputTokens;
      summary.totalOutputTokens += r.outputTokens;
      summary.totalCostUsd += r.estimatedCostUsd;
      summary.requestCount += 1;
      summary.costByTask[r.task] = (summary.costByTask[r.task] ?? 0) + r.estimatedCostUsd;
      summary.costByProvider[r.provider] = (summary.costByProvider[r.provider] ?? 0) + r.estimatedCostUsd;
    }
    return summary;
  }
};
1094
+
1095
+ // packages/llm/dist/cache.js
1096
+ import { createHash } from "node:crypto";
1097
+ var logger5 = createLogger("llm:cache");
1098
var ResponseCache = class {
  // key (sha256 hex) → { response, model, inputTokens, outputTokens, createdAt }
  cache = /* @__PURE__ */ new Map();
  enabled;
  ttlMs;
  maxEntries;
  constructor(options = {}) {
    // Defaults: enabled, 7-day TTL, at most 10k entries.
    this.enabled = options.enabled ?? true;
    this.ttlMs = options.ttlMs ?? 7 * 24 * 60 * 60 * 1e3;
    this.maxEntries = options.maxEntries ?? 1e4;
  }
  buildKey(contentHash, promptId, promptVersion) {
    // Cache key covers the input content AND the prompt identity/version,
    // so a prompt change naturally invalidates old entries.
    const material = `${contentHash}:${promptId}:${promptVersion}`;
    return createHash("sha256").update(material).digest("hex");
  }
  get(contentHash, promptId, promptVersion) {
    if (!this.enabled) {
      return null;
    }
    const key = this.buildKey(contentHash, promptId, promptVersion);
    const entry = this.cache.get(key);
    if (!entry) {
      return null;
    }
    const age = Date.now() - entry.createdAt;
    if (age > this.ttlMs) {
      // Lazy expiry: stale entries are dropped on access.
      this.cache.delete(key);
      return null;
    }
    logger5.debug("Cache hit", { promptId, promptVersion });
    return entry;
  }
  set(contentHash, promptId, promptVersion, response, model, inputTokens, outputTokens) {
    if (!this.enabled) {
      return;
    }
    if (this.cache.size >= this.maxEntries) {
      // Evict the entry with the smallest createdAt to make room.
      let oldestKey = null;
      let oldestAt = Infinity;
      for (const [key, entry] of this.cache) {
        if (entry.createdAt < oldestAt) {
          oldestAt = entry.createdAt;
          oldestKey = key;
        }
      }
      if (oldestKey !== null) {
        this.cache.delete(oldestKey);
      }
    }
    const key = this.buildKey(contentHash, promptId, promptVersion);
    this.cache.set(key, {
      response,
      model,
      inputTokens,
      outputTokens,
      createdAt: Date.now()
    });
  }
  clear() {
    this.cache.clear();
  }
  get size() {
    return this.cache.size;
  }
};
1150
+
1151
+ // packages/llm/dist/output-parser.js
1152
function extractJson(raw) {
  // Strip markdown code fences, then slice out the first balanced JSON value
  // (object or array). Falls back to a truncation repair when unbalanced.
  const cleaned = raw.replace(/```(?:json)?\s*/g, "").replace(/```/g, "").trim();
  const start = findJsonStart(cleaned);
  if (start === -1) {
    throw new Error("No JSON found in response");
  }
  const open = cleaned[start];
  const close = open === "{" ? "}" : "]";
  const end = findMatchingClose(cleaned, start, open, close);
  if (end !== -1) {
    return cleaned.slice(start, end + 1);
  }
  // No matching close — the model likely ran out of tokens mid-value.
  const repaired = repairTruncatedJson(cleaned.slice(start));
  if (repaired) {
    return repaired;
  }
  throw new Error("Unterminated JSON in response");
}
1170
function repairTruncatedJson(text) {
  // Best-effort recovery of JSON that was cut off mid-generation: truncate
  // back to the last completely-closed value nested inside the outermost
  // container, then append the missing closing brackets. Returns null when
  // no safe cut point exists or the repaired text still fails to parse.
  let cutIdx = -1;
  let depth = 0;
  let inString = false;
  let escaped = false;
  for (let i = 0; i < text.length; i++) {
    const ch = text[i];
    if (escaped) {
      escaped = false;
    } else if (ch === "\\" && inString) {
      escaped = true;
    } else if (ch === '"') {
      inString = !inString;
    } else if (!inString) {
      if (ch === "{" || ch === "[") {
        depth++;
      } else if (ch === "}" || ch === "]") {
        depth--;
        // A value that closed while still inside the root container marks
        // a safe place to cut.
        if (depth >= 1) {
          cutIdx = i;
        }
      } else if (ch === "," && depth >= 1) {
        // A separator implies the value before it was complete.
        cutIdx = i - 1;
      }
    }
  }
  if (cutIdx <= 0) {
    return null;
  }
  let repaired = text.slice(0, cutIdx + 1).trimEnd();
  if (repaired.endsWith(",")) {
    repaired = repaired.slice(0, -1);
  }
  // Second pass over the kept prefix: work out which containers are still
  // open, then append their closers innermost-first.
  const closers = [];
  inString = false;
  escaped = false;
  for (let i = 0; i < repaired.length; i++) {
    const ch = repaired[i];
    if (escaped) {
      escaped = false;
    } else if (ch === "\\" && inString) {
      escaped = true;
    } else if (ch === '"') {
      inString = !inString;
    } else if (!inString) {
      if (ch === "{") {
        closers.push("}");
      } else if (ch === "[") {
        closers.push("]");
      } else if (ch === "}" || ch === "]") {
        closers.pop();
      }
    }
  }
  repaired += closers.reverse().join("");
  // Only hand back the repair if it actually parses.
  try {
    JSON.parse(repaired);
    return repaired;
  } catch {
    return null;
  }
}
1243
function findJsonStart(s) {
  // Index of the first character that can open a JSON container
  // ("{" or "["), or -1 when the string contains neither.
  return s.search(/[{[]/);
}
1250
function findMatchingClose(s, start, open, close) {
  // Scan forward from `start` for the `close` delimiter that balances
  // `open`, skipping over string literals and escaped characters.
  // Returns -1 when the input ends before the container is balanced.
  let depth = 0;
  let inString = false;
  let escaped = false;
  for (let i = start; i < s.length; i++) {
    const ch = s[i];
    if (escaped) {
      escaped = false;
    } else if (inString) {
      if (ch === "\\") {
        escaped = true;
      } else if (ch === '"') {
        inString = false;
      }
    } else if (ch === '"') {
      inString = true;
    } else {
      if (ch === open) {
        depth++;
      }
      if (ch === close && --depth === 0) {
        return i;
      }
    }
  }
  return -1;
}
1280
function parseStructuredOutput(raw, schema) {
  // Extract the JSON payload from a raw LLM response and validate it
  // against the supplied zod schema. Throws a retryable CortexError with
  // the collected issues when validation fails.
  let candidate = JSON.parse(extractJson(raw));
  if (Array.isArray(candidate)) {
    // Model returned a bare array; wrap it to match the object schema.
    candidate = inferObjectWrapper(candidate, schema);
  }
  const validation = schema.safeParse(candidate);
  if (validation.success) {
    return validation.data;
  }
  const issues = validation.error.issues
    .map((issue) => `${issue.path.join(".")}: ${issue.message}`)
    .join("; ");
  throw new CortexError(LLM_EXTRACTION_FAILED, "medium", "llm", `Schema validation failed: ${issues}`, { raw: raw.slice(0, 500), issues: validation.error.issues }, "Retry with correction prompt", true);
}
1293
function inferObjectWrapper(arr, schema) {
  // When the model returns a bare array, wrap it under the schema's first
  // array-typed key (reads zod v3 `_def` internals), falling back to the
  // conventional `entities` key.
  const def = schema._def;
  if (def?.typeName === "ZodObject" && typeof def.shape === "function") {
    const match = Object.entries(def.shape()).find(
      ([, field]) => field._def?.typeName === "ZodArray"
    );
    if (match) {
      return { [match[0]]: arr };
    }
  }
  return { entities: arr };
}
1306
function buildCorrectionPrompt(originalPrompt, failedOutput, error) {
  // Build a re-prompt after a parse/schema failure, echoing a capped
  // excerpt of the bad output so the model can self-correct without
  // repeating it verbatim.
  const excerpt = failedOutput.slice(0, 500);
  return `${originalPrompt}

Your previous response was invalid JSON or didn't match the schema.

Previous response (DO NOT repeat this):
${excerpt}

Error: ${error}

Please return ONLY valid JSON matching the required schema. No explanation.`;
}
1318
+
1319
+ // packages/llm/dist/router.js
1320
+ var logger6 = createLogger("llm:router");
1321
function resolveApiKeySource(source) {
  // "env:NAME" → value of process.env.NAME; any other form is unsupported
  // and yields undefined.
  const ENV_PREFIX = "env:";
  if (!source.startsWith(ENV_PREFIX)) {
    return undefined;
  }
  return process.env[source.slice(ENV_PREFIX.length)];
}
1327
/**
 * Routes LLM requests between a cloud provider (Anthropic, or any
 * OpenAI-compatible endpoint) and a local Ollama provider, based on:
 *   1. an explicit per-request `forceProvider` override,
 *   2. per-task routing config (`llm.taskRouting`),
 *   3. the global mode ("local-only" | "local-first" | "hybrid" | "cloud-first"),
 *   4. provider availability and the monthly budget.
 * Also owns the TokenTracker (usage/budget) and the ResponseCache.
 */
var Router = class {
  cloudProvider = null;
  localProvider = null;
  mode;
  taskRouting;
  tracker;
  cache;
  config;
  constructor(options) {
    const { config: config8 } = options;
    this.config = config8;
    this.mode = config8.llm.mode;
    this.taskRouting = config8.llm.taskRouting;
    // Construct the cloud provider unless running local-only. Failures are
    // fatal only in cloud-first mode; otherwise we degrade to local.
    if (this.mode !== "local-only") {
      try {
        if (config8.llm.cloud.provider === "openai-compatible") {
          const baseUrl = config8.llm.cloud.baseUrl;
          if (!baseUrl) {
            logger6.warn("openai-compatible provider requires llm.cloud.baseUrl \u2014 skipping cloud");
          } else {
            this.cloudProvider = new OpenAICompatibleProvider({
              baseUrl,
              // Explicit apiKey option wins over the configured key source.
              apiKey: options.apiKey ?? resolveApiKeySource(config8.llm.cloud.apiKeySource),
              primaryModel: config8.llm.cloud.models.primary,
              fastModel: config8.llm.cloud.models.fast,
              timeoutMs: config8.llm.cloud.timeoutMs,
              maxRetries: config8.llm.cloud.maxRetries
            });
          }
        } else {
          this.cloudProvider = new AnthropicProvider({
            apiKey: options.apiKey,
            primaryModel: config8.llm.cloud.models.primary,
            fastModel: config8.llm.cloud.models.fast,
            timeoutMs: config8.llm.cloud.timeoutMs,
            maxRetries: config8.llm.cloud.maxRetries,
            promptCaching: config8.llm.cloud.promptCaching
          });
        }
      } catch (err) {
        if (this.mode === "cloud-first") {
          throw err;
        }
        logger6.warn("Cloud provider unavailable, falling back to local-only", {
          error: err instanceof Error ? err.message : String(err)
        });
      }
    }
    // Local provider exists in every mode except a cloud-first mode that
    // successfully built its cloud provider.
    if (this.mode !== "cloud-first" || !this.cloudProvider) {
      this.localProvider = new OllamaProvider({
        host: config8.llm.local.host,
        model: config8.llm.local.model,
        embeddingModel: config8.llm.local.embeddingModel,
        numCtx: config8.llm.local.numCtx,
        numGpu: config8.llm.local.numGpu,
        timeoutMs: config8.llm.local.timeoutMs,
        keepAlive: config8.llm.local.keepAlive
      });
    }
    this.tracker = new TokenTracker(config8.llm.budget.monthlyLimitUsd, config8.llm.budget.warningThresholds);
    this.cache = new ResponseCache({
      enabled: config8.llm.cache.enabled,
      ttlMs: config8.llm.cache.ttlDays * 24 * 60 * 60 * 1e3
    });
    logger6.info("Router initialized", {
      mode: this.mode,
      hasCloud: !!this.cloudProvider,
      hasLocal: !!this.localProvider
    });
  }
  /**
   * Select provider based on mode, task routing, and availability
   */
  async selectProvider(task, forceProvider) {
    const cloudName = () => this.cloudProvider?.name ?? "anthropic";
    // 1. Explicit caller override wins over everything else.
    if (forceProvider === "cloud") {
      if (!this.cloudProvider) {
        throw new CortexError(LLM_PROVIDER_UNAVAILABLE, "high", "llm", "Cloud provider requested but not available.", { mode: this.mode }, "Set your cloud API key or change LLM mode.", false);
      }
      return { provider: this.cloudProvider, name: cloudName() };
    }
    if (forceProvider === "local") {
      if (!this.localProvider) {
        throw new CortexError(LLM_PROVIDER_UNAVAILABLE, "high", "llm", "Local provider requested but not configured.", { mode: this.mode }, "Change LLM mode to include local provider.", false);
      }
      return { provider: this.localProvider, name: "ollama" };
    }
    // 2. Per-task routing override — honored only when the requested
    // provider was actually constructed.
    const taskRoute = this.taskRouting[task] ?? "auto";
    if (taskRoute === "cloud" && this.cloudProvider) {
      return { provider: this.cloudProvider, name: cloudName() };
    }
    if (taskRoute === "local" && this.localProvider) {
      return { provider: this.localProvider, name: "ollama" };
    }
    // 3. Mode-level policy.
    switch (this.mode) {
      case "local-only":
        if (!this.localProvider) {
          throw new CortexError(LLM_PROVIDER_UNAVAILABLE, "high", "llm", "Local-only mode but Ollama provider not available.", void 0, "Ensure Ollama is running with `ollama serve`.", false);
        }
        return { provider: this.localProvider, name: "ollama" };
      case "local-first":
        if (this.localProvider && await this.localProvider.isAvailable()) {
          return { provider: this.localProvider, name: "ollama" };
        }
        if (this.cloudProvider) {
          logger6.info("Local provider unavailable, falling back to cloud");
          return { provider: this.cloudProvider, name: cloudName() };
        }
        throw new CortexError(LLM_PROVIDER_UNAVAILABLE, "high", "llm", "No LLM provider available.", { mode: this.mode }, "Start Ollama or configure cloud API key.", false);
      case "hybrid":
        // Hybrid: cheap, high-volume tasks go local when Ollama is up;
        // everything else prefers cloud.
        const cheapTasks = [
          LLMTask.ENTITY_EXTRACTION,
          LLMTask.CONTEXT_RANKING,
          LLMTask.EMBEDDING_GENERATION
        ];
        if (cheapTasks.includes(task) && this.localProvider && await this.localProvider.isAvailable()) {
          logger6.debug("Hybrid routing to local provider", { task });
          return { provider: this.localProvider, name: "ollama" };
        }
        if (this.cloudProvider) {
          logger6.debug("Hybrid routing to cloud provider", { task });
          return { provider: this.cloudProvider, name: cloudName() };
        }
        if (this.localProvider) {
          logger6.warn("Cloud provider unavailable in hybrid mode, falling back to local", { task });
          return { provider: this.localProvider, name: "ollama" };
        }
        throw new CortexError(LLM_PROVIDER_UNAVAILABLE, "high", "llm", "No LLM provider available.", { mode: this.mode }, "Configure cloud API key or start Ollama.", false);
      case "cloud-first":
      default:
        // Cloud-first: cloud while budget lasts, then (optionally) local.
        if (this.cloudProvider && !this.tracker.isBudgetExhausted()) {
          return { provider: this.cloudProvider, name: cloudName() };
        }
        if (this.localProvider && this.config.llm.budget.enforcementAction === "fallback-local") {
          logger6.info("Budget exhausted or cloud unavailable, falling back to local");
          return { provider: this.localProvider, name: "ollama" };
        }
        if (!this.cloudProvider) {
          throw new CortexError(LLM_PROVIDER_UNAVAILABLE, "high", "llm", "Cloud provider not available.", { mode: this.mode }, "Set your cloud API key or change LLM mode.", false);
        }
        throw new CortexError(LLM_BUDGET_EXHAUSTED, "high", "llm", "Monthly budget exhausted.", { spent: this.tracker.getCurrentMonthSpend() }, "Increase budget, wait for next month, or enable local fallback.", false, 402);
    }
  }
  // Keep legacy provider getter for backward compatibility
  get provider() {
    return this.cloudProvider ?? this.localProvider;
  }
  /**
   * Non-streaming completion: checks the response cache (when a contentHash
   * is supplied), records token usage, and emits start/complete events.
   */
  async complete(request) {
    const { provider, name: providerName } = await this.selectProvider(request.task, request.forceProvider);
    if (request.contentHash) {
      const cached = this.cache.get(request.contentHash, request.promptId, request.promptVersion);
      if (cached) {
        // Cache hits cost nothing and take no latency.
        return {
          content: cached.response,
          model: cached.model,
          inputTokens: cached.inputTokens,
          outputTokens: cached.outputTokens,
          cached: true,
          latencyMs: 0,
          costUsd: 0,
          provider: providerName
        };
      }
    }
    const requestId = crypto.randomUUID();
    eventBus.emit({
      type: "llm.request.start",
      payload: { requestId, task: request.task, provider: providerName },
      timestamp: (/* @__PURE__ */ new Date()).toISOString(),
      source: "llm:router"
    });
    const startMs = performance.now();
    const result = await provider.completeWithSystem(request.systemPrompt, request.userPrompt, {
      temperature: request.temperature,
      maxTokens: request.maxTokens
    }, request.modelPreference ?? "primary");
    const latencyMs = Math.round(performance.now() - startMs);
    const usageRecord = this.tracker.record(requestId, request.task, providerName, result.model, result.inputTokens, result.outputTokens, latencyMs);
    if (request.contentHash) {
      this.cache.set(request.contentHash, request.promptId, request.promptVersion, result.content, result.model, result.inputTokens, result.outputTokens);
    }
    eventBus.emit({
      type: "llm.request.complete",
      payload: {
        requestId,
        task: request.task,
        provider: providerName,
        model: result.model,
        usage: {
          inputTokens: result.inputTokens,
          outputTokens: result.outputTokens,
          estimatedCostUsd: usageRecord.estimatedCostUsd
        },
        latencyMs
      },
      timestamp: (/* @__PURE__ */ new Date()).toISOString(),
      source: "llm:router"
    });
    return {
      content: result.content,
      model: result.model,
      inputTokens: result.inputTokens,
      outputTokens: result.outputTokens,
      cached: false,
      latencyMs,
      costUsd: usageRecord.estimatedCostUsd,
      provider: providerName
    };
  }
  /**
   * Completion validated against a zod schema. In local-first mode a
   * low-confidence local extraction is escalated to cloud; a parse failure
   * triggers one correction-prompt retry.
   */
  async completeStructured(request, schema) {
    const result = await this.complete(request);
    try {
      const data = parseStructuredOutput(result.content, schema);
      if (this.mode === "local-first" && result.provider === "ollama" && this.cloudProvider && !request.forceProvider) {
        // Escalate when the median entity confidence from the local model
        // falls below 0.6.
        const entities = data.entities;
        if (Array.isArray(entities) && entities.length > 0) {
          const confidences = entities.map((e) => typeof e.confidence === "number" ? e.confidence : null).filter((c) => c !== null).sort((a, b) => a - b);
          if (confidences.length > 0) {
            const mid = Math.floor(confidences.length / 2);
            const median = confidences.length % 2 !== 0 ? confidences[mid] : (confidences[mid - 1] + confidences[mid]) / 2;
            if (median < 0.6) {
              logger6.info("Local confidence below threshold, escalating to cloud", {
                median: Math.round(median * 100) / 100,
                task: request.task
              });
              const cloudResult = await this.complete({ ...request, forceProvider: "cloud", contentHash: void 0 });
              const cloudData = parseStructuredOutput(cloudResult.content, schema);
              return { ...cloudResult, data: cloudData };
            }
          }
        }
      }
      return { ...result, data };
    } catch (firstErr) {
      logger6.warn("Structured output parse failed, retrying with correction", {
        promptId: request.promptId,
        error: firstErr instanceof Error ? firstErr.message : String(firstErr)
      });
      const correctedPrompt = buildCorrectionPrompt(request.userPrompt, result.content, firstErr instanceof Error ? firstErr.message : String(firstErr));
      const retryResult = await this.complete({
        ...request,
        userPrompt: correctedPrompt,
        contentHash: void 0
        // Don't cache correction attempts
      });
      const data = parseStructuredOutput(retryResult.content, schema);
      return { ...retryResult, data };
    }
  }
  /**
   * Streaming completion. Yields content chunks; the generator's return
   * value carries the full content plus usage/cost metadata.
   */
  async *stream(request) {
    const { provider, name: providerName } = await this.selectProvider(request.task, request.forceProvider);
    const requestId = crypto.randomUUID();
    eventBus.emit({
      type: "llm.request.start",
      payload: { requestId, task: request.task, provider: providerName },
      timestamp: (/* @__PURE__ */ new Date()).toISOString(),
      source: "llm:router"
    });
    const startMs = performance.now();
    let fullContent = "";
    const gen = provider.streamWithSystem(request.systemPrompt, request.userPrompt, {
      temperature: request.temperature,
      maxTokens: request.maxTokens
    }, request.modelPreference ?? "primary");
    let streamResult;
    // Drive the inner generator manually so we can capture its return
    // value (token counts) while re-yielding each chunk.
    while (true) {
      const { value, done } = await gen.next();
      if (done) {
        streamResult = value;
        break;
      }
      fullContent += value;
      yield value;
    }
    const latencyMs = Math.round(performance.now() - startMs);
    const tokens = streamResult ?? { inputTokens: 0, outputTokens: 0, model: provider.getModel() };
    const usageRecord = this.tracker.record(requestId, request.task, providerName, tokens.model, tokens.inputTokens, tokens.outputTokens, latencyMs);
    eventBus.emit({
      type: "llm.request.complete",
      payload: {
        requestId,
        task: request.task,
        provider: providerName,
        model: tokens.model,
        usage: {
          inputTokens: tokens.inputTokens,
          outputTokens: tokens.outputTokens,
          estimatedCostUsd: usageRecord.estimatedCostUsd
        },
        latencyMs
      },
      timestamp: (/* @__PURE__ */ new Date()).toISOString(),
      source: "llm:router"
    });
    return {
      content: fullContent,
      model: tokens.model,
      inputTokens: tokens.inputTokens,
      outputTokens: tokens.outputTokens,
      cached: false,
      latencyMs,
      costUsd: usageRecord.estimatedCostUsd,
      provider: providerName
    };
  }
  getTracker() {
    return this.tracker;
  }
  getCache() {
    return this.cache;
  }
  getLocalProvider() {
    return this.localProvider;
  }
  getCloudProvider() {
    return this.cloudProvider;
  }
  getMode() {
    return this.mode;
  }
  // Availability per mode: local-only/cloud-first check their own provider;
  // mixed modes are available when either provider responds.
  async isAvailable() {
    switch (this.mode) {
      case "local-only":
        return this.localProvider?.isAvailable() ?? false;
      case "cloud-first":
        return this.cloudProvider?.isAvailable() ?? false;
      default:
        const localAvailable = await this.localProvider?.isAvailable() ?? false;
        const cloudAvailable = await this.cloudProvider?.isAvailable() ?? false;
        return localAvailable || cloudAvailable;
    }
  }
};
1660
+
1661
+ // packages/llm/dist/prompts/entity-extraction.js
1662
+ import { z as z3 } from "zod";
1663
// Canonical entity-type vocabulary accepted by the extraction schema.
var VALID_TYPES = [
  "Decision",
  "Requirement",
  "Pattern",
  "Component",
  "Dependency",
  "Interface",
  "Constraint",
  "ActionItem",
  "Risk",
  "Note"
];
function coerceEntityType(val) {
  // Map a free-form type string from the model onto the canonical
  // vocabulary. Unknown strings are bucketed by keyword heuristics
  // (checked in order; first match wins), defaulting to "Note".
  if (VALID_TYPES.includes(val)) {
    return val;
  }
  const heuristics = [
    [/rule|lint|option|setting|config/i, "Constraint"],
    [/action|task|todo/i, "ActionItem"],
    [/component|module|class|service/i, "Component"],
    [/depend|import|library|package/i, "Dependency"],
    [/require|must|shall|need/i, "Requirement"]
  ];
  for (const [pattern, fallback] of heuristics) {
    if (pattern.test(val)) {
      return fallback;
    }
  }
  return "Note";
}
1691
// Structured-output contract for entity extraction. The `.catch` on `type`
// coerces hallucinated type strings (see coerceEntityType) instead of
// failing the whole validation.
var outputSchema = z3.object({
  entities: z3.array(z3.object({
    type: z3.enum(VALID_TYPES).catch((ctx) => coerceEntityType(String(ctx.input))),
    name: z3.string().min(3).max(100),
    content: z3.string().min(10),
    summary: z3.string().max(300),
    confidence: z3.number().min(0).max(1),
    tags: z3.array(z3.string()),
    properties: z3.record(z3.unknown())
  }))
});
// Extraction is deterministic classification work: fast model, near-zero
// temperature, generous output budget for long documents.
var config = {
  provider: "cloud",
  model: "fast",
  temperature: 0.1,
  maxTokens: 8192,
  task: LLMTask.ENTITY_EXTRACTION
};
1709
+
1710
+ // packages/llm/dist/prompts/relationship-inference.js
1711
+ import { z as z4 } from "zod";
1712
// Structured-output contract for relationship inference between entities.
// The type vocabulary must stay in sync with the graph layer.
var outputSchema2 = z4.object({
  relationships: z4.array(z4.object({
    type: z4.enum([
      "depends_on",
      "implements",
      "contradicts",
      "evolved_from",
      "relates_to",
      "uses",
      "constrains",
      "resolves",
      "documents",
      "derived_from"
    ]),
    sourceEntityId: z4.string(),
    targetEntityId: z4.string(),
    description: z4.string(),
    confidence: z4.number().min(0).max(1)
  }))
});
// Deterministic classification work: fast model, near-zero temperature.
var config2 = {
  provider: "cloud",
  model: "fast",
  temperature: 0.1,
  maxTokens: 8192,
  task: LLMTask.RELATIONSHIP_INFERENCE
};
1739
+
1740
+ // packages/llm/dist/prompts/merge-detection.js
1741
+ import { z as z5 } from "zod";
1742
// Structured-output contract for "should these two entities be merged?".
var outputSchema3 = z5.object({
  shouldMerge: z5.boolean(),
  confidence: z5.number().min(0).max(1),
  reason: z5.string()
});
// NOTE(review): task reuses LLMTask.ENTITY_EXTRACTION even though this is
// the merge-detection prompt — presumably there is no dedicated merge task;
// confirm before relying on per-task routing or cost attribution here.
var config3 = {
  provider: "cloud",
  model: "fast",
  temperature: 0.1,
  maxTokens: 500,
  task: LLMTask.ENTITY_EXTRACTION
};
1754
+
1755
+ // packages/llm/dist/prompts/contradiction-detection.js
1756
+ import { z as z6 } from "zod";
1757
// Structured-output contract for contradiction detection between entities.
var outputSchema4 = z6.object({
  isContradiction: z6.boolean(),
  severity: z6.enum(["low", "medium", "high"]),
  description: z6.string(),
  suggestedResolution: z6.string()
});
var config4 = {
  provider: "cloud",
  model: "fast",
  temperature: 0.1,
  maxTokens: 1e3,
  task: LLMTask.CONTRADICTION_DETECTION
};
1770
+
1771
+ // packages/llm/dist/prompts/conversational-query.js
1772
// Bundler-generated namespace object for the conversational-query prompt
// module; __export installs lazy getters for each binding.
var conversational_query_exports = {};
__export(conversational_query_exports, {
  PROMPT_ID: () => PROMPT_ID,
  PROMPT_VERSION: () => PROMPT_VERSION,
  buildUserPrompt: () => buildUserPrompt,
  config: () => config5,
  systemPrompt: () => systemPrompt
});
// PROMPT_ID/PROMPT_VERSION feed the response-cache key (see
// ResponseCache.buildKey), so bump the version when the prompt text changes.
var PROMPT_ID = "conversational_query";
var PROMPT_VERSION = "1.0.0";
var systemPrompt = `You are Cortex, a knowledge assistant. Answer questions using the provided context from the user's knowledge graph.
Be concise and specific. Refer to decisions, patterns, and components by name.
Mention the source file when citing a fact. If the context lacks enough information, say so briefly.`;
1785
function buildUserPrompt(vars) {
  // Assemble the user prompt: optional graph stats, then formatted entity
  // context blocks, then the question itself.
  const sections = [];
  if (vars.graphSummary) {
    sections.push(`Graph stats:\n${vars.graphSummary}`);
  }
  if (vars.contextEntities.length > 0) {
    const blocks = vars.contextEntities.map((entity) => {
      // Cite just the basename of the source file (normalizing Windows paths).
      const file = entity.sourceFile.replace(/\\/g, "/").split("/").pop() ?? entity.sourceFile;
      const rels = entity.relationships.length > 0
        ? `\nRelations: ${entity.relationships.map((rel) => rel.type).join(", ")}`
        : "";
      return `[${entity.type}] ${entity.name}\n${entity.content}\n(${file})${rels}`;
    });
    sections.push(`Relevant entities:\n${blocks.join("\n\n")}`);
  }
  return `${sections.join("\n\n")}\n\nQuestion: ${vars.userQuery}`;
}
1807
// Conversational answers: primary model, higher temperature for natural
// prose, streamed back to the caller.
var config5 = {
  provider: "cloud",
  model: "primary",
  temperature: 0.7,
  maxTokens: 600,
  task: LLMTask.CONVERSATIONAL_QUERY,
  stream: true
};
1815
+
1816
+ // packages/llm/dist/prompts/context-ranking.js
1817
+ import { z as z7 } from "zod";
1818
+ var outputSchema5 = z7.object({
1819
+ rankedIds: z7.array(z7.string()),
1820
+ excludeIds: z7.array(z7.string())
1821
+ });
1822
+ var config6 = {
1823
+ provider: "cloud",
1824
+ model: "fast",
1825
+ temperature: 0.1,
1826
+ maxTokens: 500,
1827
+ task: LLMTask.CONTEXT_RANKING
1828
+ };
1829
+
1830
+ // packages/llm/dist/prompts/follow-up-generation.js
1831
+ import { z as z8 } from "zod";
1832
+ var outputSchema6 = z8.object({
1833
+ followUps: z8.array(z8.string()).min(1).max(5)
1834
+ });
1835
+ var config7 = {
1836
+ provider: "cloud",
1837
+ model: "fast",
1838
+ temperature: 0.8,
1839
+ maxTokens: 300,
1840
+ task: LLMTask.CONVERSATIONAL_QUERY
1841
+ };
1842
+
1843
+ // packages/graph/dist/sqlite-store.js
1844
+ import Database from "better-sqlite3";
1845
+ import { randomUUID } from "node:crypto";
1846
+ import { copyFileSync, statSync, mkdirSync } from "node:fs";
1847
+ import { dirname } from "node:path";
1848
+ import { homedir as homedir3 } from "node:os";
1849
+
1850
+ // packages/graph/dist/migrations/001-initial.js
1851
// Version stamp recorded in schema_version when this migration applies.
var MIGRATION_VERSION = 1;
/**
 * Migration 001 — initial schema. Creates the core tables (projects,
 * entities, relationships, files, contradictions, token_usage,
 * dead_letter_queue), the entities FTS5 index, supporting indexes, and the
 * schema_version marker row. Every statement is idempotent
 * (IF NOT EXISTS / INSERT OR IGNORE), so re-running is safe.
 */
function up(db) {
  db.exec(`
    CREATE TABLE IF NOT EXISTS projects (
      id TEXT PRIMARY KEY,
      name TEXT NOT NULL,
      root_path TEXT NOT NULL UNIQUE,
      privacy_level TEXT NOT NULL DEFAULT 'standard',
      file_count INTEGER DEFAULT 0,
      entity_count INTEGER DEFAULT 0,
      last_ingested_at TEXT,
      created_at TEXT NOT NULL
    );

    CREATE TABLE IF NOT EXISTS entities (
      id TEXT PRIMARY KEY,
      type TEXT NOT NULL,
      name TEXT NOT NULL,
      content TEXT NOT NULL,
      summary TEXT,
      properties TEXT,
      confidence REAL NOT NULL,
      source_file TEXT NOT NULL,
      source_start_line INTEGER,
      source_end_line INTEGER,
      project_id TEXT NOT NULL,
      extracted_by TEXT NOT NULL,
      tags TEXT,
      status TEXT NOT NULL DEFAULT 'active',
      created_at TEXT NOT NULL,
      updated_at TEXT NOT NULL,
      deleted_at TEXT,
      FOREIGN KEY (project_id) REFERENCES projects(id)
    );

    CREATE TABLE IF NOT EXISTS relationships (
      id TEXT PRIMARY KEY,
      type TEXT NOT NULL,
      source_entity_id TEXT NOT NULL,
      target_entity_id TEXT NOT NULL,
      description TEXT,
      confidence REAL NOT NULL,
      properties TEXT,
      extracted_by TEXT NOT NULL,
      created_at TEXT NOT NULL,
      updated_at TEXT NOT NULL,
      FOREIGN KEY (source_entity_id) REFERENCES entities(id),
      FOREIGN KEY (target_entity_id) REFERENCES entities(id)
    );

    CREATE TABLE IF NOT EXISTS files (
      id TEXT PRIMARY KEY,
      path TEXT NOT NULL UNIQUE,
      relative_path TEXT NOT NULL,
      project_id TEXT NOT NULL,
      content_hash TEXT NOT NULL,
      file_type TEXT NOT NULL,
      size_bytes INTEGER NOT NULL,
      last_modified TEXT NOT NULL,
      last_ingested_at TEXT,
      entity_ids TEXT,
      status TEXT NOT NULL DEFAULT 'pending',
      parse_error TEXT,
      FOREIGN KEY (project_id) REFERENCES projects(id)
    );

    CREATE TABLE IF NOT EXISTS contradictions (
      id TEXT PRIMARY KEY,
      entity_id_a TEXT NOT NULL,
      entity_id_b TEXT NOT NULL,
      description TEXT NOT NULL,
      severity TEXT NOT NULL,
      suggested_resolution TEXT,
      status TEXT NOT NULL DEFAULT 'active',
      resolved_action TEXT,
      resolved_at TEXT,
      detected_at TEXT NOT NULL,
      FOREIGN KEY (entity_id_a) REFERENCES entities(id),
      FOREIGN KEY (entity_id_b) REFERENCES entities(id)
    );

    CREATE TABLE IF NOT EXISTS token_usage (
      id TEXT PRIMARY KEY,
      request_id TEXT NOT NULL,
      task TEXT NOT NULL,
      provider TEXT NOT NULL,
      model TEXT NOT NULL,
      input_tokens INTEGER NOT NULL,
      output_tokens INTEGER NOT NULL,
      estimated_cost_usd REAL NOT NULL,
      latency_ms INTEGER NOT NULL,
      timestamp TEXT NOT NULL
    );

    CREATE TABLE IF NOT EXISTS dead_letter_queue (
      id TEXT PRIMARY KEY,
      type TEXT NOT NULL,
      payload TEXT NOT NULL,
      error_code TEXT NOT NULL,
      error_message TEXT NOT NULL,
      retry_count INTEGER NOT NULL DEFAULT 0,
      first_failed_at TEXT NOT NULL,
      last_failed_at TEXT NOT NULL,
      next_retry_at TEXT,
      status TEXT NOT NULL DEFAULT 'pending'
    );

    -- Full-text search
    CREATE VIRTUAL TABLE IF NOT EXISTS entities_fts USING fts5(name, content, summary, tags);

    -- Indexes
    CREATE INDEX IF NOT EXISTS idx_entities_type ON entities(type);
    CREATE INDEX IF NOT EXISTS idx_entities_project ON entities(project_id);
    CREATE INDEX IF NOT EXISTS idx_entities_status ON entities(status);
    CREATE INDEX IF NOT EXISTS idx_entities_source ON entities(source_file);
    CREATE INDEX IF NOT EXISTS idx_relationships_source ON relationships(source_entity_id);
    CREATE INDEX IF NOT EXISTS idx_relationships_target ON relationships(target_entity_id);
    CREATE INDEX IF NOT EXISTS idx_relationships_type ON relationships(type);
    CREATE INDEX IF NOT EXISTS idx_files_project ON files(project_id);
    CREATE INDEX IF NOT EXISTS idx_files_status ON files(status);
    CREATE INDEX IF NOT EXISTS idx_files_hash ON files(content_hash);
    CREATE INDEX IF NOT EXISTS idx_token_usage_month ON token_usage(timestamp);
    CREATE INDEX IF NOT EXISTS idx_dlq_status ON dead_letter_queue(status);

    -- Schema version tracking
    CREATE TABLE IF NOT EXISTS schema_version (
      version INTEGER PRIMARY KEY,
      applied_at TEXT NOT NULL
    );

    INSERT OR IGNORE INTO schema_version (version, applied_at) VALUES (${MIGRATION_VERSION}, datetime('now'));
  `);
}
1984
+
1985
+ // packages/graph/dist/sqlite-store.js
1986
// Expands a leading "~" in a filesystem path to the current user's home
// directory; any other path is returned untouched.
function resolveHomePath(p) {
  if (!p.startsWith("~")) {
    return p;
  }
  // Only the first "~" is substituted, and it is guaranteed to be the
  // leading character by the guard above.
  return p.replace("~", homedir3());
}
1989
// Current wall-clock time as an ISO-8601 UTC string (used for all
// created_at / updated_at columns).
function now() {
  const current = new Date();
  return current.toISOString();
}
1992
/**
 * Maps a `contradictions` table row (snake_case columns) to the domain
 * object (camelCase). Nullable columns are normalized to `undefined`.
 */
function rowToContradiction(row) {
  const {
    id,
    entity_id_a: entityA,
    entity_id_b: entityB,
    description,
    severity,
    status,
    detected_at: detectedAt
  } = row;
  return {
    id,
    entityIds: [entityA, entityB],
    description,
    severity,
    suggestedResolution: row.suggested_resolution ?? undefined,
    status,
    resolvedAction: row.resolved_action ?? undefined,
    resolvedAt: row.resolved_at ?? undefined,
    detectedAt
  };
}
2005
/**
 * Maps an `entities` table row to the Entity domain object.
 * JSON columns are parsed, nullable columns become `undefined`, and
 * `sourceRange` is only present when BOTH line bounds are non-null.
 */
function rowToEntity(row) {
  const hasRange = row.source_start_line != null && row.source_end_line != null;
  const sourceRange = hasRange
    ? { startLine: row.source_start_line, endLine: row.source_end_line }
    : undefined;
  return {
    id: row.id,
    type: row.type,
    name: row.name,
    content: row.content,
    summary: row.summary ?? undefined,
    properties: row.properties ? JSON.parse(row.properties) : {},
    confidence: row.confidence,
    sourceFile: row.source_file,
    sourceRange,
    projectId: row.project_id,
    extractedBy: JSON.parse(row.extracted_by),
    tags: row.tags ? JSON.parse(row.tags) : [],
    status: row.status,
    createdAt: row.created_at,
    updatedAt: row.updated_at
  };
}
2024
/**
 * Maps a `relationships` table row to the Relationship domain object.
 * JSON columns are parsed; a null description becomes `undefined`.
 */
function rowToRelationship(row) {
  const {
    id,
    type,
    source_entity_id: sourceEntityId,
    target_entity_id: targetEntityId,
    confidence,
    created_at: createdAt,
    updated_at: updatedAt
  } = row;
  return {
    id,
    type,
    sourceEntityId,
    targetEntityId,
    description: row.description ?? undefined,
    confidence,
    properties: row.properties ? JSON.parse(row.properties) : {},
    extractedBy: JSON.parse(row.extracted_by),
    createdAt,
    updatedAt
  };
}
2038
/**
 * Maps a `files` table row to the File domain object.
 * `entity_ids` is a JSON-array column; nullable columns become `undefined`.
 */
function rowToFile(row) {
  const entityIds = row.entity_ids ? JSON.parse(row.entity_ids) : [];
  const {
    id,
    path,
    relative_path: relativePath,
    project_id: projectId,
    content_hash: contentHash,
    file_type: fileType,
    size_bytes: sizeBytes,
    last_modified: lastModified,
    status
  } = row;
  return {
    id,
    path,
    relativePath,
    projectId,
    contentHash,
    fileType,
    sizeBytes,
    lastModified,
    lastIngestedAt: row.last_ingested_at ?? undefined,
    entityIds,
    status,
    parseError: row.parse_error ?? undefined
  };
}
2054
/**
 * Maps a `projects` table row to the Project domain object.
 * A null `last_ingested_at` becomes `undefined`.
 */
function rowToProject(row) {
  const {
    id,
    name,
    root_path: rootPath,
    privacy_level: privacyLevel,
    file_count: fileCount,
    entity_count: entityCount,
    created_at: createdAt
  } = row;
  return {
    id,
    name,
    rootPath,
    privacyLevel,
    fileCount,
    entityCount,
    lastIngestedAt: row.last_ingested_at ?? undefined,
    createdAt
  };
}
2066
// SQLite-backed persistence layer for the knowledge graph (better-sqlite3).
// Stores entities, relationships, files, projects, and contradictions, and
// mirrors entity text into the `entities_fts` FTS5 table for keyword search.
// Methods are declared `async` to satisfy the shared store interface, but
// all database work is synchronous under the hood.
var SQLiteStore = class {
  db;       // better-sqlite3 Database handle
  dbPath;   // resolved filesystem path of the SQLite file
  /**
   * Opens (creating if needed) the database file, optionally snapshots it
   * first, enables WAL / foreign-key / busy-timeout pragmas, and runs the
   * schema migration.
   */
  constructor(options = {}) {
    const { dbPath = "~/.cortex/cortex.db", walMode = true, backupOnStartup = true } = options;
    this.dbPath = resolveHomePath(dbPath);
    mkdirSync(dirname(this.dbPath), { recursive: true });
    if (backupOnStartup) {
      this.backupSync();
    }
    this.db = new Database(this.dbPath);
    if (walMode) {
      this.db.pragma("journal_mode = WAL");
    }
    this.db.pragma("foreign_keys = ON");
    this.db.pragma("busy_timeout = 5000");
    this.migrate();
  }
  // Applies the schema migration; any failure is wrapped in a critical
  // CortexError with a recovery hint.
  migrate() {
    try {
      up(this.db);
    } catch (err) {
      throw new CortexError(GRAPH_DB_ERROR, "critical", "graph", `Migration failed: ${err instanceof Error ? err.message : String(err)}`, void 0, "Delete the database and restart.");
    }
  }
  // Best-effort synchronous copy of the DB file to `<dbPath>.backup`.
  // Errors are deliberately swallowed (e.g. first run, when the file does
  // not exist yet).
  backupSync() {
    try {
      const stat = statSync(this.dbPath);
      if (stat.isFile()) {
        const backupPath = `${this.dbPath}.backup`;
        copyFileSync(this.dbPath, backupPath);
      }
    } catch {
    }
  }
  close() {
    this.db.close();
  }
  // --- Entities ---
  // Inserts a new entity plus its FTS mirror row; returns the stored entity
  // with generated id and timestamps.
  // NOTE(review): the two INSERTs are not wrapped in a transaction — a crash
  // between them would leave the FTS index out of sync with `entities`;
  // confirm whether this is acceptable.
  async createEntity(entity) {
    const id = randomUUID();
    const ts = now();
    this.db.prepare(`
      INSERT INTO entities (
        id, type, name, content, summary, properties, confidence,
        source_file, source_start_line, source_end_line,
        project_id, extracted_by, tags, status, created_at, updated_at
      ) VALUES (
        ?, ?, ?, ?, ?, ?, ?,
        ?, ?, ?,
        ?, ?, ?, ?, ?, ?
      )
    `).run(id, entity.type, entity.name, entity.content, entity.summary ?? null, JSON.stringify(entity.properties), entity.confidence, entity.sourceFile, entity.sourceRange?.startLine ?? null, entity.sourceRange?.endLine ?? null, entity.projectId, JSON.stringify(entity.extractedBy), JSON.stringify(entity.tags), entity.status, ts, ts);
    this.db.prepare(`
      INSERT INTO entities_fts (rowid, name, content, summary, tags)
      VALUES (
        (SELECT rowid FROM entities WHERE id = ?),
        ?, ?, ?, ?
      )
    `).run(id, entity.name, entity.content, entity.summary ?? "", entity.tags.join(" "));
    return { ...entity, id, createdAt: ts, updatedAt: ts };
  }
  // Returns the entity by id, or null when missing or soft-deleted.
  async getEntity(id) {
    const row = this.db.prepare("SELECT * FROM entities WHERE id = ? AND deleted_at IS NULL").get(id);
    return row ? rowToEntity(row) : null;
  }
  // Shallow-merges `updates` over the stored entity, bumps updated_at, and
  // refreshes the FTS mirror. Throws GRAPH_ENTITY_NOT_FOUND when the entity
  // does not exist (or is soft-deleted).
  async updateEntity(id, updates) {
    const existing = await this.getEntity(id);
    if (!existing) {
      throw new CortexError(GRAPH_ENTITY_NOT_FOUND, "low", "graph", `Entity not found: ${id}`, { entityId: id });
    }
    const merged = { ...existing, ...updates, updatedAt: now() };
    this.db.prepare(`
      UPDATE entities SET
        type = ?, name = ?, content = ?, summary = ?,
        properties = ?, confidence = ?,
        source_file = ?, source_start_line = ?, source_end_line = ?,
        extracted_by = ?, tags = ?, status = ?, updated_at = ?
      WHERE id = ?
    `).run(merged.type, merged.name, merged.content, merged.summary ?? null, JSON.stringify(merged.properties), merged.confidence, merged.sourceFile, merged.sourceRange?.startLine ?? null, merged.sourceRange?.endLine ?? null, JSON.stringify(merged.extractedBy), JSON.stringify(merged.tags), merged.status, merged.updatedAt, id);
    this.db.prepare(`
      UPDATE entities_fts SET name = ?, content = ?, summary = ?, tags = ?
      WHERE rowid = (SELECT rowid FROM entities WHERE id = ?)
    `).run(merged.name, merged.content, merged.summary ?? "", merged.tags.join(" "), id);
    return merged;
  }
  // Soft delete (default) stamps deleted_at/status; hard delete removes the
  // row and its FTS mirror.
  async deleteEntity(id, soft = true) {
    if (soft) {
      this.db.prepare("UPDATE entities SET deleted_at = ?, status = ? WHERE id = ?").run(now(), "deleted", id);
    } else {
      this.db.prepare("DELETE FROM entities_fts WHERE rowid = (SELECT rowid FROM entities WHERE id = ?)").run(id);
      this.db.prepare("DELETE FROM entities WHERE id = ?").run(id);
    }
  }
  // Filtered entity listing. With `query.search` set, runs an FTS5 MATCH
  // ordered by relevance; otherwise a plain scan ordered newest-first.
  // Soft-deleted rows are always excluded.
  // NOTE(review): `offset` without `limit` yields "... OFFSET ?" which
  // SQLite rejects — callers appear to always pass limit alongside offset;
  // confirm.
  async findEntities(query) {
    const conditions = ["deleted_at IS NULL"];
    const params = [];
    if (query.type) {
      conditions.push("type = ?");
      params.push(query.type);
    }
    if (query.projectId) {
      conditions.push("project_id = ?");
      params.push(query.projectId);
    }
    if (query.status) {
      conditions.push("status = ?");
      params.push(query.status);
    }
    if (query.since) {
      conditions.push("created_at >= ?");
      params.push(query.since);
    }
    if (query.before) {
      conditions.push("created_at < ?");
      params.push(query.before);
    }
    let sql;
    if (query.search) {
      sql = `
        SELECT e.* FROM entities e
        JOIN entities_fts fts ON fts.rowid = e.rowid
        WHERE fts.entities_fts MATCH ? AND ${conditions.join(" AND ")}
        ORDER BY rank
      `;
      // MATCH ? is the first placeholder, so the search term goes first.
      params.unshift(query.search);
    } else {
      sql = `
        SELECT * FROM entities
        WHERE ${conditions.join(" AND ")}
        ORDER BY created_at DESC
      `;
    }
    if (query.limit) {
      sql += " LIMIT ?";
      params.push(query.limit);
    }
    if (query.offset) {
      sql += " OFFSET ?";
      params.push(query.offset);
    }
    const rows = this.db.prepare(sql).all(...params);
    return rows.map(rowToEntity);
  }
  // --- Relationships ---
  // Inserts a directed edge; returns it with generated id and timestamps.
  async createRelationship(rel) {
    const id = randomUUID();
    const ts = now();
    this.db.prepare(`
      INSERT INTO relationships (
        id, type, source_entity_id, target_entity_id,
        description, confidence, properties, extracted_by,
        created_at, updated_at
      ) VALUES (?, ?, ?, ?, ?, ?, ?, ?, ?, ?)
    `).run(id, rel.type, rel.sourceEntityId, rel.targetEntityId, rel.description ?? null, rel.confidence, JSON.stringify(rel.properties), JSON.stringify(rel.extractedBy), ts, ts);
    return { ...rel, id, createdAt: ts, updatedAt: ts };
  }
  async getRelationship(id) {
    const row = this.db.prepare("SELECT * FROM relationships WHERE id = ?").get(id);
    return row ? rowToRelationship(row) : null;
  }
  // Edges touching an entity: "out" = entity is source, "in" = entity is
  // target, "both" (default) = either side.
  async getRelationshipsForEntity(entityId, direction = "both") {
    let sql;
    let params;
    if (direction === "out") {
      sql = "SELECT * FROM relationships WHERE source_entity_id = ?";
      params = [entityId];
    } else if (direction === "in") {
      sql = "SELECT * FROM relationships WHERE target_entity_id = ?";
      params = [entityId];
    } else {
      sql = "SELECT * FROM relationships WHERE source_entity_id = ? OR target_entity_id = ?";
      params = [entityId, entityId];
    }
    const rows = this.db.prepare(sql).all(...params);
    return rows.map(rowToRelationship);
  }
  async deleteRelationship(id) {
    this.db.prepare("DELETE FROM relationships WHERE id = ?").run(id);
  }
  // Hard-deletes every entity, relationship, and file record whose source
  // path starts with `pathPrefix`, in a single transaction. Returns
  // per-table change counts.
  // NOTE(review): forward slashes are rewritten to backslashes before the
  // LIKE match, i.e. stored paths are assumed to use Windows separators —
  // on a POSIX install this would corrupt the prefix; confirm intent.
  // NOTE(review): `%`/`_` inside the prefix act as LIKE wildcards, and the
  // FTS cleanup skips already-soft-deleted rows whose base rows are then
  // removed — verify no stale FTS entries can remain.
  deleteBySourcePath(pathPrefix) {
    const normalized = pathPrefix.replace(/\//g, "\\");
    const pattern = normalized.endsWith("%") ? normalized : normalized + "%";
    return this.db.transaction(() => {
      const relResult = this.db.prepare(`
        DELETE FROM relationships
        WHERE source_entity_id IN (SELECT id FROM entities WHERE source_file LIKE ?)
        OR target_entity_id IN (SELECT id FROM entities WHERE source_file LIKE ?)
      `).run(pattern, pattern);
      this.db.prepare(`
        DELETE FROM entities_fts
        WHERE rowid IN (SELECT rowid FROM entities WHERE source_file LIKE ? AND deleted_at IS NULL)
      `).run(pattern);
      const entityResult = this.db.prepare("DELETE FROM entities WHERE source_file LIKE ?").run(pattern);
      const fileResult = this.db.prepare("DELETE FROM files WHERE path LIKE ?").run(pattern);
      return {
        deletedEntities: entityResult.changes,
        deletedRelationships: relResult.changes,
        deletedFiles: fileResult.changes
      };
    })();
  }
  // Wipes all graph content (but not projects or schema) in one transaction.
  resetDatabase() {
    this.db.transaction(() => {
      this.db.prepare("DELETE FROM contradictions").run();
      this.db.prepare("DELETE FROM relationships").run();
      this.db.prepare("DELETE FROM entities_fts").run();
      this.db.prepare("DELETE FROM entities").run();
      this.db.prepare("DELETE FROM files").run();
    })();
  }
  // Permanently removes soft-deleted entities plus any relationships and
  // FTS rows that reference them; returns change counts.
  pruneSoftDeleted() {
    return this.db.transaction(() => {
      const relResult = this.db.prepare(`
        DELETE FROM relationships
        WHERE source_entity_id IN (SELECT id FROM entities WHERE deleted_at IS NOT NULL)
        OR target_entity_id IN (SELECT id FROM entities WHERE deleted_at IS NOT NULL)
      `).run();
      this.db.prepare(`
        DELETE FROM entities_fts
        WHERE rowid IN (SELECT rowid FROM entities WHERE deleted_at IS NOT NULL)
      `).run();
      const entityResult = this.db.prepare("DELETE FROM entities WHERE deleted_at IS NOT NULL").run();
      return {
        deletedEntities: entityResult.changes,
        deletedRelationships: relResult.changes
      };
    })();
  }
  // --- Files ---
  // Insert-or-update keyed on the absolute path; an update preserves the
  // existing row id.
  async upsertFile(file) {
    const existing = this.db.prepare("SELECT * FROM files WHERE path = ?").get(file.path);
    if (existing) {
      this.db.prepare(`
        UPDATE files SET
          relative_path = ?, project_id = ?, content_hash = ?,
          file_type = ?, size_bytes = ?, last_modified = ?,
          last_ingested_at = ?, entity_ids = ?, status = ?, parse_error = ?
        WHERE path = ?
      `).run(file.relativePath, file.projectId, file.contentHash, file.fileType, file.sizeBytes, file.lastModified, file.lastIngestedAt ?? null, JSON.stringify(file.entityIds), file.status, file.parseError ?? null, file.path);
      return { ...file, id: existing.id };
    }
    const id = randomUUID();
    this.db.prepare(`
      INSERT INTO files (
        id, path, relative_path, project_id, content_hash,
        file_type, size_bytes, last_modified, last_ingested_at,
        entity_ids, status, parse_error
      ) VALUES (?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?)
    `).run(id, file.path, file.relativePath, file.projectId, file.contentHash, file.fileType, file.sizeBytes, file.lastModified, file.lastIngestedAt ?? null, JSON.stringify(file.entityIds), file.status, file.parseError ?? null);
    return { ...file, id };
  }
  async getFile(path) {
    const row = this.db.prepare("SELECT * FROM files WHERE path = ?").get(path);
    return row ? rowToFile(row) : null;
  }
  async getFilesByProject(projectId) {
    const rows = this.db.prepare("SELECT * FROM files WHERE project_id = ?").all(projectId);
    return rows.map(rowToFile);
  }
  // --- Projects ---
  async createProject(project) {
    const id = randomUUID();
    const ts = now();
    this.db.prepare(`
      INSERT INTO projects (
        id, name, root_path, privacy_level,
        file_count, entity_count, last_ingested_at, created_at
      ) VALUES (?, ?, ?, ?, ?, ?, ?, ?)
    `).run(id, project.name, project.rootPath, project.privacyLevel, project.fileCount, project.entityCount, project.lastIngestedAt ?? null, ts);
    return { ...project, id, createdAt: ts };
  }
  async getProject(id) {
    const row = this.db.prepare("SELECT * FROM projects WHERE id = ?").get(id);
    return row ? rowToProject(row) : null;
  }
  async listProjects() {
    const rows = this.db.prepare("SELECT * FROM projects ORDER BY created_at DESC").all();
    return rows.map(rowToProject);
  }
  // --- Contradictions ---
  // Persists a detected contradiction between the first two entityIds.
  async createContradiction(contradiction) {
    const id = randomUUID();
    this.db.prepare(`
      INSERT INTO contradictions (
        id, entity_id_a, entity_id_b, description, severity,
        suggested_resolution, status, resolved_action, resolved_at, detected_at
      ) VALUES (?, ?, ?, ?, ?, ?, ?, ?, ?, ?)
    `).run(id, contradiction.entityIds[0], contradiction.entityIds[1], contradiction.description, contradiction.severity, contradiction.suggestedResolution ?? null, contradiction.status, contradiction.resolvedAction ?? null, contradiction.resolvedAt ?? null, contradiction.detectedAt);
    return { ...contradiction, id };
  }
  // Lists contradictions, optionally filtered by status and/or by either
  // endpoint entity, newest first.
  async findContradictions(query = {}) {
    const conditions = [];
    const params = [];
    if (query.status) {
      conditions.push("status = ?");
      params.push(query.status);
    }
    if (query.entityId) {
      conditions.push("(entity_id_a = ? OR entity_id_b = ?)");
      params.push(query.entityId, query.entityId);
    }
    const where = conditions.length > 0 ? `WHERE ${conditions.join(" AND ")}` : "";
    let sql = `SELECT * FROM contradictions ${where} ORDER BY detected_at DESC`;
    if (query.limit) {
      sql += " LIMIT ?";
      params.push(query.limit);
    }
    const rows = this.db.prepare(sql).all(...params);
    return rows.map(rowToContradiction);
  }
  async updateContradiction(id, update) {
    this.db.prepare(`
      UPDATE contradictions SET status = ?, resolved_action = ?, resolved_at = ? WHERE id = ?
    `).run(update.status, update.resolvedAction ?? null, update.resolvedAt ?? null, id);
  }
  // --- Search ---
  // FTS5 full-text search over non-deleted entities, ranked by relevance.
  async searchEntities(text, limit = 20) {
    const rows = this.db.prepare(`
      SELECT e.* FROM entities e
      JOIN entities_fts fts ON fts.rowid = e.rowid
      WHERE fts.entities_fts MATCH ? AND e.deleted_at IS NULL
      ORDER BY rank
      LIMIT ?
    `).all(text, limit);
    return rows.map(rowToEntity);
  }
  // Placeholder: semantic search is handled by VectorStore; this store
  // always returns an empty result.
  async semanticSearch(_embedding, _limit = 20) {
    return [];
  }
  // --- Stats ---
  // Aggregate counts for dashboards; DB size read from disk (0 on failure).
  async getStats() {
    const entityCount = this.db.prepare("SELECT COUNT(*) as count FROM entities WHERE deleted_at IS NULL AND status != 'deleted'").get().count;
    const relationshipCount = this.db.prepare("SELECT COUNT(*) as count FROM relationships").get().count;
    const fileCount = this.db.prepare("SELECT COUNT(*) as count FROM files").get().count;
    const projectCount = this.db.prepare("SELECT COUNT(*) as count FROM projects").get().count;
    const contradictionCount = this.db.prepare("SELECT COUNT(*) as count FROM contradictions WHERE status = 'active'").get().count;
    let dbSizeBytes = 0;
    try {
      dbSizeBytes = statSync(this.dbPath).size;
    } catch {
    }
    return {
      entityCount,
      relationshipCount,
      fileCount,
      projectCount,
      contradictionCount,
      dbSizeBytes,
      vectorDbSizeBytes: 0
      // Managed by VectorStore
    };
  }
  // --- Report ---
  // Builds the full ingest/health report: file status histogram, entity and
  // relationship breakdowns, contradiction tallies, and a token estimate
  // summed from each entity's extracted_by JSON.
  getReportData() {
    const fileRows = this.db.prepare("SELECT status, COUNT(*) as count FROM files GROUP BY status").all();
    const fileStatus = { ingested: 0, failed: 0, skipped: 0, pending: 0 };
    for (const row of fileRows) {
      // Ignore any status value outside the four known buckets.
      if (row.status in fileStatus) {
        fileStatus[row.status] = row.count;
      }
    }
    const failedFiles = this.db.prepare(`SELECT path, relative_path, parse_error FROM files
      WHERE status = 'failed' AND parse_error IS NOT NULL
      ORDER BY path LIMIT 50`).all().map((r) => ({ path: r.path, relativePath: r.relative_path, parseError: r.parse_error }));
    const entityRows = this.db.prepare(`SELECT type, COUNT(*) as count, AVG(confidence) as avg_confidence
      FROM entities WHERE deleted_at IS NULL AND status = 'active'
      GROUP BY type ORDER BY count DESC`).all();
    const entityBreakdown = entityRows.map((r) => ({
      type: r.type,
      count: r.count,
      avgConfidence: r.avg_confidence
    }));
    const supersededCount = this.db.prepare("SELECT COUNT(*) as count FROM entities WHERE status = 'superseded'").get().count;
    const relRows = this.db.prepare("SELECT type, COUNT(*) as count FROM relationships GROUP BY type ORDER BY count DESC").all();
    const relationshipBreakdown = relRows.map((r) => ({ type: r.type, count: r.count }));
    const contrRows = this.db.prepare("SELECT status, severity, COUNT(*) as count FROM contradictions GROUP BY status, severity").all();
    const contradictions = {
      active: 0,
      resolved: 0,
      dismissed: 0,
      highSeverity: 0,
      mediumSeverity: 0,
      lowSeverity: 0
    };
    // Severity tallies intentionally span all statuses; "critical" is folded
    // into the high-severity bucket.
    for (const r of contrRows) {
      if (r.status === "active")
        contradictions.active += r.count;
      if (r.status === "resolved")
        contradictions.resolved += r.count;
      if (r.status === "dismissed")
        contradictions.dismissed += r.count;
      if (r.severity === "high" || r.severity === "critical")
        contradictions.highSeverity += r.count;
      if (r.severity === "medium")
        contradictions.mediumSeverity += r.count;
      if (r.severity === "low")
        contradictions.lowSeverity += r.count;
    }
    const topContrRows = this.db.prepare(`SELECT c.id, c.severity, c.description, ea.name as entity_a, eb.name as entity_b
      FROM contradictions c
      LEFT JOIN entities ea ON c.entity_id_a = ea.id
      LEFT JOIN entities eb ON c.entity_id_b = eb.id
      WHERE c.status = 'active'
      ORDER BY CASE c.severity
      WHEN 'critical' THEN 0 WHEN 'high' THEN 1 WHEN 'medium' THEN 2 ELSE 3
      END, c.detected_at DESC
      LIMIT 10`).all();
    const topContradictions = topContrRows.map((r) => ({
      id: r.id.slice(0, 8),
      severity: r.severity,
      description: r.description,
      entityA: r.entity_a ?? "unknown",
      entityB: r.entity_b ?? "unknown"
    }));
    const tokenRow = this.db.prepare(`SELECT
      SUM(CAST(JSON_EXTRACT(extracted_by, '$.tokensUsed.input') AS INTEGER)) as total_input,
      SUM(CAST(JSON_EXTRACT(extracted_by, '$.tokensUsed.output') AS INTEGER)) as total_output
      FROM entities WHERE deleted_at IS NULL`).get();
    return {
      generatedAt: (/* @__PURE__ */ new Date()).toISOString(),
      fileStatus,
      failedFiles,
      entityBreakdown,
      supersededCount,
      relationshipBreakdown,
      contradictions,
      topContradictions,
      tokenEstimate: {
        totalInput: tokenRow.total_input ?? 0,
        totalOutput: tokenRow.total_output ?? 0
      }
    };
  }
  // --- Maintenance ---
  // Online backup (better-sqlite3 backup API) to a timestamped sibling file;
  // returns the backup path.
  async backup() {
    const backupPath = `${this.dbPath}.backup-${Date.now()}`;
    await this.db.backup(backupPath);
    return backupPath;
  }
  // Checks referential integrity (orphaned relationships, files pointing at
  // missing projects) plus SQLite's own PRAGMA integrity_check.
  async integrityCheck() {
    const details = [];
    const orphanedRels = this.db.prepare(`
      SELECT COUNT(*) as count FROM relationships r
      WHERE NOT EXISTS (SELECT 1 FROM entities WHERE id = r.source_entity_id)
      OR NOT EXISTS (SELECT 1 FROM entities WHERE id = r.target_entity_id)
    `).get().count;
    if (orphanedRels > 0) {
      details.push(`Found ${orphanedRels} orphaned relationships`);
    }
    const missingProjects = this.db.prepare(`
      SELECT COUNT(*) as count FROM files f
      WHERE NOT EXISTS (SELECT 1 FROM projects WHERE id = f.project_id)
    `).get().count;
    if (missingProjects > 0) {
      details.push(`Found ${missingProjects} files referencing missing projects`);
    }
    const integrityResult = this.db.pragma("integrity_check");
    const sqliteOk = integrityResult.length === 1 && integrityResult[0].integrity_check === "ok";
    if (!sqliteOk) {
      details.push("SQLite integrity check failed");
    }
    return {
      ok: orphanedRels === 0 && missingProjects === 0 && sqliteOk,
      orphanedRelationships: orphanedRels,
      missingFiles: missingProjects,
      details
    };
  }
  // --- Graph visualization data ---
  // Returns up to `limit` highest-confidence active entities as nodes, and
  // the edges among them. Edges are fetched with a 2x limit heuristic and
  // then filtered to those whose BOTH endpoints are in the node set.
  getGraphData(options = {}) {
    const limit = options.limit ?? 2e3;
    let entitySql = `SELECT id, name, type, confidence, source_file FROM entities WHERE status = 'active'`;
    const params = [];
    if (options.projectId) {
      entitySql += ` AND project_id = ?`;
      params.push(options.projectId);
    }
    entitySql += ` ORDER BY confidence DESC LIMIT ?`;
    params.push(limit);
    const entityRows = this.db.prepare(entitySql).all(...params);
    const entityIds = new Set(entityRows.map((e) => e.id));
    const relRows = this.db.prepare(`SELECT id, type, source_entity_id, target_entity_id, confidence
      FROM relationships
      LIMIT ?`).all(limit * 2);
    const edges = relRows.filter((r) => entityIds.has(r.source_entity_id) && entityIds.has(r.target_entity_id)).map((r) => ({
      id: r.id,
      source: r.source_entity_id,
      target: r.target_entity_id,
      type: r.type,
      confidence: r.confidence
    }));
    return {
      nodes: entityRows.map((e) => ({
        id: e.id,
        name: e.name,
        type: e.type,
        confidence: e.confidence,
        sourceFile: e.source_file
      })),
      edges
    };
  }
};
2570
+
2571
+ // packages/graph/dist/vector-store.js
2572
+ import { connect } from "@lancedb/lancedb";
2573
+ import { mkdirSync as mkdirSync2 } from "node:fs";
2574
+ import { homedir as homedir4 } from "node:os";
2575
// Namespaced logger for the vector-store module.
var logger7 = createLogger("graph:vector-store");
2576
// Expands a leading "~" in a filesystem path to the user's home directory;
// other paths pass through unchanged.
function resolveHomePath2(p) {
  if (!p.startsWith("~")) {
    return p;
  }
  // The first "~" is the leading character thanks to the guard above.
  return p.replace("~", homedir4());
}
2579
// Name of the single LanceDB table holding one embedding row per entity.
var TABLE_NAME = "entity_embeddings";
2580
// LanceDB-backed embedding store. One row per entity in the
// `entity_embeddings` table; the table itself is created lazily on the
// first write so an empty install never materializes it.
var VectorStore = class {
  db = null;       // LanceDB connection (null until initialize())
  table = null;    // open table handle (null until opened or created)
  dbPath;          // resolved on-disk directory of the LanceDB dataset
  dimensions;      // embedding vector length (defaults to 384)
  constructor(options = {}) {
    this.dbPath = resolveHomePath2(options.dbPath ?? "~/.cortex/vector.lance");
    this.dimensions = options.dimensions ?? 384;
  }
  // Connects to the LanceDB directory and opens the table when it already
  // exists; otherwise creation is deferred to the first addVectors() call.
  async initialize() {
    mkdirSync2(this.dbPath, { recursive: true });
    this.db = await connect(this.dbPath);
    try {
      this.table = await this.db.openTable(TABLE_NAME);
    } catch {
      logger7.debug("Vector table does not exist yet, will create on first add");
    }
  }
  // Lazily creates the table by inserting (then deleting) a zero-vector
  // placeholder row, which pins the schema and vector dimensionality.
  async ensureTable() {
    if (this.table)
      return this.table;
    if (!this.db)
      throw new Error("VectorStore not initialized");
    this.table = await this.db.createTable(TABLE_NAME, [
      {
        id: "_init",
        entityId: "_init",
        vector: new Array(this.dimensions).fill(0),
        text: ""
      }
    ]);
    await this.table.delete('id = "_init"');
    return this.table;
  }
  // Bulk-adds embedding records; the row id doubles as the entity id.
  async addVectors(records) {
    if (records.length === 0)
      return;
    const table = await this.ensureTable();
    const rows = records.map((r) => ({
      id: r.entityId,
      entityId: r.entityId,
      vector: Array.from(r.vector),
      text: r.text
    }));
    await table.add(rows);
    logger7.debug(`Added ${rows.length} vectors`);
  }
  // Nearest-neighbor search; returns [] when no table has been created yet.
  async search(queryVector, limit = 20) {
    if (!this.table)
      return [];
    const results = await this.table.search(Array.from(queryVector)).limit(limit).toArray();
    return results.map((r) => ({
      entityId: r.entityId,
      distance: r._distance,
      text: r.text
    }));
  }
  // Removes the embedding row for an entity (no-op when table is absent).
  // NOTE(review): the filter is built by string interpolation — an entityId
  // containing a double quote would break or alter the predicate. Ids are
  // produced by randomUUID() elsewhere in this bundle, so this is currently
  // safe, but confirm no other id source exists.
  async deleteByEntityId(entityId) {
    if (!this.table)
      return;
    await this.table.delete(`entityId = "${entityId}"`);
  }
  async count() {
    if (!this.table)
      return 0;
    return await this.table.countRows();
  }
};
2648
+
2649
+ // packages/graph/dist/query-engine.js
2650
// Namespaced logger for the query-engine module.
var logger8 = createLogger("graph:query-engine");
2651
// Heuristic: roughly four characters of English text per LLM token.
var AVG_CHARS_PER_TOKEN = 4;
// Cheap upper-bound token estimate for a text snippet (length / 4, rounded up).
function estimateTokens(text) {
  const charCount = text.length;
  return Math.ceil(charCount / AVG_CHARS_PER_TOKEN);
}
2655
// Hybrid retrieval engine: merges SQLite FTS5 keyword hits with LanceDB
// vector hits, then packs the top-ranked entities (plus the relationships
// among them) into a token-budgeted context block for the LLM.
var QueryEngine = class {
  sqliteStore;        // SQLiteStore providing FTS + relationship lookups
  vectorStore;        // VectorStore providing embedding search
  maxContextTokens;   // overall context budget (default 50,000 tokens)
  maxResultEntities;  // hard cap on entities in the context (default 30)
  ftsWeight;          // keyword-score weight in the merged ranking (0.4)
  vectorWeight;       // vector-score weight in the merged ranking (0.6)
  constructor(sqliteStore, vectorStore, options = {}) {
    this.sqliteStore = sqliteStore;
    this.vectorStore = vectorStore;
    this.maxContextTokens = options.maxContextTokens ?? 5e4;
    this.maxResultEntities = options.maxResultEntities ?? 30;
    this.ftsWeight = options.ftsWeight ?? 0.4;
    this.vectorWeight = options.vectorWeight ?? 0.6;
  }
  /**
   * Builds the retrieval context for `query`: runs FTS and (when an
   * embedding is supplied) vector search in parallel, rank-merges the hits,
   * then greedily packs entities until 70% of the token budget or
   * maxResultEntities is reached. Only relationships whose BOTH endpoints
   * made the cut are included. Returns { entities, relationships,
   * totalTokensEstimate }.
   */
  async assembleContext(query, queryEmbedding, projectId) {
    const [ftsResults, vectorResults] = await Promise.all([
      this.ftsSearch(query, projectId),
      queryEmbedding ? this.vectorStore.search(queryEmbedding, 30) : Promise.resolve([])
    ]);
    const rankedEntities = this.mergeAndRank(ftsResults, vectorResults);
    const contextEntities = [];
    let totalTokens = 0;
    // Reserve ~30% of the budget for relationships and prompt overhead.
    const budgetForEntities = Math.floor(this.maxContextTokens * 0.7);
    for (const entity of rankedEntities) {
      if (contextEntities.length >= this.maxResultEntities)
        break;
      const entityTokens = estimateTokens(entity.content) + estimateTokens(entity.name);
      if (totalTokens + entityTokens > budgetForEntities)
        break;
      contextEntities.push(entity);
      totalTokens += entityTokens;
    }
    const entityIds = new Set(contextEntities.map((e) => e.id));
    const relationships = [];
    for (const entity of contextEntities) {
      const rels = await this.sqliteStore.getRelationshipsForEntity(entity.id);
      for (const rel of rels) {
        // Keep only edges fully inside the selected entity set.
        if (entityIds.has(rel.sourceEntityId) && entityIds.has(rel.targetEntityId)) {
          relationships.push(rel);
        }
      }
    }
    // Each qualifying edge is collected once per endpoint; dedupe by id.
    const uniqueRels = [...new Map(relationships.map((r) => [r.id, r])).values()];
    // +20 is a flat per-relationship overhead allowance.
    const relTokens = uniqueRels.reduce((sum, r) => sum + estimateTokens(r.description ?? "") + 20, 0);
    logger8.debug("Context assembled", {
      entities: contextEntities.length,
      relationships: uniqueRels.length,
      totalTokensEstimate: totalTokens + relTokens
    });
    return {
      entities: contextEntities,
      relationships: uniqueRels,
      totalTokensEstimate: totalTokens + relTokens
    };
  }
  /**
   * Converts a natural language query to an FTS5-safe keyword query.
   * FTS5 uses AND semantics by default, so "what is the architecture" would
   * require ALL words to match. We strip stop words and use OR semantics so
   * entities matching ANY meaningful keyword are returned.
   */
  buildFtsQuery(query) {
    const stopWords = /* @__PURE__ */ new Set([
      "a", "an", "the", "and", "or", "but", "in", "on", "at", "to",
      "for", "of", "with", "by", "from", "is", "are", "was", "were", "be",
      "been", "being", "have", "has", "had", "do", "does", "did", "will", "would",
      "could", "should", "may", "might", "shall", "can", "need", "must", "what", "which",
      "who", "how", "why", "when", "where", "that", "this", "these", "those", "it",
      "its", "me", "my", "you", "your", "we", "our", "they", "their", "he",
      "she", "i", "all", "any", "each", "some", "no", "not", "so", "yet",
      "use", "used", "using", "about", "tell", "know", "get", "got", "make", "made",
      "see", "give", "go", "come", "take"
    ]);
    // Strip punctuation (FTS5 operators), lowercase, keep words of >= 3
    // chars that are not stop words.
    const keywords = query.replace(/[^a-zA-Z0-9\s]/g, " ").toLowerCase().split(/\s+/).filter((w) => w.length >= 3 && !stopWords.has(w));
    if (keywords.length === 0) {
      // Everything was filtered out: fall back to the sanitized raw query.
      return query.replace(/[^a-zA-Z0-9\s]/g, " ").trim();
    }
    return keywords.join(" OR ");
  }
  // Runs the FTS query, scoped to a project when given. FTS syntax errors
  // are logged and downgraded to an empty result set so a bad query never
  // fails the whole context assembly.
  async ftsSearch(query, projectId) {
    const ftsQuery = this.buildFtsQuery(query);
    try {
      if (projectId) {
        return await this.sqliteStore.findEntities({
          search: ftsQuery,
          projectId,
          limit: 30
        });
      }
      return await this.sqliteStore.searchEntities(ftsQuery, 30);
    } catch (err) {
      logger8.warn("FTS search failed, returning empty results", {
        error: err instanceof Error ? err.message : String(err),
        query: ftsQuery
      });
      return [];
    }
  }
  // Blends a position-based FTS score with a normalized vector-distance
  // score. Vector hits that did not also match FTS are ignored — they carry
  // only an entityId, so there is no entity object to rank.
  mergeAndRank(ftsResults, vectorResults) {
    const scores = /* @__PURE__ */ new Map();
    for (let i = 0; i < ftsResults.length; i++) {
      const entity = ftsResults[i];
      // Earlier FTS position (better rank) => score closer to 1.
      const positionScore = 1 - i / Math.max(ftsResults.length, 1);
      scores.set(entity.id, {
        entity,
        score: positionScore * this.ftsWeight
      });
    }
    if (vectorResults.length > 0) {
      const maxDist = Math.max(...vectorResults.map((r) => r.distance), 1);
      for (const vr of vectorResults) {
        // Smaller distance => score closer to 1.
        const distScore = 1 - vr.distance / maxDist;
        const existing = scores.get(vr.entityId);
        if (existing) {
          existing.score += distScore * this.vectorWeight;
        }
      }
    }
    return [...scores.values()].sort((a, b) => b.score - a.score).map((s) => s.entity);
  }
};
2853
+
2854
// packages/mcp/dist/store-factory.js
async function createStoreBundle(configDir) {
  // Wire up the persistence layer for the MCP server: SQLite graph store,
  // vector store, and the query engine that reads from both.
  const config8 = loadConfig({ configDir });
  const store = new SQLiteStore({
    dbPath: config8.graph.dbPath,
    walMode: config8.graph.walMode,
    // never backup on MCP startup — adds latency, not needed
    backupOnStartup: false
  });
  const vectorStore = new VectorStore({ dbPath: config8.graph.vectorDbPath });
  await vectorStore.initialize();
  const queryEngine = new QueryEngine(store, vectorStore, {
    maxContextTokens: config8.llm.maxContextTokens,
    // keep LLM context small for fast MCP responses
    maxResultEntities: 10
  });
  // NOTE(review): cleanup closes only the SQLite store; confirm VectorStore
  // requires no teardown of its own.
  return {
    store,
    queryEngine,
    cleanup: () => store.close()
  };
}
2878
+
2879
+ // packages/mcp/dist/server.js
2880
+ import { McpServer } from "@modelcontextprotocol/sdk/server/mcp.js";
2881
+ import { z as z9 } from "zod";
2882
+
2883
// packages/mcp/dist/tools/query.js
async function handleQueryCortex(input, queryEngine, router, store) {
  // Answer a natural-language question: assemble graph context, summarize the
  // graph state, then ask the LLM router for an answer with entity citations.
  const [context, graphStats, projects] = await Promise.all([
    queryEngine.assembleContext(input.question, void 0, input.projectId),
    store.getStats(),
    store.listProjects()
  ]);
  const summaryLines = [
    `${graphStats.entityCount} entities, ${graphStats.relationshipCount} relationships, ${graphStats.fileCount} files indexed`
  ];
  if (projects.length > 0) {
    summaryLines.push(`Projects: ${projects.map((p) => `${p.name} (${p.rootPath})`).join(", ")}`);
  } else {
    summaryLines.push("No projects configured.");
  }
  if (projects.some((p) => p.lastIngestedAt)) {
    summaryLines.push(`Last ingested: ${projects.map((p) => p.lastIngestedAt).filter(Boolean).sort().pop()}`);
  }
  const graphSummary = summaryLines.filter(Boolean).join("\n");
  // Empty graph: short-circuit with guidance instead of calling the LLM.
  if (context.entities.length === 0 && graphStats.entityCount === 0) {
    return {
      answer: "No entities found. Make sure `cortex watch` has been run on your project files.",
      citations: [],
      entityCount: 0,
      provider: "none",
      model: "none"
    };
  }
  // Flatten each entity plus its outgoing relationships for the prompt.
  const contextEntities = context.entities.map((e) => {
    const relationships = context.relationships
      .filter((r) => r.sourceEntityId === e.id)
      .map((r) => ({ type: r.type, targetEntityId: r.targetEntityId }));
    return {
      id: e.id,
      type: e.type,
      name: e.name,
      content: e.content,
      sourceFile: e.sourceFile,
      createdAt: e.createdAt,
      relationships
    };
  });
  const prompts = conversational_query_exports;
  const result = await router.complete({
    systemPrompt: prompts.systemPrompt,
    userPrompt: prompts.buildUserPrompt({
      contextEntities,
      userQuery: input.question,
      graphSummary
    }),
    promptId: prompts.PROMPT_ID,
    promptVersion: prompts.PROMPT_VERSION,
    task: LLMTask.CONVERSATIONAL_QUERY,
    modelPreference: prompts.config.model,
    temperature: prompts.config.temperature,
    maxTokens: prompts.config.maxTokens
  });
  const citations = context.entities.map((e) => ({
    entityId: e.id,
    entityType: e.type,
    entityName: e.name,
    sourceFile: e.sourceFile
  }));
  return {
    answer: result.content,
    citations,
    entityCount: context.entities.length,
    provider: result.provider,
    model: result.model
  };
}
2940
+
2941
// packages/mcp/dist/tools/find.js
async function handleFindEntity(input, store) {
  // Exact ID lookup first; fall back to fuzzy name search (optionally
  // filtered by entity type — the filter does not apply to exact ID hits).
  // When `expand` is set, each match carries its relationships with the
  // neighbor entity's name resolved.
  let entities = [];
  const exact = await store.getEntity(input.name);
  if (exact) {
    entities = [exact];
  } else {
    const candidates = await store.searchEntities(input.name, 20);
    entities = input.type ? candidates.filter((e) => e.type === input.type) : candidates;
  }
  if (entities.length === 0) {
    return { found: false, matches: [] };
  }
  // Describe one relationship from the perspective of `entityId`.
  const describeRelationship = async (rel, entityId) => {
    const isOutgoing = rel.sourceEntityId === entityId;
    const otherId = isOutgoing ? rel.targetEntityId : rel.sourceEntityId;
    const other = await store.getEntity(otherId);
    return {
      id: rel.id,
      type: rel.type,
      direction: isOutgoing ? "outgoing" : "incoming",
      otherEntityId: otherId,
      otherEntityName: other?.name,
      description: rel.description,
      confidence: rel.confidence
    };
  };
  const matches = await Promise.all(entities.map(async (entity) => {
    let relationships = [];
    if (input.expand) {
      const rels = await store.getRelationshipsForEntity(entity.id, "both");
      relationships = await Promise.all(rels.map((rel) => describeRelationship(rel, entity.id)));
    }
    return {
      id: entity.id,
      type: entity.type,
      name: entity.name,
      summary: entity.summary,
      content: entity.content,
      sourceFile: entity.sourceFile,
      sourceRange: entity.sourceRange,
      confidence: entity.confidence,
      tags: entity.tags,
      createdAt: entity.createdAt,
      relationships
    };
  }));
  return { found: true, matches };
}
2989
+
2990
// packages/mcp/dist/tools/projects.js
async function handleListProjects(store) {
  // Return every registered project as a plain-JSON summary plus a total
  // count; only the whitelisted fields below are exposed to the client.
  const projects = await store.listProjects();
  const summaries = projects.map((p) => ({
    id: p.id,
    name: p.name,
    rootPath: p.rootPath,
    privacyLevel: p.privacyLevel,
    fileCount: p.fileCount,
    entityCount: p.entityCount,
    lastIngestedAt: p.lastIngestedAt,
    createdAt: p.createdAt
  }));
  return { projects: summaries, total: summaries.length };
}
3007
+
3008
// packages/mcp/dist/tools/status.js
async function handleGetStatus(store) {
  // Report graph size metrics. `ready` means at least one entity exists;
  // when the graph is empty a hint tells the user how to populate it.
  const stats = await store.getStats();
  const ready = stats.entityCount > 0;
  const result = {
    graph: {
      entityCount: stats.entityCount,
      relationshipCount: stats.relationshipCount,
      fileCount: stats.fileCount,
      projectCount: stats.projectCount,
      contradictionCount: stats.contradictionCount,
      dbSizeBytes: stats.dbSizeBytes
    },
    ready
  };
  if (!ready) {
    result.hint = "No entities yet. Run `cortex watch` in your project directory to ingest files.";
  }
  return result;
}
3025
+
3026
// packages/mcp/dist/tools/contradictions.js
async function handleGetContradictions(input, store) {
  // List contradictions (optionally filtered by status/entity), enriching
  // each with a short summary of both involved entities. Entity lookups are
  // best-effort: a failed fetch yields null rather than failing the listing.
  const contradictions = await store.findContradictions({
    status: input.status,
    entityId: input.entityId,
    limit: input.limit ?? 50
  });
  const summarize = (entity) => entity ? { id: entity.id, name: entity.name, type: entity.type, summary: entity.summary } : null;
  const enriched = await Promise.all(contradictions.map(async (c) => {
    const [entityA, entityB] = await Promise.all([
      store.getEntity(c.entityIds[0]).catch(() => null),
      store.getEntity(c.entityIds[1]).catch(() => null)
    ]);
    return { ...c, entityA: summarize(entityA), entityB: summarize(entityB) };
  }));
  return JSON.stringify({ count: enriched.length, contradictions: enriched }, null, 2);
}
3046
async function handleResolveContradiction(input, store) {
  // Apply a resolution action to a contradiction. "dismiss" marks it
  // dismissed; every other valid action marks it resolved. Invalid actions
  // return an error payload without touching the store.
  const validActions = ["supersede", "dismiss", "keep_old", "both_valid"];
  if (!validActions.includes(input.action)) {
    return JSON.stringify({ error: `Invalid action. Must be one of: ${validActions.join(", ")}` });
  }
  const status = input.action === "dismiss" ? "dismissed" : "resolved";
  const resolvedAt = new Date().toISOString();
  await store.updateContradiction(input.id, {
    status,
    resolvedAction: input.action,
    resolvedAt
  });
  return JSON.stringify({ success: true, id: input.id, action: input.action, status });
}
3059
+
3060
// packages/mcp/dist/server.js
// Build the MCP server and register the six Cortex tools. Every handler
// delegates to a handle* function and returns its result as JSON text
// content, the shape MCP clients expect.
function createCortexMcpServer(bundle, router) {
  const server = new McpServer({
    name: "cortex",
    version: "0.1.0"
  });
  // Status probe — takes no input, so no inputSchema is declared.
  server.registerTool("get_status", {
    title: "Get Cortex Status",
    description: "Get current status of the Cortex knowledge graph: entity count, relationship count, file count, and whether the graph has data. Check this first to verify Cortex is populated."
  }, async () => {
    const result = await handleGetStatus(bundle.store);
    return { content: [{ type: "text", text: JSON.stringify(result, null, 2) }] };
  });
  // Project listing — also input-free.
  server.registerTool("list_projects", {
    title: "List Cortex Projects",
    description: "List all projects registered in Cortex with their file and entity counts. Use the project id to scope query_cortex to a specific project."
  }, async () => {
    const result = await handleListProjects(bundle.store);
    return { content: [{ type: "text", text: JSON.stringify(result, null, 2) }] };
  });
  // Precise entity lookup by name or UUID, with optional relationship expansion.
  server.registerTool("find_entity", {
    title: "Find Entity",
    description: "Look up a specific entity by name or UUID in the Cortex knowledge graph. Returns entity details and optionally its relationships to other entities. Use this for precise lookups: decisions, patterns, components, dependencies.",
    inputSchema: {
      name: z9.string().describe("Entity name (fuzzy matched) or exact UUID"),
      expand: z9.boolean().optional().describe("Include all relationships with neighbor entity names (default: false)"),
      type: z9.string().optional().describe("Filter by entity type: Decision, Requirement, Pattern, Component, Dependency, Interface, Constraint, ActionItem, Risk, Note")
    }
  }, async ({ name, expand, type }) => {
    // `expand` defaults to false when the client omits it.
    const result = await handleFindEntity({ name, expand: expand ?? false, type }, bundle.store);
    return { content: [{ type: "text", text: JSON.stringify(result, null, 2) }] };
  });
  // Natural-language Q&A over the graph — the only tool that invokes the LLM router.
  server.registerTool("query_cortex", {
    title: "Query Cortex Knowledge Graph",
    description: "Answer a natural language question using the Cortex knowledge graph. Returns an LLM-generated answer with cited entities. Use this to understand decisions, architectural choices, patterns, and dependencies across watched projects.",
    inputSchema: {
      question: z9.string().describe("The natural language question to answer"),
      projectId: z9.string().optional().describe("Scope context to a specific project. Get IDs from list_projects.")
    }
  }, async ({ question, projectId }) => {
    const result = await handleQueryCortex({ question, projectId }, bundle.queryEngine, router, bundle.store);
    return { content: [{ type: "text", text: JSON.stringify(result, null, 2) }] };
  });
  // Contradiction listing; the handler already returns a JSON string, so it
  // is passed through as-is rather than re-stringified.
  server.registerTool("get_contradictions", {
    title: "Get Contradictions",
    description: "List contradictions detected in the knowledge graph. Contradictions occur when two entities make conflicting claims (e.g., different tech choices for the same thing). Returns both entities with summaries so you can understand and help resolve them.",
    inputSchema: {
      status: z9.string().optional().describe("Filter by status: active, resolved, or dismissed (default: all)"),
      limit: z9.number().optional().describe("Max results to return (default: 50)")
    }
  }, async ({ status, limit }) => {
    const result = await handleGetContradictions({ status, limit }, bundle.store);
    return { content: [{ type: "text", text: result }] };
  });
  // Contradiction resolution; action validation happens inside the handler.
  server.registerTool("resolve_contradiction", {
    title: "Resolve Contradiction",
    description: "Resolve a contradiction by choosing an action: supersede (entity A replaces B), keep_old (keep B, discard A), dismiss (not a real contradiction), both_valid (both are correct in context). Get contradiction IDs from get_contradictions first.",
    inputSchema: {
      id: z9.string().describe("The contradiction ID to resolve"),
      action: z9.string().describe("Resolution action: supersede, keep_old, dismiss, or both_valid")
    }
  }, async ({ id, action }) => {
    const result = await handleResolveContradiction({ id, action }, bundle.store);
    return { content: [{ type: "text", text: result }] };
  });
  return server;
}
3127
+
3128
// packages/mcp/dist/index.js
// Quiet logging by default: stdout carries the MCP protocol, so only surface
// errors unless a level was configured (||= treats "" as unset, like the
// original truthiness check).
process.env["CORTEX_LOG_LEVEL"] ||= "error";
3132
async function main() {
  // Boot the MCP server over stdio: load config, open the stores, register
  // the tools, then serve until the client disconnects or a signal arrives.
  const configDir = process.env["CORTEX_CONFIG_DIR"];
  const config8 = loadConfig({ configDir });
  const bundle = await createStoreBundle(configDir);
  const router = new Router({ config: config8 });
  const server = createCortexMcpServer(bundle, router);
  const transport = new StdioServerTransport();
  // Close the store before exiting on either termination signal.
  const shutdown = () => {
    bundle.cleanup();
    process.exit(0);
  };
  process.on("SIGINT", shutdown);
  process.on("SIGTERM", shutdown);
  await server.connect(transport);
}
3149
main().catch((err) => {
  // Last-resort error handler: report to stderr (stdout is reserved for the
  // MCP protocol) and exit non-zero.
  const reason = err instanceof Error ? err.message : String(err);
  process.stderr.write(`[cortex-mcp] Fatal: ${reason}\n`);
  process.exit(1);
});