opensentinel 3.1.1 → 3.6.1

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (325) hide show
  1. package/README.md +126 -83
  2. package/dist/agent-manager-7N7REQZQ.js +39 -0
  3. package/dist/agent-processor-I23VWQY3.js +280 -0
  4. package/dist/agent-processor-I23VWQY3.js.map +1 -0
  5. package/dist/agent-types-2T4PXLFQ.js +12 -0
  6. package/dist/alerting-4I37GG4U.js +699 -0
  7. package/dist/alerting-4I37GG4U.js.map +1 -0
  8. package/dist/analysis-agent-JWN2GXYE.js +288 -0
  9. package/dist/analysis-agent-JWN2GXYE.js.map +1 -0
  10. package/dist/{archiver-AVNBYCKQ.js → archiver-XLRIIXPY.js} +86 -17
  11. package/dist/archiver-XLRIIXPY.js.map +1 -0
  12. package/dist/{audit-logger-OBPR7CRO.js → audit-logger-AU3TMWKI.js} +6 -5
  13. package/dist/{auth-UOX5K2BE.js → auth-PH5IHISW.js} +2 -2
  14. package/dist/{autonomy-ZXDBDQUJ.js → autonomy-N7W5XPLX.js} +4 -3
  15. package/dist/autonomy-N7W5XPLX.js.map +1 -0
  16. package/dist/{aws-s3-Q4LLZZPD.js → aws-s3-QZMURYXB.js} +2 -2
  17. package/dist/{backup-restore-PZ7CYYB7.js → backup-restore-72OQTZO3.js} +2 -2
  18. package/dist/{blocks-R3PODY47.js → blocks-YOWOESDD.js} +4 -4
  19. package/dist/bot-MU2TJQ3Y.js +46 -0
  20. package/dist/brain-SLA474EU.js +65 -0
  21. package/dist/{camera-monitor-M5CYKUU4.js → camera-monitor-LHTUWHEL.js} +2 -2
  22. package/dist/{charts-V7ARZNKF.js → charts-FJ32GQK7.js} +2 -2
  23. package/dist/{chunk-6PMVAAA7.js → chunk-2RGPWU77.js} +3 -3
  24. package/dist/{chunk-TVEWKIK3.js → chunk-2WTKTG2C.js} +2 -2
  25. package/dist/{chunk-MXAPLSJ5.js → chunk-45YXODSB.js} +2 -2
  26. package/dist/{chunk-SJSUSJ47.js → chunk-4YJRBMMA.js} +2 -2
  27. package/dist/chunk-643M3AP5.js +564 -0
  28. package/dist/chunk-643M3AP5.js.map +1 -0
  29. package/dist/{chunk-766ASQWE.js → chunk-6JY4HNUH.js} +2413 -2368
  30. package/dist/chunk-6JY4HNUH.js.map +1 -0
  31. package/dist/chunk-6LTLIYAQ.js +194 -0
  32. package/dist/chunk-6LTLIYAQ.js.map +1 -0
  33. package/dist/chunk-6UZPE35A.js +724 -0
  34. package/dist/chunk-6UZPE35A.js.map +1 -0
  35. package/dist/chunk-6W6PTJFT.js +181 -0
  36. package/dist/chunk-6W6PTJFT.js.map +1 -0
  37. package/dist/chunk-7MZN73J2.js +162 -0
  38. package/dist/chunk-7MZN73J2.js.map +1 -0
  39. package/dist/{chunk-SVAPX2XN.js → chunk-A24GPVLY.js} +9 -7
  40. package/dist/{chunk-SVAPX2XN.js.map → chunk-A24GPVLY.js.map} +1 -1
  41. package/dist/chunk-AD6YEH6U.js +3408 -0
  42. package/dist/chunk-AD6YEH6U.js.map +1 -0
  43. package/dist/chunk-ADTDYJO7.js +265 -0
  44. package/dist/chunk-ADTDYJO7.js.map +1 -0
  45. package/dist/{chunk-WRAKK6K6.js → chunk-AR34B6XR.js} +5 -3
  46. package/dist/{chunk-WRAKK6K6.js.map → chunk-AR34B6XR.js.map} +1 -1
  47. package/dist/chunk-BMOUYXLX.js +418 -0
  48. package/dist/chunk-BMOUYXLX.js.map +1 -0
  49. package/dist/chunk-C6PELIHS.js +60 -0
  50. package/dist/chunk-C6PELIHS.js.map +1 -0
  51. package/dist/{chunk-MQJ2ECQT.js → chunk-CUPEENUY.js} +3 -3
  52. package/dist/{chunk-RZ4YESBG.js → chunk-DOYGMNMK.js} +1 -1
  53. package/dist/chunk-DOYGMNMK.js.map +1 -0
  54. package/dist/chunk-FFV2SXFD.js +380 -0
  55. package/dist/chunk-FFV2SXFD.js.map +1 -0
  56. package/dist/{chunk-EVE7MIIY.js → chunk-GUKKW7JI.js} +15 -16
  57. package/dist/chunk-GUKKW7JI.js.map +1 -0
  58. package/dist/{chunk-66OJ3WB4.js → chunk-H3BOLSTS.js} +2 -2
  59. package/dist/chunk-HKOPRRDJ.js +1021 -0
  60. package/dist/chunk-HKOPRRDJ.js.map +1 -0
  61. package/dist/{chunk-BXZ6EA52.js → chunk-HTF2GIQC.js} +57 -3
  62. package/dist/chunk-HTF2GIQC.js.map +1 -0
  63. package/dist/{chunk-TYAGMJNV.js → chunk-JOA5A3G3.js} +5 -5
  64. package/dist/{chunk-OCVQGBJK.js → chunk-KABG5PG3.js} +6 -4
  65. package/dist/{chunk-OCVQGBJK.js.map → chunk-KABG5PG3.js.map} +1 -1
  66. package/dist/{chunk-VEHFVBLI.js → chunk-KT7NLIXP.js} +2 -2
  67. package/dist/chunk-LFDXEYYB.js +150 -0
  68. package/dist/chunk-LFDXEYYB.js.map +1 -0
  69. package/dist/{chunk-I6BDYQIG.js → chunk-M7YLQHFP.js} +6 -6
  70. package/dist/chunk-M7YLQHFP.js.map +1 -0
  71. package/dist/{chunk-AYUKPTSM.js → chunk-MFK34XSY.js} +96 -218
  72. package/dist/chunk-MFK34XSY.js.map +1 -0
  73. package/dist/chunk-MIC5IBQF.js +386 -0
  74. package/dist/chunk-MIC5IBQF.js.map +1 -0
  75. package/dist/{chunk-4UOE5TUZ.js → chunk-NMSHVO5O.js} +4 -4
  76. package/dist/{chunk-XKYRH4FM.js → chunk-NYVBXUGD.js} +13 -32
  77. package/dist/chunk-NYVBXUGD.js.map +1 -0
  78. package/dist/chunk-ODCFS5WD.js +463 -0
  79. package/dist/chunk-ODCFS5WD.js.map +1 -0
  80. package/dist/{chunk-ZLZKF2PM.js → chunk-PUNIMPMY.js} +32 -2
  81. package/dist/chunk-PUNIMPMY.js.map +1 -0
  82. package/dist/chunk-S4NJJS5C.js +37 -0
  83. package/dist/chunk-S4NJJS5C.js.map +1 -0
  84. package/dist/{chunk-NHMBTUMW.js → chunk-TAAZB5KN.js} +2 -2
  85. package/dist/{chunk-BRBWNV65.js → chunk-U2X2J3FI.js} +3 -3
  86. package/dist/chunk-U2X2J3FI.js.map +1 -0
  87. package/dist/{chunk-PLDDJCW6.js → chunk-UP2VWCW5.js} +1 -12
  88. package/dist/{chunk-4GLYY4NN.js → chunk-UWUIJTT4.js} +8 -2
  89. package/dist/chunk-UWUIJTT4.js.map +1 -0
  90. package/dist/{chunk-SPPMCAKG.js → chunk-VKMFUIVA.js} +2 -2
  91. package/dist/chunk-VKMFUIVA.js.map +1 -0
  92. package/dist/chunk-WZAH34TG.js +129 -0
  93. package/dist/chunk-WZAH34TG.js.map +1 -0
  94. package/dist/{chunk-H5RQOFO2.js → chunk-X6Q3K3L2.js} +6 -6
  95. package/dist/chunk-X6Q3K3L2.js.map +1 -0
  96. package/dist/chunk-XTX7EK43.js +134 -0
  97. package/dist/chunk-XTX7EK43.js.map +1 -0
  98. package/dist/chunk-ZIYTHUM5.js +457 -0
  99. package/dist/chunk-ZIYTHUM5.js.map +1 -0
  100. package/dist/chunker-K6WTR62A.js +12 -0
  101. package/dist/cli.js +1 -1
  102. package/dist/{client-ZQSFPMOB.js → client-FOIYPOZQ.js} +5 -6
  103. package/dist/{clipboard-manager-TEO2GEDN.js → clipboard-manager-4SBNESGZ.js} +2 -2
  104. package/dist/coding-agent-DESSU3AC.js +233 -0
  105. package/dist/coding-agent-DESSU3AC.js.map +1 -0
  106. package/dist/commands/setup.js +1 -1
  107. package/dist/commands/start.js +2 -2
  108. package/dist/commands/status.js +1 -1
  109. package/dist/commands/stop.js +1 -1
  110. package/dist/commands/utils.js +1 -1
  111. package/dist/cost-tracker-EMOIOYH7.js +11 -0
  112. package/dist/{cron-explain-HHQKPD3M.js → cron-explain-UOOOYWZZ.js} +2 -2
  113. package/dist/{crypto-4AP47IKC.js → crypto-2VG3RJR2.js} +2 -2
  114. package/dist/{databases-37X4CI2Y.js → databases-XDPMG5AV.js} +4 -4
  115. package/dist/db-LRIOKQBO.js +77 -0
  116. package/dist/discord-NKR3X4AV.js +80 -0
  117. package/dist/documents-EYIYLZK2.js +184 -0
  118. package/dist/documents-EYIYLZK2.js.map +1 -0
  119. package/dist/docx-parser-EXL4TN5E.js +16 -0
  120. package/dist/{email-K7LO2IPB.js → email-EAQNULVD.js} +33 -25
  121. package/dist/{email-K7LO2IPB.js.map → email-EAQNULVD.js.map} +1 -1
  122. package/dist/{enhanced-retrieval-DNLLEM4Z.js → enhanced-retrieval-OGHT6TS5.js} +11 -8
  123. package/dist/{enhanced-retrieval-DNLLEM4Z.js.map → enhanced-retrieval-OGHT6TS5.js.map} +1 -1
  124. package/dist/enrichment-pipeline-CMUVBDC7.js +14 -0
  125. package/dist/{entity-resolution-Y3IUWEAT.js → entity-resolution-4X4JU43O.js} +6 -5
  126. package/dist/env-CHOFICED.js +12 -0
  127. package/dist/error-tracker-SVQSDQDW.js +32 -0
  128. package/dist/finnhub-X7ZMQSXF.js +178 -0
  129. package/dist/finnhub-X7ZMQSXF.js.map +1 -0
  130. package/dist/fred-TMUF3J2V.js +203 -0
  131. package/dist/fred-TMUF3J2V.js.map +1 -0
  132. package/dist/github-KGNILDWJ.js +833 -0
  133. package/dist/github-KGNILDWJ.js.map +1 -0
  134. package/dist/{google-workspace-DKWUVNGC.js → google-workspace-TSZPZK5G.js} +2 -2
  135. package/dist/{hash-tool-ULQYD7B5.js → hash-tool-ENAB5LWH.js} +2 -2
  136. package/dist/{heartbeat-monitor-GCISLXI3.js → heartbeat-monitor-KRDYTDBF.js} +2 -2
  137. package/dist/hooks-N4MIFBVM.js +14 -0
  138. package/dist/{image-generation-OSU7FP6F.js → image-generation-MDE6AVQO.js} +2 -2
  139. package/dist/imessage-V2XNDDHT.js +43 -0
  140. package/dist/inbox-summarizer-DKKRYXDR.js +55 -0
  141. package/dist/{incident-response-C5J7Q6DT.js → incident-response-ZTIKUWEO.js} +8 -6
  142. package/dist/{incident-response-C5J7Q6DT.js.map → incident-response-ZTIKUWEO.js.map} +1 -1
  143. package/dist/{inventory-manager-352OHXWD.js → inventory-manager-C67BSZM6.js} +2 -2
  144. package/dist/{jira-GSGDBMIG.js → jira-PAGZWUBJ.js} +2 -2
  145. package/dist/{json-tool-QE2SYHEG.js → json-tool-4FK5RNER.js} +2 -2
  146. package/dist/{key-rotation-DPHU4ZTB.js → key-rotation-WCC5FOYS.js} +2 -2
  147. package/dist/knowledge-base-J7PJ7MZ3.js +46 -0
  148. package/dist/lib.d.ts +73 -1
  149. package/dist/lib.js +86 -76
  150. package/dist/lib.js.map +1 -1
  151. package/dist/{mailchimp-KKNF6QJ7.js → mailchimp-ZFYDC44J.js} +2 -2
  152. package/dist/{matrix-QVHG76I7.js → matrix-XHTR53VQ.js} +29 -21
  153. package/dist/{matrix-QVHG76I7.js.map → matrix-XHTR53VQ.js.map} +1 -1
  154. package/dist/{mcp-3JI6W7ZE.js → mcp-3C2TN67D.js} +3 -3
  155. package/dist/metrics-VJDWQWU7.js +25 -0
  156. package/dist/{microsoft365-UCBKJHNX.js → microsoft365-6G2IJMWC.js} +2 -2
  157. package/dist/multi-user-S56GUD6L.js +411 -0
  158. package/dist/multi-user-S56GUD6L.js.map +1 -0
  159. package/dist/{ocr-AC7NPX33.js → ocr-LGUIPKVZ.js} +6 -4
  160. package/dist/{ollama-BOAMSPLJ.js → ollama-J7CU45WT.js} +2 -2
  161. package/dist/osint-agent-RL5XPBRQ.js +189 -0
  162. package/dist/osint-agent-RL5XPBRQ.js.map +1 -0
  163. package/dist/{pages-MI523RB7.js → pages-XDE7JRCA.js} +5 -5
  164. package/dist/{pair-JDFTERIK.js → pair-YZJFQUU5.js} +2 -2
  165. package/dist/{pairing-IFQYCPNS.js → pairing-77N47RAT.js} +2 -2
  166. package/dist/{pdf-ALQVOEJR.js → pdf-67HGXCFJ.js} +3 -3
  167. package/dist/pdf-parser-YLMTTYHL.js +14 -0
  168. package/dist/{presentations-DSV5IHG5.js → presentations-HXTAMGHT.js} +3 -3
  169. package/dist/presentations-HXTAMGHT.js.map +1 -0
  170. package/dist/{prometheus-JNT2BD4L.js → prometheus-YETCZO4I.js} +2 -2
  171. package/dist/{providers-J4LYPHDR.js → providers-H6YIC3MG.js} +6 -4
  172. package/dist/{qr-code-WIX4PB4U.js → qr-code-6WZJHRKL.js} +2 -2
  173. package/dist/{quickbooks-XB4NII2S.js → quickbooks-N675W7IK.js} +2 -2
  174. package/dist/{regex-tool-W4ABRKGK.js → regex-tool-6Q63LQ7B.js} +2 -2
  175. package/dist/regex-tool-6Q63LQ7B.js.map +1 -0
  176. package/dist/research-agent-WCRSY3UZ.js +168 -0
  177. package/dist/research-agent-WCRSY3UZ.js.map +1 -0
  178. package/dist/risk-engine-YKCPT5D5.js +10 -0
  179. package/dist/risk-engine-YKCPT5D5.js.map +1 -0
  180. package/dist/scheduler-CA5UNHZV.js +73 -0
  181. package/dist/scheduler-CA5UNHZV.js.map +1 -0
  182. package/dist/schema-ALJ67YVG.js +72 -0
  183. package/dist/schema-ALJ67YVG.js.map +1 -0
  184. package/dist/{search-BCLBO5E3.js → search-GMLKBHSW.js} +4 -4
  185. package/dist/search-GMLKBHSW.js.map +1 -0
  186. package/dist/{sendgrid-RNXCAFKM.js → sendgrid-QGJIVPWV.js} +2 -2
  187. package/dist/{shopify-NCXYJB4R.js → shopify-ON2PAU27.js} +2 -2
  188. package/dist/signal-X7IQJGRQ.js +43 -0
  189. package/dist/signal-X7IQJGRQ.js.map +1 -0
  190. package/dist/slack-P2LFUJUQ.js +85 -0
  191. package/dist/slack-P2LFUJUQ.js.map +1 -0
  192. package/dist/{sms-M3JIOTCW.js → sms-4VME2HUL.js} +4 -4
  193. package/dist/sms-4VME2HUL.js.map +1 -0
  194. package/dist/{src-VYUE6LRA.js → src-S5KX4YEV.js} +179 -48
  195. package/dist/src-S5KX4YEV.js.map +1 -0
  196. package/dist/{stocks-XXWBPOCU.js → stocks-4M4HZWZS.js} +2 -2
  197. package/dist/stocks-4M4HZWZS.js.map +1 -0
  198. package/dist/text-extractor-OAUBAW5P.js +12 -0
  199. package/dist/text-extractor-OAUBAW5P.js.map +1 -0
  200. package/dist/{text-transform-6SGUA5Z4.js → text-transform-HCLCUDFZ.js} +2 -2
  201. package/dist/text-transform-HCLCUDFZ.js.map +1 -0
  202. package/dist/tools-FGPN522P.js +46 -0
  203. package/dist/tools-FGPN522P.js.map +1 -0
  204. package/dist/{tunnel-IWMXUML4.js → tunnel-XOUVVRAK.js} +4 -2
  205. package/dist/tunnel-XOUVVRAK.js.map +1 -0
  206. package/dist/{twilio-53GEW5JT.js → twilio-3L7DUNYQ.js} +2 -2
  207. package/dist/{unit-converter-ZYXMEZOE.js → unit-converter-LYPAHU64.js} +2 -2
  208. package/dist/unit-converter-LYPAHU64.js.map +1 -0
  209. package/dist/whatsapp-KRPQ4YUX.js +43 -0
  210. package/dist/whatsapp-KRPQ4YUX.js.map +1 -0
  211. package/dist/{word-document-7B6SJMAY.js → word-document-D6N2C47N.js} +4 -4
  212. package/dist/word-document-D6N2C47N.js.map +1 -0
  213. package/dist/workflow-store-ZYAYE5P6.js +373 -0
  214. package/dist/workflow-store-ZYAYE5P6.js.map +1 -0
  215. package/dist/writing-agent-VDGLNOGO.js +243 -0
  216. package/dist/writing-agent-VDGLNOGO.js.map +1 -0
  217. package/dist/{xero-QYO66D45.js → xero-UHAHVYSD.js} +2 -2
  218. package/dist/{zapier-webhook-TBZ5YF2A.js → zapier-webhook-NIELLTXR.js} +2 -2
  219. package/package.json +11 -1
  220. package/dist/archiver-AVNBYCKQ.js.map +0 -1
  221. package/dist/autonomy-ZXDBDQUJ.js.map +0 -1
  222. package/dist/bot-QRARP4UN.js +0 -36
  223. package/dist/brain-7XLLM3KC.js +0 -56
  224. package/dist/chunk-4GLYY4NN.js.map +0 -1
  225. package/dist/chunk-766ASQWE.js.map +0 -1
  226. package/dist/chunk-AYUKPTSM.js.map +0 -1
  227. package/dist/chunk-BRBWNV65.js.map +0 -1
  228. package/dist/chunk-BXZ6EA52.js.map +0 -1
  229. package/dist/chunk-EVE7MIIY.js.map +0 -1
  230. package/dist/chunk-H5RQOFO2.js.map +0 -1
  231. package/dist/chunk-I6BDYQIG.js.map +0 -1
  232. package/dist/chunk-IZJMVV7O.js +0 -347
  233. package/dist/chunk-IZJMVV7O.js.map +0 -1
  234. package/dist/chunk-O7IH7JTI.js +0 -1898
  235. package/dist/chunk-O7IH7JTI.js.map +0 -1
  236. package/dist/chunk-RZ4YESBG.js.map +0 -1
  237. package/dist/chunk-SPPMCAKG.js.map +0 -1
  238. package/dist/chunk-VRD5CYRL.js +0 -1568
  239. package/dist/chunk-VRD5CYRL.js.map +0 -1
  240. package/dist/chunk-XKYRH4FM.js.map +0 -1
  241. package/dist/chunk-ZLZKF2PM.js.map +0 -1
  242. package/dist/discord-B3HUPGQ6.js +0 -70
  243. package/dist/dist-UISMLMFN.js +0 -21847
  244. package/dist/dist-UISMLMFN.js.map +0 -1
  245. package/dist/enrichment-pipeline-MNHNW65K.js +0 -13
  246. package/dist/env-IWXUVTCB.js +0 -12
  247. package/dist/imessage-NGA2XF2V.js +0 -35
  248. package/dist/inbox-summarizer-NRI4S7IF.js +0 -47
  249. package/dist/presentations-DSV5IHG5.js.map +0 -1
  250. package/dist/scheduler-VK4WFERV.js +0 -63
  251. package/dist/signal-6CGDFYL2.js +0 -35
  252. package/dist/slack-IZQWIKOH.js +0 -75
  253. package/dist/src-VYUE6LRA.js.map +0 -1
  254. package/dist/tools-2RLEI2N6.js +0 -38
  255. package/dist/tunnel-IWMXUML4.js.map +0 -1
  256. package/dist/whatsapp-LFX6YKCM.js +0 -35
  257. package/dist/word-document-7B6SJMAY.js.map +0 -1
  258. /package/dist/{audit-logger-OBPR7CRO.js.map → agent-manager-7N7REQZQ.js.map} +0 -0
  259. /package/dist/{auth-UOX5K2BE.js.map → agent-types-2T4PXLFQ.js.map} +0 -0
  260. /package/dist/{backup-restore-PZ7CYYB7.js.map → audit-logger-AU3TMWKI.js.map} +0 -0
  261. /package/dist/{blocks-R3PODY47.js.map → auth-PH5IHISW.js.map} +0 -0
  262. /package/dist/{aws-s3-Q4LLZZPD.js.map → aws-s3-QZMURYXB.js.map} +0 -0
  263. /package/dist/{bot-QRARP4UN.js.map → backup-restore-72OQTZO3.js.map} +0 -0
  264. /package/dist/{brain-7XLLM3KC.js.map → blocks-YOWOESDD.js.map} +0 -0
  265. /package/dist/{chunk-PLDDJCW6.js.map → bot-MU2TJQ3Y.js.map} +0 -0
  266. /package/dist/{client-ZQSFPMOB.js.map → brain-SLA474EU.js.map} +0 -0
  267. /package/dist/{camera-monitor-M5CYKUU4.js.map → camera-monitor-LHTUWHEL.js.map} +0 -0
  268. /package/dist/{charts-V7ARZNKF.js.map → charts-FJ32GQK7.js.map} +0 -0
  269. /package/dist/{chunk-6PMVAAA7.js.map → chunk-2RGPWU77.js.map} +0 -0
  270. /package/dist/{chunk-TVEWKIK3.js.map → chunk-2WTKTG2C.js.map} +0 -0
  271. /package/dist/{chunk-MXAPLSJ5.js.map → chunk-45YXODSB.js.map} +0 -0
  272. /package/dist/{chunk-SJSUSJ47.js.map → chunk-4YJRBMMA.js.map} +0 -0
  273. /package/dist/{chunk-MQJ2ECQT.js.map → chunk-CUPEENUY.js.map} +0 -0
  274. /package/dist/{chunk-66OJ3WB4.js.map → chunk-H3BOLSTS.js.map} +0 -0
  275. /package/dist/{chunk-TYAGMJNV.js.map → chunk-JOA5A3G3.js.map} +0 -0
  276. /package/dist/{chunk-VEHFVBLI.js.map → chunk-KT7NLIXP.js.map} +0 -0
  277. /package/dist/{chunk-4UOE5TUZ.js.map → chunk-NMSHVO5O.js.map} +0 -0
  278. /package/dist/{chunk-NHMBTUMW.js.map → chunk-TAAZB5KN.js.map} +0 -0
  279. /package/dist/{clipboard-manager-TEO2GEDN.js.map → chunk-UP2VWCW5.js.map} +0 -0
  280. /package/dist/{cron-explain-HHQKPD3M.js.map → chunker-K6WTR62A.js.map} +0 -0
  281. /package/dist/{crypto-4AP47IKC.js.map → client-FOIYPOZQ.js.map} +0 -0
  282. /package/dist/{databases-37X4CI2Y.js.map → clipboard-manager-4SBNESGZ.js.map} +0 -0
  283. /package/dist/{discord-B3HUPGQ6.js.map → cost-tracker-EMOIOYH7.js.map} +0 -0
  284. /package/dist/{enrichment-pipeline-MNHNW65K.js.map → cron-explain-UOOOYWZZ.js.map} +0 -0
  285. /package/dist/{entity-resolution-Y3IUWEAT.js.map → crypto-2VG3RJR2.js.map} +0 -0
  286. /package/dist/{env-IWXUVTCB.js.map → databases-XDPMG5AV.js.map} +0 -0
  287. /package/dist/{hash-tool-ULQYD7B5.js.map → db-LRIOKQBO.js.map} +0 -0
  288. /package/dist/{heartbeat-monitor-GCISLXI3.js.map → discord-NKR3X4AV.js.map} +0 -0
  289. /package/dist/{imessage-NGA2XF2V.js.map → docx-parser-EXL4TN5E.js.map} +0 -0
  290. /package/dist/{inbox-summarizer-NRI4S7IF.js.map → enrichment-pipeline-CMUVBDC7.js.map} +0 -0
  291. /package/dist/{inventory-manager-352OHXWD.js.map → entity-resolution-4X4JU43O.js.map} +0 -0
  292. /package/dist/{json-tool-QE2SYHEG.js.map → env-CHOFICED.js.map} +0 -0
  293. /package/dist/{key-rotation-DPHU4ZTB.js.map → error-tracker-SVQSDQDW.js.map} +0 -0
  294. /package/dist/{google-workspace-DKWUVNGC.js.map → google-workspace-TSZPZK5G.js.map} +0 -0
  295. /package/dist/{mcp-3JI6W7ZE.js.map → hash-tool-ENAB5LWH.js.map} +0 -0
  296. /package/dist/{ocr-AC7NPX33.js.map → heartbeat-monitor-KRDYTDBF.js.map} +0 -0
  297. /package/dist/{ollama-BOAMSPLJ.js.map → hooks-N4MIFBVM.js.map} +0 -0
  298. /package/dist/{image-generation-OSU7FP6F.js.map → image-generation-MDE6AVQO.js.map} +0 -0
  299. /package/dist/{pages-MI523RB7.js.map → imessage-V2XNDDHT.js.map} +0 -0
  300. /package/dist/{pairing-IFQYCPNS.js.map → inbox-summarizer-DKKRYXDR.js.map} +0 -0
  301. /package/dist/{pdf-ALQVOEJR.js.map → inventory-manager-C67BSZM6.js.map} +0 -0
  302. /package/dist/{jira-GSGDBMIG.js.map → jira-PAGZWUBJ.js.map} +0 -0
  303. /package/dist/{prometheus-JNT2BD4L.js.map → json-tool-4FK5RNER.js.map} +0 -0
  304. /package/dist/{providers-J4LYPHDR.js.map → key-rotation-WCC5FOYS.js.map} +0 -0
  305. /package/dist/{qr-code-WIX4PB4U.js.map → knowledge-base-J7PJ7MZ3.js.map} +0 -0
  306. /package/dist/{mailchimp-KKNF6QJ7.js.map → mailchimp-ZFYDC44J.js.map} +0 -0
  307. /package/dist/{regex-tool-W4ABRKGK.js.map → mcp-3C2TN67D.js.map} +0 -0
  308. /package/dist/{scheduler-VK4WFERV.js.map → metrics-VJDWQWU7.js.map} +0 -0
  309. /package/dist/{microsoft365-UCBKJHNX.js.map → microsoft365-6G2IJMWC.js.map} +0 -0
  310. /package/dist/{search-BCLBO5E3.js.map → ocr-LGUIPKVZ.js.map} +0 -0
  311. /package/dist/{signal-6CGDFYL2.js.map → ollama-J7CU45WT.js.map} +0 -0
  312. /package/dist/{slack-IZQWIKOH.js.map → pages-XDE7JRCA.js.map} +0 -0
  313. /package/dist/{pair-JDFTERIK.js.map → pair-YZJFQUU5.js.map} +0 -0
  314. /package/dist/{sms-M3JIOTCW.js.map → pairing-77N47RAT.js.map} +0 -0
  315. /package/dist/{stocks-XXWBPOCU.js.map → pdf-67HGXCFJ.js.map} +0 -0
  316. /package/dist/{text-transform-6SGUA5Z4.js.map → pdf-parser-YLMTTYHL.js.map} +0 -0
  317. /package/dist/{tools-2RLEI2N6.js.map → prometheus-YETCZO4I.js.map} +0 -0
  318. /package/dist/{unit-converter-ZYXMEZOE.js.map → providers-H6YIC3MG.js.map} +0 -0
  319. /package/dist/{whatsapp-LFX6YKCM.js.map → qr-code-6WZJHRKL.js.map} +0 -0
  320. /package/dist/{quickbooks-XB4NII2S.js.map → quickbooks-N675W7IK.js.map} +0 -0
  321. /package/dist/{sendgrid-RNXCAFKM.js.map → sendgrid-QGJIVPWV.js.map} +0 -0
  322. /package/dist/{shopify-NCXYJB4R.js.map → shopify-ON2PAU27.js.map} +0 -0
  323. /package/dist/{twilio-53GEW5JT.js.map → twilio-3L7DUNYQ.js.map} +0 -0
  324. /package/dist/{xero-QYO66D45.js.map → xero-UHAHVYSD.js.map} +0 -0
  325. /package/dist/{zapier-webhook-TBZ5YF2A.js.map → zapier-webhook-NIELLTXR.js.map} +0 -0
@@ -1 +1 @@
1
- {"version":3,"sources":["../src/core/memory/hybrid-search.ts","../src/core/memory/contextual-query.ts","../src/core/memory/hyde.ts","../src/core/memory/retrieval-cache.ts","../src/core/memory/reranker.ts","../src/core/memory/multi-step.ts","../src/core/memory/enhanced-retrieval.ts"],"sourcesContent":["/**\r\n * Hybrid Search — Vector + Keyword + Graph with Reciprocal Rank Fusion\r\n *\r\n * Combines three retrieval strategies:\r\n * 1. Vector search (pgvector cosine similarity)\r\n * 2. Keyword search (PostgreSQL tsvector/GIN)\r\n * 3. Graph-augmented search (entity relationship expansion)\r\n *\r\n * Results are fused using Reciprocal Rank Fusion (RRF).\r\n */\r\n\r\nimport { db } from \"../../db\";\r\nimport { sql } from \"drizzle-orm\";\r\nimport { generateEmbedding } from \"../memory\";\r\n\r\nexport interface HybridSearchResult {\r\n id: string;\r\n userId: string | null;\r\n type: string;\r\n content: string;\r\n importance: number;\r\n source: string | null;\r\n provenance: string | null;\r\n similarity: number; // Vector similarity score\r\n keywordRank: number; // Keyword search rank\r\n rrfScore: number; // Combined RRF score\r\n createdAt: Date;\r\n}\r\n\r\nexport interface HybridSearchOptions {\r\n userId?: string;\r\n limit?: number;\r\n since?: Date;\r\n until?: Date;\r\n minImportance?: number;\r\n includeKeyword?: boolean;\r\n includeGraph?: boolean;\r\n}\r\n\r\nconst RRF_K = 60; // RRF constant (standard value)\r\n\r\n/**\r\n * Vector search using pgvector cosine similarity\r\n */\r\nexport async function vectorSearch(\r\n query: string,\r\n userId?: string,\r\n limit = 10\r\n): Promise<Array<{ id: string; content: string; similarity: number; [key: string]: unknown }>> {\r\n const queryEmbedding = await generateEmbedding(query);\r\n\r\n const results = await db.execute(sql`\r\n SELECT\r\n id, user_id, type, content, importance, source, provenance,\r\n created_at,\r\n 1 - (embedding <=> ${JSON.stringify(queryEmbedding)}::vector) as similarity\r\n 
FROM memories\r\n ${userId ? sql`WHERE user_id = ${userId}` : sql``}\r\n ORDER BY embedding <=> ${JSON.stringify(queryEmbedding)}::vector\r\n LIMIT ${limit}\r\n `);\r\n\r\n return results as any[];\r\n}\r\n\r\n/**\r\n * Keyword search using PostgreSQL tsvector full-text search\r\n */\r\nexport async function keywordSearch(\r\n query: string,\r\n userId?: string,\r\n limit = 10\r\n): Promise<Array<{ id: string; content: string; keywordRank: number; [key: string]: unknown }>> {\r\n const results = await db.execute(sql`\r\n SELECT\r\n id, user_id, type, content, importance, source, provenance,\r\n created_at,\r\n ts_rank(search_vector, plainto_tsquery('english', ${query})) as keyword_rank\r\n FROM memories\r\n WHERE search_vector IS NOT NULL\r\n AND search_vector @@ plainto_tsquery('english', ${query})\r\n ${userId ? sql`AND user_id = ${userId}` : sql``}\r\n ORDER BY keyword_rank DESC\r\n LIMIT ${limit}\r\n `);\r\n\r\n return (results as any[]).map((r: any) => ({\r\n ...r,\r\n keywordRank: r.keyword_rank,\r\n }));\r\n}\r\n\r\n/**\r\n * Graph-augmented search: find entities matching query, expand to related memories\r\n */\r\nexport async function graphAugmentedSearch(\r\n query: string,\r\n userId?: string,\r\n limit = 10\r\n): Promise<Array<{ id: string; content: string; graphScore: number; [key: string]: unknown }>> {\r\n // Find matching graph entities\r\n const entities = await db.execute(sql`\r\n SELECT id, name, type\r\n FROM graph_entities\r\n WHERE name ILIKE ${'%' + query + '%'}\r\n ${userId ? 
sql`AND user_id = ${userId}` : sql``}\r\n LIMIT 5\r\n `);\r\n\r\n if ((entities as any[]).length === 0) {\r\n return [];\r\n }\r\n\r\n const entityIds = (entities as any[]).map((e: any) => e.id);\r\n\r\n // Find memories related to these entities via relationships\r\n // We search for entity names in memory content\r\n const entityNames = (entities as any[]).map((e: any) => e.name);\r\n const namePattern = entityNames.join(\"|\");\r\n\r\n const results = await db.execute(sql`\r\n SELECT\r\n id, user_id, type, content, importance, source, provenance,\r\n created_at,\r\n 1.0 as graph_score\r\n FROM memories\r\n WHERE content ~* ${namePattern}\r\n ${userId ? sql`AND user_id = ${userId}` : sql``}\r\n ORDER BY importance DESC, created_at DESC\r\n LIMIT ${limit}\r\n `);\r\n\r\n return (results as any[]).map((r: any) => ({\r\n ...r,\r\n graphScore: r.graph_score,\r\n }));\r\n}\r\n\r\n/**\r\n * Reciprocal Rank Fusion: combine ranked lists from multiple sources\r\n */\r\nfunction reciprocalRankFusion(\r\n rankedLists: Array<Array<{ id: string; [key: string]: unknown }>>,\r\n k = RRF_K\r\n): Map<string, number> {\r\n const scores = new Map<string, number>();\r\n\r\n for (const list of rankedLists) {\r\n for (let rank = 0; rank < list.length; rank++) {\r\n const id = list[rank].id;\r\n const rrfScore = 1 / (k + rank + 1);\r\n scores.set(id, (scores.get(id) || 0) + rrfScore);\r\n }\r\n }\r\n\r\n return scores;\r\n}\r\n\r\n/**\r\n * Hybrid search combining vector, keyword, and graph search with RRF\r\n */\r\nexport async function hybridSearch(\r\n query: string,\r\n options: HybridSearchOptions = {}\r\n): Promise<HybridSearchResult[]> {\r\n const {\r\n userId,\r\n limit = 10,\r\n since,\r\n until,\r\n includeKeyword = true,\r\n includeGraph = true,\r\n } = options;\r\n\r\n // Run searches in parallel\r\n const searchPromises: Array<Promise<any[]>> = [\r\n vectorSearch(query, userId, limit * 2), // Fetch more for better fusion\r\n ];\r\n\r\n if (includeKeyword) {\r\n 
searchPromises.push(keywordSearch(query, userId, limit * 2));\r\n }\r\n\r\n if (includeGraph) {\r\n searchPromises.push(graphAugmentedSearch(query, userId, limit));\r\n }\r\n\r\n const results = await Promise.all(searchPromises);\r\n const [vectorResults, keywordResults, graphResults] = results;\r\n\r\n // Build a map of all unique results\r\n const allResults = new Map<string, any>();\r\n for (const r of vectorResults || []) {\r\n allResults.set(r.id, r);\r\n }\r\n for (const r of keywordResults || []) {\r\n if (!allResults.has(r.id)) allResults.set(r.id, r);\r\n }\r\n for (const r of graphResults || []) {\r\n if (!allResults.has(r.id)) allResults.set(r.id, r);\r\n }\r\n\r\n // Compute RRF scores\r\n const rankedLists = [vectorResults || []];\r\n if (keywordResults) rankedLists.push(keywordResults);\r\n if (graphResults) rankedLists.push(graphResults);\r\n\r\n const rrfScores = reciprocalRankFusion(rankedLists);\r\n\r\n // Build final results\r\n let finalResults: HybridSearchResult[] = [];\r\n for (const [id, rrfScore] of rrfScores) {\r\n const data = allResults.get(id);\r\n if (!data) continue;\r\n\r\n finalResults.push({\r\n id: data.id,\r\n userId: data.user_id,\r\n type: data.type,\r\n content: data.content,\r\n importance: data.importance || 5,\r\n source: data.source,\r\n provenance: data.provenance,\r\n similarity: data.similarity || 0,\r\n keywordRank: data.keywordRank || data.keyword_rank || 0,\r\n rrfScore,\r\n createdAt: data.created_at,\r\n });\r\n }\r\n\r\n // Apply temporal filtering\r\n if (since) {\r\n finalResults = finalResults.filter((r) => new Date(r.createdAt) >= since);\r\n }\r\n if (until) {\r\n finalResults = finalResults.filter((r) => new Date(r.createdAt) <= until);\r\n }\r\n\r\n // Sort by RRF score descending, take top N\r\n return finalResults\r\n .sort((a, b) => b.rrfScore - a.rrfScore)\r\n .slice(0, limit);\r\n}\r\n","/**\n * Contextual Query Rewriting\n *\n * Rewrites user queries by incorporating conversation history to resolve\n 
* pronouns, references, and implicit context before retrieval. This improves\n * RAG accuracy by ensuring the search query is self-contained.\n *\n * Feature-gated behind env.CONTEXTUAL_QUERY_ENABLED.\n */\n\nimport { env } from \"../../config/env\";\nimport { providerRegistry } from \"../providers\";\n\n// ============================================\n// Types\n// ============================================\n\nexport interface Message {\n role: \"user\" | \"assistant\";\n content: string;\n}\n\nexport interface ContextualQueryOptions {\n /** Maximum number of recent messages to include for context. Default: 4 */\n maxHistoryMessages?: number;\n /** Override the LLM model used for rewriting. */\n model?: string;\n}\n\n// ============================================\n// Constants\n// ============================================\n\nconst DEFAULT_MAX_HISTORY_MESSAGES = 4;\nconst MAX_TOKENS = 200;\n\nconst SYSTEM_PROMPT =\n \"Rewrite this query to be self-contained by resolving pronouns, references, \" +\n \"and implicit context from the conversation. 
Return ONLY the rewritten query, nothing else.\";\n\n// ============================================\n// Main function\n// ============================================\n\n/**\n * Rewrites a user query to be self-contained by resolving pronouns,\n * references, and implicit context from the recent conversation history.\n *\n * If the feature is disabled, conversation history is too short (< 2 messages),\n * or the LLM call fails, the original query is returned unchanged.\n *\n * @param query - The user's current query\n * @param conversationHistory - Recent conversation messages\n * @param opts - Optional configuration\n * @returns The rewritten self-contained query, or the original on failure\n */\nexport async function buildContextualQuery(\n query: string,\n conversationHistory: Message[],\n opts?: ContextualQueryOptions\n): Promise<string> {\n try {\n // Feature gate: return original query if disabled\n if (!env.CONTEXTUAL_QUERY_ENABLED) {\n return query;\n }\n\n // Not enough context to resolve references\n if (!conversationHistory || conversationHistory.length < 2) {\n return query;\n }\n\n const maxHistory = opts?.maxHistoryMessages ?? DEFAULT_MAX_HISTORY_MESSAGES;\n\n // Take only the last N messages from history\n const recentHistory = conversationHistory.slice(-maxHistory);\n\n // Build the conversation context for the LLM\n const conversationText = recentHistory\n .map((msg) => `${msg.role === \"user\" ? \"User\" : \"Assistant\"}: ${msg.content}`)\n .join(\"\\n\");\n\n const userPrompt =\n `Conversation so far:\\n${conversationText}\\n\\nCurrent query to rewrite:\\n${query}`;\n\n const provider = providerRegistry.getDefault();\n const model = opts?.model ?? 
\"claude-sonnet-4-5-20250929\";\n\n const response = await provider.createMessage({\n model,\n max_tokens: MAX_TOKENS,\n system: SYSTEM_PROMPT,\n messages: [\n {\n role: \"user\",\n content: userPrompt,\n },\n ],\n });\n\n // Extract text from the response content blocks\n const rewrittenQuery = response.content\n .filter((block) => block.type === \"text\" && block.text)\n .map((block) => block.text!)\n .join(\"\")\n .trim();\n\n // If the LLM returned an empty response, fall back to the original query\n if (!rewrittenQuery) {\n return query;\n }\n\n return rewrittenQuery;\n } catch {\n // On any failure, return the original query unchanged\n return query;\n }\n}\n","/**\r\n * HyDE — Hypothetical Document Embeddings\r\n *\r\n * Instead of embedding the raw user query for retrieval, this module\r\n * asks an LLM to generate a hypothetical \"ideal answer\" document,\r\n * embeds *that*, and uses the resulting vector for similarity search.\r\n *\r\n * This dramatically improves retrieval quality because the hypothetical\r\n * document lives in the same semantic space as stored memories/documents,\r\n * whereas a short question often does not.\r\n *\r\n * Pipeline:\r\n * 1. User query → LLM generates hypothetical document\r\n * 2. Hypothetical document → embedding via OpenAI\r\n * 3. Embedding → pgvector cosine similarity search\r\n * 4. Original query → keyword search (tsvector)\r\n * 5. 
Results merged via Reciprocal Rank Fusion (RRF)\r\n *\r\n * Gated behind env.HYDE_ENABLED.\r\n */\r\n\r\nimport { db } from \"../../db\";\r\nimport { sql } from \"drizzle-orm\";\r\nimport { env } from \"../../config/env\";\r\nimport { providerRegistry } from \"../providers\";\r\nimport { generateEmbedding } from \"../memory\";\r\nimport {\r\n hybridSearch,\r\n keywordSearch,\r\n type HybridSearchResult,\r\n} from \"./hybrid-search\";\r\n\r\n// ============================================\r\n// Types\r\n// ============================================\r\n\r\nexport interface HyDEOptions {\r\n /** Max tokens for the hypothetical document generation (default: 300) */\r\n maxTokens?: number;\r\n /** Override the system prompt used for hypothetical document generation */\r\n systemPrompt?: string;\r\n}\r\n\r\nexport interface HyDESearchResult extends HybridSearchResult {\r\n /** The hypothetical document that was generated and embedded for retrieval */\r\n hydeDocument: string;\r\n}\r\n\r\n// ============================================\r\n// Constants\r\n// ============================================\r\n\r\nconst RRF_K = 60;\r\n\r\nconst DEFAULT_SYSTEM_PROMPT =\r\n \"You are a helpful assistant. Generate a detailed document that would perfectly answer the following question. Write as if this document already exists in a knowledge base. Be specific and factual.\";\r\n\r\nconst DEFAULT_MAX_TOKENS = 300;\r\n\r\n// ============================================\r\n// Hypothetical Document Generation\r\n// ============================================\r\n\r\n/**\r\n * Generate a hypothetical document that would perfectly answer the given query.\r\n *\r\n * Uses the default LLM provider to produce a ~200-word passage written as if\r\n * it were an existing knowledge-base entry. 
This passage is later embedded\r\n * so that the embedding sits closer to relevant stored documents than the\r\n * raw question would.\r\n */\r\nexport async function generateHypotheticalDocument(\r\n query: string,\r\n opts?: HyDEOptions\r\n): Promise<string> {\r\n const systemPrompt = opts?.systemPrompt ?? DEFAULT_SYSTEM_PROMPT;\r\n const maxTokens = opts?.maxTokens ?? DEFAULT_MAX_TOKENS;\r\n\r\n const provider = providerRegistry.getDefault();\r\n\r\n const response = await provider.createMessage({\r\n model: \"claude-sonnet-4-20250514\",\r\n max_tokens: maxTokens,\r\n system: systemPrompt,\r\n messages: [\r\n {\r\n role: \"user\",\r\n content: query,\r\n },\r\n ],\r\n });\r\n\r\n // Extract the text from the response content blocks\r\n const text = response.content\r\n .filter((block) => block.type === \"text\")\r\n .map((block) => block.text ?? \"\")\r\n .join(\"\");\r\n\r\n return text;\r\n}\r\n\r\n// ============================================\r\n// Vector Search with Pre-computed Embedding\r\n// ============================================\r\n\r\n/**\r\n * Run pgvector cosine similarity search using a pre-computed embedding\r\n * rather than generating one from a text query. This is the core of HyDE:\r\n * we embed the hypothetical document and search with that vector.\r\n */\r\nasync function vectorSearchWithEmbedding(\r\n embedding: number[],\r\n userId?: string,\r\n limit = 10\r\n): Promise<\r\n Array<{\r\n id: string;\r\n user_id: string | null;\r\n type: string;\r\n content: string;\r\n importance: number;\r\n source: string | null;\r\n provenance: string | null;\r\n created_at: Date;\r\n similarity: number;\r\n }>\r\n> {\r\n const embeddingStr = JSON.stringify(embedding);\r\n\r\n const results = await db.execute(sql`\r\n SELECT\r\n id, user_id, type, content, importance, source, provenance,\r\n created_at,\r\n 1 - (embedding <=> ${embeddingStr}::vector) as similarity\r\n FROM memories\r\n ${userId ? 
sql`WHERE user_id = ${userId}` : sql``}\r\n ORDER BY embedding <=> ${embeddingStr}::vector\r\n LIMIT ${limit}\r\n `);\r\n\r\n return results as any[];\r\n}\r\n\r\n// ============================================\r\n// Reciprocal Rank Fusion\r\n// ============================================\r\n\r\n/**\r\n * Combine ranked lists from multiple retrieval strategies using RRF.\r\n * Each item receives score = sum over lists of 1/(k + rank + 1).\r\n */\r\nfunction reciprocalRankFusion(\r\n rankedLists: Array<Array<{ id: string; [key: string]: unknown }>>,\r\n k = RRF_K\r\n): Map<string, number> {\r\n const scores = new Map<string, number>();\r\n\r\n for (const list of rankedLists) {\r\n for (let rank = 0; rank < list.length; rank++) {\r\n const id = list[rank].id;\r\n const rrfScore = 1 / (k + rank + 1);\r\n scores.set(id, (scores.get(id) || 0) + rrfScore);\r\n }\r\n }\r\n\r\n return scores;\r\n}\r\n\r\n// ============================================\r\n// HyDE Search Pipeline\r\n// ============================================\r\n\r\n/**\r\n * Full HyDE search pipeline:\r\n *\r\n * 1. Generate a hypothetical document from the query via LLM\r\n * 2. Embed the hypothetical document\r\n * 3. Run pgvector cosine similarity with that embedding\r\n * 4. Run keyword search with the *original* query (keywords matter!)\r\n * 5. 
Fuse results with Reciprocal Rank Fusion\r\n *\r\n * If HyDE is disabled (env.HYDE_ENABLED === false) or the LLM call fails,\r\n * this falls back to the standard hybridSearch with the original query.\r\n */\r\nexport async function hydeSearch(\r\n query: string,\r\n userId?: string,\r\n limit = 10\r\n): Promise<HyDESearchResult[]> {\r\n // Gate check: fall back to regular hybrid search if HyDE is disabled\r\n if (!env.HYDE_ENABLED) {\r\n const results = await hybridSearch(query, { userId, limit });\r\n return results.map((r) => ({ ...r, hydeDocument: \"\" }));\r\n }\r\n\r\n // Step 1: Generate hypothetical document\r\n let hydeDocument: string;\r\n try {\r\n hydeDocument = await generateHypotheticalDocument(query);\r\n } catch (error) {\r\n console.warn(\"[HyDE] LLM generation failed, falling back to hybrid search:\", error);\r\n const results = await hybridSearch(query, { userId, limit });\r\n return results.map((r) => ({ ...r, hydeDocument: \"\" }));\r\n }\r\n\r\n if (!hydeDocument || hydeDocument.trim().length === 0) {\r\n console.warn(\"[HyDE] Empty hypothetical document, falling back to hybrid search\");\r\n const results = await hybridSearch(query, { userId, limit });\r\n return results.map((r) => ({ ...r, hydeDocument: \"\" }));\r\n }\r\n\r\n // Step 2: Embed the hypothetical document\r\n let hydeEmbedding: number[];\r\n try {\r\n hydeEmbedding = await generateEmbedding(hydeDocument);\r\n } catch (error) {\r\n console.warn(\"[HyDE] Embedding generation failed, falling back to hybrid search:\", error);\r\n const results = await hybridSearch(query, { userId, limit });\r\n return results.map((r) => ({ ...r, hydeDocument: \"\" }));\r\n }\r\n\r\n // Step 3 & 4: Run vector search (with HyDE embedding) and keyword search (with original query) in parallel\r\n const fetchLimit = limit * 2; // Over-fetch for better fusion\r\n\r\n const [vectorResults, kwResults] = await Promise.all([\r\n vectorSearchWithEmbedding(hydeEmbedding, userId, fetchLimit),\r\n 
keywordSearch(query, userId, fetchLimit),\r\n ]);\r\n\r\n // Build a map of all unique results keyed by id\r\n const allResults = new Map<string, any>();\r\n for (const r of vectorResults) {\r\n allResults.set(r.id, r);\r\n }\r\n for (const r of kwResults) {\r\n if (!allResults.has(r.id)) {\r\n allResults.set(r.id, r);\r\n }\r\n }\r\n\r\n // Step 5: RRF fusion\r\n const rrfScores = reciprocalRankFusion([vectorResults, kwResults]);\r\n\r\n // Build final results\r\n const finalResults: HyDESearchResult[] = [];\r\n for (const [id, rrfScore] of rrfScores) {\r\n const data = allResults.get(id);\r\n if (!data) continue;\r\n\r\n finalResults.push({\r\n id: data.id,\r\n userId: data.user_id,\r\n type: data.type,\r\n content: data.content,\r\n importance: data.importance || 5,\r\n source: data.source,\r\n provenance: data.provenance,\r\n similarity: data.similarity || 0,\r\n keywordRank: data.keywordRank || data.keyword_rank || 0,\r\n rrfScore,\r\n createdAt: data.created_at,\r\n hydeDocument,\r\n });\r\n }\r\n\r\n // Sort by RRF score descending and take top N\r\n return finalResults\r\n .sort((a, b) => b.rrfScore - a.rrfScore)\r\n .slice(0, limit);\r\n}\r\n","/**\n * Retrieval Cache — Redis-backed cache for RAG pipeline search results\n *\n * Caches hybrid search results keyed by embedding hash to avoid\n * redundant vector searches. 
Feature-gated behind RETRIEVAL_CACHE_ENABLED.\n */\n\nimport Redis from \"ioredis\";\nimport { createHash } from \"crypto\";\nimport { env } from \"../../config/env\";\nimport type { HybridSearchResult } from \"./hybrid-search\";\n\n// ---------------------------------------------------------------------------\n// Types\n// ---------------------------------------------------------------------------\n\nexport interface CachedResult {\n results: HybridSearchResult[];\n cachedAt: number;\n queryHash: string;\n}\n\nexport interface CacheOptions {\n ttlSeconds?: number;\n maxCacheSize?: number;\n}\n\n// ---------------------------------------------------------------------------\n// Constants\n// ---------------------------------------------------------------------------\n\nconst KEY_PREFIX = \"rag:cache:\";\nconst DEFAULT_TTL_SECONDS = 3600;\n\n// ---------------------------------------------------------------------------\n// RetrievalCache\n// ---------------------------------------------------------------------------\n\nexport class RetrievalCache {\n private redis: Redis | null = null;\n private connecting = false;\n\n /**\n * Lazily create / return a Redis client.\n * Returns null if Redis is unavailable.\n */\n private async getClient(): Promise<Redis | null> {\n if (this.redis) return this.redis;\n if (this.connecting) return null;\n\n try {\n this.connecting = true;\n const client = new Redis(env.REDIS_URL, {\n maxRetriesPerRequest: null,\n lazyConnect: true,\n });\n await client.connect();\n this.redis = client;\n return this.redis;\n } catch {\n this.redis = null;\n return null;\n } finally {\n this.connecting = false;\n }\n }\n\n /**\n * Hash an embedding vector for use as a cache key.\n *\n * Rounds each component to 4 decimal places to tolerate minor\n * floating-point differences, then produces a SHA-256 hex digest.\n */\n private hashEmbedding(embedding: number[]): string {\n const rounded = embedding.map((v) => v.toFixed(4)).join(\",\");\n return 
createHash(\"sha256\").update(rounded).digest(\"hex\");\n }\n\n /**\n * Look up cached search results for the given embedding.\n * Returns null on cache miss or if the cache is disabled / unavailable.\n */\n async getCachedResults(queryEmbedding: number[]): Promise<CachedResult | null> {\n if (!env.RETRIEVAL_CACHE_ENABLED) return null;\n\n try {\n const client = await this.getClient();\n if (!client) return null;\n\n const hash = this.hashEmbedding(queryEmbedding);\n const key = `${KEY_PREFIX}${hash}`;\n const raw = await client.get(key);\n\n if (!raw) return null;\n\n const cached: CachedResult = JSON.parse(raw);\n\n // Rehydrate Date objects that were serialised as strings\n cached.results = cached.results.map((r) => ({\n ...r,\n createdAt: new Date(r.createdAt),\n }));\n\n return cached;\n } catch {\n return null;\n }\n }\n\n /**\n * Store search results in Redis, keyed by embedding hash.\n *\n * @param queryEmbedding - The embedding vector used for the search.\n * @param results - The hybrid search results to cache.\n * @param ttl - Time-to-live in seconds (default 3600).\n */\n async cacheResults(\n queryEmbedding: number[],\n results: HybridSearchResult[],\n ttl: number = DEFAULT_TTL_SECONDS,\n ): Promise<void> {\n if (!env.RETRIEVAL_CACHE_ENABLED) return;\n\n try {\n const client = await this.getClient();\n if (!client) return;\n\n const hash = this.hashEmbedding(queryEmbedding);\n const key = `${KEY_PREFIX}${hash}`;\n\n const entry: CachedResult = {\n results,\n cachedAt: Date.now(),\n queryHash: hash,\n };\n\n await client.set(key, JSON.stringify(entry), \"EX\", ttl);\n } catch {\n // Fail silently — caching is best-effort\n }\n }\n\n /**\n * Remove cache entries matching a key pattern.\n *\n * @param pattern - Glob pattern appended to the key prefix.\n * Defaults to `*` (all retrieval cache keys).\n * @returns The number of keys deleted.\n */\n async invalidateCache(pattern?: string): Promise<number> {\n try {\n const client = await this.getClient();\n if 
(!client) return 0;\n\n const scanPattern = `${KEY_PREFIX}${pattern ?? \"*\"}`;\n let deleted = 0;\n let cursor = \"0\";\n\n do {\n const [nextCursor, keys] = await client.scan(\n cursor,\n \"MATCH\",\n scanPattern,\n \"COUNT\",\n 100,\n );\n cursor = nextCursor;\n\n if (keys.length > 0) {\n deleted += await client.del(...keys);\n }\n } while (cursor !== \"0\");\n\n return deleted;\n } catch {\n return 0;\n }\n }\n}\n\n// ---------------------------------------------------------------------------\n// Singleton factory\n// ---------------------------------------------------------------------------\n\nlet instance: RetrievalCache | null = null;\n\n/**\n * Lazy singleton factory.\n * Returns the same RetrievalCache instance across the process lifetime.\n */\nexport function getRetrievalCache(): RetrievalCache {\n if (!instance) {\n instance = new RetrievalCache();\n }\n return instance;\n}\n","/**\n * Cross-Encoder Re-ranking — LLM-as-Judge relevance scoring\n *\n * After initial retrieval (vector + keyword + graph), this module sends\n * query-document pairs to an LLM in batches and asks it to score relevance\n * on a 0-10 scale. Results are then re-sorted by true semantic relevance\n * rather than embedding distance alone.\n *\n * Feature-gated behind env.RERANK_ENABLED.\n */\n\nimport { env } from \"../../config/env\";\nimport { providerRegistry } from \"../providers\";\nimport type { HybridSearchResult } from \"./hybrid-search\";\n\n// ---------------------------------------------------------------------------\n// Types\n// ---------------------------------------------------------------------------\n\nexport interface RankedResult extends HybridSearchResult {\n /** Relevance score assigned by the cross-encoder LLM judge (0-10) */\n rerankScore: number;\n}\n\nexport interface RerankOptions {\n /** Maximum number of results to return after re-ranking */\n topK?: number;\n /** Minimum relevance score to keep a result (0-10). 
Defaults to env.RERANK_MIN_SCORE */\n minScore?: number;\n /** Override the model used for scoring */\n model?: string;\n}\n\n// ---------------------------------------------------------------------------\n// Constants\n// ---------------------------------------------------------------------------\n\nconst DEFAULT_BATCH_SIZE = 5;\nconst DEFAULT_MAX_TOKENS = 100;\nconst DEFAULT_SCORE = 5;\nconst DEFAULT_MODEL = \"claude-sonnet-4-20250514\";\n\n// ---------------------------------------------------------------------------\n// Prompt\n// ---------------------------------------------------------------------------\n\nfunction buildScoringPrompt(query: string, documents: string[]): string {\n const docList = documents\n .map((doc, i) => `${i + 1}. ${doc}`)\n .join(\"\\n\");\n\n return (\n `You are a relevance judge. Given a search query and a list of retrieved documents, ` +\n `rate each document's relevance to the query on a scale of 0-10.\\n\\n` +\n `Query: ${query}\\n\\n` +\n `Documents:\\n${docList}\\n\\n` +\n `Return a JSON array of scores: [score1, score2, ...]\\n` +\n `Only return the JSON array, nothing else.`\n );\n}\n\n// ---------------------------------------------------------------------------\n// Score parsing\n// ---------------------------------------------------------------------------\n\n/**\n * Parse the LLM response into an array of numeric scores.\n * First tries JSON.parse; if that fails, falls back to extracting numbers\n * from the raw text.\n */\nfunction parseScores(text: string, expectedCount: number): number[] {\n // Try strict JSON parse first\n try {\n const parsed = JSON.parse(text.trim());\n if (Array.isArray(parsed)) {\n return parsed.map((s) => {\n const n = Number(s);\n return Number.isFinite(n) ? 
Math.min(10, Math.max(0, n)) : DEFAULT_SCORE;\n });\n }\n } catch {\n // Fall through to regex extraction\n }\n\n // Fallback: extract all numbers from the text\n const matches = text.match(/\\d+(?:\\.\\d+)?/g);\n if (matches && matches.length > 0) {\n return matches.slice(0, expectedCount).map((m) => {\n const n = Number(m);\n return Number.isFinite(n) ? Math.min(10, Math.max(0, n)) : DEFAULT_SCORE;\n });\n }\n\n // Last resort: return default scores\n return new Array(expectedCount).fill(DEFAULT_SCORE);\n}\n\n// ---------------------------------------------------------------------------\n// Batch re-ranking\n// ---------------------------------------------------------------------------\n\n/**\n * Internal helper: groups results into batches and sends each batch as a\n * single LLM call for efficiency.\n */\nexport async function batchRerank(\n query: string,\n results: HybridSearchResult[],\n batchSize: number = DEFAULT_BATCH_SIZE,\n model?: string\n): Promise<RankedResult[]> {\n const provider = providerRegistry.getDefault();\n const resolvedModel = model || DEFAULT_MODEL;\n const rankedResults: RankedResult[] = [];\n\n // Split results into batches\n const batches: HybridSearchResult[][] = [];\n for (let i = 0; i < results.length; i += batchSize) {\n batches.push(results.slice(i, i + batchSize));\n }\n\n // Process each batch\n const batchPromises = batches.map(async (batch) => {\n const documents = batch.map((r) => r.content);\n const prompt = buildScoringPrompt(query, documents);\n\n try {\n const response = await provider.createMessage({\n model: resolvedModel,\n max_tokens: DEFAULT_MAX_TOKENS,\n system: \"You are a relevance scoring assistant. 
Only output valid JSON.\",\n messages: [{ role: \"user\", content: prompt }],\n });\n\n // Extract text from response\n const responseText = response.content\n .filter((block) => block.type === \"text\")\n .map((block) => block.text || \"\")\n .join(\"\");\n\n const scores = parseScores(responseText, batch.length);\n\n return batch.map((result, idx) => ({\n ...result,\n rerankScore: idx < scores.length ? scores[idx] : DEFAULT_SCORE,\n }));\n } catch (error) {\n // If LLM call fails, assign default score to all results in this batch\n console.warn(\n `[Reranker] LLM scoring failed for batch, assigning default score of ${DEFAULT_SCORE}:`,\n error instanceof Error ? error.message : error\n );\n return batch.map((result) => ({\n ...result,\n rerankScore: DEFAULT_SCORE,\n }));\n }\n });\n\n const batchResults = await Promise.all(batchPromises);\n for (const batch of batchResults) {\n rankedResults.push(...batch);\n }\n\n return rankedResults;\n}\n\n// ---------------------------------------------------------------------------\n// Public API\n// ---------------------------------------------------------------------------\n\n/**\n * Re-rank search results using an LLM as a cross-encoder relevance judge.\n *\n * Sends query + results to the LLM in batches, gets relevance scores (0-10),\n * filters by minimum score, and returns results sorted by relevance.\n *\n * If RERANK_ENABLED is false, returns results as-is with a default rerankScore.\n */\nexport async function rerank(\n query: string,\n results: HybridSearchResult[],\n opts?: RerankOptions\n): Promise<RankedResult[]> {\n try {\n const minScore = opts?.minScore ?? 
env.RERANK_MIN_SCORE;\n\n // If re-ranking is disabled, pass through with default scores\n if (!env.RERANK_ENABLED) {\n return results.map((r) => ({ ...r, rerankScore: DEFAULT_SCORE }));\n }\n\n // No point re-ranking 0 or 1 results\n if (results.length <= 1) {\n return results.map((r) => ({ ...r, rerankScore: 10 }));\n }\n\n // Score all results via batched LLM calls\n let ranked = await batchRerank(query, results, DEFAULT_BATCH_SIZE, opts?.model);\n\n // Filter out results below the minimum score threshold\n ranked = ranked.filter((r) => r.rerankScore >= minScore);\n\n // Sort by rerankScore descending\n ranked.sort((a, b) => b.rerankScore - a.rerankScore);\n\n // Apply topK limit if specified\n if (opts?.topK && opts.topK > 0) {\n ranked = ranked.slice(0, opts.topK);\n }\n\n return ranked;\n } catch (error) {\n // If anything goes wrong, return original results with default scores\n console.error(\n \"[Reranker] Re-ranking failed, returning original results:\",\n error instanceof Error ? error.message : error\n );\n return results.map((r) => ({ ...r, rerankScore: DEFAULT_SCORE }));\n }\n}\n","/**\n * Recursive / Multi-Step RAG\n *\n * After initial retrieval + re-ranking, this module analyses whether the\n * retrieved context fully answers the user's query. 
When gaps are detected\n * it generates targeted follow-up queries, retrieves additional memories,\n * re-ranks them, and merges them with the existing result set.\n *\n * The loop repeats up to `maxSteps` iterations (default from\n * env.MULTISTEP_MAX_STEPS, itself defaulting to 2).\n *\n * Feature-gated behind env.MULTISTEP_RAG_ENABLED.\n */\n\nimport { env } from \"../../config/env\";\nimport { providerRegistry } from \"../providers\";\nimport { hybridSearch, type HybridSearchResult } from \"./hybrid-search\";\nimport { rerank, type RankedResult } from \"./reranker\";\n\n// ---------------------------------------------------------------------------\n// Types\n// ---------------------------------------------------------------------------\n\nexport interface MultiStepOptions {\n maxSteps?: number;\n userId?: string;\n limit?: number;\n}\n\nexport interface MultiStepResult {\n results: RankedResult[];\n steps: number;\n followUpQueries: string[];\n}\n\nexport interface CompletenessEvaluation {\n complete: boolean;\n gaps: string[];\n followUpQueries: string[];\n}\n\n// ---------------------------------------------------------------------------\n// Constants\n// ---------------------------------------------------------------------------\n\nconst COMPLETENESS_MAX_TOKENS = 300;\nconst COMPLETENESS_MODEL = \"claude-sonnet-4-5-20250929\";\n\n// ---------------------------------------------------------------------------\n// Completeness evaluation\n// ---------------------------------------------------------------------------\n\n/**\n * Uses the LLM to judge whether the retrieved context fully answers the\n * query. 
Returns a structured evaluation with any identified gaps and\n * suggested follow-up queries.\n */\nexport async function evaluateCompleteness(\n query: string,\n context: string\n): Promise<CompletenessEvaluation> {\n try {\n const provider = providerRegistry.getDefault();\n\n const response = await provider.createMessage({\n model: COMPLETENESS_MODEL,\n max_tokens: COMPLETENESS_MAX_TOKENS,\n system: \"You are a retrieval evaluation assistant. Only output valid JSON.\",\n messages: [\n {\n role: \"user\",\n content:\n `Analyze whether the following context sufficiently answers the query. Return a JSON object with:\\n` +\n `- \"complete\": boolean (true if context fully answers the query)\\n` +\n `- \"gaps\": string[] (list of missing information)\\n` +\n `- \"followUpQueries\": string[] (search queries to fill the gaps, max 2)\\n\\n` +\n `Query: ${query}\\n\\n` +\n `Context:\\n${context}\\n\\n` +\n `Return only the JSON object.`,\n },\n ],\n });\n\n const responseText = response.content\n .filter((block) => block.type === \"text\")\n .map((block) => block.text || \"\")\n .join(\"\");\n\n const parsed = JSON.parse(responseText.trim());\n\n return {\n complete: Boolean(parsed.complete),\n gaps: Array.isArray(parsed.gaps) ? parsed.gaps : [],\n followUpQueries: Array.isArray(parsed.followUpQueries)\n ? parsed.followUpQueries.slice(0, 2)\n : [],\n };\n } catch (error) {\n // If LLM call or JSON parse fails, assume complete (stop iterating)\n console.warn(\n \"[MultiStepRAG] Completeness evaluation failed, assuming complete:\",\n error instanceof Error ? error.message : error\n );\n return { complete: true, gaps: [], followUpQueries: [] };\n }\n}\n\n// ---------------------------------------------------------------------------\n// Main multi-step retrieval\n// ---------------------------------------------------------------------------\n\n/**\n * Performs recursive multi-step retrieval-augmented generation.\n *\n * 1. 
Evaluates whether `initialResults` fully answer the `query`.\n * 2. If gaps are found, generates follow-up queries, retrieves more\n * context via hybrid search + re-ranking, merges and deduplicates.\n * 3. Repeats up to `maxSteps` times.\n *\n * Returns the merged, deduplicated, and sorted result set together with\n * metadata about how many steps were taken and which follow-up queries\n * were used.\n */\nexport async function multiStepRetrieve(\n query: string,\n initialResults: RankedResult[],\n opts?: MultiStepOptions\n): Promise<MultiStepResult> {\n // If feature is disabled, return initial results unchanged\n if (!env.MULTISTEP_RAG_ENABLED) {\n return {\n results: initialResults,\n steps: 0,\n followUpQueries: [],\n };\n }\n\n const maxSteps = opts?.maxSteps ?? env.MULTISTEP_MAX_STEPS;\n const userId = opts?.userId;\n const limit = opts?.limit ?? 10;\n\n // Use a Map keyed by memory id for deduplication\n const resultsById = new Map<string, RankedResult>();\n for (const r of initialResults) {\n resultsById.set(r.id, r);\n }\n\n const allFollowUpQueries: string[] = [];\n let stepsPerformed = 0;\n\n for (let step = 0; step < maxSteps; step++) {\n // Build context string from current result set\n const currentResults = Array.from(resultsById.values());\n const contextText = currentResults.map((r) => r.content).join(\"\\n\\n\");\n\n // Evaluate completeness\n const evaluation = await evaluateCompleteness(query, contextText);\n\n if (evaluation.complete || evaluation.followUpQueries.length === 0) {\n break;\n }\n\n stepsPerformed++;\n\n // Limit to 2 follow-up queries per step\n const followUps = evaluation.followUpQueries.slice(0, 2);\n allFollowUpQueries.push(...followUps);\n\n // Retrieve additional results for each follow-up query\n const retrievalPromises = followUps.map(async (followUpQuery) => {\n const searchResults = await hybridSearch(followUpQuery, {\n userId,\n limit,\n });\n const rankedResults = await rerank(followUpQuery, searchResults);\n return 
rankedResults;\n });\n\n const additionalResultSets = await Promise.all(retrievalPromises);\n\n // Merge new results, deduplicating by id\n for (const resultSet of additionalResultSets) {\n for (const r of resultSet) {\n if (!resultsById.has(r.id)) {\n resultsById.set(r.id, r);\n }\n }\n }\n }\n\n // Sort merged results by rerankScore descending\n const mergedResults = Array.from(resultsById.values()).sort(\n (a, b) => b.rerankScore - a.rerankScore\n );\n\n return {\n results: mergedResults,\n steps: stepsPerformed,\n followUpQueries: allFollowUpQueries,\n };\n}\n","/**\r\n * Enhanced Retrieval Pipeline Orchestrator\r\n *\r\n * Wires together all 5 RAG enhancement stages into a single composable\r\n * pipeline that degrades gracefully when individual features are disabled:\r\n *\r\n * 1. Contextual query rewrite (CONTEXTUAL_QUERY_ENABLED)\r\n * 2. HyDE embedding generation (HYDE_ENABLED)\r\n * 3. Cache check / store (RETRIEVAL_CACHE_ENABLED)\r\n * 4. Hybrid search (always)\r\n * 5. Cross-encoder re-ranking (RERANK_ENABLED)\r\n * 6. 
Multi-step gap filling (MULTISTEP_RAG_ENABLED)\r\n *\r\n * If every feature flag is off the pipeline reduces to a plain hybridSearch\r\n * call with a default rerankScore appended.\r\n */\r\n\r\nimport { env } from \"../../config/env\";\r\nimport { hybridSearch, type HybridSearchResult } from \"./hybrid-search\";\r\nimport { buildContextualQuery, type Message } from \"./contextual-query\";\r\nimport { hydeSearch } from \"./hyde\";\r\nimport { getRetrievalCache } from \"./retrieval-cache\";\r\nimport { rerank, type RankedResult } from \"./reranker\";\r\nimport { multiStepRetrieve } from \"./multi-step\";\r\nimport { generateEmbedding } from \"../memory\";\r\n\r\n// ---------------------------------------------------------------------------\r\n// Types\r\n// ---------------------------------------------------------------------------\r\n\r\nexport interface EnhancedRetrievalOptions {\r\n userId?: string;\r\n limit?: number;\r\n conversationHistory?: Message[];\r\n}\r\n\r\nexport interface EnhancedRetrievalResult {\r\n results: RankedResult[];\r\n cached: boolean;\r\n steps: number;\r\n queryUsed: string;\r\n}\r\n\r\n// ---------------------------------------------------------------------------\r\n// Helpers\r\n// ---------------------------------------------------------------------------\r\n\r\n/**\r\n * Convert HybridSearchResults to RankedResults by attaching a rerankScore\r\n * derived from the existing rrfScore.\r\n */\r\nfunction toRankedResults(\r\n results: HybridSearchResult[],\r\n defaultScore?: number,\r\n): RankedResult[] {\r\n return results.map((r) => ({\r\n ...r,\r\n rerankScore: defaultScore ?? 
r.rrfScore * 10,\r\n }));\r\n}\r\n\r\n// ---------------------------------------------------------------------------\r\n// Main orchestrator\r\n// ---------------------------------------------------------------------------\r\n\r\n/**\r\n * Run the full enhanced retrieval pipeline.\r\n *\r\n * Each stage is feature-gated and will be skipped when its flag is off.\r\n * On any unrecoverable error the function falls back to a plain\r\n * hybridSearch so callers always receive results.\r\n */\r\nexport async function enhancedRetrieve(\r\n query: string,\r\n opts?: EnhancedRetrievalOptions,\r\n): Promise<EnhancedRetrievalResult> {\r\n const userId = opts?.userId;\r\n const limit = opts?.limit ?? 10;\r\n const conversationHistory = opts?.conversationHistory;\r\n\r\n let steps = 0;\r\n\r\n try {\r\n // ------------------------------------------------------------------\r\n // Step 1 — Contextual Query Rewrite\r\n // ------------------------------------------------------------------\r\n let effectiveQuery = query;\r\n\r\n if (\r\n env.CONTEXTUAL_QUERY_ENABLED &&\r\n conversationHistory &&\r\n conversationHistory.length >= 2\r\n ) {\r\n console.log(\"[EnhancedRetrieval] Contextual query rewrite enabled, rewriting query...\");\r\n effectiveQuery = await buildContextualQuery(query, conversationHistory);\r\n steps++;\r\n console.log(`[EnhancedRetrieval] Rewritten query: \"${effectiveQuery}\"`);\r\n }\r\n\r\n // ------------------------------------------------------------------\r\n // Step 2a — Cache Check\r\n // ------------------------------------------------------------------\r\n let cached = false;\r\n let queryEmbedding: number[] | null = null;\r\n\r\n if (env.RETRIEVAL_CACHE_ENABLED) {\r\n console.log(\"[EnhancedRetrieval] Cache enabled, checking for cached results...\");\r\n queryEmbedding = await generateEmbedding(effectiveQuery);\r\n const cache = getRetrievalCache();\r\n const cachedResult = await cache.getCachedResults(queryEmbedding);\r\n\r\n if (cachedResult) {\r\n 
console.log(\"[EnhancedRetrieval] Cache hit, using cached results\");\r\n cached = true;\r\n steps++;\r\n\r\n let rankedResults = toRankedResults(cachedResult.results, 5);\r\n\r\n // Even on a cache hit we still run re-ranking and multi-step\r\n // so the caller gets the most relevant ordering.\r\n\r\n // Step 3 — Re-ranking (on cached results)\r\n if (env.RERANK_ENABLED) {\r\n console.log(\"[EnhancedRetrieval] Re-ranking cached results...\");\r\n rankedResults = await rerank(effectiveQuery, cachedResult.results);\r\n steps++;\r\n }\r\n\r\n // Step 4 — Multi-step gap filling (on cached + re-ranked results)\r\n if (env.MULTISTEP_RAG_ENABLED) {\r\n console.log(\"[EnhancedRetrieval] Multi-step retrieval on cached results...\");\r\n rankedResults = await multiStepRetrieve(effectiveQuery, rankedResults, { userId });\r\n steps++;\r\n }\r\n\r\n return {\r\n results: rankedResults,\r\n cached: true,\r\n steps,\r\n queryUsed: effectiveQuery,\r\n };\r\n }\r\n\r\n console.log(\"[EnhancedRetrieval] Cache miss, proceeding to search\");\r\n }\r\n\r\n // ------------------------------------------------------------------\r\n // Step 2 — HyDE Search vs Regular Hybrid Search\r\n // ------------------------------------------------------------------\r\n let searchResults: HybridSearchResult[];\r\n\r\n if (env.HYDE_ENABLED) {\r\n console.log(\"[EnhancedRetrieval] HyDE enabled, generating hypothetical doc...\");\r\n searchResults = await hydeSearch(effectiveQuery, userId, limit);\r\n steps++;\r\n } else {\r\n console.log(\"[EnhancedRetrieval] Running hybrid search...\");\r\n searchResults = await hybridSearch(effectiveQuery, { userId, limit });\r\n steps++;\r\n }\r\n\r\n // ------------------------------------------------------------------\r\n // Step 3 — Re-ranking\r\n // ------------------------------------------------------------------\r\n let rankedResults: RankedResult[];\r\n\r\n if (env.RERANK_ENABLED) {\r\n console.log(\"[EnhancedRetrieval] Re-ranking results...\");\r\n 
rankedResults = await rerank(effectiveQuery, searchResults);\r\n steps++;\r\n } else {\r\n // Convert to RankedResults with a score derived from RRF\r\n rankedResults = toRankedResults(searchResults);\r\n }\r\n\r\n // ------------------------------------------------------------------\r\n // Step 3a — Cache Store\r\n // ------------------------------------------------------------------\r\n if (env.RETRIEVAL_CACHE_ENABLED && !cached) {\r\n console.log(\"[EnhancedRetrieval] Storing results in cache...\");\r\n\r\n // Reuse the embedding we already computed during the cache check.\r\n // If it was not computed yet (shouldn't happen given the flow above),\r\n // generate it now.\r\n if (!queryEmbedding) {\r\n queryEmbedding = await generateEmbedding(effectiveQuery);\r\n }\r\n\r\n const cache = getRetrievalCache();\r\n await cache.cacheResults(queryEmbedding, searchResults);\r\n }\r\n\r\n // ------------------------------------------------------------------\r\n // Step 4 — Multi-Step Gap Filling\r\n // ------------------------------------------------------------------\r\n if (env.MULTISTEP_RAG_ENABLED) {\r\n console.log(\"[EnhancedRetrieval] Multi-step retrieval enabled, filling gaps...\");\r\n rankedResults = await multiStepRetrieve(effectiveQuery, rankedResults, { userId });\r\n steps++;\r\n }\r\n\r\n return {\r\n results: rankedResults,\r\n cached,\r\n steps,\r\n queryUsed: effectiveQuery,\r\n };\r\n } catch (error) {\r\n // ------------------------------------------------------------------\r\n // Fallback — plain hybrid search so we never return nothing\r\n // ------------------------------------------------------------------\r\n console.error(\r\n \"[EnhancedRetrieval] Pipeline failed, falling back to plain hybrid search:\",\r\n error instanceof Error ? 
error.message : error,\r\n );\r\n\r\n try {\r\n const fallbackResults = await hybridSearch(query, { userId, limit });\r\n return {\r\n results: toRankedResults(fallbackResults, 5),\r\n cached: false,\r\n steps: 0,\r\n queryUsed: query,\r\n };\r\n } catch (fallbackError) {\r\n console.error(\r\n \"[EnhancedRetrieval] Fallback hybrid search also failed:\",\r\n fallbackError instanceof Error ? fallbackError.message : fallbackError,\r\n );\r\n return {\r\n results: [],\r\n cached: false,\r\n steps: 0,\r\n queryUsed: query,\r\n };\r\n }\r\n }\r\n}\r\n"],"mappings":";;;;;;;;;;;;;;;;AAYA,SAAS,WAAW;AA2BpB,IAAM,QAAQ;AAKd,eAAsB,aACpB,OACA,QACA,QAAQ,IACqF;AAC7F,QAAM,iBAAiB,MAAM,kBAAkB,KAAK;AAEpD,QAAM,UAAU,MAAM,GAAG,QAAQ;AAAA;AAAA;AAAA;AAAA,2BAIR,KAAK,UAAU,cAAc,CAAC;AAAA;AAAA,MAEnD,SAAS,sBAAsB,MAAM,KAAK,KAAK;AAAA,6BACxB,KAAK,UAAU,cAAc,CAAC;AAAA,YAC/C,KAAK;AAAA,GACd;AAED,SAAO;AACT;AAKA,eAAsB,cACpB,OACA,QACA,QAAQ,IACsF;AAC9F,QAAM,UAAU,MAAM,GAAG,QAAQ;AAAA;AAAA;AAAA;AAAA,0DAIuB,KAAK;AAAA;AAAA;AAAA,wDAGP,KAAK;AAAA,QACrD,SAAS,oBAAoB,MAAM,KAAK,KAAK;AAAA;AAAA,YAEzC,KAAK;AAAA,GACd;AAED,SAAQ,QAAkB,IAAI,CAAC,OAAY;AAAA,IACzC,GAAG;AAAA,IACH,aAAa,EAAE;AAAA,EACjB,EAAE;AACJ;AAKA,eAAsB,qBACpB,OACA,QACA,QAAQ,IACqF;AAE7F,QAAM,WAAW,MAAM,GAAG,QAAQ;AAAA;AAAA;AAAA,uBAGb,MAAM,QAAQ,GAAG;AAAA,QAChC,SAAS,oBAAoB,MAAM,KAAK,KAAK;AAAA;AAAA,GAElD;AAED,MAAK,SAAmB,WAAW,GAAG;AACpC,WAAO,CAAC;AAAA,EACV;AAEA,QAAM,YAAa,SAAmB,IAAI,CAAC,MAAW,EAAE,EAAE;AAI1D,QAAM,cAAe,SAAmB,IAAI,CAAC,MAAW,EAAE,IAAI;AAC9D,QAAM,cAAc,YAAY,KAAK,GAAG;AAExC,QAAM,UAAU,MAAM,GAAG,QAAQ;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,uBAMZ,WAAW;AAAA,QAC1B,SAAS,oBAAoB,MAAM,KAAK,KAAK;AAAA;AAAA,YAEzC,KAAK;AAAA,GACd;AAED,SAAQ,QAAkB,IAAI,CAAC,OAAY;AAAA,IACzC,GAAG;AAAA,IACH,YAAY,EAAE;AAAA,EAChB,EAAE;AACJ;AAKA,SAAS,qBACP,aACA,IAAI,OACiB;AACrB,QAAM,SAAS,oBAAI,IAAoB;AAEvC,aAAW,QAAQ,aAAa;AAC9B,aAAS,OAAO,GAAG,OAAO,KAAK,QAAQ,QAAQ;AAC7C,YAAM,KAAK,KAAK,IAAI,EAAE;AACtB,YAAM,WAAW,KAAK,IAAI,OAAO;AACjC,aAAO,IAAI,KAAK,OAAO,IAAI,EAAE,KAAK,KAAK,QAAQ;AAAA,IACjD;AAAA,EACF;AAEA,SAAO;AACT
;AAKA,eAAsB,aACpB,OACA,UAA+B,CAAC,GACD;AAC/B,QAAM;AAAA,IACJ;AAAA,IACA,QAAQ;AAAA,IACR;AAAA,IACA;AAAA,IACA,iBAAiB;AAAA,IACjB,eAAe;AAAA,EACjB,IAAI;AAGJ,QAAM,iBAAwC;AAAA,IAC5C,aAAa,OAAO,QAAQ,QAAQ,CAAC;AAAA;AAAA,EACvC;AAEA,MAAI,gBAAgB;AAClB,mBAAe,KAAK,cAAc,OAAO,QAAQ,QAAQ,CAAC,CAAC;AAAA,EAC7D;AAEA,MAAI,cAAc;AAChB,mBAAe,KAAK,qBAAqB,OAAO,QAAQ,KAAK,CAAC;AAAA,EAChE;AAEA,QAAM,UAAU,MAAM,QAAQ,IAAI,cAAc;AAChD,QAAM,CAAC,eAAe,gBAAgB,YAAY,IAAI;AAGtD,QAAM,aAAa,oBAAI,IAAiB;AACxC,aAAW,KAAK,iBAAiB,CAAC,GAAG;AACnC,eAAW,IAAI,EAAE,IAAI,CAAC;AAAA,EACxB;AACA,aAAW,KAAK,kBAAkB,CAAC,GAAG;AACpC,QAAI,CAAC,WAAW,IAAI,EAAE,EAAE,EAAG,YAAW,IAAI,EAAE,IAAI,CAAC;AAAA,EACnD;AACA,aAAW,KAAK,gBAAgB,CAAC,GAAG;AAClC,QAAI,CAAC,WAAW,IAAI,EAAE,EAAE,EAAG,YAAW,IAAI,EAAE,IAAI,CAAC;AAAA,EACnD;AAGA,QAAM,cAAc,CAAC,iBAAiB,CAAC,CAAC;AACxC,MAAI,eAAgB,aAAY,KAAK,cAAc;AACnD,MAAI,aAAc,aAAY,KAAK,YAAY;AAE/C,QAAM,YAAY,qBAAqB,WAAW;AAGlD,MAAI,eAAqC,CAAC;AAC1C,aAAW,CAAC,IAAI,QAAQ,KAAK,WAAW;AACtC,UAAM,OAAO,WAAW,IAAI,EAAE;AAC9B,QAAI,CAAC,KAAM;AAEX,iBAAa,KAAK;AAAA,MAChB,IAAI,KAAK;AAAA,MACT,QAAQ,KAAK;AAAA,MACb,MAAM,KAAK;AAAA,MACX,SAAS,KAAK;AAAA,MACd,YAAY,KAAK,cAAc;AAAA,MAC/B,QAAQ,KAAK;AAAA,MACb,YAAY,KAAK;AAAA,MACjB,YAAY,KAAK,cAAc;AAAA,MAC/B,aAAa,KAAK,eAAe,KAAK,gBAAgB;AAAA,MACtD;AAAA,MACA,WAAW,KAAK;AAAA,IAClB,CAAC;AAAA,EACH;AAGA,MAAI,OAAO;AACT,mBAAe,aAAa,OAAO,CAAC,MAAM,IAAI,KAAK,EAAE,SAAS,KAAK,KAAK;AAAA,EAC1E;AACA,MAAI,OAAO;AACT,mBAAe,aAAa,OAAO,CAAC,MAAM,IAAI,KAAK,EAAE,SAAS,KAAK,KAAK;AAAA,EAC1E;AAGA,SAAO,aACJ,KAAK,CAAC,GAAG,MAAM,EAAE,WAAW,EAAE,QAAQ,EACtC,MAAM,GAAG,KAAK;AACnB;;;ACjNA,IAAM,+BAA+B;AACrC,IAAM,aAAa;AAEnB,IAAM,gBACJ;AAmBF,eAAsB,qBACpB,OACA,qBACA,MACiB;AACjB,MAAI;AAEF,QAAI,CAAC,IAAI,0BAA0B;AACjC,aAAO;AAAA,IACT;AAGA,QAAI,CAAC,uBAAuB,oBAAoB,SAAS,GAAG;AAC1D,aAAO;AAAA,IACT;AAEA,UAAM,aAAa,MAAM,sBAAsB;AAG/C,UAAM,gBAAgB,oBAAoB,MAAM,CAAC,UAAU;AAG3D,UAAM,mBAAmB,cACtB,IAAI,CAAC,QAAQ,GAAG,IAAI,SAAS,SAAS,SAAS,WAAW,KAAK,IAAI,OAAO,EAAE,EAC5E,KAAK,IAAI;AAEZ,UAAM,aACJ;AAAA,EAAyB,gBAAgB;AAAA;AAAA;AAAA,EAAkC,KAAK;AAElF,UAAM,WAAW,iBAAiB,WAAW;AAC
7C,UAAM,QAAQ,MAAM,SAAS;AAE7B,UAAM,WAAW,MAAM,SAAS,cAAc;AAAA,MAC5C;AAAA,MACA,YAAY;AAAA,MACZ,QAAQ;AAAA,MACR,UAAU;AAAA,QACR;AAAA,UACE,MAAM;AAAA,UACN,SAAS;AAAA,QACX;AAAA,MACF;AAAA,IACF,CAAC;AAGD,UAAM,iBAAiB,SAAS,QAC7B,OAAO,CAAC,UAAU,MAAM,SAAS,UAAU,MAAM,IAAI,EACrD,IAAI,CAAC,UAAU,MAAM,IAAK,EAC1B,KAAK,EAAE,EACP,KAAK;AAGR,QAAI,CAAC,gBAAgB;AACnB,aAAO;AAAA,IACT;AAEA,WAAO;AAAA,EACT,QAAQ;AAEN,WAAO;AAAA,EACT;AACF;;;AC/FA,SAAS,OAAAA,YAAW;AA8BpB,IAAMC,SAAQ;AAEd,IAAM,wBACJ;AAEF,IAAM,qBAAqB;AAc3B,eAAsB,6BACpB,OACA,MACiB;AACjB,QAAM,eAAe,MAAM,gBAAgB;AAC3C,QAAM,YAAY,MAAM,aAAa;AAErC,QAAM,WAAW,iBAAiB,WAAW;AAE7C,QAAM,WAAW,MAAM,SAAS,cAAc;AAAA,IAC5C,OAAO;AAAA,IACP,YAAY;AAAA,IACZ,QAAQ;AAAA,IACR,UAAU;AAAA,MACR;AAAA,QACE,MAAM;AAAA,QACN,SAAS;AAAA,MACX;AAAA,IACF;AAAA,EACF,CAAC;AAGD,QAAM,OAAO,SAAS,QACnB,OAAO,CAAC,UAAU,MAAM,SAAS,MAAM,EACvC,IAAI,CAAC,UAAU,MAAM,QAAQ,EAAE,EAC/B,KAAK,EAAE;AAEV,SAAO;AACT;AAWA,eAAe,0BACb,WACA,QACA,QAAQ,IAaR;AACA,QAAM,eAAe,KAAK,UAAU,SAAS;AAE7C,QAAM,UAAU,MAAM,GAAG,QAAQC;AAAA;AAAA;AAAA;AAAA,2BAIR,YAAY;AAAA;AAAA,MAEjC,SAASA,uBAAsB,MAAM,KAAKA,MAAK;AAAA,6BACxB,YAAY;AAAA,YAC7B,KAAK;AAAA,GACd;AAED,SAAO;AACT;AAUA,SAASC,sBACP,aACA,IAAIF,QACiB;AACrB,QAAM,SAAS,oBAAI,IAAoB;AAEvC,aAAW,QAAQ,aAAa;AAC9B,aAAS,OAAO,GAAG,OAAO,KAAK,QAAQ,QAAQ;AAC7C,YAAM,KAAK,KAAK,IAAI,EAAE;AACtB,YAAM,WAAW,KAAK,IAAI,OAAO;AACjC,aAAO,IAAI,KAAK,OAAO,IAAI,EAAE,KAAK,KAAK,QAAQ;AAAA,IACjD;AAAA,EACF;AAEA,SAAO;AACT;AAkBA,eAAsB,WACpB,OACA,QACA,QAAQ,IACqB;AAE7B,MAAI,CAAC,IAAI,cAAc;AACrB,UAAM,UAAU,MAAM,aAAa,OAAO,EAAE,QAAQ,MAAM,CAAC;AAC3D,WAAO,QAAQ,IAAI,CAAC,OAAO,EAAE,GAAG,GAAG,cAAc,GAAG,EAAE;AAAA,EACxD;AAGA,MAAI;AACJ,MAAI;AACF,mBAAe,MAAM,6BAA6B,KAAK;AAAA,EACzD,SAAS,OAAO;AACd,YAAQ,KAAK,gEAAgE,KAAK;AAClF,UAAM,UAAU,MAAM,aAAa,OAAO,EAAE,QAAQ,MAAM,CAAC;AAC3D,WAAO,QAAQ,IAAI,CAAC,OAAO,EAAE,GAAG,GAAG,cAAc,GAAG,EAAE;AAAA,EACxD;AAEA,MAAI,CAAC,gBAAgB,aAAa,KAAK,EAAE,WAAW,GAAG;AACrD,YAAQ,KAAK,mEAAmE;AAChF,UAAM,UAAU,MAAM,aAAa,OAAO,EAAE,QAAQ,MAAM,CAAC;AAC3D,WAAO,QAAQ,IAAI,CAAC,OAAO,EAAE,GAAG,GAAG,cAAc,GAAG,EAAE;AAAA,EACxD;AAGA,MAAI;AACJ,
MAAI;AACF,oBAAgB,MAAM,kBAAkB,YAAY;AAAA,EACtD,SAAS,OAAO;AACd,YAAQ,KAAK,sEAAsE,KAAK;AACxF,UAAM,UAAU,MAAM,aAAa,OAAO,EAAE,QAAQ,MAAM,CAAC;AAC3D,WAAO,QAAQ,IAAI,CAAC,OAAO,EAAE,GAAG,GAAG,cAAc,GAAG,EAAE;AAAA,EACxD;AAGA,QAAM,aAAa,QAAQ;AAE3B,QAAM,CAAC,eAAe,SAAS,IAAI,MAAM,QAAQ,IAAI;AAAA,IACnD,0BAA0B,eAAe,QAAQ,UAAU;AAAA,IAC3D,cAAc,OAAO,QAAQ,UAAU;AAAA,EACzC,CAAC;AAGD,QAAM,aAAa,oBAAI,IAAiB;AACxC,aAAW,KAAK,eAAe;AAC7B,eAAW,IAAI,EAAE,IAAI,CAAC;AAAA,EACxB;AACA,aAAW,KAAK,WAAW;AACzB,QAAI,CAAC,WAAW,IAAI,EAAE,EAAE,GAAG;AACzB,iBAAW,IAAI,EAAE,IAAI,CAAC;AAAA,IACxB;AAAA,EACF;AAGA,QAAM,YAAYE,sBAAqB,CAAC,eAAe,SAAS,CAAC;AAGjE,QAAM,eAAmC,CAAC;AAC1C,aAAW,CAAC,IAAI,QAAQ,KAAK,WAAW;AACtC,UAAM,OAAO,WAAW,IAAI,EAAE;AAC9B,QAAI,CAAC,KAAM;AAEX,iBAAa,KAAK;AAAA,MAChB,IAAI,KAAK;AAAA,MACT,QAAQ,KAAK;AAAA,MACb,MAAM,KAAK;AAAA,MACX,SAAS,KAAK;AAAA,MACd,YAAY,KAAK,cAAc;AAAA,MAC/B,QAAQ,KAAK;AAAA,MACb,YAAY,KAAK;AAAA,MACjB,YAAY,KAAK,cAAc;AAAA,MAC/B,aAAa,KAAK,eAAe,KAAK,gBAAgB;AAAA,MACtD;AAAA,MACA,WAAW,KAAK;AAAA,MAChB;AAAA,IACF,CAAC;AAAA,EACH;AAGA,SAAO,aACJ,KAAK,CAAC,GAAG,MAAM,EAAE,WAAW,EAAE,QAAQ,EACtC,MAAM,GAAG,KAAK;AACnB;;;ACtQA,OAAO,WAAW;AAClB,SAAS,kBAAkB;AAuB3B,IAAM,aAAa;AACnB,IAAM,sBAAsB;AAMrB,IAAM,iBAAN,MAAqB;AAAA,EAClB,QAAsB;AAAA,EACtB,aAAa;AAAA;AAAA;AAAA;AAAA;AAAA,EAMrB,MAAc,YAAmC;AAC/C,QAAI,KAAK,MAAO,QAAO,KAAK;AAC5B,QAAI,KAAK,WAAY,QAAO;AAE5B,QAAI;AACF,WAAK,aAAa;AAClB,YAAM,SAAS,IAAI,MAAM,IAAI,WAAW;AAAA,QACtC,sBAAsB;AAAA,QACtB,aAAa;AAAA,MACf,CAAC;AACD,YAAM,OAAO,QAAQ;AACrB,WAAK,QAAQ;AACb,aAAO,KAAK;AAAA,IACd,QAAQ;AACN,WAAK,QAAQ;AACb,aAAO;AAAA,IACT,UAAE;AACA,WAAK,aAAa;AAAA,IACpB;AAAA,EACF;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,EAQQ,cAAc,WAA6B;AACjD,UAAM,UAAU,UAAU,IAAI,CAAC,MAAM,EAAE,QAAQ,CAAC,CAAC,EAAE,KAAK,GAAG;AAC3D,WAAO,WAAW,QAAQ,EAAE,OAAO,OAAO,EAAE,OAAO,KAAK;AAAA,EAC1D;AAAA;AAAA;AAAA;AAAA;AAAA,EAMA,MAAM,iBAAiB,gBAAwD;AAC7E,QAAI,CAAC,IAAI,wBAAyB,QAAO;AAEzC,QAAI;AACF,YAAM,SAAS,MAAM,KAAK,UAAU;AACpC,UAAI,CAAC,OAAQ,QAAO;AAEpB,YAAM,OAAO,KAAK,cAAc,cAAc;AAC9C,YAAM,MAAM,GAAG,UAAU,GAAG,IAAI;AAChC,YAAM,MAAM,MAAM,OAAO,IAAI,GAAG;A
AEhC,UAAI,CAAC,IAAK,QAAO;AAEjB,YAAM,SAAuB,KAAK,MAAM,GAAG;AAG3C,aAAO,UAAU,OAAO,QAAQ,IAAI,CAAC,OAAO;AAAA,QAC1C,GAAG;AAAA,QACH,WAAW,IAAI,KAAK,EAAE,SAAS;AAAA,MACjC,EAAE;AAEF,aAAO;AAAA,IACT,QAAQ;AACN,aAAO;AAAA,IACT;AAAA,EACF;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,EASA,MAAM,aACJ,gBACA,SACA,MAAc,qBACC;AACf,QAAI,CAAC,IAAI,wBAAyB;AAElC,QAAI;AACF,YAAM,SAAS,MAAM,KAAK,UAAU;AACpC,UAAI,CAAC,OAAQ;AAEb,YAAM,OAAO,KAAK,cAAc,cAAc;AAC9C,YAAM,MAAM,GAAG,UAAU,GAAG,IAAI;AAEhC,YAAM,QAAsB;AAAA,QAC1B;AAAA,QACA,UAAU,KAAK,IAAI;AAAA,QACnB,WAAW;AAAA,MACb;AAEA,YAAM,OAAO,IAAI,KAAK,KAAK,UAAU,KAAK,GAAG,MAAM,GAAG;AAAA,IACxD,QAAQ;AAAA,IAER;AAAA,EACF;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,EASA,MAAM,gBAAgB,SAAmC;AACvD,QAAI;AACF,YAAM,SAAS,MAAM,KAAK,UAAU;AACpC,UAAI,CAAC,OAAQ,QAAO;AAEpB,YAAM,cAAc,GAAG,UAAU,GAAG,WAAW,GAAG;AAClD,UAAI,UAAU;AACd,UAAI,SAAS;AAEb,SAAG;AACD,cAAM,CAAC,YAAY,IAAI,IAAI,MAAM,OAAO;AAAA,UACtC;AAAA,UACA;AAAA,UACA;AAAA,UACA;AAAA,UACA;AAAA,QACF;AACA,iBAAS;AAET,YAAI,KAAK,SAAS,GAAG;AACnB,qBAAW,MAAM,OAAO,IAAI,GAAG,IAAI;AAAA,QACrC;AAAA,MACF,SAAS,WAAW;AAEpB,aAAO;AAAA,IACT,QAAQ;AACN,aAAO;AAAA,IACT;AAAA,EACF;AACF;AAMA,IAAI,WAAkC;AAM/B,SAAS,oBAAoC;AAClD,MAAI,CAAC,UAAU;AACb,eAAW,IAAI,eAAe;AAAA,EAChC;AACA,SAAO;AACT;;;AC9JA,IAAM,qBAAqB;AAC3B,IAAMC,sBAAqB;AAC3B,IAAM,gBAAgB;AACtB,IAAM,gBAAgB;AAMtB,SAAS,mBAAmB,OAAe,WAA6B;AACtE,QAAM,UAAU,UACb,IAAI,CAAC,KAAK,MAAM,GAAG,IAAI,CAAC,KAAK,GAAG,EAAE,EAClC,KAAK,IAAI;AAEZ,SACE;AAAA;AAAA,SAEU,KAAK;AAAA;AAAA;AAAA,EACA,OAAO;AAAA;AAAA;AAAA;AAI1B;AAWA,SAAS,YAAY,MAAc,eAAiC;AAElE,MAAI;AACF,UAAM,SAAS,KAAK,MAAM,KAAK,KAAK,CAAC;AACrC,QAAI,MAAM,QAAQ,MAAM,GAAG;AACzB,aAAO,OAAO,IAAI,CAAC,MAAM;AACvB,cAAM,IAAI,OAAO,CAAC;AAClB,eAAO,OAAO,SAAS,CAAC,IAAI,KAAK,IAAI,IAAI,KAAK,IAAI,GAAG,CAAC,CAAC,IAAI;AAAA,MAC7D,CAAC;AAAA,IACH;AAAA,EACF,QAAQ;AAAA,EAER;AAGA,QAAM,UAAU,KAAK,MAAM,gBAAgB;AAC3C,MAAI,WAAW,QAAQ,SAAS,GAAG;AACjC,WAAO,QAAQ,MAAM,GAAG,aAAa,EAAE,IAAI,CAAC,MAAM;AAChD,YAAM,IAAI,OAAO,CAAC;AAClB,aAAO,OAAO,SAAS,CAAC,IAAI,KAAK,IAAI,IAAI,KAAK,IAAI,GAAG,CAAC,CAAC,IAAI;AAAA,IAC7D,CAAC;AAAA,EAC
H;AAGA,SAAO,IAAI,MAAM,aAAa,EAAE,KAAK,aAAa;AACpD;AAUA,eAAsB,YACpB,OACA,SACA,YAAoB,oBACpB,OACyB;AACzB,QAAM,WAAW,iBAAiB,WAAW;AAC7C,QAAM,gBAAgB,SAAS;AAC/B,QAAM,gBAAgC,CAAC;AAGvC,QAAM,UAAkC,CAAC;AACzC,WAAS,IAAI,GAAG,IAAI,QAAQ,QAAQ,KAAK,WAAW;AAClD,YAAQ,KAAK,QAAQ,MAAM,GAAG,IAAI,SAAS,CAAC;AAAA,EAC9C;AAGA,QAAM,gBAAgB,QAAQ,IAAI,OAAO,UAAU;AACjD,UAAM,YAAY,MAAM,IAAI,CAAC,MAAM,EAAE,OAAO;AAC5C,UAAM,SAAS,mBAAmB,OAAO,SAAS;AAElD,QAAI;AACF,YAAM,WAAW,MAAM,SAAS,cAAc;AAAA,QAC5C,OAAO;AAAA,QACP,YAAYA;AAAA,QACZ,QAAQ;AAAA,QACR,UAAU,CAAC,EAAE,MAAM,QAAQ,SAAS,OAAO,CAAC;AAAA,MAC9C,CAAC;AAGD,YAAM,eAAe,SAAS,QAC3B,OAAO,CAAC,UAAU,MAAM,SAAS,MAAM,EACvC,IAAI,CAAC,UAAU,MAAM,QAAQ,EAAE,EAC/B,KAAK,EAAE;AAEV,YAAM,SAAS,YAAY,cAAc,MAAM,MAAM;AAErD,aAAO,MAAM,IAAI,CAAC,QAAQ,SAAS;AAAA,QACjC,GAAG;AAAA,QACH,aAAa,MAAM,OAAO,SAAS,OAAO,GAAG,IAAI;AAAA,MACnD,EAAE;AAAA,IACJ,SAAS,OAAO;AAEd,cAAQ;AAAA,QACN,uEAAuE,aAAa;AAAA,QACpF,iBAAiB,QAAQ,MAAM,UAAU;AAAA,MAC3C;AACA,aAAO,MAAM,IAAI,CAAC,YAAY;AAAA,QAC5B,GAAG;AAAA,QACH,aAAa;AAAA,MACf,EAAE;AAAA,IACJ;AAAA,EACF,CAAC;AAED,QAAM,eAAe,MAAM,QAAQ,IAAI,aAAa;AACpD,aAAW,SAAS,cAAc;AAChC,kBAAc,KAAK,GAAG,KAAK;AAAA,EAC7B;AAEA,SAAO;AACT;AAcA,eAAsB,OACpB,OACA,SACA,MACyB;AACzB,MAAI;AACF,UAAM,WAAW,MAAM,YAAY,IAAI;AAGvC,QAAI,CAAC,IAAI,gBAAgB;AACvB,aAAO,QAAQ,IAAI,CAAC,OAAO,EAAE,GAAG,GAAG,aAAa,cAAc,EAAE;AAAA,IAClE;AAGA,QAAI,QAAQ,UAAU,GAAG;AACvB,aAAO,QAAQ,IAAI,CAAC,OAAO,EAAE,GAAG,GAAG,aAAa,GAAG,EAAE;AAAA,IACvD;AAGA,QAAI,SAAS,MAAM,YAAY,OAAO,SAAS,oBAAoB,MAAM,KAAK;AAG9E,aAAS,OAAO,OAAO,CAAC,MAAM,EAAE,eAAe,QAAQ;AAGvD,WAAO,KAAK,CAAC,GAAG,MAAM,EAAE,cAAc,EAAE,WAAW;AAGnD,QAAI,MAAM,QAAQ,KAAK,OAAO,GAAG;AAC/B,eAAS,OAAO,MAAM,GAAG,KAAK,IAAI;AAAA,IACpC;AAEA,WAAO;AAAA,EACT,SAAS,OAAO;AAEd,YAAQ;AAAA,MACN;AAAA,MACA,iBAAiB,QAAQ,MAAM,UAAU;AAAA,IAC3C;AACA,WAAO,QAAQ,IAAI,CAAC,OAAO,EAAE,GAAG,GAAG,aAAa,cAAc,EAAE;AAAA,EAClE;AACF;;;AC/KA,IAAM,0BAA0B;AAChC,IAAM,qBAAqB;AAW3B,eAAsB,qBACpB,OACA,SACiC;AACjC,MAAI;AACF,UAAM,WAAW,iBAAiB,WAAW;AAE7C,UAAM,WAAW,MAAM,SAAS,cAAc;AAAA,MAC5C,OAAO;AAAA,MACP,YAAY;AAAA,MACZ,QAAQ;AAAA,MACR,UAAU;AAAA,
QACR;AAAA,UACE,MAAM;AAAA,UACN,SACE;AAAA;AAAA;AAAA;AAAA;AAAA,SAIU,KAAK;AAAA;AAAA;AAAA,EACF,OAAO;AAAA;AAAA;AAAA,QAExB;AAAA,MACF;AAAA,IACF,CAAC;AAED,UAAM,eAAe,SAAS,QAC3B,OAAO,CAAC,UAAU,MAAM,SAAS,MAAM,EACvC,IAAI,CAAC,UAAU,MAAM,QAAQ,EAAE,EAC/B,KAAK,EAAE;AAEV,UAAM,SAAS,KAAK,MAAM,aAAa,KAAK,CAAC;AAE7C,WAAO;AAAA,MACL,UAAU,QAAQ,OAAO,QAAQ;AAAA,MACjC,MAAM,MAAM,QAAQ,OAAO,IAAI,IAAI,OAAO,OAAO,CAAC;AAAA,MAClD,iBAAiB,MAAM,QAAQ,OAAO,eAAe,IACjD,OAAO,gBAAgB,MAAM,GAAG,CAAC,IACjC,CAAC;AAAA,IACP;AAAA,EACF,SAAS,OAAO;AAEd,YAAQ;AAAA,MACN;AAAA,MACA,iBAAiB,QAAQ,MAAM,UAAU;AAAA,IAC3C;AACA,WAAO,EAAE,UAAU,MAAM,MAAM,CAAC,GAAG,iBAAiB,CAAC,EAAE;AAAA,EACzD;AACF;AAkBA,eAAsB,kBACpB,OACA,gBACA,MAC0B;AAE1B,MAAI,CAAC,IAAI,uBAAuB;AAC9B,WAAO;AAAA,MACL,SAAS;AAAA,MACT,OAAO;AAAA,MACP,iBAAiB,CAAC;AAAA,IACpB;AAAA,EACF;AAEA,QAAM,WAAW,MAAM,YAAY,IAAI;AACvC,QAAM,SAAS,MAAM;AACrB,QAAM,QAAQ,MAAM,SAAS;AAG7B,QAAM,cAAc,oBAAI,IAA0B;AAClD,aAAW,KAAK,gBAAgB;AAC9B,gBAAY,IAAI,EAAE,IAAI,CAAC;AAAA,EACzB;AAEA,QAAM,qBAA+B,CAAC;AACtC,MAAI,iBAAiB;AAErB,WAAS,OAAO,GAAG,OAAO,UAAU,QAAQ;AAE1C,UAAM,iBAAiB,MAAM,KAAK,YAAY,OAAO,CAAC;AACtD,UAAM,cAAc,eAAe,IAAI,CAAC,MAAM,EAAE,OAAO,EAAE,KAAK,MAAM;AAGpE,UAAM,aAAa,MAAM,qBAAqB,OAAO,WAAW;AAEhE,QAAI,WAAW,YAAY,WAAW,gBAAgB,WAAW,GAAG;AAClE;AAAA,IACF;AAEA;AAGA,UAAM,YAAY,WAAW,gBAAgB,MAAM,GAAG,CAAC;AACvD,uBAAmB,KAAK,GAAG,SAAS;AAGpC,UAAM,oBAAoB,UAAU,IAAI,OAAO,kBAAkB;AAC/D,YAAM,gBAAgB,MAAM,aAAa,eAAe;AAAA,QACtD;AAAA,QACA;AAAA,MACF,CAAC;AACD,YAAM,gBAAgB,MAAM,OAAO,eAAe,aAAa;AAC/D,aAAO;AAAA,IACT,CAAC;AAED,UAAM,uBAAuB,MAAM,QAAQ,IAAI,iBAAiB;AAGhE,eAAW,aAAa,sBAAsB;AAC5C,iBAAW,KAAK,WAAW;AACzB,YAAI,CAAC,YAAY,IAAI,EAAE,EAAE,GAAG;AAC1B,sBAAY,IAAI,EAAE,IAAI,CAAC;AAAA,QACzB;AAAA,MACF;AAAA,IACF;AAAA,EACF;AAGA,QAAM,gBAAgB,MAAM,KAAK,YAAY,OAAO,CAAC,EAAE;AAAA,IACrD,CAAC,GAAG,MAAM,EAAE,cAAc,EAAE;AAAA,EAC9B;AAEA,SAAO;AAAA,IACL,SAAS;AAAA,IACT,OAAO;AAAA,IACP,iBAAiB;AAAA,EACnB;AACF;;;ACrJA,SAAS,gBACP,SACA,cACgB;AAChB,SAAO,QAAQ,IAAI,CAAC,OAAO;AAAA,IACzB,GAAG;AAAA,IACH,aAAa,gBAAgB,EAAE,WAAW;AAAA,EAC5C,EAAE;AACJ;AAaA,eAAsB,iBACpB,OACA,
MACkC;AAClC,QAAM,SAAS,MAAM;AACrB,QAAM,QAAQ,MAAM,SAAS;AAC7B,QAAM,sBAAsB,MAAM;AAElC,MAAI,QAAQ;AAEZ,MAAI;AAIF,QAAI,iBAAiB;AAErB,QACE,IAAI,4BACJ,uBACA,oBAAoB,UAAU,GAC9B;AACA,cAAQ,IAAI,0EAA0E;AACtF,uBAAiB,MAAM,qBAAqB,OAAO,mBAAmB;AACtE;AACA,cAAQ,IAAI,yCAAyC,cAAc,GAAG;AAAA,IACxE;AAKA,QAAI,SAAS;AACb,QAAI,iBAAkC;AAEtC,QAAI,IAAI,yBAAyB;AAC/B,cAAQ,IAAI,mEAAmE;AAC/E,uBAAiB,MAAM,kBAAkB,cAAc;AACvD,YAAM,QAAQ,kBAAkB;AAChC,YAAM,eAAe,MAAM,MAAM,iBAAiB,cAAc;AAEhE,UAAI,cAAc;AAChB,gBAAQ,IAAI,qDAAqD;AACjE,iBAAS;AACT;AAEA,YAAIC,iBAAgB,gBAAgB,aAAa,SAAS,CAAC;AAM3D,YAAI,IAAI,gBAAgB;AACtB,kBAAQ,IAAI,kDAAkD;AAC9D,UAAAA,iBAAgB,MAAM,OAAO,gBAAgB,aAAa,OAAO;AACjE;AAAA,QACF;AAGA,YAAI,IAAI,uBAAuB;AAC7B,kBAAQ,IAAI,+DAA+D;AAC3E,UAAAA,iBAAgB,MAAM,kBAAkB,gBAAgBA,gBAAe,EAAE,OAAO,CAAC;AACjF;AAAA,QACF;AAEA,eAAO;AAAA,UACL,SAASA;AAAA,UACT,QAAQ;AAAA,UACR;AAAA,UACA,WAAW;AAAA,QACb;AAAA,MACF;AAEA,cAAQ,IAAI,sDAAsD;AAAA,IACpE;AAKA,QAAI;AAEJ,QAAI,IAAI,cAAc;AACpB,cAAQ,IAAI,kEAAkE;AAC9E,sBAAgB,MAAM,WAAW,gBAAgB,QAAQ,KAAK;AAC9D;AAAA,IACF,OAAO;AACL,cAAQ,IAAI,8CAA8C;AAC1D,sBAAgB,MAAM,aAAa,gBAAgB,EAAE,QAAQ,MAAM,CAAC;AACpE;AAAA,IACF;AAKA,QAAI;AAEJ,QAAI,IAAI,gBAAgB;AACtB,cAAQ,IAAI,2CAA2C;AACvD,sBAAgB,MAAM,OAAO,gBAAgB,aAAa;AAC1D;AAAA,IACF,OAAO;AAEL,sBAAgB,gBAAgB,aAAa;AAAA,IAC/C;AAKA,QAAI,IAAI,2BAA2B,CAAC,QAAQ;AAC1C,cAAQ,IAAI,iDAAiD;AAK7D,UAAI,CAAC,gBAAgB;AACnB,yBAAiB,MAAM,kBAAkB,cAAc;AAAA,MACzD;AAEA,YAAM,QAAQ,kBAAkB;AAChC,YAAM,MAAM,aAAa,gBAAgB,aAAa;AAAA,IACxD;AAKA,QAAI,IAAI,uBAAuB;AAC7B,cAAQ,IAAI,mEAAmE;AAC/E,sBAAgB,MAAM,kBAAkB,gBAAgB,eAAe,EAAE,OAAO,CAAC;AACjF;AAAA,IACF;AAEA,WAAO;AAAA,MACL,SAAS;AAAA,MACT;AAAA,MACA;AAAA,MACA,WAAW;AAAA,IACb;AAAA,EACF,SAAS,OAAO;AAId,YAAQ;AAAA,MACN;AAAA,MACA,iBAAiB,QAAQ,MAAM,UAAU;AAAA,IAC3C;AAEA,QAAI;AACF,YAAM,kBAAkB,MAAM,aAAa,OAAO,EAAE,QAAQ,MAAM,CAAC;AACnE,aAAO;AAAA,QACL,SAAS,gBAAgB,iBAAiB,CAAC;AAAA,QAC3C,QAAQ;AAAA,QACR,OAAO;AAAA,QACP,WAAW;AAAA,MACb;AAAA,IACF,SAAS,eAAe;AACtB,cAAQ;AAAA,QACN;AAAA,QACA,yBAAyB,QAAQ,cAAc,UAAU;AAAA,MAC3D;AACA,aAAO;AAAA,QACL,SAAS,CAAC;AAAA,QACV,QAAQ;AAAA,QACR,OAAO;AAAA,
QACP,WAAW;AAAA,MACb;AAAA,IACF;AAAA,EACF;AACF;","names":["sql","RRF_K","sql","reciprocalRankFusion","DEFAULT_MAX_TOKENS","rankedResults"]}
1
+ {"version":3,"sources":["../src/core/memory/hybrid-search.ts","../src/core/memory/contextual-query.ts","../src/core/memory/hyde.ts","../src/core/memory/retrieval-cache.ts","../src/core/memory/reranker.ts","../src/core/memory/multi-step.ts","../src/core/memory/enhanced-retrieval.ts"],"sourcesContent":["/**\r\n * Hybrid Search — Vector + Keyword + Graph with Reciprocal Rank Fusion\r\n *\r\n * Combines three retrieval strategies:\r\n * 1. Vector search (pgvector cosine similarity)\r\n * 2. Keyword search (PostgreSQL tsvector/GIN)\r\n * 3. Graph-augmented search (entity relationship expansion)\r\n *\r\n * Results are fused using Reciprocal Rank Fusion (RRF).\r\n */\r\n\r\nimport { db } from \"../../db\";\r\nimport { sql } from \"drizzle-orm\";\r\nimport { generateEmbedding } from \"../memory\";\r\n\r\nexport interface HybridSearchResult {\r\n id: string;\r\n userId: string | null;\r\n type: string;\r\n content: string;\r\n importance: number;\r\n source: string | null;\r\n provenance: string | null;\r\n similarity: number; // Vector similarity score\r\n keywordRank: number; // Keyword search rank\r\n rrfScore: number; // Combined RRF score\r\n createdAt: Date;\r\n}\r\n\r\nexport interface HybridSearchOptions {\r\n userId?: string;\r\n limit?: number;\r\n since?: Date;\r\n until?: Date;\r\n minImportance?: number;\r\n includeKeyword?: boolean;\r\n includeGraph?: boolean;\r\n}\r\n\r\nconst RRF_K = 60; // RRF constant (standard value)\r\n\r\n/**\r\n * Vector search using pgvector cosine similarity\r\n */\r\nexport async function vectorSearch(\r\n query: string,\r\n userId?: string,\r\n limit = 10\r\n): Promise<Array<{ id: string; content: string; similarity: number; [key: string]: unknown }>> {\r\n const queryEmbedding = await generateEmbedding(query);\r\n\r\n const results = await db.execute(sql`\r\n SELECT\r\n id, user_id, type, content, importance, source, provenance,\r\n created_at,\r\n 1 - (embedding <=> ${JSON.stringify(queryEmbedding)}::vector) as similarity\r\n 
FROM memories\r\n ${userId ? sql`WHERE user_id = ${userId}` : sql``}\r\n ORDER BY embedding <=> ${JSON.stringify(queryEmbedding)}::vector\r\n LIMIT ${limit}\r\n `);\r\n\r\n return results as any[];\r\n}\r\n\r\n/**\r\n * Keyword search using PostgreSQL tsvector full-text search\r\n */\r\nexport async function keywordSearch(\r\n query: string,\r\n userId?: string,\r\n limit = 10\r\n): Promise<Array<{ id: string; content: string; keywordRank: number; [key: string]: unknown }>> {\r\n const results = await db.execute(sql`\r\n SELECT\r\n id, user_id, type, content, importance, source, provenance,\r\n created_at,\r\n ts_rank(search_vector, plainto_tsquery('english', ${query})) as keyword_rank\r\n FROM memories\r\n WHERE search_vector IS NOT NULL\r\n AND search_vector @@ plainto_tsquery('english', ${query})\r\n ${userId ? sql`AND user_id = ${userId}` : sql``}\r\n ORDER BY keyword_rank DESC\r\n LIMIT ${limit}\r\n `);\r\n\r\n return (results as any[]).map((r: any) => ({\r\n ...r,\r\n keywordRank: r.keyword_rank,\r\n }));\r\n}\r\n\r\n/**\r\n * Graph-augmented search: find entities matching query, expand to related memories\r\n */\r\nexport async function graphAugmentedSearch(\r\n query: string,\r\n userId?: string,\r\n limit = 10\r\n): Promise<Array<{ id: string; content: string; graphScore: number; [key: string]: unknown }>> {\r\n // Find matching graph entities\r\n const entities = await db.execute(sql`\r\n SELECT id, name, type\r\n FROM graph_entities\r\n WHERE name ILIKE ${'%' + query + '%'}\r\n ${userId ? 
sql`AND user_id = ${userId}` : sql``}\r\n LIMIT 5\r\n `);\r\n\r\n if ((entities as any[]).length === 0) {\r\n return [];\r\n }\r\n\r\n const entityIds = (entities as any[]).map((e: any) => e.id);\r\n\r\n // Find memories related to these entities via relationships\r\n // We search for entity names in memory content\r\n const entityNames = (entities as any[]).map((e: any) => e.name);\r\n const namePattern = entityNames.join(\"|\");\r\n\r\n const results = await db.execute(sql`\r\n SELECT\r\n id, user_id, type, content, importance, source, provenance,\r\n created_at,\r\n 1.0 as graph_score\r\n FROM memories\r\n WHERE content ~* ${namePattern}\r\n ${userId ? sql`AND user_id = ${userId}` : sql``}\r\n ORDER BY importance DESC, created_at DESC\r\n LIMIT ${limit}\r\n `);\r\n\r\n return (results as any[]).map((r: any) => ({\r\n ...r,\r\n graphScore: r.graph_score,\r\n }));\r\n}\r\n\r\n/**\r\n * Reciprocal Rank Fusion: combine ranked lists from multiple sources\r\n */\r\nfunction reciprocalRankFusion(\r\n rankedLists: Array<Array<{ id: string; [key: string]: unknown }>>,\r\n k = RRF_K\r\n): Map<string, number> {\r\n const scores = new Map<string, number>();\r\n\r\n for (const list of rankedLists) {\r\n for (let rank = 0; rank < list.length; rank++) {\r\n const id = list[rank].id;\r\n const rrfScore = 1 / (k + rank + 1);\r\n scores.set(id, (scores.get(id) || 0) + rrfScore);\r\n }\r\n }\r\n\r\n return scores;\r\n}\r\n\r\n/**\r\n * Hybrid search combining vector, keyword, and graph search with RRF\r\n */\r\nexport async function hybridSearch(\r\n query: string,\r\n options: HybridSearchOptions = {}\r\n): Promise<HybridSearchResult[]> {\r\n const {\r\n userId,\r\n limit = 10,\r\n since,\r\n until,\r\n includeKeyword = true,\r\n includeGraph = true,\r\n } = options;\r\n\r\n // Run searches in parallel\r\n const searchPromises: Array<Promise<any[]>> = [\r\n vectorSearch(query, userId, limit * 2), // Fetch more for better fusion\r\n ];\r\n\r\n if (includeKeyword) {\r\n 
searchPromises.push(keywordSearch(query, userId, limit * 2));\r\n }\r\n\r\n if (includeGraph) {\r\n searchPromises.push(graphAugmentedSearch(query, userId, limit));\r\n }\r\n\r\n const results = await Promise.all(searchPromises);\r\n const [vectorResults, keywordResults, graphResults] = results;\r\n\r\n // Build a map of all unique results\r\n const allResults = new Map<string, any>();\r\n for (const r of vectorResults || []) {\r\n allResults.set(r.id, r);\r\n }\r\n for (const r of keywordResults || []) {\r\n if (!allResults.has(r.id)) allResults.set(r.id, r);\r\n }\r\n for (const r of graphResults || []) {\r\n if (!allResults.has(r.id)) allResults.set(r.id, r);\r\n }\r\n\r\n // Compute RRF scores\r\n const rankedLists = [vectorResults || []];\r\n if (keywordResults) rankedLists.push(keywordResults);\r\n if (graphResults) rankedLists.push(graphResults);\r\n\r\n const rrfScores = reciprocalRankFusion(rankedLists);\r\n\r\n // Build final results\r\n let finalResults: HybridSearchResult[] = [];\r\n for (const [id, rrfScore] of rrfScores) {\r\n const data = allResults.get(id);\r\n if (!data) continue;\r\n\r\n finalResults.push({\r\n id: data.id,\r\n userId: data.user_id,\r\n type: data.type,\r\n content: data.content,\r\n importance: data.importance || 5,\r\n source: data.source,\r\n provenance: data.provenance,\r\n similarity: data.similarity || 0,\r\n keywordRank: data.keywordRank || data.keyword_rank || 0,\r\n rrfScore,\r\n createdAt: data.created_at,\r\n });\r\n }\r\n\r\n // Apply temporal filtering\r\n if (since) {\r\n finalResults = finalResults.filter((r) => new Date(r.createdAt) >= since);\r\n }\r\n if (until) {\r\n finalResults = finalResults.filter((r) => new Date(r.createdAt) <= until);\r\n }\r\n\r\n // Sort by RRF score descending, take top N\r\n return finalResults\r\n .sort((a, b) => b.rrfScore - a.rrfScore)\r\n .slice(0, limit);\r\n}\r\n","/**\n * Contextual Query Rewriting\n *\n * Rewrites user queries by incorporating conversation history to resolve\n 
* pronouns, references, and implicit context before retrieval. This improves\n * RAG accuracy by ensuring the search query is self-contained.\n *\n * Feature-gated behind env.CONTEXTUAL_QUERY_ENABLED.\n */\n\nimport { env } from \"../../config/env\";\nimport { providerRegistry } from \"../providers\";\n\n// ============================================\n// Types\n// ============================================\n\nexport interface Message {\n role: \"user\" | \"assistant\";\n content: string;\n}\n\nexport interface ContextualQueryOptions {\n /** Maximum number of recent messages to include for context. Default: 4 */\n maxHistoryMessages?: number;\n /** Override the LLM model used for rewriting. */\n model?: string;\n}\n\n// ============================================\n// Constants\n// ============================================\n\nconst DEFAULT_MAX_HISTORY_MESSAGES = 4;\nconst MAX_TOKENS = 200;\n\nconst SYSTEM_PROMPT =\n \"Rewrite this query to be self-contained by resolving pronouns, references, \" +\n \"and implicit context from the conversation. 
Return ONLY the rewritten query, nothing else.\";\n\n// ============================================\n// Main function\n// ============================================\n\n/**\n * Rewrites a user query to be self-contained by resolving pronouns,\n * references, and implicit context from the recent conversation history.\n *\n * If the feature is disabled, conversation history is too short (< 2 messages),\n * or the LLM call fails, the original query is returned unchanged.\n *\n * @param query - The user's current query\n * @param conversationHistory - Recent conversation messages\n * @param opts - Optional configuration\n * @returns The rewritten self-contained query, or the original on failure\n */\nexport async function buildContextualQuery(\n query: string,\n conversationHistory: Message[],\n opts?: ContextualQueryOptions\n): Promise<string> {\n try {\n // Feature gate: return original query if disabled\n if (!env.CONTEXTUAL_QUERY_ENABLED) {\n return query;\n }\n\n // Not enough context to resolve references\n if (!conversationHistory || conversationHistory.length < 2) {\n return query;\n }\n\n const maxHistory = opts?.maxHistoryMessages ?? DEFAULT_MAX_HISTORY_MESSAGES;\n\n // Take only the last N messages from history\n const recentHistory = conversationHistory.slice(-maxHistory);\n\n // Build the conversation context for the LLM\n const conversationText = recentHistory\n .map((msg) => `${msg.role === \"user\" ? \"User\" : \"Assistant\"}: ${msg.content}`)\n .join(\"\\n\");\n\n const userPrompt =\n `Conversation so far:\\n${conversationText}\\n\\nCurrent query to rewrite:\\n${query}`;\n\n const provider = providerRegistry.getDefault();\n const model = opts?.model ?? 
\"claude-sonnet-4-5-20250929\";\n\n const response = await provider.createMessage({\n model,\n max_tokens: MAX_TOKENS,\n system: SYSTEM_PROMPT,\n messages: [\n {\n role: \"user\",\n content: userPrompt,\n },\n ],\n });\n\n // Extract text from the response content blocks\n const rewrittenQuery = response.content\n .filter((block) => block.type === \"text\" && block.text)\n .map((block) => block.text!)\n .join(\"\")\n .trim();\n\n // If the LLM returned an empty response, fall back to the original query\n if (!rewrittenQuery) {\n return query;\n }\n\n return rewrittenQuery;\n } catch {\n // On any failure, return the original query unchanged\n return query;\n }\n}\n","/**\r\n * HyDE — Hypothetical Document Embeddings\r\n *\r\n * Instead of embedding the raw user query for retrieval, this module\r\n * asks an LLM to generate a hypothetical \"ideal answer\" document,\r\n * embeds *that*, and uses the resulting vector for similarity search.\r\n *\r\n * This dramatically improves retrieval quality because the hypothetical\r\n * document lives in the same semantic space as stored memories/documents,\r\n * whereas a short question often does not.\r\n *\r\n * Pipeline:\r\n * 1. User query → LLM generates hypothetical document\r\n * 2. Hypothetical document → embedding via OpenAI\r\n * 3. Embedding → pgvector cosine similarity search\r\n * 4. Original query → keyword search (tsvector)\r\n * 5. 
Results merged via Reciprocal Rank Fusion (RRF)\r\n *\r\n * Gated behind env.HYDE_ENABLED.\r\n */\r\n\r\nimport { db } from \"../../db\";\r\nimport { sql } from \"drizzle-orm\";\r\nimport { env } from \"../../config/env\";\r\nimport { providerRegistry } from \"../providers\";\r\nimport { generateEmbedding } from \"../memory\";\r\nimport {\r\n hybridSearch,\r\n keywordSearch,\r\n type HybridSearchResult,\r\n} from \"./hybrid-search\";\r\n\r\n// ============================================\r\n// Types\r\n// ============================================\r\n\r\nexport interface HyDEOptions {\r\n /** Max tokens for the hypothetical document generation (default: 300) */\r\n maxTokens?: number;\r\n /** Override the system prompt used for hypothetical document generation */\r\n systemPrompt?: string;\r\n}\r\n\r\nexport interface HyDESearchResult extends HybridSearchResult {\r\n /** The hypothetical document that was generated and embedded for retrieval */\r\n hydeDocument: string;\r\n}\r\n\r\n// ============================================\r\n// Constants\r\n// ============================================\r\n\r\nconst RRF_K = 60;\r\n\r\nconst DEFAULT_SYSTEM_PROMPT =\r\n \"You are a helpful assistant. Generate a detailed document that would perfectly answer the following question. Write as if this document already exists in a knowledge base. Be specific and factual.\";\r\n\r\nconst DEFAULT_MAX_TOKENS = 300;\r\n\r\n// ============================================\r\n// Hypothetical Document Generation\r\n// ============================================\r\n\r\n/**\r\n * Generate a hypothetical document that would perfectly answer the given query.\r\n *\r\n * Uses the default LLM provider to produce a ~200-word passage written as if\r\n * it were an existing knowledge-base entry. 
This passage is later embedded\r\n * so that the embedding sits closer to relevant stored documents than the\r\n * raw question would.\r\n */\r\nexport async function generateHypotheticalDocument(\r\n query: string,\r\n opts?: HyDEOptions\r\n): Promise<string> {\r\n const systemPrompt = opts?.systemPrompt ?? DEFAULT_SYSTEM_PROMPT;\r\n const maxTokens = opts?.maxTokens ?? DEFAULT_MAX_TOKENS;\r\n\r\n const provider = providerRegistry.getDefault();\r\n\r\n const response = await provider.createMessage({\r\n model: \"claude-sonnet-4-20250514\",\r\n max_tokens: maxTokens,\r\n system: systemPrompt,\r\n messages: [\r\n {\r\n role: \"user\",\r\n content: query,\r\n },\r\n ],\r\n });\r\n\r\n // Extract the text from the response content blocks\r\n const text = response.content\r\n .filter((block) => block.type === \"text\")\r\n .map((block) => block.text ?? \"\")\r\n .join(\"\");\r\n\r\n return text;\r\n}\r\n\r\n// ============================================\r\n// Vector Search with Pre-computed Embedding\r\n// ============================================\r\n\r\n/**\r\n * Run pgvector cosine similarity search using a pre-computed embedding\r\n * rather than generating one from a text query. This is the core of HyDE:\r\n * we embed the hypothetical document and search with that vector.\r\n */\r\nasync function vectorSearchWithEmbedding(\r\n embedding: number[],\r\n userId?: string,\r\n limit = 10\r\n): Promise<\r\n Array<{\r\n id: string;\r\n user_id: string | null;\r\n type: string;\r\n content: string;\r\n importance: number;\r\n source: string | null;\r\n provenance: string | null;\r\n created_at: Date;\r\n similarity: number;\r\n }>\r\n> {\r\n const embeddingStr = JSON.stringify(embedding);\r\n\r\n const results = await db.execute(sql`\r\n SELECT\r\n id, user_id, type, content, importance, source, provenance,\r\n created_at,\r\n 1 - (embedding <=> ${embeddingStr}::vector) as similarity\r\n FROM memories\r\n ${userId ? 
sql`WHERE user_id = ${userId}` : sql``}\r\n ORDER BY embedding <=> ${embeddingStr}::vector\r\n LIMIT ${limit}\r\n `);\r\n\r\n return results as any[];\r\n}\r\n\r\n// ============================================\r\n// Reciprocal Rank Fusion\r\n// ============================================\r\n\r\n/**\r\n * Combine ranked lists from multiple retrieval strategies using RRF.\r\n * Each item receives score = sum over lists of 1/(k + rank + 1).\r\n */\r\nfunction reciprocalRankFusion(\r\n rankedLists: Array<Array<{ id: string; [key: string]: unknown }>>,\r\n k = RRF_K\r\n): Map<string, number> {\r\n const scores = new Map<string, number>();\r\n\r\n for (const list of rankedLists) {\r\n for (let rank = 0; rank < list.length; rank++) {\r\n const id = list[rank].id;\r\n const rrfScore = 1 / (k + rank + 1);\r\n scores.set(id, (scores.get(id) || 0) + rrfScore);\r\n }\r\n }\r\n\r\n return scores;\r\n}\r\n\r\n// ============================================\r\n// HyDE Search Pipeline\r\n// ============================================\r\n\r\n/**\r\n * Full HyDE search pipeline:\r\n *\r\n * 1. Generate a hypothetical document from the query via LLM\r\n * 2. Embed the hypothetical document\r\n * 3. Run pgvector cosine similarity with that embedding\r\n * 4. Run keyword search with the *original* query (keywords matter!)\r\n * 5. 
Fuse results with Reciprocal Rank Fusion\r\n *\r\n * If HyDE is disabled (env.HYDE_ENABLED === false) or the LLM call fails,\r\n * this falls back to the standard hybridSearch with the original query.\r\n */\r\nexport async function hydeSearch(\r\n query: string,\r\n userId?: string,\r\n limit = 10\r\n): Promise<HyDESearchResult[]> {\r\n // Gate check: fall back to regular hybrid search if HyDE is disabled\r\n if (!env.HYDE_ENABLED) {\r\n const results = await hybridSearch(query, { userId, limit });\r\n return results.map((r) => ({ ...r, hydeDocument: \"\" }));\r\n }\r\n\r\n // Step 1: Generate hypothetical document\r\n let hydeDocument: string;\r\n try {\r\n hydeDocument = await generateHypotheticalDocument(query);\r\n } catch (error) {\r\n console.warn(\"[HyDE] LLM generation failed, falling back to hybrid search:\", error);\r\n const results = await hybridSearch(query, { userId, limit });\r\n return results.map((r) => ({ ...r, hydeDocument: \"\" }));\r\n }\r\n\r\n if (!hydeDocument || hydeDocument.trim().length === 0) {\r\n console.warn(\"[HyDE] Empty hypothetical document, falling back to hybrid search\");\r\n const results = await hybridSearch(query, { userId, limit });\r\n return results.map((r) => ({ ...r, hydeDocument: \"\" }));\r\n }\r\n\r\n // Step 2: Embed the hypothetical document\r\n let hydeEmbedding: number[];\r\n try {\r\n hydeEmbedding = await generateEmbedding(hydeDocument);\r\n } catch (error) {\r\n console.warn(\"[HyDE] Embedding generation failed, falling back to hybrid search:\", error);\r\n const results = await hybridSearch(query, { userId, limit });\r\n return results.map((r) => ({ ...r, hydeDocument: \"\" }));\r\n }\r\n\r\n // Step 3 & 4: Run vector search (with HyDE embedding) and keyword search (with original query) in parallel\r\n const fetchLimit = limit * 2; // Over-fetch for better fusion\r\n\r\n const [vectorResults, kwResults] = await Promise.all([\r\n vectorSearchWithEmbedding(hydeEmbedding, userId, fetchLimit),\r\n 
keywordSearch(query, userId, fetchLimit),\r\n ]);\r\n\r\n // Build a map of all unique results keyed by id\r\n const allResults = new Map<string, any>();\r\n for (const r of vectorResults) {\r\n allResults.set(r.id, r);\r\n }\r\n for (const r of kwResults) {\r\n if (!allResults.has(r.id)) {\r\n allResults.set(r.id, r);\r\n }\r\n }\r\n\r\n // Step 5: RRF fusion\r\n const rrfScores = reciprocalRankFusion([vectorResults, kwResults]);\r\n\r\n // Build final results\r\n const finalResults: HyDESearchResult[] = [];\r\n for (const [id, rrfScore] of rrfScores) {\r\n const data = allResults.get(id);\r\n if (!data) continue;\r\n\r\n finalResults.push({\r\n id: data.id,\r\n userId: data.user_id,\r\n type: data.type,\r\n content: data.content,\r\n importance: data.importance || 5,\r\n source: data.source,\r\n provenance: data.provenance,\r\n similarity: data.similarity || 0,\r\n keywordRank: data.keywordRank || data.keyword_rank || 0,\r\n rrfScore,\r\n createdAt: data.created_at,\r\n hydeDocument,\r\n });\r\n }\r\n\r\n // Sort by RRF score descending and take top N\r\n return finalResults\r\n .sort((a, b) => b.rrfScore - a.rrfScore)\r\n .slice(0, limit);\r\n}\r\n","/**\n * Retrieval Cache — Redis-backed cache for RAG pipeline search results\n *\n * Caches hybrid search results keyed by embedding hash to avoid\n * redundant vector searches. 
Feature-gated behind RETRIEVAL_CACHE_ENABLED.\n */\n\nimport Redis from \"ioredis\";\nimport { createHash } from \"crypto\";\nimport { env } from \"../../config/env\";\nimport type { HybridSearchResult } from \"./hybrid-search\";\n\n// ---------------------------------------------------------------------------\n// Types\n// ---------------------------------------------------------------------------\n\nexport interface CachedResult {\n results: HybridSearchResult[];\n cachedAt: number;\n queryHash: string;\n}\n\nexport interface CacheOptions {\n ttlSeconds?: number;\n maxCacheSize?: number;\n}\n\n// ---------------------------------------------------------------------------\n// Constants\n// ---------------------------------------------------------------------------\n\nconst KEY_PREFIX = \"rag:cache:\";\nconst DEFAULT_TTL_SECONDS = 3600;\n\n// ---------------------------------------------------------------------------\n// RetrievalCache\n// ---------------------------------------------------------------------------\n\nexport class RetrievalCache {\n private redis: Redis | null = null;\n private connecting = false;\n\n /**\n * Lazily create / return a Redis client.\n * Returns null if Redis is unavailable.\n */\n private async getClient(): Promise<Redis | null> {\n if (this.redis) return this.redis;\n if (this.connecting) return null;\n\n try {\n this.connecting = true;\n const client = new Redis(env.REDIS_URL, {\n maxRetriesPerRequest: null,\n lazyConnect: true,\n });\n await client.connect();\n this.redis = client;\n return this.redis;\n } catch {\n this.redis = null;\n return null;\n } finally {\n this.connecting = false;\n }\n }\n\n /**\n * Hash an embedding vector for use as a cache key.\n *\n * Rounds each component to 4 decimal places to tolerate minor\n * floating-point differences, then produces a SHA-256 hex digest.\n */\n private hashEmbedding(embedding: number[]): string {\n const rounded = embedding.map((v) => v.toFixed(4)).join(\",\");\n return 
createHash(\"sha256\").update(rounded).digest(\"hex\");\n }\n\n /**\n * Look up cached search results for the given embedding.\n * Returns null on cache miss or if the cache is disabled / unavailable.\n */\n async getCachedResults(queryEmbedding: number[]): Promise<CachedResult | null> {\n if (!env.RETRIEVAL_CACHE_ENABLED) return null;\n\n try {\n const client = await this.getClient();\n if (!client) return null;\n\n const hash = this.hashEmbedding(queryEmbedding);\n const key = `${KEY_PREFIX}${hash}`;\n const raw = await client.get(key);\n\n if (!raw) return null;\n\n const cached: CachedResult = JSON.parse(raw);\n\n // Rehydrate Date objects that were serialised as strings\n cached.results = cached.results.map((r) => ({\n ...r,\n createdAt: new Date(r.createdAt),\n }));\n\n return cached;\n } catch {\n return null;\n }\n }\n\n /**\n * Store search results in Redis, keyed by embedding hash.\n *\n * @param queryEmbedding - The embedding vector used for the search.\n * @param results - The hybrid search results to cache.\n * @param ttl - Time-to-live in seconds (default 3600).\n */\n async cacheResults(\n queryEmbedding: number[],\n results: HybridSearchResult[],\n ttl: number = DEFAULT_TTL_SECONDS,\n ): Promise<void> {\n if (!env.RETRIEVAL_CACHE_ENABLED) return;\n\n try {\n const client = await this.getClient();\n if (!client) return;\n\n const hash = this.hashEmbedding(queryEmbedding);\n const key = `${KEY_PREFIX}${hash}`;\n\n const entry: CachedResult = {\n results,\n cachedAt: Date.now(),\n queryHash: hash,\n };\n\n await client.set(key, JSON.stringify(entry), \"EX\", ttl);\n } catch {\n // Fail silently — caching is best-effort\n }\n }\n\n /**\n * Remove cache entries matching a key pattern.\n *\n * @param pattern - Glob pattern appended to the key prefix.\n * Defaults to `*` (all retrieval cache keys).\n * @returns The number of keys deleted.\n */\n async invalidateCache(pattern?: string): Promise<number> {\n try {\n const client = await this.getClient();\n if 
(!client) return 0;\n\n const scanPattern = `${KEY_PREFIX}${pattern ?? \"*\"}`;\n let deleted = 0;\n let cursor = \"0\";\n\n do {\n const [nextCursor, keys] = await client.scan(\n cursor,\n \"MATCH\",\n scanPattern,\n \"COUNT\",\n 100,\n );\n cursor = nextCursor;\n\n if (keys.length > 0) {\n deleted += await client.del(...keys);\n }\n } while (cursor !== \"0\");\n\n return deleted;\n } catch {\n return 0;\n }\n }\n}\n\n// ---------------------------------------------------------------------------\n// Singleton factory\n// ---------------------------------------------------------------------------\n\nlet instance: RetrievalCache | null = null;\n\n/**\n * Lazy singleton factory.\n * Returns the same RetrievalCache instance across the process lifetime.\n */\nexport function getRetrievalCache(): RetrievalCache {\n if (!instance) {\n instance = new RetrievalCache();\n }\n return instance;\n}\n","/**\n * Cross-Encoder Re-ranking — LLM-as-Judge relevance scoring\n *\n * After initial retrieval (vector + keyword + graph), this module sends\n * query-document pairs to an LLM in batches and asks it to score relevance\n * on a 0-10 scale. Results are then re-sorted by true semantic relevance\n * rather than embedding distance alone.\n *\n * Feature-gated behind env.RERANK_ENABLED.\n */\n\nimport { env } from \"../../config/env\";\nimport { providerRegistry } from \"../providers\";\nimport type { HybridSearchResult } from \"./hybrid-search\";\n\n// ---------------------------------------------------------------------------\n// Types\n// ---------------------------------------------------------------------------\n\nexport interface RankedResult extends HybridSearchResult {\n /** Relevance score assigned by the cross-encoder LLM judge (0-10) */\n rerankScore: number;\n}\n\nexport interface RerankOptions {\n /** Maximum number of results to return after re-ranking */\n topK?: number;\n /** Minimum relevance score to keep a result (0-10). 
Defaults to env.RERANK_MIN_SCORE */\n minScore?: number;\n /** Override the model used for scoring */\n model?: string;\n}\n\n// ---------------------------------------------------------------------------\n// Constants\n// ---------------------------------------------------------------------------\n\nconst DEFAULT_BATCH_SIZE = 5;\nconst DEFAULT_MAX_TOKENS = 100;\nconst DEFAULT_SCORE = 5;\nconst DEFAULT_MODEL = \"claude-sonnet-4-20250514\";\n\n// ---------------------------------------------------------------------------\n// Prompt\n// ---------------------------------------------------------------------------\n\nfunction buildScoringPrompt(query: string, documents: string[]): string {\n const docList = documents\n .map((doc, i) => `${i + 1}. ${doc}`)\n .join(\"\\n\");\n\n return (\n `You are a relevance judge. Given a search query and a list of retrieved documents, ` +\n `rate each document's relevance to the query on a scale of 0-10.\\n\\n` +\n `Query: ${query}\\n\\n` +\n `Documents:\\n${docList}\\n\\n` +\n `Return a JSON array of scores: [score1, score2, ...]\\n` +\n `Only return the JSON array, nothing else.`\n );\n}\n\n// ---------------------------------------------------------------------------\n// Score parsing\n// ---------------------------------------------------------------------------\n\n/**\n * Parse the LLM response into an array of numeric scores.\n * First tries JSON.parse; if that fails, falls back to extracting numbers\n * from the raw text.\n */\nfunction parseScores(text: string, expectedCount: number): number[] {\n // Try strict JSON parse first\n try {\n const parsed = JSON.parse(text.trim());\n if (Array.isArray(parsed)) {\n return parsed.map((s) => {\n const n = Number(s);\n return Number.isFinite(n) ? 
Math.min(10, Math.max(0, n)) : DEFAULT_SCORE;\n });\n }\n } catch {\n // Fall through to regex extraction\n }\n\n // Fallback: extract all numbers from the text\n const matches = text.match(/\\d+(?:\\.\\d+)?/g);\n if (matches && matches.length > 0) {\n return matches.slice(0, expectedCount).map((m) => {\n const n = Number(m);\n return Number.isFinite(n) ? Math.min(10, Math.max(0, n)) : DEFAULT_SCORE;\n });\n }\n\n // Last resort: return default scores\n return new Array(expectedCount).fill(DEFAULT_SCORE);\n}\n\n// ---------------------------------------------------------------------------\n// Batch re-ranking\n// ---------------------------------------------------------------------------\n\n/**\n * Internal helper: groups results into batches and sends each batch as a\n * single LLM call for efficiency.\n */\nexport async function batchRerank(\n query: string,\n results: HybridSearchResult[],\n batchSize: number = DEFAULT_BATCH_SIZE,\n model?: string\n): Promise<RankedResult[]> {\n const provider = providerRegistry.getDefault();\n const resolvedModel = model || DEFAULT_MODEL;\n const rankedResults: RankedResult[] = [];\n\n // Split results into batches\n const batches: HybridSearchResult[][] = [];\n for (let i = 0; i < results.length; i += batchSize) {\n batches.push(results.slice(i, i + batchSize));\n }\n\n // Process each batch\n const batchPromises = batches.map(async (batch) => {\n const documents = batch.map((r) => r.content);\n const prompt = buildScoringPrompt(query, documents);\n\n try {\n const response = await provider.createMessage({\n model: resolvedModel,\n max_tokens: DEFAULT_MAX_TOKENS,\n system: \"You are a relevance scoring assistant. 
Only output valid JSON.\",\n messages: [{ role: \"user\", content: prompt }],\n });\n\n // Extract text from response\n const responseText = response.content\n .filter((block) => block.type === \"text\")\n .map((block) => block.text || \"\")\n .join(\"\");\n\n const scores = parseScores(responseText, batch.length);\n\n return batch.map((result, idx) => ({\n ...result,\n rerankScore: idx < scores.length ? scores[idx] : DEFAULT_SCORE,\n }));\n } catch (error) {\n // If LLM call fails, assign default score to all results in this batch\n console.warn(\n `[Reranker] LLM scoring failed for batch, assigning default score of ${DEFAULT_SCORE}:`,\n error instanceof Error ? error.message : error\n );\n return batch.map((result) => ({\n ...result,\n rerankScore: DEFAULT_SCORE,\n }));\n }\n });\n\n const batchResults = await Promise.all(batchPromises);\n for (const batch of batchResults) {\n rankedResults.push(...batch);\n }\n\n return rankedResults;\n}\n\n// ---------------------------------------------------------------------------\n// Public API\n// ---------------------------------------------------------------------------\n\n/**\n * Re-rank search results using an LLM as a cross-encoder relevance judge.\n *\n * Sends query + results to the LLM in batches, gets relevance scores (0-10),\n * filters by minimum score, and returns results sorted by relevance.\n *\n * If RERANK_ENABLED is false, returns results as-is with a default rerankScore.\n */\nexport async function rerank(\n query: string,\n results: HybridSearchResult[],\n opts?: RerankOptions\n): Promise<RankedResult[]> {\n try {\n const minScore = opts?.minScore ?? 
env.RERANK_MIN_SCORE;\n\n // If re-ranking is disabled, pass through with default scores\n if (!env.RERANK_ENABLED) {\n return results.map((r) => ({ ...r, rerankScore: DEFAULT_SCORE }));\n }\n\n // No point re-ranking 0 or 1 results\n if (results.length <= 1) {\n return results.map((r) => ({ ...r, rerankScore: 10 }));\n }\n\n // Score all results via batched LLM calls\n let ranked = await batchRerank(query, results, DEFAULT_BATCH_SIZE, opts?.model);\n\n // Filter out results below the minimum score threshold\n ranked = ranked.filter((r) => r.rerankScore >= minScore);\n\n // Sort by rerankScore descending\n ranked.sort((a, b) => b.rerankScore - a.rerankScore);\n\n // Apply topK limit if specified\n if (opts?.topK && opts.topK > 0) {\n ranked = ranked.slice(0, opts.topK);\n }\n\n return ranked;\n } catch (error) {\n // If anything goes wrong, return original results with default scores\n console.error(\n \"[Reranker] Re-ranking failed, returning original results:\",\n error instanceof Error ? error.message : error\n );\n return results.map((r) => ({ ...r, rerankScore: DEFAULT_SCORE }));\n }\n}\n","/**\n * Recursive / Multi-Step RAG\n *\n * After initial retrieval + re-ranking, this module analyses whether the\n * retrieved context fully answers the user's query. 
When gaps are detected\n * it generates targeted follow-up queries, retrieves additional memories,\n * re-ranks them, and merges them with the existing result set.\n *\n * The loop repeats up to `maxSteps` iterations (default from\n * env.MULTISTEP_MAX_STEPS, itself defaulting to 2).\n *\n * Feature-gated behind env.MULTISTEP_RAG_ENABLED.\n */\n\nimport { env } from \"../../config/env\";\nimport { providerRegistry } from \"../providers\";\nimport { hybridSearch, type HybridSearchResult } from \"./hybrid-search\";\nimport { rerank, type RankedResult } from \"./reranker\";\n\n// ---------------------------------------------------------------------------\n// Types\n// ---------------------------------------------------------------------------\n\nexport interface MultiStepOptions {\n maxSteps?: number;\n userId?: string;\n limit?: number;\n}\n\nexport interface MultiStepResult {\n results: RankedResult[];\n steps: number;\n followUpQueries: string[];\n}\n\nexport interface CompletenessEvaluation {\n complete: boolean;\n gaps: string[];\n followUpQueries: string[];\n}\n\n// ---------------------------------------------------------------------------\n// Constants\n// ---------------------------------------------------------------------------\n\nconst COMPLETENESS_MAX_TOKENS = 300;\nconst COMPLETENESS_MODEL = \"claude-sonnet-4-5-20250929\";\n\n// ---------------------------------------------------------------------------\n// Completeness evaluation\n// ---------------------------------------------------------------------------\n\n/**\n * Uses the LLM to judge whether the retrieved context fully answers the\n * query. 
Returns a structured evaluation with any identified gaps and\n * suggested follow-up queries.\n */\nexport async function evaluateCompleteness(\n query: string,\n context: string\n): Promise<CompletenessEvaluation> {\n try {\n const provider = providerRegistry.getDefault();\n\n const response = await provider.createMessage({\n model: COMPLETENESS_MODEL,\n max_tokens: COMPLETENESS_MAX_TOKENS,\n system: \"You are a retrieval evaluation assistant. Only output valid JSON.\",\n messages: [\n {\n role: \"user\",\n content:\n `Analyze whether the following context sufficiently answers the query. Return a JSON object with:\\n` +\n `- \"complete\": boolean (true if context fully answers the query)\\n` +\n `- \"gaps\": string[] (list of missing information)\\n` +\n `- \"followUpQueries\": string[] (search queries to fill the gaps, max 2)\\n\\n` +\n `Query: ${query}\\n\\n` +\n `Context:\\n${context}\\n\\n` +\n `Return only the JSON object.`,\n },\n ],\n });\n\n const responseText = response.content\n .filter((block) => block.type === \"text\")\n .map((block) => block.text || \"\")\n .join(\"\");\n\n const parsed = JSON.parse(responseText.trim());\n\n return {\n complete: Boolean(parsed.complete),\n gaps: Array.isArray(parsed.gaps) ? parsed.gaps : [],\n followUpQueries: Array.isArray(parsed.followUpQueries)\n ? parsed.followUpQueries.slice(0, 2)\n : [],\n };\n } catch (error) {\n // If LLM call or JSON parse fails, assume complete (stop iterating)\n console.warn(\n \"[MultiStepRAG] Completeness evaluation failed, assuming complete:\",\n error instanceof Error ? error.message : error\n );\n return { complete: true, gaps: [], followUpQueries: [] };\n }\n}\n\n// ---------------------------------------------------------------------------\n// Main multi-step retrieval\n// ---------------------------------------------------------------------------\n\n/**\n * Performs recursive multi-step retrieval-augmented generation.\n *\n * 1. 
Evaluates whether `initialResults` fully answer the `query`.\n * 2. If gaps are found, generates follow-up queries, retrieves more\n * context via hybrid search + re-ranking, merges and deduplicates.\n * 3. Repeats up to `maxSteps` times.\n *\n * Returns the merged, deduplicated, and sorted result set together with\n * metadata about how many steps were taken and which follow-up queries\n * were used.\n */\nexport async function multiStepRetrieve(\n query: string,\n initialResults: RankedResult[],\n opts?: MultiStepOptions\n): Promise<MultiStepResult> {\n // If feature is disabled, return initial results unchanged\n if (!env.MULTISTEP_RAG_ENABLED) {\n return {\n results: initialResults,\n steps: 0,\n followUpQueries: [],\n };\n }\n\n const maxSteps = opts?.maxSteps ?? env.MULTISTEP_MAX_STEPS;\n const userId = opts?.userId;\n const limit = opts?.limit ?? 10;\n\n // Use a Map keyed by memory id for deduplication\n const resultsById = new Map<string, RankedResult>();\n for (const r of initialResults) {\n resultsById.set(r.id, r);\n }\n\n const allFollowUpQueries: string[] = [];\n let stepsPerformed = 0;\n\n for (let step = 0; step < maxSteps; step++) {\n // Build context string from current result set\n const currentResults = Array.from(resultsById.values());\n const contextText = currentResults.map((r) => r.content).join(\"\\n\\n\");\n\n // Evaluate completeness\n const evaluation = await evaluateCompleteness(query, contextText);\n\n if (evaluation.complete || evaluation.followUpQueries.length === 0) {\n break;\n }\n\n stepsPerformed++;\n\n // Limit to 2 follow-up queries per step\n const followUps = evaluation.followUpQueries.slice(0, 2);\n allFollowUpQueries.push(...followUps);\n\n // Retrieve additional results for each follow-up query\n const retrievalPromises = followUps.map(async (followUpQuery) => {\n const searchResults = await hybridSearch(followUpQuery, {\n userId,\n limit,\n });\n const rankedResults = await rerank(followUpQuery, searchResults);\n return 
rankedResults;\n });\n\n const additionalResultSets = await Promise.all(retrievalPromises);\n\n // Merge new results, deduplicating by id\n for (const resultSet of additionalResultSets) {\n for (const r of resultSet) {\n if (!resultsById.has(r.id)) {\n resultsById.set(r.id, r);\n }\n }\n }\n }\n\n // Sort merged results by rerankScore descending\n const mergedResults = Array.from(resultsById.values()).sort(\n (a, b) => b.rerankScore - a.rerankScore\n );\n\n return {\n results: mergedResults,\n steps: stepsPerformed,\n followUpQueries: allFollowUpQueries,\n };\n}\n","/**\r\n * Enhanced Retrieval Pipeline Orchestrator\r\n *\r\n * Wires together all 5 RAG enhancement stages into a single composable\r\n * pipeline that degrades gracefully when individual features are disabled:\r\n *\r\n * 1. Contextual query rewrite (CONTEXTUAL_QUERY_ENABLED)\r\n * 2. HyDE embedding generation (HYDE_ENABLED)\r\n * 3. Cache check / store (RETRIEVAL_CACHE_ENABLED)\r\n * 4. Hybrid search (always)\r\n * 5. Cross-encoder re-ranking (RERANK_ENABLED)\r\n * 6. 
Multi-step gap filling (MULTISTEP_RAG_ENABLED)\r\n *\r\n * If every feature flag is off the pipeline reduces to a plain hybridSearch\r\n * call with a default rerankScore appended.\r\n */\r\n\r\nimport { env } from \"../../config/env\";\r\nimport { hybridSearch, type HybridSearchResult } from \"./hybrid-search\";\r\nimport { buildContextualQuery, type Message } from \"./contextual-query\";\r\nimport { hydeSearch } from \"./hyde\";\r\nimport { getRetrievalCache } from \"./retrieval-cache\";\r\nimport { rerank, type RankedResult } from \"./reranker\";\r\nimport { multiStepRetrieve } from \"./multi-step\";\r\nimport { generateEmbedding } from \"../memory\";\r\n\r\n// ---------------------------------------------------------------------------\r\n// Types\r\n// ---------------------------------------------------------------------------\r\n\r\nexport interface EnhancedRetrievalOptions {\r\n userId?: string;\r\n limit?: number;\r\n conversationHistory?: Message[];\r\n}\r\n\r\nexport interface EnhancedRetrievalResult {\r\n results: RankedResult[];\r\n cached: boolean;\r\n steps: number;\r\n queryUsed: string;\r\n}\r\n\r\n// ---------------------------------------------------------------------------\r\n// Helpers\r\n// ---------------------------------------------------------------------------\r\n\r\n/**\r\n * Convert HybridSearchResults to RankedResults by attaching a rerankScore\r\n * derived from the existing rrfScore.\r\n */\r\nfunction toRankedResults(\r\n results: HybridSearchResult[],\r\n defaultScore?: number,\r\n): RankedResult[] {\r\n return results.map((r) => ({\r\n ...r,\r\n rerankScore: defaultScore ?? 
r.rrfScore * 10,\r\n }));\r\n}\r\n\r\n// ---------------------------------------------------------------------------\r\n// Main orchestrator\r\n// ---------------------------------------------------------------------------\r\n\r\n/**\r\n * Run the full enhanced retrieval pipeline.\r\n *\r\n * Each stage is feature-gated and will be skipped when its flag is off.\r\n * On any unrecoverable error the function falls back to a plain\r\n * hybridSearch so callers always receive results.\r\n */\r\nexport async function enhancedRetrieve(\r\n query: string,\r\n opts?: EnhancedRetrievalOptions,\r\n): Promise<EnhancedRetrievalResult> {\r\n const userId = opts?.userId;\r\n const limit = opts?.limit ?? 10;\r\n const conversationHistory = opts?.conversationHistory;\r\n\r\n let steps = 0;\r\n\r\n try {\r\n // ------------------------------------------------------------------\r\n // Step 1 — Contextual Query Rewrite\r\n // ------------------------------------------------------------------\r\n let effectiveQuery = query;\r\n\r\n if (\r\n env.CONTEXTUAL_QUERY_ENABLED &&\r\n conversationHistory &&\r\n conversationHistory.length >= 2\r\n ) {\r\n console.log(\"[EnhancedRetrieval] Contextual query rewrite enabled, rewriting query...\");\r\n effectiveQuery = await buildContextualQuery(query, conversationHistory);\r\n steps++;\r\n console.log(`[EnhancedRetrieval] Rewritten query: \"${effectiveQuery}\"`);\r\n }\r\n\r\n // ------------------------------------------------------------------\r\n // Step 2a — Cache Check\r\n // ------------------------------------------------------------------\r\n let cached = false;\r\n let queryEmbedding: number[] | null = null;\r\n\r\n if (env.RETRIEVAL_CACHE_ENABLED) {\r\n console.log(\"[EnhancedRetrieval] Cache enabled, checking for cached results...\");\r\n queryEmbedding = await generateEmbedding(effectiveQuery);\r\n const cache = getRetrievalCache();\r\n const cachedResult = await cache.getCachedResults(queryEmbedding);\r\n\r\n if (cachedResult) {\r\n 
console.log(\"[EnhancedRetrieval] Cache hit, using cached results\");\r\n cached = true;\r\n steps++;\r\n\r\n let rankedResults = toRankedResults(cachedResult.results, 5);\r\n\r\n // Even on a cache hit we still run re-ranking and multi-step\r\n // so the caller gets the most relevant ordering.\r\n\r\n // Step 3 — Re-ranking (on cached results)\r\n if (env.RERANK_ENABLED) {\r\n console.log(\"[EnhancedRetrieval] Re-ranking cached results...\");\r\n rankedResults = await rerank(effectiveQuery, cachedResult.results);\r\n steps++;\r\n }\r\n\r\n // Step 4 — Multi-step gap filling (on cached + re-ranked results)\r\n if (env.MULTISTEP_RAG_ENABLED) {\r\n console.log(\"[EnhancedRetrieval] Multi-step retrieval on cached results...\");\r\n const multiStepResult = await multiStepRetrieve(effectiveQuery, rankedResults, { userId });\r\n rankedResults = multiStepResult.results;\r\n steps++;\r\n }\r\n\r\n return {\r\n results: rankedResults,\r\n cached: true,\r\n steps,\r\n queryUsed: effectiveQuery,\r\n };\r\n }\r\n\r\n console.log(\"[EnhancedRetrieval] Cache miss, proceeding to search\");\r\n }\r\n\r\n // ------------------------------------------------------------------\r\n // Step 2 — HyDE Search vs Regular Hybrid Search\r\n // ------------------------------------------------------------------\r\n let searchResults: HybridSearchResult[];\r\n\r\n if (env.HYDE_ENABLED) {\r\n console.log(\"[EnhancedRetrieval] HyDE enabled, generating hypothetical doc...\");\r\n searchResults = await hydeSearch(effectiveQuery, userId, limit);\r\n steps++;\r\n } else {\r\n console.log(\"[EnhancedRetrieval] Running hybrid search...\");\r\n searchResults = await hybridSearch(effectiveQuery, { userId, limit });\r\n steps++;\r\n }\r\n\r\n // ------------------------------------------------------------------\r\n // Step 3 — Re-ranking\r\n // ------------------------------------------------------------------\r\n let rankedResults: RankedResult[];\r\n\r\n if (env.RERANK_ENABLED) {\r\n 
console.log(\"[EnhancedRetrieval] Re-ranking results...\");\r\n rankedResults = await rerank(effectiveQuery, searchResults);\r\n steps++;\r\n } else {\r\n // Convert to RankedResults with a score derived from RRF\r\n rankedResults = toRankedResults(searchResults);\r\n }\r\n\r\n // ------------------------------------------------------------------\r\n // Step 3a — Cache Store\r\n // ------------------------------------------------------------------\r\n if (env.RETRIEVAL_CACHE_ENABLED && !cached) {\r\n console.log(\"[EnhancedRetrieval] Storing results in cache...\");\r\n\r\n // Reuse the embedding we already computed during the cache check.\r\n // If it was not computed yet (shouldn't happen given the flow above),\r\n // generate it now.\r\n if (!queryEmbedding) {\r\n queryEmbedding = await generateEmbedding(effectiveQuery);\r\n }\r\n\r\n const cache = getRetrievalCache();\r\n await cache.cacheResults(queryEmbedding, searchResults);\r\n }\r\n\r\n // ------------------------------------------------------------------\r\n // Step 4 — Multi-Step Gap Filling\r\n // ------------------------------------------------------------------\r\n if (env.MULTISTEP_RAG_ENABLED) {\r\n console.log(\"[EnhancedRetrieval] Multi-step retrieval enabled, filling gaps...\");\r\n const multiStepResult = await multiStepRetrieve(effectiveQuery, rankedResults, { userId });\r\n rankedResults = multiStepResult.results;\r\n steps++;\r\n }\r\n\r\n return {\r\n results: rankedResults,\r\n cached,\r\n steps,\r\n queryUsed: effectiveQuery,\r\n };\r\n } catch (error) {\r\n // ------------------------------------------------------------------\r\n // Fallback — plain hybrid search so we never return nothing\r\n // ------------------------------------------------------------------\r\n console.error(\r\n \"[EnhancedRetrieval] Pipeline failed, falling back to plain hybrid search:\",\r\n error instanceof Error ? 
error.message : error,\r\n );\r\n\r\n try {\r\n const fallbackResults = await hybridSearch(query, { userId, limit });\r\n return {\r\n results: toRankedResults(fallbackResults, 5),\r\n cached: false,\r\n steps: 0,\r\n queryUsed: query,\r\n };\r\n } catch (fallbackError) {\r\n console.error(\r\n \"[EnhancedRetrieval] Fallback hybrid search also failed:\",\r\n fallbackError instanceof Error ? fallbackError.message : fallbackError,\r\n );\r\n return {\r\n results: [],\r\n cached: false,\r\n steps: 0,\r\n queryUsed: query,\r\n };\r\n }\r\n }\r\n}\r\n"],"mappings":";;;;;;;;;;;;;;;;;AAYA,SAAS,WAAW;AA2BpB,IAAM,QAAQ;AAKd,eAAsB,aACpB,OACA,QACA,QAAQ,IACqF;AAC7F,QAAM,iBAAiB,MAAM,kBAAkB,KAAK;AAEpD,QAAM,UAAU,MAAM,GAAG,QAAQ;AAAA;AAAA;AAAA;AAAA,2BAIR,KAAK,UAAU,cAAc,CAAC;AAAA;AAAA,MAEnD,SAAS,sBAAsB,MAAM,KAAK,KAAK;AAAA,6BACxB,KAAK,UAAU,cAAc,CAAC;AAAA,YAC/C,KAAK;AAAA,GACd;AAED,SAAO;AACT;AAKA,eAAsB,cACpB,OACA,QACA,QAAQ,IACsF;AAC9F,QAAM,UAAU,MAAM,GAAG,QAAQ;AAAA;AAAA;AAAA;AAAA,0DAIuB,KAAK;AAAA;AAAA;AAAA,wDAGP,KAAK;AAAA,QACrD,SAAS,oBAAoB,MAAM,KAAK,KAAK;AAAA;AAAA,YAEzC,KAAK;AAAA,GACd;AAED,SAAQ,QAAkB,IAAI,CAAC,OAAY;AAAA,IACzC,GAAG;AAAA,IACH,aAAa,EAAE;AAAA,EACjB,EAAE;AACJ;AAKA,eAAsB,qBACpB,OACA,QACA,QAAQ,IACqF;AAE7F,QAAM,WAAW,MAAM,GAAG,QAAQ;AAAA;AAAA;AAAA,uBAGb,MAAM,QAAQ,GAAG;AAAA,QAChC,SAAS,oBAAoB,MAAM,KAAK,KAAK;AAAA;AAAA,GAElD;AAED,MAAK,SAAmB,WAAW,GAAG;AACpC,WAAO,CAAC;AAAA,EACV;AAEA,QAAM,YAAa,SAAmB,IAAI,CAAC,MAAW,EAAE,EAAE;AAI1D,QAAM,cAAe,SAAmB,IAAI,CAAC,MAAW,EAAE,IAAI;AAC9D,QAAM,cAAc,YAAY,KAAK,GAAG;AAExC,QAAM,UAAU,MAAM,GAAG,QAAQ;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,uBAMZ,WAAW;AAAA,QAC1B,SAAS,oBAAoB,MAAM,KAAK,KAAK;AAAA;AAAA,YAEzC,KAAK;AAAA,GACd;AAED,SAAQ,QAAkB,IAAI,CAAC,OAAY;AAAA,IACzC,GAAG;AAAA,IACH,YAAY,EAAE;AAAA,EAChB,EAAE;AACJ;AAKA,SAAS,qBACP,aACA,IAAI,OACiB;AACrB,QAAM,SAAS,oBAAI,IAAoB;AAEvC,aAAW,QAAQ,aAAa;AAC9B,aAAS,OAAO,GAAG,OAAO,KAAK,QAAQ,QAAQ;AAC7C,YAAM,KAAK,KAAK,IAAI,EAAE;AACtB,YAAM,WAAW,KAAK,IAAI,OAAO;AACjC,aAAO,IAAI,KAAK,OAAO,IAAI,EAAE,KAAK,KAAK,QAAQ;AAAA,IACjD;AAAA,EACF;AAEA,SAAO;AAC
T;AAKA,eAAsB,aACpB,OACA,UAA+B,CAAC,GACD;AAC/B,QAAM;AAAA,IACJ;AAAA,IACA,QAAQ;AAAA,IACR;AAAA,IACA;AAAA,IACA,iBAAiB;AAAA,IACjB,eAAe;AAAA,EACjB,IAAI;AAGJ,QAAM,iBAAwC;AAAA,IAC5C,aAAa,OAAO,QAAQ,QAAQ,CAAC;AAAA;AAAA,EACvC;AAEA,MAAI,gBAAgB;AAClB,mBAAe,KAAK,cAAc,OAAO,QAAQ,QAAQ,CAAC,CAAC;AAAA,EAC7D;AAEA,MAAI,cAAc;AAChB,mBAAe,KAAK,qBAAqB,OAAO,QAAQ,KAAK,CAAC;AAAA,EAChE;AAEA,QAAM,UAAU,MAAM,QAAQ,IAAI,cAAc;AAChD,QAAM,CAAC,eAAe,gBAAgB,YAAY,IAAI;AAGtD,QAAM,aAAa,oBAAI,IAAiB;AACxC,aAAW,KAAK,iBAAiB,CAAC,GAAG;AACnC,eAAW,IAAI,EAAE,IAAI,CAAC;AAAA,EACxB;AACA,aAAW,KAAK,kBAAkB,CAAC,GAAG;AACpC,QAAI,CAAC,WAAW,IAAI,EAAE,EAAE,EAAG,YAAW,IAAI,EAAE,IAAI,CAAC;AAAA,EACnD;AACA,aAAW,KAAK,gBAAgB,CAAC,GAAG;AAClC,QAAI,CAAC,WAAW,IAAI,EAAE,EAAE,EAAG,YAAW,IAAI,EAAE,IAAI,CAAC;AAAA,EACnD;AAGA,QAAM,cAAc,CAAC,iBAAiB,CAAC,CAAC;AACxC,MAAI,eAAgB,aAAY,KAAK,cAAc;AACnD,MAAI,aAAc,aAAY,KAAK,YAAY;AAE/C,QAAM,YAAY,qBAAqB,WAAW;AAGlD,MAAI,eAAqC,CAAC;AAC1C,aAAW,CAAC,IAAI,QAAQ,KAAK,WAAW;AACtC,UAAM,OAAO,WAAW,IAAI,EAAE;AAC9B,QAAI,CAAC,KAAM;AAEX,iBAAa,KAAK;AAAA,MAChB,IAAI,KAAK;AAAA,MACT,QAAQ,KAAK;AAAA,MACb,MAAM,KAAK;AAAA,MACX,SAAS,KAAK;AAAA,MACd,YAAY,KAAK,cAAc;AAAA,MAC/B,QAAQ,KAAK;AAAA,MACb,YAAY,KAAK;AAAA,MACjB,YAAY,KAAK,cAAc;AAAA,MAC/B,aAAa,KAAK,eAAe,KAAK,gBAAgB;AAAA,MACtD;AAAA,MACA,WAAW,KAAK;AAAA,IAClB,CAAC;AAAA,EACH;AAGA,MAAI,OAAO;AACT,mBAAe,aAAa,OAAO,CAAC,MAAM,IAAI,KAAK,EAAE,SAAS,KAAK,KAAK;AAAA,EAC1E;AACA,MAAI,OAAO;AACT,mBAAe,aAAa,OAAO,CAAC,MAAM,IAAI,KAAK,EAAE,SAAS,KAAK,KAAK;AAAA,EAC1E;AAGA,SAAO,aACJ,KAAK,CAAC,GAAG,MAAM,EAAE,WAAW,EAAE,QAAQ,EACtC,MAAM,GAAG,KAAK;AACnB;;;ACjNA,IAAM,+BAA+B;AACrC,IAAM,aAAa;AAEnB,IAAM,gBACJ;AAmBF,eAAsB,qBACpB,OACA,qBACA,MACiB;AACjB,MAAI;AAEF,QAAI,CAAC,IAAI,0BAA0B;AACjC,aAAO;AAAA,IACT;AAGA,QAAI,CAAC,uBAAuB,oBAAoB,SAAS,GAAG;AAC1D,aAAO;AAAA,IACT;AAEA,UAAM,aAAa,MAAM,sBAAsB;AAG/C,UAAM,gBAAgB,oBAAoB,MAAM,CAAC,UAAU;AAG3D,UAAM,mBAAmB,cACtB,IAAI,CAAC,QAAQ,GAAG,IAAI,SAAS,SAAS,SAAS,WAAW,KAAK,IAAI,OAAO,EAAE,EAC5E,KAAK,IAAI;AAEZ,UAAM,aACJ;AAAA,EAAyB,gBAAgB;AAAA;AAAA;AAAA,EAAkC,KAAK;AAElF,UAAM,WAAW,iBAAiB,WAAW;AA
C7C,UAAM,QAAQ,MAAM,SAAS;AAE7B,UAAM,WAAW,MAAM,SAAS,cAAc;AAAA,MAC5C;AAAA,MACA,YAAY;AAAA,MACZ,QAAQ;AAAA,MACR,UAAU;AAAA,QACR;AAAA,UACE,MAAM;AAAA,UACN,SAAS;AAAA,QACX;AAAA,MACF;AAAA,IACF,CAAC;AAGD,UAAM,iBAAiB,SAAS,QAC7B,OAAO,CAAC,UAAU,MAAM,SAAS,UAAU,MAAM,IAAI,EACrD,IAAI,CAAC,UAAU,MAAM,IAAK,EAC1B,KAAK,EAAE,EACP,KAAK;AAGR,QAAI,CAAC,gBAAgB;AACnB,aAAO;AAAA,IACT;AAEA,WAAO;AAAA,EACT,QAAQ;AAEN,WAAO;AAAA,EACT;AACF;;;AC/FA,SAAS,OAAAA,YAAW;AA8BpB,IAAMC,SAAQ;AAEd,IAAM,wBACJ;AAEF,IAAM,qBAAqB;AAc3B,eAAsB,6BACpB,OACA,MACiB;AACjB,QAAM,eAAe,MAAM,gBAAgB;AAC3C,QAAM,YAAY,MAAM,aAAa;AAErC,QAAM,WAAW,iBAAiB,WAAW;AAE7C,QAAM,WAAW,MAAM,SAAS,cAAc;AAAA,IAC5C,OAAO;AAAA,IACP,YAAY;AAAA,IACZ,QAAQ;AAAA,IACR,UAAU;AAAA,MACR;AAAA,QACE,MAAM;AAAA,QACN,SAAS;AAAA,MACX;AAAA,IACF;AAAA,EACF,CAAC;AAGD,QAAM,OAAO,SAAS,QACnB,OAAO,CAAC,UAAU,MAAM,SAAS,MAAM,EACvC,IAAI,CAAC,UAAU,MAAM,QAAQ,EAAE,EAC/B,KAAK,EAAE;AAEV,SAAO;AACT;AAWA,eAAe,0BACb,WACA,QACA,QAAQ,IAaR;AACA,QAAM,eAAe,KAAK,UAAU,SAAS;AAE7C,QAAM,UAAU,MAAM,GAAG,QAAQC;AAAA;AAAA;AAAA;AAAA,2BAIR,YAAY;AAAA;AAAA,MAEjC,SAASA,uBAAsB,MAAM,KAAKA,MAAK;AAAA,6BACxB,YAAY;AAAA,YAC7B,KAAK;AAAA,GACd;AAED,SAAO;AACT;AAUA,SAASC,sBACP,aACA,IAAIF,QACiB;AACrB,QAAM,SAAS,oBAAI,IAAoB;AAEvC,aAAW,QAAQ,aAAa;AAC9B,aAAS,OAAO,GAAG,OAAO,KAAK,QAAQ,QAAQ;AAC7C,YAAM,KAAK,KAAK,IAAI,EAAE;AACtB,YAAM,WAAW,KAAK,IAAI,OAAO;AACjC,aAAO,IAAI,KAAK,OAAO,IAAI,EAAE,KAAK,KAAK,QAAQ;AAAA,IACjD;AAAA,EACF;AAEA,SAAO;AACT;AAkBA,eAAsB,WACpB,OACA,QACA,QAAQ,IACqB;AAE7B,MAAI,CAAC,IAAI,cAAc;AACrB,UAAM,UAAU,MAAM,aAAa,OAAO,EAAE,QAAQ,MAAM,CAAC;AAC3D,WAAO,QAAQ,IAAI,CAAC,OAAO,EAAE,GAAG,GAAG,cAAc,GAAG,EAAE;AAAA,EACxD;AAGA,MAAI;AACJ,MAAI;AACF,mBAAe,MAAM,6BAA6B,KAAK;AAAA,EACzD,SAAS,OAAO;AACd,YAAQ,KAAK,gEAAgE,KAAK;AAClF,UAAM,UAAU,MAAM,aAAa,OAAO,EAAE,QAAQ,MAAM,CAAC;AAC3D,WAAO,QAAQ,IAAI,CAAC,OAAO,EAAE,GAAG,GAAG,cAAc,GAAG,EAAE;AAAA,EACxD;AAEA,MAAI,CAAC,gBAAgB,aAAa,KAAK,EAAE,WAAW,GAAG;AACrD,YAAQ,KAAK,mEAAmE;AAChF,UAAM,UAAU,MAAM,aAAa,OAAO,EAAE,QAAQ,MAAM,CAAC;AAC3D,WAAO,QAAQ,IAAI,CAAC,OAAO,EAAE,GAAG,GAAG,cAAc,GAAG,EAAE;AAAA,EACxD;AAGA,MAAI;AACJ
,MAAI;AACF,oBAAgB,MAAM,kBAAkB,YAAY;AAAA,EACtD,SAAS,OAAO;AACd,YAAQ,KAAK,sEAAsE,KAAK;AACxF,UAAM,UAAU,MAAM,aAAa,OAAO,EAAE,QAAQ,MAAM,CAAC;AAC3D,WAAO,QAAQ,IAAI,CAAC,OAAO,EAAE,GAAG,GAAG,cAAc,GAAG,EAAE;AAAA,EACxD;AAGA,QAAM,aAAa,QAAQ;AAE3B,QAAM,CAAC,eAAe,SAAS,IAAI,MAAM,QAAQ,IAAI;AAAA,IACnD,0BAA0B,eAAe,QAAQ,UAAU;AAAA,IAC3D,cAAc,OAAO,QAAQ,UAAU;AAAA,EACzC,CAAC;AAGD,QAAM,aAAa,oBAAI,IAAiB;AACxC,aAAW,KAAK,eAAe;AAC7B,eAAW,IAAI,EAAE,IAAI,CAAC;AAAA,EACxB;AACA,aAAW,KAAK,WAAW;AACzB,QAAI,CAAC,WAAW,IAAI,EAAE,EAAE,GAAG;AACzB,iBAAW,IAAI,EAAE,IAAI,CAAC;AAAA,IACxB;AAAA,EACF;AAGA,QAAM,YAAYE,sBAAqB,CAAC,eAAe,SAAS,CAAC;AAGjE,QAAM,eAAmC,CAAC;AAC1C,aAAW,CAAC,IAAI,QAAQ,KAAK,WAAW;AACtC,UAAM,OAAO,WAAW,IAAI,EAAE;AAC9B,QAAI,CAAC,KAAM;AAEX,iBAAa,KAAK;AAAA,MAChB,IAAI,KAAK;AAAA,MACT,QAAQ,KAAK;AAAA,MACb,MAAM,KAAK;AAAA,MACX,SAAS,KAAK;AAAA,MACd,YAAY,KAAK,cAAc;AAAA,MAC/B,QAAQ,KAAK;AAAA,MACb,YAAY,KAAK;AAAA,MACjB,YAAY,KAAK,cAAc;AAAA,MAC/B,aAAa,KAAK,eAAe,KAAK,gBAAgB;AAAA,MACtD;AAAA,MACA,WAAW,KAAK;AAAA,MAChB;AAAA,IACF,CAAC;AAAA,EACH;AAGA,SAAO,aACJ,KAAK,CAAC,GAAG,MAAM,EAAE,WAAW,EAAE,QAAQ,EACtC,MAAM,GAAG,KAAK;AACnB;;;ACtQA,OAAO,WAAW;AAClB,SAAS,kBAAkB;AAuB3B,IAAM,aAAa;AACnB,IAAM,sBAAsB;AAMrB,IAAM,iBAAN,MAAqB;AAAA,EAClB,QAAsB;AAAA,EACtB,aAAa;AAAA;AAAA;AAAA;AAAA;AAAA,EAMrB,MAAc,YAAmC;AAC/C,QAAI,KAAK,MAAO,QAAO,KAAK;AAC5B,QAAI,KAAK,WAAY,QAAO;AAE5B,QAAI;AACF,WAAK,aAAa;AAClB,YAAM,SAAS,IAAI,MAAM,IAAI,WAAW;AAAA,QACtC,sBAAsB;AAAA,QACtB,aAAa;AAAA,MACf,CAAC;AACD,YAAM,OAAO,QAAQ;AACrB,WAAK,QAAQ;AACb,aAAO,KAAK;AAAA,IACd,QAAQ;AACN,WAAK,QAAQ;AACb,aAAO;AAAA,IACT,UAAE;AACA,WAAK,aAAa;AAAA,IACpB;AAAA,EACF;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,EAQQ,cAAc,WAA6B;AACjD,UAAM,UAAU,UAAU,IAAI,CAAC,MAAM,EAAE,QAAQ,CAAC,CAAC,EAAE,KAAK,GAAG;AAC3D,WAAO,WAAW,QAAQ,EAAE,OAAO,OAAO,EAAE,OAAO,KAAK;AAAA,EAC1D;AAAA;AAAA;AAAA;AAAA;AAAA,EAMA,MAAM,iBAAiB,gBAAwD;AAC7E,QAAI,CAAC,IAAI,wBAAyB,QAAO;AAEzC,QAAI;AACF,YAAM,SAAS,MAAM,KAAK,UAAU;AACpC,UAAI,CAAC,OAAQ,QAAO;AAEpB,YAAM,OAAO,KAAK,cAAc,cAAc;AAC9C,YAAM,MAAM,GAAG,UAAU,GAAG,IAAI;AAChC,YAAM,MAAM,MAAM,OAAO,IAAI,GAAG;
AAEhC,UAAI,CAAC,IAAK,QAAO;AAEjB,YAAM,SAAuB,KAAK,MAAM,GAAG;AAG3C,aAAO,UAAU,OAAO,QAAQ,IAAI,CAAC,OAAO;AAAA,QAC1C,GAAG;AAAA,QACH,WAAW,IAAI,KAAK,EAAE,SAAS;AAAA,MACjC,EAAE;AAEF,aAAO;AAAA,IACT,QAAQ;AACN,aAAO;AAAA,IACT;AAAA,EACF;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,EASA,MAAM,aACJ,gBACA,SACA,MAAc,qBACC;AACf,QAAI,CAAC,IAAI,wBAAyB;AAElC,QAAI;AACF,YAAM,SAAS,MAAM,KAAK,UAAU;AACpC,UAAI,CAAC,OAAQ;AAEb,YAAM,OAAO,KAAK,cAAc,cAAc;AAC9C,YAAM,MAAM,GAAG,UAAU,GAAG,IAAI;AAEhC,YAAM,QAAsB;AAAA,QAC1B;AAAA,QACA,UAAU,KAAK,IAAI;AAAA,QACnB,WAAW;AAAA,MACb;AAEA,YAAM,OAAO,IAAI,KAAK,KAAK,UAAU,KAAK,GAAG,MAAM,GAAG;AAAA,IACxD,QAAQ;AAAA,IAER;AAAA,EACF;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,EASA,MAAM,gBAAgB,SAAmC;AACvD,QAAI;AACF,YAAM,SAAS,MAAM,KAAK,UAAU;AACpC,UAAI,CAAC,OAAQ,QAAO;AAEpB,YAAM,cAAc,GAAG,UAAU,GAAG,WAAW,GAAG;AAClD,UAAI,UAAU;AACd,UAAI,SAAS;AAEb,SAAG;AACD,cAAM,CAAC,YAAY,IAAI,IAAI,MAAM,OAAO;AAAA,UACtC;AAAA,UACA;AAAA,UACA;AAAA,UACA;AAAA,UACA;AAAA,QACF;AACA,iBAAS;AAET,YAAI,KAAK,SAAS,GAAG;AACnB,qBAAW,MAAM,OAAO,IAAI,GAAG,IAAI;AAAA,QACrC;AAAA,MACF,SAAS,WAAW;AAEpB,aAAO;AAAA,IACT,QAAQ;AACN,aAAO;AAAA,IACT;AAAA,EACF;AACF;AAMA,IAAI,WAAkC;AAM/B,SAAS,oBAAoC;AAClD,MAAI,CAAC,UAAU;AACb,eAAW,IAAI,eAAe;AAAA,EAChC;AACA,SAAO;AACT;;;AC9JA,IAAM,qBAAqB;AAC3B,IAAMC,sBAAqB;AAC3B,IAAM,gBAAgB;AACtB,IAAM,gBAAgB;AAMtB,SAAS,mBAAmB,OAAe,WAA6B;AACtE,QAAM,UAAU,UACb,IAAI,CAAC,KAAK,MAAM,GAAG,IAAI,CAAC,KAAK,GAAG,EAAE,EAClC,KAAK,IAAI;AAEZ,SACE;AAAA;AAAA,SAEU,KAAK;AAAA;AAAA;AAAA,EACA,OAAO;AAAA;AAAA;AAAA;AAI1B;AAWA,SAAS,YAAY,MAAc,eAAiC;AAElE,MAAI;AACF,UAAM,SAAS,KAAK,MAAM,KAAK,KAAK,CAAC;AACrC,QAAI,MAAM,QAAQ,MAAM,GAAG;AACzB,aAAO,OAAO,IAAI,CAAC,MAAM;AACvB,cAAM,IAAI,OAAO,CAAC;AAClB,eAAO,OAAO,SAAS,CAAC,IAAI,KAAK,IAAI,IAAI,KAAK,IAAI,GAAG,CAAC,CAAC,IAAI;AAAA,MAC7D,CAAC;AAAA,IACH;AAAA,EACF,QAAQ;AAAA,EAER;AAGA,QAAM,UAAU,KAAK,MAAM,gBAAgB;AAC3C,MAAI,WAAW,QAAQ,SAAS,GAAG;AACjC,WAAO,QAAQ,MAAM,GAAG,aAAa,EAAE,IAAI,CAAC,MAAM;AAChD,YAAM,IAAI,OAAO,CAAC;AAClB,aAAO,OAAO,SAAS,CAAC,IAAI,KAAK,IAAI,IAAI,KAAK,IAAI,GAAG,CAAC,CAAC,IAAI;AAAA,IAC7D,CAAC;AAAA,EA
CH;AAGA,SAAO,IAAI,MAAM,aAAa,EAAE,KAAK,aAAa;AACpD;AAUA,eAAsB,YACpB,OACA,SACA,YAAoB,oBACpB,OACyB;AACzB,QAAM,WAAW,iBAAiB,WAAW;AAC7C,QAAM,gBAAgB,SAAS;AAC/B,QAAM,gBAAgC,CAAC;AAGvC,QAAM,UAAkC,CAAC;AACzC,WAAS,IAAI,GAAG,IAAI,QAAQ,QAAQ,KAAK,WAAW;AAClD,YAAQ,KAAK,QAAQ,MAAM,GAAG,IAAI,SAAS,CAAC;AAAA,EAC9C;AAGA,QAAM,gBAAgB,QAAQ,IAAI,OAAO,UAAU;AACjD,UAAM,YAAY,MAAM,IAAI,CAAC,MAAM,EAAE,OAAO;AAC5C,UAAM,SAAS,mBAAmB,OAAO,SAAS;AAElD,QAAI;AACF,YAAM,WAAW,MAAM,SAAS,cAAc;AAAA,QAC5C,OAAO;AAAA,QACP,YAAYA;AAAA,QACZ,QAAQ;AAAA,QACR,UAAU,CAAC,EAAE,MAAM,QAAQ,SAAS,OAAO,CAAC;AAAA,MAC9C,CAAC;AAGD,YAAM,eAAe,SAAS,QAC3B,OAAO,CAAC,UAAU,MAAM,SAAS,MAAM,EACvC,IAAI,CAAC,UAAU,MAAM,QAAQ,EAAE,EAC/B,KAAK,EAAE;AAEV,YAAM,SAAS,YAAY,cAAc,MAAM,MAAM;AAErD,aAAO,MAAM,IAAI,CAAC,QAAQ,SAAS;AAAA,QACjC,GAAG;AAAA,QACH,aAAa,MAAM,OAAO,SAAS,OAAO,GAAG,IAAI;AAAA,MACnD,EAAE;AAAA,IACJ,SAAS,OAAO;AAEd,cAAQ;AAAA,QACN,uEAAuE,aAAa;AAAA,QACpF,iBAAiB,QAAQ,MAAM,UAAU;AAAA,MAC3C;AACA,aAAO,MAAM,IAAI,CAAC,YAAY;AAAA,QAC5B,GAAG;AAAA,QACH,aAAa;AAAA,MACf,EAAE;AAAA,IACJ;AAAA,EACF,CAAC;AAED,QAAM,eAAe,MAAM,QAAQ,IAAI,aAAa;AACpD,aAAW,SAAS,cAAc;AAChC,kBAAc,KAAK,GAAG,KAAK;AAAA,EAC7B;AAEA,SAAO;AACT;AAcA,eAAsB,OACpB,OACA,SACA,MACyB;AACzB,MAAI;AACF,UAAM,WAAW,MAAM,YAAY,IAAI;AAGvC,QAAI,CAAC,IAAI,gBAAgB;AACvB,aAAO,QAAQ,IAAI,CAAC,OAAO,EAAE,GAAG,GAAG,aAAa,cAAc,EAAE;AAAA,IAClE;AAGA,QAAI,QAAQ,UAAU,GAAG;AACvB,aAAO,QAAQ,IAAI,CAAC,OAAO,EAAE,GAAG,GAAG,aAAa,GAAG,EAAE;AAAA,IACvD;AAGA,QAAI,SAAS,MAAM,YAAY,OAAO,SAAS,oBAAoB,MAAM,KAAK;AAG9E,aAAS,OAAO,OAAO,CAAC,MAAM,EAAE,eAAe,QAAQ;AAGvD,WAAO,KAAK,CAAC,GAAG,MAAM,EAAE,cAAc,EAAE,WAAW;AAGnD,QAAI,MAAM,QAAQ,KAAK,OAAO,GAAG;AAC/B,eAAS,OAAO,MAAM,GAAG,KAAK,IAAI;AAAA,IACpC;AAEA,WAAO;AAAA,EACT,SAAS,OAAO;AAEd,YAAQ;AAAA,MACN;AAAA,MACA,iBAAiB,QAAQ,MAAM,UAAU;AAAA,IAC3C;AACA,WAAO,QAAQ,IAAI,CAAC,OAAO,EAAE,GAAG,GAAG,aAAa,cAAc,EAAE;AAAA,EAClE;AACF;;;AC/KA,IAAM,0BAA0B;AAChC,IAAM,qBAAqB;AAW3B,eAAsB,qBACpB,OACA,SACiC;AACjC,MAAI;AACF,UAAM,WAAW,iBAAiB,WAAW;AAE7C,UAAM,WAAW,MAAM,SAAS,cAAc;AAAA,MAC5C,OAAO;AAAA,MACP,YAAY;AAAA,MACZ,QAAQ;AAAA,MACR,UAAU;AAAA
,QACR;AAAA,UACE,MAAM;AAAA,UACN,SACE;AAAA;AAAA;AAAA;AAAA;AAAA,SAIU,KAAK;AAAA;AAAA;AAAA,EACF,OAAO;AAAA;AAAA;AAAA,QAExB;AAAA,MACF;AAAA,IACF,CAAC;AAED,UAAM,eAAe,SAAS,QAC3B,OAAO,CAAC,UAAU,MAAM,SAAS,MAAM,EACvC,IAAI,CAAC,UAAU,MAAM,QAAQ,EAAE,EAC/B,KAAK,EAAE;AAEV,UAAM,SAAS,KAAK,MAAM,aAAa,KAAK,CAAC;AAE7C,WAAO;AAAA,MACL,UAAU,QAAQ,OAAO,QAAQ;AAAA,MACjC,MAAM,MAAM,QAAQ,OAAO,IAAI,IAAI,OAAO,OAAO,CAAC;AAAA,MAClD,iBAAiB,MAAM,QAAQ,OAAO,eAAe,IACjD,OAAO,gBAAgB,MAAM,GAAG,CAAC,IACjC,CAAC;AAAA,IACP;AAAA,EACF,SAAS,OAAO;AAEd,YAAQ;AAAA,MACN;AAAA,MACA,iBAAiB,QAAQ,MAAM,UAAU;AAAA,IAC3C;AACA,WAAO,EAAE,UAAU,MAAM,MAAM,CAAC,GAAG,iBAAiB,CAAC,EAAE;AAAA,EACzD;AACF;AAkBA,eAAsB,kBACpB,OACA,gBACA,MAC0B;AAE1B,MAAI,CAAC,IAAI,uBAAuB;AAC9B,WAAO;AAAA,MACL,SAAS;AAAA,MACT,OAAO;AAAA,MACP,iBAAiB,CAAC;AAAA,IACpB;AAAA,EACF;AAEA,QAAM,WAAW,MAAM,YAAY,IAAI;AACvC,QAAM,SAAS,MAAM;AACrB,QAAM,QAAQ,MAAM,SAAS;AAG7B,QAAM,cAAc,oBAAI,IAA0B;AAClD,aAAW,KAAK,gBAAgB;AAC9B,gBAAY,IAAI,EAAE,IAAI,CAAC;AAAA,EACzB;AAEA,QAAM,qBAA+B,CAAC;AACtC,MAAI,iBAAiB;AAErB,WAAS,OAAO,GAAG,OAAO,UAAU,QAAQ;AAE1C,UAAM,iBAAiB,MAAM,KAAK,YAAY,OAAO,CAAC;AACtD,UAAM,cAAc,eAAe,IAAI,CAAC,MAAM,EAAE,OAAO,EAAE,KAAK,MAAM;AAGpE,UAAM,aAAa,MAAM,qBAAqB,OAAO,WAAW;AAEhE,QAAI,WAAW,YAAY,WAAW,gBAAgB,WAAW,GAAG;AAClE;AAAA,IACF;AAEA;AAGA,UAAM,YAAY,WAAW,gBAAgB,MAAM,GAAG,CAAC;AACvD,uBAAmB,KAAK,GAAG,SAAS;AAGpC,UAAM,oBAAoB,UAAU,IAAI,OAAO,kBAAkB;AAC/D,YAAM,gBAAgB,MAAM,aAAa,eAAe;AAAA,QACtD;AAAA,QACA;AAAA,MACF,CAAC;AACD,YAAM,gBAAgB,MAAM,OAAO,eAAe,aAAa;AAC/D,aAAO;AAAA,IACT,CAAC;AAED,UAAM,uBAAuB,MAAM,QAAQ,IAAI,iBAAiB;AAGhE,eAAW,aAAa,sBAAsB;AAC5C,iBAAW,KAAK,WAAW;AACzB,YAAI,CAAC,YAAY,IAAI,EAAE,EAAE,GAAG;AAC1B,sBAAY,IAAI,EAAE,IAAI,CAAC;AAAA,QACzB;AAAA,MACF;AAAA,IACF;AAAA,EACF;AAGA,QAAM,gBAAgB,MAAM,KAAK,YAAY,OAAO,CAAC,EAAE;AAAA,IACrD,CAAC,GAAG,MAAM,EAAE,cAAc,EAAE;AAAA,EAC9B;AAEA,SAAO;AAAA,IACL,SAAS;AAAA,IACT,OAAO;AAAA,IACP,iBAAiB;AAAA,EACnB;AACF;;;ACrJA,SAAS,gBACP,SACA,cACgB;AAChB,SAAO,QAAQ,IAAI,CAAC,OAAO;AAAA,IACzB,GAAG;AAAA,IACH,aAAa,gBAAgB,EAAE,WAAW;AAAA,EAC5C,EAAE;AACJ;AAaA,eAAsB,iBACpB,OACA
,MACkC;AAClC,QAAM,SAAS,MAAM;AACrB,QAAM,QAAQ,MAAM,SAAS;AAC7B,QAAM,sBAAsB,MAAM;AAElC,MAAI,QAAQ;AAEZ,MAAI;AAIF,QAAI,iBAAiB;AAErB,QACE,IAAI,4BACJ,uBACA,oBAAoB,UAAU,GAC9B;AACA,cAAQ,IAAI,0EAA0E;AACtF,uBAAiB,MAAM,qBAAqB,OAAO,mBAAmB;AACtE;AACA,cAAQ,IAAI,yCAAyC,cAAc,GAAG;AAAA,IACxE;AAKA,QAAI,SAAS;AACb,QAAI,iBAAkC;AAEtC,QAAI,IAAI,yBAAyB;AAC/B,cAAQ,IAAI,mEAAmE;AAC/E,uBAAiB,MAAM,kBAAkB,cAAc;AACvD,YAAM,QAAQ,kBAAkB;AAChC,YAAM,eAAe,MAAM,MAAM,iBAAiB,cAAc;AAEhE,UAAI,cAAc;AAChB,gBAAQ,IAAI,qDAAqD;AACjE,iBAAS;AACT;AAEA,YAAIC,iBAAgB,gBAAgB,aAAa,SAAS,CAAC;AAM3D,YAAI,IAAI,gBAAgB;AACtB,kBAAQ,IAAI,kDAAkD;AAC9D,UAAAA,iBAAgB,MAAM,OAAO,gBAAgB,aAAa,OAAO;AACjE;AAAA,QACF;AAGA,YAAI,IAAI,uBAAuB;AAC7B,kBAAQ,IAAI,+DAA+D;AAC3E,gBAAM,kBAAkB,MAAM,kBAAkB,gBAAgBA,gBAAe,EAAE,OAAO,CAAC;AACzF,UAAAA,iBAAgB,gBAAgB;AAChC;AAAA,QACF;AAEA,eAAO;AAAA,UACL,SAASA;AAAA,UACT,QAAQ;AAAA,UACR;AAAA,UACA,WAAW;AAAA,QACb;AAAA,MACF;AAEA,cAAQ,IAAI,sDAAsD;AAAA,IACpE;AAKA,QAAI;AAEJ,QAAI,IAAI,cAAc;AACpB,cAAQ,IAAI,kEAAkE;AAC9E,sBAAgB,MAAM,WAAW,gBAAgB,QAAQ,KAAK;AAC9D;AAAA,IACF,OAAO;AACL,cAAQ,IAAI,8CAA8C;AAC1D,sBAAgB,MAAM,aAAa,gBAAgB,EAAE,QAAQ,MAAM,CAAC;AACpE;AAAA,IACF;AAKA,QAAI;AAEJ,QAAI,IAAI,gBAAgB;AACtB,cAAQ,IAAI,2CAA2C;AACvD,sBAAgB,MAAM,OAAO,gBAAgB,aAAa;AAC1D;AAAA,IACF,OAAO;AAEL,sBAAgB,gBAAgB,aAAa;AAAA,IAC/C;AAKA,QAAI,IAAI,2BAA2B,CAAC,QAAQ;AAC1C,cAAQ,IAAI,iDAAiD;AAK7D,UAAI,CAAC,gBAAgB;AACnB,yBAAiB,MAAM,kBAAkB,cAAc;AAAA,MACzD;AAEA,YAAM,QAAQ,kBAAkB;AAChC,YAAM,MAAM,aAAa,gBAAgB,aAAa;AAAA,IACxD;AAKA,QAAI,IAAI,uBAAuB;AAC7B,cAAQ,IAAI,mEAAmE;AAC/E,YAAM,kBAAkB,MAAM,kBAAkB,gBAAgB,eAAe,EAAE,OAAO,CAAC;AACzF,sBAAgB,gBAAgB;AAChC;AAAA,IACF;AAEA,WAAO;AAAA,MACL,SAAS;AAAA,MACT;AAAA,MACA;AAAA,MACA,WAAW;AAAA,IACb;AAAA,EACF,SAAS,OAAO;AAId,YAAQ;AAAA,MACN;AAAA,MACA,iBAAiB,QAAQ,MAAM,UAAU;AAAA,IAC3C;AAEA,QAAI;AACF,YAAM,kBAAkB,MAAM,aAAa,OAAO,EAAE,QAAQ,MAAM,CAAC;AACnE,aAAO;AAAA,QACL,SAAS,gBAAgB,iBAAiB,CAAC;AAAA,QAC3C,QAAQ;AAAA,QACR,OAAO;AAAA,QACP,WAAW;AAAA,MACb;AAAA,IACF,SAAS,eAAe;AACtB,cAAQ;AAAA,QACN;AAAA,QACA,yBAAyB,QAAQ,cAAc,UAAU;AAAA,MAC3D;AACA,aAAO;AAA
A,QACL,SAAS,CAAC;AAAA,QACV,QAAQ;AAAA,QACR,OAAO;AAAA,QACP,WAAW;AAAA,MACb;AAAA,IACF;AAAA,EACF;AACF;","names":["sql","RRF_K","sql","reciprocalRankFusion","DEFAULT_MAX_TOKENS","rankedResults"]}
@@ -0,0 +1,14 @@
1
+ import {
2
+ batchEnrich,
3
+ enrichEntity
4
+ } from "./chunk-A24GPVLY.js";
5
+ import "./chunk-AR34B6XR.js";
6
+ import "./chunk-S4NJJS5C.js";
7
+ import "./chunk-PUNIMPMY.js";
8
+ import "./chunk-NYVBXUGD.js";
9
+ import "./chunk-UP2VWCW5.js";
10
+ export {
11
+ batchEnrich,
12
+ enrichEntity
13
+ };
14
+ //# sourceMappingURL=enrichment-pipeline-CMUVBDC7.js.map
@@ -7,10 +7,11 @@ import {
7
7
  mergeEntities,
8
8
  normalizeEntityName,
9
9
  resolveEntity
10
- } from "./chunk-WRAKK6K6.js";
11
- import "./chunk-XKYRH4FM.js";
12
- import "./chunk-ZLZKF2PM.js";
13
- import "./chunk-PLDDJCW6.js";
10
+ } from "./chunk-AR34B6XR.js";
11
+ import "./chunk-S4NJJS5C.js";
12
+ import "./chunk-PUNIMPMY.js";
13
+ import "./chunk-NYVBXUGD.js";
14
+ import "./chunk-UP2VWCW5.js";
14
15
  export {
15
16
  findDuplicates,
16
17
  fuzzyMatch,
@@ -21,4 +22,4 @@ export {
21
22
  normalizeEntityName,
22
23
  resolveEntity
23
24
  };
24
- //# sourceMappingURL=entity-resolution-Y3IUWEAT.js.map
25
+ //# sourceMappingURL=entity-resolution-4X4JU43O.js.map
@@ -0,0 +1,12 @@
1
+ import {
2
+ configure,
3
+ env,
4
+ ready
5
+ } from "./chunk-PUNIMPMY.js";
6
+ import "./chunk-UP2VWCW5.js";
7
+ export {
8
+ configure,
9
+ env,
10
+ ready
11
+ };
12
+ //# sourceMappingURL=env-CHOFICED.js.map
@@ -0,0 +1,32 @@
1
+ import {
2
+ captureException,
3
+ cleanupOldErrors,
4
+ errorContext,
5
+ getErrorStats,
6
+ getRecentErrors,
7
+ getSimilarErrors,
8
+ getUnresolvedErrors,
9
+ markErrorResolved,
10
+ queryErrors,
11
+ trackError,
12
+ withErrorTracking
13
+ } from "./chunk-7MZN73J2.js";
14
+ import "./chunk-6LTLIYAQ.js";
15
+ import "./chunk-S4NJJS5C.js";
16
+ import "./chunk-PUNIMPMY.js";
17
+ import "./chunk-NYVBXUGD.js";
18
+ import "./chunk-UP2VWCW5.js";
19
+ export {
20
+ captureException,
21
+ cleanupOldErrors,
22
+ errorContext,
23
+ getErrorStats,
24
+ getRecentErrors,
25
+ getSimilarErrors,
26
+ getUnresolvedErrors,
27
+ markErrorResolved,
28
+ queryErrors,
29
+ trackError,
30
+ withErrorTracking
31
+ };
32
+ //# sourceMappingURL=error-tracker-SVQSDQDW.js.map
@@ -0,0 +1,178 @@
1
+ import {
2
+ LinearRegression
3
+ } from "./chunk-WZAH34TG.js";
4
+ import "./chunk-UP2VWCW5.js";
5
+
6
// src/integrations/finance/finnhub.ts

// Root endpoint shared by every Finnhub REST call.
var BASE_URL = "https://finnhub.io/api/v1";

/**
 * Thin HTTP client for the Finnhub market-data REST API.
 *
 * Covers real-time quotes, company profiles and news, news sentiment,
 * analyst recommendations, earnings/economic calendars, candle-based trend
 * detection (via LinearRegression) and symbol search. Every public method
 * funnels through `request`, which appends the auth token and enforces a
 * per-call timeout with an AbortController.
 */
var FinnhubClient = class {
  apiKey;
  timeout;
  /**
   * @param config - `{ apiKey, timeout? }`; `timeout` is in milliseconds
   *   and defaults to 15000 when null/undefined.
   */
  constructor(config) {
    this.apiKey = config.apiKey;
    this.timeout = config.timeout ?? 15000;
  }
  /**
   * GET `${BASE_URL}/${endpoint}` with `params` as the query string plus the
   * `token` credential. Aborts after `this.timeout` ms; throws on any
   * non-2xx status; otherwise resolves with the parsed JSON body.
   */
  async request(endpoint, params = {}) {
    const target = new URL(`${BASE_URL}/${endpoint}`);
    target.searchParams.set("token", this.apiKey);
    Object.entries(params).forEach(([name, raw]) => target.searchParams.set(name, raw));
    const aborter = new AbortController();
    const watchdog = setTimeout(() => aborter.abort(), this.timeout);
    try {
      const res = await fetch(target.toString(), { signal: aborter.signal });
      if (res.ok) {
        return await res.json();
      }
      throw new Error(`Finnhub API error: ${res.status} ${res.statusText}`);
    } finally {
      // Always cancel the abort timer, success or failure.
      clearTimeout(watchdog);
    }
  }
  /**
   * Get real-time stock quote.
   */
  async getQuote(symbol) {
    const ticker = symbol.toUpperCase();
    const raw = await this.request("quote", { symbol: ticker });
    // Expand Finnhub's terse single-letter fields into named properties.
    return {
      symbol: ticker,
      current: raw.c,
      change: raw.d,
      percentChange: raw.dp,
      high: raw.h,
      low: raw.l,
      open: raw.o,
      previousClose: raw.pc,
      timestamp: raw.t
    };
  }
  /**
   * Get company profile.
   */
  async getCompanyProfile(symbol) {
    return this.request("stock/profile2", { symbol: symbol.toUpperCase() });
  }
  /**
   * Get company news; the window defaults to the trailing 7 days.
   */
  async getCompanyNews(symbol, from, to) {
    const today = new Date();
    const weekBack = new Date(today.getTime() - 7 * 86400000);
    return this.request("company-news", {
      symbol: symbol.toUpperCase(),
      // ISO timestamps are always "YYYY-MM-DDT...", so the first 10 chars are the date.
      from: from ?? weekBack.toISOString().slice(0, 10),
      to: to ?? today.toISOString().slice(0, 10)
    });
  }
  /**
   * Get social media sentiment for a stock. Missing sentiment fields fall
   * back to a neutral 0.5; a side above 0.6 labels the overall sentiment.
   */
  async getSentiment(symbol) {
    const ticker = symbol.toUpperCase();
    const raw = await this.request("news-sentiment", { symbol: ticker });
    const bullish = raw.sentiment?.bullishPercent ?? 0.5;
    const bearish = raw.sentiment?.bearishPercent ?? 0.5;
    let label = "neutral";
    if (bullish > 0.6) {
      label = "bullish";
    } else if (bearish > 0.6) {
      label = "bearish";
    }
    return {
      symbol: ticker,
      bullishPercent: 100 * bullish,
      bearishPercent: 100 * bearish,
      buzz: raw.buzz?.buzz ?? 0,
      sentiment: label
    };
  }
  /**
   * Get analyst recommendations.
   */
  async getRecommendations(symbol) {
    const rows = await this.request("stock/recommendation", { symbol: symbol.toUpperCase() });
    return rows.map((row) => ({
      symbol: row.symbol,
      buy: row.buy,
      hold: row.hold,
      sell: row.sell,
      strongBuy: row.strongBuy,
      strongSell: row.strongSell,
      period: row.period
    }));
  }
  /**
   * Get upcoming earnings; the window defaults to the next 30 days.
   */
  async getEarningsCalendar(from, to) {
    const today = new Date();
    const monthOut = new Date(today.getTime() + 30 * 86400000);
    const payload = await this.request("calendar/earnings", {
      from: from ?? today.toISOString().slice(0, 10),
      to: to ?? monthOut.toISOString().slice(0, 10)
    });
    const entries = payload.earningsCalendar ?? [];
    return entries.map((entry) => ({
      symbol: entry.symbol,
      date: entry.date,
      epsEstimate: entry.epsEstimate,
      epsActual: entry.epsActual,
      revenueEstimate: entry.revenueEstimate,
      revenueActual: entry.revenueActual,
      quarter: entry.quarter,
      year: entry.year
    }));
  }
  /**
   * Get economic calendar; the window defaults to the next 7 days.
   */
  async getEconomicCalendar(from, to) {
    const today = new Date();
    const weekOut = new Date(today.getTime() + 7 * 86400000);
    const payload = await this.request("calendar/economic", {
      from: from ?? today.toISOString().slice(0, 10),
      to: to ?? weekOut.toISOString().slice(0, 10)
    });
    const events = payload.economicCalendar ?? [];
    return events.map((event) => ({
      country: event.country,
      event: event.event,
      // Falsy/blank impact values degrade to "low".
      impact: event.impact || "low",
      actual: event.actual,
      estimate: event.estimate,
      previous: event.prev,
      unit: event.unit,
      time: event.time
    }));
  }
  /**
   * Get stock candles (OHLCV) and detect price trend using Linear Regression.
   * The lookback window is `count` days regardless of `resolution` — an
   * intentional approximation. Returns a flat/zero result when the API
   * reports no usable data or fewer than 2 closes.
   */
  async getPriceTrend(symbol, resolution = "D", count = 30) {
    const nowSec = Math.floor(Date.now() / 1000);
    const startSec = nowSec - count * 86400;
    const candles = await this.request("stock/candle", {
      symbol: symbol.toUpperCase(),
      resolution,
      from: String(startSec),
      to: String(nowSec)
    });
    const closes = candles.c;
    if (candles.s !== "ok" || !closes || closes.length < 2) {
      return { trend: "flat", strength: 0, dailyChange: 0, closingPrices: [] };
    }
    const fit = LinearRegression.detectTrend(closes);
    return {
      trend: fit.direction,
      strength: fit.strength,
      dailyChange: fit.slopePerUnit,
      closingPrices: closes
    };
  }
  /**
   * Search for a symbol; at most the first 10 matches are returned.
   */
  async symbolSearch(query) {
    const raw = await this.request("search", { q: query });
    const matches = raw.result ?? [];
    return matches.slice(0, 10);
  }
};
171
/**
 * Convenience factory: build a FinnhubClient from a bare API key, leaving
 * every other option (timeout) at its default.
 */
function createFinnhubClient(apiKey) {
  const config = { apiKey };
  return new FinnhubClient(config);
}
174
+ export {
175
+ FinnhubClient,
176
+ createFinnhubClient
177
+ };
178
+ //# sourceMappingURL=finnhub-X7ZMQSXF.js.map
@@ -0,0 +1 @@
1
+ {"version":3,"sources":["../src/integrations/finance/finnhub.ts"],"sourcesContent":["/**\r\n * Finnhub Client — Real-time market data and financial intelligence\r\n *\r\n * Provides: Real-time quotes, company news, earnings calendars, market sentiment,\r\n * analyst recommendations, and economic calendars.\r\n *\r\n * API Docs: https://finnhub.io/docs/api\r\n * Uses Linear Regression (Algorithm #1) for price trend analysis.\r\n */\r\n\r\nimport { LinearRegression } from \"../../core/ml/linear-regression\";\r\n\r\nexport interface FinnhubConfig {\r\n apiKey: string;\r\n timeout?: number;\r\n}\r\n\r\nexport interface FinnhubQuote {\r\n symbol: string;\r\n current: number;\r\n change: number;\r\n percentChange: number;\r\n high: number;\r\n low: number;\r\n open: number;\r\n previousClose: number;\r\n timestamp: number;\r\n}\r\n\r\nexport interface CompanyNews {\r\n category: string;\r\n datetime: number;\r\n headline: string;\r\n id: number;\r\n image: string;\r\n related: string;\r\n source: string;\r\n summary: string;\r\n url: string;\r\n}\r\n\r\nexport interface MarketSentiment {\r\n symbol: string;\r\n bullishPercent: number;\r\n bearishPercent: number;\r\n buzz: number;\r\n sentiment: \"bullish\" | \"bearish\" | \"neutral\";\r\n}\r\n\r\nexport interface AnalystRecommendation {\r\n symbol: string;\r\n buy: number;\r\n hold: number;\r\n sell: number;\r\n strongBuy: number;\r\n strongSell: number;\r\n period: string;\r\n}\r\n\r\nexport interface EarningsCalendarEntry {\r\n symbol: string;\r\n date: string;\r\n epsEstimate: number | null;\r\n epsActual: number | null;\r\n revenueEstimate: number | null;\r\n revenueActual: number | null;\r\n quarter: number;\r\n year: number;\r\n}\r\n\r\nexport interface CompanyProfile {\r\n country: string;\r\n currency: string;\r\n exchange: string;\r\n ipo: string;\r\n marketCapitalization: number;\r\n name: string;\r\n phone: string;\r\n shareOutstanding: number;\r\n ticker: string;\r\n weburl: string;\r\n logo: string;\r\n 
finnhubIndustry: string;\r\n}\r\n\r\nexport interface EconomicEvent {\r\n country: string;\r\n event: string;\r\n impact: \"low\" | \"medium\" | \"high\";\r\n actual: number | null;\r\n estimate: number | null;\r\n previous: number | null;\r\n unit: string;\r\n time: string;\r\n}\r\n\r\nconst BASE_URL = \"https://finnhub.io/api/v1\";\r\n\r\nexport class FinnhubClient {\r\n private apiKey: string;\r\n private timeout: number;\r\n\r\n constructor(config: FinnhubConfig) {\r\n this.apiKey = config.apiKey;\r\n this.timeout = config.timeout ?? 15000;\r\n }\r\n\r\n private async request<T>(endpoint: string, params: Record<string, string> = {}): Promise<T> {\r\n const url = new URL(`${BASE_URL}/${endpoint}`);\r\n url.searchParams.set(\"token\", this.apiKey);\r\n for (const [key, value] of Object.entries(params)) {\r\n url.searchParams.set(key, value);\r\n }\r\n\r\n const controller = new AbortController();\r\n const timer = setTimeout(() => controller.abort(), this.timeout);\r\n\r\n try {\r\n const response = await fetch(url.toString(), { signal: controller.signal });\r\n if (!response.ok) {\r\n throw new Error(`Finnhub API error: ${response.status} ${response.statusText}`);\r\n }\r\n return await response.json() as T;\r\n } finally {\r\n clearTimeout(timer);\r\n }\r\n }\r\n\r\n /**\r\n * Get real-time stock quote.\r\n */\r\n async getQuote(symbol: string): Promise<FinnhubQuote> {\r\n const data = await this.request<{\r\n c: number; d: number; dp: number; h: number; l: number; o: number; pc: number; t: number;\r\n }>(\"quote\", { symbol: symbol.toUpperCase() });\r\n\r\n return {\r\n symbol: symbol.toUpperCase(),\r\n current: data.c,\r\n change: data.d,\r\n percentChange: data.dp,\r\n high: data.h,\r\n low: data.l,\r\n open: data.o,\r\n previousClose: data.pc,\r\n timestamp: data.t,\r\n };\r\n }\r\n\r\n /**\r\n * Get company profile.\r\n */\r\n async getCompanyProfile(symbol: string): Promise<CompanyProfile> {\r\n return this.request<CompanyProfile>(\"stock/profile2\", { 
symbol: symbol.toUpperCase() });\r\n }\r\n\r\n /**\r\n * Get company news.\r\n */\r\n async getCompanyNews(\r\n symbol: string,\r\n from?: string,\r\n to?: string\r\n ): Promise<CompanyNews[]> {\r\n const now = new Date();\r\n const weekAgo = new Date(now.getTime() - 7 * 86400000);\r\n\r\n return this.request<CompanyNews[]>(\"company-news\", {\r\n symbol: symbol.toUpperCase(),\r\n from: from ?? weekAgo.toISOString().split(\"T\")[0],\r\n to: to ?? now.toISOString().split(\"T\")[0],\r\n });\r\n }\r\n\r\n /**\r\n * Get social media sentiment for a stock.\r\n */\r\n async getSentiment(symbol: string): Promise<MarketSentiment> {\r\n const data = await this.request<{\r\n buzz: { buzzHigh: number; weeklyAverage: number; buzz: number };\r\n companyNewsScore: number;\r\n sectorAverageBullishPercent: number;\r\n sectorAverageNewsScore: number;\r\n sentiment: { bearishPercent: number; bullishPercent: number };\r\n symbol: string;\r\n }>(\"news-sentiment\", { symbol: symbol.toUpperCase() });\r\n\r\n const bullish = data.sentiment?.bullishPercent ?? 0.5;\r\n const bearish = data.sentiment?.bearishPercent ?? 0.5;\r\n\r\n return {\r\n symbol: symbol.toUpperCase(),\r\n bullishPercent: bullish * 100,\r\n bearishPercent: bearish * 100,\r\n buzz: data.buzz?.buzz ?? 0,\r\n sentiment: bullish > 0.6 ? \"bullish\" : bearish > 0.6 ? 
\"bearish\" : \"neutral\",\r\n };\r\n }\r\n\r\n /**\r\n * Get analyst recommendations.\r\n */\r\n async getRecommendations(symbol: string): Promise<AnalystRecommendation[]> {\r\n const data = await this.request<Array<{\r\n buy: number; hold: number; sell: number; strongBuy: number; strongSell: number; period: string; symbol: string;\r\n }>>(\"stock/recommendation\", { symbol: symbol.toUpperCase() });\r\n\r\n return data.map((r) => ({\r\n symbol: r.symbol,\r\n buy: r.buy,\r\n hold: r.hold,\r\n sell: r.sell,\r\n strongBuy: r.strongBuy,\r\n strongSell: r.strongSell,\r\n period: r.period,\r\n }));\r\n }\r\n\r\n /**\r\n * Get upcoming earnings.\r\n */\r\n async getEarningsCalendar(\r\n from?: string,\r\n to?: string\r\n ): Promise<EarningsCalendarEntry[]> {\r\n const now = new Date();\r\n const nextMonth = new Date(now.getTime() + 30 * 86400000);\r\n\r\n const data = await this.request<{\r\n earningsCalendar: Array<{\r\n symbol: string; date: string; epsEstimate: number; epsActual: number;\r\n revenueEstimate: number; revenueActual: number; quarter: number; year: number;\r\n }>;\r\n }>(\"calendar/earnings\", {\r\n from: from ?? now.toISOString().split(\"T\")[0],\r\n to: to ?? nextMonth.toISOString().split(\"T\")[0],\r\n });\r\n\r\n return (data.earningsCalendar ?? 
[]).map((e) => ({\r\n symbol: e.symbol,\r\n date: e.date,\r\n epsEstimate: e.epsEstimate,\r\n epsActual: e.epsActual,\r\n revenueEstimate: e.revenueEstimate,\r\n revenueActual: e.revenueActual,\r\n quarter: e.quarter,\r\n year: e.year,\r\n }));\r\n }\r\n\r\n /**\r\n * Get economic calendar.\r\n */\r\n async getEconomicCalendar(from?: string, to?: string): Promise<EconomicEvent[]> {\r\n const now = new Date();\r\n const nextWeek = new Date(now.getTime() + 7 * 86400000);\r\n\r\n const data = await this.request<{\r\n economicCalendar: Array<{\r\n country: string; event: string; impact: string;\r\n actual: number; estimate: number; prev: number; unit: string; time: string;\r\n }>;\r\n }>(\"calendar/economic\", {\r\n from: from ?? now.toISOString().split(\"T\")[0],\r\n to: to ?? nextWeek.toISOString().split(\"T\")[0],\r\n });\r\n\r\n return (data.economicCalendar ?? []).map((e) => ({\r\n country: e.country,\r\n event: e.event,\r\n impact: (e.impact as \"low\" | \"medium\" | \"high\") || \"low\",\r\n actual: e.actual,\r\n estimate: e.estimate,\r\n previous: e.prev,\r\n unit: e.unit,\r\n time: e.time,\r\n }));\r\n }\r\n\r\n /**\r\n * Get stock candles (OHLCV) and detect price trend using Linear Regression.\r\n */\r\n async getPriceTrend(\r\n symbol: string,\r\n resolution: \"1\" | \"5\" | \"15\" | \"30\" | \"60\" | \"D\" | \"W\" | \"M\" = \"D\",\r\n count: number = 30\r\n ): Promise<{\r\n trend: \"up\" | \"down\" | \"flat\";\r\n strength: number;\r\n dailyChange: number;\r\n closingPrices: number[];\r\n }> {\r\n const now = Math.floor(Date.now() / 1000);\r\n const from = now - count * 86400; // Approximate\r\n\r\n const data = await this.request<{\r\n c: number[]; h: number[]; l: number[]; o: number[]; v: number[]; t: number[]; s: string;\r\n }>(\"stock/candle\", {\r\n symbol: symbol.toUpperCase(),\r\n resolution,\r\n from: String(from),\r\n to: String(now),\r\n });\r\n\r\n if (data.s !== \"ok\" || !data.c || data.c.length < 2) {\r\n return { trend: \"flat\", strength: 0, 
dailyChange: 0, closingPrices: [] };\r\n }\r\n\r\n const trend = LinearRegression.detectTrend(data.c);\r\n\r\n return {\r\n trend: trend.direction,\r\n strength: trend.strength,\r\n dailyChange: trend.slopePerUnit,\r\n closingPrices: data.c,\r\n };\r\n }\r\n\r\n /**\r\n * Search for a symbol.\r\n */\r\n async symbolSearch(query: string): Promise<Array<{ symbol: string; description: string; type: string }>> {\r\n const data = await this.request<{\r\n result: Array<{ symbol: string; description: string; type: string }>;\r\n }>(\"search\", { q: query });\r\n\r\n return (data.result ?? []).slice(0, 10);\r\n }\r\n}\r\n\r\nexport function createFinnhubClient(apiKey: string): FinnhubClient {\r\n return new FinnhubClient({ apiKey });\r\n}\r\n"],"mappings":";;;;;;AAgGA,IAAM,WAAW;AAEV,IAAM,gBAAN,MAAoB;AAAA,EACjB;AAAA,EACA;AAAA,EAER,YAAY,QAAuB;AACjC,SAAK,SAAS,OAAO;AACrB,SAAK,UAAU,OAAO,WAAW;AAAA,EACnC;AAAA,EAEA,MAAc,QAAW,UAAkB,SAAiC,CAAC,GAAe;AAC1F,UAAM,MAAM,IAAI,IAAI,GAAG,QAAQ,IAAI,QAAQ,EAAE;AAC7C,QAAI,aAAa,IAAI,SAAS,KAAK,MAAM;AACzC,eAAW,CAAC,KAAK,KAAK,KAAK,OAAO,QAAQ,MAAM,GAAG;AACjD,UAAI,aAAa,IAAI,KAAK,KAAK;AAAA,IACjC;AAEA,UAAM,aAAa,IAAI,gBAAgB;AACvC,UAAM,QAAQ,WAAW,MAAM,WAAW,MAAM,GAAG,KAAK,OAAO;AAE/D,QAAI;AACF,YAAM,WAAW,MAAM,MAAM,IAAI,SAAS,GAAG,EAAE,QAAQ,WAAW,OAAO,CAAC;AAC1E,UAAI,CAAC,SAAS,IAAI;AAChB,cAAM,IAAI,MAAM,sBAAsB,SAAS,MAAM,IAAI,SAAS,UAAU,EAAE;AAAA,MAChF;AACA,aAAO,MAAM,SAAS,KAAK;AAAA,IAC7B,UAAE;AACA,mBAAa,KAAK;AAAA,IACpB;AAAA,EACF;AAAA;AAAA;AAAA;AAAA,EAKA,MAAM,SAAS,QAAuC;AACpD,UAAM,OAAO,MAAM,KAAK,QAErB,SAAS,EAAE,QAAQ,OAAO,YAAY,EAAE,CAAC;AAE5C,WAAO;AAAA,MACL,QAAQ,OAAO,YAAY;AAAA,MAC3B,SAAS,KAAK;AAAA,MACd,QAAQ,KAAK;AAAA,MACb,eAAe,KAAK;AAAA,MACpB,MAAM,KAAK;AAAA,MACX,KAAK,KAAK;AAAA,MACV,MAAM,KAAK;AAAA,MACX,eAAe,KAAK;AAAA,MACpB,WAAW,KAAK;AAAA,IAClB;AAAA,EACF;AAAA;AAAA;AAAA;AAAA,EAKA,MAAM,kBAAkB,QAAyC;AAC/D,WAAO,KAAK,QAAwB,kBAAkB,EAAE,QAAQ,OAAO,YAAY,EAAE,CAAC;AAAA,EACxF;AAAA;AAAA;AAAA;AAAA,EAKA,MAAM,eACJ,QACA,MACA,IACwB;AACxB,UAAM,MAAM,oBAAI,KAAK;AACrB,UAAM,UAAU,IAAI,KAAK,IAA
I,QAAQ,IAAI,IAAI,KAAQ;AAErD,WAAO,KAAK,QAAuB,gBAAgB;AAAA,MACjD,QAAQ,OAAO,YAAY;AAAA,MAC3B,MAAM,QAAQ,QAAQ,YAAY,EAAE,MAAM,GAAG,EAAE,CAAC;AAAA,MAChD,IAAI,MAAM,IAAI,YAAY,EAAE,MAAM,GAAG,EAAE,CAAC;AAAA,IAC1C,CAAC;AAAA,EACH;AAAA;AAAA;AAAA;AAAA,EAKA,MAAM,aAAa,QAA0C;AAC3D,UAAM,OAAO,MAAM,KAAK,QAOrB,kBAAkB,EAAE,QAAQ,OAAO,YAAY,EAAE,CAAC;AAErD,UAAM,UAAU,KAAK,WAAW,kBAAkB;AAClD,UAAM,UAAU,KAAK,WAAW,kBAAkB;AAElD,WAAO;AAAA,MACL,QAAQ,OAAO,YAAY;AAAA,MAC3B,gBAAgB,UAAU;AAAA,MAC1B,gBAAgB,UAAU;AAAA,MAC1B,MAAM,KAAK,MAAM,QAAQ;AAAA,MACzB,WAAW,UAAU,MAAM,YAAY,UAAU,MAAM,YAAY;AAAA,IACrE;AAAA,EACF;AAAA;AAAA;AAAA;AAAA,EAKA,MAAM,mBAAmB,QAAkD;AACzE,UAAM,OAAO,MAAM,KAAK,QAEpB,wBAAwB,EAAE,QAAQ,OAAO,YAAY,EAAE,CAAC;AAE5D,WAAO,KAAK,IAAI,CAAC,OAAO;AAAA,MACtB,QAAQ,EAAE;AAAA,MACV,KAAK,EAAE;AAAA,MACP,MAAM,EAAE;AAAA,MACR,MAAM,EAAE;AAAA,MACR,WAAW,EAAE;AAAA,MACb,YAAY,EAAE;AAAA,MACd,QAAQ,EAAE;AAAA,IACZ,EAAE;AAAA,EACJ;AAAA;AAAA;AAAA;AAAA,EAKA,MAAM,oBACJ,MACA,IACkC;AAClC,UAAM,MAAM,oBAAI,KAAK;AACrB,UAAM,YAAY,IAAI,KAAK,IAAI,QAAQ,IAAI,KAAK,KAAQ;AAExD,UAAM,OAAO,MAAM,KAAK,QAKrB,qBAAqB;AAAA,MACtB,MAAM,QAAQ,IAAI,YAAY,EAAE,MAAM,GAAG,EAAE,CAAC;AAAA,MAC5C,IAAI,MAAM,UAAU,YAAY,EAAE,MAAM,GAAG,EAAE,CAAC;AAAA,IAChD,CAAC;AAED,YAAQ,KAAK,oBAAoB,CAAC,GAAG,IAAI,CAAC,OAAO;AAAA,MAC/C,QAAQ,EAAE;AAAA,MACV,MAAM,EAAE;AAAA,MACR,aAAa,EAAE;AAAA,MACf,WAAW,EAAE;AAAA,MACb,iBAAiB,EAAE;AAAA,MACnB,eAAe,EAAE;AAAA,MACjB,SAAS,EAAE;AAAA,MACX,MAAM,EAAE;AAAA,IACV,EAAE;AAAA,EACJ;AAAA;AAAA;AAAA;AAAA,EAKA,MAAM,oBAAoB,MAAe,IAAuC;AAC9E,UAAM,MAAM,oBAAI,KAAK;AACrB,UAAM,WAAW,IAAI,KAAK,IAAI,QAAQ,IAAI,IAAI,KAAQ;AAEtD,UAAM,OAAO,MAAM,KAAK,QAKrB,qBAAqB;AAAA,MACtB,MAAM,QAAQ,IAAI,YAAY,EAAE,MAAM,GAAG,EAAE,CAAC;AAAA,MAC5C,IAAI,MAAM,SAAS,YAAY,EAAE,MAAM,GAAG,EAAE,CAAC;AAAA,IAC/C,CAAC;AAED,YAAQ,KAAK,oBAAoB,CAAC,GAAG,IAAI,CAAC,OAAO;AAAA,MAC/C,SAAS,EAAE;AAAA,MACX,OAAO,EAAE;AAAA,MACT,QAAS,EAAE,UAAwC;AAAA,MACnD,QAAQ,EAAE;AAAA,MACV,UAAU,EAAE;AAAA,MACZ,UAAU,EAAE;AAAA,MACZ,MAAM,EAAE;AAAA,MACR,MAAM,EAAE;AAAA,IACV,EAAE;AAAA,EACJ;AAAA;AAAA;AAAA;AAAA,EAKA,MAAM,cACJ,QACA,aAA+D,KAC/D,QAAgB,
IAMf;AACD,UAAM,MAAM,KAAK,MAAM,KAAK,IAAI,IAAI,GAAI;AACxC,UAAM,OAAO,MAAM,QAAQ;AAE3B,UAAM,OAAO,MAAM,KAAK,QAErB,gBAAgB;AAAA,MACjB,QAAQ,OAAO,YAAY;AAAA,MAC3B;AAAA,MACA,MAAM,OAAO,IAAI;AAAA,MACjB,IAAI,OAAO,GAAG;AAAA,IAChB,CAAC;AAED,QAAI,KAAK,MAAM,QAAQ,CAAC,KAAK,KAAK,KAAK,EAAE,SAAS,GAAG;AACnD,aAAO,EAAE,OAAO,QAAQ,UAAU,GAAG,aAAa,GAAG,eAAe,CAAC,EAAE;AAAA,IACzE;AAEA,UAAM,QAAQ,iBAAiB,YAAY,KAAK,CAAC;AAEjD,WAAO;AAAA,MACL,OAAO,MAAM;AAAA,MACb,UAAU,MAAM;AAAA,MAChB,aAAa,MAAM;AAAA,MACnB,eAAe,KAAK;AAAA,IACtB;AAAA,EACF;AAAA;AAAA;AAAA;AAAA,EAKA,MAAM,aAAa,OAAsF;AACvG,UAAM,OAAO,MAAM,KAAK,QAErB,UAAU,EAAE,GAAG,MAAM,CAAC;AAEzB,YAAQ,KAAK,UAAU,CAAC,GAAG,MAAM,GAAG,EAAE;AAAA,EACxC;AACF;AAEO,SAAS,oBAAoB,QAA+B;AACjE,SAAO,IAAI,cAAc,EAAE,OAAO,CAAC;AACrC;","names":[]}