autohand-cli 0.7.14 → 0.8.2

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (414)
  1. package/README.md +97 -2
  2. package/assets/icon.png +0 -0
  3. package/dist/AgentRegistry-7LDL5HJH.js +10 -0
  4. package/dist/AgentRegistry-NQCLWABO.cjs +10 -0
  5. package/dist/{AutomodeManager-NGRAO2MH.js → AutomodeManager-MWLKGPZK.js} +2 -0
  6. package/dist/{AutomodeManager-ZKQMBM4T.cjs → AutomodeManager-NYIZNODK.cjs} +3 -1
  7. package/dist/CommunitySkillsCache-6QPRMTJO.js +8 -0
  8. package/dist/CommunitySkillsCache-GTQMOCCO.cjs +8 -0
  9. package/dist/{GitHubRegistryFetcher-US2JJID4.js → GitHubRegistryFetcher-6JQ5JEDZ.js} +1 -0
  10. package/dist/{GitHubRegistryFetcher-K744NNAJ.cjs → GitHubRegistryFetcher-S7QFUEKV.cjs} +1 -0
  11. package/dist/HookManager-Q2KYMCP4.cjs +7 -0
  12. package/dist/HookManager-TTP4Y6DC.js +7 -0
  13. package/dist/ImportWizard-35YBJ4AM.cjs +466 -0
  14. package/dist/ImportWizard-XH7CINCH.js +466 -0
  15. package/dist/LearnAdvisor-A4Q5PPBI.js +9 -0
  16. package/dist/LearnAdvisor-GASQD7HT.cjs +9 -0
  17. package/dist/McpClientManager-7RM6YT35.js +8 -0
  18. package/dist/McpClientManager-RKD7C6OY.cjs +8 -0
  19. package/dist/MemoryManager-GUNLRP5S.js +8 -0
  20. package/dist/MemoryManager-TNSGKDKX.cjs +8 -0
  21. package/dist/{PermissionManager-U5OMGR3L.js → PermissionManager-ATUV34LQ.js} +4 -3
  22. package/dist/PermissionManager-KMN53FJP.cjs +11 -0
  23. package/dist/ProjectProfiler-UMJJSOCE.js +194 -0
  24. package/dist/ProjectProfiler-ZDWR2ODG.cjs +194 -0
  25. package/dist/ProviderFactory-MR5B23QJ.js +9 -0
  26. package/dist/ProviderFactory-VFGCJJX6.cjs +9 -0
  27. package/dist/SessionManager-FEUAU3ZJ.cjs +10 -0
  28. package/dist/SessionManager-IKWAK2PI.js +10 -0
  29. package/dist/SkillsRegistry-KPQFTRIT.cjs +9 -0
  30. package/dist/SkillsRegistry-XJSKPDF2.js +9 -0
  31. package/dist/SubAgent-NYH6GWQ3.js +11 -0
  32. package/dist/SubAgent-PZKBDUBA.cjs +11 -0
  33. package/dist/{SyncApiClient-AYXYSOJM.js → SyncApiClient-LVIO4C2S.js} +1 -0
  34. package/dist/{SyncApiClient-ID3KXEMA.cjs → SyncApiClient-ZNYMT36M.cjs} +1 -0
  35. package/dist/about-HHTF2YFL.js +12 -0
  36. package/dist/about-JGRVNNQC.cjs +12 -0
  37. package/dist/actionExecutor-U6IBN2TU.cjs +19 -0
  38. package/dist/actionExecutor-XT5FW3W6.js +19 -0
  39. package/dist/add-dir-247K3XRY.js +10 -0
  40. package/dist/add-dir-GS4DXKKH.cjs +10 -0
  41. package/dist/agents/builtin/code-cleaner.md +14 -0
  42. package/dist/agents/builtin/docs-writer.md +14 -0
  43. package/dist/agents/builtin/researcher.md +14 -0
  44. package/dist/agents/builtin/reviewer.md +15 -0
  45. package/dist/agents/builtin/tester.md +15 -0
  46. package/dist/agents/builtin/todo-resolver.md +15 -0
  47. package/dist/agents-R6ZEFTVR.cjs +12 -0
  48. package/dist/agents-WJPQWQF2.js +12 -0
  49. package/dist/agents-new-HKVEIBDJ.js +14 -0
  50. package/dist/agents-new-X6GTHIO6.cjs +14 -0
  51. package/dist/assets/icon.png +0 -0
  52. package/dist/autoSkill-6TGBTEQD.js +20 -0
  53. package/dist/autoSkill-H4T6VVDA.cjs +20 -0
  54. package/dist/automode-BC6NVECO.js +10 -0
  55. package/dist/automode-WN2RSOGW.cjs +10 -0
  56. package/dist/{cc-UTTLESTY.js → cc-7LEIJ3KF.js} +1 -0
  57. package/dist/{cc-2W6M7J45.cjs → cc-Q5MM4AWC.cjs} +1 -0
  58. package/dist/{chunk-N4ZSG6JJ.cjs → chunk-22D2CNTP.cjs} +2 -2
  59. package/dist/chunk-245KJE5Y.cjs +55 -0
  60. package/dist/chunk-24QIWILL.js +51 -0
  61. package/dist/{chunk-MYNHJHDZ.js → chunk-2AA5MFES.js} +1 -1
  62. package/dist/{chunk-TSY7JHIV.cjs → chunk-33A755XB.cjs} +2 -2
  63. package/dist/chunk-33RSHBDH.js +131 -0
  64. package/dist/{chunk-QRGPAUST.js → chunk-34M3HWLR.js} +2 -2
  65. package/dist/{chunk-U7CZFKPL.cjs → chunk-3K2ESU53.cjs} +2 -2
  66. package/dist/{chunk-SIGWDEPS.cjs → chunk-3L53OA4E.cjs} +10 -10
  67. package/dist/chunk-3OEDGIFW.js +42 -0
  68. package/dist/chunk-3OTU3RS3.cjs +1607 -0
  69. package/dist/{chunk-MNSTWHK3.cjs → chunk-3PCTTUNW.cjs} +11 -11
  70. package/dist/{chunk-P5VDZ6PV.js → chunk-3PDTTAKJ.js} +1 -1
  71. package/dist/chunk-47CKWKEX.cjs +59 -0
  72. package/dist/{chunk-Z4J4W6YQ.cjs → chunk-4JNNTOGF.cjs} +2 -48
  73. package/dist/{chunk-GVZPIQWB.js → chunk-4PKF7WPD.js} +11 -5
  74. package/dist/{chunk-3S4DEIJP.cjs → chunk-5IXII4HX.cjs} +2 -2
  75. package/dist/{chunk-CRQKDBLD.js → chunk-5P2NXKP3.js} +98 -64
  76. package/dist/{chunk-DSKVMFRM.cjs → chunk-643VRA5S.cjs} +12 -4
  77. package/dist/{chunk-VVBBEYTH.cjs → chunk-6HYLHBQG.cjs} +10 -10
  78. package/dist/{chunk-L5ZFPWHY.js → chunk-6OYHF6MF.js} +12 -4
  79. package/dist/{chunk-BPTBKO7D.js → chunk-6RF7UKUS.js} +224 -37
  80. package/dist/{chunk-YHGTBPEC.js → chunk-6ZCULLCA.js} +1 -1
  81. package/dist/{chunk-VHBUKGRG.js → chunk-72FKPBT5.js} +4 -4
  82. package/dist/{chunk-2U5HFVRO.cjs → chunk-7BTSG4ME.cjs} +5165 -2194
  83. package/dist/chunk-7UOUW76C.js +603 -0
  84. package/dist/{chunk-FEVHH525.cjs → chunk-A4IJHHV7.cjs} +11 -5
  85. package/dist/{chunk-B6EBHCK2.cjs → chunk-AEJH23FO.cjs} +6 -6
  86. package/dist/{chunk-WLTVF77A.js → chunk-ALYU6VTM.js} +1 -1
  87. package/dist/{chunk-NMWEDN4Z.js → chunk-APIXPPMT.js} +5165 -2194
  88. package/dist/chunk-AS6RTLN7.cjs +203 -0
  89. package/dist/{chunk-BHV7CBNT.js → chunk-AYS2ASM7.js} +1 -1
  90. package/dist/{chunk-GRSVQ5YZ.js → chunk-AYSFIUFW.js} +44 -12
  91. package/dist/{chunk-NUHYCFHW.cjs → chunk-BVKXEQVG.cjs} +54 -65
  92. package/dist/chunk-BWN2CLLM.cjs +298 -0
  93. package/dist/{chunk-4HA7IHLJ.cjs → chunk-C5IJIM2V.cjs} +38 -16
  94. package/dist/{chunk-SRLY7K6J.js → chunk-CAMZTXV6.js} +2 -2
  95. package/dist/chunk-CDBPBM2K.cjs +29 -0
  96. package/dist/chunk-CNBKZEX5.cjs +109 -0
  97. package/dist/{chunk-WQUQ5JMM.js → chunk-CWMZKFTT.js} +4 -4
  98. package/dist/{chunk-CKN2BLHK.cjs → chunk-CZXGCVTR.cjs} +2 -2
  99. package/dist/{chunk-SZP4ULM5.cjs → chunk-DJDE4DTT.cjs} +17 -17
  100. package/dist/chunk-DN573ME7.cjs +1675 -0
  101. package/dist/chunk-DRE2RXBZ.js +4498 -0
  102. package/dist/chunk-DSPQEHDT.js +29 -0
  103. package/dist/{chunk-SFNT5DYE.cjs → chunk-DVUHHH3B.cjs} +4 -4
  104. package/dist/chunk-DVZOENQ7.cjs +58 -0
  105. package/dist/{chunk-PWLLLJHU.js → chunk-EGFT4PGW.js} +3 -1
  106. package/dist/chunk-EGMZDTSL.js +55 -0
  107. package/dist/chunk-EZMINVLU.js +123 -0
  108. package/dist/chunk-FHK7UDOJ.cjs +42 -0
  109. package/dist/{chunk-KWRUQRXR.js → chunk-FKSDEWDH.js} +44 -10
  110. package/dist/chunk-FMB3TSWP.cjs +218 -0
  111. package/dist/chunk-FW774QXH.js +1838 -0
  112. package/dist/{chunk-MY3TZER2.js → chunk-G27PQQFD.js} +1 -1
  113. package/dist/{chunk-Y2ZSH3YF.cjs → chunk-G3V4SFET.cjs} +57 -23
  114. package/dist/chunk-G4CAKI3V.js +58 -0
  115. package/dist/{chunk-FB6JWNJS.js → chunk-GBHDROGL.js} +54 -65
  116. package/dist/{chunk-DEAEO7RI.js → chunk-GJH7XMSK.js} +15 -1
  117. package/dist/chunk-GLBAF54O.js +218 -0
  118. package/dist/{chunk-S47TCZDL.js → chunk-H5SWOLG6.js} +7 -7
  119. package/dist/chunk-HBXAA3XB.js +83 -0
  120. package/dist/{chunk-63BXZQZW.js → chunk-HIVRCQS2.js} +26 -4
  121. package/dist/{chunk-WOGJXDBU.cjs → chunk-HLHTG5ZU.cjs} +18 -4
  122. package/dist/{chunk-FK2DVRPJ.js → chunk-HLQV64Y5.js} +170 -4
  123. package/dist/chunk-HOAHWIQ5.cjs +260 -0
  124. package/dist/{chunk-D2XFTCRP.js → chunk-HQ7YZKXE.js} +1 -1
  125. package/dist/{chunk-L42HTMMR.cjs → chunk-HTLINWX6.cjs} +2 -2
  126. package/dist/{chunk-PKOAXQKW.cjs → chunk-HVKOZ2VP.cjs} +11 -11
  127. package/dist/chunk-HXGBSJL5.cjs +27 -0
  128. package/dist/chunk-I5IW3T2Y.js +310 -0
  129. package/dist/chunk-IETRBBMP.cjs +603 -0
  130. package/dist/{chunk-MTALRU7R.cjs → chunk-IFFXSTOM.cjs} +3 -3
  131. package/dist/{chunk-V7YTCNMN.cjs → chunk-IKGWDOGU.cjs} +174 -8
  132. package/dist/chunk-IQ5RXU6O.js +1675 -0
  133. package/dist/{chunk-ZBIBLOZL.js → chunk-IVM5F2AE.js} +500 -317
  134. package/dist/chunk-J4Q7XR3G.js +260 -0
  135. package/dist/{chunk-XL77XYI2.cjs → chunk-J6QET7EF.cjs} +27 -7
  136. package/dist/{chunk-TQB222ZB.js → chunk-JCLYQ2JC.js} +2 -2
  137. package/dist/chunk-JSBRDJBE.js +30 -0
  138. package/dist/{chunk-XPOHYKR3.js → chunk-JX3DFKBI.js} +2 -2
  139. package/dist/chunk-JYTDYJVW.js +27 -0
  140. package/dist/{chunk-6SHHB2VD.js → chunk-KPELYZ6L.js} +2 -2
  141. package/dist/{chunk-ZLOTP56B.cjs → chunk-KWXVKLQ5.cjs} +5 -5
  142. package/dist/chunk-L3WAH3EM.cjs +131 -0
  143. package/dist/{chunk-ZXIQCYYV.cjs → chunk-LA7H35XM.cjs} +9 -9
  144. package/dist/chunk-LENHP55G.cjs +1838 -0
  145. package/dist/chunk-LJFUXC56.cjs +123 -0
  146. package/dist/{chunk-R5OO7MEB.cjs → chunk-LNMYK2F5.cjs} +22 -22
  147. package/dist/chunk-LQGVEP3E.js +109 -0
  148. package/dist/{chunk-KXAAEROY.js → chunk-LWUJFGOZ.js} +2 -2
  149. package/dist/chunk-MAKMSQMQ.cjs +504 -0
  150. package/dist/{chunk-BG4OQUKP.js → chunk-MBBY4ZIK.js} +1 -1
  151. package/dist/chunk-MSED7RH2.cjs +267 -0
  152. package/dist/{chunk-NMGF2KUN.js → chunk-MYISNQH4.js} +1 -1
  153. package/dist/chunk-N23UAW4I.js +59 -0
  154. package/dist/chunk-N254NRHT.cjs +30 -0
  155. package/dist/{chunk-TOTDRAWG.js → chunk-NDMIPTV4.js} +1 -1
  156. package/dist/{chunk-AIH6GUGB.cjs → chunk-NNPAM4HC.cjs} +5 -5
  157. package/dist/{chunk-HSPWX4Z2.cjs → chunk-O4IF4NJT.cjs} +231 -44
  158. package/dist/{chunk-DZHR34H6.cjs → chunk-OGV4WJ5L.cjs} +8 -8
  159. package/dist/chunk-OHUZKDGX.js +348 -0
  160. package/dist/{chunk-BRXIEKJ3.cjs → chunk-OLSBBZW6.cjs} +5 -5
  161. package/dist/{chunk-MILZEEUV.js → chunk-OOKY3HPZ.js} +9 -3
  162. package/dist/chunk-P47WPOXN.js +298 -0
  163. package/dist/{chunk-ULMPJUJW.cjs → chunk-PRRCJFU3.cjs} +23 -23
  164. package/dist/{chunk-SMHY3Q7B.cjs → chunk-Q7XSCYND.cjs} +54 -22
  165. package/dist/chunk-QCLYBIMM.cjs +51 -0
  166. package/dist/chunk-QMAKTSZB.cjs +48 -0
  167. package/dist/{chunk-DTFR3WD6.js → chunk-QNGEW5TC.js} +1 -1
  168. package/dist/chunk-QOXPOR5D.js +267 -0
  169. package/dist/chunk-R33VKSH5.cjs +348 -0
  170. package/dist/{chunk-RJP3SZ7Q.cjs → chunk-RD5XAJR2.cjs} +492 -309
  171. package/dist/chunk-RGR6ME5J.cjs +844 -0
  172. package/dist/{chunk-EOGKE5GD.cjs → chunk-RKJTGGMU.cjs} +221 -126
  173. package/dist/{chunk-GD4AFYJ3.js → chunk-RO6WYEWH.js} +24 -4
  174. package/dist/chunk-S52YW5ZQ.js +844 -0
  175. package/dist/{chunk-6DWXHBAY.js → chunk-SAHBLB3E.js} +222 -127
  176. package/dist/{chunk-JHOQABEF.js → chunk-SCXX4LW5.js} +5 -5
  177. package/dist/{chunk-GIZL57FE.cjs → chunk-SEKD5FH3.cjs} +3 -1
  178. package/dist/{chunk-JWPI6O5Z.js → chunk-SKV2F3NM.js} +31 -4
  179. package/dist/{chunk-FHUNAB2K.cjs → chunk-SKYG33B2.cjs} +33 -6
  180. package/dist/{chunk-BISFR6ZL.js → chunk-SLQAYV3W.js} +1 -1
  181. package/dist/{chunk-RFNCTE4V.cjs → chunk-SYVYLZZF.cjs} +2 -2
  182. package/dist/{chunk-3XJD56Z4.js → chunk-T73IDKDF.js} +10 -3
  183. package/dist/chunk-TBEGGJNC.cjs +310 -0
  184. package/dist/{chunk-RRZS5A53.js → chunk-TNZRZQ7Q.js} +1 -1
  185. package/dist/{chunk-CH4SPVFD.cjs → chunk-TXSDBGKX.cjs} +10 -3
  186. package/dist/chunk-U3WDY42C.cjs +42 -0
  187. package/dist/{chunk-425MT6Y5.cjs → chunk-U46VYPLR.cjs} +9 -9
  188. package/dist/{chunk-OLG7LZBD.js → chunk-VG34MG2U.js} +1 -1
  189. package/dist/{chunk-XDVG3NM4.js → chunk-W3X6PAC7.js} +2 -48
  190. package/dist/{chunk-LYMTYC67.js → chunk-WHE2SWHU.js} +2 -2
  191. package/dist/chunk-WM5PAOTQ.cjs +4498 -0
  192. package/dist/chunk-WNUVPKBW.js +42 -0
  193. package/dist/{chunk-EV53SLSB.cjs → chunk-WPVWQSL7.cjs} +4 -4
  194. package/dist/chunk-WQ3VJXZB.js +118 -0
  195. package/dist/{chunk-HMRDNRTH.js → chunk-X2MSVKDV.js} +2 -2
  196. package/dist/chunk-X3WS5LDG.js +504 -0
  197. package/dist/{chunk-43XS26AQ.cjs → chunk-X5VSP65C.cjs} +4 -4
  198. package/dist/{chunk-DSCQPWUB.cjs → chunk-X5YJ34FZ.cjs} +15 -15
  199. package/dist/chunk-XAV24VYN.js +48 -0
  200. package/dist/chunk-XDLH4EDL.cjs +118 -0
  201. package/dist/{chunk-X765A7J5.js → chunk-XRZEUWKF.js} +1 -1
  202. package/dist/{chunk-ZKZRFH37.cjs → chunk-XTB6VJVQ.cjs} +6 -6
  203. package/dist/{chunk-H3GBSPK5.js → chunk-XX2ZO7DS.js} +14 -6
  204. package/dist/{chunk-RUZB43HU.cjs → chunk-Y72HH2TF.cjs} +22 -14
  205. package/dist/chunk-YFXTE422.cjs +92 -0
  206. package/dist/{chunk-OSUWEUZE.js → chunk-YGN4CQIP.js} +1 -1
  207. package/dist/{chunk-KC5FPUOF.cjs → chunk-YRLYSQEQ.cjs} +2 -2
  208. package/dist/{chunk-3KBBARKO.js → chunk-YZXUDM5X.js} +85 -28
  209. package/dist/chunk-Z36XBUMX.cjs +83 -0
  210. package/dist/chunk-ZK6HOR62.js +92 -0
  211. package/dist/{chunk-PDKNHU5G.cjs → chunk-ZQE72E6W.cjs} +22 -16
  212. package/dist/chunk-ZVY2XD6T.js +1607 -0
  213. package/dist/{chunk-XBUMKEFN.cjs → chunk-ZYQMLKOK.cjs} +91 -34
  214. package/dist/clear-UO4MNWZW.cjs +12 -0
  215. package/dist/clear-ZJ5NYP6E.js +12 -0
  216. package/dist/communityInstaller-6KCFN7YZ.js +19 -0
  217. package/dist/communityInstaller-PVSOFDZD.cjs +19 -0
  218. package/dist/completion-MMF2PN2H.js +14 -0
  219. package/dist/completion-UI5WKHXI.cjs +14 -0
  220. package/dist/config-E7RINK4R.cjs +18 -0
  221. package/dist/config-ZN66VXPS.js +18 -0
  222. package/dist/constants-6CPCJ3DY.cjs +21 -0
  223. package/dist/{constants-V6J54N3X.js → constants-UFM5B232.js} +2 -1
  224. package/dist/{defaultHooks-WLMRQUXG.cjs → defaultHooks-RCXPHF4M.cjs} +3 -1
  225. package/dist/{defaultHooks-R56VYG7I.js → defaultHooks-RDRMER3Z.js} +2 -0
  226. package/dist/export-N4XIVDSL.cjs +12 -0
  227. package/dist/export-W22L4D5C.js +12 -0
  228. package/dist/extractSessionMemories-SDW2MVBQ.cjs +7 -0
  229. package/dist/extractSessionMemories-V7K42ZHW.js +7 -0
  230. package/dist/feedback-DR6ADSNE.cjs +15 -0
  231. package/dist/feedback-FEEAP4QW.js +15 -0
  232. package/dist/filesystem-3SGCW2BF.js +10 -0
  233. package/dist/filesystem-MCFXJQ6R.cjs +10 -0
  234. package/dist/formatters-6K7QVWQL.cjs +10 -0
  235. package/dist/formatters-DQHO5I36.js +10 -0
  236. package/dist/{help-LKKQU2TN.js → help-2BLR7L43.js} +3 -2
  237. package/dist/help-AQHGTS7P.cjs +12 -0
  238. package/dist/{history-AV4XBFRK.js → history-5FZ3M2AK.js} +3 -2
  239. package/dist/history-NIUDRMKA.cjs +14 -0
  240. package/dist/hooks-2EY4IPKV.js +13 -0
  241. package/dist/hooks-LJVORRIG.cjs +13 -0
  242. package/dist/i18n-ARDG2SMC.cjs +33 -0
  243. package/dist/{i18n-BSAPXM56.js → i18n-K7QOWIBH.js} +2 -1
  244. package/dist/ide-GFW6IJHD.js +12 -0
  245. package/dist/ide-N2ZNSSB3.cjs +12 -0
  246. package/dist/import-DFVN3KNZ.js +10 -0
  247. package/dist/import-QEME3E4T.cjs +170 -0
  248. package/dist/import-UXM3VK7B.js +170 -0
  249. package/dist/import-ZS6DPGU5.cjs +10 -0
  250. package/dist/index.cjs +11233 -11804
  251. package/dist/index.js +12594 -13165
  252. package/dist/init-PY75HA3S.cjs +10 -0
  253. package/dist/init-QNMWLAVY.js +10 -0
  254. package/dist/language-5UE4G2BT.cjs +18 -0
  255. package/dist/language-UXMHEZUJ.js +18 -0
  256. package/dist/learn-HJ3FLNZC.cjs +20 -0
  257. package/dist/learn-MVYS3RU5.js +20 -0
  258. package/dist/{lint-44UQJ673.cjs → lint-D5UOJWIK.cjs} +1 -0
  259. package/dist/{lint-TA2ZHVLM.js → lint-NJPZWVN2.js} +1 -0
  260. package/dist/{localProjectPermissions-WQYMGI42.js → localProjectPermissions-N77HA3XK.js} +3 -2
  261. package/dist/localProjectPermissions-UFSMNTBJ.cjs +18 -0
  262. package/dist/login-DSE7H63A.js +20 -0
  263. package/dist/login-V3MEWPKN.cjs +20 -0
  264. package/dist/logout-BMVCLKKW.js +18 -0
  265. package/dist/logout-XEG7FHOZ.cjs +18 -0
  266. package/dist/mcp-PYUR4PHO.js +18 -0
  267. package/dist/mcp-SG6JFLGC.cjs +18 -0
  268. package/dist/{mcp-install-2KVKRAMQ.cjs → mcp-install-G27HSS3Z.cjs} +26 -14
  269. package/dist/{mcp-install-77UXRN6R.js → mcp-install-VESN42PI.js} +21 -9
  270. package/dist/memory-4ZMMEZ2Z.js +10 -0
  271. package/dist/memory-QSGMVVGH.cjs +10 -0
  272. package/dist/message-JUBOK2VU.js +9 -0
  273. package/dist/message-ZJ5AYAMT.cjs +9 -0
  274. package/dist/model-NANLBZ4Z.cjs +10 -0
  275. package/dist/model-ZXNV4AF7.js +10 -0
  276. package/dist/new-5QJY5JP2.js +12 -0
  277. package/dist/new-PMMG55UX.cjs +12 -0
  278. package/dist/{patch-BAAQIYSW.js → patch-5F6VBIT3.js} +2 -0
  279. package/dist/{patch-J32X2QQP.cjs → patch-MOD7QC3D.cjs} +3 -1
  280. package/dist/permissions-LECTCJ4H.cjs +13 -0
  281. package/dist/permissions-VP5VGIBL.js +13 -0
  282. package/dist/{plan-JFGNRL2S.js → plan-G5CEKJI4.js} +1 -0
  283. package/dist/{plan-B3CW5DXJ.cjs → plan-QKOHE3LH.cjs} +1 -0
  284. package/dist/quit-BKOOPHU5.cjs +10 -0
  285. package/dist/quit-FVFNYACP.js +10 -0
  286. package/dist/registry-KWZGYJC2.js +2108 -0
  287. package/dist/registry-YN4FQPOO.cjs +2108 -0
  288. package/dist/resume-EXFQSQPH.js +13 -0
  289. package/dist/resume-PP2IQM5S.cjs +13 -0
  290. package/dist/search-C56FBN67.cjs +17 -0
  291. package/dist/search-XGZDYBF4.js +17 -0
  292. package/dist/{session-T3TAZ5ZU.cjs → session-BSU2L5UI.cjs} +1 -0
  293. package/dist/{session-H5QWKE5E.js → session-SZMRN5KG.js} +1 -0
  294. package/dist/sessions-54KI3F2Q.js +10 -0
  295. package/dist/sessions-DDTSPNVW.cjs +10 -0
  296. package/dist/settings-BDO37TTO.cjs +30 -0
  297. package/dist/settings-FHRDFPLK.js +30 -0
  298. package/dist/share-IERCTBGN.cjs +14 -0
  299. package/dist/share-TGROUE6R.js +14 -0
  300. package/dist/skills-6OL4OSGA.js +76 -0
  301. package/dist/skills-FYY6F2WV.cjs +76 -0
  302. package/dist/skills-OM4IGBAA.cjs +26 -0
  303. package/dist/skills-S3GRN323.js +26 -0
  304. package/dist/{skills-install-MQINL3EC.js → skills-install-6CSWC24P.js} +97 -26
  305. package/dist/{skills-install-IKJZN4G2.cjs → skills-install-O3LZ2ETC.cjs} +106 -35
  306. package/dist/skills-new-ALD2PTHN.js +15 -0
  307. package/dist/skills-new-PWLKK7GW.cjs +15 -0
  308. package/dist/slashCommands-L4ZD33LJ.js +75 -0
  309. package/dist/slashCommands-YY2VUUDF.cjs +75 -0
  310. package/dist/status-3PC5XWSS.cjs +11 -0
  311. package/dist/status-KCLVOYPD.js +11 -0
  312. package/dist/sync-6SDWG5RK.js +18 -0
  313. package/dist/sync-7JMZVEQD.cjs +40 -0
  314. package/dist/{sync-EXYX7HXW.js → sync-KWX67OUN.js} +3 -2
  315. package/dist/sync-WHURZL3U.cjs +18 -0
  316. package/dist/tasks-5FPBIFLC.js +9 -0
  317. package/dist/tasks-TXGKGNH6.cjs +9 -0
  318. package/dist/team-5YXP3JGR.js +9 -0
  319. package/dist/team-IIWEZKNR.cjs +9 -0
  320. package/dist/teammate-2KMKJXAM.cjs +139 -0
  321. package/dist/teammate-L6EZQ3I2.js +139 -0
  322. package/dist/theme-BE5A4FPN.cjs +18 -0
  323. package/dist/theme-YMFCQP7J.js +18 -0
  324. package/dist/ui/questionModal.cjs +7 -25
  325. package/dist/ui/questionModal.js +6 -24
  326. package/dist/undo-KZHUUZTD.cjs +10 -0
  327. package/dist/undo-NEIEHQVX.js +10 -0
  328. package/dist/update-TVAJMMBC.js +82 -0
  329. package/dist/update-Z6BIIQDC.cjs +82 -0
  330. package/package.json +10 -3
  331. package/dist/CommunitySkillsCache-ILWHWE5P.js +0 -7
  332. package/dist/CommunitySkillsCache-KHC6RUJW.cjs +0 -7
  333. package/dist/HookManager-X47HCM5G.cjs +0 -6
  334. package/dist/HookManager-ZXKHCD7U.js +0 -6
  335. package/dist/MemoryManager-6ZT7IDO5.cjs +0 -7
  336. package/dist/MemoryManager-AJGS5AKB.js +0 -7
  337. package/dist/PermissionManager-HG6W2DGU.cjs +0 -10
  338. package/dist/SessionManager-BJ2G6VV4.cjs +0 -9
  339. package/dist/SessionManager-ENPGYK5J.js +0 -9
  340. package/dist/SkillsRegistry-6ZFOCT25.cjs +0 -8
  341. package/dist/SkillsRegistry-C2SHOZ5D.js +0 -8
  342. package/dist/about-3BJTNSLK.js +0 -11
  343. package/dist/about-EABQNJGV.cjs +0 -11
  344. package/dist/add-dir-7FD4DMDA.cjs +0 -9
  345. package/dist/add-dir-LOYJ4YB5.js +0 -9
  346. package/dist/agents-2Y6ASV7C.js +0 -10
  347. package/dist/agents-UOSPKLQL.cjs +0 -10
  348. package/dist/agents-new-23NSGAM5.js +0 -13
  349. package/dist/agents-new-WI2EL7IJ.cjs +0 -13
  350. package/dist/automode-LGWTY3UX.js +0 -9
  351. package/dist/automode-WLBQ7MN7.cjs +0 -9
  352. package/dist/chunk-5UBW2BGC.js +0 -33
  353. package/dist/chunk-I6DBWNLN.cjs +0 -169
  354. package/dist/chunk-IZBCMJHJ.cjs +0 -33
  355. package/dist/completion-7WGMHKOR.cjs +0 -13
  356. package/dist/completion-KH33NSGP.js +0 -13
  357. package/dist/constants-RBQTR32A.cjs +0 -20
  358. package/dist/export-3QN3IH7A.js +0 -11
  359. package/dist/export-BI54X3MP.cjs +0 -11
  360. package/dist/feedback-CI4OIPOS.cjs +0 -14
  361. package/dist/feedback-GFPL5STE.js +0 -14
  362. package/dist/formatters-N5IJKYZY.cjs +0 -8
  363. package/dist/formatters-UG6VZJJ5.js +0 -8
  364. package/dist/help-CWMUGD3V.cjs +0 -11
  365. package/dist/history-73VBEMSI.cjs +0 -13
  366. package/dist/hooks-62UDQBGH.cjs +0 -12
  367. package/dist/hooks-XORDJD5X.js +0 -12
  368. package/dist/i18n-X2IU2EZD.cjs +0 -32
  369. package/dist/ide-RPKZALQV.js +0 -11
  370. package/dist/ide-YMNXJB6A.cjs +0 -11
  371. package/dist/init-J5HR4R7U.js +0 -9
  372. package/dist/init-JCC7RVMC.cjs +0 -9
  373. package/dist/language-AZISJCEZ.js +0 -16
  374. package/dist/language-F65RA6FZ.cjs +0 -16
  375. package/dist/localProjectPermissions-2EATUDZM.cjs +0 -17
  376. package/dist/login-5HLPMECE.js +0 -18
  377. package/dist/login-ISWYYBXP.cjs +0 -18
  378. package/dist/logout-3EKZM5J3.cjs +0 -16
  379. package/dist/logout-GE7TSZ24.js +0 -16
  380. package/dist/mcp-EW64QRFA.cjs +0 -15
  381. package/dist/mcp-VHS7AMF2.js +0 -15
  382. package/dist/memory-2I473RU3.js +0 -9
  383. package/dist/memory-JZ6NPSP3.cjs +0 -9
  384. package/dist/model-GXZLARPT.js +0 -9
  385. package/dist/model-Y274DBDO.cjs +0 -9
  386. package/dist/new-BG5VIGZ7.cjs +0 -9
  387. package/dist/new-YXFDQOA7.js +0 -9
  388. package/dist/permissions-QILEAGBP.cjs +0 -12
  389. package/dist/permissions-WVEOVMWO.js +0 -12
  390. package/dist/quit-NC32OEJG.cjs +0 -9
  391. package/dist/quit-WRRIGU33.js +0 -9
  392. package/dist/resume-GJIKIDPR.cjs +0 -12
  393. package/dist/resume-RMJNCAOK.js +0 -12
  394. package/dist/search-UIWIXB73.js +0 -14
  395. package/dist/search-WQNXDA2E.cjs +0 -14
  396. package/dist/sessions-HPFX2GDD.js +0 -9
  397. package/dist/sessions-SAQU6MFA.cjs +0 -9
  398. package/dist/share-2WH5ZVOO.cjs +0 -13
  399. package/dist/share-PSSWWVV5.js +0 -13
  400. package/dist/skills-LJZA6PVJ.js +0 -13
  401. package/dist/skills-YTYGART7.cjs +0 -13
  402. package/dist/skills-new-3WCU3CWB.js +0 -14
  403. package/dist/skills-new-O5LFVFZU.cjs +0 -14
  404. package/dist/slashCommands-7IRDOXOQ.cjs +0 -55
  405. package/dist/slashCommands-C6CAQA25.js +0 -55
  406. package/dist/status-4EDV2LSY.cjs +0 -10
  407. package/dist/status-NU7TJDCE.js +0 -10
  408. package/dist/sync-3GFSEIAZ.js +0 -16
  409. package/dist/sync-6M3WRKMH.cjs +0 -39
  410. package/dist/sync-CQNQDNTJ.cjs +0 -16
  411. package/dist/theme-EMJGULMI.cjs +0 -16
  412. package/dist/theme-FGDSXNU3.js +0 -16
  413. package/dist/undo-CTXQYE7C.cjs +0 -9
  414. package/dist/undo-HX2ZMECP.js +0 -9
@@ -0,0 +1,1607 @@
1
"use strict";Object.defineProperty(exports, "__esModule", {value: true});
// Runtime helpers emitted by the transpiler for `??` and `?.` operators.

// `lhs ?? rhsFn()`: return lhs unless it is null/undefined, in which case
// lazily evaluate and return the fallback value.
function _nullishCoalesce(lhs, rhsFn) {
  return lhs != null ? lhs : rhsFn();
}

// Interpreter for a flattened optional chain. `ops` is
// [startValue, op1, fn1, op2, fn2, ...] where each op is one of
// 'access' | 'optionalAccess' | 'call' | 'optionalCall'.
function _optionalChain(ops) {
  let lastAccessLHS = undefined;
  let value = ops[0];
  for (let i = 1; i < ops.length; i += 2) {
    const op = ops[i];
    const fn = ops[i + 1];
    // Optional operators short-circuit the whole chain on null/undefined.
    if ((op === 'optionalAccess' || op === 'optionalCall') && value == null) {
      return undefined;
    }
    if (op === 'access' || op === 'optionalAccess') {
      lastAccessLHS = value;
      value = fn(value);
    } else {
      // 'call' / 'optionalCall': invoke with `this` bound to the receiver
      // of the preceding property access.
      value = fn((...args) => value.call(lastAccessLHS, ...args));
      lastAccessLHS = undefined;
    }
  }
  return value;
}// src/providers/OllamaProvider.ts
2
var OllamaProvider = class {
  /**
   * Chat-completion provider backed by a local Ollama server.
   * @param {{baseUrl?: string, model?: string}} config - connection settings;
   *   defaults to http://localhost:11434 and "llama3.2:latest".
   */
  constructor(config) {
    // Flipped to true once the server reports the model cannot use tools,
    // so later requests stop sending tool definitions.
    this.disableTools = false;
    this.baseUrl = config.baseUrl || "http://localhost:11434";
    this.model = config.model || "llama3.2:latest";
  }
  /** @returns {string} stable provider identifier. */
  getName() {
    return "ollama";
  }
  /** @param {string} model - model name used for subsequent requests. */
  setModel(model) {
    this.model = model;
  }
  /**
   * List model names known to the server via /api/tags.
   * @returns {Promise<string[]>} model names, or [] on any failure.
   */
  async listModels() {
    try {
      const response = await fetch(`${this.baseUrl}/api/tags`);
      if (!response.ok) {
        return [];
      }
      const data = await response.json();
      // Guard against an unexpected payload shape (missing `models` array);
      // previously this threw a TypeError instead of returning [].
      return Array.isArray(data.models) ? data.models.map((m) => m.name) : [];
    } catch (e) {
      return [];
    }
  }
  /** @returns {Promise<boolean>} true when /api/tags answers with a 2xx. */
  async isAvailable() {
    try {
      const response = await fetch(`${this.baseUrl}/api/tags`);
      return response.ok;
    } catch (e) {
      return false;
    }
  }
  /**
   * Send a chat request to /api/chat. Retries once without tools when the
   * server reports the model "does not support tools".
   * @param {object} request - messages, model, temperature, tools, stream, signal.
   * @returns {Promise<object>} normalized completion result.
   * @throws {Error} on non-OK HTTP responses.
   */
  async complete(request) {
    const messages = request.messages.map((msg) => {
      const mapped = {
        role: msg.role,
        content: msg.content ?? ""
      };
      if (msg.name) {
        mapped.name = msg.name;
      }
      if (msg.role === "tool" && msg.tool_call_id) {
        mapped.tool_call_id = msg.tool_call_id;
      }
      if (msg.role === "assistant" && msg.tool_calls) {
        mapped.tool_calls = msg.tool_calls;
      }
      return mapped;
    });
    const body = {
      model: request.model || this.model,
      messages,
      stream: request.stream || false
    };
    if (request.temperature !== void 0) {
      body.options = { temperature: request.temperature };
    }
    if (!this.disableTools && request.tools && request.tools.length > 0) {
      body.tools = request.tools.map((tool) => ({
        type: "function",
        function: {
          name: tool.name,
          description: tool.description,
          parameters: tool.parameters ?? { type: "object", properties: {} }
        }
      }));
    }
    const response = await fetch(`${this.baseUrl}/api/chat`, {
      method: "POST",
      headers: {
        "Content-Type": "application/json"
      },
      body: JSON.stringify(body),
      signal: request.signal
    });
    if (!response.ok) {
      let errorDetail = "";
      try {
        const errorBody = await response.text();
        errorDetail = errorBody ? `: ${errorBody}` : "";
        // Some models lack tool support; disable tools and retry once.
        if (errorBody.includes("does not support tools") && body.tools) {
          console.warn(`Model ${body.model} does not support tools. Retrying without tool support.`);
          this.disableTools = true;
          delete body.tools;
          return this.complete(request);
        }
      } catch (e) {
        // Error body could not be read; fall through with status-only message.
      }
      throw new Error(`Ollama API error: ${response.status} ${response.statusText}${errorDetail}`);
    }
    if (request.stream) {
      return this.handleStreamingResponse(response);
    }
    const data = await response.json();
    let toolCalls;
    if (data.message.tool_calls && Array.isArray(data.message.tool_calls)) {
      toolCalls = data.message.tool_calls.map((tc, index) => ({
        id: `ollama-tool-${Date.now()}-${index}`,
        type: "function",
        function: {
          name: tc.function.name,
          // Ollama returns arguments as object, convert to JSON string for consistency
          arguments: JSON.stringify(tc.function.arguments)
        }
      }));
    }
    let usage;
    if (data.prompt_eval_count !== void 0 || data.eval_count !== void 0) {
      usage = {
        promptTokens: data.prompt_eval_count ?? 0,
        completionTokens: data.eval_count ?? 0,
        totalTokens: (data.prompt_eval_count ?? 0) + (data.eval_count ?? 0)
      };
    }
    return {
      id: `ollama-${Date.now()}`,
      // Epoch seconds, matching the streaming path below.
      created: Math.floor(new Date(data.created_at).getTime() / 1e3),
      content: data.message.content,
      toolCalls,
      finishReason: toolCalls?.length ? "tool_calls" : "stop",
      usage,
      raw: data
    };
  }
  /**
   * Accumulate an NDJSON streaming response into one completion result.
   * FIX: `created` is now epoch seconds in both branches; the fallback
   * previously returned `Date.now()` (milliseconds), inconsistent with
   * the non-streaming path.
   * @throws {Error} when the response has no readable body.
   */
  async handleStreamingResponse(response) {
    const reader = response.body?.getReader();
    if (!reader) {
      throw new Error("No response body");
    }
    const decoder = new TextDecoder();
    let fullContent = "";
    let lastData = null;
    try {
      while (true) {
        const { done, value } = await reader.read();
        if (done) break;
        const chunk = decoder.decode(value, { stream: true });
        const lines = chunk.split("\n").filter((line) => line.trim());
        for (const line of lines) {
          try {
            const data = JSON.parse(line);
            // Guard: a stream line (e.g. the final "done" record) may omit
            // message.content; previously this appended the string "undefined".
            fullContent += data.message?.content ?? "";
            lastData = data;
          } catch (e) {
            // Ignore partial / non-JSON lines in the stream.
          }
        }
      }
    } finally {
      reader.releaseLock();
    }
    return {
      id: `ollama-${Date.now()}`,
      created: lastData ? Math.floor(new Date(lastData.created_at).getTime() / 1e3) : Math.floor(Date.now() / 1e3),
      content: fullContent,
      raw: lastData
    };
  }
};
160
+
161
+ // src/providers/OpenAIProvider.ts
162
var OpenAIProvider = class {
  /**
   * Provider for the OpenAI Chat Completions API (or a compatible endpoint).
   * @param {{baseUrl?: string, apiKey?: string, model?: string}} config
   */
  constructor(config) {
    this.baseUrl = config.baseUrl || "https://api.openai.com/v1";
    this.apiKey = config.apiKey || "";
    this.model = config.model || "gpt-4o";
  }
  /** @returns {string} stable provider identifier. */
  getName() {
    return "openai";
  }
  /** @param {string} model - model name used for subsequent requests. */
  setModel(model) {
    this.model = model;
  }
  /**
   * Static catalogue of commonly available chat models (no API call).
   * @returns {Promise<string[]>}
   */
  async listModels() {
    return [
      "gpt-4o",
      "gpt-4o-mini",
      "gpt-4-turbo",
      "gpt-4",
      "gpt-3.5-turbo"
    ];
  }
  /** @returns {Promise<boolean>} true when /models accepts this API key. */
  async isAvailable() {
    try {
      const response = await fetch(`${this.baseUrl}/models`, {
        headers: {
          "Authorization": `Bearer ${this.apiKey}`
        }
      });
      return response.ok;
    } catch (e) {
      return false;
    }
  }
  /**
   * POST a chat completion request to /chat/completions.
   * FIX: temperature now uses `??` so an explicit 0 is honored; the previous
   * `request.temperature || 0.7` silently replaced 0 with the default.
   * @param {object} request - messages, model, temperature, maxTokens, tools,
   *   toolChoice, signal.
   * @returns {Promise<object>} normalized completion result.
   * @throws {Error} on non-OK HTTP responses.
   */
  async complete(request) {
    const body = {
      model: request.model || this.model,
      messages: request.messages.map((msg) => {
        const mapped = {
          // Any role other than system/user/tool is normalized to assistant.
          role: msg.role === "system" ? "system" : msg.role === "user" ? "user" : msg.role === "tool" ? "tool" : "assistant",
          content: msg.content
        };
        if (msg.role === "tool" && msg.tool_call_id) {
          mapped.tool_call_id = msg.tool_call_id;
        }
        if (msg.name) {
          mapped.name = msg.name;
        }
        return mapped;
      }),
      temperature: request.temperature ?? 0.7,
      max_tokens: request.maxTokens
    };
    if (request.tools && request.tools.length > 0) {
      body.tools = request.tools.map((tool) => ({
        type: "function",
        function: {
          name: tool.name,
          description: tool.description,
          parameters: tool.parameters ?? { type: "object", properties: {} }
        }
      }));
      if (request.toolChoice) {
        body.tool_choice = request.toolChoice;
      }
    }
    const response = await fetch(`${this.baseUrl}/chat/completions`, {
      method: "POST",
      headers: {
        "Content-Type": "application/json",
        "Authorization": `Bearer ${this.apiKey}`
      },
      body: JSON.stringify(body),
      signal: request.signal
    });
    if (!response.ok) {
      const error = await response.text();
      throw new Error(`OpenAI API error: ${response.status} ${error}`);
    }
    const data = await response.json();
    const message = data.choices[0].message;
    const finishReason = data.choices[0].finish_reason;
    let toolCalls;
    if (message.tool_calls && Array.isArray(message.tool_calls)) {
      toolCalls = message.tool_calls.map((tc) => ({
        id: tc.id,
        type: "function",
        function: {
          name: tc.function.name,
          // OpenAI already returns arguments as a JSON string; pass through.
          arguments: tc.function.arguments
        }
      }));
    }
    let usage;
    if (data.usage) {
      usage = {
        promptTokens: data.usage.prompt_tokens,
        completionTokens: data.usage.completion_tokens,
        totalTokens: data.usage.total_tokens
      };
    }
    return {
      id: data.id,
      created: data.created,
      content: message.content ?? "",
      toolCalls,
      finishReason,
      usage,
      raw: data
    };
  }
};
273
+
274
+ // src/providers/LlamaCppProvider.ts
275
+ var LlamaCppProvider = class {
276
+ constructor(config) {
277
+ const port = config.port || 8080;
278
+ this.baseUrl = config.baseUrl || `http://localhost:${port}`;
279
+ this.model = config.model || "llama-model";
280
+ }
281
+ getName() {
282
+ return "llamacpp";
283
+ }
284
+ setModel(model) {
285
+ this.model = model;
286
+ }
287
+ async listModels() {
288
+ try {
289
+ const response = await fetch(`${this.baseUrl}/v1/models`);
290
+ if (!response.ok) {
291
+ return this.model ? [this.model] : [];
292
+ }
293
+ const data = await response.json();
294
+ return _nullishCoalesce(_optionalChain([data, 'access', _5 => _5.data, 'optionalAccess', _6 => _6.map, 'call', _7 => _7((m) => m.id)]), () => ( [this.model]));
295
+ } catch (e6) {
296
+ return this.model ? [this.model] : [];
297
+ }
298
+ }
299
+ async isAvailable() {
300
+ try {
301
+ const response = await fetch(`${this.baseUrl}/health`);
302
+ return response.ok;
303
+ } catch (e7) {
304
+ return false;
305
+ }
306
+ }
307
+ async complete(request) {
308
+ const body = {
309
+ model: request.model || this.model,
310
+ messages: request.messages.map((msg) => {
311
+ const mapped = {
312
+ role: msg.role,
313
+ content: msg.content
314
+ };
315
+ if (msg.name) mapped.name = msg.name;
316
+ if (msg.role === "tool" && msg.tool_call_id) mapped.tool_call_id = msg.tool_call_id;
317
+ if (msg.role === "assistant" && msg.tool_calls) mapped.tool_calls = msg.tool_calls;
318
+ return mapped;
319
+ }),
320
+ temperature: _nullishCoalesce(request.temperature, () => ( 0.7)),
321
+ max_tokens: _nullishCoalesce(request.maxTokens, () => ( 4096)),
322
+ stream: false
323
+ };
324
+ if (request.tools && request.tools.length > 0) {
325
+ body.tools = request.tools.map((tool) => ({
326
+ type: "function",
327
+ function: {
328
+ name: tool.name,
329
+ description: tool.description,
330
+ parameters: _nullishCoalesce(tool.parameters, () => ( { type: "object", properties: {} }))
331
+ }
332
+ }));
333
+ }
334
+ const response = await fetch(`${this.baseUrl}/v1/chat/completions`, {
335
+ method: "POST",
336
+ headers: {
337
+ "Content-Type": "application/json"
338
+ },
339
+ body: JSON.stringify(body),
340
+ signal: request.signal
341
+ });
342
+ if (!response.ok) {
343
+ throw new Error(`llama.cpp API error: ${response.status} ${response.statusText}`);
344
+ }
345
+ const data = await response.json();
346
+ const choice = data.choices[0];
347
+ let toolCalls;
348
+ if (_optionalChain([choice, 'optionalAccess', _8 => _8.message, 'access', _9 => _9.tool_calls, 'optionalAccess', _10 => _10.length])) {
349
+ toolCalls = choice.message.tool_calls.map((tc) => ({
350
+ id: tc.id,
351
+ type: "function",
352
+ function: {
353
+ name: tc.function.name,
354
+ arguments: tc.function.arguments
355
+ }
356
+ }));
357
+ }
358
+ let usage;
359
+ if (data.usage) {
360
+ usage = {
361
+ promptTokens: data.usage.prompt_tokens,
362
+ completionTokens: data.usage.completion_tokens,
363
+ totalTokens: data.usage.total_tokens
364
+ };
365
+ }
366
+ const finishReason = _optionalChain([toolCalls, 'optionalAccess', _11 => _11.length]) ? "tool_calls" : _optionalChain([choice, 'optionalAccess', _12 => _12.finish_reason]) === "stop" || _optionalChain([choice, 'optionalAccess', _13 => _13.finish_reason]) === "length" || _optionalChain([choice, 'optionalAccess', _14 => _14.finish_reason]) === "content_filter" ? choice.finish_reason : "stop";
367
+ return {
368
+ id: data.id || `llamacpp-${Date.now()}`,
369
+ created: data.created || Math.floor(Date.now() / 1e3),
370
+ content: _nullishCoalesce(_optionalChain([choice, 'optionalAccess', _15 => _15.message, 'access', _16 => _16.content]), () => ( "")),
371
+ toolCalls,
372
+ finishReason,
373
+ usage,
374
+ raw: data
375
+ };
376
+ }
377
+ };
378
+
379
+ // src/providers/OpenRouterClient.ts
380
+ function sanitizeMessages(messages) {
381
+ return messages.map((msg) => {
382
+ const sanitized = {
383
+ role: msg.role,
384
+ content: msg.content
385
+ };
386
+ if (msg.role === "tool" && msg.tool_call_id) {
387
+ sanitized.tool_call_id = msg.tool_call_id;
388
+ }
389
+ if (msg.role === "assistant" && _optionalChain([msg, 'access', _17 => _17.tool_calls, 'optionalAccess', _18 => _18.length])) {
390
+ sanitized.tool_calls = msg.tool_calls;
391
+ }
392
+ if (msg.name) {
393
+ sanitized.name = msg.name;
394
+ }
395
+ return sanitized;
396
+ });
397
+ }
398
+ var DEFAULT_BASE_URL = "https://openrouter.ai/api/v1";
399
+ var DEFAULT_MAX_RETRIES = 3;
400
+ var MAX_ALLOWED_RETRIES = 5;
401
+ var DEFAULT_RETRY_DELAY = 1e3;
402
+ var DEFAULT_TIMEOUT = 3e4;
403
+ var FRIENDLY_ERRORS = {
404
+ 400: "The request was malformed. This often happens when the context is too long. Try /undo to remove recent turns or /new to start fresh.",
405
+ 401: "Authentication failed. Please verify your API key in ~/.autohand/config.json.",
406
+ 402: "Payment required. Please check your account balance or billing settings.",
407
+ 403: "Access denied. Your API key may not have permission for this model.",
408
+ 404: "The requested model was not found. Use /model to select a different one.",
409
+ 429: "Rate limit exceeded. Please wait a moment and try again, or choose a different model.",
410
+ 500: "The AI service encountered an internal error. Please try again later.",
411
+ 502: "The AI service is temporarily unavailable. Please try again in a few moments.",
412
+ 503: "The AI service is currently overloaded. Please try again later.",
413
+ 504: "The request timed out. The AI service may be experiencing high load."
414
+ };
415
+ var OpenRouterClient = class {
416
+ constructor(settings, networkSettings) {
417
+ this.apiKey = _nullishCoalesce(settings.apiKey, () => ( ""));
418
+ this.baseUrl = _nullishCoalesce(settings.baseUrl, () => ( DEFAULT_BASE_URL));
419
+ this.defaultModel = settings.model;
420
+ const configuredRetries = _nullishCoalesce(_optionalChain([networkSettings, 'optionalAccess', _19 => _19.maxRetries]), () => ( DEFAULT_MAX_RETRIES));
421
+ this.maxRetries = Math.min(
422
+ Math.max(0, configuredRetries),
423
+ MAX_ALLOWED_RETRIES
424
+ );
425
+ this.retryDelay = _nullishCoalesce(_optionalChain([networkSettings, 'optionalAccess', _20 => _20.retryDelay]), () => ( DEFAULT_RETRY_DELAY));
426
+ this.timeout = _nullishCoalesce(_optionalChain([networkSettings, 'optionalAccess', _21 => _21.timeout]), () => ( DEFAULT_TIMEOUT));
427
+ }
428
+ setDefaultModel(model) {
429
+ this.defaultModel = model;
430
+ }
431
+ async complete(request) {
432
+ const payload = {
433
+ model: _nullishCoalesce(request.model, () => ( this.defaultModel)),
434
+ messages: sanitizeMessages(request.messages),
435
+ temperature: _nullishCoalesce(request.temperature, () => ( 0.2)),
436
+ max_tokens: _nullishCoalesce(request.maxTokens, () => ( 16e3)),
437
+ // Increased from 1000 to allow large file generation
438
+ stream: _nullishCoalesce(request.stream, () => ( false))
439
+ };
440
+ if (request.tools && request.tools.length > 0) {
441
+ payload.tools = request.tools.map((tool) => ({
442
+ type: "function",
443
+ function: {
444
+ name: tool.name,
445
+ description: tool.description,
446
+ parameters: _nullishCoalesce(tool.parameters, () => ( { type: "object", properties: {} }))
447
+ }
448
+ }));
449
+ if (request.toolChoice) {
450
+ payload.tool_choice = request.toolChoice;
451
+ }
452
+ }
453
+ const model = (_nullishCoalesce(request.model, () => ( this.defaultModel))).toLowerCase();
454
+ if (request.thinkingLevel && request.thinkingLevel !== "normal") {
455
+ if (model.includes("o1") || model.includes("o3")) {
456
+ if (request.thinkingLevel === "extended") {
457
+ payload.reasoning_effort = "high";
458
+ } else if (request.thinkingLevel === "none") {
459
+ payload.reasoning_effort = "low";
460
+ }
461
+ }
462
+ if (model.includes("claude") && request.thinkingLevel === "extended") {
463
+ payload.provider = {
464
+ anthropic: {
465
+ thinking: {
466
+ type: "enabled",
467
+ budget_tokens: 1e4
468
+ }
469
+ }
470
+ };
471
+ }
472
+ }
473
+ const headers = {
474
+ "Content-Type": "application/json",
475
+ "HTTP-Referer": "https://autohand.dev",
476
+ "X-OpenRouter-Title": "Autohand Code CLI",
477
+ "X-OpenRouter-Categories": "cli-agent"
478
+ };
479
+ if (this.apiKey) {
480
+ headers.Authorization = `Bearer ${this.apiKey}`;
481
+ }
482
+ const payloadJson = JSON.stringify(payload);
483
+ const payloadSizeBytes = payloadJson.length;
484
+ const maxPayloadSize = 5 * 1024 * 1024;
485
+ if (payloadSizeBytes > maxPayloadSize) {
486
+ const sizeMB = (payloadSizeBytes / (1024 * 1024)).toFixed(2);
487
+ throw new Error(
488
+ `Request payload too large (${sizeMB}MB). This usually happens when the conversation history grows too long. Try using /undo to remove recent turns or /new to start fresh.`
489
+ );
490
+ }
491
+ let lastError = null;
492
+ for (let attempt = 0; attempt <= this.maxRetries; attempt++) {
493
+ try {
494
+ const response = await this.makeRequest(
495
+ payload,
496
+ headers,
497
+ request.signal,
498
+ payloadJson
499
+ );
500
+ return response;
501
+ } catch (error) {
502
+ lastError = error;
503
+ if (this.isNonRetryableError(error)) {
504
+ throw error;
505
+ }
506
+ if (attempt < this.maxRetries) {
507
+ const delay = this.retryDelay * Math.pow(2, attempt);
508
+ await this.sleep(delay);
509
+ }
510
+ }
511
+ }
512
+ throw _nullishCoalesce(lastError, () => ( new Error("Failed to communicate with the AI service. Please try again.")));
513
+ }
514
+ async makeRequest(payload, headers, signal, preSerializedBody) {
515
+ let response;
516
+ try {
517
+ const timeoutController = new AbortController();
518
+ const timeoutId = setTimeout(
519
+ () => timeoutController.abort(),
520
+ this.timeout
521
+ );
522
+ const combinedSignal = signal ? this.combineSignals(signal, timeoutController.signal) : timeoutController.signal;
523
+ try {
524
+ response = await fetch(`${this.baseUrl}/chat/completions`, {
525
+ method: "POST",
526
+ headers,
527
+ body: _nullishCoalesce(preSerializedBody, () => ( JSON.stringify(payload))),
528
+ signal: combinedSignal
529
+ });
530
+ } finally {
531
+ clearTimeout(timeoutId);
532
+ }
533
+ } catch (error) {
534
+ const err = error;
535
+ if (err.name === "AbortError" && _optionalChain([signal, 'optionalAccess', _22 => _22.aborted])) {
536
+ throw new Error("Request cancelled.");
537
+ }
538
+ if (err.name === "AbortError") {
539
+ throw new Error(
540
+ "Request timed out. The AI service may be experiencing high load."
541
+ );
542
+ }
543
+ throw new Error(
544
+ "Unable to connect to the AI service. Please check your internet connection."
545
+ );
546
+ }
547
+ if (!response.ok) {
548
+ throw new Error(await this.buildFriendlyError(response));
549
+ }
550
+ const json = await response.json();
551
+ const message = _optionalChain([json, 'optionalAccess', _23 => _23.choices, 'optionalAccess', _24 => _24[0], 'optionalAccess', _25 => _25.message]);
552
+ const text = _nullishCoalesce(_optionalChain([message, 'optionalAccess', _26 => _26.content]), () => ( ""));
553
+ const finishReason = _optionalChain([json, 'optionalAccess', _27 => _27.choices, 'optionalAccess', _28 => _28[0], 'optionalAccess', _29 => _29.finish_reason]);
554
+ let toolCalls;
555
+ if (_optionalChain([message, 'optionalAccess', _30 => _30.tool_calls]) && Array.isArray(message.tool_calls)) {
556
+ toolCalls = message.tool_calls.map((tc) => {
557
+ const rawArgs = _optionalChain([tc, 'access', _31 => _31.function, 'optionalAccess', _32 => _32.arguments]);
558
+ return {
559
+ id: tc.id,
560
+ type: "function",
561
+ function: {
562
+ name: _nullishCoalesce(_optionalChain([tc, 'access', _33 => _33.function, 'optionalAccess', _34 => _34.name]), () => ( "")),
563
+ arguments: _nullishCoalesce(rawArgs, () => ( "{}"))
564
+ }
565
+ };
566
+ });
567
+ }
568
+ let usage;
569
+ if (_optionalChain([json, 'optionalAccess', _35 => _35.usage])) {
570
+ usage = {
571
+ promptTokens: _nullishCoalesce(json.usage.prompt_tokens, () => ( 0)),
572
+ completionTokens: _nullishCoalesce(json.usage.completion_tokens, () => ( 0)),
573
+ totalTokens: _nullishCoalesce(json.usage.total_tokens, () => ( 0))
574
+ };
575
+ }
576
+ return {
577
+ id: _nullishCoalesce(json.id, () => ( "autohand-local")),
578
+ created: _nullishCoalesce(json.created, () => ( Date.now())),
579
+ content: text,
580
+ toolCalls,
581
+ finishReason,
582
+ usage,
583
+ raw: json
584
+ };
585
+ }
586
+ async buildFriendlyError(response) {
587
+ const status = response.status;
588
+ let errorDetail = "";
589
+ try {
590
+ const body = await response.json();
591
+ errorDetail = _optionalChain([body, 'optionalAccess', _36 => _36.error, 'optionalAccess', _37 => _37.message]) || _optionalChain([body, 'optionalAccess', _38 => _38.error]) || _optionalChain([body, 'optionalAccess', _39 => _39.message]) || "";
592
+ if (typeof errorDetail === "object") {
593
+ errorDetail = JSON.stringify(errorDetail);
594
+ }
595
+ } catch (e8) {
596
+ try {
597
+ errorDetail = await response.text();
598
+ } catch (e9) {
599
+ }
600
+ }
601
+ const friendlyMessage = FRIENDLY_ERRORS[status];
602
+ if (friendlyMessage) {
603
+ return errorDetail ? `${friendlyMessage}
604
+ ${errorDetail}` : friendlyMessage;
605
+ }
606
+ if (status >= 500) {
607
+ const base = "The AI service is temporarily unavailable. Please try again later.";
608
+ return errorDetail ? `${base}
609
+ (${status}: ${errorDetail})` : base;
610
+ }
611
+ if (status >= 400) {
612
+ const base = "The request could not be processed.";
613
+ return errorDetail ? `${base} (${status}: ${errorDetail})` : `${base} (HTTP ${status}) Please try again or adjust your prompt.`;
614
+ }
615
+ return errorDetail ? `An unexpected error occurred: ${errorDetail}` : "An unexpected error occurred. Please try again.";
616
+ }
617
+ isNonRetryableError(error) {
618
+ const message = error.message.toLowerCase();
619
+ if (message.includes("cancelled") || message.includes("aborted")) {
620
+ return true;
621
+ }
622
+ if (message.includes("authentication") || message.includes("api key")) {
623
+ return true;
624
+ }
625
+ if (message.includes("payment") || message.includes("access denied")) {
626
+ return true;
627
+ }
628
+ if (message.includes("not found")) {
629
+ return true;
630
+ }
631
+ return false;
632
+ }
633
+ combineSignals(signal1, signal2) {
634
+ const controller = new AbortController();
635
+ const abort = () => controller.abort();
636
+ signal1.addEventListener("abort", abort);
637
+ signal2.addEventListener("abort", abort);
638
+ if (signal1.aborted || signal2.aborted) {
639
+ controller.abort();
640
+ }
641
+ return controller.signal;
642
+ }
643
+ sleep(ms) {
644
+ return new Promise((resolve) => setTimeout(resolve, ms));
645
+ }
646
+ };
647
+
648
+ // src/providers/OpenRouterProvider.ts
649
+ var OpenRouterProvider = class {
650
+ constructor(config, networkSettings) {
651
+ this.client = new OpenRouterClient(config, networkSettings);
652
+ this.model = config.model;
653
+ }
654
+ getName() {
655
+ return "openrouter";
656
+ }
657
+ setModel(model) {
658
+ this.model = model;
659
+ this.client.setDefaultModel(model);
660
+ }
661
+ async listModels() {
662
+ return [
663
+ "anthropic/claude-3.5-sonnet",
664
+ "anthropic/claude-3-opus",
665
+ "google/gemini-pro-1.5",
666
+ "openai/gpt-4o",
667
+ "x-ai/grok-2-latest",
668
+ "meta-llama/llama-3.1-70b-instruct"
669
+ ];
670
+ }
671
+ async isAvailable() {
672
+ return true;
673
+ }
674
+ async complete(request) {
675
+ return this.client.complete(request);
676
+ }
677
+ };
678
+
679
+ // src/utils/platform.ts
680
+ function isAppleSilicon() {
681
+ return process.platform === "darwin" && process.arch === "arm64";
682
+ }
683
+ function isMLXSupported() {
684
+ return isAppleSilicon();
685
+ }
686
+
687
+ // src/providers/MLXProvider.ts
688
+ var MLXProvider = class {
689
+ constructor(config) {
690
+ const port = config.port || 8080;
691
+ this.baseUrl = config.baseUrl || `http://localhost:${port}`;
692
+ this.model = config.model || "mlx-model";
693
+ }
694
+ getName() {
695
+ return "mlx";
696
+ }
697
+ setModel(model) {
698
+ this.model = model;
699
+ }
700
+ async listModels() {
701
+ if (!isMLXSupported()) {
702
+ return [];
703
+ }
704
+ try {
705
+ const response = await fetch(`${this.baseUrl}/v1/models`);
706
+ if (!response.ok) {
707
+ return this.model ? [this.model] : [];
708
+ }
709
+ const data = await response.json();
710
+ return _nullishCoalesce(_optionalChain([data, 'access', _40 => _40.data, 'optionalAccess', _41 => _41.map, 'call', _42 => _42((m) => m.id)]), () => ( (this.model ? [this.model] : [])));
711
+ } catch (e10) {
712
+ return this.model ? [this.model] : [];
713
+ }
714
+ }
715
+ async isAvailable() {
716
+ if (!isMLXSupported()) {
717
+ return false;
718
+ }
719
+ try {
720
+ const response = await fetch(`${this.baseUrl}/v1/models`);
721
+ return response.ok;
722
+ } catch (e11) {
723
+ return false;
724
+ }
725
+ }
726
+ async complete(request) {
727
+ if (!isMLXSupported()) {
728
+ throw new Error("MLX is only supported on macOS with Apple Silicon");
729
+ }
730
+ const body = {
731
+ model: request.model || this.model,
732
+ messages: request.messages.map((msg) => {
733
+ const mapped = {
734
+ role: msg.role,
735
+ content: msg.content
736
+ };
737
+ if (msg.name) mapped.name = msg.name;
738
+ if (msg.role === "tool" && msg.tool_call_id) mapped.tool_call_id = msg.tool_call_id;
739
+ if (msg.role === "assistant" && msg.tool_calls) mapped.tool_calls = msg.tool_calls;
740
+ return mapped;
741
+ }),
742
+ temperature: _nullishCoalesce(request.temperature, () => ( 0.7)),
743
+ max_tokens: _nullishCoalesce(request.maxTokens, () => ( 4096)),
744
+ stream: false
745
+ };
746
+ if (request.tools && request.tools.length > 0) {
747
+ body.tools = request.tools.map((tool) => ({
748
+ type: "function",
749
+ function: {
750
+ name: tool.name,
751
+ description: tool.description,
752
+ parameters: _nullishCoalesce(tool.parameters, () => ( { type: "object", properties: {} }))
753
+ }
754
+ }));
755
+ }
756
+ const response = await fetch(`${this.baseUrl}/v1/chat/completions`, {
757
+ method: "POST",
758
+ headers: {
759
+ "Content-Type": "application/json"
760
+ },
761
+ body: JSON.stringify(body),
762
+ signal: request.signal
763
+ });
764
+ if (!response.ok) {
765
+ throw new Error(`MLX API error: ${response.status} ${response.statusText}`);
766
+ }
767
+ const data = await response.json();
768
+ const choice = data.choices[0];
769
+ let toolCalls;
770
+ if (_optionalChain([choice, 'optionalAccess', _43 => _43.message, 'access', _44 => _44.tool_calls, 'optionalAccess', _45 => _45.length])) {
771
+ toolCalls = choice.message.tool_calls.map((tc) => ({
772
+ id: tc.id,
773
+ type: "function",
774
+ function: {
775
+ name: tc.function.name,
776
+ arguments: tc.function.arguments
777
+ }
778
+ }));
779
+ }
780
+ let usage;
781
+ if (data.usage) {
782
+ usage = {
783
+ promptTokens: data.usage.prompt_tokens,
784
+ completionTokens: data.usage.completion_tokens,
785
+ totalTokens: data.usage.total_tokens
786
+ };
787
+ }
788
+ const finishReason = _optionalChain([toolCalls, 'optionalAccess', _46 => _46.length]) ? "tool_calls" : _optionalChain([choice, 'optionalAccess', _47 => _47.finish_reason]) === "stop" || _optionalChain([choice, 'optionalAccess', _48 => _48.finish_reason]) === "length" || _optionalChain([choice, 'optionalAccess', _49 => _49.finish_reason]) === "content_filter" ? choice.finish_reason : "stop";
789
+ return {
790
+ id: data.id || `mlx-${Date.now()}`,
791
+ created: data.created || Math.floor(Date.now() / 1e3),
792
+ content: _nullishCoalesce(_optionalChain([choice, 'optionalAccess', _50 => _50.message, 'access', _51 => _51.content]), () => ( "")),
793
+ toolCalls,
794
+ finishReason,
795
+ usage,
796
+ raw: data
797
+ };
798
+ }
799
+ };
800
+
801
+ // src/providers/LLMGatewayClient.ts
802
+ function sanitizeMessages2(messages) {
803
+ return messages.map((msg) => {
804
+ const sanitized = {
805
+ role: msg.role,
806
+ content: msg.content
807
+ };
808
+ if (msg.role === "tool" && msg.tool_call_id) {
809
+ sanitized.tool_call_id = msg.tool_call_id;
810
+ }
811
+ if (msg.role === "assistant" && _optionalChain([msg, 'access', _52 => _52.tool_calls, 'optionalAccess', _53 => _53.length])) {
812
+ sanitized.tool_calls = msg.tool_calls;
813
+ }
814
+ if (msg.name) {
815
+ sanitized.name = msg.name;
816
+ }
817
+ return sanitized;
818
+ });
819
+ }
820
+ var DEFAULT_BASE_URL2 = "https://api.llmgateway.io/v1";
821
+ var DEFAULT_MAX_RETRIES2 = 3;
822
+ var MAX_ALLOWED_RETRIES2 = 5;
823
+ var DEFAULT_RETRY_DELAY2 = 1e3;
824
+ var DEFAULT_TIMEOUT2 = 3e4;
825
+ var FRIENDLY_ERRORS2 = {
826
+ 400: "The request was malformed. This often happens when the context is too long. Try /undo to remove recent turns or /new to start fresh.",
827
+ 401: "Authentication failed. Please verify your LLM Gateway API key in ~/.autohand/config.json.",
828
+ 402: "Payment required. Please check your LLM Gateway account balance or billing settings.",
829
+ 403: "Access denied. Your API key may not have permission for this model.",
830
+ 404: "The requested model was not found. Use /model to select a different one.",
831
+ 429: "Rate limit exceeded. Please wait a moment and try again, or choose a different model.",
832
+ 500: "The LLM Gateway service encountered an internal error. Please try again later.",
833
+ 502: "The LLM Gateway service is temporarily unavailable. Please try again in a few moments.",
834
+ 503: "The LLM Gateway service is currently overloaded. Please try again later.",
835
+ 504: "The request timed out. The service may be experiencing high load."
836
+ };
837
+ var LLMGatewayClient = class {
838
+ constructor(settings, networkSettings) {
839
+ this.apiKey = _nullishCoalesce(settings.apiKey, () => ( ""));
840
+ this.baseUrl = _nullishCoalesce(settings.baseUrl, () => ( DEFAULT_BASE_URL2));
841
+ this.defaultModel = settings.model;
842
+ const configuredRetries = _nullishCoalesce(_optionalChain([networkSettings, 'optionalAccess', _54 => _54.maxRetries]), () => ( DEFAULT_MAX_RETRIES2));
843
+ this.maxRetries = Math.min(
844
+ Math.max(0, configuredRetries),
845
+ MAX_ALLOWED_RETRIES2
846
+ );
847
+ this.retryDelay = _nullishCoalesce(_optionalChain([networkSettings, 'optionalAccess', _55 => _55.retryDelay]), () => ( DEFAULT_RETRY_DELAY2));
848
+ this.timeout = _nullishCoalesce(_optionalChain([networkSettings, 'optionalAccess', _56 => _56.timeout]), () => ( DEFAULT_TIMEOUT2));
849
+ }
850
+ setDefaultModel(model) {
851
+ this.defaultModel = model;
852
+ }
853
+ async complete(request) {
854
+ const payload = {
855
+ model: _nullishCoalesce(request.model, () => ( this.defaultModel)),
856
+ messages: sanitizeMessages2(request.messages),
857
+ temperature: _nullishCoalesce(request.temperature, () => ( 0.2)),
858
+ max_tokens: _nullishCoalesce(request.maxTokens, () => ( 16e3)),
859
+ stream: _nullishCoalesce(request.stream, () => ( false))
860
+ };
861
+ if (request.tools && request.tools.length > 0) {
862
+ payload.tools = request.tools.map((tool) => ({
863
+ type: "function",
864
+ function: {
865
+ name: tool.name,
866
+ description: tool.description,
867
+ parameters: _nullishCoalesce(tool.parameters, () => ( { type: "object", properties: {} }))
868
+ }
869
+ }));
870
+ if (request.toolChoice) {
871
+ payload.tool_choice = request.toolChoice;
872
+ }
873
+ }
874
+ const headers = {
875
+ "Content-Type": "application/json"
876
+ };
877
+ if (this.apiKey) {
878
+ headers.Authorization = `Bearer ${this.apiKey}`;
879
+ }
880
+ const payloadJson = JSON.stringify(payload);
881
+ const payloadSizeBytes = payloadJson.length;
882
+ const maxPayloadSize = 5 * 1024 * 1024;
883
+ if (payloadSizeBytes > maxPayloadSize) {
884
+ const sizeMB = (payloadSizeBytes / (1024 * 1024)).toFixed(2);
885
+ throw new Error(
886
+ `Request payload too large (${sizeMB}MB). This usually happens when the conversation history grows too long. Try using /undo to remove recent turns or /new to start fresh.`
887
+ );
888
+ }
889
+ let lastError = null;
890
+ for (let attempt = 0; attempt <= this.maxRetries; attempt++) {
891
+ try {
892
+ const response = await this.makeRequest(
893
+ payload,
894
+ headers,
895
+ request.signal,
896
+ payloadJson
897
+ );
898
+ return response;
899
+ } catch (error) {
900
+ lastError = error;
901
+ if (this.isNonRetryableError(error)) {
902
+ throw error;
903
+ }
904
+ if (attempt < this.maxRetries) {
905
+ const delay = this.retryDelay * Math.pow(2, attempt);
906
+ await this.sleep(delay);
907
+ }
908
+ }
909
+ }
910
+ throw _nullishCoalesce(lastError, () => ( new Error("Failed to communicate with LLM Gateway. Please try again.")));
911
+ }
912
+ async makeRequest(payload, headers, signal, preSerializedBody) {
913
+ let response;
914
+ try {
915
+ const timeoutController = new AbortController();
916
+ const timeoutId = setTimeout(
917
+ () => timeoutController.abort(),
918
+ this.timeout
919
+ );
920
+ const combinedSignal = signal ? this.combineSignals(signal, timeoutController.signal) : timeoutController.signal;
921
+ try {
922
+ response = await fetch(`${this.baseUrl}/chat/completions`, {
923
+ method: "POST",
924
+ headers,
925
+ body: _nullishCoalesce(preSerializedBody, () => ( JSON.stringify(payload))),
926
+ signal: combinedSignal
927
+ });
928
+ } finally {
929
+ clearTimeout(timeoutId);
930
+ }
931
+ } catch (error) {
932
+ const err = error;
933
+ if (err.name === "AbortError" && _optionalChain([signal, 'optionalAccess', _57 => _57.aborted])) {
934
+ throw new Error("Request cancelled.");
935
+ }
936
+ if (err.name === "AbortError") {
937
+ throw new Error(
938
+ "Request timed out. The LLM Gateway service may be experiencing high load."
939
+ );
940
+ }
941
+ throw new Error(
942
+ "Unable to connect to LLM Gateway. Please check your internet connection."
943
+ );
944
+ }
945
+ if (!response.ok) {
946
+ throw new Error(await this.buildFriendlyError(response));
947
+ }
948
+ const json = await response.json();
949
+ const message = _optionalChain([json, 'optionalAccess', _58 => _58.choices, 'optionalAccess', _59 => _59[0], 'optionalAccess', _60 => _60.message]);
950
+ const text = _nullishCoalesce(_optionalChain([message, 'optionalAccess', _61 => _61.content]), () => ( ""));
951
+ const finishReason = _optionalChain([json, 'optionalAccess', _62 => _62.choices, 'optionalAccess', _63 => _63[0], 'optionalAccess', _64 => _64.finish_reason]);
952
+ let toolCalls;
953
+ if (_optionalChain([message, 'optionalAccess', _65 => _65.tool_calls]) && Array.isArray(message.tool_calls)) {
954
+ toolCalls = message.tool_calls.map((tc) => {
955
+ const rawArgs = _optionalChain([tc, 'access', _66 => _66.function, 'optionalAccess', _67 => _67.arguments]);
956
+ return {
957
+ id: tc.id,
958
+ type: "function",
959
+ function: {
960
+ name: _nullishCoalesce(_optionalChain([tc, 'access', _68 => _68.function, 'optionalAccess', _69 => _69.name]), () => ( "")),
961
+ arguments: _nullishCoalesce(rawArgs, () => ( "{}"))
962
+ }
963
+ };
964
+ });
965
+ }
966
+ let usage;
967
+ if (_optionalChain([json, 'optionalAccess', _70 => _70.usage])) {
968
+ usage = {
969
+ promptTokens: _nullishCoalesce(json.usage.prompt_tokens, () => ( 0)),
970
+ completionTokens: _nullishCoalesce(json.usage.completion_tokens, () => ( 0)),
971
+ totalTokens: _nullishCoalesce(json.usage.total_tokens, () => ( 0))
972
+ };
973
+ }
974
+ return {
975
+ id: _nullishCoalesce(json.id, () => ( "llmgateway-response")),
976
+ created: _nullishCoalesce(json.created, () => ( Date.now())),
977
+ content: text,
978
+ toolCalls,
979
+ finishReason,
980
+ usage,
981
+ raw: json
982
+ };
983
+ }
984
+ async buildFriendlyError(response) {
985
+ const status = response.status;
986
+ let errorDetail = "";
987
+ try {
988
+ const body = await response.json();
989
+ errorDetail = _optionalChain([body, 'optionalAccess', _71 => _71.error, 'optionalAccess', _72 => _72.message]) || _optionalChain([body, 'optionalAccess', _73 => _73.error]) || _optionalChain([body, 'optionalAccess', _74 => _74.message]) || "";
990
+ if (typeof errorDetail === "object") {
991
+ errorDetail = JSON.stringify(errorDetail);
992
+ }
993
+ } catch (e12) {
994
+ try {
995
+ errorDetail = await response.text();
996
+ } catch (e13) {
997
+ }
998
+ }
999
+ const friendlyMessage = FRIENDLY_ERRORS2[status];
1000
+ if (friendlyMessage) {
1001
+ return errorDetail ? `${friendlyMessage}
1002
+ ${errorDetail}` : friendlyMessage;
1003
+ }
1004
+ if (status >= 500) {
1005
+ const base = "The LLM Gateway service is temporarily unavailable. Please try again later.";
1006
+ return errorDetail ? `${base}
1007
+ (${status}: ${errorDetail})` : base;
1008
+ }
1009
+ if (status >= 400) {
1010
+ const base = "The request could not be processed.";
1011
+ return errorDetail ? `${base} (${status}: ${errorDetail})` : `${base} (HTTP ${status}) Please try again or adjust your prompt.`;
1012
+ }
1013
+ return errorDetail ? `An unexpected error occurred: ${errorDetail}` : "An unexpected error occurred. Please try again.";
1014
+ }
1015
+ isNonRetryableError(error) {
1016
+ const message = error.message.toLowerCase();
1017
+ if (message.includes("cancelled") || message.includes("aborted")) {
1018
+ return true;
1019
+ }
1020
+ if (message.includes("authentication") || message.includes("api key")) {
1021
+ return true;
1022
+ }
1023
+ if (message.includes("payment") || message.includes("access denied")) {
1024
+ return true;
1025
+ }
1026
+ if (message.includes("not found")) {
1027
+ return true;
1028
+ }
1029
+ return false;
1030
+ }
1031
+ combineSignals(signal1, signal2) {
1032
+ const controller = new AbortController();
1033
+ const abort = () => controller.abort();
1034
+ signal1.addEventListener("abort", abort);
1035
+ signal2.addEventListener("abort", abort);
1036
+ if (signal1.aborted || signal2.aborted) {
1037
+ controller.abort();
1038
+ }
1039
+ return controller.signal;
1040
+ }
1041
+ sleep(ms) {
1042
+ return new Promise((resolve) => setTimeout(resolve, ms));
1043
+ }
1044
+ };
1045
+
1046
+ // src/providers/LLMGatewayProvider.ts
1047
+ var LLMGatewayProvider = class {
1048
+ constructor(config, networkSettings) {
1049
+ this.client = new LLMGatewayClient(config, networkSettings);
1050
+ this.model = config.model;
1051
+ }
1052
+ getName() {
1053
+ return "llmgateway";
1054
+ }
1055
+ setModel(model) {
1056
+ this.model = model;
1057
+ this.client.setDefaultModel(model);
1058
+ }
1059
+ async listModels() {
1060
+ return [
1061
+ "gpt-4o",
1062
+ "gpt-4o-mini",
1063
+ "gpt-4-turbo",
1064
+ "claude-3-5-sonnet-20241022",
1065
+ "claude-3-5-haiku-20241022",
1066
+ "gemini-1.5-pro",
1067
+ "gemini-1.5-flash"
1068
+ ];
1069
+ }
1070
+ async isAvailable() {
1071
+ return true;
1072
+ }
1073
+ async complete(request) {
1074
+ return this.client.complete(request);
1075
+ }
1076
+ };
1077
+
1078
+ // src/providers/azure/tokenManager.ts
1079
+ var EXPIRY_BUFFER_MS = 5 * 60 * 1e3;
1080
+ var IMDS_ENDPOINT = "http://169.254.169.254/metadata/identity/oauth2/token";
1081
+ var COGNITIVE_SCOPE = "https://cognitiveservices.azure.com/.default";
1082
var AzureTokenManager = class {
  /**
   * Resolves credentials for Azure OpenAI requests: passes API keys straight
   * through, and acquires + caches Entra ID / Managed Identity bearer tokens.
   */
  constructor() {
    // Cached bearer token: { token, expiresAt } (ms epoch), or null.
    this.cache = null;
    // Credential fingerprint the cached token was minted for. A cache hit
    // requires a matching key, so switching auth method, tenant, or client id
    // invalidates the cache instead of returning a token for the wrong
    // principal (previously the cache ignored credential changes).
    this.cacheKey = null;
  }
  /**
   * Return a credential for the request: the raw API key for "api-key",
   * otherwise a (possibly cached) OAuth bearer token.
   * @throws Error when required fields are missing or the method is unknown.
   */
  async getToken(request) {
    if (request.authMethod === "api-key") {
      if (!request.apiKey) {
        throw new Error("API key is required for api-key authentication.");
      }
      return request.apiKey;
    }
    // Fingerprint of the credentials this token request is for.
    const key = `${request.authMethod}:${request.tenantId || ""}:${request.clientId || ""}`;
    if (this.cache && this.cacheKey === key && !this.isTokenExpired()) {
      return this.cache.token;
    }
    if (request.authMethod === "entra-id") {
      return this.acquireEntraIdToken(request, key);
    }
    if (request.authMethod === "managed-identity") {
      return this.acquireManagedIdentityToken(key);
    }
    throw new Error(`Unsupported auth method: ${request.authMethod}`);
  }
  /**
   * Build HTTP auth headers: Azure key auth uses an `api-key` header, token
   * auth uses a standard `Authorization: Bearer` header.
   */
  async getAuthHeaders(request) {
    if (request.authMethod === "api-key") {
      const key = await this.getToken(request);
      return { "api-key": key };
    }
    const token = await this.getToken(request);
    return { Authorization: `Bearer ${token}` };
  }
  /**
   * Client-credentials flow against the Entra ID v2.0 token endpoint.
   * @param cacheKey Credential fingerprint to tag the cached token with.
   */
  async acquireEntraIdToken(request, cacheKey) {
    if (!request.tenantId) {
      throw new Error("tenantId is required for Entra ID authentication.");
    }
    if (!request.clientId) {
      throw new Error("clientId is required for Entra ID authentication.");
    }
    if (!request.clientSecret) {
      throw new Error("clientSecret is required for Entra ID authentication.");
    }
    const url = `https://login.microsoftonline.com/${request.tenantId}/oauth2/v2.0/token`;
    const body = new URLSearchParams({
      grant_type: "client_credentials",
      client_id: request.clientId,
      client_secret: request.clientSecret,
      scope: COGNITIVE_SCOPE
    });
    const response = await fetch(url, {
      method: "POST",
      headers: { "Content-Type": "application/x-www-form-urlencoded" },
      body: body.toString()
    });
    if (!response.ok) {
      // Token endpoint errors come back as JSON; fall back to the status code.
      const error = await response.json().catch(() => ({}));
      const description = error.error_description || error.error || `HTTP ${response.status}`;
      throw new Error(`Entra ID authentication failed: ${description}`);
    }
    const data = await response.json();
    this.cacheToken(data.access_token, data.expires_in, cacheKey);
    return data.access_token;
  }
  /**
   * Fetch a token from the Azure Instance Metadata Service. Only works when
   * running on Azure compute with a managed identity attached.
   * @param cacheKey Credential fingerprint to tag the cached token with.
   */
  async acquireManagedIdentityToken(cacheKey) {
    const url = `${IMDS_ENDPOINT}?api-version=2018-02-01&resource=https://cognitiveservices.azure.com`;
    let response;
    try {
      // IMDS requires the Metadata header and is only reachable link-locally.
      response = await fetch(url, {
        headers: { Metadata: "true" }
      });
    } catch (e14) {
      throw new Error(
        "Managed Identity token acquisition failed. This auth method only works inside Azure VMs, App Service, or containers with managed identity enabled."
      );
    }
    if (!response.ok) {
      const error = await response.json().catch(() => ({}));
      throw new Error(
        `Managed Identity token error (${response.status}): ${error.error_description || error.error || "Unknown"}`
      );
    }
    const data = await response.json();
    this.cacheToken(data.access_token, data.expires_in, cacheKey);
    return data.access_token;
  }
  /**
   * Cache a token with its absolute expiry time.
   * @param expiresInSeconds Lifetime in seconds. IMDS returns this field as a
   *   string, so coerce explicitly instead of relying on implicit coercion.
   * @param cacheKey Optional credential fingerprint (null for legacy callers).
   */
  cacheToken(token, expiresInSeconds, cacheKey = null) {
    this.cache = {
      token,
      expiresAt: Date.now() + Number(expiresInSeconds) * 1e3
    };
    this.cacheKey = cacheKey;
  }
  /** True when no token is cached or it is within the refresh buffer of expiry. */
  isTokenExpired() {
    if (!this.cache) return true;
    return Date.now() >= this.cache.expiresAt - EXPIRY_BUFFER_MS;
  }
};
1176
+
1177
+ // src/providers/AzureClient.ts
1178
/**
 * Strip each chat message down to the fields the Azure OpenAI API accepts,
 * dropping any internal bookkeeping properties accumulated on the objects.
 * @param {Array} messages - Conversation messages in OpenAI chat format.
 * @returns {Array} New message objects with only role/content plus the
 *   role-specific fields (tool_call_id, tool_calls, name) when present.
 */
function sanitizeMessages3(messages) {
  return messages.map((msg) => {
    const out = {
      role: msg.role,
      content: msg.content
    };
    // Tool responses must reference the call they answer.
    if (msg.role === "tool" && msg.tool_call_id) {
      out.tool_call_id = msg.tool_call_id;
    }
    // Assistant turns keep their tool invocations only when non-empty.
    if (msg.role === "assistant" && msg.tool_calls?.length) {
      out.tool_calls = msg.tool_calls;
    }
    if (msg.name) {
      out.name = msg.name;
    }
    return out;
  });
}
1196
// Azure OpenAI data-plane API version used when config does not specify one.
var DEFAULT_API_VERSION = "2024-10-21";
// Retry policy: default 3 attempts, user-configurable but capped at 5.
var DEFAULT_MAX_RETRIES3 = 3;
var MAX_ALLOWED_RETRIES3 = 5;
// Base delay (ms) for exponential backoff between retries.
var DEFAULT_RETRY_DELAY3 = 1e3;
// Per-request timeout in milliseconds (30s).
var DEFAULT_TIMEOUT3 = 3e4;
// Human-friendly replacements for common HTTP status codes from Azure OpenAI,
// surfaced to the user instead of raw status lines.
var FRIENDLY_ERRORS3 = {
  400: "The request was malformed. This often happens when the context is too long. Try /undo to remove recent turns or /new to start fresh.",
  401: "Authentication failed. Please verify your Azure API key or credentials in ~/.autohand/config.json.",
  402: "Payment required. Please check your Azure subscription and billing settings.",
  403: "Access denied. Your credentials may not have permission for this Azure deployment.",
  404: "The Azure deployment was not found. Verify your resourceName and deploymentName in ~/.autohand/config.json.",
  429: "Rate limit exceeded. Please wait a moment and try again, or adjust your Azure deployment capacity.",
  500: "Azure OpenAI encountered an internal error. Please try again later.",
  502: "Azure OpenAI is temporarily unavailable. Please try again in a few moments.",
  503: "Azure OpenAI is currently overloaded. Please try again later.",
  504: "The request timed out. Azure OpenAI may be experiencing high load."
};
1213
var AzureClient = class {
  /**
   * HTTP client for the Azure OpenAI chat-completions endpoint with
   * retry/backoff, timeout + caller-abort handling, and friendly errors.
   * @param options Azure connection options (resource/deployment/auth).
   * @param networkSettings Optional { maxRetries, retryDelay, timeout }.
   */
  constructor(options, networkSettings) {
    this.options = options;
    this.tokenManager = new AzureTokenManager();
    this.defaultModel = options.model;
    // Clamp user-configured retries into [0, MAX_ALLOWED_RETRIES3].
    const configuredRetries = _nullishCoalesce(_optionalChain([networkSettings, 'optionalAccess', _77 => _77.maxRetries]), () => ( DEFAULT_MAX_RETRIES3));
    this.maxRetries = Math.min(
      Math.max(0, configuredRetries),
      MAX_ALLOWED_RETRIES3
    );
    this.retryDelay = _nullishCoalesce(_optionalChain([networkSettings, 'optionalAccess', _78 => _78.retryDelay]), () => ( DEFAULT_RETRY_DELAY3));
    this.timeout = _nullishCoalesce(_optionalChain([networkSettings, 'optionalAccess', _79 => _79.timeout]), () => ( DEFAULT_TIMEOUT3));
  }
  /** Update the model used for subsequent requests. */
  setDefaultModel(model) {
    this.defaultModel = model;
  }
  /**
   * Build the full Azure OpenAI endpoint URL.
   *
   * If baseUrl is provided:
   *   {baseUrl}/chat/completions?api-version={apiVersion}
   *
   * If resourceName is a full URL (starts with https://):
   *   {origin}/openai/deployments/{deploymentName}/chat/completions?api-version={apiVersion}
   * Supports all Azure endpoint domains:
   *   - *.openai.azure.com (Azure OpenAI)
   *   - *.services.ai.azure.com (Microsoft Foundry)
   *   - *.cognitiveservices.azure.com (Azure AI Services)
   *
   * Otherwise, from resourceName + deploymentName:
   *   https://{resourceName}.openai.azure.com/openai/deployments/{deploymentName}/chat/completions?api-version={apiVersion}
   */
  buildEndpointUrl() {
    const apiVersion = _nullishCoalesce(this.options.apiVersion, () => ( DEFAULT_API_VERSION));
    if (this.options.baseUrl) {
      return `${this.options.baseUrl}/chat/completions?api-version=${apiVersion}`;
    }
    const { resourceName, deploymentName } = this.options;
    if (!resourceName || !deploymentName) {
      throw new Error(
        "Azure OpenAI requires either baseUrl or both resourceName and deploymentName in ~/.autohand/config.json."
      );
    }
    if (resourceName.startsWith("http://") || resourceName.startsWith("https://")) {
      try {
        const parsed = new URL(resourceName);
        return `${parsed.origin}/openai/deployments/${deploymentName}/chat/completions?api-version=${apiVersion}`;
      } catch (e15) {
        // Unparseable URL: fall through to the plain resource-name form below.
      }
    }
    return `https://${resourceName}.openai.azure.com/openai/deployments/${deploymentName}/chat/completions?api-version=${apiVersion}`;
  }
  /**
   * Send a chat completion request, with payload-size guard and retries.
   * @param request { messages, temperature?, maxTokens?, stream?, tools?,
   *   toolChoice?, signal? }
   * @returns Normalized completion response (see makeRequest).
   * @throws Error with a user-friendly message on failure or oversize payload.
   */
  async complete(request) {
    const payload = {
      messages: sanitizeMessages3(request.messages),
      temperature: _nullishCoalesce(request.temperature, () => ( 0.2)),
      max_tokens: _nullishCoalesce(request.maxTokens, () => ( 16e3)),
      stream: _nullishCoalesce(request.stream, () => ( false))
    };
    if (request.tools && request.tools.length > 0) {
      // Translate the internal tool schema into OpenAI function-calling shape.
      payload.tools = request.tools.map((tool) => ({
        type: "function",
        function: {
          name: tool.name,
          description: tool.description,
          parameters: _nullishCoalesce(tool.parameters, () => ( { type: "object", properties: {} }))
        }
      }));
      if (request.toolChoice) {
        payload.tool_choice = request.toolChoice;
      }
    }
    const authHeaders = await this.tokenManager.getAuthHeaders({
      authMethod: this.options.authMethod,
      apiKey: this.options.apiKey,
      tenantId: this.options.tenantId,
      clientId: this.options.clientId,
      clientSecret: this.options.clientSecret
    });
    const headers = {
      "Content-Type": "application/json",
      ...authHeaders
    };
    const payloadJson = JSON.stringify(payload);
    // Measure actual UTF-8 wire size. `payloadJson.length` counts UTF-16 code
    // units and undercounts multi-byte content, letting oversized payloads
    // slip past the guard.
    const payloadSizeBytes = Buffer.byteLength(payloadJson, "utf8");
    const maxPayloadSize = 5 * 1024 * 1024;
    if (payloadSizeBytes > maxPayloadSize) {
      const sizeMB = (payloadSizeBytes / (1024 * 1024)).toFixed(2);
      throw new Error(
        `Request payload too large (${sizeMB}MB). This usually happens when the conversation history grows too long. Try using /undo to remove recent turns or /new to start fresh.`
      );
    }
    let lastError = null;
    // Retry loop with exponential backoff; non-retryable errors escape early.
    for (let attempt = 0; attempt <= this.maxRetries; attempt++) {
      try {
        const response = await this.makeRequest(
          payload,
          headers,
          request.signal,
          payloadJson
        );
        return response;
      } catch (error) {
        lastError = error;
        if (this.isNonRetryableError(error)) {
          throw error;
        }
        if (attempt < this.maxRetries) {
          const delay = this.retryDelay * Math.pow(2, attempt);
          await this.sleep(delay);
        }
      }
    }
    throw _nullishCoalesce(lastError, () => ( new Error(
      "Failed to communicate with Azure OpenAI. Please try again."
    )));
  }
  /**
   * Perform one HTTP attempt and normalize the JSON response.
   * Distinguishes caller cancellation from timeout via the caller's signal.
   * @param preSerializedBody Pre-stringified payload to avoid re-serializing
   *   on every retry attempt.
   */
  async makeRequest(payload, headers, signal, preSerializedBody) {
    let response;
    const url = this.buildEndpointUrl();
    try {
      const timeoutController = new AbortController();
      const timeoutId = setTimeout(
        () => timeoutController.abort(),
        this.timeout
      );
      const combinedSignal = signal ? this.combineSignals(signal, timeoutController.signal) : timeoutController.signal;
      try {
        response = await fetch(url, {
          method: "POST",
          headers,
          body: _nullishCoalesce(preSerializedBody, () => ( JSON.stringify(payload))),
          signal: combinedSignal
        });
      } finally {
        clearTimeout(timeoutId);
      }
    } catch (error) {
      const err = error;
      // Caller aborted vs. our own timeout: check the caller's signal first.
      if (err.name === "AbortError" && _optionalChain([signal, 'optionalAccess', _80 => _80.aborted])) {
        throw new Error("Request cancelled.");
      }
      if (err.name === "AbortError") {
        throw new Error(
          "Request timed out. Azure OpenAI may be experiencing high load."
        );
      }
      throw new Error(
        "Unable to connect to Azure OpenAI. Please check your internet connection and Azure configuration."
      );
    }
    if (!response.ok) {
      throw new Error(await this.buildFriendlyError(response));
    }
    const json = await response.json();
    const message = _optionalChain([json, 'optionalAccess', _81 => _81.choices, 'optionalAccess', _82 => _82[0], 'optionalAccess', _83 => _83.message]);
    const text = _nullishCoalesce(_optionalChain([message, 'optionalAccess', _84 => _84.content]), () => ( ""));
    const finishReason = _optionalChain([json, 'optionalAccess', _85 => _85.choices, 'optionalAccess', _86 => _86[0], 'optionalAccess', _87 => _87.finish_reason]);
    let toolCalls;
    if (_optionalChain([message, 'optionalAccess', _88 => _88.tool_calls]) && Array.isArray(message.tool_calls)) {
      // Normalize tool calls, defaulting missing names/arguments defensively.
      toolCalls = message.tool_calls.map((tc) => {
        const rawArgs = _optionalChain([tc, 'access', _89 => _89.function, 'optionalAccess', _90 => _90.arguments]);
        return {
          id: tc.id,
          type: "function",
          function: {
            name: _nullishCoalesce(_optionalChain([tc, 'access', _91 => _91.function, 'optionalAccess', _92 => _92.name]), () => ( "")),
            arguments: _nullishCoalesce(rawArgs, () => ( "{}"))
          }
        };
      });
    }
    let usage;
    if (_optionalChain([json, 'optionalAccess', _93 => _93.usage])) {
      usage = {
        promptTokens: _nullishCoalesce(json.usage.prompt_tokens, () => ( 0)),
        completionTokens: _nullishCoalesce(json.usage.completion_tokens, () => ( 0)),
        totalTokens: _nullishCoalesce(json.usage.total_tokens, () => ( 0))
      };
    }
    return {
      id: _nullishCoalesce(json.id, () => ( "autohand-azure")),
      created: _nullishCoalesce(json.created, () => ( Date.now())),
      content: text,
      toolCalls,
      finishReason,
      usage,
      raw: json
    };
  }
  /**
   * Map a non-OK response to a user-facing message, appending whatever error
   * detail the body carries (JSON preferred, raw text as fallback).
   */
  async buildFriendlyError(response) {
    const status = response.status;
    let errorDetail = "";
    try {
      const body = await response.json();
      errorDetail = _optionalChain([body, 'optionalAccess', _94 => _94.error, 'optionalAccess', _95 => _95.message]) || _optionalChain([body, 'optionalAccess', _96 => _96.error]) || _optionalChain([body, 'optionalAccess', _97 => _97.message]) || "";
      if (typeof errorDetail === "object") {
        errorDetail = JSON.stringify(errorDetail);
      }
    } catch (e16) {
      try {
        errorDetail = await response.text();
      } catch (e17) {
        // Body unreadable; proceed with the status-based message alone.
      }
    }
    const friendlyMessage = FRIENDLY_ERRORS3[status];
    if (friendlyMessage) {
      return errorDetail ? `${friendlyMessage}
${errorDetail}` : friendlyMessage;
    }
    if (status >= 500) {
      const base = "Azure OpenAI is temporarily unavailable. Please try again later.";
      return errorDetail ? `${base}
(${status}: ${errorDetail})` : base;
    }
    if (status >= 400) {
      const base = "The request could not be processed by Azure OpenAI.";
      return errorDetail ? `${base} (${status}: ${errorDetail})` : `${base} (HTTP ${status}) Please try again or adjust your prompt.`;
    }
    return errorDetail ? `An unexpected Azure OpenAI error occurred: ${errorDetail}` : "An unexpected Azure OpenAI error occurred. Please try again.";
  }
  /**
   * Heuristic: classify errors by message text. Cancellation, auth, billing,
   * permission, and not-found failures will not improve with a retry.
   */
  isNonRetryableError(error) {
    const message = error.message.toLowerCase();
    if (message.includes("cancelled") || message.includes("aborted")) {
      return true;
    }
    if (message.includes("authentication") || message.includes("api key")) {
      return true;
    }
    if (message.includes("payment") || message.includes("access denied")) {
      return true;
    }
    if (message.includes("not found")) {
      return true;
    }
    return false;
  }
  /** Combine two abort signals into one that fires when either does. */
  combineSignals(signal1, signal2) {
    const controller = new AbortController();
    const abort = () => controller.abort();
    // once: true lets the listeners detach after firing instead of
    // accumulating on long-lived caller signals across retries.
    signal1.addEventListener("abort", abort, { once: true });
    signal2.addEventListener("abort", abort, { once: true });
    if (signal1.aborted || signal2.aborted) {
      controller.abort();
    }
    return controller.signal;
  }
  /** Promise-based delay used by the retry backoff. */
  sleep(ms) {
    return new Promise((resolve) => setTimeout(resolve, ms));
  }
};
1464
+
1465
+ // src/providers/AzureProvider.ts
1466
var AzureProvider = class {
  /**
   * LLMProvider adapter that wires Azure config into an AzureClient.
   * @param config Azure settings from ~/.autohand/config.json.
   * @param networkSettings Optional retry/timeout overrides.
   */
  constructor(config, networkSettings) {
    const clientOptions = {
      model: config.model,
      resourceName: config.resourceName,
      deploymentName: config.deploymentName,
      baseUrl: config.baseUrl,
      apiVersion: config.apiVersion,
      apiKey: config.apiKey,
      // Key-based auth is the default when config omits a method.
      authMethod: config.authMethod ?? "api-key",
      tenantId: config.tenantId,
      clientId: config.clientId,
      clientSecret: config.clientSecret
    };
    this.client = new AzureClient(clientOptions, networkSettings);
    this.model = config.model;
  }
  /** Provider identifier used in config and logs. */
  getName() {
    return "azure";
  }
  /** Switch the active model and propagate it to the underlying client. */
  setModel(model) {
    this.model = model;
    this.client.setDefaultModel(model);
  }
  /** Static list of commonly deployed Azure OpenAI model names. */
  async listModels() {
    return ["gpt-4o", "gpt-4o-mini", "gpt-4-turbo", "gpt-4", "gpt-3.5-turbo"];
  }
  /** No cheap liveness probe for Azure; assume reachable. */
  async isAvailable() {
    return true;
  }
  /** Delegate the completion to the underlying AzureClient. */
  async complete(request) {
    return this.client.complete(request);
  }
};
1502
+
1503
+ // src/providers/ProviderFactory.ts
1504
var ProviderNotConfiguredError = class extends Error {
  /**
   * Raised when a completion is attempted against a provider with no
   * configuration. The message carries a machine-parseable
   * "PROVIDER_NOT_CONFIGURED:<name>" prefix so callers can detect the case.
   * @param providerName Name of the unconfigured provider (e.g. "azure").
   */
  constructor(providerName) {
    super(`PROVIDER_NOT_CONFIGURED:${providerName}`);
    this.name = "ProviderNotConfiguredError";
    this.providerName = providerName;
  }
};
1511
var UnconfiguredProvider = class {
  /**
   * Placeholder provider returned when the selected provider lacks
   * configuration: every completion attempt fails with a typed error so the
   * agent can prompt the user to configure it instead of crashing at startup.
   * @param providerName The provider the user selected but did not configure.
   */
  constructor(providerName) {
    this.providerName = providerName;
  }
  /** Sentinel name signalling that nothing usable is configured. */
  getName() {
    return "unconfigured";
  }
  /** No-op: there is no backing client to update. */
  setModel(_model) {
  }
  /** Never available, by definition. */
  async isAvailable() {
    return false;
  }
  /** No configuration means no models to offer. */
  async listModels() {
    return [];
  }
  /** Always fails with a typed, detectable error. */
  async complete(_request) {
    throw new ProviderNotConfiguredError(this.providerName);
  }
};
1530
var ProviderFactory = class {
  /**
   * Create an LLM provider based on configuration.
   * Returns an UnconfiguredProvider if the selected provider is not configured,
   * allowing the agent to handle it gracefully instead of crashing.
   */
  static create(config) {
    const providerName = config.provider || "openrouter";
    switch (providerName) {
      case "ollama":
        return config.ollama ? new OllamaProvider(config.ollama) : new UnconfiguredProvider("ollama");
      case "openai":
        return config.openai ? new OpenAIProvider(config.openai) : new UnconfiguredProvider("openai");
      case "llamacpp":
        return config.llamacpp ? new LlamaCppProvider(config.llamacpp) : new UnconfiguredProvider("llamacpp");
      case "mlx":
        return config.mlx ? new MLXProvider(config.mlx) : new UnconfiguredProvider("mlx");
      case "llmgateway":
        // Gateway and Azure providers also consume the shared network settings.
        return config.llmgateway ? new LLMGatewayProvider(config.llmgateway, config.network) : new UnconfiguredProvider("llmgateway");
      case "azure":
        return config.azure ? new AzureProvider(config.azure, config.network) : new UnconfiguredProvider("azure");
      case "openrouter":
      default:
        // Unknown names fall through to the openrouter default.
        return config.openrouter ? new OpenRouterProvider(config.openrouter) : new UnconfiguredProvider("openrouter");
    }
  }
  /**
   * Get all available provider names.
   * MLX is only included on Apple Silicon (macOS + arm64).
   */
  static getProviderNames() {
    const base = ["openrouter", "ollama", "openai", "llamacpp", "llmgateway", "azure"];
    return isMLXSupported() ? [...base, "mlx"] : base;
  }
  /**
   * Check if a provider name is valid.
   * Note: This checks if the name is a valid provider type, not if it's available on this platform.
   * MLX is always a valid provider name, but may not be available on non-Apple Silicon systems.
   */
  static isValidProvider(name) {
    return ["openrouter", "ollama", "openai", "llamacpp", "mlx", "llmgateway", "azure"].includes(name);
  }
};
1598
+
1599
+
1600
+
1601
+
1602
+ exports.ProviderNotConfiguredError = ProviderNotConfiguredError; exports.ProviderFactory = ProviderFactory;
1603
+ /**
1604
+ * @license
1605
+ * Copyright 2025 Autohand AI LLC
1606
+ * SPDX-License-Identifier: Apache-2.0
1607
+ */