gong-code 0.1.0

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (374)
  1. package/README.md +38 -0
  2. package/dist/SKILL-22jccbka.md +1 -0
  3. package/dist/chunk-01wdn84e.js +8 -0
  4. package/dist/chunk-04bc74vz.js +243 -0
  5. package/dist/chunk-05t2vqsb.js +490 -0
  6. package/dist/chunk-0727wret.js +159 -0
  7. package/dist/chunk-07rervty.js +200 -0
  8. package/dist/chunk-0b9nxvyg.js +478 -0
  9. package/dist/chunk-0e428b73.js +600 -0
  10. package/dist/chunk-0hqfheht.js +3439 -0
  11. package/dist/chunk-0pnk52c2.js +116 -0
  12. package/dist/chunk-0sbddf6m.js +68 -0
  13. package/dist/chunk-0vf7xb84.js +176 -0
  14. package/dist/chunk-0yz17yjy.js +1061 -0
  15. package/dist/chunk-0zscb6c7.js +10 -0
  16. package/dist/chunk-10p21kq6.js +378 -0
  17. package/dist/chunk-17k78pmz.js +120 -0
  18. package/dist/chunk-19jvvqzp.js +1390 -0
  19. package/dist/chunk-1d5czqnr.js +173 -0
  20. package/dist/chunk-1gjysfnf.js +13876 -0
  21. package/dist/chunk-1jjfv0wd.js +467 -0
  22. package/dist/chunk-1jjzp2pa.js +617 -0
  23. package/dist/chunk-1m38dj2k.js +155 -0
  24. package/dist/chunk-1pgttway.js +211 -0
  25. package/dist/chunk-1vvnx0gb.js +8035 -0
  26. package/dist/chunk-1ztm9yps.js +49145 -0
  27. package/dist/chunk-208x1t0m.js +88 -0
  28. package/dist/chunk-20xqs3yk.js +843 -0
  29. package/dist/chunk-2b2c0wnn.js +584 -0
  30. package/dist/chunk-2g07117j.js +541 -0
  31. package/dist/chunk-2g6p2t3w.js +780 -0
  32. package/dist/chunk-2kjeaeg7.js +28 -0
  33. package/dist/chunk-2n0s2dhc.js +12 -0
  34. package/dist/chunk-2p0hvt8k.js +785 -0
  35. package/dist/chunk-2qmxg9y7.js +40174 -0
  36. package/dist/chunk-2qpy8kne.js +28 -0
  37. package/dist/chunk-2rgbf62a.js +40 -0
  38. package/dist/chunk-2tx0s41y.js +150 -0
  39. package/dist/chunk-2x8zk5cz.js +101 -0
  40. package/dist/chunk-2yqy25z4.js +48 -0
  41. package/dist/chunk-318g78ty.js +552 -0
  42. package/dist/chunk-31gfg8tz.js +290 -0
  43. package/dist/chunk-32n0s532.js +1317 -0
  44. package/dist/chunk-35dw2r9g.js +137 -0
  45. package/dist/chunk-36b5zqvn.js +1865 -0
  46. package/dist/chunk-38kpx69j.js +97 -0
  47. package/dist/chunk-3b01vp8p.js +815 -0
  48. package/dist/chunk-3f76a1ek.js +317 -0
  49. package/dist/chunk-3fd1hkhh.js +4254 -0
  50. package/dist/chunk-3ffxa7zj.js +8 -0
  51. package/dist/chunk-3gcj7w95.js +8 -0
  52. package/dist/chunk-3jf3k40v.js +970 -0
  53. package/dist/chunk-3k51qfp9.js +9301 -0
  54. package/dist/chunk-3mhygd2v.js +1180 -0
  55. package/dist/chunk-3p6y0gvb.js +117 -0
  56. package/dist/chunk-3r60xdt5.js +93 -0
  57. package/dist/chunk-43agf6xv.js +8 -0
  58. package/dist/chunk-46zr418r.js +15 -0
  59. package/dist/chunk-48k2bs4w.js +118 -0
  60. package/dist/chunk-4p987a4z.js +84 -0
  61. package/dist/chunk-4px25pe0.js +776 -0
  62. package/dist/chunk-4s93jyky.js +113 -0
  63. package/dist/chunk-4xv8bedf.js +58 -0
  64. package/dist/chunk-4y382mzd.js +346 -0
  65. package/dist/chunk-52dzw4bc.js +246 -0
  66. package/dist/chunk-52qqmh5g.js +110 -0
  67. package/dist/chunk-53grnhp7.js +8 -0
  68. package/dist/chunk-59craaxx.js +4249 -0
  69. package/dist/chunk-5b833jqs.js +96 -0
  70. package/dist/chunk-5cp2q9sh.js +58 -0
  71. package/dist/chunk-5dbaxmts.js +87 -0
  72. package/dist/chunk-5f5sxgp0.js +96 -0
  73. package/dist/chunk-5mkfderj.js +195 -0
  74. package/dist/chunk-5qw0mpvq.js +389 -0
  75. package/dist/chunk-5rxmszm8.js +48 -0
  76. package/dist/chunk-5ygxa3hr.js +65 -0
  77. package/dist/chunk-61d5dqq2.js +71 -0
  78. package/dist/chunk-6350958y.js +272 -0
  79. package/dist/chunk-65z8hg8j.js +1337 -0
  80. package/dist/chunk-6aq85gdq.js +8 -0
  81. package/dist/chunk-6cjh9exg.js +40751 -0
  82. package/dist/chunk-6e41p5m6.js +187 -0
  83. package/dist/chunk-6jb6xcnq.js +10511 -0
  84. package/dist/chunk-6jta29r1.js +20 -0
  85. package/dist/chunk-6q7f2rrv.js +4757 -0
  86. package/dist/chunk-6sg0ec6v.js +41 -0
  87. package/dist/chunk-6v0rhx4e.js +328 -0
  88. package/dist/chunk-6x5mz95v.js +751 -0
  89. package/dist/chunk-6xqp51qr.js +87 -0
  90. package/dist/chunk-6xvyx1s9.js +6957 -0
  91. package/dist/chunk-6ywh7wgp.js +468 -0
  92. package/dist/chunk-75ne77gm.js +1920 -0
  93. package/dist/chunk-77cb06js.js +602 -0
  94. package/dist/chunk-78n9kfp8.js +1645 -0
  95. package/dist/chunk-7be87rww.js +277 -0
  96. package/dist/chunk-7e1qjk3s.js +125 -0
  97. package/dist/chunk-7e7nxnss.js +360 -0
  98. package/dist/chunk-7g8156qp.js +55 -0
  99. package/dist/chunk-7nz71s42.js +3353 -0
  100. package/dist/chunk-7r078t2x.js +113 -0
  101. package/dist/chunk-7shx41hj.js +3094 -0
  102. package/dist/chunk-7yexhdjs.js +216 -0
  103. package/dist/chunk-83kmfcjv.js +256 -0
  104. package/dist/chunk-846rr9n1.js +107 -0
  105. package/dist/chunk-84ncsm1r.js +183 -0
  106. package/dist/chunk-867x8aq0.js +8936 -0
  107. package/dist/chunk-8byh30kb.js +1947 -0
  108. package/dist/chunk-8c8f6gnt.js +432 -0
  109. package/dist/chunk-8gtdseev.js +454 -0
  110. package/dist/chunk-8jfh9rq6.js +642 -0
  111. package/dist/chunk-8n4n4m67.js +800 -0
  112. package/dist/chunk-8za61tze.js +847 -0
  113. package/dist/chunk-9029sbq1.js +57 -0
  114. package/dist/chunk-94dm5rrm.js +673 -0
  115. package/dist/chunk-9bs8n985.js +329 -0
  116. package/dist/chunk-9c23776j.js +763 -0
  117. package/dist/chunk-9e93g81n.js +298 -0
  118. package/dist/chunk-9f3d61y7.js +548 -0
  119. package/dist/chunk-9gc3andd.js +422 -0
  120. package/dist/chunk-9kn3tgpb.js +130 -0
  121. package/dist/chunk-9m5rn7hk.js +61 -0
  122. package/dist/chunk-9p4hsd6a.js +50 -0
  123. package/dist/chunk-9qasfk8n.js +758 -0
  124. package/dist/chunk-9snp5mn2.js +533 -0
  125. package/dist/chunk-9tcvras3.js +2331 -0
  126. package/dist/chunk-a0186ngk.js +57 -0
  127. package/dist/chunk-a2m8f0aj.js +268 -0
  128. package/dist/chunk-a2rcafav.js +716 -0
  129. package/dist/chunk-a9hjdzar.js +160 -0
  130. package/dist/chunk-aak6ts5n.js +526 -0
  131. package/dist/chunk-aca3w84n.js +267 -0
  132. package/dist/chunk-ae76ded0.js +30 -0
  133. package/dist/chunk-afnjhn3j.js +21933 -0
  134. package/dist/chunk-ahj70tf0.js +880 -0
  135. package/dist/chunk-apjsxssq.js +169 -0
  136. package/dist/chunk-av49wjj1.js +43 -0
  137. package/dist/chunk-axggebfy.js +206 -0
  138. package/dist/chunk-az7jpbv8.js +16 -0
  139. package/dist/chunk-azz7sep6.js +328 -0
  140. package/dist/chunk-b4tpz2g1.js +93 -0
  141. package/dist/chunk-bczf2eyq.js +115 -0
  142. package/dist/chunk-bd95f637.js +208 -0
  143. package/dist/chunk-bdhdmkya.js +10 -0
  144. package/dist/chunk-bp0ynk17.js +32 -0
  145. package/dist/chunk-bq75gxjs.js +90 -0
  146. package/dist/chunk-btk5jaq6.js +10464 -0
  147. package/dist/chunk-bv4c48a5.js +120 -0
  148. package/dist/chunk-byfb105n.js +341 -0
  149. package/dist/chunk-c0bdmy7w.js +4223 -0
  150. package/dist/chunk-c20aqkz8.js +641 -0
  151. package/dist/chunk-c4jg3s2c.js +163 -0
  152. package/dist/chunk-c61q5cer.js +258 -0
  153. package/dist/chunk-c9f761sy.js +386 -0
  154. package/dist/chunk-ca6jkz06.js +157 -0
  155. package/dist/chunk-ccyg9ap9.js +171374 -0
  156. package/dist/chunk-cfej8cc7.js +371 -0
  157. package/dist/chunk-cjqxdnf9.js +23 -0
  158. package/dist/chunk-ckrs789w.js +8 -0
  159. package/dist/chunk-cqeq13wg.js +48 -0
  160. package/dist/chunk-crfj3w5e.js +22821 -0
  161. package/dist/chunk-ct5x6z7g.js +8066 -0
  162. package/dist/chunk-cv5h8n8c.js +101 -0
  163. package/dist/chunk-cwe8h159.js +1563 -0
  164. package/dist/chunk-cwm234zz.js +908 -0
  165. package/dist/chunk-d1jkfp45.js +1580 -0
  166. package/dist/chunk-d3hb1qdr.js +126 -0
  167. package/dist/chunk-d70d0zer.js +713 -0
  168. package/dist/chunk-d91tx29f.js +105 -0
  169. package/dist/chunk-d952pymh.js +20 -0
  170. package/dist/chunk-d9af3z8k.js +720 -0
  171. package/dist/chunk-da1n53c2.js +47 -0
  172. package/dist/chunk-df5c1x8m.js +732 -0
  173. package/dist/chunk-dkv64xys.js +71 -0
  174. package/dist/chunk-dm7166v6.js +251 -0
  175. package/dist/chunk-dr0y5a61.js +258 -0
  176. package/dist/chunk-drzb0rrd.js +306 -0
  177. package/dist/chunk-dw1v1q7w.js +37 -0
  178. package/dist/chunk-e224qet4.js +348 -0
  179. package/dist/chunk-e50ckbv0.js +1099 -0
  180. package/dist/chunk-e5gn6by6.js +1165 -0
  181. package/dist/chunk-e7m3f74m.js +37 -0
  182. package/dist/chunk-e8ghgwaj.js +295 -0
  183. package/dist/chunk-ed8k4ya2.js +41 -0
  184. package/dist/chunk-eh0vnqdz.js +76 -0
  185. package/dist/chunk-epwrzr35.js +4301 -0
  186. package/dist/chunk-er9h6sw5.js +5379 -0
  187. package/dist/chunk-eta5aekm.js +79 -0
  188. package/dist/chunk-ev5gxscq.js +63 -0
  189. package/dist/chunk-evp72hv5.js +212 -0
  190. package/dist/chunk-ex41stfq.js +127 -0
  191. package/dist/chunk-ezpa510h.js +62 -0
  192. package/dist/chunk-f0fm6dey.js +477 -0
  193. package/dist/chunk-f239sbhc.js +152 -0
  194. package/dist/chunk-f2bj47xf.js +109 -0
  195. package/dist/chunk-f4t24rq4.js +258 -0
  196. package/dist/chunk-f9ghvzy8.js +106 -0
  197. package/dist/chunk-f9rahzgr.js +99 -0
  198. package/dist/chunk-ff75qzty.js +521 -0
  199. package/dist/chunk-fj0ctt6q.js +264 -0
  200. package/dist/chunk-fj7wxdt3.js +221 -0
  201. package/dist/chunk-fnnpqv92.js +714 -0
  202. package/dist/chunk-fpd3zzx1.js +26696 -0
  203. package/dist/chunk-fqv2cn76.js +227 -0
  204. package/dist/chunk-fqw9j3bj.js +439 -0
  205. package/dist/chunk-frbcqhz0.js +4984 -0
  206. package/dist/chunk-fzkj59sg.js +174 -0
  207. package/dist/chunk-fzpzbpbw.js +248 -0
  208. package/dist/chunk-g52cpmd1.js +686 -0
  209. package/dist/chunk-g98m2qe6.js +31 -0
  210. package/dist/chunk-g9zgq4vy.js +715 -0
  211. package/dist/chunk-gbpt7tm8.js +687 -0
  212. package/dist/chunk-gg52jka8.js +373 -0
  213. package/dist/chunk-gjttphax.js +5793 -0
  214. package/dist/chunk-gs7vjaas.js +65 -0
  215. package/dist/chunk-gv8n85j2.js +8 -0
  216. package/dist/chunk-gxdwm6pt.js +650 -0
  217. package/dist/chunk-gy8wajg2.js +1014 -0
  218. package/dist/chunk-gz3647m0.js +200 -0
  219. package/dist/chunk-gzg07mqh.js +16 -0
  220. package/dist/chunk-h3zc60tq.js +44 -0
  221. package/dist/chunk-h4p1qt16.js +74 -0
  222. package/dist/chunk-h8219f18.js +282 -0
  223. package/dist/chunk-h9agq92v.js +331 -0
  224. package/dist/chunk-hhjtpwbj.js +16 -0
  225. package/dist/chunk-hpgpwzra.js +548 -0
  226. package/dist/chunk-hsnjvn19.js +80 -0
  227. package/dist/chunk-hsp9qrry.js +417 -0
  228. package/dist/chunk-hwz6aq2m.js +3939 -0
  229. package/dist/chunk-hzph0hbh.js +272 -0
  230. package/dist/chunk-hzqkss6v.js +153 -0
  231. package/dist/chunk-j0t31f39.js +1149 -0
  232. package/dist/chunk-j1qm6n1v.js +30 -0
  233. package/dist/chunk-j6e4b522.js +81 -0
  234. package/dist/chunk-j7qfbvxk.js +84 -0
  235. package/dist/chunk-j7qv3hvm.js +1635 -0
  236. package/dist/chunk-j8mxc6k6.js +213 -0
  237. package/dist/chunk-jevj87jn.js +224 -0
  238. package/dist/chunk-jk2ps88q.js +815 -0
  239. package/dist/chunk-jz7tbdyv.js +1599 -0
  240. package/dist/chunk-k36jpjvh.js +300 -0
  241. package/dist/chunk-k4nmrt3w.js +61 -0
  242. package/dist/chunk-kavn03r9.js +16606 -0
  243. package/dist/chunk-kkgav8dm.js +54 -0
  244. package/dist/chunk-kkq26g5c.js +33 -0
  245. package/dist/chunk-kp6nepx4.js +106 -0
  246. package/dist/chunk-ksefybc8.js +1551 -0
  247. package/dist/chunk-kv147680.js +3436 -0
  248. package/dist/chunk-kya46axt.js +833 -0
  249. package/dist/chunk-kyz233ny.js +227 -0
  250. package/dist/chunk-kzs75xmj.js +143 -0
  251. package/dist/chunk-m0vrez9w.js +266 -0
  252. package/dist/chunk-m1wadav3.js +118 -0
  253. package/dist/chunk-m4rfx3cj.js +221 -0
  254. package/dist/chunk-mc9zaggs.js +6374 -0
  255. package/dist/chunk-mcg5ttj4.js +105 -0
  256. package/dist/chunk-mf4r7918.js +63 -0
  257. package/dist/chunk-mh9khrt4.js +286 -0
  258. package/dist/chunk-mhc4szw7.js +22 -0
  259. package/dist/chunk-mkq0yzp0.js +38 -0
  260. package/dist/chunk-mr58jv4w.js +256 -0
  261. package/dist/chunk-mrrx3bmt.js +168 -0
  262. package/dist/chunk-mtyk3zz6.js +87 -0
  263. package/dist/chunk-mvgxntv6.js +62 -0
  264. package/dist/chunk-mwnjydcm.js +102 -0
  265. package/dist/chunk-mwxt0m3f.js +862 -0
  266. package/dist/chunk-mz1pxck8.js +998 -0
  267. package/dist/chunk-n3agghys.js +214 -0
  268. package/dist/chunk-n6q02ya9.js +442 -0
  269. package/dist/chunk-nt3y91vh.js +443 -0
  270. package/dist/chunk-nt837qt9.js +21 -0
  271. package/dist/chunk-ntgmegfs.js +39 -0
  272. package/dist/chunk-nwk16bbd.js +30 -0
  273. package/dist/chunk-p367ay90.js +672 -0
  274. package/dist/chunk-p7m3x9qk.js +48 -0
  275. package/dist/chunk-pagmjwp5.js +132 -0
  276. package/dist/chunk-pbd4r8ek.js +161 -0
  277. package/dist/chunk-pdkpssgv.js +2422 -0
  278. package/dist/chunk-pf6z1e35.js +32 -0
  279. package/dist/chunk-pfzejvpt.js +213 -0
  280. package/dist/chunk-pktxyhvk.js +400 -0
  281. package/dist/chunk-pp5xhveq.js +124 -0
  282. package/dist/chunk-ppnd7a8x.js +38 -0
  283. package/dist/chunk-pqh2w7dr.js +2202 -0
  284. package/dist/chunk-pr878d0z.js +240 -0
  285. package/dist/chunk-psf0y7hy.js +140 -0
  286. package/dist/chunk-pz2zz2f7.js +1586 -0
  287. package/dist/chunk-q0xkrxy8.js +890 -0
  288. package/dist/chunk-q5b0kpr3.js +14369 -0
  289. package/dist/chunk-q7z3t531.js +334 -0
  290. package/dist/chunk-qdjjtgwt.js +3183 -0
  291. package/dist/chunk-qezv7msv.js +26 -0
  292. package/dist/chunk-qmxn7f86.js +2643 -0
  293. package/dist/chunk-qqc3b97c.js +15 -0
  294. package/dist/chunk-qwrp63wh.js +145 -0
  295. package/dist/chunk-qyvqbst3.js +537 -0
  296. package/dist/chunk-qztvd4hd.js +465 -0
  297. package/dist/chunk-r0bvez4y.js +235 -0
  298. package/dist/chunk-r3z0896k.js +2831 -0
  299. package/dist/chunk-r4cxb6t7.js +32 -0
  300. package/dist/chunk-rb8k68m7.js +118 -0
  301. package/dist/chunk-rdprgm4t.js +42 -0
  302. package/dist/chunk-rex82hys.js +1710 -0
  303. package/dist/chunk-rjjakkw2.js +2560 -0
  304. package/dist/chunk-rka6gcv3.js +304 -0
  305. package/dist/chunk-rkz12ghm.js +144 -0
  306. package/dist/chunk-rnc7m1qs.js +436 -0
  307. package/dist/chunk-rx6z23h0.js +165 -0
  308. package/dist/chunk-s76582j1.js +28 -0
  309. package/dist/chunk-sez03qd9.js +7140 -0
  310. package/dist/chunk-sjvdn2ep.js +130 -0
  311. package/dist/chunk-smwhyy8p.js +115 -0
  312. package/dist/chunk-snw7nh0d.js +695 -0
  313. package/dist/chunk-sqx9c057.js +193 -0
  314. package/dist/chunk-sscms68s.js +16 -0
  315. package/dist/chunk-stknnmsb.js +272 -0
  316. package/dist/chunk-sywdxbs6.js +853 -0
  317. package/dist/chunk-syxy4pf2.js +782 -0
  318. package/dist/chunk-t0hpqsqn.js +6157 -0
  319. package/dist/chunk-tjm70vnw.js +123 -0
  320. package/dist/chunk-trzh0msn.js +154 -0
  321. package/dist/chunk-v1z17cpg.js +723 -0
  322. package/dist/chunk-vsfj449x.js +333 -0
  323. package/dist/chunk-vvkq20tt.js +287 -0
  324. package/dist/chunk-vxgdzx6p.js +338 -0
  325. package/dist/chunk-vxqhjyfz.js +15 -0
  326. package/dist/chunk-vzsstfmb.js +80 -0
  327. package/dist/chunk-w1jew5sn.js +69 -0
  328. package/dist/chunk-w3kwr76v.js +789 -0
  329. package/dist/chunk-w40netr9.js +59 -0
  330. package/dist/chunk-w99cpfdg.js +559 -0
  331. package/dist/chunk-wacn14d2.js +271 -0
  332. package/dist/chunk-wbhrqrq3.js +3033 -0
  333. package/dist/chunk-wfg9g0p7.js +17612 -0
  334. package/dist/chunk-wj3vjsbx.js +93 -0
  335. package/dist/chunk-wjhq855a.js +372 -0
  336. package/dist/chunk-wr06gqxh.js +114 -0
  337. package/dist/chunk-ws0z2y1g.js +195 -0
  338. package/dist/chunk-wt62wqcj.js +98 -0
  339. package/dist/chunk-wx4v7ddx.js +42 -0
  340. package/dist/chunk-wy0t3vb2.js +6101 -0
  341. package/dist/chunk-wysz7qk4.js +280 -0
  342. package/dist/chunk-x63fx1vd.js +192 -0
  343. package/dist/chunk-x8b7vft8.js +132 -0
  344. package/dist/chunk-x8jhkgxb.js +346 -0
  345. package/dist/chunk-x9z4q5k5.js +275 -0
  346. package/dist/chunk-xjd7e9jq.js +126 -0
  347. package/dist/chunk-xjr0n27e.js +42 -0
  348. package/dist/chunk-xpwyw7cd.js +24 -0
  349. package/dist/chunk-xw4ycnyz.js +37 -0
  350. package/dist/chunk-y66bqywr.js +17303 -0
  351. package/dist/chunk-ycarry7d.js +157 -0
  352. package/dist/chunk-ycr0hp6v.js +1486 -0
  353. package/dist/chunk-yqmaw6hf.js +496 -0
  354. package/dist/chunk-yrtm7d23.js +602 -0
  355. package/dist/chunk-yts879rw.js +48 -0
  356. package/dist/chunk-yvmhx96e.js +347 -0
  357. package/dist/chunk-ywq00rg4.js +246 -0
  358. package/dist/chunk-yygeg5mj.js +752 -0
  359. package/dist/chunk-z0exw850.js +726 -0
  360. package/dist/chunk-z2dp53wn.js +17 -0
  361. package/dist/chunk-z4rzc9nd.js +103 -0
  362. package/dist/chunk-zb0akt77.js +684 -0
  363. package/dist/chunk-zb2xrj5t.js +1789 -0
  364. package/dist/chunk-zbptn0ky.js +423 -0
  365. package/dist/chunk-zd45wbmf.js +1090 -0
  366. package/dist/chunk-zjkvspz9.js +2050 -0
  367. package/dist/chunk-zke1sp3x.js +1524 -0
  368. package/dist/chunk-zv2cvfsv.js +58 -0
  369. package/dist/chunk-zy4tmqst.js +644 -0
  370. package/dist/chunk-zygzzzvk.js +192 -0
  371. package/dist/cli-21v3v6ny.md +1 -0
  372. package/dist/cli.js +246 -0
  373. package/dist/server-n5f2q89z.md +1 -0
  374. package/package.json +161 -0
@@ -0,0 +1,1635 @@
1
+ // @bun
2
+ import {
3
+ __esm,
4
+ __export,
5
+ __require,
6
+ __toCommonJS,
7
+ __toESM
8
+ } from "./chunk-eta5aekm.js";
9
+ import {
10
+ getActiveProvider,
11
+ init_registry,
12
+ providerRegistry,
13
+ registerProviderFactory,
14
+ setActiveProvider
15
+ } from "./chunk-d3hb1qdr.js";
16
+ import {
17
+ __INVALID__REF__
18
+ } from "./chunk-ccyg9ap9.js";
19
+
20
+ // src/services/api/providers/types.ts
21
+ var exports_types = {};
22
+ __export(exports_types, {
23
+ ProviderError: () => ProviderError
24
+ });
25
+ var ProviderError;
26
+ var init_types = __esm(() => {
27
+ ProviderError = class ProviderError extends Error {
28
+ type;
29
+ statusCode;
30
+ providerName;
31
+ cause;
32
+ constructor(message, type, statusCode, providerName, cause) {
33
+ super(message);
34
+ this.type = type;
35
+ this.statusCode = statusCode;
36
+ this.providerName = providerName;
37
+ this.cause = cause;
38
+ this.name = "ProviderError";
39
+ }
40
+ };
41
+ });
42
+
43
+ // src/services/api/providers/BaseAdapter.ts
44
+ class BaseAdapter {
45
+ apiKey;
46
+ baseUrl;
47
+ model;
48
+ timeout;
49
+ maxRetries;
50
+ constructor(config = {}) {
51
+ this.apiKey = config.apiKey;
52
+ this.baseUrl = config.baseUrl;
53
+ this.model = config.model;
54
+ this.timeout = config.timeout ?? 600000;
55
+ this.maxRetries = config.maxRetries ?? 2;
56
+ }
57
+ extractTextContent(content) {
58
+ if (typeof content === "string") {
59
+ return content;
60
+ }
61
+ return content.filter((c) => c.type === "text").map((c) => c.text).join(`
62
+ `);
63
+ }
64
+ extractToolUses(content) {
65
+ return content.filter((c) => c.type === "tool_use");
66
+ }
67
+ extractToolResults(content) {
68
+ return content.filter((c) => c.type === "tool_result");
69
+ }
70
+ systemPromptToString(systemPrompt) {
71
+ if (!systemPrompt)
72
+ return "";
73
+ if (typeof systemPrompt === "string")
74
+ return systemPrompt;
75
+ return systemPrompt.join(`
76
+ `);
77
+ }
78
+ toolsToOpenAIFormat(tools) {
79
+ return tools.map((tool) => ({
80
+ type: "function",
81
+ function: {
82
+ name: tool.name,
83
+ description: tool.description,
84
+ parameters: tool.inputSchema
85
+ }
86
+ }));
87
+ }
88
+ messagesToOpenAIFormat(messages, systemPrompt) {
89
+ const result = [];
90
+ const sysPrompt = this.systemPromptToString(systemPrompt);
91
+ if (sysPrompt) {
92
+ result.push({ role: "system", content: sysPrompt });
93
+ }
94
+ for (const msg of messages) {
95
+ if (msg.role === "system") {
96
+ result.push({
97
+ role: "system",
98
+ content: this.extractTextContent(msg.content)
99
+ });
100
+ } else if (msg.role === "user") {
101
+ if (typeof msg.content !== "string") {
102
+ const toolResults = this.extractToolResults(msg.content);
103
+ if (toolResults.length > 0) {
104
+ for (const toolResult of toolResults) {
105
+ result.push({
106
+ role: "tool",
107
+ tool_call_id: toolResult.toolUseId,
108
+ content: typeof toolResult.content === "string" ? toolResult.content : this.extractTextContent(toolResult.content)
109
+ });
110
+ }
111
+ continue;
112
+ }
113
+ }
114
+ result.push({
115
+ role: "user",
116
+ content: this.extractTextContent(msg.content)
117
+ });
118
+ } else if (msg.role === "assistant") {
119
+ const toolUses = typeof msg.content === "string" ? [] : this.extractToolUses(msg.content);
120
+ const text = this.extractTextContent(msg.content);
121
+ if (toolUses.length > 0) {
122
+ result.push({
123
+ role: "assistant",
124
+ content: text || undefined,
125
+ tool_calls: toolUses.map((tu) => ({
126
+ id: tu.id,
127
+ type: "function",
128
+ function: {
129
+ name: tu.name,
130
+ arguments: JSON.stringify(tu.input)
131
+ }
132
+ }))
133
+ });
134
+ } else {
135
+ result.push({
136
+ role: "assistant",
137
+ content: text
138
+ });
139
+ }
140
+ }
141
+ }
142
+ return result;
143
+ }
144
+ contentFromOpenAIResponse(response) {
145
+ const choice = response.choices[0];
146
+ if (!choice) {
147
+ return {
148
+ content: [],
149
+ stopReason: "end_turn",
150
+ usage: { inputTokens: 0, outputTokens: 0 }
151
+ };
152
+ }
153
+ const content = [];
154
+ const message = choice.message;
155
+ if (message.content) {
156
+ content.push({ type: "text", text: message.content });
157
+ }
158
+ if (message.tool_calls) {
159
+ for (const toolCall of message.tool_calls) {
160
+ if (toolCall.type === "function") {
161
+ content.push({
162
+ type: "tool_use",
163
+ id: toolCall.id,
164
+ name: toolCall.function.name,
165
+ input: JSON.parse(toolCall.function.arguments || "{}")
166
+ });
167
+ }
168
+ }
169
+ }
170
+ let stopReason = "end_turn";
171
+ switch (choice.finish_reason) {
172
+ case "stop":
173
+ stopReason = "end_turn";
174
+ break;
175
+ case "length":
176
+ stopReason = "max_tokens";
177
+ break;
178
+ case "tool_calls":
179
+ case "function_call":
180
+ stopReason = "tool_use";
181
+ break;
182
+ }
183
+ return {
184
+ content,
185
+ stopReason,
186
+ usage: {
187
+ inputTokens: response.usage?.prompt_tokens ?? 0,
188
+ outputTokens: response.usage?.completion_tokens ?? 0
189
+ }
190
+ };
191
+ }
192
+ wrapError(error, defaultMessage) {
193
+ const { ProviderError: ProviderError2 } = (init_types(), __toCommonJS(exports_types));
194
+ if (error instanceof ProviderError2) {
195
+ return error;
196
+ }
197
+ if (error instanceof Error) {
198
+ const statusCode = error.status;
199
+ let errorType = "unknown_error";
200
+ if (statusCode === 401 || statusCode === 403) {
201
+ errorType = "authentication_error";
202
+ } else if (statusCode === 429) {
203
+ errorType = "rate_limit_error";
204
+ } else if (statusCode === 400) {
205
+ errorType = "invalid_request_error";
206
+ } else if (statusCode && statusCode >= 500) {
207
+ errorType = "api_error";
208
+ } else if (error.message.includes("ECONNREFUSED") || error.message.includes("ENOTFOUND")) {
209
+ errorType = "connection_error";
210
+ } else if (error.message.includes("timeout") || error.message.includes("ETIMEDOUT")) {
211
+ errorType = "timeout_error";
212
+ }
213
+ return new ProviderError2(error.message || defaultMessage, errorType, statusCode, this.name, error);
214
+ }
215
+ return new ProviderError2(defaultMessage, "unknown_error", undefined, this.name);
216
+ }
217
+ async validateApiKey(apiKey) {
218
+ return true;
219
+ }
220
+ async listModels() {
221
+ return [];
222
+ }
223
+ }
224
+
225
+ // src/services/api/providers/AnthropicAdapter.ts
226
+ var exports_AnthropicAdapter = {};
227
+ __export(exports_AnthropicAdapter, {
228
+ AnthropicAdapter: () => AnthropicAdapter
229
+ });
230
+ var AnthropicAdapter;
231
+ var init_AnthropicAdapter = __esm(() => {
232
+ AnthropicAdapter = class AnthropicAdapter extends BaseAdapter {
233
+ name = "anthropic";
234
+ displayName = "Anthropic Claude";
235
+ capabilities = {
236
+ streaming: true,
237
+ toolUse: true,
238
+ vision: true,
239
+ thinking: true,
240
+ systemPrompt: true,
241
+ maxContextLength: 200000,
242
+ maxOutputTokens: 128000
243
+ };
244
+ client = null;
245
+ constructor(config = {}) {
246
+ super(config);
247
+ this.model = config.model ?? "claude-sonnet-4-20250514";
248
+ }
249
+ async getClient() {
250
+ if (this.client) {
251
+ return this.client;
252
+ }
253
+ const { default: AnthropicSDK } = await import("./chunk-0sbddf6m.js");
254
+ this.client = new AnthropicSDK({
255
+ apiKey: this.apiKey || process.env.ANTHROPIC_API_KEY,
256
+ baseURL: this.baseUrl || process.env.ANTHROPIC_BASE_URL,
257
+ timeout: this.timeout,
258
+ maxRetries: this.maxRetries
259
+ });
260
+ return this.client;
261
+ }
262
+ toAnthropicMessages(messages) {
263
+ return messages.filter((msg) => msg.role !== "system").map((msg) => ({
264
+ role: msg.role,
265
+ content: this.toAnthropicContent(msg.content)
266
+ }));
267
+ }
268
+ toAnthropicContent(content) {
269
+ if (typeof content === "string") {
270
+ return content;
271
+ }
272
+ return content.map((c) => {
273
+ switch (c.type) {
274
+ case "text":
275
+ return { type: "text", text: c.text };
276
+ case "image":
277
+ return {
278
+ type: "image",
279
+ source: {
280
+ type: c.source.type,
281
+ media_type: c.source.mediaType || "image/png",
282
+ data: c.source.data || ""
283
+ }
284
+ };
285
+ case "tool_use":
286
+ return {
287
+ type: "tool_use",
288
+ id: c.id,
289
+ name: c.name,
290
+ input: c.input
291
+ };
292
+ case "tool_result":
293
+ return {
294
+ type: "tool_result",
295
+ tool_use_id: c.toolUseId,
296
+ content: typeof c.content === "string" ? c.content : this.toAnthropicContent(c.content),
297
+ is_error: c.isError
298
+ };
299
+ case "thinking":
300
+ return {
301
+ type: "thinking",
302
+ thinking: c.thinking
303
+ };
304
+ default:
305
+ return { type: "text", text: "" };
306
+ }
307
+ });
308
+ }
309
+ toAnthropicTools(tools) {
310
+ return tools.map((tool) => ({
311
+ name: tool.name,
312
+ description: tool.description,
313
+ input_schema: tool.inputSchema
314
+ }));
315
+ }
316
+ fromAnthropicContent(content) {
317
+ return content.map((c) => {
318
+ switch (c.type) {
319
+ case "text":
320
+ return { type: "text", text: c.text };
321
+ case "tool_use":
322
+ return {
323
+ type: "tool_use",
324
+ id: c.id,
325
+ name: c.name,
326
+ input: c.input
327
+ };
328
+ case "thinking":
329
+ return {
330
+ type: "thinking",
331
+ thinking: c.thinking
332
+ };
333
+ default:
334
+ return { type: "text", text: "" };
335
+ }
336
+ });
337
+ }
338
+ async* chat(params) {
339
+ const client = await this.getClient();
340
+ const model = params.model || this.model || "claude-sonnet-4-20250514";
341
+ try {
342
+ const response = await client.messages.stream({
343
+ model,
344
+ max_tokens: params.maxTokens || 8192,
345
+ messages: this.toAnthropicMessages(params.messages),
346
+ system: this.systemPromptToString(params.systemPrompt),
347
+ tools: params.tools ? this.toAnthropicTools(params.tools) : undefined,
348
+ temperature: params.temperature,
349
+ top_p: params.topP,
350
+ stop_sequences: params.stopSequences
351
+ });
352
+ for await (const event of response) {
353
+ yield this.convertStreamEvent(event);
354
+ }
355
+ } catch (error) {
356
+ throw this.wrapError(error, "Anthropic API request failed");
357
+ }
358
+ }
359
+ convertStreamEvent(event) {
360
+ const e = event;
361
+ switch (e.type) {
362
+ case "message_start":
363
+ return {
364
+ type: "message_start",
365
+ message: e.message
366
+ };
367
+ case "content_block_start":
368
+ return {
369
+ type: "content_block_start",
370
+ index: e.index,
371
+ contentBlock: e.content_block
372
+ };
373
+ case "content_block_delta":
374
+ return {
375
+ type: "content_block_delta",
376
+ index: e.index,
377
+ delta: e.delta
378
+ };
379
+ case "content_block_stop":
380
+ return {
381
+ type: "content_block_stop",
382
+ index: e.index
383
+ };
384
+ case "message_delta":
385
+ return {
386
+ type: "message_delta",
387
+ usage: e.usage
388
+ };
389
+ case "message_stop":
390
+ return { type: "message_stop" };
391
+ default:
392
+ return { type: "message_stop" };
393
+ }
394
+ }
395
+ async chatSync(params) {
396
+ const client = await this.getClient();
397
+ const model = params.model || this.model || "claude-sonnet-4-20250514";
398
+ try {
399
+ const response = await client.messages.create({
400
+ model,
401
+ max_tokens: params.maxTokens || 8192,
402
+ messages: this.toAnthropicMessages(params.messages),
403
+ system: this.systemPromptToString(params.systemPrompt),
404
+ tools: params.tools ? this.toAnthropicTools(params.tools) : undefined,
405
+ temperature: params.temperature,
406
+ top_p: params.topP,
407
+ stop_sequences: params.stopSequences
408
+ });
409
+ return {
410
+ id: response.id,
411
+ model: response.model,
412
+ role: "assistant",
413
+ content: this.fromAnthropicContent(response.content),
414
+ stopReason: this.convertStopReason(response.stop_reason),
415
+ usage: {
416
+ inputTokens: response.usage.input_tokens,
417
+ outputTokens: response.usage.output_tokens
418
+ }
419
+ };
420
+ } catch (error) {
421
+ throw this.wrapError(error, "Anthropic API request failed");
422
+ }
423
+ }
424
+ convertStopReason(reason) {
425
+ switch (reason) {
426
+ case "end_turn":
427
+ return "end_turn";
428
+ case "max_tokens":
429
+ return "max_tokens";
430
+ case "stop_sequence":
431
+ return "stop_sequence";
432
+ case "tool_use":
433
+ return "tool_use";
434
+ default:
435
+ return null;
436
+ }
437
+ }
438
+ async validateApiKey(apiKey) {
439
+ try {
440
+ const { default: AnthropicSDK } = await import("./chunk-0sbddf6m.js");
441
+ const tempClient = new AnthropicSDK({
442
+ apiKey,
443
+ baseURL: this.baseUrl,
444
+ timeout: 1e4,
445
+ maxRetries: 0
446
+ });
447
+ await tempClient.messages.create({
448
+ model: "claude-3-haiku-20240307",
449
+ max_tokens: 1,
450
+ messages: [{ role: "user", content: "test" }]
451
+ });
452
+ return true;
453
+ } catch {
454
+ return false;
455
+ }
456
+ }
457
+ async listModels() {
458
+ return [
459
+ "claude-sonnet-4-20250514",
460
+ "claude-opus-4-20250514",
461
+ "claude-3-5-sonnet-20241022",
462
+ "claude-3-5-haiku-20241022",
463
+ "claude-3-opus-20240229",
464
+ "claude-3-sonnet-20240229",
465
+ "claude-3-haiku-20240307"
466
+ ];
467
+ }
468
+ };
469
+ });
470
+
471
+ // src/services/api/providers/OpenAICompatAdapter.ts
472
+ var exports_OpenAICompatAdapter = {};
473
+ __export(exports_OpenAICompatAdapter, {
474
+ OpenAICompatAdapter: () => OpenAICompatAdapter
475
+ });
476
+ var OpenAICompatAdapter;
477
+ var init_OpenAICompatAdapter = __esm(() => {
478
+ init_types();
479
+ OpenAICompatAdapter = class OpenAICompatAdapter extends BaseAdapter {
480
+ name = "openai-compat";
481
+ displayName = "OpenAI Compatible";
482
+ capabilities = {
483
+ streaming: true,
484
+ toolUse: true,
485
+ vision: true,
486
+ thinking: false,
487
+ systemPrompt: true,
488
+ maxContextLength: 128000,
489
+ maxOutputTokens: 16384
490
+ };
491
+ constructor(config = {}) {
492
+ super(config);
493
+ this.baseUrl = config.baseUrl || process.env.OPENAI_BASE_URL || "https://api.openai.com/v1";
494
+ this.apiKey = config.apiKey || process.env.OPENAI_API_KEY;
495
+ this.model = config.model || process.env.OPENAI_MODEL || "gpt-4o";
496
+ }
497
+ getHeaders() {
498
+ const headers = {
499
+ "Content-Type": "application/json"
500
+ };
501
+ if (this.apiKey) {
502
+ headers["Authorization"] = `Bearer ${this.apiKey}`;
503
+ }
504
+ return headers;
505
+ }
506
+ async* chat(params) {
507
+ const model = params.model || this.model || "gpt-4o";
508
+ const url = `${this.baseUrl}/chat/completions`;
509
+ const body = {
510
+ model,
511
+ messages: this.messagesToOpenAIFormat(params.messages, params.systemPrompt),
512
+ max_tokens: params.maxTokens || 4096,
513
+ temperature: params.temperature,
514
+ top_p: params.topP,
515
+ stop: params.stopSequences,
516
+ stream: true,
517
+ ...params.tools && params.tools.length > 0 && {
518
+ tools: this.toolsToOpenAIFormat(params.tools),
519
+ tool_choice: "auto"
520
+ }
521
+ };
522
+ try {
523
+ const response = await fetch(url, {
524
+ method: "POST",
525
+ headers: this.getHeaders(),
526
+ body: JSON.stringify(body),
527
+ signal: params.signal
528
+ });
529
+ if (!response.ok) {
530
+ const errorText = await response.text();
531
+ throw new ProviderError(`OpenAI API error: ${errorText}`, response.status === 401 ? "authentication_error" : response.status === 429 ? "rate_limit_error" : response.status === 400 ? "invalid_request_error" : "api_error", response.status, this.name);
532
+ }
533
+ if (!response.body) {
534
+ throw new ProviderError("No response body", "api_error", undefined, this.name);
535
+ }
536
+ yield {
537
+ type: "message_start",
538
+ message: { id: "", model, role: "assistant", content: [], stopReason: null, usage: { inputTokens: 0, outputTokens: 0 } }
539
+ };
540
+ const reader = response.body.getReader();
541
+ const decoder = new TextDecoder;
542
+ let buffer = "";
543
+ let contentIndex = 0;
544
+ let currentContent = "";
545
+ const toolCalls = new Map;
546
+ while (true) {
547
+ const { done, value } = await reader.read();
548
+ if (done)
549
+ break;
550
+ buffer += decoder.decode(value, { stream: true });
551
+ const lines = buffer.split(`
552
+ `);
553
+ buffer = lines.pop() || "";
554
+ for (const line of lines) {
555
+ if (line.startsWith("data: ")) {
556
+ const data = line.slice(6).trim();
557
+ if (data === "[DONE]") {
558
+ if (currentContent) {
559
+ yield { type: "content_block_stop", index: contentIndex };
560
+ }
561
+ for (const [idx, tc] of toolCalls) {
562
+ yield {
563
+ type: "content_block_start",
564
+ index: idx + 1,
565
+ contentBlock: {
566
+ type: "tool_use",
567
+ id: tc.id,
568
+ name: tc.name,
569
+ input: JSON.parse(tc.arguments || "{}")
570
+ }
571
+ };
572
+ yield { type: "content_block_stop", index: idx + 1 };
573
+ }
574
+ yield { type: "message_stop" };
575
+ return;
576
+ }
577
+ try {
578
+ const chunk = JSON.parse(data);
579
+ const choice = chunk.choices[0];
580
+ if (!choice)
581
+ continue;
582
+ const delta = choice.delta;
583
+ if (delta.content) {
584
+ if (!currentContent) {
585
+ yield {
586
+ type: "content_block_start",
587
+ index: contentIndex,
588
+ contentBlock: { type: "text", text: "" }
589
+ };
590
+ }
591
+ currentContent += delta.content;
592
+ yield {
593
+ type: "content_block_delta",
594
+ index: contentIndex,
595
+ delta: { type: "text_delta", text: delta.content }
596
+ };
597
+ }
598
+ if (delta.tool_calls) {
599
+ for (const tc of delta.tool_calls) {
600
+ const existing = toolCalls.get(tc.index) || { id: "", name: "", arguments: "" };
601
+ if (tc.id)
602
+ existing.id = tc.id;
603
+ if (tc.function?.name)
604
+ existing.name = tc.function.name;
605
+ if (tc.function?.arguments)
606
+ existing.arguments += tc.function.arguments;
607
+ toolCalls.set(tc.index, existing);
608
+ }
609
+ }
610
+ if (choice.finish_reason) {
611
+ yield {
612
+ type: "message_delta",
613
+ usage: { inputTokens: 0, outputTokens: 0 }
614
+ };
615
+ }
616
+ } catch {}
617
+ }
618
+ }
619
+ }
620
+ } catch (error) {
621
+ if (error instanceof ProviderError) {
622
+ throw error;
623
+ }
624
+ throw this.wrapError(error, "OpenAI compatible API request failed");
625
+ }
626
+ }
627
+ async chatSync(params) {
628
+ const model = params.model || this.model || "gpt-4o";
629
+ const url = `${this.baseUrl}/chat/completions`;
630
+ const body = {
631
+ model,
632
+ messages: this.messagesToOpenAIFormat(params.messages, params.systemPrompt),
633
+ max_tokens: params.maxTokens || 4096,
634
+ temperature: params.temperature,
635
+ top_p: params.topP,
636
+ stop: params.stopSequences,
637
+ stream: false,
638
+ ...params.tools && params.tools.length > 0 && {
639
+ tools: this.toolsToOpenAIFormat(params.tools),
640
+ tool_choice: "auto"
641
+ }
642
+ };
643
+ try {
644
+ const response = await fetch(url, {
645
+ method: "POST",
646
+ headers: this.getHeaders(),
647
+ body: JSON.stringify(body),
648
+ signal: params.signal
649
+ });
650
+ if (!response.ok) {
651
+ const errorText = await response.text();
652
+ throw new ProviderError(`OpenAI API error: ${errorText}`, response.status === 401 ? "authentication_error" : response.status === 429 ? "rate_limit_error" : response.status === 400 ? "invalid_request_error" : "api_error", response.status, this.name);
653
+ }
654
+ const data = await response.json();
655
+ const { content, stopReason, usage } = this.contentFromOpenAIResponse(data);
656
+ return {
657
+ id: data.id,
658
+ model: data.model,
659
+ role: "assistant",
660
+ content,
661
+ stopReason,
662
+ usage
663
+ };
664
+ } catch (error) {
665
+ if (error instanceof ProviderError) {
666
+ throw error;
667
+ }
668
+ throw this.wrapError(error, "OpenAI compatible API request failed");
669
+ }
670
+ }
671
+ async validateApiKey(apiKey) {
672
+ try {
673
+ const url = `${this.baseUrl}/models`;
674
+ const response = await fetch(url, {
675
+ method: "GET",
676
+ headers: {
677
+ Authorization: `Bearer ${apiKey}`
678
+ }
679
+ });
680
+ return response.ok;
681
+ } catch {
682
+ return false;
683
+ }
684
+ }
685
+ async listModels() {
686
+ try {
687
+ const url = `${this.baseUrl}/models`;
688
+ const response = await fetch(url, {
689
+ method: "GET",
690
+ headers: this.getHeaders()
691
+ });
692
+ if (!response.ok) {
693
+ return [];
694
+ }
695
+ const data = await response.json();
696
+ return data.data.map((m) => m.id);
697
+ } catch {
698
+ return [];
699
+ }
700
+ }
701
+ };
702
+ });
703
+
704
+ // src/services/api/providers/MiniMaxAdapter.ts
705
+ var exports_MiniMaxAdapter = {};
706
+ __export(exports_MiniMaxAdapter, {
707
+ MiniMaxAdapter: () => MiniMaxAdapter
708
+ });
709
+ var MiniMaxAdapter;
710
+ var init_MiniMaxAdapter = __esm(() => {
711
+ init_types();
712
+ MiniMaxAdapter = class MiniMaxAdapter extends BaseAdapter {
713
+ name = "minimax";
714
+ displayName = "MiniMax";
715
+ capabilities = {
716
+ streaming: true,
717
+ toolUse: true,
718
+ vision: true,
719
+ thinking: true,
720
+ systemPrompt: true,
721
+ maxContextLength: 1e6,
722
+ maxOutputTokens: 16384
723
+ };
724
+ groupId;
725
+ constructor(config = {}) {
726
+ super(config);
727
+ this.baseUrl = config.baseUrl || process.env.MINIMAX_BASE_URL || "https://api.minimaxi.com/v1";
728
+ this.apiKey = config.apiKey || process.env.MINIMAX_API_KEY;
729
+ this.model = config.model || process.env.MINIMAX_MODEL || "MiniMax-M2.7";
730
+ this.groupId = config.groupId || process.env.MINIMAX_GROUP_ID;
731
+ }
732
+ getHeaders() {
733
+ const headers = {
734
+ "Content-Type": "application/json"
735
+ };
736
+ if (this.apiKey) {
737
+ headers["Authorization"] = `Bearer ${this.apiKey}`;
738
+ }
739
+ return headers;
740
+ }
741
+ toMiniMaxMessages(messages, systemPrompt) {
742
+ const result = [];
743
+ const sysPrompt = this.systemPromptToString(systemPrompt);
744
+ if (sysPrompt) {
745
+ result.push({ role: "system", content: sysPrompt });
746
+ }
747
+ for (const msg of messages) {
748
+ if (msg.role === "system") {
749
+ result.push({
750
+ role: "system",
751
+ content: this.extractTextContent(msg.content)
752
+ });
753
+ } else if (msg.role === "user") {
754
+ if (typeof msg.content !== "string") {
755
+ const toolResults = this.extractToolResults(msg.content);
756
+ if (toolResults.length > 0) {
757
+ for (const toolResult of toolResults) {
758
+ result.push({
759
+ role: "tool",
760
+ tool_call_id: toolResult.toolUseId,
761
+ content: typeof toolResult.content === "string" ? toolResult.content : this.extractTextContent(toolResult.content)
762
+ });
763
+ }
764
+ continue;
765
+ }
766
+ }
767
+ result.push({
768
+ role: "user",
769
+ content: this.extractTextContent(msg.content)
770
+ });
771
+ } else if (msg.role === "assistant") {
772
+ const toolUses = typeof msg.content === "string" ? [] : this.extractToolUses(msg.content);
773
+ const text = this.extractTextContent(msg.content);
774
+ if (toolUses.length > 0) {
775
+ result.push({
776
+ role: "assistant",
777
+ content: text || undefined,
778
+ tool_calls: toolUses.map((tu) => ({
779
+ id: tu.id,
780
+ type: "function",
781
+ function: {
782
+ name: tu.name,
783
+ arguments: JSON.stringify(tu.input)
784
+ }
785
+ }))
786
+ });
787
+ } else {
788
+ result.push({
789
+ role: "assistant",
790
+ content: text
791
+ });
792
+ }
793
+ }
794
+ }
795
+ return result;
796
+ }
797
+ toMiniMaxTools(tools) {
798
+ return tools.map((tool) => ({
799
+ type: "function",
800
+ function: {
801
+ name: tool.name,
802
+ description: tool.description,
803
+ parameters: tool.inputSchema
804
+ }
805
+ }));
806
+ }
807
+ async* chat(params) {
808
+ const model = params.model || this.model || "MiniMax-M2.7";
809
+ const url = `${this.baseUrl}/chat/completions`;
810
+ const body = {
811
+ model,
812
+ messages: this.toMiniMaxMessages(params.messages, params.systemPrompt),
813
+ max_tokens: params.maxTokens || 4096,
814
+ temperature: params.temperature ?? 0.7,
815
+ top_p: params.topP ?? 0.9,
816
+ stream: true
817
+ };
818
+ if (params.tools && params.tools.length > 0) {
819
+ body.tools = this.toMiniMaxTools(params.tools);
820
+ body.tool_choice = "auto";
821
+ }
822
+ if (params.thinking?.type === "enabled" && model.includes("M2")) {
823
+ body.thinking = {
824
+ type: "enabled",
825
+ budget_tokens: params.thinking.budgetTokens || 1e4
826
+ };
827
+ }
828
+ try {
829
+ const response = await fetch(url, {
830
+ method: "POST",
831
+ headers: this.getHeaders(),
832
+ body: JSON.stringify(body),
833
+ signal: params.signal
834
+ });
835
+ if (!response.ok) {
836
+ const errorText = await response.text();
837
+ throw new ProviderError(`MiniMax API error: ${errorText}`, response.status === 401 ? "authentication_error" : response.status === 429 ? "rate_limit_error" : response.status === 400 ? "invalid_request_error" : "api_error", response.status, this.name);
838
+ }
839
+ if (!response.body) {
840
+ throw new ProviderError("No response body", "api_error", undefined, this.name);
841
+ }
842
+ yield {
843
+ type: "message_start",
844
+ message: {
845
+ id: "",
846
+ model,
847
+ role: "assistant",
848
+ content: [],
849
+ stopReason: null,
850
+ usage: { inputTokens: 0, outputTokens: 0 }
851
+ }
852
+ };
853
+ const reader = response.body.getReader();
854
+ const decoder = new TextDecoder;
855
+ let buffer = "";
856
+ let contentIndex = 0;
857
+ let currentContent = "";
858
+ const toolCalls = new Map;
859
+ while (true) {
860
+ const { done, value } = await reader.read();
861
+ if (done)
862
+ break;
863
+ buffer += decoder.decode(value, { stream: true });
864
+ const lines = buffer.split(`
865
+ `);
866
+ buffer = lines.pop() || "";
867
+ for (const line of lines) {
868
+ if (line.startsWith("data: ")) {
869
+ const data = line.slice(6).trim();
870
+ if (data === "[DONE]") {
871
+ if (currentContent) {
872
+ yield { type: "content_block_stop", index: contentIndex };
873
+ }
874
+ for (const [idx, tc] of toolCalls) {
875
+ yield {
876
+ type: "content_block_start",
877
+ index: idx + 1,
878
+ contentBlock: {
879
+ type: "tool_use",
880
+ id: tc.id,
881
+ name: tc.name,
882
+ input: JSON.parse(tc.arguments || "{}")
883
+ }
884
+ };
885
+ yield { type: "content_block_stop", index: idx + 1 };
886
+ }
887
+ yield { type: "message_stop" };
888
+ return;
889
+ }
890
+ try {
891
+ const chunk = JSON.parse(data);
892
+ const choice = chunk.choices[0];
893
+ if (!choice)
894
+ continue;
895
+ const delta = choice.delta;
896
+ if (delta.content) {
897
+ if (!currentContent) {
898
+ yield {
899
+ type: "content_block_start",
900
+ index: contentIndex,
901
+ contentBlock: { type: "text", text: "" }
902
+ };
903
+ }
904
+ currentContent += delta.content;
905
+ yield {
906
+ type: "content_block_delta",
907
+ index: contentIndex,
908
+ delta: { type: "text_delta", text: delta.content }
909
+ };
910
+ }
911
+ if (delta.tool_calls) {
912
+ for (const tc of delta.tool_calls) {
913
+ const existing = toolCalls.get(tc.index) || { id: "", name: "", arguments: "" };
914
+ if (tc.id)
915
+ existing.id = tc.id;
916
+ if (tc.function?.name)
917
+ existing.name = tc.function.name;
918
+ if (tc.function?.arguments)
919
+ existing.arguments += tc.function.arguments;
920
+ toolCalls.set(tc.index, existing);
921
+ }
922
+ }
923
+ if (chunk.usage) {
924
+ yield {
925
+ type: "message_delta",
926
+ usage: {
927
+ inputTokens: chunk.usage.prompt_tokens,
928
+ outputTokens: chunk.usage.completion_tokens
929
+ }
930
+ };
931
+ }
932
+ } catch {}
933
+ }
934
+ }
935
+ }
936
+ } catch (error) {
937
+ if (error instanceof ProviderError) {
938
+ throw error;
939
+ }
940
+ throw this.wrapError(error, "MiniMax API request failed");
941
+ }
942
+ }
943
+ async chatSync(params) {
944
+ const model = params.model || this.model || "MiniMax-M2.7";
945
+ const url = `${this.baseUrl}/chat/completions`;
946
+ const body = {
947
+ model,
948
+ messages: this.toMiniMaxMessages(params.messages, params.systemPrompt),
949
+ max_tokens: params.maxTokens || 4096,
950
+ temperature: params.temperature ?? 0.7,
951
+ top_p: params.topP ?? 0.9,
952
+ stream: false
953
+ };
954
+ if (params.tools && params.tools.length > 0) {
955
+ body.tools = this.toMiniMaxTools(params.tools);
956
+ body.tool_choice = "auto";
957
+ }
958
+ if (params.thinking?.type === "enabled" && model.includes("M2")) {
959
+ body.thinking = {
960
+ type: "enabled",
961
+ budget_tokens: params.thinking.budgetTokens || 1e4
962
+ };
963
+ }
964
+ try {
965
+ const response = await fetch(url, {
966
+ method: "POST",
967
+ headers: this.getHeaders(),
968
+ body: JSON.stringify(body),
969
+ signal: params.signal
970
+ });
971
+ if (!response.ok) {
972
+ const errorText = await response.text();
973
+ throw new ProviderError(`MiniMax API error: ${errorText}`, response.status === 401 ? "authentication_error" : response.status === 429 ? "rate_limit_error" : response.status === 400 ? "invalid_request_error" : "api_error", response.status, this.name);
974
+ }
975
+ const data = await response.json();
976
+ if (data.base_resp && data.base_resp.status_code !== 0) {
977
+ throw new ProviderError(`MiniMax API error: ${data.base_resp.status_msg}`, "api_error", data.base_resp.status_code, this.name);
978
+ }
979
+ const choice = data.choices[0];
980
+ if (!choice) {
981
+ return {
982
+ id: data.id,
983
+ model: data.model,
984
+ role: "assistant",
985
+ content: [],
986
+ stopReason: "end_turn",
987
+ usage: {
988
+ inputTokens: data.usage?.prompt_tokens || 0,
989
+ outputTokens: data.usage?.completion_tokens || 0
990
+ }
991
+ };
992
+ }
993
+ const content = [];
994
+ const message = choice.message;
995
+ if (message.content) {
996
+ content.push({ type: "text", text: message.content });
997
+ }
998
+ if (message.tool_calls) {
999
+ for (const toolCall of message.tool_calls) {
1000
+ content.push({
1001
+ type: "tool_use",
1002
+ id: toolCall.id,
1003
+ name: toolCall.function.name,
1004
+ input: JSON.parse(toolCall.function.arguments || "{}")
1005
+ });
1006
+ }
1007
+ }
1008
+ let stopReason = "end_turn";
1009
+ switch (choice.finish_reason) {
1010
+ case "stop":
1011
+ stopReason = "end_turn";
1012
+ break;
1013
+ case "length":
1014
+ stopReason = "max_tokens";
1015
+ break;
1016
+ case "tool_calls":
1017
+ stopReason = "tool_use";
1018
+ break;
1019
+ }
1020
+ return {
1021
+ id: data.id,
1022
+ model: data.model,
1023
+ role: "assistant",
1024
+ content,
1025
+ stopReason,
1026
+ usage: {
1027
+ inputTokens: data.usage?.prompt_tokens || 0,
1028
+ outputTokens: data.usage?.completion_tokens || 0
1029
+ }
1030
+ };
1031
+ } catch (error) {
1032
+ if (error instanceof ProviderError) {
1033
+ throw error;
1034
+ }
1035
+ throw this.wrapError(error, "MiniMax API request failed");
1036
+ }
1037
+ }
1038
+ async validateApiKey(apiKey) {
1039
+ try {
1040
+ const url = `${this.baseUrl}/models`;
1041
+ const response = await fetch(url, {
1042
+ method: "GET",
1043
+ headers: {
1044
+ Authorization: `Bearer ${apiKey}`
1045
+ }
1046
+ });
1047
+ return response.ok;
1048
+ } catch {
1049
+ return false;
1050
+ }
1051
+ }
1052
+ async listModels() {
1053
+ return [
1054
+ "MiniMax-M1",
1055
+ "MiniMax-M2",
1056
+ "MiniMax-M2.7",
1057
+ "abab6.5s-chat",
1058
+ "abab6.5g-chat",
1059
+ "abab6.5t-chat",
1060
+ "abab5.5s-chat",
1061
+ "abab5.5-chat"
1062
+ ];
1063
+ }
1064
+ };
1065
+ });
1066
+
1067
+ // src/services/api/providers/GLMAdapter.ts
1068
+ var exports_GLMAdapter = {};
1069
+ __export(exports_GLMAdapter, {
1070
+ GLMAdapter: () => GLMAdapter
1071
+ });
1072
+ var GLMAdapter;
1073
+ var init_GLMAdapter = __esm(() => {
1074
+ init_types();
1075
+ GLMAdapter = class GLMAdapter extends BaseAdapter {
1076
+ name = "glm";
1077
+ displayName = "\u667A\u8C31 GLM";
1078
+ capabilities = {
1079
+ streaming: true,
1080
+ toolUse: true,
1081
+ vision: true,
1082
+ thinking: false,
1083
+ systemPrompt: true,
1084
+ maxContextLength: 128000,
1085
+ maxOutputTokens: 4096
1086
+ };
1087
+ constructor(config = {}) {
1088
+ super(config);
1089
+ this.baseUrl = config.baseUrl || process.env.GLM_BASE_URL || "https://open.bigmodel.cn/api/paas/v4";
1090
+ this.apiKey = config.apiKey || process.env.GLM_API_KEY || process.env.ZHIPU_API_KEY;
1091
+ this.model = config.model || process.env.GLM_MODEL || "glm-4-plus";
1092
+ }
1093
+ getHeaders() {
1094
+ const headers = {
1095
+ "Content-Type": "application/json"
1096
+ };
1097
+ if (this.apiKey) {
1098
+ headers["Authorization"] = `Bearer ${this.apiKey}`;
1099
+ }
1100
+ return headers;
1101
+ }
1102
+ toGLMMessages(messages, systemPrompt) {
1103
+ const result = [];
1104
+ const sysPrompt = this.systemPromptToString(systemPrompt);
1105
+ if (sysPrompt) {
1106
+ result.push({ role: "system", content: sysPrompt });
1107
+ }
1108
+ for (const msg of messages) {
1109
+ if (msg.role === "system") {
1110
+ result.push({
1111
+ role: "system",
1112
+ content: this.extractTextContent(msg.content)
1113
+ });
1114
+ } else if (msg.role === "user") {
1115
+ if (typeof msg.content !== "string") {
1116
+ const toolResults = this.extractToolResults(msg.content);
1117
+ if (toolResults.length > 0) {
1118
+ for (const toolResult of toolResults) {
1119
+ result.push({
1120
+ role: "tool",
1121
+ tool_call_id: toolResult.toolUseId,
1122
+ content: typeof toolResult.content === "string" ? toolResult.content : this.extractTextContent(toolResult.content)
1123
+ });
1124
+ }
1125
+ continue;
1126
+ }
1127
+ const hasImage = msg.content.some((c) => c.type === "image");
1128
+ if (hasImage) {
1129
+ const contentArray = [];
1130
+ for (const c of msg.content) {
1131
+ if (c.type === "text") {
1132
+ contentArray.push({ type: "text", text: c.text });
1133
+ } else if (c.type === "image") {
1134
+ const imageUrl = c.source.url || (c.source.data ? `data:${c.source.mediaType || "image/png"};base64,${c.source.data}` : "");
1135
+ contentArray.push({ type: "image_url", image_url: { url: imageUrl } });
1136
+ }
1137
+ }
1138
+ result.push({ role: "user", content: contentArray });
1139
+ continue;
1140
+ }
1141
+ }
1142
+ result.push({
1143
+ role: "user",
1144
+ content: this.extractTextContent(msg.content)
1145
+ });
1146
+ } else if (msg.role === "assistant") {
1147
+ const toolUses = typeof msg.content === "string" ? [] : this.extractToolUses(msg.content);
1148
+ const text = this.extractTextContent(msg.content);
1149
+ if (toolUses.length > 0) {
1150
+ result.push({
1151
+ role: "assistant",
1152
+ content: text || undefined,
1153
+ tool_calls: toolUses.map((tu) => ({
1154
+ id: tu.id,
1155
+ type: "function",
1156
+ function: {
1157
+ name: tu.name,
1158
+ arguments: JSON.stringify(tu.input)
1159
+ }
1160
+ }))
1161
+ });
1162
+ } else {
1163
+ result.push({
1164
+ role: "assistant",
1165
+ content: text
1166
+ });
1167
+ }
1168
+ }
1169
+ }
1170
+ return result;
1171
+ }
1172
+ toGLMTools(tools) {
1173
+ return tools.map((tool) => ({
1174
+ type: "function",
1175
+ function: {
1176
+ name: tool.name,
1177
+ description: tool.description,
1178
+ parameters: tool.inputSchema
1179
+ }
1180
+ }));
1181
+ }
1182
+ async* chat(params) {
1183
+ const model = params.model || this.model || "glm-4-plus";
1184
+ const url = `${this.baseUrl}/chat/completions`;
1185
+ const body = {
1186
+ model,
1187
+ messages: this.toGLMMessages(params.messages, params.systemPrompt),
1188
+ max_tokens: params.maxTokens || 4096,
1189
+ temperature: params.temperature ?? 0.7,
1190
+ top_p: params.topP ?? 0.9,
1191
+ stream: true
1192
+ };
1193
+ if (params.tools && params.tools.length > 0) {
1194
+ body.tools = this.toGLMTools(params.tools);
1195
+ body.tool_choice = "auto";
1196
+ }
1197
+ try {
1198
+ const response = await fetch(url, {
1199
+ method: "POST",
1200
+ headers: this.getHeaders(),
1201
+ body: JSON.stringify(body),
1202
+ signal: params.signal
1203
+ });
1204
+ if (!response.ok) {
1205
+ const errorText = await response.text();
1206
+ throw new ProviderError(`GLM API error: ${errorText}`, response.status === 401 ? "authentication_error" : response.status === 429 ? "rate_limit_error" : response.status === 400 ? "invalid_request_error" : "api_error", response.status, this.name);
1207
+ }
1208
+ if (!response.body) {
1209
+ throw new ProviderError("No response body", "api_error", undefined, this.name);
1210
+ }
1211
+ yield {
1212
+ type: "message_start",
1213
+ message: {
1214
+ id: "",
1215
+ model,
1216
+ role: "assistant",
1217
+ content: [],
1218
+ stopReason: null,
1219
+ usage: { inputTokens: 0, outputTokens: 0 }
1220
+ }
1221
+ };
1222
+ const reader = response.body.getReader();
1223
+ const decoder = new TextDecoder;
1224
+ let buffer = "";
1225
+ let contentIndex = 0;
1226
+ let currentContent = "";
1227
+ const toolCalls = new Map;
1228
+ while (true) {
1229
+ const { done, value } = await reader.read();
1230
+ if (done)
1231
+ break;
1232
+ buffer += decoder.decode(value, { stream: true });
1233
+ const lines = buffer.split(`
1234
+ `);
1235
+ buffer = lines.pop() || "";
1236
+ for (const line of lines) {
1237
+ if (line.startsWith("data: ")) {
1238
+ const data = line.slice(6).trim();
1239
+ if (data === "[DONE]") {
1240
+ if (currentContent) {
1241
+ yield { type: "content_block_stop", index: contentIndex };
1242
+ }
1243
+ for (const [idx, tc] of toolCalls) {
1244
+ yield {
1245
+ type: "content_block_start",
1246
+ index: idx + 1,
1247
+ contentBlock: {
1248
+ type: "tool_use",
1249
+ id: tc.id,
1250
+ name: tc.name,
1251
+ input: JSON.parse(tc.arguments || "{}")
1252
+ }
1253
+ };
1254
+ yield { type: "content_block_stop", index: idx + 1 };
1255
+ }
1256
+ yield { type: "message_stop" };
1257
+ return;
1258
+ }
1259
+ try {
1260
+ const chunk = JSON.parse(data);
1261
+ const choice = chunk.choices[0];
1262
+ if (!choice)
1263
+ continue;
1264
+ const delta = choice.delta;
1265
+ if (delta.content && typeof delta.content === "string") {
1266
+ if (!currentContent) {
1267
+ yield {
1268
+ type: "content_block_start",
1269
+ index: contentIndex,
1270
+ contentBlock: { type: "text", text: "" }
1271
+ };
1272
+ }
1273
+ currentContent += delta.content;
1274
+ yield {
1275
+ type: "content_block_delta",
1276
+ index: contentIndex,
1277
+ delta: { type: "text_delta", text: delta.content }
1278
+ };
1279
+ }
1280
+ if (delta.tool_calls) {
1281
+ for (const tc of delta.tool_calls) {
1282
+ const existing = toolCalls.get(tc.index) || { id: "", name: "", arguments: "" };
1283
+ if (tc.id)
1284
+ existing.id = tc.id;
1285
+ if (tc.function?.name)
1286
+ existing.name = tc.function.name;
1287
+ if (tc.function?.arguments)
1288
+ existing.arguments += tc.function.arguments;
1289
+ toolCalls.set(tc.index, existing);
1290
+ }
1291
+ }
1292
+ if (chunk.usage) {
1293
+ yield {
1294
+ type: "message_delta",
1295
+ usage: {
1296
+ inputTokens: chunk.usage.prompt_tokens,
1297
+ outputTokens: chunk.usage.completion_tokens
1298
+ }
1299
+ };
1300
+ }
1301
+ } catch {}
1302
+ }
1303
+ }
1304
+ }
1305
+ } catch (error) {
1306
+ if (error instanceof ProviderError) {
1307
+ throw error;
1308
+ }
1309
+ throw this.wrapError(error, "GLM API request failed");
1310
+ }
1311
+ }
1312
+ async chatSync(params) {
1313
+ const model = params.model || this.model || "glm-4-plus";
1314
+ const url = `${this.baseUrl}/chat/completions`;
1315
+ const body = {
1316
+ model,
1317
+ messages: this.toGLMMessages(params.messages, params.systemPrompt),
1318
+ max_tokens: params.maxTokens || 4096,
1319
+ temperature: params.temperature ?? 0.7,
1320
+ top_p: params.topP ?? 0.9,
1321
+ stream: false
1322
+ };
1323
+ if (params.tools && params.tools.length > 0) {
1324
+ body.tools = this.toGLMTools(params.tools);
1325
+ body.tool_choice = "auto";
1326
+ }
1327
+ try {
1328
+ const response = await fetch(url, {
1329
+ method: "POST",
1330
+ headers: this.getHeaders(),
1331
+ body: JSON.stringify(body),
1332
+ signal: params.signal
1333
+ });
1334
+ if (!response.ok) {
1335
+ const errorText = await response.text();
1336
+ throw new ProviderError(`GLM API error: ${errorText}`, response.status === 401 ? "authentication_error" : response.status === 429 ? "rate_limit_error" : response.status === 400 ? "invalid_request_error" : "api_error", response.status, this.name);
1337
+ }
1338
+ const data = await response.json();
1339
+ const choice = data.choices[0];
1340
+ if (!choice) {
1341
+ return {
1342
+ id: data.id,
1343
+ model: data.model,
1344
+ role: "assistant",
1345
+ content: [],
1346
+ stopReason: "end_turn",
1347
+ usage: {
1348
+ inputTokens: data.usage?.prompt_tokens || 0,
1349
+ outputTokens: data.usage?.completion_tokens || 0
1350
+ }
1351
+ };
1352
+ }
1353
+ const content = [];
1354
+ const message = choice.message;
1355
+ if (message.content) {
1356
+ if (typeof message.content === "string") {
1357
+ content.push({ type: "text", text: message.content });
1358
+ }
1359
+ }
1360
+ if (message.tool_calls) {
1361
+ for (const toolCall of message.tool_calls) {
1362
+ content.push({
1363
+ type: "tool_use",
1364
+ id: toolCall.id,
1365
+ name: toolCall.function.name,
1366
+ input: JSON.parse(toolCall.function.arguments || "{}")
1367
+ });
1368
+ }
1369
+ }
1370
+ let stopReason = "end_turn";
1371
+ switch (choice.finish_reason) {
1372
+ case "stop":
1373
+ stopReason = "end_turn";
1374
+ break;
1375
+ case "length":
1376
+ stopReason = "max_tokens";
1377
+ break;
1378
+ case "tool_calls":
1379
+ stopReason = "tool_use";
1380
+ break;
1381
+ }
1382
+ return {
1383
+ id: data.id,
1384
+ model: data.model,
1385
+ role: "assistant",
1386
+ content,
1387
+ stopReason,
1388
+ usage: {
1389
+ inputTokens: data.usage?.prompt_tokens || 0,
1390
+ outputTokens: data.usage?.completion_tokens || 0
1391
+ }
1392
+ };
1393
+ } catch (error) {
1394
+ if (error instanceof ProviderError) {
1395
+ throw error;
1396
+ }
1397
+ throw this.wrapError(error, "GLM API request failed");
1398
+ }
1399
+ }
1400
+ async validateApiKey(apiKey) {
1401
+ try {
1402
+ const url = `${this.baseUrl}/chat/completions`;
1403
+ const response = await fetch(url, {
1404
+ method: "POST",
1405
+ headers: {
1406
+ "Content-Type": "application/json",
1407
+ Authorization: `Bearer ${apiKey}`
1408
+ },
1409
+ body: JSON.stringify({
1410
+ model: "glm-4-flash",
1411
+ messages: [{ role: "user", content: "hi" }],
1412
+ max_tokens: 1
1413
+ })
1414
+ });
1415
+ return response.ok;
1416
+ } catch {
1417
+ return false;
1418
+ }
1419
+ }
1420
+ async listModels() {
1421
+ return [
1422
+ "glm-5",
1423
+ "glm-5-plus",
1424
+ "glm-4",
1425
+ "glm-4-plus",
1426
+ "glm-4-air",
1427
+ "glm-4-airx",
1428
+ "glm-4-flash",
1429
+ "glm-4-long",
1430
+ "glm-4v",
1431
+ "glm-4v-plus",
1432
+ "codegeex-4",
1433
+ "embedding-2",
1434
+ "embedding-3"
1435
+ ];
1436
+ }
1437
+ };
1438
+ });
1439
+
1440
+ // src/services/api/providers/config.ts
1441
+ function ensureModelProviderEnv() {
1442
+ if (!process.env.MODEL_PROVIDER) {
1443
+ process.env.MODEL_PROVIDER = DEFAULT_PROVIDER;
1444
+ }
1445
+ }
1446
+ function getProviderConfigFromEnv() {
1447
+ const providerType = process.env.MODEL_PROVIDER?.toLowerCase() || DEFAULT_PROVIDER;
1448
+ switch (providerType) {
1449
+ case "openai":
1450
+ case "openai-compat":
1451
+ case "openai_compat":
1452
+ return {
1453
+ provider: "openai-compat",
1454
+ apiKey: process.env.OPENAI_API_KEY,
1455
+ baseUrl: process.env.OPENAI_BASE_URL || "https://api.openai.com/v1",
1456
+ model: process.env.OPENAI_MODEL || "gpt-4o",
1457
+ timeout: parseInt(process.env.API_TIMEOUT_MS || "600000", 10),
1458
+ maxRetries: parseInt(process.env.API_MAX_RETRIES || "2", 10)
1459
+ };
1460
+ case "minimax":
1461
+ return {
1462
+ provider: "minimax",
1463
+ apiKey: process.env.MINIMAX_API_KEY,
1464
+ baseUrl: process.env.MINIMAX_BASE_URL || "https://api.minimaxi.com/v1",
1465
+ model: process.env.MINIMAX_MODEL || DEFAULT_MINIMAX_MODEL,
1466
+ groupId: process.env.MINIMAX_GROUP_ID,
1467
+ timeout: parseInt(process.env.API_TIMEOUT_MS || "600000", 10),
1468
+ maxRetries: parseInt(process.env.API_MAX_RETRIES || "2", 10)
1469
+ };
1470
+ case "glm":
1471
+ case "zhipu":
1472
+ return {
1473
+ provider: "glm",
1474
+ apiKey: process.env.GLM_API_KEY || process.env.ZHIPU_API_KEY,
1475
+ baseUrl: process.env.GLM_BASE_URL || "https://open.bigmodel.cn/api/paas/v4",
1476
+ model: process.env.GLM_MODEL || "glm-4-plus",
1477
+ timeout: parseInt(process.env.API_TIMEOUT_MS || "600000", 10),
1478
+ maxRetries: parseInt(process.env.API_MAX_RETRIES || "2", 10)
1479
+ };
1480
+ case "anthropic":
1481
+ default:
1482
+ return {
1483
+ provider: "anthropic",
1484
+ apiKey: process.env.ANTHROPIC_API_KEY,
1485
+ baseUrl: process.env.ANTHROPIC_BASE_URL,
1486
+ model: process.env.ANTHROPIC_MODEL || "claude-sonnet-4-20250514",
1487
+ timeout: parseInt(process.env.API_TIMEOUT_MS || "600000", 10),
1488
+ maxRetries: parseInt(process.env.API_MAX_RETRIES || "2", 10)
1489
+ };
1490
+ }
1491
+ }
1492
+ function parseProviderConfig(config) {
1493
+ const providerType = config.modelProvider?.toLowerCase() || "anthropic";
1494
+ switch (providerType) {
1495
+ case "openai":
1496
+ case "openai-compat":
1497
+ case "openai_compat": {
1498
+ const openaiConfig = config.openaiCompat || config.openai || {};
1499
+ return {
1500
+ provider: "openai-compat",
1501
+ apiKey: openaiConfig.apiKey,
1502
+ baseUrl: openaiConfig.baseUrl || "https://api.openai.com/v1",
1503
+ model: openaiConfig.model || "gpt-4o",
1504
+ timeout: openaiConfig.timeout,
1505
+ maxRetries: openaiConfig.maxRetries
1506
+ };
1507
+ }
1508
+ case "minimax": {
1509
+ const minimaxConfig = config.minimax || {};
1510
+ return {
1511
+ provider: "minimax",
1512
+ apiKey: minimaxConfig.apiKey,
1513
+ baseUrl: minimaxConfig.baseUrl || "https://api.minimaxi.com/v1",
1514
+ model: minimaxConfig.model || "MiniMax-M1",
1515
+ groupId: minimaxConfig.groupId,
1516
+ timeout: minimaxConfig.timeout,
1517
+ maxRetries: minimaxConfig.maxRetries
1518
+ };
1519
+ }
1520
+ case "glm":
1521
+ case "zhipu": {
1522
+ const glmConfig = config.glm || config.zhipu || {};
1523
+ return {
1524
+ provider: "glm",
1525
+ apiKey: glmConfig.apiKey,
1526
+ baseUrl: glmConfig.baseUrl || "https://open.bigmodel.cn/api/paas/v4",
1527
+ model: glmConfig.model || "glm-4-plus",
1528
+ timeout: glmConfig.timeout,
1529
+ maxRetries: glmConfig.maxRetries
1530
+ };
1531
+ }
1532
+ case "anthropic":
1533
+ default: {
1534
+ const anthropicConfig = config.anthropic || {};
1535
+ return {
1536
+ provider: "anthropic",
1537
+ apiKey: anthropicConfig.apiKey,
1538
+ baseUrl: anthropicConfig.baseUrl,
1539
+ model: anthropicConfig.model || "claude-sonnet-4-20250514",
1540
+ timeout: anthropicConfig.timeout,
1541
+ maxRetries: anthropicConfig.maxRetries
1542
+ };
1543
+ }
1544
+ }
1545
+ }
1546
/**
 * Registers the built-in provider adapter factories (anthropic, openai-compat,
 * minimax, glm) with the provider registry, exactly once per process.
 *
 * Each factory lazily loads its adapter module via the bundler's
 * `(init_X(), __toCommonJS(exports_X))` helpers, so a provider that is never
 * selected never pays its module-load cost.
 *
 * Idempotent: once `adaptersInitialized` is set, subsequent calls return
 * immediately.
 *
 * @returns {Promise<void>}
 */
async function initializeBuiltinAdapters() {
  if (adaptersInitialized) {
    return;
  }
  registerProviderFactory("anthropic", (config) => {
    // Lazy-load the adapter module only when this provider is first requested.
    const { AnthropicAdapter: AnthropicAdapter2 } = (init_AnthropicAdapter(), __toCommonJS(exports_AnthropicAdapter));
    return new AnthropicAdapter2({
      apiKey: config?.apiKey,
      baseUrl: config?.baseUrl,
      model: config?.model,
      timeout: config?.timeout,
      maxRetries: config?.maxRetries
    });
  });
  registerProviderFactory("openai-compat", (config) => {
    const { OpenAICompatAdapter: OpenAICompatAdapter2 } = (init_OpenAICompatAdapter(), __toCommonJS(exports_OpenAICompatAdapter));
    return new OpenAICompatAdapter2({
      apiKey: config?.apiKey,
      baseUrl: config?.baseUrl,
      model: config?.model,
      timeout: config?.timeout,
      maxRetries: config?.maxRetries
    });
  });
  registerProviderFactory("minimax", (config) => {
    const { MiniMaxAdapter: MiniMaxAdapter2 } = (init_MiniMaxAdapter(), __toCommonJS(exports_MiniMaxAdapter));
    // Note: the original aliased `config` to a second variable (a leftover
    // TypeScript cast); `groupId` is read directly off `config` instead.
    return new MiniMaxAdapter2({
      apiKey: config?.apiKey,
      baseUrl: config?.baseUrl,
      model: config?.model,
      groupId: config?.groupId,
      timeout: config?.timeout,
      maxRetries: config?.maxRetries
    });
  });
  registerProviderFactory("glm", (config) => {
    const { GLMAdapter: GLMAdapter2 } = (init_GLMAdapter(), __toCommonJS(exports_GLMAdapter));
    return new GLMAdapter2({
      apiKey: config?.apiKey,
      baseUrl: config?.baseUrl,
      model: config?.model,
      timeout: config?.timeout,
      maxRetries: config?.maxRetries
    });
  });
  adaptersInitialized = true;
}
1594
/**
 * Initializes the active LLM provider from environment variables.
 *
 * Ensures the built-in adapter factories are registered, resolves the provider
 * configuration from the environment, instantiates the adapter through the
 * registry, and marks it as the active provider.
 *
 * @returns {Promise<void>}
 */
async function initializeProviderFromEnv() {
  await initializeBuiltinAdapters();
  const envConfig = getProviderConfigFromEnv();
  const { provider } = envConfig;
  // `get` instantiates (and caches) the adapter for this provider/config pair.
  providerRegistry.get(provider, envConfig);
  setActiveProvider(provider);
}
1600
/**
 * Initializes the active LLM provider from an explicit configuration object.
 *
 * Ensures built-in adapters are registered, normalizes the raw config via
 * `parseProviderConfig`, instantiates the adapter through the registry, and
 * marks that provider as active.
 *
 * @param {object} config - Raw provider configuration to normalize.
 * @returns {Promise<void>}
 */
async function initializeProviderFromConfig(config) {
  await initializeBuiltinAdapters();
  const normalized = parseProviderConfig(config);
  const { provider } = normalized;
  // `get` instantiates (and caches) the adapter for this provider/config pair.
  providerRegistry.get(provider, normalized);
  setActiveProvider(provider);
}
1606
/**
 * Returns the name of the currently active provider, as tracked by the
 * provider registry.
 *
 * @returns {string} Active provider name (e.g. "anthropic", "minimax").
 */
function getCurrentProviderName() {
  const activeName = providerRegistry.getActiveProviderName();
  return activeName;
}
1609
/**
 * Whether the currently active provider is Anthropic.
 *
 * @returns {boolean} True when the active provider name is "anthropic".
 */
function isUsingAnthropic() {
  const active = getCurrentProviderName();
  return active === "anthropic";
}
1612
/**
 * Whether the currently active provider is anything other than Anthropic.
 *
 * @returns {boolean} True for any non-"anthropic" provider.
 */
function isUsingThirdPartyProvider() {
  return isUsingAnthropic() === false;
}
1615
// One-shot guard flipped by initializeBuiltinAdapters().
var adaptersInitialized = false;
// Fallback provider used when none is configured explicitly.
var DEFAULT_PROVIDER = "minimax";
// Fallback model for the MiniMax provider.
var DEFAULT_MINIMAX_MODEL = "MiniMax-M2.7";
1616
// Module initializer for this config chunk: wires up the registry chunk and
// seeds provider-related environment defaults. `__esm` guarantees the callback
// runs at most once; it is invoked immediately below so the side effects
// happen at module load.
var init_config = __esm(() => {
  init_registry();
  ensureModelProviderEnv();
});
init_config();
1621
+
1622
+ export {
1623
+ parseProviderConfig,
1624
+ isUsingThirdPartyProvider,
1625
+ isUsingAnthropic,
1626
+ initializeProviderFromEnv,
1627
+ initializeProviderFromConfig,
1628
+ initializeBuiltinAdapters,
1629
+ getProviderConfigFromEnv,
1630
+ getCurrentProviderName,
1631
+ getActiveProvider,
1632
+ ensureModelProviderEnv
1633
+ };
1634
+
1635
+ export { initializeProviderFromEnv, getCurrentProviderName, init_config };