@mastra/server 0.0.0-vector-sources-20250516175436 → 0.0.0-vector-extension-schema-20250922130418

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (333) hide show
  1. package/CHANGELOG.md +3745 -0
  2. package/LICENSE.md +11 -42
  3. package/README.md +0 -5
  4. package/dist/{chunk-H5PTF3Y4.js → chunk-4QCXUEAT.js} +11 -2
  5. package/dist/chunk-4QCXUEAT.js.map +1 -0
  6. package/dist/chunk-4RRMWXQ2.js +3522 -0
  7. package/dist/chunk-4RRMWXQ2.js.map +1 -0
  8. package/dist/chunk-57HWW2TY.cjs +587 -0
  9. package/dist/chunk-57HWW2TY.cjs.map +1 -0
  10. package/dist/chunk-5DP5XZH6.cjs +928 -0
  11. package/dist/chunk-5DP5XZH6.cjs.map +1 -0
  12. package/dist/chunk-66YYHFGF.js +761 -0
  13. package/dist/chunk-66YYHFGF.js.map +1 -0
  14. package/dist/chunk-6GMFZ5LK.js +2774 -0
  15. package/dist/chunk-6GMFZ5LK.js.map +1 -0
  16. package/dist/chunk-743UIDHI.cjs +2013 -0
  17. package/dist/chunk-743UIDHI.cjs.map +1 -0
  18. package/dist/chunk-7JYXPDM4.js +15712 -0
  19. package/dist/chunk-7JYXPDM4.js.map +1 -0
  20. package/dist/{chunk-OCWPVYNI.cjs → chunk-7NADHFD2.cjs} +3 -0
  21. package/dist/chunk-7NADHFD2.cjs.map +1 -0
  22. package/dist/chunk-7QEJ5QG5.js +151 -0
  23. package/dist/chunk-7QEJ5QG5.js.map +1 -0
  24. package/dist/chunk-A3AL7EWJ.js +83 -0
  25. package/dist/chunk-A3AL7EWJ.js.map +1 -0
  26. package/dist/chunk-AK2FXLLB.cjs +849 -0
  27. package/dist/chunk-AK2FXLLB.cjs.map +1 -0
  28. package/dist/{chunk-5SN4U5AC.cjs → chunk-AVEPEUN4.cjs} +115 -138
  29. package/dist/chunk-AVEPEUN4.cjs.map +1 -0
  30. package/dist/chunk-CNU4A2XU.js +129 -0
  31. package/dist/chunk-CNU4A2XU.js.map +1 -0
  32. package/dist/chunk-CY4TP3FK.js +16 -0
  33. package/dist/chunk-CY4TP3FK.js.map +1 -0
  34. package/dist/chunk-EMMSS5I5.cjs +37 -0
  35. package/dist/chunk-EMMSS5I5.cjs.map +1 -0
  36. package/dist/chunk-EMNGA4R4.js +845 -0
  37. package/dist/chunk-EMNGA4R4.js.map +1 -0
  38. package/dist/chunk-FALVL2VV.cjs +3525 -0
  39. package/dist/chunk-FALVL2VV.cjs.map +1 -0
  40. package/dist/chunk-FQNT7PI4.js +937 -0
  41. package/dist/chunk-FQNT7PI4.js.map +1 -0
  42. package/dist/chunk-G3PMV62Z.js +33 -0
  43. package/dist/chunk-G3PMV62Z.js.map +1 -0
  44. package/dist/chunk-G4PUALCE.cjs +28 -0
  45. package/dist/chunk-G4PUALCE.cjs.map +1 -0
  46. package/dist/chunk-G662L2YZ.js +568 -0
  47. package/dist/chunk-G662L2YZ.js.map +1 -0
  48. package/dist/chunk-GDWMF6SB.cjs +133 -0
  49. package/dist/chunk-GDWMF6SB.cjs.map +1 -0
  50. package/dist/chunk-GU4EWMZB.cjs +769 -0
  51. package/dist/chunk-GU4EWMZB.cjs.map +1 -0
  52. package/dist/chunk-GUI3CROV.cjs +159 -0
  53. package/dist/chunk-GUI3CROV.cjs.map +1 -0
  54. package/dist/chunk-HJQKWRKQ.cjs +764 -0
  55. package/dist/chunk-HJQKWRKQ.cjs.map +1 -0
  56. package/dist/{chunk-YWLUOY3D.cjs → chunk-HVBBFCDH.cjs} +1110 -793
  57. package/dist/chunk-HVBBFCDH.cjs.map +1 -0
  58. package/dist/chunk-HZJRQ5L3.cjs +1411 -0
  59. package/dist/chunk-HZJRQ5L3.cjs.map +1 -0
  60. package/dist/chunk-IGFMAZZ5.cjs +1150 -0
  61. package/dist/chunk-IGFMAZZ5.cjs.map +1 -0
  62. package/dist/chunk-ILESGJ6N.js +524 -0
  63. package/dist/chunk-ILESGJ6N.js.map +1 -0
  64. package/dist/chunk-IOQGI4ML.js +931 -0
  65. package/dist/chunk-IOQGI4ML.js.map +1 -0
  66. package/dist/chunk-J7BPKKOG.cjs +163 -0
  67. package/dist/chunk-J7BPKKOG.cjs.map +1 -0
  68. package/dist/{chunk-HFWCEP5S.js → chunk-JRDEOHAJ.js} +47 -14
  69. package/dist/chunk-JRDEOHAJ.js.map +1 -0
  70. package/dist/chunk-KNGXRN26.cjs +335 -0
  71. package/dist/chunk-KNGXRN26.cjs.map +1 -0
  72. package/dist/{chunk-OR3CIE2H.js → chunk-KV6VHX4V.js} +29 -7
  73. package/dist/chunk-KV6VHX4V.js.map +1 -0
  74. package/dist/chunk-L265APUD.cjs +69 -0
  75. package/dist/chunk-L265APUD.cjs.map +1 -0
  76. package/dist/chunk-LF2ZLOFP.js +767 -0
  77. package/dist/chunk-LF2ZLOFP.js.map +1 -0
  78. package/dist/chunk-LYPU75T6.js +1147 -0
  79. package/dist/chunk-LYPU75T6.js.map +1 -0
  80. package/dist/{chunk-NYN7KFXL.js → chunk-MMROOK5J.js} +3 -0
  81. package/dist/chunk-MMROOK5J.js.map +1 -0
  82. package/dist/chunk-N35YCWQ5.cjs +540 -0
  83. package/dist/chunk-N35YCWQ5.cjs.map +1 -0
  84. package/dist/{chunk-LIVAK2DM.js → chunk-N7F33WAD.js} +1083 -794
  85. package/dist/chunk-N7F33WAD.js.map +1 -0
  86. package/dist/chunk-NG5IVLEZ.js +1012 -0
  87. package/dist/chunk-NG5IVLEZ.js.map +1 -0
  88. package/dist/chunk-NLWACBE7.cjs +128 -0
  89. package/dist/chunk-NLWACBE7.cjs.map +1 -0
  90. package/dist/chunk-OGW6HHVI.js +1408 -0
  91. package/dist/chunk-OGW6HHVI.js.map +1 -0
  92. package/dist/chunk-OJQOYXHU.cjs +15748 -0
  93. package/dist/chunk-OJQOYXHU.cjs.map +1 -0
  94. package/dist/chunk-OZLRIVC4.cjs +588 -0
  95. package/dist/chunk-OZLRIVC4.cjs.map +1 -0
  96. package/dist/chunk-P7CIEIJ3.js +925 -0
  97. package/dist/chunk-P7CIEIJ3.js.map +1 -0
  98. package/dist/chunk-P7RBMCBE.cjs +934 -0
  99. package/dist/chunk-P7RBMCBE.cjs.map +1 -0
  100. package/dist/chunk-PPYGWINI.cjs +2777 -0
  101. package/dist/chunk-PPYGWINI.cjs.map +1 -0
  102. package/dist/{chunk-P6SCPDYW.js → chunk-PUYSH3IL.js} +114 -137
  103. package/dist/chunk-PUYSH3IL.js.map +1 -0
  104. package/dist/{chunk-MHKNLNAN.cjs → chunk-PWTXZZTR.cjs} +33 -10
  105. package/dist/chunk-PWTXZZTR.cjs.map +1 -0
  106. package/dist/chunk-R7NOGUZG.js +65 -0
  107. package/dist/chunk-R7NOGUZG.js.map +1 -0
  108. package/dist/chunk-RCHEPTZZ.js +2006 -0
  109. package/dist/chunk-RCHEPTZZ.js.map +1 -0
  110. package/dist/chunk-RE4RPXT2.cjs +18 -0
  111. package/dist/chunk-RE4RPXT2.cjs.map +1 -0
  112. package/dist/chunk-SIGXR3JT.cjs +1043 -0
  113. package/dist/chunk-SIGXR3JT.cjs.map +1 -0
  114. package/dist/chunk-SPLSYTYW.cjs +88 -0
  115. package/dist/chunk-SPLSYTYW.cjs.map +1 -0
  116. package/dist/chunk-SQY4T6EJ.js +571 -0
  117. package/dist/chunk-SQY4T6EJ.js.map +1 -0
  118. package/dist/{chunk-TJKLBTFB.js → chunk-SYRRSBGL.js} +51 -27
  119. package/dist/chunk-SYRRSBGL.js.map +1 -0
  120. package/dist/{chunk-BNEY4P4P.cjs → chunk-T3TIA3O6.cjs} +20 -18
  121. package/dist/chunk-T3TIA3O6.cjs.map +1 -0
  122. package/dist/{chunk-EJO45KYT.js → chunk-TTHEEIZ3.js} +53 -50
  123. package/dist/chunk-TTHEEIZ3.js.map +1 -0
  124. package/dist/chunk-TVSIG4JE.cjs +940 -0
  125. package/dist/chunk-TVSIG4JE.cjs.map +1 -0
  126. package/dist/{chunk-55DOQLP6.js → chunk-WHN4VX55.js} +5 -3
  127. package/dist/chunk-WHN4VX55.js.map +1 -0
  128. package/dist/dist-26HWEQY6.js +3 -0
  129. package/dist/dist-26HWEQY6.js.map +1 -0
  130. package/dist/dist-3A5DXB37.cjs +20 -0
  131. package/dist/dist-3A5DXB37.cjs.map +1 -0
  132. package/dist/dist-3SJKQJGY.cjs +16 -0
  133. package/dist/dist-3SJKQJGY.cjs.map +1 -0
  134. package/dist/dist-4ZQSPE5K.js +3 -0
  135. package/dist/dist-4ZQSPE5K.js.map +1 -0
  136. package/dist/dist-5W5QNRTD.js +3 -0
  137. package/dist/dist-5W5QNRTD.js.map +1 -0
  138. package/dist/dist-653SRMPL.js +3 -0
  139. package/dist/dist-653SRMPL.js.map +1 -0
  140. package/dist/dist-6U6EFC5C.cjs +16 -0
  141. package/dist/dist-6U6EFC5C.cjs.map +1 -0
  142. package/dist/dist-7IHNNYMF.cjs +16 -0
  143. package/dist/dist-7IHNNYMF.cjs.map +1 -0
  144. package/dist/dist-B5IPRF6W.js +3 -0
  145. package/dist/dist-B5IPRF6W.js.map +1 -0
  146. package/dist/dist-EOMYFT4Y.cjs +16 -0
  147. package/dist/dist-EOMYFT4Y.cjs.map +1 -0
  148. package/dist/dist-EZZMMMNT.cjs +16 -0
  149. package/dist/dist-EZZMMMNT.cjs.map +1 -0
  150. package/dist/dist-F2ET4MNO.cjs +16 -0
  151. package/dist/dist-F2ET4MNO.cjs.map +1 -0
  152. package/dist/dist-H64VX6DE.js +3 -0
  153. package/dist/dist-H64VX6DE.js.map +1 -0
  154. package/dist/dist-HY7RMLJQ.cjs +16 -0
  155. package/dist/dist-HY7RMLJQ.cjs.map +1 -0
  156. package/dist/dist-M6S4P3FJ.js +3 -0
  157. package/dist/dist-M6S4P3FJ.js.map +1 -0
  158. package/dist/dist-NR7QSCQT.js +3 -0
  159. package/dist/dist-NR7QSCQT.js.map +1 -0
  160. package/dist/dist-QLFMCMCX.js +3 -0
  161. package/dist/dist-QLFMCMCX.js.map +1 -0
  162. package/dist/dist-UY46BFRP.js +3 -0
  163. package/dist/dist-UY46BFRP.js.map +1 -0
  164. package/dist/dist-WCQDRTIV.cjs +16 -0
  165. package/dist/dist-WCQDRTIV.cjs.map +1 -0
  166. package/dist/dist-WKYB3LTJ.cjs +16 -0
  167. package/dist/dist-WKYB3LTJ.cjs.map +1 -0
  168. package/dist/index.cjs +6 -0
  169. package/dist/index.cjs.map +1 -0
  170. package/dist/index.d.ts +1 -0
  171. package/dist/index.d.ts.map +1 -0
  172. package/dist/index.js +5 -0
  173. package/dist/index.js.map +1 -0
  174. package/dist/server/a2a/protocol.d.ts +8 -0
  175. package/dist/server/a2a/protocol.d.ts.map +1 -0
  176. package/dist/server/a2a/store.cjs +25 -0
  177. package/dist/server/a2a/store.cjs.map +1 -0
  178. package/dist/server/a2a/store.d.ts +14 -0
  179. package/dist/server/a2a/store.d.ts.map +1 -0
  180. package/dist/server/a2a/store.js +23 -0
  181. package/dist/server/a2a/store.js.map +1 -0
  182. package/dist/server/a2a/tasks.d.ts +20 -0
  183. package/dist/server/a2a/tasks.d.ts.map +1 -0
  184. package/dist/server/handlers/a2a.cjs +13 -11
  185. package/dist/server/handlers/a2a.cjs.map +1 -0
  186. package/dist/server/handlers/a2a.d.ts +68 -6
  187. package/dist/server/handlers/a2a.d.ts.map +1 -0
  188. package/dist/server/handlers/a2a.js +3 -1
  189. package/dist/server/handlers/a2a.js.map +1 -0
  190. package/dist/server/handlers/agent-builder.cjs +68 -0
  191. package/dist/server/handlers/agent-builder.cjs.map +1 -0
  192. package/dist/server/handlers/agent-builder.d.ts +88 -0
  193. package/dist/server/handlers/agent-builder.d.ts.map +1 -0
  194. package/dist/server/handlers/agent-builder.js +3 -0
  195. package/dist/server/handlers/agent-builder.js.map +1 -0
  196. package/dist/server/handlers/agents.cjs +41 -7
  197. package/dist/server/handlers/agents.cjs.map +1 -0
  198. package/dist/server/handlers/agents.d.ts +139 -6
  199. package/dist/server/handlers/agents.d.ts.map +1 -0
  200. package/dist/server/handlers/agents.js +3 -1
  201. package/dist/server/handlers/agents.js.map +1 -0
  202. package/dist/server/handlers/error.cjs +4 -2
  203. package/dist/server/handlers/error.cjs.map +1 -0
  204. package/dist/server/handlers/error.d.ts +2 -1
  205. package/dist/server/handlers/error.d.ts.map +1 -0
  206. package/dist/server/handlers/error.js +3 -1
  207. package/dist/server/handlers/error.js.map +1 -0
  208. package/dist/server/handlers/legacyWorkflows.cjs +48 -0
  209. package/dist/server/handlers/legacyWorkflows.cjs.map +1 -0
  210. package/dist/server/handlers/legacyWorkflows.d.ts +59 -0
  211. package/dist/server/handlers/legacyWorkflows.d.ts.map +1 -0
  212. package/dist/server/handlers/legacyWorkflows.js +3 -0
  213. package/dist/server/handlers/legacyWorkflows.js.map +1 -0
  214. package/dist/server/handlers/logs.cjs +6 -4
  215. package/dist/server/handlers/logs.cjs.map +1 -0
  216. package/dist/server/handlers/logs.d.ts +34 -3
  217. package/dist/server/handlers/logs.d.ts.map +1 -0
  218. package/dist/server/handlers/logs.js +3 -1
  219. package/dist/server/handlers/logs.js.map +1 -0
  220. package/dist/server/handlers/memory.cjs +39 -9
  221. package/dist/server/handlers/memory.cjs.map +1 -0
  222. package/dist/server/handlers/memory.d.ts +118 -8
  223. package/dist/server/handlers/memory.d.ts.map +1 -0
  224. package/dist/server/handlers/memory.js +3 -1
  225. package/dist/server/handlers/memory.js.map +1 -0
  226. package/dist/server/handlers/observability.cjs +16 -0
  227. package/dist/server/handlers/observability.cjs.map +1 -0
  228. package/dist/server/handlers/observability.d.ts +23 -0
  229. package/dist/server/handlers/observability.d.ts.map +1 -0
  230. package/dist/server/handlers/observability.js +3 -0
  231. package/dist/server/handlers/observability.js.map +1 -0
  232. package/dist/server/handlers/scores.cjs +32 -0
  233. package/dist/server/handlers/scores.cjs.map +1 -0
  234. package/dist/server/handlers/scores.d.ts +49 -0
  235. package/dist/server/handlers/scores.d.ts.map +1 -0
  236. package/dist/server/handlers/scores.js +3 -0
  237. package/dist/server/handlers/scores.js.map +1 -0
  238. package/dist/server/handlers/telemetry.cjs +9 -3
  239. package/dist/server/handlers/telemetry.cjs.map +1 -0
  240. package/dist/server/handlers/telemetry.d.ts +33 -2
  241. package/dist/server/handlers/telemetry.d.ts.map +1 -0
  242. package/dist/server/handlers/telemetry.js +3 -1
  243. package/dist/server/handlers/telemetry.js.map +1 -0
  244. package/dist/server/handlers/tools.cjs +11 -5
  245. package/dist/server/handlers/tools.cjs.map +1 -0
  246. package/dist/server/handlers/tools.d.ts +25 -4
  247. package/dist/server/handlers/tools.d.ts.map +1 -0
  248. package/dist/server/handlers/tools.js +3 -1
  249. package/dist/server/handlers/tools.js.map +1 -0
  250. package/dist/server/handlers/utils.cjs +8 -2
  251. package/dist/server/handlers/utils.cjs.map +1 -0
  252. package/dist/server/handlers/utils.d.ts +8 -1
  253. package/dist/server/handlers/utils.d.ts.map +1 -0
  254. package/dist/server/handlers/utils.js +3 -1
  255. package/dist/server/handlers/utils.js.map +1 -0
  256. package/dist/server/handlers/vNextNetwork.cjs +220 -0
  257. package/dist/server/handlers/vNextNetwork.cjs.map +1 -0
  258. package/dist/server/handlers/vNextNetwork.d.ts +246 -0
  259. package/dist/server/handlers/vNextNetwork.d.ts.map +1 -0
  260. package/dist/server/handlers/vNextNetwork.js +213 -0
  261. package/dist/server/handlers/vNextNetwork.js.map +1 -0
  262. package/dist/server/handlers/vector.cjs +9 -7
  263. package/dist/server/handlers/vector.cjs.map +1 -0
  264. package/dist/server/handlers/vector.d.ts +51 -6
  265. package/dist/server/handlers/vector.d.ts.map +1 -0
  266. package/dist/server/handlers/vector.js +3 -1
  267. package/dist/server/handlers/vector.js.map +1 -0
  268. package/dist/server/handlers/voice.cjs +10 -4
  269. package/dist/server/handlers/voice.cjs.map +1 -0
  270. package/dist/server/handlers/voice.d.ts +41 -3
  271. package/dist/server/handlers/voice.d.ts.map +1 -0
  272. package/dist/server/handlers/voice.js +3 -1
  273. package/dist/server/handlers/voice.js.map +1 -0
  274. package/dist/server/handlers/workflows.cjs +43 -13
  275. package/dist/server/handlers/workflows.cjs.map +1 -0
  276. package/dist/server/handlers/workflows.d.ts +82 -10
  277. package/dist/server/handlers/workflows.d.ts.map +1 -0
  278. package/dist/server/handlers/workflows.js +3 -1
  279. package/dist/server/handlers/workflows.js.map +1 -0
  280. package/dist/server/handlers.cjs +44 -32
  281. package/dist/server/handlers.cjs.map +1 -0
  282. package/dist/server/handlers.d.ts +14 -11
  283. package/dist/server/handlers.d.ts.map +1 -0
  284. package/dist/server/handlers.js +15 -11
  285. package/dist/server/handlers.js.map +1 -0
  286. package/dist/server/http-exception.d.ts +87 -0
  287. package/dist/server/http-exception.d.ts.map +1 -0
  288. package/dist/server/types.d.ts +10 -0
  289. package/dist/server/types.d.ts.map +1 -0
  290. package/dist/server/utils.d.ts +44 -0
  291. package/dist/server/utils.d.ts.map +1 -0
  292. package/package.json +51 -21
  293. package/dist/_tsup-dts-rollup.d.cts +0 -816
  294. package/dist/_tsup-dts-rollup.d.ts +0 -816
  295. package/dist/chunk-57CJTIPW.cjs +0 -18
  296. package/dist/chunk-64U3UDTH.cjs +0 -13
  297. package/dist/chunk-75ZPJI57.cjs +0 -9
  298. package/dist/chunk-C7564HUT.js +0 -142
  299. package/dist/chunk-D4IRYCUI.cjs +0 -235
  300. package/dist/chunk-DJJIUEL2.js +0 -211
  301. package/dist/chunk-HWZVAG3H.js +0 -49
  302. package/dist/chunk-I2B73Y4I.cjs +0 -332
  303. package/dist/chunk-M5ABIP7D.js +0 -11
  304. package/dist/chunk-MIQYDLLM.js +0 -329
  305. package/dist/chunk-MLKGABMK.js +0 -7
  306. package/dist/chunk-OGCNNUHF.cjs +0 -54
  307. package/dist/chunk-UCTEMO2Q.cjs +0 -341
  308. package/dist/chunk-VPNDC2DI.cjs +0 -148
  309. package/dist/chunk-WUC6LSTW.js +0 -227
  310. package/dist/chunk-Y7UWRW5X.cjs +0 -221
  311. package/dist/chunk-YBVOQN4M.cjs +0 -94
  312. package/dist/chunk-ZE5AAC4I.cjs +0 -138
  313. package/dist/index.d.cts +0 -1
  314. package/dist/server/handlers/a2a.d.cts +0 -6
  315. package/dist/server/handlers/agents.d.cts +0 -6
  316. package/dist/server/handlers/error.d.cts +0 -1
  317. package/dist/server/handlers/logs.d.cts +0 -3
  318. package/dist/server/handlers/memory.d.cts +0 -8
  319. package/dist/server/handlers/network.cjs +0 -22
  320. package/dist/server/handlers/network.d.cts +0 -4
  321. package/dist/server/handlers/network.d.ts +0 -4
  322. package/dist/server/handlers/network.js +0 -1
  323. package/dist/server/handlers/telemetry.d.cts +0 -2
  324. package/dist/server/handlers/tools.d.cts +0 -4
  325. package/dist/server/handlers/utils.d.cts +0 -1
  326. package/dist/server/handlers/vNextWorkflows.cjs +0 -46
  327. package/dist/server/handlers/vNextWorkflows.d.cts +0 -10
  328. package/dist/server/handlers/vNextWorkflows.d.ts +0 -10
  329. package/dist/server/handlers/vNextWorkflows.js +0 -1
  330. package/dist/server/handlers/vector.d.cts +0 -6
  331. package/dist/server/handlers/voice.d.cts +0 -3
  332. package/dist/server/handlers/workflows.d.cts +0 -10
  333. package/dist/server/handlers.d.cts +0 -11
@@ -0,0 +1,2777 @@
1
+ 'use strict';
2
+
3
+ var chunkSIGXR3JT_cjs = require('./chunk-SIGXR3JT.cjs');
4
+ var zod = require('zod');
5
+
6
+ function convertToOpenAIChatMessages({
7
+ prompt,
8
+ useLegacyFunctionCalling = false,
9
+ systemMessageMode = "system"
10
+ }) {
11
+ const messages = [];
12
+ const warnings = [];
13
+ for (const { role, content } of prompt) {
14
+ switch (role) {
15
+ case "system": {
16
+ switch (systemMessageMode) {
17
+ case "system": {
18
+ messages.push({ role: "system", content });
19
+ break;
20
+ }
21
+ case "developer": {
22
+ messages.push({ role: "developer", content });
23
+ break;
24
+ }
25
+ case "remove": {
26
+ warnings.push({
27
+ type: "other",
28
+ message: "system messages are removed for this model"
29
+ });
30
+ break;
31
+ }
32
+ default: {
33
+ const _exhaustiveCheck = systemMessageMode;
34
+ throw new Error(
35
+ `Unsupported system message mode: ${_exhaustiveCheck}`
36
+ );
37
+ }
38
+ }
39
+ break;
40
+ }
41
+ case "user": {
42
+ if (content.length === 1 && content[0].type === "text") {
43
+ messages.push({ role: "user", content: content[0].text });
44
+ break;
45
+ }
46
+ messages.push({
47
+ role: "user",
48
+ content: content.map((part, index) => {
49
+ var _a, _b, _c, _d;
50
+ switch (part.type) {
51
+ case "text": {
52
+ return { type: "text", text: part.text };
53
+ }
54
+ case "image": {
55
+ return {
56
+ type: "image_url",
57
+ image_url: {
58
+ url: part.image instanceof URL ? part.image.toString() : `data:${(_a = part.mimeType) != null ? _a : "image/jpeg"};base64,${chunkSIGXR3JT_cjs.convertUint8ArrayToBase64(part.image)}`,
59
+ // OpenAI specific extension: image detail
60
+ detail: (_c = (_b = part.providerMetadata) == null ? void 0 : _b.openai) == null ? void 0 : _c.imageDetail
61
+ }
62
+ };
63
+ }
64
+ case "file": {
65
+ if (part.data instanceof URL) {
66
+ throw new chunkSIGXR3JT_cjs.UnsupportedFunctionalityError({
67
+ functionality: "'File content parts with URL data' functionality not supported."
68
+ });
69
+ }
70
+ switch (part.mimeType) {
71
+ case "audio/wav": {
72
+ return {
73
+ type: "input_audio",
74
+ input_audio: { data: part.data, format: "wav" }
75
+ };
76
+ }
77
+ case "audio/mp3":
78
+ case "audio/mpeg": {
79
+ return {
80
+ type: "input_audio",
81
+ input_audio: { data: part.data, format: "mp3" }
82
+ };
83
+ }
84
+ case "application/pdf": {
85
+ return {
86
+ type: "file",
87
+ file: {
88
+ filename: (_d = part.filename) != null ? _d : `part-${index}.pdf`,
89
+ file_data: `data:application/pdf;base64,${part.data}`
90
+ }
91
+ };
92
+ }
93
+ default: {
94
+ throw new chunkSIGXR3JT_cjs.UnsupportedFunctionalityError({
95
+ functionality: `File content part type ${part.mimeType} in user messages`
96
+ });
97
+ }
98
+ }
99
+ }
100
+ }
101
+ })
102
+ });
103
+ break;
104
+ }
105
+ case "assistant": {
106
+ let text = "";
107
+ const toolCalls = [];
108
+ for (const part of content) {
109
+ switch (part.type) {
110
+ case "text": {
111
+ text += part.text;
112
+ break;
113
+ }
114
+ case "tool-call": {
115
+ toolCalls.push({
116
+ id: part.toolCallId,
117
+ type: "function",
118
+ function: {
119
+ name: part.toolName,
120
+ arguments: JSON.stringify(part.args)
121
+ }
122
+ });
123
+ break;
124
+ }
125
+ }
126
+ }
127
+ if (useLegacyFunctionCalling) {
128
+ if (toolCalls.length > 1) {
129
+ throw new chunkSIGXR3JT_cjs.UnsupportedFunctionalityError({
130
+ functionality: "useLegacyFunctionCalling with multiple tool calls in one message"
131
+ });
132
+ }
133
+ messages.push({
134
+ role: "assistant",
135
+ content: text,
136
+ function_call: toolCalls.length > 0 ? toolCalls[0].function : void 0
137
+ });
138
+ } else {
139
+ messages.push({
140
+ role: "assistant",
141
+ content: text,
142
+ tool_calls: toolCalls.length > 0 ? toolCalls : void 0
143
+ });
144
+ }
145
+ break;
146
+ }
147
+ case "tool": {
148
+ for (const toolResponse of content) {
149
+ if (useLegacyFunctionCalling) {
150
+ messages.push({
151
+ role: "function",
152
+ name: toolResponse.toolName,
153
+ content: JSON.stringify(toolResponse.result)
154
+ });
155
+ } else {
156
+ messages.push({
157
+ role: "tool",
158
+ tool_call_id: toolResponse.toolCallId,
159
+ content: JSON.stringify(toolResponse.result)
160
+ });
161
+ }
162
+ }
163
+ break;
164
+ }
165
+ default: {
166
+ const _exhaustiveCheck = role;
167
+ throw new Error(`Unsupported role: ${_exhaustiveCheck}`);
168
+ }
169
+ }
170
+ }
171
+ return { messages, warnings };
172
+ }
173
+ function mapOpenAIChatLogProbsOutput(logprobs) {
174
+ var _a, _b;
175
+ return (_b = (_a = logprobs == null ? void 0 : logprobs.content) == null ? void 0 : _a.map(({ token, logprob, top_logprobs }) => ({
176
+ token,
177
+ logprob,
178
+ topLogprobs: top_logprobs ? top_logprobs.map(({ token: token2, logprob: logprob2 }) => ({
179
+ token: token2,
180
+ logprob: logprob2
181
+ })) : []
182
+ }))) != null ? _b : void 0;
183
+ }
184
+ function mapOpenAIFinishReason(finishReason) {
185
+ switch (finishReason) {
186
+ case "stop":
187
+ return "stop";
188
+ case "length":
189
+ return "length";
190
+ case "content_filter":
191
+ return "content-filter";
192
+ case "function_call":
193
+ case "tool_calls":
194
+ return "tool-calls";
195
+ default:
196
+ return "unknown";
197
+ }
198
+ }
199
+ var openaiErrorDataSchema = zod.z.object({
200
+ error: zod.z.object({
201
+ message: zod.z.string(),
202
+ // The additional information below is handled loosely to support
203
+ // OpenAI-compatible providers that have slightly different error
204
+ // responses:
205
+ type: zod.z.string().nullish(),
206
+ param: zod.z.any().nullish(),
207
+ code: zod.z.union([zod.z.string(), zod.z.number()]).nullish()
208
+ })
209
+ });
210
+ var openaiFailedResponseHandler = chunkSIGXR3JT_cjs.createJsonErrorResponseHandler({
211
+ errorSchema: openaiErrorDataSchema,
212
+ errorToMessage: (data) => data.error.message
213
+ });
214
+ function getResponseMetadata({
215
+ id,
216
+ model,
217
+ created
218
+ }) {
219
+ return {
220
+ id: id != null ? id : void 0,
221
+ modelId: model != null ? model : void 0,
222
+ timestamp: created != null ? new Date(created * 1e3) : void 0
223
+ };
224
+ }
225
+ function prepareTools({
226
+ mode,
227
+ useLegacyFunctionCalling = false,
228
+ structuredOutputs
229
+ }) {
230
+ var _a;
231
+ const tools = ((_a = mode.tools) == null ? void 0 : _a.length) ? mode.tools : void 0;
232
+ const toolWarnings = [];
233
+ if (tools == null) {
234
+ return { tools: void 0, tool_choice: void 0, toolWarnings };
235
+ }
236
+ const toolChoice = mode.toolChoice;
237
+ if (useLegacyFunctionCalling) {
238
+ const openaiFunctions = [];
239
+ for (const tool of tools) {
240
+ if (tool.type === "provider-defined") {
241
+ toolWarnings.push({ type: "unsupported-tool", tool });
242
+ } else {
243
+ openaiFunctions.push({
244
+ name: tool.name,
245
+ description: tool.description,
246
+ parameters: tool.parameters
247
+ });
248
+ }
249
+ }
250
+ if (toolChoice == null) {
251
+ return {
252
+ functions: openaiFunctions,
253
+ function_call: void 0,
254
+ toolWarnings
255
+ };
256
+ }
257
+ const type2 = toolChoice.type;
258
+ switch (type2) {
259
+ case "auto":
260
+ case "none":
261
+ case void 0:
262
+ return {
263
+ functions: openaiFunctions,
264
+ function_call: void 0,
265
+ toolWarnings
266
+ };
267
+ case "required":
268
+ throw new chunkSIGXR3JT_cjs.UnsupportedFunctionalityError({
269
+ functionality: "useLegacyFunctionCalling and toolChoice: required"
270
+ });
271
+ default:
272
+ return {
273
+ functions: openaiFunctions,
274
+ function_call: { name: toolChoice.toolName },
275
+ toolWarnings
276
+ };
277
+ }
278
+ }
279
+ const openaiTools2 = [];
280
+ for (const tool of tools) {
281
+ if (tool.type === "provider-defined") {
282
+ toolWarnings.push({ type: "unsupported-tool", tool });
283
+ } else {
284
+ openaiTools2.push({
285
+ type: "function",
286
+ function: {
287
+ name: tool.name,
288
+ description: tool.description,
289
+ parameters: tool.parameters,
290
+ strict: structuredOutputs ? true : void 0
291
+ }
292
+ });
293
+ }
294
+ }
295
+ if (toolChoice == null) {
296
+ return { tools: openaiTools2, tool_choice: void 0, toolWarnings };
297
+ }
298
+ const type = toolChoice.type;
299
+ switch (type) {
300
+ case "auto":
301
+ case "none":
302
+ case "required":
303
+ return { tools: openaiTools2, tool_choice: type, toolWarnings };
304
+ case "tool":
305
+ return {
306
+ tools: openaiTools2,
307
+ tool_choice: {
308
+ type: "function",
309
+ function: {
310
+ name: toolChoice.toolName
311
+ }
312
+ },
313
+ toolWarnings
314
+ };
315
+ default: {
316
+ const _exhaustiveCheck = type;
317
+ throw new chunkSIGXR3JT_cjs.UnsupportedFunctionalityError({
318
+ functionality: `Unsupported tool choice type: ${_exhaustiveCheck}`
319
+ });
320
+ }
321
+ }
322
+ }
323
+ var OpenAIChatLanguageModel = class {
324
+ constructor(modelId, settings, config) {
325
+ this.specificationVersion = "v1";
326
+ this.modelId = modelId;
327
+ this.settings = settings;
328
+ this.config = config;
329
+ }
330
+ get supportsStructuredOutputs() {
331
+ var _a;
332
+ return (_a = this.settings.structuredOutputs) != null ? _a : isReasoningModel(this.modelId);
333
+ }
334
+ get defaultObjectGenerationMode() {
335
+ if (isAudioModel(this.modelId)) {
336
+ return "tool";
337
+ }
338
+ return this.supportsStructuredOutputs ? "json" : "tool";
339
+ }
340
+ get provider() {
341
+ return this.config.provider;
342
+ }
343
+ get supportsImageUrls() {
344
+ return !this.settings.downloadImages;
345
+ }
346
+ getArgs({
347
+ mode,
348
+ prompt,
349
+ maxTokens,
350
+ temperature,
351
+ topP,
352
+ topK,
353
+ frequencyPenalty,
354
+ presencePenalty,
355
+ stopSequences,
356
+ responseFormat,
357
+ seed,
358
+ providerMetadata
359
+ }) {
360
+ var _a, _b, _c, _d, _e, _f, _g, _h;
361
+ const type = mode.type;
362
+ const warnings = [];
363
+ if (topK != null) {
364
+ warnings.push({
365
+ type: "unsupported-setting",
366
+ setting: "topK"
367
+ });
368
+ }
369
+ if ((responseFormat == null ? void 0 : responseFormat.type) === "json" && responseFormat.schema != null && !this.supportsStructuredOutputs) {
370
+ warnings.push({
371
+ type: "unsupported-setting",
372
+ setting: "responseFormat",
373
+ details: "JSON response format schema is only supported with structuredOutputs"
374
+ });
375
+ }
376
+ const useLegacyFunctionCalling = this.settings.useLegacyFunctionCalling;
377
+ if (useLegacyFunctionCalling && this.settings.parallelToolCalls === true) {
378
+ throw new chunkSIGXR3JT_cjs.UnsupportedFunctionalityError({
379
+ functionality: "useLegacyFunctionCalling with parallelToolCalls"
380
+ });
381
+ }
382
+ if (useLegacyFunctionCalling && this.supportsStructuredOutputs) {
383
+ throw new chunkSIGXR3JT_cjs.UnsupportedFunctionalityError({
384
+ functionality: "structuredOutputs with useLegacyFunctionCalling"
385
+ });
386
+ }
387
+ const { messages, warnings: messageWarnings } = convertToOpenAIChatMessages(
388
+ {
389
+ prompt,
390
+ useLegacyFunctionCalling,
391
+ systemMessageMode: getSystemMessageMode(this.modelId)
392
+ }
393
+ );
394
+ warnings.push(...messageWarnings);
395
+ const baseArgs = {
396
+ // model id:
397
+ model: this.modelId,
398
+ // model specific settings:
399
+ logit_bias: this.settings.logitBias,
400
+ logprobs: this.settings.logprobs === true || typeof this.settings.logprobs === "number" ? true : void 0,
401
+ top_logprobs: typeof this.settings.logprobs === "number" ? this.settings.logprobs : typeof this.settings.logprobs === "boolean" ? this.settings.logprobs ? 0 : void 0 : void 0,
402
+ user: this.settings.user,
403
+ parallel_tool_calls: this.settings.parallelToolCalls,
404
+ // standardized settings:
405
+ max_tokens: maxTokens,
406
+ temperature,
407
+ top_p: topP,
408
+ frequency_penalty: frequencyPenalty,
409
+ presence_penalty: presencePenalty,
410
+ response_format: (responseFormat == null ? void 0 : responseFormat.type) === "json" ? this.supportsStructuredOutputs && responseFormat.schema != null ? {
411
+ type: "json_schema",
412
+ json_schema: {
413
+ schema: responseFormat.schema,
414
+ strict: true,
415
+ name: (_a = responseFormat.name) != null ? _a : "response",
416
+ description: responseFormat.description
417
+ }
418
+ } : { type: "json_object" } : void 0,
419
+ stop: stopSequences,
420
+ seed,
421
+ // openai specific settings:
422
+ // TODO remove in next major version; we auto-map maxTokens now
423
+ max_completion_tokens: (_b = providerMetadata == null ? void 0 : providerMetadata.openai) == null ? void 0 : _b.maxCompletionTokens,
424
+ store: (_c = providerMetadata == null ? void 0 : providerMetadata.openai) == null ? void 0 : _c.store,
425
+ metadata: (_d = providerMetadata == null ? void 0 : providerMetadata.openai) == null ? void 0 : _d.metadata,
426
+ prediction: (_e = providerMetadata == null ? void 0 : providerMetadata.openai) == null ? void 0 : _e.prediction,
427
+ reasoning_effort: (_g = (_f = providerMetadata == null ? void 0 : providerMetadata.openai) == null ? void 0 : _f.reasoningEffort) != null ? _g : this.settings.reasoningEffort,
428
+ // messages:
429
+ messages
430
+ };
431
+ if (isReasoningModel(this.modelId)) {
432
+ if (baseArgs.temperature != null) {
433
+ baseArgs.temperature = void 0;
434
+ warnings.push({
435
+ type: "unsupported-setting",
436
+ setting: "temperature",
437
+ details: "temperature is not supported for reasoning models"
438
+ });
439
+ }
440
+ if (baseArgs.top_p != null) {
441
+ baseArgs.top_p = void 0;
442
+ warnings.push({
443
+ type: "unsupported-setting",
444
+ setting: "topP",
445
+ details: "topP is not supported for reasoning models"
446
+ });
447
+ }
448
+ if (baseArgs.frequency_penalty != null) {
449
+ baseArgs.frequency_penalty = void 0;
450
+ warnings.push({
451
+ type: "unsupported-setting",
452
+ setting: "frequencyPenalty",
453
+ details: "frequencyPenalty is not supported for reasoning models"
454
+ });
455
+ }
456
+ if (baseArgs.presence_penalty != null) {
457
+ baseArgs.presence_penalty = void 0;
458
+ warnings.push({
459
+ type: "unsupported-setting",
460
+ setting: "presencePenalty",
461
+ details: "presencePenalty is not supported for reasoning models"
462
+ });
463
+ }
464
+ if (baseArgs.logit_bias != null) {
465
+ baseArgs.logit_bias = void 0;
466
+ warnings.push({
467
+ type: "other",
468
+ message: "logitBias is not supported for reasoning models"
469
+ });
470
+ }
471
+ if (baseArgs.logprobs != null) {
472
+ baseArgs.logprobs = void 0;
473
+ warnings.push({
474
+ type: "other",
475
+ message: "logprobs is not supported for reasoning models"
476
+ });
477
+ }
478
+ if (baseArgs.top_logprobs != null) {
479
+ baseArgs.top_logprobs = void 0;
480
+ warnings.push({
481
+ type: "other",
482
+ message: "topLogprobs is not supported for reasoning models"
483
+ });
484
+ }
485
+ if (baseArgs.max_tokens != null) {
486
+ if (baseArgs.max_completion_tokens == null) {
487
+ baseArgs.max_completion_tokens = baseArgs.max_tokens;
488
+ }
489
+ baseArgs.max_tokens = void 0;
490
+ }
491
+ } else if (this.modelId.startsWith("gpt-4o-search-preview") || this.modelId.startsWith("gpt-4o-mini-search-preview")) {
492
+ if (baseArgs.temperature != null) {
493
+ baseArgs.temperature = void 0;
494
+ warnings.push({
495
+ type: "unsupported-setting",
496
+ setting: "temperature",
497
+ details: "temperature is not supported for the search preview models and has been removed."
498
+ });
499
+ }
500
+ }
501
+ switch (type) {
502
+ case "regular": {
503
+ const { tools, tool_choice, functions, function_call, toolWarnings } = prepareTools({
504
+ mode,
505
+ useLegacyFunctionCalling,
506
+ structuredOutputs: this.supportsStructuredOutputs
507
+ });
508
+ return {
509
+ args: {
510
+ ...baseArgs,
511
+ tools,
512
+ tool_choice,
513
+ functions,
514
+ function_call
515
+ },
516
+ warnings: [...warnings, ...toolWarnings]
517
+ };
518
+ }
519
+ case "object-json": {
520
+ return {
521
+ args: {
522
+ ...baseArgs,
523
+ response_format: this.supportsStructuredOutputs && mode.schema != null ? {
524
+ type: "json_schema",
525
+ json_schema: {
526
+ schema: mode.schema,
527
+ strict: true,
528
+ name: (_h = mode.name) != null ? _h : "response",
529
+ description: mode.description
530
+ }
531
+ } : { type: "json_object" }
532
+ },
533
+ warnings
534
+ };
535
+ }
536
+ case "object-tool": {
537
+ return {
538
+ args: useLegacyFunctionCalling ? {
539
+ ...baseArgs,
540
+ function_call: {
541
+ name: mode.tool.name
542
+ },
543
+ functions: [
544
+ {
545
+ name: mode.tool.name,
546
+ description: mode.tool.description,
547
+ parameters: mode.tool.parameters
548
+ }
549
+ ]
550
+ } : {
551
+ ...baseArgs,
552
+ tool_choice: {
553
+ type: "function",
554
+ function: { name: mode.tool.name }
555
+ },
556
+ tools: [
557
+ {
558
+ type: "function",
559
+ function: {
560
+ name: mode.tool.name,
561
+ description: mode.tool.description,
562
+ parameters: mode.tool.parameters,
563
+ strict: this.supportsStructuredOutputs ? true : void 0
564
+ }
565
+ }
566
+ ]
567
+ },
568
+ warnings
569
+ };
570
+ }
571
+ default: {
572
+ const _exhaustiveCheck = type;
573
+ throw new Error(`Unsupported type: ${_exhaustiveCheck}`);
574
+ }
575
+ }
576
+ }
577
+ async doGenerate(options) {
578
+ var _a, _b, _c, _d, _e, _f, _g, _h;
579
+ const { args: body, warnings } = this.getArgs(options);
580
+ const {
581
+ responseHeaders,
582
+ value: response,
583
+ rawValue: rawResponse
584
+ } = await chunkSIGXR3JT_cjs.postJsonToApi({
585
+ url: this.config.url({
586
+ path: "/chat/completions",
587
+ modelId: this.modelId
588
+ }),
589
+ headers: chunkSIGXR3JT_cjs.combineHeaders(this.config.headers(), options.headers),
590
+ body,
591
+ failedResponseHandler: openaiFailedResponseHandler,
592
+ successfulResponseHandler: chunkSIGXR3JT_cjs.createJsonResponseHandler(
593
+ openaiChatResponseSchema
594
+ ),
595
+ abortSignal: options.abortSignal,
596
+ fetch: this.config.fetch
597
+ });
598
+ const { messages: rawPrompt, ...rawSettings } = body;
599
+ const choice = response.choices[0];
600
+ const completionTokenDetails = (_a = response.usage) == null ? void 0 : _a.completion_tokens_details;
601
+ const promptTokenDetails = (_b = response.usage) == null ? void 0 : _b.prompt_tokens_details;
602
+ const providerMetadata = { openai: {} };
603
+ if ((completionTokenDetails == null ? void 0 : completionTokenDetails.reasoning_tokens) != null) {
604
+ providerMetadata.openai.reasoningTokens = completionTokenDetails == null ? void 0 : completionTokenDetails.reasoning_tokens;
605
+ }
606
+ if ((completionTokenDetails == null ? void 0 : completionTokenDetails.accepted_prediction_tokens) != null) {
607
+ providerMetadata.openai.acceptedPredictionTokens = completionTokenDetails == null ? void 0 : completionTokenDetails.accepted_prediction_tokens;
608
+ }
609
+ if ((completionTokenDetails == null ? void 0 : completionTokenDetails.rejected_prediction_tokens) != null) {
610
+ providerMetadata.openai.rejectedPredictionTokens = completionTokenDetails == null ? void 0 : completionTokenDetails.rejected_prediction_tokens;
611
+ }
612
+ if ((promptTokenDetails == null ? void 0 : promptTokenDetails.cached_tokens) != null) {
613
+ providerMetadata.openai.cachedPromptTokens = promptTokenDetails == null ? void 0 : promptTokenDetails.cached_tokens;
614
+ }
615
+ return {
616
+ text: (_c = choice.message.content) != null ? _c : void 0,
617
+ toolCalls: this.settings.useLegacyFunctionCalling && choice.message.function_call ? [
618
+ {
619
+ toolCallType: "function",
620
+ toolCallId: chunkSIGXR3JT_cjs.generateId(),
621
+ toolName: choice.message.function_call.name,
622
+ args: choice.message.function_call.arguments
623
+ }
624
+ ] : (_d = choice.message.tool_calls) == null ? void 0 : _d.map((toolCall) => {
625
+ var _a2;
626
+ return {
627
+ toolCallType: "function",
628
+ toolCallId: (_a2 = toolCall.id) != null ? _a2 : chunkSIGXR3JT_cjs.generateId(),
629
+ toolName: toolCall.function.name,
630
+ args: toolCall.function.arguments
631
+ };
632
+ }),
633
+ finishReason: mapOpenAIFinishReason(choice.finish_reason),
634
+ usage: {
635
+ promptTokens: (_f = (_e = response.usage) == null ? void 0 : _e.prompt_tokens) != null ? _f : NaN,
636
+ completionTokens: (_h = (_g = response.usage) == null ? void 0 : _g.completion_tokens) != null ? _h : NaN
637
+ },
638
+ rawCall: { rawPrompt, rawSettings },
639
+ rawResponse: { headers: responseHeaders, body: rawResponse },
640
+ request: { body: JSON.stringify(body) },
641
+ response: getResponseMetadata(response),
642
+ warnings,
643
+ logprobs: mapOpenAIChatLogProbsOutput(choice.logprobs),
644
+ providerMetadata
645
+ };
646
+ }
647
+ async doStream(options) {
648
+ if (this.settings.simulateStreaming) {
649
+ const result = await this.doGenerate(options);
650
+ const simulatedStream = new ReadableStream({
651
+ start(controller) {
652
+ controller.enqueue({ type: "response-metadata", ...result.response });
653
+ if (result.text) {
654
+ controller.enqueue({
655
+ type: "text-delta",
656
+ textDelta: result.text
657
+ });
658
+ }
659
+ if (result.toolCalls) {
660
+ for (const toolCall of result.toolCalls) {
661
+ controller.enqueue({
662
+ type: "tool-call-delta",
663
+ toolCallType: "function",
664
+ toolCallId: toolCall.toolCallId,
665
+ toolName: toolCall.toolName,
666
+ argsTextDelta: toolCall.args
667
+ });
668
+ controller.enqueue({
669
+ type: "tool-call",
670
+ ...toolCall
671
+ });
672
+ }
673
+ }
674
+ controller.enqueue({
675
+ type: "finish",
676
+ finishReason: result.finishReason,
677
+ usage: result.usage,
678
+ logprobs: result.logprobs,
679
+ providerMetadata: result.providerMetadata
680
+ });
681
+ controller.close();
682
+ }
683
+ });
684
+ return {
685
+ stream: simulatedStream,
686
+ rawCall: result.rawCall,
687
+ rawResponse: result.rawResponse,
688
+ warnings: result.warnings
689
+ };
690
+ }
691
+ const { args, warnings } = this.getArgs(options);
692
+ const body = {
693
+ ...args,
694
+ stream: true,
695
+ // only include stream_options when in strict compatibility mode:
696
+ stream_options: this.config.compatibility === "strict" ? { include_usage: true } : void 0
697
+ };
698
+ const { responseHeaders, value: response } = await chunkSIGXR3JT_cjs.postJsonToApi({
699
+ url: this.config.url({
700
+ path: "/chat/completions",
701
+ modelId: this.modelId
702
+ }),
703
+ headers: chunkSIGXR3JT_cjs.combineHeaders(this.config.headers(), options.headers),
704
+ body,
705
+ failedResponseHandler: openaiFailedResponseHandler,
706
+ successfulResponseHandler: chunkSIGXR3JT_cjs.createEventSourceResponseHandler(
707
+ openaiChatChunkSchema
708
+ ),
709
+ abortSignal: options.abortSignal,
710
+ fetch: this.config.fetch
711
+ });
712
+ const { messages: rawPrompt, ...rawSettings } = args;
713
+ const toolCalls = [];
714
+ let finishReason = "unknown";
715
+ let usage = {
716
+ promptTokens: void 0,
717
+ completionTokens: void 0
718
+ };
719
+ let logprobs;
720
+ let isFirstChunk = true;
721
+ const { useLegacyFunctionCalling } = this.settings;
722
+ const providerMetadata = { openai: {} };
723
+ return {
724
+ stream: response.pipeThrough(
725
+ new TransformStream({
726
+ transform(chunk, controller) {
727
+ var _a, _b, _c, _d, _e, _f, _g, _h, _i, _j, _k, _l;
728
+ if (!chunk.success) {
729
+ finishReason = "error";
730
+ controller.enqueue({ type: "error", error: chunk.error });
731
+ return;
732
+ }
733
+ const value = chunk.value;
734
+ if ("error" in value) {
735
+ finishReason = "error";
736
+ controller.enqueue({ type: "error", error: value.error });
737
+ return;
738
+ }
739
+ if (isFirstChunk) {
740
+ isFirstChunk = false;
741
+ controller.enqueue({
742
+ type: "response-metadata",
743
+ ...getResponseMetadata(value)
744
+ });
745
+ }
746
+ if (value.usage != null) {
747
+ const {
748
+ prompt_tokens,
749
+ completion_tokens,
750
+ prompt_tokens_details,
751
+ completion_tokens_details
752
+ } = value.usage;
753
+ usage = {
754
+ promptTokens: prompt_tokens != null ? prompt_tokens : void 0,
755
+ completionTokens: completion_tokens != null ? completion_tokens : void 0
756
+ };
757
+ if ((completion_tokens_details == null ? void 0 : completion_tokens_details.reasoning_tokens) != null) {
758
+ providerMetadata.openai.reasoningTokens = completion_tokens_details == null ? void 0 : completion_tokens_details.reasoning_tokens;
759
+ }
760
+ if ((completion_tokens_details == null ? void 0 : completion_tokens_details.accepted_prediction_tokens) != null) {
761
+ providerMetadata.openai.acceptedPredictionTokens = completion_tokens_details == null ? void 0 : completion_tokens_details.accepted_prediction_tokens;
762
+ }
763
+ if ((completion_tokens_details == null ? void 0 : completion_tokens_details.rejected_prediction_tokens) != null) {
764
+ providerMetadata.openai.rejectedPredictionTokens = completion_tokens_details == null ? void 0 : completion_tokens_details.rejected_prediction_tokens;
765
+ }
766
+ if ((prompt_tokens_details == null ? void 0 : prompt_tokens_details.cached_tokens) != null) {
767
+ providerMetadata.openai.cachedPromptTokens = prompt_tokens_details == null ? void 0 : prompt_tokens_details.cached_tokens;
768
+ }
769
+ }
770
+ const choice = value.choices[0];
771
+ if ((choice == null ? void 0 : choice.finish_reason) != null) {
772
+ finishReason = mapOpenAIFinishReason(choice.finish_reason);
773
+ }
774
+ if ((choice == null ? void 0 : choice.delta) == null) {
775
+ return;
776
+ }
777
+ const delta = choice.delta;
778
+ if (delta.content != null) {
779
+ controller.enqueue({
780
+ type: "text-delta",
781
+ textDelta: delta.content
782
+ });
783
+ }
784
+ const mappedLogprobs = mapOpenAIChatLogProbsOutput(
785
+ choice == null ? void 0 : choice.logprobs
786
+ );
787
+ if (mappedLogprobs == null ? void 0 : mappedLogprobs.length) {
788
+ if (logprobs === void 0) logprobs = [];
789
+ logprobs.push(...mappedLogprobs);
790
+ }
791
+ const mappedToolCalls = useLegacyFunctionCalling && delta.function_call != null ? [
792
+ {
793
+ type: "function",
794
+ id: chunkSIGXR3JT_cjs.generateId(),
795
+ function: delta.function_call,
796
+ index: 0
797
+ }
798
+ ] : delta.tool_calls;
799
+ if (mappedToolCalls != null) {
800
+ for (const toolCallDelta of mappedToolCalls) {
801
+ const index = toolCallDelta.index;
802
+ if (toolCalls[index] == null) {
803
+ if (toolCallDelta.type !== "function") {
804
+ throw new chunkSIGXR3JT_cjs.InvalidResponseDataError({
805
+ data: toolCallDelta,
806
+ message: `Expected 'function' type.`
807
+ });
808
+ }
809
+ if (toolCallDelta.id == null) {
810
+ throw new chunkSIGXR3JT_cjs.InvalidResponseDataError({
811
+ data: toolCallDelta,
812
+ message: `Expected 'id' to be a string.`
813
+ });
814
+ }
815
+ if (((_a = toolCallDelta.function) == null ? void 0 : _a.name) == null) {
816
+ throw new chunkSIGXR3JT_cjs.InvalidResponseDataError({
817
+ data: toolCallDelta,
818
+ message: `Expected 'function.name' to be a string.`
819
+ });
820
+ }
821
+ toolCalls[index] = {
822
+ id: toolCallDelta.id,
823
+ type: "function",
824
+ function: {
825
+ name: toolCallDelta.function.name,
826
+ arguments: (_b = toolCallDelta.function.arguments) != null ? _b : ""
827
+ },
828
+ hasFinished: false
829
+ };
830
+ const toolCall2 = toolCalls[index];
831
+ if (((_c = toolCall2.function) == null ? void 0 : _c.name) != null && ((_d = toolCall2.function) == null ? void 0 : _d.arguments) != null) {
832
+ if (toolCall2.function.arguments.length > 0) {
833
+ controller.enqueue({
834
+ type: "tool-call-delta",
835
+ toolCallType: "function",
836
+ toolCallId: toolCall2.id,
837
+ toolName: toolCall2.function.name,
838
+ argsTextDelta: toolCall2.function.arguments
839
+ });
840
+ }
841
+ if (chunkSIGXR3JT_cjs.isParsableJson(toolCall2.function.arguments)) {
842
+ controller.enqueue({
843
+ type: "tool-call",
844
+ toolCallType: "function",
845
+ toolCallId: (_e = toolCall2.id) != null ? _e : chunkSIGXR3JT_cjs.generateId(),
846
+ toolName: toolCall2.function.name,
847
+ args: toolCall2.function.arguments
848
+ });
849
+ toolCall2.hasFinished = true;
850
+ }
851
+ }
852
+ continue;
853
+ }
854
+ const toolCall = toolCalls[index];
855
+ if (toolCall.hasFinished) {
856
+ continue;
857
+ }
858
+ if (((_f = toolCallDelta.function) == null ? void 0 : _f.arguments) != null) {
859
+ toolCall.function.arguments += (_h = (_g = toolCallDelta.function) == null ? void 0 : _g.arguments) != null ? _h : "";
860
+ }
861
+ controller.enqueue({
862
+ type: "tool-call-delta",
863
+ toolCallType: "function",
864
+ toolCallId: toolCall.id,
865
+ toolName: toolCall.function.name,
866
+ argsTextDelta: (_i = toolCallDelta.function.arguments) != null ? _i : ""
867
+ });
868
+ if (((_j = toolCall.function) == null ? void 0 : _j.name) != null && ((_k = toolCall.function) == null ? void 0 : _k.arguments) != null && chunkSIGXR3JT_cjs.isParsableJson(toolCall.function.arguments)) {
869
+ controller.enqueue({
870
+ type: "tool-call",
871
+ toolCallType: "function",
872
+ toolCallId: (_l = toolCall.id) != null ? _l : chunkSIGXR3JT_cjs.generateId(),
873
+ toolName: toolCall.function.name,
874
+ args: toolCall.function.arguments
875
+ });
876
+ toolCall.hasFinished = true;
877
+ }
878
+ }
879
+ }
880
+ },
881
+ flush(controller) {
882
+ var _a, _b;
883
+ controller.enqueue({
884
+ type: "finish",
885
+ finishReason,
886
+ logprobs,
887
+ usage: {
888
+ promptTokens: (_a = usage.promptTokens) != null ? _a : NaN,
889
+ completionTokens: (_b = usage.completionTokens) != null ? _b : NaN
890
+ },
891
+ ...providerMetadata != null ? { providerMetadata } : {}
892
+ });
893
+ }
894
+ })
895
+ ),
896
+ rawCall: { rawPrompt, rawSettings },
897
+ rawResponse: { headers: responseHeaders },
898
+ request: { body: JSON.stringify(body) },
899
+ warnings
900
+ };
901
+ }
902
+ };
903
+ var openaiTokenUsageSchema = zod.z.object({
904
+ prompt_tokens: zod.z.number().nullish(),
905
+ completion_tokens: zod.z.number().nullish(),
906
+ prompt_tokens_details: zod.z.object({
907
+ cached_tokens: zod.z.number().nullish()
908
+ }).nullish(),
909
+ completion_tokens_details: zod.z.object({
910
+ reasoning_tokens: zod.z.number().nullish(),
911
+ accepted_prediction_tokens: zod.z.number().nullish(),
912
+ rejected_prediction_tokens: zod.z.number().nullish()
913
+ }).nullish()
914
+ }).nullish();
915
+ var openaiChatResponseSchema = zod.z.object({
916
+ id: zod.z.string().nullish(),
917
+ created: zod.z.number().nullish(),
918
+ model: zod.z.string().nullish(),
919
+ choices: zod.z.array(
920
+ zod.z.object({
921
+ message: zod.z.object({
922
+ role: zod.z.literal("assistant").nullish(),
923
+ content: zod.z.string().nullish(),
924
+ function_call: zod.z.object({
925
+ arguments: zod.z.string(),
926
+ name: zod.z.string()
927
+ }).nullish(),
928
+ tool_calls: zod.z.array(
929
+ zod.z.object({
930
+ id: zod.z.string().nullish(),
931
+ type: zod.z.literal("function"),
932
+ function: zod.z.object({
933
+ name: zod.z.string(),
934
+ arguments: zod.z.string()
935
+ })
936
+ })
937
+ ).nullish()
938
+ }),
939
+ index: zod.z.number(),
940
+ logprobs: zod.z.object({
941
+ content: zod.z.array(
942
+ zod.z.object({
943
+ token: zod.z.string(),
944
+ logprob: zod.z.number(),
945
+ top_logprobs: zod.z.array(
946
+ zod.z.object({
947
+ token: zod.z.string(),
948
+ logprob: zod.z.number()
949
+ })
950
+ )
951
+ })
952
+ ).nullable()
953
+ }).nullish(),
954
+ finish_reason: zod.z.string().nullish()
955
+ })
956
+ ),
957
+ usage: openaiTokenUsageSchema
958
+ });
959
+ var openaiChatChunkSchema = zod.z.union([
960
+ zod.z.object({
961
+ id: zod.z.string().nullish(),
962
+ created: zod.z.number().nullish(),
963
+ model: zod.z.string().nullish(),
964
+ choices: zod.z.array(
965
+ zod.z.object({
966
+ delta: zod.z.object({
967
+ role: zod.z.enum(["assistant"]).nullish(),
968
+ content: zod.z.string().nullish(),
969
+ function_call: zod.z.object({
970
+ name: zod.z.string().optional(),
971
+ arguments: zod.z.string().optional()
972
+ }).nullish(),
973
+ tool_calls: zod.z.array(
974
+ zod.z.object({
975
+ index: zod.z.number(),
976
+ id: zod.z.string().nullish(),
977
+ type: zod.z.literal("function").nullish(),
978
+ function: zod.z.object({
979
+ name: zod.z.string().nullish(),
980
+ arguments: zod.z.string().nullish()
981
+ })
982
+ })
983
+ ).nullish()
984
+ }).nullish(),
985
+ logprobs: zod.z.object({
986
+ content: zod.z.array(
987
+ zod.z.object({
988
+ token: zod.z.string(),
989
+ logprob: zod.z.number(),
990
+ top_logprobs: zod.z.array(
991
+ zod.z.object({
992
+ token: zod.z.string(),
993
+ logprob: zod.z.number()
994
+ })
995
+ )
996
+ })
997
+ ).nullable()
998
+ }).nullish(),
999
+ finish_reason: zod.z.string().nullish(),
1000
+ index: zod.z.number()
1001
+ })
1002
+ ),
1003
+ usage: openaiTokenUsageSchema
1004
+ }),
1005
+ openaiErrorDataSchema
1006
+ ]);
1007
+ function isReasoningModel(modelId) {
1008
+ return modelId.startsWith("o") || modelId.startsWith("gpt-5");
1009
+ }
1010
+ function isAudioModel(modelId) {
1011
+ return modelId.startsWith("gpt-4o-audio-preview");
1012
+ }
1013
+ function getSystemMessageMode(modelId) {
1014
+ var _a, _b;
1015
+ if (!isReasoningModel(modelId)) {
1016
+ return "system";
1017
+ }
1018
+ return (_b = (_a = reasoningModels[modelId]) == null ? void 0 : _a.systemMessageMode) != null ? _b : "developer";
1019
+ }
1020
+ var reasoningModels = {
1021
+ "o1-mini": {
1022
+ systemMessageMode: "remove"
1023
+ },
1024
+ "o1-mini-2024-09-12": {
1025
+ systemMessageMode: "remove"
1026
+ },
1027
+ "o1-preview": {
1028
+ systemMessageMode: "remove"
1029
+ },
1030
+ "o1-preview-2024-09-12": {
1031
+ systemMessageMode: "remove"
1032
+ },
1033
+ o3: {
1034
+ systemMessageMode: "developer"
1035
+ },
1036
+ "o3-2025-04-16": {
1037
+ systemMessageMode: "developer"
1038
+ },
1039
+ "o3-mini": {
1040
+ systemMessageMode: "developer"
1041
+ },
1042
+ "o3-mini-2025-01-31": {
1043
+ systemMessageMode: "developer"
1044
+ },
1045
+ "o4-mini": {
1046
+ systemMessageMode: "developer"
1047
+ },
1048
+ "o4-mini-2025-04-16": {
1049
+ systemMessageMode: "developer"
1050
+ }
1051
+ };
1052
+ function convertToOpenAICompletionPrompt({
1053
+ prompt,
1054
+ inputFormat,
1055
+ user = "user",
1056
+ assistant = "assistant"
1057
+ }) {
1058
+ if (inputFormat === "prompt" && prompt.length === 1 && prompt[0].role === "user" && prompt[0].content.length === 1 && prompt[0].content[0].type === "text") {
1059
+ return { prompt: prompt[0].content[0].text };
1060
+ }
1061
+ let text = "";
1062
+ if (prompt[0].role === "system") {
1063
+ text += `${prompt[0].content}
1064
+
1065
+ `;
1066
+ prompt = prompt.slice(1);
1067
+ }
1068
+ for (const { role, content } of prompt) {
1069
+ switch (role) {
1070
+ case "system": {
1071
+ throw new chunkSIGXR3JT_cjs.InvalidPromptError({
1072
+ message: "Unexpected system message in prompt: ${content}",
1073
+ prompt
1074
+ });
1075
+ }
1076
+ case "user": {
1077
+ const userMessage = content.map((part) => {
1078
+ switch (part.type) {
1079
+ case "text": {
1080
+ return part.text;
1081
+ }
1082
+ case "image": {
1083
+ throw new chunkSIGXR3JT_cjs.UnsupportedFunctionalityError({
1084
+ functionality: "images"
1085
+ });
1086
+ }
1087
+ }
1088
+ }).join("");
1089
+ text += `${user}:
1090
+ ${userMessage}
1091
+
1092
+ `;
1093
+ break;
1094
+ }
1095
+ case "assistant": {
1096
+ const assistantMessage = content.map((part) => {
1097
+ switch (part.type) {
1098
+ case "text": {
1099
+ return part.text;
1100
+ }
1101
+ case "tool-call": {
1102
+ throw new chunkSIGXR3JT_cjs.UnsupportedFunctionalityError({
1103
+ functionality: "tool-call messages"
1104
+ });
1105
+ }
1106
+ }
1107
+ }).join("");
1108
+ text += `${assistant}:
1109
+ ${assistantMessage}
1110
+
1111
+ `;
1112
+ break;
1113
+ }
1114
+ case "tool": {
1115
+ throw new chunkSIGXR3JT_cjs.UnsupportedFunctionalityError({
1116
+ functionality: "tool messages"
1117
+ });
1118
+ }
1119
+ default: {
1120
+ const _exhaustiveCheck = role;
1121
+ throw new Error(`Unsupported role: ${_exhaustiveCheck}`);
1122
+ }
1123
+ }
1124
+ }
1125
+ text += `${assistant}:
1126
+ `;
1127
+ return {
1128
+ prompt: text,
1129
+ stopSequences: [`
1130
+ ${user}:`]
1131
+ };
1132
+ }
1133
+ function mapOpenAICompletionLogProbs(logprobs) {
1134
+ return logprobs == null ? void 0 : logprobs.tokens.map((token, index) => ({
1135
+ token,
1136
+ logprob: logprobs.token_logprobs[index],
1137
+ topLogprobs: logprobs.top_logprobs ? Object.entries(logprobs.top_logprobs[index]).map(
1138
+ ([token2, logprob]) => ({
1139
+ token: token2,
1140
+ logprob
1141
+ })
1142
+ ) : []
1143
+ }));
1144
+ }
1145
+ var OpenAICompletionLanguageModel = class {
1146
+ constructor(modelId, settings, config) {
1147
+ this.specificationVersion = "v1";
1148
+ this.defaultObjectGenerationMode = void 0;
1149
+ this.modelId = modelId;
1150
+ this.settings = settings;
1151
+ this.config = config;
1152
+ }
1153
+ get provider() {
1154
+ return this.config.provider;
1155
+ }
1156
+ getArgs({
1157
+ mode,
1158
+ inputFormat,
1159
+ prompt,
1160
+ maxTokens,
1161
+ temperature,
1162
+ topP,
1163
+ topK,
1164
+ frequencyPenalty,
1165
+ presencePenalty,
1166
+ stopSequences: userStopSequences,
1167
+ responseFormat,
1168
+ seed
1169
+ }) {
1170
+ var _a;
1171
+ const type = mode.type;
1172
+ const warnings = [];
1173
+ if (topK != null) {
1174
+ warnings.push({
1175
+ type: "unsupported-setting",
1176
+ setting: "topK"
1177
+ });
1178
+ }
1179
+ if (responseFormat != null && responseFormat.type !== "text") {
1180
+ warnings.push({
1181
+ type: "unsupported-setting",
1182
+ setting: "responseFormat",
1183
+ details: "JSON response format is not supported."
1184
+ });
1185
+ }
1186
+ const { prompt: completionPrompt, stopSequences } = convertToOpenAICompletionPrompt({ prompt, inputFormat });
1187
+ const stop = [...stopSequences != null ? stopSequences : [], ...userStopSequences != null ? userStopSequences : []];
1188
+ const baseArgs = {
1189
+ // model id:
1190
+ model: this.modelId,
1191
+ // model specific settings:
1192
+ echo: this.settings.echo,
1193
+ logit_bias: this.settings.logitBias,
1194
+ logprobs: typeof this.settings.logprobs === "number" ? this.settings.logprobs : typeof this.settings.logprobs === "boolean" ? this.settings.logprobs ? 0 : void 0 : void 0,
1195
+ suffix: this.settings.suffix,
1196
+ user: this.settings.user,
1197
+ // standardized settings:
1198
+ max_tokens: maxTokens,
1199
+ temperature,
1200
+ top_p: topP,
1201
+ frequency_penalty: frequencyPenalty,
1202
+ presence_penalty: presencePenalty,
1203
+ seed,
1204
+ // prompt:
1205
+ prompt: completionPrompt,
1206
+ // stop sequences:
1207
+ stop: stop.length > 0 ? stop : void 0
1208
+ };
1209
+ switch (type) {
1210
+ case "regular": {
1211
+ if ((_a = mode.tools) == null ? void 0 : _a.length) {
1212
+ throw new chunkSIGXR3JT_cjs.UnsupportedFunctionalityError({
1213
+ functionality: "tools"
1214
+ });
1215
+ }
1216
+ if (mode.toolChoice) {
1217
+ throw new chunkSIGXR3JT_cjs.UnsupportedFunctionalityError({
1218
+ functionality: "toolChoice"
1219
+ });
1220
+ }
1221
+ return { args: baseArgs, warnings };
1222
+ }
1223
+ case "object-json": {
1224
+ throw new chunkSIGXR3JT_cjs.UnsupportedFunctionalityError({
1225
+ functionality: "object-json mode"
1226
+ });
1227
+ }
1228
+ case "object-tool": {
1229
+ throw new chunkSIGXR3JT_cjs.UnsupportedFunctionalityError({
1230
+ functionality: "object-tool mode"
1231
+ });
1232
+ }
1233
+ default: {
1234
+ const _exhaustiveCheck = type;
1235
+ throw new Error(`Unsupported type: ${_exhaustiveCheck}`);
1236
+ }
1237
+ }
1238
+ }
1239
+ async doGenerate(options) {
1240
+ const { args, warnings } = this.getArgs(options);
1241
+ const {
1242
+ responseHeaders,
1243
+ value: response,
1244
+ rawValue: rawResponse
1245
+ } = await chunkSIGXR3JT_cjs.postJsonToApi({
1246
+ url: this.config.url({
1247
+ path: "/completions",
1248
+ modelId: this.modelId
1249
+ }),
1250
+ headers: chunkSIGXR3JT_cjs.combineHeaders(this.config.headers(), options.headers),
1251
+ body: args,
1252
+ failedResponseHandler: openaiFailedResponseHandler,
1253
+ successfulResponseHandler: chunkSIGXR3JT_cjs.createJsonResponseHandler(
1254
+ openaiCompletionResponseSchema
1255
+ ),
1256
+ abortSignal: options.abortSignal,
1257
+ fetch: this.config.fetch
1258
+ });
1259
+ const { prompt: rawPrompt, ...rawSettings } = args;
1260
+ const choice = response.choices[0];
1261
+ return {
1262
+ text: choice.text,
1263
+ usage: {
1264
+ promptTokens: response.usage.prompt_tokens,
1265
+ completionTokens: response.usage.completion_tokens
1266
+ },
1267
+ finishReason: mapOpenAIFinishReason(choice.finish_reason),
1268
+ logprobs: mapOpenAICompletionLogProbs(choice.logprobs),
1269
+ rawCall: { rawPrompt, rawSettings },
1270
+ rawResponse: { headers: responseHeaders, body: rawResponse },
1271
+ response: getResponseMetadata(response),
1272
+ warnings,
1273
+ request: { body: JSON.stringify(args) }
1274
+ };
1275
+ }
1276
+ async doStream(options) {
1277
+ const { args, warnings } = this.getArgs(options);
1278
+ const body = {
1279
+ ...args,
1280
+ stream: true,
1281
+ // only include stream_options when in strict compatibility mode:
1282
+ stream_options: this.config.compatibility === "strict" ? { include_usage: true } : void 0
1283
+ };
1284
+ const { responseHeaders, value: response } = await chunkSIGXR3JT_cjs.postJsonToApi({
1285
+ url: this.config.url({
1286
+ path: "/completions",
1287
+ modelId: this.modelId
1288
+ }),
1289
+ headers: chunkSIGXR3JT_cjs.combineHeaders(this.config.headers(), options.headers),
1290
+ body,
1291
+ failedResponseHandler: openaiFailedResponseHandler,
1292
+ successfulResponseHandler: chunkSIGXR3JT_cjs.createEventSourceResponseHandler(
1293
+ openaiCompletionChunkSchema
1294
+ ),
1295
+ abortSignal: options.abortSignal,
1296
+ fetch: this.config.fetch
1297
+ });
1298
+ const { prompt: rawPrompt, ...rawSettings } = args;
1299
+ let finishReason = "unknown";
1300
+ let usage = {
1301
+ promptTokens: Number.NaN,
1302
+ completionTokens: Number.NaN
1303
+ };
1304
+ let logprobs;
1305
+ let isFirstChunk = true;
1306
+ return {
1307
+ stream: response.pipeThrough(
1308
+ new TransformStream({
1309
+ transform(chunk, controller) {
1310
+ if (!chunk.success) {
1311
+ finishReason = "error";
1312
+ controller.enqueue({ type: "error", error: chunk.error });
1313
+ return;
1314
+ }
1315
+ const value = chunk.value;
1316
+ if ("error" in value) {
1317
+ finishReason = "error";
1318
+ controller.enqueue({ type: "error", error: value.error });
1319
+ return;
1320
+ }
1321
+ if (isFirstChunk) {
1322
+ isFirstChunk = false;
1323
+ controller.enqueue({
1324
+ type: "response-metadata",
1325
+ ...getResponseMetadata(value)
1326
+ });
1327
+ }
1328
+ if (value.usage != null) {
1329
+ usage = {
1330
+ promptTokens: value.usage.prompt_tokens,
1331
+ completionTokens: value.usage.completion_tokens
1332
+ };
1333
+ }
1334
+ const choice = value.choices[0];
1335
+ if ((choice == null ? void 0 : choice.finish_reason) != null) {
1336
+ finishReason = mapOpenAIFinishReason(choice.finish_reason);
1337
+ }
1338
+ if ((choice == null ? void 0 : choice.text) != null) {
1339
+ controller.enqueue({
1340
+ type: "text-delta",
1341
+ textDelta: choice.text
1342
+ });
1343
+ }
1344
+ const mappedLogprobs = mapOpenAICompletionLogProbs(
1345
+ choice == null ? void 0 : choice.logprobs
1346
+ );
1347
+ if (mappedLogprobs == null ? void 0 : mappedLogprobs.length) {
1348
+ if (logprobs === void 0) logprobs = [];
1349
+ logprobs.push(...mappedLogprobs);
1350
+ }
1351
+ },
1352
+ flush(controller) {
1353
+ controller.enqueue({
1354
+ type: "finish",
1355
+ finishReason,
1356
+ logprobs,
1357
+ usage
1358
+ });
1359
+ }
1360
+ })
1361
+ ),
1362
+ rawCall: { rawPrompt, rawSettings },
1363
+ rawResponse: { headers: responseHeaders },
1364
+ warnings,
1365
+ request: { body: JSON.stringify(body) }
1366
+ };
1367
+ }
1368
+ };
1369
+ var openaiCompletionResponseSchema = zod.z.object({
1370
+ id: zod.z.string().nullish(),
1371
+ created: zod.z.number().nullish(),
1372
+ model: zod.z.string().nullish(),
1373
+ choices: zod.z.array(
1374
+ zod.z.object({
1375
+ text: zod.z.string(),
1376
+ finish_reason: zod.z.string(),
1377
+ logprobs: zod.z.object({
1378
+ tokens: zod.z.array(zod.z.string()),
1379
+ token_logprobs: zod.z.array(zod.z.number()),
1380
+ top_logprobs: zod.z.array(zod.z.record(zod.z.string(), zod.z.number())).nullable()
1381
+ }).nullish()
1382
+ })
1383
+ ),
1384
+ usage: zod.z.object({
1385
+ prompt_tokens: zod.z.number(),
1386
+ completion_tokens: zod.z.number()
1387
+ })
1388
+ });
1389
+ var openaiCompletionChunkSchema = zod.z.union([
1390
+ zod.z.object({
1391
+ id: zod.z.string().nullish(),
1392
+ created: zod.z.number().nullish(),
1393
+ model: zod.z.string().nullish(),
1394
+ choices: zod.z.array(
1395
+ zod.z.object({
1396
+ text: zod.z.string(),
1397
+ finish_reason: zod.z.string().nullish(),
1398
+ index: zod.z.number(),
1399
+ logprobs: zod.z.object({
1400
+ tokens: zod.z.array(zod.z.string()),
1401
+ token_logprobs: zod.z.array(zod.z.number()),
1402
+ top_logprobs: zod.z.array(zod.z.record(zod.z.string(), zod.z.number())).nullable()
1403
+ }).nullish()
1404
+ })
1405
+ ),
1406
+ usage: zod.z.object({
1407
+ prompt_tokens: zod.z.number(),
1408
+ completion_tokens: zod.z.number()
1409
+ }).nullish()
1410
+ }),
1411
+ openaiErrorDataSchema
1412
+ ]);
1413
// Embedding model for the OpenAI `/embeddings` endpoint (AI SDK v1 spec).
// Batches up to `maxEmbeddingsPerCall` input values into a single request.
var OpenAIEmbeddingModel = class {
  constructor(modelId, settings, config) {
    this.specificationVersion = "v1";
    this.modelId = modelId;
    this.settings = settings;
    this.config = config;
  }
  get provider() {
    return this.config.provider;
  }
  // Defaults to 2048, OpenAI's documented per-request input limit.
  get maxEmbeddingsPerCall() {
    var _a;
    return (_a = this.settings.maxEmbeddingsPerCall) != null ? _a : 2048;
  }
  // Parallel batching is allowed unless the caller opts out.
  get supportsParallelCalls() {
    var _a;
    return (_a = this.settings.supportsParallelCalls) != null ? _a : true;
  }
  /**
   * Embeds `values` in one POST to `/embeddings`.
   * Throws TooManyEmbeddingValuesForCallError when the batch exceeds the
   * per-call limit; returns embeddings, token usage (when reported), and
   * the raw response headers.
   */
  async doEmbed({
    values,
    headers,
    abortSignal
  }) {
    if (values.length > this.maxEmbeddingsPerCall) {
      throw new chunkSIGXR3JT_cjs.TooManyEmbeddingValuesForCallError({
        provider: this.provider,
        modelId: this.modelId,
        maxEmbeddingsPerCall: this.maxEmbeddingsPerCall,
        values
      });
    }
    const { responseHeaders, value: response } = await chunkSIGXR3JT_cjs.postJsonToApi({
      url: this.config.url({
        path: "/embeddings",
        modelId: this.modelId
      }),
      headers: chunkSIGXR3JT_cjs.combineHeaders(this.config.headers(), headers),
      body: {
        model: this.modelId,
        input: values,
        // Always request float encoding so `data[].embedding` parses as number[].
        encoding_format: "float",
        dimensions: this.settings.dimensions,
        user: this.settings.user
      },
      failedResponseHandler: openaiFailedResponseHandler,
      successfulResponseHandler: chunkSIGXR3JT_cjs.createJsonResponseHandler(
        openaiTextEmbeddingResponseSchema
      ),
      abortSignal,
      fetch: this.config.fetch
    });
    return {
      embeddings: response.data.map((item) => item.embedding),
      // usage is nullish in the response schema, so guard before reading it.
      usage: response.usage ? { tokens: response.usage.prompt_tokens } : void 0,
      rawResponse: { headers: responseHeaders }
    };
  }
};
1471
// Minimal schema for the `/embeddings` success response: only the fields
// `doEmbed` reads (embedding vectors and optional prompt-token usage).
var openaiTextEmbeddingResponseSchema = zod.z.object({
  data: zod.z.array(zod.z.object({ embedding: zod.z.array(zod.z.number()) })),
  usage: zod.z.object({ prompt_tokens: zod.z.number() }).nullish()
});
1475
// Per-model cap on `n` for a single image-generation request; used as the
// fallback for OpenAIImageModel.maxImagesPerCall when settings omit it.
var modelMaxImagesPerCall = {
  "dall-e-3": 1,
  "dall-e-2": 10,
  "gpt-image-1": 10
};
1480
// Models whose API already returns base64 by default; for these the request
// body must not include `response_format: "b64_json"`.
var hasDefaultResponseFormat = /* @__PURE__ */ new Set(["gpt-image-1"]);
1481
// Image model for the OpenAI `/images/generations` endpoint (AI SDK v1 spec).
var OpenAIImageModel = class {
  constructor(modelId, settings, config) {
    this.modelId = modelId;
    this.settings = settings;
    this.config = config;
    this.specificationVersion = "v1";
  }
  // settings override > per-model table (modelMaxImagesPerCall) > 1.
  get maxImagesPerCall() {
    var _a, _b;
    return (_b = (_a = this.settings.maxImagesPerCall) != null ? _a : modelMaxImagesPerCall[this.modelId]) != null ? _b : 1;
  }
  get provider() {
    return this.config.provider;
  }
  /**
   * Generates `n` images for `prompt`.
   * `aspectRatio` and `seed` are not supported by this endpoint and are
   * converted into warnings rather than errors. Returns base64 image data
   * plus response metadata (timestamp, model id, headers).
   */
  async doGenerate({
    prompt,
    n,
    size,
    aspectRatio,
    seed,
    providerOptions,
    headers,
    abortSignal
  }) {
    var _a, _b, _c, _d;
    const warnings = [];
    if (aspectRatio != null) {
      warnings.push({
        type: "unsupported-setting",
        setting: "aspectRatio",
        details: "This model does not support aspect ratio. Use `size` instead."
      });
    }
    if (seed != null) {
      warnings.push({ type: "unsupported-setting", setting: "seed" });
    }
    // _internal.currentDate is a test hook; fall back to wall-clock time.
    const currentDate = (_c = (_b = (_a = this.config._internal) == null ? void 0 : _a.currentDate) == null ? void 0 : _b.call(_a)) != null ? _c : /* @__PURE__ */ new Date();
    const { value: response, responseHeaders } = await chunkSIGXR3JT_cjs.postJsonToApi({
      url: this.config.url({
        path: "/images/generations",
        modelId: this.modelId
      }),
      headers: chunkSIGXR3JT_cjs.combineHeaders(this.config.headers(), headers),
      body: {
        model: this.modelId,
        prompt,
        n,
        size,
        // Caller-supplied provider options are spread last so they can extend
        // the request; response_format is only forced for models that do not
        // return base64 by default.
        ...(_d = providerOptions.openai) != null ? _d : {},
        ...!hasDefaultResponseFormat.has(this.modelId) ? { response_format: "b64_json" } : {}
      },
      failedResponseHandler: openaiFailedResponseHandler,
      successfulResponseHandler: chunkSIGXR3JT_cjs.createJsonResponseHandler(
        openaiImageResponseSchema
      ),
      abortSignal,
      fetch: this.config.fetch
    });
    return {
      images: response.data.map((item) => item.b64_json),
      warnings,
      response: {
        timestamp: currentDate,
        modelId: this.modelId,
        headers: responseHeaders
      }
    };
  }
};
1550
// Success-response schema for `/images/generations`: only the base64 payload
// that doGenerate maps into `images`.
var openaiImageResponseSchema = zod.z.object({
  data: zod.z.array(zod.z.object({ b64_json: zod.z.string() }))
});
1553
// Provider options accepted by the transcription model. Note the defaults:
// temperature 0 and segment-level timestamp granularity.
var openAIProviderOptionsSchema = zod.z.object({
  include: zod.z.array(zod.z.string()).nullish(),
  language: zod.z.string().nullish(),
  prompt: zod.z.string().nullish(),
  temperature: zod.z.number().min(0).max(1).nullish().default(0),
  timestampGranularities: zod.z.array(zod.z.enum(["word", "segment"])).nullish().default(["segment"])
});
1560
// Maps the English language names returned by the transcription API to
// ISO 639-1 codes; unknown names yield `undefined` in the caller.
var languageMap = {
  afrikaans: "af",
  arabic: "ar",
  armenian: "hy",
  azerbaijani: "az",
  belarusian: "be",
  bosnian: "bs",
  bulgarian: "bg",
  catalan: "ca",
  chinese: "zh",
  croatian: "hr",
  czech: "cs",
  danish: "da",
  dutch: "nl",
  english: "en",
  estonian: "et",
  finnish: "fi",
  french: "fr",
  galician: "gl",
  german: "de",
  greek: "el",
  hebrew: "he",
  hindi: "hi",
  hungarian: "hu",
  icelandic: "is",
  indonesian: "id",
  italian: "it",
  japanese: "ja",
  kannada: "kn",
  kazakh: "kk",
  korean: "ko",
  latvian: "lv",
  lithuanian: "lt",
  macedonian: "mk",
  malay: "ms",
  marathi: "mr",
  maori: "mi",
  nepali: "ne",
  norwegian: "no",
  persian: "fa",
  polish: "pl",
  portuguese: "pt",
  romanian: "ro",
  russian: "ru",
  serbian: "sr",
  slovak: "sk",
  slovenian: "sl",
  spanish: "es",
  swahili: "sw",
  swedish: "sv",
  tagalog: "tl",
  tamil: "ta",
  thai: "th",
  turkish: "tr",
  ukrainian: "uk",
  urdu: "ur",
  vietnamese: "vi",
  welsh: "cy"
};
1619
// Transcription model for the OpenAI `/audio/transcriptions` endpoint
// (AI SDK v1 spec). Requests are sent as multipart/form-data.
var OpenAITranscriptionModel = class {
  constructor(modelId, config) {
    this.modelId = modelId;
    this.config = config;
    this.specificationVersion = "v1";
  }
  get provider() {
    return this.config.provider;
  }
  /**
   * Builds the multipart form body for a transcription request.
   * `audio` may be a Uint8Array or a base64 string; either way it is wrapped
   * in a File named "audio" with the given media type. Parsed provider
   * options are flattened into string form fields (undefined values skipped).
   */
  getArgs({
    audio,
    mediaType,
    providerOptions
  }) {
    var _a, _b, _c, _d, _e;
    const warnings = [];
    const openAIOptions = chunkSIGXR3JT_cjs.parseProviderOptions({
      provider: "openai",
      providerOptions,
      schema: openAIProviderOptionsSchema
    });
    const formData = new FormData();
    const blob = audio instanceof Uint8Array ? new Blob([audio]) : new Blob([chunkSIGXR3JT_cjs.convertBase64ToUint8Array(audio)]);
    formData.append("model", this.modelId);
    formData.append("file", new File([blob], "audio", { type: mediaType }));
    if (openAIOptions) {
      // Rename camelCase option keys to the snake_case field names the API expects.
      const transcriptionModelOptions = {
        include: (_a = openAIOptions.include) != null ? _a : void 0,
        language: (_b = openAIOptions.language) != null ? _b : void 0,
        prompt: (_c = openAIOptions.prompt) != null ? _c : void 0,
        temperature: (_d = openAIOptions.temperature) != null ? _d : void 0,
        timestamp_granularities: (_e = openAIOptions.timestampGranularities) != null ? _e : void 0
      };
      for (const key in transcriptionModelOptions) {
        const value = transcriptionModelOptions[key];
        if (value !== void 0) {
          formData.append(key, String(value));
        }
      }
    }
    return {
      formData,
      warnings
    };
  }
  /**
   * Posts the audio to `/audio/transcriptions` and adapts the JSON response:
   * word timings become `segments`, the reported language name is mapped to
   * an ISO 639-1 code via `languageMap` (undefined when unrecognized).
   */
  async doGenerate(options) {
    var _a, _b, _c, _d, _e, _f;
    // _internal.currentDate is a test hook; fall back to wall-clock time.
    const currentDate = (_c = (_b = (_a = this.config._internal) == null ? void 0 : _a.currentDate) == null ? void 0 : _b.call(_a)) != null ? _c : /* @__PURE__ */ new Date();
    const { formData, warnings } = this.getArgs(options);
    const {
      value: response,
      responseHeaders,
      rawValue: rawResponse
    } = await chunkSIGXR3JT_cjs.postFormDataToApi({
      url: this.config.url({
        path: "/audio/transcriptions",
        modelId: this.modelId
      }),
      headers: chunkSIGXR3JT_cjs.combineHeaders(this.config.headers(), options.headers),
      formData,
      failedResponseHandler: openaiFailedResponseHandler,
      successfulResponseHandler: chunkSIGXR3JT_cjs.createJsonResponseHandler(
        openaiTranscriptionResponseSchema
      ),
      abortSignal: options.abortSignal,
      fetch: this.config.fetch
    });
    const language = response.language != null && response.language in languageMap ? languageMap[response.language] : void 0;
    return {
      text: response.text,
      segments: (_e = (_d = response.words) == null ? void 0 : _d.map((word) => ({
        text: word.word,
        startSecond: word.start,
        endSecond: word.end
      }))) != null ? _e : [],
      language,
      durationInSeconds: (_f = response.duration) != null ? _f : void 0,
      warnings,
      response: {
        timestamp: currentDate,
        modelId: this.modelId,
        headers: responseHeaders,
        body: rawResponse
      }
    };
  }
};
1706
// Success-response schema for `/audio/transcriptions`; only `text` is
// required, word timings / language / duration are optional.
var openaiTranscriptionResponseSchema = zod.z.object({
  text: zod.z.string(),
  language: zod.z.string().nullish(),
  duration: zod.z.number().nullish(),
  words: zod.z.array(
    zod.z.object({
      word: zod.z.string(),
      start: zod.z.number(),
      end: zod.z.number()
    })
  ).nullish()
});
1718
/**
 * Converts an AI SDK prompt (array of {role, content}) into the input item
 * list expected by the OpenAI Responses API.
 *
 * - system messages: mapped per `systemMessageMode` to "system", "developer",
 *   or dropped with a warning ("remove").
 * - user messages: text -> input_text; images -> input_image (URL or base64
 *   data URL, with optional OpenAI-specific `detail`); files -> input_file
 *   (PDF only; file URLs are rejected).
 * - assistant messages: text -> output_text content; tool calls ->
 *   top-level function_call items.
 * - tool messages: each result -> function_call_output item.
 *
 * Returns { messages, warnings }. Throws on unsupported roles, modes, or
 * file types (exhaustive-check defaults).
 */
function convertToOpenAIResponsesMessages({
  prompt,
  systemMessageMode
}) {
  const messages = [];
  const warnings = [];
  for (const { role, content } of prompt) {
    switch (role) {
      case "system": {
        switch (systemMessageMode) {
          case "system": {
            messages.push({ role: "system", content });
            break;
          }
          case "developer": {
            messages.push({ role: "developer", content });
            break;
          }
          case "remove": {
            warnings.push({
              type: "other",
              message: "system messages are removed for this model"
            });
            break;
          }
          default: {
            const _exhaustiveCheck = systemMessageMode;
            throw new Error(
              `Unsupported system message mode: ${_exhaustiveCheck}`
            );
          }
        }
        break;
      }
      case "user": {
        messages.push({
          role: "user",
          content: content.map((part, index) => {
            var _a, _b, _c, _d;
            switch (part.type) {
              case "text": {
                return { type: "input_text", text: part.text };
              }
              case "image": {
                return {
                  type: "input_image",
                  // URLs pass through; binary images become a data URL,
                  // defaulting the mime type to image/jpeg.
                  image_url: part.image instanceof URL ? part.image.toString() : `data:${(_a = part.mimeType) != null ? _a : "image/jpeg"};base64,${chunkSIGXR3JT_cjs.convertUint8ArrayToBase64(part.image)}`,
                  // OpenAI specific extension: image detail
                  detail: (_c = (_b = part.providerMetadata) == null ? void 0 : _b.openai) == null ? void 0 : _c.imageDetail
                };
              }
              case "file": {
                if (part.data instanceof URL) {
                  throw new chunkSIGXR3JT_cjs.UnsupportedFunctionalityError({
                    functionality: "File URLs in user messages"
                  });
                }
                switch (part.mimeType) {
                  case "application/pdf": {
                    return {
                      type: "input_file",
                      // Synthesize a filename from the part index when absent.
                      filename: (_d = part.filename) != null ? _d : `part-${index}.pdf`,
                      file_data: `data:application/pdf;base64,${part.data}`
                    };
                  }
                  default: {
                    throw new chunkSIGXR3JT_cjs.UnsupportedFunctionalityError({
                      functionality: "Only PDF files are supported in user messages"
                    });
                  }
                }
              }
            }
          })
        });
        break;
      }
      case "assistant": {
        for (const part of content) {
          switch (part.type) {
            case "text": {
              messages.push({
                role: "assistant",
                content: [{ type: "output_text", text: part.text }]
              });
              break;
            }
            case "tool-call": {
              messages.push({
                type: "function_call",
                call_id: part.toolCallId,
                name: part.toolName,
                arguments: JSON.stringify(part.args)
              });
              break;
            }
          }
        }
        break;
      }
      case "tool": {
        for (const part of content) {
          messages.push({
            type: "function_call_output",
            call_id: part.toolCallId,
            output: JSON.stringify(part.result)
          });
        }
        break;
      }
      default: {
        const _exhaustiveCheck = role;
        throw new Error(`Unsupported role: ${_exhaustiveCheck}`);
      }
    }
  }
  return { messages, warnings };
}
1836
/**
 * Maps the Responses API `incomplete_details.reason` to the AI SDK finish
 * reason. A missing reason means the response completed normally: "stop",
 * or "tool-calls" when tool calls were emitted. Unrecognized reasons map to
 * "unknown" (again upgraded to "tool-calls" when tool calls occurred).
 */
function mapOpenAIResponseFinishReason({
  finishReason,
  hasToolCalls
}) {
  // Completed normally (no incomplete reason reported).
  if (finishReason == null) {
    return hasToolCalls ? "tool-calls" : "stop";
  }
  if (finishReason === "max_output_tokens") {
    return "length";
  }
  if (finishReason === "content_filter") {
    return "content-filter";
  }
  return hasToolCalls ? "tool-calls" : "unknown";
}
1852
/**
 * Translates AI SDK tool definitions / tool choice from `mode` into the
 * Responses API `tools` and `tool_choice` request fields.
 *
 * - function tools map 1:1 (optionally with `strict: true` schemas);
 * - the provider-defined "openai.web_search_preview" tool maps to the
 *   web_search_preview built-in; all other tools produce warnings.
 * - tool choice: "auto"/"none"/"required" pass through; a named tool becomes
 *   a {type: "function", name} choice, except "web_search_preview" which is
 *   addressed by type only (built-in tools have no function name).
 *
 * Returns { tools, tool_choice, toolWarnings }; tools/tool_choice are
 * undefined when not applicable.
 */
function prepareResponsesTools({
  mode,
  strict
}) {
  var _a;
  // Treat an empty tools array the same as no tools.
  const tools = ((_a = mode.tools) == null ? void 0 : _a.length) ? mode.tools : void 0;
  const toolWarnings = [];
  if (tools == null) {
    return { tools: void 0, tool_choice: void 0, toolWarnings };
  }
  const toolChoice = mode.toolChoice;
  const openaiTools2 = [];
  for (const tool of tools) {
    switch (tool.type) {
      case "function":
        openaiTools2.push({
          type: "function",
          name: tool.name,
          description: tool.description,
          parameters: tool.parameters,
          strict: strict ? true : void 0
        });
        break;
      case "provider-defined":
        switch (tool.id) {
          case "openai.web_search_preview":
            openaiTools2.push({
              type: "web_search_preview",
              search_context_size: tool.args.searchContextSize,
              user_location: tool.args.userLocation
            });
            break;
          default:
            toolWarnings.push({ type: "unsupported-tool", tool });
            break;
        }
        break;
      default:
        toolWarnings.push({ type: "unsupported-tool", tool });
        break;
    }
  }
  if (toolChoice == null) {
    return { tools: openaiTools2, tool_choice: void 0, toolWarnings };
  }
  const type = toolChoice.type;
  switch (type) {
    case "auto":
    case "none":
    case "required":
      return { tools: openaiTools2, tool_choice: type, toolWarnings };
    case "tool": {
      if (toolChoice.toolName === "web_search_preview") {
        return {
          tools: openaiTools2,
          tool_choice: {
            type: "web_search_preview"
          },
          toolWarnings
        };
      }
      return {
        tools: openaiTools2,
        tool_choice: {
          type: "function",
          name: toolChoice.toolName
        },
        toolWarnings
      };
    }
    default: {
      const _exhaustiveCheck = type;
      throw new chunkSIGXR3JT_cjs.UnsupportedFunctionalityError({
        functionality: `Unsupported tool choice type: ${_exhaustiveCheck}`
      });
    }
  }
}
1930
// Language model for the OpenAI Responses API (`/responses`), AI SDK v1 spec.
// Supports structured outputs, tool calls, reasoning summaries, and both
// one-shot generation (doGenerate) and SSE streaming (doStream).
var OpenAIResponsesLanguageModel = class {
  constructor(modelId, config) {
    this.specificationVersion = "v1";
    this.defaultObjectGenerationMode = "json";
    this.supportsStructuredOutputs = true;
    this.modelId = modelId;
    this.config = config;
  }
  get provider() {
    return this.config.provider;
  }
  /**
   * Builds the `/responses` request body from the AI SDK call options.
   * Unsupported settings (topK, seed, presence/frequency penalty, stop
   * sequences) become warnings; reasoning models additionally drop
   * temperature/top_p with warnings. Returns { args, warnings }.
   */
  getArgs({
    mode,
    maxTokens,
    temperature,
    stopSequences,
    topP,
    topK,
    presencePenalty,
    frequencyPenalty,
    seed,
    prompt,
    providerMetadata,
    responseFormat
  }) {
    var _a, _b, _c;
    const warnings = [];
    const modelConfig = getResponsesModelConfig(this.modelId);
    const type = mode.type;
    if (topK != null) {
      warnings.push({
        type: "unsupported-setting",
        setting: "topK"
      });
    }
    if (seed != null) {
      warnings.push({
        type: "unsupported-setting",
        setting: "seed"
      });
    }
    if (presencePenalty != null) {
      warnings.push({
        type: "unsupported-setting",
        setting: "presencePenalty"
      });
    }
    if (frequencyPenalty != null) {
      warnings.push({
        type: "unsupported-setting",
        setting: "frequencyPenalty"
      });
    }
    if (stopSequences != null) {
      warnings.push({
        type: "unsupported-setting",
        setting: "stopSequences"
      });
    }
    const { messages, warnings: messageWarnings } = convertToOpenAIResponsesMessages({
      prompt,
      systemMessageMode: modelConfig.systemMessageMode
    });
    warnings.push(...messageWarnings);
    const openaiOptions = chunkSIGXR3JT_cjs.parseProviderOptions({
      provider: "openai",
      providerOptions: providerMetadata,
      schema: openaiResponsesProviderOptionsSchema
    });
    // Strict JSON schemas are the default unless explicitly disabled.
    const isStrict = (_a = openaiOptions == null ? void 0 : openaiOptions.strictSchemas) != null ? _a : true;
    const baseArgs = {
      model: this.modelId,
      input: messages,
      temperature,
      top_p: topP,
      max_output_tokens: maxTokens,
      ...(responseFormat == null ? void 0 : responseFormat.type) === "json" && {
        text: {
          format: responseFormat.schema != null ? {
            type: "json_schema",
            strict: isStrict,
            name: (_b = responseFormat.name) != null ? _b : "response",
            description: responseFormat.description,
            schema: responseFormat.schema
          } : { type: "json_object" }
        }
      },
      // provider options:
      metadata: openaiOptions == null ? void 0 : openaiOptions.metadata,
      parallel_tool_calls: openaiOptions == null ? void 0 : openaiOptions.parallelToolCalls,
      previous_response_id: openaiOptions == null ? void 0 : openaiOptions.previousResponseId,
      store: openaiOptions == null ? void 0 : openaiOptions.store,
      user: openaiOptions == null ? void 0 : openaiOptions.user,
      instructions: openaiOptions == null ? void 0 : openaiOptions.instructions,
      // model-specific settings:
      ...modelConfig.isReasoningModel && ((openaiOptions == null ? void 0 : openaiOptions.reasoningEffort) != null || (openaiOptions == null ? void 0 : openaiOptions.reasoningSummary) != null) && {
        reasoning: {
          ...(openaiOptions == null ? void 0 : openaiOptions.reasoningEffort) != null && {
            effort: openaiOptions.reasoningEffort
          },
          ...(openaiOptions == null ? void 0 : openaiOptions.reasoningSummary) != null && {
            summary: openaiOptions.reasoningSummary
          }
        }
      },
      ...modelConfig.requiredAutoTruncation && {
        truncation: "auto"
      }
    };
    // Reasoning models reject sampling parameters; strip them with warnings.
    if (modelConfig.isReasoningModel) {
      if (baseArgs.temperature != null) {
        baseArgs.temperature = void 0;
        warnings.push({
          type: "unsupported-setting",
          setting: "temperature",
          details: "temperature is not supported for reasoning models"
        });
      }
      if (baseArgs.top_p != null) {
        baseArgs.top_p = void 0;
        warnings.push({
          type: "unsupported-setting",
          setting: "topP",
          details: "topP is not supported for reasoning models"
        });
      }
    }
    switch (type) {
      case "regular": {
        const { tools, tool_choice, toolWarnings } = prepareResponsesTools({
          mode,
          strict: isStrict
          // TODO support provider options on tools
        });
        return {
          args: {
            ...baseArgs,
            tools,
            tool_choice
          },
          warnings: [...warnings, ...toolWarnings]
        };
      }
      case "object-json": {
        return {
          args: {
            ...baseArgs,
            text: {
              format: mode.schema != null ? {
                type: "json_schema",
                strict: isStrict,
                name: (_c = mode.name) != null ? _c : "response",
                description: mode.description,
                schema: mode.schema
              } : { type: "json_object" }
            }
          },
          warnings
        };
      }
      case "object-tool": {
        // Force a single function tool and require the model to call it.
        return {
          args: {
            ...baseArgs,
            tool_choice: { type: "function", name: mode.tool.name },
            tools: [
              {
                type: "function",
                name: mode.tool.name,
                description: mode.tool.description,
                parameters: mode.tool.parameters,
                strict: isStrict
              }
            ]
          },
          warnings
        };
      }
      default: {
        const _exhaustiveCheck = type;
        throw new Error(`Unsupported type: ${_exhaustiveCheck}`);
      }
    }
  }
  /**
   * Non-streaming generation. Posts the built args, validates the response
   * inline against a zod schema, surfaces an in-body `error` object as an
   * APICallError, and adapts output items into text, url-citation sources,
   * tool calls, reasoning summaries, usage, and provider metadata
   * (responseId / cached prompt tokens / reasoning tokens).
   */
  async doGenerate(options) {
    var _a, _b, _c, _d, _e, _f, _g;
    const { args: body, warnings } = this.getArgs(options);
    const url = this.config.url({
      path: "/responses",
      modelId: this.modelId
    });
    const {
      responseHeaders,
      value: response,
      rawValue: rawResponse
    } = await chunkSIGXR3JT_cjs.postJsonToApi({
      url,
      headers: chunkSIGXR3JT_cjs.combineHeaders(this.config.headers(), options.headers),
      body,
      failedResponseHandler: openaiFailedResponseHandler,
      successfulResponseHandler: chunkSIGXR3JT_cjs.createJsonResponseHandler(
        zod.z.object({
          id: zod.z.string(),
          created_at: zod.z.number(),
          error: zod.z.object({
            message: zod.z.string(),
            code: zod.z.string()
          }).nullish(),
          model: zod.z.string(),
          output: zod.z.array(
            zod.z.discriminatedUnion("type", [
              zod.z.object({
                type: zod.z.literal("message"),
                role: zod.z.literal("assistant"),
                content: zod.z.array(
                  zod.z.object({
                    type: zod.z.literal("output_text"),
                    text: zod.z.string(),
                    annotations: zod.z.array(
                      zod.z.object({
                        type: zod.z.literal("url_citation"),
                        start_index: zod.z.number(),
                        end_index: zod.z.number(),
                        url: zod.z.string(),
                        title: zod.z.string()
                      })
                    )
                  })
                )
              }),
              zod.z.object({
                type: zod.z.literal("function_call"),
                call_id: zod.z.string(),
                name: zod.z.string(),
                arguments: zod.z.string()
              }),
              zod.z.object({
                type: zod.z.literal("web_search_call")
              }),
              zod.z.object({
                type: zod.z.literal("computer_call")
              }),
              zod.z.object({
                type: zod.z.literal("reasoning"),
                summary: zod.z.array(
                  zod.z.object({
                    type: zod.z.literal("summary_text"),
                    text: zod.z.string()
                  })
                )
              })
            ])
          ),
          incomplete_details: zod.z.object({ reason: zod.z.string() }).nullable(),
          usage: usageSchema
        })
      ),
      abortSignal: options.abortSignal,
      fetch: this.config.fetch
    });
    // The Responses API can return an error object inside a 200 response.
    if (response.error) {
      throw new chunkSIGXR3JT_cjs.APICallError({
        message: response.error.message,
        url,
        requestBodyValues: body,
        statusCode: 400,
        responseHeaders,
        responseBody: rawResponse,
        isRetryable: false
      });
    }
    const outputTextElements = response.output.filter((output) => output.type === "message").flatMap((output) => output.content).filter((content) => content.type === "output_text");
    const toolCalls = response.output.filter((output) => output.type === "function_call").map((output) => ({
      toolCallType: "function",
      toolCallId: output.call_id,
      toolName: output.name,
      args: output.arguments
    }));
    const reasoningSummary = (_b = (_a = response.output.find((item) => item.type === "reasoning")) == null ? void 0 : _a.summary) != null ? _b : null;
    return {
      text: outputTextElements.map((content) => content.text).join("\n"),
      sources: outputTextElements.flatMap(
        (content) => content.annotations.map((annotation) => {
          var _a2, _b2, _c2;
          return {
            sourceType: "url",
            // Prefer the injected id generator (test hook), else the default.
            id: (_c2 = (_b2 = (_a2 = this.config).generateId) == null ? void 0 : _b2.call(_a2)) != null ? _c2 : chunkSIGXR3JT_cjs.generateId(),
            url: annotation.url,
            title: annotation.title
          };
        })
      ),
      finishReason: mapOpenAIResponseFinishReason({
        finishReason: (_c = response.incomplete_details) == null ? void 0 : _c.reason,
        hasToolCalls: toolCalls.length > 0
      }),
      toolCalls: toolCalls.length > 0 ? toolCalls : void 0,
      reasoning: reasoningSummary ? reasoningSummary.map((summary) => ({
        type: "text",
        text: summary.text
      })) : void 0,
      usage: {
        promptTokens: response.usage.input_tokens,
        completionTokens: response.usage.output_tokens
      },
      rawCall: {
        rawPrompt: void 0,
        rawSettings: {}
      },
      rawResponse: {
        headers: responseHeaders,
        body: rawResponse
      },
      request: {
        body: JSON.stringify(body)
      },
      response: {
        id: response.id,
        // created_at is epoch seconds; Date expects milliseconds.
        timestamp: new Date(response.created_at * 1e3),
        modelId: response.model
      },
      providerMetadata: {
        openai: {
          responseId: response.id,
          cachedPromptTokens: (_e = (_d = response.usage.input_tokens_details) == null ? void 0 : _d.cached_tokens) != null ? _e : null,
          reasoningTokens: (_g = (_f = response.usage.output_tokens_details) == null ? void 0 : _f.reasoning_tokens) != null ? _g : null
        }
      },
      warnings
    };
  }
  /**
   * Streaming generation. Posts with `stream: true`, parses SSE chunks via
   * `openaiResponsesChunkSchema`, and re-emits them as AI SDK stream parts:
   * text deltas, reasoning deltas, tool-call deltas/completions (keyed by
   * output_index in `ongoingToolCalls`), sources, response metadata, and a
   * final "finish" part with usage and provider metadata.
   */
  async doStream(options) {
    const { args: body, warnings } = this.getArgs(options);
    const { responseHeaders, value: response } = await chunkSIGXR3JT_cjs.postJsonToApi({
      url: this.config.url({
        path: "/responses",
        modelId: this.modelId
      }),
      headers: chunkSIGXR3JT_cjs.combineHeaders(this.config.headers(), options.headers),
      body: {
        ...body,
        stream: true
      },
      failedResponseHandler: openaiFailedResponseHandler,
      successfulResponseHandler: chunkSIGXR3JT_cjs.createEventSourceResponseHandler(
        openaiResponsesChunkSchema
      ),
      abortSignal: options.abortSignal,
      fetch: this.config.fetch
    });
    // Captured for use inside the TransformStream callbacks (their `this`
    // is not the model instance).
    const self = this;
    let finishReason = "unknown";
    let promptTokens = NaN;
    let completionTokens = NaN;
    let cachedPromptTokens = null;
    let reasoningTokens = null;
    let responseId = null;
    const ongoingToolCalls = {};
    let hasToolCalls = false;
    return {
      stream: response.pipeThrough(
        new TransformStream({
          transform(chunk, controller) {
            var _a, _b, _c, _d, _e, _f, _g, _h;
            if (!chunk.success) {
              finishReason = "error";
              controller.enqueue({ type: "error", error: chunk.error });
              return;
            }
            const value = chunk.value;
            if (isResponseOutputItemAddedChunk(value)) {
              if (value.item.type === "function_call") {
                // Remember the call so later argument deltas can be attributed.
                ongoingToolCalls[value.output_index] = {
                  toolName: value.item.name,
                  toolCallId: value.item.call_id
                };
                controller.enqueue({
                  type: "tool-call-delta",
                  toolCallType: "function",
                  toolCallId: value.item.call_id,
                  toolName: value.item.name,
                  argsTextDelta: value.item.arguments
                });
              }
            } else if (isResponseFunctionCallArgumentsDeltaChunk(value)) {
              const toolCall = ongoingToolCalls[value.output_index];
              if (toolCall != null) {
                controller.enqueue({
                  type: "tool-call-delta",
                  toolCallType: "function",
                  toolCallId: toolCall.toolCallId,
                  toolName: toolCall.toolName,
                  argsTextDelta: value.delta
                });
              }
            } else if (isResponseCreatedChunk(value)) {
              responseId = value.response.id;
              controller.enqueue({
                type: "response-metadata",
                id: value.response.id,
                timestamp: new Date(value.response.created_at * 1e3),
                modelId: value.response.model
              });
            } else if (isTextDeltaChunk(value)) {
              controller.enqueue({
                type: "text-delta",
                textDelta: value.delta
              });
            } else if (isResponseReasoningSummaryTextDeltaChunk(value)) {
              controller.enqueue({
                type: "reasoning",
                textDelta: value.delta
              });
            } else if (isResponseOutputItemDoneChunk(value) && value.item.type === "function_call") {
              ongoingToolCalls[value.output_index] = void 0;
              hasToolCalls = true;
              controller.enqueue({
                type: "tool-call",
                toolCallType: "function",
                toolCallId: value.item.call_id,
                toolName: value.item.name,
                args: value.item.arguments
              });
            } else if (isResponseFinishedChunk(value)) {
              finishReason = mapOpenAIResponseFinishReason({
                finishReason: (_a = value.response.incomplete_details) == null ? void 0 : _a.reason,
                hasToolCalls
              });
              promptTokens = value.response.usage.input_tokens;
              completionTokens = value.response.usage.output_tokens;
              cachedPromptTokens = (_c = (_b = value.response.usage.input_tokens_details) == null ? void 0 : _b.cached_tokens) != null ? _c : cachedPromptTokens;
              reasoningTokens = (_e = (_d = value.response.usage.output_tokens_details) == null ? void 0 : _d.reasoning_tokens) != null ? _e : reasoningTokens;
            } else if (isResponseAnnotationAddedChunk(value)) {
              controller.enqueue({
                type: "source",
                source: {
                  sourceType: "url",
                  id: (_h = (_g = (_f = self.config).generateId) == null ? void 0 : _g.call(_f)) != null ? _h : chunkSIGXR3JT_cjs.generateId(),
                  url: value.annotation.url,
                  title: value.annotation.title
                }
              });
            } else if (isErrorChunk(value)) {
              controller.enqueue({ type: "error", error: value });
            }
          },
          flush(controller) {
            controller.enqueue({
              type: "finish",
              finishReason,
              usage: { promptTokens, completionTokens },
              ...(cachedPromptTokens != null || reasoningTokens != null) && {
                providerMetadata: {
                  openai: {
                    responseId,
                    cachedPromptTokens,
                    reasoningTokens
                  }
                }
              }
            });
          }
        })
      ),
      rawCall: {
        rawPrompt: void 0,
        rawSettings: {}
      },
      rawResponse: { headers: responseHeaders },
      request: { body: JSON.stringify(body) },
      warnings
    };
  }
};
2404
// Token-usage schema shared by the `/responses` one-shot response and the
// terminal stream chunk; detail sub-objects are nullish.
var usageSchema = zod.z.object({
  input_tokens: zod.z.number(),
  input_tokens_details: zod.z.object({ cached_tokens: zod.z.number().nullish() }).nullish(),
  output_tokens: zod.z.number(),
  output_tokens_details: zod.z.object({ reasoning_tokens: zod.z.number().nullish() }).nullish()
});
// --- SSE chunk schemas for the Responses API stream, one per event type. ---
var textDeltaChunkSchema = zod.z.object({
  type: zod.z.literal("response.output_text.delta"),
  delta: zod.z.string()
});
var responseFinishedChunkSchema = zod.z.object({
  type: zod.z.enum(["response.completed", "response.incomplete"]),
  response: zod.z.object({
    incomplete_details: zod.z.object({ reason: zod.z.string() }).nullish(),
    usage: usageSchema
  })
});
var responseCreatedChunkSchema = zod.z.object({
  type: zod.z.literal("response.created"),
  response: zod.z.object({
    id: zod.z.string(),
    created_at: zod.z.number(),
    model: zod.z.string()
  })
});
var responseOutputItemDoneSchema = zod.z.object({
  type: zod.z.literal("response.output_item.done"),
  output_index: zod.z.number(),
  item: zod.z.discriminatedUnion("type", [
    zod.z.object({
      type: zod.z.literal("message")
    }),
    zod.z.object({
      type: zod.z.literal("function_call"),
      id: zod.z.string(),
      call_id: zod.z.string(),
      name: zod.z.string(),
      arguments: zod.z.string(),
      status: zod.z.literal("completed")
    })
  ])
});
var responseFunctionCallArgumentsDeltaSchema = zod.z.object({
  type: zod.z.literal("response.function_call_arguments.delta"),
  item_id: zod.z.string(),
  output_index: zod.z.number(),
  delta: zod.z.string()
});
var responseOutputItemAddedSchema = zod.z.object({
  type: zod.z.literal("response.output_item.added"),
  output_index: zod.z.number(),
  item: zod.z.discriminatedUnion("type", [
    zod.z.object({
      type: zod.z.literal("message")
    }),
    zod.z.object({
      type: zod.z.literal("function_call"),
      id: zod.z.string(),
      call_id: zod.z.string(),
      name: zod.z.string(),
      arguments: zod.z.string()
    })
  ])
});
var responseAnnotationAddedSchema = zod.z.object({
  type: zod.z.literal("response.output_text.annotation.added"),
  annotation: zod.z.object({
    type: zod.z.literal("url_citation"),
    url: zod.z.string(),
    title: zod.z.string()
  })
});
var responseReasoningSummaryTextDeltaSchema = zod.z.object({
  type: zod.z.literal("response.reasoning_summary_text.delta"),
  item_id: zod.z.string(),
  output_index: zod.z.number(),
  summary_index: zod.z.number(),
  delta: zod.z.string()
});
var errorChunkSchema = zod.z.object({
  type: zod.z.literal("error"),
  code: zod.z.string(),
  message: zod.z.string(),
  param: zod.z.string().nullish(),
  sequence_number: zod.z.number()
});
// Union over all known stream events, with a passthrough object as the last
// member so unknown event types parse successfully and are ignored.
var openaiResponsesChunkSchema = zod.z.union([
  textDeltaChunkSchema,
  responseFinishedChunkSchema,
  responseCreatedChunkSchema,
  responseOutputItemDoneSchema,
  responseFunctionCallArgumentsDeltaSchema,
  responseOutputItemAddedSchema,
  responseAnnotationAddedSchema,
  responseReasoningSummaryTextDeltaSchema,
  errorChunkSchema,
  zod.z.object({ type: zod.z.string() }).passthrough()
  // fallback for unknown chunks
]);
2503
+ function isTextDeltaChunk(chunk) {
2504
+ return chunk.type === "response.output_text.delta";
2505
+ }
2506
+ function isResponseOutputItemDoneChunk(chunk) {
2507
+ return chunk.type === "response.output_item.done";
2508
+ }
2509
+ function isResponseFinishedChunk(chunk) {
2510
+ return chunk.type === "response.completed" || chunk.type === "response.incomplete";
2511
+ }
2512
+ function isResponseCreatedChunk(chunk) {
2513
+ return chunk.type === "response.created";
2514
+ }
2515
+ function isResponseFunctionCallArgumentsDeltaChunk(chunk) {
2516
+ return chunk.type === "response.function_call_arguments.delta";
2517
+ }
2518
+ function isResponseOutputItemAddedChunk(chunk) {
2519
+ return chunk.type === "response.output_item.added";
2520
+ }
2521
+ function isResponseAnnotationAddedChunk(chunk) {
2522
+ return chunk.type === "response.output_text.annotation.added";
2523
+ }
2524
+ function isResponseReasoningSummaryTextDeltaChunk(chunk) {
2525
+ return chunk.type === "response.reasoning_summary_text.delta";
2526
+ }
2527
+ function isErrorChunk(chunk) {
2528
+ return chunk.type === "error";
2529
+ }
2530
+ function getResponsesModelConfig(modelId) {
2531
+ if (modelId.startsWith("o") || modelId.startsWith("gpt-5")) {
2532
+ if (modelId.startsWith("o1-mini") || modelId.startsWith("o1-preview")) {
2533
+ return {
2534
+ isReasoningModel: true,
2535
+ systemMessageMode: "remove",
2536
+ requiredAutoTruncation: false
2537
+ };
2538
+ }
2539
+ return {
2540
+ isReasoningModel: true,
2541
+ systemMessageMode: "developer",
2542
+ requiredAutoTruncation: false
2543
+ };
2544
+ }
2545
+ return {
2546
+ isReasoningModel: false,
2547
+ systemMessageMode: "system",
2548
+ requiredAutoTruncation: false
2549
+ };
2550
+ }
2551
+ var openaiResponsesProviderOptionsSchema = zod.z.object({
2552
+ metadata: zod.z.any().nullish(),
2553
+ parallelToolCalls: zod.z.boolean().nullish(),
2554
+ previousResponseId: zod.z.string().nullish(),
2555
+ store: zod.z.boolean().nullish(),
2556
+ user: zod.z.string().nullish(),
2557
+ reasoningEffort: zod.z.string().nullish(),
2558
+ strictSchemas: zod.z.boolean().nullish(),
2559
+ instructions: zod.z.string().nullish(),
2560
+ reasoningSummary: zod.z.string().nullish()
2561
+ });
2562
+ var WebSearchPreviewParameters = zod.z.object({});
2563
+ function webSearchPreviewTool({
2564
+ searchContextSize,
2565
+ userLocation
2566
+ } = {}) {
2567
+ return {
2568
+ type: "provider-defined",
2569
+ id: "openai.web_search_preview",
2570
+ args: {
2571
+ searchContextSize,
2572
+ userLocation
2573
+ },
2574
+ parameters: WebSearchPreviewParameters
2575
+ };
2576
+ }
2577
+ var openaiTools = {
2578
+ webSearchPreview: webSearchPreviewTool
2579
+ };
2580
+ var OpenAIProviderOptionsSchema = zod.z.object({
2581
+ instructions: zod.z.string().nullish(),
2582
+ speed: zod.z.number().min(0.25).max(4).default(1).nullish()
2583
+ });
2584
+ var OpenAISpeechModel = class {
2585
+ constructor(modelId, config) {
2586
+ this.modelId = modelId;
2587
+ this.config = config;
2588
+ this.specificationVersion = "v1";
2589
+ }
2590
+ get provider() {
2591
+ return this.config.provider;
2592
+ }
2593
+ getArgs({
2594
+ text,
2595
+ voice = "alloy",
2596
+ outputFormat = "mp3",
2597
+ speed,
2598
+ instructions,
2599
+ providerOptions
2600
+ }) {
2601
+ const warnings = [];
2602
+ const openAIOptions = chunkSIGXR3JT_cjs.parseProviderOptions({
2603
+ provider: "openai",
2604
+ providerOptions,
2605
+ schema: OpenAIProviderOptionsSchema
2606
+ });
2607
+ const requestBody = {
2608
+ model: this.modelId,
2609
+ input: text,
2610
+ voice,
2611
+ response_format: "mp3",
2612
+ speed,
2613
+ instructions
2614
+ };
2615
+ if (outputFormat) {
2616
+ if (["mp3", "opus", "aac", "flac", "wav", "pcm"].includes(outputFormat)) {
2617
+ requestBody.response_format = outputFormat;
2618
+ } else {
2619
+ warnings.push({
2620
+ type: "unsupported-setting",
2621
+ setting: "outputFormat",
2622
+ details: `Unsupported output format: ${outputFormat}. Using mp3 instead.`
2623
+ });
2624
+ }
2625
+ }
2626
+ if (openAIOptions) {
2627
+ const speechModelOptions = {};
2628
+ for (const key in speechModelOptions) {
2629
+ const value = speechModelOptions[key];
2630
+ if (value !== void 0) {
2631
+ requestBody[key] = value;
2632
+ }
2633
+ }
2634
+ }
2635
+ return {
2636
+ requestBody,
2637
+ warnings
2638
+ };
2639
+ }
2640
+ async doGenerate(options) {
2641
+ var _a, _b, _c;
2642
+ const currentDate = (_c = (_b = (_a = this.config._internal) == null ? void 0 : _a.currentDate) == null ? void 0 : _b.call(_a)) != null ? _c : /* @__PURE__ */ new Date();
2643
+ const { requestBody, warnings } = this.getArgs(options);
2644
+ const {
2645
+ value: audio,
2646
+ responseHeaders,
2647
+ rawValue: rawResponse
2648
+ } = await chunkSIGXR3JT_cjs.postJsonToApi({
2649
+ url: this.config.url({
2650
+ path: "/audio/speech",
2651
+ modelId: this.modelId
2652
+ }),
2653
+ headers: chunkSIGXR3JT_cjs.combineHeaders(this.config.headers(), options.headers),
2654
+ body: requestBody,
2655
+ failedResponseHandler: openaiFailedResponseHandler,
2656
+ successfulResponseHandler: chunkSIGXR3JT_cjs.createBinaryResponseHandler(),
2657
+ abortSignal: options.abortSignal,
2658
+ fetch: this.config.fetch
2659
+ });
2660
+ return {
2661
+ audio,
2662
+ warnings,
2663
+ request: {
2664
+ body: JSON.stringify(requestBody)
2665
+ },
2666
+ response: {
2667
+ timestamp: currentDate,
2668
+ modelId: this.modelId,
2669
+ headers: responseHeaders,
2670
+ body: rawResponse
2671
+ }
2672
+ };
2673
+ }
2674
+ };
2675
+ function createOpenAI(options = {}) {
2676
+ var _a, _b, _c;
2677
+ const baseURL = (_a = chunkSIGXR3JT_cjs.withoutTrailingSlash(options.baseURL)) != null ? _a : "https://api.openai.com/v1";
2678
+ const compatibility = (_b = options.compatibility) != null ? _b : "compatible";
2679
+ const providerName = (_c = options.name) != null ? _c : "openai";
2680
+ const getHeaders = () => ({
2681
+ Authorization: `Bearer ${chunkSIGXR3JT_cjs.loadApiKey({
2682
+ apiKey: options.apiKey,
2683
+ environmentVariableName: "OPENAI_API_KEY",
2684
+ description: "OpenAI"
2685
+ })}`,
2686
+ "OpenAI-Organization": options.organization,
2687
+ "OpenAI-Project": options.project,
2688
+ ...options.headers
2689
+ });
2690
+ const createChatModel = (modelId, settings = {}) => new OpenAIChatLanguageModel(modelId, settings, {
2691
+ provider: `${providerName}.chat`,
2692
+ url: ({ path }) => `${baseURL}${path}`,
2693
+ headers: getHeaders,
2694
+ compatibility,
2695
+ fetch: options.fetch
2696
+ });
2697
+ const createCompletionModel = (modelId, settings = {}) => new OpenAICompletionLanguageModel(modelId, settings, {
2698
+ provider: `${providerName}.completion`,
2699
+ url: ({ path }) => `${baseURL}${path}`,
2700
+ headers: getHeaders,
2701
+ compatibility,
2702
+ fetch: options.fetch
2703
+ });
2704
+ const createEmbeddingModel = (modelId, settings = {}) => new OpenAIEmbeddingModel(modelId, settings, {
2705
+ provider: `${providerName}.embedding`,
2706
+ url: ({ path }) => `${baseURL}${path}`,
2707
+ headers: getHeaders,
2708
+ fetch: options.fetch
2709
+ });
2710
+ const createImageModel = (modelId, settings = {}) => new OpenAIImageModel(modelId, settings, {
2711
+ provider: `${providerName}.image`,
2712
+ url: ({ path }) => `${baseURL}${path}`,
2713
+ headers: getHeaders,
2714
+ fetch: options.fetch
2715
+ });
2716
+ const createTranscriptionModel = (modelId) => new OpenAITranscriptionModel(modelId, {
2717
+ provider: `${providerName}.transcription`,
2718
+ url: ({ path }) => `${baseURL}${path}`,
2719
+ headers: getHeaders,
2720
+ fetch: options.fetch
2721
+ });
2722
+ const createSpeechModel = (modelId) => new OpenAISpeechModel(modelId, {
2723
+ provider: `${providerName}.speech`,
2724
+ url: ({ path }) => `${baseURL}${path}`,
2725
+ headers: getHeaders,
2726
+ fetch: options.fetch
2727
+ });
2728
+ const createLanguageModel = (modelId, settings) => {
2729
+ if (new.target) {
2730
+ throw new Error(
2731
+ "The OpenAI model function cannot be called with the new keyword."
2732
+ );
2733
+ }
2734
+ if (modelId === "gpt-3.5-turbo-instruct") {
2735
+ return createCompletionModel(
2736
+ modelId,
2737
+ settings
2738
+ );
2739
+ }
2740
+ return createChatModel(modelId, settings);
2741
+ };
2742
+ const createResponsesModel = (modelId) => {
2743
+ return new OpenAIResponsesLanguageModel(modelId, {
2744
+ provider: `${providerName}.responses`,
2745
+ url: ({ path }) => `${baseURL}${path}`,
2746
+ headers: getHeaders,
2747
+ fetch: options.fetch
2748
+ });
2749
+ };
2750
+ const provider = function(modelId, settings) {
2751
+ return createLanguageModel(modelId, settings);
2752
+ };
2753
+ provider.languageModel = createLanguageModel;
2754
+ provider.chat = createChatModel;
2755
+ provider.completion = createCompletionModel;
2756
+ provider.responses = createResponsesModel;
2757
+ provider.embedding = createEmbeddingModel;
2758
+ provider.textEmbedding = createEmbeddingModel;
2759
+ provider.textEmbeddingModel = createEmbeddingModel;
2760
+ provider.image = createImageModel;
2761
+ provider.imageModel = createImageModel;
2762
+ provider.transcription = createTranscriptionModel;
2763
+ provider.transcriptionModel = createTranscriptionModel;
2764
+ provider.speech = createSpeechModel;
2765
+ provider.speechModel = createSpeechModel;
2766
+ provider.tools = openaiTools;
2767
+ return provider;
2768
+ }
2769
+ var openai = createOpenAI({
2770
+ compatibility: "strict"
2771
+ // strict for OpenAI API
2772
+ });
2773
+
2774
+ exports.createOpenAI = createOpenAI;
2775
+ exports.openai = openai;
2776
+ //# sourceMappingURL=chunk-PPYGWINI.cjs.map
2777
+ //# sourceMappingURL=chunk-PPYGWINI.cjs.map