@mastra/server 0.0.0-new-scorer-api-20250801075530 → 0.0.0-new-button-export-20251219133013

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (402) hide show
  1. package/CHANGELOG.md +5548 -0
  2. package/README.md +2 -7
  3. package/dist/chunk-3BXS37TQ.js +1105 -0
  4. package/dist/chunk-3BXS37TQ.js.map +1 -0
  5. package/dist/chunk-3SFLFUKY.js +116 -0
  6. package/dist/chunk-3SFLFUKY.js.map +1 -0
  7. package/dist/chunk-3XI22UQR.cjs +148 -0
  8. package/dist/chunk-3XI22UQR.cjs.map +1 -0
  9. package/dist/chunk-5W4RPVTK.cjs +49 -0
  10. package/dist/chunk-5W4RPVTK.cjs.map +1 -0
  11. package/dist/{chunk-7NADHFD2.cjs → chunk-64ITUOXI.cjs} +2 -2
  12. package/dist/chunk-64ITUOXI.cjs.map +1 -0
  13. package/dist/{chunk-MMROOK5J.js → chunk-6QWQZI4Q.js} +2 -2
  14. package/dist/{chunk-7NADHFD2.cjs.map → chunk-6QWQZI4Q.js.map} +1 -1
  15. package/dist/chunk-6V57U52D.js +196 -0
  16. package/dist/chunk-6V57U52D.js.map +1 -0
  17. package/dist/chunk-7KOS32XA.cjs +928 -0
  18. package/dist/chunk-7KOS32XA.cjs.map +1 -0
  19. package/dist/chunk-AZIH2QEC.js +83 -0
  20. package/dist/chunk-AZIH2QEC.js.map +1 -0
  21. package/dist/chunk-BMLUV4BH.cjs +992 -0
  22. package/dist/chunk-BMLUV4BH.cjs.map +1 -0
  23. package/dist/chunk-BMYZ4DO6.cjs +269 -0
  24. package/dist/chunk-BMYZ4DO6.cjs.map +1 -0
  25. package/dist/chunk-BNGT3NIC.cjs +211 -0
  26. package/dist/chunk-BNGT3NIC.cjs.map +1 -0
  27. package/dist/chunk-C3UIIRAT.cjs +920 -0
  28. package/dist/chunk-C3UIIRAT.cjs.map +1 -0
  29. package/dist/chunk-DRUNNM4C.js +328 -0
  30. package/dist/chunk-DRUNNM4C.js.map +1 -0
  31. package/dist/chunk-DVSCJECS.js +250 -0
  32. package/dist/chunk-DVSCJECS.js.map +1 -0
  33. package/dist/chunk-DW3WE4M4.js +250 -0
  34. package/dist/chunk-DW3WE4M4.js.map +1 -0
  35. package/dist/chunk-E646Y4FQ.js +970 -0
  36. package/dist/chunk-E646Y4FQ.js.map +1 -0
  37. package/dist/chunk-EUBTCHQC.js +83 -0
  38. package/dist/chunk-EUBTCHQC.js.map +1 -0
  39. package/dist/chunk-FWSKVWS7.cjs +88 -0
  40. package/dist/chunk-FWSKVWS7.cjs.map +1 -0
  41. package/dist/chunk-FYY54HZC.js +310 -0
  42. package/dist/chunk-FYY54HZC.js.map +1 -0
  43. package/dist/chunk-GFF2I6UD.js +354 -0
  44. package/dist/chunk-GFF2I6UD.js.map +1 -0
  45. package/dist/chunk-GU4EWMZB.cjs +769 -0
  46. package/dist/chunk-GU4EWMZB.cjs.map +1 -0
  47. package/dist/chunk-H2RMXG2Q.cjs +167 -0
  48. package/dist/chunk-H2RMXG2Q.cjs.map +1 -0
  49. package/dist/chunk-HAJOEDNB.js +274 -0
  50. package/dist/chunk-HAJOEDNB.js.map +1 -0
  51. package/dist/chunk-HT4LP3BO.js +75 -0
  52. package/dist/chunk-HT4LP3BO.js.map +1 -0
  53. package/dist/chunk-I6LR6CPC.cjs +125 -0
  54. package/dist/chunk-I6LR6CPC.cjs.map +1 -0
  55. package/dist/chunk-IEYXQTUW.cjs +284 -0
  56. package/dist/chunk-IEYXQTUW.cjs.map +1 -0
  57. package/dist/chunk-JUYWLFVT.cjs +689 -0
  58. package/dist/chunk-JUYWLFVT.cjs.map +1 -0
  59. package/dist/chunk-K73YS7YB.cjs +2777 -0
  60. package/dist/chunk-K73YS7YB.cjs.map +1 -0
  61. package/dist/chunk-KPSSRYGH.js +234 -0
  62. package/dist/chunk-KPSSRYGH.js.map +1 -0
  63. package/dist/chunk-LF2ZLOFP.js +767 -0
  64. package/dist/chunk-LF2ZLOFP.js.map +1 -0
  65. package/dist/chunk-LH722MPX.cjs +27569 -0
  66. package/dist/chunk-LH722MPX.cjs.map +1 -0
  67. package/dist/chunk-LKTCAPBD.cjs +252 -0
  68. package/dist/chunk-LKTCAPBD.cjs.map +1 -0
  69. package/dist/chunk-LQSZ4FJJ.cjs +205 -0
  70. package/dist/chunk-LQSZ4FJJ.cjs.map +1 -0
  71. package/dist/chunk-MCYD5LW7.cjs +90 -0
  72. package/dist/chunk-MCYD5LW7.cjs.map +1 -0
  73. package/dist/chunk-MQLS6Z7A.js +891 -0
  74. package/dist/chunk-MQLS6Z7A.js.map +1 -0
  75. package/dist/chunk-O7I5CWRX.cjs +44 -0
  76. package/dist/{chunk-Q7SFCCGT.cjs.map → chunk-O7I5CWRX.cjs.map} +1 -1
  77. package/dist/chunk-P3H4AZKI.cjs +252 -0
  78. package/dist/chunk-P3H4AZKI.cjs.map +1 -0
  79. package/dist/chunk-P6S2HIVE.cjs +1149 -0
  80. package/dist/chunk-P6S2HIVE.cjs.map +1 -0
  81. package/dist/chunk-PR4QN5HX.js +39 -0
  82. package/dist/{chunk-PZ5AY32C.js.map → chunk-PR4QN5HX.js.map} +1 -1
  83. package/dist/chunk-PUFCKXFW.cjs +312 -0
  84. package/dist/chunk-PUFCKXFW.cjs.map +1 -0
  85. package/dist/chunk-Q7NPRJRV.cjs +88 -0
  86. package/dist/chunk-Q7NPRJRV.cjs.map +1 -0
  87. package/dist/chunk-S3TIWWQL.cjs +322 -0
  88. package/dist/chunk-S3TIWWQL.cjs.map +1 -0
  89. package/dist/chunk-S5XBFHJL.js +675 -0
  90. package/dist/chunk-S5XBFHJL.js.map +1 -0
  91. package/dist/chunk-SRQY5IWD.js +204 -0
  92. package/dist/chunk-SRQY5IWD.js.map +1 -0
  93. package/dist/chunk-SV4AUWGY.js +2774 -0
  94. package/dist/chunk-SV4AUWGY.js.map +1 -0
  95. package/dist/chunk-SXVANU23.js +164 -0
  96. package/dist/chunk-SXVANU23.js.map +1 -0
  97. package/dist/chunk-TYZ6ZISQ.cjs +368 -0
  98. package/dist/chunk-TYZ6ZISQ.cjs.map +1 -0
  99. package/dist/chunk-UWRAKVAJ.js +903 -0
  100. package/dist/chunk-UWRAKVAJ.js.map +1 -0
  101. package/dist/{chunk-CY4TP3FK.js → chunk-UXGQZUYZ.js} +3 -3
  102. package/dist/{chunk-CY4TP3FK.js.map → chunk-UXGQZUYZ.js.map} +1 -1
  103. package/dist/{chunk-RE4RPXT2.cjs → chunk-V5WWQN7P.cjs} +4 -4
  104. package/dist/{chunk-RE4RPXT2.cjs.map → chunk-V5WWQN7P.cjs.map} +1 -1
  105. package/dist/chunk-WBLT2HL3.js +144 -0
  106. package/dist/chunk-WBLT2HL3.js.map +1 -0
  107. package/dist/chunk-X43DWDXB.cjs +346 -0
  108. package/dist/chunk-X43DWDXB.cjs.map +1 -0
  109. package/dist/chunk-XW2HXQDO.js +302 -0
  110. package/dist/chunk-XW2HXQDO.js.map +1 -0
  111. package/dist/chunk-XWGAT2DA.js +44 -0
  112. package/dist/chunk-XWGAT2DA.js.map +1 -0
  113. package/dist/chunk-ZSD2NWTP.js +27527 -0
  114. package/dist/chunk-ZSD2NWTP.js.map +1 -0
  115. package/dist/dist-2J26LQO2.cjs +16 -0
  116. package/dist/dist-2J26LQO2.cjs.map +1 -0
  117. package/dist/dist-4CMHRWC4.js +1147 -0
  118. package/dist/dist-4CMHRWC4.js.map +1 -0
  119. package/dist/dist-AEJONJSS.js +937 -0
  120. package/dist/dist-AEJONJSS.js.map +1 -0
  121. package/dist/dist-AF7EUPXA.cjs +928 -0
  122. package/dist/dist-AF7EUPXA.cjs.map +1 -0
  123. package/dist/dist-MEN73GGI.js +3 -0
  124. package/dist/dist-MEN73GGI.js.map +1 -0
  125. package/dist/dist-NVXXJWBO.cjs +940 -0
  126. package/dist/dist-NVXXJWBO.cjs.map +1 -0
  127. package/dist/dist-Q2ST4SUQ.cjs +764 -0
  128. package/dist/dist-Q2ST4SUQ.cjs.map +1 -0
  129. package/dist/dist-TE7XRSWH.js +761 -0
  130. package/dist/dist-TE7XRSWH.js.map +1 -0
  131. package/dist/dist-VPYZNWNG.js +925 -0
  132. package/dist/dist-VPYZNWNG.js.map +1 -0
  133. package/dist/dist-Y5SYUVLY.cjs +1150 -0
  134. package/dist/dist-Y5SYUVLY.cjs.map +1 -0
  135. package/dist/index.cjs +4 -0
  136. package/dist/index.cjs.map +1 -1
  137. package/dist/index.js +3 -0
  138. package/dist/index.js.map +1 -1
  139. package/dist/server/a2a/store.cjs +25 -0
  140. package/dist/server/a2a/store.cjs.map +1 -0
  141. package/dist/server/a2a/store.d.ts +3 -3
  142. package/dist/server/a2a/store.d.ts.map +1 -1
  143. package/dist/server/a2a/store.js +23 -0
  144. package/dist/server/a2a/store.js.map +1 -0
  145. package/dist/server/a2a/tasks.d.ts +7 -7
  146. package/dist/server/a2a/tasks.d.ts.map +1 -1
  147. package/dist/server/auth/defaults.d.ts +3 -0
  148. package/dist/server/auth/defaults.d.ts.map +1 -0
  149. package/dist/server/auth/helpers.d.ts +14 -0
  150. package/dist/server/auth/helpers.d.ts.map +1 -0
  151. package/dist/server/auth/index.cjs +137 -0
  152. package/dist/server/auth/index.cjs.map +1 -0
  153. package/dist/server/auth/index.d.ts +3 -0
  154. package/dist/server/auth/index.d.ts.map +1 -0
  155. package/dist/server/auth/index.js +127 -0
  156. package/dist/server/auth/index.js.map +1 -0
  157. package/dist/server/handlers/a2a.cjs +19 -11
  158. package/dist/server/handlers/a2a.d.ts +507 -23
  159. package/dist/server/handlers/a2a.d.ts.map +1 -1
  160. package/dist/server/handlers/a2a.js +1 -1
  161. package/dist/server/handlers/agent-builder.cjs +80 -0
  162. package/dist/server/handlers/agent-builder.cjs.map +1 -0
  163. package/dist/server/handlers/agent-builder.d.ts +642 -0
  164. package/dist/server/handlers/agent-builder.d.ts.map +1 -0
  165. package/dist/server/handlers/agent-builder.js +3 -0
  166. package/dist/server/handlers/agent-builder.js.map +1 -0
  167. package/dist/server/handlers/agents.cjs +79 -15
  168. package/dist/server/handlers/agents.d.ts +3350 -72
  169. package/dist/server/handlers/agents.d.ts.map +1 -1
  170. package/dist/server/handlers/agents.js +1 -1
  171. package/dist/server/handlers/error.cjs +2 -2
  172. package/dist/server/handlers/error.js +1 -1
  173. package/dist/server/handlers/logs.cjs +7 -7
  174. package/dist/server/handlers/logs.d.ts +135 -27
  175. package/dist/server/handlers/logs.d.ts.map +1 -1
  176. package/dist/server/handlers/logs.js +1 -1
  177. package/dist/server/handlers/mcp.cjs +40 -0
  178. package/dist/server/handlers/mcp.cjs.map +1 -0
  179. package/dist/server/handlers/mcp.d.ts +110 -0
  180. package/dist/server/handlers/mcp.d.ts.map +1 -0
  181. package/dist/server/handlers/mcp.js +3 -0
  182. package/dist/server/handlers/mcp.js.map +1 -0
  183. package/dist/server/handlers/memory.cjs +65 -29
  184. package/dist/server/handlers/memory.d.ts +935 -99
  185. package/dist/server/handlers/memory.d.ts.map +1 -1
  186. package/dist/server/handlers/memory.js +1 -1
  187. package/dist/server/handlers/observability.cjs +40 -0
  188. package/dist/server/handlers/observability.cjs.map +1 -0
  189. package/dist/server/handlers/observability.d.ts +156 -0
  190. package/dist/server/handlers/observability.d.ts.map +1 -0
  191. package/dist/server/handlers/observability.js +3 -0
  192. package/dist/server/handlers/observability.js.map +1 -0
  193. package/dist/server/handlers/scores.cjs +13 -13
  194. package/dist/server/handlers/scores.d.ts +112 -40
  195. package/dist/server/handlers/scores.d.ts.map +1 -1
  196. package/dist/server/handlers/scores.js +1 -1
  197. package/dist/server/handlers/stored-agents.cjs +28 -0
  198. package/dist/server/handlers/stored-agents.cjs.map +1 -0
  199. package/dist/server/handlers/stored-agents.d.ts +289 -0
  200. package/dist/server/handlers/stored-agents.d.ts.map +1 -0
  201. package/dist/server/handlers/stored-agents.js +3 -0
  202. package/dist/server/handlers/stored-agents.js.map +1 -0
  203. package/dist/server/handlers/test-utils.cjs +15 -0
  204. package/dist/server/handlers/test-utils.cjs.map +1 -0
  205. package/dist/server/handlers/test-utils.d.ts +6 -0
  206. package/dist/server/handlers/test-utils.d.ts.map +1 -0
  207. package/dist/server/handlers/test-utils.js +13 -0
  208. package/dist/server/handlers/test-utils.js.map +1 -0
  209. package/dist/server/handlers/tools.cjs +13 -9
  210. package/dist/server/handlers/tools.d.ts +69 -19
  211. package/dist/server/handlers/tools.d.ts.map +1 -1
  212. package/dist/server/handlers/tools.js +1 -1
  213. package/dist/server/handlers/utils.cjs +14 -2
  214. package/dist/server/handlers/utils.d.ts +11 -0
  215. package/dist/server/handlers/utils.d.ts.map +1 -1
  216. package/dist/server/handlers/utils.js +1 -1
  217. package/dist/server/handlers/vector.cjs +31 -7
  218. package/dist/server/handlers/vector.d.ts +94 -10
  219. package/dist/server/handlers/vector.d.ts.map +1 -1
  220. package/dist/server/handlers/vector.js +1 -1
  221. package/dist/server/handlers/voice.cjs +21 -9
  222. package/dist/server/handlers/voice.d.ts +81 -38
  223. package/dist/server/handlers/voice.d.ts.map +1 -1
  224. package/dist/server/handlers/voice.js +1 -1
  225. package/dist/server/handlers/workflows.cjs +75 -31
  226. package/dist/server/handlers/workflows.d.ts +1000 -80
  227. package/dist/server/handlers/workflows.d.ts.map +1 -1
  228. package/dist/server/handlers/workflows.js +1 -1
  229. package/dist/server/handlers.cjs +36 -36
  230. package/dist/server/handlers.d.ts +12 -12
  231. package/dist/server/handlers.d.ts.map +1 -1
  232. package/dist/server/handlers.js +12 -12
  233. package/dist/server/http-exception.d.ts +0 -5
  234. package/dist/server/http-exception.d.ts.map +1 -1
  235. package/dist/server/schemas/a2a.d.ts +786 -0
  236. package/dist/server/schemas/a2a.d.ts.map +1 -0
  237. package/dist/server/schemas/agent-builder.d.ts +213 -0
  238. package/dist/server/schemas/agent-builder.d.ts.map +1 -0
  239. package/dist/server/schemas/agents.d.ts +1401 -0
  240. package/dist/server/schemas/agents.d.ts.map +1 -0
  241. package/dist/server/schemas/common.d.ts +179 -0
  242. package/dist/server/schemas/common.d.ts.map +1 -0
  243. package/dist/server/schemas/logs.d.ts +124 -0
  244. package/dist/server/schemas/logs.d.ts.map +1 -0
  245. package/dist/server/schemas/mcp.d.ts +299 -0
  246. package/dist/server/schemas/mcp.d.ts.map +1 -0
  247. package/dist/server/schemas/memory.d.ts +998 -0
  248. package/dist/server/schemas/memory.d.ts.map +1 -0
  249. package/dist/server/schemas/observability.d.ts +402 -0
  250. package/dist/server/schemas/observability.d.ts.map +1 -0
  251. package/dist/server/schemas/scores.d.ts +259 -0
  252. package/dist/server/schemas/scores.d.ts.map +1 -0
  253. package/dist/server/schemas/stored-agents.d.ts +792 -0
  254. package/dist/server/schemas/stored-agents.d.ts.map +1 -0
  255. package/dist/server/schemas/vectors.d.ts +107 -0
  256. package/dist/server/schemas/vectors.d.ts.map +1 -0
  257. package/dist/server/schemas/workflows.d.ts +668 -0
  258. package/dist/server/schemas/workflows.d.ts.map +1 -0
  259. package/dist/server/server-adapter/index.cjs +485 -0
  260. package/dist/server/server-adapter/index.cjs.map +1 -0
  261. package/dist/server/server-adapter/index.d.ts +91 -0
  262. package/dist/server/server-adapter/index.d.ts.map +1 -0
  263. package/dist/server/server-adapter/index.js +470 -0
  264. package/dist/server/server-adapter/index.js.map +1 -0
  265. package/dist/server/server-adapter/openapi-utils.d.ts +59 -0
  266. package/dist/server/server-adapter/openapi-utils.d.ts.map +1 -0
  267. package/dist/server/server-adapter/redact.d.ts +26 -0
  268. package/dist/server/server-adapter/redact.d.ts.map +1 -0
  269. package/dist/server/server-adapter/routes/a2a.d.ts +3 -0
  270. package/dist/server/server-adapter/routes/a2a.d.ts.map +1 -0
  271. package/dist/server/server-adapter/routes/agent-builder.d.ts +3 -0
  272. package/dist/server/server-adapter/routes/agent-builder.d.ts.map +1 -0
  273. package/dist/server/server-adapter/routes/agents.d.ts +3 -0
  274. package/dist/server/server-adapter/routes/agents.d.ts.map +1 -0
  275. package/dist/server/server-adapter/routes/index.d.ts +50 -0
  276. package/dist/server/server-adapter/routes/index.d.ts.map +1 -0
  277. package/dist/server/server-adapter/routes/legacy.d.ts +7 -0
  278. package/dist/server/server-adapter/routes/legacy.d.ts.map +1 -0
  279. package/dist/server/server-adapter/routes/logs.d.ts +3 -0
  280. package/dist/server/server-adapter/routes/logs.d.ts.map +1 -0
  281. package/dist/server/server-adapter/routes/mcp.d.ts +9 -0
  282. package/dist/server/server-adapter/routes/mcp.d.ts.map +1 -0
  283. package/dist/server/server-adapter/routes/memory.d.ts +3 -0
  284. package/dist/server/server-adapter/routes/memory.d.ts.map +1 -0
  285. package/dist/server/server-adapter/routes/observability.d.ts +3 -0
  286. package/dist/server/server-adapter/routes/observability.d.ts.map +1 -0
  287. package/dist/server/server-adapter/routes/route-builder.d.ts +52 -0
  288. package/dist/server/server-adapter/routes/route-builder.d.ts.map +1 -0
  289. package/dist/server/server-adapter/routes/scorers.d.ts +3 -0
  290. package/dist/server/server-adapter/routes/scorers.d.ts.map +1 -0
  291. package/dist/server/server-adapter/routes/stored-agents.d.ts +8 -0
  292. package/dist/server/server-adapter/routes/stored-agents.d.ts.map +1 -0
  293. package/dist/server/server-adapter/routes/stream-types.d.ts +10 -0
  294. package/dist/server/server-adapter/routes/stream-types.d.ts.map +1 -0
  295. package/dist/server/server-adapter/routes/tools.d.ts +3 -0
  296. package/dist/server/server-adapter/routes/tools.d.ts.map +1 -0
  297. package/dist/server/server-adapter/routes/vectors.d.ts +3 -0
  298. package/dist/server/server-adapter/routes/vectors.d.ts.map +1 -0
  299. package/dist/server/server-adapter/routes/workflows.d.ts +3 -0
  300. package/dist/server/server-adapter/routes/workflows.d.ts.map +1 -0
  301. package/dist/server/utils.d.ts +45 -0
  302. package/dist/server/utils.d.ts.map +1 -0
  303. package/dist/token-6GSAFR2W-KVDFAJ2M-EPLMGMHT.cjs +63 -0
  304. package/dist/token-6GSAFR2W-KVDFAJ2M-EPLMGMHT.cjs.map +1 -0
  305. package/dist/token-6GSAFR2W-KVDFAJ2M-LNX5VF3I.js +61 -0
  306. package/dist/token-6GSAFR2W-KVDFAJ2M-LNX5VF3I.js.map +1 -0
  307. package/dist/token-6GSAFR2W-KVDFAJ2M-SDYXODLX.cjs +63 -0
  308. package/dist/token-6GSAFR2W-KVDFAJ2M-SDYXODLX.cjs.map +1 -0
  309. package/dist/token-6GSAFR2W-KVDFAJ2M-VW443KIA.js +61 -0
  310. package/dist/token-6GSAFR2W-KVDFAJ2M-VW443KIA.js.map +1 -0
  311. package/dist/token-util-NEHG7TUY-DJYRKLRD-6TH3ODCN.cjs +10 -0
  312. package/dist/token-util-NEHG7TUY-DJYRKLRD-6TH3ODCN.cjs.map +1 -0
  313. package/dist/token-util-NEHG7TUY-DJYRKLRD-BSQMRUEW.js +8 -0
  314. package/dist/token-util-NEHG7TUY-DJYRKLRD-BSQMRUEW.js.map +1 -0
  315. package/dist/token-util-NEHG7TUY-DJYRKLRD-EACKYD4V.js +8 -0
  316. package/dist/token-util-NEHG7TUY-DJYRKLRD-EACKYD4V.js.map +1 -0
  317. package/dist/token-util-NEHG7TUY-DJYRKLRD-MFJRDVVF.cjs +10 -0
  318. package/dist/token-util-NEHG7TUY-DJYRKLRD-MFJRDVVF.cjs.map +1 -0
  319. package/package.json +66 -17
  320. package/dist/chunk-2XIJW7SV.cjs +0 -310
  321. package/dist/chunk-2XIJW7SV.cjs.map +0 -1
  322. package/dist/chunk-4CEZIJWJ.cjs +0 -150
  323. package/dist/chunk-4CEZIJWJ.cjs.map +0 -1
  324. package/dist/chunk-4MQU6AJN.js +0 -83
  325. package/dist/chunk-4MQU6AJN.js.map +0 -1
  326. package/dist/chunk-4QSNRCOT.cjs +0 -20
  327. package/dist/chunk-4QSNRCOT.cjs.map +0 -1
  328. package/dist/chunk-4US5W7PH.cjs +0 -120
  329. package/dist/chunk-4US5W7PH.cjs.map +0 -1
  330. package/dist/chunk-7FHF55WA.js +0 -2041
  331. package/dist/chunk-7FHF55WA.js.map +0 -1
  332. package/dist/chunk-BED2O446.cjs +0 -88
  333. package/dist/chunk-BED2O446.cjs.map +0 -1
  334. package/dist/chunk-BUOJL3MN.js +0 -529
  335. package/dist/chunk-BUOJL3MN.js.map +0 -1
  336. package/dist/chunk-G7NVCO5M.js +0 -123
  337. package/dist/chunk-G7NVCO5M.js.map +0 -1
  338. package/dist/chunk-GASWM5HJ.cjs +0 -514
  339. package/dist/chunk-GASWM5HJ.cjs.map +0 -1
  340. package/dist/chunk-HWHKM67I.js +0 -114
  341. package/dist/chunk-HWHKM67I.js.map +0 -1
  342. package/dist/chunk-HXIOPAHT.js +0 -541
  343. package/dist/chunk-HXIOPAHT.js.map +0 -1
  344. package/dist/chunk-ILCWPBYQ.cjs +0 -147
  345. package/dist/chunk-ILCWPBYQ.cjs.map +0 -1
  346. package/dist/chunk-JGVY3KWV.cjs +0 -334
  347. package/dist/chunk-JGVY3KWV.cjs.map +0 -1
  348. package/dist/chunk-LRUH33B4.cjs +0 -2044
  349. package/dist/chunk-LRUH33B4.cjs.map +0 -1
  350. package/dist/chunk-MMROOK5J.js.map +0 -1
  351. package/dist/chunk-NCS2OXX6.cjs +0 -131
  352. package/dist/chunk-NCS2OXX6.cjs.map +0 -1
  353. package/dist/chunk-O4I563YW.cjs +0 -130
  354. package/dist/chunk-O4I563YW.cjs.map +0 -1
  355. package/dist/chunk-OW4FX5TS.js +0 -18
  356. package/dist/chunk-OW4FX5TS.js.map +0 -1
  357. package/dist/chunk-PZ5AY32C.js +0 -9
  358. package/dist/chunk-Q7SFCCGT.cjs +0 -11
  359. package/dist/chunk-S37KCGOR.cjs +0 -546
  360. package/dist/chunk-S37KCGOR.cjs.map +0 -1
  361. package/dist/chunk-SQ7KKIH3.js +0 -143
  362. package/dist/chunk-SQ7KKIH3.js.map +0 -1
  363. package/dist/chunk-TRWPUPGD.js +0 -506
  364. package/dist/chunk-TRWPUPGD.js.map +0 -1
  365. package/dist/chunk-TXAQU2XN.js +0 -151
  366. package/dist/chunk-TXAQU2XN.js.map +0 -1
  367. package/dist/chunk-UVOSITKU.js +0 -322
  368. package/dist/chunk-UVOSITKU.js.map +0 -1
  369. package/dist/chunk-VIGPWTDA.cjs +0 -159
  370. package/dist/chunk-VIGPWTDA.cjs.map +0 -1
  371. package/dist/chunk-VOY6MGKA.js +0 -301
  372. package/dist/chunk-VOY6MGKA.js.map +0 -1
  373. package/dist/chunk-X5SDJOY6.cjs +0 -557
  374. package/dist/chunk-X5SDJOY6.cjs.map +0 -1
  375. package/dist/chunk-Y3J45CVZ.js +0 -144
  376. package/dist/chunk-Y3J45CVZ.js.map +0 -1
  377. package/dist/chunk-YS66MFDY.js +0 -124
  378. package/dist/chunk-YS66MFDY.js.map +0 -1
  379. package/dist/server/handlers/legacyWorkflows.cjs +0 -48
  380. package/dist/server/handlers/legacyWorkflows.cjs.map +0 -1
  381. package/dist/server/handlers/legacyWorkflows.d.ts +0 -59
  382. package/dist/server/handlers/legacyWorkflows.d.ts.map +0 -1
  383. package/dist/server/handlers/legacyWorkflows.js +0 -3
  384. package/dist/server/handlers/legacyWorkflows.js.map +0 -1
  385. package/dist/server/handlers/network.cjs +0 -24
  386. package/dist/server/handlers/network.cjs.map +0 -1
  387. package/dist/server/handlers/network.d.ts +0 -50
  388. package/dist/server/handlers/network.d.ts.map +0 -1
  389. package/dist/server/handlers/network.js +0 -3
  390. package/dist/server/handlers/network.js.map +0 -1
  391. package/dist/server/handlers/telemetry.cjs +0 -16
  392. package/dist/server/handlers/telemetry.cjs.map +0 -1
  393. package/dist/server/handlers/telemetry.d.ts +0 -32
  394. package/dist/server/handlers/telemetry.d.ts.map +0 -1
  395. package/dist/server/handlers/telemetry.js +0 -3
  396. package/dist/server/handlers/telemetry.js.map +0 -1
  397. package/dist/server/handlers/vNextNetwork.cjs +0 -220
  398. package/dist/server/handlers/vNextNetwork.cjs.map +0 -1
  399. package/dist/server/handlers/vNextNetwork.d.ts +0 -179
  400. package/dist/server/handlers/vNextNetwork.d.ts.map +0 -1
  401. package/dist/server/handlers/vNextNetwork.js +0 -213
  402. package/dist/server/handlers/vNextNetwork.js.map +0 -1
@@ -0,0 +1,2774 @@
1
+ import { createJsonErrorResponseHandler, withoutTrailingSlash, loadApiKey, UnsupportedFunctionalityError, postJsonToApi, createJsonResponseHandler, combineHeaders, createEventSourceResponseHandler, generateId, InvalidResponseDataError, isParsableJson, InvalidPromptError, convertUint8ArrayToBase64, TooManyEmbeddingValuesForCallError, parseProviderOptions, convertBase64ToUint8Array, postFormDataToApi, createBinaryResponseHandler, APICallError } from './chunk-E646Y4FQ.js';
2
+ import { z } from 'zod';
3
+
4
+ function convertToOpenAIChatMessages({
5
+ prompt,
6
+ useLegacyFunctionCalling = false,
7
+ systemMessageMode = "system"
8
+ }) {
9
+ const messages = [];
10
+ const warnings = [];
11
+ for (const { role, content } of prompt) {
12
+ switch (role) {
13
+ case "system": {
14
+ switch (systemMessageMode) {
15
+ case "system": {
16
+ messages.push({ role: "system", content });
17
+ break;
18
+ }
19
+ case "developer": {
20
+ messages.push({ role: "developer", content });
21
+ break;
22
+ }
23
+ case "remove": {
24
+ warnings.push({
25
+ type: "other",
26
+ message: "system messages are removed for this model"
27
+ });
28
+ break;
29
+ }
30
+ default: {
31
+ const _exhaustiveCheck = systemMessageMode;
32
+ throw new Error(
33
+ `Unsupported system message mode: ${_exhaustiveCheck}`
34
+ );
35
+ }
36
+ }
37
+ break;
38
+ }
39
+ case "user": {
40
+ if (content.length === 1 && content[0].type === "text") {
41
+ messages.push({ role: "user", content: content[0].text });
42
+ break;
43
+ }
44
+ messages.push({
45
+ role: "user",
46
+ content: content.map((part, index) => {
47
+ var _a, _b, _c, _d;
48
+ switch (part.type) {
49
+ case "text": {
50
+ return { type: "text", text: part.text };
51
+ }
52
+ case "image": {
53
+ return {
54
+ type: "image_url",
55
+ image_url: {
56
+ url: part.image instanceof URL ? part.image.toString() : `data:${(_a = part.mimeType) != null ? _a : "image/jpeg"};base64,${convertUint8ArrayToBase64(part.image)}`,
57
+ // OpenAI specific extension: image detail
58
+ detail: (_c = (_b = part.providerMetadata) == null ? void 0 : _b.openai) == null ? void 0 : _c.imageDetail
59
+ }
60
+ };
61
+ }
62
+ case "file": {
63
+ if (part.data instanceof URL) {
64
+ throw new UnsupportedFunctionalityError({
65
+ functionality: "'File content parts with URL data' functionality not supported."
66
+ });
67
+ }
68
+ switch (part.mimeType) {
69
+ case "audio/wav": {
70
+ return {
71
+ type: "input_audio",
72
+ input_audio: { data: part.data, format: "wav" }
73
+ };
74
+ }
75
+ case "audio/mp3":
76
+ case "audio/mpeg": {
77
+ return {
78
+ type: "input_audio",
79
+ input_audio: { data: part.data, format: "mp3" }
80
+ };
81
+ }
82
+ case "application/pdf": {
83
+ return {
84
+ type: "file",
85
+ file: {
86
+ filename: (_d = part.filename) != null ? _d : `part-${index}.pdf`,
87
+ file_data: `data:application/pdf;base64,${part.data}`
88
+ }
89
+ };
90
+ }
91
+ default: {
92
+ throw new UnsupportedFunctionalityError({
93
+ functionality: `File content part type ${part.mimeType} in user messages`
94
+ });
95
+ }
96
+ }
97
+ }
98
+ }
99
+ })
100
+ });
101
+ break;
102
+ }
103
+ case "assistant": {
104
+ let text = "";
105
+ const toolCalls = [];
106
+ for (const part of content) {
107
+ switch (part.type) {
108
+ case "text": {
109
+ text += part.text;
110
+ break;
111
+ }
112
+ case "tool-call": {
113
+ toolCalls.push({
114
+ id: part.toolCallId,
115
+ type: "function",
116
+ function: {
117
+ name: part.toolName,
118
+ arguments: JSON.stringify(part.args)
119
+ }
120
+ });
121
+ break;
122
+ }
123
+ }
124
+ }
125
+ if (useLegacyFunctionCalling) {
126
+ if (toolCalls.length > 1) {
127
+ throw new UnsupportedFunctionalityError({
128
+ functionality: "useLegacyFunctionCalling with multiple tool calls in one message"
129
+ });
130
+ }
131
+ messages.push({
132
+ role: "assistant",
133
+ content: text,
134
+ function_call: toolCalls.length > 0 ? toolCalls[0].function : void 0
135
+ });
136
+ } else {
137
+ messages.push({
138
+ role: "assistant",
139
+ content: text,
140
+ tool_calls: toolCalls.length > 0 ? toolCalls : void 0
141
+ });
142
+ }
143
+ break;
144
+ }
145
+ case "tool": {
146
+ for (const toolResponse of content) {
147
+ if (useLegacyFunctionCalling) {
148
+ messages.push({
149
+ role: "function",
150
+ name: toolResponse.toolName,
151
+ content: JSON.stringify(toolResponse.result)
152
+ });
153
+ } else {
154
+ messages.push({
155
+ role: "tool",
156
+ tool_call_id: toolResponse.toolCallId,
157
+ content: JSON.stringify(toolResponse.result)
158
+ });
159
+ }
160
+ }
161
+ break;
162
+ }
163
+ default: {
164
+ const _exhaustiveCheck = role;
165
+ throw new Error(`Unsupported role: ${_exhaustiveCheck}`);
166
+ }
167
+ }
168
+ }
169
+ return { messages, warnings };
170
+ }
171
/**
 * Normalizes OpenAI `logprobs` response data into the SDK's camelCased shape.
 *
 * @param {object|null|undefined} logprobs - Raw `logprobs` object from the API.
 * @returns {Array|undefined} One entry per token with `topLogprobs` (empty
 *   array when the API returned none), or undefined when no content exists.
 */
function mapOpenAIChatLogProbsOutput(logprobs) {
  const entries = logprobs?.content;
  if (entries == null) {
    return undefined;
  }
  return entries.map(({ token, logprob, top_logprobs }) => ({
    token,
    logprob,
    topLogprobs: top_logprobs
      ? top_logprobs.map(({ token, logprob }) => ({ token, logprob }))
      : [],
  }));
}
182
/**
 * Maps OpenAI's `finish_reason` value to the SDK's finish-reason vocabulary.
 * Both legacy `function_call` and `tool_calls` collapse to "tool-calls";
 * anything unrecognized (including null/undefined) becomes "unknown".
 *
 * @param {string|null|undefined} finishReason - Raw API finish reason.
 * @returns {string} Normalized finish reason.
 */
function mapOpenAIFinishReason(finishReason) {
  const mapping = new Map([
    ["stop", "stop"],
    ["length", "length"],
    ["content_filter", "content-filter"],
    ["function_call", "tool-calls"],
    ["tool_calls", "tool-calls"],
  ]);
  return mapping.get(finishReason) ?? "unknown";
}
197
// Zod schema for OpenAI error response payloads. Only `message` is required;
// the remaining fields are typed loosely so OpenAI-compatible providers with
// slightly different error shapes still parse.
var openaiErrorDataSchema = z.object({
  error: z.object({
    message: z.string(),
    type: z.string().nullish(),
    param: z.any().nullish(),
    code: z.union([z.string(), z.number()]).nullish(),
  }),
});
208
// Shared handler that parses failed OpenAI HTTP responses via the error
// schema above and surfaces `error.message` as the thrown message.
var openaiFailedResponseHandler = createJsonErrorResponseHandler({
  errorSchema: openaiErrorDataSchema,
  errorToMessage: (data) => data.error.message,
});
212
/**
 * Extracts standardized response metadata from an OpenAI API response body.
 *
 * @param {object} response
 * @param {string|null} [response.id] - Response id.
 * @param {string|null} [response.model] - Model identifier.
 * @param {number|null} [response.created] - Unix timestamp in seconds.
 * @returns {{ id: (string|undefined), modelId: (string|undefined), timestamp: (Date|undefined) }}
 */
function getResponseMetadata({ id, model, created }) {
  return {
    id: id ?? undefined,
    modelId: model ?? undefined,
    // API reports seconds; Date expects milliseconds.
    timestamp: created != null ? new Date(created * 1000) : undefined,
  };
}
223
/**
 * Translates provider-neutral tool definitions and tool choice into OpenAI's
 * chat-completions request fields: `tools`/`tool_choice`, or the legacy
 * `functions`/`function_call` pair when `useLegacyFunctionCalling` is set.
 *
 * @param {object} options
 * @param {object} options.mode - Call mode carrying `tools` and `toolChoice`.
 * @param {boolean} [options.useLegacyFunctionCalling=false] - Emit legacy fields.
 * @param {boolean} [options.structuredOutputs] - Marks functions `strict` when true.
 * @returns {object} Request fields plus `toolWarnings` for unsupported tools.
 * @throws UnsupportedFunctionalityError for unsupported tool-choice combinations.
 */
function prepareTools({
  mode,
  useLegacyFunctionCalling = false,
  structuredOutputs,
}) {
  // Normalize an empty tools array to "no tools".
  const tools = mode.tools?.length ? mode.tools : undefined;
  const toolWarnings = [];

  if (tools == null) {
    return { tools: undefined, tool_choice: undefined, toolWarnings };
  }

  const toolChoice = mode.toolChoice;

  if (useLegacyFunctionCalling) {
    const openaiFunctions = [];
    for (const tool of tools) {
      if (tool.type === "provider-defined") {
        // Provider-defined tools cannot be expressed as legacy functions.
        toolWarnings.push({ type: "unsupported-tool", tool });
      } else {
        openaiFunctions.push({
          name: tool.name,
          description: tool.description,
          parameters: tool.parameters,
        });
      }
    }

    if (toolChoice == null) {
      return {
        functions: openaiFunctions,
        function_call: undefined,
        toolWarnings,
      };
    }

    switch (toolChoice.type) {
      case "auto":
      case "none":
      case undefined:
        return {
          functions: openaiFunctions,
          function_call: undefined,
          toolWarnings,
        };
      case "required":
        throw new UnsupportedFunctionalityError({
          functionality: "useLegacyFunctionCalling and toolChoice: required",
        });
      default:
        // "tool" (and any other value): force the named function.
        return {
          functions: openaiFunctions,
          function_call: { name: toolChoice.toolName },
          toolWarnings,
        };
    }
  }

  const openaiTools = [];
  for (const tool of tools) {
    if (tool.type === "provider-defined") {
      toolWarnings.push({ type: "unsupported-tool", tool });
    } else {
      openaiTools.push({
        type: "function",
        function: {
          name: tool.name,
          description: tool.description,
          parameters: tool.parameters,
          strict: structuredOutputs ? true : undefined,
        },
      });
    }
  }

  if (toolChoice == null) {
    return { tools: openaiTools, tool_choice: undefined, toolWarnings };
  }

  switch (toolChoice.type) {
    case "auto":
    case "none":
    case "required":
      return { tools: openaiTools, tool_choice: toolChoice.type, toolWarnings };
    case "tool":
      return {
        tools: openaiTools,
        tool_choice: {
          type: "function",
          function: { name: toolChoice.toolName },
        },
        toolWarnings,
      };
    default:
      throw new UnsupportedFunctionalityError({
        functionality: `Unsupported tool choice type: ${toolChoice.type}`,
      });
  }
}
321
// OpenAI Chat Completions language model (LanguageModelV1, specification
// "v1"), bundled from the AI SDK OpenAI provider. Maps SDK call options onto
// the /chat/completions wire format, including reasoning-model parameter
// stripping, legacy function calling, structured outputs, and incremental
// tool-call assembly while streaming.
var OpenAIChatLanguageModel = class {
  // config supplies provider name, url() builder, headers() and fetch;
  // settings carries the per-model OpenAI-specific options.
  constructor(modelId, settings, config) {
    this.specificationVersion = "v1";
    this.modelId = modelId;
    this.settings = settings;
    this.config = config;
  }
  // Explicit settings.structuredOutputs wins; otherwise reasoning models
  // default to structured outputs.
  get supportsStructuredOutputs() {
    var _a;
    return (_a = this.settings.structuredOutputs) != null ? _a : isReasoningModel(this.modelId);
  }
  // Audio-preview models only support object generation via tools; other
  // models prefer "json" when structured outputs are available.
  get defaultObjectGenerationMode() {
    if (isAudioModel(this.modelId)) {
      return "tool";
    }
    return this.supportsStructuredOutputs ? "json" : "tool";
  }
  get provider() {
    return this.config.provider;
  }
  // When downloadImages is set the SDK is expected to fetch images itself,
  // so image-URL pass-through is disabled.
  get supportsImageUrls() {
    return !this.settings.downloadImages;
  }
  // Builds the request body and collected warnings shared by doGenerate and
  // doStream. Throws UnsupportedFunctionalityError for impossible setting
  // combinations; unsupported-but-ignorable settings become warnings.
  getArgs({
    mode,
    prompt,
    maxTokens,
    temperature,
    topP,
    topK,
    frequencyPenalty,
    presencePenalty,
    stopSequences,
    responseFormat,
    seed,
    providerMetadata
  }) {
    var _a, _b, _c, _d, _e, _f, _g, _h;
    const type = mode.type;
    const warnings = [];
    // topK has no OpenAI chat-completions equivalent.
    if (topK != null) {
      warnings.push({
        type: "unsupported-setting",
        setting: "topK"
      });
    }
    // A JSON schema in responseFormat only takes effect with structured
    // outputs; otherwise warn and fall back to plain json_object below.
    if ((responseFormat == null ? void 0 : responseFormat.type) === "json" && responseFormat.schema != null && !this.supportsStructuredOutputs) {
      warnings.push({
        type: "unsupported-setting",
        setting: "responseFormat",
        details: "JSON response format schema is only supported with structuredOutputs"
      });
    }
    const useLegacyFunctionCalling = this.settings.useLegacyFunctionCalling;
    // The legacy "functions" API cannot express parallel tool calls nor
    // strict (structured-output) schemas, so both combinations are rejected.
    if (useLegacyFunctionCalling && this.settings.parallelToolCalls === true) {
      throw new UnsupportedFunctionalityError({
        functionality: "useLegacyFunctionCalling with parallelToolCalls"
      });
    }
    if (useLegacyFunctionCalling && this.supportsStructuredOutputs) {
      throw new UnsupportedFunctionalityError({
        functionality: "structuredOutputs with useLegacyFunctionCalling"
      });
    }
    const { messages, warnings: messageWarnings } = convertToOpenAIChatMessages(
      {
        prompt,
        useLegacyFunctionCalling,
        systemMessageMode: getSystemMessageMode(this.modelId)
      }
    );
    warnings.push(...messageWarnings);
    const baseArgs = {
      // model id:
      model: this.modelId,
      // model specific settings:
      logit_bias: this.settings.logitBias,
      logprobs: this.settings.logprobs === true || typeof this.settings.logprobs === "number" ? true : void 0,
      top_logprobs: typeof this.settings.logprobs === "number" ? this.settings.logprobs : typeof this.settings.logprobs === "boolean" ? this.settings.logprobs ? 0 : void 0 : void 0,
      user: this.settings.user,
      parallel_tool_calls: this.settings.parallelToolCalls,
      // standardized settings:
      max_tokens: maxTokens,
      temperature,
      top_p: topP,
      frequency_penalty: frequencyPenalty,
      presence_penalty: presencePenalty,
      response_format: (responseFormat == null ? void 0 : responseFormat.type) === "json" ? this.supportsStructuredOutputs && responseFormat.schema != null ? {
        type: "json_schema",
        json_schema: {
          schema: responseFormat.schema,
          strict: true,
          name: (_a = responseFormat.name) != null ? _a : "response",
          description: responseFormat.description
        }
      } : { type: "json_object" } : void 0,
      stop: stopSequences,
      seed,
      // openai specific settings:
      // TODO remove in next major version; we auto-map maxTokens now
      max_completion_tokens: (_b = providerMetadata == null ? void 0 : providerMetadata.openai) == null ? void 0 : _b.maxCompletionTokens,
      store: (_c = providerMetadata == null ? void 0 : providerMetadata.openai) == null ? void 0 : _c.store,
      metadata: (_d = providerMetadata == null ? void 0 : providerMetadata.openai) == null ? void 0 : _d.metadata,
      prediction: (_e = providerMetadata == null ? void 0 : providerMetadata.openai) == null ? void 0 : _e.prediction,
      reasoning_effort: (_g = (_f = providerMetadata == null ? void 0 : providerMetadata.openai) == null ? void 0 : _f.reasoningEffort) != null ? _g : this.settings.reasoningEffort,
      // messages:
      messages
    };
    // Reasoning models reject sampling and logprob parameters: clear each
    // one and surface a warning instead of letting the request fail.
    if (isReasoningModel(this.modelId)) {
      if (baseArgs.temperature != null) {
        baseArgs.temperature = void 0;
        warnings.push({
          type: "unsupported-setting",
          setting: "temperature",
          details: "temperature is not supported for reasoning models"
        });
      }
      if (baseArgs.top_p != null) {
        baseArgs.top_p = void 0;
        warnings.push({
          type: "unsupported-setting",
          setting: "topP",
          details: "topP is not supported for reasoning models"
        });
      }
      if (baseArgs.frequency_penalty != null) {
        baseArgs.frequency_penalty = void 0;
        warnings.push({
          type: "unsupported-setting",
          setting: "frequencyPenalty",
          details: "frequencyPenalty is not supported for reasoning models"
        });
      }
      if (baseArgs.presence_penalty != null) {
        baseArgs.presence_penalty = void 0;
        warnings.push({
          type: "unsupported-setting",
          setting: "presencePenalty",
          details: "presencePenalty is not supported for reasoning models"
        });
      }
      if (baseArgs.logit_bias != null) {
        baseArgs.logit_bias = void 0;
        warnings.push({
          type: "other",
          message: "logitBias is not supported for reasoning models"
        });
      }
      if (baseArgs.logprobs != null) {
        baseArgs.logprobs = void 0;
        warnings.push({
          type: "other",
          message: "logprobs is not supported for reasoning models"
        });
      }
      if (baseArgs.top_logprobs != null) {
        baseArgs.top_logprobs = void 0;
        warnings.push({
          type: "other",
          message: "topLogprobs is not supported for reasoning models"
        });
      }
      // Reasoning models take max_completion_tokens instead of max_tokens;
      // an explicit providerMetadata value wins over the auto-mapped one.
      if (baseArgs.max_tokens != null) {
        if (baseArgs.max_completion_tokens == null) {
          baseArgs.max_completion_tokens = baseArgs.max_tokens;
        }
        baseArgs.max_tokens = void 0;
      }
    } else if (this.modelId.startsWith("gpt-4o-search-preview") || this.modelId.startsWith("gpt-4o-mini-search-preview")) {
      // Search-preview models reject temperature only.
      if (baseArgs.temperature != null) {
        baseArgs.temperature = void 0;
        warnings.push({
          type: "unsupported-setting",
          setting: "temperature",
          details: "temperature is not supported for the search preview models and has been removed."
        });
      }
    }
    switch (type) {
      // Normal chat: tools/toolChoice mapped by prepareTools. In legacy
      // function-calling mode prepareTools returns functions/function_call
      // instead of tools/tool_choice (the unused pair is undefined).
      case "regular": {
        const { tools, tool_choice, functions, function_call, toolWarnings } = prepareTools({
          mode,
          useLegacyFunctionCalling,
          structuredOutputs: this.supportsStructuredOutputs
        });
        return {
          args: {
            ...baseArgs,
            tools,
            tool_choice,
            functions,
            function_call
          },
          warnings: [...warnings, ...toolWarnings]
        };
      }
      // Object generation via JSON response format; uses json_schema with
      // strict mode when structured outputs and a schema are available.
      case "object-json": {
        return {
          args: {
            ...baseArgs,
            response_format: this.supportsStructuredOutputs && mode.schema != null ? {
              type: "json_schema",
              json_schema: {
                schema: mode.schema,
                strict: true,
                name: (_h = mode.name) != null ? _h : "response",
                description: mode.description
              }
            } : { type: "json_object" }
          },
          warnings
        };
      }
      // Object generation by forcing a single tool call.
      case "object-tool": {
        return {
          args: useLegacyFunctionCalling ? {
            ...baseArgs,
            function_call: {
              name: mode.tool.name
            },
            functions: [
              {
                name: mode.tool.name,
                description: mode.tool.description,
                parameters: mode.tool.parameters
              }
            ]
          } : {
            ...baseArgs,
            tool_choice: {
              type: "function",
              function: { name: mode.tool.name }
            },
            tools: [
              {
                type: "function",
                function: {
                  name: mode.tool.name,
                  description: mode.tool.description,
                  parameters: mode.tool.parameters,
                  strict: this.supportsStructuredOutputs ? true : void 0
                }
              }
            ]
          },
          warnings
        };
      }
      default: {
        const _exhaustiveCheck = type;
        throw new Error(`Unsupported type: ${_exhaustiveCheck}`);
      }
    }
  }
  // Single (non-streaming) completion call. Returns text, tool calls, usage
  // and OpenAI-specific provider metadata (reasoning/prediction/cache token
  // counts when the response includes them).
  async doGenerate(options) {
    var _a, _b, _c, _d, _e, _f, _g, _h;
    const { args: body, warnings } = this.getArgs(options);
    const {
      responseHeaders,
      value: response,
      rawValue: rawResponse
    } = await postJsonToApi({
      url: this.config.url({
        path: "/chat/completions",
        modelId: this.modelId
      }),
      headers: combineHeaders(this.config.headers(), options.headers),
      body,
      failedResponseHandler: openaiFailedResponseHandler,
      successfulResponseHandler: createJsonResponseHandler(
        openaiChatResponseSchema
      ),
      abortSignal: options.abortSignal,
      fetch: this.config.fetch
    });
    const { messages: rawPrompt, ...rawSettings } = body;
    // Only the first choice is surfaced.
    const choice = response.choices[0];
    const completionTokenDetails = (_a = response.usage) == null ? void 0 : _a.completion_tokens_details;
    const promptTokenDetails = (_b = response.usage) == null ? void 0 : _b.prompt_tokens_details;
    const providerMetadata = { openai: {} };
    if ((completionTokenDetails == null ? void 0 : completionTokenDetails.reasoning_tokens) != null) {
      providerMetadata.openai.reasoningTokens = completionTokenDetails == null ? void 0 : completionTokenDetails.reasoning_tokens;
    }
    if ((completionTokenDetails == null ? void 0 : completionTokenDetails.accepted_prediction_tokens) != null) {
      providerMetadata.openai.acceptedPredictionTokens = completionTokenDetails == null ? void 0 : completionTokenDetails.accepted_prediction_tokens;
    }
    if ((completionTokenDetails == null ? void 0 : completionTokenDetails.rejected_prediction_tokens) != null) {
      providerMetadata.openai.rejectedPredictionTokens = completionTokenDetails == null ? void 0 : completionTokenDetails.rejected_prediction_tokens;
    }
    if ((promptTokenDetails == null ? void 0 : promptTokenDetails.cached_tokens) != null) {
      providerMetadata.openai.cachedPromptTokens = promptTokenDetails == null ? void 0 : promptTokenDetails.cached_tokens;
    }
    return {
      text: (_c = choice.message.content) != null ? _c : void 0,
      // Legacy function_call responses are normalized to the tool-call
      // shape; legacy calls have no id, so one is generated.
      toolCalls: this.settings.useLegacyFunctionCalling && choice.message.function_call ? [
        {
          toolCallType: "function",
          toolCallId: generateId(),
          toolName: choice.message.function_call.name,
          args: choice.message.function_call.arguments
        }
      ] : (_d = choice.message.tool_calls) == null ? void 0 : _d.map((toolCall) => {
        var _a2;
        return {
          toolCallType: "function",
          toolCallId: (_a2 = toolCall.id) != null ? _a2 : generateId(),
          toolName: toolCall.function.name,
          args: toolCall.function.arguments
        };
      }),
      finishReason: mapOpenAIFinishReason(choice.finish_reason),
      usage: {
        // NaN signals "unknown" token counts downstream.
        promptTokens: (_f = (_e = response.usage) == null ? void 0 : _e.prompt_tokens) != null ? _f : NaN,
        completionTokens: (_h = (_g = response.usage) == null ? void 0 : _g.completion_tokens) != null ? _h : NaN
      },
      rawCall: { rawPrompt, rawSettings },
      rawResponse: { headers: responseHeaders, body: rawResponse },
      request: { body: JSON.stringify(body) },
      response: getResponseMetadata(response),
      warnings,
      logprobs: mapOpenAIChatLogProbsOutput(choice.logprobs),
      providerMetadata
    };
  }
  // Streaming completion call. With settings.simulateStreaming the model is
  // invoked via doGenerate and the result is replayed as a synthetic stream.
  async doStream(options) {
    if (this.settings.simulateStreaming) {
      const result = await this.doGenerate(options);
      const simulatedStream = new ReadableStream({
        start(controller) {
          controller.enqueue({ type: "response-metadata", ...result.response });
          if (result.text) {
            controller.enqueue({
              type: "text-delta",
              textDelta: result.text
            });
          }
          if (result.toolCalls) {
            for (const toolCall of result.toolCalls) {
              controller.enqueue({
                type: "tool-call-delta",
                toolCallType: "function",
                toolCallId: toolCall.toolCallId,
                toolName: toolCall.toolName,
                argsTextDelta: toolCall.args
              });
              controller.enqueue({
                type: "tool-call",
                ...toolCall
              });
            }
          }
          controller.enqueue({
            type: "finish",
            finishReason: result.finishReason,
            usage: result.usage,
            logprobs: result.logprobs,
            providerMetadata: result.providerMetadata
          });
          controller.close();
        }
      });
      return {
        stream: simulatedStream,
        rawCall: result.rawCall,
        rawResponse: result.rawResponse,
        warnings: result.warnings
      };
    }
    const { args, warnings } = this.getArgs(options);
    const body = {
      ...args,
      stream: true,
      // only include stream_options when in strict compatibility mode:
      stream_options: this.config.compatibility === "strict" ? { include_usage: true } : void 0
    };
    const { responseHeaders, value: response } = await postJsonToApi({
      url: this.config.url({
        path: "/chat/completions",
        modelId: this.modelId
      }),
      headers: combineHeaders(this.config.headers(), options.headers),
      body,
      failedResponseHandler: openaiFailedResponseHandler,
      successfulResponseHandler: createEventSourceResponseHandler(
        openaiChatChunkSchema
      ),
      abortSignal: options.abortSignal,
      fetch: this.config.fetch
    });
    const { messages: rawPrompt, ...rawSettings } = args;
    // Accumulates tool calls across chunks, indexed by OpenAI's tool-call
    // index; arguments arrive as text fragments that are concatenated.
    const toolCalls = [];
    let finishReason = "unknown";
    let usage = {
      promptTokens: void 0,
      completionTokens: void 0
    };
    let logprobs;
    let isFirstChunk = true;
    const { useLegacyFunctionCalling } = this.settings;
    const providerMetadata = { openai: {} };
    return {
      stream: response.pipeThrough(
        new TransformStream({
          transform(chunk, controller) {
            var _a, _b, _c, _d, _e, _f, _g, _h, _i, _j, _k, _l;
            // Parse failures and inline provider errors both terminate
            // with an error event and finishReason "error".
            if (!chunk.success) {
              finishReason = "error";
              controller.enqueue({ type: "error", error: chunk.error });
              return;
            }
            const value = chunk.value;
            if ("error" in value) {
              finishReason = "error";
              controller.enqueue({ type: "error", error: value.error });
              return;
            }
            if (isFirstChunk) {
              isFirstChunk = false;
              controller.enqueue({
                type: "response-metadata",
                ...getResponseMetadata(value)
              });
            }
            // Usage usually arrives on the final chunk (stream_options
            // include_usage); token details feed provider metadata.
            if (value.usage != null) {
              const {
                prompt_tokens,
                completion_tokens,
                prompt_tokens_details,
                completion_tokens_details
              } = value.usage;
              usage = {
                promptTokens: prompt_tokens != null ? prompt_tokens : void 0,
                completionTokens: completion_tokens != null ? completion_tokens : void 0
              };
              if ((completion_tokens_details == null ? void 0 : completion_tokens_details.reasoning_tokens) != null) {
                providerMetadata.openai.reasoningTokens = completion_tokens_details == null ? void 0 : completion_tokens_details.reasoning_tokens;
              }
              if ((completion_tokens_details == null ? void 0 : completion_tokens_details.accepted_prediction_tokens) != null) {
                providerMetadata.openai.acceptedPredictionTokens = completion_tokens_details == null ? void 0 : completion_tokens_details.accepted_prediction_tokens;
              }
              if ((completion_tokens_details == null ? void 0 : completion_tokens_details.rejected_prediction_tokens) != null) {
                providerMetadata.openai.rejectedPredictionTokens = completion_tokens_details == null ? void 0 : completion_tokens_details.rejected_prediction_tokens;
              }
              if ((prompt_tokens_details == null ? void 0 : prompt_tokens_details.cached_tokens) != null) {
                providerMetadata.openai.cachedPromptTokens = prompt_tokens_details == null ? void 0 : prompt_tokens_details.cached_tokens;
              }
            }
            const choice = value.choices[0];
            if ((choice == null ? void 0 : choice.finish_reason) != null) {
              finishReason = mapOpenAIFinishReason(choice.finish_reason);
            }
            if ((choice == null ? void 0 : choice.delta) == null) {
              return;
            }
            const delta = choice.delta;
            if (delta.content != null) {
              controller.enqueue({
                type: "text-delta",
                textDelta: delta.content
              });
            }
            const mappedLogprobs = mapOpenAIChatLogProbsOutput(
              choice == null ? void 0 : choice.logprobs
            );
            if (mappedLogprobs == null ? void 0 : mappedLogprobs.length) {
              if (logprobs === void 0) logprobs = [];
              logprobs.push(...mappedLogprobs);
            }
            // Legacy function_call deltas are adapted to the tool-call
            // delta shape (always index 0, generated id).
            const mappedToolCalls = useLegacyFunctionCalling && delta.function_call != null ? [
              {
                type: "function",
                id: generateId(),
                function: delta.function_call,
                index: 0
              }
            ] : delta.tool_calls;
            if (mappedToolCalls != null) {
              for (const toolCallDelta of mappedToolCalls) {
                const index = toolCallDelta.index;
                // First delta for this index must carry type, id and
                // function name; later deltas only append argument text.
                if (toolCalls[index] == null) {
                  if (toolCallDelta.type !== "function") {
                    throw new InvalidResponseDataError({
                      data: toolCallDelta,
                      message: `Expected 'function' type.`
                    });
                  }
                  if (toolCallDelta.id == null) {
                    throw new InvalidResponseDataError({
                      data: toolCallDelta,
                      message: `Expected 'id' to be a string.`
                    });
                  }
                  if (((_a = toolCallDelta.function) == null ? void 0 : _a.name) == null) {
                    throw new InvalidResponseDataError({
                      data: toolCallDelta,
                      message: `Expected 'function.name' to be a string.`
                    });
                  }
                  toolCalls[index] = {
                    id: toolCallDelta.id,
                    type: "function",
                    function: {
                      name: toolCallDelta.function.name,
                      arguments: (_b = toolCallDelta.function.arguments) != null ? _b : ""
                    },
                    hasFinished: false
                  };
                  const toolCall2 = toolCalls[index];
                  // Some providers send the whole call in one delta: emit
                  // the delta and, if the args already parse, the final
                  // tool-call event immediately.
                  if (((_c = toolCall2.function) == null ? void 0 : _c.name) != null && ((_d = toolCall2.function) == null ? void 0 : _d.arguments) != null) {
                    if (toolCall2.function.arguments.length > 0) {
                      controller.enqueue({
                        type: "tool-call-delta",
                        toolCallType: "function",
                        toolCallId: toolCall2.id,
                        toolName: toolCall2.function.name,
                        argsTextDelta: toolCall2.function.arguments
                      });
                    }
                    if (isParsableJson(toolCall2.function.arguments)) {
                      controller.enqueue({
                        type: "tool-call",
                        toolCallType: "function",
                        toolCallId: (_e = toolCall2.id) != null ? _e : generateId(),
                        toolName: toolCall2.function.name,
                        args: toolCall2.function.arguments
                      });
                      toolCall2.hasFinished = true;
                    }
                  }
                  continue;
                }
                const toolCall = toolCalls[index];
                if (toolCall.hasFinished) {
                  continue;
                }
                if (((_f = toolCallDelta.function) == null ? void 0 : _f.arguments) != null) {
                  toolCall.function.arguments += (_h = (_g = toolCallDelta.function) == null ? void 0 : _g.arguments) != null ? _h : "";
                }
                controller.enqueue({
                  type: "tool-call-delta",
                  toolCallType: "function",
                  toolCallId: toolCall.id,
                  toolName: toolCall.function.name,
                  argsTextDelta: (_i = toolCallDelta.function.arguments) != null ? _i : ""
                });
                // The accumulated args becoming valid JSON is the signal
                // that the tool call is complete.
                if (((_j = toolCall.function) == null ? void 0 : _j.name) != null && ((_k = toolCall.function) == null ? void 0 : _k.arguments) != null && isParsableJson(toolCall.function.arguments)) {
                  controller.enqueue({
                    type: "tool-call",
                    toolCallType: "function",
                    toolCallId: (_l = toolCall.id) != null ? _l : generateId(),
                    toolName: toolCall.function.name,
                    args: toolCall.function.arguments
                  });
                  toolCall.hasFinished = true;
                }
              }
            }
          },
          // Emit the terminal finish event with accumulated usage/logprobs.
          flush(controller) {
            var _a, _b;
            controller.enqueue({
              type: "finish",
              finishReason,
              logprobs,
              usage: {
                promptTokens: (_a = usage.promptTokens) != null ? _a : NaN,
                completionTokens: (_b = usage.completionTokens) != null ? _b : NaN
              },
              ...providerMetadata != null ? { providerMetadata } : {}
            });
          }
        })
      ),
      rawCall: { rawPrompt, rawSettings },
      rawResponse: { headers: responseHeaders },
      request: { body: JSON.stringify(body) },
      warnings
    };
  }
};
901
// Usage block shared by the chat response and streaming-chunk schemas.
// Everything is nullish because OpenAI-compatible providers omit usage
// fields (or the whole object) freely.
var openaiTokenUsageSchema = z
  .object({
    prompt_tokens: z.number().nullish(),
    completion_tokens: z.number().nullish(),
    prompt_tokens_details: z
      .object({
        cached_tokens: z.number().nullish()
      })
      .nullish(),
    completion_tokens_details: z
      .object({
        reasoning_tokens: z.number().nullish(),
        accepted_prediction_tokens: z.number().nullish(),
        rejected_prediction_tokens: z.number().nullish()
      })
      .nullish()
  })
  .nullish();
913
// Minimal schema for a non-streaming /chat/completions response: only the
// fields the provider actually reads are validated, so unknown or extra
// fields from OpenAI-compatible servers pass through untouched.
var openaiChatResponseSchema = z.object({
  id: z.string().nullish(),
  created: z.number().nullish(),
  model: z.string().nullish(),
  choices: z.array(
    z.object({
      message: z.object({
        role: z.literal("assistant").nullish(),
        content: z.string().nullish(),
        // Legacy function-calling shape.
        function_call: z
          .object({ arguments: z.string(), name: z.string() })
          .nullish(),
        // Modern tool-calling shape.
        tool_calls: z
          .array(
            z.object({
              id: z.string().nullish(),
              type: z.literal("function"),
              function: z.object({
                name: z.string(),
                arguments: z.string()
              })
            })
          )
          .nullish()
      }),
      index: z.number(),
      logprobs: z
        .object({
          content: z
            .array(
              z.object({
                token: z.string(),
                logprob: z.number(),
                top_logprobs: z.array(
                  z.object({
                    token: z.string(),
                    logprob: z.number()
                  })
                )
              })
            )
            .nullable()
        })
        .nullish(),
      finish_reason: z.string().nullish()
    })
  ),
  usage: openaiTokenUsageSchema
});
957
// Streaming SSE chunk: either a regular chat-completions delta chunk or a
// bare error payload — some OpenAI-compatible providers stream errors as
// in-band events, which the transform in doStream handles via "error" in value.
var openaiChatChunkSchema = z.union([
  z.object({
    id: z.string().nullish(),
    created: z.number().nullish(),
    model: z.string().nullish(),
    choices: z.array(
      z.object({
        delta: z
          .object({
            role: z.enum(["assistant"]).nullish(),
            content: z.string().nullish(),
            // Legacy function-calling delta; both fields stream in pieces.
            function_call: z
              .object({
                name: z.string().optional(),
                arguments: z.string().optional()
              })
              .nullish(),
            // Tool-call deltas; only `index` is guaranteed on every piece.
            tool_calls: z
              .array(
                z.object({
                  index: z.number(),
                  id: z.string().nullish(),
                  type: z.literal("function").nullish(),
                  function: z.object({
                    name: z.string().nullish(),
                    arguments: z.string().nullish()
                  })
                })
              )
              .nullish()
          })
          .nullish(),
        logprobs: z
          .object({
            content: z
              .array(
                z.object({
                  token: z.string(),
                  logprob: z.number(),
                  top_logprobs: z.array(
                    z.object({
                      token: z.string(),
                      logprob: z.number()
                    })
                  )
                })
              )
              .nullable()
          })
          .nullish(),
        finish_reason: z.string().nullish(),
        index: z.number()
      })
    ),
    usage: openaiTokenUsageSchema
  }),
  openaiErrorDataSchema
]);
1005
// Returns true for OpenAI reasoning models (o-series such as o1/o3/o4, and
// gpt-5), which take max_completion_tokens and reject sampling parameters.
// Note: the previous bare `startsWith("o")` check also matched unrelated
// model ids that merely begin with "o" (e.g. "omni-moderation-latest"),
// wrongly stripping temperature/topP from them; require an o-series digit.
function isReasoningModel(modelId) {
  return /^o\d/.test(modelId) || modelId.startsWith("gpt-5");
}
1008
// True for gpt-4o audio-preview models (including dated variants), which
// only support tool-mode object generation.
function isAudioModel(modelId) {
  const audioPrefix = "gpt-4o-audio-preview";
  return modelId.slice(0, audioPrefix.length) === audioPrefix;
}
1011
// How a system prompt should be delivered for the given model: regular
// "system" messages for non-reasoning models; for reasoning models, the
// per-model override from `reasoningModels` ("remove" for early o1 models),
// defaulting to "developer".
function getSystemMessageMode(modelId) {
  if (!isReasoningModel(modelId)) {
    return "system";
  }
  const entry = reasoningModels[modelId];
  if (entry != null && entry.systemMessageMode != null) {
    return entry.systemMessageMode;
  }
  return "developer";
}
1018
// Per-model system-message handling for reasoning models. Early o1 models
// accept no system/developer message at all ("remove"); o3/o4 models take a
// "developer" message. Models missing here fall back to "developer" in
// getSystemMessageMode.
var reasoningModels = (() => {
  const removeSystemMessage = [
    "o1-mini",
    "o1-mini-2024-09-12",
    "o1-preview",
    "o1-preview-2024-09-12"
  ];
  const developerSystemMessage = [
    "o3",
    "o3-2025-04-16",
    "o3-mini",
    "o3-mini-2025-01-31",
    "o4-mini",
    "o4-mini-2025-04-16"
  ];
  const table = {};
  for (const id of removeSystemMessage) {
    table[id] = { systemMessageMode: "remove" };
  }
  for (const id of developerSystemMessage) {
    table[id] = { systemMessageMode: "developer" };
  }
  return table;
})();
1050
// Flattens an SDK prompt (system/user/assistant messages) into the single
// text prompt used by the legacy /completions endpoint.
//
// Returns { prompt } for the single-text-user fast path, otherwise
// { prompt, stopSequences } where the stop sequence prevents the model from
// generating the next user turn.
//
// Throws InvalidPromptError for a system message after the first position,
// and UnsupportedFunctionalityError for images, tool calls, or tool messages
// (none of which the completions API can express).
function convertToOpenAICompletionPrompt({
  prompt,
  inputFormat,
  user = "user",
  assistant = "assistant"
}) {
  // Fast path: a bare single-text user prompt maps straight through.
  if (inputFormat === "prompt" && prompt.length === 1 && prompt[0].role === "user" && prompt[0].content.length === 1 && prompt[0].content[0].type === "text") {
    return { prompt: prompt[0].content[0].text };
  }
  let text = "";
  // A leading system message becomes a preamble before the first turn.
  if (prompt[0].role === "system") {
    text += `${prompt[0].content}\n\n`;
    prompt = prompt.slice(1);
  }
  for (const { role, content } of prompt) {
    switch (role) {
      case "system": {
        // BUG FIX: this message was a plain double-quoted string, so the
        // "${content}" placeholder was emitted literally instead of being
        // interpolated; use a template literal.
        throw new InvalidPromptError({
          message: `Unexpected system message in prompt: ${content}`,
          prompt
        });
      }
      case "user": {
        const userMessage = content.map((part) => {
          switch (part.type) {
            case "text": {
              return part.text;
            }
            case "image": {
              throw new UnsupportedFunctionalityError({
                functionality: "images"
              });
            }
          }
        }).join("");
        text += `${user}:\n${userMessage}\n\n`;
        break;
      }
      case "assistant": {
        const assistantMessage = content.map((part) => {
          switch (part.type) {
            case "text": {
              return part.text;
            }
            case "tool-call": {
              throw new UnsupportedFunctionalityError({
                functionality: "tool-call messages"
              });
            }
          }
        }).join("");
        text += `${assistant}:\n${assistantMessage}\n\n`;
        break;
      }
      case "tool": {
        throw new UnsupportedFunctionalityError({
          functionality: "tool messages"
        });
      }
      default: {
        const _exhaustiveCheck = role;
        throw new Error(`Unsupported role: ${_exhaustiveCheck}`);
      }
    }
  }
  // Leave the prompt poised for the assistant's next turn.
  text += `${assistant}:\n`;
  return {
    prompt: text,
    stopSequences: [`\n${user}:`]
  };
}
1131
// Translates the completions-API logprobs payload (parallel arrays of
// tokens / token_logprobs / top_logprobs records) into the provider-neutral
// [{ token, logprob, topLogprobs }] shape. Returns undefined when no
// logprobs were supplied.
function mapOpenAICompletionLogProbs(logprobs) {
  if (logprobs == null) {
    return void 0;
  }
  return logprobs.tokens.map((token, index) => {
    const candidates = logprobs.top_logprobs
      ? Object.entries(logprobs.top_logprobs[index]).map(([candidate, logprob]) => ({
          token: candidate,
          logprob
        }))
      : [];
    return {
      token,
      logprob: logprobs.token_logprobs[index],
      topLogprobs: candidates
    };
  });
}
1143
// OpenAI legacy Completions language model (LanguageModelV1, specification
// "v1"). Flattens chat-style prompts into a single text prompt via
// convertToOpenAICompletionPrompt and calls the /completions endpoint.
// Object generation is unsupported (defaultObjectGenerationMode is
// undefined and both object modes throw).
var OpenAICompletionLanguageModel = class {
  constructor(modelId, settings, config) {
    this.specificationVersion = "v1";
    this.defaultObjectGenerationMode = void 0;
    this.modelId = modelId;
    this.settings = settings;
    this.config = config;
  }
  get provider() {
    return this.config.provider;
  }
  // Builds { args, warnings } shared by doGenerate and doStream. Throws for
  // tools/toolChoice and for either object-generation mode, none of which
  // the completions API supports.
  getArgs({
    mode,
    inputFormat,
    prompt,
    maxTokens,
    temperature,
    topP,
    topK,
    frequencyPenalty,
    presencePenalty,
    stopSequences: userStopSequences,
    responseFormat,
    seed
  }) {
    var _a;
    const type = mode.type;
    const warnings = [];
    // topK has no OpenAI equivalent.
    if (topK != null) {
      warnings.push({
        type: "unsupported-setting",
        setting: "topK"
      });
    }
    if (responseFormat != null && responseFormat.type !== "text") {
      warnings.push({
        type: "unsupported-setting",
        setting: "responseFormat",
        details: "JSON response format is not supported."
      });
    }
    const { prompt: completionPrompt, stopSequences } = convertToOpenAICompletionPrompt({ prompt, inputFormat });
    // Merge prompt-derived stop sequences (e.g. "\nuser:") with the
    // caller-supplied ones.
    const stop = [...stopSequences != null ? stopSequences : [], ...userStopSequences != null ? userStopSequences : []];
    const baseArgs = {
      // model id:
      model: this.modelId,
      // model specific settings:
      echo: this.settings.echo,
      logit_bias: this.settings.logitBias,
      logprobs: typeof this.settings.logprobs === "number" ? this.settings.logprobs : typeof this.settings.logprobs === "boolean" ? this.settings.logprobs ? 0 : void 0 : void 0,
      suffix: this.settings.suffix,
      user: this.settings.user,
      // standardized settings:
      max_tokens: maxTokens,
      temperature,
      top_p: topP,
      frequency_penalty: frequencyPenalty,
      presence_penalty: presencePenalty,
      seed,
      // prompt:
      prompt: completionPrompt,
      // stop sequences:
      stop: stop.length > 0 ? stop : void 0
    };
    switch (type) {
      case "regular": {
        if ((_a = mode.tools) == null ? void 0 : _a.length) {
          throw new UnsupportedFunctionalityError({
            functionality: "tools"
          });
        }
        if (mode.toolChoice) {
          throw new UnsupportedFunctionalityError({
            functionality: "toolChoice"
          });
        }
        return { args: baseArgs, warnings };
      }
      case "object-json": {
        throw new UnsupportedFunctionalityError({
          functionality: "object-json mode"
        });
      }
      case "object-tool": {
        throw new UnsupportedFunctionalityError({
          functionality: "object-tool mode"
        });
      }
      default: {
        const _exhaustiveCheck = type;
        throw new Error(`Unsupported type: ${_exhaustiveCheck}`);
      }
    }
  }
  // Single (non-streaming) completion call against /completions.
  async doGenerate(options) {
    const { args, warnings } = this.getArgs(options);
    const {
      responseHeaders,
      value: response,
      rawValue: rawResponse
    } = await postJsonToApi({
      url: this.config.url({
        path: "/completions",
        modelId: this.modelId
      }),
      headers: combineHeaders(this.config.headers(), options.headers),
      body: args,
      failedResponseHandler: openaiFailedResponseHandler,
      successfulResponseHandler: createJsonResponseHandler(
        openaiCompletionResponseSchema
      ),
      abortSignal: options.abortSignal,
      fetch: this.config.fetch
    });
    const { prompt: rawPrompt, ...rawSettings } = args;
    // Only the first choice is surfaced.
    const choice = response.choices[0];
    return {
      text: choice.text,
      usage: {
        promptTokens: response.usage.prompt_tokens,
        completionTokens: response.usage.completion_tokens
      },
      finishReason: mapOpenAIFinishReason(choice.finish_reason),
      logprobs: mapOpenAICompletionLogProbs(choice.logprobs),
      rawCall: { rawPrompt, rawSettings },
      rawResponse: { headers: responseHeaders, body: rawResponse },
      response: getResponseMetadata(response),
      warnings,
      request: { body: JSON.stringify(args) }
    };
  }
  // Streaming completion call; text chunks map directly to text-delta
  // events and usage/logprobs are accumulated until flush.
  async doStream(options) {
    const { args, warnings } = this.getArgs(options);
    const body = {
      ...args,
      stream: true,
      // only include stream_options when in strict compatibility mode:
      stream_options: this.config.compatibility === "strict" ? { include_usage: true } : void 0
    };
    const { responseHeaders, value: response } = await postJsonToApi({
      url: this.config.url({
        path: "/completions",
        modelId: this.modelId
      }),
      headers: combineHeaders(this.config.headers(), options.headers),
      body,
      failedResponseHandler: openaiFailedResponseHandler,
      successfulResponseHandler: createEventSourceResponseHandler(
        openaiCompletionChunkSchema
      ),
      abortSignal: options.abortSignal,
      fetch: this.config.fetch
    });
    const { prompt: rawPrompt, ...rawSettings } = args;
    let finishReason = "unknown";
    let usage = {
      promptTokens: Number.NaN,
      completionTokens: Number.NaN
    };
    let logprobs;
    let isFirstChunk = true;
    return {
      stream: response.pipeThrough(
        new TransformStream({
          transform(chunk, controller) {
            // Parse failures and inline provider errors both terminate
            // with an error event and finishReason "error".
            if (!chunk.success) {
              finishReason = "error";
              controller.enqueue({ type: "error", error: chunk.error });
              return;
            }
            const value = chunk.value;
            if ("error" in value) {
              finishReason = "error";
              controller.enqueue({ type: "error", error: value.error });
              return;
            }
            if (isFirstChunk) {
              isFirstChunk = false;
              controller.enqueue({
                type: "response-metadata",
                ...getResponseMetadata(value)
              });
            }
            // Usage usually arrives on the final chunk when
            // stream_options.include_usage is set.
            if (value.usage != null) {
              usage = {
                promptTokens: value.usage.prompt_tokens,
                completionTokens: value.usage.completion_tokens
              };
            }
            const choice = value.choices[0];
            if ((choice == null ? void 0 : choice.finish_reason) != null) {
              finishReason = mapOpenAIFinishReason(choice.finish_reason);
            }
            if ((choice == null ? void 0 : choice.text) != null) {
              controller.enqueue({
                type: "text-delta",
                textDelta: choice.text
              });
            }
            const mappedLogprobs = mapOpenAICompletionLogProbs(
              choice == null ? void 0 : choice.logprobs
            );
            if (mappedLogprobs == null ? void 0 : mappedLogprobs.length) {
              if (logprobs === void 0) logprobs = [];
              logprobs.push(...mappedLogprobs);
            }
          },
          // Emit the terminal finish event with accumulated state.
          flush(controller) {
            controller.enqueue({
              type: "finish",
              finishReason,
              logprobs,
              usage
            });
          }
        })
      ),
      rawCall: { rawPrompt, rawSettings },
      rawResponse: { headers: responseHeaders },
      warnings,
      request: { body: JSON.stringify(body) }
    };
  }
};
1367
+ var openaiCompletionResponseSchema = z.object({
1368
+ id: z.string().nullish(),
1369
+ created: z.number().nullish(),
1370
+ model: z.string().nullish(),
1371
+ choices: z.array(
1372
+ z.object({
1373
+ text: z.string(),
1374
+ finish_reason: z.string(),
1375
+ logprobs: z.object({
1376
+ tokens: z.array(z.string()),
1377
+ token_logprobs: z.array(z.number()),
1378
+ top_logprobs: z.array(z.record(z.string(), z.number())).nullable()
1379
+ }).nullish()
1380
+ })
1381
+ ),
1382
+ usage: z.object({
1383
+ prompt_tokens: z.number(),
1384
+ completion_tokens: z.number()
1385
+ })
1386
+ });
1387
+ var openaiCompletionChunkSchema = z.union([
1388
+ z.object({
1389
+ id: z.string().nullish(),
1390
+ created: z.number().nullish(),
1391
+ model: z.string().nullish(),
1392
+ choices: z.array(
1393
+ z.object({
1394
+ text: z.string(),
1395
+ finish_reason: z.string().nullish(),
1396
+ index: z.number(),
1397
+ logprobs: z.object({
1398
+ tokens: z.array(z.string()),
1399
+ token_logprobs: z.array(z.number()),
1400
+ top_logprobs: z.array(z.record(z.string(), z.number())).nullable()
1401
+ }).nullish()
1402
+ })
1403
+ ),
1404
+ usage: z.object({
1405
+ prompt_tokens: z.number(),
1406
+ completion_tokens: z.number()
1407
+ }).nullish()
1408
+ }),
1409
+ openaiErrorDataSchema
1410
+ ]);
1411
+ var OpenAIEmbeddingModel = class {
1412
+ constructor(modelId, settings, config) {
1413
+ this.specificationVersion = "v1";
1414
+ this.modelId = modelId;
1415
+ this.settings = settings;
1416
+ this.config = config;
1417
+ }
1418
+ get provider() {
1419
+ return this.config.provider;
1420
+ }
1421
+ get maxEmbeddingsPerCall() {
1422
+ var _a;
1423
+ return (_a = this.settings.maxEmbeddingsPerCall) != null ? _a : 2048;
1424
+ }
1425
+ get supportsParallelCalls() {
1426
+ var _a;
1427
+ return (_a = this.settings.supportsParallelCalls) != null ? _a : true;
1428
+ }
1429
+ async doEmbed({
1430
+ values,
1431
+ headers,
1432
+ abortSignal
1433
+ }) {
1434
+ if (values.length > this.maxEmbeddingsPerCall) {
1435
+ throw new TooManyEmbeddingValuesForCallError({
1436
+ provider: this.provider,
1437
+ modelId: this.modelId,
1438
+ maxEmbeddingsPerCall: this.maxEmbeddingsPerCall,
1439
+ values
1440
+ });
1441
+ }
1442
+ const { responseHeaders, value: response } = await postJsonToApi({
1443
+ url: this.config.url({
1444
+ path: "/embeddings",
1445
+ modelId: this.modelId
1446
+ }),
1447
+ headers: combineHeaders(this.config.headers(), headers),
1448
+ body: {
1449
+ model: this.modelId,
1450
+ input: values,
1451
+ encoding_format: "float",
1452
+ dimensions: this.settings.dimensions,
1453
+ user: this.settings.user
1454
+ },
1455
+ failedResponseHandler: openaiFailedResponseHandler,
1456
+ successfulResponseHandler: createJsonResponseHandler(
1457
+ openaiTextEmbeddingResponseSchema
1458
+ ),
1459
+ abortSignal,
1460
+ fetch: this.config.fetch
1461
+ });
1462
+ return {
1463
+ embeddings: response.data.map((item) => item.embedding),
1464
+ usage: response.usage ? { tokens: response.usage.prompt_tokens } : void 0,
1465
+ rawResponse: { headers: responseHeaders }
1466
+ };
1467
+ }
1468
+ };
1469
+ var openaiTextEmbeddingResponseSchema = z.object({
1470
+ data: z.array(z.object({ embedding: z.array(z.number()) })),
1471
+ usage: z.object({ prompt_tokens: z.number() }).nullish()
1472
+ });
1473
+ var modelMaxImagesPerCall = {
1474
+ "dall-e-3": 1,
1475
+ "dall-e-2": 10,
1476
+ "gpt-image-1": 10
1477
+ };
1478
+ var hasDefaultResponseFormat = /* @__PURE__ */ new Set(["gpt-image-1"]);
1479
+ var OpenAIImageModel = class {
1480
+ constructor(modelId, settings, config) {
1481
+ this.modelId = modelId;
1482
+ this.settings = settings;
1483
+ this.config = config;
1484
+ this.specificationVersion = "v1";
1485
+ }
1486
+ get maxImagesPerCall() {
1487
+ var _a, _b;
1488
+ return (_b = (_a = this.settings.maxImagesPerCall) != null ? _a : modelMaxImagesPerCall[this.modelId]) != null ? _b : 1;
1489
+ }
1490
+ get provider() {
1491
+ return this.config.provider;
1492
+ }
1493
+ async doGenerate({
1494
+ prompt,
1495
+ n,
1496
+ size,
1497
+ aspectRatio,
1498
+ seed,
1499
+ providerOptions,
1500
+ headers,
1501
+ abortSignal
1502
+ }) {
1503
+ var _a, _b, _c, _d;
1504
+ const warnings = [];
1505
+ if (aspectRatio != null) {
1506
+ warnings.push({
1507
+ type: "unsupported-setting",
1508
+ setting: "aspectRatio",
1509
+ details: "This model does not support aspect ratio. Use `size` instead."
1510
+ });
1511
+ }
1512
+ if (seed != null) {
1513
+ warnings.push({ type: "unsupported-setting", setting: "seed" });
1514
+ }
1515
+ const currentDate = (_c = (_b = (_a = this.config._internal) == null ? void 0 : _a.currentDate) == null ? void 0 : _b.call(_a)) != null ? _c : /* @__PURE__ */ new Date();
1516
+ const { value: response, responseHeaders } = await postJsonToApi({
1517
+ url: this.config.url({
1518
+ path: "/images/generations",
1519
+ modelId: this.modelId
1520
+ }),
1521
+ headers: combineHeaders(this.config.headers(), headers),
1522
+ body: {
1523
+ model: this.modelId,
1524
+ prompt,
1525
+ n,
1526
+ size,
1527
+ ...(_d = providerOptions.openai) != null ? _d : {},
1528
+ ...!hasDefaultResponseFormat.has(this.modelId) ? { response_format: "b64_json" } : {}
1529
+ },
1530
+ failedResponseHandler: openaiFailedResponseHandler,
1531
+ successfulResponseHandler: createJsonResponseHandler(
1532
+ openaiImageResponseSchema
1533
+ ),
1534
+ abortSignal,
1535
+ fetch: this.config.fetch
1536
+ });
1537
+ return {
1538
+ images: response.data.map((item) => item.b64_json),
1539
+ warnings,
1540
+ response: {
1541
+ timestamp: currentDate,
1542
+ modelId: this.modelId,
1543
+ headers: responseHeaders
1544
+ }
1545
+ };
1546
+ }
1547
+ };
1548
+ var openaiImageResponseSchema = z.object({
1549
+ data: z.array(z.object({ b64_json: z.string() }))
1550
+ });
1551
+ var openAIProviderOptionsSchema = z.object({
1552
+ include: z.array(z.string()).nullish(),
1553
+ language: z.string().nullish(),
1554
+ prompt: z.string().nullish(),
1555
+ temperature: z.number().min(0).max(1).nullish().default(0),
1556
+ timestampGranularities: z.array(z.enum(["word", "segment"])).nullish().default(["segment"])
1557
+ });
1558
+ var languageMap = {
1559
+ afrikaans: "af",
1560
+ arabic: "ar",
1561
+ armenian: "hy",
1562
+ azerbaijani: "az",
1563
+ belarusian: "be",
1564
+ bosnian: "bs",
1565
+ bulgarian: "bg",
1566
+ catalan: "ca",
1567
+ chinese: "zh",
1568
+ croatian: "hr",
1569
+ czech: "cs",
1570
+ danish: "da",
1571
+ dutch: "nl",
1572
+ english: "en",
1573
+ estonian: "et",
1574
+ finnish: "fi",
1575
+ french: "fr",
1576
+ galician: "gl",
1577
+ german: "de",
1578
+ greek: "el",
1579
+ hebrew: "he",
1580
+ hindi: "hi",
1581
+ hungarian: "hu",
1582
+ icelandic: "is",
1583
+ indonesian: "id",
1584
+ italian: "it",
1585
+ japanese: "ja",
1586
+ kannada: "kn",
1587
+ kazakh: "kk",
1588
+ korean: "ko",
1589
+ latvian: "lv",
1590
+ lithuanian: "lt",
1591
+ macedonian: "mk",
1592
+ malay: "ms",
1593
+ marathi: "mr",
1594
+ maori: "mi",
1595
+ nepali: "ne",
1596
+ norwegian: "no",
1597
+ persian: "fa",
1598
+ polish: "pl",
1599
+ portuguese: "pt",
1600
+ romanian: "ro",
1601
+ russian: "ru",
1602
+ serbian: "sr",
1603
+ slovak: "sk",
1604
+ slovenian: "sl",
1605
+ spanish: "es",
1606
+ swahili: "sw",
1607
+ swedish: "sv",
1608
+ tagalog: "tl",
1609
+ tamil: "ta",
1610
+ thai: "th",
1611
+ turkish: "tr",
1612
+ ukrainian: "uk",
1613
+ urdu: "ur",
1614
+ vietnamese: "vi",
1615
+ welsh: "cy"
1616
+ };
1617
+ var OpenAITranscriptionModel = class {
1618
+ constructor(modelId, config) {
1619
+ this.modelId = modelId;
1620
+ this.config = config;
1621
+ this.specificationVersion = "v1";
1622
+ }
1623
+ get provider() {
1624
+ return this.config.provider;
1625
+ }
1626
+ getArgs({
1627
+ audio,
1628
+ mediaType,
1629
+ providerOptions
1630
+ }) {
1631
+ var _a, _b, _c, _d, _e;
1632
+ const warnings = [];
1633
+ const openAIOptions = parseProviderOptions({
1634
+ provider: "openai",
1635
+ providerOptions,
1636
+ schema: openAIProviderOptionsSchema
1637
+ });
1638
+ const formData = new FormData();
1639
+ const blob = audio instanceof Uint8Array ? new Blob([audio]) : new Blob([convertBase64ToUint8Array(audio)]);
1640
+ formData.append("model", this.modelId);
1641
+ formData.append("file", new File([blob], "audio", { type: mediaType }));
1642
+ if (openAIOptions) {
1643
+ const transcriptionModelOptions = {
1644
+ include: (_a = openAIOptions.include) != null ? _a : void 0,
1645
+ language: (_b = openAIOptions.language) != null ? _b : void 0,
1646
+ prompt: (_c = openAIOptions.prompt) != null ? _c : void 0,
1647
+ temperature: (_d = openAIOptions.temperature) != null ? _d : void 0,
1648
+ timestamp_granularities: (_e = openAIOptions.timestampGranularities) != null ? _e : void 0
1649
+ };
1650
+ for (const key in transcriptionModelOptions) {
1651
+ const value = transcriptionModelOptions[key];
1652
+ if (value !== void 0) {
1653
+ formData.append(key, String(value));
1654
+ }
1655
+ }
1656
+ }
1657
+ return {
1658
+ formData,
1659
+ warnings
1660
+ };
1661
+ }
1662
+ async doGenerate(options) {
1663
+ var _a, _b, _c, _d, _e, _f;
1664
+ const currentDate = (_c = (_b = (_a = this.config._internal) == null ? void 0 : _a.currentDate) == null ? void 0 : _b.call(_a)) != null ? _c : /* @__PURE__ */ new Date();
1665
+ const { formData, warnings } = this.getArgs(options);
1666
+ const {
1667
+ value: response,
1668
+ responseHeaders,
1669
+ rawValue: rawResponse
1670
+ } = await postFormDataToApi({
1671
+ url: this.config.url({
1672
+ path: "/audio/transcriptions",
1673
+ modelId: this.modelId
1674
+ }),
1675
+ headers: combineHeaders(this.config.headers(), options.headers),
1676
+ formData,
1677
+ failedResponseHandler: openaiFailedResponseHandler,
1678
+ successfulResponseHandler: createJsonResponseHandler(
1679
+ openaiTranscriptionResponseSchema
1680
+ ),
1681
+ abortSignal: options.abortSignal,
1682
+ fetch: this.config.fetch
1683
+ });
1684
+ const language = response.language != null && response.language in languageMap ? languageMap[response.language] : void 0;
1685
+ return {
1686
+ text: response.text,
1687
+ segments: (_e = (_d = response.words) == null ? void 0 : _d.map((word) => ({
1688
+ text: word.word,
1689
+ startSecond: word.start,
1690
+ endSecond: word.end
1691
+ }))) != null ? _e : [],
1692
+ language,
1693
+ durationInSeconds: (_f = response.duration) != null ? _f : void 0,
1694
+ warnings,
1695
+ response: {
1696
+ timestamp: currentDate,
1697
+ modelId: this.modelId,
1698
+ headers: responseHeaders,
1699
+ body: rawResponse
1700
+ }
1701
+ };
1702
+ }
1703
+ };
1704
+ var openaiTranscriptionResponseSchema = z.object({
1705
+ text: z.string(),
1706
+ language: z.string().nullish(),
1707
+ duration: z.number().nullish(),
1708
+ words: z.array(
1709
+ z.object({
1710
+ word: z.string(),
1711
+ start: z.number(),
1712
+ end: z.number()
1713
+ })
1714
+ ).nullish()
1715
+ });
1716
+ function convertToOpenAIResponsesMessages({
1717
+ prompt,
1718
+ systemMessageMode
1719
+ }) {
1720
+ const messages = [];
1721
+ const warnings = [];
1722
+ for (const { role, content } of prompt) {
1723
+ switch (role) {
1724
+ case "system": {
1725
+ switch (systemMessageMode) {
1726
+ case "system": {
1727
+ messages.push({ role: "system", content });
1728
+ break;
1729
+ }
1730
+ case "developer": {
1731
+ messages.push({ role: "developer", content });
1732
+ break;
1733
+ }
1734
+ case "remove": {
1735
+ warnings.push({
1736
+ type: "other",
1737
+ message: "system messages are removed for this model"
1738
+ });
1739
+ break;
1740
+ }
1741
+ default: {
1742
+ const _exhaustiveCheck = systemMessageMode;
1743
+ throw new Error(
1744
+ `Unsupported system message mode: ${_exhaustiveCheck}`
1745
+ );
1746
+ }
1747
+ }
1748
+ break;
1749
+ }
1750
+ case "user": {
1751
+ messages.push({
1752
+ role: "user",
1753
+ content: content.map((part, index) => {
1754
+ var _a, _b, _c, _d;
1755
+ switch (part.type) {
1756
+ case "text": {
1757
+ return { type: "input_text", text: part.text };
1758
+ }
1759
+ case "image": {
1760
+ return {
1761
+ type: "input_image",
1762
+ image_url: part.image instanceof URL ? part.image.toString() : `data:${(_a = part.mimeType) != null ? _a : "image/jpeg"};base64,${convertUint8ArrayToBase64(part.image)}`,
1763
+ // OpenAI specific extension: image detail
1764
+ detail: (_c = (_b = part.providerMetadata) == null ? void 0 : _b.openai) == null ? void 0 : _c.imageDetail
1765
+ };
1766
+ }
1767
+ case "file": {
1768
+ if (part.data instanceof URL) {
1769
+ throw new UnsupportedFunctionalityError({
1770
+ functionality: "File URLs in user messages"
1771
+ });
1772
+ }
1773
+ switch (part.mimeType) {
1774
+ case "application/pdf": {
1775
+ return {
1776
+ type: "input_file",
1777
+ filename: (_d = part.filename) != null ? _d : `part-${index}.pdf`,
1778
+ file_data: `data:application/pdf;base64,${part.data}`
1779
+ };
1780
+ }
1781
+ default: {
1782
+ throw new UnsupportedFunctionalityError({
1783
+ functionality: "Only PDF files are supported in user messages"
1784
+ });
1785
+ }
1786
+ }
1787
+ }
1788
+ }
1789
+ })
1790
+ });
1791
+ break;
1792
+ }
1793
+ case "assistant": {
1794
+ for (const part of content) {
1795
+ switch (part.type) {
1796
+ case "text": {
1797
+ messages.push({
1798
+ role: "assistant",
1799
+ content: [{ type: "output_text", text: part.text }]
1800
+ });
1801
+ break;
1802
+ }
1803
+ case "tool-call": {
1804
+ messages.push({
1805
+ type: "function_call",
1806
+ call_id: part.toolCallId,
1807
+ name: part.toolName,
1808
+ arguments: JSON.stringify(part.args)
1809
+ });
1810
+ break;
1811
+ }
1812
+ }
1813
+ }
1814
+ break;
1815
+ }
1816
+ case "tool": {
1817
+ for (const part of content) {
1818
+ messages.push({
1819
+ type: "function_call_output",
1820
+ call_id: part.toolCallId,
1821
+ output: JSON.stringify(part.result)
1822
+ });
1823
+ }
1824
+ break;
1825
+ }
1826
+ default: {
1827
+ const _exhaustiveCheck = role;
1828
+ throw new Error(`Unsupported role: ${_exhaustiveCheck}`);
1829
+ }
1830
+ }
1831
+ }
1832
+ return { messages, warnings };
1833
+ }
1834
+ function mapOpenAIResponseFinishReason({
1835
+ finishReason,
1836
+ hasToolCalls
1837
+ }) {
1838
+ switch (finishReason) {
1839
+ case void 0:
1840
+ case null:
1841
+ return hasToolCalls ? "tool-calls" : "stop";
1842
+ case "max_output_tokens":
1843
+ return "length";
1844
+ case "content_filter":
1845
+ return "content-filter";
1846
+ default:
1847
+ return hasToolCalls ? "tool-calls" : "unknown";
1848
+ }
1849
+ }
1850
+ function prepareResponsesTools({
1851
+ mode,
1852
+ strict
1853
+ }) {
1854
+ var _a;
1855
+ const tools = ((_a = mode.tools) == null ? void 0 : _a.length) ? mode.tools : void 0;
1856
+ const toolWarnings = [];
1857
+ if (tools == null) {
1858
+ return { tools: void 0, tool_choice: void 0, toolWarnings };
1859
+ }
1860
+ const toolChoice = mode.toolChoice;
1861
+ const openaiTools2 = [];
1862
+ for (const tool of tools) {
1863
+ switch (tool.type) {
1864
+ case "function":
1865
+ openaiTools2.push({
1866
+ type: "function",
1867
+ name: tool.name,
1868
+ description: tool.description,
1869
+ parameters: tool.parameters,
1870
+ strict: strict ? true : void 0
1871
+ });
1872
+ break;
1873
+ case "provider-defined":
1874
+ switch (tool.id) {
1875
+ case "openai.web_search_preview":
1876
+ openaiTools2.push({
1877
+ type: "web_search_preview",
1878
+ search_context_size: tool.args.searchContextSize,
1879
+ user_location: tool.args.userLocation
1880
+ });
1881
+ break;
1882
+ default:
1883
+ toolWarnings.push({ type: "unsupported-tool", tool });
1884
+ break;
1885
+ }
1886
+ break;
1887
+ default:
1888
+ toolWarnings.push({ type: "unsupported-tool", tool });
1889
+ break;
1890
+ }
1891
+ }
1892
+ if (toolChoice == null) {
1893
+ return { tools: openaiTools2, tool_choice: void 0, toolWarnings };
1894
+ }
1895
+ const type = toolChoice.type;
1896
+ switch (type) {
1897
+ case "auto":
1898
+ case "none":
1899
+ case "required":
1900
+ return { tools: openaiTools2, tool_choice: type, toolWarnings };
1901
+ case "tool": {
1902
+ if (toolChoice.toolName === "web_search_preview") {
1903
+ return {
1904
+ tools: openaiTools2,
1905
+ tool_choice: {
1906
+ type: "web_search_preview"
1907
+ },
1908
+ toolWarnings
1909
+ };
1910
+ }
1911
+ return {
1912
+ tools: openaiTools2,
1913
+ tool_choice: {
1914
+ type: "function",
1915
+ name: toolChoice.toolName
1916
+ },
1917
+ toolWarnings
1918
+ };
1919
+ }
1920
+ default: {
1921
+ const _exhaustiveCheck = type;
1922
+ throw new UnsupportedFunctionalityError({
1923
+ functionality: `Unsupported tool choice type: ${_exhaustiveCheck}`
1924
+ });
1925
+ }
1926
+ }
1927
+ }
1928
+ var OpenAIResponsesLanguageModel = class {
1929
+ constructor(modelId, config) {
1930
+ this.specificationVersion = "v1";
1931
+ this.defaultObjectGenerationMode = "json";
1932
+ this.supportsStructuredOutputs = true;
1933
+ this.modelId = modelId;
1934
+ this.config = config;
1935
+ }
1936
+ get provider() {
1937
+ return this.config.provider;
1938
+ }
1939
+ getArgs({
1940
+ mode,
1941
+ maxTokens,
1942
+ temperature,
1943
+ stopSequences,
1944
+ topP,
1945
+ topK,
1946
+ presencePenalty,
1947
+ frequencyPenalty,
1948
+ seed,
1949
+ prompt,
1950
+ providerMetadata,
1951
+ responseFormat
1952
+ }) {
1953
+ var _a, _b, _c;
1954
+ const warnings = [];
1955
+ const modelConfig = getResponsesModelConfig(this.modelId);
1956
+ const type = mode.type;
1957
+ if (topK != null) {
1958
+ warnings.push({
1959
+ type: "unsupported-setting",
1960
+ setting: "topK"
1961
+ });
1962
+ }
1963
+ if (seed != null) {
1964
+ warnings.push({
1965
+ type: "unsupported-setting",
1966
+ setting: "seed"
1967
+ });
1968
+ }
1969
+ if (presencePenalty != null) {
1970
+ warnings.push({
1971
+ type: "unsupported-setting",
1972
+ setting: "presencePenalty"
1973
+ });
1974
+ }
1975
+ if (frequencyPenalty != null) {
1976
+ warnings.push({
1977
+ type: "unsupported-setting",
1978
+ setting: "frequencyPenalty"
1979
+ });
1980
+ }
1981
+ if (stopSequences != null) {
1982
+ warnings.push({
1983
+ type: "unsupported-setting",
1984
+ setting: "stopSequences"
1985
+ });
1986
+ }
1987
+ const { messages, warnings: messageWarnings } = convertToOpenAIResponsesMessages({
1988
+ prompt,
1989
+ systemMessageMode: modelConfig.systemMessageMode
1990
+ });
1991
+ warnings.push(...messageWarnings);
1992
+ const openaiOptions = parseProviderOptions({
1993
+ provider: "openai",
1994
+ providerOptions: providerMetadata,
1995
+ schema: openaiResponsesProviderOptionsSchema
1996
+ });
1997
+ const isStrict = (_a = openaiOptions == null ? void 0 : openaiOptions.strictSchemas) != null ? _a : true;
1998
+ const baseArgs = {
1999
+ model: this.modelId,
2000
+ input: messages,
2001
+ temperature,
2002
+ top_p: topP,
2003
+ max_output_tokens: maxTokens,
2004
+ ...(responseFormat == null ? void 0 : responseFormat.type) === "json" && {
2005
+ text: {
2006
+ format: responseFormat.schema != null ? {
2007
+ type: "json_schema",
2008
+ strict: isStrict,
2009
+ name: (_b = responseFormat.name) != null ? _b : "response",
2010
+ description: responseFormat.description,
2011
+ schema: responseFormat.schema
2012
+ } : { type: "json_object" }
2013
+ }
2014
+ },
2015
+ // provider options:
2016
+ metadata: openaiOptions == null ? void 0 : openaiOptions.metadata,
2017
+ parallel_tool_calls: openaiOptions == null ? void 0 : openaiOptions.parallelToolCalls,
2018
+ previous_response_id: openaiOptions == null ? void 0 : openaiOptions.previousResponseId,
2019
+ store: openaiOptions == null ? void 0 : openaiOptions.store,
2020
+ user: openaiOptions == null ? void 0 : openaiOptions.user,
2021
+ instructions: openaiOptions == null ? void 0 : openaiOptions.instructions,
2022
+ // model-specific settings:
2023
+ ...modelConfig.isReasoningModel && ((openaiOptions == null ? void 0 : openaiOptions.reasoningEffort) != null || (openaiOptions == null ? void 0 : openaiOptions.reasoningSummary) != null) && {
2024
+ reasoning: {
2025
+ ...(openaiOptions == null ? void 0 : openaiOptions.reasoningEffort) != null && {
2026
+ effort: openaiOptions.reasoningEffort
2027
+ },
2028
+ ...(openaiOptions == null ? void 0 : openaiOptions.reasoningSummary) != null && {
2029
+ summary: openaiOptions.reasoningSummary
2030
+ }
2031
+ }
2032
+ },
2033
+ ...modelConfig.requiredAutoTruncation && {
2034
+ truncation: "auto"
2035
+ }
2036
+ };
2037
+ if (modelConfig.isReasoningModel) {
2038
+ if (baseArgs.temperature != null) {
2039
+ baseArgs.temperature = void 0;
2040
+ warnings.push({
2041
+ type: "unsupported-setting",
2042
+ setting: "temperature",
2043
+ details: "temperature is not supported for reasoning models"
2044
+ });
2045
+ }
2046
+ if (baseArgs.top_p != null) {
2047
+ baseArgs.top_p = void 0;
2048
+ warnings.push({
2049
+ type: "unsupported-setting",
2050
+ setting: "topP",
2051
+ details: "topP is not supported for reasoning models"
2052
+ });
2053
+ }
2054
+ }
2055
+ switch (type) {
2056
+ case "regular": {
2057
+ const { tools, tool_choice, toolWarnings } = prepareResponsesTools({
2058
+ mode,
2059
+ strict: isStrict
2060
+ // TODO support provider options on tools
2061
+ });
2062
+ return {
2063
+ args: {
2064
+ ...baseArgs,
2065
+ tools,
2066
+ tool_choice
2067
+ },
2068
+ warnings: [...warnings, ...toolWarnings]
2069
+ };
2070
+ }
2071
+ case "object-json": {
2072
+ return {
2073
+ args: {
2074
+ ...baseArgs,
2075
+ text: {
2076
+ format: mode.schema != null ? {
2077
+ type: "json_schema",
2078
+ strict: isStrict,
2079
+ name: (_c = mode.name) != null ? _c : "response",
2080
+ description: mode.description,
2081
+ schema: mode.schema
2082
+ } : { type: "json_object" }
2083
+ }
2084
+ },
2085
+ warnings
2086
+ };
2087
+ }
2088
+ case "object-tool": {
2089
+ return {
2090
+ args: {
2091
+ ...baseArgs,
2092
+ tool_choice: { type: "function", name: mode.tool.name },
2093
+ tools: [
2094
+ {
2095
+ type: "function",
2096
+ name: mode.tool.name,
2097
+ description: mode.tool.description,
2098
+ parameters: mode.tool.parameters,
2099
+ strict: isStrict
2100
+ }
2101
+ ]
2102
+ },
2103
+ warnings
2104
+ };
2105
+ }
2106
+ default: {
2107
+ const _exhaustiveCheck = type;
2108
+ throw new Error(`Unsupported type: ${_exhaustiveCheck}`);
2109
+ }
2110
+ }
2111
+ }
2112
+ async doGenerate(options) {
2113
+ var _a, _b, _c, _d, _e, _f, _g;
2114
+ const { args: body, warnings } = this.getArgs(options);
2115
+ const url = this.config.url({
2116
+ path: "/responses",
2117
+ modelId: this.modelId
2118
+ });
2119
+ const {
2120
+ responseHeaders,
2121
+ value: response,
2122
+ rawValue: rawResponse
2123
+ } = await postJsonToApi({
2124
+ url,
2125
+ headers: combineHeaders(this.config.headers(), options.headers),
2126
+ body,
2127
+ failedResponseHandler: openaiFailedResponseHandler,
2128
+ successfulResponseHandler: createJsonResponseHandler(
2129
+ z.object({
2130
+ id: z.string(),
2131
+ created_at: z.number(),
2132
+ error: z.object({
2133
+ message: z.string(),
2134
+ code: z.string()
2135
+ }).nullish(),
2136
+ model: z.string(),
2137
+ output: z.array(
2138
+ z.discriminatedUnion("type", [
2139
+ z.object({
2140
+ type: z.literal("message"),
2141
+ role: z.literal("assistant"),
2142
+ content: z.array(
2143
+ z.object({
2144
+ type: z.literal("output_text"),
2145
+ text: z.string(),
2146
+ annotations: z.array(
2147
+ z.object({
2148
+ type: z.literal("url_citation"),
2149
+ start_index: z.number(),
2150
+ end_index: z.number(),
2151
+ url: z.string(),
2152
+ title: z.string()
2153
+ })
2154
+ )
2155
+ })
2156
+ )
2157
+ }),
2158
+ z.object({
2159
+ type: z.literal("function_call"),
2160
+ call_id: z.string(),
2161
+ name: z.string(),
2162
+ arguments: z.string()
2163
+ }),
2164
+ z.object({
2165
+ type: z.literal("web_search_call")
2166
+ }),
2167
+ z.object({
2168
+ type: z.literal("computer_call")
2169
+ }),
2170
+ z.object({
2171
+ type: z.literal("reasoning"),
2172
+ summary: z.array(
2173
+ z.object({
2174
+ type: z.literal("summary_text"),
2175
+ text: z.string()
2176
+ })
2177
+ )
2178
+ })
2179
+ ])
2180
+ ),
2181
+ incomplete_details: z.object({ reason: z.string() }).nullable(),
2182
+ usage: usageSchema
2183
+ })
2184
+ ),
2185
+ abortSignal: options.abortSignal,
2186
+ fetch: this.config.fetch
2187
+ });
2188
+ if (response.error) {
2189
+ throw new APICallError({
2190
+ message: response.error.message,
2191
+ url,
2192
+ requestBodyValues: body,
2193
+ statusCode: 400,
2194
+ responseHeaders,
2195
+ responseBody: rawResponse,
2196
+ isRetryable: false
2197
+ });
2198
+ }
2199
+ const outputTextElements = response.output.filter((output) => output.type === "message").flatMap((output) => output.content).filter((content) => content.type === "output_text");
2200
+ const toolCalls = response.output.filter((output) => output.type === "function_call").map((output) => ({
2201
+ toolCallType: "function",
2202
+ toolCallId: output.call_id,
2203
+ toolName: output.name,
2204
+ args: output.arguments
2205
+ }));
2206
+ const reasoningSummary = (_b = (_a = response.output.find((item) => item.type === "reasoning")) == null ? void 0 : _a.summary) != null ? _b : null;
2207
+ return {
2208
+ text: outputTextElements.map((content) => content.text).join("\n"),
2209
+ sources: outputTextElements.flatMap(
2210
+ (content) => content.annotations.map((annotation) => {
2211
+ var _a2, _b2, _c2;
2212
+ return {
2213
+ sourceType: "url",
2214
+ id: (_c2 = (_b2 = (_a2 = this.config).generateId) == null ? void 0 : _b2.call(_a2)) != null ? _c2 : generateId(),
2215
+ url: annotation.url,
2216
+ title: annotation.title
2217
+ };
2218
+ })
2219
+ ),
2220
+ finishReason: mapOpenAIResponseFinishReason({
2221
+ finishReason: (_c = response.incomplete_details) == null ? void 0 : _c.reason,
2222
+ hasToolCalls: toolCalls.length > 0
2223
+ }),
2224
+ toolCalls: toolCalls.length > 0 ? toolCalls : void 0,
2225
+ reasoning: reasoningSummary ? reasoningSummary.map((summary) => ({
2226
+ type: "text",
2227
+ text: summary.text
2228
+ })) : void 0,
2229
+ usage: {
2230
+ promptTokens: response.usage.input_tokens,
2231
+ completionTokens: response.usage.output_tokens
2232
+ },
2233
+ rawCall: {
2234
+ rawPrompt: void 0,
2235
+ rawSettings: {}
2236
+ },
2237
+ rawResponse: {
2238
+ headers: responseHeaders,
2239
+ body: rawResponse
2240
+ },
2241
+ request: {
2242
+ body: JSON.stringify(body)
2243
+ },
2244
+ response: {
2245
+ id: response.id,
2246
+ timestamp: new Date(response.created_at * 1e3),
2247
+ modelId: response.model
2248
+ },
2249
+ providerMetadata: {
2250
+ openai: {
2251
+ responseId: response.id,
2252
+ cachedPromptTokens: (_e = (_d = response.usage.input_tokens_details) == null ? void 0 : _d.cached_tokens) != null ? _e : null,
2253
+ reasoningTokens: (_g = (_f = response.usage.output_tokens_details) == null ? void 0 : _f.reasoning_tokens) != null ? _g : null
2254
+ }
2255
+ },
2256
+ warnings
2257
+ };
2258
+ }
2259
+ async doStream(options) {
2260
+ const { args: body, warnings } = this.getArgs(options);
2261
+ const { responseHeaders, value: response } = await postJsonToApi({
2262
+ url: this.config.url({
2263
+ path: "/responses",
2264
+ modelId: this.modelId
2265
+ }),
2266
+ headers: combineHeaders(this.config.headers(), options.headers),
2267
+ body: {
2268
+ ...body,
2269
+ stream: true
2270
+ },
2271
+ failedResponseHandler: openaiFailedResponseHandler,
2272
+ successfulResponseHandler: createEventSourceResponseHandler(
2273
+ openaiResponsesChunkSchema
2274
+ ),
2275
+ abortSignal: options.abortSignal,
2276
+ fetch: this.config.fetch
2277
+ });
2278
+ const self = this;
2279
+ let finishReason = "unknown";
2280
+ let promptTokens = NaN;
2281
+ let completionTokens = NaN;
2282
+ let cachedPromptTokens = null;
2283
+ let reasoningTokens = null;
2284
+ let responseId = null;
2285
+ const ongoingToolCalls = {};
2286
+ let hasToolCalls = false;
2287
+ return {
2288
+ stream: response.pipeThrough(
2289
+ new TransformStream({
2290
+ transform(chunk, controller) {
2291
+ var _a, _b, _c, _d, _e, _f, _g, _h;
2292
+ if (!chunk.success) {
2293
+ finishReason = "error";
2294
+ controller.enqueue({ type: "error", error: chunk.error });
2295
+ return;
2296
+ }
2297
+ const value = chunk.value;
2298
+ if (isResponseOutputItemAddedChunk(value)) {
2299
+ if (value.item.type === "function_call") {
2300
+ ongoingToolCalls[value.output_index] = {
2301
+ toolName: value.item.name,
2302
+ toolCallId: value.item.call_id
2303
+ };
2304
+ controller.enqueue({
2305
+ type: "tool-call-delta",
2306
+ toolCallType: "function",
2307
+ toolCallId: value.item.call_id,
2308
+ toolName: value.item.name,
2309
+ argsTextDelta: value.item.arguments
2310
+ });
2311
+ }
2312
+ } else if (isResponseFunctionCallArgumentsDeltaChunk(value)) {
2313
+ const toolCall = ongoingToolCalls[value.output_index];
2314
+ if (toolCall != null) {
2315
+ controller.enqueue({
2316
+ type: "tool-call-delta",
2317
+ toolCallType: "function",
2318
+ toolCallId: toolCall.toolCallId,
2319
+ toolName: toolCall.toolName,
2320
+ argsTextDelta: value.delta
2321
+ });
2322
+ }
2323
+ } else if (isResponseCreatedChunk(value)) {
2324
+ responseId = value.response.id;
2325
+ controller.enqueue({
2326
+ type: "response-metadata",
2327
+ id: value.response.id,
2328
+ timestamp: new Date(value.response.created_at * 1e3),
2329
+ modelId: value.response.model
2330
+ });
2331
+ } else if (isTextDeltaChunk(value)) {
2332
+ controller.enqueue({
2333
+ type: "text-delta",
2334
+ textDelta: value.delta
2335
+ });
2336
+ } else if (isResponseReasoningSummaryTextDeltaChunk(value)) {
2337
+ controller.enqueue({
2338
+ type: "reasoning",
2339
+ textDelta: value.delta
2340
+ });
2341
+ } else if (isResponseOutputItemDoneChunk(value) && value.item.type === "function_call") {
2342
+ ongoingToolCalls[value.output_index] = void 0;
2343
+ hasToolCalls = true;
2344
+ controller.enqueue({
2345
+ type: "tool-call",
2346
+ toolCallType: "function",
2347
+ toolCallId: value.item.call_id,
2348
+ toolName: value.item.name,
2349
+ args: value.item.arguments
2350
+ });
2351
+ } else if (isResponseFinishedChunk(value)) {
2352
+ finishReason = mapOpenAIResponseFinishReason({
2353
+ finishReason: (_a = value.response.incomplete_details) == null ? void 0 : _a.reason,
2354
+ hasToolCalls
2355
+ });
2356
+ promptTokens = value.response.usage.input_tokens;
2357
+ completionTokens = value.response.usage.output_tokens;
2358
+ cachedPromptTokens = (_c = (_b = value.response.usage.input_tokens_details) == null ? void 0 : _b.cached_tokens) != null ? _c : cachedPromptTokens;
2359
+ reasoningTokens = (_e = (_d = value.response.usage.output_tokens_details) == null ? void 0 : _d.reasoning_tokens) != null ? _e : reasoningTokens;
2360
+ } else if (isResponseAnnotationAddedChunk(value)) {
2361
+ controller.enqueue({
2362
+ type: "source",
2363
+ source: {
2364
+ sourceType: "url",
2365
+ id: (_h = (_g = (_f = self.config).generateId) == null ? void 0 : _g.call(_f)) != null ? _h : generateId(),
2366
+ url: value.annotation.url,
2367
+ title: value.annotation.title
2368
+ }
2369
+ });
2370
+ } else if (isErrorChunk(value)) {
2371
+ controller.enqueue({ type: "error", error: value });
2372
+ }
2373
+ },
2374
+ flush(controller) {
2375
+ controller.enqueue({
2376
+ type: "finish",
2377
+ finishReason,
2378
+ usage: { promptTokens, completionTokens },
2379
+ ...(cachedPromptTokens != null || reasoningTokens != null) && {
2380
+ providerMetadata: {
2381
+ openai: {
2382
+ responseId,
2383
+ cachedPromptTokens,
2384
+ reasoningTokens
2385
+ }
2386
+ }
2387
+ }
2388
+ });
2389
+ }
2390
+ })
2391
+ ),
2392
+ rawCall: {
2393
+ rawPrompt: void 0,
2394
+ rawSettings: {}
2395
+ },
2396
+ rawResponse: { headers: responseHeaders },
2397
+ request: { body: JSON.stringify(body) },
2398
+ warnings
2399
+ };
2400
+ }
2401
+ };
2402
+ var usageSchema = z.object({
2403
+ input_tokens: z.number(),
2404
+ input_tokens_details: z.object({ cached_tokens: z.number().nullish() }).nullish(),
2405
+ output_tokens: z.number(),
2406
+ output_tokens_details: z.object({ reasoning_tokens: z.number().nullish() }).nullish()
2407
+ });
2408
+ var textDeltaChunkSchema = z.object({
2409
+ type: z.literal("response.output_text.delta"),
2410
+ delta: z.string()
2411
+ });
2412
+ var responseFinishedChunkSchema = z.object({
2413
+ type: z.enum(["response.completed", "response.incomplete"]),
2414
+ response: z.object({
2415
+ incomplete_details: z.object({ reason: z.string() }).nullish(),
2416
+ usage: usageSchema
2417
+ })
2418
+ });
2419
+ var responseCreatedChunkSchema = z.object({
2420
+ type: z.literal("response.created"),
2421
+ response: z.object({
2422
+ id: z.string(),
2423
+ created_at: z.number(),
2424
+ model: z.string()
2425
+ })
2426
+ });
2427
+ var responseOutputItemDoneSchema = z.object({
2428
+ type: z.literal("response.output_item.done"),
2429
+ output_index: z.number(),
2430
+ item: z.discriminatedUnion("type", [
2431
+ z.object({
2432
+ type: z.literal("message")
2433
+ }),
2434
+ z.object({
2435
+ type: z.literal("function_call"),
2436
+ id: z.string(),
2437
+ call_id: z.string(),
2438
+ name: z.string(),
2439
+ arguments: z.string(),
2440
+ status: z.literal("completed")
2441
+ })
2442
+ ])
2443
+ });
2444
+ var responseFunctionCallArgumentsDeltaSchema = z.object({
2445
+ type: z.literal("response.function_call_arguments.delta"),
2446
+ item_id: z.string(),
2447
+ output_index: z.number(),
2448
+ delta: z.string()
2449
+ });
2450
+ var responseOutputItemAddedSchema = z.object({
2451
+ type: z.literal("response.output_item.added"),
2452
+ output_index: z.number(),
2453
+ item: z.discriminatedUnion("type", [
2454
+ z.object({
2455
+ type: z.literal("message")
2456
+ }),
2457
+ z.object({
2458
+ type: z.literal("function_call"),
2459
+ id: z.string(),
2460
+ call_id: z.string(),
2461
+ name: z.string(),
2462
+ arguments: z.string()
2463
+ })
2464
+ ])
2465
+ });
2466
+ var responseAnnotationAddedSchema = z.object({
2467
+ type: z.literal("response.output_text.annotation.added"),
2468
+ annotation: z.object({
2469
+ type: z.literal("url_citation"),
2470
+ url: z.string(),
2471
+ title: z.string()
2472
+ })
2473
+ });
2474
+ var responseReasoningSummaryTextDeltaSchema = z.object({
2475
+ type: z.literal("response.reasoning_summary_text.delta"),
2476
+ item_id: z.string(),
2477
+ output_index: z.number(),
2478
+ summary_index: z.number(),
2479
+ delta: z.string()
2480
+ });
2481
+ var errorChunkSchema = z.object({
2482
+ type: z.literal("error"),
2483
+ code: z.string(),
2484
+ message: z.string(),
2485
+ param: z.string().nullish(),
2486
+ sequence_number: z.number()
2487
+ });
2488
+ var openaiResponsesChunkSchema = z.union([
2489
+ textDeltaChunkSchema,
2490
+ responseFinishedChunkSchema,
2491
+ responseCreatedChunkSchema,
2492
+ responseOutputItemDoneSchema,
2493
+ responseFunctionCallArgumentsDeltaSchema,
2494
+ responseOutputItemAddedSchema,
2495
+ responseAnnotationAddedSchema,
2496
+ responseReasoningSummaryTextDeltaSchema,
2497
+ errorChunkSchema,
2498
+ z.object({ type: z.string() }).passthrough()
2499
+ // fallback for unknown chunks
2500
+ ]);
2501
+ function isTextDeltaChunk(chunk) {
2502
+ return chunk.type === "response.output_text.delta";
2503
+ }
2504
+ function isResponseOutputItemDoneChunk(chunk) {
2505
+ return chunk.type === "response.output_item.done";
2506
+ }
2507
+ function isResponseFinishedChunk(chunk) {
2508
+ return chunk.type === "response.completed" || chunk.type === "response.incomplete";
2509
+ }
2510
+ function isResponseCreatedChunk(chunk) {
2511
+ return chunk.type === "response.created";
2512
+ }
2513
+ function isResponseFunctionCallArgumentsDeltaChunk(chunk) {
2514
+ return chunk.type === "response.function_call_arguments.delta";
2515
+ }
2516
+ function isResponseOutputItemAddedChunk(chunk) {
2517
+ return chunk.type === "response.output_item.added";
2518
+ }
2519
+ function isResponseAnnotationAddedChunk(chunk) {
2520
+ return chunk.type === "response.output_text.annotation.added";
2521
+ }
2522
+ function isResponseReasoningSummaryTextDeltaChunk(chunk) {
2523
+ return chunk.type === "response.reasoning_summary_text.delta";
2524
+ }
2525
+ function isErrorChunk(chunk) {
2526
+ return chunk.type === "error";
2527
+ }
2528
+ function getResponsesModelConfig(modelId) {
2529
+ if (modelId.startsWith("o") || modelId.startsWith("gpt-5")) {
2530
+ if (modelId.startsWith("o1-mini") || modelId.startsWith("o1-preview")) {
2531
+ return {
2532
+ isReasoningModel: true,
2533
+ systemMessageMode: "remove",
2534
+ requiredAutoTruncation: false
2535
+ };
2536
+ }
2537
+ return {
2538
+ isReasoningModel: true,
2539
+ systemMessageMode: "developer",
2540
+ requiredAutoTruncation: false
2541
+ };
2542
+ }
2543
+ return {
2544
+ isReasoningModel: false,
2545
+ systemMessageMode: "system",
2546
+ requiredAutoTruncation: false
2547
+ };
2548
+ }
2549
+ var openaiResponsesProviderOptionsSchema = z.object({
2550
+ metadata: z.any().nullish(),
2551
+ parallelToolCalls: z.boolean().nullish(),
2552
+ previousResponseId: z.string().nullish(),
2553
+ store: z.boolean().nullish(),
2554
+ user: z.string().nullish(),
2555
+ reasoningEffort: z.string().nullish(),
2556
+ strictSchemas: z.boolean().nullish(),
2557
+ instructions: z.string().nullish(),
2558
+ reasoningSummary: z.string().nullish()
2559
+ });
2560
+ var WebSearchPreviewParameters = z.object({});
2561
+ function webSearchPreviewTool({
2562
+ searchContextSize,
2563
+ userLocation
2564
+ } = {}) {
2565
+ return {
2566
+ type: "provider-defined",
2567
+ id: "openai.web_search_preview",
2568
+ args: {
2569
+ searchContextSize,
2570
+ userLocation
2571
+ },
2572
+ parameters: WebSearchPreviewParameters
2573
+ };
2574
+ }
2575
+ var openaiTools = {
2576
+ webSearchPreview: webSearchPreviewTool
2577
+ };
2578
+ var OpenAIProviderOptionsSchema = z.object({
2579
+ instructions: z.string().nullish(),
2580
+ speed: z.number().min(0.25).max(4).default(1).nullish()
2581
+ });
2582
+ var OpenAISpeechModel = class {
2583
+ constructor(modelId, config) {
2584
+ this.modelId = modelId;
2585
+ this.config = config;
2586
+ this.specificationVersion = "v1";
2587
+ }
2588
+ get provider() {
2589
+ return this.config.provider;
2590
+ }
2591
+ getArgs({
2592
+ text,
2593
+ voice = "alloy",
2594
+ outputFormat = "mp3",
2595
+ speed,
2596
+ instructions,
2597
+ providerOptions
2598
+ }) {
2599
+ const warnings = [];
2600
+ const openAIOptions = parseProviderOptions({
2601
+ provider: "openai",
2602
+ providerOptions,
2603
+ schema: OpenAIProviderOptionsSchema
2604
+ });
2605
+ const requestBody = {
2606
+ model: this.modelId,
2607
+ input: text,
2608
+ voice,
2609
+ response_format: "mp3",
2610
+ speed,
2611
+ instructions
2612
+ };
2613
+ if (outputFormat) {
2614
+ if (["mp3", "opus", "aac", "flac", "wav", "pcm"].includes(outputFormat)) {
2615
+ requestBody.response_format = outputFormat;
2616
+ } else {
2617
+ warnings.push({
2618
+ type: "unsupported-setting",
2619
+ setting: "outputFormat",
2620
+ details: `Unsupported output format: ${outputFormat}. Using mp3 instead.`
2621
+ });
2622
+ }
2623
+ }
2624
+ if (openAIOptions) {
2625
+ const speechModelOptions = {};
2626
+ for (const key in speechModelOptions) {
2627
+ const value = speechModelOptions[key];
2628
+ if (value !== void 0) {
2629
+ requestBody[key] = value;
2630
+ }
2631
+ }
2632
+ }
2633
+ return {
2634
+ requestBody,
2635
+ warnings
2636
+ };
2637
+ }
2638
+ async doGenerate(options) {
2639
+ var _a, _b, _c;
2640
+ const currentDate = (_c = (_b = (_a = this.config._internal) == null ? void 0 : _a.currentDate) == null ? void 0 : _b.call(_a)) != null ? _c : /* @__PURE__ */ new Date();
2641
+ const { requestBody, warnings } = this.getArgs(options);
2642
+ const {
2643
+ value: audio,
2644
+ responseHeaders,
2645
+ rawValue: rawResponse
2646
+ } = await postJsonToApi({
2647
+ url: this.config.url({
2648
+ path: "/audio/speech",
2649
+ modelId: this.modelId
2650
+ }),
2651
+ headers: combineHeaders(this.config.headers(), options.headers),
2652
+ body: requestBody,
2653
+ failedResponseHandler: openaiFailedResponseHandler,
2654
+ successfulResponseHandler: createBinaryResponseHandler(),
2655
+ abortSignal: options.abortSignal,
2656
+ fetch: this.config.fetch
2657
+ });
2658
+ return {
2659
+ audio,
2660
+ warnings,
2661
+ request: {
2662
+ body: JSON.stringify(requestBody)
2663
+ },
2664
+ response: {
2665
+ timestamp: currentDate,
2666
+ modelId: this.modelId,
2667
+ headers: responseHeaders,
2668
+ body: rawResponse
2669
+ }
2670
+ };
2671
+ }
2672
+ };
2673
+ function createOpenAI(options = {}) {
2674
+ var _a, _b, _c;
2675
+ const baseURL = (_a = withoutTrailingSlash(options.baseURL)) != null ? _a : "https://api.openai.com/v1";
2676
+ const compatibility = (_b = options.compatibility) != null ? _b : "compatible";
2677
+ const providerName = (_c = options.name) != null ? _c : "openai";
2678
+ const getHeaders = () => ({
2679
+ Authorization: `Bearer ${loadApiKey({
2680
+ apiKey: options.apiKey,
2681
+ environmentVariableName: "OPENAI_API_KEY",
2682
+ description: "OpenAI"
2683
+ })}`,
2684
+ "OpenAI-Organization": options.organization,
2685
+ "OpenAI-Project": options.project,
2686
+ ...options.headers
2687
+ });
2688
+ const createChatModel = (modelId, settings = {}) => new OpenAIChatLanguageModel(modelId, settings, {
2689
+ provider: `${providerName}.chat`,
2690
+ url: ({ path }) => `${baseURL}${path}`,
2691
+ headers: getHeaders,
2692
+ compatibility,
2693
+ fetch: options.fetch
2694
+ });
2695
+ const createCompletionModel = (modelId, settings = {}) => new OpenAICompletionLanguageModel(modelId, settings, {
2696
+ provider: `${providerName}.completion`,
2697
+ url: ({ path }) => `${baseURL}${path}`,
2698
+ headers: getHeaders,
2699
+ compatibility,
2700
+ fetch: options.fetch
2701
+ });
2702
+ const createEmbeddingModel = (modelId, settings = {}) => new OpenAIEmbeddingModel(modelId, settings, {
2703
+ provider: `${providerName}.embedding`,
2704
+ url: ({ path }) => `${baseURL}${path}`,
2705
+ headers: getHeaders,
2706
+ fetch: options.fetch
2707
+ });
2708
+ const createImageModel = (modelId, settings = {}) => new OpenAIImageModel(modelId, settings, {
2709
+ provider: `${providerName}.image`,
2710
+ url: ({ path }) => `${baseURL}${path}`,
2711
+ headers: getHeaders,
2712
+ fetch: options.fetch
2713
+ });
2714
+ const createTranscriptionModel = (modelId) => new OpenAITranscriptionModel(modelId, {
2715
+ provider: `${providerName}.transcription`,
2716
+ url: ({ path }) => `${baseURL}${path}`,
2717
+ headers: getHeaders,
2718
+ fetch: options.fetch
2719
+ });
2720
+ const createSpeechModel = (modelId) => new OpenAISpeechModel(modelId, {
2721
+ provider: `${providerName}.speech`,
2722
+ url: ({ path }) => `${baseURL}${path}`,
2723
+ headers: getHeaders,
2724
+ fetch: options.fetch
2725
+ });
2726
+ const createLanguageModel = (modelId, settings) => {
2727
+ if (new.target) {
2728
+ throw new Error(
2729
+ "The OpenAI model function cannot be called with the new keyword."
2730
+ );
2731
+ }
2732
+ if (modelId === "gpt-3.5-turbo-instruct") {
2733
+ return createCompletionModel(
2734
+ modelId,
2735
+ settings
2736
+ );
2737
+ }
2738
+ return createChatModel(modelId, settings);
2739
+ };
2740
+ const createResponsesModel = (modelId) => {
2741
+ return new OpenAIResponsesLanguageModel(modelId, {
2742
+ provider: `${providerName}.responses`,
2743
+ url: ({ path }) => `${baseURL}${path}`,
2744
+ headers: getHeaders,
2745
+ fetch: options.fetch
2746
+ });
2747
+ };
2748
+ const provider = function(modelId, settings) {
2749
+ return createLanguageModel(modelId, settings);
2750
+ };
2751
+ provider.languageModel = createLanguageModel;
2752
+ provider.chat = createChatModel;
2753
+ provider.completion = createCompletionModel;
2754
+ provider.responses = createResponsesModel;
2755
+ provider.embedding = createEmbeddingModel;
2756
+ provider.textEmbedding = createEmbeddingModel;
2757
+ provider.textEmbeddingModel = createEmbeddingModel;
2758
+ provider.image = createImageModel;
2759
+ provider.imageModel = createImageModel;
2760
+ provider.transcription = createTranscriptionModel;
2761
+ provider.transcriptionModel = createTranscriptionModel;
2762
+ provider.speech = createSpeechModel;
2763
+ provider.speechModel = createSpeechModel;
2764
+ provider.tools = openaiTools;
2765
+ return provider;
2766
+ }
2767
+ var openai = createOpenAI({
2768
+ compatibility: "strict"
2769
+ // strict for OpenAI API
2770
+ });
2771
+
2772
+ export { createOpenAI, openai };
2773
+ //# sourceMappingURL=chunk-SV4AUWGY.js.map
2774
+ //# sourceMappingURL=chunk-SV4AUWGY.js.map