ai 6.0.33 → 6.0.35

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (357)
  1. package/CHANGELOG.md +16 -0
  2. package/dist/index.d.mts +50 -21
  3. package/dist/index.d.ts +50 -21
  4. package/dist/index.js +348 -286
  5. package/dist/index.js.map +1 -1
  6. package/dist/index.mjs +280 -219
  7. package/dist/index.mjs.map +1 -1
  8. package/dist/internal/index.js +1 -1
  9. package/dist/internal/index.mjs +1 -1
  10. package/docs/02-foundations/03-prompts.mdx +2 -2
  11. package/docs/03-ai-sdk-core/15-tools-and-tool-calling.mdx +1 -1
  12. package/docs/07-reference/01-ai-sdk-core/28-output.mdx +1 -1
  13. package/docs/07-reference/05-ai-sdk-errors/ai-ui-message-stream-error.mdx +67 -0
  14. package/package.json +6 -4
  15. package/src/agent/agent.ts +116 -0
  16. package/src/agent/create-agent-ui-stream-response.test.ts +258 -0
  17. package/src/agent/create-agent-ui-stream-response.ts +50 -0
  18. package/src/agent/create-agent-ui-stream.ts +73 -0
  19. package/src/agent/index.ts +33 -0
  20. package/src/agent/infer-agent-tools.ts +7 -0
  21. package/src/agent/infer-agent-ui-message.test-d.ts +54 -0
  22. package/src/agent/infer-agent-ui-message.ts +11 -0
  23. package/src/agent/pipe-agent-ui-stream-to-response.ts +52 -0
  24. package/src/agent/tool-loop-agent-on-finish-callback.ts +31 -0
  25. package/src/agent/tool-loop-agent-on-step-finish-callback.ts +11 -0
  26. package/src/agent/tool-loop-agent-settings.ts +182 -0
  27. package/src/agent/tool-loop-agent.test-d.ts +114 -0
  28. package/src/agent/tool-loop-agent.test.ts +442 -0
  29. package/src/agent/tool-loop-agent.ts +114 -0
  30. package/src/embed/__snapshots__/embed-many.test.ts.snap +191 -0
  31. package/src/embed/__snapshots__/embed.test.ts.snap +81 -0
  32. package/src/embed/embed-many-result.ts +53 -0
  33. package/src/embed/embed-many.test.ts +653 -0
  34. package/src/embed/embed-many.ts +378 -0
  35. package/src/embed/embed-result.ts +50 -0
  36. package/src/embed/embed.test.ts +298 -0
  37. package/src/embed/embed.ts +211 -0
  38. package/src/embed/index.ts +4 -0
  39. package/src/error/index.ts +35 -0
  40. package/src/error/invalid-argument-error.ts +34 -0
  41. package/src/error/invalid-stream-part-error.ts +28 -0
  42. package/src/error/invalid-tool-approval-error.ts +26 -0
  43. package/src/error/invalid-tool-input-error.ts +33 -0
  44. package/src/error/no-image-generated-error.ts +39 -0
  45. package/src/error/no-object-generated-error.ts +70 -0
  46. package/src/error/no-output-generated-error.ts +26 -0
  47. package/src/error/no-speech-generated-error.ts +18 -0
  48. package/src/error/no-such-tool-error.ts +35 -0
  49. package/src/error/no-transcript-generated-error.ts +20 -0
  50. package/src/error/tool-call-not-found-for-approval-error.ts +32 -0
  51. package/src/error/tool-call-repair-error.ts +30 -0
  52. package/src/error/ui-message-stream-error.ts +48 -0
  53. package/src/error/unsupported-model-version-error.ts +23 -0
  54. package/src/error/verify-no-object-generated-error.ts +27 -0
  55. package/src/generate-image/generate-image-result.ts +42 -0
  56. package/src/generate-image/generate-image.test.ts +1420 -0
  57. package/src/generate-image/generate-image.ts +360 -0
  58. package/src/generate-image/index.ts +18 -0
  59. package/src/generate-object/__snapshots__/generate-object.test.ts.snap +133 -0
  60. package/src/generate-object/__snapshots__/stream-object.test.ts.snap +297 -0
  61. package/src/generate-object/generate-object-result.ts +67 -0
  62. package/src/generate-object/generate-object.test-d.ts +49 -0
  63. package/src/generate-object/generate-object.test.ts +1191 -0
  64. package/src/generate-object/generate-object.ts +518 -0
  65. package/src/generate-object/index.ts +9 -0
  66. package/src/generate-object/inject-json-instruction.test.ts +181 -0
  67. package/src/generate-object/inject-json-instruction.ts +30 -0
  68. package/src/generate-object/output-strategy.ts +415 -0
  69. package/src/generate-object/parse-and-validate-object-result.ts +111 -0
  70. package/src/generate-object/repair-text.ts +12 -0
  71. package/src/generate-object/stream-object-result.ts +120 -0
  72. package/src/generate-object/stream-object.test-d.ts +74 -0
  73. package/src/generate-object/stream-object.test.ts +1950 -0
  74. package/src/generate-object/stream-object.ts +986 -0
  75. package/src/generate-object/validate-object-generation-input.ts +144 -0
  76. package/src/generate-speech/generate-speech-result.ts +30 -0
  77. package/src/generate-speech/generate-speech.test.ts +300 -0
  78. package/src/generate-speech/generate-speech.ts +190 -0
  79. package/src/generate-speech/generated-audio-file.ts +65 -0
  80. package/src/generate-speech/index.ts +3 -0
  81. package/src/generate-text/__snapshots__/generate-text.test.ts.snap +1872 -0
  82. package/src/generate-text/__snapshots__/stream-text.test.ts.snap +1255 -0
  83. package/src/generate-text/collect-tool-approvals.test.ts +553 -0
  84. package/src/generate-text/collect-tool-approvals.ts +116 -0
  85. package/src/generate-text/content-part.ts +25 -0
  86. package/src/generate-text/execute-tool-call.ts +129 -0
  87. package/src/generate-text/extract-reasoning-content.ts +17 -0
  88. package/src/generate-text/extract-text-content.ts +15 -0
  89. package/src/generate-text/generate-text-result.ts +168 -0
  90. package/src/generate-text/generate-text.test-d.ts +68 -0
  91. package/src/generate-text/generate-text.test.ts +7011 -0
  92. package/src/generate-text/generate-text.ts +1223 -0
  93. package/src/generate-text/generated-file.ts +70 -0
  94. package/src/generate-text/index.ts +57 -0
  95. package/src/generate-text/is-approval-needed.ts +29 -0
  96. package/src/generate-text/output-utils.ts +23 -0
  97. package/src/generate-text/output.test.ts +698 -0
  98. package/src/generate-text/output.ts +590 -0
  99. package/src/generate-text/parse-tool-call.test.ts +570 -0
  100. package/src/generate-text/parse-tool-call.ts +188 -0
  101. package/src/generate-text/prepare-step.ts +103 -0
  102. package/src/generate-text/prune-messages.test.ts +720 -0
  103. package/src/generate-text/prune-messages.ts +167 -0
  104. package/src/generate-text/reasoning-output.ts +20 -0
  105. package/src/generate-text/reasoning.ts +8 -0
  106. package/src/generate-text/response-message.ts +10 -0
  107. package/src/generate-text/run-tools-transformation.test.ts +1143 -0
  108. package/src/generate-text/run-tools-transformation.ts +420 -0
  109. package/src/generate-text/smooth-stream.test.ts +2101 -0
  110. package/src/generate-text/smooth-stream.ts +162 -0
  111. package/src/generate-text/step-result.ts +238 -0
  112. package/src/generate-text/stop-condition.ts +29 -0
  113. package/src/generate-text/stream-text-result.ts +463 -0
  114. package/src/generate-text/stream-text.test-d.ts +200 -0
  115. package/src/generate-text/stream-text.test.ts +19979 -0
  116. package/src/generate-text/stream-text.ts +2505 -0
  117. package/src/generate-text/to-response-messages.test.ts +922 -0
  118. package/src/generate-text/to-response-messages.ts +163 -0
  119. package/src/generate-text/tool-approval-request-output.ts +21 -0
  120. package/src/generate-text/tool-call-repair-function.ts +27 -0
  121. package/src/generate-text/tool-call.ts +47 -0
  122. package/src/generate-text/tool-error.ts +34 -0
  123. package/src/generate-text/tool-output-denied.ts +21 -0
  124. package/src/generate-text/tool-output.ts +7 -0
  125. package/src/generate-text/tool-result.ts +36 -0
  126. package/src/generate-text/tool-set.ts +14 -0
  127. package/src/global.ts +24 -0
  128. package/src/index.ts +50 -0
  129. package/src/logger/index.ts +6 -0
  130. package/src/logger/log-warnings.test.ts +351 -0
  131. package/src/logger/log-warnings.ts +119 -0
  132. package/src/middleware/__snapshots__/simulate-streaming-middleware.test.ts.snap +64 -0
  133. package/src/middleware/add-tool-input-examples-middleware.test.ts +476 -0
  134. package/src/middleware/add-tool-input-examples-middleware.ts +90 -0
  135. package/src/middleware/default-embedding-settings-middleware.test.ts +126 -0
  136. package/src/middleware/default-embedding-settings-middleware.ts +22 -0
  137. package/src/middleware/default-settings-middleware.test.ts +388 -0
  138. package/src/middleware/default-settings-middleware.ts +33 -0
  139. package/src/middleware/extract-json-middleware.test.ts +827 -0
  140. package/src/middleware/extract-json-middleware.ts +197 -0
  141. package/src/middleware/extract-reasoning-middleware.test.ts +1028 -0
  142. package/src/middleware/extract-reasoning-middleware.ts +238 -0
  143. package/src/middleware/index.ts +10 -0
  144. package/src/middleware/simulate-streaming-middleware.test.ts +911 -0
  145. package/src/middleware/simulate-streaming-middleware.ts +79 -0
  146. package/src/middleware/wrap-embedding-model.test.ts +358 -0
  147. package/src/middleware/wrap-embedding-model.ts +86 -0
  148. package/src/middleware/wrap-image-model.test.ts +423 -0
  149. package/src/middleware/wrap-image-model.ts +85 -0
  150. package/src/middleware/wrap-language-model.test.ts +518 -0
  151. package/src/middleware/wrap-language-model.ts +104 -0
  152. package/src/middleware/wrap-provider.test.ts +120 -0
  153. package/src/middleware/wrap-provider.ts +51 -0
  154. package/src/model/as-embedding-model-v3.test.ts +319 -0
  155. package/src/model/as-embedding-model-v3.ts +24 -0
  156. package/src/model/as-image-model-v3.test.ts +409 -0
  157. package/src/model/as-image-model-v3.ts +24 -0
  158. package/src/model/as-language-model-v3.test.ts +508 -0
  159. package/src/model/as-language-model-v3.ts +103 -0
  160. package/src/model/as-provider-v3.ts +36 -0
  161. package/src/model/as-speech-model-v3.test.ts +356 -0
  162. package/src/model/as-speech-model-v3.ts +24 -0
  163. package/src/model/as-transcription-model-v3.test.ts +529 -0
  164. package/src/model/as-transcription-model-v3.ts +24 -0
  165. package/src/model/resolve-model.test.ts +244 -0
  166. package/src/model/resolve-model.ts +126 -0
  167. package/src/prompt/call-settings.ts +148 -0
  168. package/src/prompt/content-part.ts +209 -0
  169. package/src/prompt/convert-to-language-model-prompt.test.ts +2018 -0
  170. package/src/prompt/convert-to-language-model-prompt.ts +442 -0
  171. package/src/prompt/create-tool-model-output.test.ts +508 -0
  172. package/src/prompt/create-tool-model-output.ts +34 -0
  173. package/src/prompt/data-content.test.ts +15 -0
  174. package/src/prompt/data-content.ts +134 -0
  175. package/src/prompt/index.ts +27 -0
  176. package/src/prompt/invalid-data-content-error.ts +29 -0
  177. package/src/prompt/invalid-message-role-error.ts +27 -0
  178. package/src/prompt/message-conversion-error.ts +28 -0
  179. package/src/prompt/message.ts +68 -0
  180. package/src/prompt/prepare-call-settings.test.ts +159 -0
  181. package/src/prompt/prepare-call-settings.ts +108 -0
  182. package/src/prompt/prepare-tools-and-tool-choice.test.ts +461 -0
  183. package/src/prompt/prepare-tools-and-tool-choice.ts +86 -0
  184. package/src/prompt/prompt.ts +43 -0
  185. package/src/prompt/split-data-url.ts +17 -0
  186. package/src/prompt/standardize-prompt.test.ts +82 -0
  187. package/src/prompt/standardize-prompt.ts +99 -0
  188. package/src/prompt/wrap-gateway-error.ts +29 -0
  189. package/src/registry/custom-provider.test.ts +211 -0
  190. package/src/registry/custom-provider.ts +155 -0
  191. package/src/registry/index.ts +7 -0
  192. package/src/registry/no-such-provider-error.ts +41 -0
  193. package/src/registry/provider-registry.test.ts +691 -0
  194. package/src/registry/provider-registry.ts +328 -0
  195. package/src/rerank/index.ts +2 -0
  196. package/src/rerank/rerank-result.ts +70 -0
  197. package/src/rerank/rerank.test.ts +516 -0
  198. package/src/rerank/rerank.ts +237 -0
  199. package/src/telemetry/assemble-operation-name.ts +21 -0
  200. package/src/telemetry/get-base-telemetry-attributes.ts +53 -0
  201. package/src/telemetry/get-tracer.ts +20 -0
  202. package/src/telemetry/noop-tracer.ts +69 -0
  203. package/src/telemetry/record-span.ts +63 -0
  204. package/src/telemetry/select-telemetry-attributes.ts +78 -0
  205. package/src/telemetry/select-temetry-attributes.test.ts +114 -0
  206. package/src/telemetry/stringify-for-telemetry.test.ts +114 -0
  207. package/src/telemetry/stringify-for-telemetry.ts +33 -0
  208. package/src/telemetry/telemetry-settings.ts +44 -0
  209. package/src/test/mock-embedding-model-v2.ts +35 -0
  210. package/src/test/mock-embedding-model-v3.ts +48 -0
  211. package/src/test/mock-image-model-v2.ts +28 -0
  212. package/src/test/mock-image-model-v3.ts +28 -0
  213. package/src/test/mock-language-model-v2.ts +72 -0
  214. package/src/test/mock-language-model-v3.ts +77 -0
  215. package/src/test/mock-provider-v2.ts +68 -0
  216. package/src/test/mock-provider-v3.ts +80 -0
  217. package/src/test/mock-reranking-model-v3.ts +25 -0
  218. package/src/test/mock-server-response.ts +69 -0
  219. package/src/test/mock-speech-model-v2.ts +24 -0
  220. package/src/test/mock-speech-model-v3.ts +24 -0
  221. package/src/test/mock-tracer.ts +156 -0
  222. package/src/test/mock-transcription-model-v2.ts +24 -0
  223. package/src/test/mock-transcription-model-v3.ts +24 -0
  224. package/src/test/mock-values.ts +4 -0
  225. package/src/test/not-implemented.ts +3 -0
  226. package/src/text-stream/create-text-stream-response.test.ts +38 -0
  227. package/src/text-stream/create-text-stream-response.ts +18 -0
  228. package/src/text-stream/index.ts +2 -0
  229. package/src/text-stream/pipe-text-stream-to-response.test.ts +38 -0
  230. package/src/text-stream/pipe-text-stream-to-response.ts +26 -0
  231. package/src/transcribe/index.ts +2 -0
  232. package/src/transcribe/transcribe-result.ts +60 -0
  233. package/src/transcribe/transcribe.test.ts +313 -0
  234. package/src/transcribe/transcribe.ts +173 -0
  235. package/src/types/embedding-model-middleware.ts +3 -0
  236. package/src/types/embedding-model.ts +18 -0
  237. package/src/types/image-model-middleware.ts +3 -0
  238. package/src/types/image-model-response-metadata.ts +16 -0
  239. package/src/types/image-model.ts +19 -0
  240. package/src/types/index.ts +29 -0
  241. package/src/types/json-value.ts +15 -0
  242. package/src/types/language-model-middleware.ts +3 -0
  243. package/src/types/language-model-request-metadata.ts +6 -0
  244. package/src/types/language-model-response-metadata.ts +21 -0
  245. package/src/types/language-model.ts +104 -0
  246. package/src/types/provider-metadata.ts +16 -0
  247. package/src/types/provider.ts +55 -0
  248. package/src/types/reranking-model.ts +6 -0
  249. package/src/types/speech-model-response-metadata.ts +21 -0
  250. package/src/types/speech-model.ts +6 -0
  251. package/src/types/transcription-model-response-metadata.ts +16 -0
  252. package/src/types/transcription-model.ts +9 -0
  253. package/src/types/usage.ts +200 -0
  254. package/src/types/warning.ts +7 -0
  255. package/src/ui/__snapshots__/append-response-messages.test.ts.snap +416 -0
  256. package/src/ui/__snapshots__/convert-to-model-messages.test.ts.snap +419 -0
  257. package/src/ui/__snapshots__/process-chat-text-response.test.ts.snap +142 -0
  258. package/src/ui/call-completion-api.ts +157 -0
  259. package/src/ui/chat-transport.ts +83 -0
  260. package/src/ui/chat.test-d.ts +233 -0
  261. package/src/ui/chat.test.ts +2695 -0
  262. package/src/ui/chat.ts +716 -0
  263. package/src/ui/convert-file-list-to-file-ui-parts.ts +36 -0
  264. package/src/ui/convert-to-model-messages.test.ts +2775 -0
  265. package/src/ui/convert-to-model-messages.ts +373 -0
  266. package/src/ui/default-chat-transport.ts +36 -0
  267. package/src/ui/direct-chat-transport.test.ts +446 -0
  268. package/src/ui/direct-chat-transport.ts +118 -0
  269. package/src/ui/http-chat-transport.test.ts +185 -0
  270. package/src/ui/http-chat-transport.ts +292 -0
  271. package/src/ui/index.ts +71 -0
  272. package/src/ui/last-assistant-message-is-complete-with-approval-responses.ts +44 -0
  273. package/src/ui/last-assistant-message-is-complete-with-tool-calls.test.ts +371 -0
  274. package/src/ui/last-assistant-message-is-complete-with-tool-calls.ts +39 -0
  275. package/src/ui/process-text-stream.test.ts +38 -0
  276. package/src/ui/process-text-stream.ts +16 -0
  277. package/src/ui/process-ui-message-stream.test.ts +8294 -0
  278. package/src/ui/process-ui-message-stream.ts +761 -0
  279. package/src/ui/text-stream-chat-transport.ts +23 -0
  280. package/src/ui/transform-text-to-ui-message-stream.test.ts +124 -0
  281. package/src/ui/transform-text-to-ui-message-stream.ts +27 -0
  282. package/src/ui/ui-messages.test.ts +48 -0
  283. package/src/ui/ui-messages.ts +534 -0
  284. package/src/ui/use-completion.ts +84 -0
  285. package/src/ui/validate-ui-messages.test.ts +1428 -0
  286. package/src/ui/validate-ui-messages.ts +476 -0
  287. package/src/ui-message-stream/create-ui-message-stream-response.test.ts +266 -0
  288. package/src/ui-message-stream/create-ui-message-stream-response.ts +32 -0
  289. package/src/ui-message-stream/create-ui-message-stream.test.ts +639 -0
  290. package/src/ui-message-stream/create-ui-message-stream.ts +124 -0
  291. package/src/ui-message-stream/get-response-ui-message-id.test.ts +55 -0
  292. package/src/ui-message-stream/get-response-ui-message-id.ts +24 -0
  293. package/src/ui-message-stream/handle-ui-message-stream-finish.test.ts +429 -0
  294. package/src/ui-message-stream/handle-ui-message-stream-finish.ts +135 -0
  295. package/src/ui-message-stream/index.ts +13 -0
  296. package/src/ui-message-stream/json-to-sse-transform-stream.ts +12 -0
  297. package/src/ui-message-stream/pipe-ui-message-stream-to-response.test.ts +90 -0
  298. package/src/ui-message-stream/pipe-ui-message-stream-to-response.ts +40 -0
  299. package/src/ui-message-stream/read-ui-message-stream.test.ts +122 -0
  300. package/src/ui-message-stream/read-ui-message-stream.ts +87 -0
  301. package/src/ui-message-stream/ui-message-chunks.test-d.ts +18 -0
  302. package/src/ui-message-stream/ui-message-chunks.ts +344 -0
  303. package/src/ui-message-stream/ui-message-stream-headers.ts +7 -0
  304. package/src/ui-message-stream/ui-message-stream-on-finish-callback.ts +32 -0
  305. package/src/ui-message-stream/ui-message-stream-response-init.ts +5 -0
  306. package/src/ui-message-stream/ui-message-stream-writer.ts +24 -0
  307. package/src/util/as-array.ts +3 -0
  308. package/src/util/async-iterable-stream.test.ts +241 -0
  309. package/src/util/async-iterable-stream.ts +94 -0
  310. package/src/util/consume-stream.ts +29 -0
  311. package/src/util/cosine-similarity.test.ts +57 -0
  312. package/src/util/cosine-similarity.ts +47 -0
  313. package/src/util/create-resolvable-promise.ts +30 -0
  314. package/src/util/create-stitchable-stream.test.ts +239 -0
  315. package/src/util/create-stitchable-stream.ts +112 -0
  316. package/src/util/data-url.ts +17 -0
  317. package/src/util/deep-partial.ts +84 -0
  318. package/src/util/detect-media-type.test.ts +670 -0
  319. package/src/util/detect-media-type.ts +184 -0
  320. package/src/util/download/download-function.ts +45 -0
  321. package/src/util/download/download.test.ts +69 -0
  322. package/src/util/download/download.ts +46 -0
  323. package/src/util/error-handler.ts +1 -0
  324. package/src/util/fix-json.test.ts +279 -0
  325. package/src/util/fix-json.ts +401 -0
  326. package/src/util/get-potential-start-index.test.ts +34 -0
  327. package/src/util/get-potential-start-index.ts +30 -0
  328. package/src/util/index.ts +11 -0
  329. package/src/util/is-deep-equal-data.test.ts +119 -0
  330. package/src/util/is-deep-equal-data.ts +48 -0
  331. package/src/util/is-non-empty-object.ts +5 -0
  332. package/src/util/job.ts +1 -0
  333. package/src/util/log-v2-compatibility-warning.ts +21 -0
  334. package/src/util/merge-abort-signals.test.ts +155 -0
  335. package/src/util/merge-abort-signals.ts +43 -0
  336. package/src/util/merge-objects.test.ts +118 -0
  337. package/src/util/merge-objects.ts +79 -0
  338. package/src/util/now.ts +4 -0
  339. package/src/util/parse-partial-json.test.ts +80 -0
  340. package/src/util/parse-partial-json.ts +30 -0
  341. package/src/util/prepare-headers.test.ts +51 -0
  342. package/src/util/prepare-headers.ts +14 -0
  343. package/src/util/prepare-retries.test.ts +10 -0
  344. package/src/util/prepare-retries.ts +47 -0
  345. package/src/util/retry-error.ts +41 -0
  346. package/src/util/retry-with-exponential-backoff.test.ts +446 -0
  347. package/src/util/retry-with-exponential-backoff.ts +154 -0
  348. package/src/util/serial-job-executor.test.ts +162 -0
  349. package/src/util/serial-job-executor.ts +36 -0
  350. package/src/util/simulate-readable-stream.test.ts +98 -0
  351. package/src/util/simulate-readable-stream.ts +39 -0
  352. package/src/util/split-array.test.ts +60 -0
  353. package/src/util/split-array.ts +20 -0
  354. package/src/util/value-of.ts +65 -0
  355. package/src/util/write-to-server-response.test.ts +266 -0
  356. package/src/util/write-to-server-response.ts +49 -0
  357. package/src/version.ts +5 -0
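The most visible functional change in this diff is the new AI_UIMessageStreamError (new source file package/src/error/ui-message-stream-error.ts and docs page ai-ui-message-stream-error.mdx in the list above): in the dist/index.mjs diff below, processUIMessageStream now throws UIMessageStreamError instead of a plain Error when a text-delta, text-end, reasoning-delta, reasoning-end, or tool-input-delta chunk references a part that was never started. A minimal handling sketch, assuming UIMessageStreamError is exported from the ai package like the other AI_* errors (its isInstance, chunkType, and chunkId members are visible in the diff); the helper name is illustrative only:

import { UIMessageStreamError } from 'ai';

// Hypothetical helper: turn a UI-message-stream processing failure into a log-friendly string.
function describeStreamError(error: unknown): string {
  if (UIMessageStreamError.isInstance(error)) {
    // chunkType / chunkId identify the offending chunk, e.g. a "text-delta"
    // that arrived before its matching "text-start".
    return `Malformed UI message stream: ${error.message} (chunk type "${error.chunkType}", id "${error.chunkId}")`;
  }
  return error instanceof Error ? error.message : String(error);
}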
package/dist/index.mjs CHANGED
@@ -1,7 +1,7 @@
  var __defProp = Object.defineProperty;
  var __export = (target, all) => {
- for (var name16 in all)
- __defProp(target, name16, { get: all[name16], enumerable: true });
+ for (var name17 in all)
+ __defProp(target, name17, { get: all[name17], enumerable: true });
  };

  // src/index.ts
@@ -26,7 +26,7 @@ import {

  // src/error/index.ts
  import {
- AISDKError as AISDKError17,
+ AISDKError as AISDKError18,
  APICallError,
  EmptyResponseBodyError,
  InvalidPromptError,
@@ -300,21 +300,22 @@ var UnsupportedModelVersionError = class extends AISDKError12 {
  }
  };

- // src/prompt/invalid-data-content-error.ts
+ // src/error/ui-message-stream-error.ts
  import { AISDKError as AISDKError13 } from "@ai-sdk/provider";
- var name11 = "AI_InvalidDataContentError";
+ var name11 = "AI_UIMessageStreamError";
  var marker11 = `vercel.ai.error.${name11}`;
  var symbol11 = Symbol.for(marker11);
  var _a11;
- var InvalidDataContentError = class extends AISDKError13 {
+ var UIMessageStreamError = class extends AISDKError13 {
  constructor({
- content,
- cause,
- message = `Invalid data content. Expected a base64 string, Uint8Array, ArrayBuffer, or Buffer, but got ${typeof content}.`
+ chunkType,
+ chunkId,
+ message
  }) {
- super({ name: name11, message, cause });
+ super({ name: name11, message });
  this[_a11] = true;
- this.content = content;
+ this.chunkType = chunkType;
+ this.chunkId = chunkId;
  }
  static isInstance(error) {
  return AISDKError13.hasMarker(error, marker11);
@@ -322,20 +323,21 @@ var InvalidDataContentError = class extends AISDKError13 {
  };
  _a11 = symbol11;

- // src/prompt/invalid-message-role-error.ts
+ // src/prompt/invalid-data-content-error.ts
  import { AISDKError as AISDKError14 } from "@ai-sdk/provider";
- var name12 = "AI_InvalidMessageRoleError";
+ var name12 = "AI_InvalidDataContentError";
  var marker12 = `vercel.ai.error.${name12}`;
  var symbol12 = Symbol.for(marker12);
  var _a12;
- var InvalidMessageRoleError = class extends AISDKError14 {
+ var InvalidDataContentError = class extends AISDKError14 {
  constructor({
- role,
- message = `Invalid message role: '${role}'. Must be one of: "system", "user", "assistant", "tool".`
+ content,
+ cause,
+ message = `Invalid data content. Expected a base64 string, Uint8Array, ArrayBuffer, or Buffer, but got ${typeof content}.`
  }) {
- super({ name: name12, message });
+ super({ name: name12, message, cause });
  this[_a12] = true;
- this.role = role;
+ this.content = content;
  }
  static isInstance(error) {
  return AISDKError14.hasMarker(error, marker12);
@@ -343,20 +345,20 @@ var InvalidMessageRoleError = class extends AISDKError14 {
  };
  _a12 = symbol12;

- // src/prompt/message-conversion-error.ts
+ // src/prompt/invalid-message-role-error.ts
  import { AISDKError as AISDKError15 } from "@ai-sdk/provider";
- var name13 = "AI_MessageConversionError";
+ var name13 = "AI_InvalidMessageRoleError";
  var marker13 = `vercel.ai.error.${name13}`;
  var symbol13 = Symbol.for(marker13);
  var _a13;
- var MessageConversionError = class extends AISDKError15 {
+ var InvalidMessageRoleError = class extends AISDKError15 {
  constructor({
- originalMessage,
- message
+ role,
+ message = `Invalid message role: '${role}'. Must be one of: "system", "user", "assistant", "tool".`
  }) {
  super({ name: name13, message });
  this[_a13] = true;
- this.originalMessage = originalMessage;
+ this.role = role;
  }
  static isInstance(error) {
  return AISDKError15.hasMarker(error, marker13);
@@ -364,32 +366,53 @@ var MessageConversionError = class extends AISDKError15 {
  };
  _a13 = symbol13;

- // src/error/index.ts
- import { DownloadError } from "@ai-sdk/provider-utils";
-
- // src/util/retry-error.ts
+ // src/prompt/message-conversion-error.ts
  import { AISDKError as AISDKError16 } from "@ai-sdk/provider";
- var name14 = "AI_RetryError";
+ var name14 = "AI_MessageConversionError";
  var marker14 = `vercel.ai.error.${name14}`;
  var symbol14 = Symbol.for(marker14);
  var _a14;
- var RetryError = class extends AISDKError16 {
+ var MessageConversionError = class extends AISDKError16 {
+ constructor({
+ originalMessage,
+ message
+ }) {
+ super({ name: name14, message });
+ this[_a14] = true;
+ this.originalMessage = originalMessage;
+ }
+ static isInstance(error) {
+ return AISDKError16.hasMarker(error, marker14);
+ }
+ };
+ _a14 = symbol14;
+
+ // src/error/index.ts
+ import { DownloadError } from "@ai-sdk/provider-utils";
+
+ // src/util/retry-error.ts
+ import { AISDKError as AISDKError17 } from "@ai-sdk/provider";
+ var name15 = "AI_RetryError";
+ var marker15 = `vercel.ai.error.${name15}`;
+ var symbol15 = Symbol.for(marker15);
+ var _a15;
+ var RetryError = class extends AISDKError17 {
  constructor({
  message,
  reason,
  errors
  }) {
- super({ name: name14, message });
- this[_a14] = true;
+ super({ name: name15, message });
+ this[_a15] = true;
  this.reason = reason;
  this.errors = errors;
  this.lastError = errors[errors.length - 1];
  }
  static isInstance(error) {
- return AISDKError16.hasMarker(error, marker14);
+ return AISDKError17.hasMarker(error, marker15);
  }
  };
- _a14 = symbol14;
+ _a15 = symbol15;

  // src/logger/log-warnings.ts
  function formatWarning({
@@ -652,7 +675,7 @@ function resolveEmbeddingModel(model) {
  return getGlobalProvider().embeddingModel(model);
  }
  function resolveTranscriptionModel(model) {
- var _a16, _b;
+ var _a17, _b;
  if (typeof model !== "string") {
  if (model.specificationVersion !== "v3" && model.specificationVersion !== "v2") {
  const unsupportedModel = model;
@@ -664,10 +687,10 @@ function resolveTranscriptionModel(model) {
  }
  return asTranscriptionModelV3(model);
  }
- return (_b = (_a16 = getGlobalProvider()).transcriptionModel) == null ? void 0 : _b.call(_a16, model);
+ return (_b = (_a17 = getGlobalProvider()).transcriptionModel) == null ? void 0 : _b.call(_a17, model);
  }
  function resolveSpeechModel(model) {
- var _a16, _b;
+ var _a17, _b;
  if (typeof model !== "string") {
  if (model.specificationVersion !== "v3" && model.specificationVersion !== "v2") {
  const unsupportedModel = model;
@@ -679,7 +702,7 @@ function resolveSpeechModel(model) {
  }
  return asSpeechModelV3(model);
  }
- return (_b = (_a16 = getGlobalProvider()).speechModel) == null ? void 0 : _b.call(_a16, model);
+ return (_b = (_a17 = getGlobalProvider()).speechModel) == null ? void 0 : _b.call(_a17, model);
  }
  function resolveImageModel(model) {
  if (typeof model !== "string") {
@@ -696,8 +719,8 @@ function resolveImageModel(model) {
  return getGlobalProvider().imageModel(model);
  }
  function getGlobalProvider() {
- var _a16;
- return (_a16 = globalThis.AI_SDK_DEFAULT_PROVIDER) != null ? _a16 : gateway;
+ var _a17;
+ return (_a17 = globalThis.AI_SDK_DEFAULT_PROVIDER) != null ? _a17 : gateway;
  }

  // src/prompt/call-settings.ts
@@ -921,11 +944,11 @@ import {
  } from "@ai-sdk/provider-utils";

  // src/version.ts
- var VERSION = true ? "6.0.33" : "0.0.0-test";
+ var VERSION = true ? "6.0.35" : "0.0.0-test";

  // src/util/download/download.ts
  var download = async ({ url }) => {
- var _a16;
+ var _a17;
  const urlText = url.toString();
  try {
  const response = await fetch(urlText, {
@@ -944,7 +967,7 @@ var download = async ({ url }) => {
  }
  return {
  data: new Uint8Array(await response.arrayBuffer()),
- mediaType: (_a16 = response.headers.get("content-type")) != null ? _a16 : void 0
+ mediaType: (_a17 = response.headers.get("content-type")) != null ? _a17 : void 0
  };
  } catch (error) {
  if (DownloadError2.isInstance(error)) {
@@ -962,7 +985,7 @@ var createDefaultDownloadFunction = (download2 = download) => (requestedDownload
  );

  // src/prompt/data-content.ts
- import { AISDKError as AISDKError18 } from "@ai-sdk/provider";
+ import { AISDKError as AISDKError19 } from "@ai-sdk/provider";
  import {
  convertBase64ToUint8Array as convertBase64ToUint8Array2,
  convertUint8ArrayToBase64
@@ -993,8 +1016,8 @@ var dataContentSchema = z.union([
  z.custom(
  // Buffer might not be available in some environments such as CloudFlare:
  (value) => {
- var _a16, _b;
- return (_b = (_a16 = globalThis.Buffer) == null ? void 0 : _a16.isBuffer(value)) != null ? _b : false;
+ var _a17, _b;
+ return (_b = (_a17 = globalThis.Buffer) == null ? void 0 : _a17.isBuffer(value)) != null ? _b : false;
  },
  { message: "Must be a Buffer" }
  )
@@ -1017,7 +1040,7 @@ function convertToLanguageModelV3DataContent(content) {
  content.toString()
  );
  if (dataUrlMediaType == null || base64Content == null) {
- throw new AISDKError18({
+ throw new AISDKError19({
  name: "InvalidDataContentError",
  message: `Invalid data URL format in content ${content.toString()}`
  });
@@ -1234,8 +1257,8 @@ async function downloadAssets(messages, download2, supportedUrls) {
  ).flat().filter(
  (part) => part.type === "image" || part.type === "file"
  ).map((part) => {
- var _a16;
- const mediaType = (_a16 = part.mediaType) != null ? _a16 : part.type === "image" ? "image/*" : void 0;
+ var _a17;
+ const mediaType = (_a17 = part.mediaType) != null ? _a17 : part.type === "image" ? "image/*" : void 0;
  let data = part.type === "image" ? part.image : part.data;
  if (typeof data === "string") {
  try {
@@ -1265,7 +1288,7 @@ async function downloadAssets(messages, download2, supportedUrls) {
  );
  }
  function convertPartToLanguageModelPart(part, downloadedAssets) {
- var _a16;
+ var _a17;
  if (part.type === "text") {
  return {
  type: "text",
@@ -1298,7 +1321,7 @@ function convertPartToLanguageModelPart(part, downloadedAssets) {
  switch (type) {
  case "image": {
  if (data instanceof Uint8Array || typeof data === "string") {
- mediaType = (_a16 = detectMediaType({ data, signatures: imageMediaTypeSignatures })) != null ? _a16 : mediaType;
+ mediaType = (_a17 = detectMediaType({ data, signatures: imageMediaTypeSignatures })) != null ? _a17 : mediaType;
  }
  return {
  type: "file",
@@ -1486,10 +1509,10 @@ async function prepareToolsAndToolChoice({
  };
  }
  const filteredTools = activeTools != null ? Object.entries(tools).filter(
- ([name16]) => activeTools.includes(name16)
+ ([name17]) => activeTools.includes(name17)
  ) : Object.entries(tools);
  const languageModelTools = [];
- for (const [name16, tool2] of filteredTools) {
+ for (const [name17, tool2] of filteredTools) {
  const toolType = tool2.type;
  switch (toolType) {
  case void 0:
@@ -1497,7 +1520,7 @@
  case "function":
  languageModelTools.push({
  type: "function",
- name: name16,
+ name: name17,
  description: tool2.description,
  inputSchema: await asSchema(tool2.inputSchema).jsonSchema,
  ...tool2.inputExamples != null ? { inputExamples: tool2.inputExamples } : {},
@@ -1508,7 +1531,7 @@
  case "provider":
  languageModelTools.push({
  type: "provider",
- name: name16,
+ name: name17,
  id: tool2.id,
  args: tool2.args
  });
@@ -1798,14 +1821,14 @@ async function standardizePrompt(prompt) {

  // src/prompt/wrap-gateway-error.ts
  import { GatewayAuthenticationError } from "@ai-sdk/gateway";
- import { AISDKError as AISDKError19 } from "@ai-sdk/provider";
+ import { AISDKError as AISDKError20 } from "@ai-sdk/provider";
  function wrapGatewayError(error) {
  if (!GatewayAuthenticationError.isInstance(error))
  return error;
  const isProductionEnv = (process == null ? void 0 : process.env.NODE_ENV) === "production";
  const moreInfoURL = "https://ai-sdk.dev/unauthenticated-ai-gateway";
  if (isProductionEnv) {
- return new AISDKError19({
+ return new AISDKError20({
  name: "GatewayError",
  message: `Unauthenticated. Configure AI_GATEWAY_API_KEY or use a provider module. Learn more: ${moreInfoURL}`
  });
@@ -1846,7 +1869,7 @@ function getBaseTelemetryAttributes({
  telemetry,
  headers
  }) {
- var _a16;
+ var _a17;
  return {
  "ai.model.provider": model.provider,
  "ai.model.id": model.modelId,
@@ -1865,7 +1888,7 @@
  return attributes;
  }, {}),
  // add metadata as attributes:
- ...Object.entries((_a16 = telemetry == null ? void 0 : telemetry.metadata) != null ? _a16 : {}).reduce(
+ ...Object.entries((_a17 = telemetry == null ? void 0 : telemetry.metadata) != null ? _a17 : {}).reduce(
  (attributes, [key, value]) => {
  attributes[`ai.telemetry.metadata.${key}`] = value;
  return attributes;
@@ -1890,7 +1913,7 @@ var noopTracer = {
  startSpan() {
  return noopSpan;
  },
- startActiveSpan(name16, arg1, arg2, arg3) {
+ startActiveSpan(name17, arg1, arg2, arg3) {
  if (typeof arg1 === "function") {
  return arg1(noopSpan);
  }
@@ -1960,14 +1983,14 @@ function getTracer({
  // src/telemetry/record-span.ts
  import { SpanStatusCode } from "@opentelemetry/api";
  async function recordSpan({
- name: name16,
+ name: name17,
  tracer,
  attributes,
  fn,
  endWhenDone = true
  }) {
  return tracer.startActiveSpan(
- name16,
+ name17,
  { attributes: await attributes },
  async (span) => {
  try {
@@ -2097,12 +2120,12 @@ function createNullLanguageModelUsage() {
  };
  }
  function addLanguageModelUsage(usage1, usage2) {
- var _a16, _b, _c, _d, _e, _f, _g, _h, _i, _j;
+ var _a17, _b, _c, _d, _e, _f, _g, _h, _i, _j;
  return {
  inputTokens: addTokenCounts(usage1.inputTokens, usage2.inputTokens),
  inputTokenDetails: {
  noCacheTokens: addTokenCounts(
- (_a16 = usage1.inputTokenDetails) == null ? void 0 : _a16.noCacheTokens,
+ (_a17 = usage1.inputTokenDetails) == null ? void 0 : _a17.noCacheTokens,
  (_b = usage2.inputTokenDetails) == null ? void 0 : _b.noCacheTokens
  ),
  cacheReadTokens: addTokenCounts(
@@ -2921,7 +2944,7 @@ var text = () => ({
  });
  var object = ({
  schema: inputSchema,
- name: name16,
+ name: name17,
  description
  }) => {
  const schema = asSchema2(inputSchema);
@@ -2930,7 +2953,7 @@ var object = ({
  responseFormat: resolve(schema.jsonSchema).then((jsonSchema2) => ({
  type: "json",
  schema: jsonSchema2,
- ...name16 != null && { name: name16 },
+ ...name17 != null && { name: name17 },
  ...description != null && { description }
  })),
  async parseCompleteOutput({ text: text2 }, context) {
@@ -2984,7 +3007,7 @@
  };
  var array = ({
  element: inputElementSchema,
- name: name16,
+ name: name17,
  description
  }) => {
  const elementSchema = asSchema2(inputElementSchema);
@@ -3004,7 +3027,7 @@
  required: ["elements"],
  additionalProperties: false
  },
- ...name16 != null && { name: name16 },
+ ...name17 != null && { name: name17 },
  ...description != null && { description }
  };
  }),
@@ -3096,7 +3119,7 @@
  };
  var choice = ({
  options: choiceOptions,
- name: name16,
+ name: name17,
  description
  }) => {
  return {
@@ -3113,7 +3136,7 @@
  required: ["result"],
  additionalProperties: false
  },
- ...name16 != null && { name: name16 },
+ ...name17 != null && { name: name17 },
  ...description != null && { description }
  }),
  async parseCompleteOutput({ text: text2 }, context) {
@@ -3174,14 +3197,14 @@
  };
  };
  var json = ({
- name: name16,
+ name: name17,
  description
  } = {}) => {
  return {
  name: "json",
  responseFormat: Promise.resolve({
  type: "json",
- ...name16 != null && { name: name16 },
+ ...name17 != null && { name: name17 },
  ...description != null && { description }
  }),
  async parseCompleteOutput({ text: text2 }, context) {
@@ -3230,7 +3253,7 @@ async function parseToolCall({
  system,
  messages
  }) {
- var _a16;
+ var _a17;
  try {
  if (tools == null) {
  if (toolCall.providerExecuted && toolCall.dynamic) {
@@ -3279,7 +3302,7 @@
  dynamic: true,
  invalid: true,
  error,
- title: (_a16 = tools == null ? void 0 : tools[toolCall.toolName]) == null ? void 0 : _a16.title,
+ title: (_a17 = tools == null ? void 0 : tools[toolCall.toolName]) == null ? void 0 : _a17.title,
  providerExecuted: toolCall.providerExecuted,
  providerMetadata: toolCall.providerMetadata
  };
@@ -3418,8 +3441,8 @@ function stepCountIs(stepCount) {
  }
  function hasToolCall(toolName) {
  return ({ steps }) => {
- var _a16, _b, _c;
- return (_c = (_b = (_a16 = steps[steps.length - 1]) == null ? void 0 : _a16.toolCalls) == null ? void 0 : _b.some(
+ var _a17, _b, _c;
+ return (_c = (_b = (_a17 = steps[steps.length - 1]) == null ? void 0 : _a17.toolCalls) == null ? void 0 : _b.some(
  (toolCall) => toolCall.toolName === toolName
  )) != null ? _c : false;
  };
@@ -3673,7 +3696,7 @@ async function generateText({
  }),
  tracer,
  fn: async (span) => {
- var _a16, _b, _c, _d, _e, _f, _g, _h;
+ var _a17, _b, _c, _d, _e, _f, _g, _h;
  const initialMessages = initialPrompt.messages;
  const responseMessages = [];
  const { approvedToolApprovals, deniedToolApprovals } = collectToolApprovals({ messages: initialMessages });
@@ -3768,7 +3791,7 @@
  experimental_context
  }));
  const stepModel = resolveLanguageModel(
- (_a16 = prepareStepResult == null ? void 0 : prepareStepResult.model) != null ? _a16 : model
+ (_a17 = prepareStepResult == null ? void 0 : prepareStepResult.model) != null ? _a17 : model
  );
  const promptMessages = await convertToLanguageModelPrompt({
  prompt: {
@@ -3786,7 +3809,7 @@
  });
  currentModelResponse = await retry(
  () => {
- var _a17;
+ var _a18;
  return recordSpan({
  name: "ai.generateText.doGenerate",
  attributes: selectTelemetryAttributes({
@@ -3818,14 +3841,14 @@
  "gen_ai.request.max_tokens": settings.maxOutputTokens,
  "gen_ai.request.presence_penalty": settings.presencePenalty,
  "gen_ai.request.stop_sequences": settings.stopSequences,
- "gen_ai.request.temperature": (_a17 = settings.temperature) != null ? _a17 : void 0,
+ "gen_ai.request.temperature": (_a18 = settings.temperature) != null ? _a18 : void 0,
  "gen_ai.request.top_k": settings.topK,
  "gen_ai.request.top_p": settings.topP
  }
  }),
  tracer,
  fn: async (span2) => {
- var _a18, _b2, _c2, _d2, _e2, _f2, _g2, _h2;
+ var _a19, _b2, _c2, _d2, _e2, _f2, _g2, _h2;
  const stepProviderOptions = mergeObjects(
  providerOptions,
  prepareStepResult == null ? void 0 : prepareStepResult.providerOptions
@@ -3841,7 +3864,7 @@
  headers: headersWithUserAgent
  });
  const responseData = {
- id: (_b2 = (_a18 = result.response) == null ? void 0 : _a18.id) != null ? _b2 : generateId2(),
+ id: (_b2 = (_a19 = result.response) == null ? void 0 : _a19.id) != null ? _b2 : generateId2(),
  timestamp: (_d2 = (_c2 = result.response) == null ? void 0 : _c2.timestamp) != null ? _d2 : /* @__PURE__ */ new Date(),
  modelId: (_f2 = (_e2 = result.response) == null ? void 0 : _e2.modelId) != null ? _f2 : stepModel.modelId,
  headers: (_g2 = result.response) == null ? void 0 : _g2.headers,
@@ -4720,21 +4743,23 @@ function processUIMessageStream({
  new TransformStream({
  async transform(chunk, controller) {
  await runUpdateMessageJob(async ({ state, write }) => {
- var _a16, _b, _c, _d;
+ var _a17, _b, _c, _d;
  function getToolInvocation(toolCallId) {
  const toolInvocations = state.message.parts.filter(isToolUIPart);
  const toolInvocation = toolInvocations.find(
  (invocation) => invocation.toolCallId === toolCallId
  );
  if (toolInvocation == null) {
- throw new Error(
- `no tool invocation found for tool call ${toolCallId}`
- );
+ throw new UIMessageStreamError({
+ chunkType: "tool-invocation",
+ chunkId: toolCallId,
+ message: `No tool invocation found for tool call ID "${toolCallId}".`
+ });
  }
  return toolInvocation;
  }
  function updateToolPart(options) {
- var _a17;
+ var _a18;
  const part = state.message.parts.find(
  (part2) => isStaticToolUIPart(part2) && part2.toolCallId === options.toolCallId
  );
@@ -4750,7 +4775,7 @@
  if (options.title !== void 0) {
  anyPart.title = options.title;
  }
- anyPart.providerExecuted = (_a17 = anyOptions.providerExecuted) != null ? _a17 : part.providerExecuted;
+ anyPart.providerExecuted = (_a18 = anyOptions.providerExecuted) != null ? _a18 : part.providerExecuted;
  if (anyOptions.providerMetadata != null && part.state === "input-available") {
  part.callProviderMetadata = anyOptions.providerMetadata;
  }
@@ -4771,7 +4796,7 @@
  }
  }
  function updateDynamicToolPart(options) {
- var _a17, _b2;
+ var _a18, _b2;
  const part = state.message.parts.find(
  (part2) => part2.type === "dynamic-tool" && part2.toolCallId === options.toolCallId
  );
@@ -4783,7 +4808,7 @@
  anyPart.input = anyOptions.input;
  anyPart.output = anyOptions.output;
  anyPart.errorText = anyOptions.errorText;
- anyPart.rawInput = (_a17 = anyOptions.rawInput) != null ? _a17 : anyPart.rawInput;
+ anyPart.rawInput = (_a18 = anyOptions.rawInput) != null ? _a18 : anyPart.rawInput;
  anyPart.preliminary = anyOptions.preliminary;
  if (options.title !== void 0) {
  anyPart.title = options.title;
@@ -4835,13 +4860,27 @@
  }
  case "text-delta": {
  const textPart = state.activeTextParts[chunk.id];
+ if (textPart == null) {
+ throw new UIMessageStreamError({
+ chunkType: "text-delta",
+ chunkId: chunk.id,
+ message: `Received text-delta for missing text part with ID "${chunk.id}". Ensure a "text-start" chunk is sent before any "text-delta" chunks.`
+ });
+ }
  textPart.text += chunk.delta;
- textPart.providerMetadata = (_a16 = chunk.providerMetadata) != null ? _a16 : textPart.providerMetadata;
+ textPart.providerMetadata = (_a17 = chunk.providerMetadata) != null ? _a17 : textPart.providerMetadata;
  write();
  break;
  }
  case "text-end": {
  const textPart = state.activeTextParts[chunk.id];
+ if (textPart == null) {
+ throw new UIMessageStreamError({
+ chunkType: "text-end",
+ chunkId: chunk.id,
+ message: `Received text-end for missing text part with ID "${chunk.id}". Ensure a "text-start" chunk is sent before any "text-end" chunks.`
+ });
+ }
  textPart.state = "done";
  textPart.providerMetadata = (_b = chunk.providerMetadata) != null ? _b : textPart.providerMetadata;
  delete state.activeTextParts[chunk.id];
@@ -4862,6 +4901,13 @@
  }
  case "reasoning-delta": {
  const reasoningPart = state.activeReasoningParts[chunk.id];
+ if (reasoningPart == null) {
+ throw new UIMessageStreamError({
+ chunkType: "reasoning-delta",
+ chunkId: chunk.id,
+ message: `Received reasoning-delta for missing reasoning part with ID "${chunk.id}". Ensure a "reasoning-start" chunk is sent before any "reasoning-delta" chunks.`
+ });
+ }
  reasoningPart.text += chunk.delta;
  reasoningPart.providerMetadata = (_c = chunk.providerMetadata) != null ? _c : reasoningPart.providerMetadata;
  write();
@@ -4869,6 +4915,13 @@
  }
  case "reasoning-end": {
  const reasoningPart = state.activeReasoningParts[chunk.id];
+ if (reasoningPart == null) {
+ throw new UIMessageStreamError({
+ chunkType: "reasoning-end",
+ chunkId: chunk.id,
+ message: `Received reasoning-end for missing reasoning part with ID "${chunk.id}". Ensure a "reasoning-start" chunk is sent before any "reasoning-end" chunks.`
+ });
+ }
  reasoningPart.providerMetadata = (_d = chunk.providerMetadata) != null ? _d : reasoningPart.providerMetadata;
  reasoningPart.state = "done";
  delete state.activeReasoningParts[chunk.id];
@@ -4940,6 +4993,13 @@
  }
  case "tool-input-delta": {
  const partialToolCall = state.partialToolCalls[chunk.toolCallId];
+ if (partialToolCall == null) {
+ throw new UIMessageStreamError({
+ chunkType: "tool-input-delta",
+ chunkId: chunk.toolCallId,
+ message: `Received tool-input-delta for missing tool call with ID "${chunk.toolCallId}". Ensure a "tool-input-start" chunk is sent before any "tool-input-delta" chunks.`
+ });
+ }
  partialToolCall.text += chunk.inputTextDelta;
  const { value: partialArgs } = await parsePartialJson(
  partialToolCall.text
@@ -5275,13 +5335,13 @@ function createAsyncIterableStream(source) {
  const reader = this.getReader();
  let finished = false;
  async function cleanup(cancelStream) {
- var _a16;
+ var _a17;
  if (finished)
  return;
  finished = true;
  try {
  if (cancelStream) {
- await ((_a16 = reader.cancel) == null ? void 0 : _a16.call(reader));
+ await ((_a17 = reader.cancel) == null ? void 0 : _a17.call(reader));
  }
  } finally {
  try {
@@ -5447,8 +5507,8 @@ function createStitchableStream() {

  // src/util/now.ts
  function now() {
- var _a16, _b;
- return (_b = (_a16 = globalThis == null ? void 0 : globalThis.performance) == null ? void 0 : _a16.now()) != null ? _b : Date.now();
+ var _a17, _b;
+ return (_b = (_a17 = globalThis == null ? void 0 : globalThis.performance) == null ? void 0 : _a17.now()) != null ? _b : Date.now();
  }

  // src/generate-text/run-tools-transformation.ts
@@ -5797,7 +5857,7 @@ function createOutputTransformStream(output) {
  }
  return new TransformStream({
  async transform(chunk, controller) {
- var _a16;
+ var _a17;
  if (chunk.type === "finish-step" && textChunk.length > 0) {
  publishTextChunk({ controller });
  }
@@ -5824,7 +5884,7 @@
  }
  text2 += chunk.text;
  textChunk += chunk.text;
- textProviderMetadata = (_a16 = chunk.providerMetadata) != null ? _a16 : textProviderMetadata;
+ textProviderMetadata = (_a17 = chunk.providerMetadata) != null ? _a17 : textProviderMetadata;
  const result = await output.parsePartialOutput({ text: text2 });
  if (result !== void 0) {
  const currentJson = JSON.stringify(result.partial);
@@ -5893,7 +5953,7 @@ var DefaultStreamTextResult = class {
  let activeReasoningContent = {};
  const eventProcessor = new TransformStream({
  async transform(chunk, controller) {
- var _a16, _b, _c, _d;
+ var _a17, _b, _c, _d;
  controller.enqueue(chunk);
  const { part } = chunk;
  if (part.type === "text-delta" || part.type === "reasoning-delta" || part.type === "source" || part.type === "tool-call" || part.type === "tool-result" || part.type === "tool-input-start" || part.type === "tool-input-delta" || part.type === "raw") {
@@ -5923,7 +5983,7 @@
  return;
  }
  activeText.text += part.text;
- activeText.providerMetadata = (_a16 = part.providerMetadata) != null ? _a16 : activeText.providerMetadata;
+ activeText.providerMetadata = (_a17 = part.providerMetadata) != null ? _a17 : activeText.providerMetadata;
  }
  if (part.type === "text-end") {
  const activeText = activeTextContent[part.id];
@@ -6088,8 +6148,8 @@
  "ai.response.text": { output: () => finalStep.text },
  "ai.response.toolCalls": {
  output: () => {
- var _a16;
- return ((_a16 = finalStep.toolCalls) == null ? void 0 : _a16.length) ? JSON.stringify(finalStep.toolCalls) : void 0;
+ var _a17;
+ return ((_a17 = finalStep.toolCalls) == null ? void 0 : _a17.length) ? JSON.stringify(finalStep.toolCalls) : void 0;
  }
  },
  "ai.response.providerMetadata": JSON.stringify(
@@ -6311,7 +6371,7 @@
  responseMessages,
  usage
  }) {
- var _a16, _b, _c, _d, _e, _f;
+ var _a17, _b, _c, _d, _e, _f;
  const includeRawChunks2 = self.includeRawChunks;
  const stepTimeoutId = stepTimeoutMs != null ? setTimeout(() => stepAbortController.abort(), stepTimeoutMs) : void 0;
  let chunkTimeoutId = void 0;
@@ -6347,7 +6407,7 @@
  experimental_context
  }));
  const stepModel = resolveLanguageModel(
- (_a16 = prepareStepResult == null ? void 0 : prepareStepResult.model) != null ? _a16 : model
+ (_a17 = prepareStepResult == null ? void 0 : prepareStepResult.model) != null ? _a17 : model
  );
  const promptMessages = await convertToLanguageModelPrompt({
  prompt: {
@@ -6460,7 +6520,7 @@
  streamWithToolResults.pipeThrough(
  new TransformStream({
  async transform(chunk, controller) {
- var _a17, _b2, _c2, _d2, _e2;
+ var _a18, _b2, _c2, _d2, _e2;
  resetChunkTimeout();
  if (chunk.type === "stream-start") {
  warnings = chunk.warnings;
@@ -6534,7 +6594,7 @@
  }
  case "response-metadata": {
  stepResponse = {
- id: (_a17 = chunk.id) != null ? _a17 : stepResponse.id,
+ id: (_a18 = chunk.id) != null ? _a18 : stepResponse.id,
  timestamp: (_b2 = chunk.timestamp) != null ? _b2 : stepResponse.timestamp,
  modelId: (_c2 = chunk.modelId) != null ? _c2 : stepResponse.modelId
  };
@@ -6866,14 +6926,14 @@ var DefaultStreamTextResult = class {
6866
6926
  );
6867
6927
  }
6868
6928
  async consumeStream(options) {
6869
- var _a16;
6929
+ var _a17;
6870
6930
  try {
6871
6931
  await consumeStream({
6872
6932
  stream: this.fullStream,
6873
6933
  onError: options == null ? void 0 : options.onError
6874
6934
  });
6875
6935
  } catch (error) {
6876
- (_a16 = options == null ? void 0 : options.onError) == null ? void 0 : _a16.call(options, error);
6936
+ (_a17 = options == null ? void 0 : options.onError) == null ? void 0 : _a17.call(options, error);
6877
6937
  }
6878
6938
  }
6879
6939
  get experimental_partialOutputStream() {
@@ -6893,8 +6953,8 @@ var DefaultStreamTextResult = class {
6893
6953
  );
6894
6954
  }
6895
6955
  get elementStream() {
6896
- var _a16, _b, _c;
6897
- const transform = (_a16 = this.outputSpecification) == null ? void 0 : _a16.createElementStreamTransform();
6956
+ var _a17, _b, _c;
6957
+ const transform = (_a17 = this.outputSpecification) == null ? void 0 : _a17.createElementStreamTransform();
6898
6958
  if (transform == null) {
6899
6959
  throw new UnsupportedFunctionalityError2({
6900
6960
  functionality: `element streams in ${(_c = (_b = this.outputSpecification) == null ? void 0 : _b.name) != null ? _c : "text"} mode`
@@ -6904,8 +6964,8 @@ var DefaultStreamTextResult = class {
  }
  get output() {
  return this.finalStep.then((step) => {
- var _a16;
- const output = (_a16 = this.outputSpecification) != null ? _a16 : text();
+ var _a17;
+ const output = (_a17 = this.outputSpecification) != null ? _a17 : text();
  return output.parseCompleteOutput(
  { text: step.text },
  {
@@ -6932,8 +6992,8 @@ var DefaultStreamTextResult = class {
  responseMessageId: generateMessageId
  }) : void 0;
  const isDynamic = (part) => {
- var _a16;
- const tool2 = (_a16 = this.tools) == null ? void 0 : _a16[part.toolName];
+ var _a17;
+ const tool2 = (_a17 = this.tools) == null ? void 0 : _a17[part.toolName];
  if (tool2 == null) {
  return part.dynamic;
  }
@@ -7271,10 +7331,10 @@ var ToolLoopAgent = class {
  return this.settings.tools;
  }
  async prepareCall(options) {
- var _a16, _b, _c, _d;
+ var _a17, _b, _c, _d;
  const baseCallArgs = {
  ...this.settings,
- stopWhen: (_a16 = this.settings.stopWhen) != null ? _a16 : stepCountIs(20),
+ stopWhen: (_a17 = this.settings.stopWhen) != null ? _a17 : stepCountIs(20),
  ...options
  };
  const preparedCallArgs = (_d = await ((_c = (_b = this.settings).prepareCall) == null ? void 0 : _c.call(_b, baseCallArgs))) != null ? _d : baseCallArgs;
@@ -7413,7 +7473,7 @@ function readUIMessageStream({
  onError,
  terminateOnError = false
  }) {
- var _a16;
+ var _a17;
  let controller;
  let hasErrored = false;
  const outputStream = new ReadableStream({
@@ -7422,7 +7482,7 @@ function readUIMessageStream({
  }
  });
  const state = createStreamingUIMessageState({
- messageId: (_a16 = message == null ? void 0 : message.id) != null ? _a16 : "",
+ messageId: (_a17 = message == null ? void 0 : message.id) != null ? _a17 : "",
  lastMessage: message
  });
  const handleError = (error) => {
@@ -7491,7 +7551,7 @@ async function convertToModelMessages(messages, options) {
  modelMessages.push({
  role: "user",
  content: message.parts.map((part) => {
- var _a16;
+ var _a17;
  if (isTextUIPart(part)) {
  return {
  type: "text",
@@ -7509,7 +7569,7 @@ async function convertToModelMessages(messages, options) {
  };
  }
  if (isDataUIPart(part)) {
- return (_a16 = options == null ? void 0 : options.convertDataPart) == null ? void 0 : _a16.call(
+ return (_a17 = options == null ? void 0 : options.convertDataPart) == null ? void 0 : _a17.call(
  options,
  part
  );
@@ -7522,7 +7582,7 @@ async function convertToModelMessages(messages, options) {
  if (message.parts != null) {
  let block = [];
  async function processBlock() {
- var _a16, _b, _c, _d, _e, _f;
+ var _a17, _b, _c, _d, _e, _f;
  if (block.length === 0) {
  return;
  }
@@ -7554,7 +7614,7 @@ async function convertToModelMessages(messages, options) {
  type: "tool-call",
  toolCallId: part.toolCallId,
  toolName,
- input: part.state === "output-error" ? (_a16 = part.input) != null ? _a16 : "rawInput" in part ? part.rawInput : void 0 : part.input,
+ input: part.state === "output-error" ? (_a17 = part.input) != null ? _a17 : "rawInput" in part ? part.rawInput : void 0 : part.input,
  providerExecuted: part.providerExecuted,
  ...part.callProviderMetadata != null ? { providerOptions: part.callProviderMetadata } : {}
  });
@@ -7600,8 +7660,8 @@ async function convertToModelMessages(messages, options) {
  });
  const toolParts = block.filter(
  (part) => {
- var _a17;
- return isToolUIPart(part) && (part.providerExecuted !== true || ((_a17 = part.approval) == null ? void 0 : _a17.approved) != null);
+ var _a18;
+ return isToolUIPart(part) && (part.providerExecuted !== true || ((_a18 = part.approval) == null ? void 0 : _a18.approved) != null);
  }
  );
  if (toolParts.length > 0) {
@@ -8191,7 +8251,7 @@ async function embed({
  }),
  tracer,
  fn: async (doEmbedSpan) => {
- var _a16;
+ var _a17;
  const modelResponse = await model.doEmbed({
  values: [value],
  abortSignal,
@@ -8199,7 +8259,7 @@ async function embed({
  providerOptions
  });
  const embedding2 = modelResponse.embeddings[0];
- const usage2 = (_a16 = modelResponse.usage) != null ? _a16 : { tokens: NaN };
+ const usage2 = (_a17 = modelResponse.usage) != null ? _a17 : { tokens: NaN };
  doEmbedSpan.setAttributes(
  await selectTelemetryAttributes({
  telemetry,
@@ -8313,7 +8373,7 @@ async function embedMany({
  }),
  tracer,
  fn: async (span) => {
- var _a16;
+ var _a17;
  const [maxEmbeddingsPerCall, supportsParallelCalls] = await Promise.all([
  model.maxEmbeddingsPerCall,
  model.supportsParallelCalls
@@ -8338,7 +8398,7 @@ async function embedMany({
  }),
  tracer,
  fn: async (doEmbedSpan) => {
- var _a17;
+ var _a18;
  const modelResponse = await model.doEmbed({
  values,
  abortSignal,
@@ -8346,7 +8406,7 @@ async function embedMany({
  providerOptions
  });
  const embeddings3 = modelResponse.embeddings;
- const usage2 = (_a17 = modelResponse.usage) != null ? _a17 : { tokens: NaN };
+ const usage2 = (_a18 = modelResponse.usage) != null ? _a18 : { tokens: NaN };
  doEmbedSpan.setAttributes(
  await selectTelemetryAttributes({
  telemetry,
@@ -8427,7 +8487,7 @@ async function embedMany({
  }),
  tracer,
  fn: async (doEmbedSpan) => {
- var _a17;
+ var _a18;
  const modelResponse = await model.doEmbed({
  values: chunk,
  abortSignal,
@@ -8435,7 +8495,7 @@ async function embedMany({
  providerOptions
  });
  const embeddings2 = modelResponse.embeddings;
- const usage = (_a17 = modelResponse.usage) != null ? _a17 : { tokens: NaN };
+ const usage = (_a18 = modelResponse.usage) != null ? _a18 : { tokens: NaN };
  doEmbedSpan.setAttributes(
  await selectTelemetryAttributes({
  telemetry,
@@ -8474,7 +8534,7 @@ async function embedMany({
  result.providerMetadata
  )) {
  providerMetadata[providerName] = {
- ...(_a16 = providerMetadata[providerName]) != null ? _a16 : {},
+ ...(_a17 = providerMetadata[providerName]) != null ? _a17 : {},
  ...metadata
  };
  }
@@ -8538,7 +8598,7 @@ async function generateImage({
  abortSignal,
  headers
  }) {
- var _a16, _b;
+ var _a17, _b;
  const model = resolveImageModel(modelArg);
  const headersWithUserAgent = withUserAgentSuffix5(
  headers != null ? headers : {},
@@ -8548,7 +8608,7 @@ async function generateImage({
  maxRetries: maxRetriesArg,
  abortSignal
  });
- const maxImagesPerCallWithDefault = (_a16 = maxImagesPerCall != null ? maxImagesPerCall : await invokeModelMaxImagesPerCall(model)) != null ? _a16 : 1;
+ const maxImagesPerCallWithDefault = (_a17 = maxImagesPerCall != null ? maxImagesPerCall : await invokeModelMaxImagesPerCall(model)) != null ? _a17 : 1;
  const callCount = Math.ceil(n / maxImagesPerCallWithDefault);
  const callImageCounts = Array.from({ length: callCount }, (_, i) => {
  if (i < callCount - 1) {
@@ -8589,13 +8649,13 @@ async function generateImage({
  images.push(
  ...result.images.map(
  (image) => {
- var _a17;
+ var _a18;
  return new DefaultGeneratedFile({
  data: image,
- mediaType: (_a17 = detectMediaType({
+ mediaType: (_a18 = detectMediaType({
  data: image,
  signatures: imageMediaTypeSignatures
- })) != null ? _a17 : "image/png"
+ })) != null ? _a18 : "image/png"
  });
  }
  )
@@ -8803,7 +8863,7 @@ var arrayOutputStrategy = (schema) => {
  isFirstDelta,
  isFinalDelta
  }) {
- var _a16;
+ var _a17;
  if (!isJSONObject(value) || !isJSONArray(value.elements)) {
  return {
  success: false,
@@ -8826,7 +8886,7 @@ var arrayOutputStrategy = (schema) => {
  }
  resultArray.push(result.value);
  }
- const publishedElementCount = (_a16 = latestObject == null ? void 0 : latestObject.length) != null ? _a16 : 0;
+ const publishedElementCount = (_a17 = latestObject == null ? void 0 : latestObject.length) != null ? _a17 : 0;
  let textDelta = "";
  if (isFirstDelta) {
  textDelta += "[";
@@ -9246,7 +9306,7 @@ async function generateObject(options) {
  }),
  tracer,
  fn: async (span) => {
- var _a16;
+ var _a17;
  let result;
  let finishReason;
  let usage;
@@ -9292,7 +9352,7 @@ async function generateObject(options) {
  }),
  tracer,
  fn: async (span2) => {
- var _a17, _b, _c, _d, _e, _f, _g, _h;
+ var _a18, _b, _c, _d, _e, _f, _g, _h;
  const result2 = await model.doGenerate({
  responseFormat: {
  type: "json",
@@ -9307,7 +9367,7 @@ async function generateObject(options) {
  headers: headersWithUserAgent
  });
  const responseData = {
- id: (_b = (_a17 = result2.response) == null ? void 0 : _a17.id) != null ? _b : generateId2(),
+ id: (_b = (_a18 = result2.response) == null ? void 0 : _a18.id) != null ? _b : generateId2(),
  timestamp: (_d = (_c = result2.response) == null ? void 0 : _c.timestamp) != null ? _d : currentDate(),
  modelId: (_f = (_e = result2.response) == null ? void 0 : _e.modelId) != null ? _f : model.modelId,
  headers: (_g = result2.response) == null ? void 0 : _g.headers,
@@ -9363,7 +9423,7 @@ async function generateObject(options) {
  usage = asLanguageModelUsage(generateResult.usage);
  warnings = generateResult.warnings;
  resultProviderMetadata = generateResult.providerMetadata;
- request = (_a16 = generateResult.request) != null ? _a16 : {};
+ request = (_a17 = generateResult.request) != null ? _a17 : {};
  response = generateResult.responseData;
  reasoning = generateResult.reasoning;
  logWarnings({
@@ -9426,9 +9486,9 @@ var DefaultGenerateObjectResult = class {
  this.reasoning = options.reasoning;
  }
  toJsonResponse(init) {
- var _a16;
+ var _a17;
  return new Response(JSON.stringify(this.object), {
- status: (_a16 = init == null ? void 0 : init.status) != null ? _a16 : 200,
+ status: (_a17 = init == null ? void 0 : init.status) != null ? _a17 : 200,
  headers: prepareHeaders(init == null ? void 0 : init.headers, {
  "content-type": "application/json; charset=utf-8"
  })
@@ -9557,8 +9617,8 @@ function simulateReadableStream({
  chunkDelayInMs = 0,
  _internal
  }) {
- var _a16;
- const delay2 = (_a16 = _internal == null ? void 0 : _internal.delay) != null ? _a16 : delayFunction;
+ var _a17;
+ const delay2 = (_a17 = _internal == null ? void 0 : _internal.delay) != null ? _a17 : delayFunction;
  let index = 0;
  return new ReadableStream({
  async pull(controller) {
@@ -9816,7 +9876,7 @@ var DefaultStreamObjectResult = class {
  const transformedStream = stream.pipeThrough(new TransformStream(transformer)).pipeThrough(
  new TransformStream({
  async transform(chunk, controller) {
- var _a16, _b, _c;
+ var _a17, _b, _c;
  if (typeof chunk === "object" && chunk.type === "stream-start") {
  warnings = chunk.warnings;
  return;
@@ -9866,7 +9926,7 @@ var DefaultStreamObjectResult = class {
  switch (chunk.type) {
  case "response-metadata": {
  fullResponse = {
- id: (_a16 = chunk.id) != null ? _a16 : fullResponse.id,
+ id: (_a17 = chunk.id) != null ? _a17 : fullResponse.id,
  timestamp: (_b = chunk.timestamp) != null ? _b : fullResponse.timestamp,
  modelId: (_c = chunk.modelId) != null ? _c : fullResponse.modelId
  };
@@ -10138,7 +10198,7 @@ async function generateSpeech({
  abortSignal,
  headers
  }) {
- var _a16;
+ var _a17;
  const resolvedModel = resolveSpeechModel(model);
  if (!resolvedModel) {
  throw new Error("Model could not be resolved");
@@ -10175,10 +10235,10 @@ async function generateSpeech({
  return new DefaultSpeechResult({
  audio: new DefaultGeneratedAudioFile({
  data: result.audio,
- mediaType: (_a16 = detectMediaType({
+ mediaType: (_a17 = detectMediaType({
  data: result.audio,
  signatures: audioMediaTypeSignatures
- })) != null ? _a16 : "audio/mp3"
+ })) != null ? _a17 : "audio/mp3"
  }),
  warnings: result.warnings,
  responses: [result.response],
@@ -10187,11 +10247,11 @@ async function generateSpeech({
  }
  var DefaultSpeechResult = class {
  constructor(options) {
- var _a16;
+ var _a17;
  this.audio = options.audio;
  this.warnings = options.warnings;
  this.responses = options.responses;
- this.providerMetadata = (_a16 = options.providerMetadata) != null ? _a16 : {};
+ this.providerMetadata = (_a17 = options.providerMetadata) != null ? _a17 : {};
  }
  };

@@ -10401,8 +10461,8 @@ function defaultTransform(text2) {
  return text2.replace(/^```(?:json)?\s*\n?/, "").replace(/\n?```\s*$/, "").trim();
  }
  function extractJsonMiddleware(options) {
- var _a16;
- const transform = (_a16 = options == null ? void 0 : options.transform) != null ? _a16 : defaultTransform;
+ var _a17;
+ const transform = (_a17 = options == null ? void 0 : options.transform) != null ? _a17 : defaultTransform;
  const hasCustomTransform = (options == null ? void 0 : options.transform) !== void 0;
  return {
  specificationVersion: "v3",
@@ -10770,13 +10830,13 @@ function addToolInputExamplesMiddleware({
  return {
  specificationVersion: "v3",
  transformParams: async ({ params }) => {
- var _a16;
- if (!((_a16 = params.tools) == null ? void 0 : _a16.length)) {
+ var _a17;
+ if (!((_a17 = params.tools) == null ? void 0 : _a17.length)) {
  return params;
  }
  const transformedTools = params.tools.map((tool2) => {
- var _a17;
- if (tool2.type !== "function" || !((_a17 = tool2.inputExamples) == null ? void 0 : _a17.length)) {
+ var _a18;
+ if (tool2.type !== "function" || !((_a18 = tool2.inputExamples) == null ? void 0 : _a18.length)) {
  return tool2;
  }
  const formattedExamples = tool2.inputExamples.map((example, index) => format(example, index)).join("\n");
@@ -10823,7 +10883,7 @@ var doWrap = ({
  modelId,
  providerId
  }) => {
- var _a16, _b, _c;
+ var _a17, _b, _c;
  async function doTransform({
  params,
  type
@@ -10832,7 +10892,7 @@ var doWrap = ({
  }
  return {
  specificationVersion: "v3",
- provider: (_a16 = providerId != null ? providerId : overrideProvider == null ? void 0 : overrideProvider({ model })) != null ? _a16 : model.provider,
+ provider: (_a17 = providerId != null ? providerId : overrideProvider == null ? void 0 : overrideProvider({ model })) != null ? _a17 : model.provider,
  modelId: (_b = modelId != null ? modelId : overrideModelId == null ? void 0 : overrideModelId({ model })) != null ? _b : model.modelId,
  supportedUrls: (_c = overrideSupportedUrls == null ? void 0 : overrideSupportedUrls({ model })) != null ? _c : model.supportedUrls,
  async doGenerate(params) {
@@ -10879,7 +10939,7 @@ var doWrap2 = ({
  modelId,
  providerId
  }) => {
- var _a16, _b, _c, _d;
+ var _a17, _b, _c, _d;
  async function doTransform({
  params
  }) {
@@ -10887,7 +10947,7 @@ var doWrap2 = ({
  }
  return {
  specificationVersion: "v3",
- provider: (_a16 = providerId != null ? providerId : overrideProvider == null ? void 0 : overrideProvider({ model })) != null ? _a16 : model.provider,
+ provider: (_a17 = providerId != null ? providerId : overrideProvider == null ? void 0 : overrideProvider({ model })) != null ? _a17 : model.provider,
  modelId: (_b = modelId != null ? modelId : overrideModelId == null ? void 0 : overrideModelId({ model })) != null ? _b : model.modelId,
  maxEmbeddingsPerCall: (_c = overrideMaxEmbeddingsPerCall == null ? void 0 : overrideMaxEmbeddingsPerCall({ model })) != null ? _c : model.maxEmbeddingsPerCall,
  supportsParallelCalls: (_d = overrideSupportsParallelCalls == null ? void 0 : overrideSupportsParallelCalls({ model })) != null ? _d : model.supportsParallelCalls,
@@ -10926,11 +10986,11 @@ var doWrap3 = ({
  modelId,
  providerId
  }) => {
- var _a16, _b, _c;
+ var _a17, _b, _c;
  async function doTransform({ params }) {
  return transformParams ? await transformParams({ params, model }) : params;
  }
- const maxImagesPerCallRaw = (_a16 = overrideMaxImagesPerCall == null ? void 0 : overrideMaxImagesPerCall({ model })) != null ? _a16 : model.maxImagesPerCall;
+ const maxImagesPerCallRaw = (_a17 = overrideMaxImagesPerCall == null ? void 0 : overrideMaxImagesPerCall({ model })) != null ? _a17 : model.maxImagesPerCall;
  const maxImagesPerCall = maxImagesPerCallRaw instanceof Function ? maxImagesPerCallRaw.bind(model) : maxImagesPerCallRaw;
  return {
  specificationVersion: "v3",
@@ -11069,11 +11129,11 @@ function customProvider({
  var experimental_customProvider = customProvider;

  // src/registry/no-such-provider-error.ts
- import { AISDKError as AISDKError20, NoSuchModelError as NoSuchModelError3 } from "@ai-sdk/provider";
- var name15 = "AI_NoSuchProviderError";
- var marker15 = `vercel.ai.error.${name15}`;
- var symbol15 = Symbol.for(marker15);
- var _a15;
+ import { AISDKError as AISDKError21, NoSuchModelError as NoSuchModelError3 } from "@ai-sdk/provider";
+ var name16 = "AI_NoSuchProviderError";
+ var marker16 = `vercel.ai.error.${name16}`;
+ var symbol16 = Symbol.for(marker16);
+ var _a16;
  var NoSuchProviderError = class extends NoSuchModelError3 {
  constructor({
  modelId,
@@ -11082,16 +11142,16 @@ var NoSuchProviderError = class extends NoSuchModelError3 {
  availableProviders,
  message = `No such provider: ${providerId} (available providers: ${availableProviders.join()})`
  }) {
- super({ errorName: name15, modelId, modelType, message });
- this[_a15] = true;
+ super({ errorName: name16, modelId, modelType, message });
+ this[_a16] = true;
  this.providerId = providerId;
  this.availableProviders = availableProviders;
  }
  static isInstance(error) {
- return AISDKError20.hasMarker(error, marker15);
+ return AISDKError21.hasMarker(error, marker16);
  }
  };
- _a15 = symbol15;
+ _a16 = symbol16;

  // src/registry/provider-registry.ts
  import {
@@ -11154,10 +11214,10 @@ var DefaultProviderRegistry = class {
  return [id.slice(0, index), id.slice(index + this.separator.length)];
  }
  languageModel(id) {
- var _a16, _b;
+ var _a17, _b;
  const [providerId, modelId] = this.splitId(id, "languageModel");
- let model = (_b = (_a16 = this.getProvider(providerId, "languageModel")).languageModel) == null ? void 0 : _b.call(
- _a16,
+ let model = (_b = (_a17 = this.getProvider(providerId, "languageModel")).languageModel) == null ? void 0 : _b.call(
+ _a17,
  modelId
  );
  if (model == null) {
@@ -11172,10 +11232,10 @@ var DefaultProviderRegistry = class {
  return model;
  }
  embeddingModel(id) {
- var _a16;
+ var _a17;
  const [providerId, modelId] = this.splitId(id, "embeddingModel");
  const provider = this.getProvider(providerId, "embeddingModel");
- const model = (_a16 = provider.embeddingModel) == null ? void 0 : _a16.call(provider, modelId);
+ const model = (_a17 = provider.embeddingModel) == null ? void 0 : _a17.call(provider, modelId);
  if (model == null) {
  throw new NoSuchModelError4({
  modelId: id,
@@ -11185,10 +11245,10 @@ var DefaultProviderRegistry = class {
  return model;
  }
  imageModel(id) {
- var _a16;
+ var _a17;
  const [providerId, modelId] = this.splitId(id, "imageModel");
  const provider = this.getProvider(providerId, "imageModel");
- let model = (_a16 = provider.imageModel) == null ? void 0 : _a16.call(provider, modelId);
+ let model = (_a17 = provider.imageModel) == null ? void 0 : _a17.call(provider, modelId);
  if (model == null) {
  throw new NoSuchModelError4({ modelId: id, modelType: "imageModel" });
  }
@@ -11201,10 +11261,10 @@ var DefaultProviderRegistry = class {
  return model;
  }
  transcriptionModel(id) {
- var _a16;
+ var _a17;
  const [providerId, modelId] = this.splitId(id, "transcriptionModel");
  const provider = this.getProvider(providerId, "transcriptionModel");
- const model = (_a16 = provider.transcriptionModel) == null ? void 0 : _a16.call(provider, modelId);
+ const model = (_a17 = provider.transcriptionModel) == null ? void 0 : _a17.call(provider, modelId);
  if (model == null) {
  throw new NoSuchModelError4({
  modelId: id,
@@ -11214,20 +11274,20 @@ var DefaultProviderRegistry = class {
  return model;
  }
  speechModel(id) {
- var _a16;
+ var _a17;
  const [providerId, modelId] = this.splitId(id, "speechModel");
  const provider = this.getProvider(providerId, "speechModel");
- const model = (_a16 = provider.speechModel) == null ? void 0 : _a16.call(provider, modelId);
+ const model = (_a17 = provider.speechModel) == null ? void 0 : _a17.call(provider, modelId);
  if (model == null) {
  throw new NoSuchModelError4({ modelId: id, modelType: "speechModel" });
  }
  return model;
  }
  rerankingModel(id) {
- var _a16;
+ var _a17;
  const [providerId, modelId] = this.splitId(id, "rerankingModel");
  const provider = this.getProvider(providerId, "rerankingModel");
- const model = (_a16 = provider.rerankingModel) == null ? void 0 : _a16.call(provider, modelId);
+ const model = (_a17 = provider.rerankingModel) == null ? void 0 : _a17.call(provider, modelId);
  if (model == null) {
  throw new NoSuchModelError4({ modelId: id, modelType: "rerankingModel" });
  }
@@ -11284,7 +11344,7 @@ async function rerank({
  }),
  tracer,
  fn: async () => {
- var _a16, _b;
+ var _a17, _b;
  const { ranking, response, providerMetadata, warnings } = await retry(
  () => recordSpan({
  name: "ai.rerank.doRerank",
@@ -11348,7 +11408,7 @@ async function rerank({
  providerMetadata,
  response: {
  id: response == null ? void 0 : response.id,
- timestamp: (_a16 = response == null ? void 0 : response.timestamp) != null ? _a16 : /* @__PURE__ */ new Date(),
+ timestamp: (_a17 = response == null ? void 0 : response.timestamp) != null ? _a17 : /* @__PURE__ */ new Date(),
  modelId: (_b = response == null ? void 0 : response.modelId) != null ? _b : model.modelId,
  headers: response == null ? void 0 : response.headers,
  body: response == null ? void 0 : response.body
@@ -11373,8 +11433,8 @@ var DefaultRerankResult = class {
  import { withUserAgentSuffix as withUserAgentSuffix8 } from "@ai-sdk/provider-utils";

  // src/error/no-transcript-generated-error.ts
- import { AISDKError as AISDKError21 } from "@ai-sdk/provider";
- var NoTranscriptGeneratedError = class extends AISDKError21 {
+ import { AISDKError as AISDKError22 } from "@ai-sdk/provider";
+ var NoTranscriptGeneratedError = class extends AISDKError22 {
  constructor(options) {
  super({
  name: "AI_NoTranscriptGeneratedError",
@@ -11408,16 +11468,16 @@ async function transcribe({
  const audioData = audio instanceof URL ? (await download({ url: audio })).data : convertDataContentToUint8Array(audio);
  const result = await retry(
  () => {
- var _a16;
+ var _a17;
  return resolvedModel.doGenerate({
  audio: audioData,
  abortSignal,
  headers: headersWithUserAgent,
  providerOptions,
- mediaType: (_a16 = detectMediaType({
+ mediaType: (_a17 = detectMediaType({
  data: audioData,
  signatures: audioMediaTypeSignatures
- })) != null ? _a16 : "audio/wav"
+ })) != null ? _a17 : "audio/wav"
  });
  }
  );
@@ -11441,14 +11501,14 @@ async function transcribe({
  }
  var DefaultTranscriptionResult = class {
  constructor(options) {
- var _a16;
+ var _a17;
  this.text = options.text;
  this.segments = options.segments;
  this.language = options.language;
  this.durationInSeconds = options.durationInSeconds;
  this.warnings = options.warnings;
  this.responses = options.responses;
- this.providerMetadata = (_a16 = options.providerMetadata) != null ? _a16 : {};
+ this.providerMetadata = (_a17 = options.providerMetadata) != null ? _a17 : {};
  }
  };

@@ -11491,7 +11551,7 @@ async function callCompletionApi({
  onError,
  fetch: fetch2 = getOriginalFetch()
  }) {
- var _a16;
+ var _a17;
  try {
  setLoading(true);
  setError(void 0);
@@ -11519,7 +11579,7 @@ async function callCompletionApi({
  });
  if (!response.ok) {
  throw new Error(
- (_a16 = await response.text()) != null ? _a16 : "Failed to fetch the chat response."
+ (_a17 = await response.text()) != null ? _a17 : "Failed to fetch the chat response."
  );
  }
  if (!response.body) {
@@ -11605,12 +11665,12 @@ async function convertFileListToFileUIParts(files) {
  }
  return Promise.all(
  Array.from(files).map(async (file) => {
- const { name: name16, type } = file;
+ const { name: name17, type } = file;
  const dataUrl = await new Promise((resolve3, reject) => {
  const reader = new FileReader();
  reader.onload = (readerEvent) => {
- var _a16;
- resolve3((_a16 = readerEvent.target) == null ? void 0 : _a16.result);
+ var _a17;
+ resolve3((_a17 = readerEvent.target) == null ? void 0 : _a17.result);
  };
  reader.onerror = (error) => reject(error);
  reader.readAsDataURL(file);
@@ -11618,7 +11678,7 @@ async function convertFileListToFileUIParts(files) {
  return {
  type: "file",
  mediaType: type,
- filename: name16,
+ filename: name17,
  url: dataUrl
  };
  })
@@ -11657,7 +11717,7 @@ var HttpChatTransport = class {
  abortSignal,
  ...options
  }) {
- var _a16, _b, _c, _d, _e;
+ var _a17, _b, _c, _d, _e;
  const resolvedBody = await resolve2(this.body);
  const resolvedHeaders = await resolve2(this.headers);
  const resolvedCredentials = await resolve2(this.credentials);
@@ -11665,7 +11725,7 @@ var HttpChatTransport = class {
  ...normalizeHeaders(resolvedHeaders),
  ...normalizeHeaders(options.headers)
  };
- const preparedRequest = await ((_a16 = this.prepareSendMessagesRequest) == null ? void 0 : _a16.call(this, {
+ const preparedRequest = await ((_a17 = this.prepareSendMessagesRequest) == null ? void 0 : _a17.call(this, {
  api: this.api,
  id: options.chatId,
  messages: options.messages,
@@ -11713,7 +11773,7 @@ var HttpChatTransport = class {
  return this.processResponseStream(response.body);
  }
  async reconnectToStream(options) {
- var _a16, _b, _c, _d, _e;
+ var _a17, _b, _c, _d, _e;
  const resolvedBody = await resolve2(this.body);
  const resolvedHeaders = await resolve2(this.headers);
  const resolvedCredentials = await resolve2(this.credentials);
@@ -11721,7 +11781,7 @@ var HttpChatTransport = class {
  ...normalizeHeaders(resolvedHeaders),
  ...normalizeHeaders(options.headers)
  };
- const preparedRequest = await ((_a16 = this.prepareReconnectToStreamRequest) == null ? void 0 : _a16.call(this, {
+ const preparedRequest = await ((_a17 = this.prepareReconnectToStreamRequest) == null ? void 0 : _a17.call(this, {
  api: this.api,
  id: options.chatId,
  body: { ...resolvedBody, ...options.body },
@@ -11803,11 +11863,11 @@ var AbstractChat = class {
  * If a messageId is provided, the message will be replaced.
  */
  this.sendMessage = async (message, options) => {
- var _a16, _b, _c, _d;
+ var _a17, _b, _c, _d;
  if (message == null) {
  await this.makeRequest({
  trigger: "submit-message",
- messageId: (_a16 = this.lastMessage) == null ? void 0 : _a16.id,
+ messageId: (_a17 = this.lastMessage) == null ? void 0 : _a17.id,
  ...options
  });
  return;
@@ -11900,7 +11960,7 @@ var AbstractChat = class {
  approved,
  reason
  }) => this.jobExecutor.run(async () => {
- var _a16, _b;
+ var _a17, _b;
  const messages = this.state.messages;
  const lastMessage = messages[messages.length - 1];
  const updatePart = (part) => isToolUIPart(part) && part.state === "approval-requested" && part.approval.id === id ? {
@@ -11915,7 +11975,7 @@ var AbstractChat = class {
  if (this.activeResponse) {
  this.activeResponse.state.message.parts = this.activeResponse.state.message.parts.map(updatePart);
  }
- if (this.status !== "streaming" && this.status !== "submitted" && ((_a16 = this.sendAutomaticallyWhen) == null ? void 0 : _a16.call(this, { messages: this.state.messages }))) {
+ if (this.status !== "streaming" && this.status !== "submitted" && ((_a17 = this.sendAutomaticallyWhen) == null ? void 0 : _a17.call(this, { messages: this.state.messages }))) {
  this.makeRequest({
  trigger: "submit-message",
  messageId: (_b = this.lastMessage) == null ? void 0 : _b.id
@@ -11929,7 +11989,7 @@ var AbstractChat = class {
  output,
  errorText
  }) => this.jobExecutor.run(async () => {
- var _a16, _b;
+ var _a17, _b;
  const messages = this.state.messages;
  const lastMessage = messages[messages.length - 1];
  const updatePart = (part) => isToolUIPart(part) && part.toolCallId === toolCallId ? { ...part, state, output, errorText } : part;
@@ -11940,7 +12000,7 @@ var AbstractChat = class {
  if (this.activeResponse) {
  this.activeResponse.state.message.parts = this.activeResponse.state.message.parts.map(updatePart);
  }
- if (this.status !== "streaming" && this.status !== "submitted" && ((_a16 = this.sendAutomaticallyWhen) == null ? void 0 : _a16.call(this, { messages: this.state.messages }))) {
+ if (this.status !== "streaming" && this.status !== "submitted" && ((_a17 = this.sendAutomaticallyWhen) == null ? void 0 : _a17.call(this, { messages: this.state.messages }))) {
  this.makeRequest({
  trigger: "submit-message",
  messageId: (_b = this.lastMessage) == null ? void 0 : _b.id
@@ -11953,10 +12013,10 @@ var AbstractChat = class {
  * Abort the current request immediately, keep the generated tokens if any.
  */
  this.stop = async () => {
- var _a16;
+ var _a17;
  if (this.status !== "streaming" && this.status !== "submitted")
  return;
- if ((_a16 = this.activeResponse) == null ? void 0 : _a16.abortController) {
+ if ((_a17 = this.activeResponse) == null ? void 0 : _a17.abortController) {
  this.activeResponse.abortController.abort();
  }
  };
@@ -12011,7 +12071,7 @@ var AbstractChat = class {
  body,
  messageId
  }) {
- var _a16, _b, _c, _d;
+ var _a17, _b, _c, _d;
  this.setStatus({ status: "submitted", error: void 0 });
  const lastMessage = this.lastMessage;
  let isAbort = false;
@@ -12060,9 +12120,9 @@ var AbstractChat = class {
  () => job({
  state: activeResponse.state,
  write: () => {
- var _a17;
+ var _a18;
  this.setStatus({ status: "streaming" });
- const replaceLastMessage = activeResponse.state.message.id === ((_a17 = this.lastMessage) == null ? void 0 : _a17.id);
+ const replaceLastMessage = activeResponse.state.message.id === ((_a18 = this.lastMessage) == null ? void 0 : _a18.id);
  if (replaceLastMessage) {
  this.state.replaceMessage(
  this.state.messages.length - 1,
@@ -12114,7 +12174,7 @@ var AbstractChat = class {
  isAbort,
  isDisconnect,
  isError,
- finishReason: (_a16 = this.activeResponse) == null ? void 0 : _a16.state.finishReason
+ finishReason: (_a17 = this.activeResponse) == null ? void 0 : _a17.state.finishReason
  });
  } catch (err) {
  console.error(err);
@@ -12252,7 +12312,7 @@ var TextStreamChatTransport = class extends HttpChatTransport {
  }
  };
  export {
- AISDKError17 as AISDKError,
+ AISDKError18 as AISDKError,
  APICallError,
  AbstractChat,
  DefaultChatTransport,
@@ -12291,6 +12351,7 @@ export {
  ToolCallRepairError,
  ToolLoopAgent,
  TypeValidationError,
+ UIMessageStreamError,
  UI_MESSAGE_STREAM_HEADERS,
  UnsupportedFunctionalityError,
  UnsupportedModelVersionError,