@mastra/core 1.0.0-beta.13 → 1.0.0-beta.14

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (249) hide show
  1. package/CHANGELOG.md +170 -0
  2. package/dist/agent/agent.d.ts +2 -2
  3. package/dist/agent/agent.d.ts.map +1 -1
  4. package/dist/agent/agent.types.d.ts +6 -7
  5. package/dist/agent/agent.types.d.ts.map +1 -1
  6. package/dist/agent/index.cjs +17 -9
  7. package/dist/agent/index.d.ts +1 -1
  8. package/dist/agent/index.d.ts.map +1 -1
  9. package/dist/agent/index.js +2 -2
  10. package/dist/agent/message-list/index.cjs +3 -3
  11. package/dist/agent/message-list/index.js +1 -1
  12. package/dist/agent/trip-wire.d.ts +2 -2
  13. package/dist/agent/trip-wire.d.ts.map +1 -1
  14. package/dist/agent/utils.d.ts +4 -1
  15. package/dist/agent/utils.d.ts.map +1 -1
  16. package/dist/agent/workflows/prepare-stream/index.d.ts +3 -3
  17. package/dist/agent/workflows/prepare-stream/index.d.ts.map +1 -1
  18. package/dist/agent/workflows/prepare-stream/map-results-step.d.ts +3 -3
  19. package/dist/agent/workflows/prepare-stream/map-results-step.d.ts.map +1 -1
  20. package/dist/agent/workflows/prepare-stream/prepare-memory-step.d.ts +3 -3
  21. package/dist/agent/workflows/prepare-stream/prepare-memory-step.d.ts.map +1 -1
  22. package/dist/agent/workflows/prepare-stream/prepare-tools-step.d.ts +3 -3
  23. package/dist/agent/workflows/prepare-stream/prepare-tools-step.d.ts.map +1 -1
  24. package/dist/bundler/types.d.ts +1 -0
  25. package/dist/bundler/types.d.ts.map +1 -1
  26. package/dist/chunk-2AU5ZHBL.js +79 -0
  27. package/dist/chunk-2AU5ZHBL.js.map +1 -0
  28. package/dist/{chunk-BQDZIQ3G.js → chunk-3IP3DZ7G.js} +85 -18
  29. package/dist/chunk-3IP3DZ7G.js.map +1 -0
  30. package/dist/{chunk-WYWRMIQC.js → chunk-4AT6YQKZ.js} +4 -103
  31. package/dist/chunk-4AT6YQKZ.js.map +1 -0
  32. package/dist/{chunk-62Q7K656.js → chunk-4CMIJQF6.js} +114 -110
  33. package/dist/chunk-4CMIJQF6.js.map +1 -0
  34. package/dist/{chunk-MXBVP7HX.cjs → chunk-53SZJCBX.cjs} +220 -174
  35. package/dist/chunk-53SZJCBX.cjs.map +1 -0
  36. package/dist/{chunk-2IU4RGU5.js → chunk-5UQ5TB6J.js} +5 -5
  37. package/dist/{chunk-2IU4RGU5.js.map → chunk-5UQ5TB6J.js.map} +1 -1
  38. package/dist/{chunk-WYGUWVTF.js → chunk-67LM2UCT.js} +8 -8
  39. package/dist/chunk-67LM2UCT.js.map +1 -0
  40. package/dist/{chunk-U4CSOY6T.cjs → chunk-6CG7IY57.cjs} +110 -43
  41. package/dist/chunk-6CG7IY57.cjs.map +1 -0
  42. package/dist/{chunk-X6IBA7FP.cjs → chunk-72E3YF6A.cjs} +9 -110
  43. package/dist/chunk-72E3YF6A.cjs.map +1 -0
  44. package/dist/{chunk-GIWC35YQ.js → chunk-AYBJ5GAD.js} +180 -4
  45. package/dist/chunk-AYBJ5GAD.js.map +1 -0
  46. package/dist/{chunk-VEPP75C4.cjs → chunk-DBW6S25C.cjs} +10 -10
  47. package/dist/{chunk-VEPP75C4.cjs.map → chunk-DBW6S25C.cjs.map} +1 -1
  48. package/dist/{chunk-Y7MZ5LJT.cjs → chunk-EGHGFLL3.cjs} +101 -102
  49. package/dist/chunk-EGHGFLL3.cjs.map +1 -0
  50. package/dist/{chunk-LGB4VNZI.cjs → chunk-ETWAR2YE.cjs} +5 -5
  51. package/dist/{chunk-LGB4VNZI.cjs.map → chunk-ETWAR2YE.cjs.map} +1 -1
  52. package/dist/{chunk-OWIEOL55.cjs → chunk-F75EQ574.cjs} +695 -254
  53. package/dist/chunk-F75EQ574.cjs.map +1 -0
  54. package/dist/{chunk-AGHLXC4I.cjs → chunk-FPDJ4XN6.cjs} +181 -5
  55. package/dist/chunk-FPDJ4XN6.cjs.map +1 -0
  56. package/dist/{chunk-VETAQUW3.js → chunk-GBQXIVL6.js} +3 -3
  57. package/dist/{chunk-VETAQUW3.js.map → chunk-GBQXIVL6.js.map} +1 -1
  58. package/dist/{chunk-4BC5FUAO.js → chunk-GELVUDUY.js} +5 -4
  59. package/dist/chunk-GELVUDUY.js.map +1 -0
  60. package/dist/chunk-GVAPYQRO.cjs +252 -0
  61. package/dist/chunk-GVAPYQRO.cjs.map +1 -0
  62. package/dist/{chunk-R5AJGM55.cjs → chunk-JAGQZZ43.cjs} +410 -265
  63. package/dist/chunk-JAGQZZ43.cjs.map +1 -0
  64. package/dist/{chunk-ARAQIW6E.js → chunk-K66U47VL.js} +617 -188
  65. package/dist/chunk-K66U47VL.js.map +1 -0
  66. package/dist/{chunk-RCJLMMTO.js → chunk-LDXKZYOV.js} +4 -4
  67. package/dist/{chunk-RCJLMMTO.js.map → chunk-LDXKZYOV.js.map} +1 -1
  68. package/dist/{chunk-MLKE7HRS.cjs → chunk-O3ULBGV6.cjs} +4 -4
  69. package/dist/{chunk-MLKE7HRS.cjs.map → chunk-O3ULBGV6.cjs.map} +1 -1
  70. package/dist/{chunk-PJAK4U6R.cjs → chunk-O5BQBZEF.cjs} +15 -15
  71. package/dist/chunk-O5BQBZEF.cjs.map +1 -0
  72. package/dist/{chunk-H4VUIOWU.cjs → chunk-OOUFPYSX.cjs} +5 -4
  73. package/dist/chunk-OOUFPYSX.cjs.map +1 -0
  74. package/dist/{chunk-5VZGJTPR.js → chunk-QDVYP2T7.js} +56 -10
  75. package/dist/chunk-QDVYP2T7.js.map +1 -0
  76. package/dist/{chunk-E5BQRAJK.js → chunk-QF4MHFSU.js} +364 -221
  77. package/dist/chunk-QF4MHFSU.js.map +1 -0
  78. package/dist/{chunk-WPTTKULS.js → chunk-SLBWA2F3.js} +3 -3
  79. package/dist/{chunk-WPTTKULS.js.map → chunk-SLBWA2F3.js.map} +1 -1
  80. package/dist/chunk-ST7NBF4H.cjs +84 -0
  81. package/dist/chunk-ST7NBF4H.cjs.map +1 -0
  82. package/dist/{chunk-7HEAVZRS.cjs → chunk-TDM43G4I.cjs} +8 -8
  83. package/dist/{chunk-7HEAVZRS.cjs.map → chunk-TDM43G4I.cjs.map} +1 -1
  84. package/dist/{chunk-SZYSDJTN.cjs → chunk-TRUNX3AX.cjs} +117 -112
  85. package/dist/chunk-TRUNX3AX.cjs.map +1 -0
  86. package/dist/chunk-VE6HQ7H6.js +250 -0
  87. package/dist/chunk-VE6HQ7H6.js.map +1 -0
  88. package/dist/{chunk-MRRFTNF4.js → chunk-VZJOEGQA.js} +3 -4
  89. package/dist/chunk-VZJOEGQA.js.map +1 -0
  90. package/dist/evals/index.cjs +4 -4
  91. package/dist/evals/index.js +1 -1
  92. package/dist/evals/run/index.d.ts.map +1 -1
  93. package/dist/evals/scoreTraces/index.cjs +3 -3
  94. package/dist/evals/scoreTraces/index.js +1 -1
  95. package/dist/index.cjs +2 -2
  96. package/dist/index.js +1 -1
  97. package/dist/llm/index.cjs +15 -15
  98. package/dist/llm/index.js +5 -5
  99. package/dist/llm/model/aisdk/generate-to-stream.d.ts +20 -0
  100. package/dist/llm/model/aisdk/generate-to-stream.d.ts.map +1 -0
  101. package/dist/llm/model/aisdk/v5/model.d.ts +4 -0
  102. package/dist/llm/model/aisdk/v5/model.d.ts.map +1 -1
  103. package/dist/llm/model/aisdk/v6/model.d.ts +51 -0
  104. package/dist/llm/model/aisdk/v6/model.d.ts.map +1 -0
  105. package/dist/llm/model/model.loop.d.ts +1 -1
  106. package/dist/llm/model/provider-types.generated.d.ts +84 -0
  107. package/dist/llm/model/resolve-model.d.ts +2 -2
  108. package/dist/llm/model/resolve-model.d.ts.map +1 -1
  109. package/dist/llm/model/shared.types.d.ts +18 -7
  110. package/dist/llm/model/shared.types.d.ts.map +1 -1
  111. package/dist/loop/index.cjs +2 -2
  112. package/dist/loop/index.js +1 -1
  113. package/dist/loop/network/index.d.ts +2 -2
  114. package/dist/loop/network/index.d.ts.map +1 -1
  115. package/dist/loop/test-utils/MastraLanguageModelV2Mock.d.ts +1 -1
  116. package/dist/loop/test-utils/MastraLanguageModelV2Mock.d.ts.map +1 -1
  117. package/dist/loop/test-utils/MastraLanguageModelV3Mock.d.ts +37 -0
  118. package/dist/loop/test-utils/MastraLanguageModelV3Mock.d.ts.map +1 -0
  119. package/dist/loop/test-utils/fullStream.d.ts +2 -1
  120. package/dist/loop/test-utils/fullStream.d.ts.map +1 -1
  121. package/dist/loop/test-utils/resultObject.d.ts +2 -1
  122. package/dist/loop/test-utils/resultObject.d.ts.map +1 -1
  123. package/dist/loop/test-utils/streamObject.d.ts.map +1 -1
  124. package/dist/loop/test-utils/utils-v3.d.ts +55 -0
  125. package/dist/loop/test-utils/utils-v3.d.ts.map +1 -0
  126. package/dist/loop/types.d.ts +5 -6
  127. package/dist/loop/types.d.ts.map +1 -1
  128. package/dist/loop/workflows/agentic-execution/index.d.ts +36 -36
  129. package/dist/loop/workflows/agentic-execution/llm-execution-step.d.ts +25 -25
  130. package/dist/loop/workflows/agentic-execution/llm-execution-step.d.ts.map +1 -1
  131. package/dist/loop/workflows/agentic-execution/llm-mapping-step.d.ts +12 -12
  132. package/dist/loop/workflows/agentic-loop/index.d.ts +36 -36
  133. package/dist/loop/workflows/run-state.d.ts +2 -2
  134. package/dist/loop/workflows/run-state.d.ts.map +1 -1
  135. package/dist/loop/workflows/schema.d.ts +12 -12
  136. package/dist/mastra/index.cjs +2 -2
  137. package/dist/mastra/index.js +1 -1
  138. package/dist/memory/index.cjs +6 -6
  139. package/dist/memory/index.js +1 -1
  140. package/dist/models-dev-E3WWI7VA.js +3 -0
  141. package/dist/{models-dev-EO22XOXQ.js.map → models-dev-E3WWI7VA.js.map} +1 -1
  142. package/dist/models-dev-PPS7X4JM.cjs +12 -0
  143. package/dist/{models-dev-D3EKFGAO.cjs.map → models-dev-PPS7X4JM.cjs.map} +1 -1
  144. package/dist/netlify-TY656UYF.js +3 -0
  145. package/dist/{netlify-AE4LNCAI.js.map → netlify-TY656UYF.js.map} +1 -1
  146. package/dist/netlify-VZFM5UH3.cjs +12 -0
  147. package/dist/{netlify-WE42TZIT.cjs.map → netlify-VZFM5UH3.cjs.map} +1 -1
  148. package/dist/processors/index.cjs +37 -37
  149. package/dist/processors/index.d.ts +7 -7
  150. package/dist/processors/index.d.ts.map +1 -1
  151. package/dist/processors/index.js +1 -1
  152. package/dist/processors/runner.d.ts.map +1 -1
  153. package/dist/processors/step-schema.d.ts +26 -26
  154. package/dist/processors/step-schema.d.ts.map +1 -1
  155. package/dist/provider-registry-NXVD764B.js +3 -0
  156. package/dist/{provider-registry-6LF3NGC5.js.map → provider-registry-NXVD764B.js.map} +1 -1
  157. package/dist/provider-registry-ZIWSEUQE.cjs +40 -0
  158. package/dist/{provider-registry-73FKMXJV.cjs.map → provider-registry-ZIWSEUQE.cjs.map} +1 -1
  159. package/dist/provider-registry.json +176 -0
  160. package/dist/relevance/index.cjs +3 -3
  161. package/dist/relevance/index.cjs.map +1 -1
  162. package/dist/relevance/index.js +2 -2
  163. package/dist/relevance/index.js.map +1 -1
  164. package/dist/storage/index.cjs +38 -38
  165. package/dist/storage/index.js +1 -1
  166. package/dist/stream/aisdk/v5/execute.d.ts +4 -4
  167. package/dist/stream/aisdk/v5/execute.d.ts.map +1 -1
  168. package/dist/stream/aisdk/v5/output.d.ts +2 -2
  169. package/dist/stream/aisdk/v5/output.d.ts.map +1 -1
  170. package/dist/stream/aisdk/v5/transform.d.ts.map +1 -1
  171. package/dist/stream/base/output.d.ts +7 -5
  172. package/dist/stream/base/output.d.ts.map +1 -1
  173. package/dist/stream/index.cjs +12 -12
  174. package/dist/stream/index.js +2 -2
  175. package/dist/stream/types.d.ts +29 -21
  176. package/dist/stream/types.d.ts.map +1 -1
  177. package/dist/test-utils/llm-mock.cjs +4 -4
  178. package/dist/test-utils/llm-mock.cjs.map +1 -1
  179. package/dist/test-utils/llm-mock.js +1 -1
  180. package/dist/test-utils/llm-mock.js.map +1 -1
  181. package/dist/token-6GSAFR2W-LTZ7QQUP.js +61 -0
  182. package/dist/token-6GSAFR2W-LTZ7QQUP.js.map +1 -0
  183. package/dist/token-6GSAFR2W-UEEINYAN.cjs +63 -0
  184. package/dist/token-6GSAFR2W-UEEINYAN.cjs.map +1 -0
  185. package/dist/token-util-NEHG7TUY-QTFZ26EN.js +8 -0
  186. package/dist/token-util-NEHG7TUY-QTFZ26EN.js.map +1 -0
  187. package/dist/token-util-NEHG7TUY-WZL2DNCG.cjs +10 -0
  188. package/dist/token-util-NEHG7TUY-WZL2DNCG.cjs.map +1 -0
  189. package/dist/utils.cjs +22 -22
  190. package/dist/utils.d.ts +2 -2
  191. package/dist/utils.d.ts.map +1 -1
  192. package/dist/utils.js +1 -1
  193. package/dist/vector/embed.d.ts +1 -0
  194. package/dist/vector/embed.d.ts.map +1 -1
  195. package/dist/vector/index.cjs +5300 -0
  196. package/dist/vector/index.cjs.map +1 -1
  197. package/dist/vector/index.js +5279 -1
  198. package/dist/vector/index.js.map +1 -1
  199. package/dist/vector/vector.d.ts +14 -1
  200. package/dist/vector/vector.d.ts.map +1 -1
  201. package/dist/voice/composite-voice.d.ts.map +1 -1
  202. package/dist/voice/index.cjs +6 -6
  203. package/dist/voice/index.js +1 -1
  204. package/dist/workflows/evented/index.cjs +10 -10
  205. package/dist/workflows/evented/index.js +1 -1
  206. package/dist/workflows/evented/step-executor.d.ts +4 -0
  207. package/dist/workflows/evented/step-executor.d.ts.map +1 -1
  208. package/dist/workflows/evented/workflow-event-processor/index.d.ts +15 -0
  209. package/dist/workflows/evented/workflow-event-processor/index.d.ts.map +1 -1
  210. package/dist/workflows/evented/workflow.d.ts +5 -0
  211. package/dist/workflows/evented/workflow.d.ts.map +1 -1
  212. package/dist/workflows/index.cjs +23 -23
  213. package/dist/workflows/index.js +1 -1
  214. package/dist/workflows/workflow.d.ts +4 -1
  215. package/dist/workflows/workflow.d.ts.map +1 -1
  216. package/package.json +10 -12
  217. package/src/llm/model/provider-types.generated.d.ts +84 -0
  218. package/dist/chunk-4BC5FUAO.js.map +0 -1
  219. package/dist/chunk-5VZGJTPR.js.map +0 -1
  220. package/dist/chunk-62Q7K656.js.map +0 -1
  221. package/dist/chunk-AGHLXC4I.cjs.map +0 -1
  222. package/dist/chunk-ARAQIW6E.js.map +0 -1
  223. package/dist/chunk-BQDZIQ3G.js.map +0 -1
  224. package/dist/chunk-E5BQRAJK.js.map +0 -1
  225. package/dist/chunk-GIWC35YQ.js.map +0 -1
  226. package/dist/chunk-H4VUIOWU.cjs.map +0 -1
  227. package/dist/chunk-IXZ2T2QX.cjs +0 -448
  228. package/dist/chunk-IXZ2T2QX.cjs.map +0 -1
  229. package/dist/chunk-MRRFTNF4.js.map +0 -1
  230. package/dist/chunk-MXBVP7HX.cjs.map +0 -1
  231. package/dist/chunk-OWIEOL55.cjs.map +0 -1
  232. package/dist/chunk-PJAK4U6R.cjs.map +0 -1
  233. package/dist/chunk-R5AJGM55.cjs.map +0 -1
  234. package/dist/chunk-SZYSDJTN.cjs.map +0 -1
  235. package/dist/chunk-U4CSOY6T.cjs.map +0 -1
  236. package/dist/chunk-UBSPZTQX.js +0 -434
  237. package/dist/chunk-UBSPZTQX.js.map +0 -1
  238. package/dist/chunk-WYGUWVTF.js.map +0 -1
  239. package/dist/chunk-WYWRMIQC.js.map +0 -1
  240. package/dist/chunk-X6IBA7FP.cjs.map +0 -1
  241. package/dist/chunk-Y7MZ5LJT.cjs.map +0 -1
  242. package/dist/llm/model/is-v2-model.d.ts +0 -3
  243. package/dist/llm/model/is-v2-model.d.ts.map +0 -1
  244. package/dist/models-dev-D3EKFGAO.cjs +0 -12
  245. package/dist/models-dev-EO22XOXQ.js +0 -3
  246. package/dist/netlify-AE4LNCAI.js +0 -3
  247. package/dist/netlify-WE42TZIT.cjs +0 -12
  248. package/dist/provider-registry-6LF3NGC5.js +0 -3
  249. package/dist/provider-registry-73FKMXJV.cjs +0 -40
@@ -1,8 +1,437 @@
1
- import { InvalidArgumentError, UnsupportedFunctionalityError, APICallError, LoadAPIKeyError, TypeValidationError, EmptyResponseBodyError, JSONParseError, EventSourceParserStream, LoadSettingError, NoSuchModelError, InvalidResponseDataError, TooManyEmbeddingValuesForCallError, InvalidPromptError } from './chunk-UBSPZTQX.js';
2
1
  import * as z4 from 'zod/v4';
3
2
  import { z } from 'zod/v4';
4
3
  import { ZodFirstPartyTypeKind } from 'zod/v3';
5
4
 
5
+ // ../../node_modules/.pnpm/@ai-sdk+provider@2.0.0/node_modules/@ai-sdk/provider/dist/index.mjs
6
+ var marker = "vercel.ai.error";
7
+ var symbol = Symbol.for(marker);
8
+ var _a;
9
+ var _AISDKError = class _AISDKError2 extends Error {
10
+ /**
11
+ * Creates an AI SDK Error.
12
+ *
13
+ * @param {Object} params - The parameters for creating the error.
14
+ * @param {string} params.name - The name of the error.
15
+ * @param {string} params.message - The error message.
16
+ * @param {unknown} [params.cause] - The underlying cause of the error.
17
+ */
18
+ constructor({
19
+ name: name14,
20
+ message,
21
+ cause
22
+ }) {
23
+ super(message);
24
+ this[_a] = true;
25
+ this.name = name14;
26
+ this.cause = cause;
27
+ }
28
+ /**
29
+ * Checks if the given error is an AI SDK Error.
30
+ * @param {unknown} error - The error to check.
31
+ * @returns {boolean} True if the error is an AI SDK Error, false otherwise.
32
+ */
33
+ static isInstance(error) {
34
+ return _AISDKError2.hasMarker(error, marker);
35
+ }
36
+ static hasMarker(error, marker15) {
37
+ const markerSymbol = Symbol.for(marker15);
38
+ return error != null && typeof error === "object" && markerSymbol in error && typeof error[markerSymbol] === "boolean" && error[markerSymbol] === true;
39
+ }
40
+ };
41
+ _a = symbol;
42
+ var AISDKError = _AISDKError;
43
+ var name = "AI_APICallError";
44
+ var marker2 = `vercel.ai.error.${name}`;
45
+ var symbol2 = Symbol.for(marker2);
46
+ var _a2;
47
+ var APICallError = class extends AISDKError {
48
+ constructor({
49
+ message,
50
+ url,
51
+ requestBodyValues,
52
+ statusCode,
53
+ responseHeaders,
54
+ responseBody,
55
+ cause,
56
+ isRetryable = statusCode != null && (statusCode === 408 || // request timeout
57
+ statusCode === 409 || // conflict
58
+ statusCode === 429 || // too many requests
59
+ statusCode >= 500),
60
+ // server error
61
+ data
62
+ }) {
63
+ super({ name, message, cause });
64
+ this[_a2] = true;
65
+ this.url = url;
66
+ this.requestBodyValues = requestBodyValues;
67
+ this.statusCode = statusCode;
68
+ this.responseHeaders = responseHeaders;
69
+ this.responseBody = responseBody;
70
+ this.isRetryable = isRetryable;
71
+ this.data = data;
72
+ }
73
+ static isInstance(error) {
74
+ return AISDKError.hasMarker(error, marker2);
75
+ }
76
+ };
77
+ _a2 = symbol2;
78
+ var name2 = "AI_EmptyResponseBodyError";
79
+ var marker3 = `vercel.ai.error.${name2}`;
80
+ var symbol3 = Symbol.for(marker3);
81
+ var _a3;
82
+ var EmptyResponseBodyError = class extends AISDKError {
83
+ // used in isInstance
84
+ constructor({ message = "Empty response body" } = {}) {
85
+ super({ name: name2, message });
86
+ this[_a3] = true;
87
+ }
88
+ static isInstance(error) {
89
+ return AISDKError.hasMarker(error, marker3);
90
+ }
91
+ };
92
+ _a3 = symbol3;
93
+ function getErrorMessage(error) {
94
+ if (error == null) {
95
+ return "unknown error";
96
+ }
97
+ if (typeof error === "string") {
98
+ return error;
99
+ }
100
+ if (error instanceof Error) {
101
+ return error.message;
102
+ }
103
+ return JSON.stringify(error);
104
+ }
105
+ var name3 = "AI_InvalidArgumentError";
106
+ var marker4 = `vercel.ai.error.${name3}`;
107
+ var symbol4 = Symbol.for(marker4);
108
+ var _a4;
109
+ var InvalidArgumentError = class extends AISDKError {
110
+ constructor({
111
+ message,
112
+ cause,
113
+ argument
114
+ }) {
115
+ super({ name: name3, message, cause });
116
+ this[_a4] = true;
117
+ this.argument = argument;
118
+ }
119
+ static isInstance(error) {
120
+ return AISDKError.hasMarker(error, marker4);
121
+ }
122
+ };
123
+ _a4 = symbol4;
124
+ var name4 = "AI_InvalidPromptError";
125
+ var marker5 = `vercel.ai.error.${name4}`;
126
+ var symbol5 = Symbol.for(marker5);
127
+ var _a5;
128
+ var InvalidPromptError = class extends AISDKError {
129
+ constructor({
130
+ prompt,
131
+ message,
132
+ cause
133
+ }) {
134
+ super({ name: name4, message: `Invalid prompt: ${message}`, cause });
135
+ this[_a5] = true;
136
+ this.prompt = prompt;
137
+ }
138
+ static isInstance(error) {
139
+ return AISDKError.hasMarker(error, marker5);
140
+ }
141
+ };
142
+ _a5 = symbol5;
143
+ var name5 = "AI_InvalidResponseDataError";
144
+ var marker6 = `vercel.ai.error.${name5}`;
145
+ var symbol6 = Symbol.for(marker6);
146
+ var _a6;
147
+ var InvalidResponseDataError = class extends AISDKError {
148
+ constructor({
149
+ data,
150
+ message = `Invalid response data: ${JSON.stringify(data)}.`
151
+ }) {
152
+ super({ name: name5, message });
153
+ this[_a6] = true;
154
+ this.data = data;
155
+ }
156
+ static isInstance(error) {
157
+ return AISDKError.hasMarker(error, marker6);
158
+ }
159
+ };
160
+ _a6 = symbol6;
161
+ var name6 = "AI_JSONParseError";
162
+ var marker7 = `vercel.ai.error.${name6}`;
163
+ var symbol7 = Symbol.for(marker7);
164
+ var _a7;
165
+ var JSONParseError = class extends AISDKError {
166
+ constructor({ text, cause }) {
167
+ super({
168
+ name: name6,
169
+ message: `JSON parsing failed: Text: ${text}.
170
+ Error message: ${getErrorMessage(cause)}`,
171
+ cause
172
+ });
173
+ this[_a7] = true;
174
+ this.text = text;
175
+ }
176
+ static isInstance(error) {
177
+ return AISDKError.hasMarker(error, marker7);
178
+ }
179
+ };
180
+ _a7 = symbol7;
181
+ var name7 = "AI_LoadAPIKeyError";
182
+ var marker8 = `vercel.ai.error.${name7}`;
183
+ var symbol8 = Symbol.for(marker8);
184
+ var _a8;
185
+ var LoadAPIKeyError = class extends AISDKError {
186
+ // used in isInstance
187
+ constructor({ message }) {
188
+ super({ name: name7, message });
189
+ this[_a8] = true;
190
+ }
191
+ static isInstance(error) {
192
+ return AISDKError.hasMarker(error, marker8);
193
+ }
194
+ };
195
+ _a8 = symbol8;
196
+ var name8 = "AI_LoadSettingError";
197
+ var marker9 = `vercel.ai.error.${name8}`;
198
+ var symbol9 = Symbol.for(marker9);
199
+ var _a9;
200
+ var LoadSettingError = class extends AISDKError {
201
+ // used in isInstance
202
+ constructor({ message }) {
203
+ super({ name: name8, message });
204
+ this[_a9] = true;
205
+ }
206
+ static isInstance(error) {
207
+ return AISDKError.hasMarker(error, marker9);
208
+ }
209
+ };
210
+ _a9 = symbol9;
211
+ var name10 = "AI_NoSuchModelError";
212
+ var marker11 = `vercel.ai.error.${name10}`;
213
+ var symbol11 = Symbol.for(marker11);
214
+ var _a11;
215
+ var NoSuchModelError = class extends AISDKError {
216
+ constructor({
217
+ errorName = name10,
218
+ modelId,
219
+ modelType,
220
+ message = `No such ${modelType}: ${modelId}`
221
+ }) {
222
+ super({ name: errorName, message });
223
+ this[_a11] = true;
224
+ this.modelId = modelId;
225
+ this.modelType = modelType;
226
+ }
227
+ static isInstance(error) {
228
+ return AISDKError.hasMarker(error, marker11);
229
+ }
230
+ };
231
+ _a11 = symbol11;
232
+ var name11 = "AI_TooManyEmbeddingValuesForCallError";
233
+ var marker12 = `vercel.ai.error.${name11}`;
234
+ var symbol12 = Symbol.for(marker12);
235
+ var _a12;
236
+ var TooManyEmbeddingValuesForCallError = class extends AISDKError {
237
+ constructor(options) {
238
+ super({
239
+ name: name11,
240
+ message: `Too many values for a single embedding call. The ${options.provider} model "${options.modelId}" can only embed up to ${options.maxEmbeddingsPerCall} values per call, but ${options.values.length} values were provided.`
241
+ });
242
+ this[_a12] = true;
243
+ this.provider = options.provider;
244
+ this.modelId = options.modelId;
245
+ this.maxEmbeddingsPerCall = options.maxEmbeddingsPerCall;
246
+ this.values = options.values;
247
+ }
248
+ static isInstance(error) {
249
+ return AISDKError.hasMarker(error, marker12);
250
+ }
251
+ };
252
+ _a12 = symbol12;
253
+ var name12 = "AI_TypeValidationError";
254
+ var marker13 = `vercel.ai.error.${name12}`;
255
+ var symbol13 = Symbol.for(marker13);
256
+ var _a13;
257
+ var _TypeValidationError = class _TypeValidationError2 extends AISDKError {
258
+ constructor({ value, cause }) {
259
+ super({
260
+ name: name12,
261
+ message: `Type validation failed: Value: ${JSON.stringify(value)}.
262
+ Error message: ${getErrorMessage(cause)}`,
263
+ cause
264
+ });
265
+ this[_a13] = true;
266
+ this.value = value;
267
+ }
268
+ static isInstance(error) {
269
+ return AISDKError.hasMarker(error, marker13);
270
+ }
271
+ /**
272
+ * Wraps an error into a TypeValidationError.
273
+ * If the cause is already a TypeValidationError with the same value, it returns the cause.
274
+ * Otherwise, it creates a new TypeValidationError.
275
+ *
276
+ * @param {Object} params - The parameters for wrapping the error.
277
+ * @param {unknown} params.value - The value that failed validation.
278
+ * @param {unknown} params.cause - The original error or cause of the validation failure.
279
+ * @returns {TypeValidationError} A TypeValidationError instance.
280
+ */
281
+ static wrap({
282
+ value,
283
+ cause
284
+ }) {
285
+ return _TypeValidationError2.isInstance(cause) && cause.value === value ? cause : new _TypeValidationError2({ value, cause });
286
+ }
287
+ };
288
+ _a13 = symbol13;
289
+ var TypeValidationError = _TypeValidationError;
290
+ var name13 = "AI_UnsupportedFunctionalityError";
291
+ var marker14 = `vercel.ai.error.${name13}`;
292
+ var symbol14 = Symbol.for(marker14);
293
+ var _a14;
294
+ var UnsupportedFunctionalityError = class extends AISDKError {
295
+ constructor({
296
+ functionality,
297
+ message = `'${functionality}' functionality not supported.`
298
+ }) {
299
+ super({ name: name13, message });
300
+ this[_a14] = true;
301
+ this.functionality = functionality;
302
+ }
303
+ static isInstance(error) {
304
+ return AISDKError.hasMarker(error, marker14);
305
+ }
306
+ };
307
+ _a14 = symbol14;
308
+
309
+ // ../../node_modules/.pnpm/eventsource-parser@3.0.6/node_modules/eventsource-parser/dist/index.js
310
+ var ParseError = class extends Error {
311
+ constructor(message, options) {
312
+ super(message), this.name = "ParseError", this.type = options.type, this.field = options.field, this.value = options.value, this.line = options.line;
313
+ }
314
+ };
315
+ function noop(_arg) {
316
+ }
317
+ function createParser(callbacks) {
318
+ if (typeof callbacks == "function")
319
+ throw new TypeError(
320
+ "`callbacks` must be an object, got a function instead. Did you mean `{onEvent: fn}`?"
321
+ );
322
+ const { onEvent = noop, onError = noop, onRetry = noop, onComment } = callbacks;
323
+ let incompleteLine = "", isFirstChunk = true, id, data = "", eventType = "";
324
+ function feed(newChunk) {
325
+ const chunk = isFirstChunk ? newChunk.replace(/^\xEF\xBB\xBF/, "") : newChunk, [complete, incomplete] = splitLines(`${incompleteLine}${chunk}`);
326
+ for (const line of complete)
327
+ parseLine(line);
328
+ incompleteLine = incomplete, isFirstChunk = false;
329
+ }
330
+ function parseLine(line) {
331
+ if (line === "") {
332
+ dispatchEvent();
333
+ return;
334
+ }
335
+ if (line.startsWith(":")) {
336
+ onComment && onComment(line.slice(line.startsWith(": ") ? 2 : 1));
337
+ return;
338
+ }
339
+ const fieldSeparatorIndex = line.indexOf(":");
340
+ if (fieldSeparatorIndex !== -1) {
341
+ const field = line.slice(0, fieldSeparatorIndex), offset = line[fieldSeparatorIndex + 1] === " " ? 2 : 1, value = line.slice(fieldSeparatorIndex + offset);
342
+ processField(field, value, line);
343
+ return;
344
+ }
345
+ processField(line, "", line);
346
+ }
347
+ function processField(field, value, line) {
348
+ switch (field) {
349
+ case "event":
350
+ eventType = value;
351
+ break;
352
+ case "data":
353
+ data = `${data}${value}
354
+ `;
355
+ break;
356
+ case "id":
357
+ id = value.includes("\0") ? void 0 : value;
358
+ break;
359
+ case "retry":
360
+ /^\d+$/.test(value) ? onRetry(parseInt(value, 10)) : onError(
361
+ new ParseError(`Invalid \`retry\` value: "${value}"`, {
362
+ type: "invalid-retry",
363
+ value,
364
+ line
365
+ })
366
+ );
367
+ break;
368
+ default:
369
+ onError(
370
+ new ParseError(
371
+ `Unknown field "${field.length > 20 ? `${field.slice(0, 20)}\u2026` : field}"`,
372
+ { type: "unknown-field", field, value, line }
373
+ )
374
+ );
375
+ break;
376
+ }
377
+ }
378
+ function dispatchEvent() {
379
+ data.length > 0 && onEvent({
380
+ id,
381
+ event: eventType || void 0,
382
+ // If the data buffer's last character is a U+000A LINE FEED (LF) character,
383
+ // then remove the last character from the data buffer.
384
+ data: data.endsWith(`
385
+ `) ? data.slice(0, -1) : data
386
+ }), id = void 0, data = "", eventType = "";
387
+ }
388
+ function reset(options = {}) {
389
+ incompleteLine && options.consume && parseLine(incompleteLine), isFirstChunk = true, id = void 0, data = "", eventType = "", incompleteLine = "";
390
+ }
391
+ return { feed, reset };
392
+ }
393
+ function splitLines(chunk) {
394
+ const lines = [];
395
+ let incompleteLine = "", searchIndex = 0;
396
+ for (; searchIndex < chunk.length; ) {
397
+ const crIndex = chunk.indexOf("\r", searchIndex), lfIndex = chunk.indexOf(`
398
+ `, searchIndex);
399
+ let lineEnd = -1;
400
+ if (crIndex !== -1 && lfIndex !== -1 ? lineEnd = Math.min(crIndex, lfIndex) : crIndex !== -1 ? crIndex === chunk.length - 1 ? lineEnd = -1 : lineEnd = crIndex : lfIndex !== -1 && (lineEnd = lfIndex), lineEnd === -1) {
401
+ incompleteLine = chunk.slice(searchIndex);
402
+ break;
403
+ } else {
404
+ const line = chunk.slice(searchIndex, lineEnd);
405
+ lines.push(line), searchIndex = lineEnd + 1, chunk[searchIndex - 1] === "\r" && chunk[searchIndex] === `
406
+ ` && searchIndex++;
407
+ }
408
+ }
409
+ return [lines, incompleteLine];
410
+ }
411
+
412
+ // ../../node_modules/.pnpm/eventsource-parser@3.0.6/node_modules/eventsource-parser/dist/stream.js
413
+ var EventSourceParserStream = class extends TransformStream {
414
+ constructor({ onError, onRetry, onComment } = {}) {
415
+ let parser;
416
+ super({
417
+ start(controller) {
418
+ parser = createParser({
419
+ onEvent: (event) => {
420
+ controller.enqueue(event);
421
+ },
422
+ onError(error) {
423
+ onError === "terminate" ? controller.error(error) : typeof onError == "function" && onError(error);
424
+ },
425
+ onRetry,
426
+ onComment
427
+ });
428
+ },
429
+ transform(chunk) {
430
+ parser.feed(chunk);
431
+ }
432
+ });
433
+ }
434
+ };
6
435
  function combineHeaders(...headers) {
7
436
  return headers.reduce(
8
437
  (combinedHeaders, currentHeaders) => ({
@@ -70,11 +499,11 @@ function handleFetchError({
70
499
  return error;
71
500
  }
72
501
  function getRuntimeEnvironmentUserAgent(globalThisAny = globalThis) {
73
- var _a, _b, _c;
502
+ var _a15, _b, _c;
74
503
  if (globalThisAny.window) {
75
504
  return `runtime/browser`;
76
505
  }
77
- if ((_a = globalThisAny.navigator) == null ? void 0 : _a.userAgent) {
506
+ if ((_a15 = globalThisAny.navigator) == null ? void 0 : _a15.userAgent) {
78
507
  return `runtime/${globalThisAny.navigator.userAgent.toLowerCase()}`;
79
508
  }
80
509
  if ((_c = (_b = globalThisAny.process) == null ? void 0 : _b.versions) == null ? void 0 : _c.node) {
@@ -140,8 +569,8 @@ function injectJsonInstructionIntoMessages({
140
569
  schemaPrefix,
141
570
  schemaSuffix
142
571
  }) {
143
- var _a, _b;
144
- const systemMessage = ((_a = messages[0]) == null ? void 0 : _a.role) === "system" ? { ...messages[0] } : { role: "system", content: "" };
572
+ var _a15, _b;
573
+ const systemMessage = ((_a15 = messages[0]) == null ? void 0 : _a15.role) === "system" ? { ...messages[0] } : { role: "system", content: "" };
145
574
  systemMessage.content = injectJsonInstruction({
146
575
  prompt: systemMessage.content,
147
576
  schema,
@@ -234,15 +663,15 @@ function loadSetting({
234
663
  return settingValue;
235
664
  }
236
665
  function mediaTypeToExtension(mediaType) {
237
- var _a;
666
+ var _a15;
238
667
  const [_type, subtype = ""] = mediaType.toLowerCase().split("/");
239
- return (_a = {
668
+ return (_a15 = {
240
669
  mpeg: "mp3",
241
670
  "x-wav": "wav",
242
671
  opus: "ogg",
243
672
  mp4: "m4a",
244
673
  "x-m4a": "m4a"
245
- }[subtype]) != null ? _a : subtype;
674
+ }[subtype]) != null ? _a15 : subtype;
246
675
  }
247
676
  var suspectProtoRx = /"__proto__"\s*:/;
248
677
  var suspectConstructorRx = /"constructor"\s*:/;
@@ -555,7 +984,7 @@ function tool(tool2) {
555
984
  }
556
985
  function createProviderDefinedToolFactory({
557
986
  id,
558
- name,
987
+ name: name14,
559
988
  inputSchema
560
989
  }) {
561
990
  return ({
@@ -569,7 +998,7 @@ function createProviderDefinedToolFactory({
569
998
  }) => tool({
570
999
  type: "provider-defined",
571
1000
  id,
572
- name,
1001
+ name: name14,
573
1002
  args,
574
1003
  inputSchema,
575
1004
  outputSchema,
@@ -582,7 +1011,7 @@ function createProviderDefinedToolFactory({
582
1011
  }
583
1012
  function createProviderDefinedToolFactoryWithOutputSchema({
584
1013
  id,
585
- name,
1014
+ name: name14,
586
1015
  inputSchema,
587
1016
  outputSchema
588
1017
  }) {
@@ -596,7 +1025,7 @@ function createProviderDefinedToolFactoryWithOutputSchema({
596
1025
  }) => tool({
597
1026
  type: "provider-defined",
598
1027
  id,
599
- name,
1028
+ name: name14,
600
1029
  args,
601
1030
  inputSchema,
602
1031
  outputSchema,
@@ -776,11 +1205,11 @@ function parseAnyDef() {
776
1205
  return {};
777
1206
  }
778
1207
  function parseArrayDef(def, refs) {
779
- var _a, _b, _c;
1208
+ var _a15, _b, _c;
780
1209
  const res = {
781
1210
  type: "array"
782
1211
  };
783
- if (((_a = def.type) == null ? void 0 : _a._def) && ((_c = (_b = def.type) == null ? void 0 : _b._def) == null ? void 0 : _c.typeName) !== ZodFirstPartyTypeKind.ZodAny) {
1212
+ if (((_a15 = def.type) == null ? void 0 : _a15._def) && ((_c = (_b = def.type) == null ? void 0 : _b._def) == null ? void 0 : _c.typeName) !== ZodFirstPartyTypeKind.ZodAny) {
784
1213
  res.items = parseDef(def.type._def, {
785
1214
  ...refs,
786
1215
  currentPath: [...refs.currentPath, "items"]
@@ -1139,8 +1568,8 @@ function escapeNonAlphaNumeric(source) {
1139
1568
  return result;
1140
1569
  }
1141
1570
  function addFormat(schema, value, message, refs) {
1142
- var _a;
1143
- if (schema.format || ((_a = schema.anyOf) == null ? void 0 : _a.some((x) => x.format))) {
1571
+ var _a15;
1572
+ if (schema.format || ((_a15 = schema.anyOf) == null ? void 0 : _a15.some((x) => x.format))) {
1144
1573
  if (!schema.anyOf) {
1145
1574
  schema.anyOf = [];
1146
1575
  }
@@ -1159,8 +1588,8 @@ function addFormat(schema, value, message, refs) {
1159
1588
  }
1160
1589
  }
1161
1590
  function addPattern(schema, regex, message, refs) {
1162
- var _a;
1163
- if (schema.pattern || ((_a = schema.allOf) == null ? void 0 : _a.some((x) => x.pattern))) {
1591
+ var _a15;
1592
+ if (schema.pattern || ((_a15 = schema.allOf) == null ? void 0 : _a15.some((x) => x.pattern))) {
1164
1593
  if (!schema.allOf) {
1165
1594
  schema.allOf = [];
1166
1595
  }
@@ -1179,7 +1608,7 @@ function addPattern(schema, regex, message, refs) {
1179
1608
  }
1180
1609
  }
1181
1610
  function stringifyRegExpWithFlags(regex, refs) {
1182
- var _a;
1611
+ var _a15;
1183
1612
  if (!refs.applyRegexFlags || !regex.flags) {
1184
1613
  return regex.source;
1185
1614
  }
@@ -1209,7 +1638,7 @@ function stringifyRegExpWithFlags(regex, refs) {
1209
1638
  pattern += source[i];
1210
1639
  pattern += `${source[i - 2]}-${source[i]}`.toUpperCase();
1211
1640
  inCharRange = false;
1212
- } else if (source[i + 1] === "-" && ((_a = source[i + 2]) == null ? void 0 : _a.match(/[a-z]/))) {
1641
+ } else if (source[i + 1] === "-" && ((_a15 = source[i + 2]) == null ? void 0 : _a15.match(/[a-z]/))) {
1213
1642
  pattern += source[i];
1214
1643
  inCharRange = true;
1215
1644
  } else {
@@ -1251,13 +1680,13 @@ function stringifyRegExpWithFlags(regex, refs) {
1251
1680
  return pattern;
1252
1681
  }
1253
1682
  function parseRecordDef(def, refs) {
1254
- var _a, _b, _c, _d, _e, _f;
1683
+ var _a15, _b, _c, _d, _e, _f;
1255
1684
  const schema = {
1256
1685
  type: "object",
1257
- additionalProperties: (_a = parseDef(def.valueType._def, {
1686
+ additionalProperties: (_a15 = parseDef(def.valueType._def, {
1258
1687
  ...refs,
1259
1688
  currentPath: [...refs.currentPath, "additionalProperties"]
1260
- })) != null ? _a : refs.allowedAdditionalProperties
1689
+ })) != null ? _a15 : refs.allowedAdditionalProperties
1261
1690
  };
1262
1691
  if (((_b = def.keyType) == null ? void 0 : _b._def.typeName) === ZodFirstPartyTypeKind.ZodString && ((_c = def.keyType._def.checks) == null ? void 0 : _c.length)) {
1263
1692
  const { type, ...keyType } = parseStringDef(def.keyType._def, refs);
@@ -1514,8 +1943,8 @@ function safeIsOptional(schema) {
1514
1943
  }
1515
1944
  }
1516
1945
  var parseOptionalDef = (def, refs) => {
1517
- var _a;
1518
- if (refs.currentPath.toString() === ((_a = refs.propertyPath) == null ? void 0 : _a.toString())) {
1946
+ var _a15;
1947
+ if (refs.currentPath.toString() === ((_a15 = refs.propertyPath) == null ? void 0 : _a15.toString())) {
1519
1948
  return parseDef(def.innerType._def, refs);
1520
1949
  }
1521
1950
  const innerSchema = parseDef(def.innerType._def, {
@@ -1685,10 +2114,10 @@ var selectParser = (def, typeName, refs) => {
1685
2114
  }
1686
2115
  };
1687
2116
  function parseDef(def, refs, forceResolution = false) {
1688
- var _a;
2117
+ var _a15;
1689
2118
  const seenItem = refs.seen.get(def);
1690
2119
  if (refs.override) {
1691
- const overrideResult = (_a = refs.override) == null ? void 0 : _a.call(
2120
+ const overrideResult = (_a15 = refs.override) == null ? void 0 : _a15.call(
1692
2121
  refs,
1693
2122
  def,
1694
2123
  refs,
@@ -1754,11 +2183,11 @@ var getRefs = (options) => {
1754
2183
  currentPath,
1755
2184
  propertyPath: void 0,
1756
2185
  seen: new Map(
1757
- Object.entries(_options.definitions).map(([name, def]) => [
2186
+ Object.entries(_options.definitions).map(([name14, def]) => [
1758
2187
  def._def,
1759
2188
  {
1760
2189
  def: def._def,
1761
- path: [..._options.basePath, _options.definitionPath, name],
2190
+ path: [..._options.basePath, _options.definitionPath, name14],
1762
2191
  // Resolution of references will be forced even though seen, so it's ok that the schema is undefined here for now.
1763
2192
  jsonSchema: void 0
1764
2193
  }
@@ -1767,50 +2196,50 @@ var getRefs = (options) => {
1767
2196
  };
1768
2197
  };
1769
2198
  var zodToJsonSchema = (schema, options) => {
1770
- var _a;
2199
+ var _a15;
1771
2200
  const refs = getRefs(options);
1772
2201
  let definitions = typeof options === "object" && options.definitions ? Object.entries(options.definitions).reduce(
1773
- (acc, [name2, schema2]) => {
1774
- var _a2;
2202
+ (acc, [name22, schema2]) => {
2203
+ var _a22;
1775
2204
  return {
1776
2205
  ...acc,
1777
- [name2]: (_a2 = parseDef(
2206
+ [name22]: (_a22 = parseDef(
1778
2207
  schema2._def,
1779
2208
  {
1780
2209
  ...refs,
1781
- currentPath: [...refs.basePath, refs.definitionPath, name2]
2210
+ currentPath: [...refs.basePath, refs.definitionPath, name22]
1782
2211
  },
1783
2212
  true
1784
- )) != null ? _a2 : parseAnyDef()
2213
+ )) != null ? _a22 : parseAnyDef()
1785
2214
  };
1786
2215
  },
1787
2216
  {}
1788
2217
  ) : void 0;
1789
- const name = typeof options === "string" ? options : (options == null ? void 0 : options.nameStrategy) === "title" ? void 0 : options == null ? void 0 : options.name;
1790
- const main = (_a = parseDef(
2218
+ const name14 = typeof options === "string" ? options : (options == null ? void 0 : options.nameStrategy) === "title" ? void 0 : options == null ? void 0 : options.name;
2219
+ const main = (_a15 = parseDef(
1791
2220
  schema._def,
1792
- name === void 0 ? refs : {
2221
+ name14 === void 0 ? refs : {
1793
2222
  ...refs,
1794
- currentPath: [...refs.basePath, refs.definitionPath, name]
2223
+ currentPath: [...refs.basePath, refs.definitionPath, name14]
1795
2224
  },
1796
2225
  false
1797
- )) != null ? _a : parseAnyDef();
2226
+ )) != null ? _a15 : parseAnyDef();
1798
2227
  const title = typeof options === "object" && options.name !== void 0 && options.nameStrategy === "title" ? options.name : void 0;
1799
2228
  if (title !== void 0) {
1800
2229
  main.title = title;
1801
2230
  }
1802
- const combined = name === void 0 ? definitions ? {
2231
+ const combined = name14 === void 0 ? definitions ? {
1803
2232
  ...main,
1804
2233
  [refs.definitionPath]: definitions
1805
2234
  } : main : {
1806
2235
  $ref: [
1807
2236
  ...refs.$refStrategy === "relative" ? [] : refs.basePath,
1808
2237
  refs.definitionPath,
1809
- name
2238
+ name14
1810
2239
  ].join("/"),
1811
2240
  [refs.definitionPath]: {
1812
2241
  ...definitions,
1813
- [name]: main
2242
+ [name14]: main
1814
2243
  }
1815
2244
  };
1816
2245
  combined.$schema = "http://json-schema.org/draft-07/schema#";
@@ -1818,8 +2247,8 @@ var zodToJsonSchema = (schema, options) => {
1818
2247
  };
1819
2248
  var zod_to_json_schema_default = zodToJsonSchema;
1820
2249
  function zod3Schema(zodSchema2, options) {
1821
- var _a;
1822
- const useReferences = (_a = options == null ? void 0 : options.useReferences) != null ? _a : false;
2250
+ var _a15;
2251
+ const useReferences = (_a15 = options == null ? void 0 : options.useReferences) != null ? _a15 : false;
1823
2252
  return jsonSchema(
1824
2253
  // defer json schema creation to avoid unnecessary computation when only validation is needed
1825
2254
  () => zod_to_json_schema_default(zodSchema2, {
@@ -1834,8 +2263,8 @@ function zod3Schema(zodSchema2, options) {
1834
2263
  );
1835
2264
  }
1836
2265
  function zod4Schema(zodSchema2, options) {
1837
- var _a;
1838
- const useReferences = (_a = options == null ? void 0 : options.useReferences) != null ? _a : false;
2266
+ var _a15;
2267
+ const useReferences = (_a15 = options == null ? void 0 : options.useReferences) != null ? _a15 : false;
1839
2268
  return jsonSchema(
1840
2269
  // defer json schema creation to avoid unnecessary computation when only validation is needed
1841
2270
  () => z4.toJSONSchema(zodSchema2, {
@@ -1908,8 +2337,8 @@ function withoutTrailingSlash(url) {
1908
2337
  return url == null ? void 0 : url.replace(/\/$/, "");
1909
2338
  }
1910
2339
  function getOpenAIMetadata(message) {
1911
- var _a, _b;
1912
- return (_b = (_a = message == null ? void 0 : message.providerOptions) == null ? void 0 : _a.openaiCompatible) != null ? _b : {};
2340
+ var _a15, _b;
2341
+ return (_b = (_a15 = message == null ? void 0 : message.providerOptions) == null ? void 0 : _a15.openaiCompatible) != null ? _b : {};
1913
2342
  }
1914
2343
  function convertToOpenAICompatibleChatMessages(prompt) {
1915
2344
  const messages = [];
@@ -2134,10 +2563,10 @@ var OpenAICompatibleChatLanguageModel = class {
2134
2563
  // type inferred via constructor
2135
2564
  constructor(modelId, config) {
2136
2565
  this.specificationVersion = "v2";
2137
- var _a, _b;
2566
+ var _a15, _b;
2138
2567
  this.modelId = modelId;
2139
2568
  this.config = config;
2140
- const errorStructure = (_a = config.errorStructure) != null ? _a : defaultOpenAICompatibleErrorStructure;
2569
+ const errorStructure = (_a15 = config.errorStructure) != null ? _a15 : defaultOpenAICompatibleErrorStructure;
2141
2570
  this.chunkSchema = createOpenAICompatibleChatChunkSchema(
2142
2571
  errorStructure.errorSchema
2143
2572
  );
@@ -2151,8 +2580,8 @@ var OpenAICompatibleChatLanguageModel = class {
2151
2580
  return this.config.provider.split(".")[0].trim();
2152
2581
  }
2153
2582
  get supportedUrls() {
2154
- var _a, _b, _c;
2155
- return (_c = (_b = (_a = this.config).supportedUrls) == null ? void 0 : _b.call(_a)) != null ? _c : {};
2583
+ var _a15, _b, _c;
2584
+ return (_c = (_b = (_a15 = this.config).supportedUrls) == null ? void 0 : _b.call(_a15)) != null ? _c : {};
2156
2585
  }
2157
2586
  async getArgs({
2158
2587
  prompt,
@@ -2169,14 +2598,14 @@ var OpenAICompatibleChatLanguageModel = class {
2169
2598
  toolChoice,
2170
2599
  tools
2171
2600
  }) {
2172
- var _a, _b, _c, _d;
2601
+ var _a15, _b, _c, _d;
2173
2602
  const warnings = [];
2174
2603
  const compatibleOptions = Object.assign(
2175
- (_a = await parseProviderOptions({
2604
+ (_a15 = await parseProviderOptions({
2176
2605
  provider: "openai-compatible",
2177
2606
  providerOptions,
2178
2607
  schema: openaiCompatibleProviderOptions
2179
- })) != null ? _a : {},
2608
+ })) != null ? _a15 : {},
2180
2609
  (_b = await parseProviderOptions({
2181
2610
  provider: this.providerOptionsName,
2182
2611
  providerOptions,
@@ -2242,7 +2671,7 @@ var OpenAICompatibleChatLanguageModel = class {
2242
2671
  };
2243
2672
  }
2244
2673
  async doGenerate(options) {
2245
- var _a, _b, _c, _d, _e, _f, _g, _h, _i, _j, _k, _l, _m, _n, _o, _p, _q;
2674
+ var _a15, _b, _c, _d, _e, _f, _g, _h, _i, _j, _k, _l, _m, _n, _o, _p, _q;
2246
2675
  const { args, warnings } = await this.getArgs({ ...options });
2247
2676
  const body = JSON.stringify(args);
2248
2677
  const {
@@ -2269,7 +2698,7 @@ var OpenAICompatibleChatLanguageModel = class {
2269
2698
  if (text != null && text.length > 0) {
2270
2699
  content.push({ type: "text", text });
2271
2700
  }
2272
- const reasoning = (_a = choice.message.reasoning_content) != null ? _a : choice.message.reasoning;
2701
+ const reasoning = (_a15 = choice.message.reasoning_content) != null ? _a15 : choice.message.reasoning;
2273
2702
  if (reasoning != null && reasoning.length > 0) {
2274
2703
  content.push({
2275
2704
  type: "reasoning",
@@ -2320,7 +2749,7 @@ var OpenAICompatibleChatLanguageModel = class {
2320
2749
  };
2321
2750
  }
2322
2751
  async doStream(options) {
2323
- var _a;
2752
+ var _a15;
2324
2753
  const { args, warnings } = await this.getArgs({ ...options });
2325
2754
  const body = {
2326
2755
  ...args,
@@ -2328,7 +2757,7 @@ var OpenAICompatibleChatLanguageModel = class {
2328
2757
  // only include stream_options when in strict compatibility mode:
2329
2758
  stream_options: this.config.includeUsage ? { include_usage: true } : void 0
2330
2759
  };
2331
- const metadataExtractor = (_a = this.config.metadataExtractor) == null ? void 0 : _a.createStreamExtractor();
2760
+ const metadataExtractor = (_a15 = this.config.metadataExtractor) == null ? void 0 : _a15.createStreamExtractor();
2332
2761
  const { responseHeaders, value: response } = await postJsonToApi({
2333
2762
  url: this.config.url({
2334
2763
  path: "/chat/completions",
@@ -2370,7 +2799,7 @@ var OpenAICompatibleChatLanguageModel = class {
2370
2799
  },
2371
2800
  // TODO we lost type safety on Chunk, most likely due to the error schema. MUST FIX
2372
2801
  transform(chunk, controller) {
2373
- var _a2, _b, _c, _d, _e, _f, _g, _h, _i, _j, _k, _l, _m;
2802
+ var _a22, _b, _c, _d, _e, _f, _g, _h, _i, _j, _k, _l, _m;
2374
2803
  if (options.includeRawChunks) {
2375
2804
  controller.enqueue({ type: "raw", rawValue: chunk.rawValue });
2376
2805
  }
@@ -2427,7 +2856,7 @@ var OpenAICompatibleChatLanguageModel = class {
2427
2856
  return;
2428
2857
  }
2429
2858
  const delta = choice.delta;
2430
- const reasoningContent = (_a2 = delta.reasoning_content) != null ? _a2 : delta.reasoning;
2859
+ const reasoningContent = (_a22 = delta.reasoning_content) != null ? _a22 : delta.reasoning;
2431
2860
  if (reasoningContent) {
2432
2861
  if (!isActiveReasoning) {
2433
2862
  controller.enqueue({
@@ -2537,7 +2966,7 @@ var OpenAICompatibleChatLanguageModel = class {
2537
2966
  }
2538
2967
  },
2539
2968
  flush(controller) {
2540
- var _a2, _b, _c, _d, _e, _f;
2969
+ var _a22, _b, _c, _d, _e, _f;
2541
2970
  if (isActiveReasoning) {
2542
2971
  controller.enqueue({ type: "reasoning-end", id: "reasoning-0" });
2543
2972
  }
@@ -2553,7 +2982,7 @@ var OpenAICompatibleChatLanguageModel = class {
2553
2982
  });
2554
2983
  controller.enqueue({
2555
2984
  type: "tool-call",
2556
- toolCallId: (_a2 = toolCall.id) != null ? _a2 : generateId(),
2985
+ toolCallId: (_a22 = toolCall.id) != null ? _a22 : generateId(),
2557
2986
  toolName: toolCall.function.name,
2558
2987
  input: toolCall.function.arguments
2559
2988
  });
@@ -2783,10 +3212,10 @@ var OpenAICompatibleCompletionLanguageModel = class {
2783
3212
  // type inferred via constructor
2784
3213
  constructor(modelId, config) {
2785
3214
  this.specificationVersion = "v2";
2786
- var _a;
3215
+ var _a15;
2787
3216
  this.modelId = modelId;
2788
3217
  this.config = config;
2789
- const errorStructure = (_a = config.errorStructure) != null ? _a : defaultOpenAICompatibleErrorStructure;
3218
+ const errorStructure = (_a15 = config.errorStructure) != null ? _a15 : defaultOpenAICompatibleErrorStructure;
2790
3219
  this.chunkSchema = createOpenAICompatibleCompletionChunkSchema(
2791
3220
  errorStructure.errorSchema
2792
3221
  );
@@ -2799,8 +3228,8 @@ var OpenAICompatibleCompletionLanguageModel = class {
2799
3228
  return this.config.provider.split(".")[0].trim();
2800
3229
  }
2801
3230
  get supportedUrls() {
2802
- var _a, _b, _c;
2803
- return (_c = (_b = (_a = this.config).supportedUrls) == null ? void 0 : _b.call(_a)) != null ? _c : {};
3231
+ var _a15, _b, _c;
3232
+ return (_c = (_b = (_a15 = this.config).supportedUrls) == null ? void 0 : _b.call(_a15)) != null ? _c : {};
2804
3233
  }
2805
3234
  async getArgs({
2806
3235
  prompt,
@@ -2817,13 +3246,13 @@ var OpenAICompatibleCompletionLanguageModel = class {
2817
3246
  tools,
2818
3247
  toolChoice
2819
3248
  }) {
2820
- var _a;
3249
+ var _a15;
2821
3250
  const warnings = [];
2822
- const completionOptions = (_a = await parseProviderOptions({
3251
+ const completionOptions = (_a15 = await parseProviderOptions({
2823
3252
  provider: this.providerOptionsName,
2824
3253
  providerOptions,
2825
3254
  schema: openaiCompatibleCompletionProviderOptions
2826
- })) != null ? _a : {};
3255
+ })) != null ? _a15 : {};
2827
3256
  if (topK != null) {
2828
3257
  warnings.push({ type: "unsupported-setting", setting: "topK" });
2829
3258
  }
@@ -2868,7 +3297,7 @@ var OpenAICompatibleCompletionLanguageModel = class {
2868
3297
  };
2869
3298
  }
2870
3299
  async doGenerate(options) {
2871
- var _a, _b, _c, _d, _e, _f;
3300
+ var _a15, _b, _c, _d, _e, _f;
2872
3301
  const { args, warnings } = await this.getArgs(options);
2873
3302
  const {
2874
3303
  responseHeaders,
@@ -2896,7 +3325,7 @@ var OpenAICompatibleCompletionLanguageModel = class {
2896
3325
  return {
2897
3326
  content,
2898
3327
  usage: {
2899
- inputTokens: (_b = (_a = response.usage) == null ? void 0 : _a.prompt_tokens) != null ? _b : void 0,
3328
+ inputTokens: (_b = (_a15 = response.usage) == null ? void 0 : _a15.prompt_tokens) != null ? _b : void 0,
2900
3329
  outputTokens: (_d = (_c = response.usage) == null ? void 0 : _c.completion_tokens) != null ? _d : void 0,
2901
3330
  totalTokens: (_f = (_e = response.usage) == null ? void 0 : _e.total_tokens) != null ? _f : void 0
2902
3331
  },
@@ -2946,7 +3375,7 @@ var OpenAICompatibleCompletionLanguageModel = class {
2946
3375
  controller.enqueue({ type: "stream-start", warnings });
2947
3376
  },
2948
3377
  transform(chunk, controller) {
2949
- var _a, _b, _c;
3378
+ var _a15, _b, _c;
2950
3379
  if (options.includeRawChunks) {
2951
3380
  controller.enqueue({ type: "raw", rawValue: chunk.rawValue });
2952
3381
  }
@@ -2973,7 +3402,7 @@ var OpenAICompatibleCompletionLanguageModel = class {
2973
3402
  });
2974
3403
  }
2975
3404
  if (value.usage != null) {
2976
- usage.inputTokens = (_a = value.usage.prompt_tokens) != null ? _a : void 0;
3405
+ usage.inputTokens = (_a15 = value.usage.prompt_tokens) != null ? _a15 : void 0;
2977
3406
  usage.outputTokens = (_b = value.usage.completion_tokens) != null ? _b : void 0;
2978
3407
  usage.totalTokens = (_c = value.usage.total_tokens) != null ? _c : void 0;
2979
3408
  }
@@ -3063,12 +3492,12 @@ var OpenAICompatibleEmbeddingModel = class {
3063
3492
  return this.config.provider;
3064
3493
  }
3065
3494
  get maxEmbeddingsPerCall() {
3066
- var _a;
3067
- return (_a = this.config.maxEmbeddingsPerCall) != null ? _a : 2048;
3495
+ var _a15;
3496
+ return (_a15 = this.config.maxEmbeddingsPerCall) != null ? _a15 : 2048;
3068
3497
  }
3069
3498
  get supportsParallelCalls() {
3070
- var _a;
3071
- return (_a = this.config.supportsParallelCalls) != null ? _a : true;
3499
+ var _a15;
3500
+ return (_a15 = this.config.supportsParallelCalls) != null ? _a15 : true;
3072
3501
  }
3073
3502
  get providerOptionsName() {
3074
3503
  return this.config.provider.split(".")[0].trim();
@@ -3079,13 +3508,13 @@ var OpenAICompatibleEmbeddingModel = class {
3079
3508
  abortSignal,
3080
3509
  providerOptions
3081
3510
  }) {
3082
- var _a, _b, _c;
3511
+ var _a15, _b, _c;
3083
3512
  const compatibleOptions = Object.assign(
3084
- (_a = await parseProviderOptions({
3513
+ (_a15 = await parseProviderOptions({
3085
3514
  provider: "openai-compatible",
3086
3515
  providerOptions,
3087
3516
  schema: openaiCompatibleEmbeddingProviderOptions
3088
- })) != null ? _a : {},
3517
+ })) != null ? _a15 : {},
3089
3518
  (_b = await parseProviderOptions({
3090
3519
  provider: this.providerOptionsName,
3091
3520
  providerOptions,
@@ -3159,7 +3588,7 @@ var OpenAICompatibleImageModel = class {
3159
3588
  headers,
3160
3589
  abortSignal
3161
3590
  }) {
3162
- var _a, _b, _c, _d, _e;
3591
+ var _a15, _b, _c, _d, _e;
3163
3592
  const warnings = [];
3164
3593
  if (aspectRatio != null) {
3165
3594
  warnings.push({
@@ -3171,7 +3600,7 @@ var OpenAICompatibleImageModel = class {
3171
3600
  if (seed != null) {
3172
3601
  warnings.push({ type: "unsupported-setting", setting: "seed" });
3173
3602
  }
3174
- const currentDate = (_c = (_b = (_a = this.config._internal) == null ? void 0 : _a.currentDate) == null ? void 0 : _b.call(_a)) != null ? _c : /* @__PURE__ */ new Date();
3603
+ const currentDate = (_c = (_b = (_a15 = this.config._internal) == null ? void 0 : _a15.currentDate) == null ? void 0 : _b.call(_a15)) != null ? _c : /* @__PURE__ */ new Date();
3175
3604
  const { value: response, responseHeaders } = await postJsonToApi({
3176
3605
  url: this.config.url({
3177
3606
  path: "/images/generations",
@@ -3803,9 +4232,9 @@ var anthropicProviderOptions = z.object({
3803
4232
  });
3804
4233
  var MAX_CACHE_BREAKPOINTS = 4;
3805
4234
  function getCacheControl(providerMetadata) {
3806
- var _a;
4235
+ var _a15;
3807
4236
  const anthropic2 = providerMetadata == null ? void 0 : providerMetadata.anthropic;
3808
- const cacheControlValue = (_a = anthropic2 == null ? void 0 : anthropic2.cacheControl) != null ? _a : anthropic2 == null ? void 0 : anthropic2.cache_control;
4237
+ const cacheControlValue = (_a15 = anthropic2 == null ? void 0 : anthropic2.cacheControl) != null ? _a15 : anthropic2 == null ? void 0 : anthropic2.cache_control;
3809
4238
  return cacheControlValue;
3810
4239
  }
3811
4240
  var CacheControlValidator = class {
@@ -4336,20 +4765,20 @@ async function convertToAnthropicMessagesPrompt({
4336
4765
  warnings,
4337
4766
  cacheControlValidator
4338
4767
  }) {
4339
- var _a, _b, _c, _d, _e;
4768
+ var _a15, _b, _c, _d, _e;
4340
4769
  const betas = /* @__PURE__ */ new Set();
4341
4770
  const blocks = groupIntoBlocks(prompt);
4342
4771
  const validator2 = cacheControlValidator || new CacheControlValidator();
4343
4772
  let system = void 0;
4344
4773
  const messages = [];
4345
4774
  async function shouldEnableCitations(providerMetadata) {
4346
- var _a2, _b2;
4775
+ var _a22, _b2;
4347
4776
  const anthropicOptions = await parseProviderOptions({
4348
4777
  provider: "anthropic",
4349
4778
  providerOptions: providerMetadata,
4350
4779
  schema: anthropicFilePartProviderOptions
4351
4780
  });
4352
- return (_b2 = (_a2 = anthropicOptions == null ? void 0 : anthropicOptions.citations) == null ? void 0 : _a2.enabled) != null ? _b2 : false;
4781
+ return (_b2 = (_a22 = anthropicOptions == null ? void 0 : anthropicOptions.citations) == null ? void 0 : _a22.enabled) != null ? _b2 : false;
4353
4782
  }
4354
4783
  async function getDocumentMetadata(providerMetadata) {
4355
4784
  const anthropicOptions = await parseProviderOptions({
@@ -4392,10 +4821,10 @@ async function convertToAnthropicMessagesPrompt({
4392
4821
  for (let j = 0; j < content.length; j++) {
4393
4822
  const part = content[j];
4394
4823
  const isLastPart = j === content.length - 1;
4395
- const cacheControl = (_a = validator2.getCacheControl(part.providerOptions, {
4824
+ const cacheControl = (_a15 = validator2.getCacheControl(part.providerOptions, {
4396
4825
  type: "user message part",
4397
4826
  canCache: true
4398
- })) != null ? _a : isLastPart ? validator2.getCacheControl(message.providerOptions, {
4827
+ })) != null ? _a15 : isLastPart ? validator2.getCacheControl(message.providerOptions, {
4399
4828
  type: "user message",
4400
4829
  canCache: true
4401
4830
  }) : void 0;
@@ -4881,7 +5310,7 @@ function mapAnthropicStopReason({
4881
5310
  }
4882
5311
  }
4883
5312
  function createCitationSource(citation, citationDocuments, generateId3) {
4884
- var _a;
5313
+ var _a15;
4885
5314
  if (citation.type !== "page_location" && citation.type !== "char_location") {
4886
5315
  return;
4887
5316
  }
@@ -4894,7 +5323,7 @@ function createCitationSource(citation, citationDocuments, generateId3) {
4894
5323
  sourceType: "document",
4895
5324
  id: generateId3(),
4896
5325
  mediaType: documentInfo.mediaType,
4897
- title: (_a = citation.document_title) != null ? _a : documentInfo.title,
5326
+ title: (_a15 = citation.document_title) != null ? _a15 : documentInfo.title,
4898
5327
  filename: documentInfo.filename,
4899
5328
  providerMetadata: {
4900
5329
  anthropic: citation.type === "page_location" ? {
@@ -4912,10 +5341,10 @@ function createCitationSource(citation, citationDocuments, generateId3) {
  var AnthropicMessagesLanguageModel = class {
  constructor(modelId, config) {
  this.specificationVersion = "v2";
- var _a;
+ var _a15;
  this.modelId = modelId;
  this.config = config;
- this.generateId = (_a = config.generateId) != null ? _a : generateId;
+ this.generateId = (_a15 = config.generateId) != null ? _a15 : generateId;
  }
  supportsUrl(url) {
  return url.protocol === "https:";
@@ -4924,8 +5353,8 @@ var AnthropicMessagesLanguageModel = class {
  return this.config.provider;
  }
  get supportedUrls() {
- var _a, _b, _c;
- return (_c = (_b = (_a = this.config).supportedUrls) == null ? void 0 : _b.call(_a)) != null ? _c : {};
+ var _a15, _b, _c;
+ return (_c = (_b = (_a15 = this.config).supportedUrls) == null ? void 0 : _b.call(_a15)) != null ? _c : {};
  }
  async getArgs({
  prompt,
@@ -4942,7 +5371,7 @@ var AnthropicMessagesLanguageModel = class {
  toolChoice,
  providerOptions
  }) {
- var _a, _b, _c, _d;
+ var _a15, _b, _c, _d;
  const warnings = [];
  if (frequencyPenalty != null) {
  warnings.push({
@@ -5006,7 +5435,7 @@ var AnthropicMessagesLanguageModel = class {
  const cacheControlValidator = new CacheControlValidator();
  const { prompt: messagesPrompt, betas } = await convertToAnthropicMessagesPrompt({
  prompt,
- sendReasoning: (_a = anthropicOptions == null ? void 0 : anthropicOptions.sendReasoning) != null ? _a : true,
+ sendReasoning: (_a15 = anthropicOptions == null ? void 0 : anthropicOptions.sendReasoning) != null ? _a15 : true,
  warnings,
  cacheControlValidator
  });
@@ -5138,38 +5567,38 @@ var AnthropicMessagesLanguageModel = class {
  );
  }
  buildRequestUrl(isStreaming) {
- var _a, _b, _c;
- return (_c = (_b = (_a = this.config).buildRequestUrl) == null ? void 0 : _b.call(_a, this.config.baseURL, isStreaming)) != null ? _c : `${this.config.baseURL}/messages`;
+ var _a15, _b, _c;
+ return (_c = (_b = (_a15 = this.config).buildRequestUrl) == null ? void 0 : _b.call(_a15, this.config.baseURL, isStreaming)) != null ? _c : `${this.config.baseURL}/messages`;
  }
  transformRequestBody(args) {
- var _a, _b, _c;
- return (_c = (_b = (_a = this.config).transformRequestBody) == null ? void 0 : _b.call(_a, args)) != null ? _c : args;
+ var _a15, _b, _c;
+ return (_c = (_b = (_a15 = this.config).transformRequestBody) == null ? void 0 : _b.call(_a15, args)) != null ? _c : args;
  }
  extractCitationDocuments(prompt) {
  const isCitationPart = (part) => {
- var _a, _b;
+ var _a15, _b;
  if (part.type !== "file") {
  return false;
  }
  if (part.mediaType !== "application/pdf" && part.mediaType !== "text/plain") {
  return false;
  }
- const anthropic2 = (_a = part.providerOptions) == null ? void 0 : _a.anthropic;
+ const anthropic2 = (_a15 = part.providerOptions) == null ? void 0 : _a15.anthropic;
  const citationsConfig = anthropic2 == null ? void 0 : anthropic2.citations;
  return (_b = citationsConfig == null ? void 0 : citationsConfig.enabled) != null ? _b : false;
  };
  return prompt.filter((message) => message.role === "user").flatMap((message) => message.content).filter(isCitationPart).map((part) => {
- var _a;
+ var _a15;
  const filePart = part;
  return {
- title: (_a = filePart.filename) != null ? _a : "Untitled Document",
+ title: (_a15 = filePart.filename) != null ? _a15 : "Untitled Document",
  filename: filePart.filename,
  mediaType: filePart.mediaType
  };
  });
  }
  async doGenerate(options) {
- var _a, _b, _c, _d, _e, _f, _g, _h;
+ var _a15, _b, _c, _d, _e, _f, _g, _h;
  const { args, warnings, betas, usesJsonResponseTool } = await this.getArgs(options);
  const citationDocuments = this.extractCitationDocuments(options.prompt);
  const {
@@ -5312,11 +5741,11 @@ var AnthropicMessagesLanguageModel = class {
  toolCallId: part.tool_use_id,
  toolName: "web_search",
  result: part.content.map((result) => {
- var _a2;
+ var _a22;
  return {
  url: result.url,
  title: result.title,
- pageAge: (_a2 = result.page_age) != null ? _a2 : null,
+ pageAge: (_a22 = result.page_age) != null ? _a22 : null,
  encryptedContent: result.encrypted_content,
  type: result.type
  };
@@ -5332,7 +5761,7 @@ var AnthropicMessagesLanguageModel = class {
  title: result.title,
  providerMetadata: {
  anthropic: {
- pageAge: (_a = result.page_age) != null ? _a : null
+ pageAge: (_a15 = result.page_age) != null ? _a15 : null
  }
  }
  });
@@ -5469,7 +5898,7 @@ var AnthropicMessagesLanguageModel = class {
  controller.enqueue({ type: "stream-start", warnings });
  },
  transform(chunk, controller) {
- var _a, _b, _c, _d, _e, _f, _g, _h, _i, _j;
+ var _a15, _b, _c, _d, _e, _f, _g, _h, _i, _j;
  if (options.includeRawChunks) {
  controller.enqueue({ type: "raw", rawValue: chunk.rawValue });
  }
@@ -5608,11 +6037,11 @@ var AnthropicMessagesLanguageModel = class {
5608
6037
  toolCallId: part.tool_use_id,
5609
6038
  toolName: "web_search",
5610
6039
  result: part.content.map((result) => {
5611
- var _a2;
6040
+ var _a22;
5612
6041
  return {
5613
6042
  url: result.url,
5614
6043
  title: result.title,
5615
- pageAge: (_a2 = result.page_age) != null ? _a2 : null,
6044
+ pageAge: (_a22 = result.page_age) != null ? _a22 : null,
5616
6045
  encryptedContent: result.encrypted_content,
5617
6046
  type: result.type
5618
6047
  };
@@ -5628,7 +6057,7 @@ var AnthropicMessagesLanguageModel = class {
5628
6057
  title: result.title,
5629
6058
  providerMetadata: {
5630
6059
  anthropic: {
5631
- pageAge: (_a = result.page_age) != null ? _a : null
6060
+ pageAge: (_a15 = result.page_age) != null ? _a15 : null
5632
6061
  }
5633
6062
  }
5634
6063
  });
@@ -6245,13 +6674,13 @@ var anthropicTools = {
6245
6674
  webSearch_20250305
6246
6675
  };
6247
6676
  function createAnthropic(options = {}) {
6248
- var _a, _b;
6249
- const baseURL = (_a = withoutTrailingSlash(
6677
+ var _a15, _b;
6678
+ const baseURL = (_a15 = withoutTrailingSlash(
6250
6679
  loadOptionalSetting({
6251
6680
  settingValue: options.baseURL,
6252
6681
  environmentVariableName: "ANTHROPIC_BASE_URL"
6253
6682
  })
6254
- )) != null ? _a : "https://api.anthropic.com/v1";
6683
+ )) != null ? _a15 : "https://api.anthropic.com/v1";
6255
6684
  const providerName = (_b = options.name) != null ? _b : "anthropic.messages";
6256
6685
  const getHeaders = () => withUserAgentSuffix(
6257
6686
  {
@@ -6266,13 +6695,13 @@ function createAnthropic(options = {}) {
6266
6695
  `ai-sdk/anthropic/${VERSION3}`
6267
6696
  );
6268
6697
  const createChatModel = (modelId) => {
6269
- var _a2;
6698
+ var _a22;
6270
6699
  return new AnthropicMessagesLanguageModel(modelId, {
6271
6700
  provider: providerName,
6272
6701
  baseURL,
6273
6702
  headers: getHeaders,
6274
6703
  fetch: options.fetch,
6275
- generateId: (_a2 = options.generateId) != null ? _a2 : generateId,
6704
+ generateId: (_a22 = options.generateId) != null ? _a22 : generateId,
6276
6705
  supportedUrls: () => ({
6277
6706
  "image/*": [/^https?:\/\/.*$/]
6278
6707
  })
@@ -6547,11 +6976,11 @@ function isEmptyObjectSchema(jsonSchema2) {
6547
6976
  return jsonSchema2 != null && typeof jsonSchema2 === "object" && jsonSchema2.type === "object" && (jsonSchema2.properties == null || Object.keys(jsonSchema2.properties).length === 0) && !jsonSchema2.additionalProperties;
6548
6977
  }
6549
6978
  function convertToGoogleGenerativeAIMessages(prompt, options) {
6550
- var _a;
6979
+ var _a15;
6551
6980
  const systemInstructionParts = [];
6552
6981
  const contents = [];
6553
6982
  let systemMessagesAllowed = true;
6554
- const isGemmaModel = (_a = options == null ? void 0 : options.isGemmaModel) != null ? _a : false;
6983
+ const isGemmaModel = (_a15 = options == null ? void 0 : options.isGemmaModel) != null ? _a15 : false;
6555
6984
  for (const { role, content } of prompt) {
6556
6985
  switch (role) {
6557
6986
  case "system": {
@@ -6599,8 +7028,8 @@ function convertToGoogleGenerativeAIMessages(prompt, options) {
6599
7028
  contents.push({
6600
7029
  role: "model",
6601
7030
  parts: content.map((part) => {
6602
- var _a2, _b, _c;
6603
- const thoughtSignature = ((_b = (_a2 = part.providerOptions) == null ? void 0 : _a2.google) == null ? void 0 : _b.thoughtSignature) != null ? String((_c = part.providerOptions.google) == null ? void 0 : _c.thoughtSignature) : void 0;
7031
+ var _a22, _b, _c;
7032
+ const thoughtSignature = ((_b = (_a22 = part.providerOptions) == null ? void 0 : _a22.google) == null ? void 0 : _b.thoughtSignature) != null ? String((_c = part.providerOptions.google) == null ? void 0 : _c.thoughtSignature) : void 0;
6604
7033
  switch (part.type) {
6605
7034
  case "text": {
6606
7035
  return part.text.length === 0 ? void 0 : {
@@ -6823,7 +7252,7 @@ function prepareTools3({
6823
7252
  toolChoice,
6824
7253
  modelId
6825
7254
  }) {
6826
- var _a;
7255
+ var _a15;
6827
7256
  tools = (tools == null ? void 0 : tools.length) ? tools : void 0;
6828
7257
  const toolWarnings = [];
6829
7258
  const isLatest = [
@@ -6942,7 +7371,7 @@ function prepareTools3({
6942
7371
  case "function":
6943
7372
  functionDeclarations.push({
6944
7373
  name: tool2.name,
6945
- description: (_a = tool2.description) != null ? _a : "",
7374
+ description: (_a15 = tool2.description) != null ? _a15 : "",
6946
7375
  parameters: convertJSONSchemaToOpenAPISchema(tool2.inputSchema)
6947
7376
  });
6948
7377
  break;
@@ -7025,17 +7454,17 @@ function mapGoogleGenerativeAIFinishReason({
7025
7454
  var GoogleGenerativeAILanguageModel = class {
7026
7455
  constructor(modelId, config) {
7027
7456
  this.specificationVersion = "v2";
7028
- var _a;
7457
+ var _a15;
7029
7458
  this.modelId = modelId;
7030
7459
  this.config = config;
7031
- this.generateId = (_a = config.generateId) != null ? _a : generateId;
7460
+ this.generateId = (_a15 = config.generateId) != null ? _a15 : generateId;
7032
7461
  }
7033
7462
  get provider() {
7034
7463
  return this.config.provider;
7035
7464
  }
7036
7465
  get supportedUrls() {
7037
- var _a, _b, _c;
7038
- return (_c = (_b = (_a = this.config).supportedUrls) == null ? void 0 : _b.call(_a)) != null ? _c : {};
7466
+ var _a15, _b, _c;
7467
+ return (_c = (_b = (_a15 = this.config).supportedUrls) == null ? void 0 : _b.call(_a15)) != null ? _c : {};
7039
7468
  }
7040
7469
  async getArgs({
7041
7470
  prompt,
@@ -7052,7 +7481,7 @@ var GoogleGenerativeAILanguageModel = class {
7052
7481
  toolChoice,
7053
7482
  providerOptions
7054
7483
  }) {
7055
- var _a;
7484
+ var _a15;
7056
7485
  const warnings = [];
7057
7486
  const googleOptions = await parseProviderOptions({
7058
7487
  provider: "google",
@@ -7098,7 +7527,7 @@ var GoogleGenerativeAILanguageModel = class {
7098
7527
  responseSchema: (responseFormat == null ? void 0 : responseFormat.type) === "json" && responseFormat.schema != null && // Google GenAI does not support all OpenAPI Schema features,
7099
7528
  // so this is needed as an escape hatch:
7100
7529
  // TODO convert into provider option
7101
- ((_a = googleOptions == null ? void 0 : googleOptions.structuredOutputs) != null ? _a : true) ? convertJSONSchemaToOpenAPISchema(responseFormat.schema) : void 0,
7530
+ ((_a15 = googleOptions == null ? void 0 : googleOptions.structuredOutputs) != null ? _a15 : true) ? convertJSONSchemaToOpenAPISchema(responseFormat.schema) : void 0,
7102
7531
  ...(googleOptions == null ? void 0 : googleOptions.audioTimestamp) && {
7103
7532
  audioTimestamp: googleOptions.audioTimestamp
7104
7533
  },
@@ -7124,7 +7553,7 @@ var GoogleGenerativeAILanguageModel = class {
7124
7553
  };
7125
7554
  }
7126
7555
  async doGenerate(options) {
7127
- var _a, _b, _c, _d, _e, _f, _g, _h, _i, _j, _k, _l, _m;
7556
+ var _a15, _b, _c, _d, _e, _f, _g, _h, _i, _j, _k, _l, _m;
7128
7557
  const { args, warnings } = await this.getArgs(options);
7129
7558
  const body = JSON.stringify(args);
7130
7559
  const mergedHeaders = combineHeaders(
@@ -7148,7 +7577,7 @@ var GoogleGenerativeAILanguageModel = class {
7148
7577
  });
7149
7578
  const candidate = response.candidates[0];
7150
7579
  const content = [];
7151
- const parts = (_b = (_a = candidate.content) == null ? void 0 : _a.parts) != null ? _b : [];
7580
+ const parts = (_b = (_a15 = candidate.content) == null ? void 0 : _a15.parts) != null ? _b : [];
7152
7581
  const usageMetadata = response.usageMetadata;
7153
7582
  let lastCodeExecutionToolCallId;
7154
7583
  for (const part of parts) {
@@ -7274,7 +7703,7 @@ var GoogleGenerativeAILanguageModel = class {
7274
7703
  controller.enqueue({ type: "stream-start", warnings });
7275
7704
  },
7276
7705
  transform(chunk, controller) {
7277
- var _a, _b, _c, _d, _e, _f, _g, _h, _i, _j, _k, _l;
7706
+ var _a15, _b, _c, _d, _e, _f, _g, _h, _i, _j, _k, _l;
7278
7707
  if (options.includeRawChunks) {
7279
7708
  controller.enqueue({ type: "raw", rawValue: chunk.rawValue });
7280
7709
  }
@@ -7285,7 +7714,7 @@ var GoogleGenerativeAILanguageModel = class {
7285
7714
  const value = chunk.value;
7286
7715
  const usageMetadata = value.usageMetadata;
7287
7716
  if (usageMetadata != null) {
7288
- usage.inputTokens = (_a = usageMetadata.promptTokenCount) != null ? _a : void 0;
7717
+ usage.inputTokens = (_a15 = usageMetadata.promptTokenCount) != null ? _a15 : void 0;
7289
7718
  usage.outputTokens = (_b = usageMetadata.candidatesTokenCount) != null ? _b : void 0;
7290
7719
  usage.totalTokens = (_c = usageMetadata.totalTokenCount) != null ? _c : void 0;
7291
7720
  usage.reasoningTokens = (_d = usageMetadata.thoughtsTokenCount) != null ? _d : void 0;
@@ -7510,7 +7939,7 @@ function extractSources({
7510
7939
  groundingMetadata,
7511
7940
  generateId: generateId3
7512
7941
  }) {
7513
- var _a, _b, _c;
7942
+ var _a15, _b, _c;
7514
7943
  if (!(groundingMetadata == null ? void 0 : groundingMetadata.groundingChunks)) {
7515
7944
  return void 0;
7516
7945
  }
@@ -7522,7 +7951,7 @@ function extractSources({
7522
7951
  sourceType: "url",
7523
7952
  id: generateId3(),
7524
7953
  url: chunk.web.uri,
7525
- title: (_a = chunk.web.title) != null ? _a : void 0
7954
+ title: (_a15 = chunk.web.title) != null ? _a15 : void 0
7526
7955
  });
7527
7956
  } else if (chunk.retrievedContext != null) {
7528
7957
  const uri = chunk.retrievedContext.uri;
@@ -7810,14 +8239,14 @@ var GoogleGenerativeAIImageModel = class {
7810
8239
  this.specificationVersion = "v2";
7811
8240
  }
7812
8241
  get maxImagesPerCall() {
7813
- var _a;
7814
- return (_a = this.settings.maxImagesPerCall) != null ? _a : 4;
8242
+ var _a15;
8243
+ return (_a15 = this.settings.maxImagesPerCall) != null ? _a15 : 4;
7815
8244
  }
7816
8245
  get provider() {
7817
8246
  return this.config.provider;
7818
8247
  }
7819
8248
  async doGenerate(options) {
7820
- var _a, _b, _c;
8249
+ var _a15, _b, _c;
7821
8250
  const {
7822
8251
  prompt,
7823
8252
  n = 1,
@@ -7848,7 +8277,7 @@ var GoogleGenerativeAIImageModel = class {
7848
8277
  providerOptions,
7849
8278
  schema: googleImageProviderOptionsSchema
7850
8279
  });
7851
- const currentDate = (_c = (_b = (_a = this.config._internal) == null ? void 0 : _a.currentDate) == null ? void 0 : _b.call(_a)) != null ? _c : /* @__PURE__ */ new Date();
8280
+ const currentDate = (_c = (_b = (_a15 = this.config._internal) == null ? void 0 : _a15.currentDate) == null ? void 0 : _b.call(_a15)) != null ? _c : /* @__PURE__ */ new Date();
7852
8281
  const parameters = {
7853
8282
  sampleCount: n
7854
8283
  };
@@ -7909,8 +8338,8 @@ var googleImageProviderOptionsSchema = lazySchema(
7909
8338
  )
7910
8339
  );
7911
8340
  function createGoogleGenerativeAI(options = {}) {
7912
- var _a, _b;
7913
- const baseURL = (_a = withoutTrailingSlash(options.baseURL)) != null ? _a : "https://generativelanguage.googleapis.com/v1beta";
8341
+ var _a15, _b;
8342
+ const baseURL = (_a15 = withoutTrailingSlash(options.baseURL)) != null ? _a15 : "https://generativelanguage.googleapis.com/v1beta";
7914
8343
  const providerName = (_b = options.name) != null ? _b : "google.generative-ai";
7915
8344
  const getHeaders = () => withUserAgentSuffix(
7916
8345
  {
@@ -7924,12 +8353,12 @@ function createGoogleGenerativeAI(options = {}) {
7924
8353
  `ai-sdk/google/${VERSION4}`
7925
8354
  );
7926
8355
  const createChatModel = (modelId) => {
7927
- var _a2;
8356
+ var _a22;
7928
8357
  return new GoogleGenerativeAILanguageModel(modelId, {
7929
8358
  provider: providerName,
7930
8359
  baseURL,
7931
8360
  headers: getHeaders,
7932
- generateId: (_a2 = options.generateId) != null ? _a2 : generateId,
8361
+ generateId: (_a22 = options.generateId) != null ? _a22 : generateId,
7933
8362
  supportedUrls: () => ({
7934
8363
  "*": [
7935
8364
  // Google Generative Language "files" endpoint
@@ -8034,7 +8463,7 @@ function convertToOpenAIChatMessages({
8034
8463
  messages.push({
8035
8464
  role: "user",
8036
8465
  content: content.map((part, index) => {
8037
- var _a, _b, _c;
8466
+ var _a15, _b, _c;
8038
8467
  switch (part.type) {
8039
8468
  case "text": {
8040
8469
  return { type: "text", text: part.text };
@@ -8047,7 +8476,7 @@ function convertToOpenAIChatMessages({
8047
8476
  image_url: {
8048
8477
  url: part.data instanceof URL ? part.data.toString() : `data:${mediaType};base64,${convertToBase64(part.data)}`,
8049
8478
  // OpenAI specific extension: image detail
8050
- detail: (_b = (_a = part.providerOptions) == null ? void 0 : _a.openai) == null ? void 0 : _b.imageDetail
8479
+ detail: (_b = (_a15 = part.providerOptions) == null ? void 0 : _a15.openai) == null ? void 0 : _b.imageDetail
8051
8480
  }
8052
8481
  };
8053
8482
  } else if (part.mediaType.startsWith("audio/")) {
@@ -8513,13 +8942,13 @@ var OpenAIChatLanguageModel = class {
8513
8942
  toolChoice,
8514
8943
  providerOptions
8515
8944
  }) {
8516
- var _a, _b, _c, _d;
8945
+ var _a15, _b, _c, _d;
8517
8946
  const warnings = [];
8518
- const openaiOptions = (_a = await parseProviderOptions({
8947
+ const openaiOptions = (_a15 = await parseProviderOptions({
8519
8948
  provider: "openai",
8520
8949
  providerOptions,
8521
8950
  schema: openaiChatLanguageModelOptions
8522
- })) != null ? _a : {};
8951
+ })) != null ? _a15 : {};
8523
8952
  const structuredOutputs = (_b = openaiOptions.structuredOutputs) != null ? _b : true;
8524
8953
  if (topK != null) {
8525
8954
  warnings.push({
@@ -8689,7 +9118,7 @@ var OpenAIChatLanguageModel = class {
8689
9118
  };
8690
9119
  }
8691
9120
  async doGenerate(options) {
8692
- var _a, _b, _c, _d, _e, _f, _g, _h, _i, _j, _k, _l, _m, _n;
9121
+ var _a15, _b, _c, _d, _e, _f, _g, _h, _i, _j, _k, _l, _m, _n;
8693
9122
  const { args: body, warnings } = await this.getArgs(options);
8694
9123
  const {
8695
9124
  responseHeaders,
@@ -8715,7 +9144,7 @@ var OpenAIChatLanguageModel = class {
8715
9144
  if (text != null && text.length > 0) {
8716
9145
  content.push({ type: "text", text });
8717
9146
  }
8718
- for (const toolCall of (_a = choice.message.tool_calls) != null ? _a : []) {
9147
+ for (const toolCall of (_a15 = choice.message.tool_calls) != null ? _a15 : []) {
8719
9148
  content.push({
8720
9149
  type: "tool-call",
8721
9150
  toolCallId: (_b = toolCall.id) != null ? _b : generateId(),
@@ -8804,7 +9233,7 @@ var OpenAIChatLanguageModel = class {
8804
9233
  controller.enqueue({ type: "stream-start", warnings });
8805
9234
  },
8806
9235
  transform(chunk, controller) {
8807
- var _a, _b, _c, _d, _e, _f, _g, _h, _i, _j, _k, _l, _m, _n, _o, _p, _q, _r, _s, _t, _u, _v, _w, _x;
9236
+ var _a15, _b, _c, _d, _e, _f, _g, _h, _i, _j, _k, _l, _m, _n, _o, _p, _q, _r, _s, _t, _u, _v, _w, _x;
8808
9237
  if (options.includeRawChunks) {
8809
9238
  controller.enqueue({ type: "raw", rawValue: chunk.rawValue });
8810
9239
  }
@@ -8830,7 +9259,7 @@ var OpenAIChatLanguageModel = class {
8830
9259
  }
8831
9260
  }
8832
9261
  if (value.usage != null) {
8833
- usage.inputTokens = (_a = value.usage.prompt_tokens) != null ? _a : void 0;
9262
+ usage.inputTokens = (_a15 = value.usage.prompt_tokens) != null ? _a15 : void 0;
8834
9263
  usage.outputTokens = (_b = value.usage.completion_tokens) != null ? _b : void 0;
8835
9264
  usage.totalTokens = (_c = value.usage.total_tokens) != null ? _c : void 0;
8836
9265
  usage.reasoningTokens = (_e = (_d = value.usage.completion_tokens_details) == null ? void 0 : _d.reasoning_tokens) != null ? _e : void 0;
@@ -8992,11 +9421,11 @@ function supportsPriorityProcessing(modelId) {
8992
9421
  return modelId.startsWith("gpt-4") || modelId.startsWith("gpt-5-mini") || modelId.startsWith("gpt-5") && !modelId.startsWith("gpt-5-nano") && !modelId.startsWith("gpt-5-chat") || modelId.startsWith("o3") || modelId.startsWith("o4-mini");
8993
9422
  }
8994
9423
  function getSystemMessageMode(modelId) {
8995
- var _a, _b;
9424
+ var _a15, _b;
8996
9425
  if (!isReasoningModel(modelId)) {
8997
9426
  return "system";
8998
9427
  }
8999
- return (_b = (_a = reasoningModels[modelId]) == null ? void 0 : _a.systemMessageMode) != null ? _b : "developer";
9428
+ return (_b = (_a15 = reasoningModels[modelId]) == null ? void 0 : _a15.systemMessageMode) != null ? _b : "developer";
9000
9429
  }
9001
9430
  var reasoningModels = {
9002
9431
  o3: {
@@ -9301,7 +9730,7 @@ var OpenAICompletionLanguageModel = class {
9301
9730
  };
9302
9731
  }
9303
9732
  async doGenerate(options) {
9304
- var _a, _b, _c;
9733
+ var _a15, _b, _c;
9305
9734
  const { args, warnings } = await this.getArgs(options);
9306
9735
  const {
9307
9736
  responseHeaders,
@@ -9329,7 +9758,7 @@ var OpenAICompletionLanguageModel = class {
9329
9758
  return {
9330
9759
  content: [{ type: "text", text: choice.text }],
9331
9760
  usage: {
9332
- inputTokens: (_a = response.usage) == null ? void 0 : _a.prompt_tokens,
9761
+ inputTokens: (_a15 = response.usage) == null ? void 0 : _a15.prompt_tokens,
9333
9762
  outputTokens: (_b = response.usage) == null ? void 0 : _b.completion_tokens,
9334
9763
  totalTokens: (_c = response.usage) == null ? void 0 : _c.total_tokens
9335
9764
  },
@@ -9483,7 +9912,7 @@ var OpenAIEmbeddingModel = class {
9483
9912
  abortSignal,
9484
9913
  providerOptions
9485
9914
  }) {
9486
- var _a;
9915
+ var _a15;
9487
9916
  if (values.length > this.maxEmbeddingsPerCall) {
9488
9917
  throw new TooManyEmbeddingValuesForCallError({
9489
9918
  provider: this.provider,
@@ -9492,11 +9921,11 @@ var OpenAIEmbeddingModel = class {
9492
9921
  values
9493
9922
  });
9494
9923
  }
9495
- const openaiOptions = (_a = await parseProviderOptions({
9924
+ const openaiOptions = (_a15 = await parseProviderOptions({
9496
9925
  provider: "openai",
9497
9926
  providerOptions,
9498
9927
  schema: openaiEmbeddingProviderOptions
9499
- })) != null ? _a : {};
9928
+ })) != null ? _a15 : {};
9500
9929
  const {
9501
9930
  responseHeaders,
9502
9931
  value: response,
@@ -9557,8 +9986,8 @@ var OpenAIImageModel = class {
9557
9986
  this.specificationVersion = "v2";
9558
9987
  }
9559
9988
  get maxImagesPerCall() {
9560
- var _a;
9561
- return (_a = modelMaxImagesPerCall[this.modelId]) != null ? _a : 1;
9989
+ var _a15;
9990
+ return (_a15 = modelMaxImagesPerCall[this.modelId]) != null ? _a15 : 1;
9562
9991
  }
9563
9992
  get provider() {
9564
9993
  return this.config.provider;
@@ -9573,7 +10002,7 @@ var OpenAIImageModel = class {
9573
10002
  headers,
9574
10003
  abortSignal
9575
10004
  }) {
9576
- var _a, _b, _c, _d;
10005
+ var _a15, _b, _c, _d;
9577
10006
  const warnings = [];
9578
10007
  if (aspectRatio != null) {
9579
10008
  warnings.push({
@@ -9585,7 +10014,7 @@ var OpenAIImageModel = class {
9585
10014
  if (seed != null) {
9586
10015
  warnings.push({ type: "unsupported-setting", setting: "seed" });
9587
10016
  }
9588
- const currentDate = (_c = (_b = (_a = this.config._internal) == null ? void 0 : _a.currentDate) == null ? void 0 : _b.call(_a)) != null ? _c : /* @__PURE__ */ new Date();
10017
+ const currentDate = (_c = (_b = (_a15 = this.config._internal) == null ? void 0 : _a15.currentDate) == null ? void 0 : _b.call(_a15)) != null ? _c : /* @__PURE__ */ new Date();
9589
10018
  const { value: response, responseHeaders } = await postJsonToApi({
9590
10019
  url: this.config.url({
9591
10020
  path: "/images/generations",
@@ -9945,7 +10374,7 @@ async function convertToOpenAIResponsesInput({
9945
10374
  store,
9946
10375
  hasLocalShellTool = false
9947
10376
  }) {
9948
- var _a, _b, _c, _d;
10377
+ var _a15, _b, _c, _d;
9949
10378
  const input = [];
9950
10379
  const warnings = [];
9951
10380
  for (const { role, content } of prompt) {
@@ -9980,7 +10409,7 @@ async function convertToOpenAIResponsesInput({
9980
10409
  input.push({
9981
10410
  role: "user",
9982
10411
  content: content.map((part, index) => {
9983
- var _a2, _b2, _c2;
10412
+ var _a22, _b2, _c2;
9984
10413
  switch (part.type) {
9985
10414
  case "text": {
9986
10415
  return { type: "input_text", text: part.text };
@@ -9993,7 +10422,7 @@ async function convertToOpenAIResponsesInput({
9993
10422
  ...part.data instanceof URL ? { image_url: part.data.toString() } : typeof part.data === "string" && isFileId(part.data, fileIdPrefixes) ? { file_id: part.data } : {
9994
10423
  image_url: `data:${mediaType};base64,${convertToBase64(part.data)}`
9995
10424
  },
9996
- detail: (_b2 = (_a2 = part.providerOptions) == null ? void 0 : _a2.openai) == null ? void 0 : _b2.imageDetail
10425
+ detail: (_b2 = (_a22 = part.providerOptions) == null ? void 0 : _a22.openai) == null ? void 0 : _b2.imageDetail
9997
10426
  };
9998
10427
  } else if (part.mediaType === "application/pdf") {
9999
10428
  if (part.data instanceof URL) {
@@ -10025,7 +10454,7 @@ async function convertToOpenAIResponsesInput({
10025
10454
  for (const part of content) {
10026
10455
  switch (part.type) {
10027
10456
  case "text": {
10028
- const id = (_b = (_a = part.providerOptions) == null ? void 0 : _a.openai) == null ? void 0 : _b.itemId;
10457
+ const id = (_b = (_a15 = part.providerOptions) == null ? void 0 : _a15.openai) == null ? void 0 : _b.itemId;
10029
10458
  if (store && id != null) {
10030
10459
  input.push({ type: "item_reference", id });
10031
10460
  break;
@@ -10898,7 +11327,7 @@ var OpenAIResponsesLanguageModel = class {
10898
11327
  toolChoice,
10899
11328
  responseFormat
10900
11329
  }) {
10901
- var _a, _b, _c, _d;
11330
+ var _a15, _b, _c, _d;
10902
11331
  const warnings = [];
10903
11332
  const modelConfig = getResponsesModelConfig(this.modelId);
10904
11333
  if (topK != null) {
@@ -10938,7 +11367,7 @@ var OpenAIResponsesLanguageModel = class {
10938
11367
  prompt,
10939
11368
  systemMessageMode: modelConfig.systemMessageMode,
10940
11369
  fileIdPrefixes: this.config.fileIdPrefixes,
10941
- store: (_a = openaiOptions == null ? void 0 : openaiOptions.store) != null ? _a : true,
11370
+ store: (_a15 = openaiOptions == null ? void 0 : openaiOptions.store) != null ? _a15 : true,
10942
11371
  hasLocalShellTool: hasOpenAITool("openai.local_shell")
10943
11372
  });
10944
11373
  warnings.push(...inputWarnings);
@@ -11093,7 +11522,7 @@ var OpenAIResponsesLanguageModel = class {
11093
11522
  };
11094
11523
  }
11095
11524
  async doGenerate(options) {
11096
- var _a, _b, _c, _d, _e, _f, _g, _h, _i, _j, _k, _l, _m, _n, _o, _p, _q, _r, _s;
11525
+ var _a15, _b, _c, _d, _e, _f, _g, _h, _i, _j, _k, _l, _m, _n, _o, _p, _q, _r, _s;
11097
11526
  const {
11098
11527
  args: body,
11099
11528
  warnings,
@@ -11145,7 +11574,7 @@ var OpenAIResponsesLanguageModel = class {
11145
11574
  providerMetadata: {
11146
11575
  openai: {
11147
11576
  itemId: part.id,
11148
- reasoningEncryptedContent: (_a = part.encrypted_content) != null ? _a : null
11577
+ reasoningEncryptedContent: (_a15 = part.encrypted_content) != null ? _a15 : null
11149
11578
  }
11150
11579
  }
11151
11580
  });
@@ -11414,7 +11843,7 @@ var OpenAIResponsesLanguageModel = class {
11414
11843
  controller.enqueue({ type: "stream-start", warnings });
11415
11844
  },
11416
11845
  transform(chunk, controller) {
11417
- var _a, _b, _c, _d, _e, _f, _g, _h, _i, _j, _k, _l, _m, _n, _o, _p, _q, _r, _s, _t, _u, _v;
11846
+ var _a15, _b, _c, _d, _e, _f, _g, _h, _i, _j, _k, _l, _m, _n, _o, _p, _q, _r, _s, _t, _u, _v;
11418
11847
  if (options.includeRawChunks) {
11419
11848
  controller.enqueue({ type: "raw", rawValue: chunk.rawValue });
11420
11849
  }
@@ -11524,7 +11953,7 @@ var OpenAIResponsesLanguageModel = class {
11524
11953
  providerMetadata: {
11525
11954
  openai: {
11526
11955
  itemId: value.item.id,
11527
- reasoningEncryptedContent: (_a = value.item.encrypted_content) != null ? _a : null
11956
+ reasoningEncryptedContent: (_a15 = value.item.encrypted_content) != null ? _a15 : null
11528
11957
  }
11529
11958
  }
11530
11959
  });
@@ -11902,11 +12331,11 @@ function getResponsesModelConfig(modelId) {
11902
12331
  };
11903
12332
  }
11904
12333
  function mapWebSearchOutput(action) {
11905
- var _a;
12334
+ var _a15;
11906
12335
  switch (action.type) {
11907
12336
  case "search":
11908
12337
  return {
11909
- action: { type: "search", query: (_a = action.query) != null ? _a : void 0 },
12338
+ action: { type: "search", query: (_a15 = action.query) != null ? _a15 : void 0 },
11910
12339
  // include sources when provided by the Responses API (behind include flag)
11911
12340
  ...action.sources != null && { sources: action.sources }
11912
12341
  };
@@ -11991,8 +12420,8 @@ var OpenAISpeechModel = class {
11991
12420
  };
11992
12421
  }
11993
12422
  async doGenerate(options) {
11994
- var _a, _b, _c;
11995
- const currentDate = (_c = (_b = (_a = this.config._internal) == null ? void 0 : _a.currentDate) == null ? void 0 : _b.call(_a)) != null ? _c : /* @__PURE__ */ new Date();
12423
+ var _a15, _b, _c;
12424
+ const currentDate = (_c = (_b = (_a15 = this.config._internal) == null ? void 0 : _a15.currentDate) == null ? void 0 : _b.call(_a15)) != null ? _c : /* @__PURE__ */ new Date();
11996
12425
  const { requestBody, warnings } = await this.getArgs(options);
11997
12426
  const {
11998
12427
  value: audio,
@@ -12203,8 +12632,8 @@ var OpenAITranscriptionModel = class {
12203
12632
  };
12204
12633
  }
12205
12634
  async doGenerate(options) {
12206
- var _a, _b, _c, _d, _e, _f, _g, _h;
12207
- const currentDate = (_c = (_b = (_a = this.config._internal) == null ? void 0 : _a.currentDate) == null ? void 0 : _b.call(_a)) != null ? _c : /* @__PURE__ */ new Date();
12635
+ var _a15, _b, _c, _d, _e, _f, _g, _h;
12636
+ const currentDate = (_c = (_b = (_a15 = this.config._internal) == null ? void 0 : _a15.currentDate) == null ? void 0 : _b.call(_a15)) != null ? _c : /* @__PURE__ */ new Date();
12208
12637
  const { formData, warnings } = await this.getArgs(options);
12209
12638
  const {
12210
12639
  value: response,
@@ -12250,13 +12679,13 @@ var OpenAITranscriptionModel = class {
12250
12679
  };
12251
12680
  var VERSION5 = "2.0.69" ;
12252
12681
  function createOpenAI(options = {}) {
12253
- var _a, _b;
12254
- const baseURL = (_a = withoutTrailingSlash(
12682
+ var _a15, _b;
12683
+ const baseURL = (_a15 = withoutTrailingSlash(
12255
12684
  loadOptionalSetting({
12256
12685
  settingValue: options.baseURL,
12257
12686
  environmentVariableName: "OPENAI_BASE_URL"
12258
12687
  })
12259
- )) != null ? _a : "https://api.openai.com/v1";
12688
+ )) != null ? _a15 : "https://api.openai.com/v1";
12260
12689
  const providerName = (_b = options.name) != null ? _b : "openai";
12261
12690
  const getHeaders = () => withUserAgentSuffix(
12262
12691
  {
@@ -12345,6 +12774,6 @@ function createOpenAI(options = {}) {
  }
  createOpenAI();
 
- export { MastraModelGateway, OpenAICompatibleImageModel, combineHeaders, convertBase64ToUint8Array, convertToBase64, createAnthropic, createBinaryResponseHandler, createEventSourceResponseHandler, createGoogleGenerativeAI, createJsonErrorResponseHandler, createJsonResponseHandler, createOpenAI, createOpenAICompatible, createProviderDefinedToolFactoryWithOutputSchema, generateId, injectJsonInstructionIntoMessages, isParsableJson, lazySchema, lazyValidator, loadApiKey, loadSetting, mediaTypeToExtension, parseProviderOptions, postFormDataToApi, postJsonToApi, validateTypes, withUserAgentSuffix, withoutTrailingSlash, zodSchema };
- //# sourceMappingURL=chunk-ARAQIW6E.js.map
- //# sourceMappingURL=chunk-ARAQIW6E.js.map
+ export { APICallError, EmptyResponseBodyError, EventSourceParserStream, InvalidArgumentError, InvalidPromptError, InvalidResponseDataError, JSONParseError, LoadAPIKeyError, MastraModelGateway, NoSuchModelError, OpenAICompatibleImageModel, TooManyEmbeddingValuesForCallError, TypeValidationError, UnsupportedFunctionalityError, combineHeaders, convertBase64ToUint8Array, convertToBase64, createAnthropic, createBinaryResponseHandler, createEventSourceResponseHandler, createGoogleGenerativeAI, createJsonErrorResponseHandler, createJsonResponseHandler, createOpenAI, createOpenAICompatible, createProviderDefinedToolFactoryWithOutputSchema, generateId, injectJsonInstructionIntoMessages, isParsableJson, lazySchema, lazyValidator, loadApiKey, loadSetting, mediaTypeToExtension, parseProviderOptions, postFormDataToApi, postJsonToApi, validateTypes, withUserAgentSuffix, withoutTrailingSlash, zodSchema };
+ //# sourceMappingURL=chunk-K66U47VL.js.map
+ //# sourceMappingURL=chunk-K66U47VL.js.map