@mastra/core 1.0.0-beta.6 → 1.0.0-beta.8

This diff shows the changes between publicly released versions of this package as they appear in their respective public registries. It is provided for informational purposes only.
Files changed (203)
  1. package/CHANGELOG.md +195 -0
  2. package/dist/agent/agent-legacy.d.ts.map +1 -1
  3. package/dist/agent/agent.d.ts.map +1 -1
  4. package/dist/agent/index.cjs +9 -9
  5. package/dist/agent/index.js +2 -2
  6. package/dist/agent/message-list/index.cjs +3 -3
  7. package/dist/agent/message-list/index.js +1 -1
  8. package/dist/agent/workflows/prepare-stream/map-results-step.d.ts.map +1 -1
  9. package/dist/agent/workflows/prepare-stream/prepare-memory-step.d.ts.map +1 -1
  10. package/dist/agent/workflows/prepare-stream/schema.d.ts +2 -1
  11. package/dist/agent/workflows/prepare-stream/schema.d.ts.map +1 -1
  12. package/dist/{chunk-NHNXIYDG.cjs → chunk-3EYBRHB2.cjs} +2 -2
  13. package/dist/chunk-3EYBRHB2.cjs.map +1 -0
  14. package/dist/{chunk-AQAIS7FH.cjs → chunk-3JA6VPNM.cjs} +2 -2
  15. package/dist/chunk-3JA6VPNM.cjs.map +1 -0
  16. package/dist/{chunk-WDRA4WKG.js → chunk-4GTU7MB2.js} +5 -5
  17. package/dist/{chunk-WDRA4WKG.js.map → chunk-4GTU7MB2.js.map} +1 -1
  18. package/dist/{chunk-S5FPOCQF.cjs → chunk-6BYHQ52F.cjs} +24 -24
  19. package/dist/{chunk-S5FPOCQF.cjs.map → chunk-6BYHQ52F.cjs.map} +1 -1
  20. package/dist/{chunk-RTROJFEU.cjs → chunk-6C2PU6J4.cjs} +8 -8
  21. package/dist/{chunk-RTROJFEU.cjs.map → chunk-6C2PU6J4.cjs.map} +1 -1
  22. package/dist/{chunk-6GOLI2EY.js → chunk-7PXCGGMO.js} +8 -12
  23. package/dist/chunk-7PXCGGMO.js.map +1 -0
  24. package/dist/chunk-AZQMPE7G.cjs +4981 -0
  25. package/dist/chunk-AZQMPE7G.cjs.map +1 -0
  26. package/dist/{chunk-HNHZGFZY.cjs → chunk-CZEJQSWB.cjs} +10 -2
  27. package/dist/chunk-CZEJQSWB.cjs.map +1 -0
  28. package/dist/{chunk-APMPOYPI.js → chunk-DMPHPHMU.js} +3 -3
  29. package/dist/{chunk-APMPOYPI.js.map → chunk-DMPHPHMU.js.map} +1 -1
  30. package/dist/{chunk-45NA5ZD3.js → chunk-EDNHZZUP.js} +160 -60
  31. package/dist/chunk-EDNHZZUP.js.map +1 -0
  32. package/dist/{chunk-LEZIKVRQ.js → chunk-F54IK3GJ.js} +2 -2
  33. package/dist/chunk-F54IK3GJ.js.map +1 -0
  34. package/dist/{chunk-FYP3AT6W.js → chunk-FNSFXWDN.js} +2 -2
  35. package/dist/chunk-FNSFXWDN.js.map +1 -0
  36. package/dist/{chunk-XY337TTF.js → chunk-GW7BNMGA.js} +4 -4
  37. package/dist/{chunk-XY337TTF.js.map → chunk-GW7BNMGA.js.map} +1 -1
  38. package/dist/{chunk-OPALPWAM.js → chunk-HDMH5IDV.js} +42 -5
  39. package/dist/chunk-HDMH5IDV.js.map +1 -0
  40. package/dist/{chunk-NN3NOFRU.cjs → chunk-HP6TRJ2Q.cjs} +207 -106
  41. package/dist/chunk-HP6TRJ2Q.cjs.map +1 -0
  42. package/dist/{chunk-3TKNXJES.cjs → chunk-KBXFAF37.cjs} +46 -6
  43. package/dist/chunk-KBXFAF37.cjs.map +1 -0
  44. package/dist/{chunk-7ZADRRDW.js → chunk-PK2A5WBG.js} +3 -3
  45. package/dist/chunk-PK2A5WBG.js.map +1 -0
  46. package/dist/{chunk-DXMSVGJ6.js → chunk-QDIYYAA4.js} +28 -59
  47. package/dist/chunk-QDIYYAA4.js.map +1 -0
  48. package/dist/{chunk-TARWUQG6.js → chunk-QHJ7YVNX.js} +8 -8
  49. package/dist/{chunk-TARWUQG6.js.map → chunk-QHJ7YVNX.js.map} +1 -1
  50. package/dist/{chunk-IWTMMUQY.cjs → chunk-QYNZAPFE.cjs} +4 -4
  51. package/dist/{chunk-IWTMMUQY.cjs.map → chunk-QYNZAPFE.cjs.map} +1 -1
  52. package/dist/{chunk-TS7B26US.js → chunk-RWT3XPKO.js} +3 -3
  53. package/dist/{chunk-TS7B26US.js.map → chunk-RWT3XPKO.js.map} +1 -1
  54. package/dist/{chunk-LDWBLENZ.cjs → chunk-SD2BDUJ6.cjs} +28 -32
  55. package/dist/chunk-SD2BDUJ6.cjs.map +1 -0
  56. package/dist/{chunk-NIUEWICQ.js → chunk-SPIUKQDK.js} +8 -8
  57. package/dist/{chunk-NIUEWICQ.js.map → chunk-SPIUKQDK.js.map} +1 -1
  58. package/dist/chunk-TLLXRG2Z.js +4959 -0
  59. package/dist/chunk-TLLXRG2Z.js.map +1 -0
  60. package/dist/{chunk-SVLMF4UZ.cjs → chunk-TQB2HMEC.cjs} +45 -44
  61. package/dist/chunk-TQB2HMEC.cjs.map +1 -0
  62. package/dist/{chunk-KZLP6TPJ.js → chunk-URBPEE67.js} +4 -4
  63. package/dist/{chunk-KZLP6TPJ.js.map → chunk-URBPEE67.js.map} +1 -1
  64. package/dist/{chunk-6J4NRNT2.cjs → chunk-VWC5AUOQ.cjs} +29 -60
  65. package/dist/chunk-VWC5AUOQ.cjs.map +1 -0
  66. package/dist/{chunk-4KUOSXJ7.cjs → chunk-Y5SFNZUK.cjs} +11 -11
  67. package/dist/{chunk-4KUOSXJ7.cjs.map → chunk-Y5SFNZUK.cjs.map} +1 -1
  68. package/dist/{chunk-OIMB2SNZ.cjs → chunk-Y7YCQDML.cjs} +14 -14
  69. package/dist/{chunk-OIMB2SNZ.cjs.map → chunk-Y7YCQDML.cjs.map} +1 -1
  70. package/dist/{chunk-VRFSEZBA.cjs → chunk-YBEW5YWC.cjs} +6 -6
  71. package/dist/{chunk-VRFSEZBA.cjs.map → chunk-YBEW5YWC.cjs.map} +1 -1
  72. package/dist/{chunk-SQAX4OW6.cjs → chunk-YUXTDKYN.cjs} +15 -15
  73. package/dist/{chunk-SQAX4OW6.cjs.map → chunk-YUXTDKYN.cjs.map} +1 -1
  74. package/dist/{chunk-RHTV5C5D.cjs → chunk-YWMMBIOM.cjs} +9 -9
  75. package/dist/{chunk-RHTV5C5D.cjs.map → chunk-YWMMBIOM.cjs.map} +1 -1
  76. package/dist/{chunk-WMAMOYRR.js → chunk-Z57R5WS4.js} +4 -4
  77. package/dist/{chunk-WMAMOYRR.js.map → chunk-Z57R5WS4.js.map} +1 -1
  78. package/dist/{chunk-QM5SRDJX.js → chunk-ZUWJCGLM.js} +5 -4
  79. package/dist/chunk-ZUWJCGLM.js.map +1 -0
  80. package/dist/evals/index.cjs +9 -9
  81. package/dist/evals/index.js +2 -2
  82. package/dist/evals/scoreTraces/index.cjs +5 -5
  83. package/dist/evals/scoreTraces/index.js +2 -2
  84. package/dist/evals/types.d.ts +19 -18
  85. package/dist/evals/types.d.ts.map +1 -1
  86. package/dist/index.cjs +2 -2
  87. package/dist/index.js +1 -1
  88. package/dist/llm/index.cjs +15 -15
  89. package/dist/llm/index.js +5 -5
  90. package/dist/llm/model/gateways/constants.d.ts.map +1 -1
  91. package/dist/llm/model/gateways/models-dev.d.ts.map +1 -1
  92. package/dist/llm/model/model.loop.d.ts +1 -1
  93. package/dist/llm/model/model.loop.d.ts.map +1 -1
  94. package/dist/llm/model/provider-options.d.ts +4 -1
  95. package/dist/llm/model/provider-options.d.ts.map +1 -1
  96. package/dist/llm/model/provider-types.generated.d.ts +12 -22
  97. package/dist/loop/index.cjs +2 -2
  98. package/dist/loop/index.js +1 -1
  99. package/dist/loop/workflows/agentic-execution/llm-execution-step.d.ts.map +1 -1
  100. package/dist/loop/workflows/stream.d.ts.map +1 -1
  101. package/dist/mastra/index.cjs +2 -2
  102. package/dist/mastra/index.js +1 -1
  103. package/dist/memory/index.cjs +6 -6
  104. package/dist/memory/index.js +1 -1
  105. package/dist/models-dev-6GD3644V.js +3 -0
  106. package/dist/{models-dev-F6MTIYTO.js.map → models-dev-6GD3644V.js.map} +1 -1
  107. package/dist/models-dev-IUQBXJSS.cjs +12 -0
  108. package/dist/{models-dev-XIVR5EJV.cjs.map → models-dev-IUQBXJSS.cjs.map} +1 -1
  109. package/dist/netlify-GXJ5D5DD.js +3 -0
  110. package/dist/{netlify-C2YBIK7A.js.map → netlify-GXJ5D5DD.js.map} +1 -1
  111. package/dist/netlify-KJLY3GFS.cjs +12 -0
  112. package/dist/{netlify-XWROOTP4.cjs.map → netlify-KJLY3GFS.cjs.map} +1 -1
  113. package/dist/observability/index.cjs +10 -10
  114. package/dist/observability/index.js +1 -1
  115. package/dist/observability/types/tracing.d.ts +18 -0
  116. package/dist/observability/types/tracing.d.ts.map +1 -1
  117. package/dist/processors/index.cjs +17 -17
  118. package/dist/processors/index.js +1 -1
  119. package/dist/provider-registry-A5QZFI2X.js +3 -0
  120. package/dist/{provider-registry-GRYJWVEM.js.map → provider-registry-A5QZFI2X.js.map} +1 -1
  121. package/dist/provider-registry-V32PGH6F.cjs +40 -0
  122. package/dist/{provider-registry-BXX7CA3H.cjs.map → provider-registry-V32PGH6F.cjs.map} +1 -1
  123. package/dist/provider-registry.json +24 -55
  124. package/dist/relevance/index.cjs +2 -2
  125. package/dist/relevance/index.js +1 -1
  126. package/dist/storage/base.d.ts +22 -1
  127. package/dist/storage/base.d.ts.map +1 -1
  128. package/dist/storage/domains/scores/base.d.ts +2 -2
  129. package/dist/storage/domains/scores/base.d.ts.map +1 -1
  130. package/dist/storage/domains/scores/inmemory.d.ts +2 -2
  131. package/dist/storage/domains/scores/inmemory.d.ts.map +1 -1
  132. package/dist/storage/index.cjs +43 -31
  133. package/dist/storage/index.js +1 -1
  134. package/dist/storage/mock.d.ts +2 -2
  135. package/dist/storage/mock.d.ts.map +1 -1
  136. package/dist/storage/storageWithInit.d.ts.map +1 -1
  137. package/dist/storage/utils.d.ts +38 -0
  138. package/dist/storage/utils.d.ts.map +1 -1
  139. package/dist/stream/aisdk/v5/execute.d.ts +3 -2
  140. package/dist/stream/aisdk/v5/execute.d.ts.map +1 -1
  141. package/dist/stream/aisdk/v5/input.d.ts +4 -1
  142. package/dist/stream/aisdk/v5/input.d.ts.map +1 -1
  143. package/dist/stream/index.cjs +11 -11
  144. package/dist/stream/index.js +2 -2
  145. package/dist/test-utils/llm-mock.cjs +13 -13
  146. package/dist/test-utils/llm-mock.cjs.map +1 -1
  147. package/dist/test-utils/llm-mock.js +6 -6
  148. package/dist/test-utils/llm-mock.js.map +1 -1
  149. package/dist/tools/index.cjs +2 -2
  150. package/dist/tools/index.js +1 -1
  151. package/dist/tools/stream.d.ts +17 -1
  152. package/dist/tools/stream.d.ts.map +1 -1
  153. package/dist/tools/types.d.ts +2 -2
  154. package/dist/tools/types.d.ts.map +1 -1
  155. package/dist/utils.cjs +22 -22
  156. package/dist/utils.js +1 -1
  157. package/dist/vector/index.cjs +2 -2
  158. package/dist/vector/index.js +1 -1
  159. package/dist/workflows/default.d.ts +2 -2
  160. package/dist/workflows/default.d.ts.map +1 -1
  161. package/dist/workflows/evented/index.cjs +10 -10
  162. package/dist/workflows/evented/index.js +1 -1
  163. package/dist/workflows/evented/step-executor.d.ts.map +1 -1
  164. package/dist/workflows/handlers/control-flow.d.ts.map +1 -1
  165. package/dist/workflows/handlers/step.d.ts.map +1 -1
  166. package/dist/workflows/index.cjs +24 -20
  167. package/dist/workflows/index.js +1 -1
  168. package/dist/workflows/step.d.ts +5 -5
  169. package/dist/workflows/step.d.ts.map +1 -1
  170. package/dist/workflows/types.d.ts +1 -0
  171. package/dist/workflows/types.d.ts.map +1 -1
  172. package/dist/workflows/utils.d.ts +10 -1
  173. package/dist/workflows/utils.d.ts.map +1 -1
  174. package/dist/workflows/workflow.d.ts +1 -1
  175. package/dist/workflows/workflow.d.ts.map +1 -1
  176. package/package.json +5 -3
  177. package/src/llm/model/provider-types.generated.d.ts +12 -22
  178. package/dist/chunk-3TKNXJES.cjs.map +0 -1
  179. package/dist/chunk-45NA5ZD3.js.map +0 -1
  180. package/dist/chunk-6GOLI2EY.js.map +0 -1
  181. package/dist/chunk-6J4NRNT2.cjs.map +0 -1
  182. package/dist/chunk-7ZADRRDW.js.map +0 -1
  183. package/dist/chunk-AQAIS7FH.cjs.map +0 -1
  184. package/dist/chunk-DXMSVGJ6.js.map +0 -1
  185. package/dist/chunk-FYP3AT6W.js.map +0 -1
  186. package/dist/chunk-HNHZGFZY.cjs.map +0 -1
  187. package/dist/chunk-LDWBLENZ.cjs.map +0 -1
  188. package/dist/chunk-LEZIKVRQ.js.map +0 -1
  189. package/dist/chunk-NHNXIYDG.cjs.map +0 -1
  190. package/dist/chunk-NN3NOFRU.cjs.map +0 -1
  191. package/dist/chunk-OPALPWAM.js.map +0 -1
  192. package/dist/chunk-QGWNF2QJ.cjs +0 -1697
  193. package/dist/chunk-QGWNF2QJ.cjs.map +0 -1
  194. package/dist/chunk-QM5SRDJX.js.map +0 -1
  195. package/dist/chunk-SVLMF4UZ.cjs.map +0 -1
  196. package/dist/chunk-T2UNO766.js +0 -1694
  197. package/dist/chunk-T2UNO766.js.map +0 -1
  198. package/dist/models-dev-F6MTIYTO.js +0 -3
  199. package/dist/models-dev-XIVR5EJV.cjs +0 -12
  200. package/dist/netlify-C2YBIK7A.js +0 -3
  201. package/dist/netlify-XWROOTP4.cjs +0 -12
  202. package/dist/provider-registry-BXX7CA3H.cjs +0 -40
  203. package/dist/provider-registry-GRYJWVEM.js +0 -3
@@ -1,1697 +0,0 @@
1
- 'use strict';
2
-
3
- var chunkHNHZGFZY_cjs = require('./chunk-HNHZGFZY.cjs');
4
- var v4 = require('zod/v4');
5
- var aiSdkProviderV5 = require('@openrouter/ai-sdk-provider-v5');
6
-
7
- function convertToMistralChatMessages(prompt) {
8
- const messages = [];
9
- for (let i = 0; i < prompt.length; i++) {
10
- const { role, content } = prompt[i];
11
- const isLastMessage = i === prompt.length - 1;
12
- switch (role) {
13
- case "system": {
14
- messages.push({ role: "system", content });
15
- break;
16
- }
17
- case "user": {
18
- messages.push({
19
- role: "user",
20
- content: content.map((part) => {
21
- switch (part.type) {
22
- case "text": {
23
- return { type: "text", text: part.text };
24
- }
25
- case "file": {
26
- if (part.mediaType.startsWith("image/")) {
27
- const mediaType = part.mediaType === "image/*" ? "image/jpeg" : part.mediaType;
28
- return {
29
- type: "image_url",
30
- image_url: part.data instanceof URL ? part.data.toString() : `data:${mediaType};base64,${chunkHNHZGFZY_cjs.convertToBase64(part.data)}`
31
- };
32
- } else if (part.mediaType === "application/pdf") {
33
- return {
34
- type: "document_url",
35
- document_url: part.data.toString()
36
- };
37
- } else {
38
- throw new chunkHNHZGFZY_cjs.UnsupportedFunctionalityError({
39
- functionality: "Only images and PDF file parts are supported"
40
- });
41
- }
42
- }
43
- }
44
- })
45
- });
46
- break;
47
- }
48
- case "assistant": {
49
- let text = "";
50
- const toolCalls = [];
51
- for (const part of content) {
52
- switch (part.type) {
53
- case "text": {
54
- text += part.text;
55
- break;
56
- }
57
- case "tool-call": {
58
- toolCalls.push({
59
- id: part.toolCallId,
60
- type: "function",
61
- function: {
62
- name: part.toolName,
63
- arguments: JSON.stringify(part.input)
64
- }
65
- });
66
- break;
67
- }
68
- case "reasoning": {
69
- text += part.text;
70
- break;
71
- }
72
- default: {
73
- throw new Error(
74
- `Unsupported content type in assistant message: ${part.type}`
75
- );
76
- }
77
- }
78
- }
79
- messages.push({
80
- role: "assistant",
81
- content: text,
82
- prefix: isLastMessage ? true : void 0,
83
- tool_calls: toolCalls.length > 0 ? toolCalls : void 0
84
- });
85
- break;
86
- }
87
- case "tool": {
88
- for (const toolResponse of content) {
89
- const output = toolResponse.output;
90
- let contentValue;
91
- switch (output.type) {
92
- case "text":
93
- case "error-text":
94
- contentValue = output.value;
95
- break;
96
- case "content":
97
- case "json":
98
- case "error-json":
99
- contentValue = JSON.stringify(output.value);
100
- break;
101
- }
102
- messages.push({
103
- role: "tool",
104
- name: toolResponse.toolName,
105
- tool_call_id: toolResponse.toolCallId,
106
- content: contentValue
107
- });
108
- }
109
- break;
110
- }
111
- default: {
112
- const _exhaustiveCheck = role;
113
- throw new Error(`Unsupported role: ${_exhaustiveCheck}`);
114
- }
115
- }
116
- }
117
- return messages;
118
- }
119
- function getResponseMetadata({
120
- id,
121
- model,
122
- created
123
- }) {
124
- return {
125
- id: id != null ? id : void 0,
126
- modelId: model != null ? model : void 0,
127
- timestamp: created != null ? new Date(created * 1e3) : void 0
128
- };
129
- }
130
- function mapMistralFinishReason(finishReason) {
131
- switch (finishReason) {
132
- case "stop":
133
- return "stop";
134
- case "length":
135
- case "model_length":
136
- return "length";
137
- case "tool_calls":
138
- return "tool-calls";
139
- default:
140
- return "unknown";
141
- }
142
- }
143
- var mistralLanguageModelOptions = v4.z.object({
144
- /**
145
- Whether to inject a safety prompt before all conversations.
146
-
147
- Defaults to `false`.
148
- */
149
- safePrompt: v4.z.boolean().optional(),
150
- documentImageLimit: v4.z.number().optional(),
151
- documentPageLimit: v4.z.number().optional(),
152
- /**
153
- * Whether to use structured outputs.
154
- *
155
- * @default true
156
- */
157
- structuredOutputs: v4.z.boolean().optional(),
158
- /**
159
- * Whether to use strict JSON schema validation.
160
- *
161
- * @default false
162
- */
163
- strictJsonSchema: v4.z.boolean().optional(),
164
- /**
165
- * Whether to enable parallel function calling during tool use.
166
- * When set to false, the model will use at most one tool per response.
167
- *
168
- * @default true
169
- */
170
- parallelToolCalls: v4.z.boolean().optional()
171
- });
172
- var mistralErrorDataSchema = v4.z.object({
173
- object: v4.z.literal("error"),
174
- message: v4.z.string(),
175
- type: v4.z.string(),
176
- param: v4.z.string().nullable(),
177
- code: v4.z.string().nullable()
178
- });
179
- var mistralFailedResponseHandler = chunkHNHZGFZY_cjs.createJsonErrorResponseHandler({
180
- errorSchema: mistralErrorDataSchema,
181
- errorToMessage: (data) => data.message
182
- });
183
- function prepareTools({
184
- tools,
185
- toolChoice
186
- }) {
187
- tools = (tools == null ? void 0 : tools.length) ? tools : void 0;
188
- const toolWarnings = [];
189
- if (tools == null) {
190
- return { tools: void 0, toolChoice: void 0, toolWarnings };
191
- }
192
- const mistralTools = [];
193
- for (const tool of tools) {
194
- if (tool.type === "provider-defined") {
195
- toolWarnings.push({ type: "unsupported-tool", tool });
196
- } else {
197
- mistralTools.push({
198
- type: "function",
199
- function: {
200
- name: tool.name,
201
- description: tool.description,
202
- parameters: tool.inputSchema
203
- }
204
- });
205
- }
206
- }
207
- if (toolChoice == null) {
208
- return { tools: mistralTools, toolChoice: void 0, toolWarnings };
209
- }
210
- const type = toolChoice.type;
211
- switch (type) {
212
- case "auto":
213
- case "none":
214
- return { tools: mistralTools, toolChoice: type, toolWarnings };
215
- case "required":
216
- return { tools: mistralTools, toolChoice: "any", toolWarnings };
217
- // mistral does not support tool mode directly,
218
- // so we filter the tools and force the tool choice through 'any'
219
- case "tool":
220
- return {
221
- tools: mistralTools.filter(
222
- (tool) => tool.function.name === toolChoice.toolName
223
- ),
224
- toolChoice: "any",
225
- toolWarnings
226
- };
227
- default: {
228
- const _exhaustiveCheck = type;
229
- throw new chunkHNHZGFZY_cjs.UnsupportedFunctionalityError({
230
- functionality: `tool choice type: ${_exhaustiveCheck}`
231
- });
232
- }
233
- }
234
- }
235
- var MistralChatLanguageModel = class {
236
- constructor(modelId, config) {
237
- this.specificationVersion = "v2";
238
- this.supportedUrls = {
239
- "application/pdf": [/^https:\/\/.*$/]
240
- };
241
- var _a;
242
- this.modelId = modelId;
243
- this.config = config;
244
- this.generateId = (_a = config.generateId) != null ? _a : chunkHNHZGFZY_cjs.generateId;
245
- }
246
- get provider() {
247
- return this.config.provider;
248
- }
249
- async getArgs({
250
- prompt,
251
- maxOutputTokens,
252
- temperature,
253
- topP,
254
- topK,
255
- frequencyPenalty,
256
- presencePenalty,
257
- stopSequences,
258
- responseFormat,
259
- seed,
260
- providerOptions,
261
- tools,
262
- toolChoice
263
- }) {
264
- var _a, _b, _c, _d;
265
- const warnings = [];
266
- const options = (_a = await chunkHNHZGFZY_cjs.parseProviderOptions({
267
- provider: "mistral",
268
- providerOptions,
269
- schema: mistralLanguageModelOptions
270
- })) != null ? _a : {};
271
- if (topK != null) {
272
- warnings.push({
273
- type: "unsupported-setting",
274
- setting: "topK"
275
- });
276
- }
277
- if (frequencyPenalty != null) {
278
- warnings.push({
279
- type: "unsupported-setting",
280
- setting: "frequencyPenalty"
281
- });
282
- }
283
- if (presencePenalty != null) {
284
- warnings.push({
285
- type: "unsupported-setting",
286
- setting: "presencePenalty"
287
- });
288
- }
289
- if (stopSequences != null) {
290
- warnings.push({
291
- type: "unsupported-setting",
292
- setting: "stopSequences"
293
- });
294
- }
295
- const structuredOutputs = (_b = options.structuredOutputs) != null ? _b : true;
296
- const strictJsonSchema = (_c = options.strictJsonSchema) != null ? _c : false;
297
- if ((responseFormat == null ? void 0 : responseFormat.type) === "json" && !(responseFormat == null ? void 0 : responseFormat.schema)) {
298
- prompt = chunkHNHZGFZY_cjs.injectJsonInstructionIntoMessages({
299
- messages: prompt,
300
- schema: responseFormat.schema
301
- });
302
- }
303
- const baseArgs = {
304
- // model id:
305
- model: this.modelId,
306
- // model specific settings:
307
- safe_prompt: options.safePrompt,
308
- // standardized settings:
309
- max_tokens: maxOutputTokens,
310
- temperature,
311
- top_p: topP,
312
- random_seed: seed,
313
- // response format:
314
- response_format: (responseFormat == null ? void 0 : responseFormat.type) === "json" ? structuredOutputs && (responseFormat == null ? void 0 : responseFormat.schema) != null ? {
315
- type: "json_schema",
316
- json_schema: {
317
- schema: responseFormat.schema,
318
- strict: strictJsonSchema,
319
- name: (_d = responseFormat.name) != null ? _d : "response",
320
- description: responseFormat.description
321
- }
322
- } : { type: "json_object" } : void 0,
323
- // mistral-specific provider options:
324
- document_image_limit: options.documentImageLimit,
325
- document_page_limit: options.documentPageLimit,
326
- // messages:
327
- messages: convertToMistralChatMessages(prompt)
328
- };
329
- const {
330
- tools: mistralTools,
331
- toolChoice: mistralToolChoice,
332
- toolWarnings
333
- } = prepareTools({
334
- tools,
335
- toolChoice
336
- });
337
- return {
338
- args: {
339
- ...baseArgs,
340
- tools: mistralTools,
341
- tool_choice: mistralToolChoice,
342
- ...mistralTools != null && options.parallelToolCalls !== void 0 ? { parallel_tool_calls: options.parallelToolCalls } : {}
343
- },
344
- warnings: [...warnings, ...toolWarnings]
345
- };
346
- }
347
- async doGenerate(options) {
348
- const { args: body, warnings } = await this.getArgs(options);
349
- const {
350
- responseHeaders,
351
- value: response,
352
- rawValue: rawResponse
353
- } = await chunkHNHZGFZY_cjs.postJsonToApi({
354
- url: `${this.config.baseURL}/chat/completions`,
355
- headers: chunkHNHZGFZY_cjs.combineHeaders(this.config.headers(), options.headers),
356
- body,
357
- failedResponseHandler: mistralFailedResponseHandler,
358
- successfulResponseHandler: chunkHNHZGFZY_cjs.createJsonResponseHandler(
359
- mistralChatResponseSchema
360
- ),
361
- abortSignal: options.abortSignal,
362
- fetch: this.config.fetch
363
- });
364
- const choice = response.choices[0];
365
- const content = [];
366
- if (choice.message.content != null && Array.isArray(choice.message.content)) {
367
- for (const part of choice.message.content) {
368
- if (part.type === "thinking") {
369
- const reasoningText = extractReasoningContent(part.thinking);
370
- if (reasoningText.length > 0) {
371
- content.push({ type: "reasoning", text: reasoningText });
372
- }
373
- } else if (part.type === "text") {
374
- if (part.text.length > 0) {
375
- content.push({ type: "text", text: part.text });
376
- }
377
- }
378
- }
379
- } else {
380
- const text = extractTextContent(choice.message.content);
381
- if (text != null && text.length > 0) {
382
- content.push({ type: "text", text });
383
- }
384
- }
385
- if (choice.message.tool_calls != null) {
386
- for (const toolCall of choice.message.tool_calls) {
387
- content.push({
388
- type: "tool-call",
389
- toolCallId: toolCall.id,
390
- toolName: toolCall.function.name,
391
- input: toolCall.function.arguments
392
- });
393
- }
394
- }
395
- return {
396
- content,
397
- finishReason: mapMistralFinishReason(choice.finish_reason),
398
- usage: {
399
- inputTokens: response.usage.prompt_tokens,
400
- outputTokens: response.usage.completion_tokens,
401
- totalTokens: response.usage.total_tokens
402
- },
403
- request: { body },
404
- response: {
405
- ...getResponseMetadata(response),
406
- headers: responseHeaders,
407
- body: rawResponse
408
- },
409
- warnings
410
- };
411
- }
412
- async doStream(options) {
413
- const { args, warnings } = await this.getArgs(options);
414
- const body = { ...args, stream: true };
415
- const { responseHeaders, value: response } = await chunkHNHZGFZY_cjs.postJsonToApi({
416
- url: `${this.config.baseURL}/chat/completions`,
417
- headers: chunkHNHZGFZY_cjs.combineHeaders(this.config.headers(), options.headers),
418
- body,
419
- failedResponseHandler: mistralFailedResponseHandler,
420
- successfulResponseHandler: chunkHNHZGFZY_cjs.createEventSourceResponseHandler(
421
- mistralChatChunkSchema
422
- ),
423
- abortSignal: options.abortSignal,
424
- fetch: this.config.fetch
425
- });
426
- let finishReason = "unknown";
427
- const usage = {
428
- inputTokens: void 0,
429
- outputTokens: void 0,
430
- totalTokens: void 0
431
- };
432
- let isFirstChunk = true;
433
- let activeText = false;
434
- let activeReasoningId = null;
435
- const generateId2 = this.generateId;
436
- return {
437
- stream: response.pipeThrough(
438
- new TransformStream({
439
- start(controller) {
440
- controller.enqueue({ type: "stream-start", warnings });
441
- },
442
- transform(chunk, controller) {
443
- if (options.includeRawChunks) {
444
- controller.enqueue({ type: "raw", rawValue: chunk.rawValue });
445
- }
446
- if (!chunk.success) {
447
- controller.enqueue({ type: "error", error: chunk.error });
448
- return;
449
- }
450
- const value = chunk.value;
451
- if (isFirstChunk) {
452
- isFirstChunk = false;
453
- controller.enqueue({
454
- type: "response-metadata",
455
- ...getResponseMetadata(value)
456
- });
457
- }
458
- if (value.usage != null) {
459
- usage.inputTokens = value.usage.prompt_tokens;
460
- usage.outputTokens = value.usage.completion_tokens;
461
- usage.totalTokens = value.usage.total_tokens;
462
- }
463
- const choice = value.choices[0];
464
- const delta = choice.delta;
465
- const textContent = extractTextContent(delta.content);
466
- if (delta.content != null && Array.isArray(delta.content)) {
467
- for (const part of delta.content) {
468
- if (part.type === "thinking") {
469
- const reasoningDelta = extractReasoningContent(part.thinking);
470
- if (reasoningDelta.length > 0) {
471
- if (activeReasoningId == null) {
472
- if (activeText) {
473
- controller.enqueue({ type: "text-end", id: "0" });
474
- activeText = false;
475
- }
476
- activeReasoningId = generateId2();
477
- controller.enqueue({
478
- type: "reasoning-start",
479
- id: activeReasoningId
480
- });
481
- }
482
- controller.enqueue({
483
- type: "reasoning-delta",
484
- id: activeReasoningId,
485
- delta: reasoningDelta
486
- });
487
- }
488
- }
489
- }
490
- }
491
- if (textContent != null && textContent.length > 0) {
492
- if (!activeText) {
493
- if (activeReasoningId != null) {
494
- controller.enqueue({
495
- type: "reasoning-end",
496
- id: activeReasoningId
497
- });
498
- activeReasoningId = null;
499
- }
500
- controller.enqueue({ type: "text-start", id: "0" });
501
- activeText = true;
502
- }
503
- controller.enqueue({
504
- type: "text-delta",
505
- id: "0",
506
- delta: textContent
507
- });
508
- }
509
- if ((delta == null ? void 0 : delta.tool_calls) != null) {
510
- for (const toolCall of delta.tool_calls) {
511
- const toolCallId = toolCall.id;
512
- const toolName = toolCall.function.name;
513
- const input = toolCall.function.arguments;
514
- controller.enqueue({
515
- type: "tool-input-start",
516
- id: toolCallId,
517
- toolName
518
- });
519
- controller.enqueue({
520
- type: "tool-input-delta",
521
- id: toolCallId,
522
- delta: input
523
- });
524
- controller.enqueue({
525
- type: "tool-input-end",
526
- id: toolCallId
527
- });
528
- controller.enqueue({
529
- type: "tool-call",
530
- toolCallId,
531
- toolName,
532
- input
533
- });
534
- }
535
- }
536
- if (choice.finish_reason != null) {
537
- finishReason = mapMistralFinishReason(choice.finish_reason);
538
- }
539
- },
540
- flush(controller) {
541
- if (activeReasoningId != null) {
542
- controller.enqueue({
543
- type: "reasoning-end",
544
- id: activeReasoningId
545
- });
546
- }
547
- if (activeText) {
548
- controller.enqueue({ type: "text-end", id: "0" });
549
- }
550
- controller.enqueue({
551
- type: "finish",
552
- finishReason,
553
- usage
554
- });
555
- }
556
- })
557
- ),
558
- request: { body },
559
- response: { headers: responseHeaders }
560
- };
561
- }
562
- };
563
- function extractReasoningContent(thinking) {
564
- return thinking.filter((chunk) => chunk.type === "text").map((chunk) => chunk.text).join("");
565
- }
566
- function extractTextContent(content) {
567
- if (typeof content === "string") {
568
- return content;
569
- }
570
- if (content == null) {
571
- return void 0;
572
- }
573
- const textContent = [];
574
- for (const chunk of content) {
575
- const { type } = chunk;
576
- switch (type) {
577
- case "text":
578
- textContent.push(chunk.text);
579
- break;
580
- case "thinking":
581
- case "image_url":
582
- case "reference":
583
- break;
584
- default: {
585
- const _exhaustiveCheck = type;
586
- throw new Error(`Unsupported type: ${_exhaustiveCheck}`);
587
- }
588
- }
589
- }
590
- return textContent.length ? textContent.join("") : void 0;
591
- }
592
- var mistralContentSchema = v4.z.union([
593
- v4.z.string(),
594
- v4.z.array(
595
- v4.z.discriminatedUnion("type", [
596
- v4.z.object({
597
- type: v4.z.literal("text"),
598
- text: v4.z.string()
599
- }),
600
- v4.z.object({
601
- type: v4.z.literal("image_url"),
602
- image_url: v4.z.union([
603
- v4.z.string(),
604
- v4.z.object({
605
- url: v4.z.string(),
606
- detail: v4.z.string().nullable()
607
- })
608
- ])
609
- }),
610
- v4.z.object({
611
- type: v4.z.literal("reference"),
612
- reference_ids: v4.z.array(v4.z.number())
613
- }),
614
- v4.z.object({
615
- type: v4.z.literal("thinking"),
616
- thinking: v4.z.array(
617
- v4.z.object({
618
- type: v4.z.literal("text"),
619
- text: v4.z.string()
620
- })
621
- )
622
- })
623
- ])
624
- )
625
- ]).nullish();
626
- var mistralUsageSchema = v4.z.object({
627
- prompt_tokens: v4.z.number(),
628
- completion_tokens: v4.z.number(),
629
- total_tokens: v4.z.number()
630
- });
631
- var mistralChatResponseSchema = v4.z.object({
632
- id: v4.z.string().nullish(),
633
- created: v4.z.number().nullish(),
634
- model: v4.z.string().nullish(),
635
- choices: v4.z.array(
636
- v4.z.object({
637
- message: v4.z.object({
638
- role: v4.z.literal("assistant"),
639
- content: mistralContentSchema,
640
- tool_calls: v4.z.array(
641
- v4.z.object({
642
- id: v4.z.string(),
643
- function: v4.z.object({ name: v4.z.string(), arguments: v4.z.string() })
644
- })
645
- ).nullish()
646
- }),
647
- index: v4.z.number(),
648
- finish_reason: v4.z.string().nullish()
649
- })
650
- ),
651
- object: v4.z.literal("chat.completion"),
652
- usage: mistralUsageSchema
653
- });
654
- var mistralChatChunkSchema = v4.z.object({
655
- id: v4.z.string().nullish(),
656
- created: v4.z.number().nullish(),
657
- model: v4.z.string().nullish(),
658
- choices: v4.z.array(
659
- v4.z.object({
660
- delta: v4.z.object({
661
- role: v4.z.enum(["assistant"]).optional(),
662
- content: mistralContentSchema,
663
- tool_calls: v4.z.array(
664
- v4.z.object({
665
- id: v4.z.string(),
666
- function: v4.z.object({ name: v4.z.string(), arguments: v4.z.string() })
667
- })
668
- ).nullish()
669
- }),
670
- finish_reason: v4.z.string().nullish(),
671
- index: v4.z.number()
672
- })
673
- ),
674
- usage: mistralUsageSchema.nullish()
675
- });
676
- var MistralEmbeddingModel = class {
677
- constructor(modelId, config) {
678
- this.specificationVersion = "v2";
679
- this.maxEmbeddingsPerCall = 32;
680
- this.supportsParallelCalls = false;
681
- this.modelId = modelId;
682
- this.config = config;
683
- }
684
- get provider() {
685
- return this.config.provider;
686
- }
687
- async doEmbed({
688
- values,
689
- abortSignal,
690
- headers
691
- }) {
692
- if (values.length > this.maxEmbeddingsPerCall) {
693
- throw new chunkHNHZGFZY_cjs.TooManyEmbeddingValuesForCallError({
694
- provider: this.provider,
695
- modelId: this.modelId,
696
- maxEmbeddingsPerCall: this.maxEmbeddingsPerCall,
697
- values
698
- });
699
- }
700
- const {
701
- responseHeaders,
702
- value: response,
703
- rawValue
704
- } = await chunkHNHZGFZY_cjs.postJsonToApi({
705
- url: `${this.config.baseURL}/embeddings`,
706
- headers: chunkHNHZGFZY_cjs.combineHeaders(this.config.headers(), headers),
707
- body: {
708
- model: this.modelId,
709
- input: values,
710
- encoding_format: "float"
711
- },
712
- failedResponseHandler: mistralFailedResponseHandler,
713
- successfulResponseHandler: chunkHNHZGFZY_cjs.createJsonResponseHandler(
714
- MistralTextEmbeddingResponseSchema
715
- ),
716
- abortSignal,
717
- fetch: this.config.fetch
718
- });
719
- return {
720
- embeddings: response.data.map((item) => item.embedding),
721
- usage: response.usage ? { tokens: response.usage.prompt_tokens } : void 0,
722
- response: { headers: responseHeaders, body: rawValue }
723
- };
724
- }
725
- };
726
- var MistralTextEmbeddingResponseSchema = v4.z.object({
727
- data: v4.z.array(v4.z.object({ embedding: v4.z.array(v4.z.number()) })),
728
- usage: v4.z.object({ prompt_tokens: v4.z.number() }).nullish()
729
- });
730
- var VERSION = "2.0.24" ;
731
- function createMistral(options = {}) {
732
- var _a;
733
- const baseURL = (_a = chunkHNHZGFZY_cjs.withoutTrailingSlash(options.baseURL)) != null ? _a : "https://api.mistral.ai/v1";
734
- const getHeaders = () => chunkHNHZGFZY_cjs.withUserAgentSuffix(
735
- {
736
- Authorization: `Bearer ${chunkHNHZGFZY_cjs.loadApiKey({
737
- apiKey: options.apiKey,
738
- environmentVariableName: "MISTRAL_API_KEY",
739
- description: "Mistral"
740
- })}`,
741
- ...options.headers
742
- },
743
- `ai-sdk/mistral/${VERSION}`
744
- );
745
- const createChatModel = (modelId) => new MistralChatLanguageModel(modelId, {
746
- provider: "mistral.chat",
747
- baseURL,
748
- headers: getHeaders,
749
- fetch: options.fetch,
750
- generateId: options.generateId
751
- });
752
- const createEmbeddingModel = (modelId) => new MistralEmbeddingModel(modelId, {
753
- provider: "mistral.embedding",
754
- baseURL,
755
- headers: getHeaders,
756
- fetch: options.fetch
757
- });
758
- const provider = function(modelId) {
759
- if (new.target) {
760
- throw new Error(
761
- "The Mistral model function cannot be called with the new keyword."
762
- );
763
- }
764
- return createChatModel(modelId);
765
- };
766
- provider.languageModel = createChatModel;
767
- provider.chat = createChatModel;
768
- provider.embedding = createEmbeddingModel;
769
- provider.textEmbedding = createEmbeddingModel;
770
- provider.textEmbeddingModel = createEmbeddingModel;
771
- provider.imageModel = (modelId) => {
772
- throw new chunkHNHZGFZY_cjs.NoSuchModelError({ modelId, modelType: "imageModel" });
773
- };
774
- return provider;
775
- }
776
- createMistral();
777
- function convertToXaiChatMessages(prompt) {
778
- const messages = [];
779
- const warnings = [];
780
- for (const { role, content } of prompt) {
781
- switch (role) {
782
- case "system": {
783
- messages.push({ role: "system", content });
784
- break;
785
- }
786
- case "user": {
787
- if (content.length === 1 && content[0].type === "text") {
788
- messages.push({ role: "user", content: content[0].text });
789
- break;
790
- }
791
- messages.push({
792
- role: "user",
793
- content: content.map((part) => {
794
- switch (part.type) {
795
- case "text": {
796
- return { type: "text", text: part.text };
797
- }
798
- case "file": {
799
- if (part.mediaType.startsWith("image/")) {
800
- const mediaType = part.mediaType === "image/*" ? "image/jpeg" : part.mediaType;
801
- return {
802
- type: "image_url",
803
- image_url: {
804
- url: part.data instanceof URL ? part.data.toString() : `data:${mediaType};base64,${chunkHNHZGFZY_cjs.convertToBase64(part.data)}`
805
- }
806
- };
807
- } else {
808
- throw new chunkHNHZGFZY_cjs.UnsupportedFunctionalityError({
809
- functionality: `file part media type ${part.mediaType}`
810
- });
811
- }
812
- }
813
- }
814
- })
815
- });
816
- break;
817
- }
818
- case "assistant": {
819
- let text = "";
820
- const toolCalls = [];
821
- for (const part of content) {
822
- switch (part.type) {
823
- case "text": {
824
- text += part.text;
825
- break;
826
- }
827
- case "tool-call": {
828
- toolCalls.push({
829
- id: part.toolCallId,
830
- type: "function",
831
- function: {
832
- name: part.toolName,
833
- arguments: JSON.stringify(part.input)
834
- }
835
- });
836
- break;
837
- }
838
- }
839
- }
840
- messages.push({
841
- role: "assistant",
842
- content: text,
843
- tool_calls: toolCalls.length > 0 ? toolCalls : void 0
844
- });
845
- break;
846
- }
847
- case "tool": {
848
- for (const toolResponse of content) {
849
- const output = toolResponse.output;
850
- let contentValue;
851
- switch (output.type) {
852
- case "text":
853
- case "error-text":
854
- contentValue = output.value;
855
- break;
856
- case "content":
857
- case "json":
858
- case "error-json":
859
- contentValue = JSON.stringify(output.value);
860
- break;
861
- }
862
- messages.push({
863
- role: "tool",
864
- tool_call_id: toolResponse.toolCallId,
865
- content: contentValue
866
- });
867
- }
868
- break;
869
- }
870
- default: {
871
- const _exhaustiveCheck = role;
872
- throw new Error(`Unsupported role: ${_exhaustiveCheck}`);
873
- }
874
- }
875
- }
876
- return { messages, warnings };
877
- }
878
- function getResponseMetadata2({
879
- id,
880
- model,
881
- created
882
- }) {
883
- return {
884
- id: id != null ? id : void 0,
885
- modelId: model != null ? model : void 0,
886
- timestamp: created != null ? new Date(created * 1e3) : void 0
887
- };
888
- }
889
- function mapXaiFinishReason(finishReason) {
890
- switch (finishReason) {
891
- case "stop":
892
- return "stop";
893
- case "length":
894
- return "length";
895
- case "tool_calls":
896
- case "function_call":
897
- return "tool-calls";
898
- case "content_filter":
899
- return "content-filter";
900
- default:
901
- return "unknown";
902
- }
903
- }
904
- var webSourceSchema = v4.z.object({
905
- type: v4.z.literal("web"),
906
- country: v4.z.string().length(2).optional(),
907
- excludedWebsites: v4.z.array(v4.z.string()).max(5).optional(),
908
- allowedWebsites: v4.z.array(v4.z.string()).max(5).optional(),
909
- safeSearch: v4.z.boolean().optional()
910
- });
911
- var xSourceSchema = v4.z.object({
912
- type: v4.z.literal("x"),
913
- excludedXHandles: v4.z.array(v4.z.string()).optional(),
914
- includedXHandles: v4.z.array(v4.z.string()).optional(),
915
- postFavoriteCount: v4.z.number().int().optional(),
916
- postViewCount: v4.z.number().int().optional(),
917
- /**
918
- * @deprecated use `includedXHandles` instead
919
- */
920
- xHandles: v4.z.array(v4.z.string()).optional()
921
- });
922
- var newsSourceSchema = v4.z.object({
923
- type: v4.z.literal("news"),
924
- country: v4.z.string().length(2).optional(),
925
- excludedWebsites: v4.z.array(v4.z.string()).max(5).optional(),
926
- safeSearch: v4.z.boolean().optional()
927
- });
928
- var rssSourceSchema = v4.z.object({
929
- type: v4.z.literal("rss"),
930
- links: v4.z.array(v4.z.string().url()).max(1)
931
- // currently only supports one RSS link
932
- });
933
- var searchSourceSchema = v4.z.discriminatedUnion("type", [
934
- webSourceSchema,
935
- xSourceSchema,
936
- newsSourceSchema,
937
- rssSourceSchema
938
- ]);
939
- var xaiProviderOptions = v4.z.object({
940
- reasoningEffort: v4.z.enum(["low", "high"]).optional(),
941
- /**
942
- * Whether to enable parallel function calling during tool use.
943
- * When true, the model can call multiple functions in parallel.
944
- * When false, the model will call functions sequentially.
945
- * Defaults to true.
946
- */
947
- parallel_function_calling: v4.z.boolean().optional(),
948
- searchParameters: v4.z.object({
949
- /**
950
- * search mode preference
951
- * - "off": disables search completely
952
- * - "auto": model decides whether to search (default)
953
- * - "on": always enables search
954
- */
955
- mode: v4.z.enum(["off", "auto", "on"]),
956
- /**
957
- * whether to return citations in the response
958
- * defaults to true
959
- */
960
- returnCitations: v4.z.boolean().optional(),
961
- /**
962
- * start date for search data (ISO8601 format: YYYY-MM-DD)
963
- */
964
- fromDate: v4.z.string().optional(),
965
- /**
966
- * end date for search data (ISO8601 format: YYYY-MM-DD)
967
- */
968
- toDate: v4.z.string().optional(),
969
- /**
970
- * maximum number of search results to consider
971
- * defaults to 20
972
- */
973
- maxSearchResults: v4.z.number().min(1).max(50).optional(),
974
- /**
975
- * data sources to search from
976
- * defaults to ["web", "x"] if not specified
977
- */
978
- sources: v4.z.array(searchSourceSchema).optional()
979
- }).optional()
980
- });
981
- var xaiErrorDataSchema = v4.z.object({
982
- error: v4.z.object({
983
- message: v4.z.string(),
984
- type: v4.z.string().nullish(),
985
- param: v4.z.any().nullish(),
986
- code: v4.z.union([v4.z.string(), v4.z.number()]).nullish()
987
- })
988
- });
989
- var xaiFailedResponseHandler = chunkHNHZGFZY_cjs.createJsonErrorResponseHandler({
990
- errorSchema: xaiErrorDataSchema,
991
- errorToMessage: (data) => data.error.message
992
- });
993
- function prepareTools2({
994
- tools,
995
- toolChoice
996
- }) {
997
- tools = (tools == null ? void 0 : tools.length) ? tools : void 0;
998
- const toolWarnings = [];
999
- if (tools == null) {
1000
- return { tools: void 0, toolChoice: void 0, toolWarnings };
1001
- }
1002
- const xaiTools = [];
1003
- for (const tool of tools) {
1004
- if (tool.type === "provider-defined") {
1005
- toolWarnings.push({ type: "unsupported-tool", tool });
1006
- } else {
1007
- xaiTools.push({
1008
- type: "function",
1009
- function: {
1010
- name: tool.name,
1011
- description: tool.description,
1012
- parameters: tool.inputSchema
1013
- }
1014
- });
1015
- }
1016
- }
1017
- if (toolChoice == null) {
1018
- return { tools: xaiTools, toolChoice: void 0, toolWarnings };
1019
- }
1020
- const type = toolChoice.type;
1021
- switch (type) {
1022
- case "auto":
1023
- case "none":
1024
- return { tools: xaiTools, toolChoice: type, toolWarnings };
1025
- case "required":
1026
- return { tools: xaiTools, toolChoice: "required", toolWarnings };
1027
- case "tool":
1028
- return {
1029
- tools: xaiTools,
1030
- toolChoice: {
1031
- type: "function",
1032
- function: { name: toolChoice.toolName }
1033
- },
1034
- toolWarnings
1035
- };
1036
- default: {
1037
- const _exhaustiveCheck = type;
1038
- throw new chunkHNHZGFZY_cjs.UnsupportedFunctionalityError({
1039
- functionality: `tool choice type: ${_exhaustiveCheck}`
1040
- });
1041
- }
1042
- }
1043
- }
1044
- var XaiChatLanguageModel = class {
1045
- constructor(modelId, config) {
1046
- this.specificationVersion = "v2";
1047
- this.supportedUrls = {
1048
- "image/*": [/^https?:\/\/.*$/]
1049
- };
1050
- this.modelId = modelId;
1051
- this.config = config;
1052
- }
1053
- get provider() {
1054
- return this.config.provider;
1055
- }
1056
- async getArgs({
1057
- prompt,
1058
- maxOutputTokens,
1059
- temperature,
1060
- topP,
1061
- topK,
1062
- frequencyPenalty,
1063
- presencePenalty,
1064
- stopSequences,
1065
- seed,
1066
- responseFormat,
1067
- providerOptions,
1068
- tools,
1069
- toolChoice
1070
- }) {
1071
- var _a, _b, _c;
1072
- const warnings = [];
1073
- const options = (_a = await chunkHNHZGFZY_cjs.parseProviderOptions({
1074
- provider: "xai",
1075
- providerOptions,
1076
- schema: xaiProviderOptions
1077
- })) != null ? _a : {};
1078
- if (topK != null) {
1079
- warnings.push({
1080
- type: "unsupported-setting",
1081
- setting: "topK"
1082
- });
1083
- }
1084
- if (frequencyPenalty != null) {
1085
- warnings.push({
1086
- type: "unsupported-setting",
1087
- setting: "frequencyPenalty"
1088
- });
1089
- }
1090
- if (presencePenalty != null) {
1091
- warnings.push({
1092
- type: "unsupported-setting",
1093
- setting: "presencePenalty"
1094
- });
1095
- }
1096
- if (stopSequences != null) {
1097
- warnings.push({
1098
- type: "unsupported-setting",
1099
- setting: "stopSequences"
1100
- });
1101
- }
1102
- const { messages, warnings: messageWarnings } = convertToXaiChatMessages(prompt);
1103
- warnings.push(...messageWarnings);
1104
- const {
1105
- tools: xaiTools,
1106
- toolChoice: xaiToolChoice,
1107
- toolWarnings
1108
- } = prepareTools2({
1109
- tools,
1110
- toolChoice
1111
- });
1112
- warnings.push(...toolWarnings);
1113
- const baseArgs = {
1114
- // model id
1115
- model: this.modelId,
1116
- // standard generation settings
1117
- max_tokens: maxOutputTokens,
1118
- temperature,
1119
- top_p: topP,
1120
- seed,
1121
- reasoning_effort: options.reasoningEffort,
1122
- // parallel function calling
1123
- parallel_function_calling: options.parallel_function_calling,
1124
- // response format
1125
- response_format: (responseFormat == null ? void 0 : responseFormat.type) === "json" ? responseFormat.schema != null ? {
1126
- type: "json_schema",
1127
- json_schema: {
1128
- name: (_b = responseFormat.name) != null ? _b : "response",
1129
- schema: responseFormat.schema,
1130
- strict: true
1131
- }
1132
- } : { type: "json_object" } : void 0,
1133
- // search parameters
1134
- search_parameters: options.searchParameters ? {
1135
- mode: options.searchParameters.mode,
1136
- return_citations: options.searchParameters.returnCitations,
1137
- from_date: options.searchParameters.fromDate,
1138
- to_date: options.searchParameters.toDate,
1139
- max_search_results: options.searchParameters.maxSearchResults,
1140
- sources: (_c = options.searchParameters.sources) == null ? void 0 : _c.map((source) => {
1141
- var _a2;
1142
- return {
1143
- type: source.type,
1144
- ...source.type === "web" && {
1145
- country: source.country,
1146
- excluded_websites: source.excludedWebsites,
1147
- allowed_websites: source.allowedWebsites,
1148
- safe_search: source.safeSearch
1149
- },
1150
- ...source.type === "x" && {
1151
- excluded_x_handles: source.excludedXHandles,
1152
- included_x_handles: (_a2 = source.includedXHandles) != null ? _a2 : source.xHandles,
1153
- post_favorite_count: source.postFavoriteCount,
1154
- post_view_count: source.postViewCount
1155
- },
1156
- ...source.type === "news" && {
1157
- country: source.country,
1158
- excluded_websites: source.excludedWebsites,
1159
- safe_search: source.safeSearch
1160
- },
1161
- ...source.type === "rss" && {
1162
- links: source.links
1163
- }
1164
- };
1165
- })
1166
- } : void 0,
1167
- // messages in xai format
1168
- messages,
1169
- // tools in xai format
1170
- tools: xaiTools,
1171
- tool_choice: xaiToolChoice
1172
- };
1173
- return {
1174
- args: baseArgs,
1175
- warnings
1176
- };
1177
- }
1178
- async doGenerate(options) {
1179
- var _a, _b, _c;
1180
- const { args: body, warnings } = await this.getArgs(options);
1181
- const {
1182
- responseHeaders,
1183
- value: response,
1184
- rawValue: rawResponse
1185
- } = await chunkHNHZGFZY_cjs.postJsonToApi({
1186
- url: `${(_a = this.config.baseURL) != null ? _a : "https://api.x.ai/v1"}/chat/completions`,
1187
- headers: chunkHNHZGFZY_cjs.combineHeaders(this.config.headers(), options.headers),
1188
- body,
1189
- failedResponseHandler: xaiFailedResponseHandler,
1190
- successfulResponseHandler: chunkHNHZGFZY_cjs.createJsonResponseHandler(
1191
- xaiChatResponseSchema
1192
- ),
1193
- abortSignal: options.abortSignal,
1194
- fetch: this.config.fetch
1195
- });
1196
- const choice = response.choices[0];
1197
- const content = [];
1198
- if (choice.message.content != null && choice.message.content.length > 0) {
1199
- let text = choice.message.content;
1200
- const lastMessage = body.messages[body.messages.length - 1];
1201
- if ((lastMessage == null ? void 0 : lastMessage.role) === "assistant" && text === lastMessage.content) {
1202
- text = "";
1203
- }
1204
- if (text.length > 0) {
1205
- content.push({ type: "text", text });
1206
- }
1207
- }
1208
- if (choice.message.reasoning_content != null && choice.message.reasoning_content.length > 0) {
1209
- content.push({
1210
- type: "reasoning",
1211
- text: choice.message.reasoning_content
1212
- });
1213
- }
1214
- if (choice.message.tool_calls != null) {
1215
- for (const toolCall of choice.message.tool_calls) {
1216
- content.push({
1217
- type: "tool-call",
1218
- toolCallId: toolCall.id,
1219
- toolName: toolCall.function.name,
1220
- input: toolCall.function.arguments
1221
- });
1222
- }
1223
- }
1224
- if (response.citations != null) {
1225
- for (const url of response.citations) {
1226
- content.push({
1227
- type: "source",
1228
- sourceType: "url",
1229
- id: this.config.generateId(),
1230
- url
1231
- });
1232
- }
1233
- }
1234
- return {
1235
- content,
1236
- finishReason: mapXaiFinishReason(choice.finish_reason),
1237
- usage: {
1238
- inputTokens: response.usage.prompt_tokens,
1239
- outputTokens: response.usage.completion_tokens,
1240
- totalTokens: response.usage.total_tokens,
1241
- reasoningTokens: (_c = (_b = response.usage.completion_tokens_details) == null ? void 0 : _b.reasoning_tokens) != null ? _c : void 0
1242
- },
1243
- request: { body },
1244
- response: {
1245
- ...getResponseMetadata2(response),
1246
- headers: responseHeaders,
1247
- body: rawResponse
1248
- },
1249
- warnings
1250
- };
1251
- }
1252
- async doStream(options) {
1253
- var _a;
1254
- const { args, warnings } = await this.getArgs(options);
1255
- const body = {
1256
- ...args,
1257
- stream: true,
1258
- stream_options: {
1259
- include_usage: true
1260
- }
1261
- };
1262
- const { responseHeaders, value: response } = await chunkHNHZGFZY_cjs.postJsonToApi({
1263
- url: `${(_a = this.config.baseURL) != null ? _a : "https://api.x.ai/v1"}/chat/completions`,
1264
- headers: chunkHNHZGFZY_cjs.combineHeaders(this.config.headers(), options.headers),
1265
- body,
1266
- failedResponseHandler: xaiFailedResponseHandler,
1267
- successfulResponseHandler: chunkHNHZGFZY_cjs.createEventSourceResponseHandler(xaiChatChunkSchema),
1268
- abortSignal: options.abortSignal,
1269
- fetch: this.config.fetch
1270
- });
1271
- let finishReason = "unknown";
1272
- const usage = {
1273
- inputTokens: void 0,
1274
- outputTokens: void 0,
1275
- totalTokens: void 0
1276
- };
1277
- let isFirstChunk = true;
1278
- const contentBlocks = {};
1279
- const lastReasoningDeltas = {};
1280
- const self = this;
1281
- return {
1282
- stream: response.pipeThrough(
1283
- new TransformStream({
1284
- start(controller) {
1285
- controller.enqueue({ type: "stream-start", warnings });
1286
- },
1287
- transform(chunk, controller) {
1288
- var _a2, _b;
1289
- if (options.includeRawChunks) {
1290
- controller.enqueue({ type: "raw", rawValue: chunk.rawValue });
1291
- }
1292
- if (!chunk.success) {
1293
- controller.enqueue({ type: "error", error: chunk.error });
1294
- return;
1295
- }
1296
- const value = chunk.value;
1297
- if (isFirstChunk) {
1298
- controller.enqueue({
1299
- type: "response-metadata",
1300
- ...getResponseMetadata2(value)
1301
- });
1302
- isFirstChunk = false;
1303
- }
1304
- if (value.citations != null) {
1305
- for (const url of value.citations) {
1306
- controller.enqueue({
1307
- type: "source",
1308
- sourceType: "url",
1309
- id: self.config.generateId(),
1310
- url
1311
- });
1312
- }
1313
- }
1314
- if (value.usage != null) {
1315
- usage.inputTokens = value.usage.prompt_tokens;
1316
- usage.outputTokens = value.usage.completion_tokens;
1317
- usage.totalTokens = value.usage.total_tokens;
1318
- usage.reasoningTokens = (_b = (_a2 = value.usage.completion_tokens_details) == null ? void 0 : _a2.reasoning_tokens) != null ? _b : void 0;
1319
- }
1320
- const choice = value.choices[0];
1321
- if ((choice == null ? void 0 : choice.finish_reason) != null) {
1322
- finishReason = mapXaiFinishReason(choice.finish_reason);
1323
- }
1324
- if ((choice == null ? void 0 : choice.delta) == null) {
1325
- return;
1326
- }
1327
- const delta = choice.delta;
1328
- const choiceIndex = choice.index;
1329
- if (delta.content != null && delta.content.length > 0) {
1330
- const textContent = delta.content;
1331
- const lastMessage = body.messages[body.messages.length - 1];
1332
- if ((lastMessage == null ? void 0 : lastMessage.role) === "assistant" && textContent === lastMessage.content) {
1333
- return;
1334
- }
1335
- const blockId = `text-${value.id || choiceIndex}`;
1336
- if (contentBlocks[blockId] == null) {
1337
- contentBlocks[blockId] = { type: "text" };
1338
- controller.enqueue({
1339
- type: "text-start",
1340
- id: blockId
1341
- });
1342
- }
1343
- controller.enqueue({
1344
- type: "text-delta",
1345
- id: blockId,
1346
- delta: textContent
1347
- });
1348
- }
1349
- if (delta.reasoning_content != null && delta.reasoning_content.length > 0) {
1350
- const blockId = `reasoning-${value.id || choiceIndex}`;
1351
- if (lastReasoningDeltas[blockId] === delta.reasoning_content) {
1352
- return;
1353
- }
1354
- lastReasoningDeltas[blockId] = delta.reasoning_content;
1355
- if (contentBlocks[blockId] == null) {
1356
- contentBlocks[blockId] = { type: "reasoning" };
1357
- controller.enqueue({
1358
- type: "reasoning-start",
1359
- id: blockId
1360
- });
1361
- }
1362
- controller.enqueue({
1363
- type: "reasoning-delta",
1364
- id: blockId,
1365
- delta: delta.reasoning_content
1366
- });
1367
- }
1368
- if (delta.tool_calls != null) {
1369
- for (const toolCall of delta.tool_calls) {
1370
- const toolCallId = toolCall.id;
1371
- controller.enqueue({
1372
- type: "tool-input-start",
1373
- id: toolCallId,
1374
- toolName: toolCall.function.name
1375
- });
1376
- controller.enqueue({
1377
- type: "tool-input-delta",
1378
- id: toolCallId,
1379
- delta: toolCall.function.arguments
1380
- });
1381
- controller.enqueue({
1382
- type: "tool-input-end",
1383
- id: toolCallId
1384
- });
1385
- controller.enqueue({
1386
- type: "tool-call",
1387
- toolCallId,
1388
- toolName: toolCall.function.name,
1389
- input: toolCall.function.arguments
1390
- });
1391
- }
1392
- }
1393
- },
1394
- flush(controller) {
1395
- for (const [blockId, block] of Object.entries(contentBlocks)) {
1396
- controller.enqueue({
1397
- type: block.type === "text" ? "text-end" : "reasoning-end",
1398
- id: blockId
1399
- });
1400
- }
1401
- controller.enqueue({ type: "finish", finishReason, usage });
1402
- }
1403
- })
1404
- ),
1405
- request: { body },
1406
- response: { headers: responseHeaders }
1407
- };
1408
- }
1409
- };
1410
- var xaiUsageSchema = v4.z.object({
1411
- prompt_tokens: v4.z.number(),
1412
- completion_tokens: v4.z.number(),
1413
- total_tokens: v4.z.number(),
1414
- completion_tokens_details: v4.z.object({
1415
- reasoning_tokens: v4.z.number().nullish()
1416
- }).nullish()
1417
- });
1418
- var xaiChatResponseSchema = v4.z.object({
1419
- id: v4.z.string().nullish(),
1420
- created: v4.z.number().nullish(),
1421
- model: v4.z.string().nullish(),
1422
- choices: v4.z.array(
1423
- v4.z.object({
1424
- message: v4.z.object({
1425
- role: v4.z.literal("assistant"),
1426
- content: v4.z.string().nullish(),
1427
- reasoning_content: v4.z.string().nullish(),
1428
- tool_calls: v4.z.array(
1429
- v4.z.object({
1430
- id: v4.z.string(),
1431
- type: v4.z.literal("function"),
1432
- function: v4.z.object({
1433
- name: v4.z.string(),
1434
- arguments: v4.z.string()
1435
- })
1436
- })
1437
- ).nullish()
1438
- }),
1439
- index: v4.z.number(),
1440
- finish_reason: v4.z.string().nullish()
1441
- })
1442
- ),
1443
- object: v4.z.literal("chat.completion"),
1444
- usage: xaiUsageSchema,
1445
- citations: v4.z.array(v4.z.string().url()).nullish()
1446
- });
1447
- var xaiChatChunkSchema = v4.z.object({
1448
- id: v4.z.string().nullish(),
1449
- created: v4.z.number().nullish(),
1450
- model: v4.z.string().nullish(),
1451
- choices: v4.z.array(
1452
- v4.z.object({
1453
- delta: v4.z.object({
1454
- role: v4.z.enum(["assistant"]).optional(),
1455
- content: v4.z.string().nullish(),
1456
- reasoning_content: v4.z.string().nullish(),
1457
- tool_calls: v4.z.array(
1458
- v4.z.object({
1459
- id: v4.z.string(),
1460
- type: v4.z.literal("function"),
1461
- function: v4.z.object({
1462
- name: v4.z.string(),
1463
- arguments: v4.z.string()
1464
- })
1465
- })
1466
- ).nullish()
1467
- }),
1468
- finish_reason: v4.z.string().nullish(),
1469
- index: v4.z.number()
1470
- })
1471
- ),
1472
- usage: xaiUsageSchema.nullish(),
1473
- citations: v4.z.array(v4.z.string().url()).nullish()
1474
- });
- var VERSION2 = "2.0.33" ;
- var xaiErrorStructure = {
- errorSchema: xaiErrorDataSchema,
- errorToMessage: (data) => data.error.message
- };
- function createXai(options = {}) {
- var _a;
- const baseURL = chunkHNHZGFZY_cjs.withoutTrailingSlash(
- (_a = options.baseURL) != null ? _a : "https://api.x.ai/v1"
- );
- const getHeaders = () => chunkHNHZGFZY_cjs.withUserAgentSuffix(
- {
- Authorization: `Bearer ${chunkHNHZGFZY_cjs.loadApiKey({
- apiKey: options.apiKey,
- environmentVariableName: "XAI_API_KEY",
- description: "xAI API key"
- })}`,
- ...options.headers
- },
- `ai-sdk/xai/${VERSION2}`
- );
- const createLanguageModel = (modelId) => {
- return new XaiChatLanguageModel(modelId, {
- provider: "xai.chat",
- baseURL,
- headers: getHeaders,
- generateId: chunkHNHZGFZY_cjs.generateId,
- fetch: options.fetch
- });
- };
- const createImageModel = (modelId) => {
- return new chunkHNHZGFZY_cjs.OpenAICompatibleImageModel(modelId, {
- provider: "xai.image",
- url: ({ path }) => `${baseURL}${path}`,
- headers: getHeaders,
- fetch: options.fetch,
- errorStructure: xaiErrorStructure
- });
- };
- const provider = (modelId) => createLanguageModel(modelId);
- provider.languageModel = createLanguageModel;
- provider.chat = createLanguageModel;
- provider.textEmbeddingModel = (modelId) => {
- throw new chunkHNHZGFZY_cjs.NoSuchModelError({ modelId, modelType: "textEmbeddingModel" });
- };
- provider.imageModel = createImageModel;
- provider.image = createImageModel;
- return provider;
- }
- createXai();
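Usage sketch for the factory above: with no options it targets https://api.x.ai/v1 and reads XAI_API_KEY from the environment; chat and languageModel are aliases, image models go through OpenAICompatibleImageModel, and textEmbeddingModel always throws NoSuchModelError. Model ids below are placeholders, not taken from this diff.

```ts
// Minimal sketch against the createXai factory defined in this chunk (assumes
// XAI_API_KEY is set; "grok-3" and "grok-2-image" are illustrative model ids).
const xai = createXai();                      // defaults: baseURL https://api.x.ai/v1
const chatModel = xai("grok-3");              // same as xai.chat("grok-3") / xai.languageModel("grok-3")
const imageModel = xai.image("grok-2-image"); // routed through OpenAICompatibleImageModel
// xai.textEmbeddingModel("any-id") would throw NoSuchModelError
```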
-
- // src/llm/model/gateway-resolver.ts
- function parseModelRouterId(routerId, gatewayPrefix) {
- if (gatewayPrefix && !routerId.startsWith(`${gatewayPrefix}/`)) {
- throw new Error(`Expected ${gatewayPrefix}/ in model router ID ${routerId}`);
- }
- const idParts = routerId.split("/");
- if (gatewayPrefix === "azure-openai") {
- if (idParts.length < 2) {
- throw new Error(`Expected format azure-openai/deployment-name, but got ${routerId}`);
- }
- return {
- providerId: "azure-openai",
- modelId: idParts.slice(1).join("/")
- // Deployment name
- };
- }
- if (gatewayPrefix && idParts.length < 3) {
- throw new Error(
- `Expected atleast 3 id parts ${gatewayPrefix}/provider/model, but only saw ${idParts.length} in ${routerId}`
- );
- }
- const providerId = idParts.at(gatewayPrefix ? 1 : 0);
- const modelId = idParts.slice(gatewayPrefix ? 2 : 1).join(`/`);
- if (!routerId.includes(`/`) || !providerId || !modelId) {
- throw new Error(
- `Attempted to parse provider/model from ${routerId} but this ID doesn't appear to contain a provider`
- );
- }
- return {
- providerId,
- modelId
- };
- }
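Illustrative calls against parseModelRouterId as defined above; the gateway prefix and model ids are example values, and the expected results follow directly from the branches in the code.

```ts
parseModelRouterId("openai/gpt-4o");
// -> { providerId: "openai", modelId: "gpt-4o" }                (no gateway prefix)

parseModelRouterId("my-gateway/groq/llama-3.1-8b", "my-gateway");
// -> { providerId: "groq", modelId: "llama-3.1-8b" }            (prefix segment is skipped)

parseModelRouterId("azure-openai/my-deployment", "azure-openai");
// -> { providerId: "azure-openai", modelId: "my-deployment" }   (deployment name kept whole)

parseModelRouterId("gpt-4o");
// -> throws: the ID contains no "/" and therefore no provider segment
```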
-
- // src/llm/model/gateways/constants.ts
- var PROVIDERS_WITH_INSTALLED_PACKAGES = ["anthropic", "google", "mistral", "openai", "openrouter", "xai"];
- var EXCLUDED_PROVIDERS = ["github-copilot"];
-
- // src/llm/model/gateways/models-dev.ts
- var OPENAI_COMPATIBLE_OVERRIDES = {
- cerebras: {
- url: "https://api.cerebras.ai/v1"
- },
- mistral: {
- url: "https://api.mistral.ai/v1"
- },
- groq: {
- url: "https://api.groq.com/openai/v1"
- },
- togetherai: {
- url: "https://api.together.xyz/v1"
- },
- deepinfra: {
- url: "https://api.deepinfra.com/v1/openai"
- },
- perplexity: {
- url: "https://api.perplexity.ai"
- },
- vercel: {
- url: "https://ai-gateway.vercel.sh/v1",
- apiKeyEnvVar: "AI_GATEWAY_API_KEY"
- }
- };
- var ModelsDevGateway = class extends chunkHNHZGFZY_cjs.MastraModelGateway {
- id = "models.dev";
- name = "models.dev";
- providerConfigs = {};
- constructor(providerConfigs) {
- super();
- if (providerConfigs) this.providerConfigs = providerConfigs;
- }
- async fetchProviders() {
- const response = await fetch("https://models.dev/api.json");
- if (!response.ok) {
- throw new Error(`Failed to fetch from models.dev: ${response.statusText}`);
- }
- const data = await response.json();
- const providerConfigs = {};
- for (const [providerId, providerInfo] of Object.entries(data)) {
- if (EXCLUDED_PROVIDERS.includes(providerId)) continue;
- if (!providerInfo || typeof providerInfo !== "object" || !providerInfo.models) continue;
- const normalizedId = providerId;
- const isOpenAICompatible = providerInfo.npm === "@ai-sdk/openai-compatible" || providerInfo.npm === "@ai-sdk/gateway" || // Vercel AI Gateway is OpenAI-compatible
- normalizedId in OPENAI_COMPATIBLE_OVERRIDES;
- const hasInstalledPackage = PROVIDERS_WITH_INSTALLED_PACKAGES.includes(providerId);
- const hasApiAndEnv = providerInfo.api && providerInfo.env && providerInfo.env.length > 0;
- if (isOpenAICompatible || hasInstalledPackage || hasApiAndEnv) {
- const modelIds = Object.keys(providerInfo.models).sort();
- const url = providerInfo.api || OPENAI_COMPATIBLE_OVERRIDES[normalizedId]?.url;
- if (!hasInstalledPackage && !url) {
- continue;
- }
- const apiKeyEnvVar = providerInfo.env?.[0] || `${normalizedId.toUpperCase().replace(/-/g, "_")}_API_KEY`;
- const apiKeyHeader = !hasInstalledPackage ? OPENAI_COMPATIBLE_OVERRIDES[normalizedId]?.apiKeyHeader || "Authorization" : void 0;
- providerConfigs[normalizedId] = {
- url,
- apiKeyEnvVar,
- apiKeyHeader,
- name: providerInfo.name || providerId.charAt(0).toUpperCase() + providerId.slice(1),
- models: modelIds,
- docUrl: providerInfo.doc,
- // Include documentation URL if available
- gateway: `models.dev`
- };
- }
- }
- this.providerConfigs = providerConfigs;
- return providerConfigs;
- }
- buildUrl(routerId, envVars) {
- const { providerId } = parseModelRouterId(routerId);
- const config = this.providerConfigs[providerId];
- if (!config?.url) {
- return;
- }
- const baseUrlEnvVar = `${providerId.toUpperCase().replace(/-/g, "_")}_BASE_URL`;
- const customBaseUrl = envVars?.[baseUrlEnvVar] || process.env[baseUrlEnvVar];
- return customBaseUrl || config.url;
- }
- getApiKey(modelId) {
- const [provider, model] = modelId.split("/");
- if (!provider || !model) {
- throw new Error(`Could not identify provider from model id ${modelId}`);
- }
- const config = this.providerConfigs[provider];
- if (!config) {
- throw new Error(`Could not find config for provider ${provider} with model id ${modelId}`);
- }
- const apiKey = typeof config.apiKeyEnvVar === `string` ? process.env[config.apiKeyEnvVar] : void 0;
- if (!apiKey) {
- throw new Error(`Could not find API key process.env.${config.apiKeyEnvVar} for model id ${modelId}`);
- }
- return Promise.resolve(apiKey);
- }
- async resolveLanguageModel({
- modelId,
- providerId,
- apiKey,
- headers
- }) {
- const baseURL = this.buildUrl(`${providerId}/${modelId}`);
- switch (providerId) {
- case "openai":
- return chunkHNHZGFZY_cjs.createOpenAI({ apiKey }).responses(modelId);
- case "gemini":
- case "google":
- return chunkHNHZGFZY_cjs.createGoogleGenerativeAI({
- apiKey
- }).chat(modelId);
- case "anthropic":
- return chunkHNHZGFZY_cjs.createAnthropic({ apiKey })(modelId);
- case "mistral":
- return createMistral({ apiKey })(modelId);
- case "openrouter":
- return aiSdkProviderV5.createOpenRouter({ apiKey, headers })(modelId);
- case "xai":
- return createXai({
- apiKey
- })(modelId);
- default:
- if (!baseURL) throw new Error(`No API URL found for ${providerId}/${modelId}`);
- return chunkHNHZGFZY_cjs.createOpenAICompatible({ name: providerId, apiKey, baseURL, supportsStructuredOutputs: true }).chatModel(
- modelId
- );
- }
- }
- };
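A hedged end-to-end sketch of the gateway above: fetchProviders pulls the catalog from https://models.dev/api.json, buildUrl honors a <PROVIDER>_BASE_URL override, getApiKey reads the provider's configured env var (typically <PROVIDER>_API_KEY), and resolveLanguageModel falls back to an OpenAI-compatible client for providers without an installed SDK package. Provider and model ids here are illustrative.

```ts
const gateway = new ModelsDevGateway();
await gateway.fetchProviders();                              // populates this.providerConfigs

const url = gateway.buildUrl("groq/llama-3.1-8b");           // GROQ_BASE_URL, if set, wins over the catalog URL
const apiKey = await gateway.getApiKey("groq/llama-3.1-8b"); // throws unless the configured env var is set

// "groq" is not in the installed-package switch, so this takes the
// createOpenAICompatible default branch using the resolved base URL.
const model = await gateway.resolveLanguageModel({
  providerId: "groq",
  modelId: "llama-3.1-8b",
  apiKey,
});
```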
-
- exports.ModelsDevGateway = ModelsDevGateway;
- exports.parseModelRouterId = parseModelRouterId;
- //# sourceMappingURL=chunk-QGWNF2QJ.cjs.map