@mastra/core 1.0.0-beta.0 → 1.0.0-beta.2

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (100)
  1. package/CHANGELOG.md +17 -0
  2. package/dist/agent/index.cjs +6 -6
  3. package/dist/agent/index.js +1 -1
  4. package/dist/{chunk-QCQLOMJM.cjs → chunk-22443P6A.cjs} +22 -21
  5. package/dist/chunk-22443P6A.cjs.map +1 -0
  6. package/dist/{chunk-YIK3ASEG.cjs → chunk-2ZVKF4HP.cjs} +117 -42
  7. package/dist/chunk-2ZVKF4HP.cjs.map +1 -0
  8. package/dist/{chunk-26SQQNMU.js → chunk-7CBEP2ZQ.js} +36 -12
  9. package/dist/chunk-7CBEP2ZQ.js.map +1 -0
  10. package/dist/{chunk-7SKXKUYT.js → chunk-7PO6SEJF.js} +6 -3
  11. package/dist/chunk-7PO6SEJF.js.map +1 -0
  12. package/dist/{chunk-S5MJLXMG.cjs → chunk-CB575O6L.cjs} +10 -2
  13. package/dist/chunk-CB575O6L.cjs.map +1 -0
  14. package/dist/chunk-HDJFSJCK.js +2237 -0
  15. package/dist/chunk-HDJFSJCK.js.map +1 -0
  16. package/dist/{chunk-BU4IAJWF.js → chunk-I4CXL4SR.js} +3 -3
  17. package/dist/{chunk-BU4IAJWF.js.map → chunk-I4CXL4SR.js.map} +1 -1
  18. package/dist/{chunk-VV753WCB.cjs → chunk-IQO7ANVS.cjs} +8 -8
  19. package/dist/{chunk-VV753WCB.cjs.map → chunk-IQO7ANVS.cjs.map} +1 -1
  20. package/dist/{chunk-L7XKOKOW.js → chunk-JPGVRWWL.js} +3 -3
  21. package/dist/chunk-JPGVRWWL.js.map +1 -0
  22. package/dist/{chunk-BXOL277H.cjs → chunk-JYYQQEBH.cjs} +7 -4
  23. package/dist/chunk-JYYQQEBH.cjs.map +1 -0
  24. package/dist/chunk-LWBQ4P4N.cjs +2240 -0
  25. package/dist/chunk-LWBQ4P4N.cjs.map +1 -0
  26. package/dist/{chunk-VJUZZB2I.js → chunk-SNPVZPLB.js} +4 -4
  27. package/dist/{chunk-VJUZZB2I.js.map → chunk-SNPVZPLB.js.map} +1 -1
  28. package/dist/{chunk-32CTMD2C.js → chunk-W7UH2PWL.js} +108 -33
  29. package/dist/chunk-W7UH2PWL.js.map +1 -0
  30. package/dist/{chunk-FD734TPS.cjs → chunk-YCVEJ3UN.cjs} +37 -13
  31. package/dist/chunk-YCVEJ3UN.cjs.map +1 -0
  32. package/dist/{chunk-P6APHXPZ.js → chunk-ZGHTOYHW.js} +5 -4
  33. package/dist/chunk-ZGHTOYHW.js.map +1 -0
  34. package/dist/{chunk-QUZGDSWE.cjs → chunk-ZWNI5IWX.cjs} +11 -11
  35. package/dist/{chunk-QUZGDSWE.cjs.map → chunk-ZWNI5IWX.cjs.map} +1 -1
  36. package/dist/evals/index.cjs +4 -4
  37. package/dist/evals/index.js +1 -1
  38. package/dist/evals/scoreTraces/index.cjs +3 -3
  39. package/dist/evals/scoreTraces/index.js +1 -1
  40. package/dist/index.cjs +2 -2
  41. package/dist/index.js +1 -1
  42. package/dist/llm/index.cjs +7 -7
  43. package/dist/llm/index.js +1 -1
  44. package/dist/llm/model/gateways/constants.d.ts.map +1 -1
  45. package/dist/llm/model/gateways/models-dev.d.ts.map +1 -1
  46. package/dist/llm/model/provider-types.generated.d.ts +48 -10
  47. package/dist/loop/index.cjs +2 -2
  48. package/dist/loop/index.js +1 -1
  49. package/dist/mastra/index.cjs +2 -2
  50. package/dist/mastra/index.d.ts.map +1 -1
  51. package/dist/mastra/index.js +1 -1
  52. package/dist/memory/index.cjs +2 -2
  53. package/dist/memory/index.js +1 -1
  54. package/dist/models-dev-DNBKXHT4.js +3 -0
  55. package/dist/{models-dev-7U4NRMM3.js.map → models-dev-DNBKXHT4.js.map} +1 -1
  56. package/dist/models-dev-YBEEQIX6.cjs +12 -0
  57. package/dist/{models-dev-VKSAQPRK.cjs.map → models-dev-YBEEQIX6.cjs.map} +1 -1
  58. package/dist/netlify-7G2L5VSH.js +3 -0
  59. package/dist/{netlify-42ZNWIDQ.js.map → netlify-7G2L5VSH.js.map} +1 -1
  60. package/dist/netlify-GWNGSIRZ.cjs +12 -0
  61. package/dist/{netlify-2IDXTNFW.cjs.map → netlify-GWNGSIRZ.cjs.map} +1 -1
  62. package/dist/processors/index.cjs +11 -11
  63. package/dist/processors/index.js +1 -1
  64. package/dist/provider-registry.json +101 -26
  65. package/dist/relevance/index.cjs +2 -2
  66. package/dist/relevance/index.js +1 -1
  67. package/dist/stream/index.cjs +8 -8
  68. package/dist/stream/index.js +1 -1
  69. package/dist/workflows/default.d.ts.map +1 -1
  70. package/dist/workflows/evented/index.cjs +10 -10
  71. package/dist/workflows/evented/index.js +1 -1
  72. package/dist/workflows/evented/step-executor.d.ts.map +1 -1
  73. package/dist/workflows/index.cjs +16 -16
  74. package/dist/workflows/index.js +1 -1
  75. package/dist/workflows/step.d.ts +1 -1
  76. package/dist/workflows/step.d.ts.map +1 -1
  77. package/dist/workflows/types.d.ts +11 -5
  78. package/dist/workflows/types.d.ts.map +1 -1
  79. package/dist/workflows/workflow.d.ts +1 -0
  80. package/dist/workflows/workflow.d.ts.map +1 -1
  81. package/package.json +5 -4
  82. package/src/llm/model/provider-types.generated.d.ts +48 -10
  83. package/dist/chunk-26SQQNMU.js.map +0 -1
  84. package/dist/chunk-32CTMD2C.js.map +0 -1
  85. package/dist/chunk-7SKXKUYT.js.map +0 -1
  86. package/dist/chunk-BNBRQS7N.js +0 -910
  87. package/dist/chunk-BNBRQS7N.js.map +0 -1
  88. package/dist/chunk-BXOL277H.cjs.map +0 -1
  89. package/dist/chunk-FD734TPS.cjs.map +0 -1
  90. package/dist/chunk-IU2SZXJQ.cjs +0 -913
  91. package/dist/chunk-IU2SZXJQ.cjs.map +0 -1
  92. package/dist/chunk-L7XKOKOW.js.map +0 -1
  93. package/dist/chunk-P6APHXPZ.js.map +0 -1
  94. package/dist/chunk-QCQLOMJM.cjs.map +0 -1
  95. package/dist/chunk-S5MJLXMG.cjs.map +0 -1
  96. package/dist/chunk-YIK3ASEG.cjs.map +0 -1
  97. package/dist/models-dev-7U4NRMM3.js +0 -3
  98. package/dist/models-dev-VKSAQPRK.cjs +0 -12
  99. package/dist/netlify-2IDXTNFW.cjs +0 -12
  100. package/dist/netlify-42ZNWIDQ.js +0 -3
package/dist/chunk-LWBQ4P4N.cjs
@@ -0,0 +1,2240 @@
1
+ 'use strict';
2
+
3
+ var chunkCB575O6L_cjs = require('./chunk-CB575O6L.cjs');
4
+ var v4 = require('zod/v4');
5
+ var aiSdkProviderV5 = require('@openrouter/ai-sdk-provider-v5');
6
+
7
+ function combineHeaders2(...headers) {
8
+ return headers.reduce(
9
+ (combinedHeaders, currentHeaders) => ({
10
+ ...combinedHeaders,
11
+ ...currentHeaders != null ? currentHeaders : {}
12
+ }),
13
+ {}
14
+ );
15
+ }
16
+ function extractResponseHeaders(response) {
17
+ return Object.fromEntries([...response.headers]);
18
+ }
19
+ var createIdGenerator = ({
20
+ prefix,
21
+ size = 16,
22
+ alphabet = "0123456789ABCDEFGHIJKLMNOPQRSTUVWXYZabcdefghijklmnopqrstuvwxyz",
23
+ separator = "-"
24
+ } = {}) => {
25
+ const generator = () => {
26
+ const alphabetLength = alphabet.length;
27
+ const chars = new Array(size);
28
+ for (let i = 0; i < size; i++) {
29
+ chars[i] = alphabet[Math.random() * alphabetLength | 0];
30
+ }
31
+ return chars.join("");
32
+ };
33
+ if (prefix == null) {
34
+ return generator;
35
+ }
36
+ if (alphabet.includes(separator)) {
37
+ throw new chunkCB575O6L_cjs.InvalidArgumentError({
38
+ argument: "separator",
39
+ message: `The separator "${separator}" must not be part of the alphabet "${alphabet}".`
40
+ });
41
+ }
42
+ return () => `${prefix}${separator}${generator()}`;
43
+ };
44
+ var generateId2 = createIdGenerator();
45
+ function isAbortError(error) {
46
+ return (error instanceof Error || error instanceof DOMException) && (error.name === "AbortError" || error.name === "ResponseAborted" || // Next.js
47
+ error.name === "TimeoutError");
48
+ }
49
+ var FETCH_FAILED_ERROR_MESSAGES = ["fetch failed", "failed to fetch"];
50
+ function handleFetchError({
51
+ error,
52
+ url,
53
+ requestBodyValues
54
+ }) {
55
+ if (isAbortError(error)) {
56
+ return error;
57
+ }
58
+ if (error instanceof TypeError && FETCH_FAILED_ERROR_MESSAGES.includes(error.message.toLowerCase())) {
59
+ const cause = error.cause;
60
+ if (cause != null) {
61
+ return new chunkCB575O6L_cjs.APICallError({
62
+ message: `Cannot connect to API: ${cause.message}`,
63
+ cause,
64
+ url,
65
+ requestBodyValues,
66
+ isRetryable: true
67
+ // retry when network error
68
+ });
69
+ }
70
+ }
71
+ return error;
72
+ }
73
+ function getRuntimeEnvironmentUserAgent(globalThisAny = globalThis) {
74
+ var _a, _b, _c;
75
+ if (globalThisAny.window) {
76
+ return `runtime/browser`;
77
+ }
78
+ if ((_a = globalThisAny.navigator) == null ? void 0 : _a.userAgent) {
79
+ return `runtime/${globalThisAny.navigator.userAgent.toLowerCase()}`;
80
+ }
81
+ if ((_c = (_b = globalThisAny.process) == null ? void 0 : _b.versions) == null ? void 0 : _c.node) {
82
+ return `runtime/node.js/${globalThisAny.process.version.substring(0)}`;
83
+ }
84
+ if (globalThisAny.EdgeRuntime) {
85
+ return `runtime/vercel-edge`;
86
+ }
87
+ return "runtime/unknown";
88
+ }
89
+ function normalizeHeaders(headers) {
90
+ if (headers == null) {
91
+ return {};
92
+ }
93
+ const normalized = {};
94
+ if (headers instanceof Headers) {
95
+ headers.forEach((value, key) => {
96
+ normalized[key.toLowerCase()] = value;
97
+ });
98
+ } else {
99
+ if (!Array.isArray(headers)) {
100
+ headers = Object.entries(headers);
101
+ }
102
+ for (const [key, value] of headers) {
103
+ if (value != null) {
104
+ normalized[key.toLowerCase()] = value;
105
+ }
106
+ }
107
+ }
108
+ return normalized;
109
+ }
110
+ function withUserAgentSuffix2(headers, ...userAgentSuffixParts) {
111
+ const normalizedHeaders = new Headers(normalizeHeaders(headers));
112
+ const currentUserAgentHeader = normalizedHeaders.get("user-agent") || "";
113
+ normalizedHeaders.set(
114
+ "user-agent",
115
+ [currentUserAgentHeader, ...userAgentSuffixParts].filter(Boolean).join(" ")
116
+ );
117
+ return Object.fromEntries(normalizedHeaders.entries());
118
+ }
119
+ var VERSION = "3.0.16" ;
120
+ var DEFAULT_SCHEMA_PREFIX = "JSON schema:";
121
+ var DEFAULT_SCHEMA_SUFFIX = "You MUST answer with a JSON object that matches the JSON schema above.";
122
+ var DEFAULT_GENERIC_SUFFIX = "You MUST answer with JSON.";
123
+ function injectJsonInstruction({
124
+ prompt,
125
+ schema,
126
+ schemaPrefix = schema != null ? DEFAULT_SCHEMA_PREFIX : void 0,
127
+ schemaSuffix = schema != null ? DEFAULT_SCHEMA_SUFFIX : DEFAULT_GENERIC_SUFFIX
128
+ }) {
129
+ return [
130
+ prompt != null && prompt.length > 0 ? prompt : void 0,
131
+ prompt != null && prompt.length > 0 ? "" : void 0,
132
+ // add a newline if prompt is not null
133
+ schemaPrefix,
134
+ schema != null ? JSON.stringify(schema) : void 0,
135
+ schemaSuffix
136
+ ].filter((line) => line != null).join("\n");
137
+ }
138
+ function injectJsonInstructionIntoMessages({
139
+ messages,
140
+ schema,
141
+ schemaPrefix,
142
+ schemaSuffix
143
+ }) {
144
+ var _a, _b;
145
+ const systemMessage = ((_a = messages[0]) == null ? void 0 : _a.role) === "system" ? { ...messages[0] } : { role: "system", content: "" };
146
+ systemMessage.content = injectJsonInstruction({
147
+ prompt: systemMessage.content,
148
+ schema,
149
+ schemaPrefix,
150
+ schemaSuffix
151
+ });
152
+ return [
153
+ systemMessage,
154
+ ...((_b = messages[0]) == null ? void 0 : _b.role) === "system" ? messages.slice(1) : messages
155
+ ];
156
+ }
157
+ function loadApiKey2({
158
+ apiKey,
159
+ environmentVariableName,
160
+ apiKeyParameterName = "apiKey",
161
+ description
162
+ }) {
163
+ if (typeof apiKey === "string") {
164
+ return apiKey;
165
+ }
166
+ if (apiKey != null) {
167
+ throw new chunkCB575O6L_cjs.LoadAPIKeyError({
168
+ message: `${description} API key must be a string.`
169
+ });
170
+ }
171
+ if (typeof process === "undefined") {
172
+ throw new chunkCB575O6L_cjs.LoadAPIKeyError({
173
+ message: `${description} API key is missing. Pass it using the '${apiKeyParameterName}' parameter. Environment variables is not supported in this environment.`
174
+ });
175
+ }
176
+ apiKey = process.env[environmentVariableName];
177
+ if (apiKey == null) {
178
+ throw new chunkCB575O6L_cjs.LoadAPIKeyError({
179
+ message: `${description} API key is missing. Pass it using the '${apiKeyParameterName}' parameter or the ${environmentVariableName} environment variable.`
180
+ });
181
+ }
182
+ if (typeof apiKey !== "string") {
183
+ throw new chunkCB575O6L_cjs.LoadAPIKeyError({
184
+ message: `${description} API key must be a string. The value of the ${environmentVariableName} environment variable is not a string.`
185
+ });
186
+ }
187
+ return apiKey;
188
+ }
189
+ var suspectProtoRx = /"__proto__"\s*:/;
190
+ var suspectConstructorRx = /"constructor"\s*:/;
191
+ function _parse(text) {
192
+ const obj = JSON.parse(text);
193
+ if (obj === null || typeof obj !== "object") {
194
+ return obj;
195
+ }
196
+ if (suspectProtoRx.test(text) === false && suspectConstructorRx.test(text) === false) {
197
+ return obj;
198
+ }
199
+ return filter(obj);
200
+ }
201
+ function filter(obj) {
202
+ let next = [obj];
203
+ while (next.length) {
204
+ const nodes = next;
205
+ next = [];
206
+ for (const node of nodes) {
207
+ if (Object.prototype.hasOwnProperty.call(node, "__proto__")) {
208
+ throw new SyntaxError("Object contains forbidden prototype property");
209
+ }
210
+ if (Object.prototype.hasOwnProperty.call(node, "constructor") && Object.prototype.hasOwnProperty.call(node.constructor, "prototype")) {
211
+ throw new SyntaxError("Object contains forbidden prototype property");
212
+ }
213
+ for (const key in node) {
214
+ const value = node[key];
215
+ if (value && typeof value === "object") {
216
+ next.push(value);
217
+ }
218
+ }
219
+ }
220
+ }
221
+ return obj;
222
+ }
223
+ function secureJsonParse(text) {
224
+ const { stackTraceLimit } = Error;
225
+ Error.stackTraceLimit = 0;
226
+ try {
227
+ return _parse(text);
228
+ } finally {
229
+ Error.stackTraceLimit = stackTraceLimit;
230
+ }
231
+ }
232
+ var validatorSymbol = Symbol.for("vercel.ai.validator");
233
+ function validator(validate) {
234
+ return { [validatorSymbol]: true, validate };
235
+ }
236
+ function isValidator(value) {
237
+ return typeof value === "object" && value !== null && validatorSymbol in value && value[validatorSymbol] === true && "validate" in value;
238
+ }
239
+ function asValidator(value) {
240
+ return isValidator(value) ? value : typeof value === "function" ? value() : standardSchemaValidator(value);
241
+ }
242
+ function standardSchemaValidator(standardSchema) {
243
+ return validator(async (value) => {
244
+ const result = await standardSchema["~standard"].validate(value);
245
+ return result.issues == null ? { success: true, value: result.value } : {
246
+ success: false,
247
+ error: new chunkCB575O6L_cjs.TypeValidationError({
248
+ value,
249
+ cause: result.issues
250
+ })
251
+ };
252
+ });
253
+ }
254
+ async function validateTypes({
255
+ value,
256
+ schema
257
+ }) {
258
+ const result = await safeValidateTypes({ value, schema });
259
+ if (!result.success) {
260
+ throw chunkCB575O6L_cjs.TypeValidationError.wrap({ value, cause: result.error });
261
+ }
262
+ return result.value;
263
+ }
264
+ async function safeValidateTypes({
265
+ value,
266
+ schema
267
+ }) {
268
+ const validator2 = asValidator(schema);
269
+ try {
270
+ if (validator2.validate == null) {
271
+ return { success: true, value, rawValue: value };
272
+ }
273
+ const result = await validator2.validate(value);
274
+ if (result.success) {
275
+ return { success: true, value: result.value, rawValue: value };
276
+ }
277
+ return {
278
+ success: false,
279
+ error: chunkCB575O6L_cjs.TypeValidationError.wrap({ value, cause: result.error }),
280
+ rawValue: value
281
+ };
282
+ } catch (error) {
283
+ return {
284
+ success: false,
285
+ error: chunkCB575O6L_cjs.TypeValidationError.wrap({ value, cause: error }),
286
+ rawValue: value
287
+ };
288
+ }
289
+ }
290
+ async function parseJSON({
291
+ text,
292
+ schema
293
+ }) {
294
+ try {
295
+ const value = secureJsonParse(text);
296
+ if (schema == null) {
297
+ return value;
298
+ }
299
+ return validateTypes({ value, schema });
300
+ } catch (error) {
301
+ if (chunkCB575O6L_cjs.JSONParseError.isInstance(error) || chunkCB575O6L_cjs.TypeValidationError.isInstance(error)) {
302
+ throw error;
303
+ }
304
+ throw new chunkCB575O6L_cjs.JSONParseError({ text, cause: error });
305
+ }
306
+ }
307
+ async function safeParseJSON({
308
+ text,
309
+ schema
310
+ }) {
311
+ try {
312
+ const value = secureJsonParse(text);
313
+ if (schema == null) {
314
+ return { success: true, value, rawValue: value };
315
+ }
316
+ return await safeValidateTypes({ value, schema });
317
+ } catch (error) {
318
+ return {
319
+ success: false,
320
+ error: chunkCB575O6L_cjs.JSONParseError.isInstance(error) ? error : new chunkCB575O6L_cjs.JSONParseError({ text, cause: error }),
321
+ rawValue: void 0
322
+ };
323
+ }
324
+ }
325
+ function parseJsonEventStream({
326
+ stream,
327
+ schema
328
+ }) {
329
+ return stream.pipeThrough(new TextDecoderStream()).pipeThrough(new chunkCB575O6L_cjs.EventSourceParserStream()).pipeThrough(
330
+ new TransformStream({
331
+ async transform({ data }, controller) {
332
+ if (data === "[DONE]") {
333
+ return;
334
+ }
335
+ controller.enqueue(await safeParseJSON({ text: data, schema }));
336
+ }
337
+ })
338
+ );
339
+ }
340
+ async function parseProviderOptions2({
341
+ provider,
342
+ providerOptions,
343
+ schema
344
+ }) {
345
+ if ((providerOptions == null ? void 0 : providerOptions[provider]) == null) {
346
+ return void 0;
347
+ }
348
+ const parsedProviderOptions = await safeValidateTypes({
349
+ value: providerOptions[provider],
350
+ schema
351
+ });
352
+ if (!parsedProviderOptions.success) {
353
+ throw new chunkCB575O6L_cjs.InvalidArgumentError({
354
+ argument: "providerOptions",
355
+ message: `invalid ${provider} provider options`,
356
+ cause: parsedProviderOptions.error
357
+ });
358
+ }
359
+ return parsedProviderOptions.value;
360
+ }
361
+ var getOriginalFetch2 = () => globalThis.fetch;
362
+ var postJsonToApi2 = async ({
363
+ url,
364
+ headers,
365
+ body,
366
+ failedResponseHandler,
367
+ successfulResponseHandler,
368
+ abortSignal,
369
+ fetch: fetch2
370
+ }) => postToApi({
371
+ url,
372
+ headers: {
373
+ "Content-Type": "application/json",
374
+ ...headers
375
+ },
376
+ body: {
377
+ content: JSON.stringify(body),
378
+ values: body
379
+ },
380
+ failedResponseHandler,
381
+ successfulResponseHandler,
382
+ abortSignal,
383
+ fetch: fetch2
384
+ });
385
+ var postToApi = async ({
386
+ url,
387
+ headers = {},
388
+ body,
389
+ successfulResponseHandler,
390
+ failedResponseHandler,
391
+ abortSignal,
392
+ fetch: fetch2 = getOriginalFetch2()
393
+ }) => {
394
+ try {
395
+ const response = await fetch2(url, {
396
+ method: "POST",
397
+ headers: withUserAgentSuffix2(
398
+ headers,
399
+ `ai-sdk/provider-utils/${VERSION}`,
400
+ getRuntimeEnvironmentUserAgent()
401
+ ),
402
+ body: body.content,
403
+ signal: abortSignal
404
+ });
405
+ const responseHeaders = extractResponseHeaders(response);
406
+ if (!response.ok) {
407
+ let errorInformation;
408
+ try {
409
+ errorInformation = await failedResponseHandler({
410
+ response,
411
+ url,
412
+ requestBodyValues: body.values
413
+ });
414
+ } catch (error) {
415
+ if (isAbortError(error) || chunkCB575O6L_cjs.APICallError.isInstance(error)) {
416
+ throw error;
417
+ }
418
+ throw new chunkCB575O6L_cjs.APICallError({
419
+ message: "Failed to process error response",
420
+ cause: error,
421
+ statusCode: response.status,
422
+ url,
423
+ responseHeaders,
424
+ requestBodyValues: body.values
425
+ });
426
+ }
427
+ throw errorInformation.value;
428
+ }
429
+ try {
430
+ return await successfulResponseHandler({
431
+ response,
432
+ url,
433
+ requestBodyValues: body.values
434
+ });
435
+ } catch (error) {
436
+ if (error instanceof Error) {
437
+ if (isAbortError(error) || chunkCB575O6L_cjs.APICallError.isInstance(error)) {
438
+ throw error;
439
+ }
440
+ }
441
+ throw new chunkCB575O6L_cjs.APICallError({
442
+ message: "Failed to process successful response",
443
+ cause: error,
444
+ statusCode: response.status,
445
+ url,
446
+ responseHeaders,
447
+ requestBodyValues: body.values
448
+ });
449
+ }
450
+ } catch (error) {
451
+ throw handleFetchError({ error, url, requestBodyValues: body.values });
452
+ }
453
+ };
454
+ var createJsonErrorResponseHandler2 = ({
455
+ errorSchema,
456
+ errorToMessage,
457
+ isRetryable
458
+ }) => async ({ response, url, requestBodyValues }) => {
459
+ const responseBody = await response.text();
460
+ const responseHeaders = extractResponseHeaders(response);
461
+ if (responseBody.trim() === "") {
462
+ return {
463
+ responseHeaders,
464
+ value: new chunkCB575O6L_cjs.APICallError({
465
+ message: response.statusText,
466
+ url,
467
+ requestBodyValues,
468
+ statusCode: response.status,
469
+ responseHeaders,
470
+ responseBody,
471
+ isRetryable: isRetryable == null ? void 0 : isRetryable(response)
472
+ })
473
+ };
474
+ }
475
+ try {
476
+ const parsedError = await parseJSON({
477
+ text: responseBody,
478
+ schema: errorSchema
479
+ });
480
+ return {
481
+ responseHeaders,
482
+ value: new chunkCB575O6L_cjs.APICallError({
483
+ message: errorToMessage(parsedError),
484
+ url,
485
+ requestBodyValues,
486
+ statusCode: response.status,
487
+ responseHeaders,
488
+ responseBody,
489
+ data: parsedError,
490
+ isRetryable: isRetryable == null ? void 0 : isRetryable(response, parsedError)
491
+ })
492
+ };
493
+ } catch (parseError) {
494
+ return {
495
+ responseHeaders,
496
+ value: new chunkCB575O6L_cjs.APICallError({
497
+ message: response.statusText,
498
+ url,
499
+ requestBodyValues,
500
+ statusCode: response.status,
501
+ responseHeaders,
502
+ responseBody,
503
+ isRetryable: isRetryable == null ? void 0 : isRetryable(response)
504
+ })
505
+ };
506
+ }
507
+ };
508
+ var createEventSourceResponseHandler2 = (chunkSchema) => async ({ response }) => {
509
+ const responseHeaders = extractResponseHeaders(response);
510
+ if (response.body == null) {
511
+ throw new chunkCB575O6L_cjs.EmptyResponseBodyError({});
512
+ }
513
+ return {
514
+ responseHeaders,
515
+ value: parseJsonEventStream({
516
+ stream: response.body,
517
+ schema: chunkSchema
518
+ })
519
+ };
520
+ };
521
+ var createJsonResponseHandler2 = (responseSchema) => async ({ response, url, requestBodyValues }) => {
522
+ const responseBody = await response.text();
523
+ const parsedResult = await safeParseJSON({
524
+ text: responseBody,
525
+ schema: responseSchema
526
+ });
527
+ const responseHeaders = extractResponseHeaders(response);
528
+ if (!parsedResult.success) {
529
+ throw new chunkCB575O6L_cjs.APICallError({
530
+ message: "Invalid JSON response",
531
+ cause: parsedResult.error,
532
+ statusCode: response.status,
533
+ responseHeaders,
534
+ responseBody,
535
+ url,
536
+ requestBodyValues
537
+ });
538
+ }
539
+ return {
540
+ responseHeaders,
541
+ value: parsedResult.value,
542
+ rawValue: parsedResult.rawValue
543
+ };
544
+ };
545
+ new Set(
546
+ "ABCDEFGHIJKLMNOPQRSTUVXYZabcdefghijklmnopqrstuvxyz0123456789"
547
+ );
548
+ var { btoa} = globalThis;
549
+ function convertUint8ArrayToBase64(array) {
550
+ let latin1string = "";
551
+ for (let i = 0; i < array.length; i++) {
552
+ latin1string += String.fromCodePoint(array[i]);
553
+ }
554
+ return btoa(latin1string);
555
+ }
556
+ function convertToBase642(value) {
557
+ return value instanceof Uint8Array ? convertUint8ArrayToBase64(value) : value;
558
+ }
559
+ function withoutTrailingSlash2(url) {
560
+ return url == null ? void 0 : url.replace(/\/$/, "");
561
+ }
562
+ function convertToMistralChatMessages(prompt) {
563
+ const messages = [];
564
+ for (let i = 0; i < prompt.length; i++) {
565
+ const { role, content } = prompt[i];
566
+ const isLastMessage = i === prompt.length - 1;
567
+ switch (role) {
568
+ case "system": {
569
+ messages.push({ role: "system", content });
570
+ break;
571
+ }
572
+ case "user": {
573
+ messages.push({
574
+ role: "user",
575
+ content: content.map((part) => {
576
+ switch (part.type) {
577
+ case "text": {
578
+ return { type: "text", text: part.text };
579
+ }
580
+ case "file": {
581
+ if (part.mediaType.startsWith("image/")) {
582
+ const mediaType = part.mediaType === "image/*" ? "image/jpeg" : part.mediaType;
583
+ return {
584
+ type: "image_url",
585
+ image_url: part.data instanceof URL ? part.data.toString() : `data:${mediaType};base64,${convertToBase642(part.data)}`
586
+ };
587
+ } else if (part.mediaType === "application/pdf") {
588
+ return {
589
+ type: "document_url",
590
+ document_url: part.data.toString()
591
+ };
592
+ } else {
593
+ throw new chunkCB575O6L_cjs.UnsupportedFunctionalityError({
594
+ functionality: "Only images and PDF file parts are supported"
595
+ });
596
+ }
597
+ }
598
+ }
599
+ })
600
+ });
601
+ break;
602
+ }
603
+ case "assistant": {
604
+ let text = "";
605
+ const toolCalls = [];
606
+ for (const part of content) {
607
+ switch (part.type) {
608
+ case "text": {
609
+ text += part.text;
610
+ break;
611
+ }
612
+ case "tool-call": {
613
+ toolCalls.push({
614
+ id: part.toolCallId,
615
+ type: "function",
616
+ function: {
617
+ name: part.toolName,
618
+ arguments: JSON.stringify(part.input)
619
+ }
620
+ });
621
+ break;
622
+ }
623
+ case "reasoning": {
624
+ text += part.text;
625
+ break;
626
+ }
627
+ default: {
628
+ throw new Error(
629
+ `Unsupported content type in assistant message: ${part.type}`
630
+ );
631
+ }
632
+ }
633
+ }
634
+ messages.push({
635
+ role: "assistant",
636
+ content: text,
637
+ prefix: isLastMessage ? true : void 0,
638
+ tool_calls: toolCalls.length > 0 ? toolCalls : void 0
639
+ });
640
+ break;
641
+ }
642
+ case "tool": {
643
+ for (const toolResponse of content) {
644
+ const output = toolResponse.output;
645
+ let contentValue;
646
+ switch (output.type) {
647
+ case "text":
648
+ case "error-text":
649
+ contentValue = output.value;
650
+ break;
651
+ case "content":
652
+ case "json":
653
+ case "error-json":
654
+ contentValue = JSON.stringify(output.value);
655
+ break;
656
+ }
657
+ messages.push({
658
+ role: "tool",
659
+ name: toolResponse.toolName,
660
+ tool_call_id: toolResponse.toolCallId,
661
+ content: contentValue
662
+ });
663
+ }
664
+ break;
665
+ }
666
+ default: {
667
+ const _exhaustiveCheck = role;
668
+ throw new Error(`Unsupported role: ${_exhaustiveCheck}`);
669
+ }
670
+ }
671
+ }
672
+ return messages;
673
+ }
674
+ function getResponseMetadata({
675
+ id,
676
+ model,
677
+ created
678
+ }) {
679
+ return {
680
+ id: id != null ? id : void 0,
681
+ modelId: model != null ? model : void 0,
682
+ timestamp: created != null ? new Date(created * 1e3) : void 0
683
+ };
684
+ }
685
+ function mapMistralFinishReason(finishReason) {
686
+ switch (finishReason) {
687
+ case "stop":
688
+ return "stop";
689
+ case "length":
690
+ case "model_length":
691
+ return "length";
692
+ case "tool_calls":
693
+ return "tool-calls";
694
+ default:
695
+ return "unknown";
696
+ }
697
+ }
698
+ var mistralLanguageModelOptions = v4.z.object({
699
+ /**
700
+ Whether to inject a safety prompt before all conversations.
701
+
702
+ Defaults to `false`.
703
+ */
704
+ safePrompt: v4.z.boolean().optional(),
705
+ documentImageLimit: v4.z.number().optional(),
706
+ documentPageLimit: v4.z.number().optional(),
707
+ /**
708
+ * Whether to use structured outputs.
709
+ *
710
+ * @default true
711
+ */
712
+ structuredOutputs: v4.z.boolean().optional(),
713
+ /**
714
+ * Whether to use strict JSON schema validation.
715
+ *
716
+ * @default false
717
+ */
718
+ strictJsonSchema: v4.z.boolean().optional(),
719
+ /**
720
+ * Whether to enable parallel function calling during tool use.
721
+ * When set to false, the model will use at most one tool per response.
722
+ *
723
+ * @default true
724
+ */
725
+ parallelToolCalls: v4.z.boolean().optional()
726
+ });
727
+ var mistralErrorDataSchema = v4.z.object({
728
+ object: v4.z.literal("error"),
729
+ message: v4.z.string(),
730
+ type: v4.z.string(),
731
+ param: v4.z.string().nullable(),
732
+ code: v4.z.string().nullable()
733
+ });
734
+ var mistralFailedResponseHandler = createJsonErrorResponseHandler2({
735
+ errorSchema: mistralErrorDataSchema,
736
+ errorToMessage: (data) => data.message
737
+ });
738
+ function prepareTools({
739
+ tools,
740
+ toolChoice
741
+ }) {
742
+ tools = (tools == null ? void 0 : tools.length) ? tools : void 0;
743
+ const toolWarnings = [];
744
+ if (tools == null) {
745
+ return { tools: void 0, toolChoice: void 0, toolWarnings };
746
+ }
747
+ const mistralTools = [];
748
+ for (const tool of tools) {
749
+ if (tool.type === "provider-defined") {
750
+ toolWarnings.push({ type: "unsupported-tool", tool });
751
+ } else {
752
+ mistralTools.push({
753
+ type: "function",
754
+ function: {
755
+ name: tool.name,
756
+ description: tool.description,
757
+ parameters: tool.inputSchema
758
+ }
759
+ });
760
+ }
761
+ }
762
+ if (toolChoice == null) {
763
+ return { tools: mistralTools, toolChoice: void 0, toolWarnings };
764
+ }
765
+ const type = toolChoice.type;
766
+ switch (type) {
767
+ case "auto":
768
+ case "none":
769
+ return { tools: mistralTools, toolChoice: type, toolWarnings };
770
+ case "required":
771
+ return { tools: mistralTools, toolChoice: "any", toolWarnings };
772
+ // mistral does not support tool mode directly,
773
+ // so we filter the tools and force the tool choice through 'any'
774
+ case "tool":
775
+ return {
776
+ tools: mistralTools.filter(
777
+ (tool) => tool.function.name === toolChoice.toolName
778
+ ),
779
+ toolChoice: "any",
780
+ toolWarnings
781
+ };
782
+ default: {
783
+ const _exhaustiveCheck = type;
784
+ throw new chunkCB575O6L_cjs.UnsupportedFunctionalityError({
785
+ functionality: `tool choice type: ${_exhaustiveCheck}`
786
+ });
787
+ }
788
+ }
789
+ }
790
+ var MistralChatLanguageModel = class {
791
+ constructor(modelId, config) {
792
+ this.specificationVersion = "v2";
793
+ this.supportedUrls = {
794
+ "application/pdf": [/^https:\/\/.*$/]
795
+ };
796
+ var _a;
797
+ this.modelId = modelId;
798
+ this.config = config;
799
+ this.generateId = (_a = config.generateId) != null ? _a : generateId2;
800
+ }
801
+ get provider() {
802
+ return this.config.provider;
803
+ }
804
+ async getArgs({
805
+ prompt,
806
+ maxOutputTokens,
807
+ temperature,
808
+ topP,
809
+ topK,
810
+ frequencyPenalty,
811
+ presencePenalty,
812
+ stopSequences,
813
+ responseFormat,
814
+ seed,
815
+ providerOptions,
816
+ tools,
817
+ toolChoice
818
+ }) {
819
+ var _a, _b, _c, _d;
820
+ const warnings = [];
821
+ const options = (_a = await parseProviderOptions2({
822
+ provider: "mistral",
823
+ providerOptions,
824
+ schema: mistralLanguageModelOptions
825
+ })) != null ? _a : {};
826
+ if (topK != null) {
827
+ warnings.push({
828
+ type: "unsupported-setting",
829
+ setting: "topK"
830
+ });
831
+ }
832
+ if (frequencyPenalty != null) {
833
+ warnings.push({
834
+ type: "unsupported-setting",
835
+ setting: "frequencyPenalty"
836
+ });
837
+ }
838
+ if (presencePenalty != null) {
839
+ warnings.push({
840
+ type: "unsupported-setting",
841
+ setting: "presencePenalty"
842
+ });
843
+ }
844
+ if (stopSequences != null) {
845
+ warnings.push({
846
+ type: "unsupported-setting",
847
+ setting: "stopSequences"
848
+ });
849
+ }
850
+ const structuredOutputs = (_b = options.structuredOutputs) != null ? _b : true;
851
+ const strictJsonSchema = (_c = options.strictJsonSchema) != null ? _c : false;
852
+ if ((responseFormat == null ? void 0 : responseFormat.type) === "json" && !(responseFormat == null ? void 0 : responseFormat.schema)) {
853
+ prompt = injectJsonInstructionIntoMessages({
854
+ messages: prompt,
855
+ schema: responseFormat.schema
856
+ });
857
+ }
858
+ const baseArgs = {
859
+ // model id:
860
+ model: this.modelId,
861
+ // model specific settings:
862
+ safe_prompt: options.safePrompt,
863
+ // standardized settings:
864
+ max_tokens: maxOutputTokens,
865
+ temperature,
866
+ top_p: topP,
867
+ random_seed: seed,
868
+ // response format:
869
+ response_format: (responseFormat == null ? void 0 : responseFormat.type) === "json" ? structuredOutputs && (responseFormat == null ? void 0 : responseFormat.schema) != null ? {
870
+ type: "json_schema",
871
+ json_schema: {
872
+ schema: responseFormat.schema,
873
+ strict: strictJsonSchema,
874
+ name: (_d = responseFormat.name) != null ? _d : "response",
875
+ description: responseFormat.description
876
+ }
877
+ } : { type: "json_object" } : void 0,
878
+ // mistral-specific provider options:
879
+ document_image_limit: options.documentImageLimit,
880
+ document_page_limit: options.documentPageLimit,
881
+ // messages:
882
+ messages: convertToMistralChatMessages(prompt)
883
+ };
884
+ const {
885
+ tools: mistralTools,
886
+ toolChoice: mistralToolChoice,
887
+ toolWarnings
888
+ } = prepareTools({
889
+ tools,
890
+ toolChoice
891
+ });
892
+ return {
893
+ args: {
894
+ ...baseArgs,
895
+ tools: mistralTools,
896
+ tool_choice: mistralToolChoice,
897
+ ...mistralTools != null && options.parallelToolCalls !== void 0 ? { parallel_tool_calls: options.parallelToolCalls } : {}
898
+ },
899
+ warnings: [...warnings, ...toolWarnings]
900
+ };
901
+ }
902
+ async doGenerate(options) {
903
+ const { args: body, warnings } = await this.getArgs(options);
904
+ const {
905
+ responseHeaders,
906
+ value: response,
907
+ rawValue: rawResponse
908
+ } = await postJsonToApi2({
909
+ url: `${this.config.baseURL}/chat/completions`,
910
+ headers: combineHeaders2(this.config.headers(), options.headers),
911
+ body,
912
+ failedResponseHandler: mistralFailedResponseHandler,
913
+ successfulResponseHandler: createJsonResponseHandler2(
914
+ mistralChatResponseSchema
915
+ ),
916
+ abortSignal: options.abortSignal,
917
+ fetch: this.config.fetch
918
+ });
919
+ const choice = response.choices[0];
920
+ const content = [];
921
+ if (choice.message.content != null && Array.isArray(choice.message.content)) {
922
+ for (const part of choice.message.content) {
923
+ if (part.type === "thinking") {
924
+ const reasoningText = extractReasoningContent(part.thinking);
925
+ if (reasoningText.length > 0) {
926
+ content.push({ type: "reasoning", text: reasoningText });
927
+ }
928
+ } else if (part.type === "text") {
929
+ if (part.text.length > 0) {
930
+ content.push({ type: "text", text: part.text });
931
+ }
932
+ }
933
+ }
934
+ } else {
935
+ const text = extractTextContent(choice.message.content);
936
+ if (text != null && text.length > 0) {
937
+ content.push({ type: "text", text });
938
+ }
939
+ }
940
+ if (choice.message.tool_calls != null) {
941
+ for (const toolCall of choice.message.tool_calls) {
942
+ content.push({
943
+ type: "tool-call",
944
+ toolCallId: toolCall.id,
945
+ toolName: toolCall.function.name,
946
+ input: toolCall.function.arguments
947
+ });
948
+ }
949
+ }
950
+ return {
951
+ content,
952
+ finishReason: mapMistralFinishReason(choice.finish_reason),
953
+ usage: {
954
+ inputTokens: response.usage.prompt_tokens,
955
+ outputTokens: response.usage.completion_tokens,
956
+ totalTokens: response.usage.total_tokens
957
+ },
958
+ request: { body },
959
+ response: {
960
+ ...getResponseMetadata(response),
961
+ headers: responseHeaders,
962
+ body: rawResponse
963
+ },
964
+ warnings
965
+ };
966
+ }
967
+ async doStream(options) {
968
+ const { args, warnings } = await this.getArgs(options);
969
+ const body = { ...args, stream: true };
970
+ const { responseHeaders, value: response } = await postJsonToApi2({
971
+ url: `${this.config.baseURL}/chat/completions`,
972
+ headers: combineHeaders2(this.config.headers(), options.headers),
973
+ body,
974
+ failedResponseHandler: mistralFailedResponseHandler,
975
+ successfulResponseHandler: createEventSourceResponseHandler2(
976
+ mistralChatChunkSchema
977
+ ),
978
+ abortSignal: options.abortSignal,
979
+ fetch: this.config.fetch
980
+ });
981
+ let finishReason = "unknown";
982
+ const usage = {
983
+ inputTokens: void 0,
984
+ outputTokens: void 0,
985
+ totalTokens: void 0
986
+ };
987
+ let isFirstChunk = true;
988
+ let activeText = false;
989
+ let activeReasoningId = null;
990
+ const generateId22 = this.generateId;
991
+ return {
992
+ stream: response.pipeThrough(
993
+ new TransformStream({
994
+ start(controller) {
995
+ controller.enqueue({ type: "stream-start", warnings });
996
+ },
997
+ transform(chunk, controller) {
998
+ if (options.includeRawChunks) {
999
+ controller.enqueue({ type: "raw", rawValue: chunk.rawValue });
1000
+ }
1001
+ if (!chunk.success) {
1002
+ controller.enqueue({ type: "error", error: chunk.error });
1003
+ return;
1004
+ }
1005
+ const value = chunk.value;
1006
+ if (isFirstChunk) {
1007
+ isFirstChunk = false;
1008
+ controller.enqueue({
1009
+ type: "response-metadata",
1010
+ ...getResponseMetadata(value)
1011
+ });
1012
+ }
1013
+ if (value.usage != null) {
1014
+ usage.inputTokens = value.usage.prompt_tokens;
1015
+ usage.outputTokens = value.usage.completion_tokens;
1016
+ usage.totalTokens = value.usage.total_tokens;
1017
+ }
1018
+ const choice = value.choices[0];
1019
+ const delta = choice.delta;
1020
+ const textContent = extractTextContent(delta.content);
1021
+ if (delta.content != null && Array.isArray(delta.content)) {
1022
+ for (const part of delta.content) {
1023
+ if (part.type === "thinking") {
1024
+ const reasoningDelta = extractReasoningContent(part.thinking);
1025
+ if (reasoningDelta.length > 0) {
1026
+ if (activeReasoningId == null) {
1027
+ if (activeText) {
1028
+ controller.enqueue({ type: "text-end", id: "0" });
1029
+ activeText = false;
1030
+ }
1031
+ activeReasoningId = generateId22();
1032
+ controller.enqueue({
1033
+ type: "reasoning-start",
1034
+ id: activeReasoningId
1035
+ });
1036
+ }
1037
+ controller.enqueue({
1038
+ type: "reasoning-delta",
1039
+ id: activeReasoningId,
1040
+ delta: reasoningDelta
1041
+ });
1042
+ }
1043
+ }
1044
+ }
1045
+ }
1046
+ if (textContent != null && textContent.length > 0) {
1047
+ if (!activeText) {
1048
+ if (activeReasoningId != null) {
1049
+ controller.enqueue({
1050
+ type: "reasoning-end",
1051
+ id: activeReasoningId
1052
+ });
1053
+ activeReasoningId = null;
1054
+ }
1055
+ controller.enqueue({ type: "text-start", id: "0" });
1056
+ activeText = true;
1057
+ }
1058
+ controller.enqueue({
1059
+ type: "text-delta",
1060
+ id: "0",
1061
+ delta: textContent
1062
+ });
1063
+ }
1064
+ if ((delta == null ? void 0 : delta.tool_calls) != null) {
1065
+ for (const toolCall of delta.tool_calls) {
1066
+ const toolCallId = toolCall.id;
1067
+ const toolName = toolCall.function.name;
1068
+ const input = toolCall.function.arguments;
1069
+ controller.enqueue({
1070
+ type: "tool-input-start",
1071
+ id: toolCallId,
1072
+ toolName
1073
+ });
1074
+ controller.enqueue({
1075
+ type: "tool-input-delta",
1076
+ id: toolCallId,
1077
+ delta: input
1078
+ });
1079
+ controller.enqueue({
1080
+ type: "tool-input-end",
1081
+ id: toolCallId
1082
+ });
1083
+ controller.enqueue({
1084
+ type: "tool-call",
1085
+ toolCallId,
1086
+ toolName,
1087
+ input
1088
+ });
1089
+ }
1090
+ }
1091
+ if (choice.finish_reason != null) {
1092
+ finishReason = mapMistralFinishReason(choice.finish_reason);
1093
+ }
1094
+ },
1095
+ flush(controller) {
1096
+ if (activeReasoningId != null) {
1097
+ controller.enqueue({
1098
+ type: "reasoning-end",
1099
+ id: activeReasoningId
1100
+ });
1101
+ }
1102
+ if (activeText) {
1103
+ controller.enqueue({ type: "text-end", id: "0" });
1104
+ }
1105
+ controller.enqueue({
1106
+ type: "finish",
1107
+ finishReason,
1108
+ usage
1109
+ });
1110
+ }
1111
+ })
1112
+ ),
1113
+ request: { body },
1114
+ response: { headers: responseHeaders }
1115
+ };
1116
+ }
1117
+ };
1118
+ function extractReasoningContent(thinking) {
1119
+ return thinking.filter((chunk) => chunk.type === "text").map((chunk) => chunk.text).join("");
1120
+ }
1121
+ function extractTextContent(content) {
1122
+ if (typeof content === "string") {
1123
+ return content;
1124
+ }
1125
+ if (content == null) {
1126
+ return void 0;
1127
+ }
1128
+ const textContent = [];
1129
+ for (const chunk of content) {
1130
+ const { type } = chunk;
1131
+ switch (type) {
1132
+ case "text":
1133
+ textContent.push(chunk.text);
1134
+ break;
1135
+ case "thinking":
1136
+ case "image_url":
1137
+ case "reference":
1138
+ break;
1139
+ default: {
1140
+ const _exhaustiveCheck = type;
1141
+ throw new Error(`Unsupported type: ${_exhaustiveCheck}`);
1142
+ }
1143
+ }
1144
+ }
1145
+ return textContent.length ? textContent.join("") : void 0;
1146
+ }
1147
+ var mistralContentSchema = v4.z.union([
1148
+ v4.z.string(),
1149
+ v4.z.array(
1150
+ v4.z.discriminatedUnion("type", [
1151
+ v4.z.object({
1152
+ type: v4.z.literal("text"),
1153
+ text: v4.z.string()
1154
+ }),
1155
+ v4.z.object({
1156
+ type: v4.z.literal("image_url"),
1157
+ image_url: v4.z.union([
1158
+ v4.z.string(),
1159
+ v4.z.object({
1160
+ url: v4.z.string(),
1161
+ detail: v4.z.string().nullable()
1162
+ })
1163
+ ])
1164
+ }),
1165
+ v4.z.object({
1166
+ type: v4.z.literal("reference"),
1167
+ reference_ids: v4.z.array(v4.z.number())
1168
+ }),
1169
+ v4.z.object({
1170
+ type: v4.z.literal("thinking"),
1171
+ thinking: v4.z.array(
1172
+ v4.z.object({
1173
+ type: v4.z.literal("text"),
1174
+ text: v4.z.string()
1175
+ })
1176
+ )
1177
+ })
1178
+ ])
1179
+ )
1180
+ ]).nullish();
1181
+ var mistralUsageSchema = v4.z.object({
1182
+ prompt_tokens: v4.z.number(),
1183
+ completion_tokens: v4.z.number(),
1184
+ total_tokens: v4.z.number()
1185
+ });
1186
+ var mistralChatResponseSchema = v4.z.object({
1187
+ id: v4.z.string().nullish(),
1188
+ created: v4.z.number().nullish(),
1189
+ model: v4.z.string().nullish(),
1190
+ choices: v4.z.array(
1191
+ v4.z.object({
1192
+ message: v4.z.object({
1193
+ role: v4.z.literal("assistant"),
1194
+ content: mistralContentSchema,
1195
+ tool_calls: v4.z.array(
1196
+ v4.z.object({
1197
+ id: v4.z.string(),
1198
+ function: v4.z.object({ name: v4.z.string(), arguments: v4.z.string() })
1199
+ })
1200
+ ).nullish()
1201
+ }),
1202
+ index: v4.z.number(),
1203
+ finish_reason: v4.z.string().nullish()
1204
+ })
1205
+ ),
1206
+ object: v4.z.literal("chat.completion"),
1207
+ usage: mistralUsageSchema
1208
+ });
1209
+ var mistralChatChunkSchema = v4.z.object({
1210
+ id: v4.z.string().nullish(),
1211
+ created: v4.z.number().nullish(),
1212
+ model: v4.z.string().nullish(),
1213
+ choices: v4.z.array(
1214
+ v4.z.object({
1215
+ delta: v4.z.object({
1216
+ role: v4.z.enum(["assistant"]).optional(),
1217
+ content: mistralContentSchema,
1218
+ tool_calls: v4.z.array(
1219
+ v4.z.object({
1220
+ id: v4.z.string(),
1221
+ function: v4.z.object({ name: v4.z.string(), arguments: v4.z.string() })
1222
+ })
1223
+ ).nullish()
1224
+ }),
1225
+ finish_reason: v4.z.string().nullish(),
1226
+ index: v4.z.number()
1227
+ })
1228
+ ),
1229
+ usage: mistralUsageSchema.nullish()
1230
+ });
1231
+ var MistralEmbeddingModel = class {
1232
+ constructor(modelId, config) {
1233
+ this.specificationVersion = "v2";
1234
+ this.maxEmbeddingsPerCall = 32;
1235
+ this.supportsParallelCalls = false;
1236
+ this.modelId = modelId;
1237
+ this.config = config;
1238
+ }
1239
+ get provider() {
1240
+ return this.config.provider;
1241
+ }
1242
+ async doEmbed({
1243
+ values,
1244
+ abortSignal,
1245
+ headers
1246
+ }) {
1247
+ if (values.length > this.maxEmbeddingsPerCall) {
1248
+ throw new chunkCB575O6L_cjs.TooManyEmbeddingValuesForCallError({
1249
+ provider: this.provider,
1250
+ modelId: this.modelId,
1251
+ maxEmbeddingsPerCall: this.maxEmbeddingsPerCall,
1252
+ values
1253
+ });
1254
+ }
1255
+ const {
1256
+ responseHeaders,
1257
+ value: response,
1258
+ rawValue
1259
+ } = await postJsonToApi2({
1260
+ url: `${this.config.baseURL}/embeddings`,
1261
+ headers: combineHeaders2(this.config.headers(), headers),
1262
+ body: {
1263
+ model: this.modelId,
1264
+ input: values,
1265
+ encoding_format: "float"
1266
+ },
1267
+ failedResponseHandler: mistralFailedResponseHandler,
1268
+ successfulResponseHandler: createJsonResponseHandler2(
1269
+ MistralTextEmbeddingResponseSchema
1270
+ ),
1271
+ abortSignal,
1272
+ fetch: this.config.fetch
1273
+ });
1274
+ return {
1275
+ embeddings: response.data.map((item) => item.embedding),
1276
+ usage: response.usage ? { tokens: response.usage.prompt_tokens } : void 0,
1277
+ response: { headers: responseHeaders, body: rawValue }
1278
+ };
1279
+ }
1280
+ };
1281
+ var MistralTextEmbeddingResponseSchema = v4.z.object({
1282
+ data: v4.z.array(v4.z.object({ embedding: v4.z.array(v4.z.number()) })),
1283
+ usage: v4.z.object({ prompt_tokens: v4.z.number() }).nullish()
1284
+ });
1285
+ var VERSION2 = "2.0.23" ;
1286
+ function createMistral(options = {}) {
1287
+ var _a;
1288
+ const baseURL = (_a = withoutTrailingSlash2(options.baseURL)) != null ? _a : "https://api.mistral.ai/v1";
1289
+ const getHeaders = () => withUserAgentSuffix2(
1290
+ {
1291
+ Authorization: `Bearer ${loadApiKey2({
1292
+ apiKey: options.apiKey,
1293
+ environmentVariableName: "MISTRAL_API_KEY",
1294
+ description: "Mistral"
1295
+ })}`,
1296
+ ...options.headers
1297
+ },
1298
+ `ai-sdk/mistral/${VERSION2}`
1299
+ );
1300
+ const createChatModel = (modelId) => new MistralChatLanguageModel(modelId, {
1301
+ provider: "mistral.chat",
1302
+ baseURL,
1303
+ headers: getHeaders,
1304
+ fetch: options.fetch,
1305
+ generateId: options.generateId
1306
+ });
1307
+ const createEmbeddingModel = (modelId) => new MistralEmbeddingModel(modelId, {
1308
+ provider: "mistral.embedding",
1309
+ baseURL,
1310
+ headers: getHeaders,
1311
+ fetch: options.fetch
1312
+ });
1313
+ const provider = function(modelId) {
1314
+ if (new.target) {
1315
+ throw new Error(
1316
+ "The Mistral model function cannot be called with the new keyword."
1317
+ );
1318
+ }
1319
+ return createChatModel(modelId);
1320
+ };
1321
+ provider.languageModel = createChatModel;
1322
+ provider.chat = createChatModel;
1323
+ provider.embedding = createEmbeddingModel;
1324
+ provider.textEmbedding = createEmbeddingModel;
1325
+ provider.textEmbeddingModel = createEmbeddingModel;
1326
+ provider.imageModel = (modelId) => {
1327
+ throw new chunkCB575O6L_cjs.NoSuchModelError({ modelId, modelType: "imageModel" });
1328
+ };
1329
+ return provider;
1330
+ }
1331
+ createMistral();
1332
+ function convertToXaiChatMessages(prompt) {
1333
+ const messages = [];
1334
+ const warnings = [];
1335
+ for (const { role, content } of prompt) {
1336
+ switch (role) {
1337
+ case "system": {
1338
+ messages.push({ role: "system", content });
1339
+ break;
1340
+ }
1341
+ case "user": {
1342
+ if (content.length === 1 && content[0].type === "text") {
1343
+ messages.push({ role: "user", content: content[0].text });
1344
+ break;
1345
+ }
1346
+ messages.push({
1347
+ role: "user",
1348
+ content: content.map((part) => {
1349
+ switch (part.type) {
1350
+ case "text": {
1351
+ return { type: "text", text: part.text };
1352
+ }
1353
+ case "file": {
1354
+ if (part.mediaType.startsWith("image/")) {
1355
+ const mediaType = part.mediaType === "image/*" ? "image/jpeg" : part.mediaType;
1356
+ return {
1357
+ type: "image_url",
1358
+ image_url: {
1359
+ url: part.data instanceof URL ? part.data.toString() : `data:${mediaType};base64,${chunkCB575O6L_cjs.convertToBase64(part.data)}`
1360
+ }
1361
+ };
1362
+ } else {
1363
+ throw new chunkCB575O6L_cjs.UnsupportedFunctionalityError({
1364
+ functionality: `file part media type ${part.mediaType}`
1365
+ });
1366
+ }
1367
+ }
1368
+ }
1369
+ })
1370
+ });
1371
+ break;
1372
+ }
1373
+ case "assistant": {
1374
+ let text = "";
1375
+ const toolCalls = [];
1376
+ for (const part of content) {
1377
+ switch (part.type) {
1378
+ case "text": {
1379
+ text += part.text;
1380
+ break;
1381
+ }
1382
+ case "tool-call": {
1383
+ toolCalls.push({
1384
+ id: part.toolCallId,
1385
+ type: "function",
1386
+ function: {
1387
+ name: part.toolName,
1388
+ arguments: JSON.stringify(part.input)
1389
+ }
1390
+ });
1391
+ break;
1392
+ }
1393
+ }
1394
+ }
1395
+ messages.push({
1396
+ role: "assistant",
1397
+ content: text,
1398
+ tool_calls: toolCalls.length > 0 ? toolCalls : void 0
1399
+ });
1400
+ break;
1401
+ }
1402
+ case "tool": {
1403
+ for (const toolResponse of content) {
1404
+ const output = toolResponse.output;
1405
+ let contentValue;
1406
+ switch (output.type) {
1407
+ case "text":
1408
+ case "error-text":
1409
+ contentValue = output.value;
1410
+ break;
1411
+ case "content":
1412
+ case "json":
1413
+ case "error-json":
1414
+ contentValue = JSON.stringify(output.value);
1415
+ break;
1416
+ }
1417
+ messages.push({
1418
+ role: "tool",
1419
+ tool_call_id: toolResponse.toolCallId,
1420
+ content: contentValue
1421
+ });
1422
+ }
1423
+ break;
1424
+ }
1425
+ default: {
1426
+ const _exhaustiveCheck = role;
1427
+ throw new Error(`Unsupported role: ${_exhaustiveCheck}`);
1428
+ }
1429
+ }
1430
+ }
1431
+ return { messages, warnings };
1432
+ }
1433
+ function getResponseMetadata2({
1434
+ id,
1435
+ model,
1436
+ created
1437
+ }) {
1438
+ return {
1439
+ id: id != null ? id : void 0,
1440
+ modelId: model != null ? model : void 0,
1441
+ timestamp: created != null ? new Date(created * 1e3) : void 0
1442
+ };
1443
+ }
1444
+ function mapXaiFinishReason(finishReason) {
1445
+ switch (finishReason) {
1446
+ case "stop":
1447
+ return "stop";
1448
+ case "length":
1449
+ return "length";
1450
+ case "tool_calls":
1451
+ case "function_call":
1452
+ return "tool-calls";
1453
+ case "content_filter":
1454
+ return "content-filter";
1455
+ default:
1456
+ return "unknown";
1457
+ }
1458
+ }
1459
+ var webSourceSchema = v4.z.object({
1460
+ type: v4.z.literal("web"),
1461
+ country: v4.z.string().length(2).optional(),
1462
+ excludedWebsites: v4.z.array(v4.z.string()).max(5).optional(),
1463
+ allowedWebsites: v4.z.array(v4.z.string()).max(5).optional(),
1464
+ safeSearch: v4.z.boolean().optional()
1465
+ });
1466
+ var xSourceSchema = v4.z.object({
1467
+ type: v4.z.literal("x"),
1468
+ excludedXHandles: v4.z.array(v4.z.string()).optional(),
1469
+ includedXHandles: v4.z.array(v4.z.string()).optional(),
1470
+ postFavoriteCount: v4.z.number().int().optional(),
1471
+ postViewCount: v4.z.number().int().optional(),
1472
+ /**
1473
+ * @deprecated use `includedXHandles` instead
1474
+ */
1475
+ xHandles: v4.z.array(v4.z.string()).optional()
1476
+ });
1477
+ var newsSourceSchema = v4.z.object({
1478
+ type: v4.z.literal("news"),
1479
+ country: v4.z.string().length(2).optional(),
1480
+ excludedWebsites: v4.z.array(v4.z.string()).max(5).optional(),
1481
+ safeSearch: v4.z.boolean().optional()
1482
+ });
1483
+ var rssSourceSchema = v4.z.object({
1484
+ type: v4.z.literal("rss"),
1485
+ links: v4.z.array(v4.z.string().url()).max(1)
1486
+ // currently only supports one RSS link
1487
+ });
1488
+ var searchSourceSchema = v4.z.discriminatedUnion("type", [
1489
+ webSourceSchema,
1490
+ xSourceSchema,
1491
+ newsSourceSchema,
1492
+ rssSourceSchema
1493
+ ]);
1494
+ var xaiProviderOptions = v4.z.object({
1495
+ reasoningEffort: v4.z.enum(["low", "high"]).optional(),
1496
+ searchParameters: v4.z.object({
1497
+ /**
1498
+ * search mode preference
1499
+ * - "off": disables search completely
1500
+ * - "auto": model decides whether to search (default)
1501
+ * - "on": always enables search
1502
+ */
1503
+ mode: v4.z.enum(["off", "auto", "on"]),
1504
+ /**
1505
+ * whether to return citations in the response
1506
+ * defaults to true
1507
+ */
1508
+ returnCitations: v4.z.boolean().optional(),
1509
+ /**
1510
+ * start date for search data (ISO8601 format: YYYY-MM-DD)
1511
+ */
1512
+ fromDate: v4.z.string().optional(),
1513
+ /**
1514
+ * end date for search data (ISO8601 format: YYYY-MM-DD)
1515
+ */
1516
+ toDate: v4.z.string().optional(),
1517
+ /**
1518
+ * maximum number of search results to consider
1519
+ * defaults to 20
1520
+ */
1521
+ maxSearchResults: v4.z.number().min(1).max(50).optional(),
1522
+ /**
1523
+ * data sources to search from
1524
+ * defaults to ["web", "x"] if not specified
1525
+ */
1526
+ sources: v4.z.array(searchSourceSchema).optional()
1527
+ }).optional()
1528
+ });
1529
+ var xaiErrorDataSchema = v4.z.object({
1530
+ error: v4.z.object({
1531
+ message: v4.z.string(),
1532
+ type: v4.z.string().nullish(),
1533
+ param: v4.z.any().nullish(),
1534
+ code: v4.z.union([v4.z.string(), v4.z.number()]).nullish()
1535
+ })
1536
+ });
1537
+ var xaiFailedResponseHandler = chunkCB575O6L_cjs.createJsonErrorResponseHandler({
1538
+ errorSchema: xaiErrorDataSchema,
1539
+ errorToMessage: (data) => data.error.message
1540
+ });
1541
+ function prepareTools2({
1542
+ tools,
1543
+ toolChoice
1544
+ }) {
1545
+ tools = (tools == null ? void 0 : tools.length) ? tools : void 0;
1546
+ const toolWarnings = [];
1547
+ if (tools == null) {
1548
+ return { tools: void 0, toolChoice: void 0, toolWarnings };
1549
+ }
1550
+ const xaiTools = [];
1551
+ for (const tool of tools) {
1552
+ if (tool.type === "provider-defined") {
1553
+ toolWarnings.push({ type: "unsupported-tool", tool });
1554
+ } else {
1555
+ xaiTools.push({
1556
+ type: "function",
1557
+ function: {
1558
+ name: tool.name,
1559
+ description: tool.description,
1560
+ parameters: tool.inputSchema
1561
+ }
1562
+ });
1563
+ }
1564
+ }
1565
+ if (toolChoice == null) {
1566
+ return { tools: xaiTools, toolChoice: void 0, toolWarnings };
1567
+ }
1568
+ const type = toolChoice.type;
1569
+ switch (type) {
1570
+ case "auto":
1571
+ case "none":
1572
+ return { tools: xaiTools, toolChoice: type, toolWarnings };
1573
+ case "required":
1574
+ return { tools: xaiTools, toolChoice: "required", toolWarnings };
1575
+ case "tool":
+ return {
+ tools: xaiTools,
+ toolChoice: {
+ type: "function",
+ function: { name: toolChoice.toolName }
+ },
+ toolWarnings
+ };
+ default: {
+ const _exhaustiveCheck = type;
+ throw new chunkCB575O6L_cjs.UnsupportedFunctionalityError({
+ functionality: `tool choice type: ${_exhaustiveCheck}`
+ });
+ }
+ }
+ }
+ var XaiChatLanguageModel = class {
+ constructor(modelId, config) {
+ this.specificationVersion = "v2";
+ this.supportedUrls = {
+ "image/*": [/^https?:\/\/.*$/]
+ };
+ this.modelId = modelId;
+ this.config = config;
+ }
+ get provider() {
+ return this.config.provider;
+ }
+ async getArgs({
+ prompt,
+ maxOutputTokens,
+ temperature,
+ topP,
+ topK,
+ frequencyPenalty,
+ presencePenalty,
+ stopSequences,
+ seed,
+ responseFormat,
+ providerOptions,
+ tools,
+ toolChoice
+ }) {
+ var _a, _b, _c;
+ const warnings = [];
+ const options = (_a = await chunkCB575O6L_cjs.parseProviderOptions({
+ provider: "xai",
+ providerOptions,
+ schema: xaiProviderOptions
+ })) != null ? _a : {};
+ if (topK != null) {
+ warnings.push({
+ type: "unsupported-setting",
+ setting: "topK"
+ });
+ }
+ if (frequencyPenalty != null) {
+ warnings.push({
+ type: "unsupported-setting",
+ setting: "frequencyPenalty"
+ });
+ }
+ if (presencePenalty != null) {
+ warnings.push({
+ type: "unsupported-setting",
+ setting: "presencePenalty"
+ });
+ }
+ if (stopSequences != null) {
+ warnings.push({
+ type: "unsupported-setting",
+ setting: "stopSequences"
+ });
+ }
+ if (responseFormat != null && responseFormat.type === "json" && responseFormat.schema != null) {
+ warnings.push({
+ type: "unsupported-setting",
+ setting: "responseFormat",
+ details: "JSON response format schema is not supported"
+ });
+ }
+ const { messages, warnings: messageWarnings } = convertToXaiChatMessages(prompt);
+ warnings.push(...messageWarnings);
+ const {
+ tools: xaiTools,
+ toolChoice: xaiToolChoice,
+ toolWarnings
+ } = prepareTools2({
+ tools,
+ toolChoice
+ });
+ warnings.push(...toolWarnings);
+ const baseArgs = {
+ // model id
+ model: this.modelId,
+ // standard generation settings
+ max_tokens: maxOutputTokens,
+ temperature,
+ top_p: topP,
+ seed,
+ reasoning_effort: options.reasoningEffort,
+ // response format
+ response_format: (responseFormat == null ? void 0 : responseFormat.type) === "json" ? responseFormat.schema != null ? {
+ type: "json_schema",
+ json_schema: {
+ name: (_b = responseFormat.name) != null ? _b : "response",
+ schema: responseFormat.schema,
+ strict: true
+ }
+ } : { type: "json_object" } : void 0,
+ // search parameters
+ search_parameters: options.searchParameters ? {
+ mode: options.searchParameters.mode,
+ return_citations: options.searchParameters.returnCitations,
+ from_date: options.searchParameters.fromDate,
+ to_date: options.searchParameters.toDate,
+ max_search_results: options.searchParameters.maxSearchResults,
+ sources: (_c = options.searchParameters.sources) == null ? void 0 : _c.map((source) => {
+ var _a2;
+ return {
+ type: source.type,
+ ...source.type === "web" && {
+ country: source.country,
+ excluded_websites: source.excludedWebsites,
+ allowed_websites: source.allowedWebsites,
+ safe_search: source.safeSearch
+ },
+ ...source.type === "x" && {
+ excluded_x_handles: source.excludedXHandles,
+ included_x_handles: (_a2 = source.includedXHandles) != null ? _a2 : source.xHandles,
+ post_favorite_count: source.postFavoriteCount,
+ post_view_count: source.postViewCount
+ },
+ ...source.type === "news" && {
+ country: source.country,
+ excluded_websites: source.excludedWebsites,
+ safe_search: source.safeSearch
+ },
+ ...source.type === "rss" && {
+ links: source.links
+ }
+ };
+ })
+ } : void 0,
+ // messages in xai format
+ messages,
+ // tools in xai format
+ tools: xaiTools,
+ tool_choice: xaiToolChoice
+ };
+ return {
+ args: baseArgs,
+ warnings
+ };
+ }
+ async doGenerate(options) {
+ var _a, _b, _c;
+ const { args: body, warnings } = await this.getArgs(options);
+ const {
+ responseHeaders,
+ value: response,
+ rawValue: rawResponse
+ } = await chunkCB575O6L_cjs.postJsonToApi({
+ url: `${(_a = this.config.baseURL) != null ? _a : "https://api.x.ai/v1"}/chat/completions`,
+ headers: chunkCB575O6L_cjs.combineHeaders(this.config.headers(), options.headers),
+ body,
+ failedResponseHandler: xaiFailedResponseHandler,
+ successfulResponseHandler: chunkCB575O6L_cjs.createJsonResponseHandler(
+ xaiChatResponseSchema
+ ),
+ abortSignal: options.abortSignal,
+ fetch: this.config.fetch
+ });
+ const choice = response.choices[0];
+ const content = [];
+ if (choice.message.content != null && choice.message.content.length > 0) {
+ let text = choice.message.content;
+ const lastMessage = body.messages[body.messages.length - 1];
+ if ((lastMessage == null ? void 0 : lastMessage.role) === "assistant" && text === lastMessage.content) {
+ text = "";
+ }
+ if (text.length > 0) {
+ content.push({ type: "text", text });
+ }
+ }
+ if (choice.message.reasoning_content != null && choice.message.reasoning_content.length > 0) {
+ content.push({
+ type: "reasoning",
+ text: choice.message.reasoning_content
+ });
+ }
+ if (choice.message.tool_calls != null) {
+ for (const toolCall of choice.message.tool_calls) {
+ content.push({
+ type: "tool-call",
+ toolCallId: toolCall.id,
+ toolName: toolCall.function.name,
+ input: toolCall.function.arguments
+ });
+ }
+ }
+ if (response.citations != null) {
+ for (const url of response.citations) {
+ content.push({
+ type: "source",
+ sourceType: "url",
+ id: this.config.generateId(),
+ url
+ });
+ }
+ }
+ return {
+ content,
+ finishReason: mapXaiFinishReason(choice.finish_reason),
+ usage: {
+ inputTokens: response.usage.prompt_tokens,
+ outputTokens: response.usage.completion_tokens,
+ totalTokens: response.usage.total_tokens,
+ reasoningTokens: (_c = (_b = response.usage.completion_tokens_details) == null ? void 0 : _b.reasoning_tokens) != null ? _c : void 0
+ },
+ request: { body },
+ response: {
+ ...getResponseMetadata2(response),
+ headers: responseHeaders,
+ body: rawResponse
+ },
+ warnings
+ };
+ }
+ async doStream(options) {
+ var _a;
+ const { args, warnings } = await this.getArgs(options);
+ const body = {
+ ...args,
+ stream: true,
+ stream_options: {
+ include_usage: true
+ }
+ };
+ const { responseHeaders, value: response } = await chunkCB575O6L_cjs.postJsonToApi({
+ url: `${(_a = this.config.baseURL) != null ? _a : "https://api.x.ai/v1"}/chat/completions`,
+ headers: chunkCB575O6L_cjs.combineHeaders(this.config.headers(), options.headers),
+ body,
+ failedResponseHandler: xaiFailedResponseHandler,
+ successfulResponseHandler: chunkCB575O6L_cjs.createEventSourceResponseHandler(xaiChatChunkSchema),
+ abortSignal: options.abortSignal,
+ fetch: this.config.fetch
+ });
+ let finishReason = "unknown";
+ const usage = {
+ inputTokens: void 0,
+ outputTokens: void 0,
+ totalTokens: void 0
+ };
+ let isFirstChunk = true;
+ const contentBlocks = {};
+ const lastReasoningDeltas = {};
+ const self = this;
+ return {
+ stream: response.pipeThrough(
+ new TransformStream({
+ start(controller) {
+ controller.enqueue({ type: "stream-start", warnings });
+ },
+ transform(chunk, controller) {
+ var _a2, _b;
+ if (options.includeRawChunks) {
+ controller.enqueue({ type: "raw", rawValue: chunk.rawValue });
+ }
+ if (!chunk.success) {
+ controller.enqueue({ type: "error", error: chunk.error });
+ return;
+ }
+ const value = chunk.value;
+ if (isFirstChunk) {
+ controller.enqueue({
+ type: "response-metadata",
+ ...getResponseMetadata2(value)
+ });
+ isFirstChunk = false;
+ }
+ if (value.citations != null) {
+ for (const url of value.citations) {
+ controller.enqueue({
+ type: "source",
+ sourceType: "url",
+ id: self.config.generateId(),
+ url
+ });
+ }
+ }
+ if (value.usage != null) {
+ usage.inputTokens = value.usage.prompt_tokens;
+ usage.outputTokens = value.usage.completion_tokens;
+ usage.totalTokens = value.usage.total_tokens;
+ usage.reasoningTokens = (_b = (_a2 = value.usage.completion_tokens_details) == null ? void 0 : _a2.reasoning_tokens) != null ? _b : void 0;
+ }
+ const choice = value.choices[0];
+ if ((choice == null ? void 0 : choice.finish_reason) != null) {
+ finishReason = mapXaiFinishReason(choice.finish_reason);
+ }
+ if ((choice == null ? void 0 : choice.delta) == null) {
+ return;
+ }
+ const delta = choice.delta;
+ const choiceIndex = choice.index;
+ if (delta.content != null && delta.content.length > 0) {
+ const textContent = delta.content;
+ const lastMessage = body.messages[body.messages.length - 1];
+ if ((lastMessage == null ? void 0 : lastMessage.role) === "assistant" && textContent === lastMessage.content) {
+ return;
+ }
+ const blockId = `text-${value.id || choiceIndex}`;
+ if (contentBlocks[blockId] == null) {
+ contentBlocks[blockId] = { type: "text" };
+ controller.enqueue({
+ type: "text-start",
+ id: blockId
+ });
+ }
+ controller.enqueue({
+ type: "text-delta",
+ id: blockId,
+ delta: textContent
+ });
+ }
+ if (delta.reasoning_content != null && delta.reasoning_content.length > 0) {
+ const blockId = `reasoning-${value.id || choiceIndex}`;
+ if (lastReasoningDeltas[blockId] === delta.reasoning_content) {
+ return;
+ }
+ lastReasoningDeltas[blockId] = delta.reasoning_content;
+ if (contentBlocks[blockId] == null) {
+ contentBlocks[blockId] = { type: "reasoning" };
+ controller.enqueue({
+ type: "reasoning-start",
+ id: blockId
+ });
+ }
+ controller.enqueue({
+ type: "reasoning-delta",
+ id: blockId,
+ delta: delta.reasoning_content
+ });
+ }
+ if (delta.tool_calls != null) {
+ for (const toolCall of delta.tool_calls) {
+ const toolCallId = toolCall.id;
+ controller.enqueue({
+ type: "tool-input-start",
+ id: toolCallId,
+ toolName: toolCall.function.name
+ });
+ controller.enqueue({
+ type: "tool-input-delta",
+ id: toolCallId,
+ delta: toolCall.function.arguments
+ });
+ controller.enqueue({
+ type: "tool-input-end",
+ id: toolCallId
+ });
+ controller.enqueue({
+ type: "tool-call",
+ toolCallId,
+ toolName: toolCall.function.name,
+ input: toolCall.function.arguments
+ });
+ }
+ }
+ },
+ flush(controller) {
+ for (const [blockId, block] of Object.entries(contentBlocks)) {
+ controller.enqueue({
+ type: block.type === "text" ? "text-end" : "reasoning-end",
+ id: blockId
+ });
+ }
+ controller.enqueue({ type: "finish", finishReason, usage });
+ }
+ })
+ ),
+ request: { body },
+ response: { headers: responseHeaders }
+ };
+ }
+ };
+ var xaiUsageSchema = v4.z.object({
+ prompt_tokens: v4.z.number(),
+ completion_tokens: v4.z.number(),
+ total_tokens: v4.z.number(),
+ completion_tokens_details: v4.z.object({
+ reasoning_tokens: v4.z.number().nullish()
+ }).nullish()
+ });
+ var xaiChatResponseSchema = v4.z.object({
+ id: v4.z.string().nullish(),
+ created: v4.z.number().nullish(),
+ model: v4.z.string().nullish(),
+ choices: v4.z.array(
+ v4.z.object({
+ message: v4.z.object({
+ role: v4.z.literal("assistant"),
+ content: v4.z.string().nullish(),
+ reasoning_content: v4.z.string().nullish(),
+ tool_calls: v4.z.array(
+ v4.z.object({
+ id: v4.z.string(),
+ type: v4.z.literal("function"),
+ function: v4.z.object({
+ name: v4.z.string(),
+ arguments: v4.z.string()
+ })
+ })
+ ).nullish()
+ }),
+ index: v4.z.number(),
+ finish_reason: v4.z.string().nullish()
+ })
+ ),
+ object: v4.z.literal("chat.completion"),
+ usage: xaiUsageSchema,
+ citations: v4.z.array(v4.z.string().url()).nullish()
+ });
+ var xaiChatChunkSchema = v4.z.object({
+ id: v4.z.string().nullish(),
+ created: v4.z.number().nullish(),
+ model: v4.z.string().nullish(),
+ choices: v4.z.array(
+ v4.z.object({
+ delta: v4.z.object({
+ role: v4.z.enum(["assistant"]).optional(),
+ content: v4.z.string().nullish(),
+ reasoning_content: v4.z.string().nullish(),
+ tool_calls: v4.z.array(
+ v4.z.object({
+ id: v4.z.string(),
+ type: v4.z.literal("function"),
+ function: v4.z.object({
+ name: v4.z.string(),
+ arguments: v4.z.string()
+ })
+ })
+ ).nullish()
+ }),
+ finish_reason: v4.z.string().nullish(),
+ index: v4.z.number()
+ })
+ ),
+ usage: xaiUsageSchema.nullish(),
+ citations: v4.z.array(v4.z.string().url()).nullish()
+ });
+ var VERSION3 = "2.0.26" ;
+ var xaiErrorStructure = {
+ errorSchema: xaiErrorDataSchema,
+ errorToMessage: (data) => data.error.message
+ };
+ function createXai(options = {}) {
+ var _a;
+ const baseURL = chunkCB575O6L_cjs.withoutTrailingSlash(
+ (_a = options.baseURL) != null ? _a : "https://api.x.ai/v1"
+ );
+ const getHeaders = () => chunkCB575O6L_cjs.withUserAgentSuffix(
+ {
+ Authorization: `Bearer ${chunkCB575O6L_cjs.loadApiKey({
+ apiKey: options.apiKey,
+ environmentVariableName: "XAI_API_KEY",
+ description: "xAI API key"
+ })}`,
+ ...options.headers
+ },
+ `ai-sdk/xai/${VERSION3}`
+ );
+ const createLanguageModel = (modelId) => {
+ return new XaiChatLanguageModel(modelId, {
+ provider: "xai.chat",
+ baseURL,
+ headers: getHeaders,
+ generateId: chunkCB575O6L_cjs.generateId,
+ fetch: options.fetch
+ });
+ };
+ const createImageModel = (modelId) => {
+ return new chunkCB575O6L_cjs.OpenAICompatibleImageModel(modelId, {
+ provider: "xai.image",
+ url: ({ path }) => `${baseURL}${path}`,
+ headers: getHeaders,
+ fetch: options.fetch,
+ errorStructure: xaiErrorStructure
+ });
+ };
+ const provider = (modelId) => createLanguageModel(modelId);
+ provider.languageModel = createLanguageModel;
+ provider.chat = createLanguageModel;
+ provider.textEmbeddingModel = (modelId) => {
+ throw new chunkCB575O6L_cjs.NoSuchModelError({ modelId, modelType: "textEmbeddingModel" });
+ };
+ provider.imageModel = createImageModel;
+ provider.image = createImageModel;
+ return provider;
+ }
+ createXai();
+
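For orientation, a minimal sketch of how this inlined xAI provider factory is exercised (the explicit apiKey and the "grok-3" model id are illustrative assumptions; by default the factory falls back to the XAI_API_KEY environment variable):

    const xai = createXai({ apiKey: process.env.XAI_API_KEY });
    const model = xai("grok-3");   // same as xai.languageModel("grok-3"), returns an XaiChatLanguageModel
    // model.doGenerate(...) / model.doStream(...) then POST to `${baseURL}/chat/completions`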
2079
+ // src/llm/model/gateway-resolver.ts
+ function parseModelRouterId(routerId, gatewayPrefix) {
+ if (gatewayPrefix && !routerId.startsWith(`${gatewayPrefix}/`)) {
+ throw new Error(`Expected ${gatewayPrefix}/ in model router ID ${routerId}`);
+ }
+ const idParts = routerId.split("/");
+ if (gatewayPrefix && idParts.length < 3) {
+ throw new Error(
+ `Expected atleast 3 id parts ${gatewayPrefix}/provider/model, but only saw ${idParts.length} in ${routerId}`
+ );
+ }
+ const providerId = idParts.at(gatewayPrefix ? 1 : 0);
+ const modelId = idParts.slice(gatewayPrefix ? 2 : 1).join(`/`);
+ if (!routerId.includes(`/`) || !providerId || !modelId) {
+ throw new Error(
+ `Attempted to parse provider/model from ${routerId} but this ID doesn't appear to contain a provider`
+ );
+ }
+ return {
+ providerId,
+ modelId
+ };
+ }
+
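As a quick illustration of the parsing rules above (the router ids below are made-up examples):

    // Without a gateway prefix, the first segment is the provider:
    parseModelRouterId("openai/gpt-4o");
    //   -> { providerId: "openai", modelId: "gpt-4o" }
    // With a prefix, the provider is the second segment and the rest is the model id:
    parseModelRouterId("netlify/anthropic/claude-sonnet-4", "netlify");
    //   -> { providerId: "anthropic", modelId: "claude-sonnet-4" }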
2103
+ // src/llm/model/gateways/constants.ts
+ var PROVIDERS_WITH_INSTALLED_PACKAGES = ["anthropic", "google", "mistral", "openai", "openrouter", "xai"];
+ var EXCLUDED_PROVIDERS = ["github-copilot"];
+
+ // src/llm/model/gateways/models-dev.ts
+ var OPENAI_COMPATIBLE_OVERRIDES = {
+ cerebras: {
+ url: "https://api.cerebras.ai/v1"
+ },
+ mistral: {
+ url: "https://api.mistral.ai/v1"
+ },
+ groq: {
+ url: "https://api.groq.com/openai/v1"
+ },
+ togetherai: {
+ url: "https://api.together.xyz/v1"
+ },
+ deepinfra: {
+ url: "https://api.deepinfra.com/v1/openai"
+ },
+ perplexity: {
+ url: "https://api.perplexity.ai"
+ },
+ vercel: {
+ url: "https://ai-gateway.vercel.sh/v1",
+ apiKeyEnvVar: "AI_GATEWAY_API_KEY"
+ }
+ };
+ var ModelsDevGateway = class extends chunkCB575O6L_cjs.MastraModelGateway {
+ name = "models.dev";
+ prefix = void 0;
+ // No prefix for registry gateway
+ providerConfigs = {};
+ constructor(providerConfigs) {
+ super();
+ if (providerConfigs) this.providerConfigs = providerConfigs;
+ }
+ async fetchProviders() {
+ const response = await fetch("https://models.dev/api.json");
+ if (!response.ok) {
+ throw new Error(`Failed to fetch from models.dev: ${response.statusText}`);
+ }
+ const data = await response.json();
+ const providerConfigs = {};
+ for (const [providerId, providerInfo] of Object.entries(data)) {
+ if (EXCLUDED_PROVIDERS.includes(providerId)) continue;
+ if (!providerInfo || typeof providerInfo !== "object" || !providerInfo.models) continue;
+ const normalizedId = providerId;
+ const isOpenAICompatible = providerInfo.npm === "@ai-sdk/openai-compatible" || providerInfo.npm === "@ai-sdk/gateway" || // Vercel AI Gateway is OpenAI-compatible
+ normalizedId in OPENAI_COMPATIBLE_OVERRIDES;
+ const hasInstalledPackage = PROVIDERS_WITH_INSTALLED_PACKAGES.includes(providerId);
+ const hasApiAndEnv = providerInfo.api && providerInfo.env && providerInfo.env.length > 0;
+ if (isOpenAICompatible || hasInstalledPackage || hasApiAndEnv) {
+ const modelIds = Object.keys(providerInfo.models).sort();
+ const url = providerInfo.api || OPENAI_COMPATIBLE_OVERRIDES[normalizedId]?.url;
+ if (!hasInstalledPackage && !url) {
+ continue;
+ }
+ const apiKeyEnvVar = providerInfo.env?.[0] || `${normalizedId.toUpperCase().replace(/-/g, "_")}_API_KEY`;
+ const apiKeyHeader = !hasInstalledPackage ? OPENAI_COMPATIBLE_OVERRIDES[normalizedId]?.apiKeyHeader || "Authorization" : void 0;
+ providerConfigs[normalizedId] = {
+ url,
+ apiKeyEnvVar,
+ apiKeyHeader,
+ name: providerInfo.name || providerId.charAt(0).toUpperCase() + providerId.slice(1),
+ models: modelIds,
+ docUrl: providerInfo.doc,
+ // Include documentation URL if available
+ gateway: `models.dev`
+ };
+ }
+ }
+ this.providerConfigs = providerConfigs;
+ return providerConfigs;
+ }
+ buildUrl(routerId, envVars) {
+ const { providerId } = parseModelRouterId(routerId);
+ const config = this.providerConfigs[providerId];
+ if (!config?.url) {
+ return;
+ }
+ const baseUrlEnvVar = `${providerId.toUpperCase().replace(/-/g, "_")}_BASE_URL`;
+ const customBaseUrl = envVars?.[baseUrlEnvVar] || process.env[baseUrlEnvVar];
+ return customBaseUrl || config.url;
+ }
+ getApiKey(modelId) {
+ const [provider, model] = modelId.split("/");
+ if (!provider || !model) {
+ throw new Error(`Could not identify provider from model id ${modelId}`);
+ }
+ const config = this.providerConfigs[provider];
+ if (!config) {
+ throw new Error(`Could not find config for provider ${provider} with model id ${modelId}`);
+ }
+ const apiKey = typeof config.apiKeyEnvVar === `string` ? process.env[config.apiKeyEnvVar] : void 0;
+ if (!apiKey) {
+ throw new Error(`Could not find API key process.env.${config.apiKeyEnvVar} for model id ${modelId}`);
+ }
+ return Promise.resolve(apiKey);
+ }
+ async resolveLanguageModel({
+ modelId,
+ providerId,
+ apiKey
+ }) {
+ const baseURL = this.buildUrl(`${providerId}/${modelId}`);
+ switch (providerId) {
+ case "openai":
+ return chunkCB575O6L_cjs.createOpenAI({ apiKey }).responses(modelId);
+ case "gemini":
+ case "google":
+ return chunkCB575O6L_cjs.createGoogleGenerativeAI({
+ apiKey
+ }).chat(modelId);
+ case "anthropic":
+ return chunkCB575O6L_cjs.createAnthropic({ apiKey })(modelId);
+ case "mistral":
+ return createMistral({ apiKey })(modelId);
+ case "openrouter":
+ return aiSdkProviderV5.createOpenRouter({ apiKey })(modelId);
+ case "xai":
+ return createXai({
+ apiKey
+ })(modelId);
+ default:
+ if (!baseURL) throw new Error(`No API URL found for ${providerId}/${modelId}`);
+ return chunkCB575O6L_cjs.createOpenAICompatible({ name: providerId, apiKey, baseURL, supportsStructuredOutputs: true }).chatModel(
+ modelId
+ );
+ }
+ }
+ };
+
+ exports.ModelsDevGateway = ModelsDevGateway;
+ exports.parseModelRouterId = parseModelRouterId;
+ //# sourceMappingURL=chunk-LWBQ4P4N.cjs.map
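A rough usage sketch of the exported gateway, assuming GROQ_API_KEY is set and that models.dev lists the groq provider (the model id is illustrative):

    const gateway = new ModelsDevGateway();
    await gateway.fetchProviders();   // populates providerConfigs from https://models.dev/api.json
    const { providerId, modelId } = parseModelRouterId("groq/llama-3.3-70b-versatile");
    const apiKey = await gateway.getApiKey(`${providerId}/${modelId}`);   // reads the provider's env var, e.g. GROQ_API_KEY
    const model = await gateway.resolveLanguageModel({ providerId, modelId, apiKey });
    // groq has no installed provider package, so this falls through to createOpenAICompatible
    // using the groq base URL from the registry/overrides, or a GROQ_BASE_URL override if set.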