@mastra/core 1.0.0-beta.0 → 1.0.0-beta.2

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (100)
  1. package/CHANGELOG.md +17 -0
  2. package/dist/agent/index.cjs +6 -6
  3. package/dist/agent/index.js +1 -1
  4. package/dist/{chunk-QCQLOMJM.cjs → chunk-22443P6A.cjs} +22 -21
  5. package/dist/chunk-22443P6A.cjs.map +1 -0
  6. package/dist/{chunk-YIK3ASEG.cjs → chunk-2ZVKF4HP.cjs} +117 -42
  7. package/dist/chunk-2ZVKF4HP.cjs.map +1 -0
  8. package/dist/{chunk-26SQQNMU.js → chunk-7CBEP2ZQ.js} +36 -12
  9. package/dist/chunk-7CBEP2ZQ.js.map +1 -0
  10. package/dist/{chunk-7SKXKUYT.js → chunk-7PO6SEJF.js} +6 -3
  11. package/dist/chunk-7PO6SEJF.js.map +1 -0
  12. package/dist/{chunk-S5MJLXMG.cjs → chunk-CB575O6L.cjs} +10 -2
  13. package/dist/chunk-CB575O6L.cjs.map +1 -0
  14. package/dist/chunk-HDJFSJCK.js +2237 -0
  15. package/dist/chunk-HDJFSJCK.js.map +1 -0
  16. package/dist/{chunk-BU4IAJWF.js → chunk-I4CXL4SR.js} +3 -3
  17. package/dist/{chunk-BU4IAJWF.js.map → chunk-I4CXL4SR.js.map} +1 -1
  18. package/dist/{chunk-VV753WCB.cjs → chunk-IQO7ANVS.cjs} +8 -8
  19. package/dist/{chunk-VV753WCB.cjs.map → chunk-IQO7ANVS.cjs.map} +1 -1
  20. package/dist/{chunk-L7XKOKOW.js → chunk-JPGVRWWL.js} +3 -3
  21. package/dist/chunk-JPGVRWWL.js.map +1 -0
  22. package/dist/{chunk-BXOL277H.cjs → chunk-JYYQQEBH.cjs} +7 -4
  23. package/dist/chunk-JYYQQEBH.cjs.map +1 -0
  24. package/dist/chunk-LWBQ4P4N.cjs +2240 -0
  25. package/dist/chunk-LWBQ4P4N.cjs.map +1 -0
  26. package/dist/{chunk-VJUZZB2I.js → chunk-SNPVZPLB.js} +4 -4
  27. package/dist/{chunk-VJUZZB2I.js.map → chunk-SNPVZPLB.js.map} +1 -1
  28. package/dist/{chunk-32CTMD2C.js → chunk-W7UH2PWL.js} +108 -33
  29. package/dist/chunk-W7UH2PWL.js.map +1 -0
  30. package/dist/{chunk-FD734TPS.cjs → chunk-YCVEJ3UN.cjs} +37 -13
  31. package/dist/chunk-YCVEJ3UN.cjs.map +1 -0
  32. package/dist/{chunk-P6APHXPZ.js → chunk-ZGHTOYHW.js} +5 -4
  33. package/dist/chunk-ZGHTOYHW.js.map +1 -0
  34. package/dist/{chunk-QUZGDSWE.cjs → chunk-ZWNI5IWX.cjs} +11 -11
  35. package/dist/{chunk-QUZGDSWE.cjs.map → chunk-ZWNI5IWX.cjs.map} +1 -1
  36. package/dist/evals/index.cjs +4 -4
  37. package/dist/evals/index.js +1 -1
  38. package/dist/evals/scoreTraces/index.cjs +3 -3
  39. package/dist/evals/scoreTraces/index.js +1 -1
  40. package/dist/index.cjs +2 -2
  41. package/dist/index.js +1 -1
  42. package/dist/llm/index.cjs +7 -7
  43. package/dist/llm/index.js +1 -1
  44. package/dist/llm/model/gateways/constants.d.ts.map +1 -1
  45. package/dist/llm/model/gateways/models-dev.d.ts.map +1 -1
  46. package/dist/llm/model/provider-types.generated.d.ts +48 -10
  47. package/dist/loop/index.cjs +2 -2
  48. package/dist/loop/index.js +1 -1
  49. package/dist/mastra/index.cjs +2 -2
  50. package/dist/mastra/index.d.ts.map +1 -1
  51. package/dist/mastra/index.js +1 -1
  52. package/dist/memory/index.cjs +2 -2
  53. package/dist/memory/index.js +1 -1
  54. package/dist/models-dev-DNBKXHT4.js +3 -0
  55. package/dist/{models-dev-7U4NRMM3.js.map → models-dev-DNBKXHT4.js.map} +1 -1
  56. package/dist/models-dev-YBEEQIX6.cjs +12 -0
  57. package/dist/{models-dev-VKSAQPRK.cjs.map → models-dev-YBEEQIX6.cjs.map} +1 -1
  58. package/dist/netlify-7G2L5VSH.js +3 -0
  59. package/dist/{netlify-42ZNWIDQ.js.map → netlify-7G2L5VSH.js.map} +1 -1
  60. package/dist/netlify-GWNGSIRZ.cjs +12 -0
  61. package/dist/{netlify-2IDXTNFW.cjs.map → netlify-GWNGSIRZ.cjs.map} +1 -1
  62. package/dist/processors/index.cjs +11 -11
  63. package/dist/processors/index.js +1 -1
  64. package/dist/provider-registry.json +101 -26
  65. package/dist/relevance/index.cjs +2 -2
  66. package/dist/relevance/index.js +1 -1
  67. package/dist/stream/index.cjs +8 -8
  68. package/dist/stream/index.js +1 -1
  69. package/dist/workflows/default.d.ts.map +1 -1
  70. package/dist/workflows/evented/index.cjs +10 -10
  71. package/dist/workflows/evented/index.js +1 -1
  72. package/dist/workflows/evented/step-executor.d.ts.map +1 -1
  73. package/dist/workflows/index.cjs +16 -16
  74. package/dist/workflows/index.js +1 -1
  75. package/dist/workflows/step.d.ts +1 -1
  76. package/dist/workflows/step.d.ts.map +1 -1
  77. package/dist/workflows/types.d.ts +11 -5
  78. package/dist/workflows/types.d.ts.map +1 -1
  79. package/dist/workflows/workflow.d.ts +1 -0
  80. package/dist/workflows/workflow.d.ts.map +1 -1
  81. package/package.json +5 -4
  82. package/src/llm/model/provider-types.generated.d.ts +48 -10
  83. package/dist/chunk-26SQQNMU.js.map +0 -1
  84. package/dist/chunk-32CTMD2C.js.map +0 -1
  85. package/dist/chunk-7SKXKUYT.js.map +0 -1
  86. package/dist/chunk-BNBRQS7N.js +0 -910
  87. package/dist/chunk-BNBRQS7N.js.map +0 -1
  88. package/dist/chunk-BXOL277H.cjs.map +0 -1
  89. package/dist/chunk-FD734TPS.cjs.map +0 -1
  90. package/dist/chunk-IU2SZXJQ.cjs +0 -913
  91. package/dist/chunk-IU2SZXJQ.cjs.map +0 -1
  92. package/dist/chunk-L7XKOKOW.js.map +0 -1
  93. package/dist/chunk-P6APHXPZ.js.map +0 -1
  94. package/dist/chunk-QCQLOMJM.cjs.map +0 -1
  95. package/dist/chunk-S5MJLXMG.cjs.map +0 -1
  96. package/dist/chunk-YIK3ASEG.cjs.map +0 -1
  97. package/dist/models-dev-7U4NRMM3.js +0 -3
  98. package/dist/models-dev-VKSAQPRK.cjs +0 -12
  99. package/dist/netlify-2IDXTNFW.cjs +0 -12
  100. package/dist/netlify-42ZNWIDQ.js +0 -3
package/dist/chunk-HDJFSJCK.js
@@ -0,0 +1,2237 @@
1
+ import { createJsonErrorResponseHandler, InvalidArgumentError, withoutTrailingSlash, generateId, withUserAgentSuffix, parseProviderOptions, postJsonToApi, createJsonResponseHandler, combineHeaders, createEventSourceResponseHandler, loadApiKey, UnsupportedFunctionalityError, APICallError, convertToBase64, LoadAPIKeyError, TypeValidationError, EmptyResponseBodyError, JSONParseError, EventSourceParserStream, NoSuchModelError, MastraModelGateway, createOpenAICompatible, createAnthropic, createGoogleGenerativeAI, createOpenAI, TooManyEmbeddingValuesForCallError, OpenAICompatibleImageModel } from './chunk-JPGVRWWL.js';
2
+ import { z } from 'zod/v4';
3
+ import { createOpenRouter } from '@openrouter/ai-sdk-provider-v5';
4
+
5
+ function combineHeaders2(...headers) {
6
+ return headers.reduce(
7
+ (combinedHeaders, currentHeaders) => ({
8
+ ...combinedHeaders,
9
+ ...currentHeaders != null ? currentHeaders : {}
10
+ }),
11
+ {}
12
+ );
13
+ }
14
+ function extractResponseHeaders(response) {
15
+ return Object.fromEntries([...response.headers]);
16
+ }
17
+ var createIdGenerator = ({
18
+ prefix,
19
+ size = 16,
20
+ alphabet = "0123456789ABCDEFGHIJKLMNOPQRSTUVWXYZabcdefghijklmnopqrstuvwxyz",
21
+ separator = "-"
22
+ } = {}) => {
23
+ const generator = () => {
24
+ const alphabetLength = alphabet.length;
25
+ const chars = new Array(size);
26
+ for (let i = 0; i < size; i++) {
27
+ chars[i] = alphabet[Math.random() * alphabetLength | 0];
28
+ }
29
+ return chars.join("");
30
+ };
31
+ if (prefix == null) {
32
+ return generator;
33
+ }
34
+ if (alphabet.includes(separator)) {
35
+ throw new InvalidArgumentError({
36
+ argument: "separator",
37
+ message: `The separator "${separator}" must not be part of the alphabet "${alphabet}".`
38
+ });
39
+ }
40
+ return () => `${prefix}${separator}${generator()}`;
41
+ };
42
+ var generateId2 = createIdGenerator();
43
+ function isAbortError(error) {
44
+ return (error instanceof Error || error instanceof DOMException) && (error.name === "AbortError" || error.name === "ResponseAborted" || // Next.js
45
+ error.name === "TimeoutError");
46
+ }
47
+ var FETCH_FAILED_ERROR_MESSAGES = ["fetch failed", "failed to fetch"];
48
+ function handleFetchError({
49
+ error,
50
+ url,
51
+ requestBodyValues
52
+ }) {
53
+ if (isAbortError(error)) {
54
+ return error;
55
+ }
56
+ if (error instanceof TypeError && FETCH_FAILED_ERROR_MESSAGES.includes(error.message.toLowerCase())) {
57
+ const cause = error.cause;
58
+ if (cause != null) {
59
+ return new APICallError({
60
+ message: `Cannot connect to API: ${cause.message}`,
61
+ cause,
62
+ url,
63
+ requestBodyValues,
64
+ isRetryable: true
65
+ // retry when network error
66
+ });
67
+ }
68
+ }
69
+ return error;
70
+ }
71
+ function getRuntimeEnvironmentUserAgent(globalThisAny = globalThis) {
72
+ var _a, _b, _c;
73
+ if (globalThisAny.window) {
74
+ return `runtime/browser`;
75
+ }
76
+ if ((_a = globalThisAny.navigator) == null ? void 0 : _a.userAgent) {
77
+ return `runtime/${globalThisAny.navigator.userAgent.toLowerCase()}`;
78
+ }
79
+ if ((_c = (_b = globalThisAny.process) == null ? void 0 : _b.versions) == null ? void 0 : _c.node) {
80
+ return `runtime/node.js/${globalThisAny.process.version.substring(0)}`;
81
+ }
82
+ if (globalThisAny.EdgeRuntime) {
83
+ return `runtime/vercel-edge`;
84
+ }
85
+ return "runtime/unknown";
86
+ }
87
+ function normalizeHeaders(headers) {
88
+ if (headers == null) {
89
+ return {};
90
+ }
91
+ const normalized = {};
92
+ if (headers instanceof Headers) {
93
+ headers.forEach((value, key) => {
94
+ normalized[key.toLowerCase()] = value;
95
+ });
96
+ } else {
97
+ if (!Array.isArray(headers)) {
98
+ headers = Object.entries(headers);
99
+ }
100
+ for (const [key, value] of headers) {
101
+ if (value != null) {
102
+ normalized[key.toLowerCase()] = value;
103
+ }
104
+ }
105
+ }
106
+ return normalized;
107
+ }
108
+ function withUserAgentSuffix2(headers, ...userAgentSuffixParts) {
109
+ const normalizedHeaders = new Headers(normalizeHeaders(headers));
110
+ const currentUserAgentHeader = normalizedHeaders.get("user-agent") || "";
111
+ normalizedHeaders.set(
112
+ "user-agent",
113
+ [currentUserAgentHeader, ...userAgentSuffixParts].filter(Boolean).join(" ")
114
+ );
115
+ return Object.fromEntries(normalizedHeaders.entries());
116
+ }
117
+ var VERSION = "3.0.16" ;
118
+ var DEFAULT_SCHEMA_PREFIX = "JSON schema:";
119
+ var DEFAULT_SCHEMA_SUFFIX = "You MUST answer with a JSON object that matches the JSON schema above.";
120
+ var DEFAULT_GENERIC_SUFFIX = "You MUST answer with JSON.";
121
+ function injectJsonInstruction({
122
+ prompt,
123
+ schema,
124
+ schemaPrefix = schema != null ? DEFAULT_SCHEMA_PREFIX : void 0,
125
+ schemaSuffix = schema != null ? DEFAULT_SCHEMA_SUFFIX : DEFAULT_GENERIC_SUFFIX
126
+ }) {
127
+ return [
128
+ prompt != null && prompt.length > 0 ? prompt : void 0,
129
+ prompt != null && prompt.length > 0 ? "" : void 0,
130
+ // add a newline if prompt is not null
131
+ schemaPrefix,
132
+ schema != null ? JSON.stringify(schema) : void 0,
133
+ schemaSuffix
134
+ ].filter((line) => line != null).join("\n");
135
+ }
136
+ function injectJsonInstructionIntoMessages({
137
+ messages,
138
+ schema,
139
+ schemaPrefix,
140
+ schemaSuffix
141
+ }) {
142
+ var _a, _b;
143
+ const systemMessage = ((_a = messages[0]) == null ? void 0 : _a.role) === "system" ? { ...messages[0] } : { role: "system", content: "" };
144
+ systemMessage.content = injectJsonInstruction({
145
+ prompt: systemMessage.content,
146
+ schema,
147
+ schemaPrefix,
148
+ schemaSuffix
149
+ });
150
+ return [
151
+ systemMessage,
152
+ ...((_b = messages[0]) == null ? void 0 : _b.role) === "system" ? messages.slice(1) : messages
153
+ ];
154
+ }
155
+ function loadApiKey2({
156
+ apiKey,
157
+ environmentVariableName,
158
+ apiKeyParameterName = "apiKey",
159
+ description
160
+ }) {
161
+ if (typeof apiKey === "string") {
162
+ return apiKey;
163
+ }
164
+ if (apiKey != null) {
165
+ throw new LoadAPIKeyError({
166
+ message: `${description} API key must be a string.`
167
+ });
168
+ }
169
+ if (typeof process === "undefined") {
170
+ throw new LoadAPIKeyError({
171
+ message: `${description} API key is missing. Pass it using the '${apiKeyParameterName}' parameter. Environment variables is not supported in this environment.`
172
+ });
173
+ }
174
+ apiKey = process.env[environmentVariableName];
175
+ if (apiKey == null) {
176
+ throw new LoadAPIKeyError({
177
+ message: `${description} API key is missing. Pass it using the '${apiKeyParameterName}' parameter or the ${environmentVariableName} environment variable.`
178
+ });
179
+ }
180
+ if (typeof apiKey !== "string") {
181
+ throw new LoadAPIKeyError({
182
+ message: `${description} API key must be a string. The value of the ${environmentVariableName} environment variable is not a string.`
183
+ });
184
+ }
185
+ return apiKey;
186
+ }
187
+ var suspectProtoRx = /"__proto__"\s*:/;
188
+ var suspectConstructorRx = /"constructor"\s*:/;
189
+ function _parse(text) {
190
+ const obj = JSON.parse(text);
191
+ if (obj === null || typeof obj !== "object") {
192
+ return obj;
193
+ }
194
+ if (suspectProtoRx.test(text) === false && suspectConstructorRx.test(text) === false) {
195
+ return obj;
196
+ }
197
+ return filter(obj);
198
+ }
199
+ function filter(obj) {
200
+ let next = [obj];
201
+ while (next.length) {
202
+ const nodes = next;
203
+ next = [];
204
+ for (const node of nodes) {
205
+ if (Object.prototype.hasOwnProperty.call(node, "__proto__")) {
206
+ throw new SyntaxError("Object contains forbidden prototype property");
207
+ }
208
+ if (Object.prototype.hasOwnProperty.call(node, "constructor") && Object.prototype.hasOwnProperty.call(node.constructor, "prototype")) {
209
+ throw new SyntaxError("Object contains forbidden prototype property");
210
+ }
211
+ for (const key in node) {
212
+ const value = node[key];
213
+ if (value && typeof value === "object") {
214
+ next.push(value);
215
+ }
216
+ }
217
+ }
218
+ }
219
+ return obj;
220
+ }
221
+ function secureJsonParse(text) {
222
+ const { stackTraceLimit } = Error;
223
+ Error.stackTraceLimit = 0;
224
+ try {
225
+ return _parse(text);
226
+ } finally {
227
+ Error.stackTraceLimit = stackTraceLimit;
228
+ }
229
+ }
230
+ var validatorSymbol = Symbol.for("vercel.ai.validator");
231
+ function validator(validate) {
232
+ return { [validatorSymbol]: true, validate };
233
+ }
234
+ function isValidator(value) {
235
+ return typeof value === "object" && value !== null && validatorSymbol in value && value[validatorSymbol] === true && "validate" in value;
236
+ }
237
+ function asValidator(value) {
238
+ return isValidator(value) ? value : typeof value === "function" ? value() : standardSchemaValidator(value);
239
+ }
240
+ function standardSchemaValidator(standardSchema) {
241
+ return validator(async (value) => {
242
+ const result = await standardSchema["~standard"].validate(value);
243
+ return result.issues == null ? { success: true, value: result.value } : {
244
+ success: false,
245
+ error: new TypeValidationError({
246
+ value,
247
+ cause: result.issues
248
+ })
249
+ };
250
+ });
251
+ }
252
+ async function validateTypes({
253
+ value,
254
+ schema
255
+ }) {
256
+ const result = await safeValidateTypes({ value, schema });
257
+ if (!result.success) {
258
+ throw TypeValidationError.wrap({ value, cause: result.error });
259
+ }
260
+ return result.value;
261
+ }
262
+ async function safeValidateTypes({
263
+ value,
264
+ schema
265
+ }) {
266
+ const validator2 = asValidator(schema);
267
+ try {
268
+ if (validator2.validate == null) {
269
+ return { success: true, value, rawValue: value };
270
+ }
271
+ const result = await validator2.validate(value);
272
+ if (result.success) {
273
+ return { success: true, value: result.value, rawValue: value };
274
+ }
275
+ return {
276
+ success: false,
277
+ error: TypeValidationError.wrap({ value, cause: result.error }),
278
+ rawValue: value
279
+ };
280
+ } catch (error) {
281
+ return {
282
+ success: false,
283
+ error: TypeValidationError.wrap({ value, cause: error }),
284
+ rawValue: value
285
+ };
286
+ }
287
+ }
288
+ async function parseJSON({
289
+ text,
290
+ schema
291
+ }) {
292
+ try {
293
+ const value = secureJsonParse(text);
294
+ if (schema == null) {
295
+ return value;
296
+ }
297
+ return validateTypes({ value, schema });
298
+ } catch (error) {
299
+ if (JSONParseError.isInstance(error) || TypeValidationError.isInstance(error)) {
300
+ throw error;
301
+ }
302
+ throw new JSONParseError({ text, cause: error });
303
+ }
304
+ }
305
+ async function safeParseJSON({
306
+ text,
307
+ schema
308
+ }) {
309
+ try {
310
+ const value = secureJsonParse(text);
311
+ if (schema == null) {
312
+ return { success: true, value, rawValue: value };
313
+ }
314
+ return await safeValidateTypes({ value, schema });
315
+ } catch (error) {
316
+ return {
317
+ success: false,
318
+ error: JSONParseError.isInstance(error) ? error : new JSONParseError({ text, cause: error }),
319
+ rawValue: void 0
320
+ };
321
+ }
322
+ }
323
+ function parseJsonEventStream({
324
+ stream,
325
+ schema
326
+ }) {
327
+ return stream.pipeThrough(new TextDecoderStream()).pipeThrough(new EventSourceParserStream()).pipeThrough(
328
+ new TransformStream({
329
+ async transform({ data }, controller) {
330
+ if (data === "[DONE]") {
331
+ return;
332
+ }
333
+ controller.enqueue(await safeParseJSON({ text: data, schema }));
334
+ }
335
+ })
336
+ );
337
+ }
338
+ async function parseProviderOptions2({
339
+ provider,
340
+ providerOptions,
341
+ schema
342
+ }) {
343
+ if ((providerOptions == null ? void 0 : providerOptions[provider]) == null) {
344
+ return void 0;
345
+ }
346
+ const parsedProviderOptions = await safeValidateTypes({
347
+ value: providerOptions[provider],
348
+ schema
349
+ });
350
+ if (!parsedProviderOptions.success) {
351
+ throw new InvalidArgumentError({
352
+ argument: "providerOptions",
353
+ message: `invalid ${provider} provider options`,
354
+ cause: parsedProviderOptions.error
355
+ });
356
+ }
357
+ return parsedProviderOptions.value;
358
+ }
359
+ var getOriginalFetch2 = () => globalThis.fetch;
360
+ var postJsonToApi2 = async ({
361
+ url,
362
+ headers,
363
+ body,
364
+ failedResponseHandler,
365
+ successfulResponseHandler,
366
+ abortSignal,
367
+ fetch: fetch2
368
+ }) => postToApi({
369
+ url,
370
+ headers: {
371
+ "Content-Type": "application/json",
372
+ ...headers
373
+ },
374
+ body: {
375
+ content: JSON.stringify(body),
376
+ values: body
377
+ },
378
+ failedResponseHandler,
379
+ successfulResponseHandler,
380
+ abortSignal,
381
+ fetch: fetch2
382
+ });
383
+ var postToApi = async ({
384
+ url,
385
+ headers = {},
386
+ body,
387
+ successfulResponseHandler,
388
+ failedResponseHandler,
389
+ abortSignal,
390
+ fetch: fetch2 = getOriginalFetch2()
391
+ }) => {
392
+ try {
393
+ const response = await fetch2(url, {
394
+ method: "POST",
395
+ headers: withUserAgentSuffix2(
396
+ headers,
397
+ `ai-sdk/provider-utils/${VERSION}`,
398
+ getRuntimeEnvironmentUserAgent()
399
+ ),
400
+ body: body.content,
401
+ signal: abortSignal
402
+ });
403
+ const responseHeaders = extractResponseHeaders(response);
404
+ if (!response.ok) {
405
+ let errorInformation;
406
+ try {
407
+ errorInformation = await failedResponseHandler({
408
+ response,
409
+ url,
410
+ requestBodyValues: body.values
411
+ });
412
+ } catch (error) {
413
+ if (isAbortError(error) || APICallError.isInstance(error)) {
414
+ throw error;
415
+ }
416
+ throw new APICallError({
417
+ message: "Failed to process error response",
418
+ cause: error,
419
+ statusCode: response.status,
420
+ url,
421
+ responseHeaders,
422
+ requestBodyValues: body.values
423
+ });
424
+ }
425
+ throw errorInformation.value;
426
+ }
427
+ try {
428
+ return await successfulResponseHandler({
429
+ response,
430
+ url,
431
+ requestBodyValues: body.values
432
+ });
433
+ } catch (error) {
434
+ if (error instanceof Error) {
435
+ if (isAbortError(error) || APICallError.isInstance(error)) {
436
+ throw error;
437
+ }
438
+ }
439
+ throw new APICallError({
440
+ message: "Failed to process successful response",
441
+ cause: error,
442
+ statusCode: response.status,
443
+ url,
444
+ responseHeaders,
445
+ requestBodyValues: body.values
446
+ });
447
+ }
448
+ } catch (error) {
449
+ throw handleFetchError({ error, url, requestBodyValues: body.values });
450
+ }
451
+ };
452
+ var createJsonErrorResponseHandler2 = ({
453
+ errorSchema,
454
+ errorToMessage,
455
+ isRetryable
456
+ }) => async ({ response, url, requestBodyValues }) => {
457
+ const responseBody = await response.text();
458
+ const responseHeaders = extractResponseHeaders(response);
459
+ if (responseBody.trim() === "") {
460
+ return {
461
+ responseHeaders,
462
+ value: new APICallError({
463
+ message: response.statusText,
464
+ url,
465
+ requestBodyValues,
466
+ statusCode: response.status,
467
+ responseHeaders,
468
+ responseBody,
469
+ isRetryable: isRetryable == null ? void 0 : isRetryable(response)
470
+ })
471
+ };
472
+ }
473
+ try {
474
+ const parsedError = await parseJSON({
475
+ text: responseBody,
476
+ schema: errorSchema
477
+ });
478
+ return {
479
+ responseHeaders,
480
+ value: new APICallError({
481
+ message: errorToMessage(parsedError),
482
+ url,
483
+ requestBodyValues,
484
+ statusCode: response.status,
485
+ responseHeaders,
486
+ responseBody,
487
+ data: parsedError,
488
+ isRetryable: isRetryable == null ? void 0 : isRetryable(response, parsedError)
489
+ })
490
+ };
491
+ } catch (parseError) {
492
+ return {
493
+ responseHeaders,
494
+ value: new APICallError({
495
+ message: response.statusText,
496
+ url,
497
+ requestBodyValues,
498
+ statusCode: response.status,
499
+ responseHeaders,
500
+ responseBody,
501
+ isRetryable: isRetryable == null ? void 0 : isRetryable(response)
502
+ })
503
+ };
504
+ }
505
+ };
506
+ var createEventSourceResponseHandler2 = (chunkSchema) => async ({ response }) => {
507
+ const responseHeaders = extractResponseHeaders(response);
508
+ if (response.body == null) {
509
+ throw new EmptyResponseBodyError({});
510
+ }
511
+ return {
512
+ responseHeaders,
513
+ value: parseJsonEventStream({
514
+ stream: response.body,
515
+ schema: chunkSchema
516
+ })
517
+ };
518
+ };
519
+ var createJsonResponseHandler2 = (responseSchema) => async ({ response, url, requestBodyValues }) => {
520
+ const responseBody = await response.text();
521
+ const parsedResult = await safeParseJSON({
522
+ text: responseBody,
523
+ schema: responseSchema
524
+ });
525
+ const responseHeaders = extractResponseHeaders(response);
526
+ if (!parsedResult.success) {
527
+ throw new APICallError({
528
+ message: "Invalid JSON response",
529
+ cause: parsedResult.error,
530
+ statusCode: response.status,
531
+ responseHeaders,
532
+ responseBody,
533
+ url,
534
+ requestBodyValues
535
+ });
536
+ }
537
+ return {
538
+ responseHeaders,
539
+ value: parsedResult.value,
540
+ rawValue: parsedResult.rawValue
541
+ };
542
+ };
543
+ new Set(
544
+ "ABCDEFGHIJKLMNOPQRSTUVXYZabcdefghijklmnopqrstuvxyz0123456789"
545
+ );
546
+ var { btoa} = globalThis;
547
+ function convertUint8ArrayToBase64(array) {
548
+ let latin1string = "";
549
+ for (let i = 0; i < array.length; i++) {
550
+ latin1string += String.fromCodePoint(array[i]);
551
+ }
552
+ return btoa(latin1string);
553
+ }
554
+ function convertToBase642(value) {
555
+ return value instanceof Uint8Array ? convertUint8ArrayToBase64(value) : value;
556
+ }
557
+ function withoutTrailingSlash2(url) {
558
+ return url == null ? void 0 : url.replace(/\/$/, "");
559
+ }
560
+ function convertToMistralChatMessages(prompt) {
561
+ const messages = [];
562
+ for (let i = 0; i < prompt.length; i++) {
563
+ const { role, content } = prompt[i];
564
+ const isLastMessage = i === prompt.length - 1;
565
+ switch (role) {
566
+ case "system": {
567
+ messages.push({ role: "system", content });
568
+ break;
569
+ }
570
+ case "user": {
571
+ messages.push({
572
+ role: "user",
573
+ content: content.map((part) => {
574
+ switch (part.type) {
575
+ case "text": {
576
+ return { type: "text", text: part.text };
577
+ }
578
+ case "file": {
579
+ if (part.mediaType.startsWith("image/")) {
580
+ const mediaType = part.mediaType === "image/*" ? "image/jpeg" : part.mediaType;
581
+ return {
582
+ type: "image_url",
583
+ image_url: part.data instanceof URL ? part.data.toString() : `data:${mediaType};base64,${convertToBase642(part.data)}`
584
+ };
585
+ } else if (part.mediaType === "application/pdf") {
586
+ return {
587
+ type: "document_url",
588
+ document_url: part.data.toString()
589
+ };
590
+ } else {
591
+ throw new UnsupportedFunctionalityError({
592
+ functionality: "Only images and PDF file parts are supported"
593
+ });
594
+ }
595
+ }
596
+ }
597
+ })
598
+ });
599
+ break;
600
+ }
601
+ case "assistant": {
602
+ let text = "";
603
+ const toolCalls = [];
604
+ for (const part of content) {
605
+ switch (part.type) {
606
+ case "text": {
607
+ text += part.text;
608
+ break;
609
+ }
610
+ case "tool-call": {
611
+ toolCalls.push({
612
+ id: part.toolCallId,
613
+ type: "function",
614
+ function: {
615
+ name: part.toolName,
616
+ arguments: JSON.stringify(part.input)
617
+ }
618
+ });
619
+ break;
620
+ }
621
+ case "reasoning": {
622
+ text += part.text;
623
+ break;
624
+ }
625
+ default: {
626
+ throw new Error(
627
+ `Unsupported content type in assistant message: ${part.type}`
628
+ );
629
+ }
630
+ }
631
+ }
632
+ messages.push({
633
+ role: "assistant",
634
+ content: text,
635
+ prefix: isLastMessage ? true : void 0,
636
+ tool_calls: toolCalls.length > 0 ? toolCalls : void 0
637
+ });
638
+ break;
639
+ }
640
+ case "tool": {
641
+ for (const toolResponse of content) {
642
+ const output = toolResponse.output;
643
+ let contentValue;
644
+ switch (output.type) {
645
+ case "text":
646
+ case "error-text":
647
+ contentValue = output.value;
648
+ break;
649
+ case "content":
650
+ case "json":
651
+ case "error-json":
652
+ contentValue = JSON.stringify(output.value);
653
+ break;
654
+ }
655
+ messages.push({
656
+ role: "tool",
657
+ name: toolResponse.toolName,
658
+ tool_call_id: toolResponse.toolCallId,
659
+ content: contentValue
660
+ });
661
+ }
662
+ break;
663
+ }
664
+ default: {
665
+ const _exhaustiveCheck = role;
666
+ throw new Error(`Unsupported role: ${_exhaustiveCheck}`);
667
+ }
668
+ }
669
+ }
670
+ return messages;
671
+ }
672
+ function getResponseMetadata({
673
+ id,
674
+ model,
675
+ created
676
+ }) {
677
+ return {
678
+ id: id != null ? id : void 0,
679
+ modelId: model != null ? model : void 0,
680
+ timestamp: created != null ? new Date(created * 1e3) : void 0
681
+ };
682
+ }
683
+ function mapMistralFinishReason(finishReason) {
684
+ switch (finishReason) {
685
+ case "stop":
686
+ return "stop";
687
+ case "length":
688
+ case "model_length":
689
+ return "length";
690
+ case "tool_calls":
691
+ return "tool-calls";
692
+ default:
693
+ return "unknown";
694
+ }
695
+ }
696
+ var mistralLanguageModelOptions = z.object({
697
+ /**
698
+ Whether to inject a safety prompt before all conversations.
699
+
700
+ Defaults to `false`.
701
+ */
702
+ safePrompt: z.boolean().optional(),
703
+ documentImageLimit: z.number().optional(),
704
+ documentPageLimit: z.number().optional(),
705
+ /**
706
+ * Whether to use structured outputs.
707
+ *
708
+ * @default true
709
+ */
710
+ structuredOutputs: z.boolean().optional(),
711
+ /**
712
+ * Whether to use strict JSON schema validation.
713
+ *
714
+ * @default false
715
+ */
716
+ strictJsonSchema: z.boolean().optional(),
717
+ /**
718
+ * Whether to enable parallel function calling during tool use.
719
+ * When set to false, the model will use at most one tool per response.
720
+ *
721
+ * @default true
722
+ */
723
+ parallelToolCalls: z.boolean().optional()
724
+ });
725
+ var mistralErrorDataSchema = z.object({
726
+ object: z.literal("error"),
727
+ message: z.string(),
728
+ type: z.string(),
729
+ param: z.string().nullable(),
730
+ code: z.string().nullable()
731
+ });
732
+ var mistralFailedResponseHandler = createJsonErrorResponseHandler2({
733
+ errorSchema: mistralErrorDataSchema,
734
+ errorToMessage: (data) => data.message
735
+ });
736
+ function prepareTools({
737
+ tools,
738
+ toolChoice
739
+ }) {
740
+ tools = (tools == null ? void 0 : tools.length) ? tools : void 0;
741
+ const toolWarnings = [];
742
+ if (tools == null) {
743
+ return { tools: void 0, toolChoice: void 0, toolWarnings };
744
+ }
745
+ const mistralTools = [];
746
+ for (const tool of tools) {
747
+ if (tool.type === "provider-defined") {
748
+ toolWarnings.push({ type: "unsupported-tool", tool });
749
+ } else {
750
+ mistralTools.push({
751
+ type: "function",
752
+ function: {
753
+ name: tool.name,
754
+ description: tool.description,
755
+ parameters: tool.inputSchema
756
+ }
757
+ });
758
+ }
759
+ }
760
+ if (toolChoice == null) {
761
+ return { tools: mistralTools, toolChoice: void 0, toolWarnings };
762
+ }
763
+ const type = toolChoice.type;
764
+ switch (type) {
765
+ case "auto":
766
+ case "none":
767
+ return { tools: mistralTools, toolChoice: type, toolWarnings };
768
+ case "required":
769
+ return { tools: mistralTools, toolChoice: "any", toolWarnings };
770
+ // mistral does not support tool mode directly,
771
+ // so we filter the tools and force the tool choice through 'any'
772
+ case "tool":
773
+ return {
774
+ tools: mistralTools.filter(
775
+ (tool) => tool.function.name === toolChoice.toolName
776
+ ),
777
+ toolChoice: "any",
778
+ toolWarnings
779
+ };
780
+ default: {
781
+ const _exhaustiveCheck = type;
782
+ throw new UnsupportedFunctionalityError({
783
+ functionality: `tool choice type: ${_exhaustiveCheck}`
784
+ });
785
+ }
786
+ }
787
+ }
788
+ var MistralChatLanguageModel = class {
789
+ constructor(modelId, config) {
790
+ this.specificationVersion = "v2";
791
+ this.supportedUrls = {
792
+ "application/pdf": [/^https:\/\/.*$/]
793
+ };
794
+ var _a;
795
+ this.modelId = modelId;
796
+ this.config = config;
797
+ this.generateId = (_a = config.generateId) != null ? _a : generateId2;
798
+ }
799
+ get provider() {
800
+ return this.config.provider;
801
+ }
802
+ async getArgs({
803
+ prompt,
804
+ maxOutputTokens,
805
+ temperature,
806
+ topP,
807
+ topK,
808
+ frequencyPenalty,
809
+ presencePenalty,
810
+ stopSequences,
811
+ responseFormat,
812
+ seed,
813
+ providerOptions,
814
+ tools,
815
+ toolChoice
816
+ }) {
817
+ var _a, _b, _c, _d;
818
+ const warnings = [];
819
+ const options = (_a = await parseProviderOptions2({
820
+ provider: "mistral",
821
+ providerOptions,
822
+ schema: mistralLanguageModelOptions
823
+ })) != null ? _a : {};
824
+ if (topK != null) {
825
+ warnings.push({
826
+ type: "unsupported-setting",
827
+ setting: "topK"
828
+ });
829
+ }
830
+ if (frequencyPenalty != null) {
831
+ warnings.push({
832
+ type: "unsupported-setting",
833
+ setting: "frequencyPenalty"
834
+ });
835
+ }
836
+ if (presencePenalty != null) {
837
+ warnings.push({
838
+ type: "unsupported-setting",
839
+ setting: "presencePenalty"
840
+ });
841
+ }
842
+ if (stopSequences != null) {
843
+ warnings.push({
844
+ type: "unsupported-setting",
845
+ setting: "stopSequences"
846
+ });
847
+ }
848
+ const structuredOutputs = (_b = options.structuredOutputs) != null ? _b : true;
849
+ const strictJsonSchema = (_c = options.strictJsonSchema) != null ? _c : false;
850
+ if ((responseFormat == null ? void 0 : responseFormat.type) === "json" && !(responseFormat == null ? void 0 : responseFormat.schema)) {
851
+ prompt = injectJsonInstructionIntoMessages({
852
+ messages: prompt,
853
+ schema: responseFormat.schema
854
+ });
855
+ }
856
+ const baseArgs = {
857
+ // model id:
858
+ model: this.modelId,
859
+ // model specific settings:
860
+ safe_prompt: options.safePrompt,
861
+ // standardized settings:
862
+ max_tokens: maxOutputTokens,
863
+ temperature,
864
+ top_p: topP,
865
+ random_seed: seed,
866
+ // response format:
867
+ response_format: (responseFormat == null ? void 0 : responseFormat.type) === "json" ? structuredOutputs && (responseFormat == null ? void 0 : responseFormat.schema) != null ? {
868
+ type: "json_schema",
869
+ json_schema: {
870
+ schema: responseFormat.schema,
871
+ strict: strictJsonSchema,
872
+ name: (_d = responseFormat.name) != null ? _d : "response",
873
+ description: responseFormat.description
874
+ }
875
+ } : { type: "json_object" } : void 0,
876
+ // mistral-specific provider options:
877
+ document_image_limit: options.documentImageLimit,
878
+ document_page_limit: options.documentPageLimit,
879
+ // messages:
880
+ messages: convertToMistralChatMessages(prompt)
881
+ };
882
+ const {
883
+ tools: mistralTools,
884
+ toolChoice: mistralToolChoice,
885
+ toolWarnings
886
+ } = prepareTools({
887
+ tools,
888
+ toolChoice
889
+ });
890
+ return {
891
+ args: {
892
+ ...baseArgs,
893
+ tools: mistralTools,
894
+ tool_choice: mistralToolChoice,
895
+ ...mistralTools != null && options.parallelToolCalls !== void 0 ? { parallel_tool_calls: options.parallelToolCalls } : {}
896
+ },
897
+ warnings: [...warnings, ...toolWarnings]
898
+ };
899
+ }
900
+ async doGenerate(options) {
901
+ const { args: body, warnings } = await this.getArgs(options);
902
+ const {
903
+ responseHeaders,
904
+ value: response,
905
+ rawValue: rawResponse
906
+ } = await postJsonToApi2({
907
+ url: `${this.config.baseURL}/chat/completions`,
908
+ headers: combineHeaders2(this.config.headers(), options.headers),
909
+ body,
910
+ failedResponseHandler: mistralFailedResponseHandler,
911
+ successfulResponseHandler: createJsonResponseHandler2(
912
+ mistralChatResponseSchema
913
+ ),
914
+ abortSignal: options.abortSignal,
915
+ fetch: this.config.fetch
916
+ });
917
+ const choice = response.choices[0];
918
+ const content = [];
919
+ if (choice.message.content != null && Array.isArray(choice.message.content)) {
920
+ for (const part of choice.message.content) {
921
+ if (part.type === "thinking") {
922
+ const reasoningText = extractReasoningContent(part.thinking);
923
+ if (reasoningText.length > 0) {
924
+ content.push({ type: "reasoning", text: reasoningText });
925
+ }
926
+ } else if (part.type === "text") {
927
+ if (part.text.length > 0) {
928
+ content.push({ type: "text", text: part.text });
929
+ }
930
+ }
931
+ }
932
+ } else {
933
+ const text = extractTextContent(choice.message.content);
934
+ if (text != null && text.length > 0) {
935
+ content.push({ type: "text", text });
936
+ }
937
+ }
938
+ if (choice.message.tool_calls != null) {
939
+ for (const toolCall of choice.message.tool_calls) {
940
+ content.push({
941
+ type: "tool-call",
942
+ toolCallId: toolCall.id,
943
+ toolName: toolCall.function.name,
944
+ input: toolCall.function.arguments
945
+ });
946
+ }
947
+ }
948
+ return {
949
+ content,
950
+ finishReason: mapMistralFinishReason(choice.finish_reason),
951
+ usage: {
952
+ inputTokens: response.usage.prompt_tokens,
953
+ outputTokens: response.usage.completion_tokens,
954
+ totalTokens: response.usage.total_tokens
955
+ },
956
+ request: { body },
957
+ response: {
958
+ ...getResponseMetadata(response),
959
+ headers: responseHeaders,
960
+ body: rawResponse
961
+ },
962
+ warnings
963
+ };
964
+ }
965
+ async doStream(options) {
966
+ const { args, warnings } = await this.getArgs(options);
967
+ const body = { ...args, stream: true };
968
+ const { responseHeaders, value: response } = await postJsonToApi2({
969
+ url: `${this.config.baseURL}/chat/completions`,
970
+ headers: combineHeaders2(this.config.headers(), options.headers),
971
+ body,
972
+ failedResponseHandler: mistralFailedResponseHandler,
973
+ successfulResponseHandler: createEventSourceResponseHandler2(
974
+ mistralChatChunkSchema
975
+ ),
976
+ abortSignal: options.abortSignal,
977
+ fetch: this.config.fetch
978
+ });
979
+ let finishReason = "unknown";
980
+ const usage = {
981
+ inputTokens: void 0,
982
+ outputTokens: void 0,
983
+ totalTokens: void 0
984
+ };
985
+ let isFirstChunk = true;
986
+ let activeText = false;
987
+ let activeReasoningId = null;
988
+ const generateId22 = this.generateId;
989
+ return {
990
+ stream: response.pipeThrough(
991
+ new TransformStream({
992
+ start(controller) {
993
+ controller.enqueue({ type: "stream-start", warnings });
994
+ },
995
+ transform(chunk, controller) {
996
+ if (options.includeRawChunks) {
997
+ controller.enqueue({ type: "raw", rawValue: chunk.rawValue });
998
+ }
999
+ if (!chunk.success) {
1000
+ controller.enqueue({ type: "error", error: chunk.error });
1001
+ return;
1002
+ }
1003
+ const value = chunk.value;
1004
+ if (isFirstChunk) {
1005
+ isFirstChunk = false;
1006
+ controller.enqueue({
1007
+ type: "response-metadata",
1008
+ ...getResponseMetadata(value)
1009
+ });
1010
+ }
1011
+ if (value.usage != null) {
1012
+ usage.inputTokens = value.usage.prompt_tokens;
1013
+ usage.outputTokens = value.usage.completion_tokens;
1014
+ usage.totalTokens = value.usage.total_tokens;
1015
+ }
1016
+ const choice = value.choices[0];
1017
+ const delta = choice.delta;
1018
+ const textContent = extractTextContent(delta.content);
1019
+ if (delta.content != null && Array.isArray(delta.content)) {
1020
+ for (const part of delta.content) {
1021
+ if (part.type === "thinking") {
1022
+ const reasoningDelta = extractReasoningContent(part.thinking);
1023
+ if (reasoningDelta.length > 0) {
1024
+ if (activeReasoningId == null) {
1025
+ if (activeText) {
1026
+ controller.enqueue({ type: "text-end", id: "0" });
1027
+ activeText = false;
1028
+ }
1029
+ activeReasoningId = generateId22();
1030
+ controller.enqueue({
1031
+ type: "reasoning-start",
1032
+ id: activeReasoningId
1033
+ });
1034
+ }
1035
+ controller.enqueue({
1036
+ type: "reasoning-delta",
1037
+ id: activeReasoningId,
1038
+ delta: reasoningDelta
1039
+ });
1040
+ }
1041
+ }
1042
+ }
1043
+ }
1044
+ if (textContent != null && textContent.length > 0) {
1045
+ if (!activeText) {
1046
+ if (activeReasoningId != null) {
1047
+ controller.enqueue({
1048
+ type: "reasoning-end",
1049
+ id: activeReasoningId
1050
+ });
1051
+ activeReasoningId = null;
1052
+ }
1053
+ controller.enqueue({ type: "text-start", id: "0" });
1054
+ activeText = true;
1055
+ }
1056
+ controller.enqueue({
1057
+ type: "text-delta",
1058
+ id: "0",
1059
+ delta: textContent
1060
+ });
1061
+ }
1062
+ if ((delta == null ? void 0 : delta.tool_calls) != null) {
1063
+ for (const toolCall of delta.tool_calls) {
1064
+ const toolCallId = toolCall.id;
1065
+ const toolName = toolCall.function.name;
1066
+ const input = toolCall.function.arguments;
1067
+ controller.enqueue({
1068
+ type: "tool-input-start",
1069
+ id: toolCallId,
1070
+ toolName
1071
+ });
1072
+ controller.enqueue({
1073
+ type: "tool-input-delta",
1074
+ id: toolCallId,
1075
+ delta: input
1076
+ });
1077
+ controller.enqueue({
1078
+ type: "tool-input-end",
1079
+ id: toolCallId
1080
+ });
1081
+ controller.enqueue({
1082
+ type: "tool-call",
1083
+ toolCallId,
1084
+ toolName,
1085
+ input
1086
+ });
1087
+ }
1088
+ }
1089
+ if (choice.finish_reason != null) {
1090
+ finishReason = mapMistralFinishReason(choice.finish_reason);
1091
+ }
1092
+ },
1093
+ flush(controller) {
1094
+ if (activeReasoningId != null) {
1095
+ controller.enqueue({
1096
+ type: "reasoning-end",
1097
+ id: activeReasoningId
1098
+ });
1099
+ }
1100
+ if (activeText) {
1101
+ controller.enqueue({ type: "text-end", id: "0" });
1102
+ }
1103
+ controller.enqueue({
1104
+ type: "finish",
1105
+ finishReason,
1106
+ usage
1107
+ });
1108
+ }
1109
+ })
1110
+ ),
1111
+ request: { body },
1112
+ response: { headers: responseHeaders }
1113
+ };
1114
+ }
1115
+ };
1116
+ function extractReasoningContent(thinking) {
1117
+ return thinking.filter((chunk) => chunk.type === "text").map((chunk) => chunk.text).join("");
1118
+ }
1119
+ function extractTextContent(content) {
1120
+ if (typeof content === "string") {
1121
+ return content;
1122
+ }
1123
+ if (content == null) {
1124
+ return void 0;
1125
+ }
1126
+ const textContent = [];
1127
+ for (const chunk of content) {
1128
+ const { type } = chunk;
1129
+ switch (type) {
1130
+ case "text":
1131
+ textContent.push(chunk.text);
1132
+ break;
1133
+ case "thinking":
1134
+ case "image_url":
1135
+ case "reference":
1136
+ break;
1137
+ default: {
1138
+ const _exhaustiveCheck = type;
1139
+ throw new Error(`Unsupported type: ${_exhaustiveCheck}`);
1140
+ }
1141
+ }
1142
+ }
1143
+ return textContent.length ? textContent.join("") : void 0;
1144
+ }
1145
+ var mistralContentSchema = z.union([
1146
+ z.string(),
1147
+ z.array(
1148
+ z.discriminatedUnion("type", [
1149
+ z.object({
1150
+ type: z.literal("text"),
1151
+ text: z.string()
1152
+ }),
1153
+ z.object({
1154
+ type: z.literal("image_url"),
1155
+ image_url: z.union([
1156
+ z.string(),
1157
+ z.object({
1158
+ url: z.string(),
1159
+ detail: z.string().nullable()
1160
+ })
1161
+ ])
1162
+ }),
1163
+ z.object({
1164
+ type: z.literal("reference"),
1165
+ reference_ids: z.array(z.number())
1166
+ }),
1167
+ z.object({
1168
+ type: z.literal("thinking"),
1169
+ thinking: z.array(
1170
+ z.object({
1171
+ type: z.literal("text"),
1172
+ text: z.string()
1173
+ })
1174
+ )
1175
+ })
1176
+ ])
1177
+ )
1178
+ ]).nullish();
1179
+ var mistralUsageSchema = z.object({
1180
+ prompt_tokens: z.number(),
1181
+ completion_tokens: z.number(),
1182
+ total_tokens: z.number()
1183
+ });
1184
+ var mistralChatResponseSchema = z.object({
1185
+ id: z.string().nullish(),
1186
+ created: z.number().nullish(),
1187
+ model: z.string().nullish(),
1188
+ choices: z.array(
1189
+ z.object({
1190
+ message: z.object({
1191
+ role: z.literal("assistant"),
1192
+ content: mistralContentSchema,
1193
+ tool_calls: z.array(
1194
+ z.object({
1195
+ id: z.string(),
1196
+ function: z.object({ name: z.string(), arguments: z.string() })
1197
+ })
1198
+ ).nullish()
1199
+ }),
1200
+ index: z.number(),
1201
+ finish_reason: z.string().nullish()
1202
+ })
1203
+ ),
1204
+ object: z.literal("chat.completion"),
1205
+ usage: mistralUsageSchema
1206
+ });
1207
+ var mistralChatChunkSchema = z.object({
1208
+ id: z.string().nullish(),
1209
+ created: z.number().nullish(),
1210
+ model: z.string().nullish(),
1211
+ choices: z.array(
1212
+ z.object({
1213
+ delta: z.object({
1214
+ role: z.enum(["assistant"]).optional(),
1215
+ content: mistralContentSchema,
1216
+ tool_calls: z.array(
1217
+ z.object({
1218
+ id: z.string(),
1219
+ function: z.object({ name: z.string(), arguments: z.string() })
1220
+ })
1221
+ ).nullish()
1222
+ }),
1223
+ finish_reason: z.string().nullish(),
1224
+ index: z.number()
1225
+ })
1226
+ ),
1227
+ usage: mistralUsageSchema.nullish()
1228
+ });
1229
+ var MistralEmbeddingModel = class {
1230
+ constructor(modelId, config) {
1231
+ this.specificationVersion = "v2";
1232
+ this.maxEmbeddingsPerCall = 32;
1233
+ this.supportsParallelCalls = false;
1234
+ this.modelId = modelId;
1235
+ this.config = config;
1236
+ }
1237
+ get provider() {
1238
+ return this.config.provider;
1239
+ }
1240
+ async doEmbed({
1241
+ values,
1242
+ abortSignal,
1243
+ headers
1244
+ }) {
1245
+ if (values.length > this.maxEmbeddingsPerCall) {
1246
+ throw new TooManyEmbeddingValuesForCallError({
1247
+ provider: this.provider,
1248
+ modelId: this.modelId,
1249
+ maxEmbeddingsPerCall: this.maxEmbeddingsPerCall,
1250
+ values
1251
+ });
1252
+ }
1253
+ const {
1254
+ responseHeaders,
1255
+ value: response,
1256
+ rawValue
1257
+ } = await postJsonToApi2({
1258
+ url: `${this.config.baseURL}/embeddings`,
1259
+ headers: combineHeaders2(this.config.headers(), headers),
1260
+ body: {
1261
+ model: this.modelId,
1262
+ input: values,
1263
+ encoding_format: "float"
1264
+ },
1265
+ failedResponseHandler: mistralFailedResponseHandler,
1266
+ successfulResponseHandler: createJsonResponseHandler2(
1267
+ MistralTextEmbeddingResponseSchema
1268
+ ),
1269
+ abortSignal,
1270
+ fetch: this.config.fetch
1271
+ });
1272
+ return {
1273
+ embeddings: response.data.map((item) => item.embedding),
1274
+ usage: response.usage ? { tokens: response.usage.prompt_tokens } : void 0,
1275
+ response: { headers: responseHeaders, body: rawValue }
1276
+ };
1277
+ }
1278
+ };
1279
+ var MistralTextEmbeddingResponseSchema = z.object({
1280
+ data: z.array(z.object({ embedding: z.array(z.number()) })),
1281
+ usage: z.object({ prompt_tokens: z.number() }).nullish()
1282
+ });
1283
+ var VERSION2 = "2.0.23" ;
1284
+ function createMistral(options = {}) {
1285
+ var _a;
1286
+ const baseURL = (_a = withoutTrailingSlash2(options.baseURL)) != null ? _a : "https://api.mistral.ai/v1";
1287
+ const getHeaders = () => withUserAgentSuffix2(
1288
+ {
1289
+ Authorization: `Bearer ${loadApiKey2({
1290
+ apiKey: options.apiKey,
1291
+ environmentVariableName: "MISTRAL_API_KEY",
1292
+ description: "Mistral"
1293
+ })}`,
1294
+ ...options.headers
1295
+ },
1296
+ `ai-sdk/mistral/${VERSION2}`
1297
+ );
1298
+ const createChatModel = (modelId) => new MistralChatLanguageModel(modelId, {
1299
+ provider: "mistral.chat",
1300
+ baseURL,
1301
+ headers: getHeaders,
1302
+ fetch: options.fetch,
1303
+ generateId: options.generateId
1304
+ });
1305
+ const createEmbeddingModel = (modelId) => new MistralEmbeddingModel(modelId, {
1306
+ provider: "mistral.embedding",
1307
+ baseURL,
1308
+ headers: getHeaders,
1309
+ fetch: options.fetch
1310
+ });
1311
+ const provider = function(modelId) {
1312
+ if (new.target) {
1313
+ throw new Error(
1314
+ "The Mistral model function cannot be called with the new keyword."
1315
+ );
1316
+ }
1317
+ return createChatModel(modelId);
1318
+ };
1319
+ provider.languageModel = createChatModel;
1320
+ provider.chat = createChatModel;
1321
+ provider.embedding = createEmbeddingModel;
1322
+ provider.textEmbedding = createEmbeddingModel;
1323
+ provider.textEmbeddingModel = createEmbeddingModel;
1324
+ provider.imageModel = (modelId) => {
1325
+ throw new NoSuchModelError({ modelId, modelType: "imageModel" });
1326
+ };
1327
+ return provider;
1328
+ }
1329
+ createMistral();
1330
+ function convertToXaiChatMessages(prompt) {
1331
+ const messages = [];
1332
+ const warnings = [];
1333
+ for (const { role, content } of prompt) {
1334
+ switch (role) {
1335
+ case "system": {
1336
+ messages.push({ role: "system", content });
1337
+ break;
1338
+ }
1339
+ case "user": {
1340
+ if (content.length === 1 && content[0].type === "text") {
1341
+ messages.push({ role: "user", content: content[0].text });
1342
+ break;
1343
+ }
1344
+ messages.push({
1345
+ role: "user",
1346
+ content: content.map((part) => {
1347
+ switch (part.type) {
1348
+ case "text": {
1349
+ return { type: "text", text: part.text };
1350
+ }
1351
+ case "file": {
1352
+ if (part.mediaType.startsWith("image/")) {
1353
+ const mediaType = part.mediaType === "image/*" ? "image/jpeg" : part.mediaType;
1354
+ return {
1355
+ type: "image_url",
1356
+ image_url: {
1357
+ url: part.data instanceof URL ? part.data.toString() : `data:${mediaType};base64,${convertToBase64(part.data)}`
1358
+ }
1359
+ };
1360
+ } else {
1361
+ throw new UnsupportedFunctionalityError({
1362
+ functionality: `file part media type ${part.mediaType}`
1363
+ });
1364
+ }
1365
+ }
1366
+ }
1367
+ })
1368
+ });
1369
+ break;
1370
+ }
1371
+ case "assistant": {
1372
+ let text = "";
1373
+ const toolCalls = [];
1374
+ for (const part of content) {
1375
+ switch (part.type) {
1376
+ case "text": {
1377
+ text += part.text;
1378
+ break;
1379
+ }
1380
+ case "tool-call": {
1381
+ toolCalls.push({
1382
+ id: part.toolCallId,
1383
+ type: "function",
1384
+ function: {
1385
+ name: part.toolName,
1386
+ arguments: JSON.stringify(part.input)
1387
+ }
1388
+ });
1389
+ break;
1390
+ }
1391
+ }
1392
+ }
1393
+ messages.push({
1394
+ role: "assistant",
1395
+ content: text,
1396
+ tool_calls: toolCalls.length > 0 ? toolCalls : void 0
1397
+ });
1398
+ break;
1399
+ }
1400
+ case "tool": {
1401
+ for (const toolResponse of content) {
1402
+ const output = toolResponse.output;
1403
+ let contentValue;
1404
+ switch (output.type) {
1405
+ case "text":
1406
+ case "error-text":
1407
+ contentValue = output.value;
1408
+ break;
1409
+ case "content":
1410
+ case "json":
1411
+ case "error-json":
1412
+ contentValue = JSON.stringify(output.value);
1413
+ break;
1414
+ }
1415
+ messages.push({
1416
+ role: "tool",
1417
+ tool_call_id: toolResponse.toolCallId,
1418
+ content: contentValue
1419
+ });
1420
+ }
1421
+ break;
1422
+ }
1423
+ default: {
1424
+ const _exhaustiveCheck = role;
1425
+ throw new Error(`Unsupported role: ${_exhaustiveCheck}`);
1426
+ }
1427
+ }
1428
+ }
1429
+ return { messages, warnings };
1430
+ }
1431
+ function getResponseMetadata2({
1432
+ id,
1433
+ model,
1434
+ created
1435
+ }) {
1436
+ return {
1437
+ id: id != null ? id : void 0,
1438
+ modelId: model != null ? model : void 0,
1439
+ timestamp: created != null ? new Date(created * 1e3) : void 0
1440
+ };
1441
+ }
1442
+ function mapXaiFinishReason(finishReason) {
1443
+ switch (finishReason) {
1444
+ case "stop":
1445
+ return "stop";
1446
+ case "length":
1447
+ return "length";
1448
+ case "tool_calls":
1449
+ case "function_call":
1450
+ return "tool-calls";
1451
+ case "content_filter":
1452
+ return "content-filter";
1453
+ default:
1454
+ return "unknown";
1455
+ }
1456
+ }
1457
+ var webSourceSchema = z.object({
1458
+ type: z.literal("web"),
1459
+ country: z.string().length(2).optional(),
1460
+ excludedWebsites: z.array(z.string()).max(5).optional(),
1461
+ allowedWebsites: z.array(z.string()).max(5).optional(),
1462
+ safeSearch: z.boolean().optional()
1463
+ });
1464
+ var xSourceSchema = z.object({
1465
+ type: z.literal("x"),
1466
+ excludedXHandles: z.array(z.string()).optional(),
1467
+ includedXHandles: z.array(z.string()).optional(),
1468
+ postFavoriteCount: z.number().int().optional(),
1469
+ postViewCount: z.number().int().optional(),
1470
+ /**
1471
+ * @deprecated use `includedXHandles` instead
1472
+ */
1473
+ xHandles: z.array(z.string()).optional()
1474
+ });
1475
+ var newsSourceSchema = z.object({
1476
+ type: z.literal("news"),
1477
+ country: z.string().length(2).optional(),
1478
+ excludedWebsites: z.array(z.string()).max(5).optional(),
1479
+ safeSearch: z.boolean().optional()
1480
+ });
1481
+ var rssSourceSchema = z.object({
1482
+ type: z.literal("rss"),
1483
+ links: z.array(z.string().url()).max(1)
1484
+ // currently only supports one RSS link
1485
+ });
1486
+ var searchSourceSchema = z.discriminatedUnion("type", [
1487
+ webSourceSchema,
1488
+ xSourceSchema,
1489
+ newsSourceSchema,
1490
+ rssSourceSchema
1491
+ ]);
1492
+ var xaiProviderOptions = z.object({
1493
+ reasoningEffort: z.enum(["low", "high"]).optional(),
1494
+ searchParameters: z.object({
1495
+ /**
1496
+ * search mode preference
1497
+ * - "off": disables search completely
1498
+ * - "auto": model decides whether to search (default)
1499
+ * - "on": always enables search
1500
+ */
1501
+ mode: z.enum(["off", "auto", "on"]),
1502
+ /**
1503
+ * whether to return citations in the response
1504
+ * defaults to true
1505
+ */
1506
+ returnCitations: z.boolean().optional(),
1507
+ /**
1508
+ * start date for search data (ISO8601 format: YYYY-MM-DD)
1509
+ */
1510
+ fromDate: z.string().optional(),
1511
+ /**
1512
+ * end date for search data (ISO8601 format: YYYY-MM-DD)
1513
+ */
1514
+ toDate: z.string().optional(),
1515
+ /**
1516
+ * maximum number of search results to consider
1517
+ * defaults to 20
1518
+ */
1519
+ maxSearchResults: z.number().min(1).max(50).optional(),
1520
+ /**
1521
+ * data sources to search from
1522
+ * defaults to ["web", "x"] if not specified
1523
+ */
1524
+ sources: z.array(searchSourceSchema).optional()
1525
+ }).optional()
1526
+ });
1527
+ var xaiErrorDataSchema = z.object({
1528
+ error: z.object({
1529
+ message: z.string(),
1530
+ type: z.string().nullish(),
1531
+ param: z.any().nullish(),
1532
+ code: z.union([z.string(), z.number()]).nullish()
1533
+ })
1534
+ });
1535
+ var xaiFailedResponseHandler = createJsonErrorResponseHandler({
1536
+ errorSchema: xaiErrorDataSchema,
1537
+ errorToMessage: (data) => data.error.message
1538
+ });
1539
+ function prepareTools2({
1540
+ tools,
1541
+ toolChoice
1542
+ }) {
1543
+ tools = (tools == null ? void 0 : tools.length) ? tools : void 0;
1544
+ const toolWarnings = [];
1545
+ if (tools == null) {
1546
+ return { tools: void 0, toolChoice: void 0, toolWarnings };
1547
+ }
1548
+ const xaiTools = [];
1549
+ for (const tool of tools) {
1550
+ if (tool.type === "provider-defined") {
1551
+ toolWarnings.push({ type: "unsupported-tool", tool });
1552
+ } else {
1553
+ xaiTools.push({
1554
+ type: "function",
1555
+ function: {
1556
+ name: tool.name,
1557
+ description: tool.description,
1558
+ parameters: tool.inputSchema
1559
+ }
1560
+ });
1561
+ }
1562
+ }
1563
+ if (toolChoice == null) {
1564
+ return { tools: xaiTools, toolChoice: void 0, toolWarnings };
1565
+ }
1566
+ const type = toolChoice.type;
1567
+ switch (type) {
1568
+ case "auto":
1569
+ case "none":
1570
+ return { tools: xaiTools, toolChoice: type, toolWarnings };
1571
+ case "required":
1572
+ return { tools: xaiTools, toolChoice: "required", toolWarnings };
1573
+ case "tool":
1574
+ return {
1575
+ tools: xaiTools,
1576
+ toolChoice: {
1577
+ type: "function",
1578
+ function: { name: toolChoice.toolName }
1579
+ },
1580
+ toolWarnings
1581
+ };
1582
+ default: {
1583
+ const _exhaustiveCheck = type;
1584
+ throw new UnsupportedFunctionalityError({
1585
+ functionality: `tool choice type: ${_exhaustiveCheck}`
1586
+ });
1587
+ }
1588
+ }
1589
+ }
1590
+ var XaiChatLanguageModel = class {
+   constructor(modelId, config) {
+     this.specificationVersion = "v2";
+     this.supportedUrls = {
+       "image/*": [/^https?:\/\/.*$/]
+     };
+     this.modelId = modelId;
+     this.config = config;
+   }
+   get provider() {
+     return this.config.provider;
+   }
+   async getArgs({
+     prompt,
+     maxOutputTokens,
+     temperature,
+     topP,
+     topK,
+     frequencyPenalty,
+     presencePenalty,
+     stopSequences,
+     seed,
+     responseFormat,
+     providerOptions,
+     tools,
+     toolChoice
+   }) {
+     var _a, _b, _c;
+     const warnings = [];
+     const options = (_a = await parseProviderOptions({
+       provider: "xai",
+       providerOptions,
+       schema: xaiProviderOptions
+     })) != null ? _a : {};
+     if (topK != null) {
+       warnings.push({
+         type: "unsupported-setting",
+         setting: "topK"
+       });
+     }
+     if (frequencyPenalty != null) {
+       warnings.push({
+         type: "unsupported-setting",
+         setting: "frequencyPenalty"
+       });
+     }
+     if (presencePenalty != null) {
+       warnings.push({
+         type: "unsupported-setting",
+         setting: "presencePenalty"
+       });
+     }
+     if (stopSequences != null) {
+       warnings.push({
+         type: "unsupported-setting",
+         setting: "stopSequences"
+       });
+     }
+     if (responseFormat != null && responseFormat.type === "json" && responseFormat.schema != null) {
+       warnings.push({
+         type: "unsupported-setting",
+         setting: "responseFormat",
+         details: "JSON response format schema is not supported"
+       });
+     }
+     const { messages, warnings: messageWarnings } = convertToXaiChatMessages(prompt);
+     warnings.push(...messageWarnings);
+     const {
+       tools: xaiTools,
+       toolChoice: xaiToolChoice,
+       toolWarnings
+     } = prepareTools2({
+       tools,
+       toolChoice
+     });
+     warnings.push(...toolWarnings);
+     const baseArgs = {
+       // model id
+       model: this.modelId,
+       // standard generation settings
+       max_tokens: maxOutputTokens,
+       temperature,
+       top_p: topP,
+       seed,
+       reasoning_effort: options.reasoningEffort,
+       // response format
+       response_format: (responseFormat == null ? void 0 : responseFormat.type) === "json" ? responseFormat.schema != null ? {
+         type: "json_schema",
+         json_schema: {
+           name: (_b = responseFormat.name) != null ? _b : "response",
+           schema: responseFormat.schema,
+           strict: true
+         }
+       } : { type: "json_object" } : void 0,
+       // search parameters
+       search_parameters: options.searchParameters ? {
+         mode: options.searchParameters.mode,
+         return_citations: options.searchParameters.returnCitations,
+         from_date: options.searchParameters.fromDate,
+         to_date: options.searchParameters.toDate,
+         max_search_results: options.searchParameters.maxSearchResults,
+         sources: (_c = options.searchParameters.sources) == null ? void 0 : _c.map((source) => {
+           var _a2;
+           return {
+             type: source.type,
+             ...source.type === "web" && {
+               country: source.country,
+               excluded_websites: source.excludedWebsites,
+               allowed_websites: source.allowedWebsites,
+               safe_search: source.safeSearch
+             },
+             ...source.type === "x" && {
+               excluded_x_handles: source.excludedXHandles,
+               included_x_handles: (_a2 = source.includedXHandles) != null ? _a2 : source.xHandles,
+               post_favorite_count: source.postFavoriteCount,
+               post_view_count: source.postViewCount
+             },
+             ...source.type === "news" && {
+               country: source.country,
+               excluded_websites: source.excludedWebsites,
+               safe_search: source.safeSearch
+             },
+             ...source.type === "rss" && {
+               links: source.links
+             }
+           };
+         })
+       } : void 0,
+       // messages in xai format
+       messages,
+       // tools in xai format
+       tools: xaiTools,
+       tool_choice: xaiToolChoice
+     };
+     return {
+       args: baseArgs,
+       warnings
+     };
+   }
+   async doGenerate(options) {
+     var _a, _b, _c;
+     const { args: body, warnings } = await this.getArgs(options);
+     const {
+       responseHeaders,
+       value: response,
+       rawValue: rawResponse
+     } = await postJsonToApi({
+       url: `${(_a = this.config.baseURL) != null ? _a : "https://api.x.ai/v1"}/chat/completions`,
+       headers: combineHeaders(this.config.headers(), options.headers),
+       body,
+       failedResponseHandler: xaiFailedResponseHandler,
+       successfulResponseHandler: createJsonResponseHandler(
+         xaiChatResponseSchema
+       ),
+       abortSignal: options.abortSignal,
+       fetch: this.config.fetch
+     });
+     const choice = response.choices[0];
+     const content = [];
+     if (choice.message.content != null && choice.message.content.length > 0) {
+       let text = choice.message.content;
+       const lastMessage = body.messages[body.messages.length - 1];
+       if ((lastMessage == null ? void 0 : lastMessage.role) === "assistant" && text === lastMessage.content) {
+         text = "";
+       }
+       if (text.length > 0) {
+         content.push({ type: "text", text });
+       }
+     }
+     if (choice.message.reasoning_content != null && choice.message.reasoning_content.length > 0) {
+       content.push({
+         type: "reasoning",
+         text: choice.message.reasoning_content
+       });
+     }
+     if (choice.message.tool_calls != null) {
+       for (const toolCall of choice.message.tool_calls) {
+         content.push({
+           type: "tool-call",
+           toolCallId: toolCall.id,
+           toolName: toolCall.function.name,
+           input: toolCall.function.arguments
+         });
+       }
+     }
+     if (response.citations != null) {
+       for (const url of response.citations) {
+         content.push({
+           type: "source",
+           sourceType: "url",
+           id: this.config.generateId(),
+           url
+         });
+       }
+     }
+     return {
+       content,
+       finishReason: mapXaiFinishReason(choice.finish_reason),
+       usage: {
+         inputTokens: response.usage.prompt_tokens,
+         outputTokens: response.usage.completion_tokens,
+         totalTokens: response.usage.total_tokens,
+         reasoningTokens: (_c = (_b = response.usage.completion_tokens_details) == null ? void 0 : _b.reasoning_tokens) != null ? _c : void 0
+       },
+       request: { body },
+       response: {
+         ...getResponseMetadata2(response),
+         headers: responseHeaders,
+         body: rawResponse
+       },
+       warnings
+     };
+   }
+   async doStream(options) {
+     var _a;
+     const { args, warnings } = await this.getArgs(options);
+     const body = {
+       ...args,
+       stream: true,
+       stream_options: {
+         include_usage: true
+       }
+     };
+     const { responseHeaders, value: response } = await postJsonToApi({
+       url: `${(_a = this.config.baseURL) != null ? _a : "https://api.x.ai/v1"}/chat/completions`,
+       headers: combineHeaders(this.config.headers(), options.headers),
+       body,
+       failedResponseHandler: xaiFailedResponseHandler,
+       successfulResponseHandler: createEventSourceResponseHandler(xaiChatChunkSchema),
+       abortSignal: options.abortSignal,
+       fetch: this.config.fetch
+     });
+     let finishReason = "unknown";
+     const usage = {
+       inputTokens: void 0,
+       outputTokens: void 0,
+       totalTokens: void 0
+     };
+     let isFirstChunk = true;
+     const contentBlocks = {};
+     const lastReasoningDeltas = {};
+     const self = this;
+     return {
+       stream: response.pipeThrough(
+         new TransformStream({
+           start(controller) {
+             controller.enqueue({ type: "stream-start", warnings });
+           },
+           transform(chunk, controller) {
+             var _a2, _b;
+             if (options.includeRawChunks) {
+               controller.enqueue({ type: "raw", rawValue: chunk.rawValue });
+             }
+             if (!chunk.success) {
+               controller.enqueue({ type: "error", error: chunk.error });
+               return;
+             }
+             const value = chunk.value;
+             if (isFirstChunk) {
+               controller.enqueue({
+                 type: "response-metadata",
+                 ...getResponseMetadata2(value)
+               });
+               isFirstChunk = false;
+             }
+             if (value.citations != null) {
+               for (const url of value.citations) {
+                 controller.enqueue({
+                   type: "source",
+                   sourceType: "url",
+                   id: self.config.generateId(),
+                   url
+                 });
+               }
+             }
+             if (value.usage != null) {
+               usage.inputTokens = value.usage.prompt_tokens;
+               usage.outputTokens = value.usage.completion_tokens;
+               usage.totalTokens = value.usage.total_tokens;
+               usage.reasoningTokens = (_b = (_a2 = value.usage.completion_tokens_details) == null ? void 0 : _a2.reasoning_tokens) != null ? _b : void 0;
+             }
+             const choice = value.choices[0];
+             if ((choice == null ? void 0 : choice.finish_reason) != null) {
+               finishReason = mapXaiFinishReason(choice.finish_reason);
+             }
+             if ((choice == null ? void 0 : choice.delta) == null) {
+               return;
+             }
+             const delta = choice.delta;
+             const choiceIndex = choice.index;
+             if (delta.content != null && delta.content.length > 0) {
+               const textContent = delta.content;
+               const lastMessage = body.messages[body.messages.length - 1];
+               if ((lastMessage == null ? void 0 : lastMessage.role) === "assistant" && textContent === lastMessage.content) {
+                 return;
+               }
+               const blockId = `text-${value.id || choiceIndex}`;
+               if (contentBlocks[blockId] == null) {
+                 contentBlocks[blockId] = { type: "text" };
+                 controller.enqueue({
+                   type: "text-start",
+                   id: blockId
+                 });
+               }
+               controller.enqueue({
+                 type: "text-delta",
+                 id: blockId,
+                 delta: textContent
+               });
+             }
+             if (delta.reasoning_content != null && delta.reasoning_content.length > 0) {
+               const blockId = `reasoning-${value.id || choiceIndex}`;
+               if (lastReasoningDeltas[blockId] === delta.reasoning_content) {
+                 return;
+               }
+               lastReasoningDeltas[blockId] = delta.reasoning_content;
+               if (contentBlocks[blockId] == null) {
+                 contentBlocks[blockId] = { type: "reasoning" };
+                 controller.enqueue({
+                   type: "reasoning-start",
+                   id: blockId
+                 });
+               }
+               controller.enqueue({
+                 type: "reasoning-delta",
+                 id: blockId,
+                 delta: delta.reasoning_content
+               });
+             }
+             if (delta.tool_calls != null) {
+               for (const toolCall of delta.tool_calls) {
+                 const toolCallId = toolCall.id;
+                 controller.enqueue({
+                   type: "tool-input-start",
+                   id: toolCallId,
+                   toolName: toolCall.function.name
+                 });
+                 controller.enqueue({
+                   type: "tool-input-delta",
+                   id: toolCallId,
+                   delta: toolCall.function.arguments
+                 });
+                 controller.enqueue({
+                   type: "tool-input-end",
+                   id: toolCallId
+                 });
+                 controller.enqueue({
+                   type: "tool-call",
+                   toolCallId,
+                   toolName: toolCall.function.name,
+                   input: toolCall.function.arguments
+                 });
+               }
+             }
+           },
+           flush(controller) {
+             for (const [blockId, block] of Object.entries(contentBlocks)) {
+               controller.enqueue({
+                 type: block.type === "text" ? "text-end" : "reasoning-end",
+                 id: blockId
+               });
+             }
+             controller.enqueue({ type: "finish", finishReason, usage });
+           }
+         })
+       ),
+       request: { body },
+       response: { headers: responseHeaders }
+     };
+   }
+ };
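// A hedged sketch (editor annotation, not part of the package diff): the shape of
// providerOptions.xai that getArgs above reads before mapping it to the snake_case
// request fields. Field names are inferred from the property accesses in getArgs;
// the concrete values ("low", "auto", dates, handles) are illustrative assumptions.
const exampleProviderOptions = {
  xai: {
    reasoningEffort: "low",           // forwarded as reasoning_effort
    searchParameters: {
      mode: "auto",                   // -> search_parameters.mode
      returnCitations: true,          // -> return_citations
      fromDate: "2025-01-01",         // -> from_date
      toDate: "2025-06-30",           // -> to_date
      maxSearchResults: 5,            // -> max_search_results
      sources: [
        { type: "web", country: "US", excludedWebsites: ["example.com"] },
        { type: "x", includedXHandles: ["some_handle"] },
        { type: "rss", links: ["https://example.com/feed.xml"] }
      ]
    }
  }
};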
+ var xaiUsageSchema = z.object({
+   prompt_tokens: z.number(),
+   completion_tokens: z.number(),
+   total_tokens: z.number(),
+   completion_tokens_details: z.object({
+     reasoning_tokens: z.number().nullish()
+   }).nullish()
+ });
+ var xaiChatResponseSchema = z.object({
+   id: z.string().nullish(),
+   created: z.number().nullish(),
+   model: z.string().nullish(),
+   choices: z.array(
+     z.object({
+       message: z.object({
+         role: z.literal("assistant"),
+         content: z.string().nullish(),
+         reasoning_content: z.string().nullish(),
+         tool_calls: z.array(
+           z.object({
+             id: z.string(),
+             type: z.literal("function"),
+             function: z.object({
+               name: z.string(),
+               arguments: z.string()
+             })
+           })
+         ).nullish()
+       }),
+       index: z.number(),
+       finish_reason: z.string().nullish()
+     })
+   ),
+   object: z.literal("chat.completion"),
+   usage: xaiUsageSchema,
+   citations: z.array(z.string().url()).nullish()
+ });
+ var xaiChatChunkSchema = z.object({
+   id: z.string().nullish(),
+   created: z.number().nullish(),
+   model: z.string().nullish(),
+   choices: z.array(
+     z.object({
+       delta: z.object({
+         role: z.enum(["assistant"]).optional(),
+         content: z.string().nullish(),
+         reasoning_content: z.string().nullish(),
+         tool_calls: z.array(
+           z.object({
+             id: z.string(),
+             type: z.literal("function"),
+             function: z.object({
+               name: z.string(),
+               arguments: z.string()
+             })
+           })
+         ).nullish()
+       }),
+       finish_reason: z.string().nullish(),
+       index: z.number()
+     })
+   ),
+   usage: xaiUsageSchema.nullish(),
+   citations: z.array(z.string().url()).nullish()
+ });
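// A hedged sketch (editor annotation, not part of the package diff): a minimal payload
// that the xaiChatResponseSchema above would accept. Only the field names and types come
// from the schema; every concrete value (ids, token counts, model name) is illustrative.
const exampleXaiResponse = {
  id: "chatcmpl-123",
  created: 1700000000,
  model: "grok-example",              // model id is an assumption
  object: "chat.completion",          // required literal
  choices: [
    {
      message: {
        role: "assistant",
        content: "Hello!",
        reasoning_content: null,
        tool_calls: null
      },
      index: 0,
      finish_reason: "stop"
    }
  ],
  usage: {
    prompt_tokens: 10,
    completion_tokens: 3,
    total_tokens: 13,
    completion_tokens_details: { reasoning_tokens: 0 }
  },
  citations: null
};
// xaiChatResponseSchema.parse(exampleXaiResponse) would succeed for this shape.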
+ var VERSION3 = "2.0.26";
+ var xaiErrorStructure = {
+   errorSchema: xaiErrorDataSchema,
+   errorToMessage: (data) => data.error.message
+ };
+ function createXai(options = {}) {
+   var _a;
+   const baseURL = withoutTrailingSlash(
+     (_a = options.baseURL) != null ? _a : "https://api.x.ai/v1"
+   );
+   const getHeaders = () => withUserAgentSuffix(
+     {
+       Authorization: `Bearer ${loadApiKey({
+         apiKey: options.apiKey,
+         environmentVariableName: "XAI_API_KEY",
+         description: "xAI API key"
+       })}`,
+       ...options.headers
+     },
+     `ai-sdk/xai/${VERSION3}`
+   );
+   const createLanguageModel = (modelId) => {
+     return new XaiChatLanguageModel(modelId, {
+       provider: "xai.chat",
+       baseURL,
+       headers: getHeaders,
+       generateId,
+       fetch: options.fetch
+     });
+   };
+   const createImageModel = (modelId) => {
+     return new OpenAICompatibleImageModel(modelId, {
+       provider: "xai.image",
+       url: ({ path }) => `${baseURL}${path}`,
+       headers: getHeaders,
+       fetch: options.fetch,
+       errorStructure: xaiErrorStructure
+     });
+   };
+   const provider = (modelId) => createLanguageModel(modelId);
+   provider.languageModel = createLanguageModel;
+   provider.chat = createLanguageModel;
+   provider.textEmbeddingModel = (modelId) => {
+     throw new NoSuchModelError({ modelId, modelType: "textEmbeddingModel" });
+   };
+   provider.imageModel = createImageModel;
+   provider.image = createImageModel;
+   return provider;
+ }
+ createXai();
+
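// A hedged usage sketch (editor annotation, not part of the package diff): how the
// createXai factory above might be wired up. The model ids "grok-3" and "grok-2-image"
// and the explicit apiKey are assumptions; by default the key is loaded from
// process.env.XAI_API_KEY via loadApiKey.
const xai = createXai({ apiKey: process.env.XAI_API_KEY });
const chatModel = xai.chat("grok-3");              // same as xai("grok-3") / xai.languageModel("grok-3")
const imageModel = xai.imageModel("grok-2-image"); // OpenAI-compatible image endpoint
// xai.textEmbeddingModel(...) throws NoSuchModelError, as defined above.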
+ // src/llm/model/gateway-resolver.ts
+ function parseModelRouterId(routerId, gatewayPrefix) {
+   if (gatewayPrefix && !routerId.startsWith(`${gatewayPrefix}/`)) {
+     throw new Error(`Expected ${gatewayPrefix}/ in model router ID ${routerId}`);
+   }
+   const idParts = routerId.split("/");
+   if (gatewayPrefix && idParts.length < 3) {
+     throw new Error(
+       `Expected at least 3 id parts ${gatewayPrefix}/provider/model, but only saw ${idParts.length} in ${routerId}`
+     );
+   }
+   const providerId = idParts.at(gatewayPrefix ? 1 : 0);
+   const modelId = idParts.slice(gatewayPrefix ? 2 : 1).join(`/`);
+   if (!routerId.includes(`/`) || !providerId || !modelId) {
+     throw new Error(
+       `Attempted to parse provider/model from ${routerId} but this ID doesn't appear to contain a provider`
+     );
+   }
+   return {
+     providerId,
+     modelId
+   };
+ }
+
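// A hedged sketch (editor annotation, not part of the package diff): what the
// parseModelRouterId function above returns. The ids below are illustrative only.
parseModelRouterId("openai/gpt-4o");
//   -> { providerId: "openai", modelId: "gpt-4o" }
parseModelRouterId("netlify/openai/gpt-4o", "netlify");
//   -> { providerId: "openai", modelId: "gpt-4o" } (the gateway prefix is validated and stripped)
parseModelRouterId("gpt-4o");
//   -> throws: the id does not contain a provider segment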
+ // src/llm/model/gateways/constants.ts
+ var PROVIDERS_WITH_INSTALLED_PACKAGES = ["anthropic", "google", "mistral", "openai", "openrouter", "xai"];
+ var EXCLUDED_PROVIDERS = ["github-copilot"];
+
+ // src/llm/model/gateways/models-dev.ts
+ var OPENAI_COMPATIBLE_OVERRIDES = {
+   cerebras: {
+     url: "https://api.cerebras.ai/v1"
+   },
+   mistral: {
+     url: "https://api.mistral.ai/v1"
+   },
+   groq: {
+     url: "https://api.groq.com/openai/v1"
+   },
+   togetherai: {
+     url: "https://api.together.xyz/v1"
+   },
+   deepinfra: {
+     url: "https://api.deepinfra.com/v1/openai"
+   },
+   perplexity: {
+     url: "https://api.perplexity.ai"
+   },
+   vercel: {
+     url: "https://ai-gateway.vercel.sh/v1",
+     apiKeyEnvVar: "AI_GATEWAY_API_KEY"
+   }
+ };
+ var ModelsDevGateway = class extends MastraModelGateway {
+   name = "models.dev";
+   prefix = void 0;
+   // No prefix for registry gateway
+   providerConfigs = {};
+   constructor(providerConfigs) {
+     super();
+     if (providerConfigs) this.providerConfigs = providerConfigs;
+   }
+   async fetchProviders() {
+     const response = await fetch("https://models.dev/api.json");
+     if (!response.ok) {
+       throw new Error(`Failed to fetch from models.dev: ${response.statusText}`);
+     }
+     const data = await response.json();
+     const providerConfigs = {};
+     for (const [providerId, providerInfo] of Object.entries(data)) {
+       if (EXCLUDED_PROVIDERS.includes(providerId)) continue;
+       if (!providerInfo || typeof providerInfo !== "object" || !providerInfo.models) continue;
+       const normalizedId = providerId;
+       const isOpenAICompatible = providerInfo.npm === "@ai-sdk/openai-compatible" || providerInfo.npm === "@ai-sdk/gateway" || // Vercel AI Gateway is OpenAI-compatible
+       normalizedId in OPENAI_COMPATIBLE_OVERRIDES;
+       const hasInstalledPackage = PROVIDERS_WITH_INSTALLED_PACKAGES.includes(providerId);
+       const hasApiAndEnv = providerInfo.api && providerInfo.env && providerInfo.env.length > 0;
+       if (isOpenAICompatible || hasInstalledPackage || hasApiAndEnv) {
+         const modelIds = Object.keys(providerInfo.models).sort();
+         const url = providerInfo.api || OPENAI_COMPATIBLE_OVERRIDES[normalizedId]?.url;
+         if (!hasInstalledPackage && !url) {
+           continue;
+         }
+         const apiKeyEnvVar = providerInfo.env?.[0] || `${normalizedId.toUpperCase().replace(/-/g, "_")}_API_KEY`;
+         const apiKeyHeader = !hasInstalledPackage ? OPENAI_COMPATIBLE_OVERRIDES[normalizedId]?.apiKeyHeader || "Authorization" : void 0;
+         providerConfigs[normalizedId] = {
+           url,
+           apiKeyEnvVar,
+           apiKeyHeader,
+           name: providerInfo.name || providerId.charAt(0).toUpperCase() + providerId.slice(1),
+           models: modelIds,
+           docUrl: providerInfo.doc,
+           // Include documentation URL if available
+           gateway: `models.dev`
+         };
+       }
+     }
+     this.providerConfigs = providerConfigs;
+     return providerConfigs;
+   }
+   buildUrl(routerId, envVars) {
+     const { providerId } = parseModelRouterId(routerId);
+     const config = this.providerConfigs[providerId];
+     if (!config?.url) {
+       return;
+     }
+     const baseUrlEnvVar = `${providerId.toUpperCase().replace(/-/g, "_")}_BASE_URL`;
+     const customBaseUrl = envVars?.[baseUrlEnvVar] || process.env[baseUrlEnvVar];
+     return customBaseUrl || config.url;
+   }
+   getApiKey(modelId) {
+     const [provider, model] = modelId.split("/");
+     if (!provider || !model) {
+       throw new Error(`Could not identify provider from model id ${modelId}`);
+     }
+     const config = this.providerConfigs[provider];
+     if (!config) {
+       throw new Error(`Could not find config for provider ${provider} with model id ${modelId}`);
+     }
+     const apiKey = typeof config.apiKeyEnvVar === `string` ? process.env[config.apiKeyEnvVar] : void 0;
+     if (!apiKey) {
+       throw new Error(`Could not find API key process.env.${config.apiKeyEnvVar} for model id ${modelId}`);
+     }
+     return Promise.resolve(apiKey);
+   }
+   async resolveLanguageModel({
+     modelId,
+     providerId,
+     apiKey
+   }) {
+     const baseURL = this.buildUrl(`${providerId}/${modelId}`);
+     switch (providerId) {
+       case "openai":
+         return createOpenAI({ apiKey }).responses(modelId);
+       case "gemini":
+       case "google":
+         return createGoogleGenerativeAI({
+           apiKey
+         }).chat(modelId);
+       case "anthropic":
+         return createAnthropic({ apiKey })(modelId);
+       case "mistral":
+         return createMistral({ apiKey })(modelId);
+       case "openrouter":
+         return createOpenRouter({ apiKey })(modelId);
+       case "xai":
+         return createXai({
+           apiKey
+         })(modelId);
+       default:
+         if (!baseURL) throw new Error(`No API URL found for ${providerId}/${modelId}`);
+         return createOpenAICompatible({ name: providerId, apiKey, baseURL, supportsStructuredOutputs: true }).chatModel(
+           modelId
+         );
+     }
+   }
+ };
+
+ export { ModelsDevGateway, parseModelRouterId };
+ //# sourceMappingURL=chunk-HDJFSJCK.js.map
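// A hedged usage sketch (editor annotation, not part of the package diff): how the
// exported ModelsDevGateway might be used to resolve a model. The provider and model
// ids ("groq", "llama-3.3-70b-versatile") are assumptions; getApiKey reads whatever
// env var models.dev advertises for the provider (e.g. GROQ_API_KEY).
const gateway = new ModelsDevGateway();
await gateway.fetchProviders();                    // populates providerConfigs from https://models.dev/api.json
const routerId = "groq/llama-3.3-70b-versatile";   // "provider/model" form, no gateway prefix
const apiKey = await gateway.getApiKey(routerId);  // throws if the expected env var is missing
const model = await gateway.resolveLanguageModel({
  providerId: "groq",
  modelId: "llama-3.3-70b-versatile",
  apiKey
});
// "groq" is not a cased provider in the switch above, so this falls through to
// createOpenAICompatible with the base URL returned by buildUrl(routerId).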