@ai-sdk/mistral 0.0.0-85f9a635-20240518005312 → 0.0.0-98261322-20260122142521

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
package/dist/index.js CHANGED
@@ -20,25 +20,71 @@ var __toCommonJS = (mod) => __copyProps(__defProp({}, "__esModule", { value: tru
  // src/index.ts
  var src_exports = {};
  __export(src_exports, {
- Mistral: () => Mistral,
+ VERSION: () => VERSION,
  createMistral: () => createMistral,
  mistral: () => mistral
  });
  module.exports = __toCommonJS(src_exports);

- // src/mistral-facade.ts
- var import_provider_utils3 = require("@ai-sdk/provider-utils");
+ // src/mistral-provider.ts
+ var import_provider4 = require("@ai-sdk/provider");
+ var import_provider_utils5 = require("@ai-sdk/provider-utils");

  // src/mistral-chat-language-model.ts
- var import_provider2 = require("@ai-sdk/provider");
- var import_provider_utils2 = require("@ai-sdk/provider-utils");
- var import_zod2 = require("zod");
+ var import_provider_utils3 = require("@ai-sdk/provider-utils");
+ var import_v43 = require("zod/v4");
+
+ // src/convert-mistral-usage.ts
+ function convertMistralUsage(usage) {
+ if (usage == null) {
+ return {
+ inputTokens: {
+ total: void 0,
+ noCache: void 0,
+ cacheRead: void 0,
+ cacheWrite: void 0
+ },
+ outputTokens: {
+ total: void 0,
+ text: void 0,
+ reasoning: void 0
+ },
+ raw: void 0
+ };
+ }
+ const promptTokens = usage.prompt_tokens;
+ const completionTokens = usage.completion_tokens;
+ return {
+ inputTokens: {
+ total: promptTokens,
+ noCache: promptTokens,
+ cacheRead: void 0,
+ cacheWrite: void 0
+ },
+ outputTokens: {
+ total: completionTokens,
+ text: completionTokens,
+ reasoning: void 0
+ },
+ raw: usage
+ };
+ }

  // src/convert-to-mistral-chat-messages.ts
  var import_provider = require("@ai-sdk/provider");
+ var import_provider_utils = require("@ai-sdk/provider-utils");
+ function formatFileUrl({
+ data,
+ mediaType
+ }) {
+ return data instanceof URL ? data.toString() : `data:${mediaType};base64,${(0, import_provider_utils.convertToBase64)(data)}`;
+ }
  function convertToMistralChatMessages(prompt) {
+ var _a;
  const messages = [];
- for (const { role, content } of prompt) {
+ for (let i = 0; i < prompt.length; i++) {
+ const { role, content } = prompt[i];
+ const isLastMessage = i === prompt.length - 1;
  switch (role) {
  case "system": {
  messages.push({ role: "system", content });
@@ -50,15 +96,31 @@ function convertToMistralChatMessages(prompt) {
  content: content.map((part) => {
  switch (part.type) {
  case "text": {
- return part.text;
+ return { type: "text", text: part.text };
  }
- case "image": {
- throw new import_provider.UnsupportedFunctionalityError({
- functionality: "image-part"
- });
+ case "file": {
+ if (part.mediaType.startsWith("image/")) {
+ const mediaType = part.mediaType === "image/*" ? "image/jpeg" : part.mediaType;
+ return {
+ type: "image_url",
+ image_url: formatFileUrl({ data: part.data, mediaType })
+ };
+ } else if (part.mediaType === "application/pdf") {
+ return {
+ type: "document_url",
+ document_url: formatFileUrl({
+ data: part.data,
+ mediaType: "application/pdf"
+ })
+ };
+ } else {
+ throw new import_provider.UnsupportedFunctionalityError({
+ functionality: "Only images and PDF file parts are supported"
+ });
+ }
  }
  }
- }).join("")
+ })
  });
  break;
  }
@@ -77,34 +139,56 @@ function convertToMistralChatMessages(prompt) {
  type: "function",
  function: {
  name: part.toolName,
- arguments: JSON.stringify(part.args)
+ arguments: JSON.stringify(part.input)
  }
  });
  break;
  }
+ case "reasoning": {
+ text += part.text;
+ break;
+ }
  default: {
- const _exhaustiveCheck = part;
- throw new Error(`Unsupported part: ${_exhaustiveCheck}`);
+ throw new Error(
+ `Unsupported content type in assistant message: ${part.type}`
+ );
  }
  }
  }
  messages.push({
  role: "assistant",
  content: text,
- tool_calls: toolCalls.length > 0 ? toolCalls.map(({ function: { name, arguments: args } }) => ({
- id: "null",
- type: "function",
- function: { name, arguments: args }
- })) : void 0
+ prefix: isLastMessage ? true : void 0,
+ tool_calls: toolCalls.length > 0 ? toolCalls : void 0
  });
  break;
  }
  case "tool": {
  for (const toolResponse of content) {
+ if (toolResponse.type === "tool-approval-response") {
+ continue;
+ }
+ const output = toolResponse.output;
+ let contentValue;
+ switch (output.type) {
+ case "text":
+ case "error-text":
+ contentValue = output.value;
+ break;
+ case "execution-denied":
+ contentValue = (_a = output.reason) != null ? _a : "Tool execution denied.";
+ break;
+ case "content":
+ case "json":
+ case "error-json":
+ contentValue = JSON.stringify(output.value);
+ break;
+ }
  messages.push({
  role: "tool",
  name: toolResponse.toolName,
- content: JSON.stringify(toolResponse.result)
+ tool_call_id: toolResponse.toolCallId,
+ content: contentValue
  });
  }
  break;
@@ -118,6 +202,19 @@ function convertToMistralChatMessages(prompt) {
  return messages;
  }

+ // src/get-response-metadata.ts
+ function getResponseMetadata({
+ id,
+ model,
+ created
+ }) {
+ return {
+ id: id != null ? id : void 0,
+ modelId: model != null ? model : void 0,
+ timestamp: created != null ? new Date(created * 1e3) : void 0
+ };
+ }
+
  // src/map-mistral-finish-reason.ts
  function mapMistralFinishReason(finishReason) {
  switch (finishReason) {
@@ -133,344 +230,562 @@ function mapMistralFinishReason(finishReason) {
  }
  }

+ // src/mistral-chat-options.ts
+ var import_v4 = require("zod/v4");
+ var mistralLanguageModelOptions = import_v4.z.object({
+ /**
+ Whether to inject a safety prompt before all conversations.
+
+ Defaults to `false`.
+ */
+ safePrompt: import_v4.z.boolean().optional(),
+ documentImageLimit: import_v4.z.number().optional(),
+ documentPageLimit: import_v4.z.number().optional(),
+ /**
+ * Whether to use structured outputs.
+ *
+ * @default true
+ */
+ structuredOutputs: import_v4.z.boolean().optional(),
+ /**
+ * Whether to use strict JSON schema validation.
+ *
+ * @default false
+ */
+ strictJsonSchema: import_v4.z.boolean().optional(),
+ /**
+ * Whether to enable parallel function calling during tool use.
+ * When set to false, the model will use at most one tool per response.
+ *
+ * @default true
+ */
+ parallelToolCalls: import_v4.z.boolean().optional()
+ });
+
  // src/mistral-error.ts
- var import_provider_utils = require("@ai-sdk/provider-utils");
- var import_zod = require("zod");
- var mistralErrorDataSchema = import_zod.z.object({
- object: import_zod.z.literal("error"),
- message: import_zod.z.string(),
- type: import_zod.z.string(),
- param: import_zod.z.string().nullable(),
- code: import_zod.z.string().nullable()
+ var import_provider_utils2 = require("@ai-sdk/provider-utils");
+ var import_v42 = require("zod/v4");
+ var mistralErrorDataSchema = import_v42.z.object({
+ object: import_v42.z.literal("error"),
+ message: import_v42.z.string(),
+ type: import_v42.z.string(),
+ param: import_v42.z.string().nullable(),
+ code: import_v42.z.string().nullable()
  });
- var mistralFailedResponseHandler = (0, import_provider_utils.createJsonErrorResponseHandler)({
+ var mistralFailedResponseHandler = (0, import_provider_utils2.createJsonErrorResponseHandler)({
  errorSchema: mistralErrorDataSchema,
  errorToMessage: (data) => data.message
  });

+ // src/mistral-prepare-tools.ts
+ var import_provider2 = require("@ai-sdk/provider");
+ function prepareTools({
+ tools,
+ toolChoice
+ }) {
+ tools = (tools == null ? void 0 : tools.length) ? tools : void 0;
+ const toolWarnings = [];
+ if (tools == null) {
+ return { tools: void 0, toolChoice: void 0, toolWarnings };
+ }
+ const mistralTools = [];
+ for (const tool of tools) {
+ if (tool.type === "provider") {
+ toolWarnings.push({
+ type: "unsupported",
+ feature: `provider-defined tool ${tool.id}`
+ });
+ } else {
+ mistralTools.push({
+ type: "function",
+ function: {
+ name: tool.name,
+ description: tool.description,
+ parameters: tool.inputSchema,
+ ...tool.strict != null ? { strict: tool.strict } : {}
+ }
+ });
+ }
+ }
+ if (toolChoice == null) {
+ return { tools: mistralTools, toolChoice: void 0, toolWarnings };
+ }
+ const type = toolChoice.type;
+ switch (type) {
+ case "auto":
+ case "none":
+ return { tools: mistralTools, toolChoice: type, toolWarnings };
+ case "required":
+ return { tools: mistralTools, toolChoice: "any", toolWarnings };
+ // mistral does not support tool mode directly,
+ // so we filter the tools and force the tool choice through 'any'
+ case "tool":
+ return {
+ tools: mistralTools.filter(
+ (tool) => tool.function.name === toolChoice.toolName
+ ),
+ toolChoice: "any",
+ toolWarnings
+ };
+ default: {
+ const _exhaustiveCheck = type;
+ throw new import_provider2.UnsupportedFunctionalityError({
+ functionality: `tool choice type: ${_exhaustiveCheck}`
+ });
+ }
+ }
+ }
+
  // src/mistral-chat-language-model.ts
  var MistralChatLanguageModel = class {
- constructor(modelId, settings, config) {
- this.specificationVersion = "v1";
- this.defaultObjectGenerationMode = "json";
+ constructor(modelId, config) {
+ this.specificationVersion = "v3";
+ this.supportedUrls = {
+ "application/pdf": [/^https:\/\/.*$/]
+ };
+ var _a;
  this.modelId = modelId;
- this.settings = settings;
  this.config = config;
+ this.generateId = (_a = config.generateId) != null ? _a : import_provider_utils3.generateId;
  }
  get provider() {
  return this.config.provider;
  }
- getArgs({
- mode,
+ async getArgs({
  prompt,
- maxTokens,
+ maxOutputTokens,
  temperature,
  topP,
+ topK,
  frequencyPenalty,
  presencePenalty,
- seed
+ stopSequences,
+ responseFormat,
+ seed,
+ providerOptions,
+ tools,
+ toolChoice
  }) {
- var _a;
- const type = mode.type;
+ var _a, _b, _c, _d;
  const warnings = [];
+ const options = (_a = await (0, import_provider_utils3.parseProviderOptions)({
+ provider: "mistral",
+ providerOptions,
+ schema: mistralLanguageModelOptions
+ })) != null ? _a : {};
+ if (topK != null) {
+ warnings.push({ type: "unsupported", feature: "topK" });
+ }
  if (frequencyPenalty != null) {
- warnings.push({
- type: "unsupported-setting",
- setting: "frequencyPenalty"
- });
+ warnings.push({ type: "unsupported", feature: "frequencyPenalty" });
  }
  if (presencePenalty != null) {
- warnings.push({
- type: "unsupported-setting",
- setting: "presencePenalty"
+ warnings.push({ type: "unsupported", feature: "presencePenalty" });
+ }
+ if (stopSequences != null) {
+ warnings.push({ type: "unsupported", feature: "stopSequences" });
+ }
+ const structuredOutputs = (_b = options.structuredOutputs) != null ? _b : true;
+ const strictJsonSchema = (_c = options.strictJsonSchema) != null ? _c : false;
+ if ((responseFormat == null ? void 0 : responseFormat.type) === "json" && !(responseFormat == null ? void 0 : responseFormat.schema)) {
+ prompt = (0, import_provider_utils3.injectJsonInstructionIntoMessages)({
+ messages: prompt,
+ schema: responseFormat.schema
  });
  }
  const baseArgs = {
  // model id:
  model: this.modelId,
  // model specific settings:
- safe_prompt: this.settings.safePrompt,
+ safe_prompt: options.safePrompt,
  // standardized settings:
- max_tokens: maxTokens,
+ max_tokens: maxOutputTokens,
  temperature,
  top_p: topP,
  random_seed: seed,
+ // response format:
+ response_format: (responseFormat == null ? void 0 : responseFormat.type) === "json" ? structuredOutputs && (responseFormat == null ? void 0 : responseFormat.schema) != null ? {
+ type: "json_schema",
+ json_schema: {
+ schema: responseFormat.schema,
+ strict: strictJsonSchema,
+ name: (_d = responseFormat.name) != null ? _d : "response",
+ description: responseFormat.description
+ }
+ } : { type: "json_object" } : void 0,
+ // mistral-specific provider options:
+ document_image_limit: options.documentImageLimit,
+ document_page_limit: options.documentPageLimit,
  // messages:
  messages: convertToMistralChatMessages(prompt)
  };
- switch (type) {
- case "regular": {
- const tools = ((_a = mode.tools) == null ? void 0 : _a.length) ? mode.tools : void 0;
- return {
- args: {
- ...baseArgs,
- tools: tools == null ? void 0 : tools.map((tool) => ({
- type: "function",
- function: {
- name: tool.name,
- description: tool.description,
- parameters: tool.parameters
- }
- }))
- },
- warnings
- };
- }
- case "object-json": {
- return {
- args: {
- ...baseArgs,
- response_format: { type: "json_object" }
- },
- warnings
- };
- }
- case "object-tool": {
- return {
- args: {
- ...baseArgs,
- tool_choice: "any",
- tools: [{ type: "function", function: mode.tool }]
- },
- warnings
- };
- }
- case "object-grammar": {
- throw new import_provider2.UnsupportedFunctionalityError({
- functionality: "object-grammar mode"
- });
- }
- default: {
- const _exhaustiveCheck = type;
- throw new Error(`Unsupported type: ${_exhaustiveCheck}`);
- }
- }
+ const {
+ tools: mistralTools,
+ toolChoice: mistralToolChoice,
+ toolWarnings
+ } = prepareTools({
+ tools,
+ toolChoice
+ });
+ return {
+ args: {
+ ...baseArgs,
+ tools: mistralTools,
+ tool_choice: mistralToolChoice,
+ ...mistralTools != null && options.parallelToolCalls !== void 0 ? { parallel_tool_calls: options.parallelToolCalls } : {}
+ },
+ warnings: [...warnings, ...toolWarnings]
+ };
  }
  async doGenerate(options) {
- var _a, _b;
- const { args, warnings } = this.getArgs(options);
- const { responseHeaders, value: response } = await (0, import_provider_utils2.postJsonToApi)({
+ var _a;
+ const { args: body, warnings } = await this.getArgs(options);
+ const {
+ responseHeaders,
+ value: response,
+ rawValue: rawResponse
+ } = await (0, import_provider_utils3.postJsonToApi)({
  url: `${this.config.baseURL}/chat/completions`,
- headers: this.config.headers(),
- body: args,
+ headers: (0, import_provider_utils3.combineHeaders)(this.config.headers(), options.headers),
+ body,
  failedResponseHandler: mistralFailedResponseHandler,
- successfulResponseHandler: (0, import_provider_utils2.createJsonResponseHandler)(
+ successfulResponseHandler: (0, import_provider_utils3.createJsonResponseHandler)(
  mistralChatResponseSchema
  ),
- abortSignal: options.abortSignal
+ abortSignal: options.abortSignal,
+ fetch: this.config.fetch
  });
- const { messages: rawPrompt, ...rawSettings } = args;
  const choice = response.choices[0];
+ const content = [];
+ if (choice.message.content != null && Array.isArray(choice.message.content)) {
+ for (const part of choice.message.content) {
+ if (part.type === "thinking") {
+ const reasoningText = extractReasoningContent(part.thinking);
+ if (reasoningText.length > 0) {
+ content.push({ type: "reasoning", text: reasoningText });
+ }
+ } else if (part.type === "text") {
+ if (part.text.length > 0) {
+ content.push({ type: "text", text: part.text });
+ }
+ }
+ }
+ } else {
+ const text = extractTextContent(choice.message.content);
+ if (text != null && text.length > 0) {
+ content.push({ type: "text", text });
+ }
+ }
+ if (choice.message.tool_calls != null) {
+ for (const toolCall of choice.message.tool_calls) {
+ content.push({
+ type: "tool-call",
+ toolCallId: toolCall.id,
+ toolName: toolCall.function.name,
+ input: toolCall.function.arguments
+ });
+ }
+ }
  return {
- text: (_a = choice.message.content) != null ? _a : void 0,
- toolCalls: (_b = choice.message.tool_calls) == null ? void 0 : _b.map((toolCall) => ({
- toolCallType: "function",
- toolCallId: this.config.generateId(),
- toolName: toolCall.function.name,
- args: toolCall.function.arguments
- })),
- finishReason: mapMistralFinishReason(choice.finish_reason),
- usage: {
- promptTokens: response.usage.prompt_tokens,
- completionTokens: response.usage.completion_tokens
+ content,
+ finishReason: {
+ unified: mapMistralFinishReason(choice.finish_reason),
+ raw: (_a = choice.finish_reason) != null ? _a : void 0
+ },
+ usage: convertMistralUsage(response.usage),
+ request: { body },
+ response: {
+ ...getResponseMetadata(response),
+ headers: responseHeaders,
+ body: rawResponse
  },
- rawCall: { rawPrompt, rawSettings },
- rawResponse: { headers: responseHeaders },
  warnings
  };
  }
  async doStream(options) {
- const { args, warnings } = this.getArgs(options);
- const { responseHeaders, value: response } = await (0, import_provider_utils2.postJsonToApi)({
+ const { args, warnings } = await this.getArgs(options);
+ const body = { ...args, stream: true };
+ const { responseHeaders, value: response } = await (0, import_provider_utils3.postJsonToApi)({
  url: `${this.config.baseURL}/chat/completions`,
- headers: this.config.headers(),
- body: {
- ...args,
- stream: true
- },
+ headers: (0, import_provider_utils3.combineHeaders)(this.config.headers(), options.headers),
+ body,
  failedResponseHandler: mistralFailedResponseHandler,
- successfulResponseHandler: (0, import_provider_utils2.createEventSourceResponseHandler)(
+ successfulResponseHandler: (0, import_provider_utils3.createEventSourceResponseHandler)(
  mistralChatChunkSchema
  ),
- abortSignal: options.abortSignal
+ abortSignal: options.abortSignal,
+ fetch: this.config.fetch
  });
- const { messages: rawPrompt, ...rawSettings } = args;
- let finishReason = "other";
- let usage = {
- promptTokens: Number.NaN,
- completionTokens: Number.NaN
+ let finishReason = {
+ unified: "other",
+ raw: void 0
  };
- const generateId3 = this.config.generateId;
+ let usage = void 0;
+ let isFirstChunk = true;
+ let activeText = false;
+ let activeReasoningId = null;
+ const generateId2 = this.generateId;
  return {
  stream: response.pipeThrough(
  new TransformStream({
+ start(controller) {
+ controller.enqueue({ type: "stream-start", warnings });
+ },
  transform(chunk, controller) {
+ if (options.includeRawChunks) {
+ controller.enqueue({ type: "raw", rawValue: chunk.rawValue });
+ }
  if (!chunk.success) {
  controller.enqueue({ type: "error", error: chunk.error });
  return;
  }
  const value = chunk.value;
+ if (isFirstChunk) {
+ isFirstChunk = false;
+ controller.enqueue({
+ type: "response-metadata",
+ ...getResponseMetadata(value)
+ });
+ }
  if (value.usage != null) {
- usage = {
- promptTokens: value.usage.prompt_tokens,
- completionTokens: value.usage.completion_tokens
- };
+ usage = value.usage;
  }
  const choice = value.choices[0];
- if ((choice == null ? void 0 : choice.finish_reason) != null) {
- finishReason = mapMistralFinishReason(choice.finish_reason);
- }
- if ((choice == null ? void 0 : choice.delta) == null) {
- return;
- }
  const delta = choice.delta;
- if (delta.content != null) {
+ const textContent = extractTextContent(delta.content);
+ if (delta.content != null && Array.isArray(delta.content)) {
+ for (const part of delta.content) {
+ if (part.type === "thinking") {
+ const reasoningDelta = extractReasoningContent(part.thinking);
+ if (reasoningDelta.length > 0) {
+ if (activeReasoningId == null) {
+ if (activeText) {
+ controller.enqueue({ type: "text-end", id: "0" });
+ activeText = false;
+ }
+ activeReasoningId = generateId2();
+ controller.enqueue({
+ type: "reasoning-start",
+ id: activeReasoningId
+ });
+ }
+ controller.enqueue({
+ type: "reasoning-delta",
+ id: activeReasoningId,
+ delta: reasoningDelta
+ });
+ }
+ }
+ }
+ }
+ if (textContent != null && textContent.length > 0) {
+ if (!activeText) {
+ if (activeReasoningId != null) {
+ controller.enqueue({
+ type: "reasoning-end",
+ id: activeReasoningId
+ });
+ activeReasoningId = null;
+ }
+ controller.enqueue({ type: "text-start", id: "0" });
+ activeText = true;
+ }
  controller.enqueue({
  type: "text-delta",
- textDelta: delta.content
+ id: "0",
+ delta: textContent
  });
  }
- if (delta.tool_calls != null) {
+ if ((delta == null ? void 0 : delta.tool_calls) != null) {
  for (const toolCall of delta.tool_calls) {
- const toolCallId = generateId3();
+ const toolCallId = toolCall.id;
+ const toolName = toolCall.function.name;
+ const input = toolCall.function.arguments;
  controller.enqueue({
- type: "tool-call-delta",
- toolCallType: "function",
- toolCallId,
- toolName: toolCall.function.name,
- argsTextDelta: toolCall.function.arguments
+ type: "tool-input-start",
+ id: toolCallId,
+ toolName
+ });
+ controller.enqueue({
+ type: "tool-input-delta",
+ id: toolCallId,
+ delta: input
+ });
+ controller.enqueue({
+ type: "tool-input-end",
+ id: toolCallId
  });
  controller.enqueue({
  type: "tool-call",
- toolCallType: "function",
  toolCallId,
- toolName: toolCall.function.name,
- args: toolCall.function.arguments
+ toolName,
+ input
  });
  }
  }
+ if (choice.finish_reason != null) {
+ finishReason = {
+ unified: mapMistralFinishReason(choice.finish_reason),
+ raw: choice.finish_reason
+ };
+ }
  },
  flush(controller) {
- controller.enqueue({ type: "finish", finishReason, usage });
+ if (activeReasoningId != null) {
+ controller.enqueue({
+ type: "reasoning-end",
+ id: activeReasoningId
+ });
+ }
+ if (activeText) {
+ controller.enqueue({ type: "text-end", id: "0" });
+ }
+ controller.enqueue({
+ type: "finish",
+ finishReason,
+ usage: convertMistralUsage(usage)
+ });
  }
  })
  ),
- rawCall: { rawPrompt, rawSettings },
- rawResponse: { headers: responseHeaders },
- warnings
+ request: { body },
+ response: { headers: responseHeaders }
  };
  }
  };
- var mistralChatResponseSchema = import_zod2.z.object({
- choices: import_zod2.z.array(
- import_zod2.z.object({
- message: import_zod2.z.object({
- role: import_zod2.z.literal("assistant"),
- content: import_zod2.z.string().nullable(),
- tool_calls: import_zod2.z.array(
- import_zod2.z.object({
- function: import_zod2.z.object({
- name: import_zod2.z.string(),
- arguments: import_zod2.z.string()
- })
+ function extractReasoningContent(thinking) {
+ return thinking.filter((chunk) => chunk.type === "text").map((chunk) => chunk.text).join("");
+ }
+ function extractTextContent(content) {
+ if (typeof content === "string") {
+ return content;
+ }
+ if (content == null) {
+ return void 0;
+ }
+ const textContent = [];
+ for (const chunk of content) {
+ const { type } = chunk;
+ switch (type) {
+ case "text":
+ textContent.push(chunk.text);
+ break;
+ case "thinking":
+ case "image_url":
+ case "reference":
+ break;
+ default: {
+ const _exhaustiveCheck = type;
+ throw new Error(`Unsupported type: ${_exhaustiveCheck}`);
+ }
+ }
+ }
+ return textContent.length ? textContent.join("") : void 0;
+ }
+ var mistralContentSchema = import_v43.z.union([
+ import_v43.z.string(),
+ import_v43.z.array(
+ import_v43.z.discriminatedUnion("type", [
+ import_v43.z.object({
+ type: import_v43.z.literal("text"),
+ text: import_v43.z.string()
+ }),
+ import_v43.z.object({
+ type: import_v43.z.literal("image_url"),
+ image_url: import_v43.z.union([
+ import_v43.z.string(),
+ import_v43.z.object({
+ url: import_v43.z.string(),
+ detail: import_v43.z.string().nullable()
  })
- ).optional().nullable()
+ ])
  }),
- index: import_zod2.z.number(),
- finish_reason: import_zod2.z.string().optional().nullable()
+ import_v43.z.object({
+ type: import_v43.z.literal("reference"),
+ reference_ids: import_v43.z.array(import_v43.z.union([import_v43.z.string(), import_v43.z.number()]))
+ }),
+ import_v43.z.object({
+ type: import_v43.z.literal("thinking"),
+ thinking: import_v43.z.array(
+ import_v43.z.object({
+ type: import_v43.z.literal("text"),
+ text: import_v43.z.string()
+ })
+ )
+ })
+ ])
+ )
+ ]).nullish();
+ var mistralUsageSchema = import_v43.z.object({
+ prompt_tokens: import_v43.z.number(),
+ completion_tokens: import_v43.z.number(),
+ total_tokens: import_v43.z.number()
+ });
+ var mistralChatResponseSchema = import_v43.z.object({
+ id: import_v43.z.string().nullish(),
+ created: import_v43.z.number().nullish(),
+ model: import_v43.z.string().nullish(),
+ choices: import_v43.z.array(
+ import_v43.z.object({
+ message: import_v43.z.object({
+ role: import_v43.z.literal("assistant"),
+ content: mistralContentSchema,
+ tool_calls: import_v43.z.array(
+ import_v43.z.object({
+ id: import_v43.z.string(),
+ function: import_v43.z.object({ name: import_v43.z.string(), arguments: import_v43.z.string() })
+ })
+ ).nullish()
+ }),
+ index: import_v43.z.number(),
+ finish_reason: import_v43.z.string().nullish()
  })
  ),
- object: import_zod2.z.literal("chat.completion"),
- usage: import_zod2.z.object({
- prompt_tokens: import_zod2.z.number(),
- completion_tokens: import_zod2.z.number()
- })
+ object: import_v43.z.literal("chat.completion"),
+ usage: mistralUsageSchema
  });
- var mistralChatChunkSchema = import_zod2.z.object({
- object: import_zod2.z.literal("chat.completion.chunk"),
- choices: import_zod2.z.array(
- import_zod2.z.object({
- delta: import_zod2.z.object({
- role: import_zod2.z.enum(["assistant"]).optional(),
- content: import_zod2.z.string().nullable().optional(),
- tool_calls: import_zod2.z.array(
- import_zod2.z.object({
- function: import_zod2.z.object({ name: import_zod2.z.string(), arguments: import_zod2.z.string() })
+ var mistralChatChunkSchema = import_v43.z.object({
+ id: import_v43.z.string().nullish(),
+ created: import_v43.z.number().nullish(),
+ model: import_v43.z.string().nullish(),
+ choices: import_v43.z.array(
+ import_v43.z.object({
+ delta: import_v43.z.object({
+ role: import_v43.z.enum(["assistant"]).optional(),
+ content: mistralContentSchema,
+ tool_calls: import_v43.z.array(
+ import_v43.z.object({
+ id: import_v43.z.string(),
+ function: import_v43.z.object({ name: import_v43.z.string(), arguments: import_v43.z.string() })
  })
- ).optional().nullable()
+ ).nullish()
  }),
- finish_reason: import_zod2.z.string().nullable().optional(),
- index: import_zod2.z.number()
+ finish_reason: import_v43.z.string().nullish(),
+ index: import_v43.z.number()
  })
  ),
- usage: import_zod2.z.object({
- prompt_tokens: import_zod2.z.number(),
- completion_tokens: import_zod2.z.number()
- }).optional().nullable()
+ usage: mistralUsageSchema.nullish()
  });
 
- // src/mistral-facade.ts
- var Mistral = class {
- /**
- * Creates a new Mistral provider instance.
- */
- constructor(options = {}) {
- var _a, _b, _c;
- this.baseURL = (_b = (0, import_provider_utils3.withoutTrailingSlash)((_a = options.baseURL) != null ? _a : options.baseUrl)) != null ? _b : "https://api.mistral.ai/v1";
- this.apiKey = options.apiKey;
- this.headers = options.headers;
- this.generateId = (_c = options.generateId) != null ? _c : import_provider_utils3.generateId;
- }
- get baseConfig() {
- return {
- baseURL: this.baseURL,
- headers: () => ({
- Authorization: `Bearer ${(0, import_provider_utils3.loadApiKey)({
- apiKey: this.apiKey,
- environmentVariableName: "MISTRAL_API_KEY",
- description: "Mistral"
- })}`,
- ...this.headers
- })
- };
- }
- chat(modelId, settings = {}) {
- return new MistralChatLanguageModel(modelId, settings, {
- provider: "mistral.chat",
- ...this.baseConfig,
- generateId: this.generateId
- });
- }
- };
-
- // src/mistral-provider.ts
- var import_provider_utils5 = require("@ai-sdk/provider-utils");
-
  // src/mistral-embedding-model.ts
  var import_provider3 = require("@ai-sdk/provider");
  var import_provider_utils4 = require("@ai-sdk/provider-utils");
- var import_zod3 = require("zod");
+ var import_v44 = require("zod/v4");
  var MistralEmbeddingModel = class {
- constructor(modelId, settings, config) {
- this.specificationVersion = "v1";
+ constructor(modelId, config) {
+ this.specificationVersion = "v3";
+ this.maxEmbeddingsPerCall = 32;
+ this.supportsParallelCalls = false;
  this.modelId = modelId;
- this.settings = settings;
  this.config = config;
  }
  get provider() {
  return this.config.provider;
  }
- get maxEmbeddingsPerCall() {
- var _a;
- return (_a = this.settings.maxEmbeddingsPerCall) != null ? _a : 32;
- }
- get supportsParallelCalls() {
- var _a;
- return (_a = this.settings.supportsParallelCalls) != null ? _a : false;
- }
  async doEmbed({
  values,
- abortSignal
+ abortSignal,
+ headers
  }) {
  if (values.length > this.maxEmbeddingsPerCall) {
  throw new import_provider3.TooManyEmbeddingValuesForCallError({
@@ -480,9 +795,13 @@ var MistralEmbeddingModel = class {
  values
  });
  }
- const { responseHeaders, value: response } = await (0, import_provider_utils4.postJsonToApi)({
+ const {
+ responseHeaders,
+ value: response,
+ rawValue
+ } = await (0, import_provider_utils4.postJsonToApi)({
  url: `${this.config.baseURL}/embeddings`,
- headers: this.config.headers(),
+ headers: (0, import_provider_utils4.combineHeaders)(this.config.headers(), headers),
  body: {
  model: this.modelId,
  input: values,
@@ -492,64 +811,77 @@ var MistralEmbeddingModel = class {
  successfulResponseHandler: (0, import_provider_utils4.createJsonResponseHandler)(
  MistralTextEmbeddingResponseSchema
  ),
- abortSignal
+ abortSignal,
+ fetch: this.config.fetch
  });
  return {
+ warnings: [],
  embeddings: response.data.map((item) => item.embedding),
- rawResponse: { headers: responseHeaders }
+ usage: response.usage ? { tokens: response.usage.prompt_tokens } : void 0,
+ response: { headers: responseHeaders, body: rawValue }
  };
  }
  };
- var MistralTextEmbeddingResponseSchema = import_zod3.z.object({
- data: import_zod3.z.array(
- import_zod3.z.object({
- embedding: import_zod3.z.array(import_zod3.z.number())
- })
- )
+ var MistralTextEmbeddingResponseSchema = import_v44.z.object({
+ data: import_v44.z.array(import_v44.z.object({ embedding: import_v44.z.array(import_v44.z.number()) })),
+ usage: import_v44.z.object({ prompt_tokens: import_v44.z.number() }).nullish()
  });
 
+ // src/version.ts
+ var VERSION = true ? "0.0.0-98261322-20260122142521" : "0.0.0-test";
+
  // src/mistral-provider.ts
  function createMistral(options = {}) {
- var _a, _b;
- const baseURL = (_b = (0, import_provider_utils5.withoutTrailingSlash)((_a = options.baseURL) != null ? _a : options.baseUrl)) != null ? _b : "https://api.mistral.ai/v1";
- const getHeaders = () => ({
- Authorization: `Bearer ${(0, import_provider_utils5.loadApiKey)({
- apiKey: options.apiKey,
- environmentVariableName: "MISTRAL_API_KEY",
- description: "Mistral"
- })}`,
- ...options.headers
+ var _a;
+ const baseURL = (_a = (0, import_provider_utils5.withoutTrailingSlash)(options.baseURL)) != null ? _a : "https://api.mistral.ai/v1";
+ const getHeaders = () => (0, import_provider_utils5.withUserAgentSuffix)(
+ {
+ Authorization: `Bearer ${(0, import_provider_utils5.loadApiKey)({
+ apiKey: options.apiKey,
+ environmentVariableName: "MISTRAL_API_KEY",
+ description: "Mistral"
+ })}`,
+ ...options.headers
+ },
+ `ai-sdk/mistral/${VERSION}`
+ );
+ const createChatModel = (modelId) => new MistralChatLanguageModel(modelId, {
+ provider: "mistral.chat",
+ baseURL,
+ headers: getHeaders,
+ fetch: options.fetch,
+ generateId: options.generateId
  });
- const createChatModel = (modelId, settings = {}) => {
- var _a2;
- return new MistralChatLanguageModel(modelId, settings, {
- provider: "mistral.chat",
- baseURL,
- headers: getHeaders,
- generateId: (_a2 = options.generateId) != null ? _a2 : import_provider_utils5.generateId
- });
- };
- const createEmbeddingModel = (modelId, settings = {}) => new MistralEmbeddingModel(modelId, settings, {
+ const createEmbeddingModel = (modelId) => new MistralEmbeddingModel(modelId, {
  provider: "mistral.embedding",
  baseURL,
- headers: getHeaders
+ headers: getHeaders,
+ fetch: options.fetch
  });
- const provider = function(modelId, settings) {
+ const provider = function(modelId) {
  if (new.target) {
  throw new Error(
  "The Mistral model function cannot be called with the new keyword."
  );
  }
- return createChatModel(modelId, settings);
+ return createChatModel(modelId);
  };
+ provider.specificationVersion = "v3";
+ provider.languageModel = createChatModel;
  provider.chat = createChatModel;
  provider.embedding = createEmbeddingModel;
+ provider.embeddingModel = createEmbeddingModel;
+ provider.textEmbedding = createEmbeddingModel;
+ provider.textEmbeddingModel = createEmbeddingModel;
+ provider.imageModel = (modelId) => {
+ throw new import_provider4.NoSuchModelError({ modelId, modelType: "imageModel" });
+ };
  return provider;
  }
  var mistral = createMistral();
  // Annotate the CommonJS export names for ESM import in node:
  0 && (module.exports = {
- Mistral,
+ VERSION,
  createMistral,
  mistral
  });
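
For reference, a minimal usage sketch of the provider API after this change. This is a hedged example, not part of the package: `generateText` comes from the consuming `ai` package, and the model id and prompt are illustrative. Per-model settings arguments were removed; model-specific options now travel through `providerOptions.mistral`, which is validated against `mistralLanguageModelOptions`.

import { generateText } from "ai"; // assumed consumer-side API, not shipped in this package
import { createMistral } from "@ai-sdk/mistral";

const mistral = createMistral({ apiKey: process.env.MISTRAL_API_KEY });

const { text } = await generateText({
  // the second settings argument to mistral(modelId) no longer exists in this version
  model: mistral("mistral-small-latest"),
  prompt: "Hello!",
  // options such as safePrompt / structuredOutputs are parsed via parseProviderOptions
  // with provider "mistral" and the mistralLanguageModelOptions schema
  providerOptions: { mistral: { safePrompt: true } },
});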