@ai-sdk/mistral 0.0.0-85f9a635-20240518005312 → 0.0.0-98261322-20260122142521

This diff shows the changes between publicly released versions of the package as they appear in their respective public registries. It is provided for informational purposes only.
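
For orientation, a minimal sketch of constructing the rewritten provider in the newer build, based only on the exports and defaults visible in the diff below (the model IDs are illustrative placeholders, not taken from the package):

```js
import { createMistral, mistral } from '@ai-sdk/mistral';

// Explicit construction; apiKey falls back to the MISTRAL_API_KEY environment
// variable and baseURL to https://api.mistral.ai/v1 when omitted.
const provider = createMistral({ apiKey: process.env.MISTRAL_API_KEY });

// The provider is callable and also exposes languageModel/chat and
// embedding/textEmbedding/textEmbeddingModel factories; per-model settings
// objects are no longer accepted as a second argument.
const chatModel = provider('mistral-small-latest'); // placeholder model ID
const embeddingModel = provider.textEmbeddingModel('mistral-embed'); // placeholder model ID

// `mistral` is the default provider instance exported by the package.
console.log(chatModel.provider, embeddingModel.provider); // "mistral.chat", "mistral.embedding"
console.log(mistral('mistral-small-latest').specificationVersion); // "v3"
```
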
package/dist/index.mjs CHANGED
@@ -1,28 +1,78 @@
1
- // src/mistral-facade.ts
1
+ // src/mistral-provider.ts
2
+ import {
3
+ NoSuchModelError
4
+ } from "@ai-sdk/provider";
2
5
  import {
3
- generateId,
4
6
  loadApiKey,
5
- withoutTrailingSlash
7
+ withoutTrailingSlash,
8
+ withUserAgentSuffix
6
9
  } from "@ai-sdk/provider-utils";
7
10
 
8
11
  // src/mistral-chat-language-model.ts
9
12
  import {
10
- UnsupportedFunctionalityError as UnsupportedFunctionalityError2
11
- } from "@ai-sdk/provider";
12
- import {
13
+ combineHeaders,
13
14
  createEventSourceResponseHandler,
14
15
  createJsonResponseHandler,
16
+ generateId,
17
+ injectJsonInstructionIntoMessages,
18
+ parseProviderOptions,
15
19
  postJsonToApi
16
20
  } from "@ai-sdk/provider-utils";
17
- import { z as z2 } from "zod";
21
+ import { z as z3 } from "zod/v4";
22
+
23
+ // src/convert-mistral-usage.ts
24
+ function convertMistralUsage(usage) {
25
+ if (usage == null) {
26
+ return {
27
+ inputTokens: {
28
+ total: void 0,
29
+ noCache: void 0,
30
+ cacheRead: void 0,
31
+ cacheWrite: void 0
32
+ },
33
+ outputTokens: {
34
+ total: void 0,
35
+ text: void 0,
36
+ reasoning: void 0
37
+ },
38
+ raw: void 0
39
+ };
40
+ }
41
+ const promptTokens = usage.prompt_tokens;
42
+ const completionTokens = usage.completion_tokens;
43
+ return {
44
+ inputTokens: {
45
+ total: promptTokens,
46
+ noCache: promptTokens,
47
+ cacheRead: void 0,
48
+ cacheWrite: void 0
49
+ },
50
+ outputTokens: {
51
+ total: completionTokens,
52
+ text: completionTokens,
53
+ reasoning: void 0
54
+ },
55
+ raw: usage
56
+ };
57
+ }
18
58
 
19
59
  // src/convert-to-mistral-chat-messages.ts
20
60
  import {
21
61
  UnsupportedFunctionalityError
22
62
  } from "@ai-sdk/provider";
63
+ import { convertToBase64 } from "@ai-sdk/provider-utils";
64
+ function formatFileUrl({
65
+ data,
66
+ mediaType
67
+ }) {
68
+ return data instanceof URL ? data.toString() : `data:${mediaType};base64,${convertToBase64(data)}`;
69
+ }
23
70
  function convertToMistralChatMessages(prompt) {
71
+ var _a;
24
72
  const messages = [];
25
- for (const { role, content } of prompt) {
73
+ for (let i = 0; i < prompt.length; i++) {
74
+ const { role, content } = prompt[i];
75
+ const isLastMessage = i === prompt.length - 1;
26
76
  switch (role) {
27
77
  case "system": {
28
78
  messages.push({ role: "system", content });
@@ -34,15 +84,31 @@ function convertToMistralChatMessages(prompt) {
34
84
  content: content.map((part) => {
35
85
  switch (part.type) {
36
86
  case "text": {
37
- return part.text;
87
+ return { type: "text", text: part.text };
38
88
  }
39
- case "image": {
40
- throw new UnsupportedFunctionalityError({
41
- functionality: "image-part"
42
- });
89
+ case "file": {
90
+ if (part.mediaType.startsWith("image/")) {
91
+ const mediaType = part.mediaType === "image/*" ? "image/jpeg" : part.mediaType;
92
+ return {
93
+ type: "image_url",
94
+ image_url: formatFileUrl({ data: part.data, mediaType })
95
+ };
96
+ } else if (part.mediaType === "application/pdf") {
97
+ return {
98
+ type: "document_url",
99
+ document_url: formatFileUrl({
100
+ data: part.data,
101
+ mediaType: "application/pdf"
102
+ })
103
+ };
104
+ } else {
105
+ throw new UnsupportedFunctionalityError({
106
+ functionality: "Only images and PDF file parts are supported"
107
+ });
108
+ }
43
109
  }
44
110
  }
45
- }).join("")
111
+ })
46
112
  });
47
113
  break;
48
114
  }
@@ -61,34 +127,56 @@ function convertToMistralChatMessages(prompt) {
61
127
  type: "function",
62
128
  function: {
63
129
  name: part.toolName,
64
- arguments: JSON.stringify(part.args)
130
+ arguments: JSON.stringify(part.input)
65
131
  }
66
132
  });
67
133
  break;
68
134
  }
135
+ case "reasoning": {
136
+ text += part.text;
137
+ break;
138
+ }
69
139
  default: {
70
- const _exhaustiveCheck = part;
71
- throw new Error(`Unsupported part: ${_exhaustiveCheck}`);
140
+ throw new Error(
141
+ `Unsupported content type in assistant message: ${part.type}`
142
+ );
72
143
  }
73
144
  }
74
145
  }
75
146
  messages.push({
76
147
  role: "assistant",
77
148
  content: text,
78
- tool_calls: toolCalls.length > 0 ? toolCalls.map(({ function: { name, arguments: args } }) => ({
79
- id: "null",
80
- type: "function",
81
- function: { name, arguments: args }
82
- })) : void 0
149
+ prefix: isLastMessage ? true : void 0,
150
+ tool_calls: toolCalls.length > 0 ? toolCalls : void 0
83
151
  });
84
152
  break;
85
153
  }
86
154
  case "tool": {
87
155
  for (const toolResponse of content) {
156
+ if (toolResponse.type === "tool-approval-response") {
157
+ continue;
158
+ }
159
+ const output = toolResponse.output;
160
+ let contentValue;
161
+ switch (output.type) {
162
+ case "text":
163
+ case "error-text":
164
+ contentValue = output.value;
165
+ break;
166
+ case "execution-denied":
167
+ contentValue = (_a = output.reason) != null ? _a : "Tool execution denied.";
168
+ break;
169
+ case "content":
170
+ case "json":
171
+ case "error-json":
172
+ contentValue = JSON.stringify(output.value);
173
+ break;
174
+ }
88
175
  messages.push({
89
176
  role: "tool",
90
177
  name: toolResponse.toolName,
91
- content: JSON.stringify(toolResponse.result)
178
+ tool_call_id: toolResponse.toolCallId,
179
+ content: contentValue
92
180
  });
93
181
  }
94
182
  break;
@@ -102,6 +190,19 @@ function convertToMistralChatMessages(prompt) {
102
190
  return messages;
103
191
  }
104
192
 
193
+ // src/get-response-metadata.ts
194
+ function getResponseMetadata({
195
+ id,
196
+ model,
197
+ created
198
+ }) {
199
+ return {
200
+ id: id != null ? id : void 0,
201
+ modelId: model != null ? model : void 0,
202
+ timestamp: created != null ? new Date(created * 1e3) : void 0
203
+ };
204
+ }
205
+
105
206
  // src/map-mistral-finish-reason.ts
106
207
  function mapMistralFinishReason(finishReason) {
107
208
  switch (finishReason) {
@@ -117,353 +218,570 @@ function mapMistralFinishReason(finishReason) {
117
218
  }
118
219
  }
119
220
 
221
+ // src/mistral-chat-options.ts
222
+ import { z } from "zod/v4";
223
+ var mistralLanguageModelOptions = z.object({
224
+ /**
225
+ Whether to inject a safety prompt before all conversations.
226
+
227
+ Defaults to `false`.
228
+ */
229
+ safePrompt: z.boolean().optional(),
230
+ documentImageLimit: z.number().optional(),
231
+ documentPageLimit: z.number().optional(),
232
+ /**
233
+ * Whether to use structured outputs.
234
+ *
235
+ * @default true
236
+ */
237
+ structuredOutputs: z.boolean().optional(),
238
+ /**
239
+ * Whether to use strict JSON schema validation.
240
+ *
241
+ * @default false
242
+ */
243
+ strictJsonSchema: z.boolean().optional(),
244
+ /**
245
+ * Whether to enable parallel function calling during tool use.
246
+ * When set to false, the model will use at most one tool per response.
247
+ *
248
+ * @default true
249
+ */
250
+ parallelToolCalls: z.boolean().optional()
251
+ });
252
+
120
253
  // src/mistral-error.ts
121
254
  import { createJsonErrorResponseHandler } from "@ai-sdk/provider-utils";
122
- import { z } from "zod";
123
- var mistralErrorDataSchema = z.object({
124
- object: z.literal("error"),
125
- message: z.string(),
126
- type: z.string(),
127
- param: z.string().nullable(),
128
- code: z.string().nullable()
255
+ import { z as z2 } from "zod/v4";
256
+ var mistralErrorDataSchema = z2.object({
257
+ object: z2.literal("error"),
258
+ message: z2.string(),
259
+ type: z2.string(),
260
+ param: z2.string().nullable(),
261
+ code: z2.string().nullable()
129
262
  });
130
263
  var mistralFailedResponseHandler = createJsonErrorResponseHandler({
131
264
  errorSchema: mistralErrorDataSchema,
132
265
  errorToMessage: (data) => data.message
133
266
  });
134
267
 
268
+ // src/mistral-prepare-tools.ts
269
+ import {
270
+ UnsupportedFunctionalityError as UnsupportedFunctionalityError2
271
+ } from "@ai-sdk/provider";
272
+ function prepareTools({
273
+ tools,
274
+ toolChoice
275
+ }) {
276
+ tools = (tools == null ? void 0 : tools.length) ? tools : void 0;
277
+ const toolWarnings = [];
278
+ if (tools == null) {
279
+ return { tools: void 0, toolChoice: void 0, toolWarnings };
280
+ }
281
+ const mistralTools = [];
282
+ for (const tool of tools) {
283
+ if (tool.type === "provider") {
284
+ toolWarnings.push({
285
+ type: "unsupported",
286
+ feature: `provider-defined tool ${tool.id}`
287
+ });
288
+ } else {
289
+ mistralTools.push({
290
+ type: "function",
291
+ function: {
292
+ name: tool.name,
293
+ description: tool.description,
294
+ parameters: tool.inputSchema,
295
+ ...tool.strict != null ? { strict: tool.strict } : {}
296
+ }
297
+ });
298
+ }
299
+ }
300
+ if (toolChoice == null) {
301
+ return { tools: mistralTools, toolChoice: void 0, toolWarnings };
302
+ }
303
+ const type = toolChoice.type;
304
+ switch (type) {
305
+ case "auto":
306
+ case "none":
307
+ return { tools: mistralTools, toolChoice: type, toolWarnings };
308
+ case "required":
309
+ return { tools: mistralTools, toolChoice: "any", toolWarnings };
310
+ // mistral does not support tool mode directly,
311
+ // so we filter the tools and force the tool choice through 'any'
312
+ case "tool":
313
+ return {
314
+ tools: mistralTools.filter(
315
+ (tool) => tool.function.name === toolChoice.toolName
316
+ ),
317
+ toolChoice: "any",
318
+ toolWarnings
319
+ };
320
+ default: {
321
+ const _exhaustiveCheck = type;
322
+ throw new UnsupportedFunctionalityError2({
323
+ functionality: `tool choice type: ${_exhaustiveCheck}`
324
+ });
325
+ }
326
+ }
327
+ }
328
+
135
329
  // src/mistral-chat-language-model.ts
136
330
  var MistralChatLanguageModel = class {
137
- constructor(modelId, settings, config) {
138
- this.specificationVersion = "v1";
139
- this.defaultObjectGenerationMode = "json";
331
+ constructor(modelId, config) {
332
+ this.specificationVersion = "v3";
333
+ this.supportedUrls = {
334
+ "application/pdf": [/^https:\/\/.*$/]
335
+ };
336
+ var _a;
140
337
  this.modelId = modelId;
141
- this.settings = settings;
142
338
  this.config = config;
339
+ this.generateId = (_a = config.generateId) != null ? _a : generateId;
143
340
  }
144
341
  get provider() {
145
342
  return this.config.provider;
146
343
  }
147
- getArgs({
148
- mode,
344
+ async getArgs({
149
345
  prompt,
150
- maxTokens,
346
+ maxOutputTokens,
151
347
  temperature,
152
348
  topP,
349
+ topK,
153
350
  frequencyPenalty,
154
351
  presencePenalty,
155
- seed
352
+ stopSequences,
353
+ responseFormat,
354
+ seed,
355
+ providerOptions,
356
+ tools,
357
+ toolChoice
156
358
  }) {
157
- var _a;
158
- const type = mode.type;
359
+ var _a, _b, _c, _d;
159
360
  const warnings = [];
361
+ const options = (_a = await parseProviderOptions({
362
+ provider: "mistral",
363
+ providerOptions,
364
+ schema: mistralLanguageModelOptions
365
+ })) != null ? _a : {};
366
+ if (topK != null) {
367
+ warnings.push({ type: "unsupported", feature: "topK" });
368
+ }
160
369
  if (frequencyPenalty != null) {
161
- warnings.push({
162
- type: "unsupported-setting",
163
- setting: "frequencyPenalty"
164
- });
370
+ warnings.push({ type: "unsupported", feature: "frequencyPenalty" });
165
371
  }
166
372
  if (presencePenalty != null) {
167
- warnings.push({
168
- type: "unsupported-setting",
169
- setting: "presencePenalty"
373
+ warnings.push({ type: "unsupported", feature: "presencePenalty" });
374
+ }
375
+ if (stopSequences != null) {
376
+ warnings.push({ type: "unsupported", feature: "stopSequences" });
377
+ }
378
+ const structuredOutputs = (_b = options.structuredOutputs) != null ? _b : true;
379
+ const strictJsonSchema = (_c = options.strictJsonSchema) != null ? _c : false;
380
+ if ((responseFormat == null ? void 0 : responseFormat.type) === "json" && !(responseFormat == null ? void 0 : responseFormat.schema)) {
381
+ prompt = injectJsonInstructionIntoMessages({
382
+ messages: prompt,
383
+ schema: responseFormat.schema
170
384
  });
171
385
  }
172
386
  const baseArgs = {
173
387
  // model id:
174
388
  model: this.modelId,
175
389
  // model specific settings:
176
- safe_prompt: this.settings.safePrompt,
390
+ safe_prompt: options.safePrompt,
177
391
  // standardized settings:
178
- max_tokens: maxTokens,
392
+ max_tokens: maxOutputTokens,
179
393
  temperature,
180
394
  top_p: topP,
181
395
  random_seed: seed,
396
+ // response format:
397
+ response_format: (responseFormat == null ? void 0 : responseFormat.type) === "json" ? structuredOutputs && (responseFormat == null ? void 0 : responseFormat.schema) != null ? {
398
+ type: "json_schema",
399
+ json_schema: {
400
+ schema: responseFormat.schema,
401
+ strict: strictJsonSchema,
402
+ name: (_d = responseFormat.name) != null ? _d : "response",
403
+ description: responseFormat.description
404
+ }
405
+ } : { type: "json_object" } : void 0,
406
+ // mistral-specific provider options:
407
+ document_image_limit: options.documentImageLimit,
408
+ document_page_limit: options.documentPageLimit,
182
409
  // messages:
183
410
  messages: convertToMistralChatMessages(prompt)
184
411
  };
185
- switch (type) {
186
- case "regular": {
187
- const tools = ((_a = mode.tools) == null ? void 0 : _a.length) ? mode.tools : void 0;
188
- return {
189
- args: {
190
- ...baseArgs,
191
- tools: tools == null ? void 0 : tools.map((tool) => ({
192
- type: "function",
193
- function: {
194
- name: tool.name,
195
- description: tool.description,
196
- parameters: tool.parameters
197
- }
198
- }))
199
- },
200
- warnings
201
- };
202
- }
203
- case "object-json": {
204
- return {
205
- args: {
206
- ...baseArgs,
207
- response_format: { type: "json_object" }
208
- },
209
- warnings
210
- };
211
- }
212
- case "object-tool": {
213
- return {
214
- args: {
215
- ...baseArgs,
216
- tool_choice: "any",
217
- tools: [{ type: "function", function: mode.tool }]
218
- },
219
- warnings
220
- };
221
- }
222
- case "object-grammar": {
223
- throw new UnsupportedFunctionalityError2({
224
- functionality: "object-grammar mode"
225
- });
226
- }
227
- default: {
228
- const _exhaustiveCheck = type;
229
- throw new Error(`Unsupported type: ${_exhaustiveCheck}`);
230
- }
231
- }
412
+ const {
413
+ tools: mistralTools,
414
+ toolChoice: mistralToolChoice,
415
+ toolWarnings
416
+ } = prepareTools({
417
+ tools,
418
+ toolChoice
419
+ });
420
+ return {
421
+ args: {
422
+ ...baseArgs,
423
+ tools: mistralTools,
424
+ tool_choice: mistralToolChoice,
425
+ ...mistralTools != null && options.parallelToolCalls !== void 0 ? { parallel_tool_calls: options.parallelToolCalls } : {}
426
+ },
427
+ warnings: [...warnings, ...toolWarnings]
428
+ };
232
429
  }
233
430
  async doGenerate(options) {
234
- var _a, _b;
235
- const { args, warnings } = this.getArgs(options);
236
- const { responseHeaders, value: response } = await postJsonToApi({
431
+ var _a;
432
+ const { args: body, warnings } = await this.getArgs(options);
433
+ const {
434
+ responseHeaders,
435
+ value: response,
436
+ rawValue: rawResponse
437
+ } = await postJsonToApi({
237
438
  url: `${this.config.baseURL}/chat/completions`,
238
- headers: this.config.headers(),
239
- body: args,
439
+ headers: combineHeaders(this.config.headers(), options.headers),
440
+ body,
240
441
  failedResponseHandler: mistralFailedResponseHandler,
241
442
  successfulResponseHandler: createJsonResponseHandler(
242
443
  mistralChatResponseSchema
243
444
  ),
244
- abortSignal: options.abortSignal
445
+ abortSignal: options.abortSignal,
446
+ fetch: this.config.fetch
245
447
  });
246
- const { messages: rawPrompt, ...rawSettings } = args;
247
448
  const choice = response.choices[0];
449
+ const content = [];
450
+ if (choice.message.content != null && Array.isArray(choice.message.content)) {
451
+ for (const part of choice.message.content) {
452
+ if (part.type === "thinking") {
453
+ const reasoningText = extractReasoningContent(part.thinking);
454
+ if (reasoningText.length > 0) {
455
+ content.push({ type: "reasoning", text: reasoningText });
456
+ }
457
+ } else if (part.type === "text") {
458
+ if (part.text.length > 0) {
459
+ content.push({ type: "text", text: part.text });
460
+ }
461
+ }
462
+ }
463
+ } else {
464
+ const text = extractTextContent(choice.message.content);
465
+ if (text != null && text.length > 0) {
466
+ content.push({ type: "text", text });
467
+ }
468
+ }
469
+ if (choice.message.tool_calls != null) {
470
+ for (const toolCall of choice.message.tool_calls) {
471
+ content.push({
472
+ type: "tool-call",
473
+ toolCallId: toolCall.id,
474
+ toolName: toolCall.function.name,
475
+ input: toolCall.function.arguments
476
+ });
477
+ }
478
+ }
248
479
  return {
249
- text: (_a = choice.message.content) != null ? _a : void 0,
250
- toolCalls: (_b = choice.message.tool_calls) == null ? void 0 : _b.map((toolCall) => ({
251
- toolCallType: "function",
252
- toolCallId: this.config.generateId(),
253
- toolName: toolCall.function.name,
254
- args: toolCall.function.arguments
255
- })),
256
- finishReason: mapMistralFinishReason(choice.finish_reason),
257
- usage: {
258
- promptTokens: response.usage.prompt_tokens,
259
- completionTokens: response.usage.completion_tokens
480
+ content,
481
+ finishReason: {
482
+ unified: mapMistralFinishReason(choice.finish_reason),
483
+ raw: (_a = choice.finish_reason) != null ? _a : void 0
484
+ },
485
+ usage: convertMistralUsage(response.usage),
486
+ request: { body },
487
+ response: {
488
+ ...getResponseMetadata(response),
489
+ headers: responseHeaders,
490
+ body: rawResponse
260
491
  },
261
- rawCall: { rawPrompt, rawSettings },
262
- rawResponse: { headers: responseHeaders },
263
492
  warnings
264
493
  };
265
494
  }
266
495
  async doStream(options) {
267
- const { args, warnings } = this.getArgs(options);
496
+ const { args, warnings } = await this.getArgs(options);
497
+ const body = { ...args, stream: true };
268
498
  const { responseHeaders, value: response } = await postJsonToApi({
269
499
  url: `${this.config.baseURL}/chat/completions`,
270
- headers: this.config.headers(),
271
- body: {
272
- ...args,
273
- stream: true
274
- },
500
+ headers: combineHeaders(this.config.headers(), options.headers),
501
+ body,
275
502
  failedResponseHandler: mistralFailedResponseHandler,
276
503
  successfulResponseHandler: createEventSourceResponseHandler(
277
504
  mistralChatChunkSchema
278
505
  ),
279
- abortSignal: options.abortSignal
506
+ abortSignal: options.abortSignal,
507
+ fetch: this.config.fetch
280
508
  });
281
- const { messages: rawPrompt, ...rawSettings } = args;
282
- let finishReason = "other";
283
- let usage = {
284
- promptTokens: Number.NaN,
285
- completionTokens: Number.NaN
509
+ let finishReason = {
510
+ unified: "other",
511
+ raw: void 0
286
512
  };
287
- const generateId3 = this.config.generateId;
513
+ let usage = void 0;
514
+ let isFirstChunk = true;
515
+ let activeText = false;
516
+ let activeReasoningId = null;
517
+ const generateId2 = this.generateId;
288
518
  return {
289
519
  stream: response.pipeThrough(
290
520
  new TransformStream({
521
+ start(controller) {
522
+ controller.enqueue({ type: "stream-start", warnings });
523
+ },
291
524
  transform(chunk, controller) {
525
+ if (options.includeRawChunks) {
526
+ controller.enqueue({ type: "raw", rawValue: chunk.rawValue });
527
+ }
292
528
  if (!chunk.success) {
293
529
  controller.enqueue({ type: "error", error: chunk.error });
294
530
  return;
295
531
  }
296
532
  const value = chunk.value;
533
+ if (isFirstChunk) {
534
+ isFirstChunk = false;
535
+ controller.enqueue({
536
+ type: "response-metadata",
537
+ ...getResponseMetadata(value)
538
+ });
539
+ }
297
540
  if (value.usage != null) {
298
- usage = {
299
- promptTokens: value.usage.prompt_tokens,
300
- completionTokens: value.usage.completion_tokens
301
- };
541
+ usage = value.usage;
302
542
  }
303
543
  const choice = value.choices[0];
304
- if ((choice == null ? void 0 : choice.finish_reason) != null) {
305
- finishReason = mapMistralFinishReason(choice.finish_reason);
306
- }
307
- if ((choice == null ? void 0 : choice.delta) == null) {
308
- return;
309
- }
310
544
  const delta = choice.delta;
311
- if (delta.content != null) {
545
+ const textContent = extractTextContent(delta.content);
546
+ if (delta.content != null && Array.isArray(delta.content)) {
547
+ for (const part of delta.content) {
548
+ if (part.type === "thinking") {
549
+ const reasoningDelta = extractReasoningContent(part.thinking);
550
+ if (reasoningDelta.length > 0) {
551
+ if (activeReasoningId == null) {
552
+ if (activeText) {
553
+ controller.enqueue({ type: "text-end", id: "0" });
554
+ activeText = false;
555
+ }
556
+ activeReasoningId = generateId2();
557
+ controller.enqueue({
558
+ type: "reasoning-start",
559
+ id: activeReasoningId
560
+ });
561
+ }
562
+ controller.enqueue({
563
+ type: "reasoning-delta",
564
+ id: activeReasoningId,
565
+ delta: reasoningDelta
566
+ });
567
+ }
568
+ }
569
+ }
570
+ }
571
+ if (textContent != null && textContent.length > 0) {
572
+ if (!activeText) {
573
+ if (activeReasoningId != null) {
574
+ controller.enqueue({
575
+ type: "reasoning-end",
576
+ id: activeReasoningId
577
+ });
578
+ activeReasoningId = null;
579
+ }
580
+ controller.enqueue({ type: "text-start", id: "0" });
581
+ activeText = true;
582
+ }
312
583
  controller.enqueue({
313
584
  type: "text-delta",
314
- textDelta: delta.content
585
+ id: "0",
586
+ delta: textContent
315
587
  });
316
588
  }
317
- if (delta.tool_calls != null) {
589
+ if ((delta == null ? void 0 : delta.tool_calls) != null) {
318
590
  for (const toolCall of delta.tool_calls) {
319
- const toolCallId = generateId3();
591
+ const toolCallId = toolCall.id;
592
+ const toolName = toolCall.function.name;
593
+ const input = toolCall.function.arguments;
320
594
  controller.enqueue({
321
- type: "tool-call-delta",
322
- toolCallType: "function",
323
- toolCallId,
324
- toolName: toolCall.function.name,
325
- argsTextDelta: toolCall.function.arguments
595
+ type: "tool-input-start",
596
+ id: toolCallId,
597
+ toolName
598
+ });
599
+ controller.enqueue({
600
+ type: "tool-input-delta",
601
+ id: toolCallId,
602
+ delta: input
603
+ });
604
+ controller.enqueue({
605
+ type: "tool-input-end",
606
+ id: toolCallId
326
607
  });
327
608
  controller.enqueue({
328
609
  type: "tool-call",
329
- toolCallType: "function",
330
610
  toolCallId,
331
- toolName: toolCall.function.name,
332
- args: toolCall.function.arguments
611
+ toolName,
612
+ input
333
613
  });
334
614
  }
335
615
  }
616
+ if (choice.finish_reason != null) {
617
+ finishReason = {
618
+ unified: mapMistralFinishReason(choice.finish_reason),
619
+ raw: choice.finish_reason
620
+ };
621
+ }
336
622
  },
337
623
  flush(controller) {
338
- controller.enqueue({ type: "finish", finishReason, usage });
624
+ if (activeReasoningId != null) {
625
+ controller.enqueue({
626
+ type: "reasoning-end",
627
+ id: activeReasoningId
628
+ });
629
+ }
630
+ if (activeText) {
631
+ controller.enqueue({ type: "text-end", id: "0" });
632
+ }
633
+ controller.enqueue({
634
+ type: "finish",
635
+ finishReason,
636
+ usage: convertMistralUsage(usage)
637
+ });
339
638
  }
340
639
  })
341
640
  ),
342
- rawCall: { rawPrompt, rawSettings },
343
- rawResponse: { headers: responseHeaders },
344
- warnings
641
+ request: { body },
642
+ response: { headers: responseHeaders }
345
643
  };
346
644
  }
347
645
  };
348
- var mistralChatResponseSchema = z2.object({
349
- choices: z2.array(
350
- z2.object({
351
- message: z2.object({
352
- role: z2.literal("assistant"),
353
- content: z2.string().nullable(),
354
- tool_calls: z2.array(
355
- z2.object({
356
- function: z2.object({
357
- name: z2.string(),
358
- arguments: z2.string()
359
- })
646
+ function extractReasoningContent(thinking) {
647
+ return thinking.filter((chunk) => chunk.type === "text").map((chunk) => chunk.text).join("");
648
+ }
649
+ function extractTextContent(content) {
650
+ if (typeof content === "string") {
651
+ return content;
652
+ }
653
+ if (content == null) {
654
+ return void 0;
655
+ }
656
+ const textContent = [];
657
+ for (const chunk of content) {
658
+ const { type } = chunk;
659
+ switch (type) {
660
+ case "text":
661
+ textContent.push(chunk.text);
662
+ break;
663
+ case "thinking":
664
+ case "image_url":
665
+ case "reference":
666
+ break;
667
+ default: {
668
+ const _exhaustiveCheck = type;
669
+ throw new Error(`Unsupported type: ${_exhaustiveCheck}`);
670
+ }
671
+ }
672
+ }
673
+ return textContent.length ? textContent.join("") : void 0;
674
+ }
675
+ var mistralContentSchema = z3.union([
676
+ z3.string(),
677
+ z3.array(
678
+ z3.discriminatedUnion("type", [
679
+ z3.object({
680
+ type: z3.literal("text"),
681
+ text: z3.string()
682
+ }),
683
+ z3.object({
684
+ type: z3.literal("image_url"),
685
+ image_url: z3.union([
686
+ z3.string(),
687
+ z3.object({
688
+ url: z3.string(),
689
+ detail: z3.string().nullable()
690
+ })
691
+ ])
692
+ }),
693
+ z3.object({
694
+ type: z3.literal("reference"),
695
+ reference_ids: z3.array(z3.union([z3.string(), z3.number()]))
696
+ }),
697
+ z3.object({
698
+ type: z3.literal("thinking"),
699
+ thinking: z3.array(
700
+ z3.object({
701
+ type: z3.literal("text"),
702
+ text: z3.string()
703
+ })
704
+ )
705
+ })
706
+ ])
707
+ )
708
+ ]).nullish();
709
+ var mistralUsageSchema = z3.object({
710
+ prompt_tokens: z3.number(),
711
+ completion_tokens: z3.number(),
712
+ total_tokens: z3.number()
713
+ });
714
+ var mistralChatResponseSchema = z3.object({
715
+ id: z3.string().nullish(),
716
+ created: z3.number().nullish(),
717
+ model: z3.string().nullish(),
718
+ choices: z3.array(
719
+ z3.object({
720
+ message: z3.object({
721
+ role: z3.literal("assistant"),
722
+ content: mistralContentSchema,
723
+ tool_calls: z3.array(
724
+ z3.object({
725
+ id: z3.string(),
726
+ function: z3.object({ name: z3.string(), arguments: z3.string() })
360
727
  })
361
- ).optional().nullable()
728
+ ).nullish()
362
729
  }),
363
- index: z2.number(),
364
- finish_reason: z2.string().optional().nullable()
730
+ index: z3.number(),
731
+ finish_reason: z3.string().nullish()
365
732
  })
366
733
  ),
367
- object: z2.literal("chat.completion"),
368
- usage: z2.object({
369
- prompt_tokens: z2.number(),
370
- completion_tokens: z2.number()
371
- })
734
+ object: z3.literal("chat.completion"),
735
+ usage: mistralUsageSchema
372
736
  });
373
- var mistralChatChunkSchema = z2.object({
374
- object: z2.literal("chat.completion.chunk"),
375
- choices: z2.array(
376
- z2.object({
377
- delta: z2.object({
378
- role: z2.enum(["assistant"]).optional(),
379
- content: z2.string().nullable().optional(),
380
- tool_calls: z2.array(
381
- z2.object({
382
- function: z2.object({ name: z2.string(), arguments: z2.string() })
737
+ var mistralChatChunkSchema = z3.object({
738
+ id: z3.string().nullish(),
739
+ created: z3.number().nullish(),
740
+ model: z3.string().nullish(),
741
+ choices: z3.array(
742
+ z3.object({
743
+ delta: z3.object({
744
+ role: z3.enum(["assistant"]).optional(),
745
+ content: mistralContentSchema,
746
+ tool_calls: z3.array(
747
+ z3.object({
748
+ id: z3.string(),
749
+ function: z3.object({ name: z3.string(), arguments: z3.string() })
383
750
  })
384
- ).optional().nullable()
751
+ ).nullish()
385
752
  }),
386
- finish_reason: z2.string().nullable().optional(),
387
- index: z2.number()
753
+ finish_reason: z3.string().nullish(),
754
+ index: z3.number()
388
755
  })
389
756
  ),
390
- usage: z2.object({
391
- prompt_tokens: z2.number(),
392
- completion_tokens: z2.number()
393
- }).optional().nullable()
757
+ usage: mistralUsageSchema.nullish()
394
758
  });
395
759
 
396
- // src/mistral-facade.ts
397
- var Mistral = class {
398
- /**
399
- * Creates a new Mistral provider instance.
400
- */
401
- constructor(options = {}) {
402
- var _a, _b, _c;
403
- this.baseURL = (_b = withoutTrailingSlash((_a = options.baseURL) != null ? _a : options.baseUrl)) != null ? _b : "https://api.mistral.ai/v1";
404
- this.apiKey = options.apiKey;
405
- this.headers = options.headers;
406
- this.generateId = (_c = options.generateId) != null ? _c : generateId;
407
- }
408
- get baseConfig() {
409
- return {
410
- baseURL: this.baseURL,
411
- headers: () => ({
412
- Authorization: `Bearer ${loadApiKey({
413
- apiKey: this.apiKey,
414
- environmentVariableName: "MISTRAL_API_KEY",
415
- description: "Mistral"
416
- })}`,
417
- ...this.headers
418
- })
419
- };
420
- }
421
- chat(modelId, settings = {}) {
422
- return new MistralChatLanguageModel(modelId, settings, {
423
- provider: "mistral.chat",
424
- ...this.baseConfig,
425
- generateId: this.generateId
426
- });
427
- }
428
- };
429
-
430
- // src/mistral-provider.ts
431
- import {
432
- generateId as generateId2,
433
- loadApiKey as loadApiKey2,
434
- withoutTrailingSlash as withoutTrailingSlash2
435
- } from "@ai-sdk/provider-utils";
436
-
437
760
  // src/mistral-embedding-model.ts
438
761
  import {
439
762
  TooManyEmbeddingValuesForCallError
440
763
  } from "@ai-sdk/provider";
441
764
  import {
765
+ combineHeaders as combineHeaders2,
442
766
  createJsonResponseHandler as createJsonResponseHandler2,
443
767
  postJsonToApi as postJsonToApi2
444
768
  } from "@ai-sdk/provider-utils";
445
- import { z as z3 } from "zod";
769
+ import { z as z4 } from "zod/v4";
446
770
  var MistralEmbeddingModel = class {
447
- constructor(modelId, settings, config) {
448
- this.specificationVersion = "v1";
771
+ constructor(modelId, config) {
772
+ this.specificationVersion = "v3";
773
+ this.maxEmbeddingsPerCall = 32;
774
+ this.supportsParallelCalls = false;
449
775
  this.modelId = modelId;
450
- this.settings = settings;
451
776
  this.config = config;
452
777
  }
453
778
  get provider() {
454
779
  return this.config.provider;
455
780
  }
456
- get maxEmbeddingsPerCall() {
457
- var _a;
458
- return (_a = this.settings.maxEmbeddingsPerCall) != null ? _a : 32;
459
- }
460
- get supportsParallelCalls() {
461
- var _a;
462
- return (_a = this.settings.supportsParallelCalls) != null ? _a : false;
463
- }
464
781
  async doEmbed({
465
782
  values,
466
- abortSignal
783
+ abortSignal,
784
+ headers
467
785
  }) {
468
786
  if (values.length > this.maxEmbeddingsPerCall) {
469
787
  throw new TooManyEmbeddingValuesForCallError({
@@ -473,9 +791,13 @@ var MistralEmbeddingModel = class {
473
791
  values
474
792
  });
475
793
  }
476
- const { responseHeaders, value: response } = await postJsonToApi2({
794
+ const {
795
+ responseHeaders,
796
+ value: response,
797
+ rawValue
798
+ } = await postJsonToApi2({
477
799
  url: `${this.config.baseURL}/embeddings`,
478
- headers: this.config.headers(),
800
+ headers: combineHeaders2(this.config.headers(), headers),
479
801
  body: {
480
802
  model: this.modelId,
481
803
  input: values,
@@ -485,63 +807,76 @@ var MistralEmbeddingModel = class {
485
807
  successfulResponseHandler: createJsonResponseHandler2(
486
808
  MistralTextEmbeddingResponseSchema
487
809
  ),
488
- abortSignal
810
+ abortSignal,
811
+ fetch: this.config.fetch
489
812
  });
490
813
  return {
814
+ warnings: [],
491
815
  embeddings: response.data.map((item) => item.embedding),
492
- rawResponse: { headers: responseHeaders }
816
+ usage: response.usage ? { tokens: response.usage.prompt_tokens } : void 0,
817
+ response: { headers: responseHeaders, body: rawValue }
493
818
  };
494
819
  }
495
820
  };
496
- var MistralTextEmbeddingResponseSchema = z3.object({
497
- data: z3.array(
498
- z3.object({
499
- embedding: z3.array(z3.number())
500
- })
501
- )
821
+ var MistralTextEmbeddingResponseSchema = z4.object({
822
+ data: z4.array(z4.object({ embedding: z4.array(z4.number()) })),
823
+ usage: z4.object({ prompt_tokens: z4.number() }).nullish()
502
824
  });
503
825
 
826
+ // src/version.ts
827
+ var VERSION = true ? "0.0.0-98261322-20260122142521" : "0.0.0-test";
828
+
504
829
  // src/mistral-provider.ts
505
830
  function createMistral(options = {}) {
506
- var _a, _b;
507
- const baseURL = (_b = withoutTrailingSlash2((_a = options.baseURL) != null ? _a : options.baseUrl)) != null ? _b : "https://api.mistral.ai/v1";
508
- const getHeaders = () => ({
509
- Authorization: `Bearer ${loadApiKey2({
510
- apiKey: options.apiKey,
511
- environmentVariableName: "MISTRAL_API_KEY",
512
- description: "Mistral"
513
- })}`,
514
- ...options.headers
831
+ var _a;
832
+ const baseURL = (_a = withoutTrailingSlash(options.baseURL)) != null ? _a : "https://api.mistral.ai/v1";
833
+ const getHeaders = () => withUserAgentSuffix(
834
+ {
835
+ Authorization: `Bearer ${loadApiKey({
836
+ apiKey: options.apiKey,
837
+ environmentVariableName: "MISTRAL_API_KEY",
838
+ description: "Mistral"
839
+ })}`,
840
+ ...options.headers
841
+ },
842
+ `ai-sdk/mistral/${VERSION}`
843
+ );
844
+ const createChatModel = (modelId) => new MistralChatLanguageModel(modelId, {
845
+ provider: "mistral.chat",
846
+ baseURL,
847
+ headers: getHeaders,
848
+ fetch: options.fetch,
849
+ generateId: options.generateId
515
850
  });
516
- const createChatModel = (modelId, settings = {}) => {
517
- var _a2;
518
- return new MistralChatLanguageModel(modelId, settings, {
519
- provider: "mistral.chat",
520
- baseURL,
521
- headers: getHeaders,
522
- generateId: (_a2 = options.generateId) != null ? _a2 : generateId2
523
- });
524
- };
525
- const createEmbeddingModel = (modelId, settings = {}) => new MistralEmbeddingModel(modelId, settings, {
851
+ const createEmbeddingModel = (modelId) => new MistralEmbeddingModel(modelId, {
526
852
  provider: "mistral.embedding",
527
853
  baseURL,
528
- headers: getHeaders
854
+ headers: getHeaders,
855
+ fetch: options.fetch
529
856
  });
530
- const provider = function(modelId, settings) {
857
+ const provider = function(modelId) {
531
858
  if (new.target) {
532
859
  throw new Error(
533
860
  "The Mistral model function cannot be called with the new keyword."
534
861
  );
535
862
  }
536
- return createChatModel(modelId, settings);
863
+ return createChatModel(modelId);
537
864
  };
865
+ provider.specificationVersion = "v3";
866
+ provider.languageModel = createChatModel;
538
867
  provider.chat = createChatModel;
539
868
  provider.embedding = createEmbeddingModel;
869
+ provider.embeddingModel = createEmbeddingModel;
870
+ provider.textEmbedding = createEmbeddingModel;
871
+ provider.textEmbeddingModel = createEmbeddingModel;
872
+ provider.imageModel = (modelId) => {
873
+ throw new NoSuchModelError({ modelId, modelType: "imageModel" });
874
+ };
540
875
  return provider;
541
876
  }
542
877
  var mistral = createMistral();
543
878
  export {
544
- Mistral,
879
+ VERSION,
545
880
  createMistral,
546
881
  mistral
547
882
  };
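
The new build also adds Mistral-specific call options validated by `mistralLanguageModelOptions`. A hedged sketch of that options object and the request fields each key maps to, per the code above; passing it under a `providerOptions.mistral` key at call time is an assumption about the surrounding AI SDK calling convention, and the numeric values are illustrative:

```js
import { createMistral } from '@ai-sdk/mistral';

const mistral = createMistral({ apiKey: process.env.MISTRAL_API_KEY });
const model = mistral.languageModel('mistral-small-latest'); // placeholder model ID

// Keys accepted under the "mistral" provider-options namespace
// (see mistralLanguageModelOptions in the diff above):
const mistralOptions = {
  safePrompt: true,         // sent as safe_prompt
  structuredOutputs: true,  // a JSON responseFormat with a schema becomes response_format.json_schema
  strictJsonSchema: false,  // strict flag inside response_format.json_schema
  parallelToolCalls: false, // sent as parallel_tool_calls when tools are provided
  documentImageLimit: 8,    // sent as document_image_limit (illustrative value)
  documentPageLimit: 64,    // sent as document_page_limit (illustrative value)
};

console.log(model.modelId, mistralOptions);
```
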