@ai-sdk/mistral 0.0.0-1c33ba03-20260114162300

This diff shows the content of a publicly available package version as released to one of the supported registries. It is provided for informational purposes only and reflects the package contents exactly as they appear in the public registry.
package/dist/index.mjs ADDED
@@ -0,0 +1,883 @@
+ // src/mistral-provider.ts
+ import {
+   NoSuchModelError
+ } from "@ai-sdk/provider";
+ import {
+   loadApiKey,
+   withoutTrailingSlash,
+   withUserAgentSuffix
+ } from "@ai-sdk/provider-utils";
+
+ // src/mistral-chat-language-model.ts
+ import {
+   combineHeaders,
+   createEventSourceResponseHandler,
+   createJsonResponseHandler,
+   generateId,
+   injectJsonInstructionIntoMessages,
+   parseProviderOptions,
+   postJsonToApi
+ } from "@ai-sdk/provider-utils";
+ import { z as z3 } from "zod/v4";
+
+ // src/convert-mistral-usage.ts
+ function convertMistralUsage(usage) {
+   if (usage == null) {
+     return {
+       inputTokens: {
+         total: void 0,
+         noCache: void 0,
+         cacheRead: void 0,
+         cacheWrite: void 0
+       },
+       outputTokens: {
+         total: void 0,
+         text: void 0,
+         reasoning: void 0
+       },
+       raw: void 0
+     };
+   }
+   const promptTokens = usage.prompt_tokens;
+   const completionTokens = usage.completion_tokens;
+   return {
+     inputTokens: {
+       total: promptTokens,
+       noCache: promptTokens,
+       cacheRead: void 0,
+       cacheWrite: void 0
+     },
+     outputTokens: {
+       total: completionTokens,
+       text: completionTokens,
+       reasoning: void 0
+     },
+     raw: usage
+   };
+ }
+
+ // src/convert-to-mistral-chat-messages.ts
+ import {
+   UnsupportedFunctionalityError
+ } from "@ai-sdk/provider";
+ import { convertToBase64 } from "@ai-sdk/provider-utils";
+ function formatFileUrl({
+   data,
+   mediaType
+ }) {
+   return data instanceof URL ? data.toString() : `data:${mediaType};base64,${convertToBase64(data)}`;
+ }
+ function convertToMistralChatMessages(prompt) {
+   var _a;
+   const messages = [];
+   for (let i = 0; i < prompt.length; i++) {
+     const { role, content } = prompt[i];
+     const isLastMessage = i === prompt.length - 1;
+     switch (role) {
+       case "system": {
+         messages.push({ role: "system", content });
+         break;
+       }
+       case "user": {
+         messages.push({
+           role: "user",
+           content: content.map((part) => {
+             switch (part.type) {
+               case "text": {
+                 return { type: "text", text: part.text };
+               }
+               case "file": {
+                 if (part.mediaType.startsWith("image/")) {
+                   const mediaType = part.mediaType === "image/*" ? "image/jpeg" : part.mediaType;
+                   return {
+                     type: "image_url",
+                     image_url: formatFileUrl({ data: part.data, mediaType })
+                   };
+                 } else if (part.mediaType === "application/pdf") {
+                   return {
+                     type: "document_url",
+                     document_url: formatFileUrl({
+                       data: part.data,
+                       mediaType: "application/pdf"
+                     })
+                   };
+                 } else {
+                   throw new UnsupportedFunctionalityError({
+                     functionality: "Only images and PDF file parts are supported"
+                   });
+                 }
+               }
+             }
+           })
+         });
+         break;
+       }
+       case "assistant": {
+         let text = "";
+         const toolCalls = [];
+         for (const part of content) {
+           switch (part.type) {
+             case "text": {
+               text += part.text;
+               break;
+             }
+             case "tool-call": {
+               toolCalls.push({
+                 id: part.toolCallId,
+                 type: "function",
+                 function: {
+                   name: part.toolName,
+                   arguments: JSON.stringify(part.input)
+                 }
+               });
+               break;
+             }
+             case "reasoning": {
+               text += part.text;
+               break;
+             }
+             default: {
+               throw new Error(
+                 `Unsupported content type in assistant message: ${part.type}`
+               );
+             }
+           }
+         }
+         messages.push({
+           role: "assistant",
+           content: text,
+           prefix: isLastMessage ? true : void 0,
+           tool_calls: toolCalls.length > 0 ? toolCalls : void 0
+         });
+         break;
+       }
+       case "tool": {
+         for (const toolResponse of content) {
+           if (toolResponse.type === "tool-approval-response") {
+             continue;
+           }
+           const output = toolResponse.output;
+           let contentValue;
+           switch (output.type) {
+             case "text":
+             case "error-text":
+               contentValue = output.value;
+               break;
+             case "execution-denied":
+               contentValue = (_a = output.reason) != null ? _a : "Tool execution denied.";
+               break;
+             case "content":
+             case "json":
+             case "error-json":
+               contentValue = JSON.stringify(output.value);
+               break;
+           }
+           messages.push({
+             role: "tool",
+             name: toolResponse.toolName,
+             tool_call_id: toolResponse.toolCallId,
+             content: contentValue
+           });
+         }
+         break;
+       }
+       default: {
+         const _exhaustiveCheck = role;
+         throw new Error(`Unsupported role: ${_exhaustiveCheck}`);
+       }
+     }
+   }
+   return messages;
+ }
+
+ // src/get-response-metadata.ts
+ function getResponseMetadata({
+   id,
+   model,
+   created
+ }) {
+   return {
+     id: id != null ? id : void 0,
+     modelId: model != null ? model : void 0,
+     timestamp: created != null ? new Date(created * 1e3) : void 0
+   };
+ }
+
+ // src/map-mistral-finish-reason.ts
+ function mapMistralFinishReason(finishReason) {
+   switch (finishReason) {
+     case "stop":
+       return "stop";
+     case "length":
+     case "model_length":
+       return "length";
+     case "tool_calls":
+       return "tool-calls";
+     default:
+       return "other";
+   }
+ }
+
+ // src/mistral-chat-options.ts
+ import { z } from "zod/v4";
+ var mistralLanguageModelOptions = z.object({
+   /**
+   Whether to inject a safety prompt before all conversations.
+
+   Defaults to `false`.
+   */
+   safePrompt: z.boolean().optional(),
+   documentImageLimit: z.number().optional(),
+   documentPageLimit: z.number().optional(),
+   /**
+    * Whether to use structured outputs.
+    *
+    * @default true
+    */
+   structuredOutputs: z.boolean().optional(),
+   /**
+    * Whether to use strict JSON schema validation.
+    *
+    * @default false
+    */
+   strictJsonSchema: z.boolean().optional(),
+   /**
+    * Whether to enable parallel function calling during tool use.
+    * When set to false, the model will use at most one tool per response.
+    *
+    * @default true
+    */
+   parallelToolCalls: z.boolean().optional()
+ });
+
+ // src/mistral-error.ts
+ import { createJsonErrorResponseHandler } from "@ai-sdk/provider-utils";
+ import { z as z2 } from "zod/v4";
+ var mistralErrorDataSchema = z2.object({
+   object: z2.literal("error"),
+   message: z2.string(),
+   type: z2.string(),
+   param: z2.string().nullable(),
+   code: z2.string().nullable()
+ });
+ var mistralFailedResponseHandler = createJsonErrorResponseHandler({
+   errorSchema: mistralErrorDataSchema,
+   errorToMessage: (data) => data.message
+ });
+
+ // src/mistral-prepare-tools.ts
+ import {
+   UnsupportedFunctionalityError as UnsupportedFunctionalityError2
+ } from "@ai-sdk/provider";
+ function prepareTools({
+   tools,
+   toolChoice
+ }) {
+   tools = (tools == null ? void 0 : tools.length) ? tools : void 0;
+   const toolWarnings = [];
+   if (tools == null) {
+     return { tools: void 0, toolChoice: void 0, toolWarnings };
+   }
+   const mistralTools = [];
+   for (const tool of tools) {
+     if (tool.type === "provider") {
+       toolWarnings.push({
+         type: "unsupported",
+         feature: `provider-defined tool ${tool.id}`
+       });
+     } else {
+       mistralTools.push({
+         type: "function",
+         function: {
+           name: tool.name,
+           description: tool.description,
+           parameters: tool.inputSchema,
+           ...tool.strict != null ? { strict: tool.strict } : {}
+         }
+       });
+     }
+   }
+   if (toolChoice == null) {
+     return { tools: mistralTools, toolChoice: void 0, toolWarnings };
+   }
+   const type = toolChoice.type;
+   switch (type) {
+     case "auto":
+     case "none":
+       return { tools: mistralTools, toolChoice: type, toolWarnings };
+     case "required":
+       return { tools: mistralTools, toolChoice: "any", toolWarnings };
+     // mistral does not support tool mode directly,
+     // so we filter the tools and force the tool choice through 'any'
+     case "tool":
+       return {
+         tools: mistralTools.filter(
+           (tool) => tool.function.name === toolChoice.toolName
+         ),
+         toolChoice: "any",
+         toolWarnings
+       };
+     default: {
+       const _exhaustiveCheck = type;
+       throw new UnsupportedFunctionalityError2({
+         functionality: `tool choice type: ${_exhaustiveCheck}`
+       });
+     }
+   }
+ }
+
+ // src/mistral-chat-language-model.ts
+ var MistralChatLanguageModel = class {
+   constructor(modelId, config) {
+     this.specificationVersion = "v3";
+     this.supportedUrls = {
+       "application/pdf": [/^https:\/\/.*$/]
+     };
+     var _a;
+     this.modelId = modelId;
+     this.config = config;
+     this.generateId = (_a = config.generateId) != null ? _a : generateId;
+   }
+   get provider() {
+     return this.config.provider;
+   }
+   async getArgs({
+     prompt,
+     maxOutputTokens,
+     temperature,
+     topP,
+     topK,
+     frequencyPenalty,
+     presencePenalty,
+     stopSequences,
+     responseFormat,
+     seed,
+     providerOptions,
+     tools,
+     toolChoice
+   }) {
+     var _a, _b, _c, _d;
+     const warnings = [];
+     const options = (_a = await parseProviderOptions({
+       provider: "mistral",
+       providerOptions,
+       schema: mistralLanguageModelOptions
+     })) != null ? _a : {};
+     if (topK != null) {
+       warnings.push({ type: "unsupported", feature: "topK" });
+     }
+     if (frequencyPenalty != null) {
+       warnings.push({ type: "unsupported", feature: "frequencyPenalty" });
+     }
+     if (presencePenalty != null) {
+       warnings.push({ type: "unsupported", feature: "presencePenalty" });
+     }
+     if (stopSequences != null) {
+       warnings.push({ type: "unsupported", feature: "stopSequences" });
+     }
+     const structuredOutputs = (_b = options.structuredOutputs) != null ? _b : true;
+     const strictJsonSchema = (_c = options.strictJsonSchema) != null ? _c : false;
+     if ((responseFormat == null ? void 0 : responseFormat.type) === "json" && !(responseFormat == null ? void 0 : responseFormat.schema)) {
+       prompt = injectJsonInstructionIntoMessages({
+         messages: prompt,
+         schema: responseFormat.schema
+       });
+     }
+     const baseArgs = {
+       // model id:
+       model: this.modelId,
+       // model specific settings:
+       safe_prompt: options.safePrompt,
+       // standardized settings:
+       max_tokens: maxOutputTokens,
+       temperature,
+       top_p: topP,
+       random_seed: seed,
+       // response format:
+       response_format: (responseFormat == null ? void 0 : responseFormat.type) === "json" ? structuredOutputs && (responseFormat == null ? void 0 : responseFormat.schema) != null ? {
+         type: "json_schema",
+         json_schema: {
+           schema: responseFormat.schema,
+           strict: strictJsonSchema,
+           name: (_d = responseFormat.name) != null ? _d : "response",
+           description: responseFormat.description
+         }
+       } : { type: "json_object" } : void 0,
+       // mistral-specific provider options:
+       document_image_limit: options.documentImageLimit,
+       document_page_limit: options.documentPageLimit,
+       // messages:
+       messages: convertToMistralChatMessages(prompt)
+     };
+     const {
+       tools: mistralTools,
+       toolChoice: mistralToolChoice,
+       toolWarnings
+     } = prepareTools({
+       tools,
+       toolChoice
+     });
+     return {
+       args: {
+         ...baseArgs,
+         tools: mistralTools,
+         tool_choice: mistralToolChoice,
+         ...mistralTools != null && options.parallelToolCalls !== void 0 ? { parallel_tool_calls: options.parallelToolCalls } : {}
+       },
+       warnings: [...warnings, ...toolWarnings]
+     };
+   }
+   async doGenerate(options) {
+     var _a;
+     const { args: body, warnings } = await this.getArgs(options);
+     const {
+       responseHeaders,
+       value: response,
+       rawValue: rawResponse
+     } = await postJsonToApi({
+       url: `${this.config.baseURL}/chat/completions`,
+       headers: combineHeaders(this.config.headers(), options.headers),
+       body,
+       failedResponseHandler: mistralFailedResponseHandler,
+       successfulResponseHandler: createJsonResponseHandler(
+         mistralChatResponseSchema
+       ),
+       abortSignal: options.abortSignal,
+       fetch: this.config.fetch
+     });
+     const choice = response.choices[0];
+     const content = [];
+     if (choice.message.content != null && Array.isArray(choice.message.content)) {
+       for (const part of choice.message.content) {
+         if (part.type === "thinking") {
+           const reasoningText = extractReasoningContent(part.thinking);
+           if (reasoningText.length > 0) {
+             content.push({ type: "reasoning", text: reasoningText });
+           }
+         } else if (part.type === "text") {
+           if (part.text.length > 0) {
+             content.push({ type: "text", text: part.text });
+           }
+         }
+       }
+     } else {
+       const text = extractTextContent(choice.message.content);
+       if (text != null && text.length > 0) {
+         content.push({ type: "text", text });
+       }
+     }
+     if (choice.message.tool_calls != null) {
+       for (const toolCall of choice.message.tool_calls) {
+         content.push({
+           type: "tool-call",
+           toolCallId: toolCall.id,
+           toolName: toolCall.function.name,
+           input: toolCall.function.arguments
+         });
+       }
+     }
+     return {
+       content,
+       finishReason: {
+         unified: mapMistralFinishReason(choice.finish_reason),
+         raw: (_a = choice.finish_reason) != null ? _a : void 0
+       },
+       usage: convertMistralUsage(response.usage),
+       request: { body },
+       response: {
+         ...getResponseMetadata(response),
+         headers: responseHeaders,
+         body: rawResponse
+       },
+       warnings
+     };
+   }
+   async doStream(options) {
+     const { args, warnings } = await this.getArgs(options);
+     const body = { ...args, stream: true };
+     const { responseHeaders, value: response } = await postJsonToApi({
+       url: `${this.config.baseURL}/chat/completions`,
+       headers: combineHeaders(this.config.headers(), options.headers),
+       body,
+       failedResponseHandler: mistralFailedResponseHandler,
+       successfulResponseHandler: createEventSourceResponseHandler(
+         mistralChatChunkSchema
+       ),
+       abortSignal: options.abortSignal,
+       fetch: this.config.fetch
+     });
+     let finishReason = {
+       unified: "other",
+       raw: void 0
+     };
+     let usage = void 0;
+     let isFirstChunk = true;
+     let activeText = false;
+     let activeReasoningId = null;
+     const generateId2 = this.generateId;
+     return {
+       stream: response.pipeThrough(
+         new TransformStream({
+           start(controller) {
+             controller.enqueue({ type: "stream-start", warnings });
+           },
+           transform(chunk, controller) {
+             if (options.includeRawChunks) {
+               controller.enqueue({ type: "raw", rawValue: chunk.rawValue });
+             }
+             if (!chunk.success) {
+               controller.enqueue({ type: "error", error: chunk.error });
+               return;
+             }
+             const value = chunk.value;
+             if (isFirstChunk) {
+               isFirstChunk = false;
+               controller.enqueue({
+                 type: "response-metadata",
+                 ...getResponseMetadata(value)
+               });
+             }
+             if (value.usage != null) {
+               usage = value.usage;
+             }
+             const choice = value.choices[0];
+             const delta = choice.delta;
+             const textContent = extractTextContent(delta.content);
+             if (delta.content != null && Array.isArray(delta.content)) {
+               for (const part of delta.content) {
+                 if (part.type === "thinking") {
+                   const reasoningDelta = extractReasoningContent(part.thinking);
+                   if (reasoningDelta.length > 0) {
+                     if (activeReasoningId == null) {
+                       if (activeText) {
+                         controller.enqueue({ type: "text-end", id: "0" });
+                         activeText = false;
+                       }
+                       activeReasoningId = generateId2();
+                       controller.enqueue({
+                         type: "reasoning-start",
+                         id: activeReasoningId
+                       });
+                     }
+                     controller.enqueue({
+                       type: "reasoning-delta",
+                       id: activeReasoningId,
+                       delta: reasoningDelta
+                     });
+                   }
+                 }
+               }
+             }
+             if (textContent != null && textContent.length > 0) {
+               if (!activeText) {
+                 if (activeReasoningId != null) {
+                   controller.enqueue({
+                     type: "reasoning-end",
+                     id: activeReasoningId
+                   });
+                   activeReasoningId = null;
+                 }
+                 controller.enqueue({ type: "text-start", id: "0" });
+                 activeText = true;
+               }
+               controller.enqueue({
+                 type: "text-delta",
+                 id: "0",
+                 delta: textContent
+               });
+             }
+             if ((delta == null ? void 0 : delta.tool_calls) != null) {
+               for (const toolCall of delta.tool_calls) {
+                 const toolCallId = toolCall.id;
+                 const toolName = toolCall.function.name;
+                 const input = toolCall.function.arguments;
+                 controller.enqueue({
+                   type: "tool-input-start",
+                   id: toolCallId,
+                   toolName
+                 });
+                 controller.enqueue({
+                   type: "tool-input-delta",
+                   id: toolCallId,
+                   delta: input
+                 });
+                 controller.enqueue({
+                   type: "tool-input-end",
+                   id: toolCallId
+                 });
+                 controller.enqueue({
+                   type: "tool-call",
+                   toolCallId,
+                   toolName,
+                   input
+                 });
+               }
+             }
+             if (choice.finish_reason != null) {
+               finishReason = {
+                 unified: mapMistralFinishReason(choice.finish_reason),
+                 raw: choice.finish_reason
+               };
+             }
+           },
+           flush(controller) {
+             if (activeReasoningId != null) {
+               controller.enqueue({
+                 type: "reasoning-end",
+                 id: activeReasoningId
+               });
+             }
+             if (activeText) {
+               controller.enqueue({ type: "text-end", id: "0" });
+             }
+             controller.enqueue({
+               type: "finish",
+               finishReason,
+               usage: convertMistralUsage(usage)
+             });
+           }
+         })
+       ),
+       request: { body },
+       response: { headers: responseHeaders }
+     };
+   }
+ };
+ function extractReasoningContent(thinking) {
+   return thinking.filter((chunk) => chunk.type === "text").map((chunk) => chunk.text).join("");
+ }
+ function extractTextContent(content) {
+   if (typeof content === "string") {
+     return content;
+   }
+   if (content == null) {
+     return void 0;
+   }
+   const textContent = [];
+   for (const chunk of content) {
+     const { type } = chunk;
+     switch (type) {
+       case "text":
+         textContent.push(chunk.text);
+         break;
+       case "thinking":
+       case "image_url":
+       case "reference":
+         break;
+       default: {
+         const _exhaustiveCheck = type;
+         throw new Error(`Unsupported type: ${_exhaustiveCheck}`);
+       }
+     }
+   }
+   return textContent.length ? textContent.join("") : void 0;
+ }
+ var mistralContentSchema = z3.union([
+   z3.string(),
+   z3.array(
+     z3.discriminatedUnion("type", [
+       z3.object({
+         type: z3.literal("text"),
+         text: z3.string()
+       }),
+       z3.object({
+         type: z3.literal("image_url"),
+         image_url: z3.union([
+           z3.string(),
+           z3.object({
+             url: z3.string(),
+             detail: z3.string().nullable()
+           })
+         ])
+       }),
+       z3.object({
+         type: z3.literal("reference"),
+         reference_ids: z3.array(z3.union([z3.string(), z3.number()]))
+       }),
+       z3.object({
+         type: z3.literal("thinking"),
+         thinking: z3.array(
+           z3.object({
+             type: z3.literal("text"),
+             text: z3.string()
+           })
+         )
+       })
+     ])
+   )
+ ]).nullish();
+ var mistralUsageSchema = z3.object({
+   prompt_tokens: z3.number(),
+   completion_tokens: z3.number(),
+   total_tokens: z3.number()
+ });
+ var mistralChatResponseSchema = z3.object({
+   id: z3.string().nullish(),
+   created: z3.number().nullish(),
+   model: z3.string().nullish(),
+   choices: z3.array(
+     z3.object({
+       message: z3.object({
+         role: z3.literal("assistant"),
+         content: mistralContentSchema,
+         tool_calls: z3.array(
+           z3.object({
+             id: z3.string(),
+             function: z3.object({ name: z3.string(), arguments: z3.string() })
+           })
+         ).nullish()
+       }),
+       index: z3.number(),
+       finish_reason: z3.string().nullish()
+     })
+   ),
+   object: z3.literal("chat.completion"),
+   usage: mistralUsageSchema
+ });
+ var mistralChatChunkSchema = z3.object({
+   id: z3.string().nullish(),
+   created: z3.number().nullish(),
+   model: z3.string().nullish(),
+   choices: z3.array(
+     z3.object({
+       delta: z3.object({
+         role: z3.enum(["assistant"]).optional(),
+         content: mistralContentSchema,
+         tool_calls: z3.array(
+           z3.object({
+             id: z3.string(),
+             function: z3.object({ name: z3.string(), arguments: z3.string() })
+           })
+         ).nullish()
+       }),
+       finish_reason: z3.string().nullish(),
+       index: z3.number()
+     })
+   ),
+   usage: mistralUsageSchema.nullish()
+ });
+
+ // src/mistral-embedding-model.ts
+ import {
+   TooManyEmbeddingValuesForCallError
+ } from "@ai-sdk/provider";
+ import {
+   combineHeaders as combineHeaders2,
+   createJsonResponseHandler as createJsonResponseHandler2,
+   postJsonToApi as postJsonToApi2
+ } from "@ai-sdk/provider-utils";
+ import { z as z4 } from "zod/v4";
+ var MistralEmbeddingModel = class {
+   constructor(modelId, config) {
+     this.specificationVersion = "v3";
+     this.maxEmbeddingsPerCall = 32;
+     this.supportsParallelCalls = false;
+     this.modelId = modelId;
+     this.config = config;
+   }
+   get provider() {
+     return this.config.provider;
+   }
+   async doEmbed({
+     values,
+     abortSignal,
+     headers
+   }) {
+     if (values.length > this.maxEmbeddingsPerCall) {
+       throw new TooManyEmbeddingValuesForCallError({
+         provider: this.provider,
+         modelId: this.modelId,
+         maxEmbeddingsPerCall: this.maxEmbeddingsPerCall,
+         values
+       });
+     }
+     const {
+       responseHeaders,
+       value: response,
+       rawValue
+     } = await postJsonToApi2({
+       url: `${this.config.baseURL}/embeddings`,
+       headers: combineHeaders2(this.config.headers(), headers),
+       body: {
+         model: this.modelId,
+         input: values,
+         encoding_format: "float"
+       },
+       failedResponseHandler: mistralFailedResponseHandler,
+       successfulResponseHandler: createJsonResponseHandler2(
+         MistralTextEmbeddingResponseSchema
+       ),
+       abortSignal,
+       fetch: this.config.fetch
+     });
+     return {
+       warnings: [],
+       embeddings: response.data.map((item) => item.embedding),
+       usage: response.usage ? { tokens: response.usage.prompt_tokens } : void 0,
+       response: { headers: responseHeaders, body: rawValue }
+     };
+   }
+ };
+ var MistralTextEmbeddingResponseSchema = z4.object({
+   data: z4.array(z4.object({ embedding: z4.array(z4.number()) })),
+   usage: z4.object({ prompt_tokens: z4.number() }).nullish()
+ });
+
+ // src/version.ts
+ var VERSION = true ? "0.0.0-1c33ba03-20260114162300" : "0.0.0-test";
+
+ // src/mistral-provider.ts
+ function createMistral(options = {}) {
+   var _a;
+   const baseURL = (_a = withoutTrailingSlash(options.baseURL)) != null ? _a : "https://api.mistral.ai/v1";
+   const getHeaders = () => withUserAgentSuffix(
+     {
+       Authorization: `Bearer ${loadApiKey({
+         apiKey: options.apiKey,
+         environmentVariableName: "MISTRAL_API_KEY",
+         description: "Mistral"
+       })}`,
+       ...options.headers
+     },
+     `ai-sdk/mistral/${VERSION}`
+   );
+   const createChatModel = (modelId) => new MistralChatLanguageModel(modelId, {
+     provider: "mistral.chat",
+     baseURL,
+     headers: getHeaders,
+     fetch: options.fetch,
+     generateId: options.generateId
+   });
+   const createEmbeddingModel = (modelId) => new MistralEmbeddingModel(modelId, {
+     provider: "mistral.embedding",
+     baseURL,
+     headers: getHeaders,
+     fetch: options.fetch
+   });
+   const provider = function(modelId) {
+     if (new.target) {
+       throw new Error(
+         "The Mistral model function cannot be called with the new keyword."
+       );
+     }
+     return createChatModel(modelId);
+   };
+   provider.specificationVersion = "v3";
+   provider.languageModel = createChatModel;
+   provider.chat = createChatModel;
+   provider.embedding = createEmbeddingModel;
+   provider.embeddingModel = createEmbeddingModel;
+   provider.textEmbedding = createEmbeddingModel;
+   provider.textEmbeddingModel = createEmbeddingModel;
+   provider.imageModel = (modelId) => {
+     throw new NoSuchModelError({ modelId, modelType: "imageModel" });
+   };
+   return provider;
+ }
+ var mistral = createMistral();
+ export {
+   VERSION,
+   createMistral,
+   mistral
+ };
+ //# sourceMappingURL=index.mjs.map
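
For orientation, a minimal usage sketch of the API this bundle exports, assuming the AI SDK's generateText and embed helpers from the ai package and placeholder model ids (mistral-small-latest, mistral-embed); the mistral and createMistral exports, the embedding() factory, and the safePrompt provider option are taken from the bundle above, while the API key variable name MY_MISTRAL_KEY is hypothetical.

import { generateText, embed } from "ai";
import { createMistral, mistral } from "@ai-sdk/mistral";

// The default `mistral` instance reads MISTRAL_API_KEY from the environment.
const { text } = await generateText({
  model: mistral("mistral-small-latest"), // placeholder model id
  prompt: "Write a one-line haiku about the sea.",
  providerOptions: {
    // parsed by parseProviderOptions({ provider: "mistral", ... }) above;
    // safePrompt maps to safe_prompt in the request body
    mistral: { safePrompt: true },
  },
});

// A custom instance with an explicit API key; the embedding model
// rejects calls with more than 32 values (maxEmbeddingsPerCall).
const provider = createMistral({ apiKey: process.env.MY_MISTRAL_KEY });
const { embedding } = await embed({
  model: provider.embedding("mistral-embed"), // placeholder model id
  value: "sunny day at the beach",
});

console.log(text, embedding.length);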