@ai-sdk/mistral 2.0.3 → 2.0.5
- package/CHANGELOG.md +14 -0
- package/dist/index.d.mts +2 -1
- package/dist/index.d.ts +2 -1
- package/dist/index.js +84 -8
- package/dist/index.js.map +1 -1
- package/dist/index.mjs +85 -8
- package/dist/index.mjs.map +1 -1
- package/package.json +2 -2
package/CHANGELOG.md
CHANGED
@@ -1,5 +1,19 @@
 # @ai-sdk/mistral
 
+## 2.0.5
+
+### Patch Changes
+
+- 266ef91: fix(provider/mistral): add support for magistral reasoning models with thinking content type
+
+## 2.0.4
+
+### Patch Changes
+
+- Updated dependencies [034e229]
+- Updated dependencies [f25040d]
+  - @ai-sdk/provider-utils@3.0.3
+
 ## 2.0.3
 
 ### Patch Changes
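The only substantive entry is 2.0.5: magistral models return `thinking` content parts, and the provider now surfaces them as `reasoning` content instead of ignoring them. A minimal sketch of what that enables, written against the low-level `LanguageModelV2` interface this package implements (most apps would go through the `ai` package helpers instead; the prompt and model choice here are illustrative):

```ts
import { createMistral } from '@ai-sdk/mistral';

// Sketch at the LanguageModelV2 layer (what this package implements).
// Reads MISTRAL_API_KEY from the environment by default.
const mistral = createMistral();

async function main() {
  const { content } = await mistral('magistral-medium-2507').doGenerate({
    prompt: [
      { role: 'user', content: [{ type: 'text', text: 'Why is the sky blue?' }] },
    ],
  });

  // With 2.0.5, `content` can contain `reasoning` parts (mapped from Mistral
  // "thinking" chunks) in addition to `text` and `tool-call` parts.
  const reasoning = content
    .flatMap(part => (part.type === 'reasoning' ? [part.text] : []))
    .join('');
  const answer = content
    .flatMap(part => (part.type === 'text' ? [part.text] : []))
    .join('');
  console.log({ reasoning, answer });
}

main();
```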
package/dist/index.d.mts
CHANGED
@@ -1,7 +1,7 @@
 import { ProviderV2, LanguageModelV2, EmbeddingModelV2 } from '@ai-sdk/provider';
 import { FetchFunction } from '@ai-sdk/provider-utils';
 
-type MistralChatModelId = 'ministral-3b-latest' | 'ministral-8b-latest' | 'mistral-large-latest' | 'mistral-medium-latest' | 'mistral-medium-2505' | 'mistral-small-latest' | 'pixtral-large-latest' | 'magistral-small-2506' | 'magistral-medium-2506' | 'pixtral-12b-2409' | 'open-mistral-7b' | 'open-mixtral-8x7b' | 'open-mixtral-8x22b' | (string & {});
+type MistralChatModelId = 'ministral-3b-latest' | 'ministral-8b-latest' | 'mistral-large-latest' | 'mistral-medium-latest' | 'mistral-medium-2508' | 'mistral-medium-2505' | 'mistral-small-latest' | 'pixtral-large-latest' | 'magistral-small-2507' | 'magistral-medium-2507' | 'magistral-small-2506' | 'magistral-medium-2506' | 'pixtral-12b-2409' | 'open-mistral-7b' | 'open-mixtral-8x7b' | 'open-mixtral-8x22b' | (string & {});
 
 type MistralEmbeddingModelId = 'mistral-embed' | (string & {});
 
@@ -42,6 +42,7 @@ interface MistralProviderSettings {
 or to provide a custom fetch implementation for e.g. testing.
  */
 fetch?: FetchFunction;
+generateId?: () => string;
 }
 /**
 Create a Mistral AI provider instance.
package/dist/index.d.ts
CHANGED
@@ -1,7 +1,7 @@
 import { ProviderV2, LanguageModelV2, EmbeddingModelV2 } from '@ai-sdk/provider';
 import { FetchFunction } from '@ai-sdk/provider-utils';
 
-type MistralChatModelId = 'ministral-3b-latest' | 'ministral-8b-latest' | 'mistral-large-latest' | 'mistral-medium-latest' | 'mistral-medium-2505' | 'mistral-small-latest' | 'pixtral-large-latest' | 'magistral-small-2506' | 'magistral-medium-2506' | 'pixtral-12b-2409' | 'open-mistral-7b' | 'open-mixtral-8x7b' | 'open-mixtral-8x22b' | (string & {});
+type MistralChatModelId = 'ministral-3b-latest' | 'ministral-8b-latest' | 'mistral-large-latest' | 'mistral-medium-latest' | 'mistral-medium-2508' | 'mistral-medium-2505' | 'mistral-small-latest' | 'pixtral-large-latest' | 'magistral-small-2507' | 'magistral-medium-2507' | 'magistral-small-2506' | 'magistral-medium-2506' | 'pixtral-12b-2409' | 'open-mistral-7b' | 'open-mixtral-8x7b' | 'open-mixtral-8x22b' | (string & {});
 
 type MistralEmbeddingModelId = 'mistral-embed' | (string & {});
 
@@ -42,6 +42,7 @@ interface MistralProviderSettings {
 or to provide a custom fetch implementation for e.g. testing.
  */
 fetch?: FetchFunction;
+generateId?: () => string;
 }
 /**
 Create a Mistral AI provider instance.
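Both declaration files pick up the same two changes: new model ids (`mistral-medium-2508`, `magistral-small-2507`, `magistral-medium-2507`) and a new optional `generateId` provider setting. As the `index.js` changes below show, `generateId` mints ids for streamed reasoning parts and falls back to the default generator from `@ai-sdk/provider-utils`. A sketch of overriding it, e.g. for deterministic ids in tests (the counter-based generator is just an illustration):

```ts
import { createMistral } from '@ai-sdk/mistral';

// A deterministic id generator makes streamed reasoning parts
// (`reasoning-start` / `reasoning-delta` / `reasoning-end`) easy to assert on.
let counter = 0;
const mistral = createMistral({
  generateId: () => `test-reasoning-id-${counter++}`,
});
```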
package/dist/index.js
CHANGED
@@ -97,6 +97,15 @@ function convertToMistralChatMessages(prompt) {
             });
             break;
           }
+          case "reasoning": {
+            text += part.text;
+            break;
+          }
+          default: {
+            throw new Error(
+              `Unsupported content type in assistant message: ${part.type}`
+            );
+          }
         }
       }
       messages.push({
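This first hunk lets prompt conversion accept `reasoning` parts in assistant history messages: their text is folded into the plain `content` string sent back to Mistral, and any unknown part type now throws instead of being silently dropped. Roughly, for a hypothetical history message:

```ts
// Hypothetical assistant message parts from a previous reasoning turn:
const assistantParts = [
  { type: 'reasoning', text: 'User wants the short version. ' },
  { type: 'text', text: 'Blue light scatters more strongly than red.' },
];

// convertToMistralChatMessages now concatenates both part types into the
// outgoing assistant message content:
const content = assistantParts.map(part => part.text).join('');
// -> 'User wants the short version. Blue light scatters more strongly than red.'
```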
@@ -256,8 +265,10 @@ var MistralChatLanguageModel = class {
     this.supportedUrls = {
       "application/pdf": [/^https:\/\/.*$/]
     };
+    var _a;
     this.modelId = modelId;
     this.config = config;
+    this.generateId = (_a = config.generateId) != null ? _a : import_provider_utils3.generateId;
   }
   get provider() {
     return this.config.provider;
@@ -369,13 +380,24 @@ var MistralChatLanguageModel = class {
     });
     const choice = response.choices[0];
     const content = [];
-
-
-
-
-
-
-
+    if (choice.message.content != null && Array.isArray(choice.message.content)) {
+      for (const part of choice.message.content) {
+        if (part.type === "thinking") {
+          const reasoningText = extractReasoningContent(part.thinking);
+          if (reasoningText.length > 0) {
+            content.push({ type: "reasoning", text: reasoningText });
+          }
+        } else if (part.type === "text") {
+          if (part.text.length > 0) {
+            content.push({ type: "text", text: part.text });
+          }
+        }
+      }
+    } else {
+      const text = extractTextContent(choice.message.content);
+      if (text != null && text.length > 0) {
+        content.push({ type: "text", text });
+      }
     }
     if (choice.message.tool_calls != null) {
       for (const toolCall of choice.message.tool_calls) {
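With this hunk, `doGenerate` walks array-shaped message content part by part, so reasoning and text stay in their original order, while plain string content keeps the old `extractTextContent` path. A sketch of the mapping for a hypothetical response shaped like the `thinking` schema added further below:

```ts
// Hypothetical `choice.message.content` from a magistral model:
const apiContent = [
  {
    type: 'thinking',
    thinking: [{ type: 'text', text: 'Compare scattering of short vs. long wavelengths.' }],
  },
  { type: 'text', text: 'Rayleigh scattering favors shorter (blue) wavelengths.' },
];

// The loop above maps it, order preserved, to:
// [
//   { type: 'reasoning', text: 'Compare scattering of short vs. long wavelengths.' },
//   { type: 'text', text: 'Rayleigh scattering favors shorter (blue) wavelengths.' },
// ]
```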
@@ -426,6 +448,8 @@ var MistralChatLanguageModel = class {
     };
     let isFirstChunk = true;
     let activeText = false;
+    let activeReasoningId = null;
+    const generateId2 = this.generateId;
     return {
       stream: response.pipeThrough(
         new TransformStream({
@@ -456,8 +480,40 @@ var MistralChatLanguageModel = class {
            const choice = value.choices[0];
            const delta = choice.delta;
            const textContent = extractTextContent(delta.content);
+           if (delta.content != null && Array.isArray(delta.content)) {
+             for (const part of delta.content) {
+               if (part.type === "thinking") {
+                 const reasoningDelta = extractReasoningContent(part.thinking);
+                 if (reasoningDelta.length > 0) {
+                   if (activeReasoningId == null) {
+                     if (activeText) {
+                       controller.enqueue({ type: "text-end", id: "0" });
+                       activeText = false;
+                     }
+                     activeReasoningId = generateId2();
+                     controller.enqueue({
+                       type: "reasoning-start",
+                       id: activeReasoningId
+                     });
+                   }
+                   controller.enqueue({
+                     type: "reasoning-delta",
+                     id: activeReasoningId,
+                     delta: reasoningDelta
+                   });
+                 }
+               }
+             }
+           }
            if (textContent != null && textContent.length > 0) {
              if (!activeText) {
+               if (activeReasoningId != null) {
+                 controller.enqueue({
+                   type: "reasoning-end",
+                   id: activeReasoningId
+                 });
+                 activeReasoningId = null;
+               }
                controller.enqueue({ type: "text-start", id: "0" });
                activeText = true;
              }
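In the stream transform, `activeReasoningId` tracks an open reasoning block: the first thinking delta closes any open text block and emits `reasoning-start` with an id from the configurable `generateId`, later thinking deltas become `reasoning-delta`, and the first text delta (or the flush handler in the next hunk) emits `reasoning-end`. For a hypothetical think-then-answer stream, the emitted parts look roughly like this:

```ts
// Approximate LanguageModelV2 stream parts for a think-then-answer response.
// 'id-0' stands in for whatever generateId returns; text keeps the fixed id '0'.
const expectedParts = [
  { type: 'reasoning-start', id: 'id-0' },
  { type: 'reasoning-delta', id: 'id-0', delta: 'Compare scattering of ' },
  { type: 'reasoning-delta', id: 'id-0', delta: 'short vs. long wavelengths.' },
  { type: 'reasoning-end', id: 'id-0' }, // emitted when text starts, or in flush
  { type: 'text-start', id: '0' },
  { type: 'text-delta', id: '0', delta: 'Rayleigh scattering favors blue.' },
  { type: 'text-end', id: '0' }, // emitted in flush
];
```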
@@ -499,6 +555,12 @@ var MistralChatLanguageModel = class {
            }
          },
          flush(controller) {
+           if (activeReasoningId != null) {
+             controller.enqueue({
+               type: "reasoning-end",
+               id: activeReasoningId
+             });
+           }
            if (activeText) {
              controller.enqueue({ type: "text-end", id: "0" });
            }
@@ -515,6 +577,9 @@ var MistralChatLanguageModel = class {
     };
   }
 };
+function extractReasoningContent(thinking) {
+  return thinking.filter((chunk) => chunk.type === "text").map((chunk) => chunk.text).join("");
+}
 function extractTextContent(content) {
   if (typeof content === "string") {
     return content;
@@ -529,6 +594,7 @@ function extractTextContent(content) {
       case "text":
         textContent.push(chunk.text);
         break;
+      case "thinking":
       case "image_url":
       case "reference":
         break;
@@ -561,6 +627,15 @@ var mistralContentSchema = import_v43.z.union([
       import_v43.z.object({
         type: import_v43.z.literal("reference"),
         reference_ids: import_v43.z.array(import_v43.z.number())
+      }),
+      import_v43.z.object({
+        type: import_v43.z.literal("thinking"),
+        thinking: import_v43.z.array(
+          import_v43.z.object({
+            type: import_v43.z.literal("text"),
+            text: import_v43.z.string()
+          })
+        )
       })
     ])
   )
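The response content schema gains a `thinking` union member: an object whose `thinking` field is an array of `{ type: 'text', text }` chunks. A standalone restatement with plain zod (the bundled code uses the `import_v43` alias for the same `zod/v4` import):

```ts
import { z } from 'zod/v4';

const thinkingPartSchema = z.object({
  type: z.literal('thinking'),
  thinking: z.array(
    z.object({
      type: z.literal('text'),
      text: z.string(),
    }),
  ),
});

// A payload like this now passes the content union (it was rejected before):
thinkingPartSchema.parse({
  type: 'thinking',
  thinking: [{ type: 'text', text: 'Weigh both interpretations before answering.' }],
});
```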
@@ -691,7 +766,8 @@ function createMistral(options = {}) {
     provider: "mistral.chat",
     baseURL,
     headers: getHeaders,
-    fetch: options.fetch
+    fetch: options.fetch,
+    generateId: options.generateId
   });
   const createEmbeddingModel = (modelId) => new MistralEmbeddingModel(modelId, {
     provider: "mistral.embedding",
package/dist/index.js.map
CHANGED
@@ -1 +1 @@
-
{"version":3,"sources":["../src/index.ts","../src/mistral-provider.ts","../src/mistral-chat-language-model.ts","../src/convert-to-mistral-chat-messages.ts","../src/get-response-metadata.ts","../src/map-mistral-finish-reason.ts","../src/mistral-chat-options.ts","../src/mistral-error.ts","../src/mistral-prepare-tools.ts","../src/mistral-embedding-model.ts"],"sourcesContent":["export { createMistral, mistral } from './mistral-provider';\nexport type {\n MistralProvider,\n MistralProviderSettings,\n} from './mistral-provider';\n","import {\n EmbeddingModelV2,\n LanguageModelV2,\n NoSuchModelError,\n ProviderV2,\n} from '@ai-sdk/provider';\nimport {\n FetchFunction,\n loadApiKey,\n withoutTrailingSlash,\n} from '@ai-sdk/provider-utils';\nimport { MistralChatLanguageModel } from './mistral-chat-language-model';\nimport { MistralChatModelId } from './mistral-chat-options';\nimport { MistralEmbeddingModel } from './mistral-embedding-model';\nimport { MistralEmbeddingModelId } from './mistral-embedding-options';\n\nexport interface MistralProvider extends ProviderV2 {\n (modelId: MistralChatModelId): LanguageModelV2;\n\n /**\nCreates a model for text generation.\n*/\n languageModel(modelId: MistralChatModelId): LanguageModelV2;\n\n /**\nCreates a model for text generation.\n*/\n chat(modelId: MistralChatModelId): LanguageModelV2;\n\n /**\n@deprecated Use `textEmbedding()` instead.\n */\n embedding(modelId: MistralEmbeddingModelId): EmbeddingModelV2<string>;\n\n textEmbedding(modelId: MistralEmbeddingModelId): EmbeddingModelV2<string>;\n\n textEmbeddingModel: (\n modelId: MistralEmbeddingModelId,\n ) => EmbeddingModelV2<string>;\n}\n\nexport interface MistralProviderSettings {\n /**\nUse a different URL prefix for API calls, e.g. to use proxy servers.\nThe default prefix is `https://api.mistral.ai/v1`.\n */\n baseURL?: string;\n\n /**\nAPI key that is being send using the `Authorization` header.\nIt defaults to the `MISTRAL_API_KEY` environment variable.\n */\n apiKey?: string;\n\n /**\nCustom headers to include in the requests.\n */\n headers?: Record<string, string>;\n\n /**\nCustom fetch implementation. You can use it as a middleware to intercept requests,\nor to provide a custom fetch implementation for e.g. testing.\n */\n fetch?: FetchFunction;\n}\n\n/**\nCreate a Mistral AI provider instance.\n */\nexport function createMistral(\n options: MistralProviderSettings = {},\n): MistralProvider {\n const baseURL =\n withoutTrailingSlash(options.baseURL) ?? 
'https://api.mistral.ai/v1';\n\n const getHeaders = () => ({\n Authorization: `Bearer ${loadApiKey({\n apiKey: options.apiKey,\n environmentVariableName: 'MISTRAL_API_KEY',\n description: 'Mistral',\n })}`,\n ...options.headers,\n });\n\n const createChatModel = (modelId: MistralChatModelId) =>\n new MistralChatLanguageModel(modelId, {\n provider: 'mistral.chat',\n baseURL,\n headers: getHeaders,\n fetch: options.fetch,\n });\n\n const createEmbeddingModel = (modelId: MistralEmbeddingModelId) =>\n new MistralEmbeddingModel(modelId, {\n provider: 'mistral.embedding',\n baseURL,\n headers: getHeaders,\n fetch: options.fetch,\n });\n\n const provider = function (modelId: MistralChatModelId) {\n if (new.target) {\n throw new Error(\n 'The Mistral model function cannot be called with the new keyword.',\n );\n }\n\n return createChatModel(modelId);\n };\n\n provider.languageModel = createChatModel;\n provider.chat = createChatModel;\n provider.embedding = createEmbeddingModel;\n provider.textEmbedding = createEmbeddingModel;\n provider.textEmbeddingModel = createEmbeddingModel;\n\n provider.imageModel = (modelId: string) => {\n throw new NoSuchModelError({ modelId, modelType: 'imageModel' });\n };\n\n return provider;\n}\n\n/**\nDefault Mistral provider instance.\n */\nexport const mistral = createMistral();\n","import {\n LanguageModelV2,\n LanguageModelV2CallWarning,\n LanguageModelV2Content,\n LanguageModelV2FinishReason,\n LanguageModelV2StreamPart,\n LanguageModelV2Usage,\n} from '@ai-sdk/provider';\nimport {\n combineHeaders,\n createEventSourceResponseHandler,\n createJsonResponseHandler,\n FetchFunction,\n parseProviderOptions,\n ParseResult,\n postJsonToApi,\n} from '@ai-sdk/provider-utils';\nimport { z } from 'zod/v4';\nimport { convertToMistralChatMessages } from './convert-to-mistral-chat-messages';\nimport { getResponseMetadata } from './get-response-metadata';\nimport { mapMistralFinishReason } from './map-mistral-finish-reason';\nimport {\n MistralChatModelId,\n mistralProviderOptions,\n} from './mistral-chat-options';\nimport { mistralFailedResponseHandler } from './mistral-error';\nimport { prepareTools } from './mistral-prepare-tools';\n\ntype MistralChatConfig = {\n provider: string;\n baseURL: string;\n headers: () => Record<string, string | undefined>;\n fetch?: FetchFunction;\n};\n\nexport class MistralChatLanguageModel implements LanguageModelV2 {\n readonly specificationVersion = 'v2';\n\n readonly modelId: MistralChatModelId;\n\n private readonly config: MistralChatConfig;\n\n constructor(modelId: MistralChatModelId, config: MistralChatConfig) {\n this.modelId = modelId;\n this.config = config;\n }\n\n get provider(): string {\n return this.config.provider;\n }\n\n readonly supportedUrls: Record<string, RegExp[]> = {\n 'application/pdf': [/^https:\\/\\/.*$/],\n };\n\n private async getArgs({\n prompt,\n maxOutputTokens,\n temperature,\n topP,\n topK,\n frequencyPenalty,\n presencePenalty,\n stopSequences,\n responseFormat,\n seed,\n providerOptions,\n tools,\n toolChoice,\n }: Parameters<LanguageModelV2['doGenerate']>[0]) {\n const warnings: LanguageModelV2CallWarning[] = [];\n\n const options =\n (await parseProviderOptions({\n provider: 'mistral',\n providerOptions,\n schema: mistralProviderOptions,\n })) ?? 
{};\n\n if (topK != null) {\n warnings.push({\n type: 'unsupported-setting',\n setting: 'topK',\n });\n }\n\n if (frequencyPenalty != null) {\n warnings.push({\n type: 'unsupported-setting',\n setting: 'frequencyPenalty',\n });\n }\n\n if (presencePenalty != null) {\n warnings.push({\n type: 'unsupported-setting',\n setting: 'presencePenalty',\n });\n }\n\n if (stopSequences != null) {\n warnings.push({\n type: 'unsupported-setting',\n setting: 'stopSequences',\n });\n }\n\n if (\n responseFormat != null &&\n responseFormat.type === 'json' &&\n responseFormat.schema != null\n ) {\n warnings.push({\n type: 'unsupported-setting',\n setting: 'responseFormat',\n details: 'JSON response format schema is not supported',\n });\n }\n\n const baseArgs = {\n // model id:\n model: this.modelId,\n\n // model specific settings:\n safe_prompt: options.safePrompt,\n\n // standardized settings:\n max_tokens: maxOutputTokens,\n temperature,\n top_p: topP,\n random_seed: seed,\n\n // response format:\n response_format:\n responseFormat?.type === 'json' ? { type: 'json_object' } : undefined,\n\n // mistral-specific provider options:\n document_image_limit: options.documentImageLimit,\n document_page_limit: options.documentPageLimit,\n\n // messages:\n messages: convertToMistralChatMessages(prompt),\n };\n\n const {\n tools: mistralTools,\n toolChoice: mistralToolChoice,\n toolWarnings,\n } = prepareTools({\n tools,\n toolChoice,\n });\n\n return {\n args: {\n ...baseArgs,\n tools: mistralTools,\n tool_choice: mistralToolChoice,\n },\n warnings: [...warnings, ...toolWarnings],\n };\n }\n\n async doGenerate(\n options: Parameters<LanguageModelV2['doGenerate']>[0],\n ): Promise<Awaited<ReturnType<LanguageModelV2['doGenerate']>>> {\n const { args: body, warnings } = await this.getArgs(options);\n\n const {\n responseHeaders,\n value: response,\n rawValue: rawResponse,\n } = await postJsonToApi({\n url: `${this.config.baseURL}/chat/completions`,\n headers: combineHeaders(this.config.headers(), options.headers),\n body,\n failedResponseHandler: mistralFailedResponseHandler,\n successfulResponseHandler: createJsonResponseHandler(\n mistralChatResponseSchema,\n ),\n abortSignal: options.abortSignal,\n fetch: this.config.fetch,\n });\n\n const choice = response.choices[0];\n const content: Array<LanguageModelV2Content> = [];\n\n // text content:\n let text = extractTextContent(choice.message.content);\n\n // when there is a trailing assistant message, mistral will send the\n // content of that message again. we skip this repeated content to\n // avoid duplication, e.g. 
in continuation mode.\n const lastMessage = body.messages[body.messages.length - 1];\n if (\n lastMessage.role === 'assistant' &&\n text?.startsWith(lastMessage.content)\n ) {\n text = text.slice(lastMessage.content.length);\n }\n\n if (text != null && text.length > 0) {\n content.push({ type: 'text', text });\n }\n\n // tool calls:\n if (choice.message.tool_calls != null) {\n for (const toolCall of choice.message.tool_calls) {\n content.push({\n type: 'tool-call',\n toolCallId: toolCall.id,\n toolName: toolCall.function.name,\n input: toolCall.function.arguments!,\n });\n }\n }\n\n return {\n content,\n finishReason: mapMistralFinishReason(choice.finish_reason),\n usage: {\n inputTokens: response.usage.prompt_tokens,\n outputTokens: response.usage.completion_tokens,\n totalTokens: response.usage.total_tokens,\n },\n request: { body },\n response: {\n ...getResponseMetadata(response),\n headers: responseHeaders,\n body: rawResponse,\n },\n warnings,\n };\n }\n\n async doStream(\n options: Parameters<LanguageModelV2['doStream']>[0],\n ): Promise<Awaited<ReturnType<LanguageModelV2['doStream']>>> {\n const { args, warnings } = await this.getArgs(options);\n const body = { ...args, stream: true };\n\n const { responseHeaders, value: response } = await postJsonToApi({\n url: `${this.config.baseURL}/chat/completions`,\n headers: combineHeaders(this.config.headers(), options.headers),\n body,\n failedResponseHandler: mistralFailedResponseHandler,\n successfulResponseHandler: createEventSourceResponseHandler(\n mistralChatChunkSchema,\n ),\n abortSignal: options.abortSignal,\n fetch: this.config.fetch,\n });\n\n let finishReason: LanguageModelV2FinishReason = 'unknown';\n const usage: LanguageModelV2Usage = {\n inputTokens: undefined,\n outputTokens: undefined,\n totalTokens: undefined,\n };\n\n let isFirstChunk = true;\n let activeText = false;\n\n return {\n stream: response.pipeThrough(\n new TransformStream<\n ParseResult<z.infer<typeof mistralChatChunkSchema>>,\n LanguageModelV2StreamPart\n >({\n start(controller) {\n controller.enqueue({ type: 'stream-start', warnings });\n },\n\n transform(chunk, controller) {\n // Emit raw chunk if requested (before anything else)\n if (options.includeRawChunks) {\n controller.enqueue({ type: 'raw', rawValue: chunk.rawValue });\n }\n\n if (!chunk.success) {\n controller.enqueue({ type: 'error', error: chunk.error });\n return;\n }\n\n const value = chunk.value;\n\n if (isFirstChunk) {\n isFirstChunk = false;\n\n controller.enqueue({\n type: 'response-metadata',\n ...getResponseMetadata(value),\n });\n }\n\n if (value.usage != null) {\n usage.inputTokens = value.usage.prompt_tokens;\n usage.outputTokens = value.usage.completion_tokens;\n usage.totalTokens = value.usage.total_tokens;\n }\n\n const choice = value.choices[0];\n const delta = choice.delta;\n\n const textContent = extractTextContent(delta.content);\n\n if (textContent != null && textContent.length > 0) {\n if (!activeText) {\n controller.enqueue({ type: 'text-start', id: '0' });\n activeText = true;\n }\n\n controller.enqueue({\n type: 'text-delta',\n id: '0',\n delta: textContent,\n });\n }\n\n if (delta?.tool_calls != null) {\n for (const toolCall of delta.tool_calls) {\n const toolCallId = toolCall.id;\n const toolName = toolCall.function.name;\n const input = toolCall.function.arguments;\n\n controller.enqueue({\n type: 'tool-input-start',\n id: toolCallId,\n toolName,\n });\n\n controller.enqueue({\n type: 'tool-input-delta',\n id: toolCallId,\n delta: input,\n });\n\n 
controller.enqueue({\n type: 'tool-input-end',\n id: toolCallId,\n });\n\n controller.enqueue({\n type: 'tool-call',\n toolCallId,\n toolName,\n input,\n });\n }\n }\n\n if (choice.finish_reason != null) {\n finishReason = mapMistralFinishReason(choice.finish_reason);\n }\n },\n\n flush(controller) {\n if (activeText) {\n controller.enqueue({ type: 'text-end', id: '0' });\n }\n\n controller.enqueue({\n type: 'finish',\n finishReason,\n usage,\n });\n },\n }),\n ),\n request: { body },\n response: { headers: responseHeaders },\n };\n }\n}\n\nfunction extractTextContent(content: z.infer<typeof mistralContentSchema>) {\n if (typeof content === 'string') {\n return content;\n }\n\n if (content == null) {\n return undefined;\n }\n\n const textContent: string[] = [];\n\n for (const chunk of content) {\n const { type } = chunk;\n\n switch (type) {\n case 'text':\n textContent.push(chunk.text);\n break;\n case 'image_url':\n case 'reference':\n // image content or reference content is currently ignored.\n break;\n default: {\n const _exhaustiveCheck: never = type;\n throw new Error(`Unsupported type: ${_exhaustiveCheck}`);\n }\n }\n }\n\n return textContent.length ? textContent.join('') : undefined;\n}\n\nconst mistralContentSchema = z\n .union([\n z.string(),\n z.array(\n z.discriminatedUnion('type', [\n z.object({\n type: z.literal('text'),\n text: z.string(),\n }),\n z.object({\n type: z.literal('image_url'),\n image_url: z.union([\n z.string(),\n z.object({\n url: z.string(),\n detail: z.string().nullable(),\n }),\n ]),\n }),\n z.object({\n type: z.literal('reference'),\n reference_ids: z.array(z.number()),\n }),\n ]),\n ),\n ])\n .nullish();\n\nconst mistralUsageSchema = z.object({\n prompt_tokens: z.number(),\n completion_tokens: z.number(),\n total_tokens: z.number(),\n});\n\n// limited version of the schema, focussed on what is needed for the implementation\n// this approach limits breakages when the API changes and increases efficiency\nconst mistralChatResponseSchema = z.object({\n id: z.string().nullish(),\n created: z.number().nullish(),\n model: z.string().nullish(),\n choices: z.array(\n z.object({\n message: z.object({\n role: z.literal('assistant'),\n content: mistralContentSchema,\n tool_calls: z\n .array(\n z.object({\n id: z.string(),\n function: z.object({ name: z.string(), arguments: z.string() }),\n }),\n )\n .nullish(),\n }),\n index: z.number(),\n finish_reason: z.string().nullish(),\n }),\n ),\n object: z.literal('chat.completion'),\n usage: mistralUsageSchema,\n});\n\n// limited version of the schema, focussed on what is needed for the implementation\n// this approach limits breakages when the API changes and increases efficiency\nconst mistralChatChunkSchema = z.object({\n id: z.string().nullish(),\n created: z.number().nullish(),\n model: z.string().nullish(),\n choices: z.array(\n z.object({\n delta: z.object({\n role: z.enum(['assistant']).optional(),\n content: mistralContentSchema,\n tool_calls: z\n .array(\n z.object({\n id: z.string(),\n function: z.object({ name: z.string(), arguments: z.string() }),\n }),\n )\n .nullish(),\n }),\n finish_reason: z.string().nullish(),\n index: z.number(),\n }),\n ),\n usage: mistralUsageSchema.nullish(),\n});\n","import {\n LanguageModelV2Prompt,\n UnsupportedFunctionalityError,\n} from '@ai-sdk/provider';\nimport { MistralPrompt } from './mistral-chat-prompt';\nimport { convertToBase64 } from '@ai-sdk/provider-utils';\n\nexport function convertToMistralChatMessages(\n prompt: LanguageModelV2Prompt,\n): MistralPrompt {\n const 
messages: MistralPrompt = [];\n\n for (let i = 0; i < prompt.length; i++) {\n const { role, content } = prompt[i];\n const isLastMessage = i === prompt.length - 1;\n\n switch (role) {\n case 'system': {\n messages.push({ role: 'system', content });\n break;\n }\n\n case 'user': {\n messages.push({\n role: 'user',\n content: content.map(part => {\n switch (part.type) {\n case 'text': {\n return { type: 'text', text: part.text };\n }\n\n case 'file': {\n if (part.mediaType.startsWith('image/')) {\n const mediaType =\n part.mediaType === 'image/*'\n ? 'image/jpeg'\n : part.mediaType;\n\n return {\n type: 'image_url',\n image_url:\n part.data instanceof URL\n ? part.data.toString()\n : `data:${mediaType};base64,${convertToBase64(part.data)}`,\n };\n } else if (part.mediaType === 'application/pdf') {\n return {\n type: 'document_url',\n document_url: part.data.toString(),\n };\n } else {\n throw new UnsupportedFunctionalityError({\n functionality:\n 'Only images and PDF file parts are supported',\n });\n }\n }\n }\n }),\n });\n break;\n }\n\n case 'assistant': {\n let text = '';\n const toolCalls: Array<{\n id: string;\n type: 'function';\n function: { name: string; arguments: string };\n }> = [];\n\n for (const part of content) {\n switch (part.type) {\n case 'text': {\n text += part.text;\n break;\n }\n case 'tool-call': {\n toolCalls.push({\n id: part.toolCallId,\n type: 'function',\n function: {\n name: part.toolName,\n arguments: JSON.stringify(part.input),\n },\n });\n break;\n }\n }\n }\n\n messages.push({\n role: 'assistant',\n content: text,\n prefix: isLastMessage ? true : undefined,\n tool_calls: toolCalls.length > 0 ? toolCalls : undefined,\n });\n\n break;\n }\n case 'tool': {\n for (const toolResponse of content) {\n const output = toolResponse.output;\n\n let contentValue: string;\n switch (output.type) {\n case 'text':\n case 'error-text':\n contentValue = output.value;\n break;\n case 'content':\n case 'json':\n case 'error-json':\n contentValue = JSON.stringify(output.value);\n break;\n }\n\n messages.push({\n role: 'tool',\n name: toolResponse.toolName,\n tool_call_id: toolResponse.toolCallId,\n content: contentValue,\n });\n }\n break;\n }\n default: {\n const _exhaustiveCheck: never = role;\n throw new Error(`Unsupported role: ${_exhaustiveCheck}`);\n }\n }\n }\n\n return messages;\n}\n","export function getResponseMetadata({\n id,\n model,\n created,\n}: {\n id?: string | undefined | null;\n created?: number | undefined | null;\n model?: string | undefined | null;\n}) {\n return {\n id: id ?? undefined,\n modelId: model ?? undefined,\n timestamp: created != null ? 
new Date(created * 1000) : undefined,\n };\n}\n","import { LanguageModelV2FinishReason } from '@ai-sdk/provider';\n\nexport function mapMistralFinishReason(\n finishReason: string | null | undefined,\n): LanguageModelV2FinishReason {\n switch (finishReason) {\n case 'stop':\n return 'stop';\n case 'length':\n case 'model_length':\n return 'length';\n case 'tool_calls':\n return 'tool-calls';\n default:\n return 'unknown';\n }\n}\n","import { z } from 'zod/v4';\n\n// https://docs.mistral.ai/getting-started/models/models_overview/\nexport type MistralChatModelId =\n // premier\n | 'ministral-3b-latest'\n | 'ministral-8b-latest'\n | 'mistral-large-latest'\n | 'mistral-medium-latest'\n | 'mistral-medium-2505'\n | 'mistral-small-latest'\n | 'pixtral-large-latest'\n // reasoning models\n | 'magistral-small-2506'\n | 'magistral-medium-2506'\n // free\n | 'pixtral-12b-2409'\n // legacy\n | 'open-mistral-7b'\n | 'open-mixtral-8x7b'\n | 'open-mixtral-8x22b'\n | (string & {});\n\nexport const mistralProviderOptions = z.object({\n /**\nWhether to inject a safety prompt before all conversations.\n\nDefaults to `false`.\n */\n safePrompt: z.boolean().optional(),\n\n documentImageLimit: z.number().optional(),\n documentPageLimit: z.number().optional(),\n});\n\nexport type MistralProviderOptions = z.infer<typeof mistralProviderOptions>;\n","import { createJsonErrorResponseHandler } from '@ai-sdk/provider-utils';\nimport { z } from 'zod/v4';\n\nconst mistralErrorDataSchema = z.object({\n object: z.literal('error'),\n message: z.string(),\n type: z.string(),\n param: z.string().nullable(),\n code: z.string().nullable(),\n});\n\nexport type MistralErrorData = z.infer<typeof mistralErrorDataSchema>;\n\nexport const mistralFailedResponseHandler = createJsonErrorResponseHandler({\n errorSchema: mistralErrorDataSchema,\n errorToMessage: data => data.message,\n});\n","import {\n LanguageModelV2CallOptions,\n LanguageModelV2CallWarning,\n UnsupportedFunctionalityError,\n} from '@ai-sdk/provider';\nimport { MistralToolChoice } from './mistral-chat-prompt';\n\nexport function prepareTools({\n tools,\n toolChoice,\n}: {\n tools: LanguageModelV2CallOptions['tools'];\n toolChoice?: LanguageModelV2CallOptions['toolChoice'];\n}): {\n tools:\n | Array<{\n type: 'function';\n function: {\n name: string;\n description: string | undefined;\n parameters: unknown;\n };\n }>\n | undefined;\n toolChoice: MistralToolChoice | undefined;\n toolWarnings: LanguageModelV2CallWarning[];\n} {\n // when the tools array is empty, change it to undefined to prevent errors:\n tools = tools?.length ? 
tools : undefined;\n\n const toolWarnings: LanguageModelV2CallWarning[] = [];\n\n if (tools == null) {\n return { tools: undefined, toolChoice: undefined, toolWarnings };\n }\n\n const mistralTools: Array<{\n type: 'function';\n function: {\n name: string;\n description: string | undefined;\n parameters: unknown;\n };\n }> = [];\n\n for (const tool of tools) {\n if (tool.type === 'provider-defined') {\n toolWarnings.push({ type: 'unsupported-tool', tool });\n } else {\n mistralTools.push({\n type: 'function',\n function: {\n name: tool.name,\n description: tool.description,\n parameters: tool.inputSchema,\n },\n });\n }\n }\n\n if (toolChoice == null) {\n return { tools: mistralTools, toolChoice: undefined, toolWarnings };\n }\n\n const type = toolChoice.type;\n\n switch (type) {\n case 'auto':\n case 'none':\n return { tools: mistralTools, toolChoice: type, toolWarnings };\n case 'required':\n return { tools: mistralTools, toolChoice: 'any', toolWarnings };\n\n // mistral does not support tool mode directly,\n // so we filter the tools and force the tool choice through 'any'\n case 'tool':\n return {\n tools: mistralTools.filter(\n tool => tool.function.name === toolChoice.toolName,\n ),\n toolChoice: 'any',\n toolWarnings,\n };\n default: {\n const _exhaustiveCheck: never = type;\n throw new UnsupportedFunctionalityError({\n functionality: `tool choice type: ${_exhaustiveCheck}`,\n });\n }\n }\n}\n","import {\n EmbeddingModelV2,\n TooManyEmbeddingValuesForCallError,\n} from '@ai-sdk/provider';\nimport {\n combineHeaders,\n createJsonResponseHandler,\n FetchFunction,\n postJsonToApi,\n} from '@ai-sdk/provider-utils';\nimport { z } from 'zod/v4';\nimport { MistralEmbeddingModelId } from './mistral-embedding-options';\nimport { mistralFailedResponseHandler } from './mistral-error';\n\ntype MistralEmbeddingConfig = {\n provider: string;\n baseURL: string;\n headers: () => Record<string, string | undefined>;\n fetch?: FetchFunction;\n};\n\nexport class MistralEmbeddingModel implements EmbeddingModelV2<string> {\n readonly specificationVersion = 'v2';\n readonly modelId: MistralEmbeddingModelId;\n readonly maxEmbeddingsPerCall = 32;\n readonly supportsParallelCalls = false;\n\n private readonly config: MistralEmbeddingConfig;\n\n get provider(): string {\n return this.config.provider;\n }\n\n constructor(\n modelId: MistralEmbeddingModelId,\n config: MistralEmbeddingConfig,\n ) {\n this.modelId = modelId;\n this.config = config;\n }\n\n async doEmbed({\n values,\n abortSignal,\n headers,\n }: Parameters<EmbeddingModelV2<string>['doEmbed']>[0]): Promise<\n Awaited<ReturnType<EmbeddingModelV2<string>['doEmbed']>>\n > {\n if (values.length > this.maxEmbeddingsPerCall) {\n throw new TooManyEmbeddingValuesForCallError({\n provider: this.provider,\n modelId: this.modelId,\n maxEmbeddingsPerCall: this.maxEmbeddingsPerCall,\n values,\n });\n }\n\n const {\n responseHeaders,\n value: response,\n rawValue,\n } = await postJsonToApi({\n url: `${this.config.baseURL}/embeddings`,\n headers: combineHeaders(this.config.headers(), headers),\n body: {\n model: this.modelId,\n input: values,\n encoding_format: 'float',\n },\n failedResponseHandler: mistralFailedResponseHandler,\n successfulResponseHandler: createJsonResponseHandler(\n MistralTextEmbeddingResponseSchema,\n ),\n abortSignal,\n fetch: this.config.fetch,\n });\n\n return {\n embeddings: response.data.map(item => item.embedding),\n usage: response.usage\n ? 
{ tokens: response.usage.prompt_tokens }\n : undefined,\n response: { headers: responseHeaders, body: rawValue },\n };\n }\n}\n\n// minimal version of the schema, focussed on what is needed for the implementation\n// this approach limits breakages when the API changes and increases efficiency\nconst MistralTextEmbeddingResponseSchema = z.object({\n data: z.array(z.object({ embedding: z.array(z.number()) })),\n usage: z.object({ prompt_tokens: z.number() }).nullish(),\n});\n"],"mappings":";;;;;;;;;;;;;;;;;;;;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;;;ACAA,IAAAA,mBAKO;AACP,IAAAC,yBAIO;;;ACFP,IAAAC,yBAQO;AACP,IAAAC,aAAkB;;;ACjBlB,sBAGO;AAEP,4BAAgC;AAEzB,SAAS,6BACd,QACe;AACf,QAAM,WAA0B,CAAC;AAEjC,WAAS,IAAI,GAAG,IAAI,OAAO,QAAQ,KAAK;AACtC,UAAM,EAAE,MAAM,QAAQ,IAAI,OAAO,CAAC;AAClC,UAAM,gBAAgB,MAAM,OAAO,SAAS;AAE5C,YAAQ,MAAM;AAAA,MACZ,KAAK,UAAU;AACb,iBAAS,KAAK,EAAE,MAAM,UAAU,QAAQ,CAAC;AACzC;AAAA,MACF;AAAA,MAEA,KAAK,QAAQ;AACX,iBAAS,KAAK;AAAA,UACZ,MAAM;AAAA,UACN,SAAS,QAAQ,IAAI,UAAQ;AAC3B,oBAAQ,KAAK,MAAM;AAAA,cACjB,KAAK,QAAQ;AACX,uBAAO,EAAE,MAAM,QAAQ,MAAM,KAAK,KAAK;AAAA,cACzC;AAAA,cAEA,KAAK,QAAQ;AACX,oBAAI,KAAK,UAAU,WAAW,QAAQ,GAAG;AACvC,wBAAM,YACJ,KAAK,cAAc,YACf,eACA,KAAK;AAEX,yBAAO;AAAA,oBACL,MAAM;AAAA,oBACN,WACE,KAAK,gBAAgB,MACjB,KAAK,KAAK,SAAS,IACnB,QAAQ,SAAS,eAAW,uCAAgB,KAAK,IAAI,CAAC;AAAA,kBAC9D;AAAA,gBACF,WAAW,KAAK,cAAc,mBAAmB;AAC/C,yBAAO;AAAA,oBACL,MAAM;AAAA,oBACN,cAAc,KAAK,KAAK,SAAS;AAAA,kBACnC;AAAA,gBACF,OAAO;AACL,wBAAM,IAAI,8CAA8B;AAAA,oBACtC,eACE;AAAA,kBACJ,CAAC;AAAA,gBACH;AAAA,cACF;AAAA,YACF;AAAA,UACF,CAAC;AAAA,QACH,CAAC;AACD;AAAA,MACF;AAAA,MAEA,KAAK,aAAa;AAChB,YAAI,OAAO;AACX,cAAM,YAID,CAAC;AAEN,mBAAW,QAAQ,SAAS;AAC1B,kBAAQ,KAAK,MAAM;AAAA,YACjB,KAAK,QAAQ;AACX,sBAAQ,KAAK;AACb;AAAA,YACF;AAAA,YACA,KAAK,aAAa;AAChB,wBAAU,KAAK;AAAA,gBACb,IAAI,KAAK;AAAA,gBACT,MAAM;AAAA,gBACN,UAAU;AAAA,kBACR,MAAM,KAAK;AAAA,kBACX,WAAW,KAAK,UAAU,KAAK,KAAK;AAAA,gBACtC;AAAA,cACF,CAAC;AACD;AAAA,YACF;AAAA,UACF;AAAA,QACF;AAEA,iBAAS,KAAK;AAAA,UACZ,MAAM;AAAA,UACN,SAAS;AAAA,UACT,QAAQ,gBAAgB,OAAO;AAAA,UAC/B,YAAY,UAAU,SAAS,IAAI,YAAY;AAAA,QACjD,CAAC;AAED;AAAA,MACF;AAAA,MACA,KAAK,QAAQ;AACX,mBAAW,gBAAgB,SAAS;AAClC,gBAAM,SAAS,aAAa;AAE5B,cAAI;AACJ,kBAAQ,OAAO,MAAM;AAAA,YACnB,KAAK;AAAA,YACL,KAAK;AACH,6BAAe,OAAO;AACtB;AAAA,YACF,KAAK;AAAA,YACL,KAAK;AAAA,YACL,KAAK;AACH,6BAAe,KAAK,UAAU,OAAO,KAAK;AAC1C;AAAA,UACJ;AAEA,mBAAS,KAAK;AAAA,YACZ,MAAM;AAAA,YACN,MAAM,aAAa;AAAA,YACnB,cAAc,aAAa;AAAA,YAC3B,SAAS;AAAA,UACX,CAAC;AAAA,QACH;AACA;AAAA,MACF;AAAA,MACA,SAAS;AACP,cAAM,mBAA0B;AAChC,cAAM,IAAI,MAAM,qBAAqB,gBAAgB,EAAE;AAAA,MACzD;AAAA,IACF;AAAA,EACF;AAEA,SAAO;AACT;;;ACtIO,SAAS,oBAAoB;AAAA,EAClC;AAAA,EACA;AAAA,EACA;AACF,GAIG;AACD,SAAO;AAAA,IACL,IAAI,kBAAM;AAAA,IACV,SAAS,wBAAS;AAAA,IAClB,WAAW,WAAW,OAAO,IAAI,KAAK,UAAU,GAAI,IAAI;AAAA,EAC1D;AACF;;;ACZO,SAAS,uBACd,cAC6B;AAC7B,UAAQ,cAAc;AAAA,IACpB,KAAK;AACH,aAAO;AAAA,IACT,KAAK;AAAA,IACL,KAAK;AACH,aAAO;AAAA,IACT,KAAK;AACH,aAAO;AAAA,IACT;AACE,aAAO;AAAA,EACX;AACF;;;AChBA,gBAAkB;AAuBX,IAAM,yBAAyB,YAAE,OAAO;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,EAM7C,YAAY,YAAE,QAAQ,EAAE,SAAS;AAAA,EAEjC,oBAAoB,YAAE,OAAO,EAAE,SAAS;AAAA,EACxC,mBAAmB,YAAE,OAAO,EAAE,SAAS;AACzC,CAAC;;;ACjCD,IAAAC,yBAA+C;AAC/C,IAAAC,aAAkB;AAElB,IAAM,yBAAyB,aAAE,OAAO;AAAA,EACtC,QAAQ,aAAE,QAAQ,OAAO;AAAA,EACzB,SAAS,aAAE,OAAO;AAAA,EAClB,MAAM,aAAE,OAAO;AAAA,EACf,OAAO,aAAE,OAAO,EAAE,SAAS;AAAA,EAC3B,MAAM,aAAE,OAAO,EAAE,SAAS;AAC5B,CAAC;AAIM,IAAM,mCAA+B,uDAA+B;AAAA,EACzE,aAAa;AAAA,EACb,gBAAgB,UAAQ,KAAK;AAC/B,CAAC;;;AChBD,IAAAC,mBAIO;AAGA,SAAS,aAAa;AAAA,EAC3B;AAAA,EACA;AACF,GAgBE;AAEA,WAAQ,+BAAO,UAAS,QAAQ;AAEhC,QAAM,eAA6C,CAAC;AAEpD,MAAI,SAAS,MAAM;AACjB,WAAO,EAAE,OAAO,QAAW,YAAY,QAA
W,aAAa;AAAA,EACjE;AAEA,QAAM,eAOD,CAAC;AAEN,aAAW,QAAQ,OAAO;AACxB,QAAI,KAAK,SAAS,oBAAoB;AACpC,mBAAa,KAAK,EAAE,MAAM,oBAAoB,KAAK,CAAC;AAAA,IACtD,OAAO;AACL,mBAAa,KAAK;AAAA,QAChB,MAAM;AAAA,QACN,UAAU;AAAA,UACR,MAAM,KAAK;AAAA,UACX,aAAa,KAAK;AAAA,UAClB,YAAY,KAAK;AAAA,QACnB;AAAA,MACF,CAAC;AAAA,IACH;AAAA,EACF;AAEA,MAAI,cAAc,MAAM;AACtB,WAAO,EAAE,OAAO,cAAc,YAAY,QAAW,aAAa;AAAA,EACpE;AAEA,QAAM,OAAO,WAAW;AAExB,UAAQ,MAAM;AAAA,IACZ,KAAK;AAAA,IACL,KAAK;AACH,aAAO,EAAE,OAAO,cAAc,YAAY,MAAM,aAAa;AAAA,IAC/D,KAAK;AACH,aAAO,EAAE,OAAO,cAAc,YAAY,OAAO,aAAa;AAAA,IAIhE,KAAK;AACH,aAAO;AAAA,QACL,OAAO,aAAa;AAAA,UAClB,UAAQ,KAAK,SAAS,SAAS,WAAW;AAAA,QAC5C;AAAA,QACA,YAAY;AAAA,QACZ;AAAA,MACF;AAAA,IACF,SAAS;AACP,YAAM,mBAA0B;AAChC,YAAM,IAAI,+CAA8B;AAAA,QACtC,eAAe,qBAAqB,gBAAgB;AAAA,MACtD,CAAC;AAAA,IACH;AAAA,EACF;AACF;;;ANvDO,IAAM,2BAAN,MAA0D;AAAA,EAO/D,YAAY,SAA6B,QAA2B;AANpE,SAAS,uBAAuB;AAehC,SAAS,gBAA0C;AAAA,MACjD,mBAAmB,CAAC,gBAAgB;AAAA,IACtC;AAVE,SAAK,UAAU;AACf,SAAK,SAAS;AAAA,EAChB;AAAA,EAEA,IAAI,WAAmB;AACrB,WAAO,KAAK,OAAO;AAAA,EACrB;AAAA,EAMA,MAAc,QAAQ;AAAA,IACpB;AAAA,IACA;AAAA,IACA;AAAA,IACA;AAAA,IACA;AAAA,IACA;AAAA,IACA;AAAA,IACA;AAAA,IACA;AAAA,IACA;AAAA,IACA;AAAA,IACA;AAAA,IACA;AAAA,EACF,GAAiD;AArEnD;AAsEI,UAAM,WAAyC,CAAC;AAEhD,UAAM,WACH,eAAM,6CAAqB;AAAA,MAC1B,UAAU;AAAA,MACV;AAAA,MACA,QAAQ;AAAA,IACV,CAAC,MAJA,YAIM,CAAC;AAEV,QAAI,QAAQ,MAAM;AAChB,eAAS,KAAK;AAAA,QACZ,MAAM;AAAA,QACN,SAAS;AAAA,MACX,CAAC;AAAA,IACH;AAEA,QAAI,oBAAoB,MAAM;AAC5B,eAAS,KAAK;AAAA,QACZ,MAAM;AAAA,QACN,SAAS;AAAA,MACX,CAAC;AAAA,IACH;AAEA,QAAI,mBAAmB,MAAM;AAC3B,eAAS,KAAK;AAAA,QACZ,MAAM;AAAA,QACN,SAAS;AAAA,MACX,CAAC;AAAA,IACH;AAEA,QAAI,iBAAiB,MAAM;AACzB,eAAS,KAAK;AAAA,QACZ,MAAM;AAAA,QACN,SAAS;AAAA,MACX,CAAC;AAAA,IACH;AAEA,QACE,kBAAkB,QAClB,eAAe,SAAS,UACxB,eAAe,UAAU,MACzB;AACA,eAAS,KAAK;AAAA,QACZ,MAAM;AAAA,QACN,SAAS;AAAA,QACT,SAAS;AAAA,MACX,CAAC;AAAA,IACH;AAEA,UAAM,WAAW;AAAA;AAAA,MAEf,OAAO,KAAK;AAAA;AAAA,MAGZ,aAAa,QAAQ;AAAA;AAAA,MAGrB,YAAY;AAAA,MACZ;AAAA,MACA,OAAO;AAAA,MACP,aAAa;AAAA;AAAA,MAGb,kBACE,iDAAgB,UAAS,SAAS,EAAE,MAAM,cAAc,IAAI;AAAA;AAAA,MAG9D,sBAAsB,QAAQ;AAAA,MAC9B,qBAAqB,QAAQ;AAAA;AAAA,MAG7B,UAAU,6BAA6B,MAAM;AAAA,IAC/C;AAEA,UAAM;AAAA,MACJ,OAAO;AAAA,MACP,YAAY;AAAA,MACZ;AAAA,IACF,IAAI,aAAa;AAAA,MACf;AAAA,MACA;AAAA,IACF,CAAC;AAED,WAAO;AAAA,MACL,MAAM;AAAA,QACJ,GAAG;AAAA,QACH,OAAO;AAAA,QACP,aAAa;AAAA,MACf;AAAA,MACA,UAAU,CAAC,GAAG,UAAU,GAAG,YAAY;AAAA,IACzC;AAAA,EACF;AAAA,EAEA,MAAM,WACJ,SAC6D;AAC7D,UAAM,EAAE,MAAM,MAAM,SAAS,IAAI,MAAM,KAAK,QAAQ,OAAO;AAE3D,UAAM;AAAA,MACJ;AAAA,MACA,OAAO;AAAA,MACP,UAAU;AAAA,IACZ,IAAI,UAAM,sCAAc;AAAA,MACtB,KAAK,GAAG,KAAK,OAAO,OAAO;AAAA,MAC3B,aAAS,uCAAe,KAAK,OAAO,QAAQ,GAAG,QAAQ,OAAO;AAAA,MAC9D;AAAA,MACA,uBAAuB;AAAA,MACvB,+BAA2B;AAAA,QACzB;AAAA,MACF;AAAA,MACA,aAAa,QAAQ;AAAA,MACrB,OAAO,KAAK,OAAO;AAAA,IACrB,CAAC;AAED,UAAM,SAAS,SAAS,QAAQ,CAAC;AACjC,UAAM,UAAyC,CAAC;AAGhD,QAAI,OAAO,mBAAmB,OAAO,QAAQ,OAAO;AAKpD,UAAM,cAAc,KAAK,SAAS,KAAK,SAAS,SAAS,CAAC;AAC1D,QACE,YAAY,SAAS,gBACrB,6BAAM,WAAW,YAAY,WAC7B;AACA,aAAO,KAAK,MAAM,YAAY,QAAQ,MAAM;AAAA,IAC9C;AAEA,QAAI,QAAQ,QAAQ,KAAK,SAAS,GAAG;AACnC,cAAQ,KAAK,EAAE,MAAM,QAAQ,KAAK,CAAC;AAAA,IACrC;AAGA,QAAI,OAAO,QAAQ,cAAc,MAAM;AACrC,iBAAW,YAAY,OAAO,QAAQ,YAAY;AAChD,gBAAQ,KAAK;AAAA,UACX,MAAM;AAAA,UACN,YAAY,SAAS;AAAA,UACrB,UAAU,SAAS,SAAS;AAAA,UAC5B,OAAO,SAAS,SAAS;AAAA,QAC3B,CAAC;AAAA,MACH;AAAA,IACF;AAEA,WAAO;AAAA,MACL;AAAA,MACA,cAAc,uBAAuB,OAAO,aAAa;AAAA,MACzD,OAAO;AAAA,QACL,aAAa,SAAS,MAAM;AAAA,QAC5B,cAAc,SAAS,MAAM;AAAA,QAC7B,aAAa,SAAS,MAAM;AAAA,MAC9B;AAAA,MACA,SAAS,EAAE,KAAK;AAAA,MAChB,UAAU;AAAA,QACR,GAAG,oBAAoB,QAAQ;AAAA,QAC/B,SAAS;AAAA,QACT,MAAM;AAAA,MACR;AAAA,MACA;AAAA,IACF;AAAA,EACF;AAAA,EAEA,MAAM,SACJ,S
AC2D;AAC3D,UAAM,EAAE,MAAM,SAAS,IAAI,MAAM,KAAK,QAAQ,OAAO;AACrD,UAAM,OAAO,EAAE,GAAG,MAAM,QAAQ,KAAK;AAErC,UAAM,EAAE,iBAAiB,OAAO,SAAS,IAAI,UAAM,sCAAc;AAAA,MAC/D,KAAK,GAAG,KAAK,OAAO,OAAO;AAAA,MAC3B,aAAS,uCAAe,KAAK,OAAO,QAAQ,GAAG,QAAQ,OAAO;AAAA,MAC9D;AAAA,MACA,uBAAuB;AAAA,MACvB,+BAA2B;AAAA,QACzB;AAAA,MACF;AAAA,MACA,aAAa,QAAQ;AAAA,MACrB,OAAO,KAAK,OAAO;AAAA,IACrB,CAAC;AAED,QAAI,eAA4C;AAChD,UAAM,QAA8B;AAAA,MAClC,aAAa;AAAA,MACb,cAAc;AAAA,MACd,aAAa;AAAA,IACf;AAEA,QAAI,eAAe;AACnB,QAAI,aAAa;AAEjB,WAAO;AAAA,MACL,QAAQ,SAAS;AAAA,QACf,IAAI,gBAGF;AAAA,UACA,MAAM,YAAY;AAChB,uBAAW,QAAQ,EAAE,MAAM,gBAAgB,SAAS,CAAC;AAAA,UACvD;AAAA,UAEA,UAAU,OAAO,YAAY;AAE3B,gBAAI,QAAQ,kBAAkB;AAC5B,yBAAW,QAAQ,EAAE,MAAM,OAAO,UAAU,MAAM,SAAS,CAAC;AAAA,YAC9D;AAEA,gBAAI,CAAC,MAAM,SAAS;AAClB,yBAAW,QAAQ,EAAE,MAAM,SAAS,OAAO,MAAM,MAAM,CAAC;AACxD;AAAA,YACF;AAEA,kBAAM,QAAQ,MAAM;AAEpB,gBAAI,cAAc;AAChB,6BAAe;AAEf,yBAAW,QAAQ;AAAA,gBACjB,MAAM;AAAA,gBACN,GAAG,oBAAoB,KAAK;AAAA,cAC9B,CAAC;AAAA,YACH;AAEA,gBAAI,MAAM,SAAS,MAAM;AACvB,oBAAM,cAAc,MAAM,MAAM;AAChC,oBAAM,eAAe,MAAM,MAAM;AACjC,oBAAM,cAAc,MAAM,MAAM;AAAA,YAClC;AAEA,kBAAM,SAAS,MAAM,QAAQ,CAAC;AAC9B,kBAAM,QAAQ,OAAO;AAErB,kBAAM,cAAc,mBAAmB,MAAM,OAAO;AAEpD,gBAAI,eAAe,QAAQ,YAAY,SAAS,GAAG;AACjD,kBAAI,CAAC,YAAY;AACf,2BAAW,QAAQ,EAAE,MAAM,cAAc,IAAI,IAAI,CAAC;AAClD,6BAAa;AAAA,cACf;AAEA,yBAAW,QAAQ;AAAA,gBACjB,MAAM;AAAA,gBACN,IAAI;AAAA,gBACJ,OAAO;AAAA,cACT,CAAC;AAAA,YACH;AAEA,iBAAI,+BAAO,eAAc,MAAM;AAC7B,yBAAW,YAAY,MAAM,YAAY;AACvC,sBAAM,aAAa,SAAS;AAC5B,sBAAM,WAAW,SAAS,SAAS;AACnC,sBAAM,QAAQ,SAAS,SAAS;AAEhC,2BAAW,QAAQ;AAAA,kBACjB,MAAM;AAAA,kBACN,IAAI;AAAA,kBACJ;AAAA,gBACF,CAAC;AAED,2BAAW,QAAQ;AAAA,kBACjB,MAAM;AAAA,kBACN,IAAI;AAAA,kBACJ,OAAO;AAAA,gBACT,CAAC;AAED,2BAAW,QAAQ;AAAA,kBACjB,MAAM;AAAA,kBACN,IAAI;AAAA,gBACN,CAAC;AAED,2BAAW,QAAQ;AAAA,kBACjB,MAAM;AAAA,kBACN;AAAA,kBACA;AAAA,kBACA;AAAA,gBACF,CAAC;AAAA,cACH;AAAA,YACF;AAEA,gBAAI,OAAO,iBAAiB,MAAM;AAChC,6BAAe,uBAAuB,OAAO,aAAa;AAAA,YAC5D;AAAA,UACF;AAAA,UAEA,MAAM,YAAY;AAChB,gBAAI,YAAY;AACd,yBAAW,QAAQ,EAAE,MAAM,YAAY,IAAI,IAAI,CAAC;AAAA,YAClD;AAEA,uBAAW,QAAQ;AAAA,cACjB,MAAM;AAAA,cACN;AAAA,cACA;AAAA,YACF,CAAC;AAAA,UACH;AAAA,QACF,CAAC;AAAA,MACH;AAAA,MACA,SAAS,EAAE,KAAK;AAAA,MAChB,UAAU,EAAE,SAAS,gBAAgB;AAAA,IACvC;AAAA,EACF;AACF;AAEA,SAAS,mBAAmB,SAA+C;AACzE,MAAI,OAAO,YAAY,UAAU;AAC/B,WAAO;AAAA,EACT;AAEA,MAAI,WAAW,MAAM;AACnB,WAAO;AAAA,EACT;AAEA,QAAM,cAAwB,CAAC;AAE/B,aAAW,SAAS,SAAS;AAC3B,UAAM,EAAE,KAAK,IAAI;AAEjB,YAAQ,MAAM;AAAA,MACZ,KAAK;AACH,oBAAY,KAAK,MAAM,IAAI;AAC3B;AAAA,MACF,KAAK;AAAA,MACL,KAAK;AAEH;AAAA,MACF,SAAS;AACP,cAAM,mBAA0B;AAChC,cAAM,IAAI,MAAM,qBAAqB,gBAAgB,EAAE;AAAA,MACzD;AAAA,IACF;AAAA,EACF;AAEA,SAAO,YAAY,SAAS,YAAY,KAAK,EAAE,IAAI;AACrD;AAEA,IAAM,uBAAuB,aAC1B,MAAM;AAAA,EACL,aAAE,OAAO;AAAA,EACT,aAAE;AAAA,IACA,aAAE,mBAAmB,QAAQ;AAAA,MAC3B,aAAE,OAAO;AAAA,QACP,MAAM,aAAE,QAAQ,MAAM;AAAA,QACtB,MAAM,aAAE,OAAO;AAAA,MACjB,CAAC;AAAA,MACD,aAAE,OAAO;AAAA,QACP,MAAM,aAAE,QAAQ,WAAW;AAAA,QAC3B,WAAW,aAAE,MAAM;AAAA,UACjB,aAAE,OAAO;AAAA,UACT,aAAE,OAAO;AAAA,YACP,KAAK,aAAE,OAAO;AAAA,YACd,QAAQ,aAAE,OAAO,EAAE,SAAS;AAAA,UAC9B,CAAC;AAAA,QACH,CAAC;AAAA,MACH,CAAC;AAAA,MACD,aAAE,OAAO;AAAA,QACP,MAAM,aAAE,QAAQ,WAAW;AAAA,QAC3B,eAAe,aAAE,MAAM,aAAE,OAAO,CAAC;AAAA,MACnC,CAAC;AAAA,IACH,CAAC;AAAA,EACH;AACF,CAAC,EACA,QAAQ;AAEX,IAAM,qBAAqB,aAAE,OAAO;AAAA,EAClC,eAAe,aAAE,OAAO;AAAA,EACxB,mBAAmB,aAAE,OAAO;AAAA,EAC5B,cAAc,aAAE,OAAO;AACzB,CAAC;AAID,IAAM,4BAA4B,aAAE,OAAO;AAAA,EACzC,IAAI,aAAE,OAAO,EAAE,QAAQ;AAAA,EACvB,SAAS,aAAE,OAAO,EAAE,QAAQ;AAAA,EAC5B,OAAO,aAAE,OAAO,EAAE,QAAQ;AAAA,EAC1B,SAAS,aAAE;AAAA,IACT,aAAE,OAAO;AAAA,MACP,SAAS,aAAE,OAAO;AAAA,QAChB,MAAM,aAAE,QAAQ,WAAW;AAAA,QAC3B,SAAS;AAAA,QACT,YAAY,aA
CT;AAAA,UACC,aAAE,OAAO;AAAA,YACP,IAAI,aAAE,OAAO;AAAA,YACb,UAAU,aAAE,OAAO,EAAE,MAAM,aAAE,OAAO,GAAG,WAAW,aAAE,OAAO,EAAE,CAAC;AAAA,UAChE,CAAC;AAAA,QACH,EACC,QAAQ;AAAA,MACb,CAAC;AAAA,MACD,OAAO,aAAE,OAAO;AAAA,MAChB,eAAe,aAAE,OAAO,EAAE,QAAQ;AAAA,IACpC,CAAC;AAAA,EACH;AAAA,EACA,QAAQ,aAAE,QAAQ,iBAAiB;AAAA,EACnC,OAAO;AACT,CAAC;AAID,IAAM,yBAAyB,aAAE,OAAO;AAAA,EACtC,IAAI,aAAE,OAAO,EAAE,QAAQ;AAAA,EACvB,SAAS,aAAE,OAAO,EAAE,QAAQ;AAAA,EAC5B,OAAO,aAAE,OAAO,EAAE,QAAQ;AAAA,EAC1B,SAAS,aAAE;AAAA,IACT,aAAE,OAAO;AAAA,MACP,OAAO,aAAE,OAAO;AAAA,QACd,MAAM,aAAE,KAAK,CAAC,WAAW,CAAC,EAAE,SAAS;AAAA,QACrC,SAAS;AAAA,QACT,YAAY,aACT;AAAA,UACC,aAAE,OAAO;AAAA,YACP,IAAI,aAAE,OAAO;AAAA,YACb,UAAU,aAAE,OAAO,EAAE,MAAM,aAAE,OAAO,GAAG,WAAW,aAAE,OAAO,EAAE,CAAC;AAAA,UAChE,CAAC;AAAA,QACH,EACC,QAAQ;AAAA,MACb,CAAC;AAAA,MACD,eAAe,aAAE,OAAO,EAAE,QAAQ;AAAA,MAClC,OAAO,aAAE,OAAO;AAAA,IAClB,CAAC;AAAA,EACH;AAAA,EACA,OAAO,mBAAmB,QAAQ;AACpC,CAAC;;;AO9eD,IAAAC,mBAGO;AACP,IAAAC,yBAKO;AACP,IAAAC,aAAkB;AAWX,IAAM,wBAAN,MAAgE;AAAA,EAYrE,YACE,SACA,QACA;AAdF,SAAS,uBAAuB;AAEhC,SAAS,uBAAuB;AAChC,SAAS,wBAAwB;AAY/B,SAAK,UAAU;AACf,SAAK,SAAS;AAAA,EAChB;AAAA,EAVA,IAAI,WAAmB;AACrB,WAAO,KAAK,OAAO;AAAA,EACrB;AAAA,EAUA,MAAM,QAAQ;AAAA,IACZ;AAAA,IACA;AAAA,IACA;AAAA,EACF,GAEE;AACA,QAAI,OAAO,SAAS,KAAK,sBAAsB;AAC7C,YAAM,IAAI,oDAAmC;AAAA,QAC3C,UAAU,KAAK;AAAA,QACf,SAAS,KAAK;AAAA,QACd,sBAAsB,KAAK;AAAA,QAC3B;AAAA,MACF,CAAC;AAAA,IACH;AAEA,UAAM;AAAA,MACJ;AAAA,MACA,OAAO;AAAA,MACP;AAAA,IACF,IAAI,UAAM,sCAAc;AAAA,MACtB,KAAK,GAAG,KAAK,OAAO,OAAO;AAAA,MAC3B,aAAS,uCAAe,KAAK,OAAO,QAAQ,GAAG,OAAO;AAAA,MACtD,MAAM;AAAA,QACJ,OAAO,KAAK;AAAA,QACZ,OAAO;AAAA,QACP,iBAAiB;AAAA,MACnB;AAAA,MACA,uBAAuB;AAAA,MACvB,+BAA2B;AAAA,QACzB;AAAA,MACF;AAAA,MACA;AAAA,MACA,OAAO,KAAK,OAAO;AAAA,IACrB,CAAC;AAED,WAAO;AAAA,MACL,YAAY,SAAS,KAAK,IAAI,UAAQ,KAAK,SAAS;AAAA,MACpD,OAAO,SAAS,QACZ,EAAE,QAAQ,SAAS,MAAM,cAAc,IACvC;AAAA,MACJ,UAAU,EAAE,SAAS,iBAAiB,MAAM,SAAS;AAAA,IACvD;AAAA,EACF;AACF;AAIA,IAAM,qCAAqC,aAAE,OAAO;AAAA,EAClD,MAAM,aAAE,MAAM,aAAE,OAAO,EAAE,WAAW,aAAE,MAAM,aAAE,OAAO,CAAC,EAAE,CAAC,CAAC;AAAA,EAC1D,OAAO,aAAE,OAAO,EAAE,eAAe,aAAE,OAAO,EAAE,CAAC,EAAE,QAAQ;AACzD,CAAC;;;ARvBM,SAAS,cACd,UAAmC,CAAC,GACnB;AAvEnB;AAwEE,QAAM,WACJ,sDAAqB,QAAQ,OAAO,MAApC,YAAyC;AAE3C,QAAM,aAAa,OAAO;AAAA,IACxB,eAAe,cAAU,mCAAW;AAAA,MAClC,QAAQ,QAAQ;AAAA,MAChB,yBAAyB;AAAA,MACzB,aAAa;AAAA,IACf,CAAC,CAAC;AAAA,IACF,GAAG,QAAQ;AAAA,EACb;AAEA,QAAM,kBAAkB,CAAC,YACvB,IAAI,yBAAyB,SAAS;AAAA,IACpC,UAAU;AAAA,IACV;AAAA,IACA,SAAS;AAAA,IACT,OAAO,QAAQ;AAAA,EACjB,CAAC;AAEH,QAAM,uBAAuB,CAAC,YAC5B,IAAI,sBAAsB,SAAS;AAAA,IACjC,UAAU;AAAA,IACV;AAAA,IACA,SAAS;AAAA,IACT,OAAO,QAAQ;AAAA,EACjB,CAAC;AAEH,QAAM,WAAW,SAAU,SAA6B;AACtD,QAAI,YAAY;AACd,YAAM,IAAI;AAAA,QACR;AAAA,MACF;AAAA,IACF;AAEA,WAAO,gBAAgB,OAAO;AAAA,EAChC;AAEA,WAAS,gBAAgB;AACzB,WAAS,OAAO;AAChB,WAAS,YAAY;AACrB,WAAS,gBAAgB;AACzB,WAAS,qBAAqB;AAE9B,WAAS,aAAa,CAAC,YAAoB;AACzC,UAAM,IAAI,kCAAiB,EAAE,SAAS,WAAW,aAAa,CAAC;AAAA,EACjE;AAEA,SAAO;AACT;AAKO,IAAM,UAAU,cAAc;","names":["import_provider","import_provider_utils","import_provider_utils","import_v4","import_provider_utils","import_v4","import_provider","import_provider","import_provider_utils","import_v4"]}
+
{"version":3,"sources":["../src/index.ts","../src/mistral-provider.ts","../src/mistral-chat-language-model.ts","../src/convert-to-mistral-chat-messages.ts","../src/get-response-metadata.ts","../src/map-mistral-finish-reason.ts","../src/mistral-chat-options.ts","../src/mistral-error.ts","../src/mistral-prepare-tools.ts","../src/mistral-embedding-model.ts"],"sourcesContent":["export { createMistral, mistral } from './mistral-provider';\nexport type {\n MistralProvider,\n MistralProviderSettings,\n} from './mistral-provider';\n","import {\n EmbeddingModelV2,\n LanguageModelV2,\n NoSuchModelError,\n ProviderV2,\n} from '@ai-sdk/provider';\nimport {\n FetchFunction,\n loadApiKey,\n withoutTrailingSlash,\n} from '@ai-sdk/provider-utils';\nimport { MistralChatLanguageModel } from './mistral-chat-language-model';\nimport { MistralChatModelId } from './mistral-chat-options';\nimport { MistralEmbeddingModel } from './mistral-embedding-model';\nimport { MistralEmbeddingModelId } from './mistral-embedding-options';\n\nexport interface MistralProvider extends ProviderV2 {\n (modelId: MistralChatModelId): LanguageModelV2;\n\n /**\nCreates a model for text generation.\n*/\n languageModel(modelId: MistralChatModelId): LanguageModelV2;\n\n /**\nCreates a model for text generation.\n*/\n chat(modelId: MistralChatModelId): LanguageModelV2;\n\n /**\n@deprecated Use `textEmbedding()` instead.\n */\n embedding(modelId: MistralEmbeddingModelId): EmbeddingModelV2<string>;\n\n textEmbedding(modelId: MistralEmbeddingModelId): EmbeddingModelV2<string>;\n\n textEmbeddingModel: (\n modelId: MistralEmbeddingModelId,\n ) => EmbeddingModelV2<string>;\n}\n\nexport interface MistralProviderSettings {\n /**\nUse a different URL prefix for API calls, e.g. to use proxy servers.\nThe default prefix is `https://api.mistral.ai/v1`.\n */\n baseURL?: string;\n\n /**\nAPI key that is being send using the `Authorization` header.\nIt defaults to the `MISTRAL_API_KEY` environment variable.\n */\n apiKey?: string;\n\n /**\nCustom headers to include in the requests.\n */\n headers?: Record<string, string>;\n\n /**\nCustom fetch implementation. You can use it as a middleware to intercept requests,\nor to provide a custom fetch implementation for e.g. testing.\n */\n fetch?: FetchFunction;\n\n generateId?: () => string;\n}\n\n/**\nCreate a Mistral AI provider instance.\n */\nexport function createMistral(\n options: MistralProviderSettings = {},\n): MistralProvider {\n const baseURL =\n withoutTrailingSlash(options.baseURL) ?? 
'https://api.mistral.ai/v1';\n\n const getHeaders = () => ({\n Authorization: `Bearer ${loadApiKey({\n apiKey: options.apiKey,\n environmentVariableName: 'MISTRAL_API_KEY',\n description: 'Mistral',\n })}`,\n ...options.headers,\n });\n\n const createChatModel = (modelId: MistralChatModelId) =>\n new MistralChatLanguageModel(modelId, {\n provider: 'mistral.chat',\n baseURL,\n headers: getHeaders,\n fetch: options.fetch,\n generateId: options.generateId,\n });\n\n const createEmbeddingModel = (modelId: MistralEmbeddingModelId) =>\n new MistralEmbeddingModel(modelId, {\n provider: 'mistral.embedding',\n baseURL,\n headers: getHeaders,\n fetch: options.fetch,\n });\n\n const provider = function (modelId: MistralChatModelId) {\n if (new.target) {\n throw new Error(\n 'The Mistral model function cannot be called with the new keyword.',\n );\n }\n\n return createChatModel(modelId);\n };\n\n provider.languageModel = createChatModel;\n provider.chat = createChatModel;\n provider.embedding = createEmbeddingModel;\n provider.textEmbedding = createEmbeddingModel;\n provider.textEmbeddingModel = createEmbeddingModel;\n\n provider.imageModel = (modelId: string) => {\n throw new NoSuchModelError({ modelId, modelType: 'imageModel' });\n };\n\n return provider;\n}\n\n/**\nDefault Mistral provider instance.\n */\nexport const mistral = createMistral();\n","import {\n LanguageModelV2,\n LanguageModelV2CallWarning,\n LanguageModelV2Content,\n LanguageModelV2FinishReason,\n LanguageModelV2StreamPart,\n LanguageModelV2Usage,\n} from '@ai-sdk/provider';\nimport {\n combineHeaders,\n createEventSourceResponseHandler,\n createJsonResponseHandler,\n FetchFunction,\n generateId,\n parseProviderOptions,\n ParseResult,\n postJsonToApi,\n} from '@ai-sdk/provider-utils';\nimport { z } from 'zod/v4';\nimport { convertToMistralChatMessages } from './convert-to-mistral-chat-messages';\nimport { getResponseMetadata } from './get-response-metadata';\nimport { mapMistralFinishReason } from './map-mistral-finish-reason';\nimport {\n MistralChatModelId,\n mistralProviderOptions,\n} from './mistral-chat-options';\nimport { mistralFailedResponseHandler } from './mistral-error';\nimport { prepareTools } from './mistral-prepare-tools';\n\ntype MistralChatConfig = {\n provider: string;\n baseURL: string;\n headers: () => Record<string, string | undefined>;\n fetch?: FetchFunction;\n generateId?: () => string;\n};\n\nexport class MistralChatLanguageModel implements LanguageModelV2 {\n readonly specificationVersion = 'v2';\n\n readonly modelId: MistralChatModelId;\n\n private readonly config: MistralChatConfig;\n private readonly generateId: () => string;\n\n constructor(modelId: MistralChatModelId, config: MistralChatConfig) {\n this.modelId = modelId;\n this.config = config;\n this.generateId = config.generateId ?? generateId;\n }\n\n get provider(): string {\n return this.config.provider;\n }\n\n readonly supportedUrls: Record<string, RegExp[]> = {\n 'application/pdf': [/^https:\\/\\/.*$/],\n };\n\n private async getArgs({\n prompt,\n maxOutputTokens,\n temperature,\n topP,\n topK,\n frequencyPenalty,\n presencePenalty,\n stopSequences,\n responseFormat,\n seed,\n providerOptions,\n tools,\n toolChoice,\n }: Parameters<LanguageModelV2['doGenerate']>[0]) {\n const warnings: LanguageModelV2CallWarning[] = [];\n\n const options =\n (await parseProviderOptions({\n provider: 'mistral',\n providerOptions,\n schema: mistralProviderOptions,\n })) ?? 
{};\n\n if (topK != null) {\n warnings.push({\n type: 'unsupported-setting',\n setting: 'topK',\n });\n }\n\n if (frequencyPenalty != null) {\n warnings.push({\n type: 'unsupported-setting',\n setting: 'frequencyPenalty',\n });\n }\n\n if (presencePenalty != null) {\n warnings.push({\n type: 'unsupported-setting',\n setting: 'presencePenalty',\n });\n }\n\n if (stopSequences != null) {\n warnings.push({\n type: 'unsupported-setting',\n setting: 'stopSequences',\n });\n }\n\n if (\n responseFormat != null &&\n responseFormat.type === 'json' &&\n responseFormat.schema != null\n ) {\n warnings.push({\n type: 'unsupported-setting',\n setting: 'responseFormat',\n details: 'JSON response format schema is not supported',\n });\n }\n\n const baseArgs = {\n // model id:\n model: this.modelId,\n\n // model specific settings:\n safe_prompt: options.safePrompt,\n\n // standardized settings:\n max_tokens: maxOutputTokens,\n temperature,\n top_p: topP,\n random_seed: seed,\n\n // response format:\n response_format:\n responseFormat?.type === 'json' ? { type: 'json_object' } : undefined,\n\n // mistral-specific provider options:\n document_image_limit: options.documentImageLimit,\n document_page_limit: options.documentPageLimit,\n\n // messages:\n messages: convertToMistralChatMessages(prompt),\n };\n\n const {\n tools: mistralTools,\n toolChoice: mistralToolChoice,\n toolWarnings,\n } = prepareTools({\n tools,\n toolChoice,\n });\n\n return {\n args: {\n ...baseArgs,\n tools: mistralTools,\n tool_choice: mistralToolChoice,\n },\n warnings: [...warnings, ...toolWarnings],\n };\n }\n\n async doGenerate(\n options: Parameters<LanguageModelV2['doGenerate']>[0],\n ): Promise<Awaited<ReturnType<LanguageModelV2['doGenerate']>>> {\n const { args: body, warnings } = await this.getArgs(options);\n\n const {\n responseHeaders,\n value: response,\n rawValue: rawResponse,\n } = await postJsonToApi({\n url: `${this.config.baseURL}/chat/completions`,\n headers: combineHeaders(this.config.headers(), options.headers),\n body,\n failedResponseHandler: mistralFailedResponseHandler,\n successfulResponseHandler: createJsonResponseHandler(\n mistralChatResponseSchema,\n ),\n abortSignal: options.abortSignal,\n fetch: this.config.fetch,\n });\n\n const choice = response.choices[0];\n const content: Array<LanguageModelV2Content> = [];\n\n // process content parts in order to preserve sequence\n if (\n choice.message.content != null &&\n Array.isArray(choice.message.content)\n ) {\n for (const part of choice.message.content) {\n if (part.type === 'thinking') {\n const reasoningText = extractReasoningContent(part.thinking);\n if (reasoningText.length > 0) {\n content.push({ type: 'reasoning', text: reasoningText });\n }\n } else if (part.type === 'text') {\n if (part.text.length > 0) {\n content.push({ type: 'text', text: part.text });\n }\n }\n }\n } else {\n // handle legacy string content\n const text = extractTextContent(choice.message.content);\n if (text != null && text.length > 0) {\n content.push({ type: 'text', text });\n }\n }\n\n // when there is a trailing assistant message, mistral will send the\n // content of that message again. we skip this repeated content to\n // avoid duplication, e.g. 
in continuation mode.\n\n // tool calls:\n if (choice.message.tool_calls != null) {\n for (const toolCall of choice.message.tool_calls) {\n content.push({\n type: 'tool-call',\n toolCallId: toolCall.id,\n toolName: toolCall.function.name,\n input: toolCall.function.arguments!,\n });\n }\n }\n\n return {\n content,\n finishReason: mapMistralFinishReason(choice.finish_reason),\n usage: {\n inputTokens: response.usage.prompt_tokens,\n outputTokens: response.usage.completion_tokens,\n totalTokens: response.usage.total_tokens,\n },\n request: { body },\n response: {\n ...getResponseMetadata(response),\n headers: responseHeaders,\n body: rawResponse,\n },\n warnings,\n };\n }\n\n async doStream(\n options: Parameters<LanguageModelV2['doStream']>[0],\n ): Promise<Awaited<ReturnType<LanguageModelV2['doStream']>>> {\n const { args, warnings } = await this.getArgs(options);\n const body = { ...args, stream: true };\n\n const { responseHeaders, value: response } = await postJsonToApi({\n url: `${this.config.baseURL}/chat/completions`,\n headers: combineHeaders(this.config.headers(), options.headers),\n body,\n failedResponseHandler: mistralFailedResponseHandler,\n successfulResponseHandler: createEventSourceResponseHandler(\n mistralChatChunkSchema,\n ),\n abortSignal: options.abortSignal,\n fetch: this.config.fetch,\n });\n\n let finishReason: LanguageModelV2FinishReason = 'unknown';\n const usage: LanguageModelV2Usage = {\n inputTokens: undefined,\n outputTokens: undefined,\n totalTokens: undefined,\n };\n\n let isFirstChunk = true;\n let activeText = false;\n let activeReasoningId: string | null = null;\n\n const generateId = this.generateId;\n\n return {\n stream: response.pipeThrough(\n new TransformStream<\n ParseResult<z.infer<typeof mistralChatChunkSchema>>,\n LanguageModelV2StreamPart\n >({\n start(controller) {\n controller.enqueue({ type: 'stream-start', warnings });\n },\n\n transform(chunk, controller) {\n // Emit raw chunk if requested (before anything else)\n if (options.includeRawChunks) {\n controller.enqueue({ type: 'raw', rawValue: chunk.rawValue });\n }\n\n if (!chunk.success) {\n controller.enqueue({ type: 'error', error: chunk.error });\n return;\n }\n\n const value = chunk.value;\n\n if (isFirstChunk) {\n isFirstChunk = false;\n\n controller.enqueue({\n type: 'response-metadata',\n ...getResponseMetadata(value),\n });\n }\n\n if (value.usage != null) {\n usage.inputTokens = value.usage.prompt_tokens;\n usage.outputTokens = value.usage.completion_tokens;\n usage.totalTokens = value.usage.total_tokens;\n }\n\n const choice = value.choices[0];\n const delta = choice.delta;\n\n const textContent = extractTextContent(delta.content);\n\n if (delta.content != null && Array.isArray(delta.content)) {\n for (const part of delta.content) {\n if (part.type === 'thinking') {\n const reasoningDelta = extractReasoningContent(part.thinking);\n if (reasoningDelta.length > 0) {\n if (activeReasoningId == null) {\n // end any active text before starting reasoning\n if (activeText) {\n controller.enqueue({ type: 'text-end', id: '0' });\n activeText = false;\n }\n\n activeReasoningId = generateId();\n controller.enqueue({\n type: 'reasoning-start',\n id: activeReasoningId,\n });\n }\n controller.enqueue({\n type: 'reasoning-delta',\n id: activeReasoningId,\n delta: reasoningDelta,\n });\n }\n }\n }\n }\n\n if (textContent != null && textContent.length > 0) {\n if (!activeText) {\n // if we were in reasoning mode, end it before starting text\n if (activeReasoningId != null) {\n 
controller.enqueue({\n type: 'reasoning-end',\n id: activeReasoningId,\n });\n activeReasoningId = null;\n }\n controller.enqueue({ type: 'text-start', id: '0' });\n activeText = true;\n }\n\n controller.enqueue({\n type: 'text-delta',\n id: '0',\n delta: textContent,\n });\n }\n\n if (delta?.tool_calls != null) {\n for (const toolCall of delta.tool_calls) {\n const toolCallId = toolCall.id;\n const toolName = toolCall.function.name;\n const input = toolCall.function.arguments;\n\n controller.enqueue({\n type: 'tool-input-start',\n id: toolCallId,\n toolName,\n });\n\n controller.enqueue({\n type: 'tool-input-delta',\n id: toolCallId,\n delta: input,\n });\n\n controller.enqueue({\n type: 'tool-input-end',\n id: toolCallId,\n });\n\n controller.enqueue({\n type: 'tool-call',\n toolCallId,\n toolName,\n input,\n });\n }\n }\n\n if (choice.finish_reason != null) {\n finishReason = mapMistralFinishReason(choice.finish_reason);\n }\n },\n\n flush(controller) {\n if (activeReasoningId != null) {\n controller.enqueue({\n type: 'reasoning-end',\n id: activeReasoningId,\n });\n }\n if (activeText) {\n controller.enqueue({ type: 'text-end', id: '0' });\n }\n\n controller.enqueue({\n type: 'finish',\n finishReason,\n usage,\n });\n },\n }),\n ),\n request: { body },\n response: { headers: responseHeaders },\n };\n }\n}\n\nfunction extractReasoningContent(\n thinking: Array<{ type: string; text: string }>,\n) {\n return thinking\n .filter(chunk => chunk.type === 'text')\n .map(chunk => chunk.text)\n .join('');\n}\n\nfunction extractTextContent(content: z.infer<typeof mistralContentSchema>) {\n if (typeof content === 'string') {\n return content;\n }\n\n if (content == null) {\n return undefined;\n }\n\n const textContent: string[] = [];\n\n for (const chunk of content) {\n const { type } = chunk;\n\n switch (type) {\n case 'text':\n textContent.push(chunk.text);\n break;\n case 'thinking':\n case 'image_url':\n case 'reference':\n // thinking, image content, and reference content are currently ignored\n break;\n default: {\n const _exhaustiveCheck: never = type;\n throw new Error(`Unsupported type: ${_exhaustiveCheck}`);\n }\n }\n }\n\n return textContent.length ? 
textContent.join('') : undefined;\n}\n\nconst mistralContentSchema = z\n .union([\n z.string(),\n z.array(\n z.discriminatedUnion('type', [\n z.object({\n type: z.literal('text'),\n text: z.string(),\n }),\n z.object({\n type: z.literal('image_url'),\n image_url: z.union([\n z.string(),\n z.object({\n url: z.string(),\n detail: z.string().nullable(),\n }),\n ]),\n }),\n z.object({\n type: z.literal('reference'),\n reference_ids: z.array(z.number()),\n }),\n z.object({\n type: z.literal('thinking'),\n thinking: z.array(\n z.object({\n type: z.literal('text'),\n text: z.string(),\n }),\n ),\n }),\n ]),\n ),\n ])\n .nullish();\n\nconst mistralUsageSchema = z.object({\n prompt_tokens: z.number(),\n completion_tokens: z.number(),\n total_tokens: z.number(),\n});\n\n// limited version of the schema, focussed on what is needed for the implementation\n// this approach limits breakages when the API changes and increases efficiency\nconst mistralChatResponseSchema = z.object({\n id: z.string().nullish(),\n created: z.number().nullish(),\n model: z.string().nullish(),\n choices: z.array(\n z.object({\n message: z.object({\n role: z.literal('assistant'),\n content: mistralContentSchema,\n tool_calls: z\n .array(\n z.object({\n id: z.string(),\n function: z.object({ name: z.string(), arguments: z.string() }),\n }),\n )\n .nullish(),\n }),\n index: z.number(),\n finish_reason: z.string().nullish(),\n }),\n ),\n object: z.literal('chat.completion'),\n usage: mistralUsageSchema,\n});\n\n// limited version of the schema, focussed on what is needed for the implementation\n// this approach limits breakages when the API changes and increases efficiency\nconst mistralChatChunkSchema = z.object({\n id: z.string().nullish(),\n created: z.number().nullish(),\n model: z.string().nullish(),\n choices: z.array(\n z.object({\n delta: z.object({\n role: z.enum(['assistant']).optional(),\n content: mistralContentSchema,\n tool_calls: z\n .array(\n z.object({\n id: z.string(),\n function: z.object({ name: z.string(), arguments: z.string() }),\n }),\n )\n .nullish(),\n }),\n finish_reason: z.string().nullish(),\n index: z.number(),\n }),\n ),\n usage: mistralUsageSchema.nullish(),\n});\n","import {\n LanguageModelV2Prompt,\n UnsupportedFunctionalityError,\n} from '@ai-sdk/provider';\nimport { MistralPrompt } from './mistral-chat-prompt';\nimport { convertToBase64 } from '@ai-sdk/provider-utils';\n\nexport function convertToMistralChatMessages(\n prompt: LanguageModelV2Prompt,\n): MistralPrompt {\n const messages: MistralPrompt = [];\n\n for (let i = 0; i < prompt.length; i++) {\n const { role, content } = prompt[i];\n const isLastMessage = i === prompt.length - 1;\n\n switch (role) {\n case 'system': {\n messages.push({ role: 'system', content });\n break;\n }\n\n case 'user': {\n messages.push({\n role: 'user',\n content: content.map(part => {\n switch (part.type) {\n case 'text': {\n return { type: 'text', text: part.text };\n }\n\n case 'file': {\n if (part.mediaType.startsWith('image/')) {\n const mediaType =\n part.mediaType === 'image/*'\n ? 'image/jpeg'\n : part.mediaType;\n\n return {\n type: 'image_url',\n image_url:\n part.data instanceof URL\n ? 
part.data.toString()\n : `data:${mediaType};base64,${convertToBase64(part.data)}`,\n };\n } else if (part.mediaType === 'application/pdf') {\n return {\n type: 'document_url',\n document_url: part.data.toString(),\n };\n } else {\n throw new UnsupportedFunctionalityError({\n functionality:\n 'Only images and PDF file parts are supported',\n });\n }\n }\n }\n }),\n });\n break;\n }\n\n case 'assistant': {\n let text = '';\n const toolCalls: Array<{\n id: string;\n type: 'function';\n function: { name: string; arguments: string };\n }> = [];\n\n for (const part of content) {\n switch (part.type) {\n case 'text': {\n text += part.text;\n break;\n }\n case 'tool-call': {\n toolCalls.push({\n id: part.toolCallId,\n type: 'function',\n function: {\n name: part.toolName,\n arguments: JSON.stringify(part.input),\n },\n });\n break;\n }\n case 'reasoning': {\n text += part.text;\n break;\n }\n default: {\n throw new Error(\n `Unsupported content type in assistant message: ${part.type}`,\n );\n }\n }\n }\n\n messages.push({\n role: 'assistant',\n content: text,\n prefix: isLastMessage ? true : undefined,\n tool_calls: toolCalls.length > 0 ? toolCalls : undefined,\n });\n\n break;\n }\n case 'tool': {\n for (const toolResponse of content) {\n const output = toolResponse.output;\n\n let contentValue: string;\n switch (output.type) {\n case 'text':\n case 'error-text':\n contentValue = output.value;\n break;\n case 'content':\n case 'json':\n case 'error-json':\n contentValue = JSON.stringify(output.value);\n break;\n }\n\n messages.push({\n role: 'tool',\n name: toolResponse.toolName,\n tool_call_id: toolResponse.toolCallId,\n content: contentValue,\n });\n }\n break;\n }\n default: {\n const _exhaustiveCheck: never = role;\n throw new Error(`Unsupported role: ${_exhaustiveCheck}`);\n }\n }\n }\n\n return messages;\n}\n","export function getResponseMetadata({\n id,\n model,\n created,\n}: {\n id?: string | undefined | null;\n created?: number | undefined | null;\n model?: string | undefined | null;\n}) {\n return {\n id: id ?? undefined,\n modelId: model ?? undefined,\n timestamp: created != null ? 
new Date(created * 1000) : undefined,\n };\n}\n","import { LanguageModelV2FinishReason } from '@ai-sdk/provider';\n\nexport function mapMistralFinishReason(\n finishReason: string | null | undefined,\n): LanguageModelV2FinishReason {\n switch (finishReason) {\n case 'stop':\n return 'stop';\n case 'length':\n case 'model_length':\n return 'length';\n case 'tool_calls':\n return 'tool-calls';\n default:\n return 'unknown';\n }\n}\n","import { z } from 'zod/v4';\n\n// https://docs.mistral.ai/getting-started/models/models_overview/\nexport type MistralChatModelId =\n // premier\n | 'ministral-3b-latest'\n | 'ministral-8b-latest'\n | 'mistral-large-latest'\n | 'mistral-medium-latest'\n | 'mistral-medium-2508'\n | 'mistral-medium-2505'\n | 'mistral-small-latest'\n | 'pixtral-large-latest'\n // reasoning models\n | 'magistral-small-2507'\n | 'magistral-medium-2507'\n | 'magistral-small-2506'\n | 'magistral-medium-2506'\n // free\n | 'pixtral-12b-2409'\n // legacy\n | 'open-mistral-7b'\n | 'open-mixtral-8x7b'\n | 'open-mixtral-8x22b'\n | (string & {});\n\nexport const mistralProviderOptions = z.object({\n /**\nWhether to inject a safety prompt before all conversations.\n\nDefaults to `false`.\n */\n safePrompt: z.boolean().optional(),\n\n documentImageLimit: z.number().optional(),\n documentPageLimit: z.number().optional(),\n});\n\nexport type MistralProviderOptions = z.infer<typeof mistralProviderOptions>;\n","import { createJsonErrorResponseHandler } from '@ai-sdk/provider-utils';\nimport { z } from 'zod/v4';\n\nconst mistralErrorDataSchema = z.object({\n object: z.literal('error'),\n message: z.string(),\n type: z.string(),\n param: z.string().nullable(),\n code: z.string().nullable(),\n});\n\nexport type MistralErrorData = z.infer<typeof mistralErrorDataSchema>;\n\nexport const mistralFailedResponseHandler = createJsonErrorResponseHandler({\n errorSchema: mistralErrorDataSchema,\n errorToMessage: data => data.message,\n});\n","import {\n LanguageModelV2CallOptions,\n LanguageModelV2CallWarning,\n UnsupportedFunctionalityError,\n} from '@ai-sdk/provider';\nimport { MistralToolChoice } from './mistral-chat-prompt';\n\nexport function prepareTools({\n tools,\n toolChoice,\n}: {\n tools: LanguageModelV2CallOptions['tools'];\n toolChoice?: LanguageModelV2CallOptions['toolChoice'];\n}): {\n tools:\n | Array<{\n type: 'function';\n function: {\n name: string;\n description: string | undefined;\n parameters: unknown;\n };\n }>\n | undefined;\n toolChoice: MistralToolChoice | undefined;\n toolWarnings: LanguageModelV2CallWarning[];\n} {\n // when the tools array is empty, change it to undefined to prevent errors:\n tools = tools?.length ? 
tools : undefined;\n\n const toolWarnings: LanguageModelV2CallWarning[] = [];\n\n if (tools == null) {\n return { tools: undefined, toolChoice: undefined, toolWarnings };\n }\n\n const mistralTools: Array<{\n type: 'function';\n function: {\n name: string;\n description: string | undefined;\n parameters: unknown;\n };\n }> = [];\n\n for (const tool of tools) {\n if (tool.type === 'provider-defined') {\n toolWarnings.push({ type: 'unsupported-tool', tool });\n } else {\n mistralTools.push({\n type: 'function',\n function: {\n name: tool.name,\n description: tool.description,\n parameters: tool.inputSchema,\n },\n });\n }\n }\n\n if (toolChoice == null) {\n return { tools: mistralTools, toolChoice: undefined, toolWarnings };\n }\n\n const type = toolChoice.type;\n\n switch (type) {\n case 'auto':\n case 'none':\n return { tools: mistralTools, toolChoice: type, toolWarnings };\n case 'required':\n return { tools: mistralTools, toolChoice: 'any', toolWarnings };\n\n // mistral does not support tool mode directly,\n // so we filter the tools and force the tool choice through 'any'\n case 'tool':\n return {\n tools: mistralTools.filter(\n tool => tool.function.name === toolChoice.toolName,\n ),\n toolChoice: 'any',\n toolWarnings,\n };\n default: {\n const _exhaustiveCheck: never = type;\n throw new UnsupportedFunctionalityError({\n functionality: `tool choice type: ${_exhaustiveCheck}`,\n });\n }\n }\n}\n","import {\n EmbeddingModelV2,\n TooManyEmbeddingValuesForCallError,\n} from '@ai-sdk/provider';\nimport {\n combineHeaders,\n createJsonResponseHandler,\n FetchFunction,\n postJsonToApi,\n} from '@ai-sdk/provider-utils';\nimport { z } from 'zod/v4';\nimport { MistralEmbeddingModelId } from './mistral-embedding-options';\nimport { mistralFailedResponseHandler } from './mistral-error';\n\ntype MistralEmbeddingConfig = {\n provider: string;\n baseURL: string;\n headers: () => Record<string, string | undefined>;\n fetch?: FetchFunction;\n};\n\nexport class MistralEmbeddingModel implements EmbeddingModelV2<string> {\n readonly specificationVersion = 'v2';\n readonly modelId: MistralEmbeddingModelId;\n readonly maxEmbeddingsPerCall = 32;\n readonly supportsParallelCalls = false;\n\n private readonly config: MistralEmbeddingConfig;\n\n get provider(): string {\n return this.config.provider;\n }\n\n constructor(\n modelId: MistralEmbeddingModelId,\n config: MistralEmbeddingConfig,\n ) {\n this.modelId = modelId;\n this.config = config;\n }\n\n async doEmbed({\n values,\n abortSignal,\n headers,\n }: Parameters<EmbeddingModelV2<string>['doEmbed']>[0]): Promise<\n Awaited<ReturnType<EmbeddingModelV2<string>['doEmbed']>>\n > {\n if (values.length > this.maxEmbeddingsPerCall) {\n throw new TooManyEmbeddingValuesForCallError({\n provider: this.provider,\n modelId: this.modelId,\n maxEmbeddingsPerCall: this.maxEmbeddingsPerCall,\n values,\n });\n }\n\n const {\n responseHeaders,\n value: response,\n rawValue,\n } = await postJsonToApi({\n url: `${this.config.baseURL}/embeddings`,\n headers: combineHeaders(this.config.headers(), headers),\n body: {\n model: this.modelId,\n input: values,\n encoding_format: 'float',\n },\n failedResponseHandler: mistralFailedResponseHandler,\n successfulResponseHandler: createJsonResponseHandler(\n MistralTextEmbeddingResponseSchema,\n ),\n abortSignal,\n fetch: this.config.fetch,\n });\n\n return {\n embeddings: response.data.map(item => item.embedding),\n usage: response.usage\n ? 
{ tokens: response.usage.prompt_tokens }\n : undefined,\n response: { headers: responseHeaders, body: rawValue },\n };\n }\n}\n\n// minimal version of the schema, focussed on what is needed for the implementation\n// this approach limits breakages when the API changes and increases efficiency\nconst MistralTextEmbeddingResponseSchema = z.object({\n data: z.array(z.object({ embedding: z.array(z.number()) })),\n usage: z.object({ prompt_tokens: z.number() }).nullish(),\n});\n"],"mappings":";;;;;;;;;;;;;;;;;;;;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;;;ACAA,IAAAA,mBAKO;AACP,IAAAC,yBAIO;;;ACFP,IAAAC,yBASO;AACP,IAAAC,aAAkB;;;AClBlB,sBAGO;AAEP,4BAAgC;AAEzB,SAAS,6BACd,QACe;AACf,QAAM,WAA0B,CAAC;AAEjC,WAAS,IAAI,GAAG,IAAI,OAAO,QAAQ,KAAK;AACtC,UAAM,EAAE,MAAM,QAAQ,IAAI,OAAO,CAAC;AAClC,UAAM,gBAAgB,MAAM,OAAO,SAAS;AAE5C,YAAQ,MAAM;AAAA,MACZ,KAAK,UAAU;AACb,iBAAS,KAAK,EAAE,MAAM,UAAU,QAAQ,CAAC;AACzC;AAAA,MACF;AAAA,MAEA,KAAK,QAAQ;AACX,iBAAS,KAAK;AAAA,UACZ,MAAM;AAAA,UACN,SAAS,QAAQ,IAAI,UAAQ;AAC3B,oBAAQ,KAAK,MAAM;AAAA,cACjB,KAAK,QAAQ;AACX,uBAAO,EAAE,MAAM,QAAQ,MAAM,KAAK,KAAK;AAAA,cACzC;AAAA,cAEA,KAAK,QAAQ;AACX,oBAAI,KAAK,UAAU,WAAW,QAAQ,GAAG;AACvC,wBAAM,YACJ,KAAK,cAAc,YACf,eACA,KAAK;AAEX,yBAAO;AAAA,oBACL,MAAM;AAAA,oBACN,WACE,KAAK,gBAAgB,MACjB,KAAK,KAAK,SAAS,IACnB,QAAQ,SAAS,eAAW,uCAAgB,KAAK,IAAI,CAAC;AAAA,kBAC9D;AAAA,gBACF,WAAW,KAAK,cAAc,mBAAmB;AAC/C,yBAAO;AAAA,oBACL,MAAM;AAAA,oBACN,cAAc,KAAK,KAAK,SAAS;AAAA,kBACnC;AAAA,gBACF,OAAO;AACL,wBAAM,IAAI,8CAA8B;AAAA,oBACtC,eACE;AAAA,kBACJ,CAAC;AAAA,gBACH;AAAA,cACF;AAAA,YACF;AAAA,UACF,CAAC;AAAA,QACH,CAAC;AACD;AAAA,MACF;AAAA,MAEA,KAAK,aAAa;AAChB,YAAI,OAAO;AACX,cAAM,YAID,CAAC;AAEN,mBAAW,QAAQ,SAAS;AAC1B,kBAAQ,KAAK,MAAM;AAAA,YACjB,KAAK,QAAQ;AACX,sBAAQ,KAAK;AACb;AAAA,YACF;AAAA,YACA,KAAK,aAAa;AAChB,wBAAU,KAAK;AAAA,gBACb,IAAI,KAAK;AAAA,gBACT,MAAM;AAAA,gBACN,UAAU;AAAA,kBACR,MAAM,KAAK;AAAA,kBACX,WAAW,KAAK,UAAU,KAAK,KAAK;AAAA,gBACtC;AAAA,cACF,CAAC;AACD;AAAA,YACF;AAAA,YACA,KAAK,aAAa;AAChB,sBAAQ,KAAK;AACb;AAAA,YACF;AAAA,YACA,SAAS;AACP,oBAAM,IAAI;AAAA,gBACR,kDAAkD,KAAK,IAAI;AAAA,cAC7D;AAAA,YACF;AAAA,UACF;AAAA,QACF;AAEA,iBAAS,KAAK;AAAA,UACZ,MAAM;AAAA,UACN,SAAS;AAAA,UACT,QAAQ,gBAAgB,OAAO;AAAA,UAC/B,YAAY,UAAU,SAAS,IAAI,YAAY;AAAA,QACjD,CAAC;AAED;AAAA,MACF;AAAA,MACA,KAAK,QAAQ;AACX,mBAAW,gBAAgB,SAAS;AAClC,gBAAM,SAAS,aAAa;AAE5B,cAAI;AACJ,kBAAQ,OAAO,MAAM;AAAA,YACnB,KAAK;AAAA,YACL,KAAK;AACH,6BAAe,OAAO;AACtB;AAAA,YACF,KAAK;AAAA,YACL,KAAK;AAAA,YACL,KAAK;AACH,6BAAe,KAAK,UAAU,OAAO,KAAK;AAC1C;AAAA,UACJ;AAEA,mBAAS,KAAK;AAAA,YACZ,MAAM;AAAA,YACN,MAAM,aAAa;AAAA,YACnB,cAAc,aAAa;AAAA,YAC3B,SAAS;AAAA,UACX,CAAC;AAAA,QACH;AACA;AAAA,MACF;AAAA,MACA,SAAS;AACP,cAAM,mBAA0B;AAChC,cAAM,IAAI,MAAM,qBAAqB,gBAAgB,EAAE;AAAA,MACzD;AAAA,IACF;AAAA,EACF;AAEA,SAAO;AACT;;;AC/IO,SAAS,oBAAoB;AAAA,EAClC;AAAA,EACA;AAAA,EACA;AACF,GAIG;AACD,SAAO;AAAA,IACL,IAAI,kBAAM;AAAA,IACV,SAAS,wBAAS;AAAA,IAClB,WAAW,WAAW,OAAO,IAAI,KAAK,UAAU,GAAI,IAAI;AAAA,EAC1D;AACF;;;ACZO,SAAS,uBACd,cAC6B;AAC7B,UAAQ,cAAc;AAAA,IACpB,KAAK;AACH,aAAO;AAAA,IACT,KAAK;AAAA,IACL,KAAK;AACH,aAAO;AAAA,IACT,KAAK;AACH,aAAO;AAAA,IACT;AACE,aAAO;AAAA,EACX;AACF;;;AChBA,gBAAkB;AA0BX,IAAM,yBAAyB,YAAE,OAAO;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,EAM7C,YAAY,YAAE,QAAQ,EAAE,SAAS;AAAA,EAEjC,oBAAoB,YAAE,OAAO,EAAE,SAAS;AAAA,EACxC,mBAAmB,YAAE,OAAO,EAAE,SAAS;AACzC,CAAC;;;ACpCD,IAAAC,yBAA+C;AAC/C,IAAAC,aAAkB;AAElB,IAAM,yBAAyB,aAAE,OAAO;AAAA,EACtC,QAAQ,aAAE,QAAQ,OAAO;AAAA,EACzB,SAAS,aAAE,OAAO;AAAA,EAClB,MAAM,aAAE,OAAO;AAAA,EACf,OAAO,aAAE,OAAO,EAAE,SAAS;AAAA,EAC3B,MAAM,aAAE,OAAO,EAAE,SAAS;AAC5B,CAAC;AAIM,IAAM,mCAA+B,uDAA+B;AAAA,EACzE,aAAa;AAAA,EACb,gBAAgB,UAAQ,KAAK;AAC/B,CAAC;;;AChBD,IAAAC,mBAIO;AAGA,SAAS,aAAa;AAA
A,EAC3B;AAAA,EACA;AACF,GAgBE;AAEA,WAAQ,+BAAO,UAAS,QAAQ;AAEhC,QAAM,eAA6C,CAAC;AAEpD,MAAI,SAAS,MAAM;AACjB,WAAO,EAAE,OAAO,QAAW,YAAY,QAAW,aAAa;AAAA,EACjE;AAEA,QAAM,eAOD,CAAC;AAEN,aAAW,QAAQ,OAAO;AACxB,QAAI,KAAK,SAAS,oBAAoB;AACpC,mBAAa,KAAK,EAAE,MAAM,oBAAoB,KAAK,CAAC;AAAA,IACtD,OAAO;AACL,mBAAa,KAAK;AAAA,QAChB,MAAM;AAAA,QACN,UAAU;AAAA,UACR,MAAM,KAAK;AAAA,UACX,aAAa,KAAK;AAAA,UAClB,YAAY,KAAK;AAAA,QACnB;AAAA,MACF,CAAC;AAAA,IACH;AAAA,EACF;AAEA,MAAI,cAAc,MAAM;AACtB,WAAO,EAAE,OAAO,cAAc,YAAY,QAAW,aAAa;AAAA,EACpE;AAEA,QAAM,OAAO,WAAW;AAExB,UAAQ,MAAM;AAAA,IACZ,KAAK;AAAA,IACL,KAAK;AACH,aAAO,EAAE,OAAO,cAAc,YAAY,MAAM,aAAa;AAAA,IAC/D,KAAK;AACH,aAAO,EAAE,OAAO,cAAc,YAAY,OAAO,aAAa;AAAA,IAIhE,KAAK;AACH,aAAO;AAAA,QACL,OAAO,aAAa;AAAA,UAClB,UAAQ,KAAK,SAAS,SAAS,WAAW;AAAA,QAC5C;AAAA,QACA,YAAY;AAAA,QACZ;AAAA,MACF;AAAA,IACF,SAAS;AACP,YAAM,mBAA0B;AAChC,YAAM,IAAI,+CAA8B;AAAA,QACtC,eAAe,qBAAqB,gBAAgB;AAAA,MACtD,CAAC;AAAA,IACH;AAAA,EACF;AACF;;;ANrDO,IAAM,2BAAN,MAA0D;AAAA,EAQ/D,YAAY,SAA6B,QAA2B;AAPpE,SAAS,uBAAuB;AAiBhC,SAAS,gBAA0C;AAAA,MACjD,mBAAmB,CAAC,gBAAgB;AAAA,IACtC;AAzDF;AA8CI,SAAK,UAAU;AACf,SAAK,SAAS;AACd,SAAK,cAAa,YAAO,eAAP,YAAqB;AAAA,EACzC;AAAA,EAEA,IAAI,WAAmB;AACrB,WAAO,KAAK,OAAO;AAAA,EACrB;AAAA,EAMA,MAAc,QAAQ;AAAA,IACpB;AAAA,IACA;AAAA,IACA;AAAA,IACA;AAAA,IACA;AAAA,IACA;AAAA,IACA;AAAA,IACA;AAAA,IACA;AAAA,IACA;AAAA,IACA;AAAA,IACA;AAAA,IACA;AAAA,EACF,GAAiD;AAzEnD;AA0EI,UAAM,WAAyC,CAAC;AAEhD,UAAM,WACH,eAAM,6CAAqB;AAAA,MAC1B,UAAU;AAAA,MACV;AAAA,MACA,QAAQ;AAAA,IACV,CAAC,MAJA,YAIM,CAAC;AAEV,QAAI,QAAQ,MAAM;AAChB,eAAS,KAAK;AAAA,QACZ,MAAM;AAAA,QACN,SAAS;AAAA,MACX,CAAC;AAAA,IACH;AAEA,QAAI,oBAAoB,MAAM;AAC5B,eAAS,KAAK;AAAA,QACZ,MAAM;AAAA,QACN,SAAS;AAAA,MACX,CAAC;AAAA,IACH;AAEA,QAAI,mBAAmB,MAAM;AAC3B,eAAS,KAAK;AAAA,QACZ,MAAM;AAAA,QACN,SAAS;AAAA,MACX,CAAC;AAAA,IACH;AAEA,QAAI,iBAAiB,MAAM;AACzB,eAAS,KAAK;AAAA,QACZ,MAAM;AAAA,QACN,SAAS;AAAA,MACX,CAAC;AAAA,IACH;AAEA,QACE,kBAAkB,QAClB,eAAe,SAAS,UACxB,eAAe,UAAU,MACzB;AACA,eAAS,KAAK;AAAA,QACZ,MAAM;AAAA,QACN,SAAS;AAAA,QACT,SAAS;AAAA,MACX,CAAC;AAAA,IACH;AAEA,UAAM,WAAW;AAAA;AAAA,MAEf,OAAO,KAAK;AAAA;AAAA,MAGZ,aAAa,QAAQ;AAAA;AAAA,MAGrB,YAAY;AAAA,MACZ;AAAA,MACA,OAAO;AAAA,MACP,aAAa;AAAA;AAAA,MAGb,kBACE,iDAAgB,UAAS,SAAS,EAAE,MAAM,cAAc,IAAI;AAAA;AAAA,MAG9D,sBAAsB,QAAQ;AAAA,MAC9B,qBAAqB,QAAQ;AAAA;AAAA,MAG7B,UAAU,6BAA6B,MAAM;AAAA,IAC/C;AAEA,UAAM;AAAA,MACJ,OAAO;AAAA,MACP,YAAY;AAAA,MACZ;AAAA,IACF,IAAI,aAAa;AAAA,MACf;AAAA,MACA;AAAA,IACF,CAAC;AAED,WAAO;AAAA,MACL,MAAM;AAAA,QACJ,GAAG;AAAA,QACH,OAAO;AAAA,QACP,aAAa;AAAA,MACf;AAAA,MACA,UAAU,CAAC,GAAG,UAAU,GAAG,YAAY;AAAA,IACzC;AAAA,EACF;AAAA,EAEA,MAAM,WACJ,SAC6D;AAC7D,UAAM,EAAE,MAAM,MAAM,SAAS,IAAI,MAAM,KAAK,QAAQ,OAAO;AAE3D,UAAM;AAAA,MACJ;AAAA,MACA,OAAO;AAAA,MACP,UAAU;AAAA,IACZ,IAAI,UAAM,sCAAc;AAAA,MACtB,KAAK,GAAG,KAAK,OAAO,OAAO;AAAA,MAC3B,aAAS,uCAAe,KAAK,OAAO,QAAQ,GAAG,QAAQ,OAAO;AAAA,MAC9D;AAAA,MACA,uBAAuB;AAAA,MACvB,+BAA2B;AAAA,QACzB;AAAA,MACF;AAAA,MACA,aAAa,QAAQ;AAAA,MACrB,OAAO,KAAK,OAAO;AAAA,IACrB,CAAC;AAED,UAAM,SAAS,SAAS,QAAQ,CAAC;AACjC,UAAM,UAAyC,CAAC;AAGhD,QACE,OAAO,QAAQ,WAAW,QAC1B,MAAM,QAAQ,OAAO,QAAQ,OAAO,GACpC;AACA,iBAAW,QAAQ,OAAO,QAAQ,SAAS;AACzC,YAAI,KAAK,SAAS,YAAY;AAC5B,gBAAM,gBAAgB,wBAAwB,KAAK,QAAQ;AAC3D,cAAI,cAAc,SAAS,GAAG;AAC5B,oBAAQ,KAAK,EAAE,MAAM,aAAa,MAAM,cAAc,CAAC;AAAA,UACzD;AAAA,QACF,WAAW,KAAK,SAAS,QAAQ;AAC/B,cAAI,KAAK,KAAK,SAAS,GAAG;AACxB,oBAAQ,KAAK,EAAE,MAAM,QAAQ,MAAM,KAAK,KAAK,CAAC;AAAA,UAChD;AAAA,QACF;AAAA,MACF;AAAA,IACF,OAAO;AAEL,YAAM,OAAO,mBAAmB,OAAO,QAAQ,OAAO;AACtD,UAAI,QAAQ,QAAQ,KAAK,SAAS,GAAG;AACnC,gBAAQ,KAAK,EAAE,MAAM,QAAQ,KAAK,CAAC;AAAA,MACrC;AAAA,IACF;AAOA,QAAI,OAAO,QAAQ,cAAc,MAAM;AACrC,iBAAW,YAAY,OAAO,QAAQ,YAAY;AAChD,gBAAQ,KAAK;
AAAA,UACX,MAAM;AAAA,UACN,YAAY,SAAS;AAAA,UACrB,UAAU,SAAS,SAAS;AAAA,UAC5B,OAAO,SAAS,SAAS;AAAA,QAC3B,CAAC;AAAA,MACH;AAAA,IACF;AAEA,WAAO;AAAA,MACL;AAAA,MACA,cAAc,uBAAuB,OAAO,aAAa;AAAA,MACzD,OAAO;AAAA,QACL,aAAa,SAAS,MAAM;AAAA,QAC5B,cAAc,SAAS,MAAM;AAAA,QAC7B,aAAa,SAAS,MAAM;AAAA,MAC9B;AAAA,MACA,SAAS,EAAE,KAAK;AAAA,MAChB,UAAU;AAAA,QACR,GAAG,oBAAoB,QAAQ;AAAA,QAC/B,SAAS;AAAA,QACT,MAAM;AAAA,MACR;AAAA,MACA;AAAA,IACF;AAAA,EACF;AAAA,EAEA,MAAM,SACJ,SAC2D;AAC3D,UAAM,EAAE,MAAM,SAAS,IAAI,MAAM,KAAK,QAAQ,OAAO;AACrD,UAAM,OAAO,EAAE,GAAG,MAAM,QAAQ,KAAK;AAErC,UAAM,EAAE,iBAAiB,OAAO,SAAS,IAAI,UAAM,sCAAc;AAAA,MAC/D,KAAK,GAAG,KAAK,OAAO,OAAO;AAAA,MAC3B,aAAS,uCAAe,KAAK,OAAO,QAAQ,GAAG,QAAQ,OAAO;AAAA,MAC9D;AAAA,MACA,uBAAuB;AAAA,MACvB,+BAA2B;AAAA,QACzB;AAAA,MACF;AAAA,MACA,aAAa,QAAQ;AAAA,MACrB,OAAO,KAAK,OAAO;AAAA,IACrB,CAAC;AAED,QAAI,eAA4C;AAChD,UAAM,QAA8B;AAAA,MAClC,aAAa;AAAA,MACb,cAAc;AAAA,MACd,aAAa;AAAA,IACf;AAEA,QAAI,eAAe;AACnB,QAAI,aAAa;AACjB,QAAI,oBAAmC;AAEvC,UAAMC,cAAa,KAAK;AAExB,WAAO;AAAA,MACL,QAAQ,SAAS;AAAA,QACf,IAAI,gBAGF;AAAA,UACA,MAAM,YAAY;AAChB,uBAAW,QAAQ,EAAE,MAAM,gBAAgB,SAAS,CAAC;AAAA,UACvD;AAAA,UAEA,UAAU,OAAO,YAAY;AAE3B,gBAAI,QAAQ,kBAAkB;AAC5B,yBAAW,QAAQ,EAAE,MAAM,OAAO,UAAU,MAAM,SAAS,CAAC;AAAA,YAC9D;AAEA,gBAAI,CAAC,MAAM,SAAS;AAClB,yBAAW,QAAQ,EAAE,MAAM,SAAS,OAAO,MAAM,MAAM,CAAC;AACxD;AAAA,YACF;AAEA,kBAAM,QAAQ,MAAM;AAEpB,gBAAI,cAAc;AAChB,6BAAe;AAEf,yBAAW,QAAQ;AAAA,gBACjB,MAAM;AAAA,gBACN,GAAG,oBAAoB,KAAK;AAAA,cAC9B,CAAC;AAAA,YACH;AAEA,gBAAI,MAAM,SAAS,MAAM;AACvB,oBAAM,cAAc,MAAM,MAAM;AAChC,oBAAM,eAAe,MAAM,MAAM;AACjC,oBAAM,cAAc,MAAM,MAAM;AAAA,YAClC;AAEA,kBAAM,SAAS,MAAM,QAAQ,CAAC;AAC9B,kBAAM,QAAQ,OAAO;AAErB,kBAAM,cAAc,mBAAmB,MAAM,OAAO;AAEpD,gBAAI,MAAM,WAAW,QAAQ,MAAM,QAAQ,MAAM,OAAO,GAAG;AACzD,yBAAW,QAAQ,MAAM,SAAS;AAChC,oBAAI,KAAK,SAAS,YAAY;AAC5B,wBAAM,iBAAiB,wBAAwB,KAAK,QAAQ;AAC5D,sBAAI,eAAe,SAAS,GAAG;AAC7B,wBAAI,qBAAqB,MAAM;AAE7B,0BAAI,YAAY;AACd,mCAAW,QAAQ,EAAE,MAAM,YAAY,IAAI,IAAI,CAAC;AAChD,qCAAa;AAAA,sBACf;AAEA,0CAAoBA,YAAW;AAC/B,iCAAW,QAAQ;AAAA,wBACjB,MAAM;AAAA,wBACN,IAAI;AAAA,sBACN,CAAC;AAAA,oBACH;AACA,+BAAW,QAAQ;AAAA,sBACjB,MAAM;AAAA,sBACN,IAAI;AAAA,sBACJ,OAAO;AAAA,oBACT,CAAC;AAAA,kBACH;AAAA,gBACF;AAAA,cACF;AAAA,YACF;AAEA,gBAAI,eAAe,QAAQ,YAAY,SAAS,GAAG;AACjD,kBAAI,CAAC,YAAY;AAEf,oBAAI,qBAAqB,MAAM;AAC7B,6BAAW,QAAQ;AAAA,oBACjB,MAAM;AAAA,oBACN,IAAI;AAAA,kBACN,CAAC;AACD,sCAAoB;AAAA,gBACtB;AACA,2BAAW,QAAQ,EAAE,MAAM,cAAc,IAAI,IAAI,CAAC;AAClD,6BAAa;AAAA,cACf;AAEA,yBAAW,QAAQ;AAAA,gBACjB,MAAM;AAAA,gBACN,IAAI;AAAA,gBACJ,OAAO;AAAA,cACT,CAAC;AAAA,YACH;AAEA,iBAAI,+BAAO,eAAc,MAAM;AAC7B,yBAAW,YAAY,MAAM,YAAY;AACvC,sBAAM,aAAa,SAAS;AAC5B,sBAAM,WAAW,SAAS,SAAS;AACnC,sBAAM,QAAQ,SAAS,SAAS;AAEhC,2BAAW,QAAQ;AAAA,kBACjB,MAAM;AAAA,kBACN,IAAI;AAAA,kBACJ;AAAA,gBACF,CAAC;AAED,2BAAW,QAAQ;AAAA,kBACjB,MAAM;AAAA,kBACN,IAAI;AAAA,kBACJ,OAAO;AAAA,gBACT,CAAC;AAED,2BAAW,QAAQ;AAAA,kBACjB,MAAM;AAAA,kBACN,IAAI;AAAA,gBACN,CAAC;AAED,2BAAW,QAAQ;AAAA,kBACjB,MAAM;AAAA,kBACN;AAAA,kBACA;AAAA,kBACA;AAAA,gBACF,CAAC;AAAA,cACH;AAAA,YACF;AAEA,gBAAI,OAAO,iBAAiB,MAAM;AAChC,6BAAe,uBAAuB,OAAO,aAAa;AAAA,YAC5D;AAAA,UACF;AAAA,UAEA,MAAM,YAAY;AAChB,gBAAI,qBAAqB,MAAM;AAC7B,yBAAW,QAAQ;AAAA,gBACjB,MAAM;AAAA,gBACN,IAAI;AAAA,cACN,CAAC;AAAA,YACH;AACA,gBAAI,YAAY;AACd,yBAAW,QAAQ,EAAE,MAAM,YAAY,IAAI,IAAI,CAAC;AAAA,YAClD;AAEA,uBAAW,QAAQ;AAAA,cACjB,MAAM;AAAA,cACN;AAAA,cACA;AAAA,YACF,CAAC;AAAA,UACH;AAAA,QACF,CAAC;AAAA,MACH;AAAA,MACA,SAAS,EAAE,KAAK;AAAA,MAChB,UAAU,EAAE,SAAS,gBAAgB;AAAA,IACvC;AAAA,EACF;AACF;AAEA,SAAS,wBACP,UACA;AACA,SAAO,SACJ,OAAO,WAAS,MAAM,SAAS,MAAM,EACrC,IAAI,WAAS,MAAM,IAAI,EACvB,KAAK,EAAE;AACZ;AAEA,SAAS,mBAAmB,SAA+C;AACzE,MAAI,OAAO,YAAY,UAAU;AAC/B,WAAO;AAAA,EACT;AAE
A,MAAI,WAAW,MAAM;AACnB,WAAO;AAAA,EACT;AAEA,QAAM,cAAwB,CAAC;AAE/B,aAAW,SAAS,SAAS;AAC3B,UAAM,EAAE,KAAK,IAAI;AAEjB,YAAQ,MAAM;AAAA,MACZ,KAAK;AACH,oBAAY,KAAK,MAAM,IAAI;AAC3B;AAAA,MACF,KAAK;AAAA,MACL,KAAK;AAAA,MACL,KAAK;AAEH;AAAA,MACF,SAAS;AACP,cAAM,mBAA0B;AAChC,cAAM,IAAI,MAAM,qBAAqB,gBAAgB,EAAE;AAAA,MACzD;AAAA,IACF;AAAA,EACF;AAEA,SAAO,YAAY,SAAS,YAAY,KAAK,EAAE,IAAI;AACrD;AAEA,IAAM,uBAAuB,aAC1B,MAAM;AAAA,EACL,aAAE,OAAO;AAAA,EACT,aAAE;AAAA,IACA,aAAE,mBAAmB,QAAQ;AAAA,MAC3B,aAAE,OAAO;AAAA,QACP,MAAM,aAAE,QAAQ,MAAM;AAAA,QACtB,MAAM,aAAE,OAAO;AAAA,MACjB,CAAC;AAAA,MACD,aAAE,OAAO;AAAA,QACP,MAAM,aAAE,QAAQ,WAAW;AAAA,QAC3B,WAAW,aAAE,MAAM;AAAA,UACjB,aAAE,OAAO;AAAA,UACT,aAAE,OAAO;AAAA,YACP,KAAK,aAAE,OAAO;AAAA,YACd,QAAQ,aAAE,OAAO,EAAE,SAAS;AAAA,UAC9B,CAAC;AAAA,QACH,CAAC;AAAA,MACH,CAAC;AAAA,MACD,aAAE,OAAO;AAAA,QACP,MAAM,aAAE,QAAQ,WAAW;AAAA,QAC3B,eAAe,aAAE,MAAM,aAAE,OAAO,CAAC;AAAA,MACnC,CAAC;AAAA,MACD,aAAE,OAAO;AAAA,QACP,MAAM,aAAE,QAAQ,UAAU;AAAA,QAC1B,UAAU,aAAE;AAAA,UACV,aAAE,OAAO;AAAA,YACP,MAAM,aAAE,QAAQ,MAAM;AAAA,YACtB,MAAM,aAAE,OAAO;AAAA,UACjB,CAAC;AAAA,QACH;AAAA,MACF,CAAC;AAAA,IACH,CAAC;AAAA,EACH;AACF,CAAC,EACA,QAAQ;AAEX,IAAM,qBAAqB,aAAE,OAAO;AAAA,EAClC,eAAe,aAAE,OAAO;AAAA,EACxB,mBAAmB,aAAE,OAAO;AAAA,EAC5B,cAAc,aAAE,OAAO;AACzB,CAAC;AAID,IAAM,4BAA4B,aAAE,OAAO;AAAA,EACzC,IAAI,aAAE,OAAO,EAAE,QAAQ;AAAA,EACvB,SAAS,aAAE,OAAO,EAAE,QAAQ;AAAA,EAC5B,OAAO,aAAE,OAAO,EAAE,QAAQ;AAAA,EAC1B,SAAS,aAAE;AAAA,IACT,aAAE,OAAO;AAAA,MACP,SAAS,aAAE,OAAO;AAAA,QAChB,MAAM,aAAE,QAAQ,WAAW;AAAA,QAC3B,SAAS;AAAA,QACT,YAAY,aACT;AAAA,UACC,aAAE,OAAO;AAAA,YACP,IAAI,aAAE,OAAO;AAAA,YACb,UAAU,aAAE,OAAO,EAAE,MAAM,aAAE,OAAO,GAAG,WAAW,aAAE,OAAO,EAAE,CAAC;AAAA,UAChE,CAAC;AAAA,QACH,EACC,QAAQ;AAAA,MACb,CAAC;AAAA,MACD,OAAO,aAAE,OAAO;AAAA,MAChB,eAAe,aAAE,OAAO,EAAE,QAAQ;AAAA,IACpC,CAAC;AAAA,EACH;AAAA,EACA,QAAQ,aAAE,QAAQ,iBAAiB;AAAA,EACnC,OAAO;AACT,CAAC;AAID,IAAM,yBAAyB,aAAE,OAAO;AAAA,EACtC,IAAI,aAAE,OAAO,EAAE,QAAQ;AAAA,EACvB,SAAS,aAAE,OAAO,EAAE,QAAQ;AAAA,EAC5B,OAAO,aAAE,OAAO,EAAE,QAAQ;AAAA,EAC1B,SAAS,aAAE;AAAA,IACT,aAAE,OAAO;AAAA,MACP,OAAO,aAAE,OAAO;AAAA,QACd,MAAM,aAAE,KAAK,CAAC,WAAW,CAAC,EAAE,SAAS;AAAA,QACrC,SAAS;AAAA,QACT,YAAY,aACT;AAAA,UACC,aAAE,OAAO;AAAA,YACP,IAAI,aAAE,OAAO;AAAA,YACb,UAAU,aAAE,OAAO,EAAE,MAAM,aAAE,OAAO,GAAG,WAAW,aAAE,OAAO,EAAE,CAAC;AAAA,UAChE,CAAC;AAAA,QACH,EACC,QAAQ;AAAA,MACb,CAAC;AAAA,MACD,eAAe,aAAE,OAAO,EAAE,QAAQ;AAAA,MAClC,OAAO,aAAE,OAAO;AAAA,IAClB,CAAC;AAAA,EACH;AAAA,EACA,OAAO,mBAAmB,QAAQ;AACpC,CAAC;;;AO7jBD,IAAAC,mBAGO;AACP,IAAAC,yBAKO;AACP,IAAAC,aAAkB;AAWX,IAAM,wBAAN,MAAgE;AAAA,EAYrE,YACE,SACA,QACA;AAdF,SAAS,uBAAuB;AAEhC,SAAS,uBAAuB;AAChC,SAAS,wBAAwB;AAY/B,SAAK,UAAU;AACf,SAAK,SAAS;AAAA,EAChB;AAAA,EAVA,IAAI,WAAmB;AACrB,WAAO,KAAK,OAAO;AAAA,EACrB;AAAA,EAUA,MAAM,QAAQ;AAAA,IACZ;AAAA,IACA;AAAA,IACA;AAAA,EACF,GAEE;AACA,QAAI,OAAO,SAAS,KAAK,sBAAsB;AAC7C,YAAM,IAAI,oDAAmC;AAAA,QAC3C,UAAU,KAAK;AAAA,QACf,SAAS,KAAK;AAAA,QACd,sBAAsB,KAAK;AAAA,QAC3B;AAAA,MACF,CAAC;AAAA,IACH;AAEA,UAAM;AAAA,MACJ;AAAA,MACA,OAAO;AAAA,MACP;AAAA,IACF,IAAI,UAAM,sCAAc;AAAA,MACtB,KAAK,GAAG,KAAK,OAAO,OAAO;AAAA,MAC3B,aAAS,uCAAe,KAAK,OAAO,QAAQ,GAAG,OAAO;AAAA,MACtD,MAAM;AAAA,QACJ,OAAO,KAAK;AAAA,QACZ,OAAO;AAAA,QACP,iBAAiB;AAAA,MACnB;AAAA,MACA,uBAAuB;AAAA,MACvB,+BAA2B;AAAA,QACzB;AAAA,MACF;AAAA,MACA;AAAA,MACA,OAAO,KAAK,OAAO;AAAA,IACrB,CAAC;AAED,WAAO;AAAA,MACL,YAAY,SAAS,KAAK,IAAI,UAAQ,KAAK,SAAS;AAAA,MACpD,OAAO,SAAS,QACZ,EAAE,QAAQ,SAAS,MAAM,cAAc,IACvC;AAAA,MACJ,UAAU,EAAE,SAAS,iBAAiB,MAAM,SAAS;AAAA,IACvD;AAAA,EACF;AACF;AAIA,IAAM,qCAAqC,aAAE,OAAO;AAAA,EAClD,MAAM,aAAE,MAAM,aAAE,OAAO,EAAE,WAAW,aAAE,MAAM,aAAE,OAAO,CAAC,EAAE,CAAC,CAAC;AAAA,EAC1D,OAAO,aAAE,OAAO,EAAE,eAAe,aAAE,OAAO,EAAE,CA
AC,EAAE,QAAQ;AACzD,CAAC;;;ARrBM,SAAS,cACd,UAAmC,CAAC,GACnB;AAzEnB;AA0EE,QAAM,WACJ,sDAAqB,QAAQ,OAAO,MAApC,YAAyC;AAE3C,QAAM,aAAa,OAAO;AAAA,IACxB,eAAe,cAAU,mCAAW;AAAA,MAClC,QAAQ,QAAQ;AAAA,MAChB,yBAAyB;AAAA,MACzB,aAAa;AAAA,IACf,CAAC,CAAC;AAAA,IACF,GAAG,QAAQ;AAAA,EACb;AAEA,QAAM,kBAAkB,CAAC,YACvB,IAAI,yBAAyB,SAAS;AAAA,IACpC,UAAU;AAAA,IACV;AAAA,IACA,SAAS;AAAA,IACT,OAAO,QAAQ;AAAA,IACf,YAAY,QAAQ;AAAA,EACtB,CAAC;AAEH,QAAM,uBAAuB,CAAC,YAC5B,IAAI,sBAAsB,SAAS;AAAA,IACjC,UAAU;AAAA,IACV;AAAA,IACA,SAAS;AAAA,IACT,OAAO,QAAQ;AAAA,EACjB,CAAC;AAEH,QAAM,WAAW,SAAU,SAA6B;AACtD,QAAI,YAAY;AACd,YAAM,IAAI;AAAA,QACR;AAAA,MACF;AAAA,IACF;AAEA,WAAO,gBAAgB,OAAO;AAAA,EAChC;AAEA,WAAS,gBAAgB;AACzB,WAAS,OAAO;AAChB,WAAS,YAAY;AACrB,WAAS,gBAAgB;AACzB,WAAS,qBAAqB;AAE9B,WAAS,aAAa,CAAC,YAAoB;AACzC,UAAM,IAAI,kCAAiB,EAAE,SAAS,WAAW,aAAa,CAAC;AAAA,EACjE;AAEA,SAAO;AACT;AAKO,IAAM,UAAU,cAAc;","names":["import_provider","import_provider_utils","import_provider_utils","import_v4","import_provider_utils","import_v4","import_provider","generateId","import_provider","import_provider_utils","import_v4"]}
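The rebuilt source map above embeds the updated TypeScript sources: the content schema gains a thinking part for the magistral reasoning models, and a new extractReasoningContent helper flattens the nested thinking chunks into a single reasoning string. A minimal TypeScript sketch of the shapes involved (the part types mirror the zod schema in the embedded sources; the sample message values are invented for illustration):

// Shapes mirroring the mistralContentSchema 'thinking' variant in the sources above.
type ThinkingChunk = { type: 'text'; text: string };
type MistralContentPart =
  | { type: 'text'; text: string }
  | { type: 'thinking'; thinking: ThinkingChunk[] };

// Same logic as the extractReasoningContent helper added in this release:
// keep the text chunks of a thinking block and join them.
function extractReasoningContent(thinking: ThinkingChunk[]): string {
  return thinking
    .filter(chunk => chunk.type === 'text')
    .map(chunk => chunk.text)
    .join('');
}

// Hypothetical assistant content returned by a reasoning model:
const content: MistralContentPart[] = [
  { type: 'thinking', thinking: [{ type: 'text', text: 'Comparing both options first.' }] },
  { type: 'text', text: 'Option B is cheaper.' },
];

for (const part of content) {
  if (part.type === 'thinking') {
    console.log('reasoning:', extractReasoningContent(part.thinking));
  } else {
    console.log('text:', part.text);
  }
}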
package/dist/index.mjs
CHANGED
@@ -12,6 +12,7 @@ import {
 combineHeaders,
 createEventSourceResponseHandler,
 createJsonResponseHandler,
+generateId,
 parseProviderOptions,
 postJsonToApi
 } from "@ai-sdk/provider-utils";
@@ -83,6 +84,15 @@ function convertToMistralChatMessages(prompt) {
 });
 break;
 }
+case "reasoning": {
+text += part.text;
+break;
+}
+default: {
+throw new Error(
+`Unsupported content type in assistant message: ${part.type}`
+);
+}
 }
 }
 messages.push({
@@ -244,8 +254,10 @@ var MistralChatLanguageModel = class {
 this.supportedUrls = {
 "application/pdf": [/^https:\/\/.*$/]
 };
+var _a;
 this.modelId = modelId;
 this.config = config;
+this.generateId = (_a = config.generateId) != null ? _a : generateId;
 }
 get provider() {
 return this.config.provider;
@@ -357,13 +369,24 @@ var MistralChatLanguageModel = class {
 });
 const choice = response.choices[0];
 const content = [];
-
-
-
-
-
-
-
+if (choice.message.content != null && Array.isArray(choice.message.content)) {
+for (const part of choice.message.content) {
+if (part.type === "thinking") {
+const reasoningText = extractReasoningContent(part.thinking);
+if (reasoningText.length > 0) {
+content.push({ type: "reasoning", text: reasoningText });
+}
+} else if (part.type === "text") {
+if (part.text.length > 0) {
+content.push({ type: "text", text: part.text });
+}
+}
+}
+} else {
+const text = extractTextContent(choice.message.content);
+if (text != null && text.length > 0) {
+content.push({ type: "text", text });
+}
 }
 if (choice.message.tool_calls != null) {
 for (const toolCall of choice.message.tool_calls) {
@@ -414,6 +437,8 @@ var MistralChatLanguageModel = class {
 };
 let isFirstChunk = true;
 let activeText = false;
+let activeReasoningId = null;
+const generateId2 = this.generateId;
 return {
 stream: response.pipeThrough(
 new TransformStream({
@@ -444,8 +469,40 @@ var MistralChatLanguageModel = class {
 const choice = value.choices[0];
 const delta = choice.delta;
 const textContent = extractTextContent(delta.content);
+if (delta.content != null && Array.isArray(delta.content)) {
+for (const part of delta.content) {
+if (part.type === "thinking") {
+const reasoningDelta = extractReasoningContent(part.thinking);
+if (reasoningDelta.length > 0) {
+if (activeReasoningId == null) {
+if (activeText) {
+controller.enqueue({ type: "text-end", id: "0" });
+activeText = false;
+}
+activeReasoningId = generateId2();
+controller.enqueue({
+type: "reasoning-start",
+id: activeReasoningId
+});
+}
+controller.enqueue({
+type: "reasoning-delta",
+id: activeReasoningId,
+delta: reasoningDelta
+});
+}
+}
+}
+}
 if (textContent != null && textContent.length > 0) {
 if (!activeText) {
+if (activeReasoningId != null) {
+controller.enqueue({
+type: "reasoning-end",
+id: activeReasoningId
+});
+activeReasoningId = null;
+}
 controller.enqueue({ type: "text-start", id: "0" });
 activeText = true;
 }
@@ -487,6 +544,12 @@ var MistralChatLanguageModel = class {
 }
 },
 flush(controller) {
+if (activeReasoningId != null) {
+controller.enqueue({
+type: "reasoning-end",
+id: activeReasoningId
+});
+}
 if (activeText) {
 controller.enqueue({ type: "text-end", id: "0" });
 }
@@ -503,6 +566,9 @@ var MistralChatLanguageModel = class {
 };
 }
 };
+function extractReasoningContent(thinking) {
+return thinking.filter((chunk) => chunk.type === "text").map((chunk) => chunk.text).join("");
+}
 function extractTextContent(content) {
 if (typeof content === "string") {
 return content;
@@ -517,6 +583,7 @@ function extractTextContent(content) {
 case "text":
 textContent.push(chunk.text);
 break;
+case "thinking":
 case "image_url":
 case "reference":
 break;
@@ -549,6 +616,15 @@ var mistralContentSchema = z3.union([
 z3.object({
 type: z3.literal("reference"),
 reference_ids: z3.array(z3.number())
+}),
+z3.object({
+type: z3.literal("thinking"),
+thinking: z3.array(
+z3.object({
+type: z3.literal("text"),
+text: z3.string()
+})
+)
 })
 ])
 )
@@ -685,7 +761,8 @@ function createMistral(options = {}) {
 provider: "mistral.chat",
 baseURL,
 headers: getHeaders,
-fetch: options.fetch
+fetch: options.fetch,
+generateId: options.generateId
 });
 const createEmbeddingModel = (modelId) => new MistralEmbeddingModel(modelId, {
 provider: "mistral.embedding",
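Taken together, the index.mjs changes thread a generateId option from createMistral through to MistralChatLanguageModel (falling back to generateId from @ai-sdk/provider-utils) and use it to tag the new reasoning-start / reasoning-delta / reasoning-end stream parts emitted around thinking content. A minimal sketch of wiring the new option, assuming a hypothetical deterministic ID generator for tests (the counter and model choice below are illustrative, not part of the package):

import { createMistral } from '@ai-sdk/mistral';

// Hypothetical deterministic ID generator, e.g. for snapshot tests.
// When omitted, the chat model falls back to generateId from
// @ai-sdk/provider-utils, as shown in the constructor change above.
let counter = 0;
const testGenerateId = () => `reasoning-${counter++}`;

// apiKey defaults to the MISTRAL_API_KEY environment variable.
const mistralProvider = createMistral({ generateId: testGenerateId });

// One of the reasoning-capable model ids listed in mistral-chat-options:
const model = mistralProvider('magistral-medium-2507');

// During doStream, thinking content now surfaces as reasoning-start /
// reasoning-delta / reasoning-end parts whose id comes from the
// generateId function configured above; text parts keep the fixed id "0".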
package/dist/index.mjs.map
CHANGED
@@ -1 +1 @@
-
{"version":3,"sources":["../src/mistral-provider.ts","../src/mistral-chat-language-model.ts","../src/convert-to-mistral-chat-messages.ts","../src/get-response-metadata.ts","../src/map-mistral-finish-reason.ts","../src/mistral-chat-options.ts","../src/mistral-error.ts","../src/mistral-prepare-tools.ts","../src/mistral-embedding-model.ts"],"sourcesContent":["import {\n EmbeddingModelV2,\n LanguageModelV2,\n NoSuchModelError,\n ProviderV2,\n} from '@ai-sdk/provider';\nimport {\n FetchFunction,\n loadApiKey,\n withoutTrailingSlash,\n} from '@ai-sdk/provider-utils';\nimport { MistralChatLanguageModel } from './mistral-chat-language-model';\nimport { MistralChatModelId } from './mistral-chat-options';\nimport { MistralEmbeddingModel } from './mistral-embedding-model';\nimport { MistralEmbeddingModelId } from './mistral-embedding-options';\n\nexport interface MistralProvider extends ProviderV2 {\n (modelId: MistralChatModelId): LanguageModelV2;\n\n /**\nCreates a model for text generation.\n*/\n languageModel(modelId: MistralChatModelId): LanguageModelV2;\n\n /**\nCreates a model for text generation.\n*/\n chat(modelId: MistralChatModelId): LanguageModelV2;\n\n /**\n@deprecated Use `textEmbedding()` instead.\n */\n embedding(modelId: MistralEmbeddingModelId): EmbeddingModelV2<string>;\n\n textEmbedding(modelId: MistralEmbeddingModelId): EmbeddingModelV2<string>;\n\n textEmbeddingModel: (\n modelId: MistralEmbeddingModelId,\n ) => EmbeddingModelV2<string>;\n}\n\nexport interface MistralProviderSettings {\n /**\nUse a different URL prefix for API calls, e.g. to use proxy servers.\nThe default prefix is `https://api.mistral.ai/v1`.\n */\n baseURL?: string;\n\n /**\nAPI key that is being send using the `Authorization` header.\nIt defaults to the `MISTRAL_API_KEY` environment variable.\n */\n apiKey?: string;\n\n /**\nCustom headers to include in the requests.\n */\n headers?: Record<string, string>;\n\n /**\nCustom fetch implementation. You can use it as a middleware to intercept requests,\nor to provide a custom fetch implementation for e.g. testing.\n */\n fetch?: FetchFunction;\n}\n\n/**\nCreate a Mistral AI provider instance.\n */\nexport function createMistral(\n options: MistralProviderSettings = {},\n): MistralProvider {\n const baseURL =\n withoutTrailingSlash(options.baseURL) ?? 
'https://api.mistral.ai/v1';\n\n const getHeaders = () => ({\n Authorization: `Bearer ${loadApiKey({\n apiKey: options.apiKey,\n environmentVariableName: 'MISTRAL_API_KEY',\n description: 'Mistral',\n })}`,\n ...options.headers,\n });\n\n const createChatModel = (modelId: MistralChatModelId) =>\n new MistralChatLanguageModel(modelId, {\n provider: 'mistral.chat',\n baseURL,\n headers: getHeaders,\n fetch: options.fetch,\n });\n\n const createEmbeddingModel = (modelId: MistralEmbeddingModelId) =>\n new MistralEmbeddingModel(modelId, {\n provider: 'mistral.embedding',\n baseURL,\n headers: getHeaders,\n fetch: options.fetch,\n });\n\n const provider = function (modelId: MistralChatModelId) {\n if (new.target) {\n throw new Error(\n 'The Mistral model function cannot be called with the new keyword.',\n );\n }\n\n return createChatModel(modelId);\n };\n\n provider.languageModel = createChatModel;\n provider.chat = createChatModel;\n provider.embedding = createEmbeddingModel;\n provider.textEmbedding = createEmbeddingModel;\n provider.textEmbeddingModel = createEmbeddingModel;\n\n provider.imageModel = (modelId: string) => {\n throw new NoSuchModelError({ modelId, modelType: 'imageModel' });\n };\n\n return provider;\n}\n\n/**\nDefault Mistral provider instance.\n */\nexport const mistral = createMistral();\n","import {\n LanguageModelV2,\n LanguageModelV2CallWarning,\n LanguageModelV2Content,\n LanguageModelV2FinishReason,\n LanguageModelV2StreamPart,\n LanguageModelV2Usage,\n} from '@ai-sdk/provider';\nimport {\n combineHeaders,\n createEventSourceResponseHandler,\n createJsonResponseHandler,\n FetchFunction,\n parseProviderOptions,\n ParseResult,\n postJsonToApi,\n} from '@ai-sdk/provider-utils';\nimport { z } from 'zod/v4';\nimport { convertToMistralChatMessages } from './convert-to-mistral-chat-messages';\nimport { getResponseMetadata } from './get-response-metadata';\nimport { mapMistralFinishReason } from './map-mistral-finish-reason';\nimport {\n MistralChatModelId,\n mistralProviderOptions,\n} from './mistral-chat-options';\nimport { mistralFailedResponseHandler } from './mistral-error';\nimport { prepareTools } from './mistral-prepare-tools';\n\ntype MistralChatConfig = {\n provider: string;\n baseURL: string;\n headers: () => Record<string, string | undefined>;\n fetch?: FetchFunction;\n};\n\nexport class MistralChatLanguageModel implements LanguageModelV2 {\n readonly specificationVersion = 'v2';\n\n readonly modelId: MistralChatModelId;\n\n private readonly config: MistralChatConfig;\n\n constructor(modelId: MistralChatModelId, config: MistralChatConfig) {\n this.modelId = modelId;\n this.config = config;\n }\n\n get provider(): string {\n return this.config.provider;\n }\n\n readonly supportedUrls: Record<string, RegExp[]> = {\n 'application/pdf': [/^https:\\/\\/.*$/],\n };\n\n private async getArgs({\n prompt,\n maxOutputTokens,\n temperature,\n topP,\n topK,\n frequencyPenalty,\n presencePenalty,\n stopSequences,\n responseFormat,\n seed,\n providerOptions,\n tools,\n toolChoice,\n }: Parameters<LanguageModelV2['doGenerate']>[0]) {\n const warnings: LanguageModelV2CallWarning[] = [];\n\n const options =\n (await parseProviderOptions({\n provider: 'mistral',\n providerOptions,\n schema: mistralProviderOptions,\n })) ?? 
{};\n\n if (topK != null) {\n warnings.push({\n type: 'unsupported-setting',\n setting: 'topK',\n });\n }\n\n if (frequencyPenalty != null) {\n warnings.push({\n type: 'unsupported-setting',\n setting: 'frequencyPenalty',\n });\n }\n\n if (presencePenalty != null) {\n warnings.push({\n type: 'unsupported-setting',\n setting: 'presencePenalty',\n });\n }\n\n if (stopSequences != null) {\n warnings.push({\n type: 'unsupported-setting',\n setting: 'stopSequences',\n });\n }\n\n if (\n responseFormat != null &&\n responseFormat.type === 'json' &&\n responseFormat.schema != null\n ) {\n warnings.push({\n type: 'unsupported-setting',\n setting: 'responseFormat',\n details: 'JSON response format schema is not supported',\n });\n }\n\n const baseArgs = {\n // model id:\n model: this.modelId,\n\n // model specific settings:\n safe_prompt: options.safePrompt,\n\n // standardized settings:\n max_tokens: maxOutputTokens,\n temperature,\n top_p: topP,\n random_seed: seed,\n\n // response format:\n response_format:\n responseFormat?.type === 'json' ? { type: 'json_object' } : undefined,\n\n // mistral-specific provider options:\n document_image_limit: options.documentImageLimit,\n document_page_limit: options.documentPageLimit,\n\n // messages:\n messages: convertToMistralChatMessages(prompt),\n };\n\n const {\n tools: mistralTools,\n toolChoice: mistralToolChoice,\n toolWarnings,\n } = prepareTools({\n tools,\n toolChoice,\n });\n\n return {\n args: {\n ...baseArgs,\n tools: mistralTools,\n tool_choice: mistralToolChoice,\n },\n warnings: [...warnings, ...toolWarnings],\n };\n }\n\n async doGenerate(\n options: Parameters<LanguageModelV2['doGenerate']>[0],\n ): Promise<Awaited<ReturnType<LanguageModelV2['doGenerate']>>> {\n const { args: body, warnings } = await this.getArgs(options);\n\n const {\n responseHeaders,\n value: response,\n rawValue: rawResponse,\n } = await postJsonToApi({\n url: `${this.config.baseURL}/chat/completions`,\n headers: combineHeaders(this.config.headers(), options.headers),\n body,\n failedResponseHandler: mistralFailedResponseHandler,\n successfulResponseHandler: createJsonResponseHandler(\n mistralChatResponseSchema,\n ),\n abortSignal: options.abortSignal,\n fetch: this.config.fetch,\n });\n\n const choice = response.choices[0];\n const content: Array<LanguageModelV2Content> = [];\n\n // text content:\n let text = extractTextContent(choice.message.content);\n\n // when there is a trailing assistant message, mistral will send the\n // content of that message again. we skip this repeated content to\n // avoid duplication, e.g. 
in continuation mode.\n const lastMessage = body.messages[body.messages.length - 1];\n if (\n lastMessage.role === 'assistant' &&\n text?.startsWith(lastMessage.content)\n ) {\n text = text.slice(lastMessage.content.length);\n }\n\n if (text != null && text.length > 0) {\n content.push({ type: 'text', text });\n }\n\n // tool calls:\n if (choice.message.tool_calls != null) {\n for (const toolCall of choice.message.tool_calls) {\n content.push({\n type: 'tool-call',\n toolCallId: toolCall.id,\n toolName: toolCall.function.name,\n input: toolCall.function.arguments!,\n });\n }\n }\n\n return {\n content,\n finishReason: mapMistralFinishReason(choice.finish_reason),\n usage: {\n inputTokens: response.usage.prompt_tokens,\n outputTokens: response.usage.completion_tokens,\n totalTokens: response.usage.total_tokens,\n },\n request: { body },\n response: {\n ...getResponseMetadata(response),\n headers: responseHeaders,\n body: rawResponse,\n },\n warnings,\n };\n }\n\n async doStream(\n options: Parameters<LanguageModelV2['doStream']>[0],\n ): Promise<Awaited<ReturnType<LanguageModelV2['doStream']>>> {\n const { args, warnings } = await this.getArgs(options);\n const body = { ...args, stream: true };\n\n const { responseHeaders, value: response } = await postJsonToApi({\n url: `${this.config.baseURL}/chat/completions`,\n headers: combineHeaders(this.config.headers(), options.headers),\n body,\n failedResponseHandler: mistralFailedResponseHandler,\n successfulResponseHandler: createEventSourceResponseHandler(\n mistralChatChunkSchema,\n ),\n abortSignal: options.abortSignal,\n fetch: this.config.fetch,\n });\n\n let finishReason: LanguageModelV2FinishReason = 'unknown';\n const usage: LanguageModelV2Usage = {\n inputTokens: undefined,\n outputTokens: undefined,\n totalTokens: undefined,\n };\n\n let isFirstChunk = true;\n let activeText = false;\n\n return {\n stream: response.pipeThrough(\n new TransformStream<\n ParseResult<z.infer<typeof mistralChatChunkSchema>>,\n LanguageModelV2StreamPart\n >({\n start(controller) {\n controller.enqueue({ type: 'stream-start', warnings });\n },\n\n transform(chunk, controller) {\n // Emit raw chunk if requested (before anything else)\n if (options.includeRawChunks) {\n controller.enqueue({ type: 'raw', rawValue: chunk.rawValue });\n }\n\n if (!chunk.success) {\n controller.enqueue({ type: 'error', error: chunk.error });\n return;\n }\n\n const value = chunk.value;\n\n if (isFirstChunk) {\n isFirstChunk = false;\n\n controller.enqueue({\n type: 'response-metadata',\n ...getResponseMetadata(value),\n });\n }\n\n if (value.usage != null) {\n usage.inputTokens = value.usage.prompt_tokens;\n usage.outputTokens = value.usage.completion_tokens;\n usage.totalTokens = value.usage.total_tokens;\n }\n\n const choice = value.choices[0];\n const delta = choice.delta;\n\n const textContent = extractTextContent(delta.content);\n\n if (textContent != null && textContent.length > 0) {\n if (!activeText) {\n controller.enqueue({ type: 'text-start', id: '0' });\n activeText = true;\n }\n\n controller.enqueue({\n type: 'text-delta',\n id: '0',\n delta: textContent,\n });\n }\n\n if (delta?.tool_calls != null) {\n for (const toolCall of delta.tool_calls) {\n const toolCallId = toolCall.id;\n const toolName = toolCall.function.name;\n const input = toolCall.function.arguments;\n\n controller.enqueue({\n type: 'tool-input-start',\n id: toolCallId,\n toolName,\n });\n\n controller.enqueue({\n type: 'tool-input-delta',\n id: toolCallId,\n delta: input,\n });\n\n 
controller.enqueue({\n type: 'tool-input-end',\n id: toolCallId,\n });\n\n controller.enqueue({\n type: 'tool-call',\n toolCallId,\n toolName,\n input,\n });\n }\n }\n\n if (choice.finish_reason != null) {\n finishReason = mapMistralFinishReason(choice.finish_reason);\n }\n },\n\n flush(controller) {\n if (activeText) {\n controller.enqueue({ type: 'text-end', id: '0' });\n }\n\n controller.enqueue({\n type: 'finish',\n finishReason,\n usage,\n });\n },\n }),\n ),\n request: { body },\n response: { headers: responseHeaders },\n };\n }\n}\n\nfunction extractTextContent(content: z.infer<typeof mistralContentSchema>) {\n if (typeof content === 'string') {\n return content;\n }\n\n if (content == null) {\n return undefined;\n }\n\n const textContent: string[] = [];\n\n for (const chunk of content) {\n const { type } = chunk;\n\n switch (type) {\n case 'text':\n textContent.push(chunk.text);\n break;\n case 'image_url':\n case 'reference':\n // image content or reference content is currently ignored.\n break;\n default: {\n const _exhaustiveCheck: never = type;\n throw new Error(`Unsupported type: ${_exhaustiveCheck}`);\n }\n }\n }\n\n return textContent.length ? textContent.join('') : undefined;\n}\n\nconst mistralContentSchema = z\n .union([\n z.string(),\n z.array(\n z.discriminatedUnion('type', [\n z.object({\n type: z.literal('text'),\n text: z.string(),\n }),\n z.object({\n type: z.literal('image_url'),\n image_url: z.union([\n z.string(),\n z.object({\n url: z.string(),\n detail: z.string().nullable(),\n }),\n ]),\n }),\n z.object({\n type: z.literal('reference'),\n reference_ids: z.array(z.number()),\n }),\n ]),\n ),\n ])\n .nullish();\n\nconst mistralUsageSchema = z.object({\n prompt_tokens: z.number(),\n completion_tokens: z.number(),\n total_tokens: z.number(),\n});\n\n// limited version of the schema, focussed on what is needed for the implementation\n// this approach limits breakages when the API changes and increases efficiency\nconst mistralChatResponseSchema = z.object({\n id: z.string().nullish(),\n created: z.number().nullish(),\n model: z.string().nullish(),\n choices: z.array(\n z.object({\n message: z.object({\n role: z.literal('assistant'),\n content: mistralContentSchema,\n tool_calls: z\n .array(\n z.object({\n id: z.string(),\n function: z.object({ name: z.string(), arguments: z.string() }),\n }),\n )\n .nullish(),\n }),\n index: z.number(),\n finish_reason: z.string().nullish(),\n }),\n ),\n object: z.literal('chat.completion'),\n usage: mistralUsageSchema,\n});\n\n// limited version of the schema, focussed on what is needed for the implementation\n// this approach limits breakages when the API changes and increases efficiency\nconst mistralChatChunkSchema = z.object({\n id: z.string().nullish(),\n created: z.number().nullish(),\n model: z.string().nullish(),\n choices: z.array(\n z.object({\n delta: z.object({\n role: z.enum(['assistant']).optional(),\n content: mistralContentSchema,\n tool_calls: z\n .array(\n z.object({\n id: z.string(),\n function: z.object({ name: z.string(), arguments: z.string() }),\n }),\n )\n .nullish(),\n }),\n finish_reason: z.string().nullish(),\n index: z.number(),\n }),\n ),\n usage: mistralUsageSchema.nullish(),\n});\n","import {\n LanguageModelV2Prompt,\n UnsupportedFunctionalityError,\n} from '@ai-sdk/provider';\nimport { MistralPrompt } from './mistral-chat-prompt';\nimport { convertToBase64 } from '@ai-sdk/provider-utils';\n\nexport function convertToMistralChatMessages(\n prompt: LanguageModelV2Prompt,\n): MistralPrompt {\n const 
messages: MistralPrompt = [];\n\n for (let i = 0; i < prompt.length; i++) {\n const { role, content } = prompt[i];\n const isLastMessage = i === prompt.length - 1;\n\n switch (role) {\n case 'system': {\n messages.push({ role: 'system', content });\n break;\n }\n\n case 'user': {\n messages.push({\n role: 'user',\n content: content.map(part => {\n switch (part.type) {\n case 'text': {\n return { type: 'text', text: part.text };\n }\n\n case 'file': {\n if (part.mediaType.startsWith('image/')) {\n const mediaType =\n part.mediaType === 'image/*'\n ? 'image/jpeg'\n : part.mediaType;\n\n return {\n type: 'image_url',\n image_url:\n part.data instanceof URL\n ? part.data.toString()\n : `data:${mediaType};base64,${convertToBase64(part.data)}`,\n };\n } else if (part.mediaType === 'application/pdf') {\n return {\n type: 'document_url',\n document_url: part.data.toString(),\n };\n } else {\n throw new UnsupportedFunctionalityError({\n functionality:\n 'Only images and PDF file parts are supported',\n });\n }\n }\n }\n }),\n });\n break;\n }\n\n case 'assistant': {\n let text = '';\n const toolCalls: Array<{\n id: string;\n type: 'function';\n function: { name: string; arguments: string };\n }> = [];\n\n for (const part of content) {\n switch (part.type) {\n case 'text': {\n text += part.text;\n break;\n }\n case 'tool-call': {\n toolCalls.push({\n id: part.toolCallId,\n type: 'function',\n function: {\n name: part.toolName,\n arguments: JSON.stringify(part.input),\n },\n });\n break;\n }\n }\n }\n\n messages.push({\n role: 'assistant',\n content: text,\n prefix: isLastMessage ? true : undefined,\n tool_calls: toolCalls.length > 0 ? toolCalls : undefined,\n });\n\n break;\n }\n case 'tool': {\n for (const toolResponse of content) {\n const output = toolResponse.output;\n\n let contentValue: string;\n switch (output.type) {\n case 'text':\n case 'error-text':\n contentValue = output.value;\n break;\n case 'content':\n case 'json':\n case 'error-json':\n contentValue = JSON.stringify(output.value);\n break;\n }\n\n messages.push({\n role: 'tool',\n name: toolResponse.toolName,\n tool_call_id: toolResponse.toolCallId,\n content: contentValue,\n });\n }\n break;\n }\n default: {\n const _exhaustiveCheck: never = role;\n throw new Error(`Unsupported role: ${_exhaustiveCheck}`);\n }\n }\n }\n\n return messages;\n}\n","export function getResponseMetadata({\n id,\n model,\n created,\n}: {\n id?: string | undefined | null;\n created?: number | undefined | null;\n model?: string | undefined | null;\n}) {\n return {\n id: id ?? undefined,\n modelId: model ?? undefined,\n timestamp: created != null ? 
new Date(created * 1000) : undefined,\n };\n}\n","import { LanguageModelV2FinishReason } from '@ai-sdk/provider';\n\nexport function mapMistralFinishReason(\n finishReason: string | null | undefined,\n): LanguageModelV2FinishReason {\n switch (finishReason) {\n case 'stop':\n return 'stop';\n case 'length':\n case 'model_length':\n return 'length';\n case 'tool_calls':\n return 'tool-calls';\n default:\n return 'unknown';\n }\n}\n","import { z } from 'zod/v4';\n\n// https://docs.mistral.ai/getting-started/models/models_overview/\nexport type MistralChatModelId =\n // premier\n | 'ministral-3b-latest'\n | 'ministral-8b-latest'\n | 'mistral-large-latest'\n | 'mistral-medium-latest'\n | 'mistral-medium-2505'\n | 'mistral-small-latest'\n | 'pixtral-large-latest'\n // reasoning models\n | 'magistral-small-2506'\n | 'magistral-medium-2506'\n // free\n | 'pixtral-12b-2409'\n // legacy\n | 'open-mistral-7b'\n | 'open-mixtral-8x7b'\n | 'open-mixtral-8x22b'\n | (string & {});\n\nexport const mistralProviderOptions = z.object({\n /**\nWhether to inject a safety prompt before all conversations.\n\nDefaults to `false`.\n */\n safePrompt: z.boolean().optional(),\n\n documentImageLimit: z.number().optional(),\n documentPageLimit: z.number().optional(),\n});\n\nexport type MistralProviderOptions = z.infer<typeof mistralProviderOptions>;\n","import { createJsonErrorResponseHandler } from '@ai-sdk/provider-utils';\nimport { z } from 'zod/v4';\n\nconst mistralErrorDataSchema = z.object({\n object: z.literal('error'),\n message: z.string(),\n type: z.string(),\n param: z.string().nullable(),\n code: z.string().nullable(),\n});\n\nexport type MistralErrorData = z.infer<typeof mistralErrorDataSchema>;\n\nexport const mistralFailedResponseHandler = createJsonErrorResponseHandler({\n errorSchema: mistralErrorDataSchema,\n errorToMessage: data => data.message,\n});\n","import {\n LanguageModelV2CallOptions,\n LanguageModelV2CallWarning,\n UnsupportedFunctionalityError,\n} from '@ai-sdk/provider';\nimport { MistralToolChoice } from './mistral-chat-prompt';\n\nexport function prepareTools({\n tools,\n toolChoice,\n}: {\n tools: LanguageModelV2CallOptions['tools'];\n toolChoice?: LanguageModelV2CallOptions['toolChoice'];\n}): {\n tools:\n | Array<{\n type: 'function';\n function: {\n name: string;\n description: string | undefined;\n parameters: unknown;\n };\n }>\n | undefined;\n toolChoice: MistralToolChoice | undefined;\n toolWarnings: LanguageModelV2CallWarning[];\n} {\n // when the tools array is empty, change it to undefined to prevent errors:\n tools = tools?.length ? 
tools : undefined;\n\n const toolWarnings: LanguageModelV2CallWarning[] = [];\n\n if (tools == null) {\n return { tools: undefined, toolChoice: undefined, toolWarnings };\n }\n\n const mistralTools: Array<{\n type: 'function';\n function: {\n name: string;\n description: string | undefined;\n parameters: unknown;\n };\n }> = [];\n\n for (const tool of tools) {\n if (tool.type === 'provider-defined') {\n toolWarnings.push({ type: 'unsupported-tool', tool });\n } else {\n mistralTools.push({\n type: 'function',\n function: {\n name: tool.name,\n description: tool.description,\n parameters: tool.inputSchema,\n },\n });\n }\n }\n\n if (toolChoice == null) {\n return { tools: mistralTools, toolChoice: undefined, toolWarnings };\n }\n\n const type = toolChoice.type;\n\n switch (type) {\n case 'auto':\n case 'none':\n return { tools: mistralTools, toolChoice: type, toolWarnings };\n case 'required':\n return { tools: mistralTools, toolChoice: 'any', toolWarnings };\n\n // mistral does not support tool mode directly,\n // so we filter the tools and force the tool choice through 'any'\n case 'tool':\n return {\n tools: mistralTools.filter(\n tool => tool.function.name === toolChoice.toolName,\n ),\n toolChoice: 'any',\n toolWarnings,\n };\n default: {\n const _exhaustiveCheck: never = type;\n throw new UnsupportedFunctionalityError({\n functionality: `tool choice type: ${_exhaustiveCheck}`,\n });\n }\n }\n}\n","import {\n EmbeddingModelV2,\n TooManyEmbeddingValuesForCallError,\n} from '@ai-sdk/provider';\nimport {\n combineHeaders,\n createJsonResponseHandler,\n FetchFunction,\n postJsonToApi,\n} from '@ai-sdk/provider-utils';\nimport { z } from 'zod/v4';\nimport { MistralEmbeddingModelId } from './mistral-embedding-options';\nimport { mistralFailedResponseHandler } from './mistral-error';\n\ntype MistralEmbeddingConfig = {\n provider: string;\n baseURL: string;\n headers: () => Record<string, string | undefined>;\n fetch?: FetchFunction;\n};\n\nexport class MistralEmbeddingModel implements EmbeddingModelV2<string> {\n readonly specificationVersion = 'v2';\n readonly modelId: MistralEmbeddingModelId;\n readonly maxEmbeddingsPerCall = 32;\n readonly supportsParallelCalls = false;\n\n private readonly config: MistralEmbeddingConfig;\n\n get provider(): string {\n return this.config.provider;\n }\n\n constructor(\n modelId: MistralEmbeddingModelId,\n config: MistralEmbeddingConfig,\n ) {\n this.modelId = modelId;\n this.config = config;\n }\n\n async doEmbed({\n values,\n abortSignal,\n headers,\n }: Parameters<EmbeddingModelV2<string>['doEmbed']>[0]): Promise<\n Awaited<ReturnType<EmbeddingModelV2<string>['doEmbed']>>\n > {\n if (values.length > this.maxEmbeddingsPerCall) {\n throw new TooManyEmbeddingValuesForCallError({\n provider: this.provider,\n modelId: this.modelId,\n maxEmbeddingsPerCall: this.maxEmbeddingsPerCall,\n values,\n });\n }\n\n const {\n responseHeaders,\n value: response,\n rawValue,\n } = await postJsonToApi({\n url: `${this.config.baseURL}/embeddings`,\n headers: combineHeaders(this.config.headers(), headers),\n body: {\n model: this.modelId,\n input: values,\n encoding_format: 'float',\n },\n failedResponseHandler: mistralFailedResponseHandler,\n successfulResponseHandler: createJsonResponseHandler(\n MistralTextEmbeddingResponseSchema,\n ),\n abortSignal,\n fetch: this.config.fetch,\n });\n\n return {\n embeddings: response.data.map(item => item.embedding),\n usage: response.usage\n ? 
{ tokens: response.usage.prompt_tokens }\n : undefined,\n response: { headers: responseHeaders, body: rawValue },\n };\n }\n}\n\n// minimal version of the schema, focussed on what is needed for the implementation\n// this approach limits breakages when the API changes and increases efficiency\nconst MistralTextEmbeddingResponseSchema = z.object({\n data: z.array(z.object({ embedding: z.array(z.number()) })),\n usage: z.object({ prompt_tokens: z.number() }).nullish(),\n});\n"],"mappings":";AAAA;AAAA,EAGE;AAAA,OAEK;AACP;AAAA,EAEE;AAAA,EACA;AAAA,OACK;;;ACFP;AAAA,EACE;AAAA,EACA;AAAA,EACA;AAAA,EAEA;AAAA,EAEA;AAAA,OACK;AACP,SAAS,KAAAA,UAAS;;;ACjBlB;AAAA,EAEE;AAAA,OACK;AAEP,SAAS,uBAAuB;AAEzB,SAAS,6BACd,QACe;AACf,QAAM,WAA0B,CAAC;AAEjC,WAAS,IAAI,GAAG,IAAI,OAAO,QAAQ,KAAK;AACtC,UAAM,EAAE,MAAM,QAAQ,IAAI,OAAO,CAAC;AAClC,UAAM,gBAAgB,MAAM,OAAO,SAAS;AAE5C,YAAQ,MAAM;AAAA,MACZ,KAAK,UAAU;AACb,iBAAS,KAAK,EAAE,MAAM,UAAU,QAAQ,CAAC;AACzC;AAAA,MACF;AAAA,MAEA,KAAK,QAAQ;AACX,iBAAS,KAAK;AAAA,UACZ,MAAM;AAAA,UACN,SAAS,QAAQ,IAAI,UAAQ;AAC3B,oBAAQ,KAAK,MAAM;AAAA,cACjB,KAAK,QAAQ;AACX,uBAAO,EAAE,MAAM,QAAQ,MAAM,KAAK,KAAK;AAAA,cACzC;AAAA,cAEA,KAAK,QAAQ;AACX,oBAAI,KAAK,UAAU,WAAW,QAAQ,GAAG;AACvC,wBAAM,YACJ,KAAK,cAAc,YACf,eACA,KAAK;AAEX,yBAAO;AAAA,oBACL,MAAM;AAAA,oBACN,WACE,KAAK,gBAAgB,MACjB,KAAK,KAAK,SAAS,IACnB,QAAQ,SAAS,WAAW,gBAAgB,KAAK,IAAI,CAAC;AAAA,kBAC9D;AAAA,gBACF,WAAW,KAAK,cAAc,mBAAmB;AAC/C,yBAAO;AAAA,oBACL,MAAM;AAAA,oBACN,cAAc,KAAK,KAAK,SAAS;AAAA,kBACnC;AAAA,gBACF,OAAO;AACL,wBAAM,IAAI,8BAA8B;AAAA,oBACtC,eACE;AAAA,kBACJ,CAAC;AAAA,gBACH;AAAA,cACF;AAAA,YACF;AAAA,UACF,CAAC;AAAA,QACH,CAAC;AACD;AAAA,MACF;AAAA,MAEA,KAAK,aAAa;AAChB,YAAI,OAAO;AACX,cAAM,YAID,CAAC;AAEN,mBAAW,QAAQ,SAAS;AAC1B,kBAAQ,KAAK,MAAM;AAAA,YACjB,KAAK,QAAQ;AACX,sBAAQ,KAAK;AACb;AAAA,YACF;AAAA,YACA,KAAK,aAAa;AAChB,wBAAU,KAAK;AAAA,gBACb,IAAI,KAAK;AAAA,gBACT,MAAM;AAAA,gBACN,UAAU;AAAA,kBACR,MAAM,KAAK;AAAA,kBACX,WAAW,KAAK,UAAU,KAAK,KAAK;AAAA,gBACtC;AAAA,cACF,CAAC;AACD;AAAA,YACF;AAAA,UACF;AAAA,QACF;AAEA,iBAAS,KAAK;AAAA,UACZ,MAAM;AAAA,UACN,SAAS;AAAA,UACT,QAAQ,gBAAgB,OAAO;AAAA,UAC/B,YAAY,UAAU,SAAS,IAAI,YAAY;AAAA,QACjD,CAAC;AAED;AAAA,MACF;AAAA,MACA,KAAK,QAAQ;AACX,mBAAW,gBAAgB,SAAS;AAClC,gBAAM,SAAS,aAAa;AAE5B,cAAI;AACJ,kBAAQ,OAAO,MAAM;AAAA,YACnB,KAAK;AAAA,YACL,KAAK;AACH,6BAAe,OAAO;AACtB;AAAA,YACF,KAAK;AAAA,YACL,KAAK;AAAA,YACL,KAAK;AACH,6BAAe,KAAK,UAAU,OAAO,KAAK;AAC1C;AAAA,UACJ;AAEA,mBAAS,KAAK;AAAA,YACZ,MAAM;AAAA,YACN,MAAM,aAAa;AAAA,YACnB,cAAc,aAAa;AAAA,YAC3B,SAAS;AAAA,UACX,CAAC;AAAA,QACH;AACA;AAAA,MACF;AAAA,MACA,SAAS;AACP,cAAM,mBAA0B;AAChC,cAAM,IAAI,MAAM,qBAAqB,gBAAgB,EAAE;AAAA,MACzD;AAAA,IACF;AAAA,EACF;AAEA,SAAO;AACT;;;ACtIO,SAAS,oBAAoB;AAAA,EAClC;AAAA,EACA;AAAA,EACA;AACF,GAIG;AACD,SAAO;AAAA,IACL,IAAI,kBAAM;AAAA,IACV,SAAS,wBAAS;AAAA,IAClB,WAAW,WAAW,OAAO,IAAI,KAAK,UAAU,GAAI,IAAI;AAAA,EAC1D;AACF;;;ACZO,SAAS,uBACd,cAC6B;AAC7B,UAAQ,cAAc;AAAA,IACpB,KAAK;AACH,aAAO;AAAA,IACT,KAAK;AAAA,IACL,KAAK;AACH,aAAO;AAAA,IACT,KAAK;AACH,aAAO;AAAA,IACT;AACE,aAAO;AAAA,EACX;AACF;;;AChBA,SAAS,SAAS;AAuBX,IAAM,yBAAyB,EAAE,OAAO;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,EAM7C,YAAY,EAAE,QAAQ,EAAE,SAAS;AAAA,EAEjC,oBAAoB,EAAE,OAAO,EAAE,SAAS;AAAA,EACxC,mBAAmB,EAAE,OAAO,EAAE,SAAS;AACzC,CAAC;;;ACjCD,SAAS,sCAAsC;AAC/C,SAAS,KAAAC,UAAS;AAElB,IAAM,yBAAyBA,GAAE,OAAO;AAAA,EACtC,QAAQA,GAAE,QAAQ,OAAO;AAAA,EACzB,SAASA,GAAE,OAAO;AAAA,EAClB,MAAMA,GAAE,OAAO;AAAA,EACf,OAAOA,GAAE,OAAO,EAAE,SAAS;AAAA,EAC3B,MAAMA,GAAE,OAAO,EAAE,SAAS;AAC5B,CAAC;AAIM,IAAM,+BAA+B,+BAA+B;AAAA,EACzE,aAAa;AAAA,EACb,gBAAgB,UAAQ,KAAK;AAC/B,CAAC;;;AChBD;AAAA,EAGE,iCAAAC;AAAA,OACK;AAGA,SAAS,aAAa;AAAA,EAC3B;AAAA,EACA;AACF,GAgBE;AAEA,WAAQ,+BAAO,UAAS,QAAQ;AAEh
C,QAAM,eAA6C,CAAC;AAEpD,MAAI,SAAS,MAAM;AACjB,WAAO,EAAE,OAAO,QAAW,YAAY,QAAW,aAAa;AAAA,EACjE;AAEA,QAAM,eAOD,CAAC;AAEN,aAAW,QAAQ,OAAO;AACxB,QAAI,KAAK,SAAS,oBAAoB;AACpC,mBAAa,KAAK,EAAE,MAAM,oBAAoB,KAAK,CAAC;AAAA,IACtD,OAAO;AACL,mBAAa,KAAK;AAAA,QAChB,MAAM;AAAA,QACN,UAAU;AAAA,UACR,MAAM,KAAK;AAAA,UACX,aAAa,KAAK;AAAA,UAClB,YAAY,KAAK;AAAA,QACnB;AAAA,MACF,CAAC;AAAA,IACH;AAAA,EACF;AAEA,MAAI,cAAc,MAAM;AACtB,WAAO,EAAE,OAAO,cAAc,YAAY,QAAW,aAAa;AAAA,EACpE;AAEA,QAAM,OAAO,WAAW;AAExB,UAAQ,MAAM;AAAA,IACZ,KAAK;AAAA,IACL,KAAK;AACH,aAAO,EAAE,OAAO,cAAc,YAAY,MAAM,aAAa;AAAA,IAC/D,KAAK;AACH,aAAO,EAAE,OAAO,cAAc,YAAY,OAAO,aAAa;AAAA,IAIhE,KAAK;AACH,aAAO;AAAA,QACL,OAAO,aAAa;AAAA,UAClB,UAAQ,KAAK,SAAS,SAAS,WAAW;AAAA,QAC5C;AAAA,QACA,YAAY;AAAA,QACZ;AAAA,MACF;AAAA,IACF,SAAS;AACP,YAAM,mBAA0B;AAChC,YAAM,IAAIA,+BAA8B;AAAA,QACtC,eAAe,qBAAqB,gBAAgB;AAAA,MACtD,CAAC;AAAA,IACH;AAAA,EACF;AACF;;;ANvDO,IAAM,2BAAN,MAA0D;AAAA,EAO/D,YAAY,SAA6B,QAA2B;AANpE,SAAS,uBAAuB;AAehC,SAAS,gBAA0C;AAAA,MACjD,mBAAmB,CAAC,gBAAgB;AAAA,IACtC;AAVE,SAAK,UAAU;AACf,SAAK,SAAS;AAAA,EAChB;AAAA,EAEA,IAAI,WAAmB;AACrB,WAAO,KAAK,OAAO;AAAA,EACrB;AAAA,EAMA,MAAc,QAAQ;AAAA,IACpB;AAAA,IACA;AAAA,IACA;AAAA,IACA;AAAA,IACA;AAAA,IACA;AAAA,IACA;AAAA,IACA;AAAA,IACA;AAAA,IACA;AAAA,IACA;AAAA,IACA;AAAA,IACA;AAAA,EACF,GAAiD;AArEnD;AAsEI,UAAM,WAAyC,CAAC;AAEhD,UAAM,WACH,WAAM,qBAAqB;AAAA,MAC1B,UAAU;AAAA,MACV;AAAA,MACA,QAAQ;AAAA,IACV,CAAC,MAJA,YAIM,CAAC;AAEV,QAAI,QAAQ,MAAM;AAChB,eAAS,KAAK;AAAA,QACZ,MAAM;AAAA,QACN,SAAS;AAAA,MACX,CAAC;AAAA,IACH;AAEA,QAAI,oBAAoB,MAAM;AAC5B,eAAS,KAAK;AAAA,QACZ,MAAM;AAAA,QACN,SAAS;AAAA,MACX,CAAC;AAAA,IACH;AAEA,QAAI,mBAAmB,MAAM;AAC3B,eAAS,KAAK;AAAA,QACZ,MAAM;AAAA,QACN,SAAS;AAAA,MACX,CAAC;AAAA,IACH;AAEA,QAAI,iBAAiB,MAAM;AACzB,eAAS,KAAK;AAAA,QACZ,MAAM;AAAA,QACN,SAAS;AAAA,MACX,CAAC;AAAA,IACH;AAEA,QACE,kBAAkB,QAClB,eAAe,SAAS,UACxB,eAAe,UAAU,MACzB;AACA,eAAS,KAAK;AAAA,QACZ,MAAM;AAAA,QACN,SAAS;AAAA,QACT,SAAS;AAAA,MACX,CAAC;AAAA,IACH;AAEA,UAAM,WAAW;AAAA;AAAA,MAEf,OAAO,KAAK;AAAA;AAAA,MAGZ,aAAa,QAAQ;AAAA;AAAA,MAGrB,YAAY;AAAA,MACZ;AAAA,MACA,OAAO;AAAA,MACP,aAAa;AAAA;AAAA,MAGb,kBACE,iDAAgB,UAAS,SAAS,EAAE,MAAM,cAAc,IAAI;AAAA;AAAA,MAG9D,sBAAsB,QAAQ;AAAA,MAC9B,qBAAqB,QAAQ;AAAA;AAAA,MAG7B,UAAU,6BAA6B,MAAM;AAAA,IAC/C;AAEA,UAAM;AAAA,MACJ,OAAO;AAAA,MACP,YAAY;AAAA,MACZ;AAAA,IACF,IAAI,aAAa;AAAA,MACf;AAAA,MACA;AAAA,IACF,CAAC;AAED,WAAO;AAAA,MACL,MAAM;AAAA,QACJ,GAAG;AAAA,QACH,OAAO;AAAA,QACP,aAAa;AAAA,MACf;AAAA,MACA,UAAU,CAAC,GAAG,UAAU,GAAG,YAAY;AAAA,IACzC;AAAA,EACF;AAAA,EAEA,MAAM,WACJ,SAC6D;AAC7D,UAAM,EAAE,MAAM,MAAM,SAAS,IAAI,MAAM,KAAK,QAAQ,OAAO;AAE3D,UAAM;AAAA,MACJ;AAAA,MACA,OAAO;AAAA,MACP,UAAU;AAAA,IACZ,IAAI,MAAM,cAAc;AAAA,MACtB,KAAK,GAAG,KAAK,OAAO,OAAO;AAAA,MAC3B,SAAS,eAAe,KAAK,OAAO,QAAQ,GAAG,QAAQ,OAAO;AAAA,MAC9D;AAAA,MACA,uBAAuB;AAAA,MACvB,2BAA2B;AAAA,QACzB;AAAA,MACF;AAAA,MACA,aAAa,QAAQ;AAAA,MACrB,OAAO,KAAK,OAAO;AAAA,IACrB,CAAC;AAED,UAAM,SAAS,SAAS,QAAQ,CAAC;AACjC,UAAM,UAAyC,CAAC;AAGhD,QAAI,OAAO,mBAAmB,OAAO,QAAQ,OAAO;AAKpD,UAAM,cAAc,KAAK,SAAS,KAAK,SAAS,SAAS,CAAC;AAC1D,QACE,YAAY,SAAS,gBACrB,6BAAM,WAAW,YAAY,WAC7B;AACA,aAAO,KAAK,MAAM,YAAY,QAAQ,MAAM;AAAA,IAC9C;AAEA,QAAI,QAAQ,QAAQ,KAAK,SAAS,GAAG;AACnC,cAAQ,KAAK,EAAE,MAAM,QAAQ,KAAK,CAAC;AAAA,IACrC;AAGA,QAAI,OAAO,QAAQ,cAAc,MAAM;AACrC,iBAAW,YAAY,OAAO,QAAQ,YAAY;AAChD,gBAAQ,KAAK;AAAA,UACX,MAAM;AAAA,UACN,YAAY,SAAS;AAAA,UACrB,UAAU,SAAS,SAAS;AAAA,UAC5B,OAAO,SAAS,SAAS;AAAA,QAC3B,CAAC;AAAA,MACH;AAAA,IACF;AAEA,WAAO;AAAA,MACL;AAAA,MACA,cAAc,uBAAuB,OAAO,aAAa;AAAA,MACzD,OAAO;AAAA,QACL,aAAa,SAAS,MAAM;AAAA,QAC5B,cAAc,SAAS,MAAM;AAAA,QAC7B,aAAa,SAAS,MAAM;AAAA,MAC9B;AAAA,MACA,SAAS,EAAE,KAAK;AAAA,MAChB,UAAU;AAAA,QACR,GAAG,oBAAoB,QAAQ;AAAA,QAC/B,SAAS;AAAA
,QACT,MAAM;AAAA,MACR;AAAA,MACA;AAAA,IACF;AAAA,EACF;AAAA,EAEA,MAAM,SACJ,SAC2D;AAC3D,UAAM,EAAE,MAAM,SAAS,IAAI,MAAM,KAAK,QAAQ,OAAO;AACrD,UAAM,OAAO,EAAE,GAAG,MAAM,QAAQ,KAAK;AAErC,UAAM,EAAE,iBAAiB,OAAO,SAAS,IAAI,MAAM,cAAc;AAAA,MAC/D,KAAK,GAAG,KAAK,OAAO,OAAO;AAAA,MAC3B,SAAS,eAAe,KAAK,OAAO,QAAQ,GAAG,QAAQ,OAAO;AAAA,MAC9D;AAAA,MACA,uBAAuB;AAAA,MACvB,2BAA2B;AAAA,QACzB;AAAA,MACF;AAAA,MACA,aAAa,QAAQ;AAAA,MACrB,OAAO,KAAK,OAAO;AAAA,IACrB,CAAC;AAED,QAAI,eAA4C;AAChD,UAAM,QAA8B;AAAA,MAClC,aAAa;AAAA,MACb,cAAc;AAAA,MACd,aAAa;AAAA,IACf;AAEA,QAAI,eAAe;AACnB,QAAI,aAAa;AAEjB,WAAO;AAAA,MACL,QAAQ,SAAS;AAAA,QACf,IAAI,gBAGF;AAAA,UACA,MAAM,YAAY;AAChB,uBAAW,QAAQ,EAAE,MAAM,gBAAgB,SAAS,CAAC;AAAA,UACvD;AAAA,UAEA,UAAU,OAAO,YAAY;AAE3B,gBAAI,QAAQ,kBAAkB;AAC5B,yBAAW,QAAQ,EAAE,MAAM,OAAO,UAAU,MAAM,SAAS,CAAC;AAAA,YAC9D;AAEA,gBAAI,CAAC,MAAM,SAAS;AAClB,yBAAW,QAAQ,EAAE,MAAM,SAAS,OAAO,MAAM,MAAM,CAAC;AACxD;AAAA,YACF;AAEA,kBAAM,QAAQ,MAAM;AAEpB,gBAAI,cAAc;AAChB,6BAAe;AAEf,yBAAW,QAAQ;AAAA,gBACjB,MAAM;AAAA,gBACN,GAAG,oBAAoB,KAAK;AAAA,cAC9B,CAAC;AAAA,YACH;AAEA,gBAAI,MAAM,SAAS,MAAM;AACvB,oBAAM,cAAc,MAAM,MAAM;AAChC,oBAAM,eAAe,MAAM,MAAM;AACjC,oBAAM,cAAc,MAAM,MAAM;AAAA,YAClC;AAEA,kBAAM,SAAS,MAAM,QAAQ,CAAC;AAC9B,kBAAM,QAAQ,OAAO;AAErB,kBAAM,cAAc,mBAAmB,MAAM,OAAO;AAEpD,gBAAI,eAAe,QAAQ,YAAY,SAAS,GAAG;AACjD,kBAAI,CAAC,YAAY;AACf,2BAAW,QAAQ,EAAE,MAAM,cAAc,IAAI,IAAI,CAAC;AAClD,6BAAa;AAAA,cACf;AAEA,yBAAW,QAAQ;AAAA,gBACjB,MAAM;AAAA,gBACN,IAAI;AAAA,gBACJ,OAAO;AAAA,cACT,CAAC;AAAA,YACH;AAEA,iBAAI,+BAAO,eAAc,MAAM;AAC7B,yBAAW,YAAY,MAAM,YAAY;AACvC,sBAAM,aAAa,SAAS;AAC5B,sBAAM,WAAW,SAAS,SAAS;AACnC,sBAAM,QAAQ,SAAS,SAAS;AAEhC,2BAAW,QAAQ;AAAA,kBACjB,MAAM;AAAA,kBACN,IAAI;AAAA,kBACJ;AAAA,gBACF,CAAC;AAED,2BAAW,QAAQ;AAAA,kBACjB,MAAM;AAAA,kBACN,IAAI;AAAA,kBACJ,OAAO;AAAA,gBACT,CAAC;AAED,2BAAW,QAAQ;AAAA,kBACjB,MAAM;AAAA,kBACN,IAAI;AAAA,gBACN,CAAC;AAED,2BAAW,QAAQ;AAAA,kBACjB,MAAM;AAAA,kBACN;AAAA,kBACA;AAAA,kBACA;AAAA,gBACF,CAAC;AAAA,cACH;AAAA,YACF;AAEA,gBAAI,OAAO,iBAAiB,MAAM;AAChC,6BAAe,uBAAuB,OAAO,aAAa;AAAA,YAC5D;AAAA,UACF;AAAA,UAEA,MAAM,YAAY;AAChB,gBAAI,YAAY;AACd,yBAAW,QAAQ,EAAE,MAAM,YAAY,IAAI,IAAI,CAAC;AAAA,YAClD;AAEA,uBAAW,QAAQ;AAAA,cACjB,MAAM;AAAA,cACN;AAAA,cACA;AAAA,YACF,CAAC;AAAA,UACH;AAAA,QACF,CAAC;AAAA,MACH;AAAA,MACA,SAAS,EAAE,KAAK;AAAA,MAChB,UAAU,EAAE,SAAS,gBAAgB;AAAA,IACvC;AAAA,EACF;AACF;AAEA,SAAS,mBAAmB,SAA+C;AACzE,MAAI,OAAO,YAAY,UAAU;AAC/B,WAAO;AAAA,EACT;AAEA,MAAI,WAAW,MAAM;AACnB,WAAO;AAAA,EACT;AAEA,QAAM,cAAwB,CAAC;AAE/B,aAAW,SAAS,SAAS;AAC3B,UAAM,EAAE,KAAK,IAAI;AAEjB,YAAQ,MAAM;AAAA,MACZ,KAAK;AACH,oBAAY,KAAK,MAAM,IAAI;AAC3B;AAAA,MACF,KAAK;AAAA,MACL,KAAK;AAEH;AAAA,MACF,SAAS;AACP,cAAM,mBAA0B;AAChC,cAAM,IAAI,MAAM,qBAAqB,gBAAgB,EAAE;AAAA,MACzD;AAAA,IACF;AAAA,EACF;AAEA,SAAO,YAAY,SAAS,YAAY,KAAK,EAAE,IAAI;AACrD;AAEA,IAAM,uBAAuBC,GAC1B,MAAM;AAAA,EACLA,GAAE,OAAO;AAAA,EACTA,GAAE;AAAA,IACAA,GAAE,mBAAmB,QAAQ;AAAA,MAC3BA,GAAE,OAAO;AAAA,QACP,MAAMA,GAAE,QAAQ,MAAM;AAAA,QACtB,MAAMA,GAAE,OAAO;AAAA,MACjB,CAAC;AAAA,MACDA,GAAE,OAAO;AAAA,QACP,MAAMA,GAAE,QAAQ,WAAW;AAAA,QAC3B,WAAWA,GAAE,MAAM;AAAA,UACjBA,GAAE,OAAO;AAAA,UACTA,GAAE,OAAO;AAAA,YACP,KAAKA,GAAE,OAAO;AAAA,YACd,QAAQA,GAAE,OAAO,EAAE,SAAS;AAAA,UAC9B,CAAC;AAAA,QACH,CAAC;AAAA,MACH,CAAC;AAAA,MACDA,GAAE,OAAO;AAAA,QACP,MAAMA,GAAE,QAAQ,WAAW;AAAA,QAC3B,eAAeA,GAAE,MAAMA,GAAE,OAAO,CAAC;AAAA,MACnC,CAAC;AAAA,IACH,CAAC;AAAA,EACH;AACF,CAAC,EACA,QAAQ;AAEX,IAAM,qBAAqBA,GAAE,OAAO;AAAA,EAClC,eAAeA,GAAE,OAAO;AAAA,EACxB,mBAAmBA,GAAE,OAAO;AAAA,EAC5B,cAAcA,GAAE,OAAO;AACzB,CAAC;AAID,IAAM,4BAA4BA,GAAE,OAAO;AAAA,EACzC,IAAIA,GAAE,OAAO,EAAE,QAAQ;AAAA,EACvB,SAASA,GAAE,OAAO,EAAE,QAAQ;AAAA,EAC5B,OAAOA,GAAE,OAAO,EAAE,QAAQ;AAAA,EAC1B,SAASA,GAAE;AAAA,IACTA,G
AAE,OAAO;AAAA,MACP,SAASA,GAAE,OAAO;AAAA,QAChB,MAAMA,GAAE,QAAQ,WAAW;AAAA,QAC3B,SAAS;AAAA,QACT,YAAYA,GACT;AAAA,UACCA,GAAE,OAAO;AAAA,YACP,IAAIA,GAAE,OAAO;AAAA,YACb,UAAUA,GAAE,OAAO,EAAE,MAAMA,GAAE,OAAO,GAAG,WAAWA,GAAE,OAAO,EAAE,CAAC;AAAA,UAChE,CAAC;AAAA,QACH,EACC,QAAQ;AAAA,MACb,CAAC;AAAA,MACD,OAAOA,GAAE,OAAO;AAAA,MAChB,eAAeA,GAAE,OAAO,EAAE,QAAQ;AAAA,IACpC,CAAC;AAAA,EACH;AAAA,EACA,QAAQA,GAAE,QAAQ,iBAAiB;AAAA,EACnC,OAAO;AACT,CAAC;AAID,IAAM,yBAAyBA,GAAE,OAAO;AAAA,EACtC,IAAIA,GAAE,OAAO,EAAE,QAAQ;AAAA,EACvB,SAASA,GAAE,OAAO,EAAE,QAAQ;AAAA,EAC5B,OAAOA,GAAE,OAAO,EAAE,QAAQ;AAAA,EAC1B,SAASA,GAAE;AAAA,IACTA,GAAE,OAAO;AAAA,MACP,OAAOA,GAAE,OAAO;AAAA,QACd,MAAMA,GAAE,KAAK,CAAC,WAAW,CAAC,EAAE,SAAS;AAAA,QACrC,SAAS;AAAA,QACT,YAAYA,GACT;AAAA,UACCA,GAAE,OAAO;AAAA,YACP,IAAIA,GAAE,OAAO;AAAA,YACb,UAAUA,GAAE,OAAO,EAAE,MAAMA,GAAE,OAAO,GAAG,WAAWA,GAAE,OAAO,EAAE,CAAC;AAAA,UAChE,CAAC;AAAA,QACH,EACC,QAAQ;AAAA,MACb,CAAC;AAAA,MACD,eAAeA,GAAE,OAAO,EAAE,QAAQ;AAAA,MAClC,OAAOA,GAAE,OAAO;AAAA,IAClB,CAAC;AAAA,EACH;AAAA,EACA,OAAO,mBAAmB,QAAQ;AACpC,CAAC;;;AO9eD;AAAA,EAEE;AAAA,OACK;AACP;AAAA,EACE,kBAAAC;AAAA,EACA,6BAAAC;AAAA,EAEA,iBAAAC;AAAA,OACK;AACP,SAAS,KAAAC,UAAS;AAWX,IAAM,wBAAN,MAAgE;AAAA,EAYrE,YACE,SACA,QACA;AAdF,SAAS,uBAAuB;AAEhC,SAAS,uBAAuB;AAChC,SAAS,wBAAwB;AAY/B,SAAK,UAAU;AACf,SAAK,SAAS;AAAA,EAChB;AAAA,EAVA,IAAI,WAAmB;AACrB,WAAO,KAAK,OAAO;AAAA,EACrB;AAAA,EAUA,MAAM,QAAQ;AAAA,IACZ;AAAA,IACA;AAAA,IACA;AAAA,EACF,GAEE;AACA,QAAI,OAAO,SAAS,KAAK,sBAAsB;AAC7C,YAAM,IAAI,mCAAmC;AAAA,QAC3C,UAAU,KAAK;AAAA,QACf,SAAS,KAAK;AAAA,QACd,sBAAsB,KAAK;AAAA,QAC3B;AAAA,MACF,CAAC;AAAA,IACH;AAEA,UAAM;AAAA,MACJ;AAAA,MACA,OAAO;AAAA,MACP;AAAA,IACF,IAAI,MAAMC,eAAc;AAAA,MACtB,KAAK,GAAG,KAAK,OAAO,OAAO;AAAA,MAC3B,SAASC,gBAAe,KAAK,OAAO,QAAQ,GAAG,OAAO;AAAA,MACtD,MAAM;AAAA,QACJ,OAAO,KAAK;AAAA,QACZ,OAAO;AAAA,QACP,iBAAiB;AAAA,MACnB;AAAA,MACA,uBAAuB;AAAA,MACvB,2BAA2BC;AAAA,QACzB;AAAA,MACF;AAAA,MACA;AAAA,MACA,OAAO,KAAK,OAAO;AAAA,IACrB,CAAC;AAED,WAAO;AAAA,MACL,YAAY,SAAS,KAAK,IAAI,UAAQ,KAAK,SAAS;AAAA,MACpD,OAAO,SAAS,QACZ,EAAE,QAAQ,SAAS,MAAM,cAAc,IACvC;AAAA,MACJ,UAAU,EAAE,SAAS,iBAAiB,MAAM,SAAS;AAAA,IACvD;AAAA,EACF;AACF;AAIA,IAAM,qCAAqCC,GAAE,OAAO;AAAA,EAClD,MAAMA,GAAE,MAAMA,GAAE,OAAO,EAAE,WAAWA,GAAE,MAAMA,GAAE,OAAO,CAAC,EAAE,CAAC,CAAC;AAAA,EAC1D,OAAOA,GAAE,OAAO,EAAE,eAAeA,GAAE,OAAO,EAAE,CAAC,EAAE,QAAQ;AACzD,CAAC;;;ARvBM,SAAS,cACd,UAAmC,CAAC,GACnB;AAvEnB;AAwEE,QAAM,WACJ,0BAAqB,QAAQ,OAAO,MAApC,YAAyC;AAE3C,QAAM,aAAa,OAAO;AAAA,IACxB,eAAe,UAAU,WAAW;AAAA,MAClC,QAAQ,QAAQ;AAAA,MAChB,yBAAyB;AAAA,MACzB,aAAa;AAAA,IACf,CAAC,CAAC;AAAA,IACF,GAAG,QAAQ;AAAA,EACb;AAEA,QAAM,kBAAkB,CAAC,YACvB,IAAI,yBAAyB,SAAS;AAAA,IACpC,UAAU;AAAA,IACV;AAAA,IACA,SAAS;AAAA,IACT,OAAO,QAAQ;AAAA,EACjB,CAAC;AAEH,QAAM,uBAAuB,CAAC,YAC5B,IAAI,sBAAsB,SAAS;AAAA,IACjC,UAAU;AAAA,IACV;AAAA,IACA,SAAS;AAAA,IACT,OAAO,QAAQ;AAAA,EACjB,CAAC;AAEH,QAAM,WAAW,SAAU,SAA6B;AACtD,QAAI,YAAY;AACd,YAAM,IAAI;AAAA,QACR;AAAA,MACF;AAAA,IACF;AAEA,WAAO,gBAAgB,OAAO;AAAA,EAChC;AAEA,WAAS,gBAAgB;AACzB,WAAS,OAAO;AAChB,WAAS,YAAY;AACrB,WAAS,gBAAgB;AACzB,WAAS,qBAAqB;AAE9B,WAAS,aAAa,CAAC,YAAoB;AACzC,UAAM,IAAI,iBAAiB,EAAE,SAAS,WAAW,aAAa,CAAC;AAAA,EACjE;AAEA,SAAO;AACT;AAKO,IAAM,UAAU,cAAc;","names":["z","z","UnsupportedFunctionalityError","z","combineHeaders","createJsonResponseHandler","postJsonToApi","z","postJsonToApi","combineHeaders","createJsonResponseHandler","z"]}
|
|
1
|
+
{"version":3,"sources":["../src/mistral-provider.ts","../src/mistral-chat-language-model.ts","../src/convert-to-mistral-chat-messages.ts","../src/get-response-metadata.ts","../src/map-mistral-finish-reason.ts","../src/mistral-chat-options.ts","../src/mistral-error.ts","../src/mistral-prepare-tools.ts","../src/mistral-embedding-model.ts"],"sourcesContent":["import {\n EmbeddingModelV2,\n LanguageModelV2,\n NoSuchModelError,\n ProviderV2,\n} from '@ai-sdk/provider';\nimport {\n FetchFunction,\n loadApiKey,\n withoutTrailingSlash,\n} from '@ai-sdk/provider-utils';\nimport { MistralChatLanguageModel } from './mistral-chat-language-model';\nimport { MistralChatModelId } from './mistral-chat-options';\nimport { MistralEmbeddingModel } from './mistral-embedding-model';\nimport { MistralEmbeddingModelId } from './mistral-embedding-options';\n\nexport interface MistralProvider extends ProviderV2 {\n (modelId: MistralChatModelId): LanguageModelV2;\n\n /**\nCreates a model for text generation.\n*/\n languageModel(modelId: MistralChatModelId): LanguageModelV2;\n\n /**\nCreates a model for text generation.\n*/\n chat(modelId: MistralChatModelId): LanguageModelV2;\n\n /**\n@deprecated Use `textEmbedding()` instead.\n */\n embedding(modelId: MistralEmbeddingModelId): EmbeddingModelV2<string>;\n\n textEmbedding(modelId: MistralEmbeddingModelId): EmbeddingModelV2<string>;\n\n textEmbeddingModel: (\n modelId: MistralEmbeddingModelId,\n ) => EmbeddingModelV2<string>;\n}\n\nexport interface MistralProviderSettings {\n /**\nUse a different URL prefix for API calls, e.g. to use proxy servers.\nThe default prefix is `https://api.mistral.ai/v1`.\n */\n baseURL?: string;\n\n /**\nAPI key that is being send using the `Authorization` header.\nIt defaults to the `MISTRAL_API_KEY` environment variable.\n */\n apiKey?: string;\n\n /**\nCustom headers to include in the requests.\n */\n headers?: Record<string, string>;\n\n /**\nCustom fetch implementation. You can use it as a middleware to intercept requests,\nor to provide a custom fetch implementation for e.g. testing.\n */\n fetch?: FetchFunction;\n\n generateId?: () => string;\n}\n\n/**\nCreate a Mistral AI provider instance.\n */\nexport function createMistral(\n options: MistralProviderSettings = {},\n): MistralProvider {\n const baseURL =\n withoutTrailingSlash(options.baseURL) ?? 
'https://api.mistral.ai/v1';\n\n const getHeaders = () => ({\n Authorization: `Bearer ${loadApiKey({\n apiKey: options.apiKey,\n environmentVariableName: 'MISTRAL_API_KEY',\n description: 'Mistral',\n })}`,\n ...options.headers,\n });\n\n const createChatModel = (modelId: MistralChatModelId) =>\n new MistralChatLanguageModel(modelId, {\n provider: 'mistral.chat',\n baseURL,\n headers: getHeaders,\n fetch: options.fetch,\n generateId: options.generateId,\n });\n\n const createEmbeddingModel = (modelId: MistralEmbeddingModelId) =>\n new MistralEmbeddingModel(modelId, {\n provider: 'mistral.embedding',\n baseURL,\n headers: getHeaders,\n fetch: options.fetch,\n });\n\n const provider = function (modelId: MistralChatModelId) {\n if (new.target) {\n throw new Error(\n 'The Mistral model function cannot be called with the new keyword.',\n );\n }\n\n return createChatModel(modelId);\n };\n\n provider.languageModel = createChatModel;\n provider.chat = createChatModel;\n provider.embedding = createEmbeddingModel;\n provider.textEmbedding = createEmbeddingModel;\n provider.textEmbeddingModel = createEmbeddingModel;\n\n provider.imageModel = (modelId: string) => {\n throw new NoSuchModelError({ modelId, modelType: 'imageModel' });\n };\n\n return provider;\n}\n\n/**\nDefault Mistral provider instance.\n */\nexport const mistral = createMistral();\n","import {\n LanguageModelV2,\n LanguageModelV2CallWarning,\n LanguageModelV2Content,\n LanguageModelV2FinishReason,\n LanguageModelV2StreamPart,\n LanguageModelV2Usage,\n} from '@ai-sdk/provider';\nimport {\n combineHeaders,\n createEventSourceResponseHandler,\n createJsonResponseHandler,\n FetchFunction,\n generateId,\n parseProviderOptions,\n ParseResult,\n postJsonToApi,\n} from '@ai-sdk/provider-utils';\nimport { z } from 'zod/v4';\nimport { convertToMistralChatMessages } from './convert-to-mistral-chat-messages';\nimport { getResponseMetadata } from './get-response-metadata';\nimport { mapMistralFinishReason } from './map-mistral-finish-reason';\nimport {\n MistralChatModelId,\n mistralProviderOptions,\n} from './mistral-chat-options';\nimport { mistralFailedResponseHandler } from './mistral-error';\nimport { prepareTools } from './mistral-prepare-tools';\n\ntype MistralChatConfig = {\n provider: string;\n baseURL: string;\n headers: () => Record<string, string | undefined>;\n fetch?: FetchFunction;\n generateId?: () => string;\n};\n\nexport class MistralChatLanguageModel implements LanguageModelV2 {\n readonly specificationVersion = 'v2';\n\n readonly modelId: MistralChatModelId;\n\n private readonly config: MistralChatConfig;\n private readonly generateId: () => string;\n\n constructor(modelId: MistralChatModelId, config: MistralChatConfig) {\n this.modelId = modelId;\n this.config = config;\n this.generateId = config.generateId ?? generateId;\n }\n\n get provider(): string {\n return this.config.provider;\n }\n\n readonly supportedUrls: Record<string, RegExp[]> = {\n 'application/pdf': [/^https:\\/\\/.*$/],\n };\n\n private async getArgs({\n prompt,\n maxOutputTokens,\n temperature,\n topP,\n topK,\n frequencyPenalty,\n presencePenalty,\n stopSequences,\n responseFormat,\n seed,\n providerOptions,\n tools,\n toolChoice,\n }: Parameters<LanguageModelV2['doGenerate']>[0]) {\n const warnings: LanguageModelV2CallWarning[] = [];\n\n const options =\n (await parseProviderOptions({\n provider: 'mistral',\n providerOptions,\n schema: mistralProviderOptions,\n })) ?? 
{};\n\n if (topK != null) {\n warnings.push({\n type: 'unsupported-setting',\n setting: 'topK',\n });\n }\n\n if (frequencyPenalty != null) {\n warnings.push({\n type: 'unsupported-setting',\n setting: 'frequencyPenalty',\n });\n }\n\n if (presencePenalty != null) {\n warnings.push({\n type: 'unsupported-setting',\n setting: 'presencePenalty',\n });\n }\n\n if (stopSequences != null) {\n warnings.push({\n type: 'unsupported-setting',\n setting: 'stopSequences',\n });\n }\n\n if (\n responseFormat != null &&\n responseFormat.type === 'json' &&\n responseFormat.schema != null\n ) {\n warnings.push({\n type: 'unsupported-setting',\n setting: 'responseFormat',\n details: 'JSON response format schema is not supported',\n });\n }\n\n const baseArgs = {\n // model id:\n model: this.modelId,\n\n // model specific settings:\n safe_prompt: options.safePrompt,\n\n // standardized settings:\n max_tokens: maxOutputTokens,\n temperature,\n top_p: topP,\n random_seed: seed,\n\n // response format:\n response_format:\n responseFormat?.type === 'json' ? { type: 'json_object' } : undefined,\n\n // mistral-specific provider options:\n document_image_limit: options.documentImageLimit,\n document_page_limit: options.documentPageLimit,\n\n // messages:\n messages: convertToMistralChatMessages(prompt),\n };\n\n const {\n tools: mistralTools,\n toolChoice: mistralToolChoice,\n toolWarnings,\n } = prepareTools({\n tools,\n toolChoice,\n });\n\n return {\n args: {\n ...baseArgs,\n tools: mistralTools,\n tool_choice: mistralToolChoice,\n },\n warnings: [...warnings, ...toolWarnings],\n };\n }\n\n async doGenerate(\n options: Parameters<LanguageModelV2['doGenerate']>[0],\n ): Promise<Awaited<ReturnType<LanguageModelV2['doGenerate']>>> {\n const { args: body, warnings } = await this.getArgs(options);\n\n const {\n responseHeaders,\n value: response,\n rawValue: rawResponse,\n } = await postJsonToApi({\n url: `${this.config.baseURL}/chat/completions`,\n headers: combineHeaders(this.config.headers(), options.headers),\n body,\n failedResponseHandler: mistralFailedResponseHandler,\n successfulResponseHandler: createJsonResponseHandler(\n mistralChatResponseSchema,\n ),\n abortSignal: options.abortSignal,\n fetch: this.config.fetch,\n });\n\n const choice = response.choices[0];\n const content: Array<LanguageModelV2Content> = [];\n\n // process content parts in order to preserve sequence\n if (\n choice.message.content != null &&\n Array.isArray(choice.message.content)\n ) {\n for (const part of choice.message.content) {\n if (part.type === 'thinking') {\n const reasoningText = extractReasoningContent(part.thinking);\n if (reasoningText.length > 0) {\n content.push({ type: 'reasoning', text: reasoningText });\n }\n } else if (part.type === 'text') {\n if (part.text.length > 0) {\n content.push({ type: 'text', text: part.text });\n }\n }\n }\n } else {\n // handle legacy string content\n const text = extractTextContent(choice.message.content);\n if (text != null && text.length > 0) {\n content.push({ type: 'text', text });\n }\n }\n\n // when there is a trailing assistant message, mistral will send the\n // content of that message again. we skip this repeated content to\n // avoid duplication, e.g. 
in continuation mode.\n\n // tool calls:\n if (choice.message.tool_calls != null) {\n for (const toolCall of choice.message.tool_calls) {\n content.push({\n type: 'tool-call',\n toolCallId: toolCall.id,\n toolName: toolCall.function.name,\n input: toolCall.function.arguments!,\n });\n }\n }\n\n return {\n content,\n finishReason: mapMistralFinishReason(choice.finish_reason),\n usage: {\n inputTokens: response.usage.prompt_tokens,\n outputTokens: response.usage.completion_tokens,\n totalTokens: response.usage.total_tokens,\n },\n request: { body },\n response: {\n ...getResponseMetadata(response),\n headers: responseHeaders,\n body: rawResponse,\n },\n warnings,\n };\n }\n\n async doStream(\n options: Parameters<LanguageModelV2['doStream']>[0],\n ): Promise<Awaited<ReturnType<LanguageModelV2['doStream']>>> {\n const { args, warnings } = await this.getArgs(options);\n const body = { ...args, stream: true };\n\n const { responseHeaders, value: response } = await postJsonToApi({\n url: `${this.config.baseURL}/chat/completions`,\n headers: combineHeaders(this.config.headers(), options.headers),\n body,\n failedResponseHandler: mistralFailedResponseHandler,\n successfulResponseHandler: createEventSourceResponseHandler(\n mistralChatChunkSchema,\n ),\n abortSignal: options.abortSignal,\n fetch: this.config.fetch,\n });\n\n let finishReason: LanguageModelV2FinishReason = 'unknown';\n const usage: LanguageModelV2Usage = {\n inputTokens: undefined,\n outputTokens: undefined,\n totalTokens: undefined,\n };\n\n let isFirstChunk = true;\n let activeText = false;\n let activeReasoningId: string | null = null;\n\n const generateId = this.generateId;\n\n return {\n stream: response.pipeThrough(\n new TransformStream<\n ParseResult<z.infer<typeof mistralChatChunkSchema>>,\n LanguageModelV2StreamPart\n >({\n start(controller) {\n controller.enqueue({ type: 'stream-start', warnings });\n },\n\n transform(chunk, controller) {\n // Emit raw chunk if requested (before anything else)\n if (options.includeRawChunks) {\n controller.enqueue({ type: 'raw', rawValue: chunk.rawValue });\n }\n\n if (!chunk.success) {\n controller.enqueue({ type: 'error', error: chunk.error });\n return;\n }\n\n const value = chunk.value;\n\n if (isFirstChunk) {\n isFirstChunk = false;\n\n controller.enqueue({\n type: 'response-metadata',\n ...getResponseMetadata(value),\n });\n }\n\n if (value.usage != null) {\n usage.inputTokens = value.usage.prompt_tokens;\n usage.outputTokens = value.usage.completion_tokens;\n usage.totalTokens = value.usage.total_tokens;\n }\n\n const choice = value.choices[0];\n const delta = choice.delta;\n\n const textContent = extractTextContent(delta.content);\n\n if (delta.content != null && Array.isArray(delta.content)) {\n for (const part of delta.content) {\n if (part.type === 'thinking') {\n const reasoningDelta = extractReasoningContent(part.thinking);\n if (reasoningDelta.length > 0) {\n if (activeReasoningId == null) {\n // end any active text before starting reasoning\n if (activeText) {\n controller.enqueue({ type: 'text-end', id: '0' });\n activeText = false;\n }\n\n activeReasoningId = generateId();\n controller.enqueue({\n type: 'reasoning-start',\n id: activeReasoningId,\n });\n }\n controller.enqueue({\n type: 'reasoning-delta',\n id: activeReasoningId,\n delta: reasoningDelta,\n });\n }\n }\n }\n }\n\n if (textContent != null && textContent.length > 0) {\n if (!activeText) {\n // if we were in reasoning mode, end it before starting text\n if (activeReasoningId != null) {\n 
controller.enqueue({\n type: 'reasoning-end',\n id: activeReasoningId,\n });\n activeReasoningId = null;\n }\n controller.enqueue({ type: 'text-start', id: '0' });\n activeText = true;\n }\n\n controller.enqueue({\n type: 'text-delta',\n id: '0',\n delta: textContent,\n });\n }\n\n if (delta?.tool_calls != null) {\n for (const toolCall of delta.tool_calls) {\n const toolCallId = toolCall.id;\n const toolName = toolCall.function.name;\n const input = toolCall.function.arguments;\n\n controller.enqueue({\n type: 'tool-input-start',\n id: toolCallId,\n toolName,\n });\n\n controller.enqueue({\n type: 'tool-input-delta',\n id: toolCallId,\n delta: input,\n });\n\n controller.enqueue({\n type: 'tool-input-end',\n id: toolCallId,\n });\n\n controller.enqueue({\n type: 'tool-call',\n toolCallId,\n toolName,\n input,\n });\n }\n }\n\n if (choice.finish_reason != null) {\n finishReason = mapMistralFinishReason(choice.finish_reason);\n }\n },\n\n flush(controller) {\n if (activeReasoningId != null) {\n controller.enqueue({\n type: 'reasoning-end',\n id: activeReasoningId,\n });\n }\n if (activeText) {\n controller.enqueue({ type: 'text-end', id: '0' });\n }\n\n controller.enqueue({\n type: 'finish',\n finishReason,\n usage,\n });\n },\n }),\n ),\n request: { body },\n response: { headers: responseHeaders },\n };\n }\n}\n\nfunction extractReasoningContent(\n thinking: Array<{ type: string; text: string }>,\n) {\n return thinking\n .filter(chunk => chunk.type === 'text')\n .map(chunk => chunk.text)\n .join('');\n}\n\nfunction extractTextContent(content: z.infer<typeof mistralContentSchema>) {\n if (typeof content === 'string') {\n return content;\n }\n\n if (content == null) {\n return undefined;\n }\n\n const textContent: string[] = [];\n\n for (const chunk of content) {\n const { type } = chunk;\n\n switch (type) {\n case 'text':\n textContent.push(chunk.text);\n break;\n case 'thinking':\n case 'image_url':\n case 'reference':\n // thinking, image content, and reference content are currently ignored\n break;\n default: {\n const _exhaustiveCheck: never = type;\n throw new Error(`Unsupported type: ${_exhaustiveCheck}`);\n }\n }\n }\n\n return textContent.length ? 
textContent.join('') : undefined;\n}\n\nconst mistralContentSchema = z\n .union([\n z.string(),\n z.array(\n z.discriminatedUnion('type', [\n z.object({\n type: z.literal('text'),\n text: z.string(),\n }),\n z.object({\n type: z.literal('image_url'),\n image_url: z.union([\n z.string(),\n z.object({\n url: z.string(),\n detail: z.string().nullable(),\n }),\n ]),\n }),\n z.object({\n type: z.literal('reference'),\n reference_ids: z.array(z.number()),\n }),\n z.object({\n type: z.literal('thinking'),\n thinking: z.array(\n z.object({\n type: z.literal('text'),\n text: z.string(),\n }),\n ),\n }),\n ]),\n ),\n ])\n .nullish();\n\nconst mistralUsageSchema = z.object({\n prompt_tokens: z.number(),\n completion_tokens: z.number(),\n total_tokens: z.number(),\n});\n\n// limited version of the schema, focussed on what is needed for the implementation\n// this approach limits breakages when the API changes and increases efficiency\nconst mistralChatResponseSchema = z.object({\n id: z.string().nullish(),\n created: z.number().nullish(),\n model: z.string().nullish(),\n choices: z.array(\n z.object({\n message: z.object({\n role: z.literal('assistant'),\n content: mistralContentSchema,\n tool_calls: z\n .array(\n z.object({\n id: z.string(),\n function: z.object({ name: z.string(), arguments: z.string() }),\n }),\n )\n .nullish(),\n }),\n index: z.number(),\n finish_reason: z.string().nullish(),\n }),\n ),\n object: z.literal('chat.completion'),\n usage: mistralUsageSchema,\n});\n\n// limited version of the schema, focussed on what is needed for the implementation\n// this approach limits breakages when the API changes and increases efficiency\nconst mistralChatChunkSchema = z.object({\n id: z.string().nullish(),\n created: z.number().nullish(),\n model: z.string().nullish(),\n choices: z.array(\n z.object({\n delta: z.object({\n role: z.enum(['assistant']).optional(),\n content: mistralContentSchema,\n tool_calls: z\n .array(\n z.object({\n id: z.string(),\n function: z.object({ name: z.string(), arguments: z.string() }),\n }),\n )\n .nullish(),\n }),\n finish_reason: z.string().nullish(),\n index: z.number(),\n }),\n ),\n usage: mistralUsageSchema.nullish(),\n});\n","import {\n LanguageModelV2Prompt,\n UnsupportedFunctionalityError,\n} from '@ai-sdk/provider';\nimport { MistralPrompt } from './mistral-chat-prompt';\nimport { convertToBase64 } from '@ai-sdk/provider-utils';\n\nexport function convertToMistralChatMessages(\n prompt: LanguageModelV2Prompt,\n): MistralPrompt {\n const messages: MistralPrompt = [];\n\n for (let i = 0; i < prompt.length; i++) {\n const { role, content } = prompt[i];\n const isLastMessage = i === prompt.length - 1;\n\n switch (role) {\n case 'system': {\n messages.push({ role: 'system', content });\n break;\n }\n\n case 'user': {\n messages.push({\n role: 'user',\n content: content.map(part => {\n switch (part.type) {\n case 'text': {\n return { type: 'text', text: part.text };\n }\n\n case 'file': {\n if (part.mediaType.startsWith('image/')) {\n const mediaType =\n part.mediaType === 'image/*'\n ? 'image/jpeg'\n : part.mediaType;\n\n return {\n type: 'image_url',\n image_url:\n part.data instanceof URL\n ? 
part.data.toString()\n : `data:${mediaType};base64,${convertToBase64(part.data)}`,\n };\n } else if (part.mediaType === 'application/pdf') {\n return {\n type: 'document_url',\n document_url: part.data.toString(),\n };\n } else {\n throw new UnsupportedFunctionalityError({\n functionality:\n 'Only images and PDF file parts are supported',\n });\n }\n }\n }\n }),\n });\n break;\n }\n\n case 'assistant': {\n let text = '';\n const toolCalls: Array<{\n id: string;\n type: 'function';\n function: { name: string; arguments: string };\n }> = [];\n\n for (const part of content) {\n switch (part.type) {\n case 'text': {\n text += part.text;\n break;\n }\n case 'tool-call': {\n toolCalls.push({\n id: part.toolCallId,\n type: 'function',\n function: {\n name: part.toolName,\n arguments: JSON.stringify(part.input),\n },\n });\n break;\n }\n case 'reasoning': {\n text += part.text;\n break;\n }\n default: {\n throw new Error(\n `Unsupported content type in assistant message: ${part.type}`,\n );\n }\n }\n }\n\n messages.push({\n role: 'assistant',\n content: text,\n prefix: isLastMessage ? true : undefined,\n tool_calls: toolCalls.length > 0 ? toolCalls : undefined,\n });\n\n break;\n }\n case 'tool': {\n for (const toolResponse of content) {\n const output = toolResponse.output;\n\n let contentValue: string;\n switch (output.type) {\n case 'text':\n case 'error-text':\n contentValue = output.value;\n break;\n case 'content':\n case 'json':\n case 'error-json':\n contentValue = JSON.stringify(output.value);\n break;\n }\n\n messages.push({\n role: 'tool',\n name: toolResponse.toolName,\n tool_call_id: toolResponse.toolCallId,\n content: contentValue,\n });\n }\n break;\n }\n default: {\n const _exhaustiveCheck: never = role;\n throw new Error(`Unsupported role: ${_exhaustiveCheck}`);\n }\n }\n }\n\n return messages;\n}\n","export function getResponseMetadata({\n id,\n model,\n created,\n}: {\n id?: string | undefined | null;\n created?: number | undefined | null;\n model?: string | undefined | null;\n}) {\n return {\n id: id ?? undefined,\n modelId: model ?? undefined,\n timestamp: created != null ? 
new Date(created * 1000) : undefined,\n };\n}\n","import { LanguageModelV2FinishReason } from '@ai-sdk/provider';\n\nexport function mapMistralFinishReason(\n finishReason: string | null | undefined,\n): LanguageModelV2FinishReason {\n switch (finishReason) {\n case 'stop':\n return 'stop';\n case 'length':\n case 'model_length':\n return 'length';\n case 'tool_calls':\n return 'tool-calls';\n default:\n return 'unknown';\n }\n}\n","import { z } from 'zod/v4';\n\n// https://docs.mistral.ai/getting-started/models/models_overview/\nexport type MistralChatModelId =\n // premier\n | 'ministral-3b-latest'\n | 'ministral-8b-latest'\n | 'mistral-large-latest'\n | 'mistral-medium-latest'\n | 'mistral-medium-2508'\n | 'mistral-medium-2505'\n | 'mistral-small-latest'\n | 'pixtral-large-latest'\n // reasoning models\n | 'magistral-small-2507'\n | 'magistral-medium-2507'\n | 'magistral-small-2506'\n | 'magistral-medium-2506'\n // free\n | 'pixtral-12b-2409'\n // legacy\n | 'open-mistral-7b'\n | 'open-mixtral-8x7b'\n | 'open-mixtral-8x22b'\n | (string & {});\n\nexport const mistralProviderOptions = z.object({\n /**\nWhether to inject a safety prompt before all conversations.\n\nDefaults to `false`.\n */\n safePrompt: z.boolean().optional(),\n\n documentImageLimit: z.number().optional(),\n documentPageLimit: z.number().optional(),\n});\n\nexport type MistralProviderOptions = z.infer<typeof mistralProviderOptions>;\n","import { createJsonErrorResponseHandler } from '@ai-sdk/provider-utils';\nimport { z } from 'zod/v4';\n\nconst mistralErrorDataSchema = z.object({\n object: z.literal('error'),\n message: z.string(),\n type: z.string(),\n param: z.string().nullable(),\n code: z.string().nullable(),\n});\n\nexport type MistralErrorData = z.infer<typeof mistralErrorDataSchema>;\n\nexport const mistralFailedResponseHandler = createJsonErrorResponseHandler({\n errorSchema: mistralErrorDataSchema,\n errorToMessage: data => data.message,\n});\n","import {\n LanguageModelV2CallOptions,\n LanguageModelV2CallWarning,\n UnsupportedFunctionalityError,\n} from '@ai-sdk/provider';\nimport { MistralToolChoice } from './mistral-chat-prompt';\n\nexport function prepareTools({\n tools,\n toolChoice,\n}: {\n tools: LanguageModelV2CallOptions['tools'];\n toolChoice?: LanguageModelV2CallOptions['toolChoice'];\n}): {\n tools:\n | Array<{\n type: 'function';\n function: {\n name: string;\n description: string | undefined;\n parameters: unknown;\n };\n }>\n | undefined;\n toolChoice: MistralToolChoice | undefined;\n toolWarnings: LanguageModelV2CallWarning[];\n} {\n // when the tools array is empty, change it to undefined to prevent errors:\n tools = tools?.length ? 
tools : undefined;\n\n const toolWarnings: LanguageModelV2CallWarning[] = [];\n\n if (tools == null) {\n return { tools: undefined, toolChoice: undefined, toolWarnings };\n }\n\n const mistralTools: Array<{\n type: 'function';\n function: {\n name: string;\n description: string | undefined;\n parameters: unknown;\n };\n }> = [];\n\n for (const tool of tools) {\n if (tool.type === 'provider-defined') {\n toolWarnings.push({ type: 'unsupported-tool', tool });\n } else {\n mistralTools.push({\n type: 'function',\n function: {\n name: tool.name,\n description: tool.description,\n parameters: tool.inputSchema,\n },\n });\n }\n }\n\n if (toolChoice == null) {\n return { tools: mistralTools, toolChoice: undefined, toolWarnings };\n }\n\n const type = toolChoice.type;\n\n switch (type) {\n case 'auto':\n case 'none':\n return { tools: mistralTools, toolChoice: type, toolWarnings };\n case 'required':\n return { tools: mistralTools, toolChoice: 'any', toolWarnings };\n\n // mistral does not support tool mode directly,\n // so we filter the tools and force the tool choice through 'any'\n case 'tool':\n return {\n tools: mistralTools.filter(\n tool => tool.function.name === toolChoice.toolName,\n ),\n toolChoice: 'any',\n toolWarnings,\n };\n default: {\n const _exhaustiveCheck: never = type;\n throw new UnsupportedFunctionalityError({\n functionality: `tool choice type: ${_exhaustiveCheck}`,\n });\n }\n }\n}\n","import {\n EmbeddingModelV2,\n TooManyEmbeddingValuesForCallError,\n} from '@ai-sdk/provider';\nimport {\n combineHeaders,\n createJsonResponseHandler,\n FetchFunction,\n postJsonToApi,\n} from '@ai-sdk/provider-utils';\nimport { z } from 'zod/v4';\nimport { MistralEmbeddingModelId } from './mistral-embedding-options';\nimport { mistralFailedResponseHandler } from './mistral-error';\n\ntype MistralEmbeddingConfig = {\n provider: string;\n baseURL: string;\n headers: () => Record<string, string | undefined>;\n fetch?: FetchFunction;\n};\n\nexport class MistralEmbeddingModel implements EmbeddingModelV2<string> {\n readonly specificationVersion = 'v2';\n readonly modelId: MistralEmbeddingModelId;\n readonly maxEmbeddingsPerCall = 32;\n readonly supportsParallelCalls = false;\n\n private readonly config: MistralEmbeddingConfig;\n\n get provider(): string {\n return this.config.provider;\n }\n\n constructor(\n modelId: MistralEmbeddingModelId,\n config: MistralEmbeddingConfig,\n ) {\n this.modelId = modelId;\n this.config = config;\n }\n\n async doEmbed({\n values,\n abortSignal,\n headers,\n }: Parameters<EmbeddingModelV2<string>['doEmbed']>[0]): Promise<\n Awaited<ReturnType<EmbeddingModelV2<string>['doEmbed']>>\n > {\n if (values.length > this.maxEmbeddingsPerCall) {\n throw new TooManyEmbeddingValuesForCallError({\n provider: this.provider,\n modelId: this.modelId,\n maxEmbeddingsPerCall: this.maxEmbeddingsPerCall,\n values,\n });\n }\n\n const {\n responseHeaders,\n value: response,\n rawValue,\n } = await postJsonToApi({\n url: `${this.config.baseURL}/embeddings`,\n headers: combineHeaders(this.config.headers(), headers),\n body: {\n model: this.modelId,\n input: values,\n encoding_format: 'float',\n },\n failedResponseHandler: mistralFailedResponseHandler,\n successfulResponseHandler: createJsonResponseHandler(\n MistralTextEmbeddingResponseSchema,\n ),\n abortSignal,\n fetch: this.config.fetch,\n });\n\n return {\n embeddings: response.data.map(item => item.embedding),\n usage: response.usage\n ? 
{ tokens: response.usage.prompt_tokens }\n : undefined,\n response: { headers: responseHeaders, body: rawValue },\n };\n }\n}\n\n// minimal version of the schema, focussed on what is needed for the implementation\n// this approach limits breakages when the API changes and increases efficiency\nconst MistralTextEmbeddingResponseSchema = z.object({\n data: z.array(z.object({ embedding: z.array(z.number()) })),\n usage: z.object({ prompt_tokens: z.number() }).nullish(),\n});\n"],"mappings":";AAAA;AAAA,EAGE;AAAA,OAEK;AACP;AAAA,EAEE;AAAA,EACA;AAAA,OACK;;;ACFP;AAAA,EACE;AAAA,EACA;AAAA,EACA;AAAA,EAEA;AAAA,EACA;AAAA,EAEA;AAAA,OACK;AACP,SAAS,KAAAA,UAAS;;;AClBlB;AAAA,EAEE;AAAA,OACK;AAEP,SAAS,uBAAuB;AAEzB,SAAS,6BACd,QACe;AACf,QAAM,WAA0B,CAAC;AAEjC,WAAS,IAAI,GAAG,IAAI,OAAO,QAAQ,KAAK;AACtC,UAAM,EAAE,MAAM,QAAQ,IAAI,OAAO,CAAC;AAClC,UAAM,gBAAgB,MAAM,OAAO,SAAS;AAE5C,YAAQ,MAAM;AAAA,MACZ,KAAK,UAAU;AACb,iBAAS,KAAK,EAAE,MAAM,UAAU,QAAQ,CAAC;AACzC;AAAA,MACF;AAAA,MAEA,KAAK,QAAQ;AACX,iBAAS,KAAK;AAAA,UACZ,MAAM;AAAA,UACN,SAAS,QAAQ,IAAI,UAAQ;AAC3B,oBAAQ,KAAK,MAAM;AAAA,cACjB,KAAK,QAAQ;AACX,uBAAO,EAAE,MAAM,QAAQ,MAAM,KAAK,KAAK;AAAA,cACzC;AAAA,cAEA,KAAK,QAAQ;AACX,oBAAI,KAAK,UAAU,WAAW,QAAQ,GAAG;AACvC,wBAAM,YACJ,KAAK,cAAc,YACf,eACA,KAAK;AAEX,yBAAO;AAAA,oBACL,MAAM;AAAA,oBACN,WACE,KAAK,gBAAgB,MACjB,KAAK,KAAK,SAAS,IACnB,QAAQ,SAAS,WAAW,gBAAgB,KAAK,IAAI,CAAC;AAAA,kBAC9D;AAAA,gBACF,WAAW,KAAK,cAAc,mBAAmB;AAC/C,yBAAO;AAAA,oBACL,MAAM;AAAA,oBACN,cAAc,KAAK,KAAK,SAAS;AAAA,kBACnC;AAAA,gBACF,OAAO;AACL,wBAAM,IAAI,8BAA8B;AAAA,oBACtC,eACE;AAAA,kBACJ,CAAC;AAAA,gBACH;AAAA,cACF;AAAA,YACF;AAAA,UACF,CAAC;AAAA,QACH,CAAC;AACD;AAAA,MACF;AAAA,MAEA,KAAK,aAAa;AAChB,YAAI,OAAO;AACX,cAAM,YAID,CAAC;AAEN,mBAAW,QAAQ,SAAS;AAC1B,kBAAQ,KAAK,MAAM;AAAA,YACjB,KAAK,QAAQ;AACX,sBAAQ,KAAK;AACb;AAAA,YACF;AAAA,YACA,KAAK,aAAa;AAChB,wBAAU,KAAK;AAAA,gBACb,IAAI,KAAK;AAAA,gBACT,MAAM;AAAA,gBACN,UAAU;AAAA,kBACR,MAAM,KAAK;AAAA,kBACX,WAAW,KAAK,UAAU,KAAK,KAAK;AAAA,gBACtC;AAAA,cACF,CAAC;AACD;AAAA,YACF;AAAA,YACA,KAAK,aAAa;AAChB,sBAAQ,KAAK;AACb;AAAA,YACF;AAAA,YACA,SAAS;AACP,oBAAM,IAAI;AAAA,gBACR,kDAAkD,KAAK,IAAI;AAAA,cAC7D;AAAA,YACF;AAAA,UACF;AAAA,QACF;AAEA,iBAAS,KAAK;AAAA,UACZ,MAAM;AAAA,UACN,SAAS;AAAA,UACT,QAAQ,gBAAgB,OAAO;AAAA,UAC/B,YAAY,UAAU,SAAS,IAAI,YAAY;AAAA,QACjD,CAAC;AAED;AAAA,MACF;AAAA,MACA,KAAK,QAAQ;AACX,mBAAW,gBAAgB,SAAS;AAClC,gBAAM,SAAS,aAAa;AAE5B,cAAI;AACJ,kBAAQ,OAAO,MAAM;AAAA,YACnB,KAAK;AAAA,YACL,KAAK;AACH,6BAAe,OAAO;AACtB;AAAA,YACF,KAAK;AAAA,YACL,KAAK;AAAA,YACL,KAAK;AACH,6BAAe,KAAK,UAAU,OAAO,KAAK;AAC1C;AAAA,UACJ;AAEA,mBAAS,KAAK;AAAA,YACZ,MAAM;AAAA,YACN,MAAM,aAAa;AAAA,YACnB,cAAc,aAAa;AAAA,YAC3B,SAAS;AAAA,UACX,CAAC;AAAA,QACH;AACA;AAAA,MACF;AAAA,MACA,SAAS;AACP,cAAM,mBAA0B;AAChC,cAAM,IAAI,MAAM,qBAAqB,gBAAgB,EAAE;AAAA,MACzD;AAAA,IACF;AAAA,EACF;AAEA,SAAO;AACT;;;AC/IO,SAAS,oBAAoB;AAAA,EAClC;AAAA,EACA;AAAA,EACA;AACF,GAIG;AACD,SAAO;AAAA,IACL,IAAI,kBAAM;AAAA,IACV,SAAS,wBAAS;AAAA,IAClB,WAAW,WAAW,OAAO,IAAI,KAAK,UAAU,GAAI,IAAI;AAAA,EAC1D;AACF;;;ACZO,SAAS,uBACd,cAC6B;AAC7B,UAAQ,cAAc;AAAA,IACpB,KAAK;AACH,aAAO;AAAA,IACT,KAAK;AAAA,IACL,KAAK;AACH,aAAO;AAAA,IACT,KAAK;AACH,aAAO;AAAA,IACT;AACE,aAAO;AAAA,EACX;AACF;;;AChBA,SAAS,SAAS;AA0BX,IAAM,yBAAyB,EAAE,OAAO;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,EAM7C,YAAY,EAAE,QAAQ,EAAE,SAAS;AAAA,EAEjC,oBAAoB,EAAE,OAAO,EAAE,SAAS;AAAA,EACxC,mBAAmB,EAAE,OAAO,EAAE,SAAS;AACzC,CAAC;;;ACpCD,SAAS,sCAAsC;AAC/C,SAAS,KAAAC,UAAS;AAElB,IAAM,yBAAyBA,GAAE,OAAO;AAAA,EACtC,QAAQA,GAAE,QAAQ,OAAO;AAAA,EACzB,SAASA,GAAE,OAAO;AAAA,EAClB,MAAMA,GAAE,OAAO;AAAA,EACf,OAAOA,GAAE,OAAO,EAAE,SAAS;AAAA,EAC3B,MAAMA,GAAE,OAAO,EAAE,SAAS;AAC5B,CAAC;AAIM,IAAM,+BAA+B,+BAA+B;AAAA,EACzE,aAAa;AAAA,EAC
b,gBAAgB,UAAQ,KAAK;AAC/B,CAAC;;;AChBD;AAAA,EAGE,iCAAAC;AAAA,OACK;AAGA,SAAS,aAAa;AAAA,EAC3B;AAAA,EACA;AACF,GAgBE;AAEA,WAAQ,+BAAO,UAAS,QAAQ;AAEhC,QAAM,eAA6C,CAAC;AAEpD,MAAI,SAAS,MAAM;AACjB,WAAO,EAAE,OAAO,QAAW,YAAY,QAAW,aAAa;AAAA,EACjE;AAEA,QAAM,eAOD,CAAC;AAEN,aAAW,QAAQ,OAAO;AACxB,QAAI,KAAK,SAAS,oBAAoB;AACpC,mBAAa,KAAK,EAAE,MAAM,oBAAoB,KAAK,CAAC;AAAA,IACtD,OAAO;AACL,mBAAa,KAAK;AAAA,QAChB,MAAM;AAAA,QACN,UAAU;AAAA,UACR,MAAM,KAAK;AAAA,UACX,aAAa,KAAK;AAAA,UAClB,YAAY,KAAK;AAAA,QACnB;AAAA,MACF,CAAC;AAAA,IACH;AAAA,EACF;AAEA,MAAI,cAAc,MAAM;AACtB,WAAO,EAAE,OAAO,cAAc,YAAY,QAAW,aAAa;AAAA,EACpE;AAEA,QAAM,OAAO,WAAW;AAExB,UAAQ,MAAM;AAAA,IACZ,KAAK;AAAA,IACL,KAAK;AACH,aAAO,EAAE,OAAO,cAAc,YAAY,MAAM,aAAa;AAAA,IAC/D,KAAK;AACH,aAAO,EAAE,OAAO,cAAc,YAAY,OAAO,aAAa;AAAA,IAIhE,KAAK;AACH,aAAO;AAAA,QACL,OAAO,aAAa;AAAA,UAClB,UAAQ,KAAK,SAAS,SAAS,WAAW;AAAA,QAC5C;AAAA,QACA,YAAY;AAAA,QACZ;AAAA,MACF;AAAA,IACF,SAAS;AACP,YAAM,mBAA0B;AAChC,YAAM,IAAIA,+BAA8B;AAAA,QACtC,eAAe,qBAAqB,gBAAgB;AAAA,MACtD,CAAC;AAAA,IACH;AAAA,EACF;AACF;;;ANrDO,IAAM,2BAAN,MAA0D;AAAA,EAQ/D,YAAY,SAA6B,QAA2B;AAPpE,SAAS,uBAAuB;AAiBhC,SAAS,gBAA0C;AAAA,MACjD,mBAAmB,CAAC,gBAAgB;AAAA,IACtC;AAzDF;AA8CI,SAAK,UAAU;AACf,SAAK,SAAS;AACd,SAAK,cAAa,YAAO,eAAP,YAAqB;AAAA,EACzC;AAAA,EAEA,IAAI,WAAmB;AACrB,WAAO,KAAK,OAAO;AAAA,EACrB;AAAA,EAMA,MAAc,QAAQ;AAAA,IACpB;AAAA,IACA;AAAA,IACA;AAAA,IACA;AAAA,IACA;AAAA,IACA;AAAA,IACA;AAAA,IACA;AAAA,IACA;AAAA,IACA;AAAA,IACA;AAAA,IACA;AAAA,IACA;AAAA,EACF,GAAiD;AAzEnD;AA0EI,UAAM,WAAyC,CAAC;AAEhD,UAAM,WACH,WAAM,qBAAqB;AAAA,MAC1B,UAAU;AAAA,MACV;AAAA,MACA,QAAQ;AAAA,IACV,CAAC,MAJA,YAIM,CAAC;AAEV,QAAI,QAAQ,MAAM;AAChB,eAAS,KAAK;AAAA,QACZ,MAAM;AAAA,QACN,SAAS;AAAA,MACX,CAAC;AAAA,IACH;AAEA,QAAI,oBAAoB,MAAM;AAC5B,eAAS,KAAK;AAAA,QACZ,MAAM;AAAA,QACN,SAAS;AAAA,MACX,CAAC;AAAA,IACH;AAEA,QAAI,mBAAmB,MAAM;AAC3B,eAAS,KAAK;AAAA,QACZ,MAAM;AAAA,QACN,SAAS;AAAA,MACX,CAAC;AAAA,IACH;AAEA,QAAI,iBAAiB,MAAM;AACzB,eAAS,KAAK;AAAA,QACZ,MAAM;AAAA,QACN,SAAS;AAAA,MACX,CAAC;AAAA,IACH;AAEA,QACE,kBAAkB,QAClB,eAAe,SAAS,UACxB,eAAe,UAAU,MACzB;AACA,eAAS,KAAK;AAAA,QACZ,MAAM;AAAA,QACN,SAAS;AAAA,QACT,SAAS;AAAA,MACX,CAAC;AAAA,IACH;AAEA,UAAM,WAAW;AAAA;AAAA,MAEf,OAAO,KAAK;AAAA;AAAA,MAGZ,aAAa,QAAQ;AAAA;AAAA,MAGrB,YAAY;AAAA,MACZ;AAAA,MACA,OAAO;AAAA,MACP,aAAa;AAAA;AAAA,MAGb,kBACE,iDAAgB,UAAS,SAAS,EAAE,MAAM,cAAc,IAAI;AAAA;AAAA,MAG9D,sBAAsB,QAAQ;AAAA,MAC9B,qBAAqB,QAAQ;AAAA;AAAA,MAG7B,UAAU,6BAA6B,MAAM;AAAA,IAC/C;AAEA,UAAM;AAAA,MACJ,OAAO;AAAA,MACP,YAAY;AAAA,MACZ;AAAA,IACF,IAAI,aAAa;AAAA,MACf;AAAA,MACA;AAAA,IACF,CAAC;AAED,WAAO;AAAA,MACL,MAAM;AAAA,QACJ,GAAG;AAAA,QACH,OAAO;AAAA,QACP,aAAa;AAAA,MACf;AAAA,MACA,UAAU,CAAC,GAAG,UAAU,GAAG,YAAY;AAAA,IACzC;AAAA,EACF;AAAA,EAEA,MAAM,WACJ,SAC6D;AAC7D,UAAM,EAAE,MAAM,MAAM,SAAS,IAAI,MAAM,KAAK,QAAQ,OAAO;AAE3D,UAAM;AAAA,MACJ;AAAA,MACA,OAAO;AAAA,MACP,UAAU;AAAA,IACZ,IAAI,MAAM,cAAc;AAAA,MACtB,KAAK,GAAG,KAAK,OAAO,OAAO;AAAA,MAC3B,SAAS,eAAe,KAAK,OAAO,QAAQ,GAAG,QAAQ,OAAO;AAAA,MAC9D;AAAA,MACA,uBAAuB;AAAA,MACvB,2BAA2B;AAAA,QACzB;AAAA,MACF;AAAA,MACA,aAAa,QAAQ;AAAA,MACrB,OAAO,KAAK,OAAO;AAAA,IACrB,CAAC;AAED,UAAM,SAAS,SAAS,QAAQ,CAAC;AACjC,UAAM,UAAyC,CAAC;AAGhD,QACE,OAAO,QAAQ,WAAW,QAC1B,MAAM,QAAQ,OAAO,QAAQ,OAAO,GACpC;AACA,iBAAW,QAAQ,OAAO,QAAQ,SAAS;AACzC,YAAI,KAAK,SAAS,YAAY;AAC5B,gBAAM,gBAAgB,wBAAwB,KAAK,QAAQ;AAC3D,cAAI,cAAc,SAAS,GAAG;AAC5B,oBAAQ,KAAK,EAAE,MAAM,aAAa,MAAM,cAAc,CAAC;AAAA,UACzD;AAAA,QACF,WAAW,KAAK,SAAS,QAAQ;AAC/B,cAAI,KAAK,KAAK,SAAS,GAAG;AACxB,oBAAQ,KAAK,EAAE,MAAM,QAAQ,MAAM,KAAK,KAAK,CAAC;AAAA,UAChD;AAAA,QACF;AAAA,MACF;AAAA,IACF,OAAO;AAEL,YAAM,OAAO,mBAAmB,OAAO,QAAQ,OAAO;AACtD,UAAI,QAAQ,QAAQ,KAAK,SAAS,GAAG;AACnC,gBAAQ,KAAK,EAAE,MAAM,QAAQ,KAAK,CAAC;AAAA,MACrC;AAAA,IA
CF;AAOA,QAAI,OAAO,QAAQ,cAAc,MAAM;AACrC,iBAAW,YAAY,OAAO,QAAQ,YAAY;AAChD,gBAAQ,KAAK;AAAA,UACX,MAAM;AAAA,UACN,YAAY,SAAS;AAAA,UACrB,UAAU,SAAS,SAAS;AAAA,UAC5B,OAAO,SAAS,SAAS;AAAA,QAC3B,CAAC;AAAA,MACH;AAAA,IACF;AAEA,WAAO;AAAA,MACL;AAAA,MACA,cAAc,uBAAuB,OAAO,aAAa;AAAA,MACzD,OAAO;AAAA,QACL,aAAa,SAAS,MAAM;AAAA,QAC5B,cAAc,SAAS,MAAM;AAAA,QAC7B,aAAa,SAAS,MAAM;AAAA,MAC9B;AAAA,MACA,SAAS,EAAE,KAAK;AAAA,MAChB,UAAU;AAAA,QACR,GAAG,oBAAoB,QAAQ;AAAA,QAC/B,SAAS;AAAA,QACT,MAAM;AAAA,MACR;AAAA,MACA;AAAA,IACF;AAAA,EACF;AAAA,EAEA,MAAM,SACJ,SAC2D;AAC3D,UAAM,EAAE,MAAM,SAAS,IAAI,MAAM,KAAK,QAAQ,OAAO;AACrD,UAAM,OAAO,EAAE,GAAG,MAAM,QAAQ,KAAK;AAErC,UAAM,EAAE,iBAAiB,OAAO,SAAS,IAAI,MAAM,cAAc;AAAA,MAC/D,KAAK,GAAG,KAAK,OAAO,OAAO;AAAA,MAC3B,SAAS,eAAe,KAAK,OAAO,QAAQ,GAAG,QAAQ,OAAO;AAAA,MAC9D;AAAA,MACA,uBAAuB;AAAA,MACvB,2BAA2B;AAAA,QACzB;AAAA,MACF;AAAA,MACA,aAAa,QAAQ;AAAA,MACrB,OAAO,KAAK,OAAO;AAAA,IACrB,CAAC;AAED,QAAI,eAA4C;AAChD,UAAM,QAA8B;AAAA,MAClC,aAAa;AAAA,MACb,cAAc;AAAA,MACd,aAAa;AAAA,IACf;AAEA,QAAI,eAAe;AACnB,QAAI,aAAa;AACjB,QAAI,oBAAmC;AAEvC,UAAMC,cAAa,KAAK;AAExB,WAAO;AAAA,MACL,QAAQ,SAAS;AAAA,QACf,IAAI,gBAGF;AAAA,UACA,MAAM,YAAY;AAChB,uBAAW,QAAQ,EAAE,MAAM,gBAAgB,SAAS,CAAC;AAAA,UACvD;AAAA,UAEA,UAAU,OAAO,YAAY;AAE3B,gBAAI,QAAQ,kBAAkB;AAC5B,yBAAW,QAAQ,EAAE,MAAM,OAAO,UAAU,MAAM,SAAS,CAAC;AAAA,YAC9D;AAEA,gBAAI,CAAC,MAAM,SAAS;AAClB,yBAAW,QAAQ,EAAE,MAAM,SAAS,OAAO,MAAM,MAAM,CAAC;AACxD;AAAA,YACF;AAEA,kBAAM,QAAQ,MAAM;AAEpB,gBAAI,cAAc;AAChB,6BAAe;AAEf,yBAAW,QAAQ;AAAA,gBACjB,MAAM;AAAA,gBACN,GAAG,oBAAoB,KAAK;AAAA,cAC9B,CAAC;AAAA,YACH;AAEA,gBAAI,MAAM,SAAS,MAAM;AACvB,oBAAM,cAAc,MAAM,MAAM;AAChC,oBAAM,eAAe,MAAM,MAAM;AACjC,oBAAM,cAAc,MAAM,MAAM;AAAA,YAClC;AAEA,kBAAM,SAAS,MAAM,QAAQ,CAAC;AAC9B,kBAAM,QAAQ,OAAO;AAErB,kBAAM,cAAc,mBAAmB,MAAM,OAAO;AAEpD,gBAAI,MAAM,WAAW,QAAQ,MAAM,QAAQ,MAAM,OAAO,GAAG;AACzD,yBAAW,QAAQ,MAAM,SAAS;AAChC,oBAAI,KAAK,SAAS,YAAY;AAC5B,wBAAM,iBAAiB,wBAAwB,KAAK,QAAQ;AAC5D,sBAAI,eAAe,SAAS,GAAG;AAC7B,wBAAI,qBAAqB,MAAM;AAE7B,0BAAI,YAAY;AACd,mCAAW,QAAQ,EAAE,MAAM,YAAY,IAAI,IAAI,CAAC;AAChD,qCAAa;AAAA,sBACf;AAEA,0CAAoBA,YAAW;AAC/B,iCAAW,QAAQ;AAAA,wBACjB,MAAM;AAAA,wBACN,IAAI;AAAA,sBACN,CAAC;AAAA,oBACH;AACA,+BAAW,QAAQ;AAAA,sBACjB,MAAM;AAAA,sBACN,IAAI;AAAA,sBACJ,OAAO;AAAA,oBACT,CAAC;AAAA,kBACH;AAAA,gBACF;AAAA,cACF;AAAA,YACF;AAEA,gBAAI,eAAe,QAAQ,YAAY,SAAS,GAAG;AACjD,kBAAI,CAAC,YAAY;AAEf,oBAAI,qBAAqB,MAAM;AAC7B,6BAAW,QAAQ;AAAA,oBACjB,MAAM;AAAA,oBACN,IAAI;AAAA,kBACN,CAAC;AACD,sCAAoB;AAAA,gBACtB;AACA,2BAAW,QAAQ,EAAE,MAAM,cAAc,IAAI,IAAI,CAAC;AAClD,6BAAa;AAAA,cACf;AAEA,yBAAW,QAAQ;AAAA,gBACjB,MAAM;AAAA,gBACN,IAAI;AAAA,gBACJ,OAAO;AAAA,cACT,CAAC;AAAA,YACH;AAEA,iBAAI,+BAAO,eAAc,MAAM;AAC7B,yBAAW,YAAY,MAAM,YAAY;AACvC,sBAAM,aAAa,SAAS;AAC5B,sBAAM,WAAW,SAAS,SAAS;AACnC,sBAAM,QAAQ,SAAS,SAAS;AAEhC,2BAAW,QAAQ;AAAA,kBACjB,MAAM;AAAA,kBACN,IAAI;AAAA,kBACJ;AAAA,gBACF,CAAC;AAED,2BAAW,QAAQ;AAAA,kBACjB,MAAM;AAAA,kBACN,IAAI;AAAA,kBACJ,OAAO;AAAA,gBACT,CAAC;AAED,2BAAW,QAAQ;AAAA,kBACjB,MAAM;AAAA,kBACN,IAAI;AAAA,gBACN,CAAC;AAED,2BAAW,QAAQ;AAAA,kBACjB,MAAM;AAAA,kBACN;AAAA,kBACA;AAAA,kBACA;AAAA,gBACF,CAAC;AAAA,cACH;AAAA,YACF;AAEA,gBAAI,OAAO,iBAAiB,MAAM;AAChC,6BAAe,uBAAuB,OAAO,aAAa;AAAA,YAC5D;AAAA,UACF;AAAA,UAEA,MAAM,YAAY;AAChB,gBAAI,qBAAqB,MAAM;AAC7B,yBAAW,QAAQ;AAAA,gBACjB,MAAM;AAAA,gBACN,IAAI;AAAA,cACN,CAAC;AAAA,YACH;AACA,gBAAI,YAAY;AACd,yBAAW,QAAQ,EAAE,MAAM,YAAY,IAAI,IAAI,CAAC;AAAA,YAClD;AAEA,uBAAW,QAAQ;AAAA,cACjB,MAAM;AAAA,cACN;AAAA,cACA;AAAA,YACF,CAAC;AAAA,UACH;AAAA,QACF,CAAC;AAAA,MACH;AAAA,MACA,SAAS,EAAE,KAAK;AAAA,MAChB,UAAU,EAAE,SAAS,gBAAgB;AAAA,IACvC;AAAA,EACF;AACF;AAEA,SAAS,wBACP,UACA;AACA,SAAO,SACJ,OAAO,WAAS,MAAM,SAAS,MAAM,EACrC,IAAI,WAAS,MAAM,IAAI,EACvB,KAAK,EAA
E;AACZ;AAEA,SAAS,mBAAmB,SAA+C;AACzE,MAAI,OAAO,YAAY,UAAU;AAC/B,WAAO;AAAA,EACT;AAEA,MAAI,WAAW,MAAM;AACnB,WAAO;AAAA,EACT;AAEA,QAAM,cAAwB,CAAC;AAE/B,aAAW,SAAS,SAAS;AAC3B,UAAM,EAAE,KAAK,IAAI;AAEjB,YAAQ,MAAM;AAAA,MACZ,KAAK;AACH,oBAAY,KAAK,MAAM,IAAI;AAC3B;AAAA,MACF,KAAK;AAAA,MACL,KAAK;AAAA,MACL,KAAK;AAEH;AAAA,MACF,SAAS;AACP,cAAM,mBAA0B;AAChC,cAAM,IAAI,MAAM,qBAAqB,gBAAgB,EAAE;AAAA,MACzD;AAAA,IACF;AAAA,EACF;AAEA,SAAO,YAAY,SAAS,YAAY,KAAK,EAAE,IAAI;AACrD;AAEA,IAAM,uBAAuBC,GAC1B,MAAM;AAAA,EACLA,GAAE,OAAO;AAAA,EACTA,GAAE;AAAA,IACAA,GAAE,mBAAmB,QAAQ;AAAA,MAC3BA,GAAE,OAAO;AAAA,QACP,MAAMA,GAAE,QAAQ,MAAM;AAAA,QACtB,MAAMA,GAAE,OAAO;AAAA,MACjB,CAAC;AAAA,MACDA,GAAE,OAAO;AAAA,QACP,MAAMA,GAAE,QAAQ,WAAW;AAAA,QAC3B,WAAWA,GAAE,MAAM;AAAA,UACjBA,GAAE,OAAO;AAAA,UACTA,GAAE,OAAO;AAAA,YACP,KAAKA,GAAE,OAAO;AAAA,YACd,QAAQA,GAAE,OAAO,EAAE,SAAS;AAAA,UAC9B,CAAC;AAAA,QACH,CAAC;AAAA,MACH,CAAC;AAAA,MACDA,GAAE,OAAO;AAAA,QACP,MAAMA,GAAE,QAAQ,WAAW;AAAA,QAC3B,eAAeA,GAAE,MAAMA,GAAE,OAAO,CAAC;AAAA,MACnC,CAAC;AAAA,MACDA,GAAE,OAAO;AAAA,QACP,MAAMA,GAAE,QAAQ,UAAU;AAAA,QAC1B,UAAUA,GAAE;AAAA,UACVA,GAAE,OAAO;AAAA,YACP,MAAMA,GAAE,QAAQ,MAAM;AAAA,YACtB,MAAMA,GAAE,OAAO;AAAA,UACjB,CAAC;AAAA,QACH;AAAA,MACF,CAAC;AAAA,IACH,CAAC;AAAA,EACH;AACF,CAAC,EACA,QAAQ;AAEX,IAAM,qBAAqBA,GAAE,OAAO;AAAA,EAClC,eAAeA,GAAE,OAAO;AAAA,EACxB,mBAAmBA,GAAE,OAAO;AAAA,EAC5B,cAAcA,GAAE,OAAO;AACzB,CAAC;AAID,IAAM,4BAA4BA,GAAE,OAAO;AAAA,EACzC,IAAIA,GAAE,OAAO,EAAE,QAAQ;AAAA,EACvB,SAASA,GAAE,OAAO,EAAE,QAAQ;AAAA,EAC5B,OAAOA,GAAE,OAAO,EAAE,QAAQ;AAAA,EAC1B,SAASA,GAAE;AAAA,IACTA,GAAE,OAAO;AAAA,MACP,SAASA,GAAE,OAAO;AAAA,QAChB,MAAMA,GAAE,QAAQ,WAAW;AAAA,QAC3B,SAAS;AAAA,QACT,YAAYA,GACT;AAAA,UACCA,GAAE,OAAO;AAAA,YACP,IAAIA,GAAE,OAAO;AAAA,YACb,UAAUA,GAAE,OAAO,EAAE,MAAMA,GAAE,OAAO,GAAG,WAAWA,GAAE,OAAO,EAAE,CAAC;AAAA,UAChE,CAAC;AAAA,QACH,EACC,QAAQ;AAAA,MACb,CAAC;AAAA,MACD,OAAOA,GAAE,OAAO;AAAA,MAChB,eAAeA,GAAE,OAAO,EAAE,QAAQ;AAAA,IACpC,CAAC;AAAA,EACH;AAAA,EACA,QAAQA,GAAE,QAAQ,iBAAiB;AAAA,EACnC,OAAO;AACT,CAAC;AAID,IAAM,yBAAyBA,GAAE,OAAO;AAAA,EACtC,IAAIA,GAAE,OAAO,EAAE,QAAQ;AAAA,EACvB,SAASA,GAAE,OAAO,EAAE,QAAQ;AAAA,EAC5B,OAAOA,GAAE,OAAO,EAAE,QAAQ;AAAA,EAC1B,SAASA,GAAE;AAAA,IACTA,GAAE,OAAO;AAAA,MACP,OAAOA,GAAE,OAAO;AAAA,QACd,MAAMA,GAAE,KAAK,CAAC,WAAW,CAAC,EAAE,SAAS;AAAA,QACrC,SAAS;AAAA,QACT,YAAYA,GACT;AAAA,UACCA,GAAE,OAAO;AAAA,YACP,IAAIA,GAAE,OAAO;AAAA,YACb,UAAUA,GAAE,OAAO,EAAE,MAAMA,GAAE,OAAO,GAAG,WAAWA,GAAE,OAAO,EAAE,CAAC;AAAA,UAChE,CAAC;AAAA,QACH,EACC,QAAQ;AAAA,MACb,CAAC;AAAA,MACD,eAAeA,GAAE,OAAO,EAAE,QAAQ;AAAA,MAClC,OAAOA,GAAE,OAAO;AAAA,IAClB,CAAC;AAAA,EACH;AAAA,EACA,OAAO,mBAAmB,QAAQ;AACpC,CAAC;;;AO7jBD;AAAA,EAEE;AAAA,OACK;AACP;AAAA,EACE,kBAAAC;AAAA,EACA,6BAAAC;AAAA,EAEA,iBAAAC;AAAA,OACK;AACP,SAAS,KAAAC,UAAS;AAWX,IAAM,wBAAN,MAAgE;AAAA,EAYrE,YACE,SACA,QACA;AAdF,SAAS,uBAAuB;AAEhC,SAAS,uBAAuB;AAChC,SAAS,wBAAwB;AAY/B,SAAK,UAAU;AACf,SAAK,SAAS;AAAA,EAChB;AAAA,EAVA,IAAI,WAAmB;AACrB,WAAO,KAAK,OAAO;AAAA,EACrB;AAAA,EAUA,MAAM,QAAQ;AAAA,IACZ;AAAA,IACA;AAAA,IACA;AAAA,EACF,GAEE;AACA,QAAI,OAAO,SAAS,KAAK,sBAAsB;AAC7C,YAAM,IAAI,mCAAmC;AAAA,QAC3C,UAAU,KAAK;AAAA,QACf,SAAS,KAAK;AAAA,QACd,sBAAsB,KAAK;AAAA,QAC3B;AAAA,MACF,CAAC;AAAA,IACH;AAEA,UAAM;AAAA,MACJ;AAAA,MACA,OAAO;AAAA,MACP;AAAA,IACF,IAAI,MAAMC,eAAc;AAAA,MACtB,KAAK,GAAG,KAAK,OAAO,OAAO;AAAA,MAC3B,SAASC,gBAAe,KAAK,OAAO,QAAQ,GAAG,OAAO;AAAA,MACtD,MAAM;AAAA,QACJ,OAAO,KAAK;AAAA,QACZ,OAAO;AAAA,QACP,iBAAiB;AAAA,MACnB;AAAA,MACA,uBAAuB;AAAA,MACvB,2BAA2BC;AAAA,QACzB;AAAA,MACF;AAAA,MACA;AAAA,MACA,OAAO,KAAK,OAAO;AAAA,IACrB,CAAC;AAED,WAAO;AAAA,MACL,YAAY,SAAS,KAAK,IAAI,UAAQ,KAAK,SAAS;AAAA,MACpD,OAAO,SAAS,QACZ,EAAE,QAAQ,SAAS,MAAM,cAAc,IACvC;AAAA,MACJ,UAAU,EAAE,SAAS,iBAAi
B,MAAM,SAAS;AAAA,IACvD;AAAA,EACF;AACF;AAIA,IAAM,qCAAqCC,GAAE,OAAO;AAAA,EAClD,MAAMA,GAAE,MAAMA,GAAE,OAAO,EAAE,WAAWA,GAAE,MAAMA,GAAE,OAAO,CAAC,EAAE,CAAC,CAAC;AAAA,EAC1D,OAAOA,GAAE,OAAO,EAAE,eAAeA,GAAE,OAAO,EAAE,CAAC,EAAE,QAAQ;AACzD,CAAC;;;ARrBM,SAAS,cACd,UAAmC,CAAC,GACnB;AAzEnB;AA0EE,QAAM,WACJ,0BAAqB,QAAQ,OAAO,MAApC,YAAyC;AAE3C,QAAM,aAAa,OAAO;AAAA,IACxB,eAAe,UAAU,WAAW;AAAA,MAClC,QAAQ,QAAQ;AAAA,MAChB,yBAAyB;AAAA,MACzB,aAAa;AAAA,IACf,CAAC,CAAC;AAAA,IACF,GAAG,QAAQ;AAAA,EACb;AAEA,QAAM,kBAAkB,CAAC,YACvB,IAAI,yBAAyB,SAAS;AAAA,IACpC,UAAU;AAAA,IACV;AAAA,IACA,SAAS;AAAA,IACT,OAAO,QAAQ;AAAA,IACf,YAAY,QAAQ;AAAA,EACtB,CAAC;AAEH,QAAM,uBAAuB,CAAC,YAC5B,IAAI,sBAAsB,SAAS;AAAA,IACjC,UAAU;AAAA,IACV;AAAA,IACA,SAAS;AAAA,IACT,OAAO,QAAQ;AAAA,EACjB,CAAC;AAEH,QAAM,WAAW,SAAU,SAA6B;AACtD,QAAI,YAAY;AACd,YAAM,IAAI;AAAA,QACR;AAAA,MACF;AAAA,IACF;AAEA,WAAO,gBAAgB,OAAO;AAAA,EAChC;AAEA,WAAS,gBAAgB;AACzB,WAAS,OAAO;AAChB,WAAS,YAAY;AACrB,WAAS,gBAAgB;AACzB,WAAS,qBAAqB;AAE9B,WAAS,aAAa,CAAC,YAAoB;AACzC,UAAM,IAAI,iBAAiB,EAAE,SAAS,WAAW,aAAa,CAAC;AAAA,EACjE;AAEA,SAAO;AACT;AAKO,IAAM,UAAU,cAAc;","names":["z","z","UnsupportedFunctionalityError","generateId","z","combineHeaders","createJsonResponseHandler","postJsonToApi","z","postJsonToApi","combineHeaders","createJsonResponseHandler","z"]}
|
package/package.json
CHANGED
|
@@ -1,6 +1,6 @@
|
|
|
1
1
|
{
|
|
2
2
|
"name": "@ai-sdk/mistral",
|
|
3
|
-
"version": "2.0.
|
|
3
|
+
"version": "2.0.5",
|
|
4
4
|
"license": "Apache-2.0",
|
|
5
5
|
"sideEffects": false,
|
|
6
6
|
"main": "./dist/index.js",
|
|
@@ -20,7 +20,7 @@
|
|
|
20
20
|
},
|
|
21
21
|
"dependencies": {
|
|
22
22
|
"@ai-sdk/provider": "2.0.0",
|
|
23
|
-
"@ai-sdk/provider-utils": "3.0.
|
|
23
|
+
"@ai-sdk/provider-utils": "3.0.3"
|
|
24
24
|
},
|
|
25
25
|
"devDependencies": {
|
|
26
26
|
"@types/node": "20.17.24",
|