@ai-sdk/mistral 2.0.4 → 2.0.6
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/CHANGELOG.md +12 -0
- package/dist/index.d.mts +2 -1
- package/dist/index.d.ts +2 -1
- package/dist/index.js +91 -8
- package/dist/index.js.map +1 -1
- package/dist/index.mjs +93 -8
- package/dist/index.mjs.map +1 -1
- package/package.json +1 -1
package/CHANGELOG.md
CHANGED
@@ -1,5 +1,17 @@
 # @ai-sdk/mistral
 
+## 2.0.6
+
+### Patch Changes
+
+- f8d99df: fix(provider/mistral): inject JSON generation instruction when response format is JSON
+
+## 2.0.5
+
+### Patch Changes
+
+- 266ef91: fix(provider/mistral): add support for magistral reasoning models with thinking content type
+
 ## 2.0.4
 
 ### Patch Changes
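The 2.0.6 entry is the change most visible to callers: when a JSON response format is requested, the provider now injects a JSON generation instruction into the outgoing messages instead of relying on Mistral's response_format flag alone. A minimal sketch of a call that exercises this path, assuming the usual `ai` package front end and zod (prompt and schema are illustrative); the 2.0.5 reasoning change is illustrated further down next to the streaming hunks.

import { generateObject } from 'ai';
import { mistral } from '@ai-sdk/mistral';
import { z } from 'zod';

// generateObject asks the provider for responseFormat { type: 'json', schema };
// with 2.0.6 the Mistral provider also injects a "reply with JSON" instruction
// into the messages (see the injectJsonInstructionIntoMessages call in dist/index.js below).
const { object } = await generateObject({
  model: mistral('mistral-small-latest'),
  schema: z.object({ city: z.string(), country: z.string() }),
  prompt: 'Where is the Eiffel Tower? Answer as JSON.',
});
console.log(object.city, object.country);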
package/dist/index.d.mts
CHANGED
@@ -1,7 +1,7 @@
 import { ProviderV2, LanguageModelV2, EmbeddingModelV2 } from '@ai-sdk/provider';
 import { FetchFunction } from '@ai-sdk/provider-utils';
 
-type MistralChatModelId = 'ministral-3b-latest' | 'ministral-8b-latest' | 'mistral-large-latest' | 'mistral-medium-latest' | 'mistral-medium-2505' | 'mistral-small-latest' | 'pixtral-large-latest' | 'magistral-small-2506' | 'magistral-medium-2506' | 'pixtral-12b-2409' | 'open-mistral-7b' | 'open-mixtral-8x7b' | 'open-mixtral-8x22b' | (string & {});
+type MistralChatModelId = 'ministral-3b-latest' | 'ministral-8b-latest' | 'mistral-large-latest' | 'mistral-medium-latest' | 'mistral-medium-2508' | 'mistral-medium-2505' | 'mistral-small-latest' | 'pixtral-large-latest' | 'magistral-small-2507' | 'magistral-medium-2507' | 'magistral-small-2506' | 'magistral-medium-2506' | 'pixtral-12b-2409' | 'open-mistral-7b' | 'open-mixtral-8x7b' | 'open-mixtral-8x22b' | (string & {});
 
 type MistralEmbeddingModelId = 'mistral-embed' | (string & {});
 
@@ -42,6 +42,7 @@ interface MistralProviderSettings {
 or to provide a custom fetch implementation for e.g. testing.
 */
 fetch?: FetchFunction;
+generateId?: () => string;
 }
 /**
 Create a Mistral AI provider instance.
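The declaration changes are the expanded MistralChatModelId union (adding mistral-medium-2508, magistral-small-2507, and magistral-medium-2507) and the new optional generateId setting. A small sketch of the model-id side, assuming the `ai` package front end; the prompt is illustrative.

import { generateText } from 'ai';
import { mistral } from '@ai-sdk/mistral';

// the new ids now type-check directly instead of falling back to the (string & {}) escape hatch
const { text } = await generateText({
  model: mistral('magistral-medium-2507'),
  prompt: 'Summarize the rules of chess in one sentence.',
});
console.log(text);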
package/dist/index.d.ts
CHANGED
@@ -1,7 +1,7 @@
 import { ProviderV2, LanguageModelV2, EmbeddingModelV2 } from '@ai-sdk/provider';
 import { FetchFunction } from '@ai-sdk/provider-utils';
 
-type MistralChatModelId = 'ministral-3b-latest' | 'ministral-8b-latest' | 'mistral-large-latest' | 'mistral-medium-latest' | 'mistral-medium-2505' | 'mistral-small-latest' | 'pixtral-large-latest' | 'magistral-small-2506' | 'magistral-medium-2506' | 'pixtral-12b-2409' | 'open-mistral-7b' | 'open-mixtral-8x7b' | 'open-mixtral-8x22b' | (string & {});
+type MistralChatModelId = 'ministral-3b-latest' | 'ministral-8b-latest' | 'mistral-large-latest' | 'mistral-medium-latest' | 'mistral-medium-2508' | 'mistral-medium-2505' | 'mistral-small-latest' | 'pixtral-large-latest' | 'magistral-small-2507' | 'magistral-medium-2507' | 'magistral-small-2506' | 'magistral-medium-2506' | 'pixtral-12b-2409' | 'open-mistral-7b' | 'open-mixtral-8x7b' | 'open-mixtral-8x22b' | (string & {});
 
 type MistralEmbeddingModelId = 'mistral-embed' | (string & {});
 
@@ -42,6 +42,7 @@ interface MistralProviderSettings {
 or to provide a custom fetch implementation for e.g. testing.
 */
 fetch?: FetchFunction;
+generateId?: () => string;
 }
 /**
 Create a Mistral AI provider instance.
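index.d.ts mirrors the .d.mts declarations. The new generateId hook is worth a note: judging from the dist changes further down, it supplies the ids attached to streamed reasoning parts, so overriding it is mainly useful for deterministic output in tests. A sketch with a hypothetical counter-based generator.

import { createMistral } from '@ai-sdk/mistral';

let counter = 0;
const mistral = createMistral({
  apiKey: process.env.MISTRAL_API_KEY,
  // hypothetical deterministic generator; the default comes from @ai-sdk/provider-utils
  generateId: () => `id-${counter++}`,
});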
package/dist/index.js
CHANGED
@@ -97,6 +97,15 @@ function convertToMistralChatMessages(prompt) {
 });
 break;
 }
+case "reasoning": {
+text += part.text;
+break;
+}
+default: {
+throw new Error(
+`Unsupported content type in assistant message: ${part.type}`
+);
+}
 }
 }
 messages.push({
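This hunk teaches the message converter to fold assistant reasoning parts into the plain text sent back to Mistral and to fail loudly on unknown part types. A standalone sketch of the same logic, with simplified types (not the package source):

type AssistantPart =
  | { type: 'text'; text: string }
  | { type: 'reasoning'; text: string }
  | { type: 'tool-call'; toolCallId: string; toolName: string; input: unknown };

function assistantTextFor(parts: AssistantPart[]): string {
  let text = '';
  for (const part of parts) {
    switch (part.type) {
      case 'text':
      case 'reasoning': // reasoning is flattened into the outgoing assistant text
        text += part.text;
        break;
      case 'tool-call': // collected separately into tool_calls
        break;
      default:
        throw new Error(
          `Unsupported content type in assistant message: ${(part as { type: string }).type}`,
        );
    }
  }
  return text;
}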
@@ -256,8 +265,10 @@ var MistralChatLanguageModel = class {
 this.supportedUrls = {
 "application/pdf": [/^https:\/\/.*$/]
 };
+var _a;
 this.modelId = modelId;
 this.config = config;
+this.generateId = (_a = config.generateId) != null ? _a : import_provider_utils3.generateId;
 }
 get provider() {
 return this.config.provider;
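The var _a / ternary pair is just the bundler's down-leveling of a nullish-coalescing default: the model keeps a per-instance generateId, falling back to the helper exported by @ai-sdk/provider-utils. In source form (per the updated source map) the constructor line is roughly `this.generateId = config.generateId ?? generateId;`, i.e. the same pattern as this small sketch:

import { generateId } from '@ai-sdk/provider-utils';

// sketch of the defaulting behaviour added in the constructor
function resolveGenerateId(configGenerateId?: () => string): () => string {
  return configGenerateId ?? generateId;
}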
@@ -315,6 +326,12 @@ var MistralChatLanguageModel = class {
 details: "JSON response format schema is not supported"
 });
 }
+if ((responseFormat == null ? void 0 : responseFormat.type) === "json") {
+prompt = (0, import_provider_utils3.injectJsonInstructionIntoMessages)({
+messages: prompt,
+schema: responseFormat.schema
+});
+}
 const baseArgs = {
 // model id:
 model: this.modelId,
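This is the core of the 2.0.6 fix: for responseFormat.type === "json" the prompt is rewritten with injectJsonInstructionIntoMessages from @ai-sdk/provider-utils before the request body is built; the schema still is not forwarded via response_format (the json_object mode and the warning above are unchanged). The helper's exact wording lives in provider-utils; the following is a purely hypothetical stand-in to convey the idea, not the real implementation.

import type { LanguageModelV2Prompt } from '@ai-sdk/provider';

// NOT the real helper, only an illustrative approximation of injecting a JSON instruction.
function withJsonInstruction(
  prompt: LanguageModelV2Prompt,
  schema?: unknown,
): LanguageModelV2Prompt {
  const instruction =
    'You must answer with a JSON object.' +
    (schema != null ? ` It must match this JSON schema: ${JSON.stringify(schema)}` : '');
  return [...prompt, { role: 'system', content: instruction }];
}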
@@ -326,6 +343,7 @@ var MistralChatLanguageModel = class {
 top_p: topP,
 random_seed: seed,
 // response format:
+// TODO add JSON schema support (see OpenAI implementation)
 response_format: (responseFormat == null ? void 0 : responseFormat.type) === "json" ? { type: "json_object" } : void 0,
 // mistral-specific provider options:
 document_image_limit: options.documentImageLimit,
@@ -369,13 +387,24 @@ var MistralChatLanguageModel = class {
 });
 const choice = response.choices[0];
 const content = [];
-
-
-
-
-
-
-
+if (choice.message.content != null && Array.isArray(choice.message.content)) {
+for (const part of choice.message.content) {
+if (part.type === "thinking") {
+const reasoningText = extractReasoningContent(part.thinking);
+if (reasoningText.length > 0) {
+content.push({ type: "reasoning", text: reasoningText });
+}
+} else if (part.type === "text") {
+if (part.text.length > 0) {
+content.push({ type: "text", text: part.text });
+}
+}
+}
+} else {
+const text = extractTextContent(choice.message.content);
+if (text != null && text.length > 0) {
+content.push({ type: "text", text });
+}
 }
 if (choice.message.tool_calls != null) {
 for (const toolCall of choice.message.tool_calls) {
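doGenerate previously treated the response content as one text string; it now walks array content, mapping Mistral thinking parts to reasoning content and text parts to text content, keeping the old string path as a fallback. The same mapping in isolation, with simplified types (a sketch rather than the dist code):

type MistralResponsePart =
  | { type: 'text'; text: string }
  | { type: 'thinking'; thinking: Array<{ type: 'text'; text: string }> };

type MappedPart = { type: 'text' | 'reasoning'; text: string };

function mapResponseContent(
  content: string | MistralResponsePart[] | null | undefined,
): MappedPart[] {
  const out: MappedPart[] = [];
  if (Array.isArray(content)) {
    for (const part of content) {
      if (part.type === 'thinking') {
        // concatenate the text chunks of the thinking block into one reasoning part
        const text = part.thinking.map(chunk => chunk.text).join('');
        if (text.length > 0) out.push({ type: 'reasoning', text });
      } else if (part.text.length > 0) {
        out.push({ type: 'text', text: part.text });
      }
    }
  } else if (content != null && content.length > 0) {
    out.push({ type: 'text', text: content }); // legacy string responses
  }
  return out;
}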
@@ -426,6 +455,8 @@ var MistralChatLanguageModel = class {
 };
 let isFirstChunk = true;
 let activeText = false;
+let activeReasoningId = null;
+const generateId2 = this.generateId;
 return {
 stream: response.pipeThrough(
 new TransformStream({
@@ -456,8 +487,40 @@ var MistralChatLanguageModel = class {
 const choice = value.choices[0];
 const delta = choice.delta;
 const textContent = extractTextContent(delta.content);
+if (delta.content != null && Array.isArray(delta.content)) {
+for (const part of delta.content) {
+if (part.type === "thinking") {
+const reasoningDelta = extractReasoningContent(part.thinking);
+if (reasoningDelta.length > 0) {
+if (activeReasoningId == null) {
+if (activeText) {
+controller.enqueue({ type: "text-end", id: "0" });
+activeText = false;
+}
+activeReasoningId = generateId2();
+controller.enqueue({
+type: "reasoning-start",
+id: activeReasoningId
+});
+}
+controller.enqueue({
+type: "reasoning-delta",
+id: activeReasoningId,
+delta: reasoningDelta
+});
+}
+}
+}
+}
 if (textContent != null && textContent.length > 0) {
 if (!activeText) {
+if (activeReasoningId != null) {
+controller.enqueue({
+type: "reasoning-end",
+id: activeReasoningId
+});
+activeReasoningId = null;
+}
 controller.enqueue({ type: "text-start", id: "0" });
 activeText = true;
 }
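The streaming transform now tracks an activeReasoningId next to activeText: thinking deltas open a reasoning-start part (with an id from generateId), incoming text closes any open reasoning block, and flush (next hunk) closes whatever is still open. From the consumer side this surfaces as reasoning stream parts ahead of the text parts; a sketch using the `ai` package, with the prompt and model choice purely illustrative:

import { streamText } from 'ai';
import { mistral } from '@ai-sdk/mistral';

const result = streamText({
  model: mistral('magistral-small-2507'),
  prompt: 'What is 17 * 23? Think it through first.',
});

for await (const part of result.fullStream) {
  // expected ordering per this diff: reasoning-start, reasoning-delta*, reasoning-end,
  // then text-start, text-delta*, text-end, finish
  console.log(part.type);
}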
@@ -499,6 +562,12 @@ var MistralChatLanguageModel = class {
 }
 },
 flush(controller) {
+if (activeReasoningId != null) {
+controller.enqueue({
+type: "reasoning-end",
+id: activeReasoningId
+});
+}
 if (activeText) {
 controller.enqueue({ type: "text-end", id: "0" });
 }
@@ -515,6 +584,9 @@ var MistralChatLanguageModel = class {
 };
 }
 };
+function extractReasoningContent(thinking) {
+return thinking.filter((chunk) => chunk.type === "text").map((chunk) => chunk.text).join("");
+}
 function extractTextContent(content) {
 if (typeof content === "string") {
 return content;
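The new extractReasoningContent helper simply concatenates the text chunks inside a thinking block; per the updated source map its parameter is Array<{ type: string; text: string }>. Restated in source form with a worked example:

function extractReasoningContent(
  thinking: Array<{ type: string; text: string }>,
): string {
  return thinking
    .filter(chunk => chunk.type === 'text')
    .map(chunk => chunk.text)
    .join('');
}

// extractReasoningContent([{ type: 'text', text: 'Step 1. ' }, { type: 'text', text: 'Step 2.' }])
// => 'Step 1. Step 2.'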
@@ -529,6 +601,7 @@ function extractTextContent(content) {
 case "text":
 textContent.push(chunk.text);
 break;
+case "thinking":
 case "image_url":
 case "reference":
 break;
@@ -561,6 +634,15 @@ var mistralContentSchema = import_v43.z.union([
 import_v43.z.object({
 type: import_v43.z.literal("reference"),
 reference_ids: import_v43.z.array(import_v43.z.number())
+}),
+import_v43.z.object({
+type: import_v43.z.literal("thinking"),
+thinking: import_v43.z.array(
+import_v43.z.object({
+type: import_v43.z.literal("text"),
+text: import_v43.z.string()
+})
+)
 })
 ])
 )
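The response content schema gains a thinking branch so magistral responses validate. The same shape written against plain zod v4 (the dist code above is identical apart from the bundler prefixes), plus an example of the payload it accepts:

import { z } from 'zod/v4';

const thinkingPartSchema = z.object({
  type: z.literal('thinking'),
  thinking: z.array(z.object({ type: z.literal('text'), text: z.string() })),
});

// accepts e.g.:
// { type: 'thinking', thinking: [{ type: 'text', text: 'Let me reason about this...' }] }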
@@ -691,7 +773,8 @@ function createMistral(options = {}) {
 provider: "mistral.chat",
 baseURL,
 headers: getHeaders,
-fetch: options.fetch
+fetch: options.fetch,
+generateId: options.generateId
 });
 const createEmbeddingModel = (modelId) => new MistralEmbeddingModel(modelId, {
 provider: "mistral.embedding",
package/dist/index.js.map
CHANGED
@@ -1 +1 @@
-
{"version":3,"sources":["../src/index.ts","../src/mistral-provider.ts","../src/mistral-chat-language-model.ts","../src/convert-to-mistral-chat-messages.ts","../src/get-response-metadata.ts","../src/map-mistral-finish-reason.ts","../src/mistral-chat-options.ts","../src/mistral-error.ts","../src/mistral-prepare-tools.ts","../src/mistral-embedding-model.ts"],"sourcesContent":["export { createMistral, mistral } from './mistral-provider';\nexport type {\n MistralProvider,\n MistralProviderSettings,\n} from './mistral-provider';\n","import {\n EmbeddingModelV2,\n LanguageModelV2,\n NoSuchModelError,\n ProviderV2,\n} from '@ai-sdk/provider';\nimport {\n FetchFunction,\n loadApiKey,\n withoutTrailingSlash,\n} from '@ai-sdk/provider-utils';\nimport { MistralChatLanguageModel } from './mistral-chat-language-model';\nimport { MistralChatModelId } from './mistral-chat-options';\nimport { MistralEmbeddingModel } from './mistral-embedding-model';\nimport { MistralEmbeddingModelId } from './mistral-embedding-options';\n\nexport interface MistralProvider extends ProviderV2 {\n (modelId: MistralChatModelId): LanguageModelV2;\n\n /**\nCreates a model for text generation.\n*/\n languageModel(modelId: MistralChatModelId): LanguageModelV2;\n\n /**\nCreates a model for text generation.\n*/\n chat(modelId: MistralChatModelId): LanguageModelV2;\n\n /**\n@deprecated Use `textEmbedding()` instead.\n */\n embedding(modelId: MistralEmbeddingModelId): EmbeddingModelV2<string>;\n\n textEmbedding(modelId: MistralEmbeddingModelId): EmbeddingModelV2<string>;\n\n textEmbeddingModel: (\n modelId: MistralEmbeddingModelId,\n ) => EmbeddingModelV2<string>;\n}\n\nexport interface MistralProviderSettings {\n /**\nUse a different URL prefix for API calls, e.g. to use proxy servers.\nThe default prefix is `https://api.mistral.ai/v1`.\n */\n baseURL?: string;\n\n /**\nAPI key that is being send using the `Authorization` header.\nIt defaults to the `MISTRAL_API_KEY` environment variable.\n */\n apiKey?: string;\n\n /**\nCustom headers to include in the requests.\n */\n headers?: Record<string, string>;\n\n /**\nCustom fetch implementation. You can use it as a middleware to intercept requests,\nor to provide a custom fetch implementation for e.g. testing.\n */\n fetch?: FetchFunction;\n}\n\n/**\nCreate a Mistral AI provider instance.\n */\nexport function createMistral(\n options: MistralProviderSettings = {},\n): MistralProvider {\n const baseURL =\n withoutTrailingSlash(options.baseURL) ?? 
'https://api.mistral.ai/v1';\n\n const getHeaders = () => ({\n Authorization: `Bearer ${loadApiKey({\n apiKey: options.apiKey,\n environmentVariableName: 'MISTRAL_API_KEY',\n description: 'Mistral',\n })}`,\n ...options.headers,\n });\n\n const createChatModel = (modelId: MistralChatModelId) =>\n new MistralChatLanguageModel(modelId, {\n provider: 'mistral.chat',\n baseURL,\n headers: getHeaders,\n fetch: options.fetch,\n });\n\n const createEmbeddingModel = (modelId: MistralEmbeddingModelId) =>\n new MistralEmbeddingModel(modelId, {\n provider: 'mistral.embedding',\n baseURL,\n headers: getHeaders,\n fetch: options.fetch,\n });\n\n const provider = function (modelId: MistralChatModelId) {\n if (new.target) {\n throw new Error(\n 'The Mistral model function cannot be called with the new keyword.',\n );\n }\n\n return createChatModel(modelId);\n };\n\n provider.languageModel = createChatModel;\n provider.chat = createChatModel;\n provider.embedding = createEmbeddingModel;\n provider.textEmbedding = createEmbeddingModel;\n provider.textEmbeddingModel = createEmbeddingModel;\n\n provider.imageModel = (modelId: string) => {\n throw new NoSuchModelError({ modelId, modelType: 'imageModel' });\n };\n\n return provider;\n}\n\n/**\nDefault Mistral provider instance.\n */\nexport const mistral = createMistral();\n","import {\n LanguageModelV2,\n LanguageModelV2CallWarning,\n LanguageModelV2Content,\n LanguageModelV2FinishReason,\n LanguageModelV2StreamPart,\n LanguageModelV2Usage,\n} from '@ai-sdk/provider';\nimport {\n combineHeaders,\n createEventSourceResponseHandler,\n createJsonResponseHandler,\n FetchFunction,\n parseProviderOptions,\n ParseResult,\n postJsonToApi,\n} from '@ai-sdk/provider-utils';\nimport { z } from 'zod/v4';\nimport { convertToMistralChatMessages } from './convert-to-mistral-chat-messages';\nimport { getResponseMetadata } from './get-response-metadata';\nimport { mapMistralFinishReason } from './map-mistral-finish-reason';\nimport {\n MistralChatModelId,\n mistralProviderOptions,\n} from './mistral-chat-options';\nimport { mistralFailedResponseHandler } from './mistral-error';\nimport { prepareTools } from './mistral-prepare-tools';\n\ntype MistralChatConfig = {\n provider: string;\n baseURL: string;\n headers: () => Record<string, string | undefined>;\n fetch?: FetchFunction;\n};\n\nexport class MistralChatLanguageModel implements LanguageModelV2 {\n readonly specificationVersion = 'v2';\n\n readonly modelId: MistralChatModelId;\n\n private readonly config: MistralChatConfig;\n\n constructor(modelId: MistralChatModelId, config: MistralChatConfig) {\n this.modelId = modelId;\n this.config = config;\n }\n\n get provider(): string {\n return this.config.provider;\n }\n\n readonly supportedUrls: Record<string, RegExp[]> = {\n 'application/pdf': [/^https:\\/\\/.*$/],\n };\n\n private async getArgs({\n prompt,\n maxOutputTokens,\n temperature,\n topP,\n topK,\n frequencyPenalty,\n presencePenalty,\n stopSequences,\n responseFormat,\n seed,\n providerOptions,\n tools,\n toolChoice,\n }: Parameters<LanguageModelV2['doGenerate']>[0]) {\n const warnings: LanguageModelV2CallWarning[] = [];\n\n const options =\n (await parseProviderOptions({\n provider: 'mistral',\n providerOptions,\n schema: mistralProviderOptions,\n })) ?? 
{};\n\n if (topK != null) {\n warnings.push({\n type: 'unsupported-setting',\n setting: 'topK',\n });\n }\n\n if (frequencyPenalty != null) {\n warnings.push({\n type: 'unsupported-setting',\n setting: 'frequencyPenalty',\n });\n }\n\n if (presencePenalty != null) {\n warnings.push({\n type: 'unsupported-setting',\n setting: 'presencePenalty',\n });\n }\n\n if (stopSequences != null) {\n warnings.push({\n type: 'unsupported-setting',\n setting: 'stopSequences',\n });\n }\n\n if (\n responseFormat != null &&\n responseFormat.type === 'json' &&\n responseFormat.schema != null\n ) {\n warnings.push({\n type: 'unsupported-setting',\n setting: 'responseFormat',\n details: 'JSON response format schema is not supported',\n });\n }\n\n const baseArgs = {\n // model id:\n model: this.modelId,\n\n // model specific settings:\n safe_prompt: options.safePrompt,\n\n // standardized settings:\n max_tokens: maxOutputTokens,\n temperature,\n top_p: topP,\n random_seed: seed,\n\n // response format:\n response_format:\n responseFormat?.type === 'json' ? { type: 'json_object' } : undefined,\n\n // mistral-specific provider options:\n document_image_limit: options.documentImageLimit,\n document_page_limit: options.documentPageLimit,\n\n // messages:\n messages: convertToMistralChatMessages(prompt),\n };\n\n const {\n tools: mistralTools,\n toolChoice: mistralToolChoice,\n toolWarnings,\n } = prepareTools({\n tools,\n toolChoice,\n });\n\n return {\n args: {\n ...baseArgs,\n tools: mistralTools,\n tool_choice: mistralToolChoice,\n },\n warnings: [...warnings, ...toolWarnings],\n };\n }\n\n async doGenerate(\n options: Parameters<LanguageModelV2['doGenerate']>[0],\n ): Promise<Awaited<ReturnType<LanguageModelV2['doGenerate']>>> {\n const { args: body, warnings } = await this.getArgs(options);\n\n const {\n responseHeaders,\n value: response,\n rawValue: rawResponse,\n } = await postJsonToApi({\n url: `${this.config.baseURL}/chat/completions`,\n headers: combineHeaders(this.config.headers(), options.headers),\n body,\n failedResponseHandler: mistralFailedResponseHandler,\n successfulResponseHandler: createJsonResponseHandler(\n mistralChatResponseSchema,\n ),\n abortSignal: options.abortSignal,\n fetch: this.config.fetch,\n });\n\n const choice = response.choices[0];\n const content: Array<LanguageModelV2Content> = [];\n\n // text content:\n let text = extractTextContent(choice.message.content);\n\n // when there is a trailing assistant message, mistral will send the\n // content of that message again. we skip this repeated content to\n // avoid duplication, e.g. 
in continuation mode.\n const lastMessage = body.messages[body.messages.length - 1];\n if (\n lastMessage.role === 'assistant' &&\n text?.startsWith(lastMessage.content)\n ) {\n text = text.slice(lastMessage.content.length);\n }\n\n if (text != null && text.length > 0) {\n content.push({ type: 'text', text });\n }\n\n // tool calls:\n if (choice.message.tool_calls != null) {\n for (const toolCall of choice.message.tool_calls) {\n content.push({\n type: 'tool-call',\n toolCallId: toolCall.id,\n toolName: toolCall.function.name,\n input: toolCall.function.arguments!,\n });\n }\n }\n\n return {\n content,\n finishReason: mapMistralFinishReason(choice.finish_reason),\n usage: {\n inputTokens: response.usage.prompt_tokens,\n outputTokens: response.usage.completion_tokens,\n totalTokens: response.usage.total_tokens,\n },\n request: { body },\n response: {\n ...getResponseMetadata(response),\n headers: responseHeaders,\n body: rawResponse,\n },\n warnings,\n };\n }\n\n async doStream(\n options: Parameters<LanguageModelV2['doStream']>[0],\n ): Promise<Awaited<ReturnType<LanguageModelV2['doStream']>>> {\n const { args, warnings } = await this.getArgs(options);\n const body = { ...args, stream: true };\n\n const { responseHeaders, value: response } = await postJsonToApi({\n url: `${this.config.baseURL}/chat/completions`,\n headers: combineHeaders(this.config.headers(), options.headers),\n body,\n failedResponseHandler: mistralFailedResponseHandler,\n successfulResponseHandler: createEventSourceResponseHandler(\n mistralChatChunkSchema,\n ),\n abortSignal: options.abortSignal,\n fetch: this.config.fetch,\n });\n\n let finishReason: LanguageModelV2FinishReason = 'unknown';\n const usage: LanguageModelV2Usage = {\n inputTokens: undefined,\n outputTokens: undefined,\n totalTokens: undefined,\n };\n\n let isFirstChunk = true;\n let activeText = false;\n\n return {\n stream: response.pipeThrough(\n new TransformStream<\n ParseResult<z.infer<typeof mistralChatChunkSchema>>,\n LanguageModelV2StreamPart\n >({\n start(controller) {\n controller.enqueue({ type: 'stream-start', warnings });\n },\n\n transform(chunk, controller) {\n // Emit raw chunk if requested (before anything else)\n if (options.includeRawChunks) {\n controller.enqueue({ type: 'raw', rawValue: chunk.rawValue });\n }\n\n if (!chunk.success) {\n controller.enqueue({ type: 'error', error: chunk.error });\n return;\n }\n\n const value = chunk.value;\n\n if (isFirstChunk) {\n isFirstChunk = false;\n\n controller.enqueue({\n type: 'response-metadata',\n ...getResponseMetadata(value),\n });\n }\n\n if (value.usage != null) {\n usage.inputTokens = value.usage.prompt_tokens;\n usage.outputTokens = value.usage.completion_tokens;\n usage.totalTokens = value.usage.total_tokens;\n }\n\n const choice = value.choices[0];\n const delta = choice.delta;\n\n const textContent = extractTextContent(delta.content);\n\n if (textContent != null && textContent.length > 0) {\n if (!activeText) {\n controller.enqueue({ type: 'text-start', id: '0' });\n activeText = true;\n }\n\n controller.enqueue({\n type: 'text-delta',\n id: '0',\n delta: textContent,\n });\n }\n\n if (delta?.tool_calls != null) {\n for (const toolCall of delta.tool_calls) {\n const toolCallId = toolCall.id;\n const toolName = toolCall.function.name;\n const input = toolCall.function.arguments;\n\n controller.enqueue({\n type: 'tool-input-start',\n id: toolCallId,\n toolName,\n });\n\n controller.enqueue({\n type: 'tool-input-delta',\n id: toolCallId,\n delta: input,\n });\n\n 
controller.enqueue({\n type: 'tool-input-end',\n id: toolCallId,\n });\n\n controller.enqueue({\n type: 'tool-call',\n toolCallId,\n toolName,\n input,\n });\n }\n }\n\n if (choice.finish_reason != null) {\n finishReason = mapMistralFinishReason(choice.finish_reason);\n }\n },\n\n flush(controller) {\n if (activeText) {\n controller.enqueue({ type: 'text-end', id: '0' });\n }\n\n controller.enqueue({\n type: 'finish',\n finishReason,\n usage,\n });\n },\n }),\n ),\n request: { body },\n response: { headers: responseHeaders },\n };\n }\n}\n\nfunction extractTextContent(content: z.infer<typeof mistralContentSchema>) {\n if (typeof content === 'string') {\n return content;\n }\n\n if (content == null) {\n return undefined;\n }\n\n const textContent: string[] = [];\n\n for (const chunk of content) {\n const { type } = chunk;\n\n switch (type) {\n case 'text':\n textContent.push(chunk.text);\n break;\n case 'image_url':\n case 'reference':\n // image content or reference content is currently ignored.\n break;\n default: {\n const _exhaustiveCheck: never = type;\n throw new Error(`Unsupported type: ${_exhaustiveCheck}`);\n }\n }\n }\n\n return textContent.length ? textContent.join('') : undefined;\n}\n\nconst mistralContentSchema = z\n .union([\n z.string(),\n z.array(\n z.discriminatedUnion('type', [\n z.object({\n type: z.literal('text'),\n text: z.string(),\n }),\n z.object({\n type: z.literal('image_url'),\n image_url: z.union([\n z.string(),\n z.object({\n url: z.string(),\n detail: z.string().nullable(),\n }),\n ]),\n }),\n z.object({\n type: z.literal('reference'),\n reference_ids: z.array(z.number()),\n }),\n ]),\n ),\n ])\n .nullish();\n\nconst mistralUsageSchema = z.object({\n prompt_tokens: z.number(),\n completion_tokens: z.number(),\n total_tokens: z.number(),\n});\n\n// limited version of the schema, focussed on what is needed for the implementation\n// this approach limits breakages when the API changes and increases efficiency\nconst mistralChatResponseSchema = z.object({\n id: z.string().nullish(),\n created: z.number().nullish(),\n model: z.string().nullish(),\n choices: z.array(\n z.object({\n message: z.object({\n role: z.literal('assistant'),\n content: mistralContentSchema,\n tool_calls: z\n .array(\n z.object({\n id: z.string(),\n function: z.object({ name: z.string(), arguments: z.string() }),\n }),\n )\n .nullish(),\n }),\n index: z.number(),\n finish_reason: z.string().nullish(),\n }),\n ),\n object: z.literal('chat.completion'),\n usage: mistralUsageSchema,\n});\n\n// limited version of the schema, focussed on what is needed for the implementation\n// this approach limits breakages when the API changes and increases efficiency\nconst mistralChatChunkSchema = z.object({\n id: z.string().nullish(),\n created: z.number().nullish(),\n model: z.string().nullish(),\n choices: z.array(\n z.object({\n delta: z.object({\n role: z.enum(['assistant']).optional(),\n content: mistralContentSchema,\n tool_calls: z\n .array(\n z.object({\n id: z.string(),\n function: z.object({ name: z.string(), arguments: z.string() }),\n }),\n )\n .nullish(),\n }),\n finish_reason: z.string().nullish(),\n index: z.number(),\n }),\n ),\n usage: mistralUsageSchema.nullish(),\n});\n","import {\n LanguageModelV2Prompt,\n UnsupportedFunctionalityError,\n} from '@ai-sdk/provider';\nimport { MistralPrompt } from './mistral-chat-prompt';\nimport { convertToBase64 } from '@ai-sdk/provider-utils';\n\nexport function convertToMistralChatMessages(\n prompt: LanguageModelV2Prompt,\n): MistralPrompt {\n const 
messages: MistralPrompt = [];\n\n for (let i = 0; i < prompt.length; i++) {\n const { role, content } = prompt[i];\n const isLastMessage = i === prompt.length - 1;\n\n switch (role) {\n case 'system': {\n messages.push({ role: 'system', content });\n break;\n }\n\n case 'user': {\n messages.push({\n role: 'user',\n content: content.map(part => {\n switch (part.type) {\n case 'text': {\n return { type: 'text', text: part.text };\n }\n\n case 'file': {\n if (part.mediaType.startsWith('image/')) {\n const mediaType =\n part.mediaType === 'image/*'\n ? 'image/jpeg'\n : part.mediaType;\n\n return {\n type: 'image_url',\n image_url:\n part.data instanceof URL\n ? part.data.toString()\n : `data:${mediaType};base64,${convertToBase64(part.data)}`,\n };\n } else if (part.mediaType === 'application/pdf') {\n return {\n type: 'document_url',\n document_url: part.data.toString(),\n };\n } else {\n throw new UnsupportedFunctionalityError({\n functionality:\n 'Only images and PDF file parts are supported',\n });\n }\n }\n }\n }),\n });\n break;\n }\n\n case 'assistant': {\n let text = '';\n const toolCalls: Array<{\n id: string;\n type: 'function';\n function: { name: string; arguments: string };\n }> = [];\n\n for (const part of content) {\n switch (part.type) {\n case 'text': {\n text += part.text;\n break;\n }\n case 'tool-call': {\n toolCalls.push({\n id: part.toolCallId,\n type: 'function',\n function: {\n name: part.toolName,\n arguments: JSON.stringify(part.input),\n },\n });\n break;\n }\n }\n }\n\n messages.push({\n role: 'assistant',\n content: text,\n prefix: isLastMessage ? true : undefined,\n tool_calls: toolCalls.length > 0 ? toolCalls : undefined,\n });\n\n break;\n }\n case 'tool': {\n for (const toolResponse of content) {\n const output = toolResponse.output;\n\n let contentValue: string;\n switch (output.type) {\n case 'text':\n case 'error-text':\n contentValue = output.value;\n break;\n case 'content':\n case 'json':\n case 'error-json':\n contentValue = JSON.stringify(output.value);\n break;\n }\n\n messages.push({\n role: 'tool',\n name: toolResponse.toolName,\n tool_call_id: toolResponse.toolCallId,\n content: contentValue,\n });\n }\n break;\n }\n default: {\n const _exhaustiveCheck: never = role;\n throw new Error(`Unsupported role: ${_exhaustiveCheck}`);\n }\n }\n }\n\n return messages;\n}\n","export function getResponseMetadata({\n id,\n model,\n created,\n}: {\n id?: string | undefined | null;\n created?: number | undefined | null;\n model?: string | undefined | null;\n}) {\n return {\n id: id ?? undefined,\n modelId: model ?? undefined,\n timestamp: created != null ? 
new Date(created * 1000) : undefined,\n };\n}\n","import { LanguageModelV2FinishReason } from '@ai-sdk/provider';\n\nexport function mapMistralFinishReason(\n finishReason: string | null | undefined,\n): LanguageModelV2FinishReason {\n switch (finishReason) {\n case 'stop':\n return 'stop';\n case 'length':\n case 'model_length':\n return 'length';\n case 'tool_calls':\n return 'tool-calls';\n default:\n return 'unknown';\n }\n}\n","import { z } from 'zod/v4';\n\n// https://docs.mistral.ai/getting-started/models/models_overview/\nexport type MistralChatModelId =\n // premier\n | 'ministral-3b-latest'\n | 'ministral-8b-latest'\n | 'mistral-large-latest'\n | 'mistral-medium-latest'\n | 'mistral-medium-2505'\n | 'mistral-small-latest'\n | 'pixtral-large-latest'\n // reasoning models\n | 'magistral-small-2506'\n | 'magistral-medium-2506'\n // free\n | 'pixtral-12b-2409'\n // legacy\n | 'open-mistral-7b'\n | 'open-mixtral-8x7b'\n | 'open-mixtral-8x22b'\n | (string & {});\n\nexport const mistralProviderOptions = z.object({\n /**\nWhether to inject a safety prompt before all conversations.\n\nDefaults to `false`.\n */\n safePrompt: z.boolean().optional(),\n\n documentImageLimit: z.number().optional(),\n documentPageLimit: z.number().optional(),\n});\n\nexport type MistralProviderOptions = z.infer<typeof mistralProviderOptions>;\n","import { createJsonErrorResponseHandler } from '@ai-sdk/provider-utils';\nimport { z } from 'zod/v4';\n\nconst mistralErrorDataSchema = z.object({\n object: z.literal('error'),\n message: z.string(),\n type: z.string(),\n param: z.string().nullable(),\n code: z.string().nullable(),\n});\n\nexport type MistralErrorData = z.infer<typeof mistralErrorDataSchema>;\n\nexport const mistralFailedResponseHandler = createJsonErrorResponseHandler({\n errorSchema: mistralErrorDataSchema,\n errorToMessage: data => data.message,\n});\n","import {\n LanguageModelV2CallOptions,\n LanguageModelV2CallWarning,\n UnsupportedFunctionalityError,\n} from '@ai-sdk/provider';\nimport { MistralToolChoice } from './mistral-chat-prompt';\n\nexport function prepareTools({\n tools,\n toolChoice,\n}: {\n tools: LanguageModelV2CallOptions['tools'];\n toolChoice?: LanguageModelV2CallOptions['toolChoice'];\n}): {\n tools:\n | Array<{\n type: 'function';\n function: {\n name: string;\n description: string | undefined;\n parameters: unknown;\n };\n }>\n | undefined;\n toolChoice: MistralToolChoice | undefined;\n toolWarnings: LanguageModelV2CallWarning[];\n} {\n // when the tools array is empty, change it to undefined to prevent errors:\n tools = tools?.length ? 
tools : undefined;\n\n const toolWarnings: LanguageModelV2CallWarning[] = [];\n\n if (tools == null) {\n return { tools: undefined, toolChoice: undefined, toolWarnings };\n }\n\n const mistralTools: Array<{\n type: 'function';\n function: {\n name: string;\n description: string | undefined;\n parameters: unknown;\n };\n }> = [];\n\n for (const tool of tools) {\n if (tool.type === 'provider-defined') {\n toolWarnings.push({ type: 'unsupported-tool', tool });\n } else {\n mistralTools.push({\n type: 'function',\n function: {\n name: tool.name,\n description: tool.description,\n parameters: tool.inputSchema,\n },\n });\n }\n }\n\n if (toolChoice == null) {\n return { tools: mistralTools, toolChoice: undefined, toolWarnings };\n }\n\n const type = toolChoice.type;\n\n switch (type) {\n case 'auto':\n case 'none':\n return { tools: mistralTools, toolChoice: type, toolWarnings };\n case 'required':\n return { tools: mistralTools, toolChoice: 'any', toolWarnings };\n\n // mistral does not support tool mode directly,\n // so we filter the tools and force the tool choice through 'any'\n case 'tool':\n return {\n tools: mistralTools.filter(\n tool => tool.function.name === toolChoice.toolName,\n ),\n toolChoice: 'any',\n toolWarnings,\n };\n default: {\n const _exhaustiveCheck: never = type;\n throw new UnsupportedFunctionalityError({\n functionality: `tool choice type: ${_exhaustiveCheck}`,\n });\n }\n }\n}\n","import {\n EmbeddingModelV2,\n TooManyEmbeddingValuesForCallError,\n} from '@ai-sdk/provider';\nimport {\n combineHeaders,\n createJsonResponseHandler,\n FetchFunction,\n postJsonToApi,\n} from '@ai-sdk/provider-utils';\nimport { z } from 'zod/v4';\nimport { MistralEmbeddingModelId } from './mistral-embedding-options';\nimport { mistralFailedResponseHandler } from './mistral-error';\n\ntype MistralEmbeddingConfig = {\n provider: string;\n baseURL: string;\n headers: () => Record<string, string | undefined>;\n fetch?: FetchFunction;\n};\n\nexport class MistralEmbeddingModel implements EmbeddingModelV2<string> {\n readonly specificationVersion = 'v2';\n readonly modelId: MistralEmbeddingModelId;\n readonly maxEmbeddingsPerCall = 32;\n readonly supportsParallelCalls = false;\n\n private readonly config: MistralEmbeddingConfig;\n\n get provider(): string {\n return this.config.provider;\n }\n\n constructor(\n modelId: MistralEmbeddingModelId,\n config: MistralEmbeddingConfig,\n ) {\n this.modelId = modelId;\n this.config = config;\n }\n\n async doEmbed({\n values,\n abortSignal,\n headers,\n }: Parameters<EmbeddingModelV2<string>['doEmbed']>[0]): Promise<\n Awaited<ReturnType<EmbeddingModelV2<string>['doEmbed']>>\n > {\n if (values.length > this.maxEmbeddingsPerCall) {\n throw new TooManyEmbeddingValuesForCallError({\n provider: this.provider,\n modelId: this.modelId,\n maxEmbeddingsPerCall: this.maxEmbeddingsPerCall,\n values,\n });\n }\n\n const {\n responseHeaders,\n value: response,\n rawValue,\n } = await postJsonToApi({\n url: `${this.config.baseURL}/embeddings`,\n headers: combineHeaders(this.config.headers(), headers),\n body: {\n model: this.modelId,\n input: values,\n encoding_format: 'float',\n },\n failedResponseHandler: mistralFailedResponseHandler,\n successfulResponseHandler: createJsonResponseHandler(\n MistralTextEmbeddingResponseSchema,\n ),\n abortSignal,\n fetch: this.config.fetch,\n });\n\n return {\n embeddings: response.data.map(item => item.embedding),\n usage: response.usage\n ? 
{ tokens: response.usage.prompt_tokens }\n : undefined,\n response: { headers: responseHeaders, body: rawValue },\n };\n }\n}\n\n// minimal version of the schema, focussed on what is needed for the implementation\n// this approach limits breakages when the API changes and increases efficiency\nconst MistralTextEmbeddingResponseSchema = z.object({\n data: z.array(z.object({ embedding: z.array(z.number()) })),\n usage: z.object({ prompt_tokens: z.number() }).nullish(),\n});\n"],"mappings":";;;;;;;;;;;;;;;;;;;;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;;;ACAA,IAAAA,mBAKO;AACP,IAAAC,yBAIO;;;ACFP,IAAAC,yBAQO;AACP,IAAAC,aAAkB;;;ACjBlB,sBAGO;AAEP,4BAAgC;AAEzB,SAAS,6BACd,QACe;AACf,QAAM,WAA0B,CAAC;AAEjC,WAAS,IAAI,GAAG,IAAI,OAAO,QAAQ,KAAK;AACtC,UAAM,EAAE,MAAM,QAAQ,IAAI,OAAO,CAAC;AAClC,UAAM,gBAAgB,MAAM,OAAO,SAAS;AAE5C,YAAQ,MAAM;AAAA,MACZ,KAAK,UAAU;AACb,iBAAS,KAAK,EAAE,MAAM,UAAU,QAAQ,CAAC;AACzC;AAAA,MACF;AAAA,MAEA,KAAK,QAAQ;AACX,iBAAS,KAAK;AAAA,UACZ,MAAM;AAAA,UACN,SAAS,QAAQ,IAAI,UAAQ;AAC3B,oBAAQ,KAAK,MAAM;AAAA,cACjB,KAAK,QAAQ;AACX,uBAAO,EAAE,MAAM,QAAQ,MAAM,KAAK,KAAK;AAAA,cACzC;AAAA,cAEA,KAAK,QAAQ;AACX,oBAAI,KAAK,UAAU,WAAW,QAAQ,GAAG;AACvC,wBAAM,YACJ,KAAK,cAAc,YACf,eACA,KAAK;AAEX,yBAAO;AAAA,oBACL,MAAM;AAAA,oBACN,WACE,KAAK,gBAAgB,MACjB,KAAK,KAAK,SAAS,IACnB,QAAQ,SAAS,eAAW,uCAAgB,KAAK,IAAI,CAAC;AAAA,kBAC9D;AAAA,gBACF,WAAW,KAAK,cAAc,mBAAmB;AAC/C,yBAAO;AAAA,oBACL,MAAM;AAAA,oBACN,cAAc,KAAK,KAAK,SAAS;AAAA,kBACnC;AAAA,gBACF,OAAO;AACL,wBAAM,IAAI,8CAA8B;AAAA,oBACtC,eACE;AAAA,kBACJ,CAAC;AAAA,gBACH;AAAA,cACF;AAAA,YACF;AAAA,UACF,CAAC;AAAA,QACH,CAAC;AACD;AAAA,MACF;AAAA,MAEA,KAAK,aAAa;AAChB,YAAI,OAAO;AACX,cAAM,YAID,CAAC;AAEN,mBAAW,QAAQ,SAAS;AAC1B,kBAAQ,KAAK,MAAM;AAAA,YACjB,KAAK,QAAQ;AACX,sBAAQ,KAAK;AACb;AAAA,YACF;AAAA,YACA,KAAK,aAAa;AAChB,wBAAU,KAAK;AAAA,gBACb,IAAI,KAAK;AAAA,gBACT,MAAM;AAAA,gBACN,UAAU;AAAA,kBACR,MAAM,KAAK;AAAA,kBACX,WAAW,KAAK,UAAU,KAAK,KAAK;AAAA,gBACtC;AAAA,cACF,CAAC;AACD;AAAA,YACF;AAAA,UACF;AAAA,QACF;AAEA,iBAAS,KAAK;AAAA,UACZ,MAAM;AAAA,UACN,SAAS;AAAA,UACT,QAAQ,gBAAgB,OAAO;AAAA,UAC/B,YAAY,UAAU,SAAS,IAAI,YAAY;AAAA,QACjD,CAAC;AAED;AAAA,MACF;AAAA,MACA,KAAK,QAAQ;AACX,mBAAW,gBAAgB,SAAS;AAClC,gBAAM,SAAS,aAAa;AAE5B,cAAI;AACJ,kBAAQ,OAAO,MAAM;AAAA,YACnB,KAAK;AAAA,YACL,KAAK;AACH,6BAAe,OAAO;AACtB;AAAA,YACF,KAAK;AAAA,YACL,KAAK;AAAA,YACL,KAAK;AACH,6BAAe,KAAK,UAAU,OAAO,KAAK;AAC1C;AAAA,UACJ;AAEA,mBAAS,KAAK;AAAA,YACZ,MAAM;AAAA,YACN,MAAM,aAAa;AAAA,YACnB,cAAc,aAAa;AAAA,YAC3B,SAAS;AAAA,UACX,CAAC;AAAA,QACH;AACA;AAAA,MACF;AAAA,MACA,SAAS;AACP,cAAM,mBAA0B;AAChC,cAAM,IAAI,MAAM,qBAAqB,gBAAgB,EAAE;AAAA,MACzD;AAAA,IACF;AAAA,EACF;AAEA,SAAO;AACT;;;ACtIO,SAAS,oBAAoB;AAAA,EAClC;AAAA,EACA;AAAA,EACA;AACF,GAIG;AACD,SAAO;AAAA,IACL,IAAI,kBAAM;AAAA,IACV,SAAS,wBAAS;AAAA,IAClB,WAAW,WAAW,OAAO,IAAI,KAAK,UAAU,GAAI,IAAI;AAAA,EAC1D;AACF;;;ACZO,SAAS,uBACd,cAC6B;AAC7B,UAAQ,cAAc;AAAA,IACpB,KAAK;AACH,aAAO;AAAA,IACT,KAAK;AAAA,IACL,KAAK;AACH,aAAO;AAAA,IACT,KAAK;AACH,aAAO;AAAA,IACT;AACE,aAAO;AAAA,EACX;AACF;;;AChBA,gBAAkB;AAuBX,IAAM,yBAAyB,YAAE,OAAO;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,EAM7C,YAAY,YAAE,QAAQ,EAAE,SAAS;AAAA,EAEjC,oBAAoB,YAAE,OAAO,EAAE,SAAS;AAAA,EACxC,mBAAmB,YAAE,OAAO,EAAE,SAAS;AACzC,CAAC;;;ACjCD,IAAAC,yBAA+C;AAC/C,IAAAC,aAAkB;AAElB,IAAM,yBAAyB,aAAE,OAAO;AAAA,EACtC,QAAQ,aAAE,QAAQ,OAAO;AAAA,EACzB,SAAS,aAAE,OAAO;AAAA,EAClB,MAAM,aAAE,OAAO;AAAA,EACf,OAAO,aAAE,OAAO,EAAE,SAAS;AAAA,EAC3B,MAAM,aAAE,OAAO,EAAE,SAAS;AAC5B,CAAC;AAIM,IAAM,mCAA+B,uDAA+B;AAAA,EACzE,aAAa;AAAA,EACb,gBAAgB,UAAQ,KAAK;AAC/B,CAAC;;;AChBD,IAAAC,mBAIO;AAGA,SAAS,aAAa;AAAA,EAC3B;AAAA,EACA;AACF,GAgBE;AAEA,WAAQ,+BAAO,UAAS,QAAQ;AAEhC,QAAM,eAA6C,CAAC;AAEpD,MAAI,SAAS,MAAM;AACjB,WAAO,EAAE,OAAO,QAAW,YAAY,QAA
W,aAAa;AAAA,EACjE;AAEA,QAAM,eAOD,CAAC;AAEN,aAAW,QAAQ,OAAO;AACxB,QAAI,KAAK,SAAS,oBAAoB;AACpC,mBAAa,KAAK,EAAE,MAAM,oBAAoB,KAAK,CAAC;AAAA,IACtD,OAAO;AACL,mBAAa,KAAK;AAAA,QAChB,MAAM;AAAA,QACN,UAAU;AAAA,UACR,MAAM,KAAK;AAAA,UACX,aAAa,KAAK;AAAA,UAClB,YAAY,KAAK;AAAA,QACnB;AAAA,MACF,CAAC;AAAA,IACH;AAAA,EACF;AAEA,MAAI,cAAc,MAAM;AACtB,WAAO,EAAE,OAAO,cAAc,YAAY,QAAW,aAAa;AAAA,EACpE;AAEA,QAAM,OAAO,WAAW;AAExB,UAAQ,MAAM;AAAA,IACZ,KAAK;AAAA,IACL,KAAK;AACH,aAAO,EAAE,OAAO,cAAc,YAAY,MAAM,aAAa;AAAA,IAC/D,KAAK;AACH,aAAO,EAAE,OAAO,cAAc,YAAY,OAAO,aAAa;AAAA,IAIhE,KAAK;AACH,aAAO;AAAA,QACL,OAAO,aAAa;AAAA,UAClB,UAAQ,KAAK,SAAS,SAAS,WAAW;AAAA,QAC5C;AAAA,QACA,YAAY;AAAA,QACZ;AAAA,MACF;AAAA,IACF,SAAS;AACP,YAAM,mBAA0B;AAChC,YAAM,IAAI,+CAA8B;AAAA,QACtC,eAAe,qBAAqB,gBAAgB;AAAA,MACtD,CAAC;AAAA,IACH;AAAA,EACF;AACF;;;ANvDO,IAAM,2BAAN,MAA0D;AAAA,EAO/D,YAAY,SAA6B,QAA2B;AANpE,SAAS,uBAAuB;AAehC,SAAS,gBAA0C;AAAA,MACjD,mBAAmB,CAAC,gBAAgB;AAAA,IACtC;AAVE,SAAK,UAAU;AACf,SAAK,SAAS;AAAA,EAChB;AAAA,EAEA,IAAI,WAAmB;AACrB,WAAO,KAAK,OAAO;AAAA,EACrB;AAAA,EAMA,MAAc,QAAQ;AAAA,IACpB;AAAA,IACA;AAAA,IACA;AAAA,IACA;AAAA,IACA;AAAA,IACA;AAAA,IACA;AAAA,IACA;AAAA,IACA;AAAA,IACA;AAAA,IACA;AAAA,IACA;AAAA,IACA;AAAA,EACF,GAAiD;AArEnD;AAsEI,UAAM,WAAyC,CAAC;AAEhD,UAAM,WACH,eAAM,6CAAqB;AAAA,MAC1B,UAAU;AAAA,MACV;AAAA,MACA,QAAQ;AAAA,IACV,CAAC,MAJA,YAIM,CAAC;AAEV,QAAI,QAAQ,MAAM;AAChB,eAAS,KAAK;AAAA,QACZ,MAAM;AAAA,QACN,SAAS;AAAA,MACX,CAAC;AAAA,IACH;AAEA,QAAI,oBAAoB,MAAM;AAC5B,eAAS,KAAK;AAAA,QACZ,MAAM;AAAA,QACN,SAAS;AAAA,MACX,CAAC;AAAA,IACH;AAEA,QAAI,mBAAmB,MAAM;AAC3B,eAAS,KAAK;AAAA,QACZ,MAAM;AAAA,QACN,SAAS;AAAA,MACX,CAAC;AAAA,IACH;AAEA,QAAI,iBAAiB,MAAM;AACzB,eAAS,KAAK;AAAA,QACZ,MAAM;AAAA,QACN,SAAS;AAAA,MACX,CAAC;AAAA,IACH;AAEA,QACE,kBAAkB,QAClB,eAAe,SAAS,UACxB,eAAe,UAAU,MACzB;AACA,eAAS,KAAK;AAAA,QACZ,MAAM;AAAA,QACN,SAAS;AAAA,QACT,SAAS;AAAA,MACX,CAAC;AAAA,IACH;AAEA,UAAM,WAAW;AAAA;AAAA,MAEf,OAAO,KAAK;AAAA;AAAA,MAGZ,aAAa,QAAQ;AAAA;AAAA,MAGrB,YAAY;AAAA,MACZ;AAAA,MACA,OAAO;AAAA,MACP,aAAa;AAAA;AAAA,MAGb,kBACE,iDAAgB,UAAS,SAAS,EAAE,MAAM,cAAc,IAAI;AAAA;AAAA,MAG9D,sBAAsB,QAAQ;AAAA,MAC9B,qBAAqB,QAAQ;AAAA;AAAA,MAG7B,UAAU,6BAA6B,MAAM;AAAA,IAC/C;AAEA,UAAM;AAAA,MACJ,OAAO;AAAA,MACP,YAAY;AAAA,MACZ;AAAA,IACF,IAAI,aAAa;AAAA,MACf;AAAA,MACA;AAAA,IACF,CAAC;AAED,WAAO;AAAA,MACL,MAAM;AAAA,QACJ,GAAG;AAAA,QACH,OAAO;AAAA,QACP,aAAa;AAAA,MACf;AAAA,MACA,UAAU,CAAC,GAAG,UAAU,GAAG,YAAY;AAAA,IACzC;AAAA,EACF;AAAA,EAEA,MAAM,WACJ,SAC6D;AAC7D,UAAM,EAAE,MAAM,MAAM,SAAS,IAAI,MAAM,KAAK,QAAQ,OAAO;AAE3D,UAAM;AAAA,MACJ;AAAA,MACA,OAAO;AAAA,MACP,UAAU;AAAA,IACZ,IAAI,UAAM,sCAAc;AAAA,MACtB,KAAK,GAAG,KAAK,OAAO,OAAO;AAAA,MAC3B,aAAS,uCAAe,KAAK,OAAO,QAAQ,GAAG,QAAQ,OAAO;AAAA,MAC9D;AAAA,MACA,uBAAuB;AAAA,MACvB,+BAA2B;AAAA,QACzB;AAAA,MACF;AAAA,MACA,aAAa,QAAQ;AAAA,MACrB,OAAO,KAAK,OAAO;AAAA,IACrB,CAAC;AAED,UAAM,SAAS,SAAS,QAAQ,CAAC;AACjC,UAAM,UAAyC,CAAC;AAGhD,QAAI,OAAO,mBAAmB,OAAO,QAAQ,OAAO;AAKpD,UAAM,cAAc,KAAK,SAAS,KAAK,SAAS,SAAS,CAAC;AAC1D,QACE,YAAY,SAAS,gBACrB,6BAAM,WAAW,YAAY,WAC7B;AACA,aAAO,KAAK,MAAM,YAAY,QAAQ,MAAM;AAAA,IAC9C;AAEA,QAAI,QAAQ,QAAQ,KAAK,SAAS,GAAG;AACnC,cAAQ,KAAK,EAAE,MAAM,QAAQ,KAAK,CAAC;AAAA,IACrC;AAGA,QAAI,OAAO,QAAQ,cAAc,MAAM;AACrC,iBAAW,YAAY,OAAO,QAAQ,YAAY;AAChD,gBAAQ,KAAK;AAAA,UACX,MAAM;AAAA,UACN,YAAY,SAAS;AAAA,UACrB,UAAU,SAAS,SAAS;AAAA,UAC5B,OAAO,SAAS,SAAS;AAAA,QAC3B,CAAC;AAAA,MACH;AAAA,IACF;AAEA,WAAO;AAAA,MACL;AAAA,MACA,cAAc,uBAAuB,OAAO,aAAa;AAAA,MACzD,OAAO;AAAA,QACL,aAAa,SAAS,MAAM;AAAA,QAC5B,cAAc,SAAS,MAAM;AAAA,QAC7B,aAAa,SAAS,MAAM;AAAA,MAC9B;AAAA,MACA,SAAS,EAAE,KAAK;AAAA,MAChB,UAAU;AAAA,QACR,GAAG,oBAAoB,QAAQ;AAAA,QAC/B,SAAS;AAAA,QACT,MAAM;AAAA,MACR;AAAA,MACA;AAAA,IACF;AAAA,EACF;AAAA,EAEA,MAAM,SACJ,S
AC2D;AAC3D,UAAM,EAAE,MAAM,SAAS,IAAI,MAAM,KAAK,QAAQ,OAAO;AACrD,UAAM,OAAO,EAAE,GAAG,MAAM,QAAQ,KAAK;AAErC,UAAM,EAAE,iBAAiB,OAAO,SAAS,IAAI,UAAM,sCAAc;AAAA,MAC/D,KAAK,GAAG,KAAK,OAAO,OAAO;AAAA,MAC3B,aAAS,uCAAe,KAAK,OAAO,QAAQ,GAAG,QAAQ,OAAO;AAAA,MAC9D;AAAA,MACA,uBAAuB;AAAA,MACvB,+BAA2B;AAAA,QACzB;AAAA,MACF;AAAA,MACA,aAAa,QAAQ;AAAA,MACrB,OAAO,KAAK,OAAO;AAAA,IACrB,CAAC;AAED,QAAI,eAA4C;AAChD,UAAM,QAA8B;AAAA,MAClC,aAAa;AAAA,MACb,cAAc;AAAA,MACd,aAAa;AAAA,IACf;AAEA,QAAI,eAAe;AACnB,QAAI,aAAa;AAEjB,WAAO;AAAA,MACL,QAAQ,SAAS;AAAA,QACf,IAAI,gBAGF;AAAA,UACA,MAAM,YAAY;AAChB,uBAAW,QAAQ,EAAE,MAAM,gBAAgB,SAAS,CAAC;AAAA,UACvD;AAAA,UAEA,UAAU,OAAO,YAAY;AAE3B,gBAAI,QAAQ,kBAAkB;AAC5B,yBAAW,QAAQ,EAAE,MAAM,OAAO,UAAU,MAAM,SAAS,CAAC;AAAA,YAC9D;AAEA,gBAAI,CAAC,MAAM,SAAS;AAClB,yBAAW,QAAQ,EAAE,MAAM,SAAS,OAAO,MAAM,MAAM,CAAC;AACxD;AAAA,YACF;AAEA,kBAAM,QAAQ,MAAM;AAEpB,gBAAI,cAAc;AAChB,6BAAe;AAEf,yBAAW,QAAQ;AAAA,gBACjB,MAAM;AAAA,gBACN,GAAG,oBAAoB,KAAK;AAAA,cAC9B,CAAC;AAAA,YACH;AAEA,gBAAI,MAAM,SAAS,MAAM;AACvB,oBAAM,cAAc,MAAM,MAAM;AAChC,oBAAM,eAAe,MAAM,MAAM;AACjC,oBAAM,cAAc,MAAM,MAAM;AAAA,YAClC;AAEA,kBAAM,SAAS,MAAM,QAAQ,CAAC;AAC9B,kBAAM,QAAQ,OAAO;AAErB,kBAAM,cAAc,mBAAmB,MAAM,OAAO;AAEpD,gBAAI,eAAe,QAAQ,YAAY,SAAS,GAAG;AACjD,kBAAI,CAAC,YAAY;AACf,2BAAW,QAAQ,EAAE,MAAM,cAAc,IAAI,IAAI,CAAC;AAClD,6BAAa;AAAA,cACf;AAEA,yBAAW,QAAQ;AAAA,gBACjB,MAAM;AAAA,gBACN,IAAI;AAAA,gBACJ,OAAO;AAAA,cACT,CAAC;AAAA,YACH;AAEA,iBAAI,+BAAO,eAAc,MAAM;AAC7B,yBAAW,YAAY,MAAM,YAAY;AACvC,sBAAM,aAAa,SAAS;AAC5B,sBAAM,WAAW,SAAS,SAAS;AACnC,sBAAM,QAAQ,SAAS,SAAS;AAEhC,2BAAW,QAAQ;AAAA,kBACjB,MAAM;AAAA,kBACN,IAAI;AAAA,kBACJ;AAAA,gBACF,CAAC;AAED,2BAAW,QAAQ;AAAA,kBACjB,MAAM;AAAA,kBACN,IAAI;AAAA,kBACJ,OAAO;AAAA,gBACT,CAAC;AAED,2BAAW,QAAQ;AAAA,kBACjB,MAAM;AAAA,kBACN,IAAI;AAAA,gBACN,CAAC;AAED,2BAAW,QAAQ;AAAA,kBACjB,MAAM;AAAA,kBACN;AAAA,kBACA;AAAA,kBACA;AAAA,gBACF,CAAC;AAAA,cACH;AAAA,YACF;AAEA,gBAAI,OAAO,iBAAiB,MAAM;AAChC,6BAAe,uBAAuB,OAAO,aAAa;AAAA,YAC5D;AAAA,UACF;AAAA,UAEA,MAAM,YAAY;AAChB,gBAAI,YAAY;AACd,yBAAW,QAAQ,EAAE,MAAM,YAAY,IAAI,IAAI,CAAC;AAAA,YAClD;AAEA,uBAAW,QAAQ;AAAA,cACjB,MAAM;AAAA,cACN;AAAA,cACA;AAAA,YACF,CAAC;AAAA,UACH;AAAA,QACF,CAAC;AAAA,MACH;AAAA,MACA,SAAS,EAAE,KAAK;AAAA,MAChB,UAAU,EAAE,SAAS,gBAAgB;AAAA,IACvC;AAAA,EACF;AACF;AAEA,SAAS,mBAAmB,SAA+C;AACzE,MAAI,OAAO,YAAY,UAAU;AAC/B,WAAO;AAAA,EACT;AAEA,MAAI,WAAW,MAAM;AACnB,WAAO;AAAA,EACT;AAEA,QAAM,cAAwB,CAAC;AAE/B,aAAW,SAAS,SAAS;AAC3B,UAAM,EAAE,KAAK,IAAI;AAEjB,YAAQ,MAAM;AAAA,MACZ,KAAK;AACH,oBAAY,KAAK,MAAM,IAAI;AAC3B;AAAA,MACF,KAAK;AAAA,MACL,KAAK;AAEH;AAAA,MACF,SAAS;AACP,cAAM,mBAA0B;AAChC,cAAM,IAAI,MAAM,qBAAqB,gBAAgB,EAAE;AAAA,MACzD;AAAA,IACF;AAAA,EACF;AAEA,SAAO,YAAY,SAAS,YAAY,KAAK,EAAE,IAAI;AACrD;AAEA,IAAM,uBAAuB,aAC1B,MAAM;AAAA,EACL,aAAE,OAAO;AAAA,EACT,aAAE;AAAA,IACA,aAAE,mBAAmB,QAAQ;AAAA,MAC3B,aAAE,OAAO;AAAA,QACP,MAAM,aAAE,QAAQ,MAAM;AAAA,QACtB,MAAM,aAAE,OAAO;AAAA,MACjB,CAAC;AAAA,MACD,aAAE,OAAO;AAAA,QACP,MAAM,aAAE,QAAQ,WAAW;AAAA,QAC3B,WAAW,aAAE,MAAM;AAAA,UACjB,aAAE,OAAO;AAAA,UACT,aAAE,OAAO;AAAA,YACP,KAAK,aAAE,OAAO;AAAA,YACd,QAAQ,aAAE,OAAO,EAAE,SAAS;AAAA,UAC9B,CAAC;AAAA,QACH,CAAC;AAAA,MACH,CAAC;AAAA,MACD,aAAE,OAAO;AAAA,QACP,MAAM,aAAE,QAAQ,WAAW;AAAA,QAC3B,eAAe,aAAE,MAAM,aAAE,OAAO,CAAC;AAAA,MACnC,CAAC;AAAA,IACH,CAAC;AAAA,EACH;AACF,CAAC,EACA,QAAQ;AAEX,IAAM,qBAAqB,aAAE,OAAO;AAAA,EAClC,eAAe,aAAE,OAAO;AAAA,EACxB,mBAAmB,aAAE,OAAO;AAAA,EAC5B,cAAc,aAAE,OAAO;AACzB,CAAC;AAID,IAAM,4BAA4B,aAAE,OAAO;AAAA,EACzC,IAAI,aAAE,OAAO,EAAE,QAAQ;AAAA,EACvB,SAAS,aAAE,OAAO,EAAE,QAAQ;AAAA,EAC5B,OAAO,aAAE,OAAO,EAAE,QAAQ;AAAA,EAC1B,SAAS,aAAE;AAAA,IACT,aAAE,OAAO;AAAA,MACP,SAAS,aAAE,OAAO;AAAA,QAChB,MAAM,aAAE,QAAQ,WAAW;AAAA,QAC3B,SAAS;AAAA,QACT,YAAY,aA
CT;AAAA,UACC,aAAE,OAAO;AAAA,YACP,IAAI,aAAE,OAAO;AAAA,YACb,UAAU,aAAE,OAAO,EAAE,MAAM,aAAE,OAAO,GAAG,WAAW,aAAE,OAAO,EAAE,CAAC;AAAA,UAChE,CAAC;AAAA,QACH,EACC,QAAQ;AAAA,MACb,CAAC;AAAA,MACD,OAAO,aAAE,OAAO;AAAA,MAChB,eAAe,aAAE,OAAO,EAAE,QAAQ;AAAA,IACpC,CAAC;AAAA,EACH;AAAA,EACA,QAAQ,aAAE,QAAQ,iBAAiB;AAAA,EACnC,OAAO;AACT,CAAC;AAID,IAAM,yBAAyB,aAAE,OAAO;AAAA,EACtC,IAAI,aAAE,OAAO,EAAE,QAAQ;AAAA,EACvB,SAAS,aAAE,OAAO,EAAE,QAAQ;AAAA,EAC5B,OAAO,aAAE,OAAO,EAAE,QAAQ;AAAA,EAC1B,SAAS,aAAE;AAAA,IACT,aAAE,OAAO;AAAA,MACP,OAAO,aAAE,OAAO;AAAA,QACd,MAAM,aAAE,KAAK,CAAC,WAAW,CAAC,EAAE,SAAS;AAAA,QACrC,SAAS;AAAA,QACT,YAAY,aACT;AAAA,UACC,aAAE,OAAO;AAAA,YACP,IAAI,aAAE,OAAO;AAAA,YACb,UAAU,aAAE,OAAO,EAAE,MAAM,aAAE,OAAO,GAAG,WAAW,aAAE,OAAO,EAAE,CAAC;AAAA,UAChE,CAAC;AAAA,QACH,EACC,QAAQ;AAAA,MACb,CAAC;AAAA,MACD,eAAe,aAAE,OAAO,EAAE,QAAQ;AAAA,MAClC,OAAO,aAAE,OAAO;AAAA,IAClB,CAAC;AAAA,EACH;AAAA,EACA,OAAO,mBAAmB,QAAQ;AACpC,CAAC;;;AO9eD,IAAAC,mBAGO;AACP,IAAAC,yBAKO;AACP,IAAAC,aAAkB;AAWX,IAAM,wBAAN,MAAgE;AAAA,EAYrE,YACE,SACA,QACA;AAdF,SAAS,uBAAuB;AAEhC,SAAS,uBAAuB;AAChC,SAAS,wBAAwB;AAY/B,SAAK,UAAU;AACf,SAAK,SAAS;AAAA,EAChB;AAAA,EAVA,IAAI,WAAmB;AACrB,WAAO,KAAK,OAAO;AAAA,EACrB;AAAA,EAUA,MAAM,QAAQ;AAAA,IACZ;AAAA,IACA;AAAA,IACA;AAAA,EACF,GAEE;AACA,QAAI,OAAO,SAAS,KAAK,sBAAsB;AAC7C,YAAM,IAAI,oDAAmC;AAAA,QAC3C,UAAU,KAAK;AAAA,QACf,SAAS,KAAK;AAAA,QACd,sBAAsB,KAAK;AAAA,QAC3B;AAAA,MACF,CAAC;AAAA,IACH;AAEA,UAAM;AAAA,MACJ;AAAA,MACA,OAAO;AAAA,MACP;AAAA,IACF,IAAI,UAAM,sCAAc;AAAA,MACtB,KAAK,GAAG,KAAK,OAAO,OAAO;AAAA,MAC3B,aAAS,uCAAe,KAAK,OAAO,QAAQ,GAAG,OAAO;AAAA,MACtD,MAAM;AAAA,QACJ,OAAO,KAAK;AAAA,QACZ,OAAO;AAAA,QACP,iBAAiB;AAAA,MACnB;AAAA,MACA,uBAAuB;AAAA,MACvB,+BAA2B;AAAA,QACzB;AAAA,MACF;AAAA,MACA;AAAA,MACA,OAAO,KAAK,OAAO;AAAA,IACrB,CAAC;AAED,WAAO;AAAA,MACL,YAAY,SAAS,KAAK,IAAI,UAAQ,KAAK,SAAS;AAAA,MACpD,OAAO,SAAS,QACZ,EAAE,QAAQ,SAAS,MAAM,cAAc,IACvC;AAAA,MACJ,UAAU,EAAE,SAAS,iBAAiB,MAAM,SAAS;AAAA,IACvD;AAAA,EACF;AACF;AAIA,IAAM,qCAAqC,aAAE,OAAO;AAAA,EAClD,MAAM,aAAE,MAAM,aAAE,OAAO,EAAE,WAAW,aAAE,MAAM,aAAE,OAAO,CAAC,EAAE,CAAC,CAAC;AAAA,EAC1D,OAAO,aAAE,OAAO,EAAE,eAAe,aAAE,OAAO,EAAE,CAAC,EAAE,QAAQ;AACzD,CAAC;;;ARvBM,SAAS,cACd,UAAmC,CAAC,GACnB;AAvEnB;AAwEE,QAAM,WACJ,sDAAqB,QAAQ,OAAO,MAApC,YAAyC;AAE3C,QAAM,aAAa,OAAO;AAAA,IACxB,eAAe,cAAU,mCAAW;AAAA,MAClC,QAAQ,QAAQ;AAAA,MAChB,yBAAyB;AAAA,MACzB,aAAa;AAAA,IACf,CAAC,CAAC;AAAA,IACF,GAAG,QAAQ;AAAA,EACb;AAEA,QAAM,kBAAkB,CAAC,YACvB,IAAI,yBAAyB,SAAS;AAAA,IACpC,UAAU;AAAA,IACV;AAAA,IACA,SAAS;AAAA,IACT,OAAO,QAAQ;AAAA,EACjB,CAAC;AAEH,QAAM,uBAAuB,CAAC,YAC5B,IAAI,sBAAsB,SAAS;AAAA,IACjC,UAAU;AAAA,IACV;AAAA,IACA,SAAS;AAAA,IACT,OAAO,QAAQ;AAAA,EACjB,CAAC;AAEH,QAAM,WAAW,SAAU,SAA6B;AACtD,QAAI,YAAY;AACd,YAAM,IAAI;AAAA,QACR;AAAA,MACF;AAAA,IACF;AAEA,WAAO,gBAAgB,OAAO;AAAA,EAChC;AAEA,WAAS,gBAAgB;AACzB,WAAS,OAAO;AAChB,WAAS,YAAY;AACrB,WAAS,gBAAgB;AACzB,WAAS,qBAAqB;AAE9B,WAAS,aAAa,CAAC,YAAoB;AACzC,UAAM,IAAI,kCAAiB,EAAE,SAAS,WAAW,aAAa,CAAC;AAAA,EACjE;AAEA,SAAO;AACT;AAKO,IAAM,UAAU,cAAc;","names":["import_provider","import_provider_utils","import_provider_utils","import_v4","import_provider_utils","import_v4","import_provider","import_provider","import_provider_utils","import_v4"]}
+
{"version":3,"sources":["../src/index.ts","../src/mistral-provider.ts","../src/mistral-chat-language-model.ts","../src/convert-to-mistral-chat-messages.ts","../src/get-response-metadata.ts","../src/map-mistral-finish-reason.ts","../src/mistral-chat-options.ts","../src/mistral-error.ts","../src/mistral-prepare-tools.ts","../src/mistral-embedding-model.ts"],"sourcesContent":["export { createMistral, mistral } from './mistral-provider';\nexport type {\n MistralProvider,\n MistralProviderSettings,\n} from './mistral-provider';\n","import {\n EmbeddingModelV2,\n LanguageModelV2,\n NoSuchModelError,\n ProviderV2,\n} from '@ai-sdk/provider';\nimport {\n FetchFunction,\n loadApiKey,\n withoutTrailingSlash,\n} from '@ai-sdk/provider-utils';\nimport { MistralChatLanguageModel } from './mistral-chat-language-model';\nimport { MistralChatModelId } from './mistral-chat-options';\nimport { MistralEmbeddingModel } from './mistral-embedding-model';\nimport { MistralEmbeddingModelId } from './mistral-embedding-options';\n\nexport interface MistralProvider extends ProviderV2 {\n (modelId: MistralChatModelId): LanguageModelV2;\n\n /**\nCreates a model for text generation.\n*/\n languageModel(modelId: MistralChatModelId): LanguageModelV2;\n\n /**\nCreates a model for text generation.\n*/\n chat(modelId: MistralChatModelId): LanguageModelV2;\n\n /**\n@deprecated Use `textEmbedding()` instead.\n */\n embedding(modelId: MistralEmbeddingModelId): EmbeddingModelV2<string>;\n\n textEmbedding(modelId: MistralEmbeddingModelId): EmbeddingModelV2<string>;\n\n textEmbeddingModel: (\n modelId: MistralEmbeddingModelId,\n ) => EmbeddingModelV2<string>;\n}\n\nexport interface MistralProviderSettings {\n /**\nUse a different URL prefix for API calls, e.g. to use proxy servers.\nThe default prefix is `https://api.mistral.ai/v1`.\n */\n baseURL?: string;\n\n /**\nAPI key that is being send using the `Authorization` header.\nIt defaults to the `MISTRAL_API_KEY` environment variable.\n */\n apiKey?: string;\n\n /**\nCustom headers to include in the requests.\n */\n headers?: Record<string, string>;\n\n /**\nCustom fetch implementation. You can use it as a middleware to intercept requests,\nor to provide a custom fetch implementation for e.g. testing.\n */\n fetch?: FetchFunction;\n\n generateId?: () => string;\n}\n\n/**\nCreate a Mistral AI provider instance.\n */\nexport function createMistral(\n options: MistralProviderSettings = {},\n): MistralProvider {\n const baseURL =\n withoutTrailingSlash(options.baseURL) ?? 
'https://api.mistral.ai/v1';\n\n const getHeaders = () => ({\n Authorization: `Bearer ${loadApiKey({\n apiKey: options.apiKey,\n environmentVariableName: 'MISTRAL_API_KEY',\n description: 'Mistral',\n })}`,\n ...options.headers,\n });\n\n const createChatModel = (modelId: MistralChatModelId) =>\n new MistralChatLanguageModel(modelId, {\n provider: 'mistral.chat',\n baseURL,\n headers: getHeaders,\n fetch: options.fetch,\n generateId: options.generateId,\n });\n\n const createEmbeddingModel = (modelId: MistralEmbeddingModelId) =>\n new MistralEmbeddingModel(modelId, {\n provider: 'mistral.embedding',\n baseURL,\n headers: getHeaders,\n fetch: options.fetch,\n });\n\n const provider = function (modelId: MistralChatModelId) {\n if (new.target) {\n throw new Error(\n 'The Mistral model function cannot be called with the new keyword.',\n );\n }\n\n return createChatModel(modelId);\n };\n\n provider.languageModel = createChatModel;\n provider.chat = createChatModel;\n provider.embedding = createEmbeddingModel;\n provider.textEmbedding = createEmbeddingModel;\n provider.textEmbeddingModel = createEmbeddingModel;\n\n provider.imageModel = (modelId: string) => {\n throw new NoSuchModelError({ modelId, modelType: 'imageModel' });\n };\n\n return provider;\n}\n\n/**\nDefault Mistral provider instance.\n */\nexport const mistral = createMistral();\n","import {\n LanguageModelV2,\n LanguageModelV2CallWarning,\n LanguageModelV2Content,\n LanguageModelV2FinishReason,\n LanguageModelV2StreamPart,\n LanguageModelV2Usage,\n} from '@ai-sdk/provider';\nimport {\n combineHeaders,\n createEventSourceResponseHandler,\n createJsonResponseHandler,\n FetchFunction,\n generateId,\n injectJsonInstructionIntoMessages,\n parseProviderOptions,\n ParseResult,\n postJsonToApi,\n} from '@ai-sdk/provider-utils';\nimport { z } from 'zod/v4';\nimport { convertToMistralChatMessages } from './convert-to-mistral-chat-messages';\nimport { getResponseMetadata } from './get-response-metadata';\nimport { mapMistralFinishReason } from './map-mistral-finish-reason';\nimport {\n MistralChatModelId,\n mistralProviderOptions,\n} from './mistral-chat-options';\nimport { mistralFailedResponseHandler } from './mistral-error';\nimport { prepareTools } from './mistral-prepare-tools';\n\ntype MistralChatConfig = {\n provider: string;\n baseURL: string;\n headers: () => Record<string, string | undefined>;\n fetch?: FetchFunction;\n generateId?: () => string;\n};\n\nexport class MistralChatLanguageModel implements LanguageModelV2 {\n readonly specificationVersion = 'v2';\n\n readonly modelId: MistralChatModelId;\n\n private readonly config: MistralChatConfig;\n private readonly generateId: () => string;\n\n constructor(modelId: MistralChatModelId, config: MistralChatConfig) {\n this.modelId = modelId;\n this.config = config;\n this.generateId = config.generateId ?? generateId;\n }\n\n get provider(): string {\n return this.config.provider;\n }\n\n readonly supportedUrls: Record<string, RegExp[]> = {\n 'application/pdf': [/^https:\\/\\/.*$/],\n };\n\n private async getArgs({\n prompt,\n maxOutputTokens,\n temperature,\n topP,\n topK,\n frequencyPenalty,\n presencePenalty,\n stopSequences,\n responseFormat,\n seed,\n providerOptions,\n tools,\n toolChoice,\n }: Parameters<LanguageModelV2['doGenerate']>[0]) {\n const warnings: LanguageModelV2CallWarning[] = [];\n\n const options =\n (await parseProviderOptions({\n provider: 'mistral',\n providerOptions,\n schema: mistralProviderOptions,\n })) ?? 
{};\n\n if (topK != null) {\n warnings.push({\n type: 'unsupported-setting',\n setting: 'topK',\n });\n }\n\n if (frequencyPenalty != null) {\n warnings.push({\n type: 'unsupported-setting',\n setting: 'frequencyPenalty',\n });\n }\n\n if (presencePenalty != null) {\n warnings.push({\n type: 'unsupported-setting',\n setting: 'presencePenalty',\n });\n }\n\n if (stopSequences != null) {\n warnings.push({\n type: 'unsupported-setting',\n setting: 'stopSequences',\n });\n }\n\n // TODO remove when we have JSON schema support (see OpenAI implementation)\n if (\n responseFormat != null &&\n responseFormat.type === 'json' &&\n responseFormat.schema != null\n ) {\n warnings.push({\n type: 'unsupported-setting',\n setting: 'responseFormat',\n details: 'JSON response format schema is not supported',\n });\n }\n\n // For Mistral we need to need to instruct the model to return a JSON object.\n // https://docs.mistral.ai/capabilities/structured-output/structured_output_overview/\n if (responseFormat?.type === 'json') {\n prompt = injectJsonInstructionIntoMessages({\n messages: prompt,\n schema: responseFormat.schema,\n });\n }\n\n const baseArgs = {\n // model id:\n model: this.modelId,\n\n // model specific settings:\n safe_prompt: options.safePrompt,\n\n // standardized settings:\n max_tokens: maxOutputTokens,\n temperature,\n top_p: topP,\n random_seed: seed,\n\n // response format:\n // TODO add JSON schema support (see OpenAI implementation)\n response_format:\n responseFormat?.type === 'json' ? { type: 'json_object' } : undefined,\n\n // mistral-specific provider options:\n document_image_limit: options.documentImageLimit,\n document_page_limit: options.documentPageLimit,\n\n // messages:\n messages: convertToMistralChatMessages(prompt),\n };\n\n const {\n tools: mistralTools,\n toolChoice: mistralToolChoice,\n toolWarnings,\n } = prepareTools({\n tools,\n toolChoice,\n });\n\n return {\n args: {\n ...baseArgs,\n tools: mistralTools,\n tool_choice: mistralToolChoice,\n },\n warnings: [...warnings, ...toolWarnings],\n };\n }\n\n async doGenerate(\n options: Parameters<LanguageModelV2['doGenerate']>[0],\n ): Promise<Awaited<ReturnType<LanguageModelV2['doGenerate']>>> {\n const { args: body, warnings } = await this.getArgs(options);\n\n const {\n responseHeaders,\n value: response,\n rawValue: rawResponse,\n } = await postJsonToApi({\n url: `${this.config.baseURL}/chat/completions`,\n headers: combineHeaders(this.config.headers(), options.headers),\n body,\n failedResponseHandler: mistralFailedResponseHandler,\n successfulResponseHandler: createJsonResponseHandler(\n mistralChatResponseSchema,\n ),\n abortSignal: options.abortSignal,\n fetch: this.config.fetch,\n });\n\n const choice = response.choices[0];\n const content: Array<LanguageModelV2Content> = [];\n\n // process content parts in order to preserve sequence\n if (\n choice.message.content != null &&\n Array.isArray(choice.message.content)\n ) {\n for (const part of choice.message.content) {\n if (part.type === 'thinking') {\n const reasoningText = extractReasoningContent(part.thinking);\n if (reasoningText.length > 0) {\n content.push({ type: 'reasoning', text: reasoningText });\n }\n } else if (part.type === 'text') {\n if (part.text.length > 0) {\n content.push({ type: 'text', text: part.text });\n }\n }\n }\n } else {\n // handle legacy string content\n const text = extractTextContent(choice.message.content);\n if (text != null && text.length > 0) {\n content.push({ type: 'text', text });\n }\n }\n\n // when there is a trailing 
assistant message, mistral will send the\n // content of that message again. we skip this repeated content to\n // avoid duplication, e.g. in continuation mode.\n\n // tool calls:\n if (choice.message.tool_calls != null) {\n for (const toolCall of choice.message.tool_calls) {\n content.push({\n type: 'tool-call',\n toolCallId: toolCall.id,\n toolName: toolCall.function.name,\n input: toolCall.function.arguments!,\n });\n }\n }\n\n return {\n content,\n finishReason: mapMistralFinishReason(choice.finish_reason),\n usage: {\n inputTokens: response.usage.prompt_tokens,\n outputTokens: response.usage.completion_tokens,\n totalTokens: response.usage.total_tokens,\n },\n request: { body },\n response: {\n ...getResponseMetadata(response),\n headers: responseHeaders,\n body: rawResponse,\n },\n warnings,\n };\n }\n\n async doStream(\n options: Parameters<LanguageModelV2['doStream']>[0],\n ): Promise<Awaited<ReturnType<LanguageModelV2['doStream']>>> {\n const { args, warnings } = await this.getArgs(options);\n const body = { ...args, stream: true };\n\n const { responseHeaders, value: response } = await postJsonToApi({\n url: `${this.config.baseURL}/chat/completions`,\n headers: combineHeaders(this.config.headers(), options.headers),\n body,\n failedResponseHandler: mistralFailedResponseHandler,\n successfulResponseHandler: createEventSourceResponseHandler(\n mistralChatChunkSchema,\n ),\n abortSignal: options.abortSignal,\n fetch: this.config.fetch,\n });\n\n let finishReason: LanguageModelV2FinishReason = 'unknown';\n const usage: LanguageModelV2Usage = {\n inputTokens: undefined,\n outputTokens: undefined,\n totalTokens: undefined,\n };\n\n let isFirstChunk = true;\n let activeText = false;\n let activeReasoningId: string | null = null;\n\n const generateId = this.generateId;\n\n return {\n stream: response.pipeThrough(\n new TransformStream<\n ParseResult<z.infer<typeof mistralChatChunkSchema>>,\n LanguageModelV2StreamPart\n >({\n start(controller) {\n controller.enqueue({ type: 'stream-start', warnings });\n },\n\n transform(chunk, controller) {\n // Emit raw chunk if requested (before anything else)\n if (options.includeRawChunks) {\n controller.enqueue({ type: 'raw', rawValue: chunk.rawValue });\n }\n\n if (!chunk.success) {\n controller.enqueue({ type: 'error', error: chunk.error });\n return;\n }\n\n const value = chunk.value;\n\n if (isFirstChunk) {\n isFirstChunk = false;\n\n controller.enqueue({\n type: 'response-metadata',\n ...getResponseMetadata(value),\n });\n }\n\n if (value.usage != null) {\n usage.inputTokens = value.usage.prompt_tokens;\n usage.outputTokens = value.usage.completion_tokens;\n usage.totalTokens = value.usage.total_tokens;\n }\n\n const choice = value.choices[0];\n const delta = choice.delta;\n\n const textContent = extractTextContent(delta.content);\n\n if (delta.content != null && Array.isArray(delta.content)) {\n for (const part of delta.content) {\n if (part.type === 'thinking') {\n const reasoningDelta = extractReasoningContent(part.thinking);\n if (reasoningDelta.length > 0) {\n if (activeReasoningId == null) {\n // end any active text before starting reasoning\n if (activeText) {\n controller.enqueue({ type: 'text-end', id: '0' });\n activeText = false;\n }\n\n activeReasoningId = generateId();\n controller.enqueue({\n type: 'reasoning-start',\n id: activeReasoningId,\n });\n }\n controller.enqueue({\n type: 'reasoning-delta',\n id: activeReasoningId,\n delta: reasoningDelta,\n });\n }\n }\n }\n }\n\n if (textContent != null && textContent.length > 0) {\n 
if (!activeText) {\n // if we were in reasoning mode, end it before starting text\n if (activeReasoningId != null) {\n controller.enqueue({\n type: 'reasoning-end',\n id: activeReasoningId,\n });\n activeReasoningId = null;\n }\n controller.enqueue({ type: 'text-start', id: '0' });\n activeText = true;\n }\n\n controller.enqueue({\n type: 'text-delta',\n id: '0',\n delta: textContent,\n });\n }\n\n if (delta?.tool_calls != null) {\n for (const toolCall of delta.tool_calls) {\n const toolCallId = toolCall.id;\n const toolName = toolCall.function.name;\n const input = toolCall.function.arguments;\n\n controller.enqueue({\n type: 'tool-input-start',\n id: toolCallId,\n toolName,\n });\n\n controller.enqueue({\n type: 'tool-input-delta',\n id: toolCallId,\n delta: input,\n });\n\n controller.enqueue({\n type: 'tool-input-end',\n id: toolCallId,\n });\n\n controller.enqueue({\n type: 'tool-call',\n toolCallId,\n toolName,\n input,\n });\n }\n }\n\n if (choice.finish_reason != null) {\n finishReason = mapMistralFinishReason(choice.finish_reason);\n }\n },\n\n flush(controller) {\n if (activeReasoningId != null) {\n controller.enqueue({\n type: 'reasoning-end',\n id: activeReasoningId,\n });\n }\n if (activeText) {\n controller.enqueue({ type: 'text-end', id: '0' });\n }\n\n controller.enqueue({\n type: 'finish',\n finishReason,\n usage,\n });\n },\n }),\n ),\n request: { body },\n response: { headers: responseHeaders },\n };\n }\n}\n\nfunction extractReasoningContent(\n thinking: Array<{ type: string; text: string }>,\n) {\n return thinking\n .filter(chunk => chunk.type === 'text')\n .map(chunk => chunk.text)\n .join('');\n}\n\nfunction extractTextContent(content: z.infer<typeof mistralContentSchema>) {\n if (typeof content === 'string') {\n return content;\n }\n\n if (content == null) {\n return undefined;\n }\n\n const textContent: string[] = [];\n\n for (const chunk of content) {\n const { type } = chunk;\n\n switch (type) {\n case 'text':\n textContent.push(chunk.text);\n break;\n case 'thinking':\n case 'image_url':\n case 'reference':\n // thinking, image content, and reference content are currently ignored\n break;\n default: {\n const _exhaustiveCheck: never = type;\n throw new Error(`Unsupported type: ${_exhaustiveCheck}`);\n }\n }\n }\n\n return textContent.length ? 
textContent.join('') : undefined;\n}\n\nconst mistralContentSchema = z\n .union([\n z.string(),\n z.array(\n z.discriminatedUnion('type', [\n z.object({\n type: z.literal('text'),\n text: z.string(),\n }),\n z.object({\n type: z.literal('image_url'),\n image_url: z.union([\n z.string(),\n z.object({\n url: z.string(),\n detail: z.string().nullable(),\n }),\n ]),\n }),\n z.object({\n type: z.literal('reference'),\n reference_ids: z.array(z.number()),\n }),\n z.object({\n type: z.literal('thinking'),\n thinking: z.array(\n z.object({\n type: z.literal('text'),\n text: z.string(),\n }),\n ),\n }),\n ]),\n ),\n ])\n .nullish();\n\nconst mistralUsageSchema = z.object({\n prompt_tokens: z.number(),\n completion_tokens: z.number(),\n total_tokens: z.number(),\n});\n\n// limited version of the schema, focussed on what is needed for the implementation\n// this approach limits breakages when the API changes and increases efficiency\nconst mistralChatResponseSchema = z.object({\n id: z.string().nullish(),\n created: z.number().nullish(),\n model: z.string().nullish(),\n choices: z.array(\n z.object({\n message: z.object({\n role: z.literal('assistant'),\n content: mistralContentSchema,\n tool_calls: z\n .array(\n z.object({\n id: z.string(),\n function: z.object({ name: z.string(), arguments: z.string() }),\n }),\n )\n .nullish(),\n }),\n index: z.number(),\n finish_reason: z.string().nullish(),\n }),\n ),\n object: z.literal('chat.completion'),\n usage: mistralUsageSchema,\n});\n\n// limited version of the schema, focussed on what is needed for the implementation\n// this approach limits breakages when the API changes and increases efficiency\nconst mistralChatChunkSchema = z.object({\n id: z.string().nullish(),\n created: z.number().nullish(),\n model: z.string().nullish(),\n choices: z.array(\n z.object({\n delta: z.object({\n role: z.enum(['assistant']).optional(),\n content: mistralContentSchema,\n tool_calls: z\n .array(\n z.object({\n id: z.string(),\n function: z.object({ name: z.string(), arguments: z.string() }),\n }),\n )\n .nullish(),\n }),\n finish_reason: z.string().nullish(),\n index: z.number(),\n }),\n ),\n usage: mistralUsageSchema.nullish(),\n});\n","import {\n LanguageModelV2Prompt,\n UnsupportedFunctionalityError,\n} from '@ai-sdk/provider';\nimport { MistralPrompt } from './mistral-chat-prompt';\nimport { convertToBase64 } from '@ai-sdk/provider-utils';\n\nexport function convertToMistralChatMessages(\n prompt: LanguageModelV2Prompt,\n): MistralPrompt {\n const messages: MistralPrompt = [];\n\n for (let i = 0; i < prompt.length; i++) {\n const { role, content } = prompt[i];\n const isLastMessage = i === prompt.length - 1;\n\n switch (role) {\n case 'system': {\n messages.push({ role: 'system', content });\n break;\n }\n\n case 'user': {\n messages.push({\n role: 'user',\n content: content.map(part => {\n switch (part.type) {\n case 'text': {\n return { type: 'text', text: part.text };\n }\n\n case 'file': {\n if (part.mediaType.startsWith('image/')) {\n const mediaType =\n part.mediaType === 'image/*'\n ? 'image/jpeg'\n : part.mediaType;\n\n return {\n type: 'image_url',\n image_url:\n part.data instanceof URL\n ? 
part.data.toString()\n : `data:${mediaType};base64,${convertToBase64(part.data)}`,\n };\n } else if (part.mediaType === 'application/pdf') {\n return {\n type: 'document_url',\n document_url: part.data.toString(),\n };\n } else {\n throw new UnsupportedFunctionalityError({\n functionality:\n 'Only images and PDF file parts are supported',\n });\n }\n }\n }\n }),\n });\n break;\n }\n\n case 'assistant': {\n let text = '';\n const toolCalls: Array<{\n id: string;\n type: 'function';\n function: { name: string; arguments: string };\n }> = [];\n\n for (const part of content) {\n switch (part.type) {\n case 'text': {\n text += part.text;\n break;\n }\n case 'tool-call': {\n toolCalls.push({\n id: part.toolCallId,\n type: 'function',\n function: {\n name: part.toolName,\n arguments: JSON.stringify(part.input),\n },\n });\n break;\n }\n case 'reasoning': {\n text += part.text;\n break;\n }\n default: {\n throw new Error(\n `Unsupported content type in assistant message: ${part.type}`,\n );\n }\n }\n }\n\n messages.push({\n role: 'assistant',\n content: text,\n prefix: isLastMessage ? true : undefined,\n tool_calls: toolCalls.length > 0 ? toolCalls : undefined,\n });\n\n break;\n }\n case 'tool': {\n for (const toolResponse of content) {\n const output = toolResponse.output;\n\n let contentValue: string;\n switch (output.type) {\n case 'text':\n case 'error-text':\n contentValue = output.value;\n break;\n case 'content':\n case 'json':\n case 'error-json':\n contentValue = JSON.stringify(output.value);\n break;\n }\n\n messages.push({\n role: 'tool',\n name: toolResponse.toolName,\n tool_call_id: toolResponse.toolCallId,\n content: contentValue,\n });\n }\n break;\n }\n default: {\n const _exhaustiveCheck: never = role;\n throw new Error(`Unsupported role: ${_exhaustiveCheck}`);\n }\n }\n }\n\n return messages;\n}\n","export function getResponseMetadata({\n id,\n model,\n created,\n}: {\n id?: string | undefined | null;\n created?: number | undefined | null;\n model?: string | undefined | null;\n}) {\n return {\n id: id ?? undefined,\n modelId: model ?? undefined,\n timestamp: created != null ? 
new Date(created * 1000) : undefined,\n };\n}\n","import { LanguageModelV2FinishReason } from '@ai-sdk/provider';\n\nexport function mapMistralFinishReason(\n finishReason: string | null | undefined,\n): LanguageModelV2FinishReason {\n switch (finishReason) {\n case 'stop':\n return 'stop';\n case 'length':\n case 'model_length':\n return 'length';\n case 'tool_calls':\n return 'tool-calls';\n default:\n return 'unknown';\n }\n}\n","import { z } from 'zod/v4';\n\n// https://docs.mistral.ai/getting-started/models/models_overview/\nexport type MistralChatModelId =\n // premier\n | 'ministral-3b-latest'\n | 'ministral-8b-latest'\n | 'mistral-large-latest'\n | 'mistral-medium-latest'\n | 'mistral-medium-2508'\n | 'mistral-medium-2505'\n | 'mistral-small-latest'\n | 'pixtral-large-latest'\n // reasoning models\n | 'magistral-small-2507'\n | 'magistral-medium-2507'\n | 'magistral-small-2506'\n | 'magistral-medium-2506'\n // free\n | 'pixtral-12b-2409'\n // legacy\n | 'open-mistral-7b'\n | 'open-mixtral-8x7b'\n | 'open-mixtral-8x22b'\n | (string & {});\n\nexport const mistralProviderOptions = z.object({\n /**\nWhether to inject a safety prompt before all conversations.\n\nDefaults to `false`.\n */\n safePrompt: z.boolean().optional(),\n\n documentImageLimit: z.number().optional(),\n documentPageLimit: z.number().optional(),\n});\n\nexport type MistralProviderOptions = z.infer<typeof mistralProviderOptions>;\n","import { createJsonErrorResponseHandler } from '@ai-sdk/provider-utils';\nimport { z } from 'zod/v4';\n\nconst mistralErrorDataSchema = z.object({\n object: z.literal('error'),\n message: z.string(),\n type: z.string(),\n param: z.string().nullable(),\n code: z.string().nullable(),\n});\n\nexport type MistralErrorData = z.infer<typeof mistralErrorDataSchema>;\n\nexport const mistralFailedResponseHandler = createJsonErrorResponseHandler({\n errorSchema: mistralErrorDataSchema,\n errorToMessage: data => data.message,\n});\n","import {\n LanguageModelV2CallOptions,\n LanguageModelV2CallWarning,\n UnsupportedFunctionalityError,\n} from '@ai-sdk/provider';\nimport { MistralToolChoice } from './mistral-chat-prompt';\n\nexport function prepareTools({\n tools,\n toolChoice,\n}: {\n tools: LanguageModelV2CallOptions['tools'];\n toolChoice?: LanguageModelV2CallOptions['toolChoice'];\n}): {\n tools:\n | Array<{\n type: 'function';\n function: {\n name: string;\n description: string | undefined;\n parameters: unknown;\n };\n }>\n | undefined;\n toolChoice: MistralToolChoice | undefined;\n toolWarnings: LanguageModelV2CallWarning[];\n} {\n // when the tools array is empty, change it to undefined to prevent errors:\n tools = tools?.length ? 
tools : undefined;\n\n const toolWarnings: LanguageModelV2CallWarning[] = [];\n\n if (tools == null) {\n return { tools: undefined, toolChoice: undefined, toolWarnings };\n }\n\n const mistralTools: Array<{\n type: 'function';\n function: {\n name: string;\n description: string | undefined;\n parameters: unknown;\n };\n }> = [];\n\n for (const tool of tools) {\n if (tool.type === 'provider-defined') {\n toolWarnings.push({ type: 'unsupported-tool', tool });\n } else {\n mistralTools.push({\n type: 'function',\n function: {\n name: tool.name,\n description: tool.description,\n parameters: tool.inputSchema,\n },\n });\n }\n }\n\n if (toolChoice == null) {\n return { tools: mistralTools, toolChoice: undefined, toolWarnings };\n }\n\n const type = toolChoice.type;\n\n switch (type) {\n case 'auto':\n case 'none':\n return { tools: mistralTools, toolChoice: type, toolWarnings };\n case 'required':\n return { tools: mistralTools, toolChoice: 'any', toolWarnings };\n\n // mistral does not support tool mode directly,\n // so we filter the tools and force the tool choice through 'any'\n case 'tool':\n return {\n tools: mistralTools.filter(\n tool => tool.function.name === toolChoice.toolName,\n ),\n toolChoice: 'any',\n toolWarnings,\n };\n default: {\n const _exhaustiveCheck: never = type;\n throw new UnsupportedFunctionalityError({\n functionality: `tool choice type: ${_exhaustiveCheck}`,\n });\n }\n }\n}\n","import {\n EmbeddingModelV2,\n TooManyEmbeddingValuesForCallError,\n} from '@ai-sdk/provider';\nimport {\n combineHeaders,\n createJsonResponseHandler,\n FetchFunction,\n postJsonToApi,\n} from '@ai-sdk/provider-utils';\nimport { z } from 'zod/v4';\nimport { MistralEmbeddingModelId } from './mistral-embedding-options';\nimport { mistralFailedResponseHandler } from './mistral-error';\n\ntype MistralEmbeddingConfig = {\n provider: string;\n baseURL: string;\n headers: () => Record<string, string | undefined>;\n fetch?: FetchFunction;\n};\n\nexport class MistralEmbeddingModel implements EmbeddingModelV2<string> {\n readonly specificationVersion = 'v2';\n readonly modelId: MistralEmbeddingModelId;\n readonly maxEmbeddingsPerCall = 32;\n readonly supportsParallelCalls = false;\n\n private readonly config: MistralEmbeddingConfig;\n\n get provider(): string {\n return this.config.provider;\n }\n\n constructor(\n modelId: MistralEmbeddingModelId,\n config: MistralEmbeddingConfig,\n ) {\n this.modelId = modelId;\n this.config = config;\n }\n\n async doEmbed({\n values,\n abortSignal,\n headers,\n }: Parameters<EmbeddingModelV2<string>['doEmbed']>[0]): Promise<\n Awaited<ReturnType<EmbeddingModelV2<string>['doEmbed']>>\n > {\n if (values.length > this.maxEmbeddingsPerCall) {\n throw new TooManyEmbeddingValuesForCallError({\n provider: this.provider,\n modelId: this.modelId,\n maxEmbeddingsPerCall: this.maxEmbeddingsPerCall,\n values,\n });\n }\n\n const {\n responseHeaders,\n value: response,\n rawValue,\n } = await postJsonToApi({\n url: `${this.config.baseURL}/embeddings`,\n headers: combineHeaders(this.config.headers(), headers),\n body: {\n model: this.modelId,\n input: values,\n encoding_format: 'float',\n },\n failedResponseHandler: mistralFailedResponseHandler,\n successfulResponseHandler: createJsonResponseHandler(\n MistralTextEmbeddingResponseSchema,\n ),\n abortSignal,\n fetch: this.config.fetch,\n });\n\n return {\n embeddings: response.data.map(item => item.embedding),\n usage: response.usage\n ? 
{ tokens: response.usage.prompt_tokens }\n : undefined,\n response: { headers: responseHeaders, body: rawValue },\n };\n }\n}\n\n// minimal version of the schema, focussed on what is needed for the implementation\n// this approach limits breakages when the API changes and increases efficiency\nconst MistralTextEmbeddingResponseSchema = z.object({\n data: z.array(z.object({ embedding: z.array(z.number()) })),\n usage: z.object({ prompt_tokens: z.number() }).nullish(),\n});\n"],"mappings":";;;;;;;;;;;;;;;;;;;;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;;;ACAA,IAAAA,mBAKO;AACP,IAAAC,yBAIO;;;ACFP,IAAAC,yBAUO;AACP,IAAAC,aAAkB;;;ACnBlB,sBAGO;AAEP,4BAAgC;AAEzB,SAAS,6BACd,QACe;AACf,QAAM,WAA0B,CAAC;AAEjC,WAAS,IAAI,GAAG,IAAI,OAAO,QAAQ,KAAK;AACtC,UAAM,EAAE,MAAM,QAAQ,IAAI,OAAO,CAAC;AAClC,UAAM,gBAAgB,MAAM,OAAO,SAAS;AAE5C,YAAQ,MAAM;AAAA,MACZ,KAAK,UAAU;AACb,iBAAS,KAAK,EAAE,MAAM,UAAU,QAAQ,CAAC;AACzC;AAAA,MACF;AAAA,MAEA,KAAK,QAAQ;AACX,iBAAS,KAAK;AAAA,UACZ,MAAM;AAAA,UACN,SAAS,QAAQ,IAAI,UAAQ;AAC3B,oBAAQ,KAAK,MAAM;AAAA,cACjB,KAAK,QAAQ;AACX,uBAAO,EAAE,MAAM,QAAQ,MAAM,KAAK,KAAK;AAAA,cACzC;AAAA,cAEA,KAAK,QAAQ;AACX,oBAAI,KAAK,UAAU,WAAW,QAAQ,GAAG;AACvC,wBAAM,YACJ,KAAK,cAAc,YACf,eACA,KAAK;AAEX,yBAAO;AAAA,oBACL,MAAM;AAAA,oBACN,WACE,KAAK,gBAAgB,MACjB,KAAK,KAAK,SAAS,IACnB,QAAQ,SAAS,eAAW,uCAAgB,KAAK,IAAI,CAAC;AAAA,kBAC9D;AAAA,gBACF,WAAW,KAAK,cAAc,mBAAmB;AAC/C,yBAAO;AAAA,oBACL,MAAM;AAAA,oBACN,cAAc,KAAK,KAAK,SAAS;AAAA,kBACnC;AAAA,gBACF,OAAO;AACL,wBAAM,IAAI,8CAA8B;AAAA,oBACtC,eACE;AAAA,kBACJ,CAAC;AAAA,gBACH;AAAA,cACF;AAAA,YACF;AAAA,UACF,CAAC;AAAA,QACH,CAAC;AACD;AAAA,MACF;AAAA,MAEA,KAAK,aAAa;AAChB,YAAI,OAAO;AACX,cAAM,YAID,CAAC;AAEN,mBAAW,QAAQ,SAAS;AAC1B,kBAAQ,KAAK,MAAM;AAAA,YACjB,KAAK,QAAQ;AACX,sBAAQ,KAAK;AACb;AAAA,YACF;AAAA,YACA,KAAK,aAAa;AAChB,wBAAU,KAAK;AAAA,gBACb,IAAI,KAAK;AAAA,gBACT,MAAM;AAAA,gBACN,UAAU;AAAA,kBACR,MAAM,KAAK;AAAA,kBACX,WAAW,KAAK,UAAU,KAAK,KAAK;AAAA,gBACtC;AAAA,cACF,CAAC;AACD;AAAA,YACF;AAAA,YACA,KAAK,aAAa;AAChB,sBAAQ,KAAK;AACb;AAAA,YACF;AAAA,YACA,SAAS;AACP,oBAAM,IAAI;AAAA,gBACR,kDAAkD,KAAK,IAAI;AAAA,cAC7D;AAAA,YACF;AAAA,UACF;AAAA,QACF;AAEA,iBAAS,KAAK;AAAA,UACZ,MAAM;AAAA,UACN,SAAS;AAAA,UACT,QAAQ,gBAAgB,OAAO;AAAA,UAC/B,YAAY,UAAU,SAAS,IAAI,YAAY;AAAA,QACjD,CAAC;AAED;AAAA,MACF;AAAA,MACA,KAAK,QAAQ;AACX,mBAAW,gBAAgB,SAAS;AAClC,gBAAM,SAAS,aAAa;AAE5B,cAAI;AACJ,kBAAQ,OAAO,MAAM;AAAA,YACnB,KAAK;AAAA,YACL,KAAK;AACH,6BAAe,OAAO;AACtB;AAAA,YACF,KAAK;AAAA,YACL,KAAK;AAAA,YACL,KAAK;AACH,6BAAe,KAAK,UAAU,OAAO,KAAK;AAC1C;AAAA,UACJ;AAEA,mBAAS,KAAK;AAAA,YACZ,MAAM;AAAA,YACN,MAAM,aAAa;AAAA,YACnB,cAAc,aAAa;AAAA,YAC3B,SAAS;AAAA,UACX,CAAC;AAAA,QACH;AACA;AAAA,MACF;AAAA,MACA,SAAS;AACP,cAAM,mBAA0B;AAChC,cAAM,IAAI,MAAM,qBAAqB,gBAAgB,EAAE;AAAA,MACzD;AAAA,IACF;AAAA,EACF;AAEA,SAAO;AACT;;;AC/IO,SAAS,oBAAoB;AAAA,EAClC;AAAA,EACA;AAAA,EACA;AACF,GAIG;AACD,SAAO;AAAA,IACL,IAAI,kBAAM;AAAA,IACV,SAAS,wBAAS;AAAA,IAClB,WAAW,WAAW,OAAO,IAAI,KAAK,UAAU,GAAI,IAAI;AAAA,EAC1D;AACF;;;ACZO,SAAS,uBACd,cAC6B;AAC7B,UAAQ,cAAc;AAAA,IACpB,KAAK;AACH,aAAO;AAAA,IACT,KAAK;AAAA,IACL,KAAK;AACH,aAAO;AAAA,IACT,KAAK;AACH,aAAO;AAAA,IACT;AACE,aAAO;AAAA,EACX;AACF;;;AChBA,gBAAkB;AA0BX,IAAM,yBAAyB,YAAE,OAAO;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,EAM7C,YAAY,YAAE,QAAQ,EAAE,SAAS;AAAA,EAEjC,oBAAoB,YAAE,OAAO,EAAE,SAAS;AAAA,EACxC,mBAAmB,YAAE,OAAO,EAAE,SAAS;AACzC,CAAC;;;ACpCD,IAAAC,yBAA+C;AAC/C,IAAAC,aAAkB;AAElB,IAAM,yBAAyB,aAAE,OAAO;AAAA,EACtC,QAAQ,aAAE,QAAQ,OAAO;AAAA,EACzB,SAAS,aAAE,OAAO;AAAA,EAClB,MAAM,aAAE,OAAO;AAAA,EACf,OAAO,aAAE,OAAO,EAAE,SAAS;AAAA,EAC3B,MAAM,aAAE,OAAO,EAAE,SAAS;AAC5B,CAAC;AAIM,IAAM,mCAA+B,uDAA+B;AAAA,EACzE,aAAa;AAAA,EACb,gBAAgB,UAAQ,KAAK;AAC/B,CAAC;;;AChBD,IAAAC,mBAIO;AAGA,SAAS,aAAa;AAA
A,EAC3B;AAAA,EACA;AACF,GAgBE;AAEA,WAAQ,+BAAO,UAAS,QAAQ;AAEhC,QAAM,eAA6C,CAAC;AAEpD,MAAI,SAAS,MAAM;AACjB,WAAO,EAAE,OAAO,QAAW,YAAY,QAAW,aAAa;AAAA,EACjE;AAEA,QAAM,eAOD,CAAC;AAEN,aAAW,QAAQ,OAAO;AACxB,QAAI,KAAK,SAAS,oBAAoB;AACpC,mBAAa,KAAK,EAAE,MAAM,oBAAoB,KAAK,CAAC;AAAA,IACtD,OAAO;AACL,mBAAa,KAAK;AAAA,QAChB,MAAM;AAAA,QACN,UAAU;AAAA,UACR,MAAM,KAAK;AAAA,UACX,aAAa,KAAK;AAAA,UAClB,YAAY,KAAK;AAAA,QACnB;AAAA,MACF,CAAC;AAAA,IACH;AAAA,EACF;AAEA,MAAI,cAAc,MAAM;AACtB,WAAO,EAAE,OAAO,cAAc,YAAY,QAAW,aAAa;AAAA,EACpE;AAEA,QAAM,OAAO,WAAW;AAExB,UAAQ,MAAM;AAAA,IACZ,KAAK;AAAA,IACL,KAAK;AACH,aAAO,EAAE,OAAO,cAAc,YAAY,MAAM,aAAa;AAAA,IAC/D,KAAK;AACH,aAAO,EAAE,OAAO,cAAc,YAAY,OAAO,aAAa;AAAA,IAIhE,KAAK;AACH,aAAO;AAAA,QACL,OAAO,aAAa;AAAA,UAClB,UAAQ,KAAK,SAAS,SAAS,WAAW;AAAA,QAC5C;AAAA,QACA,YAAY;AAAA,QACZ;AAAA,MACF;AAAA,IACF,SAAS;AACP,YAAM,mBAA0B;AAChC,YAAM,IAAI,+CAA8B;AAAA,QACtC,eAAe,qBAAqB,gBAAgB;AAAA,MACtD,CAAC;AAAA,IACH;AAAA,EACF;AACF;;;ANpDO,IAAM,2BAAN,MAA0D;AAAA,EAQ/D,YAAY,SAA6B,QAA2B;AAPpE,SAAS,uBAAuB;AAiBhC,SAAS,gBAA0C;AAAA,MACjD,mBAAmB,CAAC,gBAAgB;AAAA,IACtC;AA1DF;AA+CI,SAAK,UAAU;AACf,SAAK,SAAS;AACd,SAAK,cAAa,YAAO,eAAP,YAAqB;AAAA,EACzC;AAAA,EAEA,IAAI,WAAmB;AACrB,WAAO,KAAK,OAAO;AAAA,EACrB;AAAA,EAMA,MAAc,QAAQ;AAAA,IACpB;AAAA,IACA;AAAA,IACA;AAAA,IACA;AAAA,IACA;AAAA,IACA;AAAA,IACA;AAAA,IACA;AAAA,IACA;AAAA,IACA;AAAA,IACA;AAAA,IACA;AAAA,IACA;AAAA,EACF,GAAiD;AA1EnD;AA2EI,UAAM,WAAyC,CAAC;AAEhD,UAAM,WACH,eAAM,6CAAqB;AAAA,MAC1B,UAAU;AAAA,MACV;AAAA,MACA,QAAQ;AAAA,IACV,CAAC,MAJA,YAIM,CAAC;AAEV,QAAI,QAAQ,MAAM;AAChB,eAAS,KAAK;AAAA,QACZ,MAAM;AAAA,QACN,SAAS;AAAA,MACX,CAAC;AAAA,IACH;AAEA,QAAI,oBAAoB,MAAM;AAC5B,eAAS,KAAK;AAAA,QACZ,MAAM;AAAA,QACN,SAAS;AAAA,MACX,CAAC;AAAA,IACH;AAEA,QAAI,mBAAmB,MAAM;AAC3B,eAAS,KAAK;AAAA,QACZ,MAAM;AAAA,QACN,SAAS;AAAA,MACX,CAAC;AAAA,IACH;AAEA,QAAI,iBAAiB,MAAM;AACzB,eAAS,KAAK;AAAA,QACZ,MAAM;AAAA,QACN,SAAS;AAAA,MACX,CAAC;AAAA,IACH;AAGA,QACE,kBAAkB,QAClB,eAAe,SAAS,UACxB,eAAe,UAAU,MACzB;AACA,eAAS,KAAK;AAAA,QACZ,MAAM;AAAA,QACN,SAAS;AAAA,QACT,SAAS;AAAA,MACX,CAAC;AAAA,IACH;AAIA,SAAI,iDAAgB,UAAS,QAAQ;AACnC,mBAAS,0DAAkC;AAAA,QACzC,UAAU;AAAA,QACV,QAAQ,eAAe;AAAA,MACzB,CAAC;AAAA,IACH;AAEA,UAAM,WAAW;AAAA;AAAA,MAEf,OAAO,KAAK;AAAA;AAAA,MAGZ,aAAa,QAAQ;AAAA;AAAA,MAGrB,YAAY;AAAA,MACZ;AAAA,MACA,OAAO;AAAA,MACP,aAAa;AAAA;AAAA;AAAA,MAIb,kBACE,iDAAgB,UAAS,SAAS,EAAE,MAAM,cAAc,IAAI;AAAA;AAAA,MAG9D,sBAAsB,QAAQ;AAAA,MAC9B,qBAAqB,QAAQ;AAAA;AAAA,MAG7B,UAAU,6BAA6B,MAAM;AAAA,IAC/C;AAEA,UAAM;AAAA,MACJ,OAAO;AAAA,MACP,YAAY;AAAA,MACZ;AAAA,IACF,IAAI,aAAa;AAAA,MACf;AAAA,MACA;AAAA,IACF,CAAC;AAED,WAAO;AAAA,MACL,MAAM;AAAA,QACJ,GAAG;AAAA,QACH,OAAO;AAAA,QACP,aAAa;AAAA,MACf;AAAA,MACA,UAAU,CAAC,GAAG,UAAU,GAAG,YAAY;AAAA,IACzC;AAAA,EACF;AAAA,EAEA,MAAM,WACJ,SAC6D;AAC7D,UAAM,EAAE,MAAM,MAAM,SAAS,IAAI,MAAM,KAAK,QAAQ,OAAO;AAE3D,UAAM;AAAA,MACJ;AAAA,MACA,OAAO;AAAA,MACP,UAAU;AAAA,IACZ,IAAI,UAAM,sCAAc;AAAA,MACtB,KAAK,GAAG,KAAK,OAAO,OAAO;AAAA,MAC3B,aAAS,uCAAe,KAAK,OAAO,QAAQ,GAAG,QAAQ,OAAO;AAAA,MAC9D;AAAA,MACA,uBAAuB;AAAA,MACvB,+BAA2B;AAAA,QACzB;AAAA,MACF;AAAA,MACA,aAAa,QAAQ;AAAA,MACrB,OAAO,KAAK,OAAO;AAAA,IACrB,CAAC;AAED,UAAM,SAAS,SAAS,QAAQ,CAAC;AACjC,UAAM,UAAyC,CAAC;AAGhD,QACE,OAAO,QAAQ,WAAW,QAC1B,MAAM,QAAQ,OAAO,QAAQ,OAAO,GACpC;AACA,iBAAW,QAAQ,OAAO,QAAQ,SAAS;AACzC,YAAI,KAAK,SAAS,YAAY;AAC5B,gBAAM,gBAAgB,wBAAwB,KAAK,QAAQ;AAC3D,cAAI,cAAc,SAAS,GAAG;AAC5B,oBAAQ,KAAK,EAAE,MAAM,aAAa,MAAM,cAAc,CAAC;AAAA,UACzD;AAAA,QACF,WAAW,KAAK,SAAS,QAAQ;AAC/B,cAAI,KAAK,KAAK,SAAS,GAAG;AACxB,oBAAQ,KAAK,EAAE,MAAM,QAAQ,MAAM,KAAK,KAAK,CAAC;AAAA,UAChD;AAAA,QACF;AAAA,MACF;AAAA,IACF,OAAO;AAEL,YAAM,OAAO,mBAAmB,OAAO,QAAQ,OAAO;AACtD,UAAI,QAAQ,QAAQ,KAAK,SAAS,GAAG;AACnC,gBAAQ,KAAK,EAAE,MAAM,QA
AQ,KAAK,CAAC;AAAA,MACrC;AAAA,IACF;AAOA,QAAI,OAAO,QAAQ,cAAc,MAAM;AACrC,iBAAW,YAAY,OAAO,QAAQ,YAAY;AAChD,gBAAQ,KAAK;AAAA,UACX,MAAM;AAAA,UACN,YAAY,SAAS;AAAA,UACrB,UAAU,SAAS,SAAS;AAAA,UAC5B,OAAO,SAAS,SAAS;AAAA,QAC3B,CAAC;AAAA,MACH;AAAA,IACF;AAEA,WAAO;AAAA,MACL;AAAA,MACA,cAAc,uBAAuB,OAAO,aAAa;AAAA,MACzD,OAAO;AAAA,QACL,aAAa,SAAS,MAAM;AAAA,QAC5B,cAAc,SAAS,MAAM;AAAA,QAC7B,aAAa,SAAS,MAAM;AAAA,MAC9B;AAAA,MACA,SAAS,EAAE,KAAK;AAAA,MAChB,UAAU;AAAA,QACR,GAAG,oBAAoB,QAAQ;AAAA,QAC/B,SAAS;AAAA,QACT,MAAM;AAAA,MACR;AAAA,MACA;AAAA,IACF;AAAA,EACF;AAAA,EAEA,MAAM,SACJ,SAC2D;AAC3D,UAAM,EAAE,MAAM,SAAS,IAAI,MAAM,KAAK,QAAQ,OAAO;AACrD,UAAM,OAAO,EAAE,GAAG,MAAM,QAAQ,KAAK;AAErC,UAAM,EAAE,iBAAiB,OAAO,SAAS,IAAI,UAAM,sCAAc;AAAA,MAC/D,KAAK,GAAG,KAAK,OAAO,OAAO;AAAA,MAC3B,aAAS,uCAAe,KAAK,OAAO,QAAQ,GAAG,QAAQ,OAAO;AAAA,MAC9D;AAAA,MACA,uBAAuB;AAAA,MACvB,+BAA2B;AAAA,QACzB;AAAA,MACF;AAAA,MACA,aAAa,QAAQ;AAAA,MACrB,OAAO,KAAK,OAAO;AAAA,IACrB,CAAC;AAED,QAAI,eAA4C;AAChD,UAAM,QAA8B;AAAA,MAClC,aAAa;AAAA,MACb,cAAc;AAAA,MACd,aAAa;AAAA,IACf;AAEA,QAAI,eAAe;AACnB,QAAI,aAAa;AACjB,QAAI,oBAAmC;AAEvC,UAAMC,cAAa,KAAK;AAExB,WAAO;AAAA,MACL,QAAQ,SAAS;AAAA,QACf,IAAI,gBAGF;AAAA,UACA,MAAM,YAAY;AAChB,uBAAW,QAAQ,EAAE,MAAM,gBAAgB,SAAS,CAAC;AAAA,UACvD;AAAA,UAEA,UAAU,OAAO,YAAY;AAE3B,gBAAI,QAAQ,kBAAkB;AAC5B,yBAAW,QAAQ,EAAE,MAAM,OAAO,UAAU,MAAM,SAAS,CAAC;AAAA,YAC9D;AAEA,gBAAI,CAAC,MAAM,SAAS;AAClB,yBAAW,QAAQ,EAAE,MAAM,SAAS,OAAO,MAAM,MAAM,CAAC;AACxD;AAAA,YACF;AAEA,kBAAM,QAAQ,MAAM;AAEpB,gBAAI,cAAc;AAChB,6BAAe;AAEf,yBAAW,QAAQ;AAAA,gBACjB,MAAM;AAAA,gBACN,GAAG,oBAAoB,KAAK;AAAA,cAC9B,CAAC;AAAA,YACH;AAEA,gBAAI,MAAM,SAAS,MAAM;AACvB,oBAAM,cAAc,MAAM,MAAM;AAChC,oBAAM,eAAe,MAAM,MAAM;AACjC,oBAAM,cAAc,MAAM,MAAM;AAAA,YAClC;AAEA,kBAAM,SAAS,MAAM,QAAQ,CAAC;AAC9B,kBAAM,QAAQ,OAAO;AAErB,kBAAM,cAAc,mBAAmB,MAAM,OAAO;AAEpD,gBAAI,MAAM,WAAW,QAAQ,MAAM,QAAQ,MAAM,OAAO,GAAG;AACzD,yBAAW,QAAQ,MAAM,SAAS;AAChC,oBAAI,KAAK,SAAS,YAAY;AAC5B,wBAAM,iBAAiB,wBAAwB,KAAK,QAAQ;AAC5D,sBAAI,eAAe,SAAS,GAAG;AAC7B,wBAAI,qBAAqB,MAAM;AAE7B,0BAAI,YAAY;AACd,mCAAW,QAAQ,EAAE,MAAM,YAAY,IAAI,IAAI,CAAC;AAChD,qCAAa;AAAA,sBACf;AAEA,0CAAoBA,YAAW;AAC/B,iCAAW,QAAQ;AAAA,wBACjB,MAAM;AAAA,wBACN,IAAI;AAAA,sBACN,CAAC;AAAA,oBACH;AACA,+BAAW,QAAQ;AAAA,sBACjB,MAAM;AAAA,sBACN,IAAI;AAAA,sBACJ,OAAO;AAAA,oBACT,CAAC;AAAA,kBACH;AAAA,gBACF;AAAA,cACF;AAAA,YACF;AAEA,gBAAI,eAAe,QAAQ,YAAY,SAAS,GAAG;AACjD,kBAAI,CAAC,YAAY;AAEf,oBAAI,qBAAqB,MAAM;AAC7B,6BAAW,QAAQ;AAAA,oBACjB,MAAM;AAAA,oBACN,IAAI;AAAA,kBACN,CAAC;AACD,sCAAoB;AAAA,gBACtB;AACA,2BAAW,QAAQ,EAAE,MAAM,cAAc,IAAI,IAAI,CAAC;AAClD,6BAAa;AAAA,cACf;AAEA,yBAAW,QAAQ;AAAA,gBACjB,MAAM;AAAA,gBACN,IAAI;AAAA,gBACJ,OAAO;AAAA,cACT,CAAC;AAAA,YACH;AAEA,iBAAI,+BAAO,eAAc,MAAM;AAC7B,yBAAW,YAAY,MAAM,YAAY;AACvC,sBAAM,aAAa,SAAS;AAC5B,sBAAM,WAAW,SAAS,SAAS;AACnC,sBAAM,QAAQ,SAAS,SAAS;AAEhC,2BAAW,QAAQ;AAAA,kBACjB,MAAM;AAAA,kBACN,IAAI;AAAA,kBACJ;AAAA,gBACF,CAAC;AAED,2BAAW,QAAQ;AAAA,kBACjB,MAAM;AAAA,kBACN,IAAI;AAAA,kBACJ,OAAO;AAAA,gBACT,CAAC;AAED,2BAAW,QAAQ;AAAA,kBACjB,MAAM;AAAA,kBACN,IAAI;AAAA,gBACN,CAAC;AAED,2BAAW,QAAQ;AAAA,kBACjB,MAAM;AAAA,kBACN;AAAA,kBACA;AAAA,kBACA;AAAA,gBACF,CAAC;AAAA,cACH;AAAA,YACF;AAEA,gBAAI,OAAO,iBAAiB,MAAM;AAChC,6BAAe,uBAAuB,OAAO,aAAa;AAAA,YAC5D;AAAA,UACF;AAAA,UAEA,MAAM,YAAY;AAChB,gBAAI,qBAAqB,MAAM;AAC7B,yBAAW,QAAQ;AAAA,gBACjB,MAAM;AAAA,gBACN,IAAI;AAAA,cACN,CAAC;AAAA,YACH;AACA,gBAAI,YAAY;AACd,yBAAW,QAAQ,EAAE,MAAM,YAAY,IAAI,IAAI,CAAC;AAAA,YAClD;AAEA,uBAAW,QAAQ;AAAA,cACjB,MAAM;AAAA,cACN;AAAA,cACA;AAAA,YACF,CAAC;AAAA,UACH;AAAA,QACF,CAAC;AAAA,MACH;AAAA,MACA,SAAS,EAAE,KAAK;AAAA,MAChB,UAAU,EAAE,SAAS,gBAAgB;AAAA,IACvC;AAAA,EACF;AACF;AAEA,SAAS,wBACP,UACA;AACA,SAAO,SACJ,OAAO,WAAS,MAAM,SAAS,MAAM,EACrC,I
AAI,WAAS,MAAM,IAAI,EACvB,KAAK,EAAE;AACZ;AAEA,SAAS,mBAAmB,SAA+C;AACzE,MAAI,OAAO,YAAY,UAAU;AAC/B,WAAO;AAAA,EACT;AAEA,MAAI,WAAW,MAAM;AACnB,WAAO;AAAA,EACT;AAEA,QAAM,cAAwB,CAAC;AAE/B,aAAW,SAAS,SAAS;AAC3B,UAAM,EAAE,KAAK,IAAI;AAEjB,YAAQ,MAAM;AAAA,MACZ,KAAK;AACH,oBAAY,KAAK,MAAM,IAAI;AAC3B;AAAA,MACF,KAAK;AAAA,MACL,KAAK;AAAA,MACL,KAAK;AAEH;AAAA,MACF,SAAS;AACP,cAAM,mBAA0B;AAChC,cAAM,IAAI,MAAM,qBAAqB,gBAAgB,EAAE;AAAA,MACzD;AAAA,IACF;AAAA,EACF;AAEA,SAAO,YAAY,SAAS,YAAY,KAAK,EAAE,IAAI;AACrD;AAEA,IAAM,uBAAuB,aAC1B,MAAM;AAAA,EACL,aAAE,OAAO;AAAA,EACT,aAAE;AAAA,IACA,aAAE,mBAAmB,QAAQ;AAAA,MAC3B,aAAE,OAAO;AAAA,QACP,MAAM,aAAE,QAAQ,MAAM;AAAA,QACtB,MAAM,aAAE,OAAO;AAAA,MACjB,CAAC;AAAA,MACD,aAAE,OAAO;AAAA,QACP,MAAM,aAAE,QAAQ,WAAW;AAAA,QAC3B,WAAW,aAAE,MAAM;AAAA,UACjB,aAAE,OAAO;AAAA,UACT,aAAE,OAAO;AAAA,YACP,KAAK,aAAE,OAAO;AAAA,YACd,QAAQ,aAAE,OAAO,EAAE,SAAS;AAAA,UAC9B,CAAC;AAAA,QACH,CAAC;AAAA,MACH,CAAC;AAAA,MACD,aAAE,OAAO;AAAA,QACP,MAAM,aAAE,QAAQ,WAAW;AAAA,QAC3B,eAAe,aAAE,MAAM,aAAE,OAAO,CAAC;AAAA,MACnC,CAAC;AAAA,MACD,aAAE,OAAO;AAAA,QACP,MAAM,aAAE,QAAQ,UAAU;AAAA,QAC1B,UAAU,aAAE;AAAA,UACV,aAAE,OAAO;AAAA,YACP,MAAM,aAAE,QAAQ,MAAM;AAAA,YACtB,MAAM,aAAE,OAAO;AAAA,UACjB,CAAC;AAAA,QACH;AAAA,MACF,CAAC;AAAA,IACH,CAAC;AAAA,EACH;AACF,CAAC,EACA,QAAQ;AAEX,IAAM,qBAAqB,aAAE,OAAO;AAAA,EAClC,eAAe,aAAE,OAAO;AAAA,EACxB,mBAAmB,aAAE,OAAO;AAAA,EAC5B,cAAc,aAAE,OAAO;AACzB,CAAC;AAID,IAAM,4BAA4B,aAAE,OAAO;AAAA,EACzC,IAAI,aAAE,OAAO,EAAE,QAAQ;AAAA,EACvB,SAAS,aAAE,OAAO,EAAE,QAAQ;AAAA,EAC5B,OAAO,aAAE,OAAO,EAAE,QAAQ;AAAA,EAC1B,SAAS,aAAE;AAAA,IACT,aAAE,OAAO;AAAA,MACP,SAAS,aAAE,OAAO;AAAA,QAChB,MAAM,aAAE,QAAQ,WAAW;AAAA,QAC3B,SAAS;AAAA,QACT,YAAY,aACT;AAAA,UACC,aAAE,OAAO;AAAA,YACP,IAAI,aAAE,OAAO;AAAA,YACb,UAAU,aAAE,OAAO,EAAE,MAAM,aAAE,OAAO,GAAG,WAAW,aAAE,OAAO,EAAE,CAAC;AAAA,UAChE,CAAC;AAAA,QACH,EACC,QAAQ;AAAA,MACb,CAAC;AAAA,MACD,OAAO,aAAE,OAAO;AAAA,MAChB,eAAe,aAAE,OAAO,EAAE,QAAQ;AAAA,IACpC,CAAC;AAAA,EACH;AAAA,EACA,QAAQ,aAAE,QAAQ,iBAAiB;AAAA,EACnC,OAAO;AACT,CAAC;AAID,IAAM,yBAAyB,aAAE,OAAO;AAAA,EACtC,IAAI,aAAE,OAAO,EAAE,QAAQ;AAAA,EACvB,SAAS,aAAE,OAAO,EAAE,QAAQ;AAAA,EAC5B,OAAO,aAAE,OAAO,EAAE,QAAQ;AAAA,EAC1B,SAAS,aAAE;AAAA,IACT,aAAE,OAAO;AAAA,MACP,OAAO,aAAE,OAAO;AAAA,QACd,MAAM,aAAE,KAAK,CAAC,WAAW,CAAC,EAAE,SAAS;AAAA,QACrC,SAAS;AAAA,QACT,YAAY,aACT;AAAA,UACC,aAAE,OAAO;AAAA,YACP,IAAI,aAAE,OAAO;AAAA,YACb,UAAU,aAAE,OAAO,EAAE,MAAM,aAAE,OAAO,GAAG,WAAW,aAAE,OAAO,EAAE,CAAC;AAAA,UAChE,CAAC;AAAA,QACH,EACC,QAAQ;AAAA,MACb,CAAC;AAAA,MACD,eAAe,aAAE,OAAO,EAAE,QAAQ;AAAA,MAClC,OAAO,aAAE,OAAO;AAAA,IAClB,CAAC;AAAA,EACH;AAAA,EACA,OAAO,mBAAmB,QAAQ;AACpC,CAAC;;;AOzkBD,IAAAC,mBAGO;AACP,IAAAC,yBAKO;AACP,IAAAC,aAAkB;AAWX,IAAM,wBAAN,MAAgE;AAAA,EAYrE,YACE,SACA,QACA;AAdF,SAAS,uBAAuB;AAEhC,SAAS,uBAAuB;AAChC,SAAS,wBAAwB;AAY/B,SAAK,UAAU;AACf,SAAK,SAAS;AAAA,EAChB;AAAA,EAVA,IAAI,WAAmB;AACrB,WAAO,KAAK,OAAO;AAAA,EACrB;AAAA,EAUA,MAAM,QAAQ;AAAA,IACZ;AAAA,IACA;AAAA,IACA;AAAA,EACF,GAEE;AACA,QAAI,OAAO,SAAS,KAAK,sBAAsB;AAC7C,YAAM,IAAI,oDAAmC;AAAA,QAC3C,UAAU,KAAK;AAAA,QACf,SAAS,KAAK;AAAA,QACd,sBAAsB,KAAK;AAAA,QAC3B;AAAA,MACF,CAAC;AAAA,IACH;AAEA,UAAM;AAAA,MACJ;AAAA,MACA,OAAO;AAAA,MACP;AAAA,IACF,IAAI,UAAM,sCAAc;AAAA,MACtB,KAAK,GAAG,KAAK,OAAO,OAAO;AAAA,MAC3B,aAAS,uCAAe,KAAK,OAAO,QAAQ,GAAG,OAAO;AAAA,MACtD,MAAM;AAAA,QACJ,OAAO,KAAK;AAAA,QACZ,OAAO;AAAA,QACP,iBAAiB;AAAA,MACnB;AAAA,MACA,uBAAuB;AAAA,MACvB,+BAA2B;AAAA,QACzB;AAAA,MACF;AAAA,MACA;AAAA,MACA,OAAO,KAAK,OAAO;AAAA,IACrB,CAAC;AAED,WAAO;AAAA,MACL,YAAY,SAAS,KAAK,IAAI,UAAQ,KAAK,SAAS;AAAA,MACpD,OAAO,SAAS,QACZ,EAAE,QAAQ,SAAS,MAAM,cAAc,IACvC;AAAA,MACJ,UAAU,EAAE,SAAS,iBAAiB,MAAM,SAAS;AAAA,IACvD;AAAA,EACF;AACF;AAIA,IAAM,qCAAqC,aAAE,OAAO;AAAA,EAClD,MAAM,aAAE,MAAM,
aAAE,OAAO,EAAE,WAAW,aAAE,MAAM,aAAE,OAAO,CAAC,EAAE,CAAC,CAAC;AAAA,EAC1D,OAAO,aAAE,OAAO,EAAE,eAAe,aAAE,OAAO,EAAE,CAAC,EAAE,QAAQ;AACzD,CAAC;;;ARrBM,SAAS,cACd,UAAmC,CAAC,GACnB;AAzEnB;AA0EE,QAAM,WACJ,sDAAqB,QAAQ,OAAO,MAApC,YAAyC;AAE3C,QAAM,aAAa,OAAO;AAAA,IACxB,eAAe,cAAU,mCAAW;AAAA,MAClC,QAAQ,QAAQ;AAAA,MAChB,yBAAyB;AAAA,MACzB,aAAa;AAAA,IACf,CAAC,CAAC;AAAA,IACF,GAAG,QAAQ;AAAA,EACb;AAEA,QAAM,kBAAkB,CAAC,YACvB,IAAI,yBAAyB,SAAS;AAAA,IACpC,UAAU;AAAA,IACV;AAAA,IACA,SAAS;AAAA,IACT,OAAO,QAAQ;AAAA,IACf,YAAY,QAAQ;AAAA,EACtB,CAAC;AAEH,QAAM,uBAAuB,CAAC,YAC5B,IAAI,sBAAsB,SAAS;AAAA,IACjC,UAAU;AAAA,IACV;AAAA,IACA,SAAS;AAAA,IACT,OAAO,QAAQ;AAAA,EACjB,CAAC;AAEH,QAAM,WAAW,SAAU,SAA6B;AACtD,QAAI,YAAY;AACd,YAAM,IAAI;AAAA,QACR;AAAA,MACF;AAAA,IACF;AAEA,WAAO,gBAAgB,OAAO;AAAA,EAChC;AAEA,WAAS,gBAAgB;AACzB,WAAS,OAAO;AAChB,WAAS,YAAY;AACrB,WAAS,gBAAgB;AACzB,WAAS,qBAAqB;AAE9B,WAAS,aAAa,CAAC,YAAoB;AACzC,UAAM,IAAI,kCAAiB,EAAE,SAAS,WAAW,aAAa,CAAC;AAAA,EACjE;AAEA,SAAO;AACT;AAKO,IAAM,UAAU,cAAc;","names":["import_provider","import_provider_utils","import_provider_utils","import_v4","import_provider_utils","import_v4","import_provider","generateId","import_provider","import_provider_utils","import_v4"]}
|
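The TypeScript embedded in the updated source map above shows the 2.0.6 patch: when a call sets a responseFormat of type "json", the provider now runs the prompt through injectJsonInstructionIntoMessages from @ai-sdk/provider-utils before converting it to Mistral chat messages, in addition to sending response_format: { type: "json_object" }. As a rough sketch of how this surfaces at the LanguageModelV2 level (illustrative only; the prompt shape follows convertToMistralChatMessages above, and most applications would call the model through the higher-level ai package instead):

import { mistral } from '@ai-sdk/mistral';

const model = mistral('mistral-small-latest');

// With a JSON response format, the provider now injects a JSON generation
// instruction into the messages before the request is sent.
const { content } = await model.doGenerate({
  prompt: [
    {
      role: 'user',
      content: [{ type: 'text', text: 'Return a JSON object with a "city" field.' }],
    },
  ],
  responseFormat: { type: 'json' },
});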
package/dist/index.mjs
CHANGED
|
@@ -12,6 +12,8 @@ import {
   combineHeaders,
   createEventSourceResponseHandler,
   createJsonResponseHandler,
+  generateId,
+  injectJsonInstructionIntoMessages,
   parseProviderOptions,
   postJsonToApi
 } from "@ai-sdk/provider-utils";
@@ -83,6 +85,15 @@ function convertToMistralChatMessages(prompt) {
               });
               break;
             }
+            case "reasoning": {
+              text += part.text;
+              break;
+            }
+            default: {
+              throw new Error(
+                `Unsupported content type in assistant message: ${part.type}`
+              );
+            }
           }
         }
         messages.push({
@@ -244,8 +255,10 @@ var MistralChatLanguageModel = class {
     this.supportedUrls = {
       "application/pdf": [/^https:\/\/.*$/]
     };
+    var _a;
     this.modelId = modelId;
     this.config = config;
+    this.generateId = (_a = config.generateId) != null ? _a : generateId;
   }
   get provider() {
     return this.config.provider;
@@ -303,6 +316,12 @@ var MistralChatLanguageModel = class {
         details: "JSON response format schema is not supported"
       });
     }
+    if ((responseFormat == null ? void 0 : responseFormat.type) === "json") {
+      prompt = injectJsonInstructionIntoMessages({
+        messages: prompt,
+        schema: responseFormat.schema
+      });
+    }
     const baseArgs = {
       // model id:
       model: this.modelId,
@@ -314,6 +333,7 @@ var MistralChatLanguageModel = class {
       top_p: topP,
       random_seed: seed,
       // response format:
+      // TODO add JSON schema support (see OpenAI implementation)
       response_format: (responseFormat == null ? void 0 : responseFormat.type) === "json" ? { type: "json_object" } : void 0,
       // mistral-specific provider options:
       document_image_limit: options.documentImageLimit,
@@ -357,13 +377,24 @@ var MistralChatLanguageModel = class {
     });
     const choice = response.choices[0];
     const content = [];
-
-
-
-
-
-
-
+    if (choice.message.content != null && Array.isArray(choice.message.content)) {
+      for (const part of choice.message.content) {
+        if (part.type === "thinking") {
+          const reasoningText = extractReasoningContent(part.thinking);
+          if (reasoningText.length > 0) {
+            content.push({ type: "reasoning", text: reasoningText });
+          }
+        } else if (part.type === "text") {
+          if (part.text.length > 0) {
+            content.push({ type: "text", text: part.text });
+          }
+        }
+      }
+    } else {
+      const text = extractTextContent(choice.message.content);
+      if (text != null && text.length > 0) {
+        content.push({ type: "text", text });
+      }
     }
     if (choice.message.tool_calls != null) {
       for (const toolCall of choice.message.tool_calls) {
@@ -414,6 +445,8 @@ var MistralChatLanguageModel = class {
     };
     let isFirstChunk = true;
     let activeText = false;
+    let activeReasoningId = null;
+    const generateId2 = this.generateId;
     return {
       stream: response.pipeThrough(
         new TransformStream({
@@ -444,8 +477,40 @@ var MistralChatLanguageModel = class {
             const choice = value.choices[0];
             const delta = choice.delta;
             const textContent = extractTextContent(delta.content);
+            if (delta.content != null && Array.isArray(delta.content)) {
+              for (const part of delta.content) {
+                if (part.type === "thinking") {
+                  const reasoningDelta = extractReasoningContent(part.thinking);
+                  if (reasoningDelta.length > 0) {
+                    if (activeReasoningId == null) {
+                      if (activeText) {
+                        controller.enqueue({ type: "text-end", id: "0" });
+                        activeText = false;
+                      }
+                      activeReasoningId = generateId2();
+                      controller.enqueue({
+                        type: "reasoning-start",
+                        id: activeReasoningId
+                      });
+                    }
+                    controller.enqueue({
+                      type: "reasoning-delta",
+                      id: activeReasoningId,
+                      delta: reasoningDelta
+                    });
+                  }
+                }
+              }
+            }
             if (textContent != null && textContent.length > 0) {
               if (!activeText) {
+                if (activeReasoningId != null) {
+                  controller.enqueue({
+                    type: "reasoning-end",
+                    id: activeReasoningId
+                  });
+                  activeReasoningId = null;
+                }
                 controller.enqueue({ type: "text-start", id: "0" });
                 activeText = true;
               }
@@ -487,6 +552,12 @@ var MistralChatLanguageModel = class {
             }
           },
           flush(controller) {
+            if (activeReasoningId != null) {
+              controller.enqueue({
+                type: "reasoning-end",
+                id: activeReasoningId
+              });
+            }
             if (activeText) {
               controller.enqueue({ type: "text-end", id: "0" });
             }
@@ -503,6 +574,9 @@ var MistralChatLanguageModel = class {
     };
   }
 };
+function extractReasoningContent(thinking) {
+  return thinking.filter((chunk) => chunk.type === "text").map((chunk) => chunk.text).join("");
+}
 function extractTextContent(content) {
   if (typeof content === "string") {
     return content;
@@ -517,6 +591,7 @@ function extractTextContent(content) {
       case "text":
         textContent.push(chunk.text);
         break;
+      case "thinking":
       case "image_url":
       case "reference":
         break;
@@ -549,6 +624,15 @@ var mistralContentSchema = z3.union([
       z3.object({
         type: z3.literal("reference"),
         reference_ids: z3.array(z3.number())
+      }),
+      z3.object({
+        type: z3.literal("thinking"),
+        thinking: z3.array(
+          z3.object({
+            type: z3.literal("text"),
+            text: z3.string()
+          })
+        )
       })
     ])
   )
@@ -685,7 +769,8 @@ function createMistral(options = {}) {
     provider: "mistral.chat",
     baseURL,
     headers: getHeaders,
-    fetch: options.fetch
+    fetch: options.fetch,
+    generateId: options.generateId
   });
   const createEmbeddingModel = (modelId) => new MistralEmbeddingModel(modelId, {
     provider: "mistral.embedding",
package/dist/index.mjs.map
CHANGED
|
@@ -1 +1 @@
|
|
|
1
|
-
{"version":3,"sources":["../src/mistral-provider.ts","../src/mistral-chat-language-model.ts","../src/convert-to-mistral-chat-messages.ts","../src/get-response-metadata.ts","../src/map-mistral-finish-reason.ts","../src/mistral-chat-options.ts","../src/mistral-error.ts","../src/mistral-prepare-tools.ts","../src/mistral-embedding-model.ts"],"sourcesContent":["import {\n EmbeddingModelV2,\n LanguageModelV2,\n NoSuchModelError,\n ProviderV2,\n} from '@ai-sdk/provider';\nimport {\n FetchFunction,\n loadApiKey,\n withoutTrailingSlash,\n} from '@ai-sdk/provider-utils';\nimport { MistralChatLanguageModel } from './mistral-chat-language-model';\nimport { MistralChatModelId } from './mistral-chat-options';\nimport { MistralEmbeddingModel } from './mistral-embedding-model';\nimport { MistralEmbeddingModelId } from './mistral-embedding-options';\n\nexport interface MistralProvider extends ProviderV2 {\n (modelId: MistralChatModelId): LanguageModelV2;\n\n /**\nCreates a model for text generation.\n*/\n languageModel(modelId: MistralChatModelId): LanguageModelV2;\n\n /**\nCreates a model for text generation.\n*/\n chat(modelId: MistralChatModelId): LanguageModelV2;\n\n /**\n@deprecated Use `textEmbedding()` instead.\n */\n embedding(modelId: MistralEmbeddingModelId): EmbeddingModelV2<string>;\n\n textEmbedding(modelId: MistralEmbeddingModelId): EmbeddingModelV2<string>;\n\n textEmbeddingModel: (\n modelId: MistralEmbeddingModelId,\n ) => EmbeddingModelV2<string>;\n}\n\nexport interface MistralProviderSettings {\n /**\nUse a different URL prefix for API calls, e.g. to use proxy servers.\nThe default prefix is `https://api.mistral.ai/v1`.\n */\n baseURL?: string;\n\n /**\nAPI key that is being send using the `Authorization` header.\nIt defaults to the `MISTRAL_API_KEY` environment variable.\n */\n apiKey?: string;\n\n /**\nCustom headers to include in the requests.\n */\n headers?: Record<string, string>;\n\n /**\nCustom fetch implementation. You can use it as a middleware to intercept requests,\nor to provide a custom fetch implementation for e.g. testing.\n */\n fetch?: FetchFunction;\n}\n\n/**\nCreate a Mistral AI provider instance.\n */\nexport function createMistral(\n options: MistralProviderSettings = {},\n): MistralProvider {\n const baseURL =\n withoutTrailingSlash(options.baseURL) ?? 
'https://api.mistral.ai/v1';\n\n const getHeaders = () => ({\n Authorization: `Bearer ${loadApiKey({\n apiKey: options.apiKey,\n environmentVariableName: 'MISTRAL_API_KEY',\n description: 'Mistral',\n })}`,\n ...options.headers,\n });\n\n const createChatModel = (modelId: MistralChatModelId) =>\n new MistralChatLanguageModel(modelId, {\n provider: 'mistral.chat',\n baseURL,\n headers: getHeaders,\n fetch: options.fetch,\n });\n\n const createEmbeddingModel = (modelId: MistralEmbeddingModelId) =>\n new MistralEmbeddingModel(modelId, {\n provider: 'mistral.embedding',\n baseURL,\n headers: getHeaders,\n fetch: options.fetch,\n });\n\n const provider = function (modelId: MistralChatModelId) {\n if (new.target) {\n throw new Error(\n 'The Mistral model function cannot be called with the new keyword.',\n );\n }\n\n return createChatModel(modelId);\n };\n\n provider.languageModel = createChatModel;\n provider.chat = createChatModel;\n provider.embedding = createEmbeddingModel;\n provider.textEmbedding = createEmbeddingModel;\n provider.textEmbeddingModel = createEmbeddingModel;\n\n provider.imageModel = (modelId: string) => {\n throw new NoSuchModelError({ modelId, modelType: 'imageModel' });\n };\n\n return provider;\n}\n\n/**\nDefault Mistral provider instance.\n */\nexport const mistral = createMistral();\n","import {\n LanguageModelV2,\n LanguageModelV2CallWarning,\n LanguageModelV2Content,\n LanguageModelV2FinishReason,\n LanguageModelV2StreamPart,\n LanguageModelV2Usage,\n} from '@ai-sdk/provider';\nimport {\n combineHeaders,\n createEventSourceResponseHandler,\n createJsonResponseHandler,\n FetchFunction,\n parseProviderOptions,\n ParseResult,\n postJsonToApi,\n} from '@ai-sdk/provider-utils';\nimport { z } from 'zod/v4';\nimport { convertToMistralChatMessages } from './convert-to-mistral-chat-messages';\nimport { getResponseMetadata } from './get-response-metadata';\nimport { mapMistralFinishReason } from './map-mistral-finish-reason';\nimport {\n MistralChatModelId,\n mistralProviderOptions,\n} from './mistral-chat-options';\nimport { mistralFailedResponseHandler } from './mistral-error';\nimport { prepareTools } from './mistral-prepare-tools';\n\ntype MistralChatConfig = {\n provider: string;\n baseURL: string;\n headers: () => Record<string, string | undefined>;\n fetch?: FetchFunction;\n};\n\nexport class MistralChatLanguageModel implements LanguageModelV2 {\n readonly specificationVersion = 'v2';\n\n readonly modelId: MistralChatModelId;\n\n private readonly config: MistralChatConfig;\n\n constructor(modelId: MistralChatModelId, config: MistralChatConfig) {\n this.modelId = modelId;\n this.config = config;\n }\n\n get provider(): string {\n return this.config.provider;\n }\n\n readonly supportedUrls: Record<string, RegExp[]> = {\n 'application/pdf': [/^https:\\/\\/.*$/],\n };\n\n private async getArgs({\n prompt,\n maxOutputTokens,\n temperature,\n topP,\n topK,\n frequencyPenalty,\n presencePenalty,\n stopSequences,\n responseFormat,\n seed,\n providerOptions,\n tools,\n toolChoice,\n }: Parameters<LanguageModelV2['doGenerate']>[0]) {\n const warnings: LanguageModelV2CallWarning[] = [];\n\n const options =\n (await parseProviderOptions({\n provider: 'mistral',\n providerOptions,\n schema: mistralProviderOptions,\n })) ?? 
{};\n\n if (topK != null) {\n warnings.push({\n type: 'unsupported-setting',\n setting: 'topK',\n });\n }\n\n if (frequencyPenalty != null) {\n warnings.push({\n type: 'unsupported-setting',\n setting: 'frequencyPenalty',\n });\n }\n\n if (presencePenalty != null) {\n warnings.push({\n type: 'unsupported-setting',\n setting: 'presencePenalty',\n });\n }\n\n if (stopSequences != null) {\n warnings.push({\n type: 'unsupported-setting',\n setting: 'stopSequences',\n });\n }\n\n if (\n responseFormat != null &&\n responseFormat.type === 'json' &&\n responseFormat.schema != null\n ) {\n warnings.push({\n type: 'unsupported-setting',\n setting: 'responseFormat',\n details: 'JSON response format schema is not supported',\n });\n }\n\n const baseArgs = {\n // model id:\n model: this.modelId,\n\n // model specific settings:\n safe_prompt: options.safePrompt,\n\n // standardized settings:\n max_tokens: maxOutputTokens,\n temperature,\n top_p: topP,\n random_seed: seed,\n\n // response format:\n response_format:\n responseFormat?.type === 'json' ? { type: 'json_object' } : undefined,\n\n // mistral-specific provider options:\n document_image_limit: options.documentImageLimit,\n document_page_limit: options.documentPageLimit,\n\n // messages:\n messages: convertToMistralChatMessages(prompt),\n };\n\n const {\n tools: mistralTools,\n toolChoice: mistralToolChoice,\n toolWarnings,\n } = prepareTools({\n tools,\n toolChoice,\n });\n\n return {\n args: {\n ...baseArgs,\n tools: mistralTools,\n tool_choice: mistralToolChoice,\n },\n warnings: [...warnings, ...toolWarnings],\n };\n }\n\n async doGenerate(\n options: Parameters<LanguageModelV2['doGenerate']>[0],\n ): Promise<Awaited<ReturnType<LanguageModelV2['doGenerate']>>> {\n const { args: body, warnings } = await this.getArgs(options);\n\n const {\n responseHeaders,\n value: response,\n rawValue: rawResponse,\n } = await postJsonToApi({\n url: `${this.config.baseURL}/chat/completions`,\n headers: combineHeaders(this.config.headers(), options.headers),\n body,\n failedResponseHandler: mistralFailedResponseHandler,\n successfulResponseHandler: createJsonResponseHandler(\n mistralChatResponseSchema,\n ),\n abortSignal: options.abortSignal,\n fetch: this.config.fetch,\n });\n\n const choice = response.choices[0];\n const content: Array<LanguageModelV2Content> = [];\n\n // text content:\n let text = extractTextContent(choice.message.content);\n\n // when there is a trailing assistant message, mistral will send the\n // content of that message again. we skip this repeated content to\n // avoid duplication, e.g. 
in continuation mode.\n const lastMessage = body.messages[body.messages.length - 1];\n if (\n lastMessage.role === 'assistant' &&\n text?.startsWith(lastMessage.content)\n ) {\n text = text.slice(lastMessage.content.length);\n }\n\n if (text != null && text.length > 0) {\n content.push({ type: 'text', text });\n }\n\n // tool calls:\n if (choice.message.tool_calls != null) {\n for (const toolCall of choice.message.tool_calls) {\n content.push({\n type: 'tool-call',\n toolCallId: toolCall.id,\n toolName: toolCall.function.name,\n input: toolCall.function.arguments!,\n });\n }\n }\n\n return {\n content,\n finishReason: mapMistralFinishReason(choice.finish_reason),\n usage: {\n inputTokens: response.usage.prompt_tokens,\n outputTokens: response.usage.completion_tokens,\n totalTokens: response.usage.total_tokens,\n },\n request: { body },\n response: {\n ...getResponseMetadata(response),\n headers: responseHeaders,\n body: rawResponse,\n },\n warnings,\n };\n }\n\n async doStream(\n options: Parameters<LanguageModelV2['doStream']>[0],\n ): Promise<Awaited<ReturnType<LanguageModelV2['doStream']>>> {\n const { args, warnings } = await this.getArgs(options);\n const body = { ...args, stream: true };\n\n const { responseHeaders, value: response } = await postJsonToApi({\n url: `${this.config.baseURL}/chat/completions`,\n headers: combineHeaders(this.config.headers(), options.headers),\n body,\n failedResponseHandler: mistralFailedResponseHandler,\n successfulResponseHandler: createEventSourceResponseHandler(\n mistralChatChunkSchema,\n ),\n abortSignal: options.abortSignal,\n fetch: this.config.fetch,\n });\n\n let finishReason: LanguageModelV2FinishReason = 'unknown';\n const usage: LanguageModelV2Usage = {\n inputTokens: undefined,\n outputTokens: undefined,\n totalTokens: undefined,\n };\n\n let isFirstChunk = true;\n let activeText = false;\n\n return {\n stream: response.pipeThrough(\n new TransformStream<\n ParseResult<z.infer<typeof mistralChatChunkSchema>>,\n LanguageModelV2StreamPart\n >({\n start(controller) {\n controller.enqueue({ type: 'stream-start', warnings });\n },\n\n transform(chunk, controller) {\n // Emit raw chunk if requested (before anything else)\n if (options.includeRawChunks) {\n controller.enqueue({ type: 'raw', rawValue: chunk.rawValue });\n }\n\n if (!chunk.success) {\n controller.enqueue({ type: 'error', error: chunk.error });\n return;\n }\n\n const value = chunk.value;\n\n if (isFirstChunk) {\n isFirstChunk = false;\n\n controller.enqueue({\n type: 'response-metadata',\n ...getResponseMetadata(value),\n });\n }\n\n if (value.usage != null) {\n usage.inputTokens = value.usage.prompt_tokens;\n usage.outputTokens = value.usage.completion_tokens;\n usage.totalTokens = value.usage.total_tokens;\n }\n\n const choice = value.choices[0];\n const delta = choice.delta;\n\n const textContent = extractTextContent(delta.content);\n\n if (textContent != null && textContent.length > 0) {\n if (!activeText) {\n controller.enqueue({ type: 'text-start', id: '0' });\n activeText = true;\n }\n\n controller.enqueue({\n type: 'text-delta',\n id: '0',\n delta: textContent,\n });\n }\n\n if (delta?.tool_calls != null) {\n for (const toolCall of delta.tool_calls) {\n const toolCallId = toolCall.id;\n const toolName = toolCall.function.name;\n const input = toolCall.function.arguments;\n\n controller.enqueue({\n type: 'tool-input-start',\n id: toolCallId,\n toolName,\n });\n\n controller.enqueue({\n type: 'tool-input-delta',\n id: toolCallId,\n delta: input,\n });\n\n 
controller.enqueue({\n type: 'tool-input-end',\n id: toolCallId,\n });\n\n controller.enqueue({\n type: 'tool-call',\n toolCallId,\n toolName,\n input,\n });\n }\n }\n\n if (choice.finish_reason != null) {\n finishReason = mapMistralFinishReason(choice.finish_reason);\n }\n },\n\n flush(controller) {\n if (activeText) {\n controller.enqueue({ type: 'text-end', id: '0' });\n }\n\n controller.enqueue({\n type: 'finish',\n finishReason,\n usage,\n });\n },\n }),\n ),\n request: { body },\n response: { headers: responseHeaders },\n };\n }\n}\n\nfunction extractTextContent(content: z.infer<typeof mistralContentSchema>) {\n if (typeof content === 'string') {\n return content;\n }\n\n if (content == null) {\n return undefined;\n }\n\n const textContent: string[] = [];\n\n for (const chunk of content) {\n const { type } = chunk;\n\n switch (type) {\n case 'text':\n textContent.push(chunk.text);\n break;\n case 'image_url':\n case 'reference':\n // image content or reference content is currently ignored.\n break;\n default: {\n const _exhaustiveCheck: never = type;\n throw new Error(`Unsupported type: ${_exhaustiveCheck}`);\n }\n }\n }\n\n return textContent.length ? textContent.join('') : undefined;\n}\n\nconst mistralContentSchema = z\n .union([\n z.string(),\n z.array(\n z.discriminatedUnion('type', [\n z.object({\n type: z.literal('text'),\n text: z.string(),\n }),\n z.object({\n type: z.literal('image_url'),\n image_url: z.union([\n z.string(),\n z.object({\n url: z.string(),\n detail: z.string().nullable(),\n }),\n ]),\n }),\n z.object({\n type: z.literal('reference'),\n reference_ids: z.array(z.number()),\n }),\n ]),\n ),\n ])\n .nullish();\n\nconst mistralUsageSchema = z.object({\n prompt_tokens: z.number(),\n completion_tokens: z.number(),\n total_tokens: z.number(),\n});\n\n// limited version of the schema, focussed on what is needed for the implementation\n// this approach limits breakages when the API changes and increases efficiency\nconst mistralChatResponseSchema = z.object({\n id: z.string().nullish(),\n created: z.number().nullish(),\n model: z.string().nullish(),\n choices: z.array(\n z.object({\n message: z.object({\n role: z.literal('assistant'),\n content: mistralContentSchema,\n tool_calls: z\n .array(\n z.object({\n id: z.string(),\n function: z.object({ name: z.string(), arguments: z.string() }),\n }),\n )\n .nullish(),\n }),\n index: z.number(),\n finish_reason: z.string().nullish(),\n }),\n ),\n object: z.literal('chat.completion'),\n usage: mistralUsageSchema,\n});\n\n// limited version of the schema, focussed on what is needed for the implementation\n// this approach limits breakages when the API changes and increases efficiency\nconst mistralChatChunkSchema = z.object({\n id: z.string().nullish(),\n created: z.number().nullish(),\n model: z.string().nullish(),\n choices: z.array(\n z.object({\n delta: z.object({\n role: z.enum(['assistant']).optional(),\n content: mistralContentSchema,\n tool_calls: z\n .array(\n z.object({\n id: z.string(),\n function: z.object({ name: z.string(), arguments: z.string() }),\n }),\n )\n .nullish(),\n }),\n finish_reason: z.string().nullish(),\n index: z.number(),\n }),\n ),\n usage: mistralUsageSchema.nullish(),\n});\n","import {\n LanguageModelV2Prompt,\n UnsupportedFunctionalityError,\n} from '@ai-sdk/provider';\nimport { MistralPrompt } from './mistral-chat-prompt';\nimport { convertToBase64 } from '@ai-sdk/provider-utils';\n\nexport function convertToMistralChatMessages(\n prompt: LanguageModelV2Prompt,\n): MistralPrompt {\n const 
messages: MistralPrompt = [];\n\n for (let i = 0; i < prompt.length; i++) {\n const { role, content } = prompt[i];\n const isLastMessage = i === prompt.length - 1;\n\n switch (role) {\n case 'system': {\n messages.push({ role: 'system', content });\n break;\n }\n\n case 'user': {\n messages.push({\n role: 'user',\n content: content.map(part => {\n switch (part.type) {\n case 'text': {\n return { type: 'text', text: part.text };\n }\n\n case 'file': {\n if (part.mediaType.startsWith('image/')) {\n const mediaType =\n part.mediaType === 'image/*'\n ? 'image/jpeg'\n : part.mediaType;\n\n return {\n type: 'image_url',\n image_url:\n part.data instanceof URL\n ? part.data.toString()\n : `data:${mediaType};base64,${convertToBase64(part.data)}`,\n };\n } else if (part.mediaType === 'application/pdf') {\n return {\n type: 'document_url',\n document_url: part.data.toString(),\n };\n } else {\n throw new UnsupportedFunctionalityError({\n functionality:\n 'Only images and PDF file parts are supported',\n });\n }\n }\n }\n }),\n });\n break;\n }\n\n case 'assistant': {\n let text = '';\n const toolCalls: Array<{\n id: string;\n type: 'function';\n function: { name: string; arguments: string };\n }> = [];\n\n for (const part of content) {\n switch (part.type) {\n case 'text': {\n text += part.text;\n break;\n }\n case 'tool-call': {\n toolCalls.push({\n id: part.toolCallId,\n type: 'function',\n function: {\n name: part.toolName,\n arguments: JSON.stringify(part.input),\n },\n });\n break;\n }\n }\n }\n\n messages.push({\n role: 'assistant',\n content: text,\n prefix: isLastMessage ? true : undefined,\n tool_calls: toolCalls.length > 0 ? toolCalls : undefined,\n });\n\n break;\n }\n case 'tool': {\n for (const toolResponse of content) {\n const output = toolResponse.output;\n\n let contentValue: string;\n switch (output.type) {\n case 'text':\n case 'error-text':\n contentValue = output.value;\n break;\n case 'content':\n case 'json':\n case 'error-json':\n contentValue = JSON.stringify(output.value);\n break;\n }\n\n messages.push({\n role: 'tool',\n name: toolResponse.toolName,\n tool_call_id: toolResponse.toolCallId,\n content: contentValue,\n });\n }\n break;\n }\n default: {\n const _exhaustiveCheck: never = role;\n throw new Error(`Unsupported role: ${_exhaustiveCheck}`);\n }\n }\n }\n\n return messages;\n}\n","export function getResponseMetadata({\n id,\n model,\n created,\n}: {\n id?: string | undefined | null;\n created?: number | undefined | null;\n model?: string | undefined | null;\n}) {\n return {\n id: id ?? undefined,\n modelId: model ?? undefined,\n timestamp: created != null ? 
new Date(created * 1000) : undefined,\n };\n}\n","import { LanguageModelV2FinishReason } from '@ai-sdk/provider';\n\nexport function mapMistralFinishReason(\n finishReason: string | null | undefined,\n): LanguageModelV2FinishReason {\n switch (finishReason) {\n case 'stop':\n return 'stop';\n case 'length':\n case 'model_length':\n return 'length';\n case 'tool_calls':\n return 'tool-calls';\n default:\n return 'unknown';\n }\n}\n","import { z } from 'zod/v4';\n\n// https://docs.mistral.ai/getting-started/models/models_overview/\nexport type MistralChatModelId =\n // premier\n | 'ministral-3b-latest'\n | 'ministral-8b-latest'\n | 'mistral-large-latest'\n | 'mistral-medium-latest'\n | 'mistral-medium-2505'\n | 'mistral-small-latest'\n | 'pixtral-large-latest'\n // reasoning models\n | 'magistral-small-2506'\n | 'magistral-medium-2506'\n // free\n | 'pixtral-12b-2409'\n // legacy\n | 'open-mistral-7b'\n | 'open-mixtral-8x7b'\n | 'open-mixtral-8x22b'\n | (string & {});\n\nexport const mistralProviderOptions = z.object({\n /**\nWhether to inject a safety prompt before all conversations.\n\nDefaults to `false`.\n */\n safePrompt: z.boolean().optional(),\n\n documentImageLimit: z.number().optional(),\n documentPageLimit: z.number().optional(),\n});\n\nexport type MistralProviderOptions = z.infer<typeof mistralProviderOptions>;\n","import { createJsonErrorResponseHandler } from '@ai-sdk/provider-utils';\nimport { z } from 'zod/v4';\n\nconst mistralErrorDataSchema = z.object({\n object: z.literal('error'),\n message: z.string(),\n type: z.string(),\n param: z.string().nullable(),\n code: z.string().nullable(),\n});\n\nexport type MistralErrorData = z.infer<typeof mistralErrorDataSchema>;\n\nexport const mistralFailedResponseHandler = createJsonErrorResponseHandler({\n errorSchema: mistralErrorDataSchema,\n errorToMessage: data => data.message,\n});\n","import {\n LanguageModelV2CallOptions,\n LanguageModelV2CallWarning,\n UnsupportedFunctionalityError,\n} from '@ai-sdk/provider';\nimport { MistralToolChoice } from './mistral-chat-prompt';\n\nexport function prepareTools({\n tools,\n toolChoice,\n}: {\n tools: LanguageModelV2CallOptions['tools'];\n toolChoice?: LanguageModelV2CallOptions['toolChoice'];\n}): {\n tools:\n | Array<{\n type: 'function';\n function: {\n name: string;\n description: string | undefined;\n parameters: unknown;\n };\n }>\n | undefined;\n toolChoice: MistralToolChoice | undefined;\n toolWarnings: LanguageModelV2CallWarning[];\n} {\n // when the tools array is empty, change it to undefined to prevent errors:\n tools = tools?.length ? 
tools : undefined;\n\n const toolWarnings: LanguageModelV2CallWarning[] = [];\n\n if (tools == null) {\n return { tools: undefined, toolChoice: undefined, toolWarnings };\n }\n\n const mistralTools: Array<{\n type: 'function';\n function: {\n name: string;\n description: string | undefined;\n parameters: unknown;\n };\n }> = [];\n\n for (const tool of tools) {\n if (tool.type === 'provider-defined') {\n toolWarnings.push({ type: 'unsupported-tool', tool });\n } else {\n mistralTools.push({\n type: 'function',\n function: {\n name: tool.name,\n description: tool.description,\n parameters: tool.inputSchema,\n },\n });\n }\n }\n\n if (toolChoice == null) {\n return { tools: mistralTools, toolChoice: undefined, toolWarnings };\n }\n\n const type = toolChoice.type;\n\n switch (type) {\n case 'auto':\n case 'none':\n return { tools: mistralTools, toolChoice: type, toolWarnings };\n case 'required':\n return { tools: mistralTools, toolChoice: 'any', toolWarnings };\n\n // mistral does not support tool mode directly,\n // so we filter the tools and force the tool choice through 'any'\n case 'tool':\n return {\n tools: mistralTools.filter(\n tool => tool.function.name === toolChoice.toolName,\n ),\n toolChoice: 'any',\n toolWarnings,\n };\n default: {\n const _exhaustiveCheck: never = type;\n throw new UnsupportedFunctionalityError({\n functionality: `tool choice type: ${_exhaustiveCheck}`,\n });\n }\n }\n}\n","import {\n EmbeddingModelV2,\n TooManyEmbeddingValuesForCallError,\n} from '@ai-sdk/provider';\nimport {\n combineHeaders,\n createJsonResponseHandler,\n FetchFunction,\n postJsonToApi,\n} from '@ai-sdk/provider-utils';\nimport { z } from 'zod/v4';\nimport { MistralEmbeddingModelId } from './mistral-embedding-options';\nimport { mistralFailedResponseHandler } from './mistral-error';\n\ntype MistralEmbeddingConfig = {\n provider: string;\n baseURL: string;\n headers: () => Record<string, string | undefined>;\n fetch?: FetchFunction;\n};\n\nexport class MistralEmbeddingModel implements EmbeddingModelV2<string> {\n readonly specificationVersion = 'v2';\n readonly modelId: MistralEmbeddingModelId;\n readonly maxEmbeddingsPerCall = 32;\n readonly supportsParallelCalls = false;\n\n private readonly config: MistralEmbeddingConfig;\n\n get provider(): string {\n return this.config.provider;\n }\n\n constructor(\n modelId: MistralEmbeddingModelId,\n config: MistralEmbeddingConfig,\n ) {\n this.modelId = modelId;\n this.config = config;\n }\n\n async doEmbed({\n values,\n abortSignal,\n headers,\n }: Parameters<EmbeddingModelV2<string>['doEmbed']>[0]): Promise<\n Awaited<ReturnType<EmbeddingModelV2<string>['doEmbed']>>\n > {\n if (values.length > this.maxEmbeddingsPerCall) {\n throw new TooManyEmbeddingValuesForCallError({\n provider: this.provider,\n modelId: this.modelId,\n maxEmbeddingsPerCall: this.maxEmbeddingsPerCall,\n values,\n });\n }\n\n const {\n responseHeaders,\n value: response,\n rawValue,\n } = await postJsonToApi({\n url: `${this.config.baseURL}/embeddings`,\n headers: combineHeaders(this.config.headers(), headers),\n body: {\n model: this.modelId,\n input: values,\n encoding_format: 'float',\n },\n failedResponseHandler: mistralFailedResponseHandler,\n successfulResponseHandler: createJsonResponseHandler(\n MistralTextEmbeddingResponseSchema,\n ),\n abortSignal,\n fetch: this.config.fetch,\n });\n\n return {\n embeddings: response.data.map(item => item.embedding),\n usage: response.usage\n ? 
{ tokens: response.usage.prompt_tokens }\n : undefined,\n response: { headers: responseHeaders, body: rawValue },\n };\n }\n}\n\n// minimal version of the schema, focussed on what is needed for the implementation\n// this approach limits breakages when the API changes and increases efficiency\nconst MistralTextEmbeddingResponseSchema = z.object({\n data: z.array(z.object({ embedding: z.array(z.number()) })),\n usage: z.object({ prompt_tokens: z.number() }).nullish(),\n});\n"],"mappings":";AAAA;AAAA,EAGE;AAAA,OAEK;AACP;AAAA,EAEE;AAAA,EACA;AAAA,OACK;;;ACFP;AAAA,EACE;AAAA,EACA;AAAA,EACA;AAAA,EAEA;AAAA,EAEA;AAAA,OACK;AACP,SAAS,KAAAA,UAAS;;;ACjBlB;AAAA,EAEE;AAAA,OACK;AAEP,SAAS,uBAAuB;AAEzB,SAAS,6BACd,QACe;AACf,QAAM,WAA0B,CAAC;AAEjC,WAAS,IAAI,GAAG,IAAI,OAAO,QAAQ,KAAK;AACtC,UAAM,EAAE,MAAM,QAAQ,IAAI,OAAO,CAAC;AAClC,UAAM,gBAAgB,MAAM,OAAO,SAAS;AAE5C,YAAQ,MAAM;AAAA,MACZ,KAAK,UAAU;AACb,iBAAS,KAAK,EAAE,MAAM,UAAU,QAAQ,CAAC;AACzC;AAAA,MACF;AAAA,MAEA,KAAK,QAAQ;AACX,iBAAS,KAAK;AAAA,UACZ,MAAM;AAAA,UACN,SAAS,QAAQ,IAAI,UAAQ;AAC3B,oBAAQ,KAAK,MAAM;AAAA,cACjB,KAAK,QAAQ;AACX,uBAAO,EAAE,MAAM,QAAQ,MAAM,KAAK,KAAK;AAAA,cACzC;AAAA,cAEA,KAAK,QAAQ;AACX,oBAAI,KAAK,UAAU,WAAW,QAAQ,GAAG;AACvC,wBAAM,YACJ,KAAK,cAAc,YACf,eACA,KAAK;AAEX,yBAAO;AAAA,oBACL,MAAM;AAAA,oBACN,WACE,KAAK,gBAAgB,MACjB,KAAK,KAAK,SAAS,IACnB,QAAQ,SAAS,WAAW,gBAAgB,KAAK,IAAI,CAAC;AAAA,kBAC9D;AAAA,gBACF,WAAW,KAAK,cAAc,mBAAmB;AAC/C,yBAAO;AAAA,oBACL,MAAM;AAAA,oBACN,cAAc,KAAK,KAAK,SAAS;AAAA,kBACnC;AAAA,gBACF,OAAO;AACL,wBAAM,IAAI,8BAA8B;AAAA,oBACtC,eACE;AAAA,kBACJ,CAAC;AAAA,gBACH;AAAA,cACF;AAAA,YACF;AAAA,UACF,CAAC;AAAA,QACH,CAAC;AACD;AAAA,MACF;AAAA,MAEA,KAAK,aAAa;AAChB,YAAI,OAAO;AACX,cAAM,YAID,CAAC;AAEN,mBAAW,QAAQ,SAAS;AAC1B,kBAAQ,KAAK,MAAM;AAAA,YACjB,KAAK,QAAQ;AACX,sBAAQ,KAAK;AACb;AAAA,YACF;AAAA,YACA,KAAK,aAAa;AAChB,wBAAU,KAAK;AAAA,gBACb,IAAI,KAAK;AAAA,gBACT,MAAM;AAAA,gBACN,UAAU;AAAA,kBACR,MAAM,KAAK;AAAA,kBACX,WAAW,KAAK,UAAU,KAAK,KAAK;AAAA,gBACtC;AAAA,cACF,CAAC;AACD;AAAA,YACF;AAAA,UACF;AAAA,QACF;AAEA,iBAAS,KAAK;AAAA,UACZ,MAAM;AAAA,UACN,SAAS;AAAA,UACT,QAAQ,gBAAgB,OAAO;AAAA,UAC/B,YAAY,UAAU,SAAS,IAAI,YAAY;AAAA,QACjD,CAAC;AAED;AAAA,MACF;AAAA,MACA,KAAK,QAAQ;AACX,mBAAW,gBAAgB,SAAS;AAClC,gBAAM,SAAS,aAAa;AAE5B,cAAI;AACJ,kBAAQ,OAAO,MAAM;AAAA,YACnB,KAAK;AAAA,YACL,KAAK;AACH,6BAAe,OAAO;AACtB;AAAA,YACF,KAAK;AAAA,YACL,KAAK;AAAA,YACL,KAAK;AACH,6BAAe,KAAK,UAAU,OAAO,KAAK;AAC1C;AAAA,UACJ;AAEA,mBAAS,KAAK;AAAA,YACZ,MAAM;AAAA,YACN,MAAM,aAAa;AAAA,YACnB,cAAc,aAAa;AAAA,YAC3B,SAAS;AAAA,UACX,CAAC;AAAA,QACH;AACA;AAAA,MACF;AAAA,MACA,SAAS;AACP,cAAM,mBAA0B;AAChC,cAAM,IAAI,MAAM,qBAAqB,gBAAgB,EAAE;AAAA,MACzD;AAAA,IACF;AAAA,EACF;AAEA,SAAO;AACT;;;ACtIO,SAAS,oBAAoB;AAAA,EAClC;AAAA,EACA;AAAA,EACA;AACF,GAIG;AACD,SAAO;AAAA,IACL,IAAI,kBAAM;AAAA,IACV,SAAS,wBAAS;AAAA,IAClB,WAAW,WAAW,OAAO,IAAI,KAAK,UAAU,GAAI,IAAI;AAAA,EAC1D;AACF;;;ACZO,SAAS,uBACd,cAC6B;AAC7B,UAAQ,cAAc;AAAA,IACpB,KAAK;AACH,aAAO;AAAA,IACT,KAAK;AAAA,IACL,KAAK;AACH,aAAO;AAAA,IACT,KAAK;AACH,aAAO;AAAA,IACT;AACE,aAAO;AAAA,EACX;AACF;;;AChBA,SAAS,SAAS;AAuBX,IAAM,yBAAyB,EAAE,OAAO;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,EAM7C,YAAY,EAAE,QAAQ,EAAE,SAAS;AAAA,EAEjC,oBAAoB,EAAE,OAAO,EAAE,SAAS;AAAA,EACxC,mBAAmB,EAAE,OAAO,EAAE,SAAS;AACzC,CAAC;;;ACjCD,SAAS,sCAAsC;AAC/C,SAAS,KAAAC,UAAS;AAElB,IAAM,yBAAyBA,GAAE,OAAO;AAAA,EACtC,QAAQA,GAAE,QAAQ,OAAO;AAAA,EACzB,SAASA,GAAE,OAAO;AAAA,EAClB,MAAMA,GAAE,OAAO;AAAA,EACf,OAAOA,GAAE,OAAO,EAAE,SAAS;AAAA,EAC3B,MAAMA,GAAE,OAAO,EAAE,SAAS;AAC5B,CAAC;AAIM,IAAM,+BAA+B,+BAA+B;AAAA,EACzE,aAAa;AAAA,EACb,gBAAgB,UAAQ,KAAK;AAC/B,CAAC;;;AChBD;AAAA,EAGE,iCAAAC;AAAA,OACK;AAGA,SAAS,aAAa;AAAA,EAC3B;AAAA,EACA;AACF,GAgBE;AAEA,WAAQ,+BAAO,UAAS,QAAQ;AAEh
C,QAAM,eAA6C,CAAC;AAEpD,MAAI,SAAS,MAAM;AACjB,WAAO,EAAE,OAAO,QAAW,YAAY,QAAW,aAAa;AAAA,EACjE;AAEA,QAAM,eAOD,CAAC;AAEN,aAAW,QAAQ,OAAO;AACxB,QAAI,KAAK,SAAS,oBAAoB;AACpC,mBAAa,KAAK,EAAE,MAAM,oBAAoB,KAAK,CAAC;AAAA,IACtD,OAAO;AACL,mBAAa,KAAK;AAAA,QAChB,MAAM;AAAA,QACN,UAAU;AAAA,UACR,MAAM,KAAK;AAAA,UACX,aAAa,KAAK;AAAA,UAClB,YAAY,KAAK;AAAA,QACnB;AAAA,MACF,CAAC;AAAA,IACH;AAAA,EACF;AAEA,MAAI,cAAc,MAAM;AACtB,WAAO,EAAE,OAAO,cAAc,YAAY,QAAW,aAAa;AAAA,EACpE;AAEA,QAAM,OAAO,WAAW;AAExB,UAAQ,MAAM;AAAA,IACZ,KAAK;AAAA,IACL,KAAK;AACH,aAAO,EAAE,OAAO,cAAc,YAAY,MAAM,aAAa;AAAA,IAC/D,KAAK;AACH,aAAO,EAAE,OAAO,cAAc,YAAY,OAAO,aAAa;AAAA,IAIhE,KAAK;AACH,aAAO;AAAA,QACL,OAAO,aAAa;AAAA,UAClB,UAAQ,KAAK,SAAS,SAAS,WAAW;AAAA,QAC5C;AAAA,QACA,YAAY;AAAA,QACZ;AAAA,MACF;AAAA,IACF,SAAS;AACP,YAAM,mBAA0B;AAChC,YAAM,IAAIA,+BAA8B;AAAA,QACtC,eAAe,qBAAqB,gBAAgB;AAAA,MACtD,CAAC;AAAA,IACH;AAAA,EACF;AACF;;;ANvDO,IAAM,2BAAN,MAA0D;AAAA,EAO/D,YAAY,SAA6B,QAA2B;AANpE,SAAS,uBAAuB;AAehC,SAAS,gBAA0C;AAAA,MACjD,mBAAmB,CAAC,gBAAgB;AAAA,IACtC;AAVE,SAAK,UAAU;AACf,SAAK,SAAS;AAAA,EAChB;AAAA,EAEA,IAAI,WAAmB;AACrB,WAAO,KAAK,OAAO;AAAA,EACrB;AAAA,EAMA,MAAc,QAAQ;AAAA,IACpB;AAAA,IACA;AAAA,IACA;AAAA,IACA;AAAA,IACA;AAAA,IACA;AAAA,IACA;AAAA,IACA;AAAA,IACA;AAAA,IACA;AAAA,IACA;AAAA,IACA;AAAA,IACA;AAAA,EACF,GAAiD;AArEnD;AAsEI,UAAM,WAAyC,CAAC;AAEhD,UAAM,WACH,WAAM,qBAAqB;AAAA,MAC1B,UAAU;AAAA,MACV;AAAA,MACA,QAAQ;AAAA,IACV,CAAC,MAJA,YAIM,CAAC;AAEV,QAAI,QAAQ,MAAM;AAChB,eAAS,KAAK;AAAA,QACZ,MAAM;AAAA,QACN,SAAS;AAAA,MACX,CAAC;AAAA,IACH;AAEA,QAAI,oBAAoB,MAAM;AAC5B,eAAS,KAAK;AAAA,QACZ,MAAM;AAAA,QACN,SAAS;AAAA,MACX,CAAC;AAAA,IACH;AAEA,QAAI,mBAAmB,MAAM;AAC3B,eAAS,KAAK;AAAA,QACZ,MAAM;AAAA,QACN,SAAS;AAAA,MACX,CAAC;AAAA,IACH;AAEA,QAAI,iBAAiB,MAAM;AACzB,eAAS,KAAK;AAAA,QACZ,MAAM;AAAA,QACN,SAAS;AAAA,MACX,CAAC;AAAA,IACH;AAEA,QACE,kBAAkB,QAClB,eAAe,SAAS,UACxB,eAAe,UAAU,MACzB;AACA,eAAS,KAAK;AAAA,QACZ,MAAM;AAAA,QACN,SAAS;AAAA,QACT,SAAS;AAAA,MACX,CAAC;AAAA,IACH;AAEA,UAAM,WAAW;AAAA;AAAA,MAEf,OAAO,KAAK;AAAA;AAAA,MAGZ,aAAa,QAAQ;AAAA;AAAA,MAGrB,YAAY;AAAA,MACZ;AAAA,MACA,OAAO;AAAA,MACP,aAAa;AAAA;AAAA,MAGb,kBACE,iDAAgB,UAAS,SAAS,EAAE,MAAM,cAAc,IAAI;AAAA;AAAA,MAG9D,sBAAsB,QAAQ;AAAA,MAC9B,qBAAqB,QAAQ;AAAA;AAAA,MAG7B,UAAU,6BAA6B,MAAM;AAAA,IAC/C;AAEA,UAAM;AAAA,MACJ,OAAO;AAAA,MACP,YAAY;AAAA,MACZ;AAAA,IACF,IAAI,aAAa;AAAA,MACf;AAAA,MACA;AAAA,IACF,CAAC;AAED,WAAO;AAAA,MACL,MAAM;AAAA,QACJ,GAAG;AAAA,QACH,OAAO;AAAA,QACP,aAAa;AAAA,MACf;AAAA,MACA,UAAU,CAAC,GAAG,UAAU,GAAG,YAAY;AAAA,IACzC;AAAA,EACF;AAAA,EAEA,MAAM,WACJ,SAC6D;AAC7D,UAAM,EAAE,MAAM,MAAM,SAAS,IAAI,MAAM,KAAK,QAAQ,OAAO;AAE3D,UAAM;AAAA,MACJ;AAAA,MACA,OAAO;AAAA,MACP,UAAU;AAAA,IACZ,IAAI,MAAM,cAAc;AAAA,MACtB,KAAK,GAAG,KAAK,OAAO,OAAO;AAAA,MAC3B,SAAS,eAAe,KAAK,OAAO,QAAQ,GAAG,QAAQ,OAAO;AAAA,MAC9D;AAAA,MACA,uBAAuB;AAAA,MACvB,2BAA2B;AAAA,QACzB;AAAA,MACF;AAAA,MACA,aAAa,QAAQ;AAAA,MACrB,OAAO,KAAK,OAAO;AAAA,IACrB,CAAC;AAED,UAAM,SAAS,SAAS,QAAQ,CAAC;AACjC,UAAM,UAAyC,CAAC;AAGhD,QAAI,OAAO,mBAAmB,OAAO,QAAQ,OAAO;AAKpD,UAAM,cAAc,KAAK,SAAS,KAAK,SAAS,SAAS,CAAC;AAC1D,QACE,YAAY,SAAS,gBACrB,6BAAM,WAAW,YAAY,WAC7B;AACA,aAAO,KAAK,MAAM,YAAY,QAAQ,MAAM;AAAA,IAC9C;AAEA,QAAI,QAAQ,QAAQ,KAAK,SAAS,GAAG;AACnC,cAAQ,KAAK,EAAE,MAAM,QAAQ,KAAK,CAAC;AAAA,IACrC;AAGA,QAAI,OAAO,QAAQ,cAAc,MAAM;AACrC,iBAAW,YAAY,OAAO,QAAQ,YAAY;AAChD,gBAAQ,KAAK;AAAA,UACX,MAAM;AAAA,UACN,YAAY,SAAS;AAAA,UACrB,UAAU,SAAS,SAAS;AAAA,UAC5B,OAAO,SAAS,SAAS;AAAA,QAC3B,CAAC;AAAA,MACH;AAAA,IACF;AAEA,WAAO;AAAA,MACL;AAAA,MACA,cAAc,uBAAuB,OAAO,aAAa;AAAA,MACzD,OAAO;AAAA,QACL,aAAa,SAAS,MAAM;AAAA,QAC5B,cAAc,SAAS,MAAM;AAAA,QAC7B,aAAa,SAAS,MAAM;AAAA,MAC9B;AAAA,MACA,SAAS,EAAE,KAAK;AAAA,MAChB,UAAU;AAAA,QACR,GAAG,oBAAoB,QAAQ;AAAA,QAC/B,SAAS;AAAA
,QACT,MAAM;AAAA,MACR;AAAA,MACA;AAAA,IACF;AAAA,EACF;AAAA,EAEA,MAAM,SACJ,SAC2D;AAC3D,UAAM,EAAE,MAAM,SAAS,IAAI,MAAM,KAAK,QAAQ,OAAO;AACrD,UAAM,OAAO,EAAE,GAAG,MAAM,QAAQ,KAAK;AAErC,UAAM,EAAE,iBAAiB,OAAO,SAAS,IAAI,MAAM,cAAc;AAAA,MAC/D,KAAK,GAAG,KAAK,OAAO,OAAO;AAAA,MAC3B,SAAS,eAAe,KAAK,OAAO,QAAQ,GAAG,QAAQ,OAAO;AAAA,MAC9D;AAAA,MACA,uBAAuB;AAAA,MACvB,2BAA2B;AAAA,QACzB;AAAA,MACF;AAAA,MACA,aAAa,QAAQ;AAAA,MACrB,OAAO,KAAK,OAAO;AAAA,IACrB,CAAC;AAED,QAAI,eAA4C;AAChD,UAAM,QAA8B;AAAA,MAClC,aAAa;AAAA,MACb,cAAc;AAAA,MACd,aAAa;AAAA,IACf;AAEA,QAAI,eAAe;AACnB,QAAI,aAAa;AAEjB,WAAO;AAAA,MACL,QAAQ,SAAS;AAAA,QACf,IAAI,gBAGF;AAAA,UACA,MAAM,YAAY;AAChB,uBAAW,QAAQ,EAAE,MAAM,gBAAgB,SAAS,CAAC;AAAA,UACvD;AAAA,UAEA,UAAU,OAAO,YAAY;AAE3B,gBAAI,QAAQ,kBAAkB;AAC5B,yBAAW,QAAQ,EAAE,MAAM,OAAO,UAAU,MAAM,SAAS,CAAC;AAAA,YAC9D;AAEA,gBAAI,CAAC,MAAM,SAAS;AAClB,yBAAW,QAAQ,EAAE,MAAM,SAAS,OAAO,MAAM,MAAM,CAAC;AACxD;AAAA,YACF;AAEA,kBAAM,QAAQ,MAAM;AAEpB,gBAAI,cAAc;AAChB,6BAAe;AAEf,yBAAW,QAAQ;AAAA,gBACjB,MAAM;AAAA,gBACN,GAAG,oBAAoB,KAAK;AAAA,cAC9B,CAAC;AAAA,YACH;AAEA,gBAAI,MAAM,SAAS,MAAM;AACvB,oBAAM,cAAc,MAAM,MAAM;AAChC,oBAAM,eAAe,MAAM,MAAM;AACjC,oBAAM,cAAc,MAAM,MAAM;AAAA,YAClC;AAEA,kBAAM,SAAS,MAAM,QAAQ,CAAC;AAC9B,kBAAM,QAAQ,OAAO;AAErB,kBAAM,cAAc,mBAAmB,MAAM,OAAO;AAEpD,gBAAI,eAAe,QAAQ,YAAY,SAAS,GAAG;AACjD,kBAAI,CAAC,YAAY;AACf,2BAAW,QAAQ,EAAE,MAAM,cAAc,IAAI,IAAI,CAAC;AAClD,6BAAa;AAAA,cACf;AAEA,yBAAW,QAAQ;AAAA,gBACjB,MAAM;AAAA,gBACN,IAAI;AAAA,gBACJ,OAAO;AAAA,cACT,CAAC;AAAA,YACH;AAEA,iBAAI,+BAAO,eAAc,MAAM;AAC7B,yBAAW,YAAY,MAAM,YAAY;AACvC,sBAAM,aAAa,SAAS;AAC5B,sBAAM,WAAW,SAAS,SAAS;AACnC,sBAAM,QAAQ,SAAS,SAAS;AAEhC,2BAAW,QAAQ;AAAA,kBACjB,MAAM;AAAA,kBACN,IAAI;AAAA,kBACJ;AAAA,gBACF,CAAC;AAED,2BAAW,QAAQ;AAAA,kBACjB,MAAM;AAAA,kBACN,IAAI;AAAA,kBACJ,OAAO;AAAA,gBACT,CAAC;AAED,2BAAW,QAAQ;AAAA,kBACjB,MAAM;AAAA,kBACN,IAAI;AAAA,gBACN,CAAC;AAED,2BAAW,QAAQ;AAAA,kBACjB,MAAM;AAAA,kBACN;AAAA,kBACA;AAAA,kBACA;AAAA,gBACF,CAAC;AAAA,cACH;AAAA,YACF;AAEA,gBAAI,OAAO,iBAAiB,MAAM;AAChC,6BAAe,uBAAuB,OAAO,aAAa;AAAA,YAC5D;AAAA,UACF;AAAA,UAEA,MAAM,YAAY;AAChB,gBAAI,YAAY;AACd,yBAAW,QAAQ,EAAE,MAAM,YAAY,IAAI,IAAI,CAAC;AAAA,YAClD;AAEA,uBAAW,QAAQ;AAAA,cACjB,MAAM;AAAA,cACN;AAAA,cACA;AAAA,YACF,CAAC;AAAA,UACH;AAAA,QACF,CAAC;AAAA,MACH;AAAA,MACA,SAAS,EAAE,KAAK;AAAA,MAChB,UAAU,EAAE,SAAS,gBAAgB;AAAA,IACvC;AAAA,EACF;AACF;AAEA,SAAS,mBAAmB,SAA+C;AACzE,MAAI,OAAO,YAAY,UAAU;AAC/B,WAAO;AAAA,EACT;AAEA,MAAI,WAAW,MAAM;AACnB,WAAO;AAAA,EACT;AAEA,QAAM,cAAwB,CAAC;AAE/B,aAAW,SAAS,SAAS;AAC3B,UAAM,EAAE,KAAK,IAAI;AAEjB,YAAQ,MAAM;AAAA,MACZ,KAAK;AACH,oBAAY,KAAK,MAAM,IAAI;AAC3B;AAAA,MACF,KAAK;AAAA,MACL,KAAK;AAEH;AAAA,MACF,SAAS;AACP,cAAM,mBAA0B;AAChC,cAAM,IAAI,MAAM,qBAAqB,gBAAgB,EAAE;AAAA,MACzD;AAAA,IACF;AAAA,EACF;AAEA,SAAO,YAAY,SAAS,YAAY,KAAK,EAAE,IAAI;AACrD;AAEA,IAAM,uBAAuBC,GAC1B,MAAM;AAAA,EACLA,GAAE,OAAO;AAAA,EACTA,GAAE;AAAA,IACAA,GAAE,mBAAmB,QAAQ;AAAA,MAC3BA,GAAE,OAAO;AAAA,QACP,MAAMA,GAAE,QAAQ,MAAM;AAAA,QACtB,MAAMA,GAAE,OAAO;AAAA,MACjB,CAAC;AAAA,MACDA,GAAE,OAAO;AAAA,QACP,MAAMA,GAAE,QAAQ,WAAW;AAAA,QAC3B,WAAWA,GAAE,MAAM;AAAA,UACjBA,GAAE,OAAO;AAAA,UACTA,GAAE,OAAO;AAAA,YACP,KAAKA,GAAE,OAAO;AAAA,YACd,QAAQA,GAAE,OAAO,EAAE,SAAS;AAAA,UAC9B,CAAC;AAAA,QACH,CAAC;AAAA,MACH,CAAC;AAAA,MACDA,GAAE,OAAO;AAAA,QACP,MAAMA,GAAE,QAAQ,WAAW;AAAA,QAC3B,eAAeA,GAAE,MAAMA,GAAE,OAAO,CAAC;AAAA,MACnC,CAAC;AAAA,IACH,CAAC;AAAA,EACH;AACF,CAAC,EACA,QAAQ;AAEX,IAAM,qBAAqBA,GAAE,OAAO;AAAA,EAClC,eAAeA,GAAE,OAAO;AAAA,EACxB,mBAAmBA,GAAE,OAAO;AAAA,EAC5B,cAAcA,GAAE,OAAO;AACzB,CAAC;AAID,IAAM,4BAA4BA,GAAE,OAAO;AAAA,EACzC,IAAIA,GAAE,OAAO,EAAE,QAAQ;AAAA,EACvB,SAASA,GAAE,OAAO,EAAE,QAAQ;AAAA,EAC5B,OAAOA,GAAE,OAAO,EAAE,QAAQ;AAAA,EAC1B,SAASA,GAAE;AAAA,IACTA,G
AAE,OAAO;AAAA,MACP,SAASA,GAAE,OAAO;AAAA,QAChB,MAAMA,GAAE,QAAQ,WAAW;AAAA,QAC3B,SAAS;AAAA,QACT,YAAYA,GACT;AAAA,UACCA,GAAE,OAAO;AAAA,YACP,IAAIA,GAAE,OAAO;AAAA,YACb,UAAUA,GAAE,OAAO,EAAE,MAAMA,GAAE,OAAO,GAAG,WAAWA,GAAE,OAAO,EAAE,CAAC;AAAA,UAChE,CAAC;AAAA,QACH,EACC,QAAQ;AAAA,MACb,CAAC;AAAA,MACD,OAAOA,GAAE,OAAO;AAAA,MAChB,eAAeA,GAAE,OAAO,EAAE,QAAQ;AAAA,IACpC,CAAC;AAAA,EACH;AAAA,EACA,QAAQA,GAAE,QAAQ,iBAAiB;AAAA,EACnC,OAAO;AACT,CAAC;AAID,IAAM,yBAAyBA,GAAE,OAAO;AAAA,EACtC,IAAIA,GAAE,OAAO,EAAE,QAAQ;AAAA,EACvB,SAASA,GAAE,OAAO,EAAE,QAAQ;AAAA,EAC5B,OAAOA,GAAE,OAAO,EAAE,QAAQ;AAAA,EAC1B,SAASA,GAAE;AAAA,IACTA,GAAE,OAAO;AAAA,MACP,OAAOA,GAAE,OAAO;AAAA,QACd,MAAMA,GAAE,KAAK,CAAC,WAAW,CAAC,EAAE,SAAS;AAAA,QACrC,SAAS;AAAA,QACT,YAAYA,GACT;AAAA,UACCA,GAAE,OAAO;AAAA,YACP,IAAIA,GAAE,OAAO;AAAA,YACb,UAAUA,GAAE,OAAO,EAAE,MAAMA,GAAE,OAAO,GAAG,WAAWA,GAAE,OAAO,EAAE,CAAC;AAAA,UAChE,CAAC;AAAA,QACH,EACC,QAAQ;AAAA,MACb,CAAC;AAAA,MACD,eAAeA,GAAE,OAAO,EAAE,QAAQ;AAAA,MAClC,OAAOA,GAAE,OAAO;AAAA,IAClB,CAAC;AAAA,EACH;AAAA,EACA,OAAO,mBAAmB,QAAQ;AACpC,CAAC;;;AO9eD;AAAA,EAEE;AAAA,OACK;AACP;AAAA,EACE,kBAAAC;AAAA,EACA,6BAAAC;AAAA,EAEA,iBAAAC;AAAA,OACK;AACP,SAAS,KAAAC,UAAS;AAWX,IAAM,wBAAN,MAAgE;AAAA,EAYrE,YACE,SACA,QACA;AAdF,SAAS,uBAAuB;AAEhC,SAAS,uBAAuB;AAChC,SAAS,wBAAwB;AAY/B,SAAK,UAAU;AACf,SAAK,SAAS;AAAA,EAChB;AAAA,EAVA,IAAI,WAAmB;AACrB,WAAO,KAAK,OAAO;AAAA,EACrB;AAAA,EAUA,MAAM,QAAQ;AAAA,IACZ;AAAA,IACA;AAAA,IACA;AAAA,EACF,GAEE;AACA,QAAI,OAAO,SAAS,KAAK,sBAAsB;AAC7C,YAAM,IAAI,mCAAmC;AAAA,QAC3C,UAAU,KAAK;AAAA,QACf,SAAS,KAAK;AAAA,QACd,sBAAsB,KAAK;AAAA,QAC3B;AAAA,MACF,CAAC;AAAA,IACH;AAEA,UAAM;AAAA,MACJ;AAAA,MACA,OAAO;AAAA,MACP;AAAA,IACF,IAAI,MAAMC,eAAc;AAAA,MACtB,KAAK,GAAG,KAAK,OAAO,OAAO;AAAA,MAC3B,SAASC,gBAAe,KAAK,OAAO,QAAQ,GAAG,OAAO;AAAA,MACtD,MAAM;AAAA,QACJ,OAAO,KAAK;AAAA,QACZ,OAAO;AAAA,QACP,iBAAiB;AAAA,MACnB;AAAA,MACA,uBAAuB;AAAA,MACvB,2BAA2BC;AAAA,QACzB;AAAA,MACF;AAAA,MACA;AAAA,MACA,OAAO,KAAK,OAAO;AAAA,IACrB,CAAC;AAED,WAAO;AAAA,MACL,YAAY,SAAS,KAAK,IAAI,UAAQ,KAAK,SAAS;AAAA,MACpD,OAAO,SAAS,QACZ,EAAE,QAAQ,SAAS,MAAM,cAAc,IACvC;AAAA,MACJ,UAAU,EAAE,SAAS,iBAAiB,MAAM,SAAS;AAAA,IACvD;AAAA,EACF;AACF;AAIA,IAAM,qCAAqCC,GAAE,OAAO;AAAA,EAClD,MAAMA,GAAE,MAAMA,GAAE,OAAO,EAAE,WAAWA,GAAE,MAAMA,GAAE,OAAO,CAAC,EAAE,CAAC,CAAC;AAAA,EAC1D,OAAOA,GAAE,OAAO,EAAE,eAAeA,GAAE,OAAO,EAAE,CAAC,EAAE,QAAQ;AACzD,CAAC;;;ARvBM,SAAS,cACd,UAAmC,CAAC,GACnB;AAvEnB;AAwEE,QAAM,WACJ,0BAAqB,QAAQ,OAAO,MAApC,YAAyC;AAE3C,QAAM,aAAa,OAAO;AAAA,IACxB,eAAe,UAAU,WAAW;AAAA,MAClC,QAAQ,QAAQ;AAAA,MAChB,yBAAyB;AAAA,MACzB,aAAa;AAAA,IACf,CAAC,CAAC;AAAA,IACF,GAAG,QAAQ;AAAA,EACb;AAEA,QAAM,kBAAkB,CAAC,YACvB,IAAI,yBAAyB,SAAS;AAAA,IACpC,UAAU;AAAA,IACV;AAAA,IACA,SAAS;AAAA,IACT,OAAO,QAAQ;AAAA,EACjB,CAAC;AAEH,QAAM,uBAAuB,CAAC,YAC5B,IAAI,sBAAsB,SAAS;AAAA,IACjC,UAAU;AAAA,IACV;AAAA,IACA,SAAS;AAAA,IACT,OAAO,QAAQ;AAAA,EACjB,CAAC;AAEH,QAAM,WAAW,SAAU,SAA6B;AACtD,QAAI,YAAY;AACd,YAAM,IAAI;AAAA,QACR;AAAA,MACF;AAAA,IACF;AAEA,WAAO,gBAAgB,OAAO;AAAA,EAChC;AAEA,WAAS,gBAAgB;AACzB,WAAS,OAAO;AAChB,WAAS,YAAY;AACrB,WAAS,gBAAgB;AACzB,WAAS,qBAAqB;AAE9B,WAAS,aAAa,CAAC,YAAoB;AACzC,UAAM,IAAI,iBAAiB,EAAE,SAAS,WAAW,aAAa,CAAC;AAAA,EACjE;AAEA,SAAO;AACT;AAKO,IAAM,UAAU,cAAc;","names":["z","z","UnsupportedFunctionalityError","z","combineHeaders","createJsonResponseHandler","postJsonToApi","z","postJsonToApi","combineHeaders","createJsonResponseHandler","z"]}
|
|
1
|
+
{"version":3,"sources":["../src/mistral-provider.ts","../src/mistral-chat-language-model.ts","../src/convert-to-mistral-chat-messages.ts","../src/get-response-metadata.ts","../src/map-mistral-finish-reason.ts","../src/mistral-chat-options.ts","../src/mistral-error.ts","../src/mistral-prepare-tools.ts","../src/mistral-embedding-model.ts"],"sourcesContent":["import {\n EmbeddingModelV2,\n LanguageModelV2,\n NoSuchModelError,\n ProviderV2,\n} from '@ai-sdk/provider';\nimport {\n FetchFunction,\n loadApiKey,\n withoutTrailingSlash,\n} from '@ai-sdk/provider-utils';\nimport { MistralChatLanguageModel } from './mistral-chat-language-model';\nimport { MistralChatModelId } from './mistral-chat-options';\nimport { MistralEmbeddingModel } from './mistral-embedding-model';\nimport { MistralEmbeddingModelId } from './mistral-embedding-options';\n\nexport interface MistralProvider extends ProviderV2 {\n (modelId: MistralChatModelId): LanguageModelV2;\n\n /**\nCreates a model for text generation.\n*/\n languageModel(modelId: MistralChatModelId): LanguageModelV2;\n\n /**\nCreates a model for text generation.\n*/\n chat(modelId: MistralChatModelId): LanguageModelV2;\n\n /**\n@deprecated Use `textEmbedding()` instead.\n */\n embedding(modelId: MistralEmbeddingModelId): EmbeddingModelV2<string>;\n\n textEmbedding(modelId: MistralEmbeddingModelId): EmbeddingModelV2<string>;\n\n textEmbeddingModel: (\n modelId: MistralEmbeddingModelId,\n ) => EmbeddingModelV2<string>;\n}\n\nexport interface MistralProviderSettings {\n /**\nUse a different URL prefix for API calls, e.g. to use proxy servers.\nThe default prefix is `https://api.mistral.ai/v1`.\n */\n baseURL?: string;\n\n /**\nAPI key that is being send using the `Authorization` header.\nIt defaults to the `MISTRAL_API_KEY` environment variable.\n */\n apiKey?: string;\n\n /**\nCustom headers to include in the requests.\n */\n headers?: Record<string, string>;\n\n /**\nCustom fetch implementation. You can use it as a middleware to intercept requests,\nor to provide a custom fetch implementation for e.g. testing.\n */\n fetch?: FetchFunction;\n\n generateId?: () => string;\n}\n\n/**\nCreate a Mistral AI provider instance.\n */\nexport function createMistral(\n options: MistralProviderSettings = {},\n): MistralProvider {\n const baseURL =\n withoutTrailingSlash(options.baseURL) ?? 
'https://api.mistral.ai/v1';\n\n const getHeaders = () => ({\n Authorization: `Bearer ${loadApiKey({\n apiKey: options.apiKey,\n environmentVariableName: 'MISTRAL_API_KEY',\n description: 'Mistral',\n })}`,\n ...options.headers,\n });\n\n const createChatModel = (modelId: MistralChatModelId) =>\n new MistralChatLanguageModel(modelId, {\n provider: 'mistral.chat',\n baseURL,\n headers: getHeaders,\n fetch: options.fetch,\n generateId: options.generateId,\n });\n\n const createEmbeddingModel = (modelId: MistralEmbeddingModelId) =>\n new MistralEmbeddingModel(modelId, {\n provider: 'mistral.embedding',\n baseURL,\n headers: getHeaders,\n fetch: options.fetch,\n });\n\n const provider = function (modelId: MistralChatModelId) {\n if (new.target) {\n throw new Error(\n 'The Mistral model function cannot be called with the new keyword.',\n );\n }\n\n return createChatModel(modelId);\n };\n\n provider.languageModel = createChatModel;\n provider.chat = createChatModel;\n provider.embedding = createEmbeddingModel;\n provider.textEmbedding = createEmbeddingModel;\n provider.textEmbeddingModel = createEmbeddingModel;\n\n provider.imageModel = (modelId: string) => {\n throw new NoSuchModelError({ modelId, modelType: 'imageModel' });\n };\n\n return provider;\n}\n\n/**\nDefault Mistral provider instance.\n */\nexport const mistral = createMistral();\n","import {\n LanguageModelV2,\n LanguageModelV2CallWarning,\n LanguageModelV2Content,\n LanguageModelV2FinishReason,\n LanguageModelV2StreamPart,\n LanguageModelV2Usage,\n} from '@ai-sdk/provider';\nimport {\n combineHeaders,\n createEventSourceResponseHandler,\n createJsonResponseHandler,\n FetchFunction,\n generateId,\n injectJsonInstructionIntoMessages,\n parseProviderOptions,\n ParseResult,\n postJsonToApi,\n} from '@ai-sdk/provider-utils';\nimport { z } from 'zod/v4';\nimport { convertToMistralChatMessages } from './convert-to-mistral-chat-messages';\nimport { getResponseMetadata } from './get-response-metadata';\nimport { mapMistralFinishReason } from './map-mistral-finish-reason';\nimport {\n MistralChatModelId,\n mistralProviderOptions,\n} from './mistral-chat-options';\nimport { mistralFailedResponseHandler } from './mistral-error';\nimport { prepareTools } from './mistral-prepare-tools';\n\ntype MistralChatConfig = {\n provider: string;\n baseURL: string;\n headers: () => Record<string, string | undefined>;\n fetch?: FetchFunction;\n generateId?: () => string;\n};\n\nexport class MistralChatLanguageModel implements LanguageModelV2 {\n readonly specificationVersion = 'v2';\n\n readonly modelId: MistralChatModelId;\n\n private readonly config: MistralChatConfig;\n private readonly generateId: () => string;\n\n constructor(modelId: MistralChatModelId, config: MistralChatConfig) {\n this.modelId = modelId;\n this.config = config;\n this.generateId = config.generateId ?? generateId;\n }\n\n get provider(): string {\n return this.config.provider;\n }\n\n readonly supportedUrls: Record<string, RegExp[]> = {\n 'application/pdf': [/^https:\\/\\/.*$/],\n };\n\n private async getArgs({\n prompt,\n maxOutputTokens,\n temperature,\n topP,\n topK,\n frequencyPenalty,\n presencePenalty,\n stopSequences,\n responseFormat,\n seed,\n providerOptions,\n tools,\n toolChoice,\n }: Parameters<LanguageModelV2['doGenerate']>[0]) {\n const warnings: LanguageModelV2CallWarning[] = [];\n\n const options =\n (await parseProviderOptions({\n provider: 'mistral',\n providerOptions,\n schema: mistralProviderOptions,\n })) ?? 
{};\n\n if (topK != null) {\n warnings.push({\n type: 'unsupported-setting',\n setting: 'topK',\n });\n }\n\n if (frequencyPenalty != null) {\n warnings.push({\n type: 'unsupported-setting',\n setting: 'frequencyPenalty',\n });\n }\n\n if (presencePenalty != null) {\n warnings.push({\n type: 'unsupported-setting',\n setting: 'presencePenalty',\n });\n }\n\n if (stopSequences != null) {\n warnings.push({\n type: 'unsupported-setting',\n setting: 'stopSequences',\n });\n }\n\n // TODO remove when we have JSON schema support (see OpenAI implementation)\n if (\n responseFormat != null &&\n responseFormat.type === 'json' &&\n responseFormat.schema != null\n ) {\n warnings.push({\n type: 'unsupported-setting',\n setting: 'responseFormat',\n details: 'JSON response format schema is not supported',\n });\n }\n\n // For Mistral we need to need to instruct the model to return a JSON object.\n // https://docs.mistral.ai/capabilities/structured-output/structured_output_overview/\n if (responseFormat?.type === 'json') {\n prompt = injectJsonInstructionIntoMessages({\n messages: prompt,\n schema: responseFormat.schema,\n });\n }\n\n const baseArgs = {\n // model id:\n model: this.modelId,\n\n // model specific settings:\n safe_prompt: options.safePrompt,\n\n // standardized settings:\n max_tokens: maxOutputTokens,\n temperature,\n top_p: topP,\n random_seed: seed,\n\n // response format:\n // TODO add JSON schema support (see OpenAI implementation)\n response_format:\n responseFormat?.type === 'json' ? { type: 'json_object' } : undefined,\n\n // mistral-specific provider options:\n document_image_limit: options.documentImageLimit,\n document_page_limit: options.documentPageLimit,\n\n // messages:\n messages: convertToMistralChatMessages(prompt),\n };\n\n const {\n tools: mistralTools,\n toolChoice: mistralToolChoice,\n toolWarnings,\n } = prepareTools({\n tools,\n toolChoice,\n });\n\n return {\n args: {\n ...baseArgs,\n tools: mistralTools,\n tool_choice: mistralToolChoice,\n },\n warnings: [...warnings, ...toolWarnings],\n };\n }\n\n async doGenerate(\n options: Parameters<LanguageModelV2['doGenerate']>[0],\n ): Promise<Awaited<ReturnType<LanguageModelV2['doGenerate']>>> {\n const { args: body, warnings } = await this.getArgs(options);\n\n const {\n responseHeaders,\n value: response,\n rawValue: rawResponse,\n } = await postJsonToApi({\n url: `${this.config.baseURL}/chat/completions`,\n headers: combineHeaders(this.config.headers(), options.headers),\n body,\n failedResponseHandler: mistralFailedResponseHandler,\n successfulResponseHandler: createJsonResponseHandler(\n mistralChatResponseSchema,\n ),\n abortSignal: options.abortSignal,\n fetch: this.config.fetch,\n });\n\n const choice = response.choices[0];\n const content: Array<LanguageModelV2Content> = [];\n\n // process content parts in order to preserve sequence\n if (\n choice.message.content != null &&\n Array.isArray(choice.message.content)\n ) {\n for (const part of choice.message.content) {\n if (part.type === 'thinking') {\n const reasoningText = extractReasoningContent(part.thinking);\n if (reasoningText.length > 0) {\n content.push({ type: 'reasoning', text: reasoningText });\n }\n } else if (part.type === 'text') {\n if (part.text.length > 0) {\n content.push({ type: 'text', text: part.text });\n }\n }\n }\n } else {\n // handle legacy string content\n const text = extractTextContent(choice.message.content);\n if (text != null && text.length > 0) {\n content.push({ type: 'text', text });\n }\n }\n\n // when there is a trailing 
assistant message, mistral will send the\n // content of that message again. we skip this repeated content to\n // avoid duplication, e.g. in continuation mode.\n\n // tool calls:\n if (choice.message.tool_calls != null) {\n for (const toolCall of choice.message.tool_calls) {\n content.push({\n type: 'tool-call',\n toolCallId: toolCall.id,\n toolName: toolCall.function.name,\n input: toolCall.function.arguments!,\n });\n }\n }\n\n return {\n content,\n finishReason: mapMistralFinishReason(choice.finish_reason),\n usage: {\n inputTokens: response.usage.prompt_tokens,\n outputTokens: response.usage.completion_tokens,\n totalTokens: response.usage.total_tokens,\n },\n request: { body },\n response: {\n ...getResponseMetadata(response),\n headers: responseHeaders,\n body: rawResponse,\n },\n warnings,\n };\n }\n\n async doStream(\n options: Parameters<LanguageModelV2['doStream']>[0],\n ): Promise<Awaited<ReturnType<LanguageModelV2['doStream']>>> {\n const { args, warnings } = await this.getArgs(options);\n const body = { ...args, stream: true };\n\n const { responseHeaders, value: response } = await postJsonToApi({\n url: `${this.config.baseURL}/chat/completions`,\n headers: combineHeaders(this.config.headers(), options.headers),\n body,\n failedResponseHandler: mistralFailedResponseHandler,\n successfulResponseHandler: createEventSourceResponseHandler(\n mistralChatChunkSchema,\n ),\n abortSignal: options.abortSignal,\n fetch: this.config.fetch,\n });\n\n let finishReason: LanguageModelV2FinishReason = 'unknown';\n const usage: LanguageModelV2Usage = {\n inputTokens: undefined,\n outputTokens: undefined,\n totalTokens: undefined,\n };\n\n let isFirstChunk = true;\n let activeText = false;\n let activeReasoningId: string | null = null;\n\n const generateId = this.generateId;\n\n return {\n stream: response.pipeThrough(\n new TransformStream<\n ParseResult<z.infer<typeof mistralChatChunkSchema>>,\n LanguageModelV2StreamPart\n >({\n start(controller) {\n controller.enqueue({ type: 'stream-start', warnings });\n },\n\n transform(chunk, controller) {\n // Emit raw chunk if requested (before anything else)\n if (options.includeRawChunks) {\n controller.enqueue({ type: 'raw', rawValue: chunk.rawValue });\n }\n\n if (!chunk.success) {\n controller.enqueue({ type: 'error', error: chunk.error });\n return;\n }\n\n const value = chunk.value;\n\n if (isFirstChunk) {\n isFirstChunk = false;\n\n controller.enqueue({\n type: 'response-metadata',\n ...getResponseMetadata(value),\n });\n }\n\n if (value.usage != null) {\n usage.inputTokens = value.usage.prompt_tokens;\n usage.outputTokens = value.usage.completion_tokens;\n usage.totalTokens = value.usage.total_tokens;\n }\n\n const choice = value.choices[0];\n const delta = choice.delta;\n\n const textContent = extractTextContent(delta.content);\n\n if (delta.content != null && Array.isArray(delta.content)) {\n for (const part of delta.content) {\n if (part.type === 'thinking') {\n const reasoningDelta = extractReasoningContent(part.thinking);\n if (reasoningDelta.length > 0) {\n if (activeReasoningId == null) {\n // end any active text before starting reasoning\n if (activeText) {\n controller.enqueue({ type: 'text-end', id: '0' });\n activeText = false;\n }\n\n activeReasoningId = generateId();\n controller.enqueue({\n type: 'reasoning-start',\n id: activeReasoningId,\n });\n }\n controller.enqueue({\n type: 'reasoning-delta',\n id: activeReasoningId,\n delta: reasoningDelta,\n });\n }\n }\n }\n }\n\n if (textContent != null && textContent.length > 0) {\n 
if (!activeText) {\n // if we were in reasoning mode, end it before starting text\n if (activeReasoningId != null) {\n controller.enqueue({\n type: 'reasoning-end',\n id: activeReasoningId,\n });\n activeReasoningId = null;\n }\n controller.enqueue({ type: 'text-start', id: '0' });\n activeText = true;\n }\n\n controller.enqueue({\n type: 'text-delta',\n id: '0',\n delta: textContent,\n });\n }\n\n if (delta?.tool_calls != null) {\n for (const toolCall of delta.tool_calls) {\n const toolCallId = toolCall.id;\n const toolName = toolCall.function.name;\n const input = toolCall.function.arguments;\n\n controller.enqueue({\n type: 'tool-input-start',\n id: toolCallId,\n toolName,\n });\n\n controller.enqueue({\n type: 'tool-input-delta',\n id: toolCallId,\n delta: input,\n });\n\n controller.enqueue({\n type: 'tool-input-end',\n id: toolCallId,\n });\n\n controller.enqueue({\n type: 'tool-call',\n toolCallId,\n toolName,\n input,\n });\n }\n }\n\n if (choice.finish_reason != null) {\n finishReason = mapMistralFinishReason(choice.finish_reason);\n }\n },\n\n flush(controller) {\n if (activeReasoningId != null) {\n controller.enqueue({\n type: 'reasoning-end',\n id: activeReasoningId,\n });\n }\n if (activeText) {\n controller.enqueue({ type: 'text-end', id: '0' });\n }\n\n controller.enqueue({\n type: 'finish',\n finishReason,\n usage,\n });\n },\n }),\n ),\n request: { body },\n response: { headers: responseHeaders },\n };\n }\n}\n\nfunction extractReasoningContent(\n thinking: Array<{ type: string; text: string }>,\n) {\n return thinking\n .filter(chunk => chunk.type === 'text')\n .map(chunk => chunk.text)\n .join('');\n}\n\nfunction extractTextContent(content: z.infer<typeof mistralContentSchema>) {\n if (typeof content === 'string') {\n return content;\n }\n\n if (content == null) {\n return undefined;\n }\n\n const textContent: string[] = [];\n\n for (const chunk of content) {\n const { type } = chunk;\n\n switch (type) {\n case 'text':\n textContent.push(chunk.text);\n break;\n case 'thinking':\n case 'image_url':\n case 'reference':\n // thinking, image content, and reference content are currently ignored\n break;\n default: {\n const _exhaustiveCheck: never = type;\n throw new Error(`Unsupported type: ${_exhaustiveCheck}`);\n }\n }\n }\n\n return textContent.length ? 
textContent.join('') : undefined;\n}\n\nconst mistralContentSchema = z\n .union([\n z.string(),\n z.array(\n z.discriminatedUnion('type', [\n z.object({\n type: z.literal('text'),\n text: z.string(),\n }),\n z.object({\n type: z.literal('image_url'),\n image_url: z.union([\n z.string(),\n z.object({\n url: z.string(),\n detail: z.string().nullable(),\n }),\n ]),\n }),\n z.object({\n type: z.literal('reference'),\n reference_ids: z.array(z.number()),\n }),\n z.object({\n type: z.literal('thinking'),\n thinking: z.array(\n z.object({\n type: z.literal('text'),\n text: z.string(),\n }),\n ),\n }),\n ]),\n ),\n ])\n .nullish();\n\nconst mistralUsageSchema = z.object({\n prompt_tokens: z.number(),\n completion_tokens: z.number(),\n total_tokens: z.number(),\n});\n\n// limited version of the schema, focussed on what is needed for the implementation\n// this approach limits breakages when the API changes and increases efficiency\nconst mistralChatResponseSchema = z.object({\n id: z.string().nullish(),\n created: z.number().nullish(),\n model: z.string().nullish(),\n choices: z.array(\n z.object({\n message: z.object({\n role: z.literal('assistant'),\n content: mistralContentSchema,\n tool_calls: z\n .array(\n z.object({\n id: z.string(),\n function: z.object({ name: z.string(), arguments: z.string() }),\n }),\n )\n .nullish(),\n }),\n index: z.number(),\n finish_reason: z.string().nullish(),\n }),\n ),\n object: z.literal('chat.completion'),\n usage: mistralUsageSchema,\n});\n\n// limited version of the schema, focussed on what is needed for the implementation\n// this approach limits breakages when the API changes and increases efficiency\nconst mistralChatChunkSchema = z.object({\n id: z.string().nullish(),\n created: z.number().nullish(),\n model: z.string().nullish(),\n choices: z.array(\n z.object({\n delta: z.object({\n role: z.enum(['assistant']).optional(),\n content: mistralContentSchema,\n tool_calls: z\n .array(\n z.object({\n id: z.string(),\n function: z.object({ name: z.string(), arguments: z.string() }),\n }),\n )\n .nullish(),\n }),\n finish_reason: z.string().nullish(),\n index: z.number(),\n }),\n ),\n usage: mistralUsageSchema.nullish(),\n});\n","import {\n LanguageModelV2Prompt,\n UnsupportedFunctionalityError,\n} from '@ai-sdk/provider';\nimport { MistralPrompt } from './mistral-chat-prompt';\nimport { convertToBase64 } from '@ai-sdk/provider-utils';\n\nexport function convertToMistralChatMessages(\n prompt: LanguageModelV2Prompt,\n): MistralPrompt {\n const messages: MistralPrompt = [];\n\n for (let i = 0; i < prompt.length; i++) {\n const { role, content } = prompt[i];\n const isLastMessage = i === prompt.length - 1;\n\n switch (role) {\n case 'system': {\n messages.push({ role: 'system', content });\n break;\n }\n\n case 'user': {\n messages.push({\n role: 'user',\n content: content.map(part => {\n switch (part.type) {\n case 'text': {\n return { type: 'text', text: part.text };\n }\n\n case 'file': {\n if (part.mediaType.startsWith('image/')) {\n const mediaType =\n part.mediaType === 'image/*'\n ? 'image/jpeg'\n : part.mediaType;\n\n return {\n type: 'image_url',\n image_url:\n part.data instanceof URL\n ? 
part.data.toString()\n : `data:${mediaType};base64,${convertToBase64(part.data)}`,\n };\n } else if (part.mediaType === 'application/pdf') {\n return {\n type: 'document_url',\n document_url: part.data.toString(),\n };\n } else {\n throw new UnsupportedFunctionalityError({\n functionality:\n 'Only images and PDF file parts are supported',\n });\n }\n }\n }\n }),\n });\n break;\n }\n\n case 'assistant': {\n let text = '';\n const toolCalls: Array<{\n id: string;\n type: 'function';\n function: { name: string; arguments: string };\n }> = [];\n\n for (const part of content) {\n switch (part.type) {\n case 'text': {\n text += part.text;\n break;\n }\n case 'tool-call': {\n toolCalls.push({\n id: part.toolCallId,\n type: 'function',\n function: {\n name: part.toolName,\n arguments: JSON.stringify(part.input),\n },\n });\n break;\n }\n case 'reasoning': {\n text += part.text;\n break;\n }\n default: {\n throw new Error(\n `Unsupported content type in assistant message: ${part.type}`,\n );\n }\n }\n }\n\n messages.push({\n role: 'assistant',\n content: text,\n prefix: isLastMessage ? true : undefined,\n tool_calls: toolCalls.length > 0 ? toolCalls : undefined,\n });\n\n break;\n }\n case 'tool': {\n for (const toolResponse of content) {\n const output = toolResponse.output;\n\n let contentValue: string;\n switch (output.type) {\n case 'text':\n case 'error-text':\n contentValue = output.value;\n break;\n case 'content':\n case 'json':\n case 'error-json':\n contentValue = JSON.stringify(output.value);\n break;\n }\n\n messages.push({\n role: 'tool',\n name: toolResponse.toolName,\n tool_call_id: toolResponse.toolCallId,\n content: contentValue,\n });\n }\n break;\n }\n default: {\n const _exhaustiveCheck: never = role;\n throw new Error(`Unsupported role: ${_exhaustiveCheck}`);\n }\n }\n }\n\n return messages;\n}\n","export function getResponseMetadata({\n id,\n model,\n created,\n}: {\n id?: string | undefined | null;\n created?: number | undefined | null;\n model?: string | undefined | null;\n}) {\n return {\n id: id ?? undefined,\n modelId: model ?? undefined,\n timestamp: created != null ? 
new Date(created * 1000) : undefined,\n };\n}\n","import { LanguageModelV2FinishReason } from '@ai-sdk/provider';\n\nexport function mapMistralFinishReason(\n finishReason: string | null | undefined,\n): LanguageModelV2FinishReason {\n switch (finishReason) {\n case 'stop':\n return 'stop';\n case 'length':\n case 'model_length':\n return 'length';\n case 'tool_calls':\n return 'tool-calls';\n default:\n return 'unknown';\n }\n}\n","import { z } from 'zod/v4';\n\n// https://docs.mistral.ai/getting-started/models/models_overview/\nexport type MistralChatModelId =\n // premier\n | 'ministral-3b-latest'\n | 'ministral-8b-latest'\n | 'mistral-large-latest'\n | 'mistral-medium-latest'\n | 'mistral-medium-2508'\n | 'mistral-medium-2505'\n | 'mistral-small-latest'\n | 'pixtral-large-latest'\n // reasoning models\n | 'magistral-small-2507'\n | 'magistral-medium-2507'\n | 'magistral-small-2506'\n | 'magistral-medium-2506'\n // free\n | 'pixtral-12b-2409'\n // legacy\n | 'open-mistral-7b'\n | 'open-mixtral-8x7b'\n | 'open-mixtral-8x22b'\n | (string & {});\n\nexport const mistralProviderOptions = z.object({\n /**\nWhether to inject a safety prompt before all conversations.\n\nDefaults to `false`.\n */\n safePrompt: z.boolean().optional(),\n\n documentImageLimit: z.number().optional(),\n documentPageLimit: z.number().optional(),\n});\n\nexport type MistralProviderOptions = z.infer<typeof mistralProviderOptions>;\n","import { createJsonErrorResponseHandler } from '@ai-sdk/provider-utils';\nimport { z } from 'zod/v4';\n\nconst mistralErrorDataSchema = z.object({\n object: z.literal('error'),\n message: z.string(),\n type: z.string(),\n param: z.string().nullable(),\n code: z.string().nullable(),\n});\n\nexport type MistralErrorData = z.infer<typeof mistralErrorDataSchema>;\n\nexport const mistralFailedResponseHandler = createJsonErrorResponseHandler({\n errorSchema: mistralErrorDataSchema,\n errorToMessage: data => data.message,\n});\n","import {\n LanguageModelV2CallOptions,\n LanguageModelV2CallWarning,\n UnsupportedFunctionalityError,\n} from '@ai-sdk/provider';\nimport { MistralToolChoice } from './mistral-chat-prompt';\n\nexport function prepareTools({\n tools,\n toolChoice,\n}: {\n tools: LanguageModelV2CallOptions['tools'];\n toolChoice?: LanguageModelV2CallOptions['toolChoice'];\n}): {\n tools:\n | Array<{\n type: 'function';\n function: {\n name: string;\n description: string | undefined;\n parameters: unknown;\n };\n }>\n | undefined;\n toolChoice: MistralToolChoice | undefined;\n toolWarnings: LanguageModelV2CallWarning[];\n} {\n // when the tools array is empty, change it to undefined to prevent errors:\n tools = tools?.length ? 
tools : undefined;\n\n const toolWarnings: LanguageModelV2CallWarning[] = [];\n\n if (tools == null) {\n return { tools: undefined, toolChoice: undefined, toolWarnings };\n }\n\n const mistralTools: Array<{\n type: 'function';\n function: {\n name: string;\n description: string | undefined;\n parameters: unknown;\n };\n }> = [];\n\n for (const tool of tools) {\n if (tool.type === 'provider-defined') {\n toolWarnings.push({ type: 'unsupported-tool', tool });\n } else {\n mistralTools.push({\n type: 'function',\n function: {\n name: tool.name,\n description: tool.description,\n parameters: tool.inputSchema,\n },\n });\n }\n }\n\n if (toolChoice == null) {\n return { tools: mistralTools, toolChoice: undefined, toolWarnings };\n }\n\n const type = toolChoice.type;\n\n switch (type) {\n case 'auto':\n case 'none':\n return { tools: mistralTools, toolChoice: type, toolWarnings };\n case 'required':\n return { tools: mistralTools, toolChoice: 'any', toolWarnings };\n\n // mistral does not support tool mode directly,\n // so we filter the tools and force the tool choice through 'any'\n case 'tool':\n return {\n tools: mistralTools.filter(\n tool => tool.function.name === toolChoice.toolName,\n ),\n toolChoice: 'any',\n toolWarnings,\n };\n default: {\n const _exhaustiveCheck: never = type;\n throw new UnsupportedFunctionalityError({\n functionality: `tool choice type: ${_exhaustiveCheck}`,\n });\n }\n }\n}\n","import {\n EmbeddingModelV2,\n TooManyEmbeddingValuesForCallError,\n} from '@ai-sdk/provider';\nimport {\n combineHeaders,\n createJsonResponseHandler,\n FetchFunction,\n postJsonToApi,\n} from '@ai-sdk/provider-utils';\nimport { z } from 'zod/v4';\nimport { MistralEmbeddingModelId } from './mistral-embedding-options';\nimport { mistralFailedResponseHandler } from './mistral-error';\n\ntype MistralEmbeddingConfig = {\n provider: string;\n baseURL: string;\n headers: () => Record<string, string | undefined>;\n fetch?: FetchFunction;\n};\n\nexport class MistralEmbeddingModel implements EmbeddingModelV2<string> {\n readonly specificationVersion = 'v2';\n readonly modelId: MistralEmbeddingModelId;\n readonly maxEmbeddingsPerCall = 32;\n readonly supportsParallelCalls = false;\n\n private readonly config: MistralEmbeddingConfig;\n\n get provider(): string {\n return this.config.provider;\n }\n\n constructor(\n modelId: MistralEmbeddingModelId,\n config: MistralEmbeddingConfig,\n ) {\n this.modelId = modelId;\n this.config = config;\n }\n\n async doEmbed({\n values,\n abortSignal,\n headers,\n }: Parameters<EmbeddingModelV2<string>['doEmbed']>[0]): Promise<\n Awaited<ReturnType<EmbeddingModelV2<string>['doEmbed']>>\n > {\n if (values.length > this.maxEmbeddingsPerCall) {\n throw new TooManyEmbeddingValuesForCallError({\n provider: this.provider,\n modelId: this.modelId,\n maxEmbeddingsPerCall: this.maxEmbeddingsPerCall,\n values,\n });\n }\n\n const {\n responseHeaders,\n value: response,\n rawValue,\n } = await postJsonToApi({\n url: `${this.config.baseURL}/embeddings`,\n headers: combineHeaders(this.config.headers(), headers),\n body: {\n model: this.modelId,\n input: values,\n encoding_format: 'float',\n },\n failedResponseHandler: mistralFailedResponseHandler,\n successfulResponseHandler: createJsonResponseHandler(\n MistralTextEmbeddingResponseSchema,\n ),\n abortSignal,\n fetch: this.config.fetch,\n });\n\n return {\n embeddings: response.data.map(item => item.embedding),\n usage: response.usage\n ? 
{ tokens: response.usage.prompt_tokens }\n : undefined,\n response: { headers: responseHeaders, body: rawValue },\n };\n }\n}\n\n// minimal version of the schema, focussed on what is needed for the implementation\n// this approach limits breakages when the API changes and increases efficiency\nconst MistralTextEmbeddingResponseSchema = z.object({\n data: z.array(z.object({ embedding: z.array(z.number()) })),\n usage: z.object({ prompt_tokens: z.number() }).nullish(),\n});\n"],"mappings":";AAAA;AAAA,EAGE;AAAA,OAEK;AACP;AAAA,EAEE;AAAA,EACA;AAAA,OACK;;;ACFP;AAAA,EACE;AAAA,EACA;AAAA,EACA;AAAA,EAEA;AAAA,EACA;AAAA,EACA;AAAA,EAEA;AAAA,OACK;AACP,SAAS,KAAAA,UAAS;;;ACnBlB;AAAA,EAEE;AAAA,OACK;AAEP,SAAS,uBAAuB;AAEzB,SAAS,6BACd,QACe;AACf,QAAM,WAA0B,CAAC;AAEjC,WAAS,IAAI,GAAG,IAAI,OAAO,QAAQ,KAAK;AACtC,UAAM,EAAE,MAAM,QAAQ,IAAI,OAAO,CAAC;AAClC,UAAM,gBAAgB,MAAM,OAAO,SAAS;AAE5C,YAAQ,MAAM;AAAA,MACZ,KAAK,UAAU;AACb,iBAAS,KAAK,EAAE,MAAM,UAAU,QAAQ,CAAC;AACzC;AAAA,MACF;AAAA,MAEA,KAAK,QAAQ;AACX,iBAAS,KAAK;AAAA,UACZ,MAAM;AAAA,UACN,SAAS,QAAQ,IAAI,UAAQ;AAC3B,oBAAQ,KAAK,MAAM;AAAA,cACjB,KAAK,QAAQ;AACX,uBAAO,EAAE,MAAM,QAAQ,MAAM,KAAK,KAAK;AAAA,cACzC;AAAA,cAEA,KAAK,QAAQ;AACX,oBAAI,KAAK,UAAU,WAAW,QAAQ,GAAG;AACvC,wBAAM,YACJ,KAAK,cAAc,YACf,eACA,KAAK;AAEX,yBAAO;AAAA,oBACL,MAAM;AAAA,oBACN,WACE,KAAK,gBAAgB,MACjB,KAAK,KAAK,SAAS,IACnB,QAAQ,SAAS,WAAW,gBAAgB,KAAK,IAAI,CAAC;AAAA,kBAC9D;AAAA,gBACF,WAAW,KAAK,cAAc,mBAAmB;AAC/C,yBAAO;AAAA,oBACL,MAAM;AAAA,oBACN,cAAc,KAAK,KAAK,SAAS;AAAA,kBACnC;AAAA,gBACF,OAAO;AACL,wBAAM,IAAI,8BAA8B;AAAA,oBACtC,eACE;AAAA,kBACJ,CAAC;AAAA,gBACH;AAAA,cACF;AAAA,YACF;AAAA,UACF,CAAC;AAAA,QACH,CAAC;AACD;AAAA,MACF;AAAA,MAEA,KAAK,aAAa;AAChB,YAAI,OAAO;AACX,cAAM,YAID,CAAC;AAEN,mBAAW,QAAQ,SAAS;AAC1B,kBAAQ,KAAK,MAAM;AAAA,YACjB,KAAK,QAAQ;AACX,sBAAQ,KAAK;AACb;AAAA,YACF;AAAA,YACA,KAAK,aAAa;AAChB,wBAAU,KAAK;AAAA,gBACb,IAAI,KAAK;AAAA,gBACT,MAAM;AAAA,gBACN,UAAU;AAAA,kBACR,MAAM,KAAK;AAAA,kBACX,WAAW,KAAK,UAAU,KAAK,KAAK;AAAA,gBACtC;AAAA,cACF,CAAC;AACD;AAAA,YACF;AAAA,YACA,KAAK,aAAa;AAChB,sBAAQ,KAAK;AACb;AAAA,YACF;AAAA,YACA,SAAS;AACP,oBAAM,IAAI;AAAA,gBACR,kDAAkD,KAAK,IAAI;AAAA,cAC7D;AAAA,YACF;AAAA,UACF;AAAA,QACF;AAEA,iBAAS,KAAK;AAAA,UACZ,MAAM;AAAA,UACN,SAAS;AAAA,UACT,QAAQ,gBAAgB,OAAO;AAAA,UAC/B,YAAY,UAAU,SAAS,IAAI,YAAY;AAAA,QACjD,CAAC;AAED;AAAA,MACF;AAAA,MACA,KAAK,QAAQ;AACX,mBAAW,gBAAgB,SAAS;AAClC,gBAAM,SAAS,aAAa;AAE5B,cAAI;AACJ,kBAAQ,OAAO,MAAM;AAAA,YACnB,KAAK;AAAA,YACL,KAAK;AACH,6BAAe,OAAO;AACtB;AAAA,YACF,KAAK;AAAA,YACL,KAAK;AAAA,YACL,KAAK;AACH,6BAAe,KAAK,UAAU,OAAO,KAAK;AAC1C;AAAA,UACJ;AAEA,mBAAS,KAAK;AAAA,YACZ,MAAM;AAAA,YACN,MAAM,aAAa;AAAA,YACnB,cAAc,aAAa;AAAA,YAC3B,SAAS;AAAA,UACX,CAAC;AAAA,QACH;AACA;AAAA,MACF;AAAA,MACA,SAAS;AACP,cAAM,mBAA0B;AAChC,cAAM,IAAI,MAAM,qBAAqB,gBAAgB,EAAE;AAAA,MACzD;AAAA,IACF;AAAA,EACF;AAEA,SAAO;AACT;;;AC/IO,SAAS,oBAAoB;AAAA,EAClC;AAAA,EACA;AAAA,EACA;AACF,GAIG;AACD,SAAO;AAAA,IACL,IAAI,kBAAM;AAAA,IACV,SAAS,wBAAS;AAAA,IAClB,WAAW,WAAW,OAAO,IAAI,KAAK,UAAU,GAAI,IAAI;AAAA,EAC1D;AACF;;;ACZO,SAAS,uBACd,cAC6B;AAC7B,UAAQ,cAAc;AAAA,IACpB,KAAK;AACH,aAAO;AAAA,IACT,KAAK;AAAA,IACL,KAAK;AACH,aAAO;AAAA,IACT,KAAK;AACH,aAAO;AAAA,IACT;AACE,aAAO;AAAA,EACX;AACF;;;AChBA,SAAS,SAAS;AA0BX,IAAM,yBAAyB,EAAE,OAAO;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,EAM7C,YAAY,EAAE,QAAQ,EAAE,SAAS;AAAA,EAEjC,oBAAoB,EAAE,OAAO,EAAE,SAAS;AAAA,EACxC,mBAAmB,EAAE,OAAO,EAAE,SAAS;AACzC,CAAC;;;ACpCD,SAAS,sCAAsC;AAC/C,SAAS,KAAAC,UAAS;AAElB,IAAM,yBAAyBA,GAAE,OAAO;AAAA,EACtC,QAAQA,GAAE,QAAQ,OAAO;AAAA,EACzB,SAASA,GAAE,OAAO;AAAA,EAClB,MAAMA,GAAE,OAAO;AAAA,EACf,OAAOA,GAAE,OAAO,EAAE,SAAS;AAAA,EAC3B,MAAMA,GAAE,OAAO,EAAE,SAAS;AAC5B,CAAC;AAIM,IAAM,+BAA+B,+BAA+B;AAAA,EACzE,aAA
a;AAAA,EACb,gBAAgB,UAAQ,KAAK;AAC/B,CAAC;;;AChBD;AAAA,EAGE,iCAAAC;AAAA,OACK;AAGA,SAAS,aAAa;AAAA,EAC3B;AAAA,EACA;AACF,GAgBE;AAEA,WAAQ,+BAAO,UAAS,QAAQ;AAEhC,QAAM,eAA6C,CAAC;AAEpD,MAAI,SAAS,MAAM;AACjB,WAAO,EAAE,OAAO,QAAW,YAAY,QAAW,aAAa;AAAA,EACjE;AAEA,QAAM,eAOD,CAAC;AAEN,aAAW,QAAQ,OAAO;AACxB,QAAI,KAAK,SAAS,oBAAoB;AACpC,mBAAa,KAAK,EAAE,MAAM,oBAAoB,KAAK,CAAC;AAAA,IACtD,OAAO;AACL,mBAAa,KAAK;AAAA,QAChB,MAAM;AAAA,QACN,UAAU;AAAA,UACR,MAAM,KAAK;AAAA,UACX,aAAa,KAAK;AAAA,UAClB,YAAY,KAAK;AAAA,QACnB;AAAA,MACF,CAAC;AAAA,IACH;AAAA,EACF;AAEA,MAAI,cAAc,MAAM;AACtB,WAAO,EAAE,OAAO,cAAc,YAAY,QAAW,aAAa;AAAA,EACpE;AAEA,QAAM,OAAO,WAAW;AAExB,UAAQ,MAAM;AAAA,IACZ,KAAK;AAAA,IACL,KAAK;AACH,aAAO,EAAE,OAAO,cAAc,YAAY,MAAM,aAAa;AAAA,IAC/D,KAAK;AACH,aAAO,EAAE,OAAO,cAAc,YAAY,OAAO,aAAa;AAAA,IAIhE,KAAK;AACH,aAAO;AAAA,QACL,OAAO,aAAa;AAAA,UAClB,UAAQ,KAAK,SAAS,SAAS,WAAW;AAAA,QAC5C;AAAA,QACA,YAAY;AAAA,QACZ;AAAA,MACF;AAAA,IACF,SAAS;AACP,YAAM,mBAA0B;AAChC,YAAM,IAAIA,+BAA8B;AAAA,QACtC,eAAe,qBAAqB,gBAAgB;AAAA,MACtD,CAAC;AAAA,IACH;AAAA,EACF;AACF;;;ANpDO,IAAM,2BAAN,MAA0D;AAAA,EAQ/D,YAAY,SAA6B,QAA2B;AAPpE,SAAS,uBAAuB;AAiBhC,SAAS,gBAA0C;AAAA,MACjD,mBAAmB,CAAC,gBAAgB;AAAA,IACtC;AA1DF;AA+CI,SAAK,UAAU;AACf,SAAK,SAAS;AACd,SAAK,cAAa,YAAO,eAAP,YAAqB;AAAA,EACzC;AAAA,EAEA,IAAI,WAAmB;AACrB,WAAO,KAAK,OAAO;AAAA,EACrB;AAAA,EAMA,MAAc,QAAQ;AAAA,IACpB;AAAA,IACA;AAAA,IACA;AAAA,IACA;AAAA,IACA;AAAA,IACA;AAAA,IACA;AAAA,IACA;AAAA,IACA;AAAA,IACA;AAAA,IACA;AAAA,IACA;AAAA,IACA;AAAA,EACF,GAAiD;AA1EnD;AA2EI,UAAM,WAAyC,CAAC;AAEhD,UAAM,WACH,WAAM,qBAAqB;AAAA,MAC1B,UAAU;AAAA,MACV;AAAA,MACA,QAAQ;AAAA,IACV,CAAC,MAJA,YAIM,CAAC;AAEV,QAAI,QAAQ,MAAM;AAChB,eAAS,KAAK;AAAA,QACZ,MAAM;AAAA,QACN,SAAS;AAAA,MACX,CAAC;AAAA,IACH;AAEA,QAAI,oBAAoB,MAAM;AAC5B,eAAS,KAAK;AAAA,QACZ,MAAM;AAAA,QACN,SAAS;AAAA,MACX,CAAC;AAAA,IACH;AAEA,QAAI,mBAAmB,MAAM;AAC3B,eAAS,KAAK;AAAA,QACZ,MAAM;AAAA,QACN,SAAS;AAAA,MACX,CAAC;AAAA,IACH;AAEA,QAAI,iBAAiB,MAAM;AACzB,eAAS,KAAK;AAAA,QACZ,MAAM;AAAA,QACN,SAAS;AAAA,MACX,CAAC;AAAA,IACH;AAGA,QACE,kBAAkB,QAClB,eAAe,SAAS,UACxB,eAAe,UAAU,MACzB;AACA,eAAS,KAAK;AAAA,QACZ,MAAM;AAAA,QACN,SAAS;AAAA,QACT,SAAS;AAAA,MACX,CAAC;AAAA,IACH;AAIA,SAAI,iDAAgB,UAAS,QAAQ;AACnC,eAAS,kCAAkC;AAAA,QACzC,UAAU;AAAA,QACV,QAAQ,eAAe;AAAA,MACzB,CAAC;AAAA,IACH;AAEA,UAAM,WAAW;AAAA;AAAA,MAEf,OAAO,KAAK;AAAA;AAAA,MAGZ,aAAa,QAAQ;AAAA;AAAA,MAGrB,YAAY;AAAA,MACZ;AAAA,MACA,OAAO;AAAA,MACP,aAAa;AAAA;AAAA;AAAA,MAIb,kBACE,iDAAgB,UAAS,SAAS,EAAE,MAAM,cAAc,IAAI;AAAA;AAAA,MAG9D,sBAAsB,QAAQ;AAAA,MAC9B,qBAAqB,QAAQ;AAAA;AAAA,MAG7B,UAAU,6BAA6B,MAAM;AAAA,IAC/C;AAEA,UAAM;AAAA,MACJ,OAAO;AAAA,MACP,YAAY;AAAA,MACZ;AAAA,IACF,IAAI,aAAa;AAAA,MACf;AAAA,MACA;AAAA,IACF,CAAC;AAED,WAAO;AAAA,MACL,MAAM;AAAA,QACJ,GAAG;AAAA,QACH,OAAO;AAAA,QACP,aAAa;AAAA,MACf;AAAA,MACA,UAAU,CAAC,GAAG,UAAU,GAAG,YAAY;AAAA,IACzC;AAAA,EACF;AAAA,EAEA,MAAM,WACJ,SAC6D;AAC7D,UAAM,EAAE,MAAM,MAAM,SAAS,IAAI,MAAM,KAAK,QAAQ,OAAO;AAE3D,UAAM;AAAA,MACJ;AAAA,MACA,OAAO;AAAA,MACP,UAAU;AAAA,IACZ,IAAI,MAAM,cAAc;AAAA,MACtB,KAAK,GAAG,KAAK,OAAO,OAAO;AAAA,MAC3B,SAAS,eAAe,KAAK,OAAO,QAAQ,GAAG,QAAQ,OAAO;AAAA,MAC9D;AAAA,MACA,uBAAuB;AAAA,MACvB,2BAA2B;AAAA,QACzB;AAAA,MACF;AAAA,MACA,aAAa,QAAQ;AAAA,MACrB,OAAO,KAAK,OAAO;AAAA,IACrB,CAAC;AAED,UAAM,SAAS,SAAS,QAAQ,CAAC;AACjC,UAAM,UAAyC,CAAC;AAGhD,QACE,OAAO,QAAQ,WAAW,QAC1B,MAAM,QAAQ,OAAO,QAAQ,OAAO,GACpC;AACA,iBAAW,QAAQ,OAAO,QAAQ,SAAS;AACzC,YAAI,KAAK,SAAS,YAAY;AAC5B,gBAAM,gBAAgB,wBAAwB,KAAK,QAAQ;AAC3D,cAAI,cAAc,SAAS,GAAG;AAC5B,oBAAQ,KAAK,EAAE,MAAM,aAAa,MAAM,cAAc,CAAC;AAAA,UACzD;AAAA,QACF,WAAW,KAAK,SAAS,QAAQ;AAC/B,cAAI,KAAK,KAAK,SAAS,GAAG;AACxB,oBAAQ,KAAK,EAAE,MAAM,QAAQ,MAAM,KAAK,KAAK,CAAC;AAAA,UAChD;AAAA,QACF;AAAA,MACF;AAAA,IACF,OAAO;AAEL,YAAM,O
AAO,mBAAmB,OAAO,QAAQ,OAAO;AACtD,UAAI,QAAQ,QAAQ,KAAK,SAAS,GAAG;AACnC,gBAAQ,KAAK,EAAE,MAAM,QAAQ,KAAK,CAAC;AAAA,MACrC;AAAA,IACF;AAOA,QAAI,OAAO,QAAQ,cAAc,MAAM;AACrC,iBAAW,YAAY,OAAO,QAAQ,YAAY;AAChD,gBAAQ,KAAK;AAAA,UACX,MAAM;AAAA,UACN,YAAY,SAAS;AAAA,UACrB,UAAU,SAAS,SAAS;AAAA,UAC5B,OAAO,SAAS,SAAS;AAAA,QAC3B,CAAC;AAAA,MACH;AAAA,IACF;AAEA,WAAO;AAAA,MACL;AAAA,MACA,cAAc,uBAAuB,OAAO,aAAa;AAAA,MACzD,OAAO;AAAA,QACL,aAAa,SAAS,MAAM;AAAA,QAC5B,cAAc,SAAS,MAAM;AAAA,QAC7B,aAAa,SAAS,MAAM;AAAA,MAC9B;AAAA,MACA,SAAS,EAAE,KAAK;AAAA,MAChB,UAAU;AAAA,QACR,GAAG,oBAAoB,QAAQ;AAAA,QAC/B,SAAS;AAAA,QACT,MAAM;AAAA,MACR;AAAA,MACA;AAAA,IACF;AAAA,EACF;AAAA,EAEA,MAAM,SACJ,SAC2D;AAC3D,UAAM,EAAE,MAAM,SAAS,IAAI,MAAM,KAAK,QAAQ,OAAO;AACrD,UAAM,OAAO,EAAE,GAAG,MAAM,QAAQ,KAAK;AAErC,UAAM,EAAE,iBAAiB,OAAO,SAAS,IAAI,MAAM,cAAc;AAAA,MAC/D,KAAK,GAAG,KAAK,OAAO,OAAO;AAAA,MAC3B,SAAS,eAAe,KAAK,OAAO,QAAQ,GAAG,QAAQ,OAAO;AAAA,MAC9D;AAAA,MACA,uBAAuB;AAAA,MACvB,2BAA2B;AAAA,QACzB;AAAA,MACF;AAAA,MACA,aAAa,QAAQ;AAAA,MACrB,OAAO,KAAK,OAAO;AAAA,IACrB,CAAC;AAED,QAAI,eAA4C;AAChD,UAAM,QAA8B;AAAA,MAClC,aAAa;AAAA,MACb,cAAc;AAAA,MACd,aAAa;AAAA,IACf;AAEA,QAAI,eAAe;AACnB,QAAI,aAAa;AACjB,QAAI,oBAAmC;AAEvC,UAAMC,cAAa,KAAK;AAExB,WAAO;AAAA,MACL,QAAQ,SAAS;AAAA,QACf,IAAI,gBAGF;AAAA,UACA,MAAM,YAAY;AAChB,uBAAW,QAAQ,EAAE,MAAM,gBAAgB,SAAS,CAAC;AAAA,UACvD;AAAA,UAEA,UAAU,OAAO,YAAY;AAE3B,gBAAI,QAAQ,kBAAkB;AAC5B,yBAAW,QAAQ,EAAE,MAAM,OAAO,UAAU,MAAM,SAAS,CAAC;AAAA,YAC9D;AAEA,gBAAI,CAAC,MAAM,SAAS;AAClB,yBAAW,QAAQ,EAAE,MAAM,SAAS,OAAO,MAAM,MAAM,CAAC;AACxD;AAAA,YACF;AAEA,kBAAM,QAAQ,MAAM;AAEpB,gBAAI,cAAc;AAChB,6BAAe;AAEf,yBAAW,QAAQ;AAAA,gBACjB,MAAM;AAAA,gBACN,GAAG,oBAAoB,KAAK;AAAA,cAC9B,CAAC;AAAA,YACH;AAEA,gBAAI,MAAM,SAAS,MAAM;AACvB,oBAAM,cAAc,MAAM,MAAM;AAChC,oBAAM,eAAe,MAAM,MAAM;AACjC,oBAAM,cAAc,MAAM,MAAM;AAAA,YAClC;AAEA,kBAAM,SAAS,MAAM,QAAQ,CAAC;AAC9B,kBAAM,QAAQ,OAAO;AAErB,kBAAM,cAAc,mBAAmB,MAAM,OAAO;AAEpD,gBAAI,MAAM,WAAW,QAAQ,MAAM,QAAQ,MAAM,OAAO,GAAG;AACzD,yBAAW,QAAQ,MAAM,SAAS;AAChC,oBAAI,KAAK,SAAS,YAAY;AAC5B,wBAAM,iBAAiB,wBAAwB,KAAK,QAAQ;AAC5D,sBAAI,eAAe,SAAS,GAAG;AAC7B,wBAAI,qBAAqB,MAAM;AAE7B,0BAAI,YAAY;AACd,mCAAW,QAAQ,EAAE,MAAM,YAAY,IAAI,IAAI,CAAC;AAChD,qCAAa;AAAA,sBACf;AAEA,0CAAoBA,YAAW;AAC/B,iCAAW,QAAQ;AAAA,wBACjB,MAAM;AAAA,wBACN,IAAI;AAAA,sBACN,CAAC;AAAA,oBACH;AACA,+BAAW,QAAQ;AAAA,sBACjB,MAAM;AAAA,sBACN,IAAI;AAAA,sBACJ,OAAO;AAAA,oBACT,CAAC;AAAA,kBACH;AAAA,gBACF;AAAA,cACF;AAAA,YACF;AAEA,gBAAI,eAAe,QAAQ,YAAY,SAAS,GAAG;AACjD,kBAAI,CAAC,YAAY;AAEf,oBAAI,qBAAqB,MAAM;AAC7B,6BAAW,QAAQ;AAAA,oBACjB,MAAM;AAAA,oBACN,IAAI;AAAA,kBACN,CAAC;AACD,sCAAoB;AAAA,gBACtB;AACA,2BAAW,QAAQ,EAAE,MAAM,cAAc,IAAI,IAAI,CAAC;AAClD,6BAAa;AAAA,cACf;AAEA,yBAAW,QAAQ;AAAA,gBACjB,MAAM;AAAA,gBACN,IAAI;AAAA,gBACJ,OAAO;AAAA,cACT,CAAC;AAAA,YACH;AAEA,iBAAI,+BAAO,eAAc,MAAM;AAC7B,yBAAW,YAAY,MAAM,YAAY;AACvC,sBAAM,aAAa,SAAS;AAC5B,sBAAM,WAAW,SAAS,SAAS;AACnC,sBAAM,QAAQ,SAAS,SAAS;AAEhC,2BAAW,QAAQ;AAAA,kBACjB,MAAM;AAAA,kBACN,IAAI;AAAA,kBACJ;AAAA,gBACF,CAAC;AAED,2BAAW,QAAQ;AAAA,kBACjB,MAAM;AAAA,kBACN,IAAI;AAAA,kBACJ,OAAO;AAAA,gBACT,CAAC;AAED,2BAAW,QAAQ;AAAA,kBACjB,MAAM;AAAA,kBACN,IAAI;AAAA,gBACN,CAAC;AAED,2BAAW,QAAQ;AAAA,kBACjB,MAAM;AAAA,kBACN;AAAA,kBACA;AAAA,kBACA;AAAA,gBACF,CAAC;AAAA,cACH;AAAA,YACF;AAEA,gBAAI,OAAO,iBAAiB,MAAM;AAChC,6BAAe,uBAAuB,OAAO,aAAa;AAAA,YAC5D;AAAA,UACF;AAAA,UAEA,MAAM,YAAY;AAChB,gBAAI,qBAAqB,MAAM;AAC7B,yBAAW,QAAQ;AAAA,gBACjB,MAAM;AAAA,gBACN,IAAI;AAAA,cACN,CAAC;AAAA,YACH;AACA,gBAAI,YAAY;AACd,yBAAW,QAAQ,EAAE,MAAM,YAAY,IAAI,IAAI,CAAC;AAAA,YAClD;AAEA,uBAAW,QAAQ;AAAA,cACjB,MAAM;AAAA,cACN;AAAA,cACA;AAAA,YACF,CAAC;AAAA,UACH;AAAA,QACF,CAAC;AAAA,MACH;AAAA,MACA,SAAS,EAAE,KAAK;AAAA,MAChB,UAAU,EAAE,SAAS,gBAAgB;AAAA,
IACvC;AAAA,EACF;AACF;AAEA,SAAS,wBACP,UACA;AACA,SAAO,SACJ,OAAO,WAAS,MAAM,SAAS,MAAM,EACrC,IAAI,WAAS,MAAM,IAAI,EACvB,KAAK,EAAE;AACZ;AAEA,SAAS,mBAAmB,SAA+C;AACzE,MAAI,OAAO,YAAY,UAAU;AAC/B,WAAO;AAAA,EACT;AAEA,MAAI,WAAW,MAAM;AACnB,WAAO;AAAA,EACT;AAEA,QAAM,cAAwB,CAAC;AAE/B,aAAW,SAAS,SAAS;AAC3B,UAAM,EAAE,KAAK,IAAI;AAEjB,YAAQ,MAAM;AAAA,MACZ,KAAK;AACH,oBAAY,KAAK,MAAM,IAAI;AAC3B;AAAA,MACF,KAAK;AAAA,MACL,KAAK;AAAA,MACL,KAAK;AAEH;AAAA,MACF,SAAS;AACP,cAAM,mBAA0B;AAChC,cAAM,IAAI,MAAM,qBAAqB,gBAAgB,EAAE;AAAA,MACzD;AAAA,IACF;AAAA,EACF;AAEA,SAAO,YAAY,SAAS,YAAY,KAAK,EAAE,IAAI;AACrD;AAEA,IAAM,uBAAuBC,GAC1B,MAAM;AAAA,EACLA,GAAE,OAAO;AAAA,EACTA,GAAE;AAAA,IACAA,GAAE,mBAAmB,QAAQ;AAAA,MAC3BA,GAAE,OAAO;AAAA,QACP,MAAMA,GAAE,QAAQ,MAAM;AAAA,QACtB,MAAMA,GAAE,OAAO;AAAA,MACjB,CAAC;AAAA,MACDA,GAAE,OAAO;AAAA,QACP,MAAMA,GAAE,QAAQ,WAAW;AAAA,QAC3B,WAAWA,GAAE,MAAM;AAAA,UACjBA,GAAE,OAAO;AAAA,UACTA,GAAE,OAAO;AAAA,YACP,KAAKA,GAAE,OAAO;AAAA,YACd,QAAQA,GAAE,OAAO,EAAE,SAAS;AAAA,UAC9B,CAAC;AAAA,QACH,CAAC;AAAA,MACH,CAAC;AAAA,MACDA,GAAE,OAAO;AAAA,QACP,MAAMA,GAAE,QAAQ,WAAW;AAAA,QAC3B,eAAeA,GAAE,MAAMA,GAAE,OAAO,CAAC;AAAA,MACnC,CAAC;AAAA,MACDA,GAAE,OAAO;AAAA,QACP,MAAMA,GAAE,QAAQ,UAAU;AAAA,QAC1B,UAAUA,GAAE;AAAA,UACVA,GAAE,OAAO;AAAA,YACP,MAAMA,GAAE,QAAQ,MAAM;AAAA,YACtB,MAAMA,GAAE,OAAO;AAAA,UACjB,CAAC;AAAA,QACH;AAAA,MACF,CAAC;AAAA,IACH,CAAC;AAAA,EACH;AACF,CAAC,EACA,QAAQ;AAEX,IAAM,qBAAqBA,GAAE,OAAO;AAAA,EAClC,eAAeA,GAAE,OAAO;AAAA,EACxB,mBAAmBA,GAAE,OAAO;AAAA,EAC5B,cAAcA,GAAE,OAAO;AACzB,CAAC;AAID,IAAM,4BAA4BA,GAAE,OAAO;AAAA,EACzC,IAAIA,GAAE,OAAO,EAAE,QAAQ;AAAA,EACvB,SAASA,GAAE,OAAO,EAAE,QAAQ;AAAA,EAC5B,OAAOA,GAAE,OAAO,EAAE,QAAQ;AAAA,EAC1B,SAASA,GAAE;AAAA,IACTA,GAAE,OAAO;AAAA,MACP,SAASA,GAAE,OAAO;AAAA,QAChB,MAAMA,GAAE,QAAQ,WAAW;AAAA,QAC3B,SAAS;AAAA,QACT,YAAYA,GACT;AAAA,UACCA,GAAE,OAAO;AAAA,YACP,IAAIA,GAAE,OAAO;AAAA,YACb,UAAUA,GAAE,OAAO,EAAE,MAAMA,GAAE,OAAO,GAAG,WAAWA,GAAE,OAAO,EAAE,CAAC;AAAA,UAChE,CAAC;AAAA,QACH,EACC,QAAQ;AAAA,MACb,CAAC;AAAA,MACD,OAAOA,GAAE,OAAO;AAAA,MAChB,eAAeA,GAAE,OAAO,EAAE,QAAQ;AAAA,IACpC,CAAC;AAAA,EACH;AAAA,EACA,QAAQA,GAAE,QAAQ,iBAAiB;AAAA,EACnC,OAAO;AACT,CAAC;AAID,IAAM,yBAAyBA,GAAE,OAAO;AAAA,EACtC,IAAIA,GAAE,OAAO,EAAE,QAAQ;AAAA,EACvB,SAASA,GAAE,OAAO,EAAE,QAAQ;AAAA,EAC5B,OAAOA,GAAE,OAAO,EAAE,QAAQ;AAAA,EAC1B,SAASA,GAAE;AAAA,IACTA,GAAE,OAAO;AAAA,MACP,OAAOA,GAAE,OAAO;AAAA,QACd,MAAMA,GAAE,KAAK,CAAC,WAAW,CAAC,EAAE,SAAS;AAAA,QACrC,SAAS;AAAA,QACT,YAAYA,GACT;AAAA,UACCA,GAAE,OAAO;AAAA,YACP,IAAIA,GAAE,OAAO;AAAA,YACb,UAAUA,GAAE,OAAO,EAAE,MAAMA,GAAE,OAAO,GAAG,WAAWA,GAAE,OAAO,EAAE,CAAC;AAAA,UAChE,CAAC;AAAA,QACH,EACC,QAAQ;AAAA,MACb,CAAC;AAAA,MACD,eAAeA,GAAE,OAAO,EAAE,QAAQ;AAAA,MAClC,OAAOA,GAAE,OAAO;AAAA,IAClB,CAAC;AAAA,EACH;AAAA,EACA,OAAO,mBAAmB,QAAQ;AACpC,CAAC;;;AOzkBD;AAAA,EAEE;AAAA,OACK;AACP;AAAA,EACE,kBAAAC;AAAA,EACA,6BAAAC;AAAA,EAEA,iBAAAC;AAAA,OACK;AACP,SAAS,KAAAC,UAAS;AAWX,IAAM,wBAAN,MAAgE;AAAA,EAYrE,YACE,SACA,QACA;AAdF,SAAS,uBAAuB;AAEhC,SAAS,uBAAuB;AAChC,SAAS,wBAAwB;AAY/B,SAAK,UAAU;AACf,SAAK,SAAS;AAAA,EAChB;AAAA,EAVA,IAAI,WAAmB;AACrB,WAAO,KAAK,OAAO;AAAA,EACrB;AAAA,EAUA,MAAM,QAAQ;AAAA,IACZ;AAAA,IACA;AAAA,IACA;AAAA,EACF,GAEE;AACA,QAAI,OAAO,SAAS,KAAK,sBAAsB;AAC7C,YAAM,IAAI,mCAAmC;AAAA,QAC3C,UAAU,KAAK;AAAA,QACf,SAAS,KAAK;AAAA,QACd,sBAAsB,KAAK;AAAA,QAC3B;AAAA,MACF,CAAC;AAAA,IACH;AAEA,UAAM;AAAA,MACJ;AAAA,MACA,OAAO;AAAA,MACP;AAAA,IACF,IAAI,MAAMC,eAAc;AAAA,MACtB,KAAK,GAAG,KAAK,OAAO,OAAO;AAAA,MAC3B,SAASC,gBAAe,KAAK,OAAO,QAAQ,GAAG,OAAO;AAAA,MACtD,MAAM;AAAA,QACJ,OAAO,KAAK;AAAA,QACZ,OAAO;AAAA,QACP,iBAAiB;AAAA,MACnB;AAAA,MACA,uBAAuB;AAAA,MACvB,2BAA2BC;AAAA,QACzB;AAAA,MACF;AAAA,MACA;AAAA,MACA,OAAO,KAAK,OAAO;AAAA,IACrB,CAAC;AAED,WAAO;AAAA,MACL,
YAAY,SAAS,KAAK,IAAI,UAAQ,KAAK,SAAS;AAAA,MACpD,OAAO,SAAS,QACZ,EAAE,QAAQ,SAAS,MAAM,cAAc,IACvC;AAAA,MACJ,UAAU,EAAE,SAAS,iBAAiB,MAAM,SAAS;AAAA,IACvD;AAAA,EACF;AACF;AAIA,IAAM,qCAAqCC,GAAE,OAAO;AAAA,EAClD,MAAMA,GAAE,MAAMA,GAAE,OAAO,EAAE,WAAWA,GAAE,MAAMA,GAAE,OAAO,CAAC,EAAE,CAAC,CAAC;AAAA,EAC1D,OAAOA,GAAE,OAAO,EAAE,eAAeA,GAAE,OAAO,EAAE,CAAC,EAAE,QAAQ;AACzD,CAAC;;;ARrBM,SAAS,cACd,UAAmC,CAAC,GACnB;AAzEnB;AA0EE,QAAM,WACJ,0BAAqB,QAAQ,OAAO,MAApC,YAAyC;AAE3C,QAAM,aAAa,OAAO;AAAA,IACxB,eAAe,UAAU,WAAW;AAAA,MAClC,QAAQ,QAAQ;AAAA,MAChB,yBAAyB;AAAA,MACzB,aAAa;AAAA,IACf,CAAC,CAAC;AAAA,IACF,GAAG,QAAQ;AAAA,EACb;AAEA,QAAM,kBAAkB,CAAC,YACvB,IAAI,yBAAyB,SAAS;AAAA,IACpC,UAAU;AAAA,IACV;AAAA,IACA,SAAS;AAAA,IACT,OAAO,QAAQ;AAAA,IACf,YAAY,QAAQ;AAAA,EACtB,CAAC;AAEH,QAAM,uBAAuB,CAAC,YAC5B,IAAI,sBAAsB,SAAS;AAAA,IACjC,UAAU;AAAA,IACV;AAAA,IACA,SAAS;AAAA,IACT,OAAO,QAAQ;AAAA,EACjB,CAAC;AAEH,QAAM,WAAW,SAAU,SAA6B;AACtD,QAAI,YAAY;AACd,YAAM,IAAI;AAAA,QACR;AAAA,MACF;AAAA,IACF;AAEA,WAAO,gBAAgB,OAAO;AAAA,EAChC;AAEA,WAAS,gBAAgB;AACzB,WAAS,OAAO;AAChB,WAAS,YAAY;AACrB,WAAS,gBAAgB;AACzB,WAAS,qBAAqB;AAE9B,WAAS,aAAa,CAAC,YAAoB;AACzC,UAAM,IAAI,iBAAiB,EAAE,SAAS,WAAW,aAAa,CAAC;AAAA,EACjE;AAEA,SAAO;AACT;AAKO,IAAM,UAAU,cAAc;","names":["z","z","UnsupportedFunctionalityError","generateId","z","combineHeaders","createJsonResponseHandler","postJsonToApi","z","postJsonToApi","combineHeaders","createJsonResponseHandler","z"]}