@ai-sdk/mistral 2.1.0-beta.0 → 2.1.0-beta.10

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
package/CHANGELOG.md CHANGED
@@ -1,5 +1,91 @@
  # @ai-sdk/mistral
 
+ ## 2.1.0-beta.10
+
+ ### Patch Changes
+
+ - Updated dependencies [521c537]
+ - @ai-sdk/provider-utils@3.1.0-beta.9
+
+ ## 2.1.0-beta.9
+
+ ### Patch Changes
+
+ - Updated dependencies [e06565c]
+ - @ai-sdk/provider-utils@3.1.0-beta.8
+
+ ## 2.1.0-beta.8
+
+ ### Patch Changes
+
+ - e8109d3: feat: tool execution approval
+ - Updated dependencies [046aa3b]
+ - Updated dependencies [e8109d3]
+ - @ai-sdk/provider@2.1.0-beta.5
+ - @ai-sdk/provider-utils@3.1.0-beta.7
+
+ ## 2.1.0-beta.7
+
+ ### Patch Changes
+
+ - Updated dependencies [0adc679]
+ - Updated dependencies [2b0caef]
+ - @ai-sdk/provider-utils@3.1.0-beta.6
+ - @ai-sdk/provider@2.1.0-beta.4
+
+ ## 2.1.0-beta.6
+
+ ### Patch Changes
+
+ - 8dac895: feat: `LanguageModelV3`
+ - 10c1322: fix: moved dependency `@ai-sdk/test-server` to devDependencies
+ - Updated dependencies [8dac895]
+ - @ai-sdk/provider-utils@3.1.0-beta.5
+ - @ai-sdk/provider@2.1.0-beta.3
+
+ ## 2.1.0-beta.5
+
+ ### Patch Changes
+
+ - 4616b86: chore: update zod peer depenedency version
+ - Updated dependencies [4616b86]
+ - @ai-sdk/provider-utils@3.1.0-beta.4
+
+ ## 2.1.0-beta.4
+
+ ### Patch Changes
+
+ - ed329cb: feat: `Provider-V3`
+ - Updated dependencies [ed329cb]
+ - Updated dependencies [522f6b8]
+ - @ai-sdk/provider@2.1.0-beta.2
+ - @ai-sdk/provider-utils@3.1.0-beta.3
+
+ ## 2.1.0-beta.3
+
+ ### Patch Changes
+
+ - 0c4822d: feat: `EmbeddingModelV3`
+ - 1cad0ab: feat: add provider version to user-agent header
+ - Updated dependencies [0c4822d]
+ - @ai-sdk/provider@2.1.0-beta.1
+ - @ai-sdk/provider-utils@3.1.0-beta.2
+
+ ## 2.1.0-beta.2
+
+ ### Patch Changes
+
+ - 62731a8: Add option for disabling parallel tool call in mistral
+
+ ## 2.1.0-beta.1
+
+ ### Patch Changes
+
+ - Updated dependencies [953d0f2]
+ - Updated dependencies [cbb1d35]
+ - @ai-sdk/test-server@1.0.0-beta.0
+ - @ai-sdk/provider-utils@3.1.0-beta.1
+
  ## 2.1.0-beta.0
 
  ### Minor Changes
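The 2.1.0-beta.2 entry ("Add option for disabling parallel tool call in mistral") surfaces as the new `parallelToolCalls` provider option in the type definitions and compiled output below. A minimal usage sketch, assuming the `generateText` and `tool` helpers from the `ai` package and a made-up weather tool (neither is part of this diff):

```ts
import { generateText, tool } from 'ai';
import { mistral } from '@ai-sdk/mistral';
import { z } from 'zod';

// Hypothetical tool, defined only to make the example self-contained.
const weather = tool({
  description: 'Get the weather for a city',
  inputSchema: z.object({ city: z.string() }),
  execute: async ({ city }) => ({ city, temperature: 21 }),
});

async function main() {
  const result = await generateText({
    model: mistral('mistral-large-latest'),
    tools: { weather },
    // Mirrors MistralLanguageModelOptions from this release:
    // parallelToolCalls: false asks the model for at most one tool call per response.
    providerOptions: { mistral: { parallelToolCalls: false } },
    prompt: 'What is the weather in Paris and in Rome?',
  });
  console.log(result.toolCalls);
}

main();
```

When the option is set, the provider forwards it as `parallel_tool_calls` in the request body (see the `@@ -377,7 +391,8 @@` hunk in `index.js` below); when it is omitted, no flag is sent and Mistral's default behavior applies.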
package/dist/index.d.mts CHANGED
@@ -1,4 +1,4 @@
- import { ProviderV2, LanguageModelV2, EmbeddingModelV2 } from '@ai-sdk/provider';
+ import { ProviderV3, LanguageModelV3, EmbeddingModelV3 } from '@ai-sdk/provider';
  import { FetchFunction } from '@ai-sdk/provider-utils';
  import { z } from 'zod/v4';
 
@@ -9,27 +9,28 @@ declare const mistralLanguageModelOptions: z.ZodObject<{
  documentPageLimit: z.ZodOptional<z.ZodNumber>;
  structuredOutputs: z.ZodOptional<z.ZodBoolean>;
  strictJsonSchema: z.ZodOptional<z.ZodBoolean>;
+ parallelToolCalls: z.ZodOptional<z.ZodBoolean>;
  }, z.core.$strip>;
  type MistralLanguageModelOptions = z.infer<typeof mistralLanguageModelOptions>;
 
  type MistralEmbeddingModelId = 'mistral-embed' | (string & {});
 
- interface MistralProvider extends ProviderV2 {
- (modelId: MistralChatModelId): LanguageModelV2;
+ interface MistralProvider extends ProviderV3 {
+ (modelId: MistralChatModelId): LanguageModelV3;
  /**
  Creates a model for text generation.
  */
- languageModel(modelId: MistralChatModelId): LanguageModelV2;
+ languageModel(modelId: MistralChatModelId): LanguageModelV3;
  /**
  Creates a model for text generation.
  */
- chat(modelId: MistralChatModelId): LanguageModelV2;
+ chat(modelId: MistralChatModelId): LanguageModelV3;
  /**
  @deprecated Use `textEmbedding()` instead.
  */
- embedding(modelId: MistralEmbeddingModelId): EmbeddingModelV2<string>;
- textEmbedding(modelId: MistralEmbeddingModelId): EmbeddingModelV2<string>;
- textEmbeddingModel: (modelId: MistralEmbeddingModelId) => EmbeddingModelV2<string>;
+ embedding(modelId: MistralEmbeddingModelId): EmbeddingModelV3<string>;
+ textEmbedding(modelId: MistralEmbeddingModelId): EmbeddingModelV3<string>;
+ textEmbeddingModel: (modelId: MistralEmbeddingModelId) => EmbeddingModelV3<string>;
  }
  interface MistralProviderSettings {
  /**
@@ -62,4 +63,6 @@ Default Mistral provider instance.
  */
  declare const mistral: MistralProvider;
 
- export { type MistralLanguageModelOptions, type MistralProvider, type MistralProviderSettings, createMistral, mistral };
+ declare const VERSION: string;
+
+ export { type MistralLanguageModelOptions, type MistralProvider, type MistralProviderSettings, VERSION, createMistral, mistral };
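Beyond the V2 → V3 interface rename, the declaration file now exports a `VERSION` constant. A short sketch of consuming the new surface, assuming only what the declarations above show:

```ts
import { mistral, createMistral, VERSION } from '@ai-sdk/mistral';

// VERSION is new in this release; it is also appended to the user-agent
// header as `ai-sdk/mistral/<VERSION>` (see the index.js changes below).
console.log(VERSION); // e.g. "2.1.0-beta.10"

// The provider factory is unchanged; only the returned specification
// versions move from V2 to V3.
const model = mistral('mistral-small-latest');            // LanguageModelV3
const embedder = mistral.textEmbedding('mistral-embed');  // EmbeddingModelV3<string>

// Custom settings per MistralProviderSettings; the compiled code falls back
// to the MISTRAL_API_KEY environment variable when apiKey is omitted.
const custom = createMistral({
  baseURL: 'https://api.mistral.ai/v1',
  headers: { 'x-example-header': 'value' }, // hypothetical custom header
});
```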
package/dist/index.d.ts CHANGED
@@ -1,4 +1,4 @@
- import { ProviderV2, LanguageModelV2, EmbeddingModelV2 } from '@ai-sdk/provider';
+ import { ProviderV3, LanguageModelV3, EmbeddingModelV3 } from '@ai-sdk/provider';
  import { FetchFunction } from '@ai-sdk/provider-utils';
  import { z } from 'zod/v4';
 
@@ -9,27 +9,28 @@ declare const mistralLanguageModelOptions: z.ZodObject<{
  documentPageLimit: z.ZodOptional<z.ZodNumber>;
  structuredOutputs: z.ZodOptional<z.ZodBoolean>;
  strictJsonSchema: z.ZodOptional<z.ZodBoolean>;
+ parallelToolCalls: z.ZodOptional<z.ZodBoolean>;
  }, z.core.$strip>;
  type MistralLanguageModelOptions = z.infer<typeof mistralLanguageModelOptions>;
 
  type MistralEmbeddingModelId = 'mistral-embed' | (string & {});
 
- interface MistralProvider extends ProviderV2 {
- (modelId: MistralChatModelId): LanguageModelV2;
+ interface MistralProvider extends ProviderV3 {
+ (modelId: MistralChatModelId): LanguageModelV3;
  /**
  Creates a model for text generation.
  */
- languageModel(modelId: MistralChatModelId): LanguageModelV2;
+ languageModel(modelId: MistralChatModelId): LanguageModelV3;
  /**
  Creates a model for text generation.
  */
- chat(modelId: MistralChatModelId): LanguageModelV2;
+ chat(modelId: MistralChatModelId): LanguageModelV3;
  /**
  @deprecated Use `textEmbedding()` instead.
  */
- embedding(modelId: MistralEmbeddingModelId): EmbeddingModelV2<string>;
- textEmbedding(modelId: MistralEmbeddingModelId): EmbeddingModelV2<string>;
- textEmbeddingModel: (modelId: MistralEmbeddingModelId) => EmbeddingModelV2<string>;
+ embedding(modelId: MistralEmbeddingModelId): EmbeddingModelV3<string>;
+ textEmbedding(modelId: MistralEmbeddingModelId): EmbeddingModelV3<string>;
+ textEmbeddingModel: (modelId: MistralEmbeddingModelId) => EmbeddingModelV3<string>;
  }
  interface MistralProviderSettings {
  /**
@@ -62,4 +63,6 @@ Default Mistral provider instance.
  */
  declare const mistral: MistralProvider;
 
- export { type MistralLanguageModelOptions, type MistralProvider, type MistralProviderSettings, createMistral, mistral };
+ declare const VERSION: string;
+
+ export { type MistralLanguageModelOptions, type MistralProvider, type MistralProviderSettings, VERSION, createMistral, mistral };
package/dist/index.js CHANGED
@@ -20,6 +20,7 @@ var __toCommonJS = (mod) => __copyProps(__defProp({}, "__esModule", { value: tru
  // src/index.ts
  var src_exports = {};
  __export(src_exports, {
+ VERSION: () => VERSION,
  createMistral: () => createMistral,
  mistral: () => mistral
  });
@@ -37,6 +38,7 @@ var import_v43 = require("zod/v4");
  var import_provider = require("@ai-sdk/provider");
  var import_provider_utils = require("@ai-sdk/provider-utils");
  function convertToMistralChatMessages(prompt) {
+ var _a;
  const messages = [];
  for (let i = 0; i < prompt.length; i++) {
  const { role, content } = prompt[i];
@@ -125,6 +127,9 @@ function convertToMistralChatMessages(prompt) {
  case "error-text":
  contentValue = output.value;
  break;
+ case "execution-denied":
+ contentValue = (_a = output.reason) != null ? _a : "Tool execution denied.";
+ break;
  case "content":
  case "json":
  case "error-json":
@@ -199,7 +204,14 @@ var mistralLanguageModelOptions = import_v4.z.object({
  *
  * @default false
  */
- strictJsonSchema: import_v4.z.boolean().optional()
+ strictJsonSchema: import_v4.z.boolean().optional(),
+ /**
+ * Whether to enable parallel function calling during tool use.
+ * When set to false, the model will use at most one tool per response.
+ *
+ * @default true
+ */
+ parallelToolCalls: import_v4.z.boolean().optional()
  });
 
  // src/mistral-error.ts
@@ -253,6 +265,8 @@ function prepareTools({
  return { tools: mistralTools, toolChoice: type, toolWarnings };
  case "required":
  return { tools: mistralTools, toolChoice: "any", toolWarnings };
+ // mistral does not support tool mode directly,
+ // so we filter the tools and force the tool choice through 'any'
  case "tool":
  return {
  tools: mistralTools.filter(
@@ -273,7 +287,7 @@
  // src/mistral-chat-language-model.ts
  var MistralChatLanguageModel = class {
  constructor(modelId, config) {
- this.specificationVersion = "v2";
+ this.specificationVersion = "v3";
  this.supportedUrls = {
  "application/pdf": [/^https:\/\/.*$/]
  };
@@ -377,7 +391,8 @@ var MistralChatLanguageModel = class {
  args: {
  ...baseArgs,
  tools: mistralTools,
- tool_choice: mistralToolChoice
+ tool_choice: mistralToolChoice,
+ ...mistralTools != null && options.parallelToolCalls !== void 0 ? { parallel_tool_calls: options.parallelToolCalls } : {}
  },
  warnings: [...warnings, ...toolWarnings]
  };
@@ -718,7 +733,7 @@ var import_provider_utils4 = require("@ai-sdk/provider-utils");
  var import_v44 = require("zod/v4");
  var MistralEmbeddingModel = class {
  constructor(modelId, config) {
- this.specificationVersion = "v2";
+ this.specificationVersion = "v3";
  this.maxEmbeddingsPerCall = 32;
  this.supportsParallelCalls = false;
  this.modelId = modelId;
@@ -771,18 +786,24 @@ var MistralTextEmbeddingResponseSchema = import_v44.z.object({
  usage: import_v44.z.object({ prompt_tokens: import_v44.z.number() }).nullish()
  });
 
+ // src/version.ts
+ var VERSION = true ? "2.1.0-beta.10" : "0.0.0-test";
+
  // src/mistral-provider.ts
  function createMistral(options = {}) {
  var _a;
  const baseURL = (_a = (0, import_provider_utils5.withoutTrailingSlash)(options.baseURL)) != null ? _a : "https://api.mistral.ai/v1";
- const getHeaders = () => ({
- Authorization: `Bearer ${(0, import_provider_utils5.loadApiKey)({
- apiKey: options.apiKey,
- environmentVariableName: "MISTRAL_API_KEY",
- description: "Mistral"
- })}`,
- ...options.headers
- });
+ const getHeaders = () => (0, import_provider_utils5.withUserAgentSuffix)(
+ {
+ Authorization: `Bearer ${(0, import_provider_utils5.loadApiKey)({
+ apiKey: options.apiKey,
+ environmentVariableName: "MISTRAL_API_KEY",
+ description: "Mistral"
+ })}`,
+ ...options.headers
+ },
+ `ai-sdk/mistral/${VERSION}`
+ );
  const createChatModel = (modelId) => new MistralChatLanguageModel(modelId, {
  provider: "mistral.chat",
  baseURL,
@@ -817,6 +838,7 @@ function createMistral(options = {}) {
  var mistral = createMistral();
  // Annotate the CommonJS export names for ESM import in node:
  0 && (module.exports = {
+ VERSION,
  createMistral,
  mistral
  });
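The `getHeaders` change above is what implements the 2.1.0-beta.3 changelog entry "add provider version to user-agent header": the build-time `VERSION` constant from `src/version.ts` is attached to outgoing requests via `withUserAgentSuffix`. A simplified sketch of the resulting behavior, assuming `withUserAgentSuffix` appends the given suffix to any existing `user-agent` value (its exact semantics live in `@ai-sdk/provider-utils` and are not shown in this diff):

```ts
import { withUserAgentSuffix } from '@ai-sdk/provider-utils';

const VERSION = '2.1.0-beta.10'; // inlined at build time in dist/index.js

// Roughly what the new getHeaders() produces before each request.
const headers = withUserAgentSuffix(
  {
    Authorization: 'Bearer <MISTRAL_API_KEY>', // placeholder, loaded via loadApiKey in the real code
    // ...any custom headers passed via MistralProviderSettings.headers
  },
  `ai-sdk/mistral/${VERSION}`,
);

// Assumed outcome: the headers now carry a user-agent ending in
// "ai-sdk/mistral/2.1.0-beta.10", letting the API attribute traffic
// to this provider version.
console.log(headers);
```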
package/dist/index.js.map CHANGED
@@ -1 +1 @@
- {"version":3,"sources":["../src/index.ts","../src/mistral-provider.ts","../src/mistral-chat-language-model.ts","../src/convert-to-mistral-chat-messages.ts","../src/get-response-metadata.ts","../src/map-mistral-finish-reason.ts","../src/mistral-chat-options.ts","../src/mistral-error.ts","../src/mistral-prepare-tools.ts","../src/mistral-embedding-model.ts"],"sourcesContent":["export { createMistral, mistral } from './mistral-provider';\nexport type {\n MistralProvider,\n MistralProviderSettings,\n} from './mistral-provider';\nexport type { MistralLanguageModelOptions } from './mistral-chat-options';\n","import {\n EmbeddingModelV2,\n LanguageModelV2,\n NoSuchModelError,\n ProviderV2,\n} from '@ai-sdk/provider';\nimport {\n FetchFunction,\n loadApiKey,\n withoutTrailingSlash,\n} from '@ai-sdk/provider-utils';\nimport { MistralChatLanguageModel } from './mistral-chat-language-model';\nimport { MistralChatModelId } from './mistral-chat-options';\nimport { MistralEmbeddingModel } from './mistral-embedding-model';\nimport { MistralEmbeddingModelId } from './mistral-embedding-options';\n\nexport interface MistralProvider extends ProviderV2 {\n (modelId: MistralChatModelId): LanguageModelV2;\n\n /**\nCreates a model for text generation.\n*/\n languageModel(modelId: MistralChatModelId): LanguageModelV2;\n\n /**\nCreates a model for text generation.\n*/\n chat(modelId: MistralChatModelId): LanguageModelV2;\n\n /**\n@deprecated Use `textEmbedding()` instead.\n */\n embedding(modelId: MistralEmbeddingModelId): EmbeddingModelV2<string>;\n\n textEmbedding(modelId: MistralEmbeddingModelId): EmbeddingModelV2<string>;\n\n textEmbeddingModel: (\n modelId: MistralEmbeddingModelId,\n ) => EmbeddingModelV2<string>;\n}\n\nexport interface MistralProviderSettings {\n /**\nUse a different URL prefix for API calls, e.g. to use proxy servers.\nThe default prefix is `https://api.mistral.ai/v1`.\n */\n baseURL?: string;\n\n /**\nAPI key that is being send using the `Authorization` header.\nIt defaults to the `MISTRAL_API_KEY` environment variable.\n */\n apiKey?: string;\n\n /**\nCustom headers to include in the requests.\n */\n headers?: Record<string, string>;\n\n /**\nCustom fetch implementation. You can use it as a middleware to intercept requests,\nor to provide a custom fetch implementation for e.g. testing.\n */\n fetch?: FetchFunction;\n\n generateId?: () => string;\n}\n\n/**\nCreate a Mistral AI provider instance.\n */\nexport function createMistral(\n options: MistralProviderSettings = {},\n): MistralProvider {\n const baseURL =\n withoutTrailingSlash(options.baseURL) ?? 
'https://api.mistral.ai/v1';\n\n const getHeaders = () => ({\n Authorization: `Bearer ${loadApiKey({\n apiKey: options.apiKey,\n environmentVariableName: 'MISTRAL_API_KEY',\n description: 'Mistral',\n })}`,\n ...options.headers,\n });\n\n const createChatModel = (modelId: MistralChatModelId) =>\n new MistralChatLanguageModel(modelId, {\n provider: 'mistral.chat',\n baseURL,\n headers: getHeaders,\n fetch: options.fetch,\n generateId: options.generateId,\n });\n\n const createEmbeddingModel = (modelId: MistralEmbeddingModelId) =>\n new MistralEmbeddingModel(modelId, {\n provider: 'mistral.embedding',\n baseURL,\n headers: getHeaders,\n fetch: options.fetch,\n });\n\n const provider = function (modelId: MistralChatModelId) {\n if (new.target) {\n throw new Error(\n 'The Mistral model function cannot be called with the new keyword.',\n );\n }\n\n return createChatModel(modelId);\n };\n\n provider.languageModel = createChatModel;\n provider.chat = createChatModel;\n provider.embedding = createEmbeddingModel;\n provider.textEmbedding = createEmbeddingModel;\n provider.textEmbeddingModel = createEmbeddingModel;\n\n provider.imageModel = (modelId: string) => {\n throw new NoSuchModelError({ modelId, modelType: 'imageModel' });\n };\n\n return provider;\n}\n\n/**\nDefault Mistral provider instance.\n */\nexport const mistral = createMistral();\n","import {\n LanguageModelV2,\n LanguageModelV2CallWarning,\n LanguageModelV2Content,\n LanguageModelV2FinishReason,\n LanguageModelV2StreamPart,\n LanguageModelV2Usage,\n} from '@ai-sdk/provider';\nimport {\n combineHeaders,\n createEventSourceResponseHandler,\n createJsonResponseHandler,\n FetchFunction,\n generateId,\n injectJsonInstructionIntoMessages,\n parseProviderOptions,\n ParseResult,\n postJsonToApi,\n} from '@ai-sdk/provider-utils';\nimport { z } from 'zod/v4';\nimport { convertToMistralChatMessages } from './convert-to-mistral-chat-messages';\nimport { getResponseMetadata } from './get-response-metadata';\nimport { mapMistralFinishReason } from './map-mistral-finish-reason';\nimport {\n MistralChatModelId,\n mistralLanguageModelOptions,\n} from './mistral-chat-options';\nimport { mistralFailedResponseHandler } from './mistral-error';\nimport { prepareTools } from './mistral-prepare-tools';\n\ntype MistralChatConfig = {\n provider: string;\n baseURL: string;\n headers: () => Record<string, string | undefined>;\n fetch?: FetchFunction;\n generateId?: () => string;\n};\n\nexport class MistralChatLanguageModel implements LanguageModelV2 {\n readonly specificationVersion = 'v2';\n\n readonly modelId: MistralChatModelId;\n\n private readonly config: MistralChatConfig;\n private readonly generateId: () => string;\n\n constructor(modelId: MistralChatModelId, config: MistralChatConfig) {\n this.modelId = modelId;\n this.config = config;\n this.generateId = config.generateId ?? generateId;\n }\n\n get provider(): string {\n return this.config.provider;\n }\n\n readonly supportedUrls: Record<string, RegExp[]> = {\n 'application/pdf': [/^https:\\/\\/.*$/],\n };\n\n private async getArgs({\n prompt,\n maxOutputTokens,\n temperature,\n topP,\n topK,\n frequencyPenalty,\n presencePenalty,\n stopSequences,\n responseFormat,\n seed,\n providerOptions,\n tools,\n toolChoice,\n }: Parameters<LanguageModelV2['doGenerate']>[0]) {\n const warnings: LanguageModelV2CallWarning[] = [];\n\n const options =\n (await parseProviderOptions({\n provider: 'mistral',\n providerOptions,\n schema: mistralLanguageModelOptions,\n })) ?? 
{};\n\n if (topK != null) {\n warnings.push({\n type: 'unsupported-setting',\n setting: 'topK',\n });\n }\n\n if (frequencyPenalty != null) {\n warnings.push({\n type: 'unsupported-setting',\n setting: 'frequencyPenalty',\n });\n }\n\n if (presencePenalty != null) {\n warnings.push({\n type: 'unsupported-setting',\n setting: 'presencePenalty',\n });\n }\n\n if (stopSequences != null) {\n warnings.push({\n type: 'unsupported-setting',\n setting: 'stopSequences',\n });\n }\n\n const structuredOutputs = options.structuredOutputs ?? true;\n const strictJsonSchema = options.strictJsonSchema ?? false;\n\n // For Mistral we need to need to instruct the model to return a JSON object.\n // https://docs.mistral.ai/capabilities/structured-output/structured_output_overview/\n if (responseFormat?.type === 'json' && !responseFormat?.schema) {\n prompt = injectJsonInstructionIntoMessages({\n messages: prompt,\n schema: responseFormat.schema,\n });\n }\n\n const baseArgs = {\n // model id:\n model: this.modelId,\n\n // model specific settings:\n safe_prompt: options.safePrompt,\n\n // standardized settings:\n max_tokens: maxOutputTokens,\n temperature,\n top_p: topP,\n random_seed: seed,\n\n // response format:\n response_format:\n responseFormat?.type === 'json'\n ? structuredOutputs && responseFormat?.schema != null\n ? {\n type: 'json_schema',\n json_schema: {\n schema: responseFormat.schema,\n strict: strictJsonSchema,\n name: responseFormat.name ?? 'response',\n description: responseFormat.description,\n },\n }\n : { type: 'json_object' }\n : undefined,\n\n // mistral-specific provider options:\n document_image_limit: options.documentImageLimit,\n document_page_limit: options.documentPageLimit,\n\n // messages:\n messages: convertToMistralChatMessages(prompt),\n };\n\n const {\n tools: mistralTools,\n toolChoice: mistralToolChoice,\n toolWarnings,\n } = prepareTools({\n tools,\n toolChoice,\n });\n\n return {\n args: {\n ...baseArgs,\n tools: mistralTools,\n tool_choice: mistralToolChoice,\n },\n warnings: [...warnings, ...toolWarnings],\n };\n }\n\n async doGenerate(\n options: Parameters<LanguageModelV2['doGenerate']>[0],\n ): Promise<Awaited<ReturnType<LanguageModelV2['doGenerate']>>> {\n const { args: body, warnings } = await this.getArgs(options);\n\n const {\n responseHeaders,\n value: response,\n rawValue: rawResponse,\n } = await postJsonToApi({\n url: `${this.config.baseURL}/chat/completions`,\n headers: combineHeaders(this.config.headers(), options.headers),\n body,\n failedResponseHandler: mistralFailedResponseHandler,\n successfulResponseHandler: createJsonResponseHandler(\n mistralChatResponseSchema,\n ),\n abortSignal: options.abortSignal,\n fetch: this.config.fetch,\n });\n\n const choice = response.choices[0];\n const content: Array<LanguageModelV2Content> = [];\n\n // process content parts in order to preserve sequence\n if (\n choice.message.content != null &&\n Array.isArray(choice.message.content)\n ) {\n for (const part of choice.message.content) {\n if (part.type === 'thinking') {\n const reasoningText = extractReasoningContent(part.thinking);\n if (reasoningText.length > 0) {\n content.push({ type: 'reasoning', text: reasoningText });\n }\n } else if (part.type === 'text') {\n if (part.text.length > 0) {\n content.push({ type: 'text', text: part.text });\n }\n }\n }\n } else {\n // handle legacy string content\n const text = extractTextContent(choice.message.content);\n if (text != null && text.length > 0) {\n content.push({ type: 'text', text });\n }\n }\n\n // when there is a 
trailing assistant message, mistral will send the\n // content of that message again. we skip this repeated content to\n // avoid duplication, e.g. in continuation mode.\n\n // tool calls:\n if (choice.message.tool_calls != null) {\n for (const toolCall of choice.message.tool_calls) {\n content.push({\n type: 'tool-call',\n toolCallId: toolCall.id,\n toolName: toolCall.function.name,\n input: toolCall.function.arguments!,\n });\n }\n }\n\n return {\n content,\n finishReason: mapMistralFinishReason(choice.finish_reason),\n usage: {\n inputTokens: response.usage.prompt_tokens,\n outputTokens: response.usage.completion_tokens,\n totalTokens: response.usage.total_tokens,\n },\n request: { body },\n response: {\n ...getResponseMetadata(response),\n headers: responseHeaders,\n body: rawResponse,\n },\n warnings,\n };\n }\n\n async doStream(\n options: Parameters<LanguageModelV2['doStream']>[0],\n ): Promise<Awaited<ReturnType<LanguageModelV2['doStream']>>> {\n const { args, warnings } = await this.getArgs(options);\n const body = { ...args, stream: true };\n\n const { responseHeaders, value: response } = await postJsonToApi({\n url: `${this.config.baseURL}/chat/completions`,\n headers: combineHeaders(this.config.headers(), options.headers),\n body,\n failedResponseHandler: mistralFailedResponseHandler,\n successfulResponseHandler: createEventSourceResponseHandler(\n mistralChatChunkSchema,\n ),\n abortSignal: options.abortSignal,\n fetch: this.config.fetch,\n });\n\n let finishReason: LanguageModelV2FinishReason = 'unknown';\n const usage: LanguageModelV2Usage = {\n inputTokens: undefined,\n outputTokens: undefined,\n totalTokens: undefined,\n };\n\n let isFirstChunk = true;\n let activeText = false;\n let activeReasoningId: string | null = null;\n\n const generateId = this.generateId;\n\n return {\n stream: response.pipeThrough(\n new TransformStream<\n ParseResult<z.infer<typeof mistralChatChunkSchema>>,\n LanguageModelV2StreamPart\n >({\n start(controller) {\n controller.enqueue({ type: 'stream-start', warnings });\n },\n\n transform(chunk, controller) {\n // Emit raw chunk if requested (before anything else)\n if (options.includeRawChunks) {\n controller.enqueue({ type: 'raw', rawValue: chunk.rawValue });\n }\n\n if (!chunk.success) {\n controller.enqueue({ type: 'error', error: chunk.error });\n return;\n }\n\n const value = chunk.value;\n\n if (isFirstChunk) {\n isFirstChunk = false;\n\n controller.enqueue({\n type: 'response-metadata',\n ...getResponseMetadata(value),\n });\n }\n\n if (value.usage != null) {\n usage.inputTokens = value.usage.prompt_tokens;\n usage.outputTokens = value.usage.completion_tokens;\n usage.totalTokens = value.usage.total_tokens;\n }\n\n const choice = value.choices[0];\n const delta = choice.delta;\n\n const textContent = extractTextContent(delta.content);\n\n if (delta.content != null && Array.isArray(delta.content)) {\n for (const part of delta.content) {\n if (part.type === 'thinking') {\n const reasoningDelta = extractReasoningContent(part.thinking);\n if (reasoningDelta.length > 0) {\n if (activeReasoningId == null) {\n // end any active text before starting reasoning\n if (activeText) {\n controller.enqueue({ type: 'text-end', id: '0' });\n activeText = false;\n }\n\n activeReasoningId = generateId();\n controller.enqueue({\n type: 'reasoning-start',\n id: activeReasoningId,\n });\n }\n controller.enqueue({\n type: 'reasoning-delta',\n id: activeReasoningId,\n delta: reasoningDelta,\n });\n }\n }\n }\n }\n\n if (textContent != null && textContent.length > 
0) {\n if (!activeText) {\n // if we were in reasoning mode, end it before starting text\n if (activeReasoningId != null) {\n controller.enqueue({\n type: 'reasoning-end',\n id: activeReasoningId,\n });\n activeReasoningId = null;\n }\n controller.enqueue({ type: 'text-start', id: '0' });\n activeText = true;\n }\n\n controller.enqueue({\n type: 'text-delta',\n id: '0',\n delta: textContent,\n });\n }\n\n if (delta?.tool_calls != null) {\n for (const toolCall of delta.tool_calls) {\n const toolCallId = toolCall.id;\n const toolName = toolCall.function.name;\n const input = toolCall.function.arguments;\n\n controller.enqueue({\n type: 'tool-input-start',\n id: toolCallId,\n toolName,\n });\n\n controller.enqueue({\n type: 'tool-input-delta',\n id: toolCallId,\n delta: input,\n });\n\n controller.enqueue({\n type: 'tool-input-end',\n id: toolCallId,\n });\n\n controller.enqueue({\n type: 'tool-call',\n toolCallId,\n toolName,\n input,\n });\n }\n }\n\n if (choice.finish_reason != null) {\n finishReason = mapMistralFinishReason(choice.finish_reason);\n }\n },\n\n flush(controller) {\n if (activeReasoningId != null) {\n controller.enqueue({\n type: 'reasoning-end',\n id: activeReasoningId,\n });\n }\n if (activeText) {\n controller.enqueue({ type: 'text-end', id: '0' });\n }\n\n controller.enqueue({\n type: 'finish',\n finishReason,\n usage,\n });\n },\n }),\n ),\n request: { body },\n response: { headers: responseHeaders },\n };\n }\n}\n\nfunction extractReasoningContent(\n thinking: Array<{ type: string; text: string }>,\n) {\n return thinking\n .filter(chunk => chunk.type === 'text')\n .map(chunk => chunk.text)\n .join('');\n}\n\nfunction extractTextContent(content: z.infer<typeof mistralContentSchema>) {\n if (typeof content === 'string') {\n return content;\n }\n\n if (content == null) {\n return undefined;\n }\n\n const textContent: string[] = [];\n\n for (const chunk of content) {\n const { type } = chunk;\n\n switch (type) {\n case 'text':\n textContent.push(chunk.text);\n break;\n case 'thinking':\n case 'image_url':\n case 'reference':\n // thinking, image content, and reference content are currently ignored\n break;\n default: {\n const _exhaustiveCheck: never = type;\n throw new Error(`Unsupported type: ${_exhaustiveCheck}`);\n }\n }\n }\n\n return textContent.length ? 
textContent.join('') : undefined;\n}\n\nconst mistralContentSchema = z\n .union([\n z.string(),\n z.array(\n z.discriminatedUnion('type', [\n z.object({\n type: z.literal('text'),\n text: z.string(),\n }),\n z.object({\n type: z.literal('image_url'),\n image_url: z.union([\n z.string(),\n z.object({\n url: z.string(),\n detail: z.string().nullable(),\n }),\n ]),\n }),\n z.object({\n type: z.literal('reference'),\n reference_ids: z.array(z.number()),\n }),\n z.object({\n type: z.literal('thinking'),\n thinking: z.array(\n z.object({\n type: z.literal('text'),\n text: z.string(),\n }),\n ),\n }),\n ]),\n ),\n ])\n .nullish();\n\nconst mistralUsageSchema = z.object({\n prompt_tokens: z.number(),\n completion_tokens: z.number(),\n total_tokens: z.number(),\n});\n\n// limited version of the schema, focussed on what is needed for the implementation\n// this approach limits breakages when the API changes and increases efficiency\nconst mistralChatResponseSchema = z.object({\n id: z.string().nullish(),\n created: z.number().nullish(),\n model: z.string().nullish(),\n choices: z.array(\n z.object({\n message: z.object({\n role: z.literal('assistant'),\n content: mistralContentSchema,\n tool_calls: z\n .array(\n z.object({\n id: z.string(),\n function: z.object({ name: z.string(), arguments: z.string() }),\n }),\n )\n .nullish(),\n }),\n index: z.number(),\n finish_reason: z.string().nullish(),\n }),\n ),\n object: z.literal('chat.completion'),\n usage: mistralUsageSchema,\n});\n\n// limited version of the schema, focussed on what is needed for the implementation\n// this approach limits breakages when the API changes and increases efficiency\nconst mistralChatChunkSchema = z.object({\n id: z.string().nullish(),\n created: z.number().nullish(),\n model: z.string().nullish(),\n choices: z.array(\n z.object({\n delta: z.object({\n role: z.enum(['assistant']).optional(),\n content: mistralContentSchema,\n tool_calls: z\n .array(\n z.object({\n id: z.string(),\n function: z.object({ name: z.string(), arguments: z.string() }),\n }),\n )\n .nullish(),\n }),\n finish_reason: z.string().nullish(),\n index: z.number(),\n }),\n ),\n usage: mistralUsageSchema.nullish(),\n});\n","import {\n LanguageModelV2Prompt,\n UnsupportedFunctionalityError,\n} from '@ai-sdk/provider';\nimport { MistralPrompt } from './mistral-chat-prompt';\nimport { convertToBase64 } from '@ai-sdk/provider-utils';\n\nexport function convertToMistralChatMessages(\n prompt: LanguageModelV2Prompt,\n): MistralPrompt {\n const messages: MistralPrompt = [];\n\n for (let i = 0; i < prompt.length; i++) {\n const { role, content } = prompt[i];\n const isLastMessage = i === prompt.length - 1;\n\n switch (role) {\n case 'system': {\n messages.push({ role: 'system', content });\n break;\n }\n\n case 'user': {\n messages.push({\n role: 'user',\n content: content.map(part => {\n switch (part.type) {\n case 'text': {\n return { type: 'text', text: part.text };\n }\n\n case 'file': {\n if (part.mediaType.startsWith('image/')) {\n const mediaType =\n part.mediaType === 'image/*'\n ? 'image/jpeg'\n : part.mediaType;\n\n return {\n type: 'image_url',\n image_url:\n part.data instanceof URL\n ? 
part.data.toString()\n : `data:${mediaType};base64,${convertToBase64(part.data)}`,\n };\n } else if (part.mediaType === 'application/pdf') {\n return {\n type: 'document_url',\n document_url: part.data.toString(),\n };\n } else {\n throw new UnsupportedFunctionalityError({\n functionality:\n 'Only images and PDF file parts are supported',\n });\n }\n }\n }\n }),\n });\n break;\n }\n\n case 'assistant': {\n let text = '';\n const toolCalls: Array<{\n id: string;\n type: 'function';\n function: { name: string; arguments: string };\n }> = [];\n\n for (const part of content) {\n switch (part.type) {\n case 'text': {\n text += part.text;\n break;\n }\n case 'tool-call': {\n toolCalls.push({\n id: part.toolCallId,\n type: 'function',\n function: {\n name: part.toolName,\n arguments: JSON.stringify(part.input),\n },\n });\n break;\n }\n case 'reasoning': {\n text += part.text;\n break;\n }\n default: {\n throw new Error(\n `Unsupported content type in assistant message: ${part.type}`,\n );\n }\n }\n }\n\n messages.push({\n role: 'assistant',\n content: text,\n prefix: isLastMessage ? true : undefined,\n tool_calls: toolCalls.length > 0 ? toolCalls : undefined,\n });\n\n break;\n }\n case 'tool': {\n for (const toolResponse of content) {\n const output = toolResponse.output;\n\n let contentValue: string;\n switch (output.type) {\n case 'text':\n case 'error-text':\n contentValue = output.value;\n break;\n case 'content':\n case 'json':\n case 'error-json':\n contentValue = JSON.stringify(output.value);\n break;\n }\n\n messages.push({\n role: 'tool',\n name: toolResponse.toolName,\n tool_call_id: toolResponse.toolCallId,\n content: contentValue,\n });\n }\n break;\n }\n default: {\n const _exhaustiveCheck: never = role;\n throw new Error(`Unsupported role: ${_exhaustiveCheck}`);\n }\n }\n }\n\n return messages;\n}\n","export function getResponseMetadata({\n id,\n model,\n created,\n}: {\n id?: string | undefined | null;\n created?: number | undefined | null;\n model?: string | undefined | null;\n}) {\n return {\n id: id ?? undefined,\n modelId: model ?? undefined,\n timestamp: created != null ? 
new Date(created * 1000) : undefined,\n };\n}\n","import { LanguageModelV2FinishReason } from '@ai-sdk/provider';\n\nexport function mapMistralFinishReason(\n finishReason: string | null | undefined,\n): LanguageModelV2FinishReason {\n switch (finishReason) {\n case 'stop':\n return 'stop';\n case 'length':\n case 'model_length':\n return 'length';\n case 'tool_calls':\n return 'tool-calls';\n default:\n return 'unknown';\n }\n}\n","import { z } from 'zod/v4';\n\n// https://docs.mistral.ai/getting-started/models/models_overview/\nexport type MistralChatModelId =\n // premier\n | 'ministral-3b-latest'\n | 'ministral-8b-latest'\n | 'mistral-large-latest'\n | 'mistral-medium-latest'\n | 'mistral-medium-2508'\n | 'mistral-medium-2505'\n | 'mistral-small-latest'\n | 'pixtral-large-latest'\n // reasoning models\n | 'magistral-small-2507'\n | 'magistral-medium-2507'\n | 'magistral-small-2506'\n | 'magistral-medium-2506'\n // free\n | 'pixtral-12b-2409'\n // legacy\n | 'open-mistral-7b'\n | 'open-mixtral-8x7b'\n | 'open-mixtral-8x22b'\n | (string & {});\n\nexport const mistralLanguageModelOptions = z.object({\n /**\nWhether to inject a safety prompt before all conversations.\n\nDefaults to `false`.\n */\n safePrompt: z.boolean().optional(),\n\n documentImageLimit: z.number().optional(),\n documentPageLimit: z.number().optional(),\n\n /**\n * Whether to use structured outputs.\n *\n * @default true\n */\n structuredOutputs: z.boolean().optional(),\n\n /**\n * Whether to use strict JSON schema validation.\n *\n * @default false\n */\n strictJsonSchema: z.boolean().optional(),\n});\n\nexport type MistralLanguageModelOptions = z.infer<\n typeof mistralLanguageModelOptions\n>;\n","import { createJsonErrorResponseHandler } from '@ai-sdk/provider-utils';\nimport { z } from 'zod/v4';\n\nconst mistralErrorDataSchema = z.object({\n object: z.literal('error'),\n message: z.string(),\n type: z.string(),\n param: z.string().nullable(),\n code: z.string().nullable(),\n});\n\nexport type MistralErrorData = z.infer<typeof mistralErrorDataSchema>;\n\nexport const mistralFailedResponseHandler = createJsonErrorResponseHandler({\n errorSchema: mistralErrorDataSchema,\n errorToMessage: data => data.message,\n});\n","import {\n LanguageModelV2CallOptions,\n LanguageModelV2CallWarning,\n UnsupportedFunctionalityError,\n} from '@ai-sdk/provider';\nimport { MistralToolChoice } from './mistral-chat-prompt';\n\nexport function prepareTools({\n tools,\n toolChoice,\n}: {\n tools: LanguageModelV2CallOptions['tools'];\n toolChoice?: LanguageModelV2CallOptions['toolChoice'];\n}): {\n tools:\n | Array<{\n type: 'function';\n function: {\n name: string;\n description: string | undefined;\n parameters: unknown;\n };\n }>\n | undefined;\n toolChoice: MistralToolChoice | undefined;\n toolWarnings: LanguageModelV2CallWarning[];\n} {\n // when the tools array is empty, change it to undefined to prevent errors:\n tools = tools?.length ? 
tools : undefined;\n\n const toolWarnings: LanguageModelV2CallWarning[] = [];\n\n if (tools == null) {\n return { tools: undefined, toolChoice: undefined, toolWarnings };\n }\n\n const mistralTools: Array<{\n type: 'function';\n function: {\n name: string;\n description: string | undefined;\n parameters: unknown;\n };\n }> = [];\n\n for (const tool of tools) {\n if (tool.type === 'provider-defined') {\n toolWarnings.push({ type: 'unsupported-tool', tool });\n } else {\n mistralTools.push({\n type: 'function',\n function: {\n name: tool.name,\n description: tool.description,\n parameters: tool.inputSchema,\n },\n });\n }\n }\n\n if (toolChoice == null) {\n return { tools: mistralTools, toolChoice: undefined, toolWarnings };\n }\n\n const type = toolChoice.type;\n\n switch (type) {\n case 'auto':\n case 'none':\n return { tools: mistralTools, toolChoice: type, toolWarnings };\n case 'required':\n return { tools: mistralTools, toolChoice: 'any', toolWarnings };\n\n // mistral does not support tool mode directly,\n // so we filter the tools and force the tool choice through 'any'\n case 'tool':\n return {\n tools: mistralTools.filter(\n tool => tool.function.name === toolChoice.toolName,\n ),\n toolChoice: 'any',\n toolWarnings,\n };\n default: {\n const _exhaustiveCheck: never = type;\n throw new UnsupportedFunctionalityError({\n functionality: `tool choice type: ${_exhaustiveCheck}`,\n });\n }\n }\n}\n","import {\n EmbeddingModelV2,\n TooManyEmbeddingValuesForCallError,\n} from '@ai-sdk/provider';\nimport {\n combineHeaders,\n createJsonResponseHandler,\n FetchFunction,\n postJsonToApi,\n} from '@ai-sdk/provider-utils';\nimport { z } from 'zod/v4';\nimport { MistralEmbeddingModelId } from './mistral-embedding-options';\nimport { mistralFailedResponseHandler } from './mistral-error';\n\ntype MistralEmbeddingConfig = {\n provider: string;\n baseURL: string;\n headers: () => Record<string, string | undefined>;\n fetch?: FetchFunction;\n};\n\nexport class MistralEmbeddingModel implements EmbeddingModelV2<string> {\n readonly specificationVersion = 'v2';\n readonly modelId: MistralEmbeddingModelId;\n readonly maxEmbeddingsPerCall = 32;\n readonly supportsParallelCalls = false;\n\n private readonly config: MistralEmbeddingConfig;\n\n get provider(): string {\n return this.config.provider;\n }\n\n constructor(\n modelId: MistralEmbeddingModelId,\n config: MistralEmbeddingConfig,\n ) {\n this.modelId = modelId;\n this.config = config;\n }\n\n async doEmbed({\n values,\n abortSignal,\n headers,\n }: Parameters<EmbeddingModelV2<string>['doEmbed']>[0]): Promise<\n Awaited<ReturnType<EmbeddingModelV2<string>['doEmbed']>>\n > {\n if (values.length > this.maxEmbeddingsPerCall) {\n throw new TooManyEmbeddingValuesForCallError({\n provider: this.provider,\n modelId: this.modelId,\n maxEmbeddingsPerCall: this.maxEmbeddingsPerCall,\n values,\n });\n }\n\n const {\n responseHeaders,\n value: response,\n rawValue,\n } = await postJsonToApi({\n url: `${this.config.baseURL}/embeddings`,\n headers: combineHeaders(this.config.headers(), headers),\n body: {\n model: this.modelId,\n input: values,\n encoding_format: 'float',\n },\n failedResponseHandler: mistralFailedResponseHandler,\n successfulResponseHandler: createJsonResponseHandler(\n MistralTextEmbeddingResponseSchema,\n ),\n abortSignal,\n fetch: this.config.fetch,\n });\n\n return {\n embeddings: response.data.map(item => item.embedding),\n usage: response.usage\n ? 
{ tokens: response.usage.prompt_tokens }\n : undefined,\n response: { headers: responseHeaders, body: rawValue },\n };\n }\n}\n\n// minimal version of the schema, focussed on what is needed for the implementation\n// this approach limits breakages when the API changes and increases efficiency\nconst MistralTextEmbeddingResponseSchema = z.object({\n data: z.array(z.object({ embedding: z.array(z.number()) })),\n usage: z.object({ prompt_tokens: z.number() }).nullish(),\n});\n"],"mappings":";;;;;;;;;;;;;;;;;;;;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;;;ACAA,IAAAA,mBAKO;AACP,IAAAC,yBAIO;;;ACFP,IAAAC,yBAUO;AACP,IAAAC,aAAkB;;;ACnBlB,sBAGO;AAEP,4BAAgC;AAEzB,SAAS,6BACd,QACe;AACf,QAAM,WAA0B,CAAC;AAEjC,WAAS,IAAI,GAAG,IAAI,OAAO,QAAQ,KAAK;AACtC,UAAM,EAAE,MAAM,QAAQ,IAAI,OAAO,CAAC;AAClC,UAAM,gBAAgB,MAAM,OAAO,SAAS;AAE5C,YAAQ,MAAM;AAAA,MACZ,KAAK,UAAU;AACb,iBAAS,KAAK,EAAE,MAAM,UAAU,QAAQ,CAAC;AACzC;AAAA,MACF;AAAA,MAEA,KAAK,QAAQ;AACX,iBAAS,KAAK;AAAA,UACZ,MAAM;AAAA,UACN,SAAS,QAAQ,IAAI,UAAQ;AAC3B,oBAAQ,KAAK,MAAM;AAAA,cACjB,KAAK,QAAQ;AACX,uBAAO,EAAE,MAAM,QAAQ,MAAM,KAAK,KAAK;AAAA,cACzC;AAAA,cAEA,KAAK,QAAQ;AACX,oBAAI,KAAK,UAAU,WAAW,QAAQ,GAAG;AACvC,wBAAM,YACJ,KAAK,cAAc,YACf,eACA,KAAK;AAEX,yBAAO;AAAA,oBACL,MAAM;AAAA,oBACN,WACE,KAAK,gBAAgB,MACjB,KAAK,KAAK,SAAS,IACnB,QAAQ,SAAS,eAAW,uCAAgB,KAAK,IAAI,CAAC;AAAA,kBAC9D;AAAA,gBACF,WAAW,KAAK,cAAc,mBAAmB;AAC/C,yBAAO;AAAA,oBACL,MAAM;AAAA,oBACN,cAAc,KAAK,KAAK,SAAS;AAAA,kBACnC;AAAA,gBACF,OAAO;AACL,wBAAM,IAAI,8CAA8B;AAAA,oBACtC,eACE;AAAA,kBACJ,CAAC;AAAA,gBACH;AAAA,cACF;AAAA,YACF;AAAA,UACF,CAAC;AAAA,QACH,CAAC;AACD;AAAA,MACF;AAAA,MAEA,KAAK,aAAa;AAChB,YAAI,OAAO;AACX,cAAM,YAID,CAAC;AAEN,mBAAW,QAAQ,SAAS;AAC1B,kBAAQ,KAAK,MAAM;AAAA,YACjB,KAAK,QAAQ;AACX,sBAAQ,KAAK;AACb;AAAA,YACF;AAAA,YACA,KAAK,aAAa;AAChB,wBAAU,KAAK;AAAA,gBACb,IAAI,KAAK;AAAA,gBACT,MAAM;AAAA,gBACN,UAAU;AAAA,kBACR,MAAM,KAAK;AAAA,kBACX,WAAW,KAAK,UAAU,KAAK,KAAK;AAAA,gBACtC;AAAA,cACF,CAAC;AACD;AAAA,YACF;AAAA,YACA,KAAK,aAAa;AAChB,sBAAQ,KAAK;AACb;AAAA,YACF;AAAA,YACA,SAAS;AACP,oBAAM,IAAI;AAAA,gBACR,kDAAkD,KAAK,IAAI;AAAA,cAC7D;AAAA,YACF;AAAA,UACF;AAAA,QACF;AAEA,iBAAS,KAAK;AAAA,UACZ,MAAM;AAAA,UACN,SAAS;AAAA,UACT,QAAQ,gBAAgB,OAAO;AAAA,UAC/B,YAAY,UAAU,SAAS,IAAI,YAAY;AAAA,QACjD,CAAC;AAED;AAAA,MACF;AAAA,MACA,KAAK,QAAQ;AACX,mBAAW,gBAAgB,SAAS;AAClC,gBAAM,SAAS,aAAa;AAE5B,cAAI;AACJ,kBAAQ,OAAO,MAAM;AAAA,YACnB,KAAK;AAAA,YACL,KAAK;AACH,6BAAe,OAAO;AACtB;AAAA,YACF,KAAK;AAAA,YACL,KAAK;AAAA,YACL,KAAK;AACH,6BAAe,KAAK,UAAU,OAAO,KAAK;AAC1C;AAAA,UACJ;AAEA,mBAAS,KAAK;AAAA,YACZ,MAAM;AAAA,YACN,MAAM,aAAa;AAAA,YACnB,cAAc,aAAa;AAAA,YAC3B,SAAS;AAAA,UACX,CAAC;AAAA,QACH;AACA;AAAA,MACF;AAAA,MACA,SAAS;AACP,cAAM,mBAA0B;AAChC,cAAM,IAAI,MAAM,qBAAqB,gBAAgB,EAAE;AAAA,MACzD;AAAA,IACF;AAAA,EACF;AAEA,SAAO;AACT;;;AC/IO,SAAS,oBAAoB;AAAA,EAClC;AAAA,EACA;AAAA,EACA;AACF,GAIG;AACD,SAAO;AAAA,IACL,IAAI,kBAAM;AAAA,IACV,SAAS,wBAAS;AAAA,IAClB,WAAW,WAAW,OAAO,IAAI,KAAK,UAAU,GAAI,IAAI;AAAA,EAC1D;AACF;;;ACZO,SAAS,uBACd,cAC6B;AAC7B,UAAQ,cAAc;AAAA,IACpB,KAAK;AACH,aAAO;AAAA,IACT,KAAK;AAAA,IACL,KAAK;AACH,aAAO;AAAA,IACT,KAAK;AACH,aAAO;AAAA,IACT;AACE,aAAO;AAAA,EACX;AACF;;;AChBA,gBAAkB;AA0BX,IAAM,8BAA8B,YAAE,OAAO;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,EAMlD,YAAY,YAAE,QAAQ,EAAE,SAAS;AAAA,EAEjC,oBAAoB,YAAE,OAAO,EAAE,SAAS;AAAA,EACxC,mBAAmB,YAAE,OAAO,EAAE,SAAS;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,EAOvC,mBAAmB,YAAE,QAAQ,EAAE,SAAS;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,EAOxC,kBAAkB,YAAE,QAAQ,EAAE,SAAS;AACzC,CAAC;;;AClDD,IAAAC,yBAA+C;AAC/C,IAAAC,aAAkB;AAElB,IAAM,yBAAyB,aAAE,OAAO;AAAA,EACtC,QAAQ,aAAE,QAAQ,OAAO;AAAA,EACzB,SAAS,aAAE,OAAO;AAAA,EAClB,MAAM,aAAE,OAAO;AAAA,EACf,OAAO,aAAE,OAAO,EAAE,SAAS;AAAA,EAC3B,MAAM,aAAE,OAAO,EAAE,SAAS;A
AC5B,CAAC;AAIM,IAAM,mCAA+B,uDAA+B;AAAA,EACzE,aAAa;AAAA,EACb,gBAAgB,UAAQ,KAAK;AAC/B,CAAC;;;AChBD,IAAAC,mBAIO;AAGA,SAAS,aAAa;AAAA,EAC3B;AAAA,EACA;AACF,GAgBE;AAEA,WAAQ,+BAAO,UAAS,QAAQ;AAEhC,QAAM,eAA6C,CAAC;AAEpD,MAAI,SAAS,MAAM;AACjB,WAAO,EAAE,OAAO,QAAW,YAAY,QAAW,aAAa;AAAA,EACjE;AAEA,QAAM,eAOD,CAAC;AAEN,aAAW,QAAQ,OAAO;AACxB,QAAI,KAAK,SAAS,oBAAoB;AACpC,mBAAa,KAAK,EAAE,MAAM,oBAAoB,KAAK,CAAC;AAAA,IACtD,OAAO;AACL,mBAAa,KAAK;AAAA,QAChB,MAAM;AAAA,QACN,UAAU;AAAA,UACR,MAAM,KAAK;AAAA,UACX,aAAa,KAAK;AAAA,UAClB,YAAY,KAAK;AAAA,QACnB;AAAA,MACF,CAAC;AAAA,IACH;AAAA,EACF;AAEA,MAAI,cAAc,MAAM;AACtB,WAAO,EAAE,OAAO,cAAc,YAAY,QAAW,aAAa;AAAA,EACpE;AAEA,QAAM,OAAO,WAAW;AAExB,UAAQ,MAAM;AAAA,IACZ,KAAK;AAAA,IACL,KAAK;AACH,aAAO,EAAE,OAAO,cAAc,YAAY,MAAM,aAAa;AAAA,IAC/D,KAAK;AACH,aAAO,EAAE,OAAO,cAAc,YAAY,OAAO,aAAa;AAAA,IAIhE,KAAK;AACH,aAAO;AAAA,QACL,OAAO,aAAa;AAAA,UAClB,UAAQ,KAAK,SAAS,SAAS,WAAW;AAAA,QAC5C;AAAA,QACA,YAAY;AAAA,QACZ;AAAA,MACF;AAAA,IACF,SAAS;AACP,YAAM,mBAA0B;AAChC,YAAM,IAAI,+CAA8B;AAAA,QACtC,eAAe,qBAAqB,gBAAgB;AAAA,MACtD,CAAC;AAAA,IACH;AAAA,EACF;AACF;;;ANpDO,IAAM,2BAAN,MAA0D;AAAA,EAQ/D,YAAY,SAA6B,QAA2B;AAPpE,SAAS,uBAAuB;AAiBhC,SAAS,gBAA0C;AAAA,MACjD,mBAAmB,CAAC,gBAAgB;AAAA,IACtC;AA1DF;AA+CI,SAAK,UAAU;AACf,SAAK,SAAS;AACd,SAAK,cAAa,YAAO,eAAP,YAAqB;AAAA,EACzC;AAAA,EAEA,IAAI,WAAmB;AACrB,WAAO,KAAK,OAAO;AAAA,EACrB;AAAA,EAMA,MAAc,QAAQ;AAAA,IACpB;AAAA,IACA;AAAA,IACA;AAAA,IACA;AAAA,IACA;AAAA,IACA;AAAA,IACA;AAAA,IACA;AAAA,IACA;AAAA,IACA;AAAA,IACA;AAAA,IACA;AAAA,IACA;AAAA,EACF,GAAiD;AA1EnD;AA2EI,UAAM,WAAyC,CAAC;AAEhD,UAAM,WACH,eAAM,6CAAqB;AAAA,MAC1B,UAAU;AAAA,MACV;AAAA,MACA,QAAQ;AAAA,IACV,CAAC,MAJA,YAIM,CAAC;AAEV,QAAI,QAAQ,MAAM;AAChB,eAAS,KAAK;AAAA,QACZ,MAAM;AAAA,QACN,SAAS;AAAA,MACX,CAAC;AAAA,IACH;AAEA,QAAI,oBAAoB,MAAM;AAC5B,eAAS,KAAK;AAAA,QACZ,MAAM;AAAA,QACN,SAAS;AAAA,MACX,CAAC;AAAA,IACH;AAEA,QAAI,mBAAmB,MAAM;AAC3B,eAAS,KAAK;AAAA,QACZ,MAAM;AAAA,QACN,SAAS;AAAA,MACX,CAAC;AAAA,IACH;AAEA,QAAI,iBAAiB,MAAM;AACzB,eAAS,KAAK;AAAA,QACZ,MAAM;AAAA,QACN,SAAS;AAAA,MACX,CAAC;AAAA,IACH;AAEA,UAAM,qBAAoB,aAAQ,sBAAR,YAA6B;AACvD,UAAM,oBAAmB,aAAQ,qBAAR,YAA4B;AAIrD,SAAI,iDAAgB,UAAS,UAAU,EAAC,iDAAgB,SAAQ;AAC9D,mBAAS,0DAAkC;AAAA,QACzC,UAAU;AAAA,QACV,QAAQ,eAAe;AAAA,MACzB,CAAC;AAAA,IACH;AAEA,UAAM,WAAW;AAAA;AAAA,MAEf,OAAO,KAAK;AAAA;AAAA,MAGZ,aAAa,QAAQ;AAAA;AAAA,MAGrB,YAAY;AAAA,MACZ;AAAA,MACA,OAAO;AAAA,MACP,aAAa;AAAA;AAAA,MAGb,kBACE,iDAAgB,UAAS,SACrB,sBAAqB,iDAAgB,WAAU,OAC7C;AAAA,QACE,MAAM;AAAA,QACN,aAAa;AAAA,UACX,QAAQ,eAAe;AAAA,UACvB,QAAQ;AAAA,UACR,OAAM,oBAAe,SAAf,YAAuB;AAAA,UAC7B,aAAa,eAAe;AAAA,QAC9B;AAAA,MACF,IACA,EAAE,MAAM,cAAc,IACxB;AAAA;AAAA,MAGN,sBAAsB,QAAQ;AAAA,MAC9B,qBAAqB,QAAQ;AAAA;AAAA,MAG7B,UAAU,6BAA6B,MAAM;AAAA,IAC/C;AAEA,UAAM;AAAA,MACJ,OAAO;AAAA,MACP,YAAY;AAAA,MACZ;AAAA,IACF,IAAI,aAAa;AAAA,MACf;AAAA,MACA;AAAA,IACF,CAAC;AAED,WAAO;AAAA,MACL,MAAM;AAAA,QACJ,GAAG;AAAA,QACH,OAAO;AAAA,QACP,aAAa;AAAA,MACf;AAAA,MACA,UAAU,CAAC,GAAG,UAAU,GAAG,YAAY;AAAA,IACzC;AAAA,EACF;AAAA,EAEA,MAAM,WACJ,SAC6D;AAC7D,UAAM,EAAE,MAAM,MAAM,SAAS,IAAI,MAAM,KAAK,QAAQ,OAAO;AAE3D,UAAM;AAAA,MACJ;AAAA,MACA,OAAO;AAAA,MACP,UAAU;AAAA,IACZ,IAAI,UAAM,sCAAc;AAAA,MACtB,KAAK,GAAG,KAAK,OAAO,OAAO;AAAA,MAC3B,aAAS,uCAAe,KAAK,OAAO,QAAQ,GAAG,QAAQ,OAAO;AAAA,MAC9D;AAAA,MACA,uBAAuB;AAAA,MACvB,+BAA2B;AAAA,QACzB;AAAA,MACF;AAAA,MACA,aAAa,QAAQ;AAAA,MACrB,OAAO,KAAK,OAAO;AAAA,IACrB,CAAC;AAED,UAAM,SAAS,SAAS,QAAQ,CAAC;AACjC,UAAM,UAAyC,CAAC;AAGhD,QACE,OAAO,QAAQ,WAAW,QAC1B,MAAM,QAAQ,OAAO,QAAQ,OAAO,GACpC;AACA,iBAAW,QAAQ,OAAO,QAAQ,SAAS;AACzC,YAAI,KAAK,SAAS,YAAY;AAC5B,gBAAM,gBAAgB,wBAAwB,KAAK,QAAQ;AAC3D,cAAI,cAAc,SAAS,GAAG;AAC5B,oBAAQ,KAAK,EAAE,MAAM,aAAa,MAAM,cAAc,CAAC;AAAA,UACzD;AAAA,QACF,WAAW,KAAK,SA
AS,QAAQ;AAC/B,cAAI,KAAK,KAAK,SAAS,GAAG;AACxB,oBAAQ,KAAK,EAAE,MAAM,QAAQ,MAAM,KAAK,KAAK,CAAC;AAAA,UAChD;AAAA,QACF;AAAA,MACF;AAAA,IACF,OAAO;AAEL,YAAM,OAAO,mBAAmB,OAAO,QAAQ,OAAO;AACtD,UAAI,QAAQ,QAAQ,KAAK,SAAS,GAAG;AACnC,gBAAQ,KAAK,EAAE,MAAM,QAAQ,KAAK,CAAC;AAAA,MACrC;AAAA,IACF;AAOA,QAAI,OAAO,QAAQ,cAAc,MAAM;AACrC,iBAAW,YAAY,OAAO,QAAQ,YAAY;AAChD,gBAAQ,KAAK;AAAA,UACX,MAAM;AAAA,UACN,YAAY,SAAS;AAAA,UACrB,UAAU,SAAS,SAAS;AAAA,UAC5B,OAAO,SAAS,SAAS;AAAA,QAC3B,CAAC;AAAA,MACH;AAAA,IACF;AAEA,WAAO;AAAA,MACL;AAAA,MACA,cAAc,uBAAuB,OAAO,aAAa;AAAA,MACzD,OAAO;AAAA,QACL,aAAa,SAAS,MAAM;AAAA,QAC5B,cAAc,SAAS,MAAM;AAAA,QAC7B,aAAa,SAAS,MAAM;AAAA,MAC9B;AAAA,MACA,SAAS,EAAE,KAAK;AAAA,MAChB,UAAU;AAAA,QACR,GAAG,oBAAoB,QAAQ;AAAA,QAC/B,SAAS;AAAA,QACT,MAAM;AAAA,MACR;AAAA,MACA;AAAA,IACF;AAAA,EACF;AAAA,EAEA,MAAM,SACJ,SAC2D;AAC3D,UAAM,EAAE,MAAM,SAAS,IAAI,MAAM,KAAK,QAAQ,OAAO;AACrD,UAAM,OAAO,EAAE,GAAG,MAAM,QAAQ,KAAK;AAErC,UAAM,EAAE,iBAAiB,OAAO,SAAS,IAAI,UAAM,sCAAc;AAAA,MAC/D,KAAK,GAAG,KAAK,OAAO,OAAO;AAAA,MAC3B,aAAS,uCAAe,KAAK,OAAO,QAAQ,GAAG,QAAQ,OAAO;AAAA,MAC9D;AAAA,MACA,uBAAuB;AAAA,MACvB,+BAA2B;AAAA,QACzB;AAAA,MACF;AAAA,MACA,aAAa,QAAQ;AAAA,MACrB,OAAO,KAAK,OAAO;AAAA,IACrB,CAAC;AAED,QAAI,eAA4C;AAChD,UAAM,QAA8B;AAAA,MAClC,aAAa;AAAA,MACb,cAAc;AAAA,MACd,aAAa;AAAA,IACf;AAEA,QAAI,eAAe;AACnB,QAAI,aAAa;AACjB,QAAI,oBAAmC;AAEvC,UAAMC,cAAa,KAAK;AAExB,WAAO;AAAA,MACL,QAAQ,SAAS;AAAA,QACf,IAAI,gBAGF;AAAA,UACA,MAAM,YAAY;AAChB,uBAAW,QAAQ,EAAE,MAAM,gBAAgB,SAAS,CAAC;AAAA,UACvD;AAAA,UAEA,UAAU,OAAO,YAAY;AAE3B,gBAAI,QAAQ,kBAAkB;AAC5B,yBAAW,QAAQ,EAAE,MAAM,OAAO,UAAU,MAAM,SAAS,CAAC;AAAA,YAC9D;AAEA,gBAAI,CAAC,MAAM,SAAS;AAClB,yBAAW,QAAQ,EAAE,MAAM,SAAS,OAAO,MAAM,MAAM,CAAC;AACxD;AAAA,YACF;AAEA,kBAAM,QAAQ,MAAM;AAEpB,gBAAI,cAAc;AAChB,6BAAe;AAEf,yBAAW,QAAQ;AAAA,gBACjB,MAAM;AAAA,gBACN,GAAG,oBAAoB,KAAK;AAAA,cAC9B,CAAC;AAAA,YACH;AAEA,gBAAI,MAAM,SAAS,MAAM;AACvB,oBAAM,cAAc,MAAM,MAAM;AAChC,oBAAM,eAAe,MAAM,MAAM;AACjC,oBAAM,cAAc,MAAM,MAAM;AAAA,YAClC;AAEA,kBAAM,SAAS,MAAM,QAAQ,CAAC;AAC9B,kBAAM,QAAQ,OAAO;AAErB,kBAAM,cAAc,mBAAmB,MAAM,OAAO;AAEpD,gBAAI,MAAM,WAAW,QAAQ,MAAM,QAAQ,MAAM,OAAO,GAAG;AACzD,yBAAW,QAAQ,MAAM,SAAS;AAChC,oBAAI,KAAK,SAAS,YAAY;AAC5B,wBAAM,iBAAiB,wBAAwB,KAAK,QAAQ;AAC5D,sBAAI,eAAe,SAAS,GAAG;AAC7B,wBAAI,qBAAqB,MAAM;AAE7B,0BAAI,YAAY;AACd,mCAAW,QAAQ,EAAE,MAAM,YAAY,IAAI,IAAI,CAAC;AAChD,qCAAa;AAAA,sBACf;AAEA,0CAAoBA,YAAW;AAC/B,iCAAW,QAAQ;AAAA,wBACjB,MAAM;AAAA,wBACN,IAAI;AAAA,sBACN,CAAC;AAAA,oBACH;AACA,+BAAW,QAAQ;AAAA,sBACjB,MAAM;AAAA,sBACN,IAAI;AAAA,sBACJ,OAAO;AAAA,oBACT,CAAC;AAAA,kBACH;AAAA,gBACF;AAAA,cACF;AAAA,YACF;AAEA,gBAAI,eAAe,QAAQ,YAAY,SAAS,GAAG;AACjD,kBAAI,CAAC,YAAY;AAEf,oBAAI,qBAAqB,MAAM;AAC7B,6BAAW,QAAQ;AAAA,oBACjB,MAAM;AAAA,oBACN,IAAI;AAAA,kBACN,CAAC;AACD,sCAAoB;AAAA,gBACtB;AACA,2BAAW,QAAQ,EAAE,MAAM,cAAc,IAAI,IAAI,CAAC;AAClD,6BAAa;AAAA,cACf;AAEA,yBAAW,QAAQ;AAAA,gBACjB,MAAM;AAAA,gBACN,IAAI;AAAA,gBACJ,OAAO;AAAA,cACT,CAAC;AAAA,YACH;AAEA,iBAAI,+BAAO,eAAc,MAAM;AAC7B,yBAAW,YAAY,MAAM,YAAY;AACvC,sBAAM,aAAa,SAAS;AAC5B,sBAAM,WAAW,SAAS,SAAS;AACnC,sBAAM,QAAQ,SAAS,SAAS;AAEhC,2BAAW,QAAQ;AAAA,kBACjB,MAAM;AAAA,kBACN,IAAI;AAAA,kBACJ;AAAA,gBACF,CAAC;AAED,2BAAW,QAAQ;AAAA,kBACjB,MAAM;AAAA,kBACN,IAAI;AAAA,kBACJ,OAAO;AAAA,gBACT,CAAC;AAED,2BAAW,QAAQ;AAAA,kBACjB,MAAM;AAAA,kBACN,IAAI;AAAA,gBACN,CAAC;AAED,2BAAW,QAAQ;AAAA,kBACjB,MAAM;AAAA,kBACN;AAAA,kBACA;AAAA,kBACA;AAAA,gBACF,CAAC;AAAA,cACH;AAAA,YACF;AAEA,gBAAI,OAAO,iBAAiB,MAAM;AAChC,6BAAe,uBAAuB,OAAO,aAAa;AAAA,YAC5D;AAAA,UACF;AAAA,UAEA,MAAM,YAAY;AAChB,gBAAI,qBAAqB,MAAM;AAC7B,yBAAW,QAAQ;AAAA,gBACjB,MAAM;AAAA,gBACN,IAAI;AAAA,cACN,CAAC;AAAA,YACH;AACA,gBAAI,YAAY;AACd,yBAAW,QAAQ,EAAE,MAAM,YAAY,IAAI,IAAI,CAAC;AAAA,YAClD;AAEA,uBAAW,QAAQ
;AAAA,cACjB,MAAM;AAAA,cACN;AAAA,cACA;AAAA,YACF,CAAC;AAAA,UACH;AAAA,QACF,CAAC;AAAA,MACH;AAAA,MACA,SAAS,EAAE,KAAK;AAAA,MAChB,UAAU,EAAE,SAAS,gBAAgB;AAAA,IACvC;AAAA,EACF;AACF;AAEA,SAAS,wBACP,UACA;AACA,SAAO,SACJ,OAAO,WAAS,MAAM,SAAS,MAAM,EACrC,IAAI,WAAS,MAAM,IAAI,EACvB,KAAK,EAAE;AACZ;AAEA,SAAS,mBAAmB,SAA+C;AACzE,MAAI,OAAO,YAAY,UAAU;AAC/B,WAAO;AAAA,EACT;AAEA,MAAI,WAAW,MAAM;AACnB,WAAO;AAAA,EACT;AAEA,QAAM,cAAwB,CAAC;AAE/B,aAAW,SAAS,SAAS;AAC3B,UAAM,EAAE,KAAK,IAAI;AAEjB,YAAQ,MAAM;AAAA,MACZ,KAAK;AACH,oBAAY,KAAK,MAAM,IAAI;AAC3B;AAAA,MACF,KAAK;AAAA,MACL,KAAK;AAAA,MACL,KAAK;AAEH;AAAA,MACF,SAAS;AACP,cAAM,mBAA0B;AAChC,cAAM,IAAI,MAAM,qBAAqB,gBAAgB,EAAE;AAAA,MACzD;AAAA,IACF;AAAA,EACF;AAEA,SAAO,YAAY,SAAS,YAAY,KAAK,EAAE,IAAI;AACrD;AAEA,IAAM,uBAAuB,aAC1B,MAAM;AAAA,EACL,aAAE,OAAO;AAAA,EACT,aAAE;AAAA,IACA,aAAE,mBAAmB,QAAQ;AAAA,MAC3B,aAAE,OAAO;AAAA,QACP,MAAM,aAAE,QAAQ,MAAM;AAAA,QACtB,MAAM,aAAE,OAAO;AAAA,MACjB,CAAC;AAAA,MACD,aAAE,OAAO;AAAA,QACP,MAAM,aAAE,QAAQ,WAAW;AAAA,QAC3B,WAAW,aAAE,MAAM;AAAA,UACjB,aAAE,OAAO;AAAA,UACT,aAAE,OAAO;AAAA,YACP,KAAK,aAAE,OAAO;AAAA,YACd,QAAQ,aAAE,OAAO,EAAE,SAAS;AAAA,UAC9B,CAAC;AAAA,QACH,CAAC;AAAA,MACH,CAAC;AAAA,MACD,aAAE,OAAO;AAAA,QACP,MAAM,aAAE,QAAQ,WAAW;AAAA,QAC3B,eAAe,aAAE,MAAM,aAAE,OAAO,CAAC;AAAA,MACnC,CAAC;AAAA,MACD,aAAE,OAAO;AAAA,QACP,MAAM,aAAE,QAAQ,UAAU;AAAA,QAC1B,UAAU,aAAE;AAAA,UACV,aAAE,OAAO;AAAA,YACP,MAAM,aAAE,QAAQ,MAAM;AAAA,YACtB,MAAM,aAAE,OAAO;AAAA,UACjB,CAAC;AAAA,QACH;AAAA,MACF,CAAC;AAAA,IACH,CAAC;AAAA,EACH;AACF,CAAC,EACA,QAAQ;AAEX,IAAM,qBAAqB,aAAE,OAAO;AAAA,EAClC,eAAe,aAAE,OAAO;AAAA,EACxB,mBAAmB,aAAE,OAAO;AAAA,EAC5B,cAAc,aAAE,OAAO;AACzB,CAAC;AAID,IAAM,4BAA4B,aAAE,OAAO;AAAA,EACzC,IAAI,aAAE,OAAO,EAAE,QAAQ;AAAA,EACvB,SAAS,aAAE,OAAO,EAAE,QAAQ;AAAA,EAC5B,OAAO,aAAE,OAAO,EAAE,QAAQ;AAAA,EAC1B,SAAS,aAAE;AAAA,IACT,aAAE,OAAO;AAAA,MACP,SAAS,aAAE,OAAO;AAAA,QAChB,MAAM,aAAE,QAAQ,WAAW;AAAA,QAC3B,SAAS;AAAA,QACT,YAAY,aACT;AAAA,UACC,aAAE,OAAO;AAAA,YACP,IAAI,aAAE,OAAO;AAAA,YACb,UAAU,aAAE,OAAO,EAAE,MAAM,aAAE,OAAO,GAAG,WAAW,aAAE,OAAO,EAAE,CAAC;AAAA,UAChE,CAAC;AAAA,QACH,EACC,QAAQ;AAAA,MACb,CAAC;AAAA,MACD,OAAO,aAAE,OAAO;AAAA,MAChB,eAAe,aAAE,OAAO,EAAE,QAAQ;AAAA,IACpC,CAAC;AAAA,EACH;AAAA,EACA,QAAQ,aAAE,QAAQ,iBAAiB;AAAA,EACnC,OAAO;AACT,CAAC;AAID,IAAM,yBAAyB,aAAE,OAAO;AAAA,EACtC,IAAI,aAAE,OAAO,EAAE,QAAQ;AAAA,EACvB,SAAS,aAAE,OAAO,EAAE,QAAQ;AAAA,EAC5B,OAAO,aAAE,OAAO,EAAE,QAAQ;AAAA,EAC1B,SAAS,aAAE;AAAA,IACT,aAAE,OAAO;AAAA,MACP,OAAO,aAAE,OAAO;AAAA,QACd,MAAM,aAAE,KAAK,CAAC,WAAW,CAAC,EAAE,SAAS;AAAA,QACrC,SAAS;AAAA,QACT,YAAY,aACT;AAAA,UACC,aAAE,OAAO;AAAA,YACP,IAAI,aAAE,OAAO;AAAA,YACb,UAAU,aAAE,OAAO,EAAE,MAAM,aAAE,OAAO,GAAG,WAAW,aAAE,OAAO,EAAE,CAAC;AAAA,UAChE,CAAC;AAAA,QACH,EACC,QAAQ;AAAA,MACb,CAAC;AAAA,MACD,eAAe,aAAE,OAAO,EAAE,QAAQ;AAAA,MAClC,OAAO,aAAE,OAAO;AAAA,IAClB,CAAC;AAAA,EACH;AAAA,EACA,OAAO,mBAAmB,QAAQ;AACpC,CAAC;;;AO1kBD,IAAAC,mBAGO;AACP,IAAAC,yBAKO;AACP,IAAAC,aAAkB;AAWX,IAAM,wBAAN,MAAgE;AAAA,EAYrE,YACE,SACA,QACA;AAdF,SAAS,uBAAuB;AAEhC,SAAS,uBAAuB;AAChC,SAAS,wBAAwB;AAY/B,SAAK,UAAU;AACf,SAAK,SAAS;AAAA,EAChB;AAAA,EAVA,IAAI,WAAmB;AACrB,WAAO,KAAK,OAAO;AAAA,EACrB;AAAA,EAUA,MAAM,QAAQ;AAAA,IACZ;AAAA,IACA;AAAA,IACA;AAAA,EACF,GAEE;AACA,QAAI,OAAO,SAAS,KAAK,sBAAsB;AAC7C,YAAM,IAAI,oDAAmC;AAAA,QAC3C,UAAU,KAAK;AAAA,QACf,SAAS,KAAK;AAAA,QACd,sBAAsB,KAAK;AAAA,QAC3B;AAAA,MACF,CAAC;AAAA,IACH;AAEA,UAAM;AAAA,MACJ;AAAA,MACA,OAAO;AAAA,MACP;AAAA,IACF,IAAI,UAAM,sCAAc;AAAA,MACtB,KAAK,GAAG,KAAK,OAAO,OAAO;AAAA,MAC3B,aAAS,uCAAe,KAAK,OAAO,QAAQ,GAAG,OAAO;AAAA,MACtD,MAAM;AAAA,QACJ,OAAO,KAAK;AAAA,QACZ,OAAO;AAAA,QACP,iBAAiB;AAAA,MACnB;AAAA,MACA,uBAAuB;AAAA,MACvB,+BAA2B;AAAA,QACzB;AAAA,MACF;AAAA,MACA;AAAA,MACA,OAAO,KAAK,OAAO;AAAA,IACrB
,CAAC;AAED,WAAO;AAAA,MACL,YAAY,SAAS,KAAK,IAAI,UAAQ,KAAK,SAAS;AAAA,MACpD,OAAO,SAAS,QACZ,EAAE,QAAQ,SAAS,MAAM,cAAc,IACvC;AAAA,MACJ,UAAU,EAAE,SAAS,iBAAiB,MAAM,SAAS;AAAA,IACvD;AAAA,EACF;AACF;AAIA,IAAM,qCAAqC,aAAE,OAAO;AAAA,EAClD,MAAM,aAAE,MAAM,aAAE,OAAO,EAAE,WAAW,aAAE,MAAM,aAAE,OAAO,CAAC,EAAE,CAAC,CAAC;AAAA,EAC1D,OAAO,aAAE,OAAO,EAAE,eAAe,aAAE,OAAO,EAAE,CAAC,EAAE,QAAQ;AACzD,CAAC;;;ARrBM,SAAS,cACd,UAAmC,CAAC,GACnB;AAzEnB;AA0EE,QAAM,WACJ,sDAAqB,QAAQ,OAAO,MAApC,YAAyC;AAE3C,QAAM,aAAa,OAAO;AAAA,IACxB,eAAe,cAAU,mCAAW;AAAA,MAClC,QAAQ,QAAQ;AAAA,MAChB,yBAAyB;AAAA,MACzB,aAAa;AAAA,IACf,CAAC,CAAC;AAAA,IACF,GAAG,QAAQ;AAAA,EACb;AAEA,QAAM,kBAAkB,CAAC,YACvB,IAAI,yBAAyB,SAAS;AAAA,IACpC,UAAU;AAAA,IACV;AAAA,IACA,SAAS;AAAA,IACT,OAAO,QAAQ;AAAA,IACf,YAAY,QAAQ;AAAA,EACtB,CAAC;AAEH,QAAM,uBAAuB,CAAC,YAC5B,IAAI,sBAAsB,SAAS;AAAA,IACjC,UAAU;AAAA,IACV;AAAA,IACA,SAAS;AAAA,IACT,OAAO,QAAQ;AAAA,EACjB,CAAC;AAEH,QAAM,WAAW,SAAU,SAA6B;AACtD,QAAI,YAAY;AACd,YAAM,IAAI;AAAA,QACR;AAAA,MACF;AAAA,IACF;AAEA,WAAO,gBAAgB,OAAO;AAAA,EAChC;AAEA,WAAS,gBAAgB;AACzB,WAAS,OAAO;AAChB,WAAS,YAAY;AACrB,WAAS,gBAAgB;AACzB,WAAS,qBAAqB;AAE9B,WAAS,aAAa,CAAC,YAAoB;AACzC,UAAM,IAAI,kCAAiB,EAAE,SAAS,WAAW,aAAa,CAAC;AAAA,EACjE;AAEA,SAAO;AACT;AAKO,IAAM,UAAU,cAAc;","names":["import_provider","import_provider_utils","import_provider_utils","import_v4","import_provider_utils","import_v4","import_provider","generateId","import_provider","import_provider_utils","import_v4"]}
+ {"version":3,"sources":["../src/index.ts","../src/mistral-provider.ts","../src/mistral-chat-language-model.ts","../src/convert-to-mistral-chat-messages.ts","../src/get-response-metadata.ts","../src/map-mistral-finish-reason.ts","../src/mistral-chat-options.ts","../src/mistral-error.ts","../src/mistral-prepare-tools.ts","../src/mistral-embedding-model.ts","../src/version.ts"],"sourcesContent":["export { createMistral, mistral } from './mistral-provider';\nexport type {\n MistralProvider,\n MistralProviderSettings,\n} from './mistral-provider';\nexport type { MistralLanguageModelOptions } from './mistral-chat-options';\nexport { VERSION } from './version';\n","import {\n EmbeddingModelV3,\n LanguageModelV3,\n NoSuchModelError,\n ProviderV3,\n} from '@ai-sdk/provider';\nimport {\n FetchFunction,\n loadApiKey,\n withoutTrailingSlash,\n withUserAgentSuffix,\n} from '@ai-sdk/provider-utils';\nimport { MistralChatLanguageModel } from './mistral-chat-language-model';\nimport { MistralChatModelId } from './mistral-chat-options';\nimport { MistralEmbeddingModel } from './mistral-embedding-model';\nimport { MistralEmbeddingModelId } from './mistral-embedding-options';\nimport { VERSION } from './version';\n\nexport interface MistralProvider extends ProviderV3 {\n (modelId: MistralChatModelId): LanguageModelV3;\n\n /**\nCreates a model for text generation.\n*/\n languageModel(modelId: MistralChatModelId): LanguageModelV3;\n\n /**\nCreates a model for text generation.\n*/\n chat(modelId: MistralChatModelId): LanguageModelV3;\n\n /**\n@deprecated Use `textEmbedding()` instead.\n */\n embedding(modelId: MistralEmbeddingModelId): EmbeddingModelV3<string>;\n\n textEmbedding(modelId: MistralEmbeddingModelId): EmbeddingModelV3<string>;\n\n textEmbeddingModel: (\n modelId: MistralEmbeddingModelId,\n ) => EmbeddingModelV3<string>;\n}\n\nexport interface MistralProviderSettings {\n /**\nUse a different URL prefix for API calls, e.g. to use proxy servers.\nThe default prefix is `https://api.mistral.ai/v1`.\n */\n baseURL?: string;\n\n /**\nAPI key that is being send using the `Authorization` header.\nIt defaults to the `MISTRAL_API_KEY` environment variable.\n */\n apiKey?: string;\n\n /**\nCustom headers to include in the requests.\n */\n headers?: Record<string, string>;\n\n /**\nCustom fetch implementation. You can use it as a middleware to intercept requests,\nor to provide a custom fetch implementation for e.g. testing.\n */\n fetch?: FetchFunction;\n\n generateId?: () => string;\n}\n\n/**\nCreate a Mistral AI provider instance.\n */\nexport function createMistral(\n options: MistralProviderSettings = {},\n): MistralProvider {\n const baseURL =\n withoutTrailingSlash(options.baseURL) ?? 
'https://api.mistral.ai/v1';\n\n const getHeaders = () =>\n withUserAgentSuffix(\n {\n Authorization: `Bearer ${loadApiKey({\n apiKey: options.apiKey,\n environmentVariableName: 'MISTRAL_API_KEY',\n description: 'Mistral',\n })}`,\n ...options.headers,\n },\n `ai-sdk/mistral/${VERSION}`,\n );\n\n const createChatModel = (modelId: MistralChatModelId) =>\n new MistralChatLanguageModel(modelId, {\n provider: 'mistral.chat',\n baseURL,\n headers: getHeaders,\n fetch: options.fetch,\n generateId: options.generateId,\n });\n\n const createEmbeddingModel = (modelId: MistralEmbeddingModelId) =>\n new MistralEmbeddingModel(modelId, {\n provider: 'mistral.embedding',\n baseURL,\n headers: getHeaders,\n fetch: options.fetch,\n });\n\n const provider = function (modelId: MistralChatModelId) {\n if (new.target) {\n throw new Error(\n 'The Mistral model function cannot be called with the new keyword.',\n );\n }\n\n return createChatModel(modelId);\n };\n\n provider.languageModel = createChatModel;\n provider.chat = createChatModel;\n provider.embedding = createEmbeddingModel;\n provider.textEmbedding = createEmbeddingModel;\n provider.textEmbeddingModel = createEmbeddingModel;\n\n provider.imageModel = (modelId: string) => {\n throw new NoSuchModelError({ modelId, modelType: 'imageModel' });\n };\n\n return provider;\n}\n\n/**\nDefault Mistral provider instance.\n */\nexport const mistral = createMistral();\n","import {\n LanguageModelV3,\n LanguageModelV3CallWarning,\n LanguageModelV3Content,\n LanguageModelV3FinishReason,\n LanguageModelV3StreamPart,\n LanguageModelV3Usage,\n} from '@ai-sdk/provider';\nimport {\n combineHeaders,\n createEventSourceResponseHandler,\n createJsonResponseHandler,\n FetchFunction,\n generateId,\n injectJsonInstructionIntoMessages,\n parseProviderOptions,\n ParseResult,\n postJsonToApi,\n} from '@ai-sdk/provider-utils';\nimport { z } from 'zod/v4';\nimport { convertToMistralChatMessages } from './convert-to-mistral-chat-messages';\nimport { getResponseMetadata } from './get-response-metadata';\nimport { mapMistralFinishReason } from './map-mistral-finish-reason';\nimport {\n MistralChatModelId,\n mistralLanguageModelOptions,\n} from './mistral-chat-options';\nimport { mistralFailedResponseHandler } from './mistral-error';\nimport { prepareTools } from './mistral-prepare-tools';\n\ntype MistralChatConfig = {\n provider: string;\n baseURL: string;\n headers: () => Record<string, string | undefined>;\n fetch?: FetchFunction;\n generateId?: () => string;\n};\n\nexport class MistralChatLanguageModel implements LanguageModelV3 {\n readonly specificationVersion = 'v3';\n\n readonly modelId: MistralChatModelId;\n\n private readonly config: MistralChatConfig;\n private readonly generateId: () => string;\n\n constructor(modelId: MistralChatModelId, config: MistralChatConfig) {\n this.modelId = modelId;\n this.config = config;\n this.generateId = config.generateId ?? 
generateId;\n }\n\n get provider(): string {\n return this.config.provider;\n }\n\n readonly supportedUrls: Record<string, RegExp[]> = {\n 'application/pdf': [/^https:\\/\\/.*$/],\n };\n\n private async getArgs({\n prompt,\n maxOutputTokens,\n temperature,\n topP,\n topK,\n frequencyPenalty,\n presencePenalty,\n stopSequences,\n responseFormat,\n seed,\n providerOptions,\n tools,\n toolChoice,\n }: Parameters<LanguageModelV3['doGenerate']>[0]) {\n const warnings: LanguageModelV3CallWarning[] = [];\n\n const options =\n (await parseProviderOptions({\n provider: 'mistral',\n providerOptions,\n schema: mistralLanguageModelOptions,\n })) ?? {};\n\n if (topK != null) {\n warnings.push({\n type: 'unsupported-setting',\n setting: 'topK',\n });\n }\n\n if (frequencyPenalty != null) {\n warnings.push({\n type: 'unsupported-setting',\n setting: 'frequencyPenalty',\n });\n }\n\n if (presencePenalty != null) {\n warnings.push({\n type: 'unsupported-setting',\n setting: 'presencePenalty',\n });\n }\n\n if (stopSequences != null) {\n warnings.push({\n type: 'unsupported-setting',\n setting: 'stopSequences',\n });\n }\n\n const structuredOutputs = options.structuredOutputs ?? true;\n const strictJsonSchema = options.strictJsonSchema ?? false;\n\n // For Mistral we need to need to instruct the model to return a JSON object.\n // https://docs.mistral.ai/capabilities/structured-output/structured_output_overview/\n if (responseFormat?.type === 'json' && !responseFormat?.schema) {\n prompt = injectJsonInstructionIntoMessages({\n messages: prompt,\n schema: responseFormat.schema,\n });\n }\n\n const baseArgs = {\n // model id:\n model: this.modelId,\n\n // model specific settings:\n safe_prompt: options.safePrompt,\n\n // standardized settings:\n max_tokens: maxOutputTokens,\n temperature,\n top_p: topP,\n random_seed: seed,\n\n // response format:\n response_format:\n responseFormat?.type === 'json'\n ? structuredOutputs && responseFormat?.schema != null\n ? {\n type: 'json_schema',\n json_schema: {\n schema: responseFormat.schema,\n strict: strictJsonSchema,\n name: responseFormat.name ?? 'response',\n description: responseFormat.description,\n },\n }\n : { type: 'json_object' }\n : undefined,\n\n // mistral-specific provider options:\n document_image_limit: options.documentImageLimit,\n document_page_limit: options.documentPageLimit,\n\n // messages:\n messages: convertToMistralChatMessages(prompt),\n };\n\n const {\n tools: mistralTools,\n toolChoice: mistralToolChoice,\n toolWarnings,\n } = prepareTools({\n tools,\n toolChoice,\n });\n\n return {\n args: {\n ...baseArgs,\n tools: mistralTools,\n tool_choice: mistralToolChoice,\n ...(mistralTools != null && options.parallelToolCalls !== undefined\n ? 
{ parallel_tool_calls: options.parallelToolCalls }\n : {}),\n },\n warnings: [...warnings, ...toolWarnings],\n };\n }\n\n async doGenerate(\n options: Parameters<LanguageModelV3['doGenerate']>[0],\n ): Promise<Awaited<ReturnType<LanguageModelV3['doGenerate']>>> {\n const { args: body, warnings } = await this.getArgs(options);\n\n const {\n responseHeaders,\n value: response,\n rawValue: rawResponse,\n } = await postJsonToApi({\n url: `${this.config.baseURL}/chat/completions`,\n headers: combineHeaders(this.config.headers(), options.headers),\n body,\n failedResponseHandler: mistralFailedResponseHandler,\n successfulResponseHandler: createJsonResponseHandler(\n mistralChatResponseSchema,\n ),\n abortSignal: options.abortSignal,\n fetch: this.config.fetch,\n });\n\n const choice = response.choices[0];\n const content: Array<LanguageModelV3Content> = [];\n\n // process content parts in order to preserve sequence\n if (\n choice.message.content != null &&\n Array.isArray(choice.message.content)\n ) {\n for (const part of choice.message.content) {\n if (part.type === 'thinking') {\n const reasoningText = extractReasoningContent(part.thinking);\n if (reasoningText.length > 0) {\n content.push({ type: 'reasoning', text: reasoningText });\n }\n } else if (part.type === 'text') {\n if (part.text.length > 0) {\n content.push({ type: 'text', text: part.text });\n }\n }\n }\n } else {\n // handle legacy string content\n const text = extractTextContent(choice.message.content);\n if (text != null && text.length > 0) {\n content.push({ type: 'text', text });\n }\n }\n\n // when there is a trailing assistant message, mistral will send the\n // content of that message again. we skip this repeated content to\n // avoid duplication, e.g. in continuation mode.\n\n // tool calls:\n if (choice.message.tool_calls != null) {\n for (const toolCall of choice.message.tool_calls) {\n content.push({\n type: 'tool-call',\n toolCallId: toolCall.id,\n toolName: toolCall.function.name,\n input: toolCall.function.arguments!,\n });\n }\n }\n\n return {\n content,\n finishReason: mapMistralFinishReason(choice.finish_reason),\n usage: {\n inputTokens: response.usage.prompt_tokens,\n outputTokens: response.usage.completion_tokens,\n totalTokens: response.usage.total_tokens,\n },\n request: { body },\n response: {\n ...getResponseMetadata(response),\n headers: responseHeaders,\n body: rawResponse,\n },\n warnings,\n };\n }\n\n async doStream(\n options: Parameters<LanguageModelV3['doStream']>[0],\n ): Promise<Awaited<ReturnType<LanguageModelV3['doStream']>>> {\n const { args, warnings } = await this.getArgs(options);\n const body = { ...args, stream: true };\n\n const { responseHeaders, value: response } = await postJsonToApi({\n url: `${this.config.baseURL}/chat/completions`,\n headers: combineHeaders(this.config.headers(), options.headers),\n body,\n failedResponseHandler: mistralFailedResponseHandler,\n successfulResponseHandler: createEventSourceResponseHandler(\n mistralChatChunkSchema,\n ),\n abortSignal: options.abortSignal,\n fetch: this.config.fetch,\n });\n\n let finishReason: LanguageModelV3FinishReason = 'unknown';\n const usage: LanguageModelV3Usage = {\n inputTokens: undefined,\n outputTokens: undefined,\n totalTokens: undefined,\n };\n\n let isFirstChunk = true;\n let activeText = false;\n let activeReasoningId: string | null = null;\n\n const generateId = this.generateId;\n\n return {\n stream: response.pipeThrough(\n new TransformStream<\n ParseResult<z.infer<typeof mistralChatChunkSchema>>,\n 
LanguageModelV3StreamPart\n >({\n start(controller) {\n controller.enqueue({ type: 'stream-start', warnings });\n },\n\n transform(chunk, controller) {\n // Emit raw chunk if requested (before anything else)\n if (options.includeRawChunks) {\n controller.enqueue({ type: 'raw', rawValue: chunk.rawValue });\n }\n\n if (!chunk.success) {\n controller.enqueue({ type: 'error', error: chunk.error });\n return;\n }\n\n const value = chunk.value;\n\n if (isFirstChunk) {\n isFirstChunk = false;\n\n controller.enqueue({\n type: 'response-metadata',\n ...getResponseMetadata(value),\n });\n }\n\n if (value.usage != null) {\n usage.inputTokens = value.usage.prompt_tokens;\n usage.outputTokens = value.usage.completion_tokens;\n usage.totalTokens = value.usage.total_tokens;\n }\n\n const choice = value.choices[0];\n const delta = choice.delta;\n\n const textContent = extractTextContent(delta.content);\n\n if (delta.content != null && Array.isArray(delta.content)) {\n for (const part of delta.content) {\n if (part.type === 'thinking') {\n const reasoningDelta = extractReasoningContent(part.thinking);\n if (reasoningDelta.length > 0) {\n if (activeReasoningId == null) {\n // end any active text before starting reasoning\n if (activeText) {\n controller.enqueue({ type: 'text-end', id: '0' });\n activeText = false;\n }\n\n activeReasoningId = generateId();\n controller.enqueue({\n type: 'reasoning-start',\n id: activeReasoningId,\n });\n }\n controller.enqueue({\n type: 'reasoning-delta',\n id: activeReasoningId,\n delta: reasoningDelta,\n });\n }\n }\n }\n }\n\n if (textContent != null && textContent.length > 0) {\n if (!activeText) {\n // if we were in reasoning mode, end it before starting text\n if (activeReasoningId != null) {\n controller.enqueue({\n type: 'reasoning-end',\n id: activeReasoningId,\n });\n activeReasoningId = null;\n }\n controller.enqueue({ type: 'text-start', id: '0' });\n activeText = true;\n }\n\n controller.enqueue({\n type: 'text-delta',\n id: '0',\n delta: textContent,\n });\n }\n\n if (delta?.tool_calls != null) {\n for (const toolCall of delta.tool_calls) {\n const toolCallId = toolCall.id;\n const toolName = toolCall.function.name;\n const input = toolCall.function.arguments;\n\n controller.enqueue({\n type: 'tool-input-start',\n id: toolCallId,\n toolName,\n });\n\n controller.enqueue({\n type: 'tool-input-delta',\n id: toolCallId,\n delta: input,\n });\n\n controller.enqueue({\n type: 'tool-input-end',\n id: toolCallId,\n });\n\n controller.enqueue({\n type: 'tool-call',\n toolCallId,\n toolName,\n input,\n });\n }\n }\n\n if (choice.finish_reason != null) {\n finishReason = mapMistralFinishReason(choice.finish_reason);\n }\n },\n\n flush(controller) {\n if (activeReasoningId != null) {\n controller.enqueue({\n type: 'reasoning-end',\n id: activeReasoningId,\n });\n }\n if (activeText) {\n controller.enqueue({ type: 'text-end', id: '0' });\n }\n\n controller.enqueue({\n type: 'finish',\n finishReason,\n usage,\n });\n },\n }),\n ),\n request: { body },\n response: { headers: responseHeaders },\n };\n }\n}\n\nfunction extractReasoningContent(\n thinking: Array<{ type: string; text: string }>,\n) {\n return thinking\n .filter(chunk => chunk.type === 'text')\n .map(chunk => chunk.text)\n .join('');\n}\n\nfunction extractTextContent(content: z.infer<typeof mistralContentSchema>) {\n if (typeof content === 'string') {\n return content;\n }\n\n if (content == null) {\n return undefined;\n }\n\n const textContent: string[] = [];\n\n for (const chunk of content) {\n const { type } = 
chunk;\n\n switch (type) {\n case 'text':\n textContent.push(chunk.text);\n break;\n case 'thinking':\n case 'image_url':\n case 'reference':\n // thinking, image content, and reference content are currently ignored\n break;\n default: {\n const _exhaustiveCheck: never = type;\n throw new Error(`Unsupported type: ${_exhaustiveCheck}`);\n }\n }\n }\n\n return textContent.length ? textContent.join('') : undefined;\n}\n\nconst mistralContentSchema = z\n .union([\n z.string(),\n z.array(\n z.discriminatedUnion('type', [\n z.object({\n type: z.literal('text'),\n text: z.string(),\n }),\n z.object({\n type: z.literal('image_url'),\n image_url: z.union([\n z.string(),\n z.object({\n url: z.string(),\n detail: z.string().nullable(),\n }),\n ]),\n }),\n z.object({\n type: z.literal('reference'),\n reference_ids: z.array(z.number()),\n }),\n z.object({\n type: z.literal('thinking'),\n thinking: z.array(\n z.object({\n type: z.literal('text'),\n text: z.string(),\n }),\n ),\n }),\n ]),\n ),\n ])\n .nullish();\n\nconst mistralUsageSchema = z.object({\n prompt_tokens: z.number(),\n completion_tokens: z.number(),\n total_tokens: z.number(),\n});\n\n// limited version of the schema, focussed on what is needed for the implementation\n// this approach limits breakages when the API changes and increases efficiency\nconst mistralChatResponseSchema = z.object({\n id: z.string().nullish(),\n created: z.number().nullish(),\n model: z.string().nullish(),\n choices: z.array(\n z.object({\n message: z.object({\n role: z.literal('assistant'),\n content: mistralContentSchema,\n tool_calls: z\n .array(\n z.object({\n id: z.string(),\n function: z.object({ name: z.string(), arguments: z.string() }),\n }),\n )\n .nullish(),\n }),\n index: z.number(),\n finish_reason: z.string().nullish(),\n }),\n ),\n object: z.literal('chat.completion'),\n usage: mistralUsageSchema,\n});\n\n// limited version of the schema, focussed on what is needed for the implementation\n// this approach limits breakages when the API changes and increases efficiency\nconst mistralChatChunkSchema = z.object({\n id: z.string().nullish(),\n created: z.number().nullish(),\n model: z.string().nullish(),\n choices: z.array(\n z.object({\n delta: z.object({\n role: z.enum(['assistant']).optional(),\n content: mistralContentSchema,\n tool_calls: z\n .array(\n z.object({\n id: z.string(),\n function: z.object({ name: z.string(), arguments: z.string() }),\n }),\n )\n .nullish(),\n }),\n finish_reason: z.string().nullish(),\n index: z.number(),\n }),\n ),\n usage: mistralUsageSchema.nullish(),\n});\n","import {\n LanguageModelV3Prompt,\n UnsupportedFunctionalityError,\n} from '@ai-sdk/provider';\nimport { MistralPrompt } from './mistral-chat-prompt';\nimport { convertToBase64 } from '@ai-sdk/provider-utils';\n\nexport function convertToMistralChatMessages(\n prompt: LanguageModelV3Prompt,\n): MistralPrompt {\n const messages: MistralPrompt = [];\n\n for (let i = 0; i < prompt.length; i++) {\n const { role, content } = prompt[i];\n const isLastMessage = i === prompt.length - 1;\n\n switch (role) {\n case 'system': {\n messages.push({ role: 'system', content });\n break;\n }\n\n case 'user': {\n messages.push({\n role: 'user',\n content: content.map(part => {\n switch (part.type) {\n case 'text': {\n return { type: 'text', text: part.text };\n }\n\n case 'file': {\n if (part.mediaType.startsWith('image/')) {\n const mediaType =\n part.mediaType === 'image/*'\n ? 
'image/jpeg'\n : part.mediaType;\n\n return {\n type: 'image_url',\n image_url:\n part.data instanceof URL\n ? part.data.toString()\n : `data:${mediaType};base64,${convertToBase64(part.data)}`,\n };\n } else if (part.mediaType === 'application/pdf') {\n return {\n type: 'document_url',\n document_url: part.data.toString(),\n };\n } else {\n throw new UnsupportedFunctionalityError({\n functionality:\n 'Only images and PDF file parts are supported',\n });\n }\n }\n }\n }),\n });\n break;\n }\n\n case 'assistant': {\n let text = '';\n const toolCalls: Array<{\n id: string;\n type: 'function';\n function: { name: string; arguments: string };\n }> = [];\n\n for (const part of content) {\n switch (part.type) {\n case 'text': {\n text += part.text;\n break;\n }\n case 'tool-call': {\n toolCalls.push({\n id: part.toolCallId,\n type: 'function',\n function: {\n name: part.toolName,\n arguments: JSON.stringify(part.input),\n },\n });\n break;\n }\n case 'reasoning': {\n text += part.text;\n break;\n }\n default: {\n throw new Error(\n `Unsupported content type in assistant message: ${part.type}`,\n );\n }\n }\n }\n\n messages.push({\n role: 'assistant',\n content: text,\n prefix: isLastMessage ? true : undefined,\n tool_calls: toolCalls.length > 0 ? toolCalls : undefined,\n });\n\n break;\n }\n case 'tool': {\n for (const toolResponse of content) {\n const output = toolResponse.output;\n\n let contentValue: string;\n switch (output.type) {\n case 'text':\n case 'error-text':\n contentValue = output.value;\n break;\n case 'execution-denied':\n contentValue = output.reason ?? 'Tool execution denied.';\n break;\n case 'content':\n case 'json':\n case 'error-json':\n contentValue = JSON.stringify(output.value);\n break;\n }\n\n messages.push({\n role: 'tool',\n name: toolResponse.toolName,\n tool_call_id: toolResponse.toolCallId,\n content: contentValue,\n });\n }\n break;\n }\n default: {\n const _exhaustiveCheck: never = role;\n throw new Error(`Unsupported role: ${_exhaustiveCheck}`);\n }\n }\n }\n\n return messages;\n}\n","export function getResponseMetadata({\n id,\n model,\n created,\n}: {\n id?: string | undefined | null;\n created?: number | undefined | null;\n model?: string | undefined | null;\n}) {\n return {\n id: id ?? undefined,\n modelId: model ?? undefined,\n timestamp: created != null ? 
new Date(created * 1000) : undefined,\n };\n}\n","import { LanguageModelV3FinishReason } from '@ai-sdk/provider';\n\nexport function mapMistralFinishReason(\n finishReason: string | null | undefined,\n): LanguageModelV3FinishReason {\n switch (finishReason) {\n case 'stop':\n return 'stop';\n case 'length':\n case 'model_length':\n return 'length';\n case 'tool_calls':\n return 'tool-calls';\n default:\n return 'unknown';\n }\n}\n","import { z } from 'zod/v4';\n\n// https://docs.mistral.ai/getting-started/models/models_overview/\nexport type MistralChatModelId =\n // premier\n | 'ministral-3b-latest'\n | 'ministral-8b-latest'\n | 'mistral-large-latest'\n | 'mistral-medium-latest'\n | 'mistral-medium-2508'\n | 'mistral-medium-2505'\n | 'mistral-small-latest'\n | 'pixtral-large-latest'\n // reasoning models\n | 'magistral-small-2507'\n | 'magistral-medium-2507'\n | 'magistral-small-2506'\n | 'magistral-medium-2506'\n // free\n | 'pixtral-12b-2409'\n // legacy\n | 'open-mistral-7b'\n | 'open-mixtral-8x7b'\n | 'open-mixtral-8x22b'\n | (string & {});\n\nexport const mistralLanguageModelOptions = z.object({\n /**\nWhether to inject a safety prompt before all conversations.\n\nDefaults to `false`.\n */\n safePrompt: z.boolean().optional(),\n\n documentImageLimit: z.number().optional(),\n documentPageLimit: z.number().optional(),\n\n /**\n * Whether to use structured outputs.\n *\n * @default true\n */\n structuredOutputs: z.boolean().optional(),\n\n /**\n * Whether to use strict JSON schema validation.\n *\n * @default false\n */\n strictJsonSchema: z.boolean().optional(),\n\n /**\n * Whether to enable parallel function calling during tool use.\n * When set to false, the model will use at most one tool per response.\n *\n * @default true\n */\n parallelToolCalls: z.boolean().optional(),\n});\n\nexport type MistralLanguageModelOptions = z.infer<\n typeof mistralLanguageModelOptions\n>;\n","import { createJsonErrorResponseHandler } from '@ai-sdk/provider-utils';\nimport { z } from 'zod/v4';\n\nconst mistralErrorDataSchema = z.object({\n object: z.literal('error'),\n message: z.string(),\n type: z.string(),\n param: z.string().nullable(),\n code: z.string().nullable(),\n});\n\nexport type MistralErrorData = z.infer<typeof mistralErrorDataSchema>;\n\nexport const mistralFailedResponseHandler = createJsonErrorResponseHandler({\n errorSchema: mistralErrorDataSchema,\n errorToMessage: data => data.message,\n});\n","import {\n LanguageModelV3CallOptions,\n LanguageModelV3CallWarning,\n UnsupportedFunctionalityError,\n} from '@ai-sdk/provider';\nimport { MistralToolChoice } from './mistral-chat-prompt';\n\nexport function prepareTools({\n tools,\n toolChoice,\n}: {\n tools: LanguageModelV3CallOptions['tools'];\n toolChoice?: LanguageModelV3CallOptions['toolChoice'];\n}): {\n tools:\n | Array<{\n type: 'function';\n function: {\n name: string;\n description: string | undefined;\n parameters: unknown;\n };\n }>\n | undefined;\n toolChoice: MistralToolChoice | undefined;\n toolWarnings: LanguageModelV3CallWarning[];\n} {\n // when the tools array is empty, change it to undefined to prevent errors:\n tools = tools?.length ? 
tools : undefined;\n\n const toolWarnings: LanguageModelV3CallWarning[] = [];\n\n if (tools == null) {\n return { tools: undefined, toolChoice: undefined, toolWarnings };\n }\n\n const mistralTools: Array<{\n type: 'function';\n function: {\n name: string;\n description: string | undefined;\n parameters: unknown;\n };\n }> = [];\n\n for (const tool of tools) {\n if (tool.type === 'provider-defined') {\n toolWarnings.push({ type: 'unsupported-tool', tool });\n } else {\n mistralTools.push({\n type: 'function',\n function: {\n name: tool.name,\n description: tool.description,\n parameters: tool.inputSchema,\n },\n });\n }\n }\n\n if (toolChoice == null) {\n return { tools: mistralTools, toolChoice: undefined, toolWarnings };\n }\n\n const type = toolChoice.type;\n\n switch (type) {\n case 'auto':\n case 'none':\n return { tools: mistralTools, toolChoice: type, toolWarnings };\n case 'required':\n return { tools: mistralTools, toolChoice: 'any', toolWarnings };\n\n // mistral does not support tool mode directly,\n // so we filter the tools and force the tool choice through 'any'\n case 'tool':\n return {\n tools: mistralTools.filter(\n tool => tool.function.name === toolChoice.toolName,\n ),\n toolChoice: 'any',\n toolWarnings,\n };\n default: {\n const _exhaustiveCheck: never = type;\n throw new UnsupportedFunctionalityError({\n functionality: `tool choice type: ${_exhaustiveCheck}`,\n });\n }\n }\n}\n","import {\n EmbeddingModelV3,\n TooManyEmbeddingValuesForCallError,\n} from '@ai-sdk/provider';\nimport {\n combineHeaders,\n createJsonResponseHandler,\n FetchFunction,\n postJsonToApi,\n} from '@ai-sdk/provider-utils';\nimport { z } from 'zod/v4';\nimport { MistralEmbeddingModelId } from './mistral-embedding-options';\nimport { mistralFailedResponseHandler } from './mistral-error';\n\ntype MistralEmbeddingConfig = {\n provider: string;\n baseURL: string;\n headers: () => Record<string, string | undefined>;\n fetch?: FetchFunction;\n};\n\nexport class MistralEmbeddingModel implements EmbeddingModelV3<string> {\n readonly specificationVersion = 'v3';\n readonly modelId: MistralEmbeddingModelId;\n readonly maxEmbeddingsPerCall = 32;\n readonly supportsParallelCalls = false;\n\n private readonly config: MistralEmbeddingConfig;\n\n get provider(): string {\n return this.config.provider;\n }\n\n constructor(\n modelId: MistralEmbeddingModelId,\n config: MistralEmbeddingConfig,\n ) {\n this.modelId = modelId;\n this.config = config;\n }\n\n async doEmbed({\n values,\n abortSignal,\n headers,\n }: Parameters<EmbeddingModelV3<string>['doEmbed']>[0]): Promise<\n Awaited<ReturnType<EmbeddingModelV3<string>['doEmbed']>>\n > {\n if (values.length > this.maxEmbeddingsPerCall) {\n throw new TooManyEmbeddingValuesForCallError({\n provider: this.provider,\n modelId: this.modelId,\n maxEmbeddingsPerCall: this.maxEmbeddingsPerCall,\n values,\n });\n }\n\n const {\n responseHeaders,\n value: response,\n rawValue,\n } = await postJsonToApi({\n url: `${this.config.baseURL}/embeddings`,\n headers: combineHeaders(this.config.headers(), headers),\n body: {\n model: this.modelId,\n input: values,\n encoding_format: 'float',\n },\n failedResponseHandler: mistralFailedResponseHandler,\n successfulResponseHandler: createJsonResponseHandler(\n MistralTextEmbeddingResponseSchema,\n ),\n abortSignal,\n fetch: this.config.fetch,\n });\n\n return {\n embeddings: response.data.map(item => item.embedding),\n usage: response.usage\n ? 
{ tokens: response.usage.prompt_tokens }\n : undefined,\n response: { headers: responseHeaders, body: rawValue },\n };\n }\n}\n\n// minimal version of the schema, focussed on what is needed for the implementation\n// this approach limits breakages when the API changes and increases efficiency\nconst MistralTextEmbeddingResponseSchema = z.object({\n data: z.array(z.object({ embedding: z.array(z.number()) })),\n usage: z.object({ prompt_tokens: z.number() }).nullish(),\n});\n","// Version string of this package injected at build time.\ndeclare const __PACKAGE_VERSION__: string | undefined;\nexport const VERSION: string =\n typeof __PACKAGE_VERSION__ !== 'undefined'\n ? __PACKAGE_VERSION__\n : '0.0.0-test';\n"],"mappings":";;;;;;;;;;;;;;;;;;;;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;;;ACAA,IAAAA,mBAKO;AACP,IAAAC,yBAKO;;;ACHP,IAAAC,yBAUO;AACP,IAAAC,aAAkB;;;ACnBlB,sBAGO;AAEP,4BAAgC;AAEzB,SAAS,6BACd,QACe;AATjB;AAUE,QAAM,WAA0B,CAAC;AAEjC,WAAS,IAAI,GAAG,IAAI,OAAO,QAAQ,KAAK;AACtC,UAAM,EAAE,MAAM,QAAQ,IAAI,OAAO,CAAC;AAClC,UAAM,gBAAgB,MAAM,OAAO,SAAS;AAE5C,YAAQ,MAAM;AAAA,MACZ,KAAK,UAAU;AACb,iBAAS,KAAK,EAAE,MAAM,UAAU,QAAQ,CAAC;AACzC;AAAA,MACF;AAAA,MAEA,KAAK,QAAQ;AACX,iBAAS,KAAK;AAAA,UACZ,MAAM;AAAA,UACN,SAAS,QAAQ,IAAI,UAAQ;AAC3B,oBAAQ,KAAK,MAAM;AAAA,cACjB,KAAK,QAAQ;AACX,uBAAO,EAAE,MAAM,QAAQ,MAAM,KAAK,KAAK;AAAA,cACzC;AAAA,cAEA,KAAK,QAAQ;AACX,oBAAI,KAAK,UAAU,WAAW,QAAQ,GAAG;AACvC,wBAAM,YACJ,KAAK,cAAc,YACf,eACA,KAAK;AAEX,yBAAO;AAAA,oBACL,MAAM;AAAA,oBACN,WACE,KAAK,gBAAgB,MACjB,KAAK,KAAK,SAAS,IACnB,QAAQ,SAAS,eAAW,uCAAgB,KAAK,IAAI,CAAC;AAAA,kBAC9D;AAAA,gBACF,WAAW,KAAK,cAAc,mBAAmB;AAC/C,yBAAO;AAAA,oBACL,MAAM;AAAA,oBACN,cAAc,KAAK,KAAK,SAAS;AAAA,kBACnC;AAAA,gBACF,OAAO;AACL,wBAAM,IAAI,8CAA8B;AAAA,oBACtC,eACE;AAAA,kBACJ,CAAC;AAAA,gBACH;AAAA,cACF;AAAA,YACF;AAAA,UACF,CAAC;AAAA,QACH,CAAC;AACD;AAAA,MACF;AAAA,MAEA,KAAK,aAAa;AAChB,YAAI,OAAO;AACX,cAAM,YAID,CAAC;AAEN,mBAAW,QAAQ,SAAS;AAC1B,kBAAQ,KAAK,MAAM;AAAA,YACjB,KAAK,QAAQ;AACX,sBAAQ,KAAK;AACb;AAAA,YACF;AAAA,YACA,KAAK,aAAa;AAChB,wBAAU,KAAK;AAAA,gBACb,IAAI,KAAK;AAAA,gBACT,MAAM;AAAA,gBACN,UAAU;AAAA,kBACR,MAAM,KAAK;AAAA,kBACX,WAAW,KAAK,UAAU,KAAK,KAAK;AAAA,gBACtC;AAAA,cACF,CAAC;AACD;AAAA,YACF;AAAA,YACA,KAAK,aAAa;AAChB,sBAAQ,KAAK;AACb;AAAA,YACF;AAAA,YACA,SAAS;AACP,oBAAM,IAAI;AAAA,gBACR,kDAAkD,KAAK,IAAI;AAAA,cAC7D;AAAA,YACF;AAAA,UACF;AAAA,QACF;AAEA,iBAAS,KAAK;AAAA,UACZ,MAAM;AAAA,UACN,SAAS;AAAA,UACT,QAAQ,gBAAgB,OAAO;AAAA,UAC/B,YAAY,UAAU,SAAS,IAAI,YAAY;AAAA,QACjD,CAAC;AAED;AAAA,MACF;AAAA,MACA,KAAK,QAAQ;AACX,mBAAW,gBAAgB,SAAS;AAClC,gBAAM,SAAS,aAAa;AAE5B,cAAI;AACJ,kBAAQ,OAAO,MAAM;AAAA,YACnB,KAAK;AAAA,YACL,KAAK;AACH,6BAAe,OAAO;AACtB;AAAA,YACF,KAAK;AACH,8BAAe,YAAO,WAAP,YAAiB;AAChC;AAAA,YACF,KAAK;AAAA,YACL,KAAK;AAAA,YACL,KAAK;AACH,6BAAe,KAAK,UAAU,OAAO,KAAK;AAC1C;AAAA,UACJ;AAEA,mBAAS,KAAK;AAAA,YACZ,MAAM;AAAA,YACN,MAAM,aAAa;AAAA,YACnB,cAAc,aAAa;AAAA,YAC3B,SAAS;AAAA,UACX,CAAC;AAAA,QACH;AACA;AAAA,MACF;AAAA,MACA,SAAS;AACP,cAAM,mBAA0B;AAChC,cAAM,IAAI,MAAM,qBAAqB,gBAAgB,EAAE;AAAA,MACzD;AAAA,IACF;AAAA,EACF;AAEA,SAAO;AACT;;;AClJO,SAAS,oBAAoB;AAAA,EAClC;AAAA,EACA;AAAA,EACA;AACF,GAIG;AACD,SAAO;AAAA,IACL,IAAI,kBAAM;AAAA,IACV,SAAS,wBAAS;AAAA,IAClB,WAAW,WAAW,OAAO,IAAI,KAAK,UAAU,GAAI,IAAI;AAAA,EAC1D;AACF;;;ACZO,SAAS,uBACd,cAC6B;AAC7B,UAAQ,cAAc;AAAA,IACpB,KAAK;AACH,aAAO;AAAA,IACT,KAAK;AAAA,IACL,KAAK;AACH,aAAO;AAAA,IACT,KAAK;AACH,aAAO;AAAA,IACT;AACE,aAAO;AAAA,EACX;AACF;;;AChBA,gBAAkB;AA0BX,IAAM,8BAA8B,YAAE,OAAO;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,EAMlD,YAAY,YAAE,QAAQ,EAAE,SAAS;AAAA,EAEjC,oBAAoB,YAAE,OAAO,EAAE,SAAS;AAAA,EACxC,mBAAmB,YAAE,OAAO,EAAE,SAAS;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,EAOvC,mBAAmB,YAAE,QAAQ,EAAE,SAAS;
AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,EAOxC,kBAAkB,YAAE,QAAQ,EAAE,SAAS;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,EAQvC,mBAAmB,YAAE,QAAQ,EAAE,SAAS;AAC1C,CAAC;;;AC1DD,IAAAC,yBAA+C;AAC/C,IAAAC,aAAkB;AAElB,IAAM,yBAAyB,aAAE,OAAO;AAAA,EACtC,QAAQ,aAAE,QAAQ,OAAO;AAAA,EACzB,SAAS,aAAE,OAAO;AAAA,EAClB,MAAM,aAAE,OAAO;AAAA,EACf,OAAO,aAAE,OAAO,EAAE,SAAS;AAAA,EAC3B,MAAM,aAAE,OAAO,EAAE,SAAS;AAC5B,CAAC;AAIM,IAAM,mCAA+B,uDAA+B;AAAA,EACzE,aAAa;AAAA,EACb,gBAAgB,UAAQ,KAAK;AAC/B,CAAC;;;AChBD,IAAAC,mBAIO;AAGA,SAAS,aAAa;AAAA,EAC3B;AAAA,EACA;AACF,GAgBE;AAEA,WAAQ,+BAAO,UAAS,QAAQ;AAEhC,QAAM,eAA6C,CAAC;AAEpD,MAAI,SAAS,MAAM;AACjB,WAAO,EAAE,OAAO,QAAW,YAAY,QAAW,aAAa;AAAA,EACjE;AAEA,QAAM,eAOD,CAAC;AAEN,aAAW,QAAQ,OAAO;AACxB,QAAI,KAAK,SAAS,oBAAoB;AACpC,mBAAa,KAAK,EAAE,MAAM,oBAAoB,KAAK,CAAC;AAAA,IACtD,OAAO;AACL,mBAAa,KAAK;AAAA,QAChB,MAAM;AAAA,QACN,UAAU;AAAA,UACR,MAAM,KAAK;AAAA,UACX,aAAa,KAAK;AAAA,UAClB,YAAY,KAAK;AAAA,QACnB;AAAA,MACF,CAAC;AAAA,IACH;AAAA,EACF;AAEA,MAAI,cAAc,MAAM;AACtB,WAAO,EAAE,OAAO,cAAc,YAAY,QAAW,aAAa;AAAA,EACpE;AAEA,QAAM,OAAO,WAAW;AAExB,UAAQ,MAAM;AAAA,IACZ,KAAK;AAAA,IACL,KAAK;AACH,aAAO,EAAE,OAAO,cAAc,YAAY,MAAM,aAAa;AAAA,IAC/D,KAAK;AACH,aAAO,EAAE,OAAO,cAAc,YAAY,OAAO,aAAa;AAAA;AAAA;AAAA,IAIhE,KAAK;AACH,aAAO;AAAA,QACL,OAAO,aAAa;AAAA,UAClB,UAAQ,KAAK,SAAS,SAAS,WAAW;AAAA,QAC5C;AAAA,QACA,YAAY;AAAA,QACZ;AAAA,MACF;AAAA,IACF,SAAS;AACP,YAAM,mBAA0B;AAChC,YAAM,IAAI,+CAA8B;AAAA,QACtC,eAAe,qBAAqB,gBAAgB;AAAA,MACtD,CAAC;AAAA,IACH;AAAA,EACF;AACF;;;ANpDO,IAAM,2BAAN,MAA0D;AAAA,EAQ/D,YAAY,SAA6B,QAA2B;AAPpE,SAAS,uBAAuB;AAiBhC,SAAS,gBAA0C;AAAA,MACjD,mBAAmB,CAAC,gBAAgB;AAAA,IACtC;AA1DF;AA+CI,SAAK,UAAU;AACf,SAAK,SAAS;AACd,SAAK,cAAa,YAAO,eAAP,YAAqB;AAAA,EACzC;AAAA,EAEA,IAAI,WAAmB;AACrB,WAAO,KAAK,OAAO;AAAA,EACrB;AAAA,EAMA,MAAc,QAAQ;AAAA,IACpB;AAAA,IACA;AAAA,IACA;AAAA,IACA;AAAA,IACA;AAAA,IACA;AAAA,IACA;AAAA,IACA;AAAA,IACA;AAAA,IACA;AAAA,IACA;AAAA,IACA;AAAA,IACA;AAAA,EACF,GAAiD;AA1EnD;AA2EI,UAAM,WAAyC,CAAC;AAEhD,UAAM,WACH,eAAM,6CAAqB;AAAA,MAC1B,UAAU;AAAA,MACV;AAAA,MACA,QAAQ;AAAA,IACV,CAAC,MAJA,YAIM,CAAC;AAEV,QAAI,QAAQ,MAAM;AAChB,eAAS,KAAK;AAAA,QACZ,MAAM;AAAA,QACN,SAAS;AAAA,MACX,CAAC;AAAA,IACH;AAEA,QAAI,oBAAoB,MAAM;AAC5B,eAAS,KAAK;AAAA,QACZ,MAAM;AAAA,QACN,SAAS;AAAA,MACX,CAAC;AAAA,IACH;AAEA,QAAI,mBAAmB,MAAM;AAC3B,eAAS,KAAK;AAAA,QACZ,MAAM;AAAA,QACN,SAAS;AAAA,MACX,CAAC;AAAA,IACH;AAEA,QAAI,iBAAiB,MAAM;AACzB,eAAS,KAAK;AAAA,QACZ,MAAM;AAAA,QACN,SAAS;AAAA,MACX,CAAC;AAAA,IACH;AAEA,UAAM,qBAAoB,aAAQ,sBAAR,YAA6B;AACvD,UAAM,oBAAmB,aAAQ,qBAAR,YAA4B;AAIrD,SAAI,iDAAgB,UAAS,UAAU,EAAC,iDAAgB,SAAQ;AAC9D,mBAAS,0DAAkC;AAAA,QACzC,UAAU;AAAA,QACV,QAAQ,eAAe;AAAA,MACzB,CAAC;AAAA,IACH;AAEA,UAAM,WAAW;AAAA;AAAA,MAEf,OAAO,KAAK;AAAA;AAAA,MAGZ,aAAa,QAAQ;AAAA;AAAA,MAGrB,YAAY;AAAA,MACZ;AAAA,MACA,OAAO;AAAA,MACP,aAAa;AAAA;AAAA,MAGb,kBACE,iDAAgB,UAAS,SACrB,sBAAqB,iDAAgB,WAAU,OAC7C;AAAA,QACE,MAAM;AAAA,QACN,aAAa;AAAA,UACX,QAAQ,eAAe;AAAA,UACvB,QAAQ;AAAA,UACR,OAAM,oBAAe,SAAf,YAAuB;AAAA,UAC7B,aAAa,eAAe;AAAA,QAC9B;AAAA,MACF,IACA,EAAE,MAAM,cAAc,IACxB;AAAA;AAAA,MAGN,sBAAsB,QAAQ;AAAA,MAC9B,qBAAqB,QAAQ;AAAA;AAAA,MAG7B,UAAU,6BAA6B,MAAM;AAAA,IAC/C;AAEA,UAAM;AAAA,MACJ,OAAO;AAAA,MACP,YAAY;AAAA,MACZ;AAAA,IACF,IAAI,aAAa;AAAA,MACf;AAAA,MACA;AAAA,IACF,CAAC;AAED,WAAO;AAAA,MACL,MAAM;AAAA,QACJ,GAAG;AAAA,QACH,OAAO;AAAA,QACP,aAAa;AAAA,QACb,GAAI,gBAAgB,QAAQ,QAAQ,sBAAsB,SACtD,EAAE,qBAAqB,QAAQ,kBAAkB,IACjD,CAAC;AAAA,MACP;AAAA,MACA,UAAU,CAAC,GAAG,UAAU,GAAG,YAAY;AAAA,IACzC;AAAA,EACF;AAAA,EAEA,MAAM,WACJ,SAC6D;AAC7D,UAAM,EAAE,MAAM,MAAM,SAAS,IAAI,MAAM,KAAK,QAAQ,OAAO;AAE3D,UAAM;AAAA,MACJ;AAAA,MACA,OAAO;AAAA,MACP,UAAU;AAAA,IACZ,IAAI,UAAM,sCAAc;AAAA,MACtB,KAAK,GAAG,KAAK,OAAO,OAAO;AAAA,MAC3B,aAAS,uCAAe,KAAK,OAAO,QAAQ,GAAG
,QAAQ,OAAO;AAAA,MAC9D;AAAA,MACA,uBAAuB;AAAA,MACvB,+BAA2B;AAAA,QACzB;AAAA,MACF;AAAA,MACA,aAAa,QAAQ;AAAA,MACrB,OAAO,KAAK,OAAO;AAAA,IACrB,CAAC;AAED,UAAM,SAAS,SAAS,QAAQ,CAAC;AACjC,UAAM,UAAyC,CAAC;AAGhD,QACE,OAAO,QAAQ,WAAW,QAC1B,MAAM,QAAQ,OAAO,QAAQ,OAAO,GACpC;AACA,iBAAW,QAAQ,OAAO,QAAQ,SAAS;AACzC,YAAI,KAAK,SAAS,YAAY;AAC5B,gBAAM,gBAAgB,wBAAwB,KAAK,QAAQ;AAC3D,cAAI,cAAc,SAAS,GAAG;AAC5B,oBAAQ,KAAK,EAAE,MAAM,aAAa,MAAM,cAAc,CAAC;AAAA,UACzD;AAAA,QACF,WAAW,KAAK,SAAS,QAAQ;AAC/B,cAAI,KAAK,KAAK,SAAS,GAAG;AACxB,oBAAQ,KAAK,EAAE,MAAM,QAAQ,MAAM,KAAK,KAAK,CAAC;AAAA,UAChD;AAAA,QACF;AAAA,MACF;AAAA,IACF,OAAO;AAEL,YAAM,OAAO,mBAAmB,OAAO,QAAQ,OAAO;AACtD,UAAI,QAAQ,QAAQ,KAAK,SAAS,GAAG;AACnC,gBAAQ,KAAK,EAAE,MAAM,QAAQ,KAAK,CAAC;AAAA,MACrC;AAAA,IACF;AAOA,QAAI,OAAO,QAAQ,cAAc,MAAM;AACrC,iBAAW,YAAY,OAAO,QAAQ,YAAY;AAChD,gBAAQ,KAAK;AAAA,UACX,MAAM;AAAA,UACN,YAAY,SAAS;AAAA,UACrB,UAAU,SAAS,SAAS;AAAA,UAC5B,OAAO,SAAS,SAAS;AAAA,QAC3B,CAAC;AAAA,MACH;AAAA,IACF;AAEA,WAAO;AAAA,MACL;AAAA,MACA,cAAc,uBAAuB,OAAO,aAAa;AAAA,MACzD,OAAO;AAAA,QACL,aAAa,SAAS,MAAM;AAAA,QAC5B,cAAc,SAAS,MAAM;AAAA,QAC7B,aAAa,SAAS,MAAM;AAAA,MAC9B;AAAA,MACA,SAAS,EAAE,KAAK;AAAA,MAChB,UAAU;AAAA,QACR,GAAG,oBAAoB,QAAQ;AAAA,QAC/B,SAAS;AAAA,QACT,MAAM;AAAA,MACR;AAAA,MACA;AAAA,IACF;AAAA,EACF;AAAA,EAEA,MAAM,SACJ,SAC2D;AAC3D,UAAM,EAAE,MAAM,SAAS,IAAI,MAAM,KAAK,QAAQ,OAAO;AACrD,UAAM,OAAO,EAAE,GAAG,MAAM,QAAQ,KAAK;AAErC,UAAM,EAAE,iBAAiB,OAAO,SAAS,IAAI,UAAM,sCAAc;AAAA,MAC/D,KAAK,GAAG,KAAK,OAAO,OAAO;AAAA,MAC3B,aAAS,uCAAe,KAAK,OAAO,QAAQ,GAAG,QAAQ,OAAO;AAAA,MAC9D;AAAA,MACA,uBAAuB;AAAA,MACvB,+BAA2B;AAAA,QACzB;AAAA,MACF;AAAA,MACA,aAAa,QAAQ;AAAA,MACrB,OAAO,KAAK,OAAO;AAAA,IACrB,CAAC;AAED,QAAI,eAA4C;AAChD,UAAM,QAA8B;AAAA,MAClC,aAAa;AAAA,MACb,cAAc;AAAA,MACd,aAAa;AAAA,IACf;AAEA,QAAI,eAAe;AACnB,QAAI,aAAa;AACjB,QAAI,oBAAmC;AAEvC,UAAMC,cAAa,KAAK;AAExB,WAAO;AAAA,MACL,QAAQ,SAAS;AAAA,QACf,IAAI,gBAGF;AAAA,UACA,MAAM,YAAY;AAChB,uBAAW,QAAQ,EAAE,MAAM,gBAAgB,SAAS,CAAC;AAAA,UACvD;AAAA,UAEA,UAAU,OAAO,YAAY;AAE3B,gBAAI,QAAQ,kBAAkB;AAC5B,yBAAW,QAAQ,EAAE,MAAM,OAAO,UAAU,MAAM,SAAS,CAAC;AAAA,YAC9D;AAEA,gBAAI,CAAC,MAAM,SAAS;AAClB,yBAAW,QAAQ,EAAE,MAAM,SAAS,OAAO,MAAM,MAAM,CAAC;AACxD;AAAA,YACF;AAEA,kBAAM,QAAQ,MAAM;AAEpB,gBAAI,cAAc;AAChB,6BAAe;AAEf,yBAAW,QAAQ;AAAA,gBACjB,MAAM;AAAA,gBACN,GAAG,oBAAoB,KAAK;AAAA,cAC9B,CAAC;AAAA,YACH;AAEA,gBAAI,MAAM,SAAS,MAAM;AACvB,oBAAM,cAAc,MAAM,MAAM;AAChC,oBAAM,eAAe,MAAM,MAAM;AACjC,oBAAM,cAAc,MAAM,MAAM;AAAA,YAClC;AAEA,kBAAM,SAAS,MAAM,QAAQ,CAAC;AAC9B,kBAAM,QAAQ,OAAO;AAErB,kBAAM,cAAc,mBAAmB,MAAM,OAAO;AAEpD,gBAAI,MAAM,WAAW,QAAQ,MAAM,QAAQ,MAAM,OAAO,GAAG;AACzD,yBAAW,QAAQ,MAAM,SAAS;AAChC,oBAAI,KAAK,SAAS,YAAY;AAC5B,wBAAM,iBAAiB,wBAAwB,KAAK,QAAQ;AAC5D,sBAAI,eAAe,SAAS,GAAG;AAC7B,wBAAI,qBAAqB,MAAM;AAE7B,0BAAI,YAAY;AACd,mCAAW,QAAQ,EAAE,MAAM,YAAY,IAAI,IAAI,CAAC;AAChD,qCAAa;AAAA,sBACf;AAEA,0CAAoBA,YAAW;AAC/B,iCAAW,QAAQ;AAAA,wBACjB,MAAM;AAAA,wBACN,IAAI;AAAA,sBACN,CAAC;AAAA,oBACH;AACA,+BAAW,QAAQ;AAAA,sBACjB,MAAM;AAAA,sBACN,IAAI;AAAA,sBACJ,OAAO;AAAA,oBACT,CAAC;AAAA,kBACH;AAAA,gBACF;AAAA,cACF;AAAA,YACF;AAEA,gBAAI,eAAe,QAAQ,YAAY,SAAS,GAAG;AACjD,kBAAI,CAAC,YAAY;AAEf,oBAAI,qBAAqB,MAAM;AAC7B,6BAAW,QAAQ;AAAA,oBACjB,MAAM;AAAA,oBACN,IAAI;AAAA,kBACN,CAAC;AACD,sCAAoB;AAAA,gBACtB;AACA,2BAAW,QAAQ,EAAE,MAAM,cAAc,IAAI,IAAI,CAAC;AAClD,6BAAa;AAAA,cACf;AAEA,yBAAW,QAAQ;AAAA,gBACjB,MAAM;AAAA,gBACN,IAAI;AAAA,gBACJ,OAAO;AAAA,cACT,CAAC;AAAA,YACH;AAEA,iBAAI,+BAAO,eAAc,MAAM;AAC7B,yBAAW,YAAY,MAAM,YAAY;AACvC,sBAAM,aAAa,SAAS;AAC5B,sBAAM,WAAW,SAAS,SAAS;AACnC,sBAAM,QAAQ,SAAS,SAAS;AAEhC,2BAAW,QAAQ;AAAA,kBACjB,MAAM;AAAA,kBACN,IAAI;AAAA,kBACJ;AAAA,gBACF,CAAC;AAED,2BAAW,QAAQ;AAAA,kBACjB,MAAM;AAAA,kBACN,IAAI;AAAA,kBACJ,OAAO;AAAA,gBACT,CAA
C;AAED,2BAAW,QAAQ;AAAA,kBACjB,MAAM;AAAA,kBACN,IAAI;AAAA,gBACN,CAAC;AAED,2BAAW,QAAQ;AAAA,kBACjB,MAAM;AAAA,kBACN;AAAA,kBACA;AAAA,kBACA;AAAA,gBACF,CAAC;AAAA,cACH;AAAA,YACF;AAEA,gBAAI,OAAO,iBAAiB,MAAM;AAChC,6BAAe,uBAAuB,OAAO,aAAa;AAAA,YAC5D;AAAA,UACF;AAAA,UAEA,MAAM,YAAY;AAChB,gBAAI,qBAAqB,MAAM;AAC7B,yBAAW,QAAQ;AAAA,gBACjB,MAAM;AAAA,gBACN,IAAI;AAAA,cACN,CAAC;AAAA,YACH;AACA,gBAAI,YAAY;AACd,yBAAW,QAAQ,EAAE,MAAM,YAAY,IAAI,IAAI,CAAC;AAAA,YAClD;AAEA,uBAAW,QAAQ;AAAA,cACjB,MAAM;AAAA,cACN;AAAA,cACA;AAAA,YACF,CAAC;AAAA,UACH;AAAA,QACF,CAAC;AAAA,MACH;AAAA,MACA,SAAS,EAAE,KAAK;AAAA,MAChB,UAAU,EAAE,SAAS,gBAAgB;AAAA,IACvC;AAAA,EACF;AACF;AAEA,SAAS,wBACP,UACA;AACA,SAAO,SACJ,OAAO,WAAS,MAAM,SAAS,MAAM,EACrC,IAAI,WAAS,MAAM,IAAI,EACvB,KAAK,EAAE;AACZ;AAEA,SAAS,mBAAmB,SAA+C;AACzE,MAAI,OAAO,YAAY,UAAU;AAC/B,WAAO;AAAA,EACT;AAEA,MAAI,WAAW,MAAM;AACnB,WAAO;AAAA,EACT;AAEA,QAAM,cAAwB,CAAC;AAE/B,aAAW,SAAS,SAAS;AAC3B,UAAM,EAAE,KAAK,IAAI;AAEjB,YAAQ,MAAM;AAAA,MACZ,KAAK;AACH,oBAAY,KAAK,MAAM,IAAI;AAC3B;AAAA,MACF,KAAK;AAAA,MACL,KAAK;AAAA,MACL,KAAK;AAEH;AAAA,MACF,SAAS;AACP,cAAM,mBAA0B;AAChC,cAAM,IAAI,MAAM,qBAAqB,gBAAgB,EAAE;AAAA,MACzD;AAAA,IACF;AAAA,EACF;AAEA,SAAO,YAAY,SAAS,YAAY,KAAK,EAAE,IAAI;AACrD;AAEA,IAAM,uBAAuB,aAC1B,MAAM;AAAA,EACL,aAAE,OAAO;AAAA,EACT,aAAE;AAAA,IACA,aAAE,mBAAmB,QAAQ;AAAA,MAC3B,aAAE,OAAO;AAAA,QACP,MAAM,aAAE,QAAQ,MAAM;AAAA,QACtB,MAAM,aAAE,OAAO;AAAA,MACjB,CAAC;AAAA,MACD,aAAE,OAAO;AAAA,QACP,MAAM,aAAE,QAAQ,WAAW;AAAA,QAC3B,WAAW,aAAE,MAAM;AAAA,UACjB,aAAE,OAAO;AAAA,UACT,aAAE,OAAO;AAAA,YACP,KAAK,aAAE,OAAO;AAAA,YACd,QAAQ,aAAE,OAAO,EAAE,SAAS;AAAA,UAC9B,CAAC;AAAA,QACH,CAAC;AAAA,MACH,CAAC;AAAA,MACD,aAAE,OAAO;AAAA,QACP,MAAM,aAAE,QAAQ,WAAW;AAAA,QAC3B,eAAe,aAAE,MAAM,aAAE,OAAO,CAAC;AAAA,MACnC,CAAC;AAAA,MACD,aAAE,OAAO;AAAA,QACP,MAAM,aAAE,QAAQ,UAAU;AAAA,QAC1B,UAAU,aAAE;AAAA,UACV,aAAE,OAAO;AAAA,YACP,MAAM,aAAE,QAAQ,MAAM;AAAA,YACtB,MAAM,aAAE,OAAO;AAAA,UACjB,CAAC;AAAA,QACH;AAAA,MACF,CAAC;AAAA,IACH,CAAC;AAAA,EACH;AACF,CAAC,EACA,QAAQ;AAEX,IAAM,qBAAqB,aAAE,OAAO;AAAA,EAClC,eAAe,aAAE,OAAO;AAAA,EACxB,mBAAmB,aAAE,OAAO;AAAA,EAC5B,cAAc,aAAE,OAAO;AACzB,CAAC;AAID,IAAM,4BAA4B,aAAE,OAAO;AAAA,EACzC,IAAI,aAAE,OAAO,EAAE,QAAQ;AAAA,EACvB,SAAS,aAAE,OAAO,EAAE,QAAQ;AAAA,EAC5B,OAAO,aAAE,OAAO,EAAE,QAAQ;AAAA,EAC1B,SAAS,aAAE;AAAA,IACT,aAAE,OAAO;AAAA,MACP,SAAS,aAAE,OAAO;AAAA,QAChB,MAAM,aAAE,QAAQ,WAAW;AAAA,QAC3B,SAAS;AAAA,QACT,YAAY,aACT;AAAA,UACC,aAAE,OAAO;AAAA,YACP,IAAI,aAAE,OAAO;AAAA,YACb,UAAU,aAAE,OAAO,EAAE,MAAM,aAAE,OAAO,GAAG,WAAW,aAAE,OAAO,EAAE,CAAC;AAAA,UAChE,CAAC;AAAA,QACH,EACC,QAAQ;AAAA,MACb,CAAC;AAAA,MACD,OAAO,aAAE,OAAO;AAAA,MAChB,eAAe,aAAE,OAAO,EAAE,QAAQ;AAAA,IACpC,CAAC;AAAA,EACH;AAAA,EACA,QAAQ,aAAE,QAAQ,iBAAiB;AAAA,EACnC,OAAO;AACT,CAAC;AAID,IAAM,yBAAyB,aAAE,OAAO;AAAA,EACtC,IAAI,aAAE,OAAO,EAAE,QAAQ;AAAA,EACvB,SAAS,aAAE,OAAO,EAAE,QAAQ;AAAA,EAC5B,OAAO,aAAE,OAAO,EAAE,QAAQ;AAAA,EAC1B,SAAS,aAAE;AAAA,IACT,aAAE,OAAO;AAAA,MACP,OAAO,aAAE,OAAO;AAAA,QACd,MAAM,aAAE,KAAK,CAAC,WAAW,CAAC,EAAE,SAAS;AAAA,QACrC,SAAS;AAAA,QACT,YAAY,aACT;AAAA,UACC,aAAE,OAAO;AAAA,YACP,IAAI,aAAE,OAAO;AAAA,YACb,UAAU,aAAE,OAAO,EAAE,MAAM,aAAE,OAAO,GAAG,WAAW,aAAE,OAAO,EAAE,CAAC;AAAA,UAChE,CAAC;AAAA,QACH,EACC,QAAQ;AAAA,MACb,CAAC;AAAA,MACD,eAAe,aAAE,OAAO,EAAE,QAAQ;AAAA,MAClC,OAAO,aAAE,OAAO;AAAA,IAClB,CAAC;AAAA,EACH;AAAA,EACA,OAAO,mBAAmB,QAAQ;AACpC,CAAC;;;AO7kBD,IAAAC,mBAGO;AACP,IAAAC,yBAKO;AACP,IAAAC,aAAkB;AAWX,IAAM,wBAAN,MAAgE;AAAA,EAYrE,YACE,SACA,QACA;AAdF,SAAS,uBAAuB;AAEhC,SAAS,uBAAuB;AAChC,SAAS,wBAAwB;AAY/B,SAAK,UAAU;AACf,SAAK,SAAS;AAAA,EAChB;AAAA,EAVA,IAAI,WAAmB;AACrB,WAAO,KAAK,OAAO;AAAA,EACrB;AAAA,EAUA,MAAM,QAAQ;AAAA,IACZ;AAAA,IACA;AAAA,IACA;AAAA,EACF,GAEE;AACA,QAAI,OAAO,SAAS,KAAK,sBAAsB;AA
C7C,YAAM,IAAI,oDAAmC;AAAA,QAC3C,UAAU,KAAK;AAAA,QACf,SAAS,KAAK;AAAA,QACd,sBAAsB,KAAK;AAAA,QAC3B;AAAA,MACF,CAAC;AAAA,IACH;AAEA,UAAM;AAAA,MACJ;AAAA,MACA,OAAO;AAAA,MACP;AAAA,IACF,IAAI,UAAM,sCAAc;AAAA,MACtB,KAAK,GAAG,KAAK,OAAO,OAAO;AAAA,MAC3B,aAAS,uCAAe,KAAK,OAAO,QAAQ,GAAG,OAAO;AAAA,MACtD,MAAM;AAAA,QACJ,OAAO,KAAK;AAAA,QACZ,OAAO;AAAA,QACP,iBAAiB;AAAA,MACnB;AAAA,MACA,uBAAuB;AAAA,MACvB,+BAA2B;AAAA,QACzB;AAAA,MACF;AAAA,MACA;AAAA,MACA,OAAO,KAAK,OAAO;AAAA,IACrB,CAAC;AAED,WAAO;AAAA,MACL,YAAY,SAAS,KAAK,IAAI,UAAQ,KAAK,SAAS;AAAA,MACpD,OAAO,SAAS,QACZ,EAAE,QAAQ,SAAS,MAAM,cAAc,IACvC;AAAA,MACJ,UAAU,EAAE,SAAS,iBAAiB,MAAM,SAAS;AAAA,IACvD;AAAA,EACF;AACF;AAIA,IAAM,qCAAqC,aAAE,OAAO;AAAA,EAClD,MAAM,aAAE,MAAM,aAAE,OAAO,EAAE,WAAW,aAAE,MAAM,aAAE,OAAO,CAAC,EAAE,CAAC,CAAC;AAAA,EAC1D,OAAO,aAAE,OAAO,EAAE,eAAe,aAAE,OAAO,EAAE,CAAC,EAAE,QAAQ;AACzD,CAAC;;;AC1FM,IAAM,UACX,OACI,kBACA;;;AToEC,SAAS,cACd,UAAmC,CAAC,GACnB;AA3EnB;AA4EE,QAAM,WACJ,sDAAqB,QAAQ,OAAO,MAApC,YAAyC;AAE3C,QAAM,aAAa,UACjB;AAAA,IACE;AAAA,MACE,eAAe,cAAU,mCAAW;AAAA,QAClC,QAAQ,QAAQ;AAAA,QAChB,yBAAyB;AAAA,QACzB,aAAa;AAAA,MACf,CAAC,CAAC;AAAA,MACF,GAAG,QAAQ;AAAA,IACb;AAAA,IACA,kBAAkB,OAAO;AAAA,EAC3B;AAEF,QAAM,kBAAkB,CAAC,YACvB,IAAI,yBAAyB,SAAS;AAAA,IACpC,UAAU;AAAA,IACV;AAAA,IACA,SAAS;AAAA,IACT,OAAO,QAAQ;AAAA,IACf,YAAY,QAAQ;AAAA,EACtB,CAAC;AAEH,QAAM,uBAAuB,CAAC,YAC5B,IAAI,sBAAsB,SAAS;AAAA,IACjC,UAAU;AAAA,IACV;AAAA,IACA,SAAS;AAAA,IACT,OAAO,QAAQ;AAAA,EACjB,CAAC;AAEH,QAAM,WAAW,SAAU,SAA6B;AACtD,QAAI,YAAY;AACd,YAAM,IAAI;AAAA,QACR;AAAA,MACF;AAAA,IACF;AAEA,WAAO,gBAAgB,OAAO;AAAA,EAChC;AAEA,WAAS,gBAAgB;AACzB,WAAS,OAAO;AAChB,WAAS,YAAY;AACrB,WAAS,gBAAgB;AACzB,WAAS,qBAAqB;AAE9B,WAAS,aAAa,CAAC,YAAoB;AACzC,UAAM,IAAI,kCAAiB,EAAE,SAAS,WAAW,aAAa,CAAC;AAAA,EACjE;AAEA,SAAO;AACT;AAKO,IAAM,UAAU,cAAc;","names":["import_provider","import_provider_utils","import_provider_utils","import_v4","import_provider_utils","import_v4","import_provider","generateId","import_provider","import_provider_utils","import_v4"]}
package/dist/index.mjs CHANGED
@@ -4,7 +4,8 @@ import {
4
4
  } from "@ai-sdk/provider";
5
5
  import {
6
6
  loadApiKey,
7
- withoutTrailingSlash
7
+ withoutTrailingSlash,
8
+ withUserAgentSuffix
8
9
  } from "@ai-sdk/provider-utils";
9
10
 
10
11
  // src/mistral-chat-language-model.ts
@@ -25,6 +26,7 @@ import {
25
26
  } from "@ai-sdk/provider";
26
27
  import { convertToBase64 } from "@ai-sdk/provider-utils";
27
28
  function convertToMistralChatMessages(prompt) {
29
+ var _a;
28
30
  const messages = [];
29
31
  for (let i = 0; i < prompt.length; i++) {
30
32
  const { role, content } = prompt[i];
@@ -113,6 +115,9 @@ function convertToMistralChatMessages(prompt) {
113
115
  case "error-text":
114
116
  contentValue = output.value;
115
117
  break;
118
+ case "execution-denied":
119
+ contentValue = (_a = output.reason) != null ? _a : "Tool execution denied.";
120
+ break;
116
121
  case "content":
117
122
  case "json":
118
123
  case "error-json":
@@ -187,7 +192,14 @@ var mistralLanguageModelOptions = z.object({
187
192
  *
188
193
  * @default false
189
194
  */
190
- strictJsonSchema: z.boolean().optional()
195
+ strictJsonSchema: z.boolean().optional(),
196
+ /**
197
+ * Whether to enable parallel function calling during tool use.
198
+ * When set to false, the model will use at most one tool per response.
199
+ *
200
+ * @default true
201
+ */
202
+ parallelToolCalls: z.boolean().optional()
191
203
  });
192
204
 
193
205
  // src/mistral-error.ts
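The new `parallelToolCalls` option is validated through `mistralLanguageModelOptions` and reaches the model via provider options under the `mistral` key. A hedged usage sketch, assuming the `generateText` and `tool` helpers from a matching `ai` 5.x beta; the weather tool and prompt are hypothetical:

```ts
import { mistral } from '@ai-sdk/mistral';
import { generateText, tool } from 'ai';
import { z } from 'zod';

// Hypothetical tool, for illustration only.
const getWeather = tool({
  description: 'Get the current weather for a city',
  inputSchema: z.object({ city: z.string() }),
  execute: async ({ city }) => `Sunny in ${city}`,
});

const result = await generateText({
  model: mistral('mistral-large-latest'),
  tools: { getWeather },
  prompt: 'What is the weather in Paris and in Rome?',
  // parallelToolCalls: false asks Mistral to call at most one tool per response;
  // leaving it unset keeps the server-side default (parallel calls enabled).
  providerOptions: { mistral: { parallelToolCalls: false } },
});
```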
@@ -243,6 +255,8 @@ function prepareTools({
243
255
  return { tools: mistralTools, toolChoice: type, toolWarnings };
244
256
  case "required":
245
257
  return { tools: mistralTools, toolChoice: "any", toolWarnings };
258
+ // mistral does not support tool mode directly,
259
+ // so we filter the tools and force the tool choice through 'any'
246
260
  case "tool":
247
261
  return {
248
262
  tools: mistralTools.filter(
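The comment added in this hunk documents how a forced tool choice is emulated: Mistral has no direct "use this specific tool" mode, so the provider sends only the requested tool together with `tool_choice: 'any'`. Roughly, with hypothetical tool definitions:

```ts
// Hypothetical tool list, for illustration only.
const allMistralTools = [
  { type: 'function' as const, function: { name: 'getWeather', description: undefined, parameters: {} } },
  { type: 'function' as const, function: { name: 'getTime', description: undefined, parameters: {} } },
];

// toolChoice: { type: 'tool', toolName: 'getWeather' } is translated into:
const forcedToolRequest = {
  tools: allMistralTools.filter(t => t.function.name === 'getWeather'),
  tool_choice: 'any' as const,
};
```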
@@ -263,7 +277,7 @@ function prepareTools({
263
277
  // src/mistral-chat-language-model.ts
264
278
  var MistralChatLanguageModel = class {
265
279
  constructor(modelId, config) {
266
- this.specificationVersion = "v2";
280
+ this.specificationVersion = "v3";
267
281
  this.supportedUrls = {
268
282
  "application/pdf": [/^https:\/\/.*$/]
269
283
  };
@@ -367,7 +381,8 @@ var MistralChatLanguageModel = class {
367
381
  args: {
368
382
  ...baseArgs,
369
383
  tools: mistralTools,
370
- tool_choice: mistralToolChoice
384
+ tool_choice: mistralToolChoice,
385
+ ...mistralTools != null && options.parallelToolCalls !== void 0 ? { parallel_tool_calls: options.parallelToolCalls } : {}
371
386
  },
372
387
  warnings: [...warnings, ...toolWarnings]
373
388
  };
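Note the conditional spread in this hunk: `parallel_tool_calls` is only added to the request body when tools are actually supplied and the caller set `parallelToolCalls` explicitly, so an unset option leaves Mistral's default untouched. A small standalone sketch of that idiom:

```ts
function withParallelToolCalls(
  body: Record<string, unknown>,
  tools: unknown[] | undefined,
  parallelToolCalls: boolean | undefined,
): Record<string, unknown> {
  return {
    ...body,
    // Only send the flag when it was explicitly set and tools are in play.
    ...(tools != null && parallelToolCalls !== undefined
      ? { parallel_tool_calls: parallelToolCalls }
      : {}),
  };
}
```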
@@ -714,7 +729,7 @@ import {
714
729
  import { z as z4 } from "zod/v4";
715
730
  var MistralEmbeddingModel = class {
716
731
  constructor(modelId, config) {
717
- this.specificationVersion = "v2";
732
+ this.specificationVersion = "v3";
718
733
  this.maxEmbeddingsPerCall = 32;
719
734
  this.supportsParallelCalls = false;
720
735
  this.modelId = modelId;
@@ -767,18 +782,24 @@ var MistralTextEmbeddingResponseSchema = z4.object({
767
782
  usage: z4.object({ prompt_tokens: z4.number() }).nullish()
768
783
  });
769
784
 
785
+ // src/version.ts
786
+ var VERSION = true ? "2.1.0-beta.10" : "0.0.0-test";
787
+
770
788
  // src/mistral-provider.ts
771
789
  function createMistral(options = {}) {
772
790
  var _a;
773
791
  const baseURL = (_a = withoutTrailingSlash(options.baseURL)) != null ? _a : "https://api.mistral.ai/v1";
774
- const getHeaders = () => ({
775
- Authorization: `Bearer ${loadApiKey({
776
- apiKey: options.apiKey,
777
- environmentVariableName: "MISTRAL_API_KEY",
778
- description: "Mistral"
779
- })}`,
780
- ...options.headers
781
- });
792
+ const getHeaders = () => withUserAgentSuffix(
793
+ {
794
+ Authorization: `Bearer ${loadApiKey({
795
+ apiKey: options.apiKey,
796
+ environmentVariableName: "MISTRAL_API_KEY",
797
+ description: "Mistral"
798
+ })}`,
799
+ ...options.headers
800
+ },
801
+ `ai-sdk/mistral/${VERSION}`
802
+ );
782
803
  const createChatModel = (modelId) => new MistralChatLanguageModel(modelId, {
783
804
  provider: "mistral.chat",
784
805
  baseURL,
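`getHeaders` now routes through `withUserAgentSuffix`, appending `ai-sdk/mistral/<VERSION>` to the outgoing user-agent. A quick way to observe this is a custom `fetch`; this is a sketch, and the exact header casing/values depend on the runtime:

```ts
import { createMistral } from '@ai-sdk/mistral';

const mistral = createMistral({
  apiKey: process.env.MISTRAL_API_KEY,
  fetch: async (input, init) => {
    // The user-agent should now end with something like "ai-sdk/mistral/2.1.0-beta.10".
    console.log(init?.headers);
    return fetch(input, init);
  },
});
```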
@@ -812,6 +833,7 @@ function createMistral(options = {}) {
812
833
  }
813
834
  var mistral = createMistral();
814
835
  export {
836
+ VERSION,
815
837
  createMistral,
816
838
  mistral
817
839
  };
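`VERSION` is now part of the public export surface (injected at build time, falling back to `0.0.0-test` in unbuilt sources), which makes it easy to log which provider build is in use:

```ts
import { VERSION, mistral } from '@ai-sdk/mistral';

console.log(`Using @ai-sdk/mistral ${VERSION}`);
const model = mistral('mistral-small-latest');
```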
@@ -1 +1 @@
1
- {"version":3,"sources":["../src/mistral-provider.ts","../src/mistral-chat-language-model.ts","../src/convert-to-mistral-chat-messages.ts","../src/get-response-metadata.ts","../src/map-mistral-finish-reason.ts","../src/mistral-chat-options.ts","../src/mistral-error.ts","../src/mistral-prepare-tools.ts","../src/mistral-embedding-model.ts"],"sourcesContent":["import {\n EmbeddingModelV2,\n LanguageModelV2,\n NoSuchModelError,\n ProviderV2,\n} from '@ai-sdk/provider';\nimport {\n FetchFunction,\n loadApiKey,\n withoutTrailingSlash,\n} from '@ai-sdk/provider-utils';\nimport { MistralChatLanguageModel } from './mistral-chat-language-model';\nimport { MistralChatModelId } from './mistral-chat-options';\nimport { MistralEmbeddingModel } from './mistral-embedding-model';\nimport { MistralEmbeddingModelId } from './mistral-embedding-options';\n\nexport interface MistralProvider extends ProviderV2 {\n (modelId: MistralChatModelId): LanguageModelV2;\n\n /**\nCreates a model for text generation.\n*/\n languageModel(modelId: MistralChatModelId): LanguageModelV2;\n\n /**\nCreates a model for text generation.\n*/\n chat(modelId: MistralChatModelId): LanguageModelV2;\n\n /**\n@deprecated Use `textEmbedding()` instead.\n */\n embedding(modelId: MistralEmbeddingModelId): EmbeddingModelV2<string>;\n\n textEmbedding(modelId: MistralEmbeddingModelId): EmbeddingModelV2<string>;\n\n textEmbeddingModel: (\n modelId: MistralEmbeddingModelId,\n ) => EmbeddingModelV2<string>;\n}\n\nexport interface MistralProviderSettings {\n /**\nUse a different URL prefix for API calls, e.g. to use proxy servers.\nThe default prefix is `https://api.mistral.ai/v1`.\n */\n baseURL?: string;\n\n /**\nAPI key that is being send using the `Authorization` header.\nIt defaults to the `MISTRAL_API_KEY` environment variable.\n */\n apiKey?: string;\n\n /**\nCustom headers to include in the requests.\n */\n headers?: Record<string, string>;\n\n /**\nCustom fetch implementation. You can use it as a middleware to intercept requests,\nor to provide a custom fetch implementation for e.g. testing.\n */\n fetch?: FetchFunction;\n\n generateId?: () => string;\n}\n\n/**\nCreate a Mistral AI provider instance.\n */\nexport function createMistral(\n options: MistralProviderSettings = {},\n): MistralProvider {\n const baseURL =\n withoutTrailingSlash(options.baseURL) ?? 
'https://api.mistral.ai/v1';\n\n const getHeaders = () => ({\n Authorization: `Bearer ${loadApiKey({\n apiKey: options.apiKey,\n environmentVariableName: 'MISTRAL_API_KEY',\n description: 'Mistral',\n })}`,\n ...options.headers,\n });\n\n const createChatModel = (modelId: MistralChatModelId) =>\n new MistralChatLanguageModel(modelId, {\n provider: 'mistral.chat',\n baseURL,\n headers: getHeaders,\n fetch: options.fetch,\n generateId: options.generateId,\n });\n\n const createEmbeddingModel = (modelId: MistralEmbeddingModelId) =>\n new MistralEmbeddingModel(modelId, {\n provider: 'mistral.embedding',\n baseURL,\n headers: getHeaders,\n fetch: options.fetch,\n });\n\n const provider = function (modelId: MistralChatModelId) {\n if (new.target) {\n throw new Error(\n 'The Mistral model function cannot be called with the new keyword.',\n );\n }\n\n return createChatModel(modelId);\n };\n\n provider.languageModel = createChatModel;\n provider.chat = createChatModel;\n provider.embedding = createEmbeddingModel;\n provider.textEmbedding = createEmbeddingModel;\n provider.textEmbeddingModel = createEmbeddingModel;\n\n provider.imageModel = (modelId: string) => {\n throw new NoSuchModelError({ modelId, modelType: 'imageModel' });\n };\n\n return provider;\n}\n\n/**\nDefault Mistral provider instance.\n */\nexport const mistral = createMistral();\n","import {\n LanguageModelV2,\n LanguageModelV2CallWarning,\n LanguageModelV2Content,\n LanguageModelV2FinishReason,\n LanguageModelV2StreamPart,\n LanguageModelV2Usage,\n} from '@ai-sdk/provider';\nimport {\n combineHeaders,\n createEventSourceResponseHandler,\n createJsonResponseHandler,\n FetchFunction,\n generateId,\n injectJsonInstructionIntoMessages,\n parseProviderOptions,\n ParseResult,\n postJsonToApi,\n} from '@ai-sdk/provider-utils';\nimport { z } from 'zod/v4';\nimport { convertToMistralChatMessages } from './convert-to-mistral-chat-messages';\nimport { getResponseMetadata } from './get-response-metadata';\nimport { mapMistralFinishReason } from './map-mistral-finish-reason';\nimport {\n MistralChatModelId,\n mistralLanguageModelOptions,\n} from './mistral-chat-options';\nimport { mistralFailedResponseHandler } from './mistral-error';\nimport { prepareTools } from './mistral-prepare-tools';\n\ntype MistralChatConfig = {\n provider: string;\n baseURL: string;\n headers: () => Record<string, string | undefined>;\n fetch?: FetchFunction;\n generateId?: () => string;\n};\n\nexport class MistralChatLanguageModel implements LanguageModelV2 {\n readonly specificationVersion = 'v2';\n\n readonly modelId: MistralChatModelId;\n\n private readonly config: MistralChatConfig;\n private readonly generateId: () => string;\n\n constructor(modelId: MistralChatModelId, config: MistralChatConfig) {\n this.modelId = modelId;\n this.config = config;\n this.generateId = config.generateId ?? generateId;\n }\n\n get provider(): string {\n return this.config.provider;\n }\n\n readonly supportedUrls: Record<string, RegExp[]> = {\n 'application/pdf': [/^https:\\/\\/.*$/],\n };\n\n private async getArgs({\n prompt,\n maxOutputTokens,\n temperature,\n topP,\n topK,\n frequencyPenalty,\n presencePenalty,\n stopSequences,\n responseFormat,\n seed,\n providerOptions,\n tools,\n toolChoice,\n }: Parameters<LanguageModelV2['doGenerate']>[0]) {\n const warnings: LanguageModelV2CallWarning[] = [];\n\n const options =\n (await parseProviderOptions({\n provider: 'mistral',\n providerOptions,\n schema: mistralLanguageModelOptions,\n })) ?? 
{};\n\n if (topK != null) {\n warnings.push({\n type: 'unsupported-setting',\n setting: 'topK',\n });\n }\n\n if (frequencyPenalty != null) {\n warnings.push({\n type: 'unsupported-setting',\n setting: 'frequencyPenalty',\n });\n }\n\n if (presencePenalty != null) {\n warnings.push({\n type: 'unsupported-setting',\n setting: 'presencePenalty',\n });\n }\n\n if (stopSequences != null) {\n warnings.push({\n type: 'unsupported-setting',\n setting: 'stopSequences',\n });\n }\n\n const structuredOutputs = options.structuredOutputs ?? true;\n const strictJsonSchema = options.strictJsonSchema ?? false;\n\n // For Mistral we need to need to instruct the model to return a JSON object.\n // https://docs.mistral.ai/capabilities/structured-output/structured_output_overview/\n if (responseFormat?.type === 'json' && !responseFormat?.schema) {\n prompt = injectJsonInstructionIntoMessages({\n messages: prompt,\n schema: responseFormat.schema,\n });\n }\n\n const baseArgs = {\n // model id:\n model: this.modelId,\n\n // model specific settings:\n safe_prompt: options.safePrompt,\n\n // standardized settings:\n max_tokens: maxOutputTokens,\n temperature,\n top_p: topP,\n random_seed: seed,\n\n // response format:\n response_format:\n responseFormat?.type === 'json'\n ? structuredOutputs && responseFormat?.schema != null\n ? {\n type: 'json_schema',\n json_schema: {\n schema: responseFormat.schema,\n strict: strictJsonSchema,\n name: responseFormat.name ?? 'response',\n description: responseFormat.description,\n },\n }\n : { type: 'json_object' }\n : undefined,\n\n // mistral-specific provider options:\n document_image_limit: options.documentImageLimit,\n document_page_limit: options.documentPageLimit,\n\n // messages:\n messages: convertToMistralChatMessages(prompt),\n };\n\n const {\n tools: mistralTools,\n toolChoice: mistralToolChoice,\n toolWarnings,\n } = prepareTools({\n tools,\n toolChoice,\n });\n\n return {\n args: {\n ...baseArgs,\n tools: mistralTools,\n tool_choice: mistralToolChoice,\n },\n warnings: [...warnings, ...toolWarnings],\n };\n }\n\n async doGenerate(\n options: Parameters<LanguageModelV2['doGenerate']>[0],\n ): Promise<Awaited<ReturnType<LanguageModelV2['doGenerate']>>> {\n const { args: body, warnings } = await this.getArgs(options);\n\n const {\n responseHeaders,\n value: response,\n rawValue: rawResponse,\n } = await postJsonToApi({\n url: `${this.config.baseURL}/chat/completions`,\n headers: combineHeaders(this.config.headers(), options.headers),\n body,\n failedResponseHandler: mistralFailedResponseHandler,\n successfulResponseHandler: createJsonResponseHandler(\n mistralChatResponseSchema,\n ),\n abortSignal: options.abortSignal,\n fetch: this.config.fetch,\n });\n\n const choice = response.choices[0];\n const content: Array<LanguageModelV2Content> = [];\n\n // process content parts in order to preserve sequence\n if (\n choice.message.content != null &&\n Array.isArray(choice.message.content)\n ) {\n for (const part of choice.message.content) {\n if (part.type === 'thinking') {\n const reasoningText = extractReasoningContent(part.thinking);\n if (reasoningText.length > 0) {\n content.push({ type: 'reasoning', text: reasoningText });\n }\n } else if (part.type === 'text') {\n if (part.text.length > 0) {\n content.push({ type: 'text', text: part.text });\n }\n }\n }\n } else {\n // handle legacy string content\n const text = extractTextContent(choice.message.content);\n if (text != null && text.length > 0) {\n content.push({ type: 'text', text });\n }\n }\n\n // when there is a 
trailing assistant message, mistral will send the\n // content of that message again. we skip this repeated content to\n // avoid duplication, e.g. in continuation mode.\n\n // tool calls:\n if (choice.message.tool_calls != null) {\n for (const toolCall of choice.message.tool_calls) {\n content.push({\n type: 'tool-call',\n toolCallId: toolCall.id,\n toolName: toolCall.function.name,\n input: toolCall.function.arguments!,\n });\n }\n }\n\n return {\n content,\n finishReason: mapMistralFinishReason(choice.finish_reason),\n usage: {\n inputTokens: response.usage.prompt_tokens,\n outputTokens: response.usage.completion_tokens,\n totalTokens: response.usage.total_tokens,\n },\n request: { body },\n response: {\n ...getResponseMetadata(response),\n headers: responseHeaders,\n body: rawResponse,\n },\n warnings,\n };\n }\n\n async doStream(\n options: Parameters<LanguageModelV2['doStream']>[0],\n ): Promise<Awaited<ReturnType<LanguageModelV2['doStream']>>> {\n const { args, warnings } = await this.getArgs(options);\n const body = { ...args, stream: true };\n\n const { responseHeaders, value: response } = await postJsonToApi({\n url: `${this.config.baseURL}/chat/completions`,\n headers: combineHeaders(this.config.headers(), options.headers),\n body,\n failedResponseHandler: mistralFailedResponseHandler,\n successfulResponseHandler: createEventSourceResponseHandler(\n mistralChatChunkSchema,\n ),\n abortSignal: options.abortSignal,\n fetch: this.config.fetch,\n });\n\n let finishReason: LanguageModelV2FinishReason = 'unknown';\n const usage: LanguageModelV2Usage = {\n inputTokens: undefined,\n outputTokens: undefined,\n totalTokens: undefined,\n };\n\n let isFirstChunk = true;\n let activeText = false;\n let activeReasoningId: string | null = null;\n\n const generateId = this.generateId;\n\n return {\n stream: response.pipeThrough(\n new TransformStream<\n ParseResult<z.infer<typeof mistralChatChunkSchema>>,\n LanguageModelV2StreamPart\n >({\n start(controller) {\n controller.enqueue({ type: 'stream-start', warnings });\n },\n\n transform(chunk, controller) {\n // Emit raw chunk if requested (before anything else)\n if (options.includeRawChunks) {\n controller.enqueue({ type: 'raw', rawValue: chunk.rawValue });\n }\n\n if (!chunk.success) {\n controller.enqueue({ type: 'error', error: chunk.error });\n return;\n }\n\n const value = chunk.value;\n\n if (isFirstChunk) {\n isFirstChunk = false;\n\n controller.enqueue({\n type: 'response-metadata',\n ...getResponseMetadata(value),\n });\n }\n\n if (value.usage != null) {\n usage.inputTokens = value.usage.prompt_tokens;\n usage.outputTokens = value.usage.completion_tokens;\n usage.totalTokens = value.usage.total_tokens;\n }\n\n const choice = value.choices[0];\n const delta = choice.delta;\n\n const textContent = extractTextContent(delta.content);\n\n if (delta.content != null && Array.isArray(delta.content)) {\n for (const part of delta.content) {\n if (part.type === 'thinking') {\n const reasoningDelta = extractReasoningContent(part.thinking);\n if (reasoningDelta.length > 0) {\n if (activeReasoningId == null) {\n // end any active text before starting reasoning\n if (activeText) {\n controller.enqueue({ type: 'text-end', id: '0' });\n activeText = false;\n }\n\n activeReasoningId = generateId();\n controller.enqueue({\n type: 'reasoning-start',\n id: activeReasoningId,\n });\n }\n controller.enqueue({\n type: 'reasoning-delta',\n id: activeReasoningId,\n delta: reasoningDelta,\n });\n }\n }\n }\n }\n\n if (textContent != null && textContent.length > 
0) {\n if (!activeText) {\n // if we were in reasoning mode, end it before starting text\n if (activeReasoningId != null) {\n controller.enqueue({\n type: 'reasoning-end',\n id: activeReasoningId,\n });\n activeReasoningId = null;\n }\n controller.enqueue({ type: 'text-start', id: '0' });\n activeText = true;\n }\n\n controller.enqueue({\n type: 'text-delta',\n id: '0',\n delta: textContent,\n });\n }\n\n if (delta?.tool_calls != null) {\n for (const toolCall of delta.tool_calls) {\n const toolCallId = toolCall.id;\n const toolName = toolCall.function.name;\n const input = toolCall.function.arguments;\n\n controller.enqueue({\n type: 'tool-input-start',\n id: toolCallId,\n toolName,\n });\n\n controller.enqueue({\n type: 'tool-input-delta',\n id: toolCallId,\n delta: input,\n });\n\n controller.enqueue({\n type: 'tool-input-end',\n id: toolCallId,\n });\n\n controller.enqueue({\n type: 'tool-call',\n toolCallId,\n toolName,\n input,\n });\n }\n }\n\n if (choice.finish_reason != null) {\n finishReason = mapMistralFinishReason(choice.finish_reason);\n }\n },\n\n flush(controller) {\n if (activeReasoningId != null) {\n controller.enqueue({\n type: 'reasoning-end',\n id: activeReasoningId,\n });\n }\n if (activeText) {\n controller.enqueue({ type: 'text-end', id: '0' });\n }\n\n controller.enqueue({\n type: 'finish',\n finishReason,\n usage,\n });\n },\n }),\n ),\n request: { body },\n response: { headers: responseHeaders },\n };\n }\n}\n\nfunction extractReasoningContent(\n thinking: Array<{ type: string; text: string }>,\n) {\n return thinking\n .filter(chunk => chunk.type === 'text')\n .map(chunk => chunk.text)\n .join('');\n}\n\nfunction extractTextContent(content: z.infer<typeof mistralContentSchema>) {\n if (typeof content === 'string') {\n return content;\n }\n\n if (content == null) {\n return undefined;\n }\n\n const textContent: string[] = [];\n\n for (const chunk of content) {\n const { type } = chunk;\n\n switch (type) {\n case 'text':\n textContent.push(chunk.text);\n break;\n case 'thinking':\n case 'image_url':\n case 'reference':\n // thinking, image content, and reference content are currently ignored\n break;\n default: {\n const _exhaustiveCheck: never = type;\n throw new Error(`Unsupported type: ${_exhaustiveCheck}`);\n }\n }\n }\n\n return textContent.length ? 
textContent.join('') : undefined;\n}\n\nconst mistralContentSchema = z\n .union([\n z.string(),\n z.array(\n z.discriminatedUnion('type', [\n z.object({\n type: z.literal('text'),\n text: z.string(),\n }),\n z.object({\n type: z.literal('image_url'),\n image_url: z.union([\n z.string(),\n z.object({\n url: z.string(),\n detail: z.string().nullable(),\n }),\n ]),\n }),\n z.object({\n type: z.literal('reference'),\n reference_ids: z.array(z.number()),\n }),\n z.object({\n type: z.literal('thinking'),\n thinking: z.array(\n z.object({\n type: z.literal('text'),\n text: z.string(),\n }),\n ),\n }),\n ]),\n ),\n ])\n .nullish();\n\nconst mistralUsageSchema = z.object({\n prompt_tokens: z.number(),\n completion_tokens: z.number(),\n total_tokens: z.number(),\n});\n\n// limited version of the schema, focussed on what is needed for the implementation\n// this approach limits breakages when the API changes and increases efficiency\nconst mistralChatResponseSchema = z.object({\n id: z.string().nullish(),\n created: z.number().nullish(),\n model: z.string().nullish(),\n choices: z.array(\n z.object({\n message: z.object({\n role: z.literal('assistant'),\n content: mistralContentSchema,\n tool_calls: z\n .array(\n z.object({\n id: z.string(),\n function: z.object({ name: z.string(), arguments: z.string() }),\n }),\n )\n .nullish(),\n }),\n index: z.number(),\n finish_reason: z.string().nullish(),\n }),\n ),\n object: z.literal('chat.completion'),\n usage: mistralUsageSchema,\n});\n\n// limited version of the schema, focussed on what is needed for the implementation\n// this approach limits breakages when the API changes and increases efficiency\nconst mistralChatChunkSchema = z.object({\n id: z.string().nullish(),\n created: z.number().nullish(),\n model: z.string().nullish(),\n choices: z.array(\n z.object({\n delta: z.object({\n role: z.enum(['assistant']).optional(),\n content: mistralContentSchema,\n tool_calls: z\n .array(\n z.object({\n id: z.string(),\n function: z.object({ name: z.string(), arguments: z.string() }),\n }),\n )\n .nullish(),\n }),\n finish_reason: z.string().nullish(),\n index: z.number(),\n }),\n ),\n usage: mistralUsageSchema.nullish(),\n});\n","import {\n LanguageModelV2Prompt,\n UnsupportedFunctionalityError,\n} from '@ai-sdk/provider';\nimport { MistralPrompt } from './mistral-chat-prompt';\nimport { convertToBase64 } from '@ai-sdk/provider-utils';\n\nexport function convertToMistralChatMessages(\n prompt: LanguageModelV2Prompt,\n): MistralPrompt {\n const messages: MistralPrompt = [];\n\n for (let i = 0; i < prompt.length; i++) {\n const { role, content } = prompt[i];\n const isLastMessage = i === prompt.length - 1;\n\n switch (role) {\n case 'system': {\n messages.push({ role: 'system', content });\n break;\n }\n\n case 'user': {\n messages.push({\n role: 'user',\n content: content.map(part => {\n switch (part.type) {\n case 'text': {\n return { type: 'text', text: part.text };\n }\n\n case 'file': {\n if (part.mediaType.startsWith('image/')) {\n const mediaType =\n part.mediaType === 'image/*'\n ? 'image/jpeg'\n : part.mediaType;\n\n return {\n type: 'image_url',\n image_url:\n part.data instanceof URL\n ? 
part.data.toString()\n : `data:${mediaType};base64,${convertToBase64(part.data)}`,\n };\n } else if (part.mediaType === 'application/pdf') {\n return {\n type: 'document_url',\n document_url: part.data.toString(),\n };\n } else {\n throw new UnsupportedFunctionalityError({\n functionality:\n 'Only images and PDF file parts are supported',\n });\n }\n }\n }\n }),\n });\n break;\n }\n\n case 'assistant': {\n let text = '';\n const toolCalls: Array<{\n id: string;\n type: 'function';\n function: { name: string; arguments: string };\n }> = [];\n\n for (const part of content) {\n switch (part.type) {\n case 'text': {\n text += part.text;\n break;\n }\n case 'tool-call': {\n toolCalls.push({\n id: part.toolCallId,\n type: 'function',\n function: {\n name: part.toolName,\n arguments: JSON.stringify(part.input),\n },\n });\n break;\n }\n case 'reasoning': {\n text += part.text;\n break;\n }\n default: {\n throw new Error(\n `Unsupported content type in assistant message: ${part.type}`,\n );\n }\n }\n }\n\n messages.push({\n role: 'assistant',\n content: text,\n prefix: isLastMessage ? true : undefined,\n tool_calls: toolCalls.length > 0 ? toolCalls : undefined,\n });\n\n break;\n }\n case 'tool': {\n for (const toolResponse of content) {\n const output = toolResponse.output;\n\n let contentValue: string;\n switch (output.type) {\n case 'text':\n case 'error-text':\n contentValue = output.value;\n break;\n case 'content':\n case 'json':\n case 'error-json':\n contentValue = JSON.stringify(output.value);\n break;\n }\n\n messages.push({\n role: 'tool',\n name: toolResponse.toolName,\n tool_call_id: toolResponse.toolCallId,\n content: contentValue,\n });\n }\n break;\n }\n default: {\n const _exhaustiveCheck: never = role;\n throw new Error(`Unsupported role: ${_exhaustiveCheck}`);\n }\n }\n }\n\n return messages;\n}\n","export function getResponseMetadata({\n id,\n model,\n created,\n}: {\n id?: string | undefined | null;\n created?: number | undefined | null;\n model?: string | undefined | null;\n}) {\n return {\n id: id ?? undefined,\n modelId: model ?? undefined,\n timestamp: created != null ? 
new Date(created * 1000) : undefined,\n };\n}\n","import { LanguageModelV2FinishReason } from '@ai-sdk/provider';\n\nexport function mapMistralFinishReason(\n finishReason: string | null | undefined,\n): LanguageModelV2FinishReason {\n switch (finishReason) {\n case 'stop':\n return 'stop';\n case 'length':\n case 'model_length':\n return 'length';\n case 'tool_calls':\n return 'tool-calls';\n default:\n return 'unknown';\n }\n}\n","import { z } from 'zod/v4';\n\n// https://docs.mistral.ai/getting-started/models/models_overview/\nexport type MistralChatModelId =\n // premier\n | 'ministral-3b-latest'\n | 'ministral-8b-latest'\n | 'mistral-large-latest'\n | 'mistral-medium-latest'\n | 'mistral-medium-2508'\n | 'mistral-medium-2505'\n | 'mistral-small-latest'\n | 'pixtral-large-latest'\n // reasoning models\n | 'magistral-small-2507'\n | 'magistral-medium-2507'\n | 'magistral-small-2506'\n | 'magistral-medium-2506'\n // free\n | 'pixtral-12b-2409'\n // legacy\n | 'open-mistral-7b'\n | 'open-mixtral-8x7b'\n | 'open-mixtral-8x22b'\n | (string & {});\n\nexport const mistralLanguageModelOptions = z.object({\n /**\nWhether to inject a safety prompt before all conversations.\n\nDefaults to `false`.\n */\n safePrompt: z.boolean().optional(),\n\n documentImageLimit: z.number().optional(),\n documentPageLimit: z.number().optional(),\n\n /**\n * Whether to use structured outputs.\n *\n * @default true\n */\n structuredOutputs: z.boolean().optional(),\n\n /**\n * Whether to use strict JSON schema validation.\n *\n * @default false\n */\n strictJsonSchema: z.boolean().optional(),\n});\n\nexport type MistralLanguageModelOptions = z.infer<\n typeof mistralLanguageModelOptions\n>;\n","import { createJsonErrorResponseHandler } from '@ai-sdk/provider-utils';\nimport { z } from 'zod/v4';\n\nconst mistralErrorDataSchema = z.object({\n object: z.literal('error'),\n message: z.string(),\n type: z.string(),\n param: z.string().nullable(),\n code: z.string().nullable(),\n});\n\nexport type MistralErrorData = z.infer<typeof mistralErrorDataSchema>;\n\nexport const mistralFailedResponseHandler = createJsonErrorResponseHandler({\n errorSchema: mistralErrorDataSchema,\n errorToMessage: data => data.message,\n});\n","import {\n LanguageModelV2CallOptions,\n LanguageModelV2CallWarning,\n UnsupportedFunctionalityError,\n} from '@ai-sdk/provider';\nimport { MistralToolChoice } from './mistral-chat-prompt';\n\nexport function prepareTools({\n tools,\n toolChoice,\n}: {\n tools: LanguageModelV2CallOptions['tools'];\n toolChoice?: LanguageModelV2CallOptions['toolChoice'];\n}): {\n tools:\n | Array<{\n type: 'function';\n function: {\n name: string;\n description: string | undefined;\n parameters: unknown;\n };\n }>\n | undefined;\n toolChoice: MistralToolChoice | undefined;\n toolWarnings: LanguageModelV2CallWarning[];\n} {\n // when the tools array is empty, change it to undefined to prevent errors:\n tools = tools?.length ? 
tools : undefined;\n\n const toolWarnings: LanguageModelV2CallWarning[] = [];\n\n if (tools == null) {\n return { tools: undefined, toolChoice: undefined, toolWarnings };\n }\n\n const mistralTools: Array<{\n type: 'function';\n function: {\n name: string;\n description: string | undefined;\n parameters: unknown;\n };\n }> = [];\n\n for (const tool of tools) {\n if (tool.type === 'provider-defined') {\n toolWarnings.push({ type: 'unsupported-tool', tool });\n } else {\n mistralTools.push({\n type: 'function',\n function: {\n name: tool.name,\n description: tool.description,\n parameters: tool.inputSchema,\n },\n });\n }\n }\n\n if (toolChoice == null) {\n return { tools: mistralTools, toolChoice: undefined, toolWarnings };\n }\n\n const type = toolChoice.type;\n\n switch (type) {\n case 'auto':\n case 'none':\n return { tools: mistralTools, toolChoice: type, toolWarnings };\n case 'required':\n return { tools: mistralTools, toolChoice: 'any', toolWarnings };\n\n // mistral does not support tool mode directly,\n // so we filter the tools and force the tool choice through 'any'\n case 'tool':\n return {\n tools: mistralTools.filter(\n tool => tool.function.name === toolChoice.toolName,\n ),\n toolChoice: 'any',\n toolWarnings,\n };\n default: {\n const _exhaustiveCheck: never = type;\n throw new UnsupportedFunctionalityError({\n functionality: `tool choice type: ${_exhaustiveCheck}`,\n });\n }\n }\n}\n","import {\n EmbeddingModelV2,\n TooManyEmbeddingValuesForCallError,\n} from '@ai-sdk/provider';\nimport {\n combineHeaders,\n createJsonResponseHandler,\n FetchFunction,\n postJsonToApi,\n} from '@ai-sdk/provider-utils';\nimport { z } from 'zod/v4';\nimport { MistralEmbeddingModelId } from './mistral-embedding-options';\nimport { mistralFailedResponseHandler } from './mistral-error';\n\ntype MistralEmbeddingConfig = {\n provider: string;\n baseURL: string;\n headers: () => Record<string, string | undefined>;\n fetch?: FetchFunction;\n};\n\nexport class MistralEmbeddingModel implements EmbeddingModelV2<string> {\n readonly specificationVersion = 'v2';\n readonly modelId: MistralEmbeddingModelId;\n readonly maxEmbeddingsPerCall = 32;\n readonly supportsParallelCalls = false;\n\n private readonly config: MistralEmbeddingConfig;\n\n get provider(): string {\n return this.config.provider;\n }\n\n constructor(\n modelId: MistralEmbeddingModelId,\n config: MistralEmbeddingConfig,\n ) {\n this.modelId = modelId;\n this.config = config;\n }\n\n async doEmbed({\n values,\n abortSignal,\n headers,\n }: Parameters<EmbeddingModelV2<string>['doEmbed']>[0]): Promise<\n Awaited<ReturnType<EmbeddingModelV2<string>['doEmbed']>>\n > {\n if (values.length > this.maxEmbeddingsPerCall) {\n throw new TooManyEmbeddingValuesForCallError({\n provider: this.provider,\n modelId: this.modelId,\n maxEmbeddingsPerCall: this.maxEmbeddingsPerCall,\n values,\n });\n }\n\n const {\n responseHeaders,\n value: response,\n rawValue,\n } = await postJsonToApi({\n url: `${this.config.baseURL}/embeddings`,\n headers: combineHeaders(this.config.headers(), headers),\n body: {\n model: this.modelId,\n input: values,\n encoding_format: 'float',\n },\n failedResponseHandler: mistralFailedResponseHandler,\n successfulResponseHandler: createJsonResponseHandler(\n MistralTextEmbeddingResponseSchema,\n ),\n abortSignal,\n fetch: this.config.fetch,\n });\n\n return {\n embeddings: response.data.map(item => item.embedding),\n usage: response.usage\n ? 
{ tokens: response.usage.prompt_tokens }\n : undefined,\n response: { headers: responseHeaders, body: rawValue },\n };\n }\n}\n\n// minimal version of the schema, focussed on what is needed for the implementation\n// this approach limits breakages when the API changes and increases efficiency\nconst MistralTextEmbeddingResponseSchema = z.object({\n data: z.array(z.object({ embedding: z.array(z.number()) })),\n usage: z.object({ prompt_tokens: z.number() }).nullish(),\n});\n"],"mappings":";AAAA;AAAA,EAGE;AAAA,OAEK;AACP;AAAA,EAEE;AAAA,EACA;AAAA,OACK;;;ACFP;AAAA,EACE;AAAA,EACA;AAAA,EACA;AAAA,EAEA;AAAA,EACA;AAAA,EACA;AAAA,EAEA;AAAA,OACK;AACP,SAAS,KAAAA,UAAS;;;ACnBlB;AAAA,EAEE;AAAA,OACK;AAEP,SAAS,uBAAuB;AAEzB,SAAS,6BACd,QACe;AACf,QAAM,WAA0B,CAAC;AAEjC,WAAS,IAAI,GAAG,IAAI,OAAO,QAAQ,KAAK;AACtC,UAAM,EAAE,MAAM,QAAQ,IAAI,OAAO,CAAC;AAClC,UAAM,gBAAgB,MAAM,OAAO,SAAS;AAE5C,YAAQ,MAAM;AAAA,MACZ,KAAK,UAAU;AACb,iBAAS,KAAK,EAAE,MAAM,UAAU,QAAQ,CAAC;AACzC;AAAA,MACF;AAAA,MAEA,KAAK,QAAQ;AACX,iBAAS,KAAK;AAAA,UACZ,MAAM;AAAA,UACN,SAAS,QAAQ,IAAI,UAAQ;AAC3B,oBAAQ,KAAK,MAAM;AAAA,cACjB,KAAK,QAAQ;AACX,uBAAO,EAAE,MAAM,QAAQ,MAAM,KAAK,KAAK;AAAA,cACzC;AAAA,cAEA,KAAK,QAAQ;AACX,oBAAI,KAAK,UAAU,WAAW,QAAQ,GAAG;AACvC,wBAAM,YACJ,KAAK,cAAc,YACf,eACA,KAAK;AAEX,yBAAO;AAAA,oBACL,MAAM;AAAA,oBACN,WACE,KAAK,gBAAgB,MACjB,KAAK,KAAK,SAAS,IACnB,QAAQ,SAAS,WAAW,gBAAgB,KAAK,IAAI,CAAC;AAAA,kBAC9D;AAAA,gBACF,WAAW,KAAK,cAAc,mBAAmB;AAC/C,yBAAO;AAAA,oBACL,MAAM;AAAA,oBACN,cAAc,KAAK,KAAK,SAAS;AAAA,kBACnC;AAAA,gBACF,OAAO;AACL,wBAAM,IAAI,8BAA8B;AAAA,oBACtC,eACE;AAAA,kBACJ,CAAC;AAAA,gBACH;AAAA,cACF;AAAA,YACF;AAAA,UACF,CAAC;AAAA,QACH,CAAC;AACD;AAAA,MACF;AAAA,MAEA,KAAK,aAAa;AAChB,YAAI,OAAO;AACX,cAAM,YAID,CAAC;AAEN,mBAAW,QAAQ,SAAS;AAC1B,kBAAQ,KAAK,MAAM;AAAA,YACjB,KAAK,QAAQ;AACX,sBAAQ,KAAK;AACb;AAAA,YACF;AAAA,YACA,KAAK,aAAa;AAChB,wBAAU,KAAK;AAAA,gBACb,IAAI,KAAK;AAAA,gBACT,MAAM;AAAA,gBACN,UAAU;AAAA,kBACR,MAAM,KAAK;AAAA,kBACX,WAAW,KAAK,UAAU,KAAK,KAAK;AAAA,gBACtC;AAAA,cACF,CAAC;AACD;AAAA,YACF;AAAA,YACA,KAAK,aAAa;AAChB,sBAAQ,KAAK;AACb;AAAA,YACF;AAAA,YACA,SAAS;AACP,oBAAM,IAAI;AAAA,gBACR,kDAAkD,KAAK,IAAI;AAAA,cAC7D;AAAA,YACF;AAAA,UACF;AAAA,QACF;AAEA,iBAAS,KAAK;AAAA,UACZ,MAAM;AAAA,UACN,SAAS;AAAA,UACT,QAAQ,gBAAgB,OAAO;AAAA,UAC/B,YAAY,UAAU,SAAS,IAAI,YAAY;AAAA,QACjD,CAAC;AAED;AAAA,MACF;AAAA,MACA,KAAK,QAAQ;AACX,mBAAW,gBAAgB,SAAS;AAClC,gBAAM,SAAS,aAAa;AAE5B,cAAI;AACJ,kBAAQ,OAAO,MAAM;AAAA,YACnB,KAAK;AAAA,YACL,KAAK;AACH,6BAAe,OAAO;AACtB;AAAA,YACF,KAAK;AAAA,YACL,KAAK;AAAA,YACL,KAAK;AACH,6BAAe,KAAK,UAAU,OAAO,KAAK;AAC1C;AAAA,UACJ;AAEA,mBAAS,KAAK;AAAA,YACZ,MAAM;AAAA,YACN,MAAM,aAAa;AAAA,YACnB,cAAc,aAAa;AAAA,YAC3B,SAAS;AAAA,UACX,CAAC;AAAA,QACH;AACA;AAAA,MACF;AAAA,MACA,SAAS;AACP,cAAM,mBAA0B;AAChC,cAAM,IAAI,MAAM,qBAAqB,gBAAgB,EAAE;AAAA,MACzD;AAAA,IACF;AAAA,EACF;AAEA,SAAO;AACT;;;AC/IO,SAAS,oBAAoB;AAAA,EAClC;AAAA,EACA;AAAA,EACA;AACF,GAIG;AACD,SAAO;AAAA,IACL,IAAI,kBAAM;AAAA,IACV,SAAS,wBAAS;AAAA,IAClB,WAAW,WAAW,OAAO,IAAI,KAAK,UAAU,GAAI,IAAI;AAAA,EAC1D;AACF;;;ACZO,SAAS,uBACd,cAC6B;AAC7B,UAAQ,cAAc;AAAA,IACpB,KAAK;AACH,aAAO;AAAA,IACT,KAAK;AAAA,IACL,KAAK;AACH,aAAO;AAAA,IACT,KAAK;AACH,aAAO;AAAA,IACT;AACE,aAAO;AAAA,EACX;AACF;;;AChBA,SAAS,SAAS;AA0BX,IAAM,8BAA8B,EAAE,OAAO;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,EAMlD,YAAY,EAAE,QAAQ,EAAE,SAAS;AAAA,EAEjC,oBAAoB,EAAE,OAAO,EAAE,SAAS;AAAA,EACxC,mBAAmB,EAAE,OAAO,EAAE,SAAS;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,EAOvC,mBAAmB,EAAE,QAAQ,EAAE,SAAS;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,EAOxC,kBAAkB,EAAE,QAAQ,EAAE,SAAS;AACzC,CAAC;;;AClDD,SAAS,sCAAsC;AAC/C,SAAS,KAAAC,UAAS;AAElB,IAAM,yBAAyBA,GAAE,OAAO;AAAA,EACtC,QAAQA,GAAE,QAAQ,OAAO;AAAA,EACzB,SAASA,GAAE,OAAO;AAAA,EAClB,MAAMA,GAAE,O
AAO;AAAA,EACf,OAAOA,GAAE,OAAO,EAAE,SAAS;AAAA,EAC3B,MAAMA,GAAE,OAAO,EAAE,SAAS;AAC5B,CAAC;AAIM,IAAM,+BAA+B,+BAA+B;AAAA,EACzE,aAAa;AAAA,EACb,gBAAgB,UAAQ,KAAK;AAC/B,CAAC;;;AChBD;AAAA,EAGE,iCAAAC;AAAA,OACK;AAGA,SAAS,aAAa;AAAA,EAC3B;AAAA,EACA;AACF,GAgBE;AAEA,WAAQ,+BAAO,UAAS,QAAQ;AAEhC,QAAM,eAA6C,CAAC;AAEpD,MAAI,SAAS,MAAM;AACjB,WAAO,EAAE,OAAO,QAAW,YAAY,QAAW,aAAa;AAAA,EACjE;AAEA,QAAM,eAOD,CAAC;AAEN,aAAW,QAAQ,OAAO;AACxB,QAAI,KAAK,SAAS,oBAAoB;AACpC,mBAAa,KAAK,EAAE,MAAM,oBAAoB,KAAK,CAAC;AAAA,IACtD,OAAO;AACL,mBAAa,KAAK;AAAA,QAChB,MAAM;AAAA,QACN,UAAU;AAAA,UACR,MAAM,KAAK;AAAA,UACX,aAAa,KAAK;AAAA,UAClB,YAAY,KAAK;AAAA,QACnB;AAAA,MACF,CAAC;AAAA,IACH;AAAA,EACF;AAEA,MAAI,cAAc,MAAM;AACtB,WAAO,EAAE,OAAO,cAAc,YAAY,QAAW,aAAa;AAAA,EACpE;AAEA,QAAM,OAAO,WAAW;AAExB,UAAQ,MAAM;AAAA,IACZ,KAAK;AAAA,IACL,KAAK;AACH,aAAO,EAAE,OAAO,cAAc,YAAY,MAAM,aAAa;AAAA,IAC/D,KAAK;AACH,aAAO,EAAE,OAAO,cAAc,YAAY,OAAO,aAAa;AAAA,IAIhE,KAAK;AACH,aAAO;AAAA,QACL,OAAO,aAAa;AAAA,UAClB,UAAQ,KAAK,SAAS,SAAS,WAAW;AAAA,QAC5C;AAAA,QACA,YAAY;AAAA,QACZ;AAAA,MACF;AAAA,IACF,SAAS;AACP,YAAM,mBAA0B;AAChC,YAAM,IAAIA,+BAA8B;AAAA,QACtC,eAAe,qBAAqB,gBAAgB;AAAA,MACtD,CAAC;AAAA,IACH;AAAA,EACF;AACF;;;ANpDO,IAAM,2BAAN,MAA0D;AAAA,EAQ/D,YAAY,SAA6B,QAA2B;AAPpE,SAAS,uBAAuB;AAiBhC,SAAS,gBAA0C;AAAA,MACjD,mBAAmB,CAAC,gBAAgB;AAAA,IACtC;AA1DF;AA+CI,SAAK,UAAU;AACf,SAAK,SAAS;AACd,SAAK,cAAa,YAAO,eAAP,YAAqB;AAAA,EACzC;AAAA,EAEA,IAAI,WAAmB;AACrB,WAAO,KAAK,OAAO;AAAA,EACrB;AAAA,EAMA,MAAc,QAAQ;AAAA,IACpB;AAAA,IACA;AAAA,IACA;AAAA,IACA;AAAA,IACA;AAAA,IACA;AAAA,IACA;AAAA,IACA;AAAA,IACA;AAAA,IACA;AAAA,IACA;AAAA,IACA;AAAA,IACA;AAAA,EACF,GAAiD;AA1EnD;AA2EI,UAAM,WAAyC,CAAC;AAEhD,UAAM,WACH,WAAM,qBAAqB;AAAA,MAC1B,UAAU;AAAA,MACV;AAAA,MACA,QAAQ;AAAA,IACV,CAAC,MAJA,YAIM,CAAC;AAEV,QAAI,QAAQ,MAAM;AAChB,eAAS,KAAK;AAAA,QACZ,MAAM;AAAA,QACN,SAAS;AAAA,MACX,CAAC;AAAA,IACH;AAEA,QAAI,oBAAoB,MAAM;AAC5B,eAAS,KAAK;AAAA,QACZ,MAAM;AAAA,QACN,SAAS;AAAA,MACX,CAAC;AAAA,IACH;AAEA,QAAI,mBAAmB,MAAM;AAC3B,eAAS,KAAK;AAAA,QACZ,MAAM;AAAA,QACN,SAAS;AAAA,MACX,CAAC;AAAA,IACH;AAEA,QAAI,iBAAiB,MAAM;AACzB,eAAS,KAAK;AAAA,QACZ,MAAM;AAAA,QACN,SAAS;AAAA,MACX,CAAC;AAAA,IACH;AAEA,UAAM,qBAAoB,aAAQ,sBAAR,YAA6B;AACvD,UAAM,oBAAmB,aAAQ,qBAAR,YAA4B;AAIrD,SAAI,iDAAgB,UAAS,UAAU,EAAC,iDAAgB,SAAQ;AAC9D,eAAS,kCAAkC;AAAA,QACzC,UAAU;AAAA,QACV,QAAQ,eAAe;AAAA,MACzB,CAAC;AAAA,IACH;AAEA,UAAM,WAAW;AAAA;AAAA,MAEf,OAAO,KAAK;AAAA;AAAA,MAGZ,aAAa,QAAQ;AAAA;AAAA,MAGrB,YAAY;AAAA,MACZ;AAAA,MACA,OAAO;AAAA,MACP,aAAa;AAAA;AAAA,MAGb,kBACE,iDAAgB,UAAS,SACrB,sBAAqB,iDAAgB,WAAU,OAC7C;AAAA,QACE,MAAM;AAAA,QACN,aAAa;AAAA,UACX,QAAQ,eAAe;AAAA,UACvB,QAAQ;AAAA,UACR,OAAM,oBAAe,SAAf,YAAuB;AAAA,UAC7B,aAAa,eAAe;AAAA,QAC9B;AAAA,MACF,IACA,EAAE,MAAM,cAAc,IACxB;AAAA;AAAA,MAGN,sBAAsB,QAAQ;AAAA,MAC9B,qBAAqB,QAAQ;AAAA;AAAA,MAG7B,UAAU,6BAA6B,MAAM;AAAA,IAC/C;AAEA,UAAM;AAAA,MACJ,OAAO;AAAA,MACP,YAAY;AAAA,MACZ;AAAA,IACF,IAAI,aAAa;AAAA,MACf;AAAA,MACA;AAAA,IACF,CAAC;AAED,WAAO;AAAA,MACL,MAAM;AAAA,QACJ,GAAG;AAAA,QACH,OAAO;AAAA,QACP,aAAa;AAAA,MACf;AAAA,MACA,UAAU,CAAC,GAAG,UAAU,GAAG,YAAY;AAAA,IACzC;AAAA,EACF;AAAA,EAEA,MAAM,WACJ,SAC6D;AAC7D,UAAM,EAAE,MAAM,MAAM,SAAS,IAAI,MAAM,KAAK,QAAQ,OAAO;AAE3D,UAAM;AAAA,MACJ;AAAA,MACA,OAAO;AAAA,MACP,UAAU;AAAA,IACZ,IAAI,MAAM,cAAc;AAAA,MACtB,KAAK,GAAG,KAAK,OAAO,OAAO;AAAA,MAC3B,SAAS,eAAe,KAAK,OAAO,QAAQ,GAAG,QAAQ,OAAO;AAAA,MAC9D;AAAA,MACA,uBAAuB;AAAA,MACvB,2BAA2B;AAAA,QACzB;AAAA,MACF;AAAA,MACA,aAAa,QAAQ;AAAA,MACrB,OAAO,KAAK,OAAO;AAAA,IACrB,CAAC;AAED,UAAM,SAAS,SAAS,QAAQ,CAAC;AACjC,UAAM,UAAyC,CAAC;AAGhD,QACE,OAAO,QAAQ,WAAW,QAC1B,MAAM,QAAQ,OAAO,QAAQ,OAAO,GACpC;AACA,iBAAW,QAAQ,OAAO,QAAQ,SAAS;AACzC,YAAI,KAAK,SAAS,YAAY;AAC5B,gBAAM,gBAAgB,wBAAwB,KAAK,QAAQ;AAC3D,cAAI,cAAc
,SAAS,GAAG;AAC5B,oBAAQ,KAAK,EAAE,MAAM,aAAa,MAAM,cAAc,CAAC;AAAA,UACzD;AAAA,QACF,WAAW,KAAK,SAAS,QAAQ;AAC/B,cAAI,KAAK,KAAK,SAAS,GAAG;AACxB,oBAAQ,KAAK,EAAE,MAAM,QAAQ,MAAM,KAAK,KAAK,CAAC;AAAA,UAChD;AAAA,QACF;AAAA,MACF;AAAA,IACF,OAAO;AAEL,YAAM,OAAO,mBAAmB,OAAO,QAAQ,OAAO;AACtD,UAAI,QAAQ,QAAQ,KAAK,SAAS,GAAG;AACnC,gBAAQ,KAAK,EAAE,MAAM,QAAQ,KAAK,CAAC;AAAA,MACrC;AAAA,IACF;AAOA,QAAI,OAAO,QAAQ,cAAc,MAAM;AACrC,iBAAW,YAAY,OAAO,QAAQ,YAAY;AAChD,gBAAQ,KAAK;AAAA,UACX,MAAM;AAAA,UACN,YAAY,SAAS;AAAA,UACrB,UAAU,SAAS,SAAS;AAAA,UAC5B,OAAO,SAAS,SAAS;AAAA,QAC3B,CAAC;AAAA,MACH;AAAA,IACF;AAEA,WAAO;AAAA,MACL;AAAA,MACA,cAAc,uBAAuB,OAAO,aAAa;AAAA,MACzD,OAAO;AAAA,QACL,aAAa,SAAS,MAAM;AAAA,QAC5B,cAAc,SAAS,MAAM;AAAA,QAC7B,aAAa,SAAS,MAAM;AAAA,MAC9B;AAAA,MACA,SAAS,EAAE,KAAK;AAAA,MAChB,UAAU;AAAA,QACR,GAAG,oBAAoB,QAAQ;AAAA,QAC/B,SAAS;AAAA,QACT,MAAM;AAAA,MACR;AAAA,MACA;AAAA,IACF;AAAA,EACF;AAAA,EAEA,MAAM,SACJ,SAC2D;AAC3D,UAAM,EAAE,MAAM,SAAS,IAAI,MAAM,KAAK,QAAQ,OAAO;AACrD,UAAM,OAAO,EAAE,GAAG,MAAM,QAAQ,KAAK;AAErC,UAAM,EAAE,iBAAiB,OAAO,SAAS,IAAI,MAAM,cAAc;AAAA,MAC/D,KAAK,GAAG,KAAK,OAAO,OAAO;AAAA,MAC3B,SAAS,eAAe,KAAK,OAAO,QAAQ,GAAG,QAAQ,OAAO;AAAA,MAC9D;AAAA,MACA,uBAAuB;AAAA,MACvB,2BAA2B;AAAA,QACzB;AAAA,MACF;AAAA,MACA,aAAa,QAAQ;AAAA,MACrB,OAAO,KAAK,OAAO;AAAA,IACrB,CAAC;AAED,QAAI,eAA4C;AAChD,UAAM,QAA8B;AAAA,MAClC,aAAa;AAAA,MACb,cAAc;AAAA,MACd,aAAa;AAAA,IACf;AAEA,QAAI,eAAe;AACnB,QAAI,aAAa;AACjB,QAAI,oBAAmC;AAEvC,UAAMC,cAAa,KAAK;AAExB,WAAO;AAAA,MACL,QAAQ,SAAS;AAAA,QACf,IAAI,gBAGF;AAAA,UACA,MAAM,YAAY;AAChB,uBAAW,QAAQ,EAAE,MAAM,gBAAgB,SAAS,CAAC;AAAA,UACvD;AAAA,UAEA,UAAU,OAAO,YAAY;AAE3B,gBAAI,QAAQ,kBAAkB;AAC5B,yBAAW,QAAQ,EAAE,MAAM,OAAO,UAAU,MAAM,SAAS,CAAC;AAAA,YAC9D;AAEA,gBAAI,CAAC,MAAM,SAAS;AAClB,yBAAW,QAAQ,EAAE,MAAM,SAAS,OAAO,MAAM,MAAM,CAAC;AACxD;AAAA,YACF;AAEA,kBAAM,QAAQ,MAAM;AAEpB,gBAAI,cAAc;AAChB,6BAAe;AAEf,yBAAW,QAAQ;AAAA,gBACjB,MAAM;AAAA,gBACN,GAAG,oBAAoB,KAAK;AAAA,cAC9B,CAAC;AAAA,YACH;AAEA,gBAAI,MAAM,SAAS,MAAM;AACvB,oBAAM,cAAc,MAAM,MAAM;AAChC,oBAAM,eAAe,MAAM,MAAM;AACjC,oBAAM,cAAc,MAAM,MAAM;AAAA,YAClC;AAEA,kBAAM,SAAS,MAAM,QAAQ,CAAC;AAC9B,kBAAM,QAAQ,OAAO;AAErB,kBAAM,cAAc,mBAAmB,MAAM,OAAO;AAEpD,gBAAI,MAAM,WAAW,QAAQ,MAAM,QAAQ,MAAM,OAAO,GAAG;AACzD,yBAAW,QAAQ,MAAM,SAAS;AAChC,oBAAI,KAAK,SAAS,YAAY;AAC5B,wBAAM,iBAAiB,wBAAwB,KAAK,QAAQ;AAC5D,sBAAI,eAAe,SAAS,GAAG;AAC7B,wBAAI,qBAAqB,MAAM;AAE7B,0BAAI,YAAY;AACd,mCAAW,QAAQ,EAAE,MAAM,YAAY,IAAI,IAAI,CAAC;AAChD,qCAAa;AAAA,sBACf;AAEA,0CAAoBA,YAAW;AAC/B,iCAAW,QAAQ;AAAA,wBACjB,MAAM;AAAA,wBACN,IAAI;AAAA,sBACN,CAAC;AAAA,oBACH;AACA,+BAAW,QAAQ;AAAA,sBACjB,MAAM;AAAA,sBACN,IAAI;AAAA,sBACJ,OAAO;AAAA,oBACT,CAAC;AAAA,kBACH;AAAA,gBACF;AAAA,cACF;AAAA,YACF;AAEA,gBAAI,eAAe,QAAQ,YAAY,SAAS,GAAG;AACjD,kBAAI,CAAC,YAAY;AAEf,oBAAI,qBAAqB,MAAM;AAC7B,6BAAW,QAAQ;AAAA,oBACjB,MAAM;AAAA,oBACN,IAAI;AAAA,kBACN,CAAC;AACD,sCAAoB;AAAA,gBACtB;AACA,2BAAW,QAAQ,EAAE,MAAM,cAAc,IAAI,IAAI,CAAC;AAClD,6BAAa;AAAA,cACf;AAEA,yBAAW,QAAQ;AAAA,gBACjB,MAAM;AAAA,gBACN,IAAI;AAAA,gBACJ,OAAO;AAAA,cACT,CAAC;AAAA,YACH;AAEA,iBAAI,+BAAO,eAAc,MAAM;AAC7B,yBAAW,YAAY,MAAM,YAAY;AACvC,sBAAM,aAAa,SAAS;AAC5B,sBAAM,WAAW,SAAS,SAAS;AACnC,sBAAM,QAAQ,SAAS,SAAS;AAEhC,2BAAW,QAAQ;AAAA,kBACjB,MAAM;AAAA,kBACN,IAAI;AAAA,kBACJ;AAAA,gBACF,CAAC;AAED,2BAAW,QAAQ;AAAA,kBACjB,MAAM;AAAA,kBACN,IAAI;AAAA,kBACJ,OAAO;AAAA,gBACT,CAAC;AAED,2BAAW,QAAQ;AAAA,kBACjB,MAAM;AAAA,kBACN,IAAI;AAAA,gBACN,CAAC;AAED,2BAAW,QAAQ;AAAA,kBACjB,MAAM;AAAA,kBACN;AAAA,kBACA;AAAA,kBACA;AAAA,gBACF,CAAC;AAAA,cACH;AAAA,YACF;AAEA,gBAAI,OAAO,iBAAiB,MAAM;AAChC,6BAAe,uBAAuB,OAAO,aAAa;AAAA,YAC5D;AAAA,UACF;AAAA,UAEA,MAAM,YAAY;AAChB,gBAAI,qBAAqB,MAAM;AAC7B,yBAAW,QAAQ;AAAA,gBACjB,MAAM;AAAA,gBACN,IAAI;AAAA,cACN,CAAC;AAAA,YACH
;AACA,gBAAI,YAAY;AACd,yBAAW,QAAQ,EAAE,MAAM,YAAY,IAAI,IAAI,CAAC;AAAA,YAClD;AAEA,uBAAW,QAAQ;AAAA,cACjB,MAAM;AAAA,cACN;AAAA,cACA;AAAA,YACF,CAAC;AAAA,UACH;AAAA,QACF,CAAC;AAAA,MACH;AAAA,MACA,SAAS,EAAE,KAAK;AAAA,MAChB,UAAU,EAAE,SAAS,gBAAgB;AAAA,IACvC;AAAA,EACF;AACF;AAEA,SAAS,wBACP,UACA;AACA,SAAO,SACJ,OAAO,WAAS,MAAM,SAAS,MAAM,EACrC,IAAI,WAAS,MAAM,IAAI,EACvB,KAAK,EAAE;AACZ;AAEA,SAAS,mBAAmB,SAA+C;AACzE,MAAI,OAAO,YAAY,UAAU;AAC/B,WAAO;AAAA,EACT;AAEA,MAAI,WAAW,MAAM;AACnB,WAAO;AAAA,EACT;AAEA,QAAM,cAAwB,CAAC;AAE/B,aAAW,SAAS,SAAS;AAC3B,UAAM,EAAE,KAAK,IAAI;AAEjB,YAAQ,MAAM;AAAA,MACZ,KAAK;AACH,oBAAY,KAAK,MAAM,IAAI;AAC3B;AAAA,MACF,KAAK;AAAA,MACL,KAAK;AAAA,MACL,KAAK;AAEH;AAAA,MACF,SAAS;AACP,cAAM,mBAA0B;AAChC,cAAM,IAAI,MAAM,qBAAqB,gBAAgB,EAAE;AAAA,MACzD;AAAA,IACF;AAAA,EACF;AAEA,SAAO,YAAY,SAAS,YAAY,KAAK,EAAE,IAAI;AACrD;AAEA,IAAM,uBAAuBC,GAC1B,MAAM;AAAA,EACLA,GAAE,OAAO;AAAA,EACTA,GAAE;AAAA,IACAA,GAAE,mBAAmB,QAAQ;AAAA,MAC3BA,GAAE,OAAO;AAAA,QACP,MAAMA,GAAE,QAAQ,MAAM;AAAA,QACtB,MAAMA,GAAE,OAAO;AAAA,MACjB,CAAC;AAAA,MACDA,GAAE,OAAO;AAAA,QACP,MAAMA,GAAE,QAAQ,WAAW;AAAA,QAC3B,WAAWA,GAAE,MAAM;AAAA,UACjBA,GAAE,OAAO;AAAA,UACTA,GAAE,OAAO;AAAA,YACP,KAAKA,GAAE,OAAO;AAAA,YACd,QAAQA,GAAE,OAAO,EAAE,SAAS;AAAA,UAC9B,CAAC;AAAA,QACH,CAAC;AAAA,MACH,CAAC;AAAA,MACDA,GAAE,OAAO;AAAA,QACP,MAAMA,GAAE,QAAQ,WAAW;AAAA,QAC3B,eAAeA,GAAE,MAAMA,GAAE,OAAO,CAAC;AAAA,MACnC,CAAC;AAAA,MACDA,GAAE,OAAO;AAAA,QACP,MAAMA,GAAE,QAAQ,UAAU;AAAA,QAC1B,UAAUA,GAAE;AAAA,UACVA,GAAE,OAAO;AAAA,YACP,MAAMA,GAAE,QAAQ,MAAM;AAAA,YACtB,MAAMA,GAAE,OAAO;AAAA,UACjB,CAAC;AAAA,QACH;AAAA,MACF,CAAC;AAAA,IACH,CAAC;AAAA,EACH;AACF,CAAC,EACA,QAAQ;AAEX,IAAM,qBAAqBA,GAAE,OAAO;AAAA,EAClC,eAAeA,GAAE,OAAO;AAAA,EACxB,mBAAmBA,GAAE,OAAO;AAAA,EAC5B,cAAcA,GAAE,OAAO;AACzB,CAAC;AAID,IAAM,4BAA4BA,GAAE,OAAO;AAAA,EACzC,IAAIA,GAAE,OAAO,EAAE,QAAQ;AAAA,EACvB,SAASA,GAAE,OAAO,EAAE,QAAQ;AAAA,EAC5B,OAAOA,GAAE,OAAO,EAAE,QAAQ;AAAA,EAC1B,SAASA,GAAE;AAAA,IACTA,GAAE,OAAO;AAAA,MACP,SAASA,GAAE,OAAO;AAAA,QAChB,MAAMA,GAAE,QAAQ,WAAW;AAAA,QAC3B,SAAS;AAAA,QACT,YAAYA,GACT;AAAA,UACCA,GAAE,OAAO;AAAA,YACP,IAAIA,GAAE,OAAO;AAAA,YACb,UAAUA,GAAE,OAAO,EAAE,MAAMA,GAAE,OAAO,GAAG,WAAWA,GAAE,OAAO,EAAE,CAAC;AAAA,UAChE,CAAC;AAAA,QACH,EACC,QAAQ;AAAA,MACb,CAAC;AAAA,MACD,OAAOA,GAAE,OAAO;AAAA,MAChB,eAAeA,GAAE,OAAO,EAAE,QAAQ;AAAA,IACpC,CAAC;AAAA,EACH;AAAA,EACA,QAAQA,GAAE,QAAQ,iBAAiB;AAAA,EACnC,OAAO;AACT,CAAC;AAID,IAAM,yBAAyBA,GAAE,OAAO;AAAA,EACtC,IAAIA,GAAE,OAAO,EAAE,QAAQ;AAAA,EACvB,SAASA,GAAE,OAAO,EAAE,QAAQ;AAAA,EAC5B,OAAOA,GAAE,OAAO,EAAE,QAAQ;AAAA,EAC1B,SAASA,GAAE;AAAA,IACTA,GAAE,OAAO;AAAA,MACP,OAAOA,GAAE,OAAO;AAAA,QACd,MAAMA,GAAE,KAAK,CAAC,WAAW,CAAC,EAAE,SAAS;AAAA,QACrC,SAAS;AAAA,QACT,YAAYA,GACT;AAAA,UACCA,GAAE,OAAO;AAAA,YACP,IAAIA,GAAE,OAAO;AAAA,YACb,UAAUA,GAAE,OAAO,EAAE,MAAMA,GAAE,OAAO,GAAG,WAAWA,GAAE,OAAO,EAAE,CAAC;AAAA,UAChE,CAAC;AAAA,QACH,EACC,QAAQ;AAAA,MACb,CAAC;AAAA,MACD,eAAeA,GAAE,OAAO,EAAE,QAAQ;AAAA,MAClC,OAAOA,GAAE,OAAO;AAAA,IAClB,CAAC;AAAA,EACH;AAAA,EACA,OAAO,mBAAmB,QAAQ;AACpC,CAAC;;;AO1kBD;AAAA,EAEE;AAAA,OACK;AACP;AAAA,EACE,kBAAAC;AAAA,EACA,6BAAAC;AAAA,EAEA,iBAAAC;AAAA,OACK;AACP,SAAS,KAAAC,UAAS;AAWX,IAAM,wBAAN,MAAgE;AAAA,EAYrE,YACE,SACA,QACA;AAdF,SAAS,uBAAuB;AAEhC,SAAS,uBAAuB;AAChC,SAAS,wBAAwB;AAY/B,SAAK,UAAU;AACf,SAAK,SAAS;AAAA,EAChB;AAAA,EAVA,IAAI,WAAmB;AACrB,WAAO,KAAK,OAAO;AAAA,EACrB;AAAA,EAUA,MAAM,QAAQ;AAAA,IACZ;AAAA,IACA;AAAA,IACA;AAAA,EACF,GAEE;AACA,QAAI,OAAO,SAAS,KAAK,sBAAsB;AAC7C,YAAM,IAAI,mCAAmC;AAAA,QAC3C,UAAU,KAAK;AAAA,QACf,SAAS,KAAK;AAAA,QACd,sBAAsB,KAAK;AAAA,QAC3B;AAAA,MACF,CAAC;AAAA,IACH;AAEA,UAAM;AAAA,MACJ;AAAA,MACA,OAAO;AAAA,MACP;AAAA,IACF,IAAI,MAAMC,eAAc;AAAA,MACtB,KAAK,GAAG,KAAK,OAAO,OAAO;AAAA,MAC3B,SAASC
,gBAAe,KAAK,OAAO,QAAQ,GAAG,OAAO;AAAA,MACtD,MAAM;AAAA,QACJ,OAAO,KAAK;AAAA,QACZ,OAAO;AAAA,QACP,iBAAiB;AAAA,MACnB;AAAA,MACA,uBAAuB;AAAA,MACvB,2BAA2BC;AAAA,QACzB;AAAA,MACF;AAAA,MACA;AAAA,MACA,OAAO,KAAK,OAAO;AAAA,IACrB,CAAC;AAED,WAAO;AAAA,MACL,YAAY,SAAS,KAAK,IAAI,UAAQ,KAAK,SAAS;AAAA,MACpD,OAAO,SAAS,QACZ,EAAE,QAAQ,SAAS,MAAM,cAAc,IACvC;AAAA,MACJ,UAAU,EAAE,SAAS,iBAAiB,MAAM,SAAS;AAAA,IACvD;AAAA,EACF;AACF;AAIA,IAAM,qCAAqCC,GAAE,OAAO;AAAA,EAClD,MAAMA,GAAE,MAAMA,GAAE,OAAO,EAAE,WAAWA,GAAE,MAAMA,GAAE,OAAO,CAAC,EAAE,CAAC,CAAC;AAAA,EAC1D,OAAOA,GAAE,OAAO,EAAE,eAAeA,GAAE,OAAO,EAAE,CAAC,EAAE,QAAQ;AACzD,CAAC;;;ARrBM,SAAS,cACd,UAAmC,CAAC,GACnB;AAzEnB;AA0EE,QAAM,WACJ,0BAAqB,QAAQ,OAAO,MAApC,YAAyC;AAE3C,QAAM,aAAa,OAAO;AAAA,IACxB,eAAe,UAAU,WAAW;AAAA,MAClC,QAAQ,QAAQ;AAAA,MAChB,yBAAyB;AAAA,MACzB,aAAa;AAAA,IACf,CAAC,CAAC;AAAA,IACF,GAAG,QAAQ;AAAA,EACb;AAEA,QAAM,kBAAkB,CAAC,YACvB,IAAI,yBAAyB,SAAS;AAAA,IACpC,UAAU;AAAA,IACV;AAAA,IACA,SAAS;AAAA,IACT,OAAO,QAAQ;AAAA,IACf,YAAY,QAAQ;AAAA,EACtB,CAAC;AAEH,QAAM,uBAAuB,CAAC,YAC5B,IAAI,sBAAsB,SAAS;AAAA,IACjC,UAAU;AAAA,IACV;AAAA,IACA,SAAS;AAAA,IACT,OAAO,QAAQ;AAAA,EACjB,CAAC;AAEH,QAAM,WAAW,SAAU,SAA6B;AACtD,QAAI,YAAY;AACd,YAAM,IAAI;AAAA,QACR;AAAA,MACF;AAAA,IACF;AAEA,WAAO,gBAAgB,OAAO;AAAA,EAChC;AAEA,WAAS,gBAAgB;AACzB,WAAS,OAAO;AAChB,WAAS,YAAY;AACrB,WAAS,gBAAgB;AACzB,WAAS,qBAAqB;AAE9B,WAAS,aAAa,CAAC,YAAoB;AACzC,UAAM,IAAI,iBAAiB,EAAE,SAAS,WAAW,aAAa,CAAC;AAAA,EACjE;AAEA,SAAO;AACT;AAKO,IAAM,UAAU,cAAc;","names":["z","z","UnsupportedFunctionalityError","generateId","z","combineHeaders","createJsonResponseHandler","postJsonToApi","z","postJsonToApi","combineHeaders","createJsonResponseHandler","z"]}
1
+ {"version":3,"sources":["../src/mistral-provider.ts","../src/mistral-chat-language-model.ts","../src/convert-to-mistral-chat-messages.ts","../src/get-response-metadata.ts","../src/map-mistral-finish-reason.ts","../src/mistral-chat-options.ts","../src/mistral-error.ts","../src/mistral-prepare-tools.ts","../src/mistral-embedding-model.ts","../src/version.ts"],"sourcesContent":["import {\n EmbeddingModelV3,\n LanguageModelV3,\n NoSuchModelError,\n ProviderV3,\n} from '@ai-sdk/provider';\nimport {\n FetchFunction,\n loadApiKey,\n withoutTrailingSlash,\n withUserAgentSuffix,\n} from '@ai-sdk/provider-utils';\nimport { MistralChatLanguageModel } from './mistral-chat-language-model';\nimport { MistralChatModelId } from './mistral-chat-options';\nimport { MistralEmbeddingModel } from './mistral-embedding-model';\nimport { MistralEmbeddingModelId } from './mistral-embedding-options';\nimport { VERSION } from './version';\n\nexport interface MistralProvider extends ProviderV3 {\n (modelId: MistralChatModelId): LanguageModelV3;\n\n /**\nCreates a model for text generation.\n*/\n languageModel(modelId: MistralChatModelId): LanguageModelV3;\n\n /**\nCreates a model for text generation.\n*/\n chat(modelId: MistralChatModelId): LanguageModelV3;\n\n /**\n@deprecated Use `textEmbedding()` instead.\n */\n embedding(modelId: MistralEmbeddingModelId): EmbeddingModelV3<string>;\n\n textEmbedding(modelId: MistralEmbeddingModelId): EmbeddingModelV3<string>;\n\n textEmbeddingModel: (\n modelId: MistralEmbeddingModelId,\n ) => EmbeddingModelV3<string>;\n}\n\nexport interface MistralProviderSettings {\n /**\nUse a different URL prefix for API calls, e.g. to use proxy servers.\nThe default prefix is `https://api.mistral.ai/v1`.\n */\n baseURL?: string;\n\n /**\nAPI key that is being send using the `Authorization` header.\nIt defaults to the `MISTRAL_API_KEY` environment variable.\n */\n apiKey?: string;\n\n /**\nCustom headers to include in the requests.\n */\n headers?: Record<string, string>;\n\n /**\nCustom fetch implementation. You can use it as a middleware to intercept requests,\nor to provide a custom fetch implementation for e.g. testing.\n */\n fetch?: FetchFunction;\n\n generateId?: () => string;\n}\n\n/**\nCreate a Mistral AI provider instance.\n */\nexport function createMistral(\n options: MistralProviderSettings = {},\n): MistralProvider {\n const baseURL =\n withoutTrailingSlash(options.baseURL) ?? 
'https://api.mistral.ai/v1';\n\n const getHeaders = () =>\n withUserAgentSuffix(\n {\n Authorization: `Bearer ${loadApiKey({\n apiKey: options.apiKey,\n environmentVariableName: 'MISTRAL_API_KEY',\n description: 'Mistral',\n })}`,\n ...options.headers,\n },\n `ai-sdk/mistral/${VERSION}`,\n );\n\n const createChatModel = (modelId: MistralChatModelId) =>\n new MistralChatLanguageModel(modelId, {\n provider: 'mistral.chat',\n baseURL,\n headers: getHeaders,\n fetch: options.fetch,\n generateId: options.generateId,\n });\n\n const createEmbeddingModel = (modelId: MistralEmbeddingModelId) =>\n new MistralEmbeddingModel(modelId, {\n provider: 'mistral.embedding',\n baseURL,\n headers: getHeaders,\n fetch: options.fetch,\n });\n\n const provider = function (modelId: MistralChatModelId) {\n if (new.target) {\n throw new Error(\n 'The Mistral model function cannot be called with the new keyword.',\n );\n }\n\n return createChatModel(modelId);\n };\n\n provider.languageModel = createChatModel;\n provider.chat = createChatModel;\n provider.embedding = createEmbeddingModel;\n provider.textEmbedding = createEmbeddingModel;\n provider.textEmbeddingModel = createEmbeddingModel;\n\n provider.imageModel = (modelId: string) => {\n throw new NoSuchModelError({ modelId, modelType: 'imageModel' });\n };\n\n return provider;\n}\n\n/**\nDefault Mistral provider instance.\n */\nexport const mistral = createMistral();\n","import {\n LanguageModelV3,\n LanguageModelV3CallWarning,\n LanguageModelV3Content,\n LanguageModelV3FinishReason,\n LanguageModelV3StreamPart,\n LanguageModelV3Usage,\n} from '@ai-sdk/provider';\nimport {\n combineHeaders,\n createEventSourceResponseHandler,\n createJsonResponseHandler,\n FetchFunction,\n generateId,\n injectJsonInstructionIntoMessages,\n parseProviderOptions,\n ParseResult,\n postJsonToApi,\n} from '@ai-sdk/provider-utils';\nimport { z } from 'zod/v4';\nimport { convertToMistralChatMessages } from './convert-to-mistral-chat-messages';\nimport { getResponseMetadata } from './get-response-metadata';\nimport { mapMistralFinishReason } from './map-mistral-finish-reason';\nimport {\n MistralChatModelId,\n mistralLanguageModelOptions,\n} from './mistral-chat-options';\nimport { mistralFailedResponseHandler } from './mistral-error';\nimport { prepareTools } from './mistral-prepare-tools';\n\ntype MistralChatConfig = {\n provider: string;\n baseURL: string;\n headers: () => Record<string, string | undefined>;\n fetch?: FetchFunction;\n generateId?: () => string;\n};\n\nexport class MistralChatLanguageModel implements LanguageModelV3 {\n readonly specificationVersion = 'v3';\n\n readonly modelId: MistralChatModelId;\n\n private readonly config: MistralChatConfig;\n private readonly generateId: () => string;\n\n constructor(modelId: MistralChatModelId, config: MistralChatConfig) {\n this.modelId = modelId;\n this.config = config;\n this.generateId = config.generateId ?? 
generateId;\n }\n\n get provider(): string {\n return this.config.provider;\n }\n\n readonly supportedUrls: Record<string, RegExp[]> = {\n 'application/pdf': [/^https:\\/\\/.*$/],\n };\n\n private async getArgs({\n prompt,\n maxOutputTokens,\n temperature,\n topP,\n topK,\n frequencyPenalty,\n presencePenalty,\n stopSequences,\n responseFormat,\n seed,\n providerOptions,\n tools,\n toolChoice,\n }: Parameters<LanguageModelV3['doGenerate']>[0]) {\n const warnings: LanguageModelV3CallWarning[] = [];\n\n const options =\n (await parseProviderOptions({\n provider: 'mistral',\n providerOptions,\n schema: mistralLanguageModelOptions,\n })) ?? {};\n\n if (topK != null) {\n warnings.push({\n type: 'unsupported-setting',\n setting: 'topK',\n });\n }\n\n if (frequencyPenalty != null) {\n warnings.push({\n type: 'unsupported-setting',\n setting: 'frequencyPenalty',\n });\n }\n\n if (presencePenalty != null) {\n warnings.push({\n type: 'unsupported-setting',\n setting: 'presencePenalty',\n });\n }\n\n if (stopSequences != null) {\n warnings.push({\n type: 'unsupported-setting',\n setting: 'stopSequences',\n });\n }\n\n const structuredOutputs = options.structuredOutputs ?? true;\n const strictJsonSchema = options.strictJsonSchema ?? false;\n\n // For Mistral we need to need to instruct the model to return a JSON object.\n // https://docs.mistral.ai/capabilities/structured-output/structured_output_overview/\n if (responseFormat?.type === 'json' && !responseFormat?.schema) {\n prompt = injectJsonInstructionIntoMessages({\n messages: prompt,\n schema: responseFormat.schema,\n });\n }\n\n const baseArgs = {\n // model id:\n model: this.modelId,\n\n // model specific settings:\n safe_prompt: options.safePrompt,\n\n // standardized settings:\n max_tokens: maxOutputTokens,\n temperature,\n top_p: topP,\n random_seed: seed,\n\n // response format:\n response_format:\n responseFormat?.type === 'json'\n ? structuredOutputs && responseFormat?.schema != null\n ? {\n type: 'json_schema',\n json_schema: {\n schema: responseFormat.schema,\n strict: strictJsonSchema,\n name: responseFormat.name ?? 'response',\n description: responseFormat.description,\n },\n }\n : { type: 'json_object' }\n : undefined,\n\n // mistral-specific provider options:\n document_image_limit: options.documentImageLimit,\n document_page_limit: options.documentPageLimit,\n\n // messages:\n messages: convertToMistralChatMessages(prompt),\n };\n\n const {\n tools: mistralTools,\n toolChoice: mistralToolChoice,\n toolWarnings,\n } = prepareTools({\n tools,\n toolChoice,\n });\n\n return {\n args: {\n ...baseArgs,\n tools: mistralTools,\n tool_choice: mistralToolChoice,\n ...(mistralTools != null && options.parallelToolCalls !== undefined\n ? 
{ parallel_tool_calls: options.parallelToolCalls }\n : {}),\n },\n warnings: [...warnings, ...toolWarnings],\n };\n }\n\n async doGenerate(\n options: Parameters<LanguageModelV3['doGenerate']>[0],\n ): Promise<Awaited<ReturnType<LanguageModelV3['doGenerate']>>> {\n const { args: body, warnings } = await this.getArgs(options);\n\n const {\n responseHeaders,\n value: response,\n rawValue: rawResponse,\n } = await postJsonToApi({\n url: `${this.config.baseURL}/chat/completions`,\n headers: combineHeaders(this.config.headers(), options.headers),\n body,\n failedResponseHandler: mistralFailedResponseHandler,\n successfulResponseHandler: createJsonResponseHandler(\n mistralChatResponseSchema,\n ),\n abortSignal: options.abortSignal,\n fetch: this.config.fetch,\n });\n\n const choice = response.choices[0];\n const content: Array<LanguageModelV3Content> = [];\n\n // process content parts in order to preserve sequence\n if (\n choice.message.content != null &&\n Array.isArray(choice.message.content)\n ) {\n for (const part of choice.message.content) {\n if (part.type === 'thinking') {\n const reasoningText = extractReasoningContent(part.thinking);\n if (reasoningText.length > 0) {\n content.push({ type: 'reasoning', text: reasoningText });\n }\n } else if (part.type === 'text') {\n if (part.text.length > 0) {\n content.push({ type: 'text', text: part.text });\n }\n }\n }\n } else {\n // handle legacy string content\n const text = extractTextContent(choice.message.content);\n if (text != null && text.length > 0) {\n content.push({ type: 'text', text });\n }\n }\n\n // when there is a trailing assistant message, mistral will send the\n // content of that message again. we skip this repeated content to\n // avoid duplication, e.g. in continuation mode.\n\n // tool calls:\n if (choice.message.tool_calls != null) {\n for (const toolCall of choice.message.tool_calls) {\n content.push({\n type: 'tool-call',\n toolCallId: toolCall.id,\n toolName: toolCall.function.name,\n input: toolCall.function.arguments!,\n });\n }\n }\n\n return {\n content,\n finishReason: mapMistralFinishReason(choice.finish_reason),\n usage: {\n inputTokens: response.usage.prompt_tokens,\n outputTokens: response.usage.completion_tokens,\n totalTokens: response.usage.total_tokens,\n },\n request: { body },\n response: {\n ...getResponseMetadata(response),\n headers: responseHeaders,\n body: rawResponse,\n },\n warnings,\n };\n }\n\n async doStream(\n options: Parameters<LanguageModelV3['doStream']>[0],\n ): Promise<Awaited<ReturnType<LanguageModelV3['doStream']>>> {\n const { args, warnings } = await this.getArgs(options);\n const body = { ...args, stream: true };\n\n const { responseHeaders, value: response } = await postJsonToApi({\n url: `${this.config.baseURL}/chat/completions`,\n headers: combineHeaders(this.config.headers(), options.headers),\n body,\n failedResponseHandler: mistralFailedResponseHandler,\n successfulResponseHandler: createEventSourceResponseHandler(\n mistralChatChunkSchema,\n ),\n abortSignal: options.abortSignal,\n fetch: this.config.fetch,\n });\n\n let finishReason: LanguageModelV3FinishReason = 'unknown';\n const usage: LanguageModelV3Usage = {\n inputTokens: undefined,\n outputTokens: undefined,\n totalTokens: undefined,\n };\n\n let isFirstChunk = true;\n let activeText = false;\n let activeReasoningId: string | null = null;\n\n const generateId = this.generateId;\n\n return {\n stream: response.pipeThrough(\n new TransformStream<\n ParseResult<z.infer<typeof mistralChatChunkSchema>>,\n 
LanguageModelV3StreamPart\n >({\n start(controller) {\n controller.enqueue({ type: 'stream-start', warnings });\n },\n\n transform(chunk, controller) {\n // Emit raw chunk if requested (before anything else)\n if (options.includeRawChunks) {\n controller.enqueue({ type: 'raw', rawValue: chunk.rawValue });\n }\n\n if (!chunk.success) {\n controller.enqueue({ type: 'error', error: chunk.error });\n return;\n }\n\n const value = chunk.value;\n\n if (isFirstChunk) {\n isFirstChunk = false;\n\n controller.enqueue({\n type: 'response-metadata',\n ...getResponseMetadata(value),\n });\n }\n\n if (value.usage != null) {\n usage.inputTokens = value.usage.prompt_tokens;\n usage.outputTokens = value.usage.completion_tokens;\n usage.totalTokens = value.usage.total_tokens;\n }\n\n const choice = value.choices[0];\n const delta = choice.delta;\n\n const textContent = extractTextContent(delta.content);\n\n if (delta.content != null && Array.isArray(delta.content)) {\n for (const part of delta.content) {\n if (part.type === 'thinking') {\n const reasoningDelta = extractReasoningContent(part.thinking);\n if (reasoningDelta.length > 0) {\n if (activeReasoningId == null) {\n // end any active text before starting reasoning\n if (activeText) {\n controller.enqueue({ type: 'text-end', id: '0' });\n activeText = false;\n }\n\n activeReasoningId = generateId();\n controller.enqueue({\n type: 'reasoning-start',\n id: activeReasoningId,\n });\n }\n controller.enqueue({\n type: 'reasoning-delta',\n id: activeReasoningId,\n delta: reasoningDelta,\n });\n }\n }\n }\n }\n\n if (textContent != null && textContent.length > 0) {\n if (!activeText) {\n // if we were in reasoning mode, end it before starting text\n if (activeReasoningId != null) {\n controller.enqueue({\n type: 'reasoning-end',\n id: activeReasoningId,\n });\n activeReasoningId = null;\n }\n controller.enqueue({ type: 'text-start', id: '0' });\n activeText = true;\n }\n\n controller.enqueue({\n type: 'text-delta',\n id: '0',\n delta: textContent,\n });\n }\n\n if (delta?.tool_calls != null) {\n for (const toolCall of delta.tool_calls) {\n const toolCallId = toolCall.id;\n const toolName = toolCall.function.name;\n const input = toolCall.function.arguments;\n\n controller.enqueue({\n type: 'tool-input-start',\n id: toolCallId,\n toolName,\n });\n\n controller.enqueue({\n type: 'tool-input-delta',\n id: toolCallId,\n delta: input,\n });\n\n controller.enqueue({\n type: 'tool-input-end',\n id: toolCallId,\n });\n\n controller.enqueue({\n type: 'tool-call',\n toolCallId,\n toolName,\n input,\n });\n }\n }\n\n if (choice.finish_reason != null) {\n finishReason = mapMistralFinishReason(choice.finish_reason);\n }\n },\n\n flush(controller) {\n if (activeReasoningId != null) {\n controller.enqueue({\n type: 'reasoning-end',\n id: activeReasoningId,\n });\n }\n if (activeText) {\n controller.enqueue({ type: 'text-end', id: '0' });\n }\n\n controller.enqueue({\n type: 'finish',\n finishReason,\n usage,\n });\n },\n }),\n ),\n request: { body },\n response: { headers: responseHeaders },\n };\n }\n}\n\nfunction extractReasoningContent(\n thinking: Array<{ type: string; text: string }>,\n) {\n return thinking\n .filter(chunk => chunk.type === 'text')\n .map(chunk => chunk.text)\n .join('');\n}\n\nfunction extractTextContent(content: z.infer<typeof mistralContentSchema>) {\n if (typeof content === 'string') {\n return content;\n }\n\n if (content == null) {\n return undefined;\n }\n\n const textContent: string[] = [];\n\n for (const chunk of content) {\n const { type } = 
chunk;\n\n switch (type) {\n case 'text':\n textContent.push(chunk.text);\n break;\n case 'thinking':\n case 'image_url':\n case 'reference':\n // thinking, image content, and reference content are currently ignored\n break;\n default: {\n const _exhaustiveCheck: never = type;\n throw new Error(`Unsupported type: ${_exhaustiveCheck}`);\n }\n }\n }\n\n return textContent.length ? textContent.join('') : undefined;\n}\n\nconst mistralContentSchema = z\n .union([\n z.string(),\n z.array(\n z.discriminatedUnion('type', [\n z.object({\n type: z.literal('text'),\n text: z.string(),\n }),\n z.object({\n type: z.literal('image_url'),\n image_url: z.union([\n z.string(),\n z.object({\n url: z.string(),\n detail: z.string().nullable(),\n }),\n ]),\n }),\n z.object({\n type: z.literal('reference'),\n reference_ids: z.array(z.number()),\n }),\n z.object({\n type: z.literal('thinking'),\n thinking: z.array(\n z.object({\n type: z.literal('text'),\n text: z.string(),\n }),\n ),\n }),\n ]),\n ),\n ])\n .nullish();\n\nconst mistralUsageSchema = z.object({\n prompt_tokens: z.number(),\n completion_tokens: z.number(),\n total_tokens: z.number(),\n});\n\n// limited version of the schema, focussed on what is needed for the implementation\n// this approach limits breakages when the API changes and increases efficiency\nconst mistralChatResponseSchema = z.object({\n id: z.string().nullish(),\n created: z.number().nullish(),\n model: z.string().nullish(),\n choices: z.array(\n z.object({\n message: z.object({\n role: z.literal('assistant'),\n content: mistralContentSchema,\n tool_calls: z\n .array(\n z.object({\n id: z.string(),\n function: z.object({ name: z.string(), arguments: z.string() }),\n }),\n )\n .nullish(),\n }),\n index: z.number(),\n finish_reason: z.string().nullish(),\n }),\n ),\n object: z.literal('chat.completion'),\n usage: mistralUsageSchema,\n});\n\n// limited version of the schema, focussed on what is needed for the implementation\n// this approach limits breakages when the API changes and increases efficiency\nconst mistralChatChunkSchema = z.object({\n id: z.string().nullish(),\n created: z.number().nullish(),\n model: z.string().nullish(),\n choices: z.array(\n z.object({\n delta: z.object({\n role: z.enum(['assistant']).optional(),\n content: mistralContentSchema,\n tool_calls: z\n .array(\n z.object({\n id: z.string(),\n function: z.object({ name: z.string(), arguments: z.string() }),\n }),\n )\n .nullish(),\n }),\n finish_reason: z.string().nullish(),\n index: z.number(),\n }),\n ),\n usage: mistralUsageSchema.nullish(),\n});\n","import {\n LanguageModelV3Prompt,\n UnsupportedFunctionalityError,\n} from '@ai-sdk/provider';\nimport { MistralPrompt } from './mistral-chat-prompt';\nimport { convertToBase64 } from '@ai-sdk/provider-utils';\n\nexport function convertToMistralChatMessages(\n prompt: LanguageModelV3Prompt,\n): MistralPrompt {\n const messages: MistralPrompt = [];\n\n for (let i = 0; i < prompt.length; i++) {\n const { role, content } = prompt[i];\n const isLastMessage = i === prompt.length - 1;\n\n switch (role) {\n case 'system': {\n messages.push({ role: 'system', content });\n break;\n }\n\n case 'user': {\n messages.push({\n role: 'user',\n content: content.map(part => {\n switch (part.type) {\n case 'text': {\n return { type: 'text', text: part.text };\n }\n\n case 'file': {\n if (part.mediaType.startsWith('image/')) {\n const mediaType =\n part.mediaType === 'image/*'\n ? 
'image/jpeg'\n : part.mediaType;\n\n return {\n type: 'image_url',\n image_url:\n part.data instanceof URL\n ? part.data.toString()\n : `data:${mediaType};base64,${convertToBase64(part.data)}`,\n };\n } else if (part.mediaType === 'application/pdf') {\n return {\n type: 'document_url',\n document_url: part.data.toString(),\n };\n } else {\n throw new UnsupportedFunctionalityError({\n functionality:\n 'Only images and PDF file parts are supported',\n });\n }\n }\n }\n }),\n });\n break;\n }\n\n case 'assistant': {\n let text = '';\n const toolCalls: Array<{\n id: string;\n type: 'function';\n function: { name: string; arguments: string };\n }> = [];\n\n for (const part of content) {\n switch (part.type) {\n case 'text': {\n text += part.text;\n break;\n }\n case 'tool-call': {\n toolCalls.push({\n id: part.toolCallId,\n type: 'function',\n function: {\n name: part.toolName,\n arguments: JSON.stringify(part.input),\n },\n });\n break;\n }\n case 'reasoning': {\n text += part.text;\n break;\n }\n default: {\n throw new Error(\n `Unsupported content type in assistant message: ${part.type}`,\n );\n }\n }\n }\n\n messages.push({\n role: 'assistant',\n content: text,\n prefix: isLastMessage ? true : undefined,\n tool_calls: toolCalls.length > 0 ? toolCalls : undefined,\n });\n\n break;\n }\n case 'tool': {\n for (const toolResponse of content) {\n const output = toolResponse.output;\n\n let contentValue: string;\n switch (output.type) {\n case 'text':\n case 'error-text':\n contentValue = output.value;\n break;\n case 'execution-denied':\n contentValue = output.reason ?? 'Tool execution denied.';\n break;\n case 'content':\n case 'json':\n case 'error-json':\n contentValue = JSON.stringify(output.value);\n break;\n }\n\n messages.push({\n role: 'tool',\n name: toolResponse.toolName,\n tool_call_id: toolResponse.toolCallId,\n content: contentValue,\n });\n }\n break;\n }\n default: {\n const _exhaustiveCheck: never = role;\n throw new Error(`Unsupported role: ${_exhaustiveCheck}`);\n }\n }\n }\n\n return messages;\n}\n","export function getResponseMetadata({\n id,\n model,\n created,\n}: {\n id?: string | undefined | null;\n created?: number | undefined | null;\n model?: string | undefined | null;\n}) {\n return {\n id: id ?? undefined,\n modelId: model ?? undefined,\n timestamp: created != null ? 
new Date(created * 1000) : undefined,\n };\n}\n","import { LanguageModelV3FinishReason } from '@ai-sdk/provider';\n\nexport function mapMistralFinishReason(\n finishReason: string | null | undefined,\n): LanguageModelV3FinishReason {\n switch (finishReason) {\n case 'stop':\n return 'stop';\n case 'length':\n case 'model_length':\n return 'length';\n case 'tool_calls':\n return 'tool-calls';\n default:\n return 'unknown';\n }\n}\n","import { z } from 'zod/v4';\n\n// https://docs.mistral.ai/getting-started/models/models_overview/\nexport type MistralChatModelId =\n // premier\n | 'ministral-3b-latest'\n | 'ministral-8b-latest'\n | 'mistral-large-latest'\n | 'mistral-medium-latest'\n | 'mistral-medium-2508'\n | 'mistral-medium-2505'\n | 'mistral-small-latest'\n | 'pixtral-large-latest'\n // reasoning models\n | 'magistral-small-2507'\n | 'magistral-medium-2507'\n | 'magistral-small-2506'\n | 'magistral-medium-2506'\n // free\n | 'pixtral-12b-2409'\n // legacy\n | 'open-mistral-7b'\n | 'open-mixtral-8x7b'\n | 'open-mixtral-8x22b'\n | (string & {});\n\nexport const mistralLanguageModelOptions = z.object({\n /**\nWhether to inject a safety prompt before all conversations.\n\nDefaults to `false`.\n */\n safePrompt: z.boolean().optional(),\n\n documentImageLimit: z.number().optional(),\n documentPageLimit: z.number().optional(),\n\n /**\n * Whether to use structured outputs.\n *\n * @default true\n */\n structuredOutputs: z.boolean().optional(),\n\n /**\n * Whether to use strict JSON schema validation.\n *\n * @default false\n */\n strictJsonSchema: z.boolean().optional(),\n\n /**\n * Whether to enable parallel function calling during tool use.\n * When set to false, the model will use at most one tool per response.\n *\n * @default true\n */\n parallelToolCalls: z.boolean().optional(),\n});\n\nexport type MistralLanguageModelOptions = z.infer<\n typeof mistralLanguageModelOptions\n>;\n","import { createJsonErrorResponseHandler } from '@ai-sdk/provider-utils';\nimport { z } from 'zod/v4';\n\nconst mistralErrorDataSchema = z.object({\n object: z.literal('error'),\n message: z.string(),\n type: z.string(),\n param: z.string().nullable(),\n code: z.string().nullable(),\n});\n\nexport type MistralErrorData = z.infer<typeof mistralErrorDataSchema>;\n\nexport const mistralFailedResponseHandler = createJsonErrorResponseHandler({\n errorSchema: mistralErrorDataSchema,\n errorToMessage: data => data.message,\n});\n","import {\n LanguageModelV3CallOptions,\n LanguageModelV3CallWarning,\n UnsupportedFunctionalityError,\n} from '@ai-sdk/provider';\nimport { MistralToolChoice } from './mistral-chat-prompt';\n\nexport function prepareTools({\n tools,\n toolChoice,\n}: {\n tools: LanguageModelV3CallOptions['tools'];\n toolChoice?: LanguageModelV3CallOptions['toolChoice'];\n}): {\n tools:\n | Array<{\n type: 'function';\n function: {\n name: string;\n description: string | undefined;\n parameters: unknown;\n };\n }>\n | undefined;\n toolChoice: MistralToolChoice | undefined;\n toolWarnings: LanguageModelV3CallWarning[];\n} {\n // when the tools array is empty, change it to undefined to prevent errors:\n tools = tools?.length ? 
tools : undefined;\n\n const toolWarnings: LanguageModelV3CallWarning[] = [];\n\n if (tools == null) {\n return { tools: undefined, toolChoice: undefined, toolWarnings };\n }\n\n const mistralTools: Array<{\n type: 'function';\n function: {\n name: string;\n description: string | undefined;\n parameters: unknown;\n };\n }> = [];\n\n for (const tool of tools) {\n if (tool.type === 'provider-defined') {\n toolWarnings.push({ type: 'unsupported-tool', tool });\n } else {\n mistralTools.push({\n type: 'function',\n function: {\n name: tool.name,\n description: tool.description,\n parameters: tool.inputSchema,\n },\n });\n }\n }\n\n if (toolChoice == null) {\n return { tools: mistralTools, toolChoice: undefined, toolWarnings };\n }\n\n const type = toolChoice.type;\n\n switch (type) {\n case 'auto':\n case 'none':\n return { tools: mistralTools, toolChoice: type, toolWarnings };\n case 'required':\n return { tools: mistralTools, toolChoice: 'any', toolWarnings };\n\n // mistral does not support tool mode directly,\n // so we filter the tools and force the tool choice through 'any'\n case 'tool':\n return {\n tools: mistralTools.filter(\n tool => tool.function.name === toolChoice.toolName,\n ),\n toolChoice: 'any',\n toolWarnings,\n };\n default: {\n const _exhaustiveCheck: never = type;\n throw new UnsupportedFunctionalityError({\n functionality: `tool choice type: ${_exhaustiveCheck}`,\n });\n }\n }\n}\n","import {\n EmbeddingModelV3,\n TooManyEmbeddingValuesForCallError,\n} from '@ai-sdk/provider';\nimport {\n combineHeaders,\n createJsonResponseHandler,\n FetchFunction,\n postJsonToApi,\n} from '@ai-sdk/provider-utils';\nimport { z } from 'zod/v4';\nimport { MistralEmbeddingModelId } from './mistral-embedding-options';\nimport { mistralFailedResponseHandler } from './mistral-error';\n\ntype MistralEmbeddingConfig = {\n provider: string;\n baseURL: string;\n headers: () => Record<string, string | undefined>;\n fetch?: FetchFunction;\n};\n\nexport class MistralEmbeddingModel implements EmbeddingModelV3<string> {\n readonly specificationVersion = 'v3';\n readonly modelId: MistralEmbeddingModelId;\n readonly maxEmbeddingsPerCall = 32;\n readonly supportsParallelCalls = false;\n\n private readonly config: MistralEmbeddingConfig;\n\n get provider(): string {\n return this.config.provider;\n }\n\n constructor(\n modelId: MistralEmbeddingModelId,\n config: MistralEmbeddingConfig,\n ) {\n this.modelId = modelId;\n this.config = config;\n }\n\n async doEmbed({\n values,\n abortSignal,\n headers,\n }: Parameters<EmbeddingModelV3<string>['doEmbed']>[0]): Promise<\n Awaited<ReturnType<EmbeddingModelV3<string>['doEmbed']>>\n > {\n if (values.length > this.maxEmbeddingsPerCall) {\n throw new TooManyEmbeddingValuesForCallError({\n provider: this.provider,\n modelId: this.modelId,\n maxEmbeddingsPerCall: this.maxEmbeddingsPerCall,\n values,\n });\n }\n\n const {\n responseHeaders,\n value: response,\n rawValue,\n } = await postJsonToApi({\n url: `${this.config.baseURL}/embeddings`,\n headers: combineHeaders(this.config.headers(), headers),\n body: {\n model: this.modelId,\n input: values,\n encoding_format: 'float',\n },\n failedResponseHandler: mistralFailedResponseHandler,\n successfulResponseHandler: createJsonResponseHandler(\n MistralTextEmbeddingResponseSchema,\n ),\n abortSignal,\n fetch: this.config.fetch,\n });\n\n return {\n embeddings: response.data.map(item => item.embedding),\n usage: response.usage\n ? 
{ tokens: response.usage.prompt_tokens }\n : undefined,\n response: { headers: responseHeaders, body: rawValue },\n };\n }\n}\n\n// minimal version of the schema, focussed on what is needed for the implementation\n// this approach limits breakages when the API changes and increases efficiency\nconst MistralTextEmbeddingResponseSchema = z.object({\n data: z.array(z.object({ embedding: z.array(z.number()) })),\n usage: z.object({ prompt_tokens: z.number() }).nullish(),\n});\n","// Version string of this package injected at build time.\ndeclare const __PACKAGE_VERSION__: string | undefined;\nexport const VERSION: string =\n typeof __PACKAGE_VERSION__ !== 'undefined'\n ? __PACKAGE_VERSION__\n : '0.0.0-test';\n"],"mappings":";AAAA;AAAA,EAGE;AAAA,OAEK;AACP;AAAA,EAEE;AAAA,EACA;AAAA,EACA;AAAA,OACK;;;ACHP;AAAA,EACE;AAAA,EACA;AAAA,EACA;AAAA,EAEA;AAAA,EACA;AAAA,EACA;AAAA,EAEA;AAAA,OACK;AACP,SAAS,KAAAA,UAAS;;;ACnBlB;AAAA,EAEE;AAAA,OACK;AAEP,SAAS,uBAAuB;AAEzB,SAAS,6BACd,QACe;AATjB;AAUE,QAAM,WAA0B,CAAC;AAEjC,WAAS,IAAI,GAAG,IAAI,OAAO,QAAQ,KAAK;AACtC,UAAM,EAAE,MAAM,QAAQ,IAAI,OAAO,CAAC;AAClC,UAAM,gBAAgB,MAAM,OAAO,SAAS;AAE5C,YAAQ,MAAM;AAAA,MACZ,KAAK,UAAU;AACb,iBAAS,KAAK,EAAE,MAAM,UAAU,QAAQ,CAAC;AACzC;AAAA,MACF;AAAA,MAEA,KAAK,QAAQ;AACX,iBAAS,KAAK;AAAA,UACZ,MAAM;AAAA,UACN,SAAS,QAAQ,IAAI,UAAQ;AAC3B,oBAAQ,KAAK,MAAM;AAAA,cACjB,KAAK,QAAQ;AACX,uBAAO,EAAE,MAAM,QAAQ,MAAM,KAAK,KAAK;AAAA,cACzC;AAAA,cAEA,KAAK,QAAQ;AACX,oBAAI,KAAK,UAAU,WAAW,QAAQ,GAAG;AACvC,wBAAM,YACJ,KAAK,cAAc,YACf,eACA,KAAK;AAEX,yBAAO;AAAA,oBACL,MAAM;AAAA,oBACN,WACE,KAAK,gBAAgB,MACjB,KAAK,KAAK,SAAS,IACnB,QAAQ,SAAS,WAAW,gBAAgB,KAAK,IAAI,CAAC;AAAA,kBAC9D;AAAA,gBACF,WAAW,KAAK,cAAc,mBAAmB;AAC/C,yBAAO;AAAA,oBACL,MAAM;AAAA,oBACN,cAAc,KAAK,KAAK,SAAS;AAAA,kBACnC;AAAA,gBACF,OAAO;AACL,wBAAM,IAAI,8BAA8B;AAAA,oBACtC,eACE;AAAA,kBACJ,CAAC;AAAA,gBACH;AAAA,cACF;AAAA,YACF;AAAA,UACF,CAAC;AAAA,QACH,CAAC;AACD;AAAA,MACF;AAAA,MAEA,KAAK,aAAa;AAChB,YAAI,OAAO;AACX,cAAM,YAID,CAAC;AAEN,mBAAW,QAAQ,SAAS;AAC1B,kBAAQ,KAAK,MAAM;AAAA,YACjB,KAAK,QAAQ;AACX,sBAAQ,KAAK;AACb;AAAA,YACF;AAAA,YACA,KAAK,aAAa;AAChB,wBAAU,KAAK;AAAA,gBACb,IAAI,KAAK;AAAA,gBACT,MAAM;AAAA,gBACN,UAAU;AAAA,kBACR,MAAM,KAAK;AAAA,kBACX,WAAW,KAAK,UAAU,KAAK,KAAK;AAAA,gBACtC;AAAA,cACF,CAAC;AACD;AAAA,YACF;AAAA,YACA,KAAK,aAAa;AAChB,sBAAQ,KAAK;AACb;AAAA,YACF;AAAA,YACA,SAAS;AACP,oBAAM,IAAI;AAAA,gBACR,kDAAkD,KAAK,IAAI;AAAA,cAC7D;AAAA,YACF;AAAA,UACF;AAAA,QACF;AAEA,iBAAS,KAAK;AAAA,UACZ,MAAM;AAAA,UACN,SAAS;AAAA,UACT,QAAQ,gBAAgB,OAAO;AAAA,UAC/B,YAAY,UAAU,SAAS,IAAI,YAAY;AAAA,QACjD,CAAC;AAED;AAAA,MACF;AAAA,MACA,KAAK,QAAQ;AACX,mBAAW,gBAAgB,SAAS;AAClC,gBAAM,SAAS,aAAa;AAE5B,cAAI;AACJ,kBAAQ,OAAO,MAAM;AAAA,YACnB,KAAK;AAAA,YACL,KAAK;AACH,6BAAe,OAAO;AACtB;AAAA,YACF,KAAK;AACH,8BAAe,YAAO,WAAP,YAAiB;AAChC;AAAA,YACF,KAAK;AAAA,YACL,KAAK;AAAA,YACL,KAAK;AACH,6BAAe,KAAK,UAAU,OAAO,KAAK;AAC1C;AAAA,UACJ;AAEA,mBAAS,KAAK;AAAA,YACZ,MAAM;AAAA,YACN,MAAM,aAAa;AAAA,YACnB,cAAc,aAAa;AAAA,YAC3B,SAAS;AAAA,UACX,CAAC;AAAA,QACH;AACA;AAAA,MACF;AAAA,MACA,SAAS;AACP,cAAM,mBAA0B;AAChC,cAAM,IAAI,MAAM,qBAAqB,gBAAgB,EAAE;AAAA,MACzD;AAAA,IACF;AAAA,EACF;AAEA,SAAO;AACT;;;AClJO,SAAS,oBAAoB;AAAA,EAClC;AAAA,EACA;AAAA,EACA;AACF,GAIG;AACD,SAAO;AAAA,IACL,IAAI,kBAAM;AAAA,IACV,SAAS,wBAAS;AAAA,IAClB,WAAW,WAAW,OAAO,IAAI,KAAK,UAAU,GAAI,IAAI;AAAA,EAC1D;AACF;;;ACZO,SAAS,uBACd,cAC6B;AAC7B,UAAQ,cAAc;AAAA,IACpB,KAAK;AACH,aAAO;AAAA,IACT,KAAK;AAAA,IACL,KAAK;AACH,aAAO;AAAA,IACT,KAAK;AACH,aAAO;AAAA,IACT;AACE,aAAO;AAAA,EACX;AACF;;;AChBA,SAAS,SAAS;AA0BX,IAAM,8BAA8B,EAAE,OAAO;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,EAMlD,YAAY,EAAE,QAAQ,EAAE,SAAS;AAAA,EAEjC,oBAAoB,EAAE,OAAO,EAAE,SAAS;AAAA,EACxC,mBAAmB,EAAE,OAAO
,EAAE,SAAS;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,EAOvC,mBAAmB,EAAE,QAAQ,EAAE,SAAS;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,EAOxC,kBAAkB,EAAE,QAAQ,EAAE,SAAS;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,EAQvC,mBAAmB,EAAE,QAAQ,EAAE,SAAS;AAC1C,CAAC;;;AC1DD,SAAS,sCAAsC;AAC/C,SAAS,KAAAC,UAAS;AAElB,IAAM,yBAAyBA,GAAE,OAAO;AAAA,EACtC,QAAQA,GAAE,QAAQ,OAAO;AAAA,EACzB,SAASA,GAAE,OAAO;AAAA,EAClB,MAAMA,GAAE,OAAO;AAAA,EACf,OAAOA,GAAE,OAAO,EAAE,SAAS;AAAA,EAC3B,MAAMA,GAAE,OAAO,EAAE,SAAS;AAC5B,CAAC;AAIM,IAAM,+BAA+B,+BAA+B;AAAA,EACzE,aAAa;AAAA,EACb,gBAAgB,UAAQ,KAAK;AAC/B,CAAC;;;AChBD;AAAA,EAGE,iCAAAC;AAAA,OACK;AAGA,SAAS,aAAa;AAAA,EAC3B;AAAA,EACA;AACF,GAgBE;AAEA,WAAQ,+BAAO,UAAS,QAAQ;AAEhC,QAAM,eAA6C,CAAC;AAEpD,MAAI,SAAS,MAAM;AACjB,WAAO,EAAE,OAAO,QAAW,YAAY,QAAW,aAAa;AAAA,EACjE;AAEA,QAAM,eAOD,CAAC;AAEN,aAAW,QAAQ,OAAO;AACxB,QAAI,KAAK,SAAS,oBAAoB;AACpC,mBAAa,KAAK,EAAE,MAAM,oBAAoB,KAAK,CAAC;AAAA,IACtD,OAAO;AACL,mBAAa,KAAK;AAAA,QAChB,MAAM;AAAA,QACN,UAAU;AAAA,UACR,MAAM,KAAK;AAAA,UACX,aAAa,KAAK;AAAA,UAClB,YAAY,KAAK;AAAA,QACnB;AAAA,MACF,CAAC;AAAA,IACH;AAAA,EACF;AAEA,MAAI,cAAc,MAAM;AACtB,WAAO,EAAE,OAAO,cAAc,YAAY,QAAW,aAAa;AAAA,EACpE;AAEA,QAAM,OAAO,WAAW;AAExB,UAAQ,MAAM;AAAA,IACZ,KAAK;AAAA,IACL,KAAK;AACH,aAAO,EAAE,OAAO,cAAc,YAAY,MAAM,aAAa;AAAA,IAC/D,KAAK;AACH,aAAO,EAAE,OAAO,cAAc,YAAY,OAAO,aAAa;AAAA;AAAA;AAAA,IAIhE,KAAK;AACH,aAAO;AAAA,QACL,OAAO,aAAa;AAAA,UAClB,UAAQ,KAAK,SAAS,SAAS,WAAW;AAAA,QAC5C;AAAA,QACA,YAAY;AAAA,QACZ;AAAA,MACF;AAAA,IACF,SAAS;AACP,YAAM,mBAA0B;AAChC,YAAM,IAAIA,+BAA8B;AAAA,QACtC,eAAe,qBAAqB,gBAAgB;AAAA,MACtD,CAAC;AAAA,IACH;AAAA,EACF;AACF;;;ANpDO,IAAM,2BAAN,MAA0D;AAAA,EAQ/D,YAAY,SAA6B,QAA2B;AAPpE,SAAS,uBAAuB;AAiBhC,SAAS,gBAA0C;AAAA,MACjD,mBAAmB,CAAC,gBAAgB;AAAA,IACtC;AA1DF;AA+CI,SAAK,UAAU;AACf,SAAK,SAAS;AACd,SAAK,cAAa,YAAO,eAAP,YAAqB;AAAA,EACzC;AAAA,EAEA,IAAI,WAAmB;AACrB,WAAO,KAAK,OAAO;AAAA,EACrB;AAAA,EAMA,MAAc,QAAQ;AAAA,IACpB;AAAA,IACA;AAAA,IACA;AAAA,IACA;AAAA,IACA;AAAA,IACA;AAAA,IACA;AAAA,IACA;AAAA,IACA;AAAA,IACA;AAAA,IACA;AAAA,IACA;AAAA,IACA;AAAA,EACF,GAAiD;AA1EnD;AA2EI,UAAM,WAAyC,CAAC;AAEhD,UAAM,WACH,WAAM,qBAAqB;AAAA,MAC1B,UAAU;AAAA,MACV;AAAA,MACA,QAAQ;AAAA,IACV,CAAC,MAJA,YAIM,CAAC;AAEV,QAAI,QAAQ,MAAM;AAChB,eAAS,KAAK;AAAA,QACZ,MAAM;AAAA,QACN,SAAS;AAAA,MACX,CAAC;AAAA,IACH;AAEA,QAAI,oBAAoB,MAAM;AAC5B,eAAS,KAAK;AAAA,QACZ,MAAM;AAAA,QACN,SAAS;AAAA,MACX,CAAC;AAAA,IACH;AAEA,QAAI,mBAAmB,MAAM;AAC3B,eAAS,KAAK;AAAA,QACZ,MAAM;AAAA,QACN,SAAS;AAAA,MACX,CAAC;AAAA,IACH;AAEA,QAAI,iBAAiB,MAAM;AACzB,eAAS,KAAK;AAAA,QACZ,MAAM;AAAA,QACN,SAAS;AAAA,MACX,CAAC;AAAA,IACH;AAEA,UAAM,qBAAoB,aAAQ,sBAAR,YAA6B;AACvD,UAAM,oBAAmB,aAAQ,qBAAR,YAA4B;AAIrD,SAAI,iDAAgB,UAAS,UAAU,EAAC,iDAAgB,SAAQ;AAC9D,eAAS,kCAAkC;AAAA,QACzC,UAAU;AAAA,QACV,QAAQ,eAAe;AAAA,MACzB,CAAC;AAAA,IACH;AAEA,UAAM,WAAW;AAAA;AAAA,MAEf,OAAO,KAAK;AAAA;AAAA,MAGZ,aAAa,QAAQ;AAAA;AAAA,MAGrB,YAAY;AAAA,MACZ;AAAA,MACA,OAAO;AAAA,MACP,aAAa;AAAA;AAAA,MAGb,kBACE,iDAAgB,UAAS,SACrB,sBAAqB,iDAAgB,WAAU,OAC7C;AAAA,QACE,MAAM;AAAA,QACN,aAAa;AAAA,UACX,QAAQ,eAAe;AAAA,UACvB,QAAQ;AAAA,UACR,OAAM,oBAAe,SAAf,YAAuB;AAAA,UAC7B,aAAa,eAAe;AAAA,QAC9B;AAAA,MACF,IACA,EAAE,MAAM,cAAc,IACxB;AAAA;AAAA,MAGN,sBAAsB,QAAQ;AAAA,MAC9B,qBAAqB,QAAQ;AAAA;AAAA,MAG7B,UAAU,6BAA6B,MAAM;AAAA,IAC/C;AAEA,UAAM;AAAA,MACJ,OAAO;AAAA,MACP,YAAY;AAAA,MACZ;AAAA,IACF,IAAI,aAAa;AAAA,MACf;AAAA,MACA;AAAA,IACF,CAAC;AAED,WAAO;AAAA,MACL,MAAM;AAAA,QACJ,GAAG;AAAA,QACH,OAAO;AAAA,QACP,aAAa;AAAA,QACb,GAAI,gBAAgB,QAAQ,QAAQ,sBAAsB,SACtD,EAAE,qBAAqB,QAAQ,kBAAkB,IACjD,CAAC;AAAA,MACP;AAAA,MACA,UAAU,CAAC,GAAG,UAAU,GAAG,YAAY;AAAA,IACzC;AAAA,EACF;AAAA,EAEA,MAAM,WACJ,SAC6D;AAC7D,UAAM,EAAE,MAAM,MAAM,SAAS,IAAI,MAAM,KAAK,QAAQ,OAAO;AAE3D,UAAM;AAAA,MACJ;AAAA,MACA,OAAO;AAAA,MACP,UAAU;AAAA,
IACZ,IAAI,MAAM,cAAc;AAAA,MACtB,KAAK,GAAG,KAAK,OAAO,OAAO;AAAA,MAC3B,SAAS,eAAe,KAAK,OAAO,QAAQ,GAAG,QAAQ,OAAO;AAAA,MAC9D;AAAA,MACA,uBAAuB;AAAA,MACvB,2BAA2B;AAAA,QACzB;AAAA,MACF;AAAA,MACA,aAAa,QAAQ;AAAA,MACrB,OAAO,KAAK,OAAO;AAAA,IACrB,CAAC;AAED,UAAM,SAAS,SAAS,QAAQ,CAAC;AACjC,UAAM,UAAyC,CAAC;AAGhD,QACE,OAAO,QAAQ,WAAW,QAC1B,MAAM,QAAQ,OAAO,QAAQ,OAAO,GACpC;AACA,iBAAW,QAAQ,OAAO,QAAQ,SAAS;AACzC,YAAI,KAAK,SAAS,YAAY;AAC5B,gBAAM,gBAAgB,wBAAwB,KAAK,QAAQ;AAC3D,cAAI,cAAc,SAAS,GAAG;AAC5B,oBAAQ,KAAK,EAAE,MAAM,aAAa,MAAM,cAAc,CAAC;AAAA,UACzD;AAAA,QACF,WAAW,KAAK,SAAS,QAAQ;AAC/B,cAAI,KAAK,KAAK,SAAS,GAAG;AACxB,oBAAQ,KAAK,EAAE,MAAM,QAAQ,MAAM,KAAK,KAAK,CAAC;AAAA,UAChD;AAAA,QACF;AAAA,MACF;AAAA,IACF,OAAO;AAEL,YAAM,OAAO,mBAAmB,OAAO,QAAQ,OAAO;AACtD,UAAI,QAAQ,QAAQ,KAAK,SAAS,GAAG;AACnC,gBAAQ,KAAK,EAAE,MAAM,QAAQ,KAAK,CAAC;AAAA,MACrC;AAAA,IACF;AAOA,QAAI,OAAO,QAAQ,cAAc,MAAM;AACrC,iBAAW,YAAY,OAAO,QAAQ,YAAY;AAChD,gBAAQ,KAAK;AAAA,UACX,MAAM;AAAA,UACN,YAAY,SAAS;AAAA,UACrB,UAAU,SAAS,SAAS;AAAA,UAC5B,OAAO,SAAS,SAAS;AAAA,QAC3B,CAAC;AAAA,MACH;AAAA,IACF;AAEA,WAAO;AAAA,MACL;AAAA,MACA,cAAc,uBAAuB,OAAO,aAAa;AAAA,MACzD,OAAO;AAAA,QACL,aAAa,SAAS,MAAM;AAAA,QAC5B,cAAc,SAAS,MAAM;AAAA,QAC7B,aAAa,SAAS,MAAM;AAAA,MAC9B;AAAA,MACA,SAAS,EAAE,KAAK;AAAA,MAChB,UAAU;AAAA,QACR,GAAG,oBAAoB,QAAQ;AAAA,QAC/B,SAAS;AAAA,QACT,MAAM;AAAA,MACR;AAAA,MACA;AAAA,IACF;AAAA,EACF;AAAA,EAEA,MAAM,SACJ,SAC2D;AAC3D,UAAM,EAAE,MAAM,SAAS,IAAI,MAAM,KAAK,QAAQ,OAAO;AACrD,UAAM,OAAO,EAAE,GAAG,MAAM,QAAQ,KAAK;AAErC,UAAM,EAAE,iBAAiB,OAAO,SAAS,IAAI,MAAM,cAAc;AAAA,MAC/D,KAAK,GAAG,KAAK,OAAO,OAAO;AAAA,MAC3B,SAAS,eAAe,KAAK,OAAO,QAAQ,GAAG,QAAQ,OAAO;AAAA,MAC9D;AAAA,MACA,uBAAuB;AAAA,MACvB,2BAA2B;AAAA,QACzB;AAAA,MACF;AAAA,MACA,aAAa,QAAQ;AAAA,MACrB,OAAO,KAAK,OAAO;AAAA,IACrB,CAAC;AAED,QAAI,eAA4C;AAChD,UAAM,QAA8B;AAAA,MAClC,aAAa;AAAA,MACb,cAAc;AAAA,MACd,aAAa;AAAA,IACf;AAEA,QAAI,eAAe;AACnB,QAAI,aAAa;AACjB,QAAI,oBAAmC;AAEvC,UAAMC,cAAa,KAAK;AAExB,WAAO;AAAA,MACL,QAAQ,SAAS;AAAA,QACf,IAAI,gBAGF;AAAA,UACA,MAAM,YAAY;AAChB,uBAAW,QAAQ,EAAE,MAAM,gBAAgB,SAAS,CAAC;AAAA,UACvD;AAAA,UAEA,UAAU,OAAO,YAAY;AAE3B,gBAAI,QAAQ,kBAAkB;AAC5B,yBAAW,QAAQ,EAAE,MAAM,OAAO,UAAU,MAAM,SAAS,CAAC;AAAA,YAC9D;AAEA,gBAAI,CAAC,MAAM,SAAS;AAClB,yBAAW,QAAQ,EAAE,MAAM,SAAS,OAAO,MAAM,MAAM,CAAC;AACxD;AAAA,YACF;AAEA,kBAAM,QAAQ,MAAM;AAEpB,gBAAI,cAAc;AAChB,6BAAe;AAEf,yBAAW,QAAQ;AAAA,gBACjB,MAAM;AAAA,gBACN,GAAG,oBAAoB,KAAK;AAAA,cAC9B,CAAC;AAAA,YACH;AAEA,gBAAI,MAAM,SAAS,MAAM;AACvB,oBAAM,cAAc,MAAM,MAAM;AAChC,oBAAM,eAAe,MAAM,MAAM;AACjC,oBAAM,cAAc,MAAM,MAAM;AAAA,YAClC;AAEA,kBAAM,SAAS,MAAM,QAAQ,CAAC;AAC9B,kBAAM,QAAQ,OAAO;AAErB,kBAAM,cAAc,mBAAmB,MAAM,OAAO;AAEpD,gBAAI,MAAM,WAAW,QAAQ,MAAM,QAAQ,MAAM,OAAO,GAAG;AACzD,yBAAW,QAAQ,MAAM,SAAS;AAChC,oBAAI,KAAK,SAAS,YAAY;AAC5B,wBAAM,iBAAiB,wBAAwB,KAAK,QAAQ;AAC5D,sBAAI,eAAe,SAAS,GAAG;AAC7B,wBAAI,qBAAqB,MAAM;AAE7B,0BAAI,YAAY;AACd,mCAAW,QAAQ,EAAE,MAAM,YAAY,IAAI,IAAI,CAAC;AAChD,qCAAa;AAAA,sBACf;AAEA,0CAAoBA,YAAW;AAC/B,iCAAW,QAAQ;AAAA,wBACjB,MAAM;AAAA,wBACN,IAAI;AAAA,sBACN,CAAC;AAAA,oBACH;AACA,+BAAW,QAAQ;AAAA,sBACjB,MAAM;AAAA,sBACN,IAAI;AAAA,sBACJ,OAAO;AAAA,oBACT,CAAC;AAAA,kBACH;AAAA,gBACF;AAAA,cACF;AAAA,YACF;AAEA,gBAAI,eAAe,QAAQ,YAAY,SAAS,GAAG;AACjD,kBAAI,CAAC,YAAY;AAEf,oBAAI,qBAAqB,MAAM;AAC7B,6BAAW,QAAQ;AAAA,oBACjB,MAAM;AAAA,oBACN,IAAI;AAAA,kBACN,CAAC;AACD,sCAAoB;AAAA,gBACtB;AACA,2BAAW,QAAQ,EAAE,MAAM,cAAc,IAAI,IAAI,CAAC;AAClD,6BAAa;AAAA,cACf;AAEA,yBAAW,QAAQ;AAAA,gBACjB,MAAM;AAAA,gBACN,IAAI;AAAA,gBACJ,OAAO;AAAA,cACT,CAAC;AAAA,YACH;AAEA,iBAAI,+BAAO,eAAc,MAAM;AAC7B,yBAAW,YAAY,MAAM,YAAY;AACvC,sBAAM,aAAa,SAAS;AAC5B,sBAAM,WAAW,SAAS,SAAS;AACnC,sBAAM,QAAQ,SAAS,SAAS;AAEhC,2BAAW,QAAQ;AAAA,kBACjB,MAAM;AAAA,kBACN,IAAI;AAAA,kBACJ;A
AAA,gBACF,CAAC;AAED,2BAAW,QAAQ;AAAA,kBACjB,MAAM;AAAA,kBACN,IAAI;AAAA,kBACJ,OAAO;AAAA,gBACT,CAAC;AAED,2BAAW,QAAQ;AAAA,kBACjB,MAAM;AAAA,kBACN,IAAI;AAAA,gBACN,CAAC;AAED,2BAAW,QAAQ;AAAA,kBACjB,MAAM;AAAA,kBACN;AAAA,kBACA;AAAA,kBACA;AAAA,gBACF,CAAC;AAAA,cACH;AAAA,YACF;AAEA,gBAAI,OAAO,iBAAiB,MAAM;AAChC,6BAAe,uBAAuB,OAAO,aAAa;AAAA,YAC5D;AAAA,UACF;AAAA,UAEA,MAAM,YAAY;AAChB,gBAAI,qBAAqB,MAAM;AAC7B,yBAAW,QAAQ;AAAA,gBACjB,MAAM;AAAA,gBACN,IAAI;AAAA,cACN,CAAC;AAAA,YACH;AACA,gBAAI,YAAY;AACd,yBAAW,QAAQ,EAAE,MAAM,YAAY,IAAI,IAAI,CAAC;AAAA,YAClD;AAEA,uBAAW,QAAQ;AAAA,cACjB,MAAM;AAAA,cACN;AAAA,cACA;AAAA,YACF,CAAC;AAAA,UACH;AAAA,QACF,CAAC;AAAA,MACH;AAAA,MACA,SAAS,EAAE,KAAK;AAAA,MAChB,UAAU,EAAE,SAAS,gBAAgB;AAAA,IACvC;AAAA,EACF;AACF;AAEA,SAAS,wBACP,UACA;AACA,SAAO,SACJ,OAAO,WAAS,MAAM,SAAS,MAAM,EACrC,IAAI,WAAS,MAAM,IAAI,EACvB,KAAK,EAAE;AACZ;AAEA,SAAS,mBAAmB,SAA+C;AACzE,MAAI,OAAO,YAAY,UAAU;AAC/B,WAAO;AAAA,EACT;AAEA,MAAI,WAAW,MAAM;AACnB,WAAO;AAAA,EACT;AAEA,QAAM,cAAwB,CAAC;AAE/B,aAAW,SAAS,SAAS;AAC3B,UAAM,EAAE,KAAK,IAAI;AAEjB,YAAQ,MAAM;AAAA,MACZ,KAAK;AACH,oBAAY,KAAK,MAAM,IAAI;AAC3B;AAAA,MACF,KAAK;AAAA,MACL,KAAK;AAAA,MACL,KAAK;AAEH;AAAA,MACF,SAAS;AACP,cAAM,mBAA0B;AAChC,cAAM,IAAI,MAAM,qBAAqB,gBAAgB,EAAE;AAAA,MACzD;AAAA,IACF;AAAA,EACF;AAEA,SAAO,YAAY,SAAS,YAAY,KAAK,EAAE,IAAI;AACrD;AAEA,IAAM,uBAAuBC,GAC1B,MAAM;AAAA,EACLA,GAAE,OAAO;AAAA,EACTA,GAAE;AAAA,IACAA,GAAE,mBAAmB,QAAQ;AAAA,MAC3BA,GAAE,OAAO;AAAA,QACP,MAAMA,GAAE,QAAQ,MAAM;AAAA,QACtB,MAAMA,GAAE,OAAO;AAAA,MACjB,CAAC;AAAA,MACDA,GAAE,OAAO;AAAA,QACP,MAAMA,GAAE,QAAQ,WAAW;AAAA,QAC3B,WAAWA,GAAE,MAAM;AAAA,UACjBA,GAAE,OAAO;AAAA,UACTA,GAAE,OAAO;AAAA,YACP,KAAKA,GAAE,OAAO;AAAA,YACd,QAAQA,GAAE,OAAO,EAAE,SAAS;AAAA,UAC9B,CAAC;AAAA,QACH,CAAC;AAAA,MACH,CAAC;AAAA,MACDA,GAAE,OAAO;AAAA,QACP,MAAMA,GAAE,QAAQ,WAAW;AAAA,QAC3B,eAAeA,GAAE,MAAMA,GAAE,OAAO,CAAC;AAAA,MACnC,CAAC;AAAA,MACDA,GAAE,OAAO;AAAA,QACP,MAAMA,GAAE,QAAQ,UAAU;AAAA,QAC1B,UAAUA,GAAE;AAAA,UACVA,GAAE,OAAO;AAAA,YACP,MAAMA,GAAE,QAAQ,MAAM;AAAA,YACtB,MAAMA,GAAE,OAAO;AAAA,UACjB,CAAC;AAAA,QACH;AAAA,MACF,CAAC;AAAA,IACH,CAAC;AAAA,EACH;AACF,CAAC,EACA,QAAQ;AAEX,IAAM,qBAAqBA,GAAE,OAAO;AAAA,EAClC,eAAeA,GAAE,OAAO;AAAA,EACxB,mBAAmBA,GAAE,OAAO;AAAA,EAC5B,cAAcA,GAAE,OAAO;AACzB,CAAC;AAID,IAAM,4BAA4BA,GAAE,OAAO;AAAA,EACzC,IAAIA,GAAE,OAAO,EAAE,QAAQ;AAAA,EACvB,SAASA,GAAE,OAAO,EAAE,QAAQ;AAAA,EAC5B,OAAOA,GAAE,OAAO,EAAE,QAAQ;AAAA,EAC1B,SAASA,GAAE;AAAA,IACTA,GAAE,OAAO;AAAA,MACP,SAASA,GAAE,OAAO;AAAA,QAChB,MAAMA,GAAE,QAAQ,WAAW;AAAA,QAC3B,SAAS;AAAA,QACT,YAAYA,GACT;AAAA,UACCA,GAAE,OAAO;AAAA,YACP,IAAIA,GAAE,OAAO;AAAA,YACb,UAAUA,GAAE,OAAO,EAAE,MAAMA,GAAE,OAAO,GAAG,WAAWA,GAAE,OAAO,EAAE,CAAC;AAAA,UAChE,CAAC;AAAA,QACH,EACC,QAAQ;AAAA,MACb,CAAC;AAAA,MACD,OAAOA,GAAE,OAAO;AAAA,MAChB,eAAeA,GAAE,OAAO,EAAE,QAAQ;AAAA,IACpC,CAAC;AAAA,EACH;AAAA,EACA,QAAQA,GAAE,QAAQ,iBAAiB;AAAA,EACnC,OAAO;AACT,CAAC;AAID,IAAM,yBAAyBA,GAAE,OAAO;AAAA,EACtC,IAAIA,GAAE,OAAO,EAAE,QAAQ;AAAA,EACvB,SAASA,GAAE,OAAO,EAAE,QAAQ;AAAA,EAC5B,OAAOA,GAAE,OAAO,EAAE,QAAQ;AAAA,EAC1B,SAASA,GAAE;AAAA,IACTA,GAAE,OAAO;AAAA,MACP,OAAOA,GAAE,OAAO;AAAA,QACd,MAAMA,GAAE,KAAK,CAAC,WAAW,CAAC,EAAE,SAAS;AAAA,QACrC,SAAS;AAAA,QACT,YAAYA,GACT;AAAA,UACCA,GAAE,OAAO;AAAA,YACP,IAAIA,GAAE,OAAO;AAAA,YACb,UAAUA,GAAE,OAAO,EAAE,MAAMA,GAAE,OAAO,GAAG,WAAWA,GAAE,OAAO,EAAE,CAAC;AAAA,UAChE,CAAC;AAAA,QACH,EACC,QAAQ;AAAA,MACb,CAAC;AAAA,MACD,eAAeA,GAAE,OAAO,EAAE,QAAQ;AAAA,MAClC,OAAOA,GAAE,OAAO;AAAA,IAClB,CAAC;AAAA,EACH;AAAA,EACA,OAAO,mBAAmB,QAAQ;AACpC,CAAC;;;AO7kBD;AAAA,EAEE;AAAA,OACK;AACP;AAAA,EACE,kBAAAC;AAAA,EACA,6BAAAC;AAAA,EAEA,iBAAAC;AAAA,OACK;AACP,SAAS,KAAAC,UAAS;AAWX,IAAM,wBAAN,MAAgE;AAAA,EAYrE,YACE,SACA,QACA;AAdF,SAAS,uBAAuB;AAEhC,SAAS,uBA
AuB;AAChC,SAAS,wBAAwB;AAY/B,SAAK,UAAU;AACf,SAAK,SAAS;AAAA,EAChB;AAAA,EAVA,IAAI,WAAmB;AACrB,WAAO,KAAK,OAAO;AAAA,EACrB;AAAA,EAUA,MAAM,QAAQ;AAAA,IACZ;AAAA,IACA;AAAA,IACA;AAAA,EACF,GAEE;AACA,QAAI,OAAO,SAAS,KAAK,sBAAsB;AAC7C,YAAM,IAAI,mCAAmC;AAAA,QAC3C,UAAU,KAAK;AAAA,QACf,SAAS,KAAK;AAAA,QACd,sBAAsB,KAAK;AAAA,QAC3B;AAAA,MACF,CAAC;AAAA,IACH;AAEA,UAAM;AAAA,MACJ;AAAA,MACA,OAAO;AAAA,MACP;AAAA,IACF,IAAI,MAAMC,eAAc;AAAA,MACtB,KAAK,GAAG,KAAK,OAAO,OAAO;AAAA,MAC3B,SAASC,gBAAe,KAAK,OAAO,QAAQ,GAAG,OAAO;AAAA,MACtD,MAAM;AAAA,QACJ,OAAO,KAAK;AAAA,QACZ,OAAO;AAAA,QACP,iBAAiB;AAAA,MACnB;AAAA,MACA,uBAAuB;AAAA,MACvB,2BAA2BC;AAAA,QACzB;AAAA,MACF;AAAA,MACA;AAAA,MACA,OAAO,KAAK,OAAO;AAAA,IACrB,CAAC;AAED,WAAO;AAAA,MACL,YAAY,SAAS,KAAK,IAAI,UAAQ,KAAK,SAAS;AAAA,MACpD,OAAO,SAAS,QACZ,EAAE,QAAQ,SAAS,MAAM,cAAc,IACvC;AAAA,MACJ,UAAU,EAAE,SAAS,iBAAiB,MAAM,SAAS;AAAA,IACvD;AAAA,EACF;AACF;AAIA,IAAM,qCAAqCC,GAAE,OAAO;AAAA,EAClD,MAAMA,GAAE,MAAMA,GAAE,OAAO,EAAE,WAAWA,GAAE,MAAMA,GAAE,OAAO,CAAC,EAAE,CAAC,CAAC;AAAA,EAC1D,OAAOA,GAAE,OAAO,EAAE,eAAeA,GAAE,OAAO,EAAE,CAAC,EAAE,QAAQ;AACzD,CAAC;;;AC1FM,IAAM,UACX,OACI,kBACA;;;AToEC,SAAS,cACd,UAAmC,CAAC,GACnB;AA3EnB;AA4EE,QAAM,WACJ,0BAAqB,QAAQ,OAAO,MAApC,YAAyC;AAE3C,QAAM,aAAa,MACjB;AAAA,IACE;AAAA,MACE,eAAe,UAAU,WAAW;AAAA,QAClC,QAAQ,QAAQ;AAAA,QAChB,yBAAyB;AAAA,QACzB,aAAa;AAAA,MACf,CAAC,CAAC;AAAA,MACF,GAAG,QAAQ;AAAA,IACb;AAAA,IACA,kBAAkB,OAAO;AAAA,EAC3B;AAEF,QAAM,kBAAkB,CAAC,YACvB,IAAI,yBAAyB,SAAS;AAAA,IACpC,UAAU;AAAA,IACV;AAAA,IACA,SAAS;AAAA,IACT,OAAO,QAAQ;AAAA,IACf,YAAY,QAAQ;AAAA,EACtB,CAAC;AAEH,QAAM,uBAAuB,CAAC,YAC5B,IAAI,sBAAsB,SAAS;AAAA,IACjC,UAAU;AAAA,IACV;AAAA,IACA,SAAS;AAAA,IACT,OAAO,QAAQ;AAAA,EACjB,CAAC;AAEH,QAAM,WAAW,SAAU,SAA6B;AACtD,QAAI,YAAY;AACd,YAAM,IAAI;AAAA,QACR;AAAA,MACF;AAAA,IACF;AAEA,WAAO,gBAAgB,OAAO;AAAA,EAChC;AAEA,WAAS,gBAAgB;AACzB,WAAS,OAAO;AAChB,WAAS,YAAY;AACrB,WAAS,gBAAgB;AACzB,WAAS,qBAAqB;AAE9B,WAAS,aAAa,CAAC,YAAoB;AACzC,UAAM,IAAI,iBAAiB,EAAE,SAAS,WAAW,aAAa,CAAC;AAAA,EACjE;AAEA,SAAO;AACT;AAKO,IAAM,UAAU,cAAc;","names":["z","z","UnsupportedFunctionalityError","generateId","z","combineHeaders","createJsonResponseHandler","postJsonToApi","z","postJsonToApi","combineHeaders","createJsonResponseHandler","z"]}
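Note: the bundled sources above include the new `parallelToolCalls` provider option ("Add option for disabling parallel tool call in mistral", 2.1.0-beta.2). A minimal usage sketch follows; it assumes the `ai` package's `generateText`/`tool` helpers and a hypothetical `getWeather` tool, none of which are part of this diff.

import { mistral } from '@ai-sdk/mistral';
import { generateText, tool } from 'ai'; // assumption: consuming app uses the AI SDK core package
import { z } from 'zod';

const result = await generateText({
  model: mistral('mistral-large-latest'),
  tools: {
    // hypothetical example tool, not defined anywhere in this package
    getWeather: tool({
      description: 'Get the weather for a city',
      inputSchema: z.object({ city: z.string() }),
      execute: async ({ city }) => `Sunny in ${city}`,
    }),
  },
  providerOptions: {
    // maps to mistralLanguageModelOptions; when false, the model uses at most one tool per response
    mistral: { parallelToolCalls: false },
  },
  prompt: 'What is the weather in Paris and Berlin?',
});

The option defaults to true (parallel tool calls enabled), matching the schema comment in the bundled source.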
package/package.json CHANGED
@@ -1,6 +1,6 @@
1
1
  {
2
2
  "name": "@ai-sdk/mistral",
3
- "version": "2.1.0-beta.0",
3
+ "version": "2.1.0-beta.10",
4
4
  "license": "Apache-2.0",
5
5
  "sideEffects": false,
6
6
  "main": "./dist/index.js",
@@ -19,18 +19,19 @@
19
19
  }
20
20
  },
21
21
  "dependencies": {
22
- "@ai-sdk/provider": "2.1.0-beta.0",
23
- "@ai-sdk/provider-utils": "3.1.0-beta.0"
22
+ "@ai-sdk/provider": "2.1.0-beta.5",
23
+ "@ai-sdk/provider-utils": "3.1.0-beta.9"
24
24
  },
25
25
  "devDependencies": {
26
26
  "@types/node": "20.17.24",
27
27
  "tsup": "^8",
28
28
  "typescript": "5.8.3",
29
29
  "zod": "3.25.76",
30
+ "@ai-sdk/test-server": "1.0.0-beta.0",
30
31
  "@vercel/ai-tsconfig": "0.0.0"
31
32
  },
32
33
  "peerDependencies": {
33
- "zod": "^3.25.76 || ^4"
34
+ "zod": "^3.25.76 || ^4.1.8"
34
35
  },
35
36
  "engines": {
36
37
  "node": ">=18"