@ai-sdk/mistral 1.0.0-canary.1 → 1.0.0-canary.3
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/CHANGELOG.md +19 -0
- package/dist/index.d.mts +2 -40
- package/dist/index.d.ts +2 -40
- package/dist/index.js +8 -45
- package/dist/index.js.map +1 -1
- package/dist/index.mjs +7 -43
- package/dist/index.mjs.map +1 -1
- package/package.json +3 -3
package/CHANGELOG.md
CHANGED
@@ -1,5 +1,24 @@
 # @ai-sdk/mistral
 
+## 1.0.0-canary.3
+
+### Patch Changes
+
+- Updated dependencies [8426f55]
+  - @ai-sdk/provider-utils@2.0.0-canary.3
+
+## 1.0.0-canary.2
+
+### Major Changes
+
+- afe9283: chore (provider/mistral): remove Mistral facade
+
+### Patch Changes
+
+- Updated dependencies [dce4158]
+- Updated dependencies [dce4158]
+  - @ai-sdk/provider-utils@2.0.0-canary.2
+
 ## 1.0.0-canary.1
 
 ### Major Changes
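The `afe9283` entry above is the breaking change in this range: the deprecated `Mistral` facade class is removed. A minimal migration sketch, based on the declarations shown later in this diff (the model id and API-key handling are illustrative):

```ts
import { createMistral, mistral } from '@ai-sdk/mistral';

// Before (removed in 1.0.0-canary.2):
//   const client = new Mistral({ apiKey: process.env.MISTRAL_API_KEY });
//   const model = client.chat('mistral-large-latest');

// After: build a provider instance, or use the exported default `mistral`.
const provider = createMistral({ apiKey: process.env.MISTRAL_API_KEY });
const model = provider.chat('mistral-large-latest');

// The default provider instance is callable directly:
const sameModel = mistral('mistral-large-latest');
```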
package/dist/index.d.mts
CHANGED
@@ -1,4 +1,4 @@
-import {
+import { ProviderV1, LanguageModelV1, EmbeddingModelV1 } from '@ai-sdk/provider';
 import { FetchFunction } from '@ai-sdk/provider-utils';
 
 type MistralChatModelId = 'ministral-3b-latest' | 'ministral-8b-latest' | 'mistral-large-latest' | 'mistral-small-latest' | 'pixtral-12b-2409' | 'open-mistral-7b' | 'open-mixtral-8x7b' | 'open-mixtral-8x22b' | (string & {});
@@ -11,26 +11,6 @@ interface MistralChatSettings {
     safePrompt?: boolean;
 }
 
-type MistralChatConfig = {
-    provider: string;
-    baseURL: string;
-    headers: () => Record<string, string | undefined>;
-    fetch?: FetchFunction;
-};
-declare class MistralChatLanguageModel implements LanguageModelV1 {
-    readonly specificationVersion = "v1";
-    readonly defaultObjectGenerationMode = "json";
-    readonly supportsImageUrls = false;
-    readonly modelId: MistralChatModelId;
-    readonly settings: MistralChatSettings;
-    private readonly config;
-    constructor(modelId: MistralChatModelId, settings: MistralChatSettings, config: MistralChatConfig);
-    get provider(): string;
-    private getArgs;
-    doGenerate(options: Parameters<LanguageModelV1['doGenerate']>[0]): Promise<Awaited<ReturnType<LanguageModelV1['doGenerate']>>>;
-    doStream(options: Parameters<LanguageModelV1['doStream']>[0]): Promise<Awaited<ReturnType<LanguageModelV1['doStream']>>>;
-}
-
 type MistralEmbeddingModelId = 'mistral-embed' | (string & {});
 interface MistralEmbeddingSettings {
     /**
@@ -93,22 +73,4 @@ Default Mistral provider instance.
  */
 declare const mistral: MistralProvider;
 
-/**
- * @deprecated Use `createMistral` instead.
- */
-declare class Mistral {
-    /**
-     * Base URL for the Mistral API calls.
-     */
-    readonly baseURL: string;
-    readonly apiKey?: string;
-    readonly headers?: Record<string, string>;
-    /**
-     * Creates a new Mistral provider instance.
-     */
-    constructor(options?: MistralProviderSettings);
-    private get baseConfig();
-    chat(modelId: MistralChatModelId, settings?: MistralChatSettings): MistralChatLanguageModel;
-}
-
-export { Mistral, type MistralProvider, type MistralProviderSettings, createMistral, mistral };
+export { type MistralProvider, type MistralProviderSettings, createMistral, mistral };
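With this change the ESM declarations expose only the provider surface; `MistralChatConfig` and `MistralChatLanguageModel` become internal. A short sketch of the remaining API, following the `MistralProvider` interface embedded in the source map further down (the model ids come from the declared unions):

```ts
import { createMistral } from '@ai-sdk/mistral';

const provider = createMistral();

// Chat models are obtained from the provider rather than an exported class:
const chatModel = provider.languageModel('open-mistral-7b');

// Embeddings: textEmbeddingModel is the non-deprecated accessor
// (embedding/textEmbedding remain as deprecated aliases).
const embeddingModel = provider.textEmbeddingModel('mistral-embed');
```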
package/dist/index.d.ts
CHANGED
@@ -1,4 +1,4 @@
-import {
+import { ProviderV1, LanguageModelV1, EmbeddingModelV1 } from '@ai-sdk/provider';
 import { FetchFunction } from '@ai-sdk/provider-utils';
 
 type MistralChatModelId = 'ministral-3b-latest' | 'ministral-8b-latest' | 'mistral-large-latest' | 'mistral-small-latest' | 'pixtral-12b-2409' | 'open-mistral-7b' | 'open-mixtral-8x7b' | 'open-mixtral-8x22b' | (string & {});
@@ -11,26 +11,6 @@ interface MistralChatSettings {
     safePrompt?: boolean;
 }
 
-type MistralChatConfig = {
-    provider: string;
-    baseURL: string;
-    headers: () => Record<string, string | undefined>;
-    fetch?: FetchFunction;
-};
-declare class MistralChatLanguageModel implements LanguageModelV1 {
-    readonly specificationVersion = "v1";
-    readonly defaultObjectGenerationMode = "json";
-    readonly supportsImageUrls = false;
-    readonly modelId: MistralChatModelId;
-    readonly settings: MistralChatSettings;
-    private readonly config;
-    constructor(modelId: MistralChatModelId, settings: MistralChatSettings, config: MistralChatConfig);
-    get provider(): string;
-    private getArgs;
-    doGenerate(options: Parameters<LanguageModelV1['doGenerate']>[0]): Promise<Awaited<ReturnType<LanguageModelV1['doGenerate']>>>;
-    doStream(options: Parameters<LanguageModelV1['doStream']>[0]): Promise<Awaited<ReturnType<LanguageModelV1['doStream']>>>;
-}
-
 type MistralEmbeddingModelId = 'mistral-embed' | (string & {});
 interface MistralEmbeddingSettings {
     /**
@@ -93,22 +73,4 @@ Default Mistral provider instance.
  */
 declare const mistral: MistralProvider;
 
-/**
- * @deprecated Use `createMistral` instead.
- */
-declare class Mistral {
-    /**
-     * Base URL for the Mistral API calls.
-     */
-    readonly baseURL: string;
-    readonly apiKey?: string;
-    readonly headers?: Record<string, string>;
-    /**
-     * Creates a new Mistral provider instance.
-     */
-    constructor(options?: MistralProviderSettings);
-    private get baseConfig();
-    chat(modelId: MistralChatModelId, settings?: MistralChatSettings): MistralChatLanguageModel;
-}
-
-export { Mistral, type MistralProvider, type MistralProviderSettings, createMistral, mistral };
+export { type MistralProvider, type MistralProviderSettings, createMistral, mistral };
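The CommonJS declarations change identically. Since the chat model class is no longer exported, per-provider customization goes through `MistralProviderSettings`; a hedged sketch (the header value and fetch wrapper are placeholders):

```ts
import { createMistral, type MistralProviderSettings } from '@ai-sdk/mistral';

const settings: MistralProviderSettings = {
  // Falls back to the MISTRAL_API_KEY environment variable when omitted.
  apiKey: process.env.MISTRAL_API_KEY,
  // Custom headers sent with every request (example value).
  headers: { 'x-request-source': 'docs-example' },
  // Custom fetch, e.g. for logging or testing.
  fetch: (input, init) => fetch(input, init),
};

const mistralProvider = createMistral(settings);
```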
package/dist/index.js
CHANGED
@@ -20,14 +20,13 @@ var __toCommonJS = (mod) => __copyProps(__defProp({}, "__esModule", { value: tru
 // src/index.ts
 var src_exports = {};
 __export(src_exports, {
-  Mistral: () => Mistral,
   createMistral: () => createMistral,
   mistral: () => mistral
 });
 module.exports = __toCommonJS(src_exports);
 
-// src/mistral-
-var
+// src/mistral-provider.ts
+var import_provider_utils5 = require("@ai-sdk/provider-utils");
 
 // src/mistral-chat-language-model.ts
 var import_provider_utils3 = require("@ai-sdk/provider-utils");
@@ -519,44 +518,9 @@ var mistralChatChunkSchema = import_zod2.z.object({
   }).nullish()
 });
 
-// src/mistral-facade.ts
-var Mistral = class {
-  /**
-   * Creates a new Mistral provider instance.
-   */
-  constructor(options = {}) {
-    var _a;
-    this.baseURL = (_a = (0, import_provider_utils4.withoutTrailingSlash)(options.baseURL)) != null ? _a : "https://api.mistral.ai/v1";
-    this.apiKey = options.apiKey;
-    this.headers = options.headers;
-  }
-  get baseConfig() {
-    return {
-      baseURL: this.baseURL,
-      headers: () => ({
-        Authorization: `Bearer ${(0, import_provider_utils4.loadApiKey)({
-          apiKey: this.apiKey,
-          environmentVariableName: "MISTRAL_API_KEY",
-          description: "Mistral"
-        })}`,
-        ...this.headers
-      })
-    };
-  }
-  chat(modelId, settings = {}) {
-    return new MistralChatLanguageModel(modelId, settings, {
-      provider: "mistral.chat",
-      ...this.baseConfig
-    });
-  }
-};
-
-// src/mistral-provider.ts
-var import_provider_utils6 = require("@ai-sdk/provider-utils");
-
 // src/mistral-embedding-model.ts
 var import_provider3 = require("@ai-sdk/provider");
-var
+var import_provider_utils4 = require("@ai-sdk/provider-utils");
 var import_zod3 = require("zod");
 var MistralEmbeddingModel = class {
   constructor(modelId, settings, config) {
@@ -589,16 +553,16 @@ var MistralEmbeddingModel = class {
         values
       });
     }
-    const { responseHeaders, value: response } = await (0,
+    const { responseHeaders, value: response } = await (0, import_provider_utils4.postJsonToApi)({
      url: `${this.config.baseURL}/embeddings`,
-      headers: (0,
+      headers: (0, import_provider_utils4.combineHeaders)(this.config.headers(), headers),
      body: {
        model: this.modelId,
        input: values,
        encoding_format: "float"
      },
      failedResponseHandler: mistralFailedResponseHandler,
-      successfulResponseHandler: (0,
+      successfulResponseHandler: (0, import_provider_utils4.createJsonResponseHandler)(
        MistralTextEmbeddingResponseSchema
      ),
      abortSignal,
@@ -619,9 +583,9 @@ var MistralTextEmbeddingResponseSchema = import_zod3.z.object({
 // src/mistral-provider.ts
 function createMistral(options = {}) {
   var _a;
-  const baseURL = (_a = (0,
+  const baseURL = (_a = (0, import_provider_utils5.withoutTrailingSlash)(options.baseURL)) != null ? _a : "https://api.mistral.ai/v1";
   const getHeaders = () => ({
-    Authorization: `Bearer ${(0,
+    Authorization: `Bearer ${(0, import_provider_utils5.loadApiKey)({
      apiKey: options.apiKey,
      environmentVariableName: "MISTRAL_API_KEY",
      description: "Mistral"
@@ -658,7 +622,6 @@ function createMistral(options = {}) {
 var mistral = createMistral();
 // Annotate the CommonJS export names for ESM import in node:
 0 && (module.exports = {
-  Mistral,
   createMistral,
   mistral
 });
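In the CommonJS bundle, `Mistral` drops out of both the `__export` map and the `0 && (module.exports = ...)` annotation that marks export names for ESM import in Node, so the named export disappears for `require()` and `import` consumers alike. A quick check, assuming this canary version is installed:

```ts
import { createRequire } from 'node:module';

// Load the CommonJS bundle the way a require() consumer would.
const require = createRequire(import.meta.url);
const sdk = require('@ai-sdk/mistral');

console.log(typeof sdk.createMistral); // "function"
console.log(typeof sdk.mistral);       // "function" (default provider instance)
console.log('Mistral' in sdk);         // false after this release
```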
package/dist/index.js.map
CHANGED
@@ -1 +1 @@
-
{"version":3,"sources":["../src/index.ts","../src/mistral-facade.ts","../src/mistral-chat-language-model.ts","../src/convert-to-mistral-chat-messages.ts","../src/map-mistral-finish-reason.ts","../src/mistral-error.ts","../src/get-response-metadata.ts","../src/mistral-prepare-tools.ts","../src/mistral-provider.ts","../src/mistral-embedding-model.ts"],"sourcesContent":["export * from './mistral-facade';\nexport { createMistral, mistral } from './mistral-provider';\nexport type {\n MistralProvider,\n MistralProviderSettings,\n} from './mistral-provider';\n","import { loadApiKey, withoutTrailingSlash } from '@ai-sdk/provider-utils';\nimport { MistralChatLanguageModel } from './mistral-chat-language-model';\nimport {\n MistralChatModelId,\n MistralChatSettings,\n} from './mistral-chat-settings';\nimport { MistralProviderSettings } from './mistral-provider';\n\n/**\n * @deprecated Use `createMistral` instead.\n */\nexport class Mistral {\n /**\n * Base URL for the Mistral API calls.\n */\n readonly baseURL: string;\n\n readonly apiKey?: string;\n\n readonly headers?: Record<string, string>;\n\n /**\n * Creates a new Mistral provider instance.\n */\n constructor(options: MistralProviderSettings = {}) {\n this.baseURL =\n withoutTrailingSlash(options.baseURL) ?? 'https://api.mistral.ai/v1';\n\n this.apiKey = options.apiKey;\n this.headers = options.headers;\n }\n\n private get baseConfig() {\n return {\n baseURL: this.baseURL,\n headers: () => ({\n Authorization: `Bearer ${loadApiKey({\n apiKey: this.apiKey,\n environmentVariableName: 'MISTRAL_API_KEY',\n description: 'Mistral',\n })}`,\n ...this.headers,\n }),\n };\n }\n\n chat(modelId: MistralChatModelId, settings: MistralChatSettings = {}) {\n return new MistralChatLanguageModel(modelId, settings, {\n provider: 'mistral.chat',\n ...this.baseConfig,\n });\n }\n}\n","import {\n LanguageModelV1,\n LanguageModelV1CallWarning,\n LanguageModelV1FinishReason,\n LanguageModelV1StreamPart,\n} from '@ai-sdk/provider';\nimport {\n FetchFunction,\n ParseResult,\n combineHeaders,\n createEventSourceResponseHandler,\n createJsonResponseHandler,\n postJsonToApi,\n} from '@ai-sdk/provider-utils';\nimport { z } from 'zod';\nimport { convertToMistralChatMessages } from './convert-to-mistral-chat-messages';\nimport { mapMistralFinishReason } from './map-mistral-finish-reason';\nimport {\n MistralChatModelId,\n MistralChatSettings,\n} from './mistral-chat-settings';\nimport { mistralFailedResponseHandler } from './mistral-error';\nimport { getResponseMetadata } from './get-response-metadata';\nimport { prepareTools } from './mistral-prepare-tools';\n\ntype MistralChatConfig = {\n provider: string;\n baseURL: string;\n headers: () => Record<string, string | undefined>;\n fetch?: FetchFunction;\n};\n\nexport class MistralChatLanguageModel implements LanguageModelV1 {\n readonly specificationVersion = 'v1';\n readonly defaultObjectGenerationMode = 'json';\n readonly supportsImageUrls = false;\n\n readonly modelId: MistralChatModelId;\n readonly settings: MistralChatSettings;\n\n private readonly config: MistralChatConfig;\n\n constructor(\n modelId: MistralChatModelId,\n settings: MistralChatSettings,\n config: MistralChatConfig,\n ) {\n this.modelId = modelId;\n this.settings = settings;\n this.config = config;\n }\n\n get provider(): string {\n return this.config.provider;\n }\n\n private getArgs({\n mode,\n prompt,\n maxTokens,\n temperature,\n topP,\n topK,\n frequencyPenalty,\n presencePenalty,\n stopSequences,\n responseFormat,\n seed,\n }: 
Parameters<LanguageModelV1['doGenerate']>[0]) {\n const type = mode.type;\n\n const warnings: LanguageModelV1CallWarning[] = [];\n\n if (topK != null) {\n warnings.push({\n type: 'unsupported-setting',\n setting: 'topK',\n });\n }\n\n if (frequencyPenalty != null) {\n warnings.push({\n type: 'unsupported-setting',\n setting: 'frequencyPenalty',\n });\n }\n\n if (presencePenalty != null) {\n warnings.push({\n type: 'unsupported-setting',\n setting: 'presencePenalty',\n });\n }\n\n if (stopSequences != null) {\n warnings.push({\n type: 'unsupported-setting',\n setting: 'stopSequences',\n });\n }\n\n if (\n responseFormat != null &&\n responseFormat.type === 'json' &&\n responseFormat.schema != null\n ) {\n warnings.push({\n type: 'unsupported-setting',\n setting: 'responseFormat',\n details: 'JSON response format schema is not supported',\n });\n }\n\n const baseArgs = {\n // model id:\n model: this.modelId,\n\n // model specific settings:\n safe_prompt: this.settings.safePrompt,\n\n // standardized settings:\n max_tokens: maxTokens,\n temperature,\n top_p: topP,\n random_seed: seed,\n\n // response format:\n response_format:\n responseFormat?.type === 'json' ? { type: 'json_object' } : undefined,\n\n // messages:\n messages: convertToMistralChatMessages(prompt),\n };\n\n switch (type) {\n case 'regular': {\n const { tools, tool_choice, toolWarnings } = prepareTools(mode);\n\n return {\n args: { ...baseArgs, tools, tool_choice },\n warnings: [...warnings, ...toolWarnings],\n };\n }\n\n case 'object-json': {\n return {\n args: {\n ...baseArgs,\n response_format: { type: 'json_object' },\n },\n warnings,\n };\n }\n\n case 'object-tool': {\n return {\n args: {\n ...baseArgs,\n tool_choice: 'any',\n tools: [{ type: 'function', function: mode.tool }],\n },\n warnings,\n };\n }\n\n default: {\n const _exhaustiveCheck: never = type;\n throw new Error(`Unsupported type: ${_exhaustiveCheck}`);\n }\n }\n }\n\n async doGenerate(\n options: Parameters<LanguageModelV1['doGenerate']>[0],\n ): Promise<Awaited<ReturnType<LanguageModelV1['doGenerate']>>> {\n const { args, warnings } = this.getArgs(options);\n\n const { responseHeaders, value: response } = await postJsonToApi({\n url: `${this.config.baseURL}/chat/completions`,\n headers: combineHeaders(this.config.headers(), options.headers),\n body: args,\n failedResponseHandler: mistralFailedResponseHandler,\n successfulResponseHandler: createJsonResponseHandler(\n mistralChatResponseSchema,\n ),\n abortSignal: options.abortSignal,\n fetch: this.config.fetch,\n });\n\n const { messages: rawPrompt, ...rawSettings } = args;\n const choice = response.choices[0];\n let text = choice.message.content ?? undefined;\n\n // when there is a trailing assistant message, mistral will send the\n // content of that message again. we skip this repeated content to\n // avoid duplication, e.g. 
in continuation mode.\n const lastMessage = rawPrompt[rawPrompt.length - 1];\n if (\n lastMessage.role === 'assistant' &&\n text?.startsWith(lastMessage.content)\n ) {\n text = text.slice(lastMessage.content.length);\n }\n\n return {\n text,\n toolCalls: choice.message.tool_calls?.map(toolCall => ({\n toolCallType: 'function',\n toolCallId: toolCall.id,\n toolName: toolCall.function.name,\n args: toolCall.function.arguments!,\n })),\n finishReason: mapMistralFinishReason(choice.finish_reason),\n usage: {\n promptTokens: response.usage.prompt_tokens,\n completionTokens: response.usage.completion_tokens,\n },\n rawCall: { rawPrompt, rawSettings },\n rawResponse: { headers: responseHeaders },\n request: { body: JSON.stringify(args) },\n response: getResponseMetadata(response),\n warnings,\n };\n }\n\n async doStream(\n options: Parameters<LanguageModelV1['doStream']>[0],\n ): Promise<Awaited<ReturnType<LanguageModelV1['doStream']>>> {\n const { args, warnings } = this.getArgs(options);\n\n const body = { ...args, stream: true };\n\n const { responseHeaders, value: response } = await postJsonToApi({\n url: `${this.config.baseURL}/chat/completions`,\n headers: combineHeaders(this.config.headers(), options.headers),\n body,\n failedResponseHandler: mistralFailedResponseHandler,\n successfulResponseHandler: createEventSourceResponseHandler(\n mistralChatChunkSchema,\n ),\n abortSignal: options.abortSignal,\n fetch: this.config.fetch,\n });\n\n const { messages: rawPrompt, ...rawSettings } = args;\n\n let finishReason: LanguageModelV1FinishReason = 'unknown';\n let usage: { promptTokens: number; completionTokens: number } = {\n promptTokens: Number.NaN,\n completionTokens: Number.NaN,\n };\n let chunkNumber = 0;\n let trimLeadingSpace = false;\n\n return {\n stream: response.pipeThrough(\n new TransformStream<\n ParseResult<z.infer<typeof mistralChatChunkSchema>>,\n LanguageModelV1StreamPart\n >({\n transform(chunk, controller) {\n if (!chunk.success) {\n controller.enqueue({ type: 'error', error: chunk.error });\n return;\n }\n\n chunkNumber++;\n\n const value = chunk.value;\n\n if (chunkNumber === 1) {\n controller.enqueue({\n type: 'response-metadata',\n ...getResponseMetadata(value),\n });\n }\n\n if (value.usage != null) {\n usage = {\n promptTokens: value.usage.prompt_tokens,\n completionTokens: value.usage.completion_tokens,\n };\n }\n\n const choice = value.choices[0];\n\n if (choice?.finish_reason != null) {\n finishReason = mapMistralFinishReason(choice.finish_reason);\n }\n\n if (choice?.delta == null) {\n return;\n }\n\n const delta = choice.delta;\n\n // when there is a trailing assistant message, mistral will send the\n // content of that message again. we skip this repeated content to\n // avoid duplication, e.g. in continuation mode.\n if (chunkNumber <= 2) {\n const lastMessage = rawPrompt[rawPrompt.length - 1];\n\n if (\n lastMessage.role === 'assistant' &&\n delta.content === lastMessage.content.trimEnd()\n ) {\n // Mistral moves the trailing space from the prefix to the next chunk.\n // We trim the leading space to avoid duplication.\n if (delta.content.length < lastMessage.content.length) {\n trimLeadingSpace = true;\n }\n\n // skip the repeated content:\n return;\n }\n }\n\n if (delta.content != null) {\n controller.enqueue({\n type: 'text-delta',\n textDelta: trimLeadingSpace\n ? 
delta.content.trimStart()\n : delta.content,\n });\n\n trimLeadingSpace = false;\n }\n\n if (delta.tool_calls != null) {\n for (const toolCall of delta.tool_calls) {\n // mistral tool calls come in one piece:\n controller.enqueue({\n type: 'tool-call-delta',\n toolCallType: 'function',\n toolCallId: toolCall.id,\n toolName: toolCall.function.name,\n argsTextDelta: toolCall.function.arguments,\n });\n controller.enqueue({\n type: 'tool-call',\n toolCallType: 'function',\n toolCallId: toolCall.id,\n toolName: toolCall.function.name,\n args: toolCall.function.arguments,\n });\n }\n }\n },\n\n flush(controller) {\n controller.enqueue({ type: 'finish', finishReason, usage });\n },\n }),\n ),\n rawCall: { rawPrompt, rawSettings },\n rawResponse: { headers: responseHeaders },\n request: { body: JSON.stringify(body) },\n warnings,\n };\n }\n}\n\n// limited version of the schema, focussed on what is needed for the implementation\n// this approach limits breakages when the API changes and increases efficiency\nconst mistralChatResponseSchema = z.object({\n id: z.string().nullish(),\n created: z.number().nullish(),\n model: z.string().nullish(),\n choices: z.array(\n z.object({\n message: z.object({\n role: z.literal('assistant'),\n content: z.string().nullable(),\n tool_calls: z\n .array(\n z.object({\n id: z.string(),\n function: z.object({ name: z.string(), arguments: z.string() }),\n }),\n )\n .nullish(),\n }),\n index: z.number(),\n finish_reason: z.string().nullish(),\n }),\n ),\n object: z.literal('chat.completion'),\n usage: z.object({\n prompt_tokens: z.number(),\n completion_tokens: z.number(),\n }),\n});\n\n// limited version of the schema, focussed on what is needed for the implementation\n// this approach limits breakages when the API changes and increases efficiency\nconst mistralChatChunkSchema = z.object({\n id: z.string().nullish(),\n created: z.number().nullish(),\n model: z.string().nullish(),\n choices: z.array(\n z.object({\n delta: z.object({\n role: z.enum(['assistant']).optional(),\n content: z.string().nullish(),\n tool_calls: z\n .array(\n z.object({\n id: z.string(),\n function: z.object({ name: z.string(), arguments: z.string() }),\n }),\n )\n .nullish(),\n }),\n finish_reason: z.string().nullish(),\n index: z.number(),\n }),\n ),\n usage: z\n .object({\n prompt_tokens: z.number(),\n completion_tokens: z.number(),\n })\n .nullish(),\n});\n","import {\n LanguageModelV1Prompt,\n UnsupportedFunctionalityError,\n} from '@ai-sdk/provider';\nimport { convertUint8ArrayToBase64 } from '@ai-sdk/provider-utils';\nimport { MistralPrompt } from './mistral-chat-prompt';\n\nexport function convertToMistralChatMessages(\n prompt: LanguageModelV1Prompt,\n): MistralPrompt {\n const messages: MistralPrompt = [];\n\n for (let i = 0; i < prompt.length; i++) {\n const { role, content } = prompt[i];\n const isLastMessage = i === prompt.length - 1;\n\n switch (role) {\n case 'system': {\n messages.push({ role: 'system', content });\n break;\n }\n\n case 'user': {\n messages.push({\n role: 'user',\n content: content.map(part => {\n switch (part.type) {\n case 'text': {\n return { type: 'text', text: part.text };\n }\n case 'image': {\n return {\n type: 'image_url',\n image_url:\n part.image instanceof URL\n ? part.image.toString()\n : `data:${\n part.mimeType ?? 
'image/jpeg'\n };base64,${convertUint8ArrayToBase64(part.image)}`,\n };\n }\n case 'file': {\n throw new UnsupportedFunctionalityError({\n functionality: 'File content parts in user messages',\n });\n }\n }\n }),\n });\n break;\n }\n\n case 'assistant': {\n let text = '';\n const toolCalls: Array<{\n id: string;\n type: 'function';\n function: { name: string; arguments: string };\n }> = [];\n\n for (const part of content) {\n switch (part.type) {\n case 'text': {\n text += part.text;\n break;\n }\n case 'tool-call': {\n toolCalls.push({\n id: part.toolCallId,\n type: 'function',\n function: {\n name: part.toolName,\n arguments: JSON.stringify(part.args),\n },\n });\n break;\n }\n default: {\n const _exhaustiveCheck: never = part;\n throw new Error(`Unsupported part: ${_exhaustiveCheck}`);\n }\n }\n }\n\n messages.push({\n role: 'assistant',\n content: text,\n prefix: isLastMessage ? true : undefined,\n tool_calls: toolCalls.length > 0 ? toolCalls : undefined,\n });\n\n break;\n }\n case 'tool': {\n for (const toolResponse of content) {\n messages.push({\n role: 'tool',\n name: toolResponse.toolName,\n content: JSON.stringify(toolResponse.result),\n tool_call_id: toolResponse.toolCallId,\n });\n }\n break;\n }\n default: {\n const _exhaustiveCheck: never = role;\n throw new Error(`Unsupported role: ${_exhaustiveCheck}`);\n }\n }\n }\n\n return messages;\n}\n","import { LanguageModelV1FinishReason } from '@ai-sdk/provider';\n\nexport function mapMistralFinishReason(\n finishReason: string | null | undefined,\n): LanguageModelV1FinishReason {\n switch (finishReason) {\n case 'stop':\n return 'stop';\n case 'length':\n case 'model_length':\n return 'length';\n case 'tool_calls':\n return 'tool-calls';\n default:\n return 'unknown';\n }\n}\n","import { createJsonErrorResponseHandler } from '@ai-sdk/provider-utils';\nimport { z } from 'zod';\n\nconst mistralErrorDataSchema = z.object({\n object: z.literal('error'),\n message: z.string(),\n type: z.string(),\n param: z.string().nullable(),\n code: z.string().nullable(),\n});\n\nexport type MistralErrorData = z.infer<typeof mistralErrorDataSchema>;\n\nexport const mistralFailedResponseHandler = createJsonErrorResponseHandler({\n errorSchema: mistralErrorDataSchema,\n errorToMessage: data => data.message,\n});\n","export function getResponseMetadata({\n id,\n model,\n created,\n}: {\n id?: string | undefined | null;\n created?: number | undefined | null;\n model?: string | undefined | null;\n}) {\n return {\n id: id ?? undefined,\n modelId: model ?? undefined,\n timestamp: created != null ? new Date(created * 1000) : undefined,\n };\n}\n","import {\n LanguageModelV1,\n LanguageModelV1CallWarning,\n UnsupportedFunctionalityError,\n} from '@ai-sdk/provider';\n\nexport function prepareTools(\n mode: Parameters<LanguageModelV1['doGenerate']>[0]['mode'] & {\n type: 'regular';\n },\n): {\n tools:\n | Array<{\n type: 'function';\n function: {\n name: string;\n description: string | undefined;\n parameters: unknown;\n };\n }>\n | undefined;\n tool_choice:\n | { type: 'function'; function: { name: string } }\n | 'auto'\n | 'none'\n | 'any'\n | undefined;\n toolWarnings: LanguageModelV1CallWarning[];\n} {\n // when the tools array is empty, change it to undefined to prevent errors:\n const tools = mode.tools?.length ? 
mode.tools : undefined;\n const toolWarnings: LanguageModelV1CallWarning[] = [];\n\n if (tools == null) {\n return { tools: undefined, tool_choice: undefined, toolWarnings };\n }\n\n const mistralTools: Array<{\n type: 'function';\n function: {\n name: string;\n description: string | undefined;\n parameters: unknown;\n };\n }> = [];\n\n for (const tool of tools) {\n if (tool.type === 'provider-defined') {\n toolWarnings.push({ type: 'unsupported-tool', tool });\n } else {\n mistralTools.push({\n type: 'function',\n function: {\n name: tool.name,\n description: tool.description,\n parameters: tool.parameters,\n },\n });\n }\n }\n\n const toolChoice = mode.toolChoice;\n\n if (toolChoice == null) {\n return { tools: mistralTools, tool_choice: undefined, toolWarnings };\n }\n\n const type = toolChoice.type;\n\n switch (type) {\n case 'auto':\n case 'none':\n return { tools: mistralTools, tool_choice: type, toolWarnings };\n case 'required':\n return { tools: mistralTools, tool_choice: 'any', toolWarnings };\n\n // mistral does not support tool mode directly,\n // so we filter the tools and force the tool choice through 'any'\n case 'tool':\n return {\n tools: mistralTools.filter(\n tool => tool.function.name === toolChoice.toolName,\n ),\n tool_choice: 'any',\n toolWarnings,\n };\n default: {\n const _exhaustiveCheck: never = type;\n throw new UnsupportedFunctionalityError({\n functionality: `Unsupported tool choice type: ${_exhaustiveCheck}`,\n });\n }\n }\n}\n","import {\n EmbeddingModelV1,\n LanguageModelV1,\n ProviderV1,\n} from '@ai-sdk/provider';\nimport {\n FetchFunction,\n loadApiKey,\n withoutTrailingSlash,\n} from '@ai-sdk/provider-utils';\nimport { MistralChatLanguageModel } from './mistral-chat-language-model';\nimport {\n MistralChatModelId,\n MistralChatSettings,\n} from './mistral-chat-settings';\nimport { MistralEmbeddingModel } from './mistral-embedding-model';\nimport {\n MistralEmbeddingModelId,\n MistralEmbeddingSettings,\n} from './mistral-embedding-settings';\n\nexport interface MistralProvider extends ProviderV1 {\n (\n modelId: MistralChatModelId,\n settings?: MistralChatSettings,\n ): LanguageModelV1;\n\n /**\nCreates a model for text generation.\n*/\n languageModel(\n modelId: MistralChatModelId,\n settings?: MistralChatSettings,\n ): LanguageModelV1;\n\n /**\nCreates a model for text generation.\n*/\n chat(\n modelId: MistralChatModelId,\n settings?: MistralChatSettings,\n ): LanguageModelV1;\n\n /**\n@deprecated Use `textEmbeddingModel()` instead.\n */\n embedding(\n modelId: MistralEmbeddingModelId,\n settings?: MistralEmbeddingSettings,\n ): EmbeddingModelV1<string>;\n\n /**\n@deprecated Use `textEmbeddingModel()` instead.\n */\n textEmbedding(\n modelId: MistralEmbeddingModelId,\n settings?: MistralEmbeddingSettings,\n ): EmbeddingModelV1<string>;\n\n textEmbeddingModel: (\n modelId: MistralEmbeddingModelId,\n settings?: MistralEmbeddingSettings,\n ) => EmbeddingModelV1<string>;\n}\n\nexport interface MistralProviderSettings {\n /**\nUse a different URL prefix for API calls, e.g. to use proxy servers.\nThe default prefix is `https://api.mistral.ai/v1`.\n */\n baseURL?: string;\n\n /**\nAPI key that is being send using the `Authorization` header.\nIt defaults to the `MISTRAL_API_KEY` environment variable.\n */\n apiKey?: string;\n\n /**\nCustom headers to include in the requests.\n */\n headers?: Record<string, string>;\n\n /**\nCustom fetch implementation. You can use it as a middleware to intercept requests,\nor to provide a custom fetch implementation for e.g. 
testing.\n */\n fetch?: FetchFunction;\n}\n\n/**\nCreate a Mistral AI provider instance.\n */\nexport function createMistral(\n options: MistralProviderSettings = {},\n): MistralProvider {\n const baseURL =\n withoutTrailingSlash(options.baseURL) ?? 'https://api.mistral.ai/v1';\n\n const getHeaders = () => ({\n Authorization: `Bearer ${loadApiKey({\n apiKey: options.apiKey,\n environmentVariableName: 'MISTRAL_API_KEY',\n description: 'Mistral',\n })}`,\n ...options.headers,\n });\n\n const createChatModel = (\n modelId: MistralChatModelId,\n settings: MistralChatSettings = {},\n ) =>\n new MistralChatLanguageModel(modelId, settings, {\n provider: 'mistral.chat',\n baseURL,\n headers: getHeaders,\n fetch: options.fetch,\n });\n\n const createEmbeddingModel = (\n modelId: MistralEmbeddingModelId,\n settings: MistralEmbeddingSettings = {},\n ) =>\n new MistralEmbeddingModel(modelId, settings, {\n provider: 'mistral.embedding',\n baseURL,\n headers: getHeaders,\n fetch: options.fetch,\n });\n\n const provider = function (\n modelId: MistralChatModelId,\n settings?: MistralChatSettings,\n ) {\n if (new.target) {\n throw new Error(\n 'The Mistral model function cannot be called with the new keyword.',\n );\n }\n\n return createChatModel(modelId, settings);\n };\n\n provider.languageModel = createChatModel;\n provider.chat = createChatModel;\n provider.embedding = createEmbeddingModel;\n provider.textEmbedding = createEmbeddingModel;\n provider.textEmbeddingModel = createEmbeddingModel;\n\n return provider as MistralProvider;\n}\n\n/**\nDefault Mistral provider instance.\n */\nexport const mistral = createMistral();\n","import {\n EmbeddingModelV1,\n TooManyEmbeddingValuesForCallError,\n} from '@ai-sdk/provider';\nimport {\n combineHeaders,\n createJsonResponseHandler,\n FetchFunction,\n postJsonToApi,\n} from '@ai-sdk/provider-utils';\nimport { z } from 'zod';\nimport {\n MistralEmbeddingModelId,\n MistralEmbeddingSettings,\n} from './mistral-embedding-settings';\nimport { mistralFailedResponseHandler } from './mistral-error';\n\ntype MistralEmbeddingConfig = {\n provider: string;\n baseURL: string;\n headers: () => Record<string, string | undefined>;\n fetch?: FetchFunction;\n};\n\nexport class MistralEmbeddingModel implements EmbeddingModelV1<string> {\n readonly specificationVersion = 'v1';\n readonly modelId: MistralEmbeddingModelId;\n\n private readonly config: MistralEmbeddingConfig;\n private readonly settings: MistralEmbeddingSettings;\n\n get provider(): string {\n return this.config.provider;\n }\n\n get maxEmbeddingsPerCall(): number {\n return this.settings.maxEmbeddingsPerCall ?? 32;\n }\n\n get supportsParallelCalls(): boolean {\n // Parallel calls are technically possible,\n // but I have been hitting rate limits and disable them for now.\n return this.settings.supportsParallelCalls ?? 
false;\n }\n\n constructor(\n modelId: MistralEmbeddingModelId,\n settings: MistralEmbeddingSettings,\n config: MistralEmbeddingConfig,\n ) {\n this.modelId = modelId;\n this.settings = settings;\n this.config = config;\n }\n\n async doEmbed({\n values,\n abortSignal,\n headers,\n }: Parameters<EmbeddingModelV1<string>['doEmbed']>[0]): Promise<\n Awaited<ReturnType<EmbeddingModelV1<string>['doEmbed']>>\n > {\n if (values.length > this.maxEmbeddingsPerCall) {\n throw new TooManyEmbeddingValuesForCallError({\n provider: this.provider,\n modelId: this.modelId,\n maxEmbeddingsPerCall: this.maxEmbeddingsPerCall,\n values,\n });\n }\n\n const { responseHeaders, value: response } = await postJsonToApi({\n url: `${this.config.baseURL}/embeddings`,\n headers: combineHeaders(this.config.headers(), headers),\n body: {\n model: this.modelId,\n input: values,\n encoding_format: 'float',\n },\n failedResponseHandler: mistralFailedResponseHandler,\n successfulResponseHandler: createJsonResponseHandler(\n MistralTextEmbeddingResponseSchema,\n ),\n abortSignal,\n fetch: this.config.fetch,\n });\n\n return {\n embeddings: response.data.map(item => item.embedding),\n usage: response.usage\n ? { tokens: response.usage.prompt_tokens }\n : undefined,\n rawResponse: { headers: responseHeaders },\n };\n }\n}\n\n// minimal version of the schema, focussed on what is needed for the implementation\n// this approach limits breakages when the API changes and increases efficiency\nconst MistralTextEmbeddingResponseSchema = z.object({\n data: z.array(z.object({ embedding: z.array(z.number()) })),\n usage: z.object({ prompt_tokens: z.number() }).nullish(),\n});\n"],"mappings":";;;;;;;;;;;;;;;;;;;;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;;;ACAA,IAAAA,yBAAiD;;;ACMjD,IAAAC,yBAOO;AACP,IAAAC,cAAkB;;;ACdlB,sBAGO;AACP,4BAA0C;AAGnC,SAAS,6BACd,QACe;AACf,QAAM,WAA0B,CAAC;AAEjC,WAAS,IAAI,GAAG,IAAI,OAAO,QAAQ,KAAK;AACtC,UAAM,EAAE,MAAM,QAAQ,IAAI,OAAO,CAAC;AAClC,UAAM,gBAAgB,MAAM,OAAO,SAAS;AAE5C,YAAQ,MAAM;AAAA,MACZ,KAAK,UAAU;AACb,iBAAS,KAAK,EAAE,MAAM,UAAU,QAAQ,CAAC;AACzC;AAAA,MACF;AAAA,MAEA,KAAK,QAAQ;AACX,iBAAS,KAAK;AAAA,UACZ,MAAM;AAAA,UACN,SAAS,QAAQ,IAAI,UAAQ;AAzBvC;AA0BY,oBAAQ,KAAK,MAAM;AAAA,cACjB,KAAK,QAAQ;AACX,uBAAO,EAAE,MAAM,QAAQ,MAAM,KAAK,KAAK;AAAA,cACzC;AAAA,cACA,KAAK,SAAS;AACZ,uBAAO;AAAA,kBACL,MAAM;AAAA,kBACN,WACE,KAAK,iBAAiB,MAClB,KAAK,MAAM,SAAS,IACpB,SACE,UAAK,aAAL,YAAiB,YACnB,eAAW,iDAA0B,KAAK,KAAK,CAAC;AAAA,gBACxD;AAAA,cACF;AAAA,cACA,KAAK,QAAQ;AACX,sBAAM,IAAI,8CAA8B;AAAA,kBACtC,eAAe;AAAA,gBACjB,CAAC;AAAA,cACH;AAAA,YACF;AAAA,UACF,CAAC;AAAA,QACH,CAAC;AACD;AAAA,MACF;AAAA,MAEA,KAAK,aAAa;AAChB,YAAI,OAAO;AACX,cAAM,YAID,CAAC;AAEN,mBAAW,QAAQ,SAAS;AAC1B,kBAAQ,KAAK,MAAM;AAAA,YACjB,KAAK,QAAQ;AACX,sBAAQ,KAAK;AACb;AAAA,YACF;AAAA,YACA,KAAK,aAAa;AAChB,wBAAU,KAAK;AAAA,gBACb,IAAI,KAAK;AAAA,gBACT,MAAM;AAAA,gBACN,UAAU;AAAA,kBACR,MAAM,KAAK;AAAA,kBACX,WAAW,KAAK,UAAU,KAAK,IAAI;AAAA,gBACrC;AAAA,cACF,CAAC;AACD;AAAA,YACF;AAAA,YACA,SAAS;AACP,oBAAM,mBAA0B;AAChC,oBAAM,IAAI,MAAM,qBAAqB,gBAAgB,EAAE;AAAA,YACzD;AAAA,UACF;AAAA,QACF;AAEA,iBAAS,KAAK;AAAA,UACZ,MAAM;AAAA,UACN,SAAS;AAAA,UACT,QAAQ,gBAAgB,OAAO;AAAA,UAC/B,YAAY,UAAU,SAAS,IAAI,YAAY;AAAA,QACjD,CAAC;AAED;AAAA,MACF;AAAA,MACA,KAAK,QAAQ;AACX,mBAAW,gBAAgB,SAAS;AAClC,mBAAS,KAAK;AAAA,YACZ,MAAM;AAAA,YACN,MAAM,aAAa;AAAA,YACnB,SAAS,KAAK,UAAU,aAAa,MAAM;AAAA,YAC3C,cAAc,aAAa;AAAA,UAC7B,CAAC;AAAA,QACH;AACA;AAAA,MACF;AAAA,MACA,SAAS;AACP,cAAM,mBAA0B;AAChC,cAAM,IAAI,MAAM,qBAAqB,gBAAgB,EAAE;AAAA,MACzD;AAAA,IACF;AAAA,EACF;AAEA,SAAO;AACT;;;AC9GO,SAAS,uBACd,cAC6B;AAC7B,UAAQ,cAAc;AAAA,IACpB,KAAK;AACH,aAAO;AAAA,IACT,KAAK;AAAA,IACL,KA
AK;AACH,aAAO;AAAA,IACT,KAAK;AACH,aAAO;AAAA,IACT;AACE,aAAO;AAAA,EACX;AACF;;;AChBA,IAAAC,yBAA+C;AAC/C,iBAAkB;AAElB,IAAM,yBAAyB,aAAE,OAAO;AAAA,EACtC,QAAQ,aAAE,QAAQ,OAAO;AAAA,EACzB,SAAS,aAAE,OAAO;AAAA,EAClB,MAAM,aAAE,OAAO;AAAA,EACf,OAAO,aAAE,OAAO,EAAE,SAAS;AAAA,EAC3B,MAAM,aAAE,OAAO,EAAE,SAAS;AAC5B,CAAC;AAIM,IAAM,mCAA+B,uDAA+B;AAAA,EACzE,aAAa;AAAA,EACb,gBAAgB,UAAQ,KAAK;AAC/B,CAAC;;;AChBM,SAAS,oBAAoB;AAAA,EAClC;AAAA,EACA;AAAA,EACA;AACF,GAIG;AACD,SAAO;AAAA,IACL,IAAI,kBAAM;AAAA,IACV,SAAS,wBAAS;AAAA,IAClB,WAAW,WAAW,OAAO,IAAI,KAAK,UAAU,GAAI,IAAI;AAAA,EAC1D;AACF;;;ACdA,IAAAC,mBAIO;AAEA,SAAS,aACd,MAqBA;AA5BF;AA8BE,QAAM,UAAQ,UAAK,UAAL,mBAAY,UAAS,KAAK,QAAQ;AAChD,QAAM,eAA6C,CAAC;AAEpD,MAAI,SAAS,MAAM;AACjB,WAAO,EAAE,OAAO,QAAW,aAAa,QAAW,aAAa;AAAA,EAClE;AAEA,QAAM,eAOD,CAAC;AAEN,aAAW,QAAQ,OAAO;AACxB,QAAI,KAAK,SAAS,oBAAoB;AACpC,mBAAa,KAAK,EAAE,MAAM,oBAAoB,KAAK,CAAC;AAAA,IACtD,OAAO;AACL,mBAAa,KAAK;AAAA,QAChB,MAAM;AAAA,QACN,UAAU;AAAA,UACR,MAAM,KAAK;AAAA,UACX,aAAa,KAAK;AAAA,UAClB,YAAY,KAAK;AAAA,QACnB;AAAA,MACF,CAAC;AAAA,IACH;AAAA,EACF;AAEA,QAAM,aAAa,KAAK;AAExB,MAAI,cAAc,MAAM;AACtB,WAAO,EAAE,OAAO,cAAc,aAAa,QAAW,aAAa;AAAA,EACrE;AAEA,QAAM,OAAO,WAAW;AAExB,UAAQ,MAAM;AAAA,IACZ,KAAK;AAAA,IACL,KAAK;AACH,aAAO,EAAE,OAAO,cAAc,aAAa,MAAM,aAAa;AAAA,IAChE,KAAK;AACH,aAAO,EAAE,OAAO,cAAc,aAAa,OAAO,aAAa;AAAA,IAIjE,KAAK;AACH,aAAO;AAAA,QACL,OAAO,aAAa;AAAA,UAClB,UAAQ,KAAK,SAAS,SAAS,WAAW;AAAA,QAC5C;AAAA,QACA,aAAa;AAAA,QACb;AAAA,MACF;AAAA,IACF,SAAS;AACP,YAAM,mBAA0B;AAChC,YAAM,IAAI,+CAA8B;AAAA,QACtC,eAAe,iCAAiC,gBAAgB;AAAA,MAClE,CAAC;AAAA,IACH;AAAA,EACF;AACF;;;AL7DO,IAAM,2BAAN,MAA0D;AAAA,EAU/D,YACE,SACA,UACA,QACA;AAbF,SAAS,uBAAuB;AAChC,SAAS,8BAA8B;AACvC,SAAS,oBAAoB;AAY3B,SAAK,UAAU;AACf,SAAK,WAAW;AAChB,SAAK,SAAS;AAAA,EAChB;AAAA,EAEA,IAAI,WAAmB;AACrB,WAAO,KAAK,OAAO;AAAA,EACrB;AAAA,EAEQ,QAAQ;AAAA,IACd;AAAA,IACA;AAAA,IACA;AAAA,IACA;AAAA,IACA;AAAA,IACA;AAAA,IACA;AAAA,IACA;AAAA,IACA;AAAA,IACA;AAAA,IACA;AAAA,EACF,GAAiD;AAC/C,UAAM,OAAO,KAAK;AAElB,UAAM,WAAyC,CAAC;AAEhD,QAAI,QAAQ,MAAM;AAChB,eAAS,KAAK;AAAA,QACZ,MAAM;AAAA,QACN,SAAS;AAAA,MACX,CAAC;AAAA,IACH;AAEA,QAAI,oBAAoB,MAAM;AAC5B,eAAS,KAAK;AAAA,QACZ,MAAM;AAAA,QACN,SAAS;AAAA,MACX,CAAC;AAAA,IACH;AAEA,QAAI,mBAAmB,MAAM;AAC3B,eAAS,KAAK;AAAA,QACZ,MAAM;AAAA,QACN,SAAS;AAAA,MACX,CAAC;AAAA,IACH;AAEA,QAAI,iBAAiB,MAAM;AACzB,eAAS,KAAK;AAAA,QACZ,MAAM;AAAA,QACN,SAAS;AAAA,MACX,CAAC;AAAA,IACH;AAEA,QACE,kBAAkB,QAClB,eAAe,SAAS,UACxB,eAAe,UAAU,MACzB;AACA,eAAS,KAAK;AAAA,QACZ,MAAM;AAAA,QACN,SAAS;AAAA,QACT,SAAS;AAAA,MACX,CAAC;AAAA,IACH;AAEA,UAAM,WAAW;AAAA;AAAA,MAEf,OAAO,KAAK;AAAA;AAAA,MAGZ,aAAa,KAAK,SAAS;AAAA;AAAA,MAG3B,YAAY;AAAA,MACZ;AAAA,MACA,OAAO;AAAA,MACP,aAAa;AAAA;AAAA,MAGb,kBACE,iDAAgB,UAAS,SAAS,EAAE,MAAM,cAAc,IAAI;AAAA;AAAA,MAG9D,UAAU,6BAA6B,MAAM;AAAA,IAC/C;AAEA,YAAQ,MAAM;AAAA,MACZ,KAAK,WAAW;AACd,cAAM,EAAE,OAAO,aAAa,aAAa,IAAI,aAAa,IAAI;AAE9D,eAAO;AAAA,UACL,MAAM,EAAE,GAAG,UAAU,OAAO,YAAY;AAAA,UACxC,UAAU,CAAC,GAAG,UAAU,GAAG,YAAY;AAAA,QACzC;AAAA,MACF;AAAA,MAEA,KAAK,eAAe;AAClB,eAAO;AAAA,UACL,MAAM;AAAA,YACJ,GAAG;AAAA,YACH,iBAAiB,EAAE,MAAM,cAAc;AAAA,UACzC;AAAA,UACA;AAAA,QACF;AAAA,MACF;AAAA,MAEA,KAAK,eAAe;AAClB,eAAO;AAAA,UACL,MAAM;AAAA,YACJ,GAAG;AAAA,YACH,aAAa;AAAA,YACb,OAAO,CAAC,EAAE,MAAM,YAAY,UAAU,KAAK,KAAK,CAAC;AAAA,UACnD;AAAA,UACA;AAAA,QACF;AAAA,MACF;AAAA,MAEA,SAAS;AACP,cAAM,mBAA0B;AAChC,cAAM,IAAI,MAAM,qBAAqB,gBAAgB,EAAE;AAAA,MACzD;AAAA,IACF;AAAA,EACF;AAAA,EAEA,MAAM,WACJ,SAC6D;AA9KjE;AA+KI,UAAM,EAAE,MAAM,SAAS,IAAI,KAAK,QAAQ,OAAO;AAE/C,UAAM,EAAE,iBAAiB,OAAO,SAAS,IAAI,UAAM,sCAAc;AAAA,MAC/D,KAAK,GAAG,KAAK,OAAO,OAAO;AAAA,MAC3B,aAAS,uCAAe,KAAK,OAAO,QAAQ,GAAG,QAAQ,OAAO;AAAA,MAC9D,MAAM;AAAA,MACN,uBAAuB;AAAA,MACvB,+BAA2B;A
AAA,QACzB;AAAA,MACF;AAAA,MACA,aAAa,QAAQ;AAAA,MACrB,OAAO,KAAK,OAAO;AAAA,IACrB,CAAC;AAED,UAAM,EAAE,UAAU,WAAW,GAAG,YAAY,IAAI;AAChD,UAAM,SAAS,SAAS,QAAQ,CAAC;AACjC,QAAI,QAAO,YAAO,QAAQ,YAAf,YAA0B;AAKrC,UAAM,cAAc,UAAU,UAAU,SAAS,CAAC;AAClD,QACE,YAAY,SAAS,gBACrB,6BAAM,WAAW,YAAY,WAC7B;AACA,aAAO,KAAK,MAAM,YAAY,QAAQ,MAAM;AAAA,IAC9C;AAEA,WAAO;AAAA,MACL;AAAA,MACA,YAAW,YAAO,QAAQ,eAAf,mBAA2B,IAAI,eAAa;AAAA,QACrD,cAAc;AAAA,QACd,YAAY,SAAS;AAAA,QACrB,UAAU,SAAS,SAAS;AAAA,QAC5B,MAAM,SAAS,SAAS;AAAA,MAC1B;AAAA,MACA,cAAc,uBAAuB,OAAO,aAAa;AAAA,MACzD,OAAO;AAAA,QACL,cAAc,SAAS,MAAM;AAAA,QAC7B,kBAAkB,SAAS,MAAM;AAAA,MACnC;AAAA,MACA,SAAS,EAAE,WAAW,YAAY;AAAA,MAClC,aAAa,EAAE,SAAS,gBAAgB;AAAA,MACxC,SAAS,EAAE,MAAM,KAAK,UAAU,IAAI,EAAE;AAAA,MACtC,UAAU,oBAAoB,QAAQ;AAAA,MACtC;AAAA,IACF;AAAA,EACF;AAAA,EAEA,MAAM,SACJ,SAC2D;AAC3D,UAAM,EAAE,MAAM,SAAS,IAAI,KAAK,QAAQ,OAAO;AAE/C,UAAM,OAAO,EAAE,GAAG,MAAM,QAAQ,KAAK;AAErC,UAAM,EAAE,iBAAiB,OAAO,SAAS,IAAI,UAAM,sCAAc;AAAA,MAC/D,KAAK,GAAG,KAAK,OAAO,OAAO;AAAA,MAC3B,aAAS,uCAAe,KAAK,OAAO,QAAQ,GAAG,QAAQ,OAAO;AAAA,MAC9D;AAAA,MACA,uBAAuB;AAAA,MACvB,+BAA2B;AAAA,QACzB;AAAA,MACF;AAAA,MACA,aAAa,QAAQ;AAAA,MACrB,OAAO,KAAK,OAAO;AAAA,IACrB,CAAC;AAED,UAAM,EAAE,UAAU,WAAW,GAAG,YAAY,IAAI;AAEhD,QAAI,eAA4C;AAChD,QAAI,QAA4D;AAAA,MAC9D,cAAc,OAAO;AAAA,MACrB,kBAAkB,OAAO;AAAA,IAC3B;AACA,QAAI,cAAc;AAClB,QAAI,mBAAmB;AAEvB,WAAO;AAAA,MACL,QAAQ,SAAS;AAAA,QACf,IAAI,gBAGF;AAAA,UACA,UAAU,OAAO,YAAY;AAC3B,gBAAI,CAAC,MAAM,SAAS;AAClB,yBAAW,QAAQ,EAAE,MAAM,SAAS,OAAO,MAAM,MAAM,CAAC;AACxD;AAAA,YACF;AAEA;AAEA,kBAAM,QAAQ,MAAM;AAEpB,gBAAI,gBAAgB,GAAG;AACrB,yBAAW,QAAQ;AAAA,gBACjB,MAAM;AAAA,gBACN,GAAG,oBAAoB,KAAK;AAAA,cAC9B,CAAC;AAAA,YACH;AAEA,gBAAI,MAAM,SAAS,MAAM;AACvB,sBAAQ;AAAA,gBACN,cAAc,MAAM,MAAM;AAAA,gBAC1B,kBAAkB,MAAM,MAAM;AAAA,cAChC;AAAA,YACF;AAEA,kBAAM,SAAS,MAAM,QAAQ,CAAC;AAE9B,iBAAI,iCAAQ,kBAAiB,MAAM;AACjC,6BAAe,uBAAuB,OAAO,aAAa;AAAA,YAC5D;AAEA,iBAAI,iCAAQ,UAAS,MAAM;AACzB;AAAA,YACF;AAEA,kBAAM,QAAQ,OAAO;AAKrB,gBAAI,eAAe,GAAG;AACpB,oBAAM,cAAc,UAAU,UAAU,SAAS,CAAC;AAElD,kBACE,YAAY,SAAS,eACrB,MAAM,YAAY,YAAY,QAAQ,QAAQ,GAC9C;AAGA,oBAAI,MAAM,QAAQ,SAAS,YAAY,QAAQ,QAAQ;AACrD,qCAAmB;AAAA,gBACrB;AAGA;AAAA,cACF;AAAA,YACF;AAEA,gBAAI,MAAM,WAAW,MAAM;AACzB,yBAAW,QAAQ;AAAA,gBACjB,MAAM;AAAA,gBACN,WAAW,mBACP,MAAM,QAAQ,UAAU,IACxB,MAAM;AAAA,cACZ,CAAC;AAED,iCAAmB;AAAA,YACrB;AAEA,gBAAI,MAAM,cAAc,MAAM;AAC5B,yBAAW,YAAY,MAAM,YAAY;AAEvC,2BAAW,QAAQ;AAAA,kBACjB,MAAM;AAAA,kBACN,cAAc;AAAA,kBACd,YAAY,SAAS;AAAA,kBACrB,UAAU,SAAS,SAAS;AAAA,kBAC5B,eAAe,SAAS,SAAS;AAAA,gBACnC,CAAC;AACD,2BAAW,QAAQ;AAAA,kBACjB,MAAM;AAAA,kBACN,cAAc;AAAA,kBACd,YAAY,SAAS;AAAA,kBACrB,UAAU,SAAS,SAAS;AAAA,kBAC5B,MAAM,SAAS,SAAS;AAAA,gBAC1B,CAAC;AAAA,cACH;AAAA,YACF;AAAA,UACF;AAAA,UAEA,MAAM,YAAY;AAChB,uBAAW,QAAQ,EAAE,MAAM,UAAU,cAAc,MAAM,CAAC;AAAA,UAC5D;AAAA,QACF,CAAC;AAAA,MACH;AAAA,MACA,SAAS,EAAE,WAAW,YAAY;AAAA,MAClC,aAAa,EAAE,SAAS,gBAAgB;AAAA,MACxC,SAAS,EAAE,MAAM,KAAK,UAAU,IAAI,EAAE;AAAA,MACtC;AAAA,IACF;AAAA,EACF;AACF;AAIA,IAAM,4BAA4B,cAAE,OAAO;AAAA,EACzC,IAAI,cAAE,OAAO,EAAE,QAAQ;AAAA,EACvB,SAAS,cAAE,OAAO,EAAE,QAAQ;AAAA,EAC5B,OAAO,cAAE,OAAO,EAAE,QAAQ;AAAA,EAC1B,SAAS,cAAE;AAAA,IACT,cAAE,OAAO;AAAA,MACP,SAAS,cAAE,OAAO;AAAA,QAChB,MAAM,cAAE,QAAQ,WAAW;AAAA,QAC3B,SAAS,cAAE,OAAO,EAAE,SAAS;AAAA,QAC7B,YAAY,cACT;AAAA,UACC,cAAE,OAAO;AAAA,YACP,IAAI,cAAE,OAAO;AAAA,YACb,UAAU,cAAE,OAAO,EAAE,MAAM,cAAE,OAAO,GAAG,WAAW,cAAE,OAAO,EAAE,CAAC;AAAA,UAChE,CAAC;AAAA,QACH,EACC,QAAQ;AAAA,MACb,CAAC;AAAA,MACD,OAAO,cAAE,OAAO;AAAA,MAChB,eAAe,cAAE,OAAO,EAAE,QAAQ;AAAA,IACpC,CAAC;AAAA,EACH;AAAA,EACA,QAAQ,cAAE,QAAQ,iBAAiB;AAAA,EACnC,OAAO,cAAE,OAAO;AAAA,IACd,eAAe,cAAE,OAAO;AAAA,IACxB,mBAAmB,cAAE,OAAO;AAAA,EAC9B,CAAC;AACH,CAAC;AAID
,IAAM,yBAAyB,cAAE,OAAO;AAAA,EACtC,IAAI,cAAE,OAAO,EAAE,QAAQ;AAAA,EACvB,SAAS,cAAE,OAAO,EAAE,QAAQ;AAAA,EAC5B,OAAO,cAAE,OAAO,EAAE,QAAQ;AAAA,EAC1B,SAAS,cAAE;AAAA,IACT,cAAE,OAAO;AAAA,MACP,OAAO,cAAE,OAAO;AAAA,QACd,MAAM,cAAE,KAAK,CAAC,WAAW,CAAC,EAAE,SAAS;AAAA,QACrC,SAAS,cAAE,OAAO,EAAE,QAAQ;AAAA,QAC5B,YAAY,cACT;AAAA,UACC,cAAE,OAAO;AAAA,YACP,IAAI,cAAE,OAAO;AAAA,YACb,UAAU,cAAE,OAAO,EAAE,MAAM,cAAE,OAAO,GAAG,WAAW,cAAE,OAAO,EAAE,CAAC;AAAA,UAChE,CAAC;AAAA,QACH,EACC,QAAQ;AAAA,MACb,CAAC;AAAA,MACD,eAAe,cAAE,OAAO,EAAE,QAAQ;AAAA,MAClC,OAAO,cAAE,OAAO;AAAA,IAClB,CAAC;AAAA,EACH;AAAA,EACA,OAAO,cACJ,OAAO;AAAA,IACN,eAAe,cAAE,OAAO;AAAA,IACxB,mBAAmB,cAAE,OAAO;AAAA,EAC9B,CAAC,EACA,QAAQ;AACb,CAAC;;;AD5ZM,IAAM,UAAN,MAAc;AAAA;AAAA;AAAA;AAAA,EAanB,YAAY,UAAmC,CAAC,GAAG;AAxBrD;AAyBI,SAAK,WACH,sDAAqB,QAAQ,OAAO,MAApC,YAAyC;AAE3C,SAAK,SAAS,QAAQ;AACtB,SAAK,UAAU,QAAQ;AAAA,EACzB;AAAA,EAEA,IAAY,aAAa;AACvB,WAAO;AAAA,MACL,SAAS,KAAK;AAAA,MACd,SAAS,OAAO;AAAA,QACd,eAAe,cAAU,mCAAW;AAAA,UAClC,QAAQ,KAAK;AAAA,UACb,yBAAyB;AAAA,UACzB,aAAa;AAAA,QACf,CAAC,CAAC;AAAA,QACF,GAAG,KAAK;AAAA,MACV;AAAA,IACF;AAAA,EACF;AAAA,EAEA,KAAK,SAA6B,WAAgC,CAAC,GAAG;AACpE,WAAO,IAAI,yBAAyB,SAAS,UAAU;AAAA,MACrD,UAAU;AAAA,MACV,GAAG,KAAK;AAAA,IACV,CAAC;AAAA,EACH;AACF;;;AO/CA,IAAAC,yBAIO;;;ACTP,IAAAC,mBAGO;AACP,IAAAC,yBAKO;AACP,IAAAC,cAAkB;AAcX,IAAM,wBAAN,MAAgE;AAAA,EAqBrE,YACE,SACA,UACA,QACA;AAxBF,SAAS,uBAAuB;AAyB9B,SAAK,UAAU;AACf,SAAK,WAAW;AAChB,SAAK,SAAS;AAAA,EAChB;AAAA,EAtBA,IAAI,WAAmB;AACrB,WAAO,KAAK,OAAO;AAAA,EACrB;AAAA,EAEA,IAAI,uBAA+B;AAnCrC;AAoCI,YAAO,UAAK,SAAS,yBAAd,YAAsC;AAAA,EAC/C;AAAA,EAEA,IAAI,wBAAiC;AAvCvC;AA0CI,YAAO,UAAK,SAAS,0BAAd,YAAuC;AAAA,EAChD;AAAA,EAYA,MAAM,QAAQ;AAAA,IACZ;AAAA,IACA;AAAA,IACA;AAAA,EACF,GAEE;AACA,QAAI,OAAO,SAAS,KAAK,sBAAsB;AAC7C,YAAM,IAAI,oDAAmC;AAAA,QAC3C,UAAU,KAAK;AAAA,QACf,SAAS,KAAK;AAAA,QACd,sBAAsB,KAAK;AAAA,QAC3B;AAAA,MACF,CAAC;AAAA,IACH;AAEA,UAAM,EAAE,iBAAiB,OAAO,SAAS,IAAI,UAAM,sCAAc;AAAA,MAC/D,KAAK,GAAG,KAAK,OAAO,OAAO;AAAA,MAC3B,aAAS,uCAAe,KAAK,OAAO,QAAQ,GAAG,OAAO;AAAA,MACtD,MAAM;AAAA,QACJ,OAAO,KAAK;AAAA,QACZ,OAAO;AAAA,QACP,iBAAiB;AAAA,MACnB;AAAA,MACA,uBAAuB;AAAA,MACvB,+BAA2B;AAAA,QACzB;AAAA,MACF;AAAA,MACA;AAAA,MACA,OAAO,KAAK,OAAO;AAAA,IACrB,CAAC;AAED,WAAO;AAAA,MACL,YAAY,SAAS,KAAK,IAAI,UAAQ,KAAK,SAAS;AAAA,MACpD,OAAO,SAAS,QACZ,EAAE,QAAQ,SAAS,MAAM,cAAc,IACvC;AAAA,MACJ,aAAa,EAAE,SAAS,gBAAgB;AAAA,IAC1C;AAAA,EACF;AACF;AAIA,IAAM,qCAAqC,cAAE,OAAO;AAAA,EAClD,MAAM,cAAE,MAAM,cAAE,OAAO,EAAE,WAAW,cAAE,MAAM,cAAE,OAAO,CAAC,EAAE,CAAC,CAAC;AAAA,EAC1D,OAAO,cAAE,OAAO,EAAE,eAAe,cAAE,OAAO,EAAE,CAAC,EAAE,QAAQ;AACzD,CAAC;;;ADTM,SAAS,cACd,UAAmC,CAAC,GACnB;AA/FnB;AAgGE,QAAM,WACJ,sDAAqB,QAAQ,OAAO,MAApC,YAAyC;AAE3C,QAAM,aAAa,OAAO;AAAA,IACxB,eAAe,cAAU,mCAAW;AAAA,MAClC,QAAQ,QAAQ;AAAA,MAChB,yBAAyB;AAAA,MACzB,aAAa;AAAA,IACf,CAAC,CAAC;AAAA,IACF,GAAG,QAAQ;AAAA,EACb;AAEA,QAAM,kBAAkB,CACtB,SACA,WAAgC,CAAC,MAEjC,IAAI,yBAAyB,SAAS,UAAU;AAAA,IAC9C,UAAU;AAAA,IACV;AAAA,IACA,SAAS;AAAA,IACT,OAAO,QAAQ;AAAA,EACjB,CAAC;AAEH,QAAM,uBAAuB,CAC3B,SACA,WAAqC,CAAC,MAEtC,IAAI,sBAAsB,SAAS,UAAU;AAAA,IAC3C,UAAU;AAAA,IACV;AAAA,IACA,SAAS;AAAA,IACT,OAAO,QAAQ;AAAA,EACjB,CAAC;AAEH,QAAM,WAAW,SACf,SACA,UACA;AACA,QAAI,YAAY;AACd,YAAM,IAAI;AAAA,QACR;AAAA,MACF;AAAA,IACF;AAEA,WAAO,gBAAgB,SAAS,QAAQ;AAAA,EAC1C;AAEA,WAAS,gBAAgB;AACzB,WAAS,OAAO;AAChB,WAAS,YAAY;AACrB,WAAS,gBAAgB;AACzB,WAAS,qBAAqB;AAE9B,SAAO;AACT;AAKO,IAAM,UAAU,cAAc;","names":["import_provider_utils","import_provider_utils","import_zod","import_provider_utils","import_provider","import_provider_utils","import_provider","import_provider_utils","import_zod"]}
+
{"version":3,"sources":["../src/index.ts","../src/mistral-provider.ts","../src/mistral-chat-language-model.ts","../src/convert-to-mistral-chat-messages.ts","../src/map-mistral-finish-reason.ts","../src/mistral-error.ts","../src/get-response-metadata.ts","../src/mistral-prepare-tools.ts","../src/mistral-embedding-model.ts"],"sourcesContent":["export { createMistral, mistral } from './mistral-provider';\nexport type {\n MistralProvider,\n MistralProviderSettings,\n} from './mistral-provider';\n","import {\n EmbeddingModelV1,\n LanguageModelV1,\n ProviderV1,\n} from '@ai-sdk/provider';\nimport {\n FetchFunction,\n loadApiKey,\n withoutTrailingSlash,\n} from '@ai-sdk/provider-utils';\nimport { MistralChatLanguageModel } from './mistral-chat-language-model';\nimport {\n MistralChatModelId,\n MistralChatSettings,\n} from './mistral-chat-settings';\nimport { MistralEmbeddingModel } from './mistral-embedding-model';\nimport {\n MistralEmbeddingModelId,\n MistralEmbeddingSettings,\n} from './mistral-embedding-settings';\n\nexport interface MistralProvider extends ProviderV1 {\n (\n modelId: MistralChatModelId,\n settings?: MistralChatSettings,\n ): LanguageModelV1;\n\n /**\nCreates a model for text generation.\n*/\n languageModel(\n modelId: MistralChatModelId,\n settings?: MistralChatSettings,\n ): LanguageModelV1;\n\n /**\nCreates a model for text generation.\n*/\n chat(\n modelId: MistralChatModelId,\n settings?: MistralChatSettings,\n ): LanguageModelV1;\n\n /**\n@deprecated Use `textEmbeddingModel()` instead.\n */\n embedding(\n modelId: MistralEmbeddingModelId,\n settings?: MistralEmbeddingSettings,\n ): EmbeddingModelV1<string>;\n\n /**\n@deprecated Use `textEmbeddingModel()` instead.\n */\n textEmbedding(\n modelId: MistralEmbeddingModelId,\n settings?: MistralEmbeddingSettings,\n ): EmbeddingModelV1<string>;\n\n textEmbeddingModel: (\n modelId: MistralEmbeddingModelId,\n settings?: MistralEmbeddingSettings,\n ) => EmbeddingModelV1<string>;\n}\n\nexport interface MistralProviderSettings {\n /**\nUse a different URL prefix for API calls, e.g. to use proxy servers.\nThe default prefix is `https://api.mistral.ai/v1`.\n */\n baseURL?: string;\n\n /**\nAPI key that is being send using the `Authorization` header.\nIt defaults to the `MISTRAL_API_KEY` environment variable.\n */\n apiKey?: string;\n\n /**\nCustom headers to include in the requests.\n */\n headers?: Record<string, string>;\n\n /**\nCustom fetch implementation. You can use it as a middleware to intercept requests,\nor to provide a custom fetch implementation for e.g. testing.\n */\n fetch?: FetchFunction;\n}\n\n/**\nCreate a Mistral AI provider instance.\n */\nexport function createMistral(\n options: MistralProviderSettings = {},\n): MistralProvider {\n const baseURL =\n withoutTrailingSlash(options.baseURL) ?? 
'https://api.mistral.ai/v1';\n\n const getHeaders = () => ({\n Authorization: `Bearer ${loadApiKey({\n apiKey: options.apiKey,\n environmentVariableName: 'MISTRAL_API_KEY',\n description: 'Mistral',\n })}`,\n ...options.headers,\n });\n\n const createChatModel = (\n modelId: MistralChatModelId,\n settings: MistralChatSettings = {},\n ) =>\n new MistralChatLanguageModel(modelId, settings, {\n provider: 'mistral.chat',\n baseURL,\n headers: getHeaders,\n fetch: options.fetch,\n });\n\n const createEmbeddingModel = (\n modelId: MistralEmbeddingModelId,\n settings: MistralEmbeddingSettings = {},\n ) =>\n new MistralEmbeddingModel(modelId, settings, {\n provider: 'mistral.embedding',\n baseURL,\n headers: getHeaders,\n fetch: options.fetch,\n });\n\n const provider = function (\n modelId: MistralChatModelId,\n settings?: MistralChatSettings,\n ) {\n if (new.target) {\n throw new Error(\n 'The Mistral model function cannot be called with the new keyword.',\n );\n }\n\n return createChatModel(modelId, settings);\n };\n\n provider.languageModel = createChatModel;\n provider.chat = createChatModel;\n provider.embedding = createEmbeddingModel;\n provider.textEmbedding = createEmbeddingModel;\n provider.textEmbeddingModel = createEmbeddingModel;\n\n return provider as MistralProvider;\n}\n\n/**\nDefault Mistral provider instance.\n */\nexport const mistral = createMistral();\n","import {\n LanguageModelV1,\n LanguageModelV1CallWarning,\n LanguageModelV1FinishReason,\n LanguageModelV1StreamPart,\n} from '@ai-sdk/provider';\nimport {\n FetchFunction,\n ParseResult,\n combineHeaders,\n createEventSourceResponseHandler,\n createJsonResponseHandler,\n postJsonToApi,\n} from '@ai-sdk/provider-utils';\nimport { z } from 'zod';\nimport { convertToMistralChatMessages } from './convert-to-mistral-chat-messages';\nimport { mapMistralFinishReason } from './map-mistral-finish-reason';\nimport {\n MistralChatModelId,\n MistralChatSettings,\n} from './mistral-chat-settings';\nimport { mistralFailedResponseHandler } from './mistral-error';\nimport { getResponseMetadata } from './get-response-metadata';\nimport { prepareTools } from './mistral-prepare-tools';\n\ntype MistralChatConfig = {\n provider: string;\n baseURL: string;\n headers: () => Record<string, string | undefined>;\n fetch?: FetchFunction;\n};\n\nexport class MistralChatLanguageModel implements LanguageModelV1 {\n readonly specificationVersion = 'v1';\n readonly defaultObjectGenerationMode = 'json';\n readonly supportsImageUrls = false;\n\n readonly modelId: MistralChatModelId;\n readonly settings: MistralChatSettings;\n\n private readonly config: MistralChatConfig;\n\n constructor(\n modelId: MistralChatModelId,\n settings: MistralChatSettings,\n config: MistralChatConfig,\n ) {\n this.modelId = modelId;\n this.settings = settings;\n this.config = config;\n }\n\n get provider(): string {\n return this.config.provider;\n }\n\n private getArgs({\n mode,\n prompt,\n maxTokens,\n temperature,\n topP,\n topK,\n frequencyPenalty,\n presencePenalty,\n stopSequences,\n responseFormat,\n seed,\n }: Parameters<LanguageModelV1['doGenerate']>[0]) {\n const type = mode.type;\n\n const warnings: LanguageModelV1CallWarning[] = [];\n\n if (topK != null) {\n warnings.push({\n type: 'unsupported-setting',\n setting: 'topK',\n });\n }\n\n if (frequencyPenalty != null) {\n warnings.push({\n type: 'unsupported-setting',\n setting: 'frequencyPenalty',\n });\n }\n\n if (presencePenalty != null) {\n warnings.push({\n type: 'unsupported-setting',\n setting: 
'presencePenalty',\n });\n }\n\n if (stopSequences != null) {\n warnings.push({\n type: 'unsupported-setting',\n setting: 'stopSequences',\n });\n }\n\n if (\n responseFormat != null &&\n responseFormat.type === 'json' &&\n responseFormat.schema != null\n ) {\n warnings.push({\n type: 'unsupported-setting',\n setting: 'responseFormat',\n details: 'JSON response format schema is not supported',\n });\n }\n\n const baseArgs = {\n // model id:\n model: this.modelId,\n\n // model specific settings:\n safe_prompt: this.settings.safePrompt,\n\n // standardized settings:\n max_tokens: maxTokens,\n temperature,\n top_p: topP,\n random_seed: seed,\n\n // response format:\n response_format:\n responseFormat?.type === 'json' ? { type: 'json_object' } : undefined,\n\n // messages:\n messages: convertToMistralChatMessages(prompt),\n };\n\n switch (type) {\n case 'regular': {\n const { tools, tool_choice, toolWarnings } = prepareTools(mode);\n\n return {\n args: { ...baseArgs, tools, tool_choice },\n warnings: [...warnings, ...toolWarnings],\n };\n }\n\n case 'object-json': {\n return {\n args: {\n ...baseArgs,\n response_format: { type: 'json_object' },\n },\n warnings,\n };\n }\n\n case 'object-tool': {\n return {\n args: {\n ...baseArgs,\n tool_choice: 'any',\n tools: [{ type: 'function', function: mode.tool }],\n },\n warnings,\n };\n }\n\n default: {\n const _exhaustiveCheck: never = type;\n throw new Error(`Unsupported type: ${_exhaustiveCheck}`);\n }\n }\n }\n\n async doGenerate(\n options: Parameters<LanguageModelV1['doGenerate']>[0],\n ): Promise<Awaited<ReturnType<LanguageModelV1['doGenerate']>>> {\n const { args, warnings } = this.getArgs(options);\n\n const { responseHeaders, value: response } = await postJsonToApi({\n url: `${this.config.baseURL}/chat/completions`,\n headers: combineHeaders(this.config.headers(), options.headers),\n body: args,\n failedResponseHandler: mistralFailedResponseHandler,\n successfulResponseHandler: createJsonResponseHandler(\n mistralChatResponseSchema,\n ),\n abortSignal: options.abortSignal,\n fetch: this.config.fetch,\n });\n\n const { messages: rawPrompt, ...rawSettings } = args;\n const choice = response.choices[0];\n let text = choice.message.content ?? undefined;\n\n // when there is a trailing assistant message, mistral will send the\n // content of that message again. we skip this repeated content to\n // avoid duplication, e.g. 
in continuation mode.\n const lastMessage = rawPrompt[rawPrompt.length - 1];\n if (\n lastMessage.role === 'assistant' &&\n text?.startsWith(lastMessage.content)\n ) {\n text = text.slice(lastMessage.content.length);\n }\n\n return {\n text,\n toolCalls: choice.message.tool_calls?.map(toolCall => ({\n toolCallType: 'function',\n toolCallId: toolCall.id,\n toolName: toolCall.function.name,\n args: toolCall.function.arguments!,\n })),\n finishReason: mapMistralFinishReason(choice.finish_reason),\n usage: {\n promptTokens: response.usage.prompt_tokens,\n completionTokens: response.usage.completion_tokens,\n },\n rawCall: { rawPrompt, rawSettings },\n rawResponse: { headers: responseHeaders },\n request: { body: JSON.stringify(args) },\n response: getResponseMetadata(response),\n warnings,\n };\n }\n\n async doStream(\n options: Parameters<LanguageModelV1['doStream']>[0],\n ): Promise<Awaited<ReturnType<LanguageModelV1['doStream']>>> {\n const { args, warnings } = this.getArgs(options);\n\n const body = { ...args, stream: true };\n\n const { responseHeaders, value: response } = await postJsonToApi({\n url: `${this.config.baseURL}/chat/completions`,\n headers: combineHeaders(this.config.headers(), options.headers),\n body,\n failedResponseHandler: mistralFailedResponseHandler,\n successfulResponseHandler: createEventSourceResponseHandler(\n mistralChatChunkSchema,\n ),\n abortSignal: options.abortSignal,\n fetch: this.config.fetch,\n });\n\n const { messages: rawPrompt, ...rawSettings } = args;\n\n let finishReason: LanguageModelV1FinishReason = 'unknown';\n let usage: { promptTokens: number; completionTokens: number } = {\n promptTokens: Number.NaN,\n completionTokens: Number.NaN,\n };\n let chunkNumber = 0;\n let trimLeadingSpace = false;\n\n return {\n stream: response.pipeThrough(\n new TransformStream<\n ParseResult<z.infer<typeof mistralChatChunkSchema>>,\n LanguageModelV1StreamPart\n >({\n transform(chunk, controller) {\n if (!chunk.success) {\n controller.enqueue({ type: 'error', error: chunk.error });\n return;\n }\n\n chunkNumber++;\n\n const value = chunk.value;\n\n if (chunkNumber === 1) {\n controller.enqueue({\n type: 'response-metadata',\n ...getResponseMetadata(value),\n });\n }\n\n if (value.usage != null) {\n usage = {\n promptTokens: value.usage.prompt_tokens,\n completionTokens: value.usage.completion_tokens,\n };\n }\n\n const choice = value.choices[0];\n\n if (choice?.finish_reason != null) {\n finishReason = mapMistralFinishReason(choice.finish_reason);\n }\n\n if (choice?.delta == null) {\n return;\n }\n\n const delta = choice.delta;\n\n // when there is a trailing assistant message, mistral will send the\n // content of that message again. we skip this repeated content to\n // avoid duplication, e.g. in continuation mode.\n if (chunkNumber <= 2) {\n const lastMessage = rawPrompt[rawPrompt.length - 1];\n\n if (\n lastMessage.role === 'assistant' &&\n delta.content === lastMessage.content.trimEnd()\n ) {\n // Mistral moves the trailing space from the prefix to the next chunk.\n // We trim the leading space to avoid duplication.\n if (delta.content.length < lastMessage.content.length) {\n trimLeadingSpace = true;\n }\n\n // skip the repeated content:\n return;\n }\n }\n\n if (delta.content != null) {\n controller.enqueue({\n type: 'text-delta',\n textDelta: trimLeadingSpace\n ? 
delta.content.trimStart()\n : delta.content,\n });\n\n trimLeadingSpace = false;\n }\n\n if (delta.tool_calls != null) {\n for (const toolCall of delta.tool_calls) {\n // mistral tool calls come in one piece:\n controller.enqueue({\n type: 'tool-call-delta',\n toolCallType: 'function',\n toolCallId: toolCall.id,\n toolName: toolCall.function.name,\n argsTextDelta: toolCall.function.arguments,\n });\n controller.enqueue({\n type: 'tool-call',\n toolCallType: 'function',\n toolCallId: toolCall.id,\n toolName: toolCall.function.name,\n args: toolCall.function.arguments,\n });\n }\n }\n },\n\n flush(controller) {\n controller.enqueue({ type: 'finish', finishReason, usage });\n },\n }),\n ),\n rawCall: { rawPrompt, rawSettings },\n rawResponse: { headers: responseHeaders },\n request: { body: JSON.stringify(body) },\n warnings,\n };\n }\n}\n\n// limited version of the schema, focussed on what is needed for the implementation\n// this approach limits breakages when the API changes and increases efficiency\nconst mistralChatResponseSchema = z.object({\n id: z.string().nullish(),\n created: z.number().nullish(),\n model: z.string().nullish(),\n choices: z.array(\n z.object({\n message: z.object({\n role: z.literal('assistant'),\n content: z.string().nullable(),\n tool_calls: z\n .array(\n z.object({\n id: z.string(),\n function: z.object({ name: z.string(), arguments: z.string() }),\n }),\n )\n .nullish(),\n }),\n index: z.number(),\n finish_reason: z.string().nullish(),\n }),\n ),\n object: z.literal('chat.completion'),\n usage: z.object({\n prompt_tokens: z.number(),\n completion_tokens: z.number(),\n }),\n});\n\n// limited version of the schema, focussed on what is needed for the implementation\n// this approach limits breakages when the API changes and increases efficiency\nconst mistralChatChunkSchema = z.object({\n id: z.string().nullish(),\n created: z.number().nullish(),\n model: z.string().nullish(),\n choices: z.array(\n z.object({\n delta: z.object({\n role: z.enum(['assistant']).optional(),\n content: z.string().nullish(),\n tool_calls: z\n .array(\n z.object({\n id: z.string(),\n function: z.object({ name: z.string(), arguments: z.string() }),\n }),\n )\n .nullish(),\n }),\n finish_reason: z.string().nullish(),\n index: z.number(),\n }),\n ),\n usage: z\n .object({\n prompt_tokens: z.number(),\n completion_tokens: z.number(),\n })\n .nullish(),\n});\n","import {\n LanguageModelV1Prompt,\n UnsupportedFunctionalityError,\n} from '@ai-sdk/provider';\nimport { convertUint8ArrayToBase64 } from '@ai-sdk/provider-utils';\nimport { MistralPrompt } from './mistral-chat-prompt';\n\nexport function convertToMistralChatMessages(\n prompt: LanguageModelV1Prompt,\n): MistralPrompt {\n const messages: MistralPrompt = [];\n\n for (let i = 0; i < prompt.length; i++) {\n const { role, content } = prompt[i];\n const isLastMessage = i === prompt.length - 1;\n\n switch (role) {\n case 'system': {\n messages.push({ role: 'system', content });\n break;\n }\n\n case 'user': {\n messages.push({\n role: 'user',\n content: content.map(part => {\n switch (part.type) {\n case 'text': {\n return { type: 'text', text: part.text };\n }\n case 'image': {\n return {\n type: 'image_url',\n image_url:\n part.image instanceof URL\n ? part.image.toString()\n : `data:${\n part.mimeType ?? 
'image/jpeg'\n };base64,${convertUint8ArrayToBase64(part.image)}`,\n };\n }\n case 'file': {\n throw new UnsupportedFunctionalityError({\n functionality: 'File content parts in user messages',\n });\n }\n }\n }),\n });\n break;\n }\n\n case 'assistant': {\n let text = '';\n const toolCalls: Array<{\n id: string;\n type: 'function';\n function: { name: string; arguments: string };\n }> = [];\n\n for (const part of content) {\n switch (part.type) {\n case 'text': {\n text += part.text;\n break;\n }\n case 'tool-call': {\n toolCalls.push({\n id: part.toolCallId,\n type: 'function',\n function: {\n name: part.toolName,\n arguments: JSON.stringify(part.args),\n },\n });\n break;\n }\n default: {\n const _exhaustiveCheck: never = part;\n throw new Error(`Unsupported part: ${_exhaustiveCheck}`);\n }\n }\n }\n\n messages.push({\n role: 'assistant',\n content: text,\n prefix: isLastMessage ? true : undefined,\n tool_calls: toolCalls.length > 0 ? toolCalls : undefined,\n });\n\n break;\n }\n case 'tool': {\n for (const toolResponse of content) {\n messages.push({\n role: 'tool',\n name: toolResponse.toolName,\n content: JSON.stringify(toolResponse.result),\n tool_call_id: toolResponse.toolCallId,\n });\n }\n break;\n }\n default: {\n const _exhaustiveCheck: never = role;\n throw new Error(`Unsupported role: ${_exhaustiveCheck}`);\n }\n }\n }\n\n return messages;\n}\n","import { LanguageModelV1FinishReason } from '@ai-sdk/provider';\n\nexport function mapMistralFinishReason(\n finishReason: string | null | undefined,\n): LanguageModelV1FinishReason {\n switch (finishReason) {\n case 'stop':\n return 'stop';\n case 'length':\n case 'model_length':\n return 'length';\n case 'tool_calls':\n return 'tool-calls';\n default:\n return 'unknown';\n }\n}\n","import { createJsonErrorResponseHandler } from '@ai-sdk/provider-utils';\nimport { z } from 'zod';\n\nconst mistralErrorDataSchema = z.object({\n object: z.literal('error'),\n message: z.string(),\n type: z.string(),\n param: z.string().nullable(),\n code: z.string().nullable(),\n});\n\nexport type MistralErrorData = z.infer<typeof mistralErrorDataSchema>;\n\nexport const mistralFailedResponseHandler = createJsonErrorResponseHandler({\n errorSchema: mistralErrorDataSchema,\n errorToMessage: data => data.message,\n});\n","export function getResponseMetadata({\n id,\n model,\n created,\n}: {\n id?: string | undefined | null;\n created?: number | undefined | null;\n model?: string | undefined | null;\n}) {\n return {\n id: id ?? undefined,\n modelId: model ?? undefined,\n timestamp: created != null ? new Date(created * 1000) : undefined,\n };\n}\n","import {\n LanguageModelV1,\n LanguageModelV1CallWarning,\n UnsupportedFunctionalityError,\n} from '@ai-sdk/provider';\n\nexport function prepareTools(\n mode: Parameters<LanguageModelV1['doGenerate']>[0]['mode'] & {\n type: 'regular';\n },\n): {\n tools:\n | Array<{\n type: 'function';\n function: {\n name: string;\n description: string | undefined;\n parameters: unknown;\n };\n }>\n | undefined;\n tool_choice:\n | { type: 'function'; function: { name: string } }\n | 'auto'\n | 'none'\n | 'any'\n | undefined;\n toolWarnings: LanguageModelV1CallWarning[];\n} {\n // when the tools array is empty, change it to undefined to prevent errors:\n const tools = mode.tools?.length ? 
mode.tools : undefined;\n const toolWarnings: LanguageModelV1CallWarning[] = [];\n\n if (tools == null) {\n return { tools: undefined, tool_choice: undefined, toolWarnings };\n }\n\n const mistralTools: Array<{\n type: 'function';\n function: {\n name: string;\n description: string | undefined;\n parameters: unknown;\n };\n }> = [];\n\n for (const tool of tools) {\n if (tool.type === 'provider-defined') {\n toolWarnings.push({ type: 'unsupported-tool', tool });\n } else {\n mistralTools.push({\n type: 'function',\n function: {\n name: tool.name,\n description: tool.description,\n parameters: tool.parameters,\n },\n });\n }\n }\n\n const toolChoice = mode.toolChoice;\n\n if (toolChoice == null) {\n return { tools: mistralTools, tool_choice: undefined, toolWarnings };\n }\n\n const type = toolChoice.type;\n\n switch (type) {\n case 'auto':\n case 'none':\n return { tools: mistralTools, tool_choice: type, toolWarnings };\n case 'required':\n return { tools: mistralTools, tool_choice: 'any', toolWarnings };\n\n // mistral does not support tool mode directly,\n // so we filter the tools and force the tool choice through 'any'\n case 'tool':\n return {\n tools: mistralTools.filter(\n tool => tool.function.name === toolChoice.toolName,\n ),\n tool_choice: 'any',\n toolWarnings,\n };\n default: {\n const _exhaustiveCheck: never = type;\n throw new UnsupportedFunctionalityError({\n functionality: `Unsupported tool choice type: ${_exhaustiveCheck}`,\n });\n }\n }\n}\n","import {\n EmbeddingModelV1,\n TooManyEmbeddingValuesForCallError,\n} from '@ai-sdk/provider';\nimport {\n combineHeaders,\n createJsonResponseHandler,\n FetchFunction,\n postJsonToApi,\n} from '@ai-sdk/provider-utils';\nimport { z } from 'zod';\nimport {\n MistralEmbeddingModelId,\n MistralEmbeddingSettings,\n} from './mistral-embedding-settings';\nimport { mistralFailedResponseHandler } from './mistral-error';\n\ntype MistralEmbeddingConfig = {\n provider: string;\n baseURL: string;\n headers: () => Record<string, string | undefined>;\n fetch?: FetchFunction;\n};\n\nexport class MistralEmbeddingModel implements EmbeddingModelV1<string> {\n readonly specificationVersion = 'v1';\n readonly modelId: MistralEmbeddingModelId;\n\n private readonly config: MistralEmbeddingConfig;\n private readonly settings: MistralEmbeddingSettings;\n\n get provider(): string {\n return this.config.provider;\n }\n\n get maxEmbeddingsPerCall(): number {\n return this.settings.maxEmbeddingsPerCall ?? 32;\n }\n\n get supportsParallelCalls(): boolean {\n // Parallel calls are technically possible,\n // but I have been hitting rate limits and disable them for now.\n return this.settings.supportsParallelCalls ?? 
false;\n }\n\n constructor(\n modelId: MistralEmbeddingModelId,\n settings: MistralEmbeddingSettings,\n config: MistralEmbeddingConfig,\n ) {\n this.modelId = modelId;\n this.settings = settings;\n this.config = config;\n }\n\n async doEmbed({\n values,\n abortSignal,\n headers,\n }: Parameters<EmbeddingModelV1<string>['doEmbed']>[0]): Promise<\n Awaited<ReturnType<EmbeddingModelV1<string>['doEmbed']>>\n > {\n if (values.length > this.maxEmbeddingsPerCall) {\n throw new TooManyEmbeddingValuesForCallError({\n provider: this.provider,\n modelId: this.modelId,\n maxEmbeddingsPerCall: this.maxEmbeddingsPerCall,\n values,\n });\n }\n\n const { responseHeaders, value: response } = await postJsonToApi({\n url: `${this.config.baseURL}/embeddings`,\n headers: combineHeaders(this.config.headers(), headers),\n body: {\n model: this.modelId,\n input: values,\n encoding_format: 'float',\n },\n failedResponseHandler: mistralFailedResponseHandler,\n successfulResponseHandler: createJsonResponseHandler(\n MistralTextEmbeddingResponseSchema,\n ),\n abortSignal,\n fetch: this.config.fetch,\n });\n\n return {\n embeddings: response.data.map(item => item.embedding),\n usage: response.usage\n ? { tokens: response.usage.prompt_tokens }\n : undefined,\n rawResponse: { headers: responseHeaders },\n };\n }\n}\n\n// minimal version of the schema, focussed on what is needed for the implementation\n// this approach limits breakages when the API changes and increases efficiency\nconst MistralTextEmbeddingResponseSchema = z.object({\n data: z.array(z.object({ embedding: z.array(z.number()) })),\n usage: z.object({ prompt_tokens: z.number() }).nullish(),\n});\n"],"mappings":";;;;;;;;;;;;;;;;;;;;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;;;ACKA,IAAAA,yBAIO;;;ACHP,IAAAC,yBAOO;AACP,IAAAC,cAAkB;;;ACdlB,sBAGO;AACP,4BAA0C;AAGnC,SAAS,6BACd,QACe;AACf,QAAM,WAA0B,CAAC;AAEjC,WAAS,IAAI,GAAG,IAAI,OAAO,QAAQ,KAAK;AACtC,UAAM,EAAE,MAAM,QAAQ,IAAI,OAAO,CAAC;AAClC,UAAM,gBAAgB,MAAM,OAAO,SAAS;AAE5C,YAAQ,MAAM;AAAA,MACZ,KAAK,UAAU;AACb,iBAAS,KAAK,EAAE,MAAM,UAAU,QAAQ,CAAC;AACzC;AAAA,MACF;AAAA,MAEA,KAAK,QAAQ;AACX,iBAAS,KAAK;AAAA,UACZ,MAAM;AAAA,UACN,SAAS,QAAQ,IAAI,UAAQ;AAzBvC;AA0BY,oBAAQ,KAAK,MAAM;AAAA,cACjB,KAAK,QAAQ;AACX,uBAAO,EAAE,MAAM,QAAQ,MAAM,KAAK,KAAK;AAAA,cACzC;AAAA,cACA,KAAK,SAAS;AACZ,uBAAO;AAAA,kBACL,MAAM;AAAA,kBACN,WACE,KAAK,iBAAiB,MAClB,KAAK,MAAM,SAAS,IACpB,SACE,UAAK,aAAL,YAAiB,YACnB,eAAW,iDAA0B,KAAK,KAAK,CAAC;AAAA,gBACxD;AAAA,cACF;AAAA,cACA,KAAK,QAAQ;AACX,sBAAM,IAAI,8CAA8B;AAAA,kBACtC,eAAe;AAAA,gBACjB,CAAC;AAAA,cACH;AAAA,YACF;AAAA,UACF,CAAC;AAAA,QACH,CAAC;AACD;AAAA,MACF;AAAA,MAEA,KAAK,aAAa;AAChB,YAAI,OAAO;AACX,cAAM,YAID,CAAC;AAEN,mBAAW,QAAQ,SAAS;AAC1B,kBAAQ,KAAK,MAAM;AAAA,YACjB,KAAK,QAAQ;AACX,sBAAQ,KAAK;AACb;AAAA,YACF;AAAA,YACA,KAAK,aAAa;AAChB,wBAAU,KAAK;AAAA,gBACb,IAAI,KAAK;AAAA,gBACT,MAAM;AAAA,gBACN,UAAU;AAAA,kBACR,MAAM,KAAK;AAAA,kBACX,WAAW,KAAK,UAAU,KAAK,IAAI;AAAA,gBACrC;AAAA,cACF,CAAC;AACD;AAAA,YACF;AAAA,YACA,SAAS;AACP,oBAAM,mBAA0B;AAChC,oBAAM,IAAI,MAAM,qBAAqB,gBAAgB,EAAE;AAAA,YACzD;AAAA,UACF;AAAA,QACF;AAEA,iBAAS,KAAK;AAAA,UACZ,MAAM;AAAA,UACN,SAAS;AAAA,UACT,QAAQ,gBAAgB,OAAO;AAAA,UAC/B,YAAY,UAAU,SAAS,IAAI,YAAY;AAAA,QACjD,CAAC;AAED;AAAA,MACF;AAAA,MACA,KAAK,QAAQ;AACX,mBAAW,gBAAgB,SAAS;AAClC,mBAAS,KAAK;AAAA,YACZ,MAAM;AAAA,YACN,MAAM,aAAa;AAAA,YACnB,SAAS,KAAK,UAAU,aAAa,MAAM;AAAA,YAC3C,cAAc,aAAa;AAAA,UAC7B,CAAC;AAAA,QACH;AACA;AAAA,MACF;AAAA,MACA,SAAS;AACP,cAAM,mBAA0B;AAChC,cAAM,IAAI,MAAM,qBAAqB,gBAAgB,EAAE;AAAA,MACzD;AAAA,IACF;AAAA,EACF;AAEA,SAAO;AACT;;;AC9GO,SAAS,uBACd,cAC6B;AAC7B,UAAQ,cAAc;AAAA,IACpB,KAAK;AACH,aAAO;AAAA,IACT,KAAK;AAAA,IACL,KAAK;AACH
,aAAO;AAAA,IACT,KAAK;AACH,aAAO;AAAA,IACT;AACE,aAAO;AAAA,EACX;AACF;;;AChBA,IAAAC,yBAA+C;AAC/C,iBAAkB;AAElB,IAAM,yBAAyB,aAAE,OAAO;AAAA,EACtC,QAAQ,aAAE,QAAQ,OAAO;AAAA,EACzB,SAAS,aAAE,OAAO;AAAA,EAClB,MAAM,aAAE,OAAO;AAAA,EACf,OAAO,aAAE,OAAO,EAAE,SAAS;AAAA,EAC3B,MAAM,aAAE,OAAO,EAAE,SAAS;AAC5B,CAAC;AAIM,IAAM,mCAA+B,uDAA+B;AAAA,EACzE,aAAa;AAAA,EACb,gBAAgB,UAAQ,KAAK;AAC/B,CAAC;;;AChBM,SAAS,oBAAoB;AAAA,EAClC;AAAA,EACA;AAAA,EACA;AACF,GAIG;AACD,SAAO;AAAA,IACL,IAAI,kBAAM;AAAA,IACV,SAAS,wBAAS;AAAA,IAClB,WAAW,WAAW,OAAO,IAAI,KAAK,UAAU,GAAI,IAAI;AAAA,EAC1D;AACF;;;ACdA,IAAAC,mBAIO;AAEA,SAAS,aACd,MAqBA;AA5BF;AA8BE,QAAM,UAAQ,UAAK,UAAL,mBAAY,UAAS,KAAK,QAAQ;AAChD,QAAM,eAA6C,CAAC;AAEpD,MAAI,SAAS,MAAM;AACjB,WAAO,EAAE,OAAO,QAAW,aAAa,QAAW,aAAa;AAAA,EAClE;AAEA,QAAM,eAOD,CAAC;AAEN,aAAW,QAAQ,OAAO;AACxB,QAAI,KAAK,SAAS,oBAAoB;AACpC,mBAAa,KAAK,EAAE,MAAM,oBAAoB,KAAK,CAAC;AAAA,IACtD,OAAO;AACL,mBAAa,KAAK;AAAA,QAChB,MAAM;AAAA,QACN,UAAU;AAAA,UACR,MAAM,KAAK;AAAA,UACX,aAAa,KAAK;AAAA,UAClB,YAAY,KAAK;AAAA,QACnB;AAAA,MACF,CAAC;AAAA,IACH;AAAA,EACF;AAEA,QAAM,aAAa,KAAK;AAExB,MAAI,cAAc,MAAM;AACtB,WAAO,EAAE,OAAO,cAAc,aAAa,QAAW,aAAa;AAAA,EACrE;AAEA,QAAM,OAAO,WAAW;AAExB,UAAQ,MAAM;AAAA,IACZ,KAAK;AAAA,IACL,KAAK;AACH,aAAO,EAAE,OAAO,cAAc,aAAa,MAAM,aAAa;AAAA,IAChE,KAAK;AACH,aAAO,EAAE,OAAO,cAAc,aAAa,OAAO,aAAa;AAAA,IAIjE,KAAK;AACH,aAAO;AAAA,QACL,OAAO,aAAa;AAAA,UAClB,UAAQ,KAAK,SAAS,SAAS,WAAW;AAAA,QAC5C;AAAA,QACA,aAAa;AAAA,QACb;AAAA,MACF;AAAA,IACF,SAAS;AACP,YAAM,mBAA0B;AAChC,YAAM,IAAI,+CAA8B;AAAA,QACtC,eAAe,iCAAiC,gBAAgB;AAAA,MAClE,CAAC;AAAA,IACH;AAAA,EACF;AACF;;;AL7DO,IAAM,2BAAN,MAA0D;AAAA,EAU/D,YACE,SACA,UACA,QACA;AAbF,SAAS,uBAAuB;AAChC,SAAS,8BAA8B;AACvC,SAAS,oBAAoB;AAY3B,SAAK,UAAU;AACf,SAAK,WAAW;AAChB,SAAK,SAAS;AAAA,EAChB;AAAA,EAEA,IAAI,WAAmB;AACrB,WAAO,KAAK,OAAO;AAAA,EACrB;AAAA,EAEQ,QAAQ;AAAA,IACd;AAAA,IACA;AAAA,IACA;AAAA,IACA;AAAA,IACA;AAAA,IACA;AAAA,IACA;AAAA,IACA;AAAA,IACA;AAAA,IACA;AAAA,IACA;AAAA,EACF,GAAiD;AAC/C,UAAM,OAAO,KAAK;AAElB,UAAM,WAAyC,CAAC;AAEhD,QAAI,QAAQ,MAAM;AAChB,eAAS,KAAK;AAAA,QACZ,MAAM;AAAA,QACN,SAAS;AAAA,MACX,CAAC;AAAA,IACH;AAEA,QAAI,oBAAoB,MAAM;AAC5B,eAAS,KAAK;AAAA,QACZ,MAAM;AAAA,QACN,SAAS;AAAA,MACX,CAAC;AAAA,IACH;AAEA,QAAI,mBAAmB,MAAM;AAC3B,eAAS,KAAK;AAAA,QACZ,MAAM;AAAA,QACN,SAAS;AAAA,MACX,CAAC;AAAA,IACH;AAEA,QAAI,iBAAiB,MAAM;AACzB,eAAS,KAAK;AAAA,QACZ,MAAM;AAAA,QACN,SAAS;AAAA,MACX,CAAC;AAAA,IACH;AAEA,QACE,kBAAkB,QAClB,eAAe,SAAS,UACxB,eAAe,UAAU,MACzB;AACA,eAAS,KAAK;AAAA,QACZ,MAAM;AAAA,QACN,SAAS;AAAA,QACT,SAAS;AAAA,MACX,CAAC;AAAA,IACH;AAEA,UAAM,WAAW;AAAA;AAAA,MAEf,OAAO,KAAK;AAAA;AAAA,MAGZ,aAAa,KAAK,SAAS;AAAA;AAAA,MAG3B,YAAY;AAAA,MACZ;AAAA,MACA,OAAO;AAAA,MACP,aAAa;AAAA;AAAA,MAGb,kBACE,iDAAgB,UAAS,SAAS,EAAE,MAAM,cAAc,IAAI;AAAA;AAAA,MAG9D,UAAU,6BAA6B,MAAM;AAAA,IAC/C;AAEA,YAAQ,MAAM;AAAA,MACZ,KAAK,WAAW;AACd,cAAM,EAAE,OAAO,aAAa,aAAa,IAAI,aAAa,IAAI;AAE9D,eAAO;AAAA,UACL,MAAM,EAAE,GAAG,UAAU,OAAO,YAAY;AAAA,UACxC,UAAU,CAAC,GAAG,UAAU,GAAG,YAAY;AAAA,QACzC;AAAA,MACF;AAAA,MAEA,KAAK,eAAe;AAClB,eAAO;AAAA,UACL,MAAM;AAAA,YACJ,GAAG;AAAA,YACH,iBAAiB,EAAE,MAAM,cAAc;AAAA,UACzC;AAAA,UACA;AAAA,QACF;AAAA,MACF;AAAA,MAEA,KAAK,eAAe;AAClB,eAAO;AAAA,UACL,MAAM;AAAA,YACJ,GAAG;AAAA,YACH,aAAa;AAAA,YACb,OAAO,CAAC,EAAE,MAAM,YAAY,UAAU,KAAK,KAAK,CAAC;AAAA,UACnD;AAAA,UACA;AAAA,QACF;AAAA,MACF;AAAA,MAEA,SAAS;AACP,cAAM,mBAA0B;AAChC,cAAM,IAAI,MAAM,qBAAqB,gBAAgB,EAAE;AAAA,MACzD;AAAA,IACF;AAAA,EACF;AAAA,EAEA,MAAM,WACJ,SAC6D;AA9KjE;AA+KI,UAAM,EAAE,MAAM,SAAS,IAAI,KAAK,QAAQ,OAAO;AAE/C,UAAM,EAAE,iBAAiB,OAAO,SAAS,IAAI,UAAM,sCAAc;AAAA,MAC/D,KAAK,GAAG,KAAK,OAAO,OAAO;AAAA,MAC3B,aAAS,uCAAe,KAAK,OAAO,QAAQ,GAAG,QAAQ,OAAO;AAAA,MAC9D,MAAM;AAAA,MACN,uBAAuB;AAAA,MACvB,+BAA2B;AAAA,QAC
zB;AAAA,MACF;AAAA,MACA,aAAa,QAAQ;AAAA,MACrB,OAAO,KAAK,OAAO;AAAA,IACrB,CAAC;AAED,UAAM,EAAE,UAAU,WAAW,GAAG,YAAY,IAAI;AAChD,UAAM,SAAS,SAAS,QAAQ,CAAC;AACjC,QAAI,QAAO,YAAO,QAAQ,YAAf,YAA0B;AAKrC,UAAM,cAAc,UAAU,UAAU,SAAS,CAAC;AAClD,QACE,YAAY,SAAS,gBACrB,6BAAM,WAAW,YAAY,WAC7B;AACA,aAAO,KAAK,MAAM,YAAY,QAAQ,MAAM;AAAA,IAC9C;AAEA,WAAO;AAAA,MACL;AAAA,MACA,YAAW,YAAO,QAAQ,eAAf,mBAA2B,IAAI,eAAa;AAAA,QACrD,cAAc;AAAA,QACd,YAAY,SAAS;AAAA,QACrB,UAAU,SAAS,SAAS;AAAA,QAC5B,MAAM,SAAS,SAAS;AAAA,MAC1B;AAAA,MACA,cAAc,uBAAuB,OAAO,aAAa;AAAA,MACzD,OAAO;AAAA,QACL,cAAc,SAAS,MAAM;AAAA,QAC7B,kBAAkB,SAAS,MAAM;AAAA,MACnC;AAAA,MACA,SAAS,EAAE,WAAW,YAAY;AAAA,MAClC,aAAa,EAAE,SAAS,gBAAgB;AAAA,MACxC,SAAS,EAAE,MAAM,KAAK,UAAU,IAAI,EAAE;AAAA,MACtC,UAAU,oBAAoB,QAAQ;AAAA,MACtC;AAAA,IACF;AAAA,EACF;AAAA,EAEA,MAAM,SACJ,SAC2D;AAC3D,UAAM,EAAE,MAAM,SAAS,IAAI,KAAK,QAAQ,OAAO;AAE/C,UAAM,OAAO,EAAE,GAAG,MAAM,QAAQ,KAAK;AAErC,UAAM,EAAE,iBAAiB,OAAO,SAAS,IAAI,UAAM,sCAAc;AAAA,MAC/D,KAAK,GAAG,KAAK,OAAO,OAAO;AAAA,MAC3B,aAAS,uCAAe,KAAK,OAAO,QAAQ,GAAG,QAAQ,OAAO;AAAA,MAC9D;AAAA,MACA,uBAAuB;AAAA,MACvB,+BAA2B;AAAA,QACzB;AAAA,MACF;AAAA,MACA,aAAa,QAAQ;AAAA,MACrB,OAAO,KAAK,OAAO;AAAA,IACrB,CAAC;AAED,UAAM,EAAE,UAAU,WAAW,GAAG,YAAY,IAAI;AAEhD,QAAI,eAA4C;AAChD,QAAI,QAA4D;AAAA,MAC9D,cAAc,OAAO;AAAA,MACrB,kBAAkB,OAAO;AAAA,IAC3B;AACA,QAAI,cAAc;AAClB,QAAI,mBAAmB;AAEvB,WAAO;AAAA,MACL,QAAQ,SAAS;AAAA,QACf,IAAI,gBAGF;AAAA,UACA,UAAU,OAAO,YAAY;AAC3B,gBAAI,CAAC,MAAM,SAAS;AAClB,yBAAW,QAAQ,EAAE,MAAM,SAAS,OAAO,MAAM,MAAM,CAAC;AACxD;AAAA,YACF;AAEA;AAEA,kBAAM,QAAQ,MAAM;AAEpB,gBAAI,gBAAgB,GAAG;AACrB,yBAAW,QAAQ;AAAA,gBACjB,MAAM;AAAA,gBACN,GAAG,oBAAoB,KAAK;AAAA,cAC9B,CAAC;AAAA,YACH;AAEA,gBAAI,MAAM,SAAS,MAAM;AACvB,sBAAQ;AAAA,gBACN,cAAc,MAAM,MAAM;AAAA,gBAC1B,kBAAkB,MAAM,MAAM;AAAA,cAChC;AAAA,YACF;AAEA,kBAAM,SAAS,MAAM,QAAQ,CAAC;AAE9B,iBAAI,iCAAQ,kBAAiB,MAAM;AACjC,6BAAe,uBAAuB,OAAO,aAAa;AAAA,YAC5D;AAEA,iBAAI,iCAAQ,UAAS,MAAM;AACzB;AAAA,YACF;AAEA,kBAAM,QAAQ,OAAO;AAKrB,gBAAI,eAAe,GAAG;AACpB,oBAAM,cAAc,UAAU,UAAU,SAAS,CAAC;AAElD,kBACE,YAAY,SAAS,eACrB,MAAM,YAAY,YAAY,QAAQ,QAAQ,GAC9C;AAGA,oBAAI,MAAM,QAAQ,SAAS,YAAY,QAAQ,QAAQ;AACrD,qCAAmB;AAAA,gBACrB;AAGA;AAAA,cACF;AAAA,YACF;AAEA,gBAAI,MAAM,WAAW,MAAM;AACzB,yBAAW,QAAQ;AAAA,gBACjB,MAAM;AAAA,gBACN,WAAW,mBACP,MAAM,QAAQ,UAAU,IACxB,MAAM;AAAA,cACZ,CAAC;AAED,iCAAmB;AAAA,YACrB;AAEA,gBAAI,MAAM,cAAc,MAAM;AAC5B,yBAAW,YAAY,MAAM,YAAY;AAEvC,2BAAW,QAAQ;AAAA,kBACjB,MAAM;AAAA,kBACN,cAAc;AAAA,kBACd,YAAY,SAAS;AAAA,kBACrB,UAAU,SAAS,SAAS;AAAA,kBAC5B,eAAe,SAAS,SAAS;AAAA,gBACnC,CAAC;AACD,2BAAW,QAAQ;AAAA,kBACjB,MAAM;AAAA,kBACN,cAAc;AAAA,kBACd,YAAY,SAAS;AAAA,kBACrB,UAAU,SAAS,SAAS;AAAA,kBAC5B,MAAM,SAAS,SAAS;AAAA,gBAC1B,CAAC;AAAA,cACH;AAAA,YACF;AAAA,UACF;AAAA,UAEA,MAAM,YAAY;AAChB,uBAAW,QAAQ,EAAE,MAAM,UAAU,cAAc,MAAM,CAAC;AAAA,UAC5D;AAAA,QACF,CAAC;AAAA,MACH;AAAA,MACA,SAAS,EAAE,WAAW,YAAY;AAAA,MAClC,aAAa,EAAE,SAAS,gBAAgB;AAAA,MACxC,SAAS,EAAE,MAAM,KAAK,UAAU,IAAI,EAAE;AAAA,MACtC;AAAA,IACF;AAAA,EACF;AACF;AAIA,IAAM,4BAA4B,cAAE,OAAO;AAAA,EACzC,IAAI,cAAE,OAAO,EAAE,QAAQ;AAAA,EACvB,SAAS,cAAE,OAAO,EAAE,QAAQ;AAAA,EAC5B,OAAO,cAAE,OAAO,EAAE,QAAQ;AAAA,EAC1B,SAAS,cAAE;AAAA,IACT,cAAE,OAAO;AAAA,MACP,SAAS,cAAE,OAAO;AAAA,QAChB,MAAM,cAAE,QAAQ,WAAW;AAAA,QAC3B,SAAS,cAAE,OAAO,EAAE,SAAS;AAAA,QAC7B,YAAY,cACT;AAAA,UACC,cAAE,OAAO;AAAA,YACP,IAAI,cAAE,OAAO;AAAA,YACb,UAAU,cAAE,OAAO,EAAE,MAAM,cAAE,OAAO,GAAG,WAAW,cAAE,OAAO,EAAE,CAAC;AAAA,UAChE,CAAC;AAAA,QACH,EACC,QAAQ;AAAA,MACb,CAAC;AAAA,MACD,OAAO,cAAE,OAAO;AAAA,MAChB,eAAe,cAAE,OAAO,EAAE,QAAQ;AAAA,IACpC,CAAC;AAAA,EACH;AAAA,EACA,QAAQ,cAAE,QAAQ,iBAAiB;AAAA,EACnC,OAAO,cAAE,OAAO;AAAA,IACd,eAAe,cAAE,OAAO;AAAA,IACxB,mBAAmB,cAAE,OAAO;AAAA,EAC9B,CAAC;AACH,CAAC;AAID,IAAM,y
BAAyB,cAAE,OAAO;AAAA,EACtC,IAAI,cAAE,OAAO,EAAE,QAAQ;AAAA,EACvB,SAAS,cAAE,OAAO,EAAE,QAAQ;AAAA,EAC5B,OAAO,cAAE,OAAO,EAAE,QAAQ;AAAA,EAC1B,SAAS,cAAE;AAAA,IACT,cAAE,OAAO;AAAA,MACP,OAAO,cAAE,OAAO;AAAA,QACd,MAAM,cAAE,KAAK,CAAC,WAAW,CAAC,EAAE,SAAS;AAAA,QACrC,SAAS,cAAE,OAAO,EAAE,QAAQ;AAAA,QAC5B,YAAY,cACT;AAAA,UACC,cAAE,OAAO;AAAA,YACP,IAAI,cAAE,OAAO;AAAA,YACb,UAAU,cAAE,OAAO,EAAE,MAAM,cAAE,OAAO,GAAG,WAAW,cAAE,OAAO,EAAE,CAAC;AAAA,UAChE,CAAC;AAAA,QACH,EACC,QAAQ;AAAA,MACb,CAAC;AAAA,MACD,eAAe,cAAE,OAAO,EAAE,QAAQ;AAAA,MAClC,OAAO,cAAE,OAAO;AAAA,IAClB,CAAC;AAAA,EACH;AAAA,EACA,OAAO,cACJ,OAAO;AAAA,IACN,eAAe,cAAE,OAAO;AAAA,IACxB,mBAAmB,cAAE,OAAO;AAAA,EAC9B,CAAC,EACA,QAAQ;AACb,CAAC;;;AMvaD,IAAAC,mBAGO;AACP,IAAAC,yBAKO;AACP,IAAAC,cAAkB;AAcX,IAAM,wBAAN,MAAgE;AAAA,EAqBrE,YACE,SACA,UACA,QACA;AAxBF,SAAS,uBAAuB;AAyB9B,SAAK,UAAU;AACf,SAAK,WAAW;AAChB,SAAK,SAAS;AAAA,EAChB;AAAA,EAtBA,IAAI,WAAmB;AACrB,WAAO,KAAK,OAAO;AAAA,EACrB;AAAA,EAEA,IAAI,uBAA+B;AAnCrC;AAoCI,YAAO,UAAK,SAAS,yBAAd,YAAsC;AAAA,EAC/C;AAAA,EAEA,IAAI,wBAAiC;AAvCvC;AA0CI,YAAO,UAAK,SAAS,0BAAd,YAAuC;AAAA,EAChD;AAAA,EAYA,MAAM,QAAQ;AAAA,IACZ;AAAA,IACA;AAAA,IACA;AAAA,EACF,GAEE;AACA,QAAI,OAAO,SAAS,KAAK,sBAAsB;AAC7C,YAAM,IAAI,oDAAmC;AAAA,QAC3C,UAAU,KAAK;AAAA,QACf,SAAS,KAAK;AAAA,QACd,sBAAsB,KAAK;AAAA,QAC3B;AAAA,MACF,CAAC;AAAA,IACH;AAEA,UAAM,EAAE,iBAAiB,OAAO,SAAS,IAAI,UAAM,sCAAc;AAAA,MAC/D,KAAK,GAAG,KAAK,OAAO,OAAO;AAAA,MAC3B,aAAS,uCAAe,KAAK,OAAO,QAAQ,GAAG,OAAO;AAAA,MACtD,MAAM;AAAA,QACJ,OAAO,KAAK;AAAA,QACZ,OAAO;AAAA,QACP,iBAAiB;AAAA,MACnB;AAAA,MACA,uBAAuB;AAAA,MACvB,+BAA2B;AAAA,QACzB;AAAA,MACF;AAAA,MACA;AAAA,MACA,OAAO,KAAK,OAAO;AAAA,IACrB,CAAC;AAED,WAAO;AAAA,MACL,YAAY,SAAS,KAAK,IAAI,UAAQ,KAAK,SAAS;AAAA,MACpD,OAAO,SAAS,QACZ,EAAE,QAAQ,SAAS,MAAM,cAAc,IACvC;AAAA,MACJ,aAAa,EAAE,SAAS,gBAAgB;AAAA,IAC1C;AAAA,EACF;AACF;AAIA,IAAM,qCAAqC,cAAE,OAAO;AAAA,EAClD,MAAM,cAAE,MAAM,cAAE,OAAO,EAAE,WAAW,cAAE,MAAM,cAAE,OAAO,CAAC,EAAE,CAAC,CAAC;AAAA,EAC1D,OAAO,cAAE,OAAO,EAAE,eAAe,cAAE,OAAO,EAAE,CAAC,EAAE,QAAQ;AACzD,CAAC;;;APTM,SAAS,cACd,UAAmC,CAAC,GACnB;AA/FnB;AAgGE,QAAM,WACJ,sDAAqB,QAAQ,OAAO,MAApC,YAAyC;AAE3C,QAAM,aAAa,OAAO;AAAA,IACxB,eAAe,cAAU,mCAAW;AAAA,MAClC,QAAQ,QAAQ;AAAA,MAChB,yBAAyB;AAAA,MACzB,aAAa;AAAA,IACf,CAAC,CAAC;AAAA,IACF,GAAG,QAAQ;AAAA,EACb;AAEA,QAAM,kBAAkB,CACtB,SACA,WAAgC,CAAC,MAEjC,IAAI,yBAAyB,SAAS,UAAU;AAAA,IAC9C,UAAU;AAAA,IACV;AAAA,IACA,SAAS;AAAA,IACT,OAAO,QAAQ;AAAA,EACjB,CAAC;AAEH,QAAM,uBAAuB,CAC3B,SACA,WAAqC,CAAC,MAEtC,IAAI,sBAAsB,SAAS,UAAU;AAAA,IAC3C,UAAU;AAAA,IACV;AAAA,IACA,SAAS;AAAA,IACT,OAAO,QAAQ;AAAA,EACjB,CAAC;AAEH,QAAM,WAAW,SACf,SACA,UACA;AACA,QAAI,YAAY;AACd,YAAM,IAAI;AAAA,QACR;AAAA,MACF;AAAA,IACF;AAEA,WAAO,gBAAgB,SAAS,QAAQ;AAAA,EAC1C;AAEA,WAAS,gBAAgB;AACzB,WAAS,OAAO;AAChB,WAAS,YAAY;AACrB,WAAS,gBAAgB;AACzB,WAAS,qBAAqB;AAE9B,SAAO;AACT;AAKO,IAAM,UAAU,cAAc;","names":["import_provider_utils","import_provider_utils","import_zod","import_provider_utils","import_provider","import_provider","import_provider_utils","import_zod"]}
package/dist/index.mjs
CHANGED
@@ -1,5 +1,8 @@
-// src/mistral-
-import {
+// src/mistral-provider.ts
+import {
+  loadApiKey,
+  withoutTrailingSlash
+} from "@ai-sdk/provider-utils";
 
 // src/mistral-chat-language-model.ts
 import {
@@ -500,44 +503,6 @@ var mistralChatChunkSchema = z2.object({
   }).nullish()
 });
 
-// src/mistral-facade.ts
-var Mistral = class {
-  /**
-   * Creates a new Mistral provider instance.
-   */
-  constructor(options = {}) {
-    var _a;
-    this.baseURL = (_a = withoutTrailingSlash(options.baseURL)) != null ? _a : "https://api.mistral.ai/v1";
-    this.apiKey = options.apiKey;
-    this.headers = options.headers;
-  }
-  get baseConfig() {
-    return {
-      baseURL: this.baseURL,
-      headers: () => ({
-        Authorization: `Bearer ${loadApiKey({
-          apiKey: this.apiKey,
-          environmentVariableName: "MISTRAL_API_KEY",
-          description: "Mistral"
-        })}`,
-        ...this.headers
-      })
-    };
-  }
-  chat(modelId, settings = {}) {
-    return new MistralChatLanguageModel(modelId, settings, {
-      provider: "mistral.chat",
-      ...this.baseConfig
-    });
-  }
-};
-
-// src/mistral-provider.ts
-import {
-  loadApiKey as loadApiKey2,
-  withoutTrailingSlash as withoutTrailingSlash2
-} from "@ai-sdk/provider-utils";
-
 // src/mistral-embedding-model.ts
 import {
   TooManyEmbeddingValuesForCallError
@@ -609,9 +574,9 @@ var MistralTextEmbeddingResponseSchema = z3.object({
 // src/mistral-provider.ts
 function createMistral(options = {}) {
   var _a;
-  const baseURL = (_a =
+  const baseURL = (_a = withoutTrailingSlash(options.baseURL)) != null ? _a : "https://api.mistral.ai/v1";
   const getHeaders = () => ({
-    Authorization: `Bearer ${
+    Authorization: `Bearer ${loadApiKey({
       apiKey: options.apiKey,
       environmentVariableName: "MISTRAL_API_KEY",
       description: "Mistral"
@@ -647,7 +612,6 @@ function createMistral(options = {}) {
 }
 var mistral = createMistral();
 export {
-  Mistral,
   createMistral,
   mistral
 };
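The hunks above remove the deprecated Mistral facade class from the bundle and drop it from the exports; the provider function returned by createMistral (and the default mistral instance) remains the way to obtain chat and embedding models. The following TypeScript sketch illustrates the replacement usage under stated assumptions: the package is installed at this canary version, the API key is supplied via the MISTRAL_API_KEY environment variable or the apiKey option, and the model ids are example values taken from the settings types.

// Illustrative usage sketch (assumes MISTRAL_API_KEY is available to the process).
import { createMistral, mistral } from '@ai-sdk/mistral';

// Default provider instance; it reads MISTRAL_API_KEY from the environment.
const chatModel = mistral('mistral-large-latest');

// Explicitly configured provider, mirroring the options the removed facade constructor accepted.
const provider = createMistral({
  apiKey: process.env.MISTRAL_API_KEY, // assumption: key provided via the environment
  // baseURL, headers, and fetch can also be set here, as in MistralProviderSettings.
});

const sameChatModel = provider.chat('mistral-large-latest');
const embeddingModel = provider.textEmbeddingModel('mistral-embed');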
package/dist/index.mjs.map
CHANGED
@@ -1 +1 @@
{"version":3,"sources":["../src/mistral-facade.ts","../src/mistral-chat-language-model.ts","../src/convert-to-mistral-chat-messages.ts","../src/map-mistral-finish-reason.ts","../src/mistral-error.ts","../src/get-response-metadata.ts","../src/mistral-prepare-tools.ts","../src/mistral-provider.ts","../src/mistral-embedding-model.ts"],"sourcesContent":["import { loadApiKey, withoutTrailingSlash } from '@ai-sdk/provider-utils';\nimport { MistralChatLanguageModel } from './mistral-chat-language-model';\nimport {\n MistralChatModelId,\n MistralChatSettings,\n} from './mistral-chat-settings';\nimport { MistralProviderSettings } from './mistral-provider';\n\n/**\n * @deprecated Use `createMistral` instead.\n */\nexport class Mistral {\n /**\n * Base URL for the Mistral API calls.\n */\n readonly baseURL: string;\n\n readonly apiKey?: string;\n\n readonly headers?: Record<string, string>;\n\n /**\n * Creates a new Mistral provider instance.\n */\n constructor(options: MistralProviderSettings = {}) {\n this.baseURL =\n withoutTrailingSlash(options.baseURL) ?? 'https://api.mistral.ai/v1';\n\n this.apiKey = options.apiKey;\n this.headers = options.headers;\n }\n\n private get baseConfig() {\n return {\n baseURL: this.baseURL,\n headers: () => ({\n Authorization: `Bearer ${loadApiKey({\n apiKey: this.apiKey,\n environmentVariableName: 'MISTRAL_API_KEY',\n description: 'Mistral',\n })}`,\n ...this.headers,\n }),\n };\n }\n\n chat(modelId: MistralChatModelId, settings: MistralChatSettings = {}) {\n return new MistralChatLanguageModel(modelId, settings, {\n provider: 'mistral.chat',\n ...this.baseConfig,\n });\n }\n}\n","import {\n LanguageModelV1,\n LanguageModelV1CallWarning,\n LanguageModelV1FinishReason,\n LanguageModelV1StreamPart,\n} from '@ai-sdk/provider';\nimport {\n FetchFunction,\n ParseResult,\n combineHeaders,\n createEventSourceResponseHandler,\n createJsonResponseHandler,\n postJsonToApi,\n} from '@ai-sdk/provider-utils';\nimport { z } from 'zod';\nimport { convertToMistralChatMessages } from './convert-to-mistral-chat-messages';\nimport { mapMistralFinishReason } from './map-mistral-finish-reason';\nimport {\n MistralChatModelId,\n MistralChatSettings,\n} from './mistral-chat-settings';\nimport { mistralFailedResponseHandler } from './mistral-error';\nimport { getResponseMetadata } from './get-response-metadata';\nimport { prepareTools } from './mistral-prepare-tools';\n\ntype MistralChatConfig = {\n provider: string;\n baseURL: string;\n headers: () => Record<string, string | undefined>;\n fetch?: FetchFunction;\n};\n\nexport class MistralChatLanguageModel implements LanguageModelV1 {\n readonly specificationVersion = 'v1';\n readonly defaultObjectGenerationMode = 'json';\n readonly supportsImageUrls = false;\n\n readonly modelId: MistralChatModelId;\n readonly settings: MistralChatSettings;\n\n private readonly config: MistralChatConfig;\n\n constructor(\n modelId: MistralChatModelId,\n settings: MistralChatSettings,\n config: MistralChatConfig,\n ) {\n this.modelId = modelId;\n this.settings = settings;\n this.config = config;\n }\n\n get provider(): string {\n return this.config.provider;\n }\n\n private getArgs({\n mode,\n prompt,\n maxTokens,\n temperature,\n topP,\n topK,\n frequencyPenalty,\n presencePenalty,\n stopSequences,\n responseFormat,\n seed,\n }: Parameters<LanguageModelV1['doGenerate']>[0]) {\n const type = mode.type;\n\n const warnings: LanguageModelV1CallWarning[] = [];\n\n if (topK != null) {\n warnings.push({\n type: 'unsupported-setting',\n setting: 'topK',\n });\n 
}\n\n if (frequencyPenalty != null) {\n warnings.push({\n type: 'unsupported-setting',\n setting: 'frequencyPenalty',\n });\n }\n\n if (presencePenalty != null) {\n warnings.push({\n type: 'unsupported-setting',\n setting: 'presencePenalty',\n });\n }\n\n if (stopSequences != null) {\n warnings.push({\n type: 'unsupported-setting',\n setting: 'stopSequences',\n });\n }\n\n if (\n responseFormat != null &&\n responseFormat.type === 'json' &&\n responseFormat.schema != null\n ) {\n warnings.push({\n type: 'unsupported-setting',\n setting: 'responseFormat',\n details: 'JSON response format schema is not supported',\n });\n }\n\n const baseArgs = {\n // model id:\n model: this.modelId,\n\n // model specific settings:\n safe_prompt: this.settings.safePrompt,\n\n // standardized settings:\n max_tokens: maxTokens,\n temperature,\n top_p: topP,\n random_seed: seed,\n\n // response format:\n response_format:\n responseFormat?.type === 'json' ? { type: 'json_object' } : undefined,\n\n // messages:\n messages: convertToMistralChatMessages(prompt),\n };\n\n switch (type) {\n case 'regular': {\n const { tools, tool_choice, toolWarnings } = prepareTools(mode);\n\n return {\n args: { ...baseArgs, tools, tool_choice },\n warnings: [...warnings, ...toolWarnings],\n };\n }\n\n case 'object-json': {\n return {\n args: {\n ...baseArgs,\n response_format: { type: 'json_object' },\n },\n warnings,\n };\n }\n\n case 'object-tool': {\n return {\n args: {\n ...baseArgs,\n tool_choice: 'any',\n tools: [{ type: 'function', function: mode.tool }],\n },\n warnings,\n };\n }\n\n default: {\n const _exhaustiveCheck: never = type;\n throw new Error(`Unsupported type: ${_exhaustiveCheck}`);\n }\n }\n }\n\n async doGenerate(\n options: Parameters<LanguageModelV1['doGenerate']>[0],\n ): Promise<Awaited<ReturnType<LanguageModelV1['doGenerate']>>> {\n const { args, warnings } = this.getArgs(options);\n\n const { responseHeaders, value: response } = await postJsonToApi({\n url: `${this.config.baseURL}/chat/completions`,\n headers: combineHeaders(this.config.headers(), options.headers),\n body: args,\n failedResponseHandler: mistralFailedResponseHandler,\n successfulResponseHandler: createJsonResponseHandler(\n mistralChatResponseSchema,\n ),\n abortSignal: options.abortSignal,\n fetch: this.config.fetch,\n });\n\n const { messages: rawPrompt, ...rawSettings } = args;\n const choice = response.choices[0];\n let text = choice.message.content ?? undefined;\n\n // when there is a trailing assistant message, mistral will send the\n // content of that message again. we skip this repeated content to\n // avoid duplication, e.g. 
in continuation mode.\n const lastMessage = rawPrompt[rawPrompt.length - 1];\n if (\n lastMessage.role === 'assistant' &&\n text?.startsWith(lastMessage.content)\n ) {\n text = text.slice(lastMessage.content.length);\n }\n\n return {\n text,\n toolCalls: choice.message.tool_calls?.map(toolCall => ({\n toolCallType: 'function',\n toolCallId: toolCall.id,\n toolName: toolCall.function.name,\n args: toolCall.function.arguments!,\n })),\n finishReason: mapMistralFinishReason(choice.finish_reason),\n usage: {\n promptTokens: response.usage.prompt_tokens,\n completionTokens: response.usage.completion_tokens,\n },\n rawCall: { rawPrompt, rawSettings },\n rawResponse: { headers: responseHeaders },\n request: { body: JSON.stringify(args) },\n response: getResponseMetadata(response),\n warnings,\n };\n }\n\n async doStream(\n options: Parameters<LanguageModelV1['doStream']>[0],\n ): Promise<Awaited<ReturnType<LanguageModelV1['doStream']>>> {\n const { args, warnings } = this.getArgs(options);\n\n const body = { ...args, stream: true };\n\n const { responseHeaders, value: response } = await postJsonToApi({\n url: `${this.config.baseURL}/chat/completions`,\n headers: combineHeaders(this.config.headers(), options.headers),\n body,\n failedResponseHandler: mistralFailedResponseHandler,\n successfulResponseHandler: createEventSourceResponseHandler(\n mistralChatChunkSchema,\n ),\n abortSignal: options.abortSignal,\n fetch: this.config.fetch,\n });\n\n const { messages: rawPrompt, ...rawSettings } = args;\n\n let finishReason: LanguageModelV1FinishReason = 'unknown';\n let usage: { promptTokens: number; completionTokens: number } = {\n promptTokens: Number.NaN,\n completionTokens: Number.NaN,\n };\n let chunkNumber = 0;\n let trimLeadingSpace = false;\n\n return {\n stream: response.pipeThrough(\n new TransformStream<\n ParseResult<z.infer<typeof mistralChatChunkSchema>>,\n LanguageModelV1StreamPart\n >({\n transform(chunk, controller) {\n if (!chunk.success) {\n controller.enqueue({ type: 'error', error: chunk.error });\n return;\n }\n\n chunkNumber++;\n\n const value = chunk.value;\n\n if (chunkNumber === 1) {\n controller.enqueue({\n type: 'response-metadata',\n ...getResponseMetadata(value),\n });\n }\n\n if (value.usage != null) {\n usage = {\n promptTokens: value.usage.prompt_tokens,\n completionTokens: value.usage.completion_tokens,\n };\n }\n\n const choice = value.choices[0];\n\n if (choice?.finish_reason != null) {\n finishReason = mapMistralFinishReason(choice.finish_reason);\n }\n\n if (choice?.delta == null) {\n return;\n }\n\n const delta = choice.delta;\n\n // when there is a trailing assistant message, mistral will send the\n // content of that message again. we skip this repeated content to\n // avoid duplication, e.g. in continuation mode.\n if (chunkNumber <= 2) {\n const lastMessage = rawPrompt[rawPrompt.length - 1];\n\n if (\n lastMessage.role === 'assistant' &&\n delta.content === lastMessage.content.trimEnd()\n ) {\n // Mistral moves the trailing space from the prefix to the next chunk.\n // We trim the leading space to avoid duplication.\n if (delta.content.length < lastMessage.content.length) {\n trimLeadingSpace = true;\n }\n\n // skip the repeated content:\n return;\n }\n }\n\n if (delta.content != null) {\n controller.enqueue({\n type: 'text-delta',\n textDelta: trimLeadingSpace\n ? 
delta.content.trimStart()\n : delta.content,\n });\n\n trimLeadingSpace = false;\n }\n\n if (delta.tool_calls != null) {\n for (const toolCall of delta.tool_calls) {\n // mistral tool calls come in one piece:\n controller.enqueue({\n type: 'tool-call-delta',\n toolCallType: 'function',\n toolCallId: toolCall.id,\n toolName: toolCall.function.name,\n argsTextDelta: toolCall.function.arguments,\n });\n controller.enqueue({\n type: 'tool-call',\n toolCallType: 'function',\n toolCallId: toolCall.id,\n toolName: toolCall.function.name,\n args: toolCall.function.arguments,\n });\n }\n }\n },\n\n flush(controller) {\n controller.enqueue({ type: 'finish', finishReason, usage });\n },\n }),\n ),\n rawCall: { rawPrompt, rawSettings },\n rawResponse: { headers: responseHeaders },\n request: { body: JSON.stringify(body) },\n warnings,\n };\n }\n}\n\n// limited version of the schema, focussed on what is needed for the implementation\n// this approach limits breakages when the API changes and increases efficiency\nconst mistralChatResponseSchema = z.object({\n id: z.string().nullish(),\n created: z.number().nullish(),\n model: z.string().nullish(),\n choices: z.array(\n z.object({\n message: z.object({\n role: z.literal('assistant'),\n content: z.string().nullable(),\n tool_calls: z\n .array(\n z.object({\n id: z.string(),\n function: z.object({ name: z.string(), arguments: z.string() }),\n }),\n )\n .nullish(),\n }),\n index: z.number(),\n finish_reason: z.string().nullish(),\n }),\n ),\n object: z.literal('chat.completion'),\n usage: z.object({\n prompt_tokens: z.number(),\n completion_tokens: z.number(),\n }),\n});\n\n// limited version of the schema, focussed on what is needed for the implementation\n// this approach limits breakages when the API changes and increases efficiency\nconst mistralChatChunkSchema = z.object({\n id: z.string().nullish(),\n created: z.number().nullish(),\n model: z.string().nullish(),\n choices: z.array(\n z.object({\n delta: z.object({\n role: z.enum(['assistant']).optional(),\n content: z.string().nullish(),\n tool_calls: z\n .array(\n z.object({\n id: z.string(),\n function: z.object({ name: z.string(), arguments: z.string() }),\n }),\n )\n .nullish(),\n }),\n finish_reason: z.string().nullish(),\n index: z.number(),\n }),\n ),\n usage: z\n .object({\n prompt_tokens: z.number(),\n completion_tokens: z.number(),\n })\n .nullish(),\n});\n","import {\n LanguageModelV1Prompt,\n UnsupportedFunctionalityError,\n} from '@ai-sdk/provider';\nimport { convertUint8ArrayToBase64 } from '@ai-sdk/provider-utils';\nimport { MistralPrompt } from './mistral-chat-prompt';\n\nexport function convertToMistralChatMessages(\n prompt: LanguageModelV1Prompt,\n): MistralPrompt {\n const messages: MistralPrompt = [];\n\n for (let i = 0; i < prompt.length; i++) {\n const { role, content } = prompt[i];\n const isLastMessage = i === prompt.length - 1;\n\n switch (role) {\n case 'system': {\n messages.push({ role: 'system', content });\n break;\n }\n\n case 'user': {\n messages.push({\n role: 'user',\n content: content.map(part => {\n switch (part.type) {\n case 'text': {\n return { type: 'text', text: part.text };\n }\n case 'image': {\n return {\n type: 'image_url',\n image_url:\n part.image instanceof URL\n ? part.image.toString()\n : `data:${\n part.mimeType ?? 
'image/jpeg'\n };base64,${convertUint8ArrayToBase64(part.image)}`,\n };\n }\n case 'file': {\n throw new UnsupportedFunctionalityError({\n functionality: 'File content parts in user messages',\n });\n }\n }\n }),\n });\n break;\n }\n\n case 'assistant': {\n let text = '';\n const toolCalls: Array<{\n id: string;\n type: 'function';\n function: { name: string; arguments: string };\n }> = [];\n\n for (const part of content) {\n switch (part.type) {\n case 'text': {\n text += part.text;\n break;\n }\n case 'tool-call': {\n toolCalls.push({\n id: part.toolCallId,\n type: 'function',\n function: {\n name: part.toolName,\n arguments: JSON.stringify(part.args),\n },\n });\n break;\n }\n default: {\n const _exhaustiveCheck: never = part;\n throw new Error(`Unsupported part: ${_exhaustiveCheck}`);\n }\n }\n }\n\n messages.push({\n role: 'assistant',\n content: text,\n prefix: isLastMessage ? true : undefined,\n tool_calls: toolCalls.length > 0 ? toolCalls : undefined,\n });\n\n break;\n }\n case 'tool': {\n for (const toolResponse of content) {\n messages.push({\n role: 'tool',\n name: toolResponse.toolName,\n content: JSON.stringify(toolResponse.result),\n tool_call_id: toolResponse.toolCallId,\n });\n }\n break;\n }\n default: {\n const _exhaustiveCheck: never = role;\n throw new Error(`Unsupported role: ${_exhaustiveCheck}`);\n }\n }\n }\n\n return messages;\n}\n","import { LanguageModelV1FinishReason } from '@ai-sdk/provider';\n\nexport function mapMistralFinishReason(\n finishReason: string | null | undefined,\n): LanguageModelV1FinishReason {\n switch (finishReason) {\n case 'stop':\n return 'stop';\n case 'length':\n case 'model_length':\n return 'length';\n case 'tool_calls':\n return 'tool-calls';\n default:\n return 'unknown';\n }\n}\n","import { createJsonErrorResponseHandler } from '@ai-sdk/provider-utils';\nimport { z } from 'zod';\n\nconst mistralErrorDataSchema = z.object({\n object: z.literal('error'),\n message: z.string(),\n type: z.string(),\n param: z.string().nullable(),\n code: z.string().nullable(),\n});\n\nexport type MistralErrorData = z.infer<typeof mistralErrorDataSchema>;\n\nexport const mistralFailedResponseHandler = createJsonErrorResponseHandler({\n errorSchema: mistralErrorDataSchema,\n errorToMessage: data => data.message,\n});\n","export function getResponseMetadata({\n id,\n model,\n created,\n}: {\n id?: string | undefined | null;\n created?: number | undefined | null;\n model?: string | undefined | null;\n}) {\n return {\n id: id ?? undefined,\n modelId: model ?? undefined,\n timestamp: created != null ? new Date(created * 1000) : undefined,\n };\n}\n","import {\n LanguageModelV1,\n LanguageModelV1CallWarning,\n UnsupportedFunctionalityError,\n} from '@ai-sdk/provider';\n\nexport function prepareTools(\n mode: Parameters<LanguageModelV1['doGenerate']>[0]['mode'] & {\n type: 'regular';\n },\n): {\n tools:\n | Array<{\n type: 'function';\n function: {\n name: string;\n description: string | undefined;\n parameters: unknown;\n };\n }>\n | undefined;\n tool_choice:\n | { type: 'function'; function: { name: string } }\n | 'auto'\n | 'none'\n | 'any'\n | undefined;\n toolWarnings: LanguageModelV1CallWarning[];\n} {\n // when the tools array is empty, change it to undefined to prevent errors:\n const tools = mode.tools?.length ? 
mode.tools : undefined;\n const toolWarnings: LanguageModelV1CallWarning[] = [];\n\n if (tools == null) {\n return { tools: undefined, tool_choice: undefined, toolWarnings };\n }\n\n const mistralTools: Array<{\n type: 'function';\n function: {\n name: string;\n description: string | undefined;\n parameters: unknown;\n };\n }> = [];\n\n for (const tool of tools) {\n if (tool.type === 'provider-defined') {\n toolWarnings.push({ type: 'unsupported-tool', tool });\n } else {\n mistralTools.push({\n type: 'function',\n function: {\n name: tool.name,\n description: tool.description,\n parameters: tool.parameters,\n },\n });\n }\n }\n\n const toolChoice = mode.toolChoice;\n\n if (toolChoice == null) {\n return { tools: mistralTools, tool_choice: undefined, toolWarnings };\n }\n\n const type = toolChoice.type;\n\n switch (type) {\n case 'auto':\n case 'none':\n return { tools: mistralTools, tool_choice: type, toolWarnings };\n case 'required':\n return { tools: mistralTools, tool_choice: 'any', toolWarnings };\n\n // mistral does not support tool mode directly,\n // so we filter the tools and force the tool choice through 'any'\n case 'tool':\n return {\n tools: mistralTools.filter(\n tool => tool.function.name === toolChoice.toolName,\n ),\n tool_choice: 'any',\n toolWarnings,\n };\n default: {\n const _exhaustiveCheck: never = type;\n throw new UnsupportedFunctionalityError({\n functionality: `Unsupported tool choice type: ${_exhaustiveCheck}`,\n });\n }\n }\n}\n","import {\n EmbeddingModelV1,\n LanguageModelV1,\n ProviderV1,\n} from '@ai-sdk/provider';\nimport {\n FetchFunction,\n loadApiKey,\n withoutTrailingSlash,\n} from '@ai-sdk/provider-utils';\nimport { MistralChatLanguageModel } from './mistral-chat-language-model';\nimport {\n MistralChatModelId,\n MistralChatSettings,\n} from './mistral-chat-settings';\nimport { MistralEmbeddingModel } from './mistral-embedding-model';\nimport {\n MistralEmbeddingModelId,\n MistralEmbeddingSettings,\n} from './mistral-embedding-settings';\n\nexport interface MistralProvider extends ProviderV1 {\n (\n modelId: MistralChatModelId,\n settings?: MistralChatSettings,\n ): LanguageModelV1;\n\n /**\nCreates a model for text generation.\n*/\n languageModel(\n modelId: MistralChatModelId,\n settings?: MistralChatSettings,\n ): LanguageModelV1;\n\n /**\nCreates a model for text generation.\n*/\n chat(\n modelId: MistralChatModelId,\n settings?: MistralChatSettings,\n ): LanguageModelV1;\n\n /**\n@deprecated Use `textEmbeddingModel()` instead.\n */\n embedding(\n modelId: MistralEmbeddingModelId,\n settings?: MistralEmbeddingSettings,\n ): EmbeddingModelV1<string>;\n\n /**\n@deprecated Use `textEmbeddingModel()` instead.\n */\n textEmbedding(\n modelId: MistralEmbeddingModelId,\n settings?: MistralEmbeddingSettings,\n ): EmbeddingModelV1<string>;\n\n textEmbeddingModel: (\n modelId: MistralEmbeddingModelId,\n settings?: MistralEmbeddingSettings,\n ) => EmbeddingModelV1<string>;\n}\n\nexport interface MistralProviderSettings {\n /**\nUse a different URL prefix for API calls, e.g. to use proxy servers.\nThe default prefix is `https://api.mistral.ai/v1`.\n */\n baseURL?: string;\n\n /**\nAPI key that is being send using the `Authorization` header.\nIt defaults to the `MISTRAL_API_KEY` environment variable.\n */\n apiKey?: string;\n\n /**\nCustom headers to include in the requests.\n */\n headers?: Record<string, string>;\n\n /**\nCustom fetch implementation. You can use it as a middleware to intercept requests,\nor to provide a custom fetch implementation for e.g. 
testing.\n */\n fetch?: FetchFunction;\n}\n\n/**\nCreate a Mistral AI provider instance.\n */\nexport function createMistral(\n options: MistralProviderSettings = {},\n): MistralProvider {\n const baseURL =\n withoutTrailingSlash(options.baseURL) ?? 'https://api.mistral.ai/v1';\n\n const getHeaders = () => ({\n Authorization: `Bearer ${loadApiKey({\n apiKey: options.apiKey,\n environmentVariableName: 'MISTRAL_API_KEY',\n description: 'Mistral',\n })}`,\n ...options.headers,\n });\n\n const createChatModel = (\n modelId: MistralChatModelId,\n settings: MistralChatSettings = {},\n ) =>\n new MistralChatLanguageModel(modelId, settings, {\n provider: 'mistral.chat',\n baseURL,\n headers: getHeaders,\n fetch: options.fetch,\n });\n\n const createEmbeddingModel = (\n modelId: MistralEmbeddingModelId,\n settings: MistralEmbeddingSettings = {},\n ) =>\n new MistralEmbeddingModel(modelId, settings, {\n provider: 'mistral.embedding',\n baseURL,\n headers: getHeaders,\n fetch: options.fetch,\n });\n\n const provider = function (\n modelId: MistralChatModelId,\n settings?: MistralChatSettings,\n ) {\n if (new.target) {\n throw new Error(\n 'The Mistral model function cannot be called with the new keyword.',\n );\n }\n\n return createChatModel(modelId, settings);\n };\n\n provider.languageModel = createChatModel;\n provider.chat = createChatModel;\n provider.embedding = createEmbeddingModel;\n provider.textEmbedding = createEmbeddingModel;\n provider.textEmbeddingModel = createEmbeddingModel;\n\n return provider as MistralProvider;\n}\n\n/**\nDefault Mistral provider instance.\n */\nexport const mistral = createMistral();\n","import {\n EmbeddingModelV1,\n TooManyEmbeddingValuesForCallError,\n} from '@ai-sdk/provider';\nimport {\n combineHeaders,\n createJsonResponseHandler,\n FetchFunction,\n postJsonToApi,\n} from '@ai-sdk/provider-utils';\nimport { z } from 'zod';\nimport {\n MistralEmbeddingModelId,\n MistralEmbeddingSettings,\n} from './mistral-embedding-settings';\nimport { mistralFailedResponseHandler } from './mistral-error';\n\ntype MistralEmbeddingConfig = {\n provider: string;\n baseURL: string;\n headers: () => Record<string, string | undefined>;\n fetch?: FetchFunction;\n};\n\nexport class MistralEmbeddingModel implements EmbeddingModelV1<string> {\n readonly specificationVersion = 'v1';\n readonly modelId: MistralEmbeddingModelId;\n\n private readonly config: MistralEmbeddingConfig;\n private readonly settings: MistralEmbeddingSettings;\n\n get provider(): string {\n return this.config.provider;\n }\n\n get maxEmbeddingsPerCall(): number {\n return this.settings.maxEmbeddingsPerCall ?? 32;\n }\n\n get supportsParallelCalls(): boolean {\n // Parallel calls are technically possible,\n // but I have been hitting rate limits and disable them for now.\n return this.settings.supportsParallelCalls ?? 
false;\n }\n\n constructor(\n modelId: MistralEmbeddingModelId,\n settings: MistralEmbeddingSettings,\n config: MistralEmbeddingConfig,\n ) {\n this.modelId = modelId;\n this.settings = settings;\n this.config = config;\n }\n\n async doEmbed({\n values,\n abortSignal,\n headers,\n }: Parameters<EmbeddingModelV1<string>['doEmbed']>[0]): Promise<\n Awaited<ReturnType<EmbeddingModelV1<string>['doEmbed']>>\n > {\n if (values.length > this.maxEmbeddingsPerCall) {\n throw new TooManyEmbeddingValuesForCallError({\n provider: this.provider,\n modelId: this.modelId,\n maxEmbeddingsPerCall: this.maxEmbeddingsPerCall,\n values,\n });\n }\n\n const { responseHeaders, value: response } = await postJsonToApi({\n url: `${this.config.baseURL}/embeddings`,\n headers: combineHeaders(this.config.headers(), headers),\n body: {\n model: this.modelId,\n input: values,\n encoding_format: 'float',\n },\n failedResponseHandler: mistralFailedResponseHandler,\n successfulResponseHandler: createJsonResponseHandler(\n MistralTextEmbeddingResponseSchema,\n ),\n abortSignal,\n fetch: this.config.fetch,\n });\n\n return {\n embeddings: response.data.map(item => item.embedding),\n usage: response.usage\n ? { tokens: response.usage.prompt_tokens }\n : undefined,\n rawResponse: { headers: responseHeaders },\n };\n }\n}\n\n// minimal version of the schema, focussed on what is needed for the implementation\n// this approach limits breakages when the API changes and increases efficiency\nconst MistralTextEmbeddingResponseSchema = z.object({\n data: z.array(z.object({ embedding: z.array(z.number()) })),\n usage: z.object({ prompt_tokens: z.number() }).nullish(),\n});\n"],"mappings":";AAAA,SAAS,YAAY,4BAA4B;;;ACMjD;AAAA,EAGE;AAAA,EACA;AAAA,EACA;AAAA,EACA;AAAA,OACK;AACP,SAAS,KAAAA,UAAS;;;ACdlB;AAAA,EAEE;AAAA,OACK;AACP,SAAS,iCAAiC;AAGnC,SAAS,6BACd,QACe;AACf,QAAM,WAA0B,CAAC;AAEjC,WAAS,IAAI,GAAG,IAAI,OAAO,QAAQ,KAAK;AACtC,UAAM,EAAE,MAAM,QAAQ,IAAI,OAAO,CAAC;AAClC,UAAM,gBAAgB,MAAM,OAAO,SAAS;AAE5C,YAAQ,MAAM;AAAA,MACZ,KAAK,UAAU;AACb,iBAAS,KAAK,EAAE,MAAM,UAAU,QAAQ,CAAC;AACzC;AAAA,MACF;AAAA,MAEA,KAAK,QAAQ;AACX,iBAAS,KAAK;AAAA,UACZ,MAAM;AAAA,UACN,SAAS,QAAQ,IAAI,UAAQ;AAzBvC;AA0BY,oBAAQ,KAAK,MAAM;AAAA,cACjB,KAAK,QAAQ;AACX,uBAAO,EAAE,MAAM,QAAQ,MAAM,KAAK,KAAK;AAAA,cACzC;AAAA,cACA,KAAK,SAAS;AACZ,uBAAO;AAAA,kBACL,MAAM;AAAA,kBACN,WACE,KAAK,iBAAiB,MAClB,KAAK,MAAM,SAAS,IACpB,SACE,UAAK,aAAL,YAAiB,YACnB,WAAW,0BAA0B,KAAK,KAAK,CAAC;AAAA,gBACxD;AAAA,cACF;AAAA,cACA,KAAK,QAAQ;AACX,sBAAM,IAAI,8BAA8B;AAAA,kBACtC,eAAe;AAAA,gBACjB,CAAC;AAAA,cACH;AAAA,YACF;AAAA,UACF,CAAC;AAAA,QACH,CAAC;AACD;AAAA,MACF;AAAA,MAEA,KAAK,aAAa;AAChB,YAAI,OAAO;AACX,cAAM,YAID,CAAC;AAEN,mBAAW,QAAQ,SAAS;AAC1B,kBAAQ,KAAK,MAAM;AAAA,YACjB,KAAK,QAAQ;AACX,sBAAQ,KAAK;AACb;AAAA,YACF;AAAA,YACA,KAAK,aAAa;AAChB,wBAAU,KAAK;AAAA,gBACb,IAAI,KAAK;AAAA,gBACT,MAAM;AAAA,gBACN,UAAU;AAAA,kBACR,MAAM,KAAK;AAAA,kBACX,WAAW,KAAK,UAAU,KAAK,IAAI;AAAA,gBACrC;AAAA,cACF,CAAC;AACD;AAAA,YACF;AAAA,YACA,SAAS;AACP,oBAAM,mBAA0B;AAChC,oBAAM,IAAI,MAAM,qBAAqB,gBAAgB,EAAE;AAAA,YACzD;AAAA,UACF;AAAA,QACF;AAEA,iBAAS,KAAK;AAAA,UACZ,MAAM;AAAA,UACN,SAAS;AAAA,UACT,QAAQ,gBAAgB,OAAO;AAAA,UAC/B,YAAY,UAAU,SAAS,IAAI,YAAY;AAAA,QACjD,CAAC;AAED;AAAA,MACF;AAAA,MACA,KAAK,QAAQ;AACX,mBAAW,gBAAgB,SAAS;AAClC,mBAAS,KAAK;AAAA,YACZ,MAAM;AAAA,YACN,MAAM,aAAa;AAAA,YACnB,SAAS,KAAK,UAAU,aAAa,MAAM;AAAA,YAC3C,cAAc,aAAa;AAAA,UAC7B,CAAC;AAAA,QACH;AACA;AAAA,MACF;AAAA,MACA,SAAS;AACP,cAAM,mBAA0B;AAChC,cAAM,IAAI,MAAM,qBAAqB,gBAAgB,EAAE;AAAA,MACzD;AAAA,IACF;AAAA,EACF;AAEA,SAAO;AACT;;;AC9GO,SAAS,uBACd,cAC6B;AAC7B,UAAQ,cAAc;AAAA,IACpB,KAAK;AACH,aAAO;AAAA,IACT,KAAK;AAA
A,IACL,KAAK;AACH,aAAO;AAAA,IACT,KAAK;AACH,aAAO;AAAA,IACT;AACE,aAAO;AAAA,EACX;AACF;;;AChBA,SAAS,sCAAsC;AAC/C,SAAS,SAAS;AAElB,IAAM,yBAAyB,EAAE,OAAO;AAAA,EACtC,QAAQ,EAAE,QAAQ,OAAO;AAAA,EACzB,SAAS,EAAE,OAAO;AAAA,EAClB,MAAM,EAAE,OAAO;AAAA,EACf,OAAO,EAAE,OAAO,EAAE,SAAS;AAAA,EAC3B,MAAM,EAAE,OAAO,EAAE,SAAS;AAC5B,CAAC;AAIM,IAAM,+BAA+B,+BAA+B;AAAA,EACzE,aAAa;AAAA,EACb,gBAAgB,UAAQ,KAAK;AAC/B,CAAC;;;AChBM,SAAS,oBAAoB;AAAA,EAClC;AAAA,EACA;AAAA,EACA;AACF,GAIG;AACD,SAAO;AAAA,IACL,IAAI,kBAAM;AAAA,IACV,SAAS,wBAAS;AAAA,IAClB,WAAW,WAAW,OAAO,IAAI,KAAK,UAAU,GAAI,IAAI;AAAA,EAC1D;AACF;;;ACdA;AAAA,EAGE,iCAAAC;AAAA,OACK;AAEA,SAAS,aACd,MAqBA;AA5BF;AA8BE,QAAM,UAAQ,UAAK,UAAL,mBAAY,UAAS,KAAK,QAAQ;AAChD,QAAM,eAA6C,CAAC;AAEpD,MAAI,SAAS,MAAM;AACjB,WAAO,EAAE,OAAO,QAAW,aAAa,QAAW,aAAa;AAAA,EAClE;AAEA,QAAM,eAOD,CAAC;AAEN,aAAW,QAAQ,OAAO;AACxB,QAAI,KAAK,SAAS,oBAAoB;AACpC,mBAAa,KAAK,EAAE,MAAM,oBAAoB,KAAK,CAAC;AAAA,IACtD,OAAO;AACL,mBAAa,KAAK;AAAA,QAChB,MAAM;AAAA,QACN,UAAU;AAAA,UACR,MAAM,KAAK;AAAA,UACX,aAAa,KAAK;AAAA,UAClB,YAAY,KAAK;AAAA,QACnB;AAAA,MACF,CAAC;AAAA,IACH;AAAA,EACF;AAEA,QAAM,aAAa,KAAK;AAExB,MAAI,cAAc,MAAM;AACtB,WAAO,EAAE,OAAO,cAAc,aAAa,QAAW,aAAa;AAAA,EACrE;AAEA,QAAM,OAAO,WAAW;AAExB,UAAQ,MAAM;AAAA,IACZ,KAAK;AAAA,IACL,KAAK;AACH,aAAO,EAAE,OAAO,cAAc,aAAa,MAAM,aAAa;AAAA,IAChE,KAAK;AACH,aAAO,EAAE,OAAO,cAAc,aAAa,OAAO,aAAa;AAAA,IAIjE,KAAK;AACH,aAAO;AAAA,QACL,OAAO,aAAa;AAAA,UAClB,UAAQ,KAAK,SAAS,SAAS,WAAW;AAAA,QAC5C;AAAA,QACA,aAAa;AAAA,QACb;AAAA,MACF;AAAA,IACF,SAAS;AACP,YAAM,mBAA0B;AAChC,YAAM,IAAIA,+BAA8B;AAAA,QACtC,eAAe,iCAAiC,gBAAgB;AAAA,MAClE,CAAC;AAAA,IACH;AAAA,EACF;AACF;;;AL7DO,IAAM,2BAAN,MAA0D;AAAA,EAU/D,YACE,SACA,UACA,QACA;AAbF,SAAS,uBAAuB;AAChC,SAAS,8BAA8B;AACvC,SAAS,oBAAoB;AAY3B,SAAK,UAAU;AACf,SAAK,WAAW;AAChB,SAAK,SAAS;AAAA,EAChB;AAAA,EAEA,IAAI,WAAmB;AACrB,WAAO,KAAK,OAAO;AAAA,EACrB;AAAA,EAEQ,QAAQ;AAAA,IACd;AAAA,IACA;AAAA,IACA;AAAA,IACA;AAAA,IACA;AAAA,IACA;AAAA,IACA;AAAA,IACA;AAAA,IACA;AAAA,IACA;AAAA,IACA;AAAA,EACF,GAAiD;AAC/C,UAAM,OAAO,KAAK;AAElB,UAAM,WAAyC,CAAC;AAEhD,QAAI,QAAQ,MAAM;AAChB,eAAS,KAAK;AAAA,QACZ,MAAM;AAAA,QACN,SAAS;AAAA,MACX,CAAC;AAAA,IACH;AAEA,QAAI,oBAAoB,MAAM;AAC5B,eAAS,KAAK;AAAA,QACZ,MAAM;AAAA,QACN,SAAS;AAAA,MACX,CAAC;AAAA,IACH;AAEA,QAAI,mBAAmB,MAAM;AAC3B,eAAS,KAAK;AAAA,QACZ,MAAM;AAAA,QACN,SAAS;AAAA,MACX,CAAC;AAAA,IACH;AAEA,QAAI,iBAAiB,MAAM;AACzB,eAAS,KAAK;AAAA,QACZ,MAAM;AAAA,QACN,SAAS;AAAA,MACX,CAAC;AAAA,IACH;AAEA,QACE,kBAAkB,QAClB,eAAe,SAAS,UACxB,eAAe,UAAU,MACzB;AACA,eAAS,KAAK;AAAA,QACZ,MAAM;AAAA,QACN,SAAS;AAAA,QACT,SAAS;AAAA,MACX,CAAC;AAAA,IACH;AAEA,UAAM,WAAW;AAAA;AAAA,MAEf,OAAO,KAAK;AAAA;AAAA,MAGZ,aAAa,KAAK,SAAS;AAAA;AAAA,MAG3B,YAAY;AAAA,MACZ;AAAA,MACA,OAAO;AAAA,MACP,aAAa;AAAA;AAAA,MAGb,kBACE,iDAAgB,UAAS,SAAS,EAAE,MAAM,cAAc,IAAI;AAAA;AAAA,MAG9D,UAAU,6BAA6B,MAAM;AAAA,IAC/C;AAEA,YAAQ,MAAM;AAAA,MACZ,KAAK,WAAW;AACd,cAAM,EAAE,OAAO,aAAa,aAAa,IAAI,aAAa,IAAI;AAE9D,eAAO;AAAA,UACL,MAAM,EAAE,GAAG,UAAU,OAAO,YAAY;AAAA,UACxC,UAAU,CAAC,GAAG,UAAU,GAAG,YAAY;AAAA,QACzC;AAAA,MACF;AAAA,MAEA,KAAK,eAAe;AAClB,eAAO;AAAA,UACL,MAAM;AAAA,YACJ,GAAG;AAAA,YACH,iBAAiB,EAAE,MAAM,cAAc;AAAA,UACzC;AAAA,UACA;AAAA,QACF;AAAA,MACF;AAAA,MAEA,KAAK,eAAe;AAClB,eAAO;AAAA,UACL,MAAM;AAAA,YACJ,GAAG;AAAA,YACH,aAAa;AAAA,YACb,OAAO,CAAC,EAAE,MAAM,YAAY,UAAU,KAAK,KAAK,CAAC;AAAA,UACnD;AAAA,UACA;AAAA,QACF;AAAA,MACF;AAAA,MAEA,SAAS;AACP,cAAM,mBAA0B;AAChC,cAAM,IAAI,MAAM,qBAAqB,gBAAgB,EAAE;AAAA,MACzD;AAAA,IACF;AAAA,EACF;AAAA,EAEA,MAAM,WACJ,SAC6D;AA9KjE;AA+KI,UAAM,EAAE,MAAM,SAAS,IAAI,KAAK,QAAQ,OAAO;AAE/C,UAAM,EAAE,iBAAiB,OAAO,SAAS,IAAI,MAAM,cAAc;AAAA,MAC/D,KAAK,GAAG,KAAK,OAAO,OAAO;AAAA,MAC3B,SAAS,eAAe,KAAK,OAAO,QAAQ,GAAG,QAAQ,OAAO;AAAA,MAC9D,MAAM;AAAA,MACN,u
BAAuB;AAAA,MACvB,2BAA2B;AAAA,QACzB;AAAA,MACF;AAAA,MACA,aAAa,QAAQ;AAAA,MACrB,OAAO,KAAK,OAAO;AAAA,IACrB,CAAC;AAED,UAAM,EAAE,UAAU,WAAW,GAAG,YAAY,IAAI;AAChD,UAAM,SAAS,SAAS,QAAQ,CAAC;AACjC,QAAI,QAAO,YAAO,QAAQ,YAAf,YAA0B;AAKrC,UAAM,cAAc,UAAU,UAAU,SAAS,CAAC;AAClD,QACE,YAAY,SAAS,gBACrB,6BAAM,WAAW,YAAY,WAC7B;AACA,aAAO,KAAK,MAAM,YAAY,QAAQ,MAAM;AAAA,IAC9C;AAEA,WAAO;AAAA,MACL;AAAA,MACA,YAAW,YAAO,QAAQ,eAAf,mBAA2B,IAAI,eAAa;AAAA,QACrD,cAAc;AAAA,QACd,YAAY,SAAS;AAAA,QACrB,UAAU,SAAS,SAAS;AAAA,QAC5B,MAAM,SAAS,SAAS;AAAA,MAC1B;AAAA,MACA,cAAc,uBAAuB,OAAO,aAAa;AAAA,MACzD,OAAO;AAAA,QACL,cAAc,SAAS,MAAM;AAAA,QAC7B,kBAAkB,SAAS,MAAM;AAAA,MACnC;AAAA,MACA,SAAS,EAAE,WAAW,YAAY;AAAA,MAClC,aAAa,EAAE,SAAS,gBAAgB;AAAA,MACxC,SAAS,EAAE,MAAM,KAAK,UAAU,IAAI,EAAE;AAAA,MACtC,UAAU,oBAAoB,QAAQ;AAAA,MACtC;AAAA,IACF;AAAA,EACF;AAAA,EAEA,MAAM,SACJ,SAC2D;AAC3D,UAAM,EAAE,MAAM,SAAS,IAAI,KAAK,QAAQ,OAAO;AAE/C,UAAM,OAAO,EAAE,GAAG,MAAM,QAAQ,KAAK;AAErC,UAAM,EAAE,iBAAiB,OAAO,SAAS,IAAI,MAAM,cAAc;AAAA,MAC/D,KAAK,GAAG,KAAK,OAAO,OAAO;AAAA,MAC3B,SAAS,eAAe,KAAK,OAAO,QAAQ,GAAG,QAAQ,OAAO;AAAA,MAC9D;AAAA,MACA,uBAAuB;AAAA,MACvB,2BAA2B;AAAA,QACzB;AAAA,MACF;AAAA,MACA,aAAa,QAAQ;AAAA,MACrB,OAAO,KAAK,OAAO;AAAA,IACrB,CAAC;AAED,UAAM,EAAE,UAAU,WAAW,GAAG,YAAY,IAAI;AAEhD,QAAI,eAA4C;AAChD,QAAI,QAA4D;AAAA,MAC9D,cAAc,OAAO;AAAA,MACrB,kBAAkB,OAAO;AAAA,IAC3B;AACA,QAAI,cAAc;AAClB,QAAI,mBAAmB;AAEvB,WAAO;AAAA,MACL,QAAQ,SAAS;AAAA,QACf,IAAI,gBAGF;AAAA,UACA,UAAU,OAAO,YAAY;AAC3B,gBAAI,CAAC,MAAM,SAAS;AAClB,yBAAW,QAAQ,EAAE,MAAM,SAAS,OAAO,MAAM,MAAM,CAAC;AACxD;AAAA,YACF;AAEA;AAEA,kBAAM,QAAQ,MAAM;AAEpB,gBAAI,gBAAgB,GAAG;AACrB,yBAAW,QAAQ;AAAA,gBACjB,MAAM;AAAA,gBACN,GAAG,oBAAoB,KAAK;AAAA,cAC9B,CAAC;AAAA,YACH;AAEA,gBAAI,MAAM,SAAS,MAAM;AACvB,sBAAQ;AAAA,gBACN,cAAc,MAAM,MAAM;AAAA,gBAC1B,kBAAkB,MAAM,MAAM;AAAA,cAChC;AAAA,YACF;AAEA,kBAAM,SAAS,MAAM,QAAQ,CAAC;AAE9B,iBAAI,iCAAQ,kBAAiB,MAAM;AACjC,6BAAe,uBAAuB,OAAO,aAAa;AAAA,YAC5D;AAEA,iBAAI,iCAAQ,UAAS,MAAM;AACzB;AAAA,YACF;AAEA,kBAAM,QAAQ,OAAO;AAKrB,gBAAI,eAAe,GAAG;AACpB,oBAAM,cAAc,UAAU,UAAU,SAAS,CAAC;AAElD,kBACE,YAAY,SAAS,eACrB,MAAM,YAAY,YAAY,QAAQ,QAAQ,GAC9C;AAGA,oBAAI,MAAM,QAAQ,SAAS,YAAY,QAAQ,QAAQ;AACrD,qCAAmB;AAAA,gBACrB;AAGA;AAAA,cACF;AAAA,YACF;AAEA,gBAAI,MAAM,WAAW,MAAM;AACzB,yBAAW,QAAQ;AAAA,gBACjB,MAAM;AAAA,gBACN,WAAW,mBACP,MAAM,QAAQ,UAAU,IACxB,MAAM;AAAA,cACZ,CAAC;AAED,iCAAmB;AAAA,YACrB;AAEA,gBAAI,MAAM,cAAc,MAAM;AAC5B,yBAAW,YAAY,MAAM,YAAY;AAEvC,2BAAW,QAAQ;AAAA,kBACjB,MAAM;AAAA,kBACN,cAAc;AAAA,kBACd,YAAY,SAAS;AAAA,kBACrB,UAAU,SAAS,SAAS;AAAA,kBAC5B,eAAe,SAAS,SAAS;AAAA,gBACnC,CAAC;AACD,2BAAW,QAAQ;AAAA,kBACjB,MAAM;AAAA,kBACN,cAAc;AAAA,kBACd,YAAY,SAAS;AAAA,kBACrB,UAAU,SAAS,SAAS;AAAA,kBAC5B,MAAM,SAAS,SAAS;AAAA,gBAC1B,CAAC;AAAA,cACH;AAAA,YACF;AAAA,UACF;AAAA,UAEA,MAAM,YAAY;AAChB,uBAAW,QAAQ,EAAE,MAAM,UAAU,cAAc,MAAM,CAAC;AAAA,UAC5D;AAAA,QACF,CAAC;AAAA,MACH;AAAA,MACA,SAAS,EAAE,WAAW,YAAY;AAAA,MAClC,aAAa,EAAE,SAAS,gBAAgB;AAAA,MACxC,SAAS,EAAE,MAAM,KAAK,UAAU,IAAI,EAAE;AAAA,MACtC;AAAA,IACF;AAAA,EACF;AACF;AAIA,IAAM,4BAA4BC,GAAE,OAAO;AAAA,EACzC,IAAIA,GAAE,OAAO,EAAE,QAAQ;AAAA,EACvB,SAASA,GAAE,OAAO,EAAE,QAAQ;AAAA,EAC5B,OAAOA,GAAE,OAAO,EAAE,QAAQ;AAAA,EAC1B,SAASA,GAAE;AAAA,IACTA,GAAE,OAAO;AAAA,MACP,SAASA,GAAE,OAAO;AAAA,QAChB,MAAMA,GAAE,QAAQ,WAAW;AAAA,QAC3B,SAASA,GAAE,OAAO,EAAE,SAAS;AAAA,QAC7B,YAAYA,GACT;AAAA,UACCA,GAAE,OAAO;AAAA,YACP,IAAIA,GAAE,OAAO;AAAA,YACb,UAAUA,GAAE,OAAO,EAAE,MAAMA,GAAE,OAAO,GAAG,WAAWA,GAAE,OAAO,EAAE,CAAC;AAAA,UAChE,CAAC;AAAA,QACH,EACC,QAAQ;AAAA,MACb,CAAC;AAAA,MACD,OAAOA,GAAE,OAAO;AAAA,MAChB,eAAeA,GAAE,OAAO,EAAE,QAAQ;AAAA,IACpC,CAAC;AAAA,EACH;AAAA,EACA,QAAQA,GAAE,QAAQ,iBAAiB;AAAA,EACnC,OAAOA,GAAE,OAAO;AAAA,IACd,eAAeA,GAAE,OAAO;AAAA,IACxB,mBAA
mBA,GAAE,OAAO;AAAA,EAC9B,CAAC;AACH,CAAC;AAID,IAAM,yBAAyBA,GAAE,OAAO;AAAA,EACtC,IAAIA,GAAE,OAAO,EAAE,QAAQ;AAAA,EACvB,SAASA,GAAE,OAAO,EAAE,QAAQ;AAAA,EAC5B,OAAOA,GAAE,OAAO,EAAE,QAAQ;AAAA,EAC1B,SAASA,GAAE;AAAA,IACTA,GAAE,OAAO;AAAA,MACP,OAAOA,GAAE,OAAO;AAAA,QACd,MAAMA,GAAE,KAAK,CAAC,WAAW,CAAC,EAAE,SAAS;AAAA,QACrC,SAASA,GAAE,OAAO,EAAE,QAAQ;AAAA,QAC5B,YAAYA,GACT;AAAA,UACCA,GAAE,OAAO;AAAA,YACP,IAAIA,GAAE,OAAO;AAAA,YACb,UAAUA,GAAE,OAAO,EAAE,MAAMA,GAAE,OAAO,GAAG,WAAWA,GAAE,OAAO,EAAE,CAAC;AAAA,UAChE,CAAC;AAAA,QACH,EACC,QAAQ;AAAA,MACb,CAAC;AAAA,MACD,eAAeA,GAAE,OAAO,EAAE,QAAQ;AAAA,MAClC,OAAOA,GAAE,OAAO;AAAA,IAClB,CAAC;AAAA,EACH;AAAA,EACA,OAAOA,GACJ,OAAO;AAAA,IACN,eAAeA,GAAE,OAAO;AAAA,IACxB,mBAAmBA,GAAE,OAAO;AAAA,EAC9B,CAAC,EACA,QAAQ;AACb,CAAC;;;AD5ZM,IAAM,UAAN,MAAc;AAAA;AAAA;AAAA;AAAA,EAanB,YAAY,UAAmC,CAAC,GAAG;AAxBrD;AAyBI,SAAK,WACH,0BAAqB,QAAQ,OAAO,MAApC,YAAyC;AAE3C,SAAK,SAAS,QAAQ;AACtB,SAAK,UAAU,QAAQ;AAAA,EACzB;AAAA,EAEA,IAAY,aAAa;AACvB,WAAO;AAAA,MACL,SAAS,KAAK;AAAA,MACd,SAAS,OAAO;AAAA,QACd,eAAe,UAAU,WAAW;AAAA,UAClC,QAAQ,KAAK;AAAA,UACb,yBAAyB;AAAA,UACzB,aAAa;AAAA,QACf,CAAC,CAAC;AAAA,QACF,GAAG,KAAK;AAAA,MACV;AAAA,IACF;AAAA,EACF;AAAA,EAEA,KAAK,SAA6B,WAAgC,CAAC,GAAG;AACpE,WAAO,IAAI,yBAAyB,SAAS,UAAU;AAAA,MACrD,UAAU;AAAA,MACV,GAAG,KAAK;AAAA,IACV,CAAC;AAAA,EACH;AACF;;;AO/CA;AAAA,EAEE,cAAAC;AAAA,EACA,wBAAAC;AAAA,OACK;;;ACTP;AAAA,EAEE;AAAA,OACK;AACP;AAAA,EACE,kBAAAC;AAAA,EACA,6BAAAC;AAAA,EAEA,iBAAAC;AAAA,OACK;AACP,SAAS,KAAAC,UAAS;AAcX,IAAM,wBAAN,MAAgE;AAAA,EAqBrE,YACE,SACA,UACA,QACA;AAxBF,SAAS,uBAAuB;AAyB9B,SAAK,UAAU;AACf,SAAK,WAAW;AAChB,SAAK,SAAS;AAAA,EAChB;AAAA,EAtBA,IAAI,WAAmB;AACrB,WAAO,KAAK,OAAO;AAAA,EACrB;AAAA,EAEA,IAAI,uBAA+B;AAnCrC;AAoCI,YAAO,UAAK,SAAS,yBAAd,YAAsC;AAAA,EAC/C;AAAA,EAEA,IAAI,wBAAiC;AAvCvC;AA0CI,YAAO,UAAK,SAAS,0BAAd,YAAuC;AAAA,EAChD;AAAA,EAYA,MAAM,QAAQ;AAAA,IACZ;AAAA,IACA;AAAA,IACA;AAAA,EACF,GAEE;AACA,QAAI,OAAO,SAAS,KAAK,sBAAsB;AAC7C,YAAM,IAAI,mCAAmC;AAAA,QAC3C,UAAU,KAAK;AAAA,QACf,SAAS,KAAK;AAAA,QACd,sBAAsB,KAAK;AAAA,QAC3B;AAAA,MACF,CAAC;AAAA,IACH;AAEA,UAAM,EAAE,iBAAiB,OAAO,SAAS,IAAI,MAAMC,eAAc;AAAA,MAC/D,KAAK,GAAG,KAAK,OAAO,OAAO;AAAA,MAC3B,SAASC,gBAAe,KAAK,OAAO,QAAQ,GAAG,OAAO;AAAA,MACtD,MAAM;AAAA,QACJ,OAAO,KAAK;AAAA,QACZ,OAAO;AAAA,QACP,iBAAiB;AAAA,MACnB;AAAA,MACA,uBAAuB;AAAA,MACvB,2BAA2BC;AAAA,QACzB;AAAA,MACF;AAAA,MACA;AAAA,MACA,OAAO,KAAK,OAAO;AAAA,IACrB,CAAC;AAED,WAAO;AAAA,MACL,YAAY,SAAS,KAAK,IAAI,UAAQ,KAAK,SAAS;AAAA,MACpD,OAAO,SAAS,QACZ,EAAE,QAAQ,SAAS,MAAM,cAAc,IACvC;AAAA,MACJ,aAAa,EAAE,SAAS,gBAAgB;AAAA,IAC1C;AAAA,EACF;AACF;AAIA,IAAM,qCAAqCC,GAAE,OAAO;AAAA,EAClD,MAAMA,GAAE,MAAMA,GAAE,OAAO,EAAE,WAAWA,GAAE,MAAMA,GAAE,OAAO,CAAC,EAAE,CAAC,CAAC;AAAA,EAC1D,OAAOA,GAAE,OAAO,EAAE,eAAeA,GAAE,OAAO,EAAE,CAAC,EAAE,QAAQ;AACzD,CAAC;;;ADTM,SAAS,cACd,UAAmC,CAAC,GACnB;AA/FnB;AAgGE,QAAM,WACJ,KAAAC,sBAAqB,QAAQ,OAAO,MAApC,YAAyC;AAE3C,QAAM,aAAa,OAAO;AAAA,IACxB,eAAe,UAAUC,YAAW;AAAA,MAClC,QAAQ,QAAQ;AAAA,MAChB,yBAAyB;AAAA,MACzB,aAAa;AAAA,IACf,CAAC,CAAC;AAAA,IACF,GAAG,QAAQ;AAAA,EACb;AAEA,QAAM,kBAAkB,CACtB,SACA,WAAgC,CAAC,MAEjC,IAAI,yBAAyB,SAAS,UAAU;AAAA,IAC9C,UAAU;AAAA,IACV;AAAA,IACA,SAAS;AAAA,IACT,OAAO,QAAQ;AAAA,EACjB,CAAC;AAEH,QAAM,uBAAuB,CAC3B,SACA,WAAqC,CAAC,MAEtC,IAAI,sBAAsB,SAAS,UAAU;AAAA,IAC3C,UAAU;AAAA,IACV;AAAA,IACA,SAAS;AAAA,IACT,OAAO,QAAQ;AAAA,EACjB,CAAC;AAEH,QAAM,WAAW,SACf,SACA,UACA;AACA,QAAI,YAAY;AACd,YAAM,IAAI;AAAA,QACR;AAAA,MACF;AAAA,IACF;AAEA,WAAO,gBAAgB,SAAS,QAAQ;AAAA,EAC1C;AAEA,WAAS,gBAAgB;AACzB,WAAS,OAAO;AAChB,WAAS,YAAY;AACrB,WAAS,gBAAgB;AACzB,WAAS,qBAAqB;AAE9B,SAAO;AACT;AAKO,IAAM,UAAU,cAAc;","names":["z","UnsupportedFunctionalityError","z","loadApiKey","withoutTrailingSlash","combineHeader
s","createJsonResponseHandler","postJsonToApi","z","postJsonToApi","combineHeaders","createJsonResponseHandler","z","withoutTrailingSlash","loadApiKey"]}
  1   +
{"version":3,"sources":["../src/mistral-provider.ts","../src/mistral-chat-language-model.ts","../src/convert-to-mistral-chat-messages.ts","../src/map-mistral-finish-reason.ts","../src/mistral-error.ts","../src/get-response-metadata.ts","../src/mistral-prepare-tools.ts","../src/mistral-embedding-model.ts"],"sourcesContent":["import {\n EmbeddingModelV1,\n LanguageModelV1,\n ProviderV1,\n} from '@ai-sdk/provider';\nimport {\n FetchFunction,\n loadApiKey,\n withoutTrailingSlash,\n} from '@ai-sdk/provider-utils';\nimport { MistralChatLanguageModel } from './mistral-chat-language-model';\nimport {\n MistralChatModelId,\n MistralChatSettings,\n} from './mistral-chat-settings';\nimport { MistralEmbeddingModel } from './mistral-embedding-model';\nimport {\n MistralEmbeddingModelId,\n MistralEmbeddingSettings,\n} from './mistral-embedding-settings';\n\nexport interface MistralProvider extends ProviderV1 {\n (\n modelId: MistralChatModelId,\n settings?: MistralChatSettings,\n ): LanguageModelV1;\n\n /**\nCreates a model for text generation.\n*/\n languageModel(\n modelId: MistralChatModelId,\n settings?: MistralChatSettings,\n ): LanguageModelV1;\n\n /**\nCreates a model for text generation.\n*/\n chat(\n modelId: MistralChatModelId,\n settings?: MistralChatSettings,\n ): LanguageModelV1;\n\n /**\n@deprecated Use `textEmbeddingModel()` instead.\n */\n embedding(\n modelId: MistralEmbeddingModelId,\n settings?: MistralEmbeddingSettings,\n ): EmbeddingModelV1<string>;\n\n /**\n@deprecated Use `textEmbeddingModel()` instead.\n */\n textEmbedding(\n modelId: MistralEmbeddingModelId,\n settings?: MistralEmbeddingSettings,\n ): EmbeddingModelV1<string>;\n\n textEmbeddingModel: (\n modelId: MistralEmbeddingModelId,\n settings?: MistralEmbeddingSettings,\n ) => EmbeddingModelV1<string>;\n}\n\nexport interface MistralProviderSettings {\n /**\nUse a different URL prefix for API calls, e.g. to use proxy servers.\nThe default prefix is `https://api.mistral.ai/v1`.\n */\n baseURL?: string;\n\n /**\nAPI key that is being send using the `Authorization` header.\nIt defaults to the `MISTRAL_API_KEY` environment variable.\n */\n apiKey?: string;\n\n /**\nCustom headers to include in the requests.\n */\n headers?: Record<string, string>;\n\n /**\nCustom fetch implementation. You can use it as a middleware to intercept requests,\nor to provide a custom fetch implementation for e.g. testing.\n */\n fetch?: FetchFunction;\n}\n\n/**\nCreate a Mistral AI provider instance.\n */\nexport function createMistral(\n options: MistralProviderSettings = {},\n): MistralProvider {\n const baseURL =\n withoutTrailingSlash(options.baseURL) ?? 
'https://api.mistral.ai/v1';\n\n const getHeaders = () => ({\n Authorization: `Bearer ${loadApiKey({\n apiKey: options.apiKey,\n environmentVariableName: 'MISTRAL_API_KEY',\n description: 'Mistral',\n })}`,\n ...options.headers,\n });\n\n const createChatModel = (\n modelId: MistralChatModelId,\n settings: MistralChatSettings = {},\n ) =>\n new MistralChatLanguageModel(modelId, settings, {\n provider: 'mistral.chat',\n baseURL,\n headers: getHeaders,\n fetch: options.fetch,\n });\n\n const createEmbeddingModel = (\n modelId: MistralEmbeddingModelId,\n settings: MistralEmbeddingSettings = {},\n ) =>\n new MistralEmbeddingModel(modelId, settings, {\n provider: 'mistral.embedding',\n baseURL,\n headers: getHeaders,\n fetch: options.fetch,\n });\n\n const provider = function (\n modelId: MistralChatModelId,\n settings?: MistralChatSettings,\n ) {\n if (new.target) {\n throw new Error(\n 'The Mistral model function cannot be called with the new keyword.',\n );\n }\n\n return createChatModel(modelId, settings);\n };\n\n provider.languageModel = createChatModel;\n provider.chat = createChatModel;\n provider.embedding = createEmbeddingModel;\n provider.textEmbedding = createEmbeddingModel;\n provider.textEmbeddingModel = createEmbeddingModel;\n\n return provider as MistralProvider;\n}\n\n/**\nDefault Mistral provider instance.\n */\nexport const mistral = createMistral();\n","import {\n LanguageModelV1,\n LanguageModelV1CallWarning,\n LanguageModelV1FinishReason,\n LanguageModelV1StreamPart,\n} from '@ai-sdk/provider';\nimport {\n FetchFunction,\n ParseResult,\n combineHeaders,\n createEventSourceResponseHandler,\n createJsonResponseHandler,\n postJsonToApi,\n} from '@ai-sdk/provider-utils';\nimport { z } from 'zod';\nimport { convertToMistralChatMessages } from './convert-to-mistral-chat-messages';\nimport { mapMistralFinishReason } from './map-mistral-finish-reason';\nimport {\n MistralChatModelId,\n MistralChatSettings,\n} from './mistral-chat-settings';\nimport { mistralFailedResponseHandler } from './mistral-error';\nimport { getResponseMetadata } from './get-response-metadata';\nimport { prepareTools } from './mistral-prepare-tools';\n\ntype MistralChatConfig = {\n provider: string;\n baseURL: string;\n headers: () => Record<string, string | undefined>;\n fetch?: FetchFunction;\n};\n\nexport class MistralChatLanguageModel implements LanguageModelV1 {\n readonly specificationVersion = 'v1';\n readonly defaultObjectGenerationMode = 'json';\n readonly supportsImageUrls = false;\n\n readonly modelId: MistralChatModelId;\n readonly settings: MistralChatSettings;\n\n private readonly config: MistralChatConfig;\n\n constructor(\n modelId: MistralChatModelId,\n settings: MistralChatSettings,\n config: MistralChatConfig,\n ) {\n this.modelId = modelId;\n this.settings = settings;\n this.config = config;\n }\n\n get provider(): string {\n return this.config.provider;\n }\n\n private getArgs({\n mode,\n prompt,\n maxTokens,\n temperature,\n topP,\n topK,\n frequencyPenalty,\n presencePenalty,\n stopSequences,\n responseFormat,\n seed,\n }: Parameters<LanguageModelV1['doGenerate']>[0]) {\n const type = mode.type;\n\n const warnings: LanguageModelV1CallWarning[] = [];\n\n if (topK != null) {\n warnings.push({\n type: 'unsupported-setting',\n setting: 'topK',\n });\n }\n\n if (frequencyPenalty != null) {\n warnings.push({\n type: 'unsupported-setting',\n setting: 'frequencyPenalty',\n });\n }\n\n if (presencePenalty != null) {\n warnings.push({\n type: 'unsupported-setting',\n setting: 
'presencePenalty',\n });\n }\n\n if (stopSequences != null) {\n warnings.push({\n type: 'unsupported-setting',\n setting: 'stopSequences',\n });\n }\n\n if (\n responseFormat != null &&\n responseFormat.type === 'json' &&\n responseFormat.schema != null\n ) {\n warnings.push({\n type: 'unsupported-setting',\n setting: 'responseFormat',\n details: 'JSON response format schema is not supported',\n });\n }\n\n const baseArgs = {\n // model id:\n model: this.modelId,\n\n // model specific settings:\n safe_prompt: this.settings.safePrompt,\n\n // standardized settings:\n max_tokens: maxTokens,\n temperature,\n top_p: topP,\n random_seed: seed,\n\n // response format:\n response_format:\n responseFormat?.type === 'json' ? { type: 'json_object' } : undefined,\n\n // messages:\n messages: convertToMistralChatMessages(prompt),\n };\n\n switch (type) {\n case 'regular': {\n const { tools, tool_choice, toolWarnings } = prepareTools(mode);\n\n return {\n args: { ...baseArgs, tools, tool_choice },\n warnings: [...warnings, ...toolWarnings],\n };\n }\n\n case 'object-json': {\n return {\n args: {\n ...baseArgs,\n response_format: { type: 'json_object' },\n },\n warnings,\n };\n }\n\n case 'object-tool': {\n return {\n args: {\n ...baseArgs,\n tool_choice: 'any',\n tools: [{ type: 'function', function: mode.tool }],\n },\n warnings,\n };\n }\n\n default: {\n const _exhaustiveCheck: never = type;\n throw new Error(`Unsupported type: ${_exhaustiveCheck}`);\n }\n }\n }\n\n async doGenerate(\n options: Parameters<LanguageModelV1['doGenerate']>[0],\n ): Promise<Awaited<ReturnType<LanguageModelV1['doGenerate']>>> {\n const { args, warnings } = this.getArgs(options);\n\n const { responseHeaders, value: response } = await postJsonToApi({\n url: `${this.config.baseURL}/chat/completions`,\n headers: combineHeaders(this.config.headers(), options.headers),\n body: args,\n failedResponseHandler: mistralFailedResponseHandler,\n successfulResponseHandler: createJsonResponseHandler(\n mistralChatResponseSchema,\n ),\n abortSignal: options.abortSignal,\n fetch: this.config.fetch,\n });\n\n const { messages: rawPrompt, ...rawSettings } = args;\n const choice = response.choices[0];\n let text = choice.message.content ?? undefined;\n\n // when there is a trailing assistant message, mistral will send the\n // content of that message again. we skip this repeated content to\n // avoid duplication, e.g. 
in continuation mode.\n const lastMessage = rawPrompt[rawPrompt.length - 1];\n if (\n lastMessage.role === 'assistant' &&\n text?.startsWith(lastMessage.content)\n ) {\n text = text.slice(lastMessage.content.length);\n }\n\n return {\n text,\n toolCalls: choice.message.tool_calls?.map(toolCall => ({\n toolCallType: 'function',\n toolCallId: toolCall.id,\n toolName: toolCall.function.name,\n args: toolCall.function.arguments!,\n })),\n finishReason: mapMistralFinishReason(choice.finish_reason),\n usage: {\n promptTokens: response.usage.prompt_tokens,\n completionTokens: response.usage.completion_tokens,\n },\n rawCall: { rawPrompt, rawSettings },\n rawResponse: { headers: responseHeaders },\n request: { body: JSON.stringify(args) },\n response: getResponseMetadata(response),\n warnings,\n };\n }\n\n async doStream(\n options: Parameters<LanguageModelV1['doStream']>[0],\n ): Promise<Awaited<ReturnType<LanguageModelV1['doStream']>>> {\n const { args, warnings } = this.getArgs(options);\n\n const body = { ...args, stream: true };\n\n const { responseHeaders, value: response } = await postJsonToApi({\n url: `${this.config.baseURL}/chat/completions`,\n headers: combineHeaders(this.config.headers(), options.headers),\n body,\n failedResponseHandler: mistralFailedResponseHandler,\n successfulResponseHandler: createEventSourceResponseHandler(\n mistralChatChunkSchema,\n ),\n abortSignal: options.abortSignal,\n fetch: this.config.fetch,\n });\n\n const { messages: rawPrompt, ...rawSettings } = args;\n\n let finishReason: LanguageModelV1FinishReason = 'unknown';\n let usage: { promptTokens: number; completionTokens: number } = {\n promptTokens: Number.NaN,\n completionTokens: Number.NaN,\n };\n let chunkNumber = 0;\n let trimLeadingSpace = false;\n\n return {\n stream: response.pipeThrough(\n new TransformStream<\n ParseResult<z.infer<typeof mistralChatChunkSchema>>,\n LanguageModelV1StreamPart\n >({\n transform(chunk, controller) {\n if (!chunk.success) {\n controller.enqueue({ type: 'error', error: chunk.error });\n return;\n }\n\n chunkNumber++;\n\n const value = chunk.value;\n\n if (chunkNumber === 1) {\n controller.enqueue({\n type: 'response-metadata',\n ...getResponseMetadata(value),\n });\n }\n\n if (value.usage != null) {\n usage = {\n promptTokens: value.usage.prompt_tokens,\n completionTokens: value.usage.completion_tokens,\n };\n }\n\n const choice = value.choices[0];\n\n if (choice?.finish_reason != null) {\n finishReason = mapMistralFinishReason(choice.finish_reason);\n }\n\n if (choice?.delta == null) {\n return;\n }\n\n const delta = choice.delta;\n\n // when there is a trailing assistant message, mistral will send the\n // content of that message again. we skip this repeated content to\n // avoid duplication, e.g. in continuation mode.\n if (chunkNumber <= 2) {\n const lastMessage = rawPrompt[rawPrompt.length - 1];\n\n if (\n lastMessage.role === 'assistant' &&\n delta.content === lastMessage.content.trimEnd()\n ) {\n // Mistral moves the trailing space from the prefix to the next chunk.\n // We trim the leading space to avoid duplication.\n if (delta.content.length < lastMessage.content.length) {\n trimLeadingSpace = true;\n }\n\n // skip the repeated content:\n return;\n }\n }\n\n if (delta.content != null) {\n controller.enqueue({\n type: 'text-delta',\n textDelta: trimLeadingSpace\n ? 
delta.content.trimStart()\n : delta.content,\n });\n\n trimLeadingSpace = false;\n }\n\n if (delta.tool_calls != null) {\n for (const toolCall of delta.tool_calls) {\n // mistral tool calls come in one piece:\n controller.enqueue({\n type: 'tool-call-delta',\n toolCallType: 'function',\n toolCallId: toolCall.id,\n toolName: toolCall.function.name,\n argsTextDelta: toolCall.function.arguments,\n });\n controller.enqueue({\n type: 'tool-call',\n toolCallType: 'function',\n toolCallId: toolCall.id,\n toolName: toolCall.function.name,\n args: toolCall.function.arguments,\n });\n }\n }\n },\n\n flush(controller) {\n controller.enqueue({ type: 'finish', finishReason, usage });\n },\n }),\n ),\n rawCall: { rawPrompt, rawSettings },\n rawResponse: { headers: responseHeaders },\n request: { body: JSON.stringify(body) },\n warnings,\n };\n }\n}\n\n// limited version of the schema, focussed on what is needed for the implementation\n// this approach limits breakages when the API changes and increases efficiency\nconst mistralChatResponseSchema = z.object({\n id: z.string().nullish(),\n created: z.number().nullish(),\n model: z.string().nullish(),\n choices: z.array(\n z.object({\n message: z.object({\n role: z.literal('assistant'),\n content: z.string().nullable(),\n tool_calls: z\n .array(\n z.object({\n id: z.string(),\n function: z.object({ name: z.string(), arguments: z.string() }),\n }),\n )\n .nullish(),\n }),\n index: z.number(),\n finish_reason: z.string().nullish(),\n }),\n ),\n object: z.literal('chat.completion'),\n usage: z.object({\n prompt_tokens: z.number(),\n completion_tokens: z.number(),\n }),\n});\n\n// limited version of the schema, focussed on what is needed for the implementation\n// this approach limits breakages when the API changes and increases efficiency\nconst mistralChatChunkSchema = z.object({\n id: z.string().nullish(),\n created: z.number().nullish(),\n model: z.string().nullish(),\n choices: z.array(\n z.object({\n delta: z.object({\n role: z.enum(['assistant']).optional(),\n content: z.string().nullish(),\n tool_calls: z\n .array(\n z.object({\n id: z.string(),\n function: z.object({ name: z.string(), arguments: z.string() }),\n }),\n )\n .nullish(),\n }),\n finish_reason: z.string().nullish(),\n index: z.number(),\n }),\n ),\n usage: z\n .object({\n prompt_tokens: z.number(),\n completion_tokens: z.number(),\n })\n .nullish(),\n});\n","import {\n LanguageModelV1Prompt,\n UnsupportedFunctionalityError,\n} from '@ai-sdk/provider';\nimport { convertUint8ArrayToBase64 } from '@ai-sdk/provider-utils';\nimport { MistralPrompt } from './mistral-chat-prompt';\n\nexport function convertToMistralChatMessages(\n prompt: LanguageModelV1Prompt,\n): MistralPrompt {\n const messages: MistralPrompt = [];\n\n for (let i = 0; i < prompt.length; i++) {\n const { role, content } = prompt[i];\n const isLastMessage = i === prompt.length - 1;\n\n switch (role) {\n case 'system': {\n messages.push({ role: 'system', content });\n break;\n }\n\n case 'user': {\n messages.push({\n role: 'user',\n content: content.map(part => {\n switch (part.type) {\n case 'text': {\n return { type: 'text', text: part.text };\n }\n case 'image': {\n return {\n type: 'image_url',\n image_url:\n part.image instanceof URL\n ? part.image.toString()\n : `data:${\n part.mimeType ?? 
'image/jpeg'\n };base64,${convertUint8ArrayToBase64(part.image)}`,\n };\n }\n case 'file': {\n throw new UnsupportedFunctionalityError({\n functionality: 'File content parts in user messages',\n });\n }\n }\n }),\n });\n break;\n }\n\n case 'assistant': {\n let text = '';\n const toolCalls: Array<{\n id: string;\n type: 'function';\n function: { name: string; arguments: string };\n }> = [];\n\n for (const part of content) {\n switch (part.type) {\n case 'text': {\n text += part.text;\n break;\n }\n case 'tool-call': {\n toolCalls.push({\n id: part.toolCallId,\n type: 'function',\n function: {\n name: part.toolName,\n arguments: JSON.stringify(part.args),\n },\n });\n break;\n }\n default: {\n const _exhaustiveCheck: never = part;\n throw new Error(`Unsupported part: ${_exhaustiveCheck}`);\n }\n }\n }\n\n messages.push({\n role: 'assistant',\n content: text,\n prefix: isLastMessage ? true : undefined,\n tool_calls: toolCalls.length > 0 ? toolCalls : undefined,\n });\n\n break;\n }\n case 'tool': {\n for (const toolResponse of content) {\n messages.push({\n role: 'tool',\n name: toolResponse.toolName,\n content: JSON.stringify(toolResponse.result),\n tool_call_id: toolResponse.toolCallId,\n });\n }\n break;\n }\n default: {\n const _exhaustiveCheck: never = role;\n throw new Error(`Unsupported role: ${_exhaustiveCheck}`);\n }\n }\n }\n\n return messages;\n}\n","import { LanguageModelV1FinishReason } from '@ai-sdk/provider';\n\nexport function mapMistralFinishReason(\n finishReason: string | null | undefined,\n): LanguageModelV1FinishReason {\n switch (finishReason) {\n case 'stop':\n return 'stop';\n case 'length':\n case 'model_length':\n return 'length';\n case 'tool_calls':\n return 'tool-calls';\n default:\n return 'unknown';\n }\n}\n","import { createJsonErrorResponseHandler } from '@ai-sdk/provider-utils';\nimport { z } from 'zod';\n\nconst mistralErrorDataSchema = z.object({\n object: z.literal('error'),\n message: z.string(),\n type: z.string(),\n param: z.string().nullable(),\n code: z.string().nullable(),\n});\n\nexport type MistralErrorData = z.infer<typeof mistralErrorDataSchema>;\n\nexport const mistralFailedResponseHandler = createJsonErrorResponseHandler({\n errorSchema: mistralErrorDataSchema,\n errorToMessage: data => data.message,\n});\n","export function getResponseMetadata({\n id,\n model,\n created,\n}: {\n id?: string | undefined | null;\n created?: number | undefined | null;\n model?: string | undefined | null;\n}) {\n return {\n id: id ?? undefined,\n modelId: model ?? undefined,\n timestamp: created != null ? new Date(created * 1000) : undefined,\n };\n}\n","import {\n LanguageModelV1,\n LanguageModelV1CallWarning,\n UnsupportedFunctionalityError,\n} from '@ai-sdk/provider';\n\nexport function prepareTools(\n mode: Parameters<LanguageModelV1['doGenerate']>[0]['mode'] & {\n type: 'regular';\n },\n): {\n tools:\n | Array<{\n type: 'function';\n function: {\n name: string;\n description: string | undefined;\n parameters: unknown;\n };\n }>\n | undefined;\n tool_choice:\n | { type: 'function'; function: { name: string } }\n | 'auto'\n | 'none'\n | 'any'\n | undefined;\n toolWarnings: LanguageModelV1CallWarning[];\n} {\n // when the tools array is empty, change it to undefined to prevent errors:\n const tools = mode.tools?.length ? 
mode.tools : undefined;\n const toolWarnings: LanguageModelV1CallWarning[] = [];\n\n if (tools == null) {\n return { tools: undefined, tool_choice: undefined, toolWarnings };\n }\n\n const mistralTools: Array<{\n type: 'function';\n function: {\n name: string;\n description: string | undefined;\n parameters: unknown;\n };\n }> = [];\n\n for (const tool of tools) {\n if (tool.type === 'provider-defined') {\n toolWarnings.push({ type: 'unsupported-tool', tool });\n } else {\n mistralTools.push({\n type: 'function',\n function: {\n name: tool.name,\n description: tool.description,\n parameters: tool.parameters,\n },\n });\n }\n }\n\n const toolChoice = mode.toolChoice;\n\n if (toolChoice == null) {\n return { tools: mistralTools, tool_choice: undefined, toolWarnings };\n }\n\n const type = toolChoice.type;\n\n switch (type) {\n case 'auto':\n case 'none':\n return { tools: mistralTools, tool_choice: type, toolWarnings };\n case 'required':\n return { tools: mistralTools, tool_choice: 'any', toolWarnings };\n\n // mistral does not support tool mode directly,\n // so we filter the tools and force the tool choice through 'any'\n case 'tool':\n return {\n tools: mistralTools.filter(\n tool => tool.function.name === toolChoice.toolName,\n ),\n tool_choice: 'any',\n toolWarnings,\n };\n default: {\n const _exhaustiveCheck: never = type;\n throw new UnsupportedFunctionalityError({\n functionality: `Unsupported tool choice type: ${_exhaustiveCheck}`,\n });\n }\n }\n}\n","import {\n EmbeddingModelV1,\n TooManyEmbeddingValuesForCallError,\n} from '@ai-sdk/provider';\nimport {\n combineHeaders,\n createJsonResponseHandler,\n FetchFunction,\n postJsonToApi,\n} from '@ai-sdk/provider-utils';\nimport { z } from 'zod';\nimport {\n MistralEmbeddingModelId,\n MistralEmbeddingSettings,\n} from './mistral-embedding-settings';\nimport { mistralFailedResponseHandler } from './mistral-error';\n\ntype MistralEmbeddingConfig = {\n provider: string;\n baseURL: string;\n headers: () => Record<string, string | undefined>;\n fetch?: FetchFunction;\n};\n\nexport class MistralEmbeddingModel implements EmbeddingModelV1<string> {\n readonly specificationVersion = 'v1';\n readonly modelId: MistralEmbeddingModelId;\n\n private readonly config: MistralEmbeddingConfig;\n private readonly settings: MistralEmbeddingSettings;\n\n get provider(): string {\n return this.config.provider;\n }\n\n get maxEmbeddingsPerCall(): number {\n return this.settings.maxEmbeddingsPerCall ?? 32;\n }\n\n get supportsParallelCalls(): boolean {\n // Parallel calls are technically possible,\n // but I have been hitting rate limits and disable them for now.\n return this.settings.supportsParallelCalls ?? 
false;\n }\n\n constructor(\n modelId: MistralEmbeddingModelId,\n settings: MistralEmbeddingSettings,\n config: MistralEmbeddingConfig,\n ) {\n this.modelId = modelId;\n this.settings = settings;\n this.config = config;\n }\n\n async doEmbed({\n values,\n abortSignal,\n headers,\n }: Parameters<EmbeddingModelV1<string>['doEmbed']>[0]): Promise<\n Awaited<ReturnType<EmbeddingModelV1<string>['doEmbed']>>\n > {\n if (values.length > this.maxEmbeddingsPerCall) {\n throw new TooManyEmbeddingValuesForCallError({\n provider: this.provider,\n modelId: this.modelId,\n maxEmbeddingsPerCall: this.maxEmbeddingsPerCall,\n values,\n });\n }\n\n const { responseHeaders, value: response } = await postJsonToApi({\n url: `${this.config.baseURL}/embeddings`,\n headers: combineHeaders(this.config.headers(), headers),\n body: {\n model: this.modelId,\n input: values,\n encoding_format: 'float',\n },\n failedResponseHandler: mistralFailedResponseHandler,\n successfulResponseHandler: createJsonResponseHandler(\n MistralTextEmbeddingResponseSchema,\n ),\n abortSignal,\n fetch: this.config.fetch,\n });\n\n return {\n embeddings: response.data.map(item => item.embedding),\n usage: response.usage\n ? { tokens: response.usage.prompt_tokens }\n : undefined,\n rawResponse: { headers: responseHeaders },\n };\n }\n}\n\n// minimal version of the schema, focussed on what is needed for the implementation\n// this approach limits breakages when the API changes and increases efficiency\nconst MistralTextEmbeddingResponseSchema = z.object({\n data: z.array(z.object({ embedding: z.array(z.number()) })),\n usage: z.object({ prompt_tokens: z.number() }).nullish(),\n});\n"],"mappings":";AAKA;AAAA,EAEE;AAAA,EACA;AAAA,OACK;;;ACHP;AAAA,EAGE;AAAA,EACA;AAAA,EACA;AAAA,EACA;AAAA,OACK;AACP,SAAS,KAAAA,UAAS;;;ACdlB;AAAA,EAEE;AAAA,OACK;AACP,SAAS,iCAAiC;AAGnC,SAAS,6BACd,QACe;AACf,QAAM,WAA0B,CAAC;AAEjC,WAAS,IAAI,GAAG,IAAI,OAAO,QAAQ,KAAK;AACtC,UAAM,EAAE,MAAM,QAAQ,IAAI,OAAO,CAAC;AAClC,UAAM,gBAAgB,MAAM,OAAO,SAAS;AAE5C,YAAQ,MAAM;AAAA,MACZ,KAAK,UAAU;AACb,iBAAS,KAAK,EAAE,MAAM,UAAU,QAAQ,CAAC;AACzC;AAAA,MACF;AAAA,MAEA,KAAK,QAAQ;AACX,iBAAS,KAAK;AAAA,UACZ,MAAM;AAAA,UACN,SAAS,QAAQ,IAAI,UAAQ;AAzBvC;AA0BY,oBAAQ,KAAK,MAAM;AAAA,cACjB,KAAK,QAAQ;AACX,uBAAO,EAAE,MAAM,QAAQ,MAAM,KAAK,KAAK;AAAA,cACzC;AAAA,cACA,KAAK,SAAS;AACZ,uBAAO;AAAA,kBACL,MAAM;AAAA,kBACN,WACE,KAAK,iBAAiB,MAClB,KAAK,MAAM,SAAS,IACpB,SACE,UAAK,aAAL,YAAiB,YACnB,WAAW,0BAA0B,KAAK,KAAK,CAAC;AAAA,gBACxD;AAAA,cACF;AAAA,cACA,KAAK,QAAQ;AACX,sBAAM,IAAI,8BAA8B;AAAA,kBACtC,eAAe;AAAA,gBACjB,CAAC;AAAA,cACH;AAAA,YACF;AAAA,UACF,CAAC;AAAA,QACH,CAAC;AACD;AAAA,MACF;AAAA,MAEA,KAAK,aAAa;AAChB,YAAI,OAAO;AACX,cAAM,YAID,CAAC;AAEN,mBAAW,QAAQ,SAAS;AAC1B,kBAAQ,KAAK,MAAM;AAAA,YACjB,KAAK,QAAQ;AACX,sBAAQ,KAAK;AACb;AAAA,YACF;AAAA,YACA,KAAK,aAAa;AAChB,wBAAU,KAAK;AAAA,gBACb,IAAI,KAAK;AAAA,gBACT,MAAM;AAAA,gBACN,UAAU;AAAA,kBACR,MAAM,KAAK;AAAA,kBACX,WAAW,KAAK,UAAU,KAAK,IAAI;AAAA,gBACrC;AAAA,cACF,CAAC;AACD;AAAA,YACF;AAAA,YACA,SAAS;AACP,oBAAM,mBAA0B;AAChC,oBAAM,IAAI,MAAM,qBAAqB,gBAAgB,EAAE;AAAA,YACzD;AAAA,UACF;AAAA,QACF;AAEA,iBAAS,KAAK;AAAA,UACZ,MAAM;AAAA,UACN,SAAS;AAAA,UACT,QAAQ,gBAAgB,OAAO;AAAA,UAC/B,YAAY,UAAU,SAAS,IAAI,YAAY;AAAA,QACjD,CAAC;AAED;AAAA,MACF;AAAA,MACA,KAAK,QAAQ;AACX,mBAAW,gBAAgB,SAAS;AAClC,mBAAS,KAAK;AAAA,YACZ,MAAM;AAAA,YACN,MAAM,aAAa;AAAA,YACnB,SAAS,KAAK,UAAU,aAAa,MAAM;AAAA,YAC3C,cAAc,aAAa;AAAA,UAC7B,CAAC;AAAA,QACH;AACA;AAAA,MACF;AAAA,MACA,SAAS;AACP,cAAM,mBAA0B;AAChC,cAAM,IAAI,MAAM,qBAAqB,gBAAgB,EAAE;AAAA,MACzD;AAAA,IACF;AAAA,EACF;AAEA,SAAO;AACT;;;AC9GO,SAAS,uBACd,cAC6B;AAC7B,UAAQ,cAAc;AAAA,IACpB,KAAK;AACH,aAAO;AAAA,I
ACT,KAAK;AAAA,IACL,KAAK;AACH,aAAO;AAAA,IACT,KAAK;AACH,aAAO;AAAA,IACT;AACE,aAAO;AAAA,EACX;AACF;;;AChBA,SAAS,sCAAsC;AAC/C,SAAS,SAAS;AAElB,IAAM,yBAAyB,EAAE,OAAO;AAAA,EACtC,QAAQ,EAAE,QAAQ,OAAO;AAAA,EACzB,SAAS,EAAE,OAAO;AAAA,EAClB,MAAM,EAAE,OAAO;AAAA,EACf,OAAO,EAAE,OAAO,EAAE,SAAS;AAAA,EAC3B,MAAM,EAAE,OAAO,EAAE,SAAS;AAC5B,CAAC;AAIM,IAAM,+BAA+B,+BAA+B;AAAA,EACzE,aAAa;AAAA,EACb,gBAAgB,UAAQ,KAAK;AAC/B,CAAC;;;AChBM,SAAS,oBAAoB;AAAA,EAClC;AAAA,EACA;AAAA,EACA;AACF,GAIG;AACD,SAAO;AAAA,IACL,IAAI,kBAAM;AAAA,IACV,SAAS,wBAAS;AAAA,IAClB,WAAW,WAAW,OAAO,IAAI,KAAK,UAAU,GAAI,IAAI;AAAA,EAC1D;AACF;;;ACdA;AAAA,EAGE,iCAAAC;AAAA,OACK;AAEA,SAAS,aACd,MAqBA;AA5BF;AA8BE,QAAM,UAAQ,UAAK,UAAL,mBAAY,UAAS,KAAK,QAAQ;AAChD,QAAM,eAA6C,CAAC;AAEpD,MAAI,SAAS,MAAM;AACjB,WAAO,EAAE,OAAO,QAAW,aAAa,QAAW,aAAa;AAAA,EAClE;AAEA,QAAM,eAOD,CAAC;AAEN,aAAW,QAAQ,OAAO;AACxB,QAAI,KAAK,SAAS,oBAAoB;AACpC,mBAAa,KAAK,EAAE,MAAM,oBAAoB,KAAK,CAAC;AAAA,IACtD,OAAO;AACL,mBAAa,KAAK;AAAA,QAChB,MAAM;AAAA,QACN,UAAU;AAAA,UACR,MAAM,KAAK;AAAA,UACX,aAAa,KAAK;AAAA,UAClB,YAAY,KAAK;AAAA,QACnB;AAAA,MACF,CAAC;AAAA,IACH;AAAA,EACF;AAEA,QAAM,aAAa,KAAK;AAExB,MAAI,cAAc,MAAM;AACtB,WAAO,EAAE,OAAO,cAAc,aAAa,QAAW,aAAa;AAAA,EACrE;AAEA,QAAM,OAAO,WAAW;AAExB,UAAQ,MAAM;AAAA,IACZ,KAAK;AAAA,IACL,KAAK;AACH,aAAO,EAAE,OAAO,cAAc,aAAa,MAAM,aAAa;AAAA,IAChE,KAAK;AACH,aAAO,EAAE,OAAO,cAAc,aAAa,OAAO,aAAa;AAAA,IAIjE,KAAK;AACH,aAAO;AAAA,QACL,OAAO,aAAa;AAAA,UAClB,UAAQ,KAAK,SAAS,SAAS,WAAW;AAAA,QAC5C;AAAA,QACA,aAAa;AAAA,QACb;AAAA,MACF;AAAA,IACF,SAAS;AACP,YAAM,mBAA0B;AAChC,YAAM,IAAIA,+BAA8B;AAAA,QACtC,eAAe,iCAAiC,gBAAgB;AAAA,MAClE,CAAC;AAAA,IACH;AAAA,EACF;AACF;;;AL7DO,IAAM,2BAAN,MAA0D;AAAA,EAU/D,YACE,SACA,UACA,QACA;AAbF,SAAS,uBAAuB;AAChC,SAAS,8BAA8B;AACvC,SAAS,oBAAoB;AAY3B,SAAK,UAAU;AACf,SAAK,WAAW;AAChB,SAAK,SAAS;AAAA,EAChB;AAAA,EAEA,IAAI,WAAmB;AACrB,WAAO,KAAK,OAAO;AAAA,EACrB;AAAA,EAEQ,QAAQ;AAAA,IACd;AAAA,IACA;AAAA,IACA;AAAA,IACA;AAAA,IACA;AAAA,IACA;AAAA,IACA;AAAA,IACA;AAAA,IACA;AAAA,IACA;AAAA,IACA;AAAA,EACF,GAAiD;AAC/C,UAAM,OAAO,KAAK;AAElB,UAAM,WAAyC,CAAC;AAEhD,QAAI,QAAQ,MAAM;AAChB,eAAS,KAAK;AAAA,QACZ,MAAM;AAAA,QACN,SAAS;AAAA,MACX,CAAC;AAAA,IACH;AAEA,QAAI,oBAAoB,MAAM;AAC5B,eAAS,KAAK;AAAA,QACZ,MAAM;AAAA,QACN,SAAS;AAAA,MACX,CAAC;AAAA,IACH;AAEA,QAAI,mBAAmB,MAAM;AAC3B,eAAS,KAAK;AAAA,QACZ,MAAM;AAAA,QACN,SAAS;AAAA,MACX,CAAC;AAAA,IACH;AAEA,QAAI,iBAAiB,MAAM;AACzB,eAAS,KAAK;AAAA,QACZ,MAAM;AAAA,QACN,SAAS;AAAA,MACX,CAAC;AAAA,IACH;AAEA,QACE,kBAAkB,QAClB,eAAe,SAAS,UACxB,eAAe,UAAU,MACzB;AACA,eAAS,KAAK;AAAA,QACZ,MAAM;AAAA,QACN,SAAS;AAAA,QACT,SAAS;AAAA,MACX,CAAC;AAAA,IACH;AAEA,UAAM,WAAW;AAAA;AAAA,MAEf,OAAO,KAAK;AAAA;AAAA,MAGZ,aAAa,KAAK,SAAS;AAAA;AAAA,MAG3B,YAAY;AAAA,MACZ;AAAA,MACA,OAAO;AAAA,MACP,aAAa;AAAA;AAAA,MAGb,kBACE,iDAAgB,UAAS,SAAS,EAAE,MAAM,cAAc,IAAI;AAAA;AAAA,MAG9D,UAAU,6BAA6B,MAAM;AAAA,IAC/C;AAEA,YAAQ,MAAM;AAAA,MACZ,KAAK,WAAW;AACd,cAAM,EAAE,OAAO,aAAa,aAAa,IAAI,aAAa,IAAI;AAE9D,eAAO;AAAA,UACL,MAAM,EAAE,GAAG,UAAU,OAAO,YAAY;AAAA,UACxC,UAAU,CAAC,GAAG,UAAU,GAAG,YAAY;AAAA,QACzC;AAAA,MACF;AAAA,MAEA,KAAK,eAAe;AAClB,eAAO;AAAA,UACL,MAAM;AAAA,YACJ,GAAG;AAAA,YACH,iBAAiB,EAAE,MAAM,cAAc;AAAA,UACzC;AAAA,UACA;AAAA,QACF;AAAA,MACF;AAAA,MAEA,KAAK,eAAe;AAClB,eAAO;AAAA,UACL,MAAM;AAAA,YACJ,GAAG;AAAA,YACH,aAAa;AAAA,YACb,OAAO,CAAC,EAAE,MAAM,YAAY,UAAU,KAAK,KAAK,CAAC;AAAA,UACnD;AAAA,UACA;AAAA,QACF;AAAA,MACF;AAAA,MAEA,SAAS;AACP,cAAM,mBAA0B;AAChC,cAAM,IAAI,MAAM,qBAAqB,gBAAgB,EAAE;AAAA,MACzD;AAAA,IACF;AAAA,EACF;AAAA,EAEA,MAAM,WACJ,SAC6D;AA9KjE;AA+KI,UAAM,EAAE,MAAM,SAAS,IAAI,KAAK,QAAQ,OAAO;AAE/C,UAAM,EAAE,iBAAiB,OAAO,SAAS,IAAI,MAAM,cAAc;AAAA,MAC/D,KAAK,GAAG,KAAK,OAAO,OAAO;AAAA,MAC3B,SAAS,eAAe,KAAK,OAAO,QAAQ,GAAG,QAAQ,OAAO;AAAA,MAC9D,MAAM
;AAAA,MACN,uBAAuB;AAAA,MACvB,2BAA2B;AAAA,QACzB;AAAA,MACF;AAAA,MACA,aAAa,QAAQ;AAAA,MACrB,OAAO,KAAK,OAAO;AAAA,IACrB,CAAC;AAED,UAAM,EAAE,UAAU,WAAW,GAAG,YAAY,IAAI;AAChD,UAAM,SAAS,SAAS,QAAQ,CAAC;AACjC,QAAI,QAAO,YAAO,QAAQ,YAAf,YAA0B;AAKrC,UAAM,cAAc,UAAU,UAAU,SAAS,CAAC;AAClD,QACE,YAAY,SAAS,gBACrB,6BAAM,WAAW,YAAY,WAC7B;AACA,aAAO,KAAK,MAAM,YAAY,QAAQ,MAAM;AAAA,IAC9C;AAEA,WAAO;AAAA,MACL;AAAA,MACA,YAAW,YAAO,QAAQ,eAAf,mBAA2B,IAAI,eAAa;AAAA,QACrD,cAAc;AAAA,QACd,YAAY,SAAS;AAAA,QACrB,UAAU,SAAS,SAAS;AAAA,QAC5B,MAAM,SAAS,SAAS;AAAA,MAC1B;AAAA,MACA,cAAc,uBAAuB,OAAO,aAAa;AAAA,MACzD,OAAO;AAAA,QACL,cAAc,SAAS,MAAM;AAAA,QAC7B,kBAAkB,SAAS,MAAM;AAAA,MACnC;AAAA,MACA,SAAS,EAAE,WAAW,YAAY;AAAA,MAClC,aAAa,EAAE,SAAS,gBAAgB;AAAA,MACxC,SAAS,EAAE,MAAM,KAAK,UAAU,IAAI,EAAE;AAAA,MACtC,UAAU,oBAAoB,QAAQ;AAAA,MACtC;AAAA,IACF;AAAA,EACF;AAAA,EAEA,MAAM,SACJ,SAC2D;AAC3D,UAAM,EAAE,MAAM,SAAS,IAAI,KAAK,QAAQ,OAAO;AAE/C,UAAM,OAAO,EAAE,GAAG,MAAM,QAAQ,KAAK;AAErC,UAAM,EAAE,iBAAiB,OAAO,SAAS,IAAI,MAAM,cAAc;AAAA,MAC/D,KAAK,GAAG,KAAK,OAAO,OAAO;AAAA,MAC3B,SAAS,eAAe,KAAK,OAAO,QAAQ,GAAG,QAAQ,OAAO;AAAA,MAC9D;AAAA,MACA,uBAAuB;AAAA,MACvB,2BAA2B;AAAA,QACzB;AAAA,MACF;AAAA,MACA,aAAa,QAAQ;AAAA,MACrB,OAAO,KAAK,OAAO;AAAA,IACrB,CAAC;AAED,UAAM,EAAE,UAAU,WAAW,GAAG,YAAY,IAAI;AAEhD,QAAI,eAA4C;AAChD,QAAI,QAA4D;AAAA,MAC9D,cAAc,OAAO;AAAA,MACrB,kBAAkB,OAAO;AAAA,IAC3B;AACA,QAAI,cAAc;AAClB,QAAI,mBAAmB;AAEvB,WAAO;AAAA,MACL,QAAQ,SAAS;AAAA,QACf,IAAI,gBAGF;AAAA,UACA,UAAU,OAAO,YAAY;AAC3B,gBAAI,CAAC,MAAM,SAAS;AAClB,yBAAW,QAAQ,EAAE,MAAM,SAAS,OAAO,MAAM,MAAM,CAAC;AACxD;AAAA,YACF;AAEA;AAEA,kBAAM,QAAQ,MAAM;AAEpB,gBAAI,gBAAgB,GAAG;AACrB,yBAAW,QAAQ;AAAA,gBACjB,MAAM;AAAA,gBACN,GAAG,oBAAoB,KAAK;AAAA,cAC9B,CAAC;AAAA,YACH;AAEA,gBAAI,MAAM,SAAS,MAAM;AACvB,sBAAQ;AAAA,gBACN,cAAc,MAAM,MAAM;AAAA,gBAC1B,kBAAkB,MAAM,MAAM;AAAA,cAChC;AAAA,YACF;AAEA,kBAAM,SAAS,MAAM,QAAQ,CAAC;AAE9B,iBAAI,iCAAQ,kBAAiB,MAAM;AACjC,6BAAe,uBAAuB,OAAO,aAAa;AAAA,YAC5D;AAEA,iBAAI,iCAAQ,UAAS,MAAM;AACzB;AAAA,YACF;AAEA,kBAAM,QAAQ,OAAO;AAKrB,gBAAI,eAAe,GAAG;AACpB,oBAAM,cAAc,UAAU,UAAU,SAAS,CAAC;AAElD,kBACE,YAAY,SAAS,eACrB,MAAM,YAAY,YAAY,QAAQ,QAAQ,GAC9C;AAGA,oBAAI,MAAM,QAAQ,SAAS,YAAY,QAAQ,QAAQ;AACrD,qCAAmB;AAAA,gBACrB;AAGA;AAAA,cACF;AAAA,YACF;AAEA,gBAAI,MAAM,WAAW,MAAM;AACzB,yBAAW,QAAQ;AAAA,gBACjB,MAAM;AAAA,gBACN,WAAW,mBACP,MAAM,QAAQ,UAAU,IACxB,MAAM;AAAA,cACZ,CAAC;AAED,iCAAmB;AAAA,YACrB;AAEA,gBAAI,MAAM,cAAc,MAAM;AAC5B,yBAAW,YAAY,MAAM,YAAY;AAEvC,2BAAW,QAAQ;AAAA,kBACjB,MAAM;AAAA,kBACN,cAAc;AAAA,kBACd,YAAY,SAAS;AAAA,kBACrB,UAAU,SAAS,SAAS;AAAA,kBAC5B,eAAe,SAAS,SAAS;AAAA,gBACnC,CAAC;AACD,2BAAW,QAAQ;AAAA,kBACjB,MAAM;AAAA,kBACN,cAAc;AAAA,kBACd,YAAY,SAAS;AAAA,kBACrB,UAAU,SAAS,SAAS;AAAA,kBAC5B,MAAM,SAAS,SAAS;AAAA,gBAC1B,CAAC;AAAA,cACH;AAAA,YACF;AAAA,UACF;AAAA,UAEA,MAAM,YAAY;AAChB,uBAAW,QAAQ,EAAE,MAAM,UAAU,cAAc,MAAM,CAAC;AAAA,UAC5D;AAAA,QACF,CAAC;AAAA,MACH;AAAA,MACA,SAAS,EAAE,WAAW,YAAY;AAAA,MAClC,aAAa,EAAE,SAAS,gBAAgB;AAAA,MACxC,SAAS,EAAE,MAAM,KAAK,UAAU,IAAI,EAAE;AAAA,MACtC;AAAA,IACF;AAAA,EACF;AACF;AAIA,IAAM,4BAA4BC,GAAE,OAAO;AAAA,EACzC,IAAIA,GAAE,OAAO,EAAE,QAAQ;AAAA,EACvB,SAASA,GAAE,OAAO,EAAE,QAAQ;AAAA,EAC5B,OAAOA,GAAE,OAAO,EAAE,QAAQ;AAAA,EAC1B,SAASA,GAAE;AAAA,IACTA,GAAE,OAAO;AAAA,MACP,SAASA,GAAE,OAAO;AAAA,QAChB,MAAMA,GAAE,QAAQ,WAAW;AAAA,QAC3B,SAASA,GAAE,OAAO,EAAE,SAAS;AAAA,QAC7B,YAAYA,GACT;AAAA,UACCA,GAAE,OAAO;AAAA,YACP,IAAIA,GAAE,OAAO;AAAA,YACb,UAAUA,GAAE,OAAO,EAAE,MAAMA,GAAE,OAAO,GAAG,WAAWA,GAAE,OAAO,EAAE,CAAC;AAAA,UAChE,CAAC;AAAA,QACH,EACC,QAAQ;AAAA,MACb,CAAC;AAAA,MACD,OAAOA,GAAE,OAAO;AAAA,MAChB,eAAeA,GAAE,OAAO,EAAE,QAAQ;AAAA,IACpC,CAAC;AAAA,EACH;AAAA,EACA,QAAQA,GAAE,QAAQ,iBAAiB;AAAA,EACnC,OAAOA,GAAE,OAAO;AAAA,IACd,eAAeA,GAAE,OAAO;AAA
A,IACxB,mBAAmBA,GAAE,OAAO;AAAA,EAC9B,CAAC;AACH,CAAC;AAID,IAAM,yBAAyBA,GAAE,OAAO;AAAA,EACtC,IAAIA,GAAE,OAAO,EAAE,QAAQ;AAAA,EACvB,SAASA,GAAE,OAAO,EAAE,QAAQ;AAAA,EAC5B,OAAOA,GAAE,OAAO,EAAE,QAAQ;AAAA,EAC1B,SAASA,GAAE;AAAA,IACTA,GAAE,OAAO;AAAA,MACP,OAAOA,GAAE,OAAO;AAAA,QACd,MAAMA,GAAE,KAAK,CAAC,WAAW,CAAC,EAAE,SAAS;AAAA,QACrC,SAASA,GAAE,OAAO,EAAE,QAAQ;AAAA,QAC5B,YAAYA,GACT;AAAA,UACCA,GAAE,OAAO;AAAA,YACP,IAAIA,GAAE,OAAO;AAAA,YACb,UAAUA,GAAE,OAAO,EAAE,MAAMA,GAAE,OAAO,GAAG,WAAWA,GAAE,OAAO,EAAE,CAAC;AAAA,UAChE,CAAC;AAAA,QACH,EACC,QAAQ;AAAA,MACb,CAAC;AAAA,MACD,eAAeA,GAAE,OAAO,EAAE,QAAQ;AAAA,MAClC,OAAOA,GAAE,OAAO;AAAA,IAClB,CAAC;AAAA,EACH;AAAA,EACA,OAAOA,GACJ,OAAO;AAAA,IACN,eAAeA,GAAE,OAAO;AAAA,IACxB,mBAAmBA,GAAE,OAAO;AAAA,EAC9B,CAAC,EACA,QAAQ;AACb,CAAC;;;AMvaD;AAAA,EAEE;AAAA,OACK;AACP;AAAA,EACE,kBAAAC;AAAA,EACA,6BAAAC;AAAA,EAEA,iBAAAC;AAAA,OACK;AACP,SAAS,KAAAC,UAAS;AAcX,IAAM,wBAAN,MAAgE;AAAA,EAqBrE,YACE,SACA,UACA,QACA;AAxBF,SAAS,uBAAuB;AAyB9B,SAAK,UAAU;AACf,SAAK,WAAW;AAChB,SAAK,SAAS;AAAA,EAChB;AAAA,EAtBA,IAAI,WAAmB;AACrB,WAAO,KAAK,OAAO;AAAA,EACrB;AAAA,EAEA,IAAI,uBAA+B;AAnCrC;AAoCI,YAAO,UAAK,SAAS,yBAAd,YAAsC;AAAA,EAC/C;AAAA,EAEA,IAAI,wBAAiC;AAvCvC;AA0CI,YAAO,UAAK,SAAS,0BAAd,YAAuC;AAAA,EAChD;AAAA,EAYA,MAAM,QAAQ;AAAA,IACZ;AAAA,IACA;AAAA,IACA;AAAA,EACF,GAEE;AACA,QAAI,OAAO,SAAS,KAAK,sBAAsB;AAC7C,YAAM,IAAI,mCAAmC;AAAA,QAC3C,UAAU,KAAK;AAAA,QACf,SAAS,KAAK;AAAA,QACd,sBAAsB,KAAK;AAAA,QAC3B;AAAA,MACF,CAAC;AAAA,IACH;AAEA,UAAM,EAAE,iBAAiB,OAAO,SAAS,IAAI,MAAMC,eAAc;AAAA,MAC/D,KAAK,GAAG,KAAK,OAAO,OAAO;AAAA,MAC3B,SAASC,gBAAe,KAAK,OAAO,QAAQ,GAAG,OAAO;AAAA,MACtD,MAAM;AAAA,QACJ,OAAO,KAAK;AAAA,QACZ,OAAO;AAAA,QACP,iBAAiB;AAAA,MACnB;AAAA,MACA,uBAAuB;AAAA,MACvB,2BAA2BC;AAAA,QACzB;AAAA,MACF;AAAA,MACA;AAAA,MACA,OAAO,KAAK,OAAO;AAAA,IACrB,CAAC;AAED,WAAO;AAAA,MACL,YAAY,SAAS,KAAK,IAAI,UAAQ,KAAK,SAAS;AAAA,MACpD,OAAO,SAAS,QACZ,EAAE,QAAQ,SAAS,MAAM,cAAc,IACvC;AAAA,MACJ,aAAa,EAAE,SAAS,gBAAgB;AAAA,IAC1C;AAAA,EACF;AACF;AAIA,IAAM,qCAAqCC,GAAE,OAAO;AAAA,EAClD,MAAMA,GAAE,MAAMA,GAAE,OAAO,EAAE,WAAWA,GAAE,MAAMA,GAAE,OAAO,CAAC,EAAE,CAAC,CAAC;AAAA,EAC1D,OAAOA,GAAE,OAAO,EAAE,eAAeA,GAAE,OAAO,EAAE,CAAC,EAAE,QAAQ;AACzD,CAAC;;;APTM,SAAS,cACd,UAAmC,CAAC,GACnB;AA/FnB;AAgGE,QAAM,WACJ,0BAAqB,QAAQ,OAAO,MAApC,YAAyC;AAE3C,QAAM,aAAa,OAAO;AAAA,IACxB,eAAe,UAAU,WAAW;AAAA,MAClC,QAAQ,QAAQ;AAAA,MAChB,yBAAyB;AAAA,MACzB,aAAa;AAAA,IACf,CAAC,CAAC;AAAA,IACF,GAAG,QAAQ;AAAA,EACb;AAEA,QAAM,kBAAkB,CACtB,SACA,WAAgC,CAAC,MAEjC,IAAI,yBAAyB,SAAS,UAAU;AAAA,IAC9C,UAAU;AAAA,IACV;AAAA,IACA,SAAS;AAAA,IACT,OAAO,QAAQ;AAAA,EACjB,CAAC;AAEH,QAAM,uBAAuB,CAC3B,SACA,WAAqC,CAAC,MAEtC,IAAI,sBAAsB,SAAS,UAAU;AAAA,IAC3C,UAAU;AAAA,IACV;AAAA,IACA,SAAS;AAAA,IACT,OAAO,QAAQ;AAAA,EACjB,CAAC;AAEH,QAAM,WAAW,SACf,SACA,UACA;AACA,QAAI,YAAY;AACd,YAAM,IAAI;AAAA,QACR;AAAA,MACF;AAAA,IACF;AAEA,WAAO,gBAAgB,SAAS,QAAQ;AAAA,EAC1C;AAEA,WAAS,gBAAgB;AACzB,WAAS,OAAO;AAChB,WAAS,YAAY;AACrB,WAAS,gBAAgB;AACzB,WAAS,qBAAqB;AAE9B,SAAO;AACT;AAKO,IAAM,UAAU,cAAc;","names":["z","UnsupportedFunctionalityError","z","combineHeaders","createJsonResponseHandler","postJsonToApi","z","postJsonToApi","combineHeaders","createJsonResponseHandler","z"]}
|
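For orientation, the bundled sources embedded in the source map above (mistral-provider.ts, mistral-chat-language-model.ts, mistral-embedding-model.ts) show the shape of the provider API in this canary: a createMistral factory, a callable provider object, and chat/embedding model constructors. The following is a minimal usage sketch based only on what is visible in those sources; the commented option values and the chosen model IDs are illustrative, not taken from this diff.

  import { createMistral } from '@ai-sdk/mistral';

  // All options are optional: baseURL defaults to https://api.mistral.ai/v1
  // and apiKey falls back to the MISTRAL_API_KEY environment variable.
  const provider = createMistral({
    // baseURL: 'https://proxy.example.com/v1', // hypothetical proxy endpoint
    // headers: { 'x-team': 'demo' },           // hypothetical custom header
  });

  // The provider is callable; .chat() and .languageModel() return the same chat model.
  const chatModel = provider('mistral-large-latest');

  // Embeddings go through textEmbeddingModel(); 'mistral-embed' is one of the listed IDs.
  const embeddingModel = provider.textEmbeddingModel('mistral-embed');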
package/package.json
CHANGED
|
@@ -1,6 +1,6 @@
  1   1     {
  2   2       "name": "@ai-sdk/mistral",
  3       -     "version": "1.0.0-canary.1",
      3   +     "version": "1.0.0-canary.3",
  4   4       "license": "Apache-2.0",
  5   5       "sideEffects": false,
  6   6       "main": "./dist/index.js",
@@ -20,12 +20,12 @@
 20  20       },
 21  21       "dependencies": {
 22  22         "@ai-sdk/provider": "1.0.0-canary.0",
 23       -       "@ai-sdk/provider-utils": "2.0.0-canary.
     23   +       "@ai-sdk/provider-utils": "2.0.0-canary.3"
 24  24       },
 25  25       "devDependencies": {
 26  26         "@types/node": "^18",
 27  27         "tsup": "^8",
 28       -       "typescript": "5.
     28   +       "typescript": "5.6.3",
 29  29         "zod": "3.23.8",
 30  30         "@vercel/ai-tsconfig": "0.0.0"
 31  31       },
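The dependency bump above only changes the package's own pin on @ai-sdk/provider-utils; consumers install the provider package itself and receive provider-utils transitively. A sketch of opting into this canary from an application (the surrounding project setup is assumed, not part of this diff):

  // npm install @ai-sdk/mistral@1.0.0-canary.3
  import { mistral } from '@ai-sdk/mistral';

  // Default provider instance; uses the MISTRAL_API_KEY environment variable by default.
  const model = mistral('mistral-small-latest');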