@ai-sdk/mistral 0.0.0
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/README.md +50 -0
- package/dist/index.d.mts +52 -0
- package/dist/index.d.ts +52 -0
- package/dist/index.js +445 -0
- package/dist/index.js.map +1 -0
- package/dist/index.mjs +425 -0
- package/dist/index.mjs.map +1 -0
- package/package.json +67 -0
package/README.md
ADDED
|
@@ -0,0 +1,50 @@
|
|
|
1
|
+
# Vercel AI SDK - Mistral Provider
|
|
2
|
+
|
|
3
|
+
The Mistral provider contains language model support for the Mistral chat API.
|
|
4
|
+
It creates language model objects that can be used with the `generateText`, `streamText`, `generateObject`, and `streamObject` AI functions.
|
|
5
|
+
|
|
6
|
+
## Setup
|
|
7
|
+
|
|
8
|
+
The Mistral provider is available in the `@ai-sdk/mistral` module. You can install it with
|
|
9
|
+
|
|
10
|
+
```bash
|
|
11
|
+
npm i @ai-sdk/mistral
|
|
12
|
+
```
|
|
13
|
+
|
|
14
|
+
## Provider Instance
|
|
15
|
+
|
|
16
|
+
You can import `Mistral` from `@ai-sdk/mistral` and initialize a provider instance with various settings:
|
|
17
|
+
|
|
18
|
+
```ts
|
|
19
|
+
import { Mistral } from '@ai-sdk/mistral';
|
|
20
|
+
|
|
21
|
+
const mistral = new Mistral({
|
|
22
|
+
baseUrl: '', // optional base URL for proxies etc.
|
|
23
|
+
apiKey: '', // optional API key, defaults to the MISTRAL_API_KEY environment variable
|
|
24
|
+
});
|
|
25
|
+
```
|
|
26
|
+
|
|
27
|
+
The AI SDK also provides a shorthand `mistral` import with a Mistral provider instance that uses defaults:
|
|
28
|
+
|
|
29
|
+
```ts
|
|
30
|
+
import { mistral } from '@ai-sdk/mistral';
|
|
31
|
+
```
|
|
32
|
+
|
|
33
|
+
## Chat Models
|
|
34
|
+
|
|
35
|
+
You can create models that call the [Mistral chat API](https://docs.mistral.ai/api/#operation/createChatCompletion) using the `.chat()` factory method.
|
|
36
|
+
The first argument is the model id, e.g. `mistral-large-latest`.
|
|
37
|
+
Some Mistral chat models support tool calls.
|
|
38
|
+
|
|
39
|
+
```ts
|
|
40
|
+
const model = mistral.chat('mistral-large-latest');
|
|
41
|
+
```
|
|
42
|
+
|
|
43
|
+
Mistral chat models also support additional model settings that are not part of the [standard call settings](/docs/ai-core/settings).
|
|
44
|
+
You can pass them as an options argument:
|
|
45
|
+
|
|
46
|
+
```ts
|
|
47
|
+
const model = mistral.chat('mistral-large-latest', {
|
|
48
|
+
safePrompt: true, // optional safety prompt injection
|
|
49
|
+
});
|
|
50
|
+
```
|
package/dist/index.d.mts
ADDED
|
@@ -0,0 +1,52 @@
|
|
|
1
|
+
import { LanguageModelV1 } from '@ai-sdk/provider';

// Known Mistral chat model ids. The `(string & {})` member keeps the union
// open: any other model id string is accepted while editors still
// autocomplete the listed ids.
type MistralChatModelId = 'open-mistral-7b' | 'open-mixtral-8x7b' | 'mistral-small-latest' | 'mistral-medium-latest' | 'mistral-large-latest' | (string & {});
interface MistralChatSettings {
    /**
     * Whether to inject a safety prompt before all conversations.
     *
     * Default: false
     */
    safePrompt?: boolean;
}

// Internal configuration handed from the `Mistral` facade to the model class.
type MistralChatConfig = {
    provider: string;
    baseUrl: string;
    headers: () => Record<string, string | undefined>;
    generateId: () => string;
};
/**
 * Language model for the Mistral chat API, implementing the
 * `LanguageModelV1` specification.
 */
declare class MistralChatLanguageModel implements LanguageModelV1 {
    readonly specificationVersion = "v1";
    // Object generation defaults to JSON mode.
    readonly defaultObjectGenerationMode = "json";
    readonly modelId: MistralChatModelId;
    readonly settings: MistralChatSettings;
    private readonly config;
    constructor(modelId: MistralChatModelId, settings: MistralChatSettings, config: MistralChatConfig);
    get provider(): string;
    private getArgs;
    doGenerate(options: Parameters<LanguageModelV1['doGenerate']>[0]): Promise<Awaited<ReturnType<LanguageModelV1['doGenerate']>>>;
    doStream(options: Parameters<LanguageModelV1['doStream']>[0]): Promise<Awaited<ReturnType<LanguageModelV1['doStream']>>>;
}

/**
 * Mistral provider.
 */
declare class Mistral {
    readonly baseUrl?: string;
    readonly apiKey?: string;
    private readonly generateId;
    constructor(options?: {
        baseUrl?: string;
        apiKey?: string;
        generateId?: () => string;
    });
    private get baseConfig();
    /** Creates a chat model for the given model id and optional settings. */
    chat(modelId: MistralChatModelId, settings?: MistralChatSettings): MistralChatLanguageModel;
}
/**
 * Default Mistral provider instance.
 */
declare const mistral: Mistral;

export { Mistral, mistral };
|
package/dist/index.d.ts
ADDED
|
@@ -0,0 +1,52 @@
|
|
|
1
|
+
import { LanguageModelV1 } from '@ai-sdk/provider';

// Known Mistral chat model ids. The `(string & {})` member keeps the union
// open: any other model id string is accepted while editors still
// autocomplete the listed ids.
type MistralChatModelId = 'open-mistral-7b' | 'open-mixtral-8x7b' | 'mistral-small-latest' | 'mistral-medium-latest' | 'mistral-large-latest' | (string & {});
interface MistralChatSettings {
    /**
     * Whether to inject a safety prompt before all conversations.
     *
     * Default: false
     */
    safePrompt?: boolean;
}

// Internal configuration handed from the `Mistral` facade to the model class.
type MistralChatConfig = {
    provider: string;
    baseUrl: string;
    headers: () => Record<string, string | undefined>;
    generateId: () => string;
};
/**
 * Language model for the Mistral chat API, implementing the
 * `LanguageModelV1` specification.
 */
declare class MistralChatLanguageModel implements LanguageModelV1 {
    readonly specificationVersion = "v1";
    // Object generation defaults to JSON mode.
    readonly defaultObjectGenerationMode = "json";
    readonly modelId: MistralChatModelId;
    readonly settings: MistralChatSettings;
    private readonly config;
    constructor(modelId: MistralChatModelId, settings: MistralChatSettings, config: MistralChatConfig);
    get provider(): string;
    private getArgs;
    doGenerate(options: Parameters<LanguageModelV1['doGenerate']>[0]): Promise<Awaited<ReturnType<LanguageModelV1['doGenerate']>>>;
    doStream(options: Parameters<LanguageModelV1['doStream']>[0]): Promise<Awaited<ReturnType<LanguageModelV1['doStream']>>>;
}

/**
 * Mistral provider.
 */
declare class Mistral {
    readonly baseUrl?: string;
    readonly apiKey?: string;
    private readonly generateId;
    constructor(options?: {
        baseUrl?: string;
        apiKey?: string;
        generateId?: () => string;
    });
    private get baseConfig();
    /** Creates a chat model for the given model id and optional settings. */
    chat(modelId: MistralChatModelId, settings?: MistralChatSettings): MistralChatLanguageModel;
}
/**
 * Default Mistral provider instance.
 */
declare const mistral: Mistral;

export { Mistral, mistral };
|
package/dist/index.js
ADDED
|
@@ -0,0 +1,445 @@
|
|
|
1
|
+
"use strict";
|
|
2
|
+
var __defProp = Object.defineProperty;
|
|
3
|
+
var __getOwnPropDesc = Object.getOwnPropertyDescriptor;
|
|
4
|
+
var __getOwnPropNames = Object.getOwnPropertyNames;
|
|
5
|
+
var __hasOwnProp = Object.prototype.hasOwnProperty;
|
|
6
|
+
var __export = (target, all) => {
|
|
7
|
+
for (var name in all)
|
|
8
|
+
__defProp(target, name, { get: all[name], enumerable: true });
|
|
9
|
+
};
|
|
10
|
+
var __copyProps = (to, from, except, desc) => {
|
|
11
|
+
if (from && typeof from === "object" || typeof from === "function") {
|
|
12
|
+
for (let key of __getOwnPropNames(from))
|
|
13
|
+
if (!__hasOwnProp.call(to, key) && key !== except)
|
|
14
|
+
__defProp(to, key, { get: () => from[key], enumerable: !(desc = __getOwnPropDesc(from, key)) || desc.enumerable });
|
|
15
|
+
}
|
|
16
|
+
return to;
|
|
17
|
+
};
|
|
18
|
+
var __toCommonJS = (mod) => __copyProps(__defProp({}, "__esModule", { value: true }), mod);
|
|
19
|
+
|
|
20
|
+
// src/index.ts
|
|
21
|
+
var src_exports = {};
|
|
22
|
+
__export(src_exports, {
|
|
23
|
+
Mistral: () => Mistral,
|
|
24
|
+
mistral: () => mistral
|
|
25
|
+
});
|
|
26
|
+
module.exports = __toCommonJS(src_exports);
|
|
27
|
+
|
|
28
|
+
// src/mistral-facade.ts
|
|
29
|
+
var import_provider_utils3 = require("@ai-sdk/provider-utils");
|
|
30
|
+
|
|
31
|
+
// src/mistral-chat-language-model.ts
|
|
32
|
+
var import_provider2 = require("@ai-sdk/provider");
|
|
33
|
+
var import_provider_utils2 = require("@ai-sdk/provider-utils");
|
|
34
|
+
var import_zod2 = require("zod");
|
|
35
|
+
|
|
36
|
+
// src/convert-to-mistral-chat-messages.ts
|
|
37
|
+
var import_provider = require("@ai-sdk/provider");
|
|
38
|
+
// Translates a generic LanguageModelV1 prompt into the message array shape
// expected by the Mistral chat API.
function convertToMistralChatMessages(prompt) {
  const result = [];
  for (const message of prompt) {
    const { role, content } = message;
    if (role === "system") {
      result.push({ role: "system", content });
    } else if (role === "user") {
      // Flatten the content parts into a single string; image parts are
      // rejected because this conversion does not support them.
      const pieces = content.map((part) => {
        if (part.type === "text") {
          return part.text;
        }
        if (part.type === "image") {
          throw new import_provider.UnsupportedFunctionalityError({
            functionality: "image-part"
          });
        }
      });
      result.push({ role: "user", content: pieces.join("") });
    } else if (role === "assistant") {
      let textContent = "";
      const calls = [];
      for (const part of content) {
        if (part.type === "text") {
          textContent += part.text;
        } else if (part.type === "tool-call") {
          calls.push({
            id: part.toolCallId,
            type: "function",
            function: {
              name: part.toolName,
              arguments: JSON.stringify(part.args)
            }
          });
        } else {
          const _exhaustiveCheck = part;
          throw new Error(`Unsupported part: ${_exhaustiveCheck}`);
        }
      }
      result.push({
        role: "assistant",
        content: textContent,
        // The original tool-call id is discarded and replaced with the
        // literal string "null"; `tool_calls` is omitted entirely when there
        // are no calls.
        tool_calls: calls.length > 0 ? calls.map(({ function: fn }) => ({
          id: "null",
          type: "function",
          function: { name: fn.name, arguments: fn.arguments }
        })) : void 0
      });
    } else if (role === "tool") {
      // Each tool response becomes its own message with a JSON-encoded body.
      for (const toolResponse of content) {
        result.push({
          role: "tool",
          name: toolResponse.toolName,
          content: JSON.stringify(toolResponse.result)
        });
      }
    } else {
      const _exhaustiveCheck = role;
      throw new Error(`Unsupported role: ${_exhaustiveCheck}`);
    }
  }
  return result;
}
|
|
119
|
+
|
|
120
|
+
// src/map-mistral-finish-reason.ts
|
|
121
|
+
// Maps a Mistral `finish_reason` string onto the normalized
// LanguageModelV1 finish-reason vocabulary.
function mapMistralFinishReason(finishReason) {
  if (finishReason === "stop") {
    return "stop";
  }
  if (finishReason === "length" || finishReason === "model_length") {
    return "length";
  }
  if (finishReason === "tool_calls") {
    return "tool-calls";
  }
  // Anything unrecognized (including null/undefined) falls back to "other".
  return "other";
}
|
|
134
|
+
|
|
135
|
+
// src/mistral-error.ts
var import_provider_utils = require("@ai-sdk/provider-utils");
var import_zod = require("zod");
// Shape of the error payload returned by the Mistral API.
var mistralErrorDataSchema = import_zod.z.object({
  object: import_zod.z.literal("error"),
  message: import_zod.z.string(),
  type: import_zod.z.string(),
  param: import_zod.z.string().nullable(),
  code: import_zod.z.string().nullable()
});
// Parses failed HTTP responses against the schema above and surfaces the
// server-supplied `message` as the error text.
var mistralFailedResponseHandler = (0, import_provider_utils.createJsonErrorResponseHandler)({
  errorSchema: mistralErrorDataSchema,
  errorToMessage: (data) => data.message
});
|
|
149
|
+
|
|
150
|
+
// src/mistral-chat-language-model.ts
|
|
151
|
+
var MistralChatLanguageModel = class {
  // LanguageModelV1 implementation for the Mistral chat API.
  constructor(modelId, settings, config) {
    this.specificationVersion = "v1";
    // object generation defaults to JSON mode (response_format: json_object)
    this.defaultObjectGenerationMode = "json";
    this.modelId = modelId;
    this.settings = settings;
    this.config = config;
  }
  get provider() {
    return this.config.provider;
  }
  // Builds the Mistral request body from the standardized call options.
  // Returns { args, warnings }, where `warnings` lists settings the API
  // does not support.
  getArgs({
    mode,
    prompt,
    maxTokens,
    temperature,
    topP,
    frequencyPenalty,
    presencePenalty,
    seed
  }) {
    var _a;
    const type = mode.type;
    const warnings = [];
    // frequency/presence penalty are reported as unsupported rather than sent:
    if (frequencyPenalty != null) {
      warnings.push({
        type: "unsupported-setting",
        setting: "frequencyPenalty"
      });
    }
    if (presencePenalty != null) {
      warnings.push({
        type: "unsupported-setting",
        setting: "presencePenalty"
      });
    }
    const baseArgs = {
      // model id:
      model: this.modelId,
      // model specific settings:
      safe_prompt: this.settings.safePrompt,
      // standardized settings:
      max_tokens: maxTokens,
      temperature,
      // uses 0..1 scale
      top_p: topP,
      random_seed: seed,
      // messages:
      messages: convertToMistralChatMessages(prompt)
    };
    switch (type) {
      case "regular": {
        // an empty tools array is sent as undefined instead:
        const tools = ((_a = mode.tools) == null ? void 0 : _a.length) ? mode.tools : void 0;
        return {
          args: {
            ...baseArgs,
            tools: tools == null ? void 0 : tools.map((tool) => ({
              type: "function",
              function: {
                name: tool.name,
                description: tool.description,
                parameters: tool.parameters
              }
            }))
          },
          warnings
        };
      }
      case "object-json": {
        return {
          args: {
            ...baseArgs,
            response_format: { type: "json_object" }
          },
          warnings
        };
      }
      case "object-tool": {
        // forces the model to call the single provided tool:
        return {
          args: {
            ...baseArgs,
            tool_choice: "any",
            tools: [{ type: "function", function: mode.tool }]
          },
          warnings
        };
      }
      case "object-grammar": {
        throw new import_provider2.UnsupportedFunctionalityError({
          functionality: "object-grammar mode"
        });
      }
      default: {
        const _exhaustiveCheck = type;
        throw new Error(`Unsupported type: ${_exhaustiveCheck}`);
      }
    }
  }
  // Non-streaming generation: POSTs to /chat/completions and maps the parsed
  // JSON response onto the LanguageModelV1 result shape.
  async doGenerate(options) {
    var _a, _b;
    const { args, warnings } = this.getArgs(options);
    const response = await (0, import_provider_utils2.postJsonToApi)({
      url: `${this.config.baseUrl}/chat/completions`,
      headers: this.config.headers(),
      body: args,
      failedResponseHandler: mistralFailedResponseHandler,
      successfulResponseHandler: (0, import_provider_utils2.createJsonResponseHandler)(
        mistralChatResponseSchema
      ),
      abortSignal: options.abortSignal
    });
    const { messages: rawPrompt, ...rawSettings } = args;
    // only the first choice is used:
    const choice = response.choices[0];
    return {
      text: (_a = choice.message.content) != null ? _a : void 0,
      // NOTE(review): tool-call ids are minted locally via config.generateId()
      // rather than taken from the response; the parsed schema does not
      // include an id field.
      toolCalls: (_b = choice.message.tool_calls) == null ? void 0 : _b.map((toolCall) => ({
        toolCallType: "function",
        toolCallId: this.config.generateId(),
        toolName: toolCall.function.name,
        args: toolCall.function.arguments
      })),
      finishReason: mapMistralFinishReason(choice.finish_reason),
      usage: {
        promptTokens: response.usage.prompt_tokens,
        completionTokens: response.usage.completion_tokens
      },
      rawCall: { rawPrompt, rawSettings },
      warnings
    };
  }
  // Streaming generation: POSTs with stream=true and converts the parsed
  // server-sent-event chunks into LanguageModelV1 stream parts.
  async doStream(options) {
    const { args, warnings } = this.getArgs(options);
    const response = await (0, import_provider_utils2.postJsonToApi)({
      url: `${this.config.baseUrl}/chat/completions`,
      headers: this.config.headers(),
      body: {
        ...args,
        stream: true
      },
      failedResponseHandler: mistralFailedResponseHandler,
      successfulResponseHandler: (0, import_provider_utils2.createEventSourceResponseHandler)(
        mistralChatChunkSchema
      ),
      abortSignal: options.abortSignal
    });
    const { messages: rawPrompt, ...rawSettings } = args;
    // accumulated across chunks and emitted once in flush():
    let finishReason = "other";
    let usage = {
      promptTokens: Number.NaN,
      completionTokens: Number.NaN
    };
    const generateId2 = this.config.generateId;
    return {
      stream: response.pipeThrough(
        new TransformStream({
          transform(chunk, controller) {
            // forward parse failures as error stream parts:
            if (!chunk.success) {
              controller.enqueue({ type: "error", error: chunk.error });
              return;
            }
            const value = chunk.value;
            if (value.usage != null) {
              usage = {
                promptTokens: value.usage.prompt_tokens,
                completionTokens: value.usage.completion_tokens
              };
            }
            const choice = value.choices[0];
            if ((choice == null ? void 0 : choice.finish_reason) != null) {
              finishReason = mapMistralFinishReason(choice.finish_reason);
            }
            if ((choice == null ? void 0 : choice.delta) == null) {
              return;
            }
            const delta = choice.delta;
            if (delta.content != null) {
              controller.enqueue({
                type: "text-delta",
                textDelta: delta.content
              });
            }
            if (delta.tool_calls != null) {
              // mistral tool calls come in one piece, so each is emitted as
              // a delta immediately followed by the complete call
              for (const toolCall of delta.tool_calls) {
                // delta and tool call must share the same id:
                const toolCallId = generateId2();
                controller.enqueue({
                  type: "tool-call-delta",
                  toolCallType: "function",
                  toolCallId,
                  toolName: toolCall.function.name,
                  argsTextDelta: toolCall.function.arguments
                });
                controller.enqueue({
                  type: "tool-call",
                  toolCallType: "function",
                  toolCallId,
                  toolName: toolCall.function.name,
                  args: toolCall.function.arguments
                });
              }
            }
          },
          flush(controller) {
            controller.enqueue({ type: "finish", finishReason, usage });
          }
        })
      ),
      rawCall: { rawPrompt, rawSettings },
      warnings
    };
  }
};
|
|
362
|
+
// Limited version of the response schema, focused on the fields the
// implementation reads; this limits breakage when the API adds fields.
var mistralChatResponseSchema = import_zod2.z.object({
  choices: import_zod2.z.array(
    import_zod2.z.object({
      message: import_zod2.z.object({
        role: import_zod2.z.literal("assistant"),
        content: import_zod2.z.string().nullable(),
        tool_calls: import_zod2.z.array(
          import_zod2.z.object({
            function: import_zod2.z.object({
              name: import_zod2.z.string(),
              arguments: import_zod2.z.string()
            })
          })
        ).optional().nullable()
      }),
      index: import_zod2.z.number(),
      finish_reason: import_zod2.z.string().optional().nullable()
    })
  ),
  object: import_zod2.z.literal("chat.completion"),
  usage: import_zod2.z.object({
    prompt_tokens: import_zod2.z.number(),
    completion_tokens: import_zod2.z.number()
  })
});
// Streaming chunk schema, likewise limited to the fields used by doStream.
var mistralChatChunkSchema = import_zod2.z.object({
  object: import_zod2.z.literal("chat.completion.chunk"),
  choices: import_zod2.z.array(
    import_zod2.z.object({
      delta: import_zod2.z.object({
        role: import_zod2.z.enum(["assistant"]).optional(),
        content: import_zod2.z.string().nullable().optional(),
        tool_calls: import_zod2.z.array(
          import_zod2.z.object({
            function: import_zod2.z.object({ name: import_zod2.z.string(), arguments: import_zod2.z.string() })
          })
        ).optional().nullable()
      }),
      finish_reason: import_zod2.z.string().nullable().optional(),
      index: import_zod2.z.number()
    })
  ),
  // usage only appears on the final chunk:
  usage: import_zod2.z.object({
    prompt_tokens: import_zod2.z.number(),
    completion_tokens: import_zod2.z.number()
  }).optional().nullable()
});
|
|
409
|
+
|
|
410
|
+
// src/mistral-facade.ts
|
|
411
|
+
// src/mistral-facade.ts
var Mistral = class {
  /**
   * Creates a Mistral provider.
   *
   * @param options.baseUrl optional API base URL (defaults to the public endpoint)
   * @param options.apiKey optional API key; resolved from the MISTRAL_API_KEY
   *   environment variable at request time when omitted
   * @param options.generateId optional id factory used for tool-call ids
   */
  constructor(options = {}) {
    this.baseUrl = options.baseUrl;
    this.apiKey = options.apiKey;
    this.generateId = options.generateId ?? import_provider_utils3.generateId;
  }
  // Shared configuration for all models created by this provider. The
  // Authorization header is built lazily so the API key is only resolved
  // when a request is made.
  get baseConfig() {
    return {
      baseUrl: this.baseUrl ?? "https://api.mistral.ai/v1",
      headers: () => ({
        Authorization: `Bearer ${(0, import_provider_utils3.loadApiKey)({
          apiKey: this.apiKey,
          environmentVariableName: "MISTRAL_API_KEY",
          description: "Mistral"
        })}`
      })
    };
  }
  // Creates a chat language model for the given model id.
  chat(modelId, settings = {}) {
    return new MistralChatLanguageModel(modelId, settings, {
      provider: "mistral.chat",
      ...this.baseConfig,
      generateId: this.generateId
    });
  }
};
|
|
439
|
+
// Default provider instance with no overrides (the API key is resolved from
// the MISTRAL_API_KEY environment variable at request time).
var mistral = new Mistral();
// Annotate the CommonJS export names for ESM import in node:
0 && (module.exports = {
  Mistral,
  mistral
});
//# sourceMappingURL=index.js.map
|
|
@@ -0,0 +1 @@
|
|
|
1
|
+
{"version":3,"sources":["../src/index.ts","../src/mistral-facade.ts","../src/mistral-chat-language-model.ts","../src/convert-to-mistral-chat-messages.ts","../src/map-mistral-finish-reason.ts","../src/mistral-error.ts"],"sourcesContent":["export * from './mistral-facade';\n","import { generateId, loadApiKey } from '@ai-sdk/provider-utils';\nimport { MistralChatLanguageModel } from './mistral-chat-language-model';\nimport {\n MistralChatModelId,\n MistralChatSettings,\n} from './mistral-chat-settings';\n\n/**\n * Mistral provider.\n */\nexport class Mistral {\n readonly baseUrl?: string;\n readonly apiKey?: string;\n\n private readonly generateId: () => string;\n\n constructor(\n options: {\n baseUrl?: string;\n apiKey?: string;\n generateId?: () => string;\n } = {},\n ) {\n this.baseUrl = options.baseUrl;\n this.apiKey = options.apiKey;\n this.generateId = options.generateId ?? generateId;\n }\n\n private get baseConfig() {\n return {\n baseUrl: this.baseUrl ?? 'https://api.mistral.ai/v1',\n headers: () => ({\n Authorization: `Bearer ${loadApiKey({\n apiKey: this.apiKey,\n environmentVariableName: 'MISTRAL_API_KEY',\n description: 'Mistral',\n })}`,\n }),\n };\n }\n\n chat(modelId: MistralChatModelId, settings: MistralChatSettings = {}) {\n return new MistralChatLanguageModel(modelId, settings, {\n provider: 'mistral.chat',\n ...this.baseConfig,\n generateId: this.generateId,\n });\n }\n}\n\n/**\n * Default Mistral provider instance.\n */\nexport const mistral = new Mistral();\n","import {\n LanguageModelV1,\n LanguageModelV1CallWarning,\n LanguageModelV1FinishReason,\n LanguageModelV1StreamPart,\n UnsupportedFunctionalityError,\n} from '@ai-sdk/provider';\nimport {\n ParseResult,\n createEventSourceResponseHandler,\n createJsonResponseHandler,\n postJsonToApi,\n} from '@ai-sdk/provider-utils';\nimport { z } from 'zod';\nimport { convertToMistralChatMessages } from './convert-to-mistral-chat-messages';\nimport { mapMistralFinishReason } from 
'./map-mistral-finish-reason';\nimport {\n MistralChatModelId,\n MistralChatSettings,\n} from './mistral-chat-settings';\nimport { mistralFailedResponseHandler } from './mistral-error';\n\ntype MistralChatConfig = {\n provider: string;\n baseUrl: string;\n headers: () => Record<string, string | undefined>;\n generateId: () => string;\n};\n\nexport class MistralChatLanguageModel implements LanguageModelV1 {\n readonly specificationVersion = 'v1';\n readonly defaultObjectGenerationMode = 'json';\n\n readonly modelId: MistralChatModelId;\n readonly settings: MistralChatSettings;\n\n private readonly config: MistralChatConfig;\n\n constructor(\n modelId: MistralChatModelId,\n settings: MistralChatSettings,\n config: MistralChatConfig,\n ) {\n this.modelId = modelId;\n this.settings = settings;\n this.config = config;\n }\n\n get provider(): string {\n return this.config.provider;\n }\n\n private getArgs({\n mode,\n prompt,\n maxTokens,\n temperature,\n topP,\n frequencyPenalty,\n presencePenalty,\n seed,\n }: Parameters<LanguageModelV1['doGenerate']>[0]) {\n const type = mode.type;\n\n const warnings: LanguageModelV1CallWarning[] = [];\n\n if (frequencyPenalty != null) {\n warnings.push({\n type: 'unsupported-setting',\n setting: 'frequencyPenalty',\n });\n }\n\n if (presencePenalty != null) {\n warnings.push({\n type: 'unsupported-setting',\n setting: 'presencePenalty',\n });\n }\n\n const baseArgs = {\n // model id:\n model: this.modelId,\n\n // model specific settings:\n safe_prompt: this.settings.safePrompt,\n\n // standardized settings:\n max_tokens: maxTokens,\n temperature, // uses 0..1 scale\n top_p: topP,\n random_seed: seed,\n\n // messages:\n messages: convertToMistralChatMessages(prompt),\n };\n\n switch (type) {\n case 'regular': {\n // when the tools array is empty, change it to undefined to prevent OpenAI errors:\n const tools = mode.tools?.length ? 
mode.tools : undefined;\n\n return {\n args: {\n ...baseArgs,\n tools: tools?.map(tool => ({\n type: 'function',\n function: {\n name: tool.name,\n description: tool.description,\n parameters: tool.parameters,\n },\n })),\n },\n warnings,\n };\n }\n\n case 'object-json': {\n return {\n args: {\n ...baseArgs,\n response_format: { type: 'json_object' },\n },\n warnings,\n };\n }\n\n case 'object-tool': {\n return {\n args: {\n ...baseArgs,\n tool_choice: 'any',\n tools: [{ type: 'function', function: mode.tool }],\n },\n warnings,\n };\n }\n\n case 'object-grammar': {\n throw new UnsupportedFunctionalityError({\n functionality: 'object-grammar mode',\n });\n }\n\n default: {\n const _exhaustiveCheck: never = type;\n throw new Error(`Unsupported type: ${_exhaustiveCheck}`);\n }\n }\n }\n\n async doGenerate(\n options: Parameters<LanguageModelV1['doGenerate']>[0],\n ): Promise<Awaited<ReturnType<LanguageModelV1['doGenerate']>>> {\n const { args, warnings } = this.getArgs(options);\n\n const response = await postJsonToApi({\n url: `${this.config.baseUrl}/chat/completions`,\n headers: this.config.headers(),\n body: args,\n failedResponseHandler: mistralFailedResponseHandler,\n successfulResponseHandler: createJsonResponseHandler(\n mistralChatResponseSchema,\n ),\n abortSignal: options.abortSignal,\n });\n\n const { messages: rawPrompt, ...rawSettings } = args;\n const choice = response.choices[0];\n\n return {\n text: choice.message.content ?? 
undefined,\n toolCalls: choice.message.tool_calls?.map(toolCall => ({\n toolCallType: 'function',\n toolCallId: this.config.generateId(),\n toolName: toolCall.function.name,\n args: toolCall.function.arguments!,\n })),\n finishReason: mapMistralFinishReason(choice.finish_reason),\n usage: {\n promptTokens: response.usage.prompt_tokens,\n completionTokens: response.usage.completion_tokens,\n },\n rawCall: { rawPrompt, rawSettings },\n warnings,\n };\n }\n\n async doStream(\n options: Parameters<LanguageModelV1['doStream']>[0],\n ): Promise<Awaited<ReturnType<LanguageModelV1['doStream']>>> {\n const { args, warnings } = this.getArgs(options);\n\n const response = await postJsonToApi({\n url: `${this.config.baseUrl}/chat/completions`,\n headers: this.config.headers(),\n body: {\n ...args,\n stream: true,\n },\n failedResponseHandler: mistralFailedResponseHandler,\n successfulResponseHandler: createEventSourceResponseHandler(\n mistralChatChunkSchema,\n ),\n abortSignal: options.abortSignal,\n });\n\n const { messages: rawPrompt, ...rawSettings } = args;\n\n let finishReason: LanguageModelV1FinishReason = 'other';\n let usage: { promptTokens: number; completionTokens: number } = {\n promptTokens: Number.NaN,\n completionTokens: Number.NaN,\n };\n\n const generateId = this.config.generateId;\n\n return {\n stream: response.pipeThrough(\n new TransformStream<\n ParseResult<z.infer<typeof mistralChatChunkSchema>>,\n LanguageModelV1StreamPart\n >({\n transform(chunk, controller) {\n if (!chunk.success) {\n controller.enqueue({ type: 'error', error: chunk.error });\n return;\n }\n\n const value = chunk.value;\n\n if (value.usage != null) {\n usage = {\n promptTokens: value.usage.prompt_tokens,\n completionTokens: value.usage.completion_tokens,\n };\n }\n\n const choice = value.choices[0];\n\n if (choice?.finish_reason != null) {\n finishReason = mapMistralFinishReason(choice.finish_reason);\n }\n\n if (choice?.delta == null) {\n return;\n }\n\n const delta = 
choice.delta;\n\n if (delta.content != null) {\n controller.enqueue({\n type: 'text-delta',\n textDelta: delta.content,\n });\n }\n\n if (delta.tool_calls != null) {\n for (const toolCall of delta.tool_calls) {\n // mistral tool calls come in one piece\n\n const toolCallId = generateId(); // delta and tool call must have same id\n\n controller.enqueue({\n type: 'tool-call-delta',\n toolCallType: 'function',\n toolCallId,\n toolName: toolCall.function.name,\n argsTextDelta: toolCall.function.arguments,\n });\n\n controller.enqueue({\n type: 'tool-call',\n toolCallType: 'function',\n toolCallId,\n toolName: toolCall.function.name,\n args: toolCall.function.arguments,\n });\n }\n }\n },\n\n flush(controller) {\n controller.enqueue({ type: 'finish', finishReason, usage });\n },\n }),\n ),\n rawCall: { rawPrompt, rawSettings },\n warnings,\n };\n }\n}\n\n// limited version of the schema, focussed on what is needed for the implementation\n// this approach limits breakages when the API changes and increases efficiency\nconst mistralChatResponseSchema = z.object({\n choices: z.array(\n z.object({\n message: z.object({\n role: z.literal('assistant'),\n content: z.string().nullable(),\n tool_calls: z\n .array(\n z.object({\n function: z.object({\n name: z.string(),\n arguments: z.string(),\n }),\n }),\n )\n .optional()\n .nullable(),\n }),\n index: z.number(),\n finish_reason: z.string().optional().nullable(),\n }),\n ),\n object: z.literal('chat.completion'),\n usage: z.object({\n prompt_tokens: z.number(),\n completion_tokens: z.number(),\n }),\n});\n\n// limited version of the schema, focussed on what is needed for the implementation\n// this approach limits breakages when the API changes and increases efficiency\nconst mistralChatChunkSchema = z.object({\n object: z.literal('chat.completion.chunk'),\n choices: z.array(\n z.object({\n delta: z.object({\n role: z.enum(['assistant']).optional(),\n content: z.string().nullable().optional(),\n tool_calls: z\n .array(\n 
z.object({\n function: z.object({ name: z.string(), arguments: z.string() }),\n }),\n )\n .optional()\n .nullable(),\n }),\n finish_reason: z.string().nullable().optional(),\n index: z.number(),\n }),\n ),\n usage: z\n .object({\n prompt_tokens: z.number(),\n completion_tokens: z.number(),\n })\n .optional()\n .nullable(),\n});\n","import {\n LanguageModelV1Prompt,\n UnsupportedFunctionalityError,\n} from '@ai-sdk/provider';\nimport { MistralChatPrompt } from './mistral-chat-prompt';\n\nexport function convertToMistralChatMessages(\n prompt: LanguageModelV1Prompt,\n): MistralChatPrompt {\n const messages: MistralChatPrompt = [];\n\n for (const { role, content } of prompt) {\n switch (role) {\n case 'system': {\n messages.push({ role: 'system', content });\n break;\n }\n\n case 'user': {\n messages.push({\n role: 'user',\n content: content\n .map(part => {\n switch (part.type) {\n case 'text': {\n return part.text;\n }\n case 'image': {\n throw new UnsupportedFunctionalityError({\n functionality: 'image-part',\n });\n }\n }\n })\n .join(''),\n });\n break;\n }\n\n case 'assistant': {\n let text = '';\n const toolCalls: Array<{\n id: string;\n type: 'function';\n function: { name: string; arguments: string };\n }> = [];\n\n for (const part of content) {\n switch (part.type) {\n case 'text': {\n text += part.text;\n break;\n }\n case 'tool-call': {\n toolCalls.push({\n id: part.toolCallId,\n type: 'function',\n function: {\n name: part.toolName,\n arguments: JSON.stringify(part.args),\n },\n });\n break;\n }\n default: {\n const _exhaustiveCheck: never = part;\n throw new Error(`Unsupported part: ${_exhaustiveCheck}`);\n }\n }\n }\n\n messages.push({\n role: 'assistant',\n content: text,\n tool_calls:\n toolCalls.length > 0\n ? 
toolCalls.map(({ function: { name, arguments: args } }) => ({\n id: 'null',\n type: 'function',\n function: { name, arguments: args },\n }))\n : undefined,\n });\n\n break;\n }\n case 'tool': {\n for (const toolResponse of content) {\n messages.push({\n role: 'tool',\n name: toolResponse.toolName,\n content: JSON.stringify(toolResponse.result),\n });\n }\n break;\n }\n default: {\n const _exhaustiveCheck: never = role;\n throw new Error(`Unsupported role: ${_exhaustiveCheck}`);\n }\n }\n }\n\n return messages;\n}\n","import { LanguageModelV1FinishReason } from '@ai-sdk/provider';\n\nexport function mapMistralFinishReason(\n finishReason: string | null | undefined,\n): LanguageModelV1FinishReason {\n switch (finishReason) {\n case 'stop':\n return 'stop';\n case 'length':\n case 'model_length':\n return 'length';\n case 'tool_calls':\n return 'tool-calls';\n default:\n return 'other';\n }\n}\n","import { createJsonErrorResponseHandler } from '@ai-sdk/provider-utils';\nimport { z } from 'zod';\n\nconst mistralErrorDataSchema = z.object({\n object: z.literal('error'),\n message: z.string(),\n type: z.string(),\n param: z.string().nullable(),\n code: z.string().nullable(),\n});\n\nexport type MistralErrorData = z.infer<typeof mistralErrorDataSchema>;\n\nexport const mistralFailedResponseHandler = createJsonErrorResponseHandler({\n errorSchema: mistralErrorDataSchema,\n errorToMessage: data => 
data.message,\n});\n"],"mappings":";;;;;;;;;;;;;;;;;;;;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;;;ACAA,IAAAA,yBAAuC;;;ACAvC,IAAAC,mBAMO;AACP,IAAAC,yBAKO;AACP,IAAAC,cAAkB;;;ACblB,sBAGO;AAGA,SAAS,6BACd,QACmB;AACnB,QAAM,WAA8B,CAAC;AAErC,aAAW,EAAE,MAAM,QAAQ,KAAK,QAAQ;AACtC,YAAQ,MAAM;AAAA,MACZ,KAAK,UAAU;AACb,iBAAS,KAAK,EAAE,MAAM,UAAU,QAAQ,CAAC;AACzC;AAAA,MACF;AAAA,MAEA,KAAK,QAAQ;AACX,iBAAS,KAAK;AAAA,UACZ,MAAM;AAAA,UACN,SAAS,QACN,IAAI,UAAQ;AACX,oBAAQ,KAAK,MAAM;AAAA,cACjB,KAAK,QAAQ;AACX,uBAAO,KAAK;AAAA,cACd;AAAA,cACA,KAAK,SAAS;AACZ,sBAAM,IAAI,8CAA8B;AAAA,kBACtC,eAAe;AAAA,gBACjB,CAAC;AAAA,cACH;AAAA,YACF;AAAA,UACF,CAAC,EACA,KAAK,EAAE;AAAA,QACZ,CAAC;AACD;AAAA,MACF;AAAA,MAEA,KAAK,aAAa;AAChB,YAAI,OAAO;AACX,cAAM,YAID,CAAC;AAEN,mBAAW,QAAQ,SAAS;AAC1B,kBAAQ,KAAK,MAAM;AAAA,YACjB,KAAK,QAAQ;AACX,sBAAQ,KAAK;AACb;AAAA,YACF;AAAA,YACA,KAAK,aAAa;AAChB,wBAAU,KAAK;AAAA,gBACb,IAAI,KAAK;AAAA,gBACT,MAAM;AAAA,gBACN,UAAU;AAAA,kBACR,MAAM,KAAK;AAAA,kBACX,WAAW,KAAK,UAAU,KAAK,IAAI;AAAA,gBACrC;AAAA,cACF,CAAC;AACD;AAAA,YACF;AAAA,YACA,SAAS;AACP,oBAAM,mBAA0B;AAChC,oBAAM,IAAI,MAAM,qBAAqB,gBAAgB,EAAE;AAAA,YACzD;AAAA,UACF;AAAA,QACF;AAEA,iBAAS,KAAK;AAAA,UACZ,MAAM;AAAA,UACN,SAAS;AAAA,UACT,YACE,UAAU,SAAS,IACf,UAAU,IAAI,CAAC,EAAE,UAAU,EAAE,MAAM,WAAW,KAAK,EAAE,OAAO;AAAA,YAC1D,IAAI;AAAA,YACJ,MAAM;AAAA,YACN,UAAU,EAAE,MAAM,WAAW,KAAK;AAAA,UACpC,EAAE,IACF;AAAA,QACR,CAAC;AAED;AAAA,MACF;AAAA,MACA,KAAK,QAAQ;AACX,mBAAW,gBAAgB,SAAS;AAClC,mBAAS,KAAK;AAAA,YACZ,MAAM;AAAA,YACN,MAAM,aAAa;AAAA,YACnB,SAAS,KAAK,UAAU,aAAa,MAAM;AAAA,UAC7C,CAAC;AAAA,QACH;AACA;AAAA,MACF;AAAA,MACA,SAAS;AACP,cAAM,mBAA0B;AAChC,cAAM,IAAI,MAAM,qBAAqB,gBAAgB,EAAE;AAAA,MACzD;AAAA,IACF;AAAA,EACF;AAEA,SAAO;AACT;;;ACtGO,SAAS,uBACd,cAC6B;AAC7B,UAAQ,cAAc;AAAA,IACpB,KAAK;AACH,aAAO;AAAA,IACT,KAAK;AAAA,IACL,KAAK;AACH,aAAO;AAAA,IACT,KAAK;AACH,aAAO;AAAA,IACT;AACE,aAAO;AAAA,EACX;AACF;;;AChBA,4BAA+C;AAC/C,iBAAkB;AAElB,IAAM,yBAAyB,aAAE,OAAO;AAAA,EACtC,QAAQ,aAAE,QAAQ,OAAO;AAAA,EACzB,SAAS,aAAE,OAAO;AAAA,EAClB,MAAM,aAAE,OAAO;AAAA,EACf,OAAO,aAAE,OAAO,EAAE,SAAS;AAAA,EAC3B,MAAM,aAAE,OAA
O,EAAE,SAAS;AAC5B,CAAC;AAIM,IAAM,mCAA+B,sDAA+B;AAAA,EACzE,aAAa;AAAA,EACb,gBAAgB,UAAQ,KAAK;AAC/B,CAAC;;;AHaM,IAAM,2BAAN,MAA0D;AAAA,EAS/D,YACE,SACA,UACA,QACA;AAZF,SAAS,uBAAuB;AAChC,SAAS,8BAA8B;AAYrC,SAAK,UAAU;AACf,SAAK,WAAW;AAChB,SAAK,SAAS;AAAA,EAChB;AAAA,EAEA,IAAI,WAAmB;AACrB,WAAO,KAAK,OAAO;AAAA,EACrB;AAAA,EAEQ,QAAQ;AAAA,IACd;AAAA,IACA;AAAA,IACA;AAAA,IACA;AAAA,IACA;AAAA,IACA;AAAA,IACA;AAAA,IACA;AAAA,EACF,GAAiD;AA7DnD;AA8DI,UAAM,OAAO,KAAK;AAElB,UAAM,WAAyC,CAAC;AAEhD,QAAI,oBAAoB,MAAM;AAC5B,eAAS,KAAK;AAAA,QACZ,MAAM;AAAA,QACN,SAAS;AAAA,MACX,CAAC;AAAA,IACH;AAEA,QAAI,mBAAmB,MAAM;AAC3B,eAAS,KAAK;AAAA,QACZ,MAAM;AAAA,QACN,SAAS;AAAA,MACX,CAAC;AAAA,IACH;AAEA,UAAM,WAAW;AAAA;AAAA,MAEf,OAAO,KAAK;AAAA;AAAA,MAGZ,aAAa,KAAK,SAAS;AAAA;AAAA,MAG3B,YAAY;AAAA,MACZ;AAAA;AAAA,MACA,OAAO;AAAA,MACP,aAAa;AAAA;AAAA,MAGb,UAAU,6BAA6B,MAAM;AAAA,IAC/C;AAEA,YAAQ,MAAM;AAAA,MACZ,KAAK,WAAW;AAEd,cAAM,UAAQ,UAAK,UAAL,mBAAY,UAAS,KAAK,QAAQ;AAEhD,eAAO;AAAA,UACL,MAAM;AAAA,YACJ,GAAG;AAAA,YACH,OAAO,+BAAO,IAAI,WAAS;AAAA,cACzB,MAAM;AAAA,cACN,UAAU;AAAA,gBACR,MAAM,KAAK;AAAA,gBACX,aAAa,KAAK;AAAA,gBAClB,YAAY,KAAK;AAAA,cACnB;AAAA,YACF;AAAA,UACF;AAAA,UACA;AAAA,QACF;AAAA,MACF;AAAA,MAEA,KAAK,eAAe;AAClB,eAAO;AAAA,UACL,MAAM;AAAA,YACJ,GAAG;AAAA,YACH,iBAAiB,EAAE,MAAM,cAAc;AAAA,UACzC;AAAA,UACA;AAAA,QACF;AAAA,MACF;AAAA,MAEA,KAAK,eAAe;AAClB,eAAO;AAAA,UACL,MAAM;AAAA,YACJ,GAAG;AAAA,YACH,aAAa;AAAA,YACb,OAAO,CAAC,EAAE,MAAM,YAAY,UAAU,KAAK,KAAK,CAAC;AAAA,UACnD;AAAA,UACA;AAAA,QACF;AAAA,MACF;AAAA,MAEA,KAAK,kBAAkB;AACrB,cAAM,IAAI,+CAA8B;AAAA,UACtC,eAAe;AAAA,QACjB,CAAC;AAAA,MACH;AAAA,MAEA,SAAS;AACP,cAAM,mBAA0B;AAChC,cAAM,IAAI,MAAM,qBAAqB,gBAAgB,EAAE;AAAA,MACzD;AAAA,IACF;AAAA,EACF;AAAA,EAEA,MAAM,WACJ,SAC6D;AA1JjE;AA2JI,UAAM,EAAE,MAAM,SAAS,IAAI,KAAK,QAAQ,OAAO;AAE/C,UAAM,WAAW,UAAM,sCAAc;AAAA,MACnC,KAAK,GAAG,KAAK,OAAO,OAAO;AAAA,MAC3B,SAAS,KAAK,OAAO,QAAQ;AAAA,MAC7B,MAAM;AAAA,MACN,uBAAuB;AAAA,MACvB,+BAA2B;AAAA,QACzB;AAAA,MACF;AAAA,MACA,aAAa,QAAQ;AAAA,IACvB,CAAC;AAED,UAAM,EAAE,UAAU,WAAW,GAAG,YAAY,IAAI;AAChD,UAAM,SAAS,SAAS,QAAQ,CAAC;AAEjC,WAAO;AAA
A,MACL,OAAM,YAAO,QAAQ,YAAf,YAA0B;AAAA,MAChC,YAAW,YAAO,QAAQ,eAAf,mBAA2B,IAAI,eAAa;AAAA,QACrD,cAAc;AAAA,QACd,YAAY,KAAK,OAAO,WAAW;AAAA,QACnC,UAAU,SAAS,SAAS;AAAA,QAC5B,MAAM,SAAS,SAAS;AAAA,MAC1B;AAAA,MACA,cAAc,uBAAuB,OAAO,aAAa;AAAA,MACzD,OAAO;AAAA,QACL,cAAc,SAAS,MAAM;AAAA,QAC7B,kBAAkB,SAAS,MAAM;AAAA,MACnC;AAAA,MACA,SAAS,EAAE,WAAW,YAAY;AAAA,MAClC;AAAA,IACF;AAAA,EACF;AAAA,EAEA,MAAM,SACJ,SAC2D;AAC3D,UAAM,EAAE,MAAM,SAAS,IAAI,KAAK,QAAQ,OAAO;AAE/C,UAAM,WAAW,UAAM,sCAAc;AAAA,MACnC,KAAK,GAAG,KAAK,OAAO,OAAO;AAAA,MAC3B,SAAS,KAAK,OAAO,QAAQ;AAAA,MAC7B,MAAM;AAAA,QACJ,GAAG;AAAA,QACH,QAAQ;AAAA,MACV;AAAA,MACA,uBAAuB;AAAA,MACvB,+BAA2B;AAAA,QACzB;AAAA,MACF;AAAA,MACA,aAAa,QAAQ;AAAA,IACvB,CAAC;AAED,UAAM,EAAE,UAAU,WAAW,GAAG,YAAY,IAAI;AAEhD,QAAI,eAA4C;AAChD,QAAI,QAA4D;AAAA,MAC9D,cAAc,OAAO;AAAA,MACrB,kBAAkB,OAAO;AAAA,IAC3B;AAEA,UAAMC,cAAa,KAAK,OAAO;AAE/B,WAAO;AAAA,MACL,QAAQ,SAAS;AAAA,QACf,IAAI,gBAGF;AAAA,UACA,UAAU,OAAO,YAAY;AAC3B,gBAAI,CAAC,MAAM,SAAS;AAClB,yBAAW,QAAQ,EAAE,MAAM,SAAS,OAAO,MAAM,MAAM,CAAC;AACxD;AAAA,YACF;AAEA,kBAAM,QAAQ,MAAM;AAEpB,gBAAI,MAAM,SAAS,MAAM;AACvB,sBAAQ;AAAA,gBACN,cAAc,MAAM,MAAM;AAAA,gBAC1B,kBAAkB,MAAM,MAAM;AAAA,cAChC;AAAA,YACF;AAEA,kBAAM,SAAS,MAAM,QAAQ,CAAC;AAE9B,iBAAI,iCAAQ,kBAAiB,MAAM;AACjC,6BAAe,uBAAuB,OAAO,aAAa;AAAA,YAC5D;AAEA,iBAAI,iCAAQ,UAAS,MAAM;AACzB;AAAA,YACF;AAEA,kBAAM,QAAQ,OAAO;AAErB,gBAAI,MAAM,WAAW,MAAM;AACzB,yBAAW,QAAQ;AAAA,gBACjB,MAAM;AAAA,gBACN,WAAW,MAAM;AAAA,cACnB,CAAC;AAAA,YACH;AAEA,gBAAI,MAAM,cAAc,MAAM;AAC5B,yBAAW,YAAY,MAAM,YAAY;AAGvC,sBAAM,aAAaA,YAAW;AAE9B,2BAAW,QAAQ;AAAA,kBACjB,MAAM;AAAA,kBACN,cAAc;AAAA,kBACd;AAAA,kBACA,UAAU,SAAS,SAAS;AAAA,kBAC5B,eAAe,SAAS,SAAS;AAAA,gBACnC,CAAC;AAED,2BAAW,QAAQ;AAAA,kBACjB,MAAM;AAAA,kBACN,cAAc;AAAA,kBACd;AAAA,kBACA,UAAU,SAAS,SAAS;AAAA,kBAC5B,MAAM,SAAS,SAAS;AAAA,gBAC1B,CAAC;AAAA,cACH;AAAA,YACF;AAAA,UACF;AAAA,UAEA,MAAM,YAAY;AAChB,uBAAW,QAAQ,EAAE,MAAM,UAAU,cAAc,MAAM,CAAC;AAAA,UAC5D;AAAA,QACF,CAAC;AAAA,MACH;AAAA,MACA,SAAS,EAAE,WAAW,YAAY;AAAA,MAClC;AAAA,IACF;AAAA,EACF;AACF;AAIA,IAAM,4BAA4B,cAAE,OAAO;AAAA,EACzC,SAAS,cAAE;AAA
A,IACT,cAAE,OAAO;AAAA,MACP,SAAS,cAAE,OAAO;AAAA,QAChB,MAAM,cAAE,QAAQ,WAAW;AAAA,QAC3B,SAAS,cAAE,OAAO,EAAE,SAAS;AAAA,QAC7B,YAAY,cACT;AAAA,UACC,cAAE,OAAO;AAAA,YACP,UAAU,cAAE,OAAO;AAAA,cACjB,MAAM,cAAE,OAAO;AAAA,cACf,WAAW,cAAE,OAAO;AAAA,YACtB,CAAC;AAAA,UACH,CAAC;AAAA,QACH,EACC,SAAS,EACT,SAAS;AAAA,MACd,CAAC;AAAA,MACD,OAAO,cAAE,OAAO;AAAA,MAChB,eAAe,cAAE,OAAO,EAAE,SAAS,EAAE,SAAS;AAAA,IAChD,CAAC;AAAA,EACH;AAAA,EACA,QAAQ,cAAE,QAAQ,iBAAiB;AAAA,EACnC,OAAO,cAAE,OAAO;AAAA,IACd,eAAe,cAAE,OAAO;AAAA,IACxB,mBAAmB,cAAE,OAAO;AAAA,EAC9B,CAAC;AACH,CAAC;AAID,IAAM,yBAAyB,cAAE,OAAO;AAAA,EACtC,QAAQ,cAAE,QAAQ,uBAAuB;AAAA,EACzC,SAAS,cAAE;AAAA,IACT,cAAE,OAAO;AAAA,MACP,OAAO,cAAE,OAAO;AAAA,QACd,MAAM,cAAE,KAAK,CAAC,WAAW,CAAC,EAAE,SAAS;AAAA,QACrC,SAAS,cAAE,OAAO,EAAE,SAAS,EAAE,SAAS;AAAA,QACxC,YAAY,cACT;AAAA,UACC,cAAE,OAAO;AAAA,YACP,UAAU,cAAE,OAAO,EAAE,MAAM,cAAE,OAAO,GAAG,WAAW,cAAE,OAAO,EAAE,CAAC;AAAA,UAChE,CAAC;AAAA,QACH,EACC,SAAS,EACT,SAAS;AAAA,MACd,CAAC;AAAA,MACD,eAAe,cAAE,OAAO,EAAE,SAAS,EAAE,SAAS;AAAA,MAC9C,OAAO,cAAE,OAAO;AAAA,IAClB,CAAC;AAAA,EACH;AAAA,EACA,OAAO,cACJ,OAAO;AAAA,IACN,eAAe,cAAE,OAAO;AAAA,IACxB,mBAAmB,cAAE,OAAO;AAAA,EAC9B,CAAC,EACA,SAAS,EACT,SAAS;AACd,CAAC;;;ADxVM,IAAM,UAAN,MAAc;AAAA,EAMnB,YACE,UAII,CAAC,GACL;AAtBJ;AAuBI,SAAK,UAAU,QAAQ;AACvB,SAAK,SAAS,QAAQ;AACtB,SAAK,cAAa,aAAQ,eAAR,YAAsB;AAAA,EAC1C;AAAA,EAEA,IAAY,aAAa;AA5B3B;AA6BI,WAAO;AAAA,MACL,UAAS,UAAK,YAAL,YAAgB;AAAA,MACzB,SAAS,OAAO;AAAA,QACd,eAAe,cAAU,mCAAW;AAAA,UAClC,QAAQ,KAAK;AAAA,UACb,yBAAyB;AAAA,UACzB,aAAa;AAAA,QACf,CAAC,CAAC;AAAA,MACJ;AAAA,IACF;AAAA,EACF;AAAA,EAEA,KAAK,SAA6B,WAAgC,CAAC,GAAG;AACpE,WAAO,IAAI,yBAAyB,SAAS,UAAU;AAAA,MACrD,UAAU;AAAA,MACV,GAAG,KAAK;AAAA,MACR,YAAY,KAAK;AAAA,IACnB,CAAC;AAAA,EACH;AACF;AAKO,IAAM,UAAU,IAAI,QAAQ;","names":["import_provider_utils","import_provider","import_provider_utils","import_zod","generateId"]}
|
package/dist/index.mjs
ADDED
|
@@ -0,0 +1,425 @@
|
|
|
1
|
+
// src/mistral-facade.ts
|
|
2
|
+
import { generateId, loadApiKey } from "@ai-sdk/provider-utils";
|
|
3
|
+
|
|
4
|
+
// src/mistral-chat-language-model.ts
|
|
5
|
+
import {
|
|
6
|
+
UnsupportedFunctionalityError as UnsupportedFunctionalityError2
|
|
7
|
+
} from "@ai-sdk/provider";
|
|
8
|
+
import {
|
|
9
|
+
createEventSourceResponseHandler,
|
|
10
|
+
createJsonResponseHandler,
|
|
11
|
+
postJsonToApi
|
|
12
|
+
} from "@ai-sdk/provider-utils";
|
|
13
|
+
import { z as z2 } from "zod";
|
|
14
|
+
|
|
15
|
+
// src/convert-to-mistral-chat-messages.ts
|
|
16
|
+
import {
|
|
17
|
+
UnsupportedFunctionalityError
|
|
18
|
+
} from "@ai-sdk/provider";
|
|
19
|
+
function convertToMistralChatMessages(prompt) {
|
|
20
|
+
const messages = [];
|
|
21
|
+
for (const { role, content } of prompt) {
|
|
22
|
+
switch (role) {
|
|
23
|
+
case "system": {
|
|
24
|
+
messages.push({ role: "system", content });
|
|
25
|
+
break;
|
|
26
|
+
}
|
|
27
|
+
case "user": {
|
|
28
|
+
messages.push({
|
|
29
|
+
role: "user",
|
|
30
|
+
content: content.map((part) => {
|
|
31
|
+
switch (part.type) {
|
|
32
|
+
case "text": {
|
|
33
|
+
return part.text;
|
|
34
|
+
}
|
|
35
|
+
case "image": {
|
|
36
|
+
throw new UnsupportedFunctionalityError({
|
|
37
|
+
functionality: "image-part"
|
|
38
|
+
});
|
|
39
|
+
}
|
|
40
|
+
}
|
|
41
|
+
}).join("")
|
|
42
|
+
});
|
|
43
|
+
break;
|
|
44
|
+
}
|
|
45
|
+
case "assistant": {
|
|
46
|
+
let text = "";
|
|
47
|
+
const toolCalls = [];
|
|
48
|
+
for (const part of content) {
|
|
49
|
+
switch (part.type) {
|
|
50
|
+
case "text": {
|
|
51
|
+
text += part.text;
|
|
52
|
+
break;
|
|
53
|
+
}
|
|
54
|
+
case "tool-call": {
|
|
55
|
+
toolCalls.push({
|
|
56
|
+
id: part.toolCallId,
|
|
57
|
+
type: "function",
|
|
58
|
+
function: {
|
|
59
|
+
name: part.toolName,
|
|
60
|
+
arguments: JSON.stringify(part.args)
|
|
61
|
+
}
|
|
62
|
+
});
|
|
63
|
+
break;
|
|
64
|
+
}
|
|
65
|
+
default: {
|
|
66
|
+
const _exhaustiveCheck = part;
|
|
67
|
+
throw new Error(`Unsupported part: ${_exhaustiveCheck}`);
|
|
68
|
+
}
|
|
69
|
+
}
|
|
70
|
+
}
|
|
71
|
+
messages.push({
|
|
72
|
+
role: "assistant",
|
|
73
|
+
content: text,
|
|
74
|
+
tool_calls: toolCalls.length > 0 ? toolCalls.map(({ function: { name, arguments: args } }) => ({
|
|
75
|
+
id: "null",
|
|
76
|
+
type: "function",
|
|
77
|
+
function: { name, arguments: args }
|
|
78
|
+
})) : void 0
|
|
79
|
+
});
|
|
80
|
+
break;
|
|
81
|
+
}
|
|
82
|
+
case "tool": {
|
|
83
|
+
for (const toolResponse of content) {
|
|
84
|
+
messages.push({
|
|
85
|
+
role: "tool",
|
|
86
|
+
name: toolResponse.toolName,
|
|
87
|
+
content: JSON.stringify(toolResponse.result)
|
|
88
|
+
});
|
|
89
|
+
}
|
|
90
|
+
break;
|
|
91
|
+
}
|
|
92
|
+
default: {
|
|
93
|
+
const _exhaustiveCheck = role;
|
|
94
|
+
throw new Error(`Unsupported role: ${_exhaustiveCheck}`);
|
|
95
|
+
}
|
|
96
|
+
}
|
|
97
|
+
}
|
|
98
|
+
return messages;
|
|
99
|
+
}
|
|
100
|
+
|
|
101
|
+
// src/map-mistral-finish-reason.ts
|
|
102
|
+
function mapMistralFinishReason(finishReason) {
|
|
103
|
+
switch (finishReason) {
|
|
104
|
+
case "stop":
|
|
105
|
+
return "stop";
|
|
106
|
+
case "length":
|
|
107
|
+
case "model_length":
|
|
108
|
+
return "length";
|
|
109
|
+
case "tool_calls":
|
|
110
|
+
return "tool-calls";
|
|
111
|
+
default:
|
|
112
|
+
return "other";
|
|
113
|
+
}
|
|
114
|
+
}
|
|
115
|
+
|
|
116
|
+
// src/mistral-error.ts
|
|
117
|
+
import { createJsonErrorResponseHandler } from "@ai-sdk/provider-utils";
|
|
118
|
+
import { z } from "zod";
|
|
119
|
+
var mistralErrorDataSchema = z.object({
|
|
120
|
+
object: z.literal("error"),
|
|
121
|
+
message: z.string(),
|
|
122
|
+
type: z.string(),
|
|
123
|
+
param: z.string().nullable(),
|
|
124
|
+
code: z.string().nullable()
|
|
125
|
+
});
|
|
126
|
+
var mistralFailedResponseHandler = createJsonErrorResponseHandler({
|
|
127
|
+
errorSchema: mistralErrorDataSchema,
|
|
128
|
+
errorToMessage: (data) => data.message
|
|
129
|
+
});
|
|
130
|
+
|
|
131
|
+
// src/mistral-chat-language-model.ts
|
|
132
|
+
var MistralChatLanguageModel = class {
|
|
133
|
+
constructor(modelId, settings, config) {
|
|
134
|
+
this.specificationVersion = "v1";
|
|
135
|
+
this.defaultObjectGenerationMode = "json";
|
|
136
|
+
this.modelId = modelId;
|
|
137
|
+
this.settings = settings;
|
|
138
|
+
this.config = config;
|
|
139
|
+
}
|
|
140
|
+
get provider() {
|
|
141
|
+
return this.config.provider;
|
|
142
|
+
}
|
|
143
|
+
getArgs({
|
|
144
|
+
mode,
|
|
145
|
+
prompt,
|
|
146
|
+
maxTokens,
|
|
147
|
+
temperature,
|
|
148
|
+
topP,
|
|
149
|
+
frequencyPenalty,
|
|
150
|
+
presencePenalty,
|
|
151
|
+
seed
|
|
152
|
+
}) {
|
|
153
|
+
var _a;
|
|
154
|
+
const type = mode.type;
|
|
155
|
+
const warnings = [];
|
|
156
|
+
if (frequencyPenalty != null) {
|
|
157
|
+
warnings.push({
|
|
158
|
+
type: "unsupported-setting",
|
|
159
|
+
setting: "frequencyPenalty"
|
|
160
|
+
});
|
|
161
|
+
}
|
|
162
|
+
if (presencePenalty != null) {
|
|
163
|
+
warnings.push({
|
|
164
|
+
type: "unsupported-setting",
|
|
165
|
+
setting: "presencePenalty"
|
|
166
|
+
});
|
|
167
|
+
}
|
|
168
|
+
const baseArgs = {
|
|
169
|
+
// model id:
|
|
170
|
+
model: this.modelId,
|
|
171
|
+
// model specific settings:
|
|
172
|
+
safe_prompt: this.settings.safePrompt,
|
|
173
|
+
// standardized settings:
|
|
174
|
+
max_tokens: maxTokens,
|
|
175
|
+
temperature,
|
|
176
|
+
// uses 0..1 scale
|
|
177
|
+
top_p: topP,
|
|
178
|
+
random_seed: seed,
|
|
179
|
+
// messages:
|
|
180
|
+
messages: convertToMistralChatMessages(prompt)
|
|
181
|
+
};
|
|
182
|
+
switch (type) {
|
|
183
|
+
case "regular": {
|
|
184
|
+
const tools = ((_a = mode.tools) == null ? void 0 : _a.length) ? mode.tools : void 0;
|
|
185
|
+
return {
|
|
186
|
+
args: {
|
|
187
|
+
...baseArgs,
|
|
188
|
+
tools: tools == null ? void 0 : tools.map((tool) => ({
|
|
189
|
+
type: "function",
|
|
190
|
+
function: {
|
|
191
|
+
name: tool.name,
|
|
192
|
+
description: tool.description,
|
|
193
|
+
parameters: tool.parameters
|
|
194
|
+
}
|
|
195
|
+
}))
|
|
196
|
+
},
|
|
197
|
+
warnings
|
|
198
|
+
};
|
|
199
|
+
}
|
|
200
|
+
case "object-json": {
|
|
201
|
+
return {
|
|
202
|
+
args: {
|
|
203
|
+
...baseArgs,
|
|
204
|
+
response_format: { type: "json_object" }
|
|
205
|
+
},
|
|
206
|
+
warnings
|
|
207
|
+
};
|
|
208
|
+
}
|
|
209
|
+
case "object-tool": {
|
|
210
|
+
return {
|
|
211
|
+
args: {
|
|
212
|
+
...baseArgs,
|
|
213
|
+
tool_choice: "any",
|
|
214
|
+
tools: [{ type: "function", function: mode.tool }]
|
|
215
|
+
},
|
|
216
|
+
warnings
|
|
217
|
+
};
|
|
218
|
+
}
|
|
219
|
+
case "object-grammar": {
|
|
220
|
+
throw new UnsupportedFunctionalityError2({
|
|
221
|
+
functionality: "object-grammar mode"
|
|
222
|
+
});
|
|
223
|
+
}
|
|
224
|
+
default: {
|
|
225
|
+
const _exhaustiveCheck = type;
|
|
226
|
+
throw new Error(`Unsupported type: ${_exhaustiveCheck}`);
|
|
227
|
+
}
|
|
228
|
+
}
|
|
229
|
+
}
|
|
230
|
+
async doGenerate(options) {
|
|
231
|
+
var _a, _b;
|
|
232
|
+
const { args, warnings } = this.getArgs(options);
|
|
233
|
+
const response = await postJsonToApi({
|
|
234
|
+
url: `${this.config.baseUrl}/chat/completions`,
|
|
235
|
+
headers: this.config.headers(),
|
|
236
|
+
body: args,
|
|
237
|
+
failedResponseHandler: mistralFailedResponseHandler,
|
|
238
|
+
successfulResponseHandler: createJsonResponseHandler(
|
|
239
|
+
mistralChatResponseSchema
|
|
240
|
+
),
|
|
241
|
+
abortSignal: options.abortSignal
|
|
242
|
+
});
|
|
243
|
+
const { messages: rawPrompt, ...rawSettings } = args;
|
|
244
|
+
const choice = response.choices[0];
|
|
245
|
+
return {
|
|
246
|
+
text: (_a = choice.message.content) != null ? _a : void 0,
|
|
247
|
+
toolCalls: (_b = choice.message.tool_calls) == null ? void 0 : _b.map((toolCall) => ({
|
|
248
|
+
toolCallType: "function",
|
|
249
|
+
toolCallId: this.config.generateId(),
|
|
250
|
+
toolName: toolCall.function.name,
|
|
251
|
+
args: toolCall.function.arguments
|
|
252
|
+
})),
|
|
253
|
+
finishReason: mapMistralFinishReason(choice.finish_reason),
|
|
254
|
+
usage: {
|
|
255
|
+
promptTokens: response.usage.prompt_tokens,
|
|
256
|
+
completionTokens: response.usage.completion_tokens
|
|
257
|
+
},
|
|
258
|
+
rawCall: { rawPrompt, rawSettings },
|
|
259
|
+
warnings
|
|
260
|
+
};
|
|
261
|
+
}
|
|
262
|
+
async doStream(options) {
|
|
263
|
+
const { args, warnings } = this.getArgs(options);
|
|
264
|
+
const response = await postJsonToApi({
|
|
265
|
+
url: `${this.config.baseUrl}/chat/completions`,
|
|
266
|
+
headers: this.config.headers(),
|
|
267
|
+
body: {
|
|
268
|
+
...args,
|
|
269
|
+
stream: true
|
|
270
|
+
},
|
|
271
|
+
failedResponseHandler: mistralFailedResponseHandler,
|
|
272
|
+
successfulResponseHandler: createEventSourceResponseHandler(
|
|
273
|
+
mistralChatChunkSchema
|
|
274
|
+
),
|
|
275
|
+
abortSignal: options.abortSignal
|
|
276
|
+
});
|
|
277
|
+
const { messages: rawPrompt, ...rawSettings } = args;
|
|
278
|
+
let finishReason = "other";
|
|
279
|
+
let usage = {
|
|
280
|
+
promptTokens: Number.NaN,
|
|
281
|
+
completionTokens: Number.NaN
|
|
282
|
+
};
|
|
283
|
+
const generateId2 = this.config.generateId;
|
|
284
|
+
return {
|
|
285
|
+
stream: response.pipeThrough(
|
|
286
|
+
new TransformStream({
|
|
287
|
+
transform(chunk, controller) {
|
|
288
|
+
if (!chunk.success) {
|
|
289
|
+
controller.enqueue({ type: "error", error: chunk.error });
|
|
290
|
+
return;
|
|
291
|
+
}
|
|
292
|
+
const value = chunk.value;
|
|
293
|
+
if (value.usage != null) {
|
|
294
|
+
usage = {
|
|
295
|
+
promptTokens: value.usage.prompt_tokens,
|
|
296
|
+
completionTokens: value.usage.completion_tokens
|
|
297
|
+
};
|
|
298
|
+
}
|
|
299
|
+
const choice = value.choices[0];
|
|
300
|
+
if ((choice == null ? void 0 : choice.finish_reason) != null) {
|
|
301
|
+
finishReason = mapMistralFinishReason(choice.finish_reason);
|
|
302
|
+
}
|
|
303
|
+
if ((choice == null ? void 0 : choice.delta) == null) {
|
|
304
|
+
return;
|
|
305
|
+
}
|
|
306
|
+
const delta = choice.delta;
|
|
307
|
+
if (delta.content != null) {
|
|
308
|
+
controller.enqueue({
|
|
309
|
+
type: "text-delta",
|
|
310
|
+
textDelta: delta.content
|
|
311
|
+
});
|
|
312
|
+
}
|
|
313
|
+
if (delta.tool_calls != null) {
|
|
314
|
+
for (const toolCall of delta.tool_calls) {
|
|
315
|
+
const toolCallId = generateId2();
|
|
316
|
+
controller.enqueue({
|
|
317
|
+
type: "tool-call-delta",
|
|
318
|
+
toolCallType: "function",
|
|
319
|
+
toolCallId,
|
|
320
|
+
toolName: toolCall.function.name,
|
|
321
|
+
argsTextDelta: toolCall.function.arguments
|
|
322
|
+
});
|
|
323
|
+
controller.enqueue({
|
|
324
|
+
type: "tool-call",
|
|
325
|
+
toolCallType: "function",
|
|
326
|
+
toolCallId,
|
|
327
|
+
toolName: toolCall.function.name,
|
|
328
|
+
args: toolCall.function.arguments
|
|
329
|
+
});
|
|
330
|
+
}
|
|
331
|
+
}
|
|
332
|
+
},
|
|
333
|
+
flush(controller) {
|
|
334
|
+
controller.enqueue({ type: "finish", finishReason, usage });
|
|
335
|
+
}
|
|
336
|
+
})
|
|
337
|
+
),
|
|
338
|
+
rawCall: { rawPrompt, rawSettings },
|
|
339
|
+
warnings
|
|
340
|
+
};
|
|
341
|
+
}
|
|
342
|
+
};
|
|
343
|
+
var mistralChatResponseSchema = z2.object({
|
|
344
|
+
choices: z2.array(
|
|
345
|
+
z2.object({
|
|
346
|
+
message: z2.object({
|
|
347
|
+
role: z2.literal("assistant"),
|
|
348
|
+
content: z2.string().nullable(),
|
|
349
|
+
tool_calls: z2.array(
|
|
350
|
+
z2.object({
|
|
351
|
+
function: z2.object({
|
|
352
|
+
name: z2.string(),
|
|
353
|
+
arguments: z2.string()
|
|
354
|
+
})
|
|
355
|
+
})
|
|
356
|
+
).optional().nullable()
|
|
357
|
+
}),
|
|
358
|
+
index: z2.number(),
|
|
359
|
+
finish_reason: z2.string().optional().nullable()
|
|
360
|
+
})
|
|
361
|
+
),
|
|
362
|
+
object: z2.literal("chat.completion"),
|
|
363
|
+
usage: z2.object({
|
|
364
|
+
prompt_tokens: z2.number(),
|
|
365
|
+
completion_tokens: z2.number()
|
|
366
|
+
})
|
|
367
|
+
});
|
|
368
|
+
var mistralChatChunkSchema = z2.object({
|
|
369
|
+
object: z2.literal("chat.completion.chunk"),
|
|
370
|
+
choices: z2.array(
|
|
371
|
+
z2.object({
|
|
372
|
+
delta: z2.object({
|
|
373
|
+
role: z2.enum(["assistant"]).optional(),
|
|
374
|
+
content: z2.string().nullable().optional(),
|
|
375
|
+
tool_calls: z2.array(
|
|
376
|
+
z2.object({
|
|
377
|
+
function: z2.object({ name: z2.string(), arguments: z2.string() })
|
|
378
|
+
})
|
|
379
|
+
).optional().nullable()
|
|
380
|
+
}),
|
|
381
|
+
finish_reason: z2.string().nullable().optional(),
|
|
382
|
+
index: z2.number()
|
|
383
|
+
})
|
|
384
|
+
),
|
|
385
|
+
usage: z2.object({
|
|
386
|
+
prompt_tokens: z2.number(),
|
|
387
|
+
completion_tokens: z2.number()
|
|
388
|
+
}).optional().nullable()
|
|
389
|
+
});
|
|
390
|
+
|
|
391
|
+
// src/mistral-facade.ts
|
|
392
|
+
var Mistral = class {
|
|
393
|
+
constructor(options = {}) {
|
|
394
|
+
var _a;
|
|
395
|
+
this.baseUrl = options.baseUrl;
|
|
396
|
+
this.apiKey = options.apiKey;
|
|
397
|
+
this.generateId = (_a = options.generateId) != null ? _a : generateId;
|
|
398
|
+
}
|
|
399
|
+
get baseConfig() {
|
|
400
|
+
var _a;
|
|
401
|
+
return {
|
|
402
|
+
baseUrl: (_a = this.baseUrl) != null ? _a : "https://api.mistral.ai/v1",
|
|
403
|
+
headers: () => ({
|
|
404
|
+
Authorization: `Bearer ${loadApiKey({
|
|
405
|
+
apiKey: this.apiKey,
|
|
406
|
+
environmentVariableName: "MISTRAL_API_KEY",
|
|
407
|
+
description: "Mistral"
|
|
408
|
+
})}`
|
|
409
|
+
})
|
|
410
|
+
};
|
|
411
|
+
}
|
|
412
|
+
chat(modelId, settings = {}) {
|
|
413
|
+
return new MistralChatLanguageModel(modelId, settings, {
|
|
414
|
+
provider: "mistral.chat",
|
|
415
|
+
...this.baseConfig,
|
|
416
|
+
generateId: this.generateId
|
|
417
|
+
});
|
|
418
|
+
}
|
|
419
|
+
};
|
|
420
|
+
var mistral = new Mistral();
|
|
421
|
+
export {
|
|
422
|
+
Mistral,
|
|
423
|
+
mistral
|
|
424
|
+
};
|
|
425
|
+
//# sourceMappingURL=index.mjs.map
|
|
@@ -0,0 +1 @@
|
|
|
1
|
+
{"version":3,"sources":["../src/mistral-facade.ts","../src/mistral-chat-language-model.ts","../src/convert-to-mistral-chat-messages.ts","../src/map-mistral-finish-reason.ts","../src/mistral-error.ts"],"sourcesContent":["import { generateId, loadApiKey } from '@ai-sdk/provider-utils';\nimport { MistralChatLanguageModel } from './mistral-chat-language-model';\nimport {\n MistralChatModelId,\n MistralChatSettings,\n} from './mistral-chat-settings';\n\n/**\n * Mistral provider.\n */\nexport class Mistral {\n readonly baseUrl?: string;\n readonly apiKey?: string;\n\n private readonly generateId: () => string;\n\n constructor(\n options: {\n baseUrl?: string;\n apiKey?: string;\n generateId?: () => string;\n } = {},\n ) {\n this.baseUrl = options.baseUrl;\n this.apiKey = options.apiKey;\n this.generateId = options.generateId ?? generateId;\n }\n\n private get baseConfig() {\n return {\n baseUrl: this.baseUrl ?? 'https://api.mistral.ai/v1',\n headers: () => ({\n Authorization: `Bearer ${loadApiKey({\n apiKey: this.apiKey,\n environmentVariableName: 'MISTRAL_API_KEY',\n description: 'Mistral',\n })}`,\n }),\n };\n }\n\n chat(modelId: MistralChatModelId, settings: MistralChatSettings = {}) {\n return new MistralChatLanguageModel(modelId, settings, {\n provider: 'mistral.chat',\n ...this.baseConfig,\n generateId: this.generateId,\n });\n }\n}\n\n/**\n * Default Mistral provider instance.\n */\nexport const mistral = new Mistral();\n","import {\n LanguageModelV1,\n LanguageModelV1CallWarning,\n LanguageModelV1FinishReason,\n LanguageModelV1StreamPart,\n UnsupportedFunctionalityError,\n} from '@ai-sdk/provider';\nimport {\n ParseResult,\n createEventSourceResponseHandler,\n createJsonResponseHandler,\n postJsonToApi,\n} from '@ai-sdk/provider-utils';\nimport { z } from 'zod';\nimport { convertToMistralChatMessages } from './convert-to-mistral-chat-messages';\nimport { mapMistralFinishReason } from './map-mistral-finish-reason';\nimport {\n MistralChatModelId,\n 
MistralChatSettings,\n} from './mistral-chat-settings';\nimport { mistralFailedResponseHandler } from './mistral-error';\n\ntype MistralChatConfig = {\n provider: string;\n baseUrl: string;\n headers: () => Record<string, string | undefined>;\n generateId: () => string;\n};\n\nexport class MistralChatLanguageModel implements LanguageModelV1 {\n readonly specificationVersion = 'v1';\n readonly defaultObjectGenerationMode = 'json';\n\n readonly modelId: MistralChatModelId;\n readonly settings: MistralChatSettings;\n\n private readonly config: MistralChatConfig;\n\n constructor(\n modelId: MistralChatModelId,\n settings: MistralChatSettings,\n config: MistralChatConfig,\n ) {\n this.modelId = modelId;\n this.settings = settings;\n this.config = config;\n }\n\n get provider(): string {\n return this.config.provider;\n }\n\n private getArgs({\n mode,\n prompt,\n maxTokens,\n temperature,\n topP,\n frequencyPenalty,\n presencePenalty,\n seed,\n }: Parameters<LanguageModelV1['doGenerate']>[0]) {\n const type = mode.type;\n\n const warnings: LanguageModelV1CallWarning[] = [];\n\n if (frequencyPenalty != null) {\n warnings.push({\n type: 'unsupported-setting',\n setting: 'frequencyPenalty',\n });\n }\n\n if (presencePenalty != null) {\n warnings.push({\n type: 'unsupported-setting',\n setting: 'presencePenalty',\n });\n }\n\n const baseArgs = {\n // model id:\n model: this.modelId,\n\n // model specific settings:\n safe_prompt: this.settings.safePrompt,\n\n // standardized settings:\n max_tokens: maxTokens,\n temperature, // uses 0..1 scale\n top_p: topP,\n random_seed: seed,\n\n // messages:\n messages: convertToMistralChatMessages(prompt),\n };\n\n switch (type) {\n case 'regular': {\n // when the tools array is empty, change it to undefined to prevent OpenAI errors:\n const tools = mode.tools?.length ? 
mode.tools : undefined;\n\n return {\n args: {\n ...baseArgs,\n tools: tools?.map(tool => ({\n type: 'function',\n function: {\n name: tool.name,\n description: tool.description,\n parameters: tool.parameters,\n },\n })),\n },\n warnings,\n };\n }\n\n case 'object-json': {\n return {\n args: {\n ...baseArgs,\n response_format: { type: 'json_object' },\n },\n warnings,\n };\n }\n\n case 'object-tool': {\n return {\n args: {\n ...baseArgs,\n tool_choice: 'any',\n tools: [{ type: 'function', function: mode.tool }],\n },\n warnings,\n };\n }\n\n case 'object-grammar': {\n throw new UnsupportedFunctionalityError({\n functionality: 'object-grammar mode',\n });\n }\n\n default: {\n const _exhaustiveCheck: never = type;\n throw new Error(`Unsupported type: ${_exhaustiveCheck}`);\n }\n }\n }\n\n async doGenerate(\n options: Parameters<LanguageModelV1['doGenerate']>[0],\n ): Promise<Awaited<ReturnType<LanguageModelV1['doGenerate']>>> {\n const { args, warnings } = this.getArgs(options);\n\n const response = await postJsonToApi({\n url: `${this.config.baseUrl}/chat/completions`,\n headers: this.config.headers(),\n body: args,\n failedResponseHandler: mistralFailedResponseHandler,\n successfulResponseHandler: createJsonResponseHandler(\n mistralChatResponseSchema,\n ),\n abortSignal: options.abortSignal,\n });\n\n const { messages: rawPrompt, ...rawSettings } = args;\n const choice = response.choices[0];\n\n return {\n text: choice.message.content ?? 
undefined,\n toolCalls: choice.message.tool_calls?.map(toolCall => ({\n toolCallType: 'function',\n toolCallId: this.config.generateId(),\n toolName: toolCall.function.name,\n args: toolCall.function.arguments!,\n })),\n finishReason: mapMistralFinishReason(choice.finish_reason),\n usage: {\n promptTokens: response.usage.prompt_tokens,\n completionTokens: response.usage.completion_tokens,\n },\n rawCall: { rawPrompt, rawSettings },\n warnings,\n };\n }\n\n async doStream(\n options: Parameters<LanguageModelV1['doStream']>[0],\n ): Promise<Awaited<ReturnType<LanguageModelV1['doStream']>>> {\n const { args, warnings } = this.getArgs(options);\n\n const response = await postJsonToApi({\n url: `${this.config.baseUrl}/chat/completions`,\n headers: this.config.headers(),\n body: {\n ...args,\n stream: true,\n },\n failedResponseHandler: mistralFailedResponseHandler,\n successfulResponseHandler: createEventSourceResponseHandler(\n mistralChatChunkSchema,\n ),\n abortSignal: options.abortSignal,\n });\n\n const { messages: rawPrompt, ...rawSettings } = args;\n\n let finishReason: LanguageModelV1FinishReason = 'other';\n let usage: { promptTokens: number; completionTokens: number } = {\n promptTokens: Number.NaN,\n completionTokens: Number.NaN,\n };\n\n const generateId = this.config.generateId;\n\n return {\n stream: response.pipeThrough(\n new TransformStream<\n ParseResult<z.infer<typeof mistralChatChunkSchema>>,\n LanguageModelV1StreamPart\n >({\n transform(chunk, controller) {\n if (!chunk.success) {\n controller.enqueue({ type: 'error', error: chunk.error });\n return;\n }\n\n const value = chunk.value;\n\n if (value.usage != null) {\n usage = {\n promptTokens: value.usage.prompt_tokens,\n completionTokens: value.usage.completion_tokens,\n };\n }\n\n const choice = value.choices[0];\n\n if (choice?.finish_reason != null) {\n finishReason = mapMistralFinishReason(choice.finish_reason);\n }\n\n if (choice?.delta == null) {\n return;\n }\n\n const delta = 
choice.delta;\n\n if (delta.content != null) {\n controller.enqueue({\n type: 'text-delta',\n textDelta: delta.content,\n });\n }\n\n if (delta.tool_calls != null) {\n for (const toolCall of delta.tool_calls) {\n // mistral tool calls come in one piece\n\n const toolCallId = generateId(); // delta and tool call must have same id\n\n controller.enqueue({\n type: 'tool-call-delta',\n toolCallType: 'function',\n toolCallId,\n toolName: toolCall.function.name,\n argsTextDelta: toolCall.function.arguments,\n });\n\n controller.enqueue({\n type: 'tool-call',\n toolCallType: 'function',\n toolCallId,\n toolName: toolCall.function.name,\n args: toolCall.function.arguments,\n });\n }\n }\n },\n\n flush(controller) {\n controller.enqueue({ type: 'finish', finishReason, usage });\n },\n }),\n ),\n rawCall: { rawPrompt, rawSettings },\n warnings,\n };\n }\n}\n\n// limited version of the schema, focussed on what is needed for the implementation\n// this approach limits breakages when the API changes and increases efficiency\nconst mistralChatResponseSchema = z.object({\n choices: z.array(\n z.object({\n message: z.object({\n role: z.literal('assistant'),\n content: z.string().nullable(),\n tool_calls: z\n .array(\n z.object({\n function: z.object({\n name: z.string(),\n arguments: z.string(),\n }),\n }),\n )\n .optional()\n .nullable(),\n }),\n index: z.number(),\n finish_reason: z.string().optional().nullable(),\n }),\n ),\n object: z.literal('chat.completion'),\n usage: z.object({\n prompt_tokens: z.number(),\n completion_tokens: z.number(),\n }),\n});\n\n// limited version of the schema, focussed on what is needed for the implementation\n// this approach limits breakages when the API changes and increases efficiency\nconst mistralChatChunkSchema = z.object({\n object: z.literal('chat.completion.chunk'),\n choices: z.array(\n z.object({\n delta: z.object({\n role: z.enum(['assistant']).optional(),\n content: z.string().nullable().optional(),\n tool_calls: z\n .array(\n 
z.object({\n function: z.object({ name: z.string(), arguments: z.string() }),\n }),\n )\n .optional()\n .nullable(),\n }),\n finish_reason: z.string().nullable().optional(),\n index: z.number(),\n }),\n ),\n usage: z\n .object({\n prompt_tokens: z.number(),\n completion_tokens: z.number(),\n })\n .optional()\n .nullable(),\n});\n","import {\n LanguageModelV1Prompt,\n UnsupportedFunctionalityError,\n} from '@ai-sdk/provider';\nimport { MistralChatPrompt } from './mistral-chat-prompt';\n\nexport function convertToMistralChatMessages(\n prompt: LanguageModelV1Prompt,\n): MistralChatPrompt {\n const messages: MistralChatPrompt = [];\n\n for (const { role, content } of prompt) {\n switch (role) {\n case 'system': {\n messages.push({ role: 'system', content });\n break;\n }\n\n case 'user': {\n messages.push({\n role: 'user',\n content: content\n .map(part => {\n switch (part.type) {\n case 'text': {\n return part.text;\n }\n case 'image': {\n throw new UnsupportedFunctionalityError({\n functionality: 'image-part',\n });\n }\n }\n })\n .join(''),\n });\n break;\n }\n\n case 'assistant': {\n let text = '';\n const toolCalls: Array<{\n id: string;\n type: 'function';\n function: { name: string; arguments: string };\n }> = [];\n\n for (const part of content) {\n switch (part.type) {\n case 'text': {\n text += part.text;\n break;\n }\n case 'tool-call': {\n toolCalls.push({\n id: part.toolCallId,\n type: 'function',\n function: {\n name: part.toolName,\n arguments: JSON.stringify(part.args),\n },\n });\n break;\n }\n default: {\n const _exhaustiveCheck: never = part;\n throw new Error(`Unsupported part: ${_exhaustiveCheck}`);\n }\n }\n }\n\n messages.push({\n role: 'assistant',\n content: text,\n tool_calls:\n toolCalls.length > 0\n ? 
toolCalls.map(({ function: { name, arguments: args } }) => ({\n id: 'null',\n type: 'function',\n function: { name, arguments: args },\n }))\n : undefined,\n });\n\n break;\n }\n case 'tool': {\n for (const toolResponse of content) {\n messages.push({\n role: 'tool',\n name: toolResponse.toolName,\n content: JSON.stringify(toolResponse.result),\n });\n }\n break;\n }\n default: {\n const _exhaustiveCheck: never = role;\n throw new Error(`Unsupported role: ${_exhaustiveCheck}`);\n }\n }\n }\n\n return messages;\n}\n","import { LanguageModelV1FinishReason } from '@ai-sdk/provider';\n\nexport function mapMistralFinishReason(\n finishReason: string | null | undefined,\n): LanguageModelV1FinishReason {\n switch (finishReason) {\n case 'stop':\n return 'stop';\n case 'length':\n case 'model_length':\n return 'length';\n case 'tool_calls':\n return 'tool-calls';\n default:\n return 'other';\n }\n}\n","import { createJsonErrorResponseHandler } from '@ai-sdk/provider-utils';\nimport { z } from 'zod';\n\nconst mistralErrorDataSchema = z.object({\n object: z.literal('error'),\n message: z.string(),\n type: z.string(),\n param: z.string().nullable(),\n code: z.string().nullable(),\n});\n\nexport type MistralErrorData = z.infer<typeof mistralErrorDataSchema>;\n\nexport const mistralFailedResponseHandler = createJsonErrorResponseHandler({\n errorSchema: mistralErrorDataSchema,\n errorToMessage: data => 
data.message,\n});\n"],"mappings":";AAAA,SAAS,YAAY,kBAAkB;;;ACAvC;AAAA,EAKE,iCAAAA;AAAA,OACK;AACP;AAAA,EAEE;AAAA,EACA;AAAA,EACA;AAAA,OACK;AACP,SAAS,KAAAC,UAAS;;;ACblB;AAAA,EAEE;AAAA,OACK;AAGA,SAAS,6BACd,QACmB;AACnB,QAAM,WAA8B,CAAC;AAErC,aAAW,EAAE,MAAM,QAAQ,KAAK,QAAQ;AACtC,YAAQ,MAAM;AAAA,MACZ,KAAK,UAAU;AACb,iBAAS,KAAK,EAAE,MAAM,UAAU,QAAQ,CAAC;AACzC;AAAA,MACF;AAAA,MAEA,KAAK,QAAQ;AACX,iBAAS,KAAK;AAAA,UACZ,MAAM;AAAA,UACN,SAAS,QACN,IAAI,UAAQ;AACX,oBAAQ,KAAK,MAAM;AAAA,cACjB,KAAK,QAAQ;AACX,uBAAO,KAAK;AAAA,cACd;AAAA,cACA,KAAK,SAAS;AACZ,sBAAM,IAAI,8BAA8B;AAAA,kBACtC,eAAe;AAAA,gBACjB,CAAC;AAAA,cACH;AAAA,YACF;AAAA,UACF,CAAC,EACA,KAAK,EAAE;AAAA,QACZ,CAAC;AACD;AAAA,MACF;AAAA,MAEA,KAAK,aAAa;AAChB,YAAI,OAAO;AACX,cAAM,YAID,CAAC;AAEN,mBAAW,QAAQ,SAAS;AAC1B,kBAAQ,KAAK,MAAM;AAAA,YACjB,KAAK,QAAQ;AACX,sBAAQ,KAAK;AACb;AAAA,YACF;AAAA,YACA,KAAK,aAAa;AAChB,wBAAU,KAAK;AAAA,gBACb,IAAI,KAAK;AAAA,gBACT,MAAM;AAAA,gBACN,UAAU;AAAA,kBACR,MAAM,KAAK;AAAA,kBACX,WAAW,KAAK,UAAU,KAAK,IAAI;AAAA,gBACrC;AAAA,cACF,CAAC;AACD;AAAA,YACF;AAAA,YACA,SAAS;AACP,oBAAM,mBAA0B;AAChC,oBAAM,IAAI,MAAM,qBAAqB,gBAAgB,EAAE;AAAA,YACzD;AAAA,UACF;AAAA,QACF;AAEA,iBAAS,KAAK;AAAA,UACZ,MAAM;AAAA,UACN,SAAS;AAAA,UACT,YACE,UAAU,SAAS,IACf,UAAU,IAAI,CAAC,EAAE,UAAU,EAAE,MAAM,WAAW,KAAK,EAAE,OAAO;AAAA,YAC1D,IAAI;AAAA,YACJ,MAAM;AAAA,YACN,UAAU,EAAE,MAAM,WAAW,KAAK;AAAA,UACpC,EAAE,IACF;AAAA,QACR,CAAC;AAED;AAAA,MACF;AAAA,MACA,KAAK,QAAQ;AACX,mBAAW,gBAAgB,SAAS;AAClC,mBAAS,KAAK;AAAA,YACZ,MAAM;AAAA,YACN,MAAM,aAAa;AAAA,YACnB,SAAS,KAAK,UAAU,aAAa,MAAM;AAAA,UAC7C,CAAC;AAAA,QACH;AACA;AAAA,MACF;AAAA,MACA,SAAS;AACP,cAAM,mBAA0B;AAChC,cAAM,IAAI,MAAM,qBAAqB,gBAAgB,EAAE;AAAA,MACzD;AAAA,IACF;AAAA,EACF;AAEA,SAAO;AACT;;;ACtGO,SAAS,uBACd,cAC6B;AAC7B,UAAQ,cAAc;AAAA,IACpB,KAAK;AACH,aAAO;AAAA,IACT,KAAK;AAAA,IACL,KAAK;AACH,aAAO;AAAA,IACT,KAAK;AACH,aAAO;AAAA,IACT;AACE,aAAO;AAAA,EACX;AACF;;;AChBA,SAAS,sCAAsC;AAC/C,SAAS,SAAS;AAElB,IAAM,yBAAyB,EAAE,OAAO;AAAA,EACtC,QAAQ,EAAE,QAAQ,OAAO;AAAA,EACzB,SAAS,EAAE,OAAO;AAAA,EAClB,MAAM,EAAE,OAAO;AAAA,EACf,OAAO,EAAE,OAAO,EAAE,SAAS;AA
AA,EAC3B,MAAM,EAAE,OAAO,EAAE,SAAS;AAC5B,CAAC;AAIM,IAAM,+BAA+B,+BAA+B;AAAA,EACzE,aAAa;AAAA,EACb,gBAAgB,UAAQ,KAAK;AAC/B,CAAC;;;AHaM,IAAM,2BAAN,MAA0D;AAAA,EAS/D,YACE,SACA,UACA,QACA;AAZF,SAAS,uBAAuB;AAChC,SAAS,8BAA8B;AAYrC,SAAK,UAAU;AACf,SAAK,WAAW;AAChB,SAAK,SAAS;AAAA,EAChB;AAAA,EAEA,IAAI,WAAmB;AACrB,WAAO,KAAK,OAAO;AAAA,EACrB;AAAA,EAEQ,QAAQ;AAAA,IACd;AAAA,IACA;AAAA,IACA;AAAA,IACA;AAAA,IACA;AAAA,IACA;AAAA,IACA;AAAA,IACA;AAAA,EACF,GAAiD;AA7DnD;AA8DI,UAAM,OAAO,KAAK;AAElB,UAAM,WAAyC,CAAC;AAEhD,QAAI,oBAAoB,MAAM;AAC5B,eAAS,KAAK;AAAA,QACZ,MAAM;AAAA,QACN,SAAS;AAAA,MACX,CAAC;AAAA,IACH;AAEA,QAAI,mBAAmB,MAAM;AAC3B,eAAS,KAAK;AAAA,QACZ,MAAM;AAAA,QACN,SAAS;AAAA,MACX,CAAC;AAAA,IACH;AAEA,UAAM,WAAW;AAAA;AAAA,MAEf,OAAO,KAAK;AAAA;AAAA,MAGZ,aAAa,KAAK,SAAS;AAAA;AAAA,MAG3B,YAAY;AAAA,MACZ;AAAA;AAAA,MACA,OAAO;AAAA,MACP,aAAa;AAAA;AAAA,MAGb,UAAU,6BAA6B,MAAM;AAAA,IAC/C;AAEA,YAAQ,MAAM;AAAA,MACZ,KAAK,WAAW;AAEd,cAAM,UAAQ,UAAK,UAAL,mBAAY,UAAS,KAAK,QAAQ;AAEhD,eAAO;AAAA,UACL,MAAM;AAAA,YACJ,GAAG;AAAA,YACH,OAAO,+BAAO,IAAI,WAAS;AAAA,cACzB,MAAM;AAAA,cACN,UAAU;AAAA,gBACR,MAAM,KAAK;AAAA,gBACX,aAAa,KAAK;AAAA,gBAClB,YAAY,KAAK;AAAA,cACnB;AAAA,YACF;AAAA,UACF;AAAA,UACA;AAAA,QACF;AAAA,MACF;AAAA,MAEA,KAAK,eAAe;AAClB,eAAO;AAAA,UACL,MAAM;AAAA,YACJ,GAAG;AAAA,YACH,iBAAiB,EAAE,MAAM,cAAc;AAAA,UACzC;AAAA,UACA;AAAA,QACF;AAAA,MACF;AAAA,MAEA,KAAK,eAAe;AAClB,eAAO;AAAA,UACL,MAAM;AAAA,YACJ,GAAG;AAAA,YACH,aAAa;AAAA,YACb,OAAO,CAAC,EAAE,MAAM,YAAY,UAAU,KAAK,KAAK,CAAC;AAAA,UACnD;AAAA,UACA;AAAA,QACF;AAAA,MACF;AAAA,MAEA,KAAK,kBAAkB;AACrB,cAAM,IAAIC,+BAA8B;AAAA,UACtC,eAAe;AAAA,QACjB,CAAC;AAAA,MACH;AAAA,MAEA,SAAS;AACP,cAAM,mBAA0B;AAChC,cAAM,IAAI,MAAM,qBAAqB,gBAAgB,EAAE;AAAA,MACzD;AAAA,IACF;AAAA,EACF;AAAA,EAEA,MAAM,WACJ,SAC6D;AA1JjE;AA2JI,UAAM,EAAE,MAAM,SAAS,IAAI,KAAK,QAAQ,OAAO;AAE/C,UAAM,WAAW,MAAM,cAAc;AAAA,MACnC,KAAK,GAAG,KAAK,OAAO,OAAO;AAAA,MAC3B,SAAS,KAAK,OAAO,QAAQ;AAAA,MAC7B,MAAM;AAAA,MACN,uBAAuB;AAAA,MACvB,2BAA2B;AAAA,QACzB;AAAA,MACF;AAAA,MACA,aAAa,QAAQ;AAAA,IACvB,CAAC;AAED,UAAM,EAAE,UAAU,WAAW,GAAG,YAAY,IAAI;AAChD,UAAM,SAAS,SAAS,QA
AQ,CAAC;AAEjC,WAAO;AAAA,MACL,OAAM,YAAO,QAAQ,YAAf,YAA0B;AAAA,MAChC,YAAW,YAAO,QAAQ,eAAf,mBAA2B,IAAI,eAAa;AAAA,QACrD,cAAc;AAAA,QACd,YAAY,KAAK,OAAO,WAAW;AAAA,QACnC,UAAU,SAAS,SAAS;AAAA,QAC5B,MAAM,SAAS,SAAS;AAAA,MAC1B;AAAA,MACA,cAAc,uBAAuB,OAAO,aAAa;AAAA,MACzD,OAAO;AAAA,QACL,cAAc,SAAS,MAAM;AAAA,QAC7B,kBAAkB,SAAS,MAAM;AAAA,MACnC;AAAA,MACA,SAAS,EAAE,WAAW,YAAY;AAAA,MAClC;AAAA,IACF;AAAA,EACF;AAAA,EAEA,MAAM,SACJ,SAC2D;AAC3D,UAAM,EAAE,MAAM,SAAS,IAAI,KAAK,QAAQ,OAAO;AAE/C,UAAM,WAAW,MAAM,cAAc;AAAA,MACnC,KAAK,GAAG,KAAK,OAAO,OAAO;AAAA,MAC3B,SAAS,KAAK,OAAO,QAAQ;AAAA,MAC7B,MAAM;AAAA,QACJ,GAAG;AAAA,QACH,QAAQ;AAAA,MACV;AAAA,MACA,uBAAuB;AAAA,MACvB,2BAA2B;AAAA,QACzB;AAAA,MACF;AAAA,MACA,aAAa,QAAQ;AAAA,IACvB,CAAC;AAED,UAAM,EAAE,UAAU,WAAW,GAAG,YAAY,IAAI;AAEhD,QAAI,eAA4C;AAChD,QAAI,QAA4D;AAAA,MAC9D,cAAc,OAAO;AAAA,MACrB,kBAAkB,OAAO;AAAA,IAC3B;AAEA,UAAMC,cAAa,KAAK,OAAO;AAE/B,WAAO;AAAA,MACL,QAAQ,SAAS;AAAA,QACf,IAAI,gBAGF;AAAA,UACA,UAAU,OAAO,YAAY;AAC3B,gBAAI,CAAC,MAAM,SAAS;AAClB,yBAAW,QAAQ,EAAE,MAAM,SAAS,OAAO,MAAM,MAAM,CAAC;AACxD;AAAA,YACF;AAEA,kBAAM,QAAQ,MAAM;AAEpB,gBAAI,MAAM,SAAS,MAAM;AACvB,sBAAQ;AAAA,gBACN,cAAc,MAAM,MAAM;AAAA,gBAC1B,kBAAkB,MAAM,MAAM;AAAA,cAChC;AAAA,YACF;AAEA,kBAAM,SAAS,MAAM,QAAQ,CAAC;AAE9B,iBAAI,iCAAQ,kBAAiB,MAAM;AACjC,6BAAe,uBAAuB,OAAO,aAAa;AAAA,YAC5D;AAEA,iBAAI,iCAAQ,UAAS,MAAM;AACzB;AAAA,YACF;AAEA,kBAAM,QAAQ,OAAO;AAErB,gBAAI,MAAM,WAAW,MAAM;AACzB,yBAAW,QAAQ;AAAA,gBACjB,MAAM;AAAA,gBACN,WAAW,MAAM;AAAA,cACnB,CAAC;AAAA,YACH;AAEA,gBAAI,MAAM,cAAc,MAAM;AAC5B,yBAAW,YAAY,MAAM,YAAY;AAGvC,sBAAM,aAAaA,YAAW;AAE9B,2BAAW,QAAQ;AAAA,kBACjB,MAAM;AAAA,kBACN,cAAc;AAAA,kBACd;AAAA,kBACA,UAAU,SAAS,SAAS;AAAA,kBAC5B,eAAe,SAAS,SAAS;AAAA,gBACnC,CAAC;AAED,2BAAW,QAAQ;AAAA,kBACjB,MAAM;AAAA,kBACN,cAAc;AAAA,kBACd;AAAA,kBACA,UAAU,SAAS,SAAS;AAAA,kBAC5B,MAAM,SAAS,SAAS;AAAA,gBAC1B,CAAC;AAAA,cACH;AAAA,YACF;AAAA,UACF;AAAA,UAEA,MAAM,YAAY;AAChB,uBAAW,QAAQ,EAAE,MAAM,UAAU,cAAc,MAAM,CAAC;AAAA,UAC5D;AAAA,QACF,CAAC;AAAA,MACH;AAAA,MACA,SAAS,EAAE,WAAW,YAAY;AAAA,MAClC;AAAA,IACF;AAAA,EACF;AACF;AAIA,IAAM,4BAA4BC,GAAE,OAAO;AA
AA,EACzC,SAASA,GAAE;AAAA,IACTA,GAAE,OAAO;AAAA,MACP,SAASA,GAAE,OAAO;AAAA,QAChB,MAAMA,GAAE,QAAQ,WAAW;AAAA,QAC3B,SAASA,GAAE,OAAO,EAAE,SAAS;AAAA,QAC7B,YAAYA,GACT;AAAA,UACCA,GAAE,OAAO;AAAA,YACP,UAAUA,GAAE,OAAO;AAAA,cACjB,MAAMA,GAAE,OAAO;AAAA,cACf,WAAWA,GAAE,OAAO;AAAA,YACtB,CAAC;AAAA,UACH,CAAC;AAAA,QACH,EACC,SAAS,EACT,SAAS;AAAA,MACd,CAAC;AAAA,MACD,OAAOA,GAAE,OAAO;AAAA,MAChB,eAAeA,GAAE,OAAO,EAAE,SAAS,EAAE,SAAS;AAAA,IAChD,CAAC;AAAA,EACH;AAAA,EACA,QAAQA,GAAE,QAAQ,iBAAiB;AAAA,EACnC,OAAOA,GAAE,OAAO;AAAA,IACd,eAAeA,GAAE,OAAO;AAAA,IACxB,mBAAmBA,GAAE,OAAO;AAAA,EAC9B,CAAC;AACH,CAAC;AAID,IAAM,yBAAyBA,GAAE,OAAO;AAAA,EACtC,QAAQA,GAAE,QAAQ,uBAAuB;AAAA,EACzC,SAASA,GAAE;AAAA,IACTA,GAAE,OAAO;AAAA,MACP,OAAOA,GAAE,OAAO;AAAA,QACd,MAAMA,GAAE,KAAK,CAAC,WAAW,CAAC,EAAE,SAAS;AAAA,QACrC,SAASA,GAAE,OAAO,EAAE,SAAS,EAAE,SAAS;AAAA,QACxC,YAAYA,GACT;AAAA,UACCA,GAAE,OAAO;AAAA,YACP,UAAUA,GAAE,OAAO,EAAE,MAAMA,GAAE,OAAO,GAAG,WAAWA,GAAE,OAAO,EAAE,CAAC;AAAA,UAChE,CAAC;AAAA,QACH,EACC,SAAS,EACT,SAAS;AAAA,MACd,CAAC;AAAA,MACD,eAAeA,GAAE,OAAO,EAAE,SAAS,EAAE,SAAS;AAAA,MAC9C,OAAOA,GAAE,OAAO;AAAA,IAClB,CAAC;AAAA,EACH;AAAA,EACA,OAAOA,GACJ,OAAO;AAAA,IACN,eAAeA,GAAE,OAAO;AAAA,IACxB,mBAAmBA,GAAE,OAAO;AAAA,EAC9B,CAAC,EACA,SAAS,EACT,SAAS;AACd,CAAC;;;ADxVM,IAAM,UAAN,MAAc;AAAA,EAMnB,YACE,UAII,CAAC,GACL;AAtBJ;AAuBI,SAAK,UAAU,QAAQ;AACvB,SAAK,SAAS,QAAQ;AACtB,SAAK,cAAa,aAAQ,eAAR,YAAsB;AAAA,EAC1C;AAAA,EAEA,IAAY,aAAa;AA5B3B;AA6BI,WAAO;AAAA,MACL,UAAS,UAAK,YAAL,YAAgB;AAAA,MACzB,SAAS,OAAO;AAAA,QACd,eAAe,UAAU,WAAW;AAAA,UAClC,QAAQ,KAAK;AAAA,UACb,yBAAyB;AAAA,UACzB,aAAa;AAAA,QACf,CAAC,CAAC;AAAA,MACJ;AAAA,IACF;AAAA,EACF;AAAA,EAEA,KAAK,SAA6B,WAAgC,CAAC,GAAG;AACpE,WAAO,IAAI,yBAAyB,SAAS,UAAU;AAAA,MACrD,UAAU;AAAA,MACV,GAAG,KAAK;AAAA,MACR,YAAY,KAAK;AAAA,IACnB,CAAC;AAAA,EACH;AACF;AAKO,IAAM,UAAU,IAAI,QAAQ;","names":["UnsupportedFunctionalityError","z","UnsupportedFunctionalityError","generateId","z"]}
|
package/package.json
ADDED
|
@@ -0,0 +1,67 @@
|
|
|
1
|
+
{
|
|
2
|
+
"name": "@ai-sdk/mistral",
|
|
3
|
+
"version": "0.0.0",
|
|
4
|
+
"license": "Apache-2.0",
|
|
5
|
+
"sideEffects": false,
|
|
6
|
+
"main": "./dist/index.js",
|
|
7
|
+
"module": "./dist/index.mjs",
|
|
8
|
+
"types": "./dist/index.d.ts",
|
|
9
|
+
"files": [
|
|
10
|
+
"dist/**/*"
|
|
11
|
+
],
|
|
12
|
+
"scripts": {
|
|
13
|
+
"build": "tsup",
|
|
14
|
+
"clean": "rm -rf dist",
|
|
15
|
+
"dev": "tsup --watch",
|
|
16
|
+
"lint": "eslint \"./**/*.ts*\"",
|
|
17
|
+
"type-check": "tsc --noEmit",
|
|
18
|
+
"prettier-check": "prettier --check \"./**/*.ts*\"",
|
|
19
|
+
"test": "pnpm test:node && pnpm test:edge",
|
|
20
|
+
"test:edge": "vitest --config vitest.edge.config.js --run --threads=false",
|
|
21
|
+
"test:node": "vitest --config vitest.node.config.js --run --threads=false"
|
|
22
|
+
},
|
|
23
|
+
"exports": {
|
|
24
|
+
"./package.json": "./package.json",
|
|
25
|
+
".": {
|
|
26
|
+
"types": "./dist/index.d.ts",
|
|
27
|
+
"import": "./dist/index.mjs",
|
|
28
|
+
"require": "./dist/index.js"
|
|
29
|
+
}
|
|
30
|
+
},
|
|
31
|
+
"dependencies": {
|
|
32
|
+
"@ai-sdk/provider": "0.0.0",
|
|
33
|
+
"@ai-sdk/provider-utils": "0.0.0"
|
|
34
|
+
},
|
|
35
|
+
"devDependencies": {
|
|
36
|
+
"@types/node": "^18",
|
|
37
|
+
"@vercel/ai-tsconfig": "workspace:*",
|
|
38
|
+
"tsup": "^8",
|
|
39
|
+
"typescript": "5.1.3",
|
|
40
|
+
"zod": "3.22.4"
|
|
41
|
+
},
|
|
42
|
+
"peerDependencies": {
|
|
43
|
+
"zod": "^3.0.0"
|
|
44
|
+
},
|
|
45
|
+
"peerDependenciesMeta": {
|
|
46
|
+
"zod": {
|
|
47
|
+
"optional": true
|
|
48
|
+
}
|
|
49
|
+
},
|
|
50
|
+
"engines": {
|
|
51
|
+
"node": ">=18"
|
|
52
|
+
},
|
|
53
|
+
"publishConfig": {
|
|
54
|
+
"access": "public"
|
|
55
|
+
},
|
|
56
|
+
"homepage": "https://sdk.vercel.ai/docs",
|
|
57
|
+
"repository": {
|
|
58
|
+
"type": "git",
|
|
59
|
+
"url": "git+https://github.com/vercel/ai.git"
|
|
60
|
+
},
|
|
61
|
+
"bugs": {
|
|
62
|
+
"url": "https://github.com/vercel/ai/issues"
|
|
63
|
+
},
|
|
64
|
+
"keywords": [
|
|
65
|
+
"ai"
|
|
66
|
+
]
|
|
67
|
+
}
|