@ai-sdk/mistral 0.0.1 → 0.0.3

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
package/README.md CHANGED
@@ -13,12 +13,12 @@ npm i @ai-sdk/mistral
 
 ## Provider Instance
 
- You can import `Mistral` from `ai/mistral` and initialize a provider instance with various settings:
+ You can import `createMistral` from `@ai-sdk/mistral` and create a provider instance with various settings:
 
 ```ts
- import { Mistral } from '@ai-sdk/mistral';
+ import { createMistral } from '@ai-sdk/mistral';
 
- const mistral = new Mistral({
+ const mistral = createMistral({
   baseURL: '', // optional base URL for proxies etc.
   apiKey: '', // optional API key, defaults to the MISTRAL_API_KEY env variable
 });
@@ -30,21 +30,21 @@ The AI SDK also provides a shorthand `mistral` import with a Mistral provider in
 import { mistral } from '@ai-sdk/mistral';
 ```
 
- ## Chat Models
+ ## Models
 
- You can create models that call the [Mistral chat API](https://docs.mistral.ai/api/#operation/createChatCompletion) using the `.chat()` factory method.
+ You can create models that call the [Mistral chat API](https://docs.mistral.ai/api/#operation/createChatCompletion) using a provider instance.
 The first argument is the model id, e.g. `mistral-large-latest`.
 Some Mistral chat models support tool calls.
 
 ```ts
- const model = mistral.chat('mistral-large-latest');
+ const model = mistral('mistral-large-latest');
 ```
 
 Mistral chat models also support additional model settings that are not part of the [standard call settings](/docs/ai-core/settings).
 You can pass them as an options argument:
 
 ```ts
- const model = mistral.chat('mistral-large-latest', {
+ const model = mistral('mistral-large-latest', {
   safePrompt: true, // optional safety prompt injection
 });
 ```
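
Taken together, the two README hunks above describe a small migration in calling code. The following sketch restates it using only APIs that appear in this diff (the deprecated `Mistral` class is still exported next to the new `createMistral` factory); the option values are illustrative:

```ts
import { Mistral, createMistral } from '@ai-sdk/mistral';

// 0.0.1 style: class facade with a `.chat()` factory (still exported, now deprecated).
const legacyProvider = new Mistral();
const legacyModel = legacyProvider.chat('mistral-large-latest');

// 0.0.3 style: `createMistral` returns a provider instance that is itself callable.
const mistral = createMistral({
  baseURL: 'https://api.mistral.ai/v1', // optional; this is the default
  // apiKey is optional and falls back to the MISTRAL_API_KEY environment variable
});

const model = mistral('mistral-large-latest', {
  safePrompt: true, // optional safety prompt injection
});

// `.chat()` also remains available on the provider returned by `createMistral`.
const sameModel = mistral.chat('mistral-large-latest');
```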
@@ -0,0 +1,91 @@
+ import { LanguageModelV1 } from '@ai-sdk/provider';
+
+ type MistralChatModelId = 'open-mistral-7b' | 'open-mixtral-8x7b' | 'mistral-small-latest' | 'mistral-medium-latest' | 'mistral-large-latest' | (string & {});
+ interface MistralChatSettings {
+   /**
+    * Whether to inject a safety prompt before all conversations.
+    *
+    * Default: false
+    */
+   safePrompt?: boolean;
+ }
+
+ type MistralChatConfig = {
+   provider: string;
+   baseURL: string;
+   headers: () => Record<string, string | undefined>;
+   generateId: () => string;
+ };
+ declare class MistralChatLanguageModel implements LanguageModelV1 {
+   readonly specificationVersion = "v1";
+   readonly defaultObjectGenerationMode = "json";
+   readonly modelId: MistralChatModelId;
+   readonly settings: MistralChatSettings;
+   private readonly config;
+   constructor(modelId: MistralChatModelId, settings: MistralChatSettings, config: MistralChatConfig);
+   get provider(): string;
+   private getArgs;
+   doGenerate(options: Parameters<LanguageModelV1['doGenerate']>[0]): Promise<Awaited<ReturnType<LanguageModelV1['doGenerate']>>>;
+   doStream(options: Parameters<LanguageModelV1['doStream']>[0]): Promise<Awaited<ReturnType<LanguageModelV1['doStream']>>>;
+ }
+
+ /**
+  * @deprecated Use `createMistral` instead.
+  */
+ declare class Mistral {
+   /**
+    * Base URL for the Mistral API calls.
+    */
+   readonly baseURL: string;
+   readonly apiKey?: string;
+   private readonly generateId;
+   /**
+    * Creates a new Mistral provider instance.
+    */
+   constructor(options?: {
+     /**
+      * Base URL for the Mistral API calls.
+      */
+     baseURL?: string;
+     /**
+      * @deprecated Use `baseURL` instead.
+      */
+     baseUrl?: string;
+     /**
+      * API key for authenticating requests.
+      */
+     apiKey?: string;
+     generateId?: () => string;
+   });
+   private get baseConfig();
+   chat(modelId: MistralChatModelId, settings?: MistralChatSettings): MistralChatLanguageModel;
+ }
+
+ interface MistralProvider {
+   (modelId: MistralChatModelId, settings?: MistralChatSettings): MistralChatLanguageModel;
+   chat(modelId: MistralChatModelId, settings?: MistralChatSettings): MistralChatLanguageModel;
+ }
+ /**
+  * Create a Mistral AI provider.
+  */
+ declare function createMistral(options?: {
+   /**
+    * Base URL for the Mistral API calls.
+    */
+   baseURL?: string;
+   /**
+    * @deprecated Use `baseURL` instead.
+    */
+   baseUrl?: string;
+   /**
+    * API key for authenticating requests.
+    */
+   apiKey?: string;
+   generateId?: () => string;
+ }): MistralProvider;
+ /**
+  * Default Mistral provider instance.
+  */
+ declare const mistral: MistralProvider;
+
+ export { Mistral, type MistralProvider, createMistral, mistral };
@@ -0,0 +1,91 @@
+ import { LanguageModelV1 } from '@ai-sdk/provider';
+
+ type MistralChatModelId = 'open-mistral-7b' | 'open-mixtral-8x7b' | 'mistral-small-latest' | 'mistral-medium-latest' | 'mistral-large-latest' | (string & {});
+ interface MistralChatSettings {
+   /**
+    * Whether to inject a safety prompt before all conversations.
+    *
+    * Default: false
+    */
+   safePrompt?: boolean;
+ }
+
+ type MistralChatConfig = {
+   provider: string;
+   baseURL: string;
+   headers: () => Record<string, string | undefined>;
+   generateId: () => string;
+ };
+ declare class MistralChatLanguageModel implements LanguageModelV1 {
+   readonly specificationVersion = "v1";
+   readonly defaultObjectGenerationMode = "json";
+   readonly modelId: MistralChatModelId;
+   readonly settings: MistralChatSettings;
+   private readonly config;
+   constructor(modelId: MistralChatModelId, settings: MistralChatSettings, config: MistralChatConfig);
+   get provider(): string;
+   private getArgs;
+   doGenerate(options: Parameters<LanguageModelV1['doGenerate']>[0]): Promise<Awaited<ReturnType<LanguageModelV1['doGenerate']>>>;
+   doStream(options: Parameters<LanguageModelV1['doStream']>[0]): Promise<Awaited<ReturnType<LanguageModelV1['doStream']>>>;
+ }
+
+ /**
+  * @deprecated Use `createMistral` instead.
+  */
+ declare class Mistral {
+   /**
+    * Base URL for the Mistral API calls.
+    */
+   readonly baseURL: string;
+   readonly apiKey?: string;
+   private readonly generateId;
+   /**
+    * Creates a new Mistral provider instance.
+    */
+   constructor(options?: {
+     /**
+      * Base URL for the Mistral API calls.
+      */
+     baseURL?: string;
+     /**
+      * @deprecated Use `baseURL` instead.
+      */
+     baseUrl?: string;
+     /**
+      * API key for authenticating requests.
+      */
+     apiKey?: string;
+     generateId?: () => string;
+   });
+   private get baseConfig();
+   chat(modelId: MistralChatModelId, settings?: MistralChatSettings): MistralChatLanguageModel;
+ }
+
+ interface MistralProvider {
+   (modelId: MistralChatModelId, settings?: MistralChatSettings): MistralChatLanguageModel;
+   chat(modelId: MistralChatModelId, settings?: MistralChatSettings): MistralChatLanguageModel;
+ }
+ /**
+  * Create a Mistral AI provider.
+  */
+ declare function createMistral(options?: {
+   /**
+    * Base URL for the Mistral API calls.
+    */
+   baseURL?: string;
+   /**
+    * @deprecated Use `baseURL` instead.
+    */
+   baseUrl?: string;
+   /**
+    * API key for authenticating requests.
+    */
+   apiKey?: string;
+   generateId?: () => string;
+ }): MistralProvider;
+ /**
+  * Default Mistral provider instance.
+  */
+ declare const mistral: MistralProvider;
+
+ export { Mistral, type MistralProvider, createMistral, mistral };
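
Beyond the provider shape, the declarations expose the model class itself: both call forms return a `MistralChatLanguageModel`, and `createMistral` accepts a `generateId` hook, which the implementation further down uses to assign tool-call ids. A small illustrative sketch; the counter-based id generator is a made-up example, not part of the package:

```ts
import { createMistral } from '@ai-sdk/mistral';

// Illustrative only: a deterministic id generator, e.g. for snapshot tests.
let nextId = 0;
const mistral = createMistral({
  generateId: () => `tool-call-${nextId++}`,
});

const model = mistral('open-mixtral-8x7b', { safePrompt: false });

// Metadata declared on MistralChatLanguageModel:
console.log(model.provider); // "mistral.chat"
console.log(model.modelId); // "open-mixtral-8x7b"
console.log(model.specificationVersion); // "v1"
console.log(model.defaultObjectGenerationMode); // "json"
```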
package/dist/index.js ADDED
@@ -0,0 +1,464 @@
1
+ "use strict";
2
+ var __defProp = Object.defineProperty;
3
+ var __getOwnPropDesc = Object.getOwnPropertyDescriptor;
4
+ var __getOwnPropNames = Object.getOwnPropertyNames;
5
+ var __hasOwnProp = Object.prototype.hasOwnProperty;
6
+ var __export = (target, all) => {
7
+ for (var name in all)
8
+ __defProp(target, name, { get: all[name], enumerable: true });
9
+ };
10
+ var __copyProps = (to, from, except, desc) => {
11
+ if (from && typeof from === "object" || typeof from === "function") {
12
+ for (let key of __getOwnPropNames(from))
13
+ if (!__hasOwnProp.call(to, key) && key !== except)
14
+ __defProp(to, key, { get: () => from[key], enumerable: !(desc = __getOwnPropDesc(from, key)) || desc.enumerable });
15
+ }
16
+ return to;
17
+ };
18
+ var __toCommonJS = (mod) => __copyProps(__defProp({}, "__esModule", { value: true }), mod);
19
+
20
+ // src/index.ts
21
+ var src_exports = {};
22
+ __export(src_exports, {
23
+ Mistral: () => Mistral,
24
+ createMistral: () => createMistral,
25
+ mistral: () => mistral
26
+ });
27
+ module.exports = __toCommonJS(src_exports);
28
+
29
+ // src/mistral-facade.ts
30
+ var import_provider_utils3 = require("@ai-sdk/provider-utils");
31
+
32
+ // src/mistral-chat-language-model.ts
33
+ var import_provider2 = require("@ai-sdk/provider");
34
+ var import_provider_utils2 = require("@ai-sdk/provider-utils");
35
+ var import_zod2 = require("zod");
36
+
37
+ // src/convert-to-mistral-chat-messages.ts
38
+ var import_provider = require("@ai-sdk/provider");
39
+ function convertToMistralChatMessages(prompt) {
40
+ const messages = [];
41
+ for (const { role, content } of prompt) {
42
+ switch (role) {
43
+ case "system": {
44
+ messages.push({ role: "system", content });
45
+ break;
46
+ }
47
+ case "user": {
48
+ messages.push({
49
+ role: "user",
50
+ content: content.map((part) => {
51
+ switch (part.type) {
52
+ case "text": {
53
+ return part.text;
54
+ }
55
+ case "image": {
56
+ throw new import_provider.UnsupportedFunctionalityError({
57
+ functionality: "image-part"
58
+ });
59
+ }
60
+ }
61
+ }).join("")
62
+ });
63
+ break;
64
+ }
65
+ case "assistant": {
66
+ let text = "";
67
+ const toolCalls = [];
68
+ for (const part of content) {
69
+ switch (part.type) {
70
+ case "text": {
71
+ text += part.text;
72
+ break;
73
+ }
74
+ case "tool-call": {
75
+ toolCalls.push({
76
+ id: part.toolCallId,
77
+ type: "function",
78
+ function: {
79
+ name: part.toolName,
80
+ arguments: JSON.stringify(part.args)
81
+ }
82
+ });
83
+ break;
84
+ }
85
+ default: {
86
+ const _exhaustiveCheck = part;
87
+ throw new Error(`Unsupported part: ${_exhaustiveCheck}`);
88
+ }
89
+ }
90
+ }
91
+ messages.push({
92
+ role: "assistant",
93
+ content: text,
94
+ tool_calls: toolCalls.length > 0 ? toolCalls.map(({ function: { name, arguments: args } }) => ({
95
+ id: "null",
96
+ type: "function",
97
+ function: { name, arguments: args }
98
+ })) : void 0
99
+ });
100
+ break;
101
+ }
102
+ case "tool": {
103
+ for (const toolResponse of content) {
104
+ messages.push({
105
+ role: "tool",
106
+ name: toolResponse.toolName,
107
+ content: JSON.stringify(toolResponse.result)
108
+ });
109
+ }
110
+ break;
111
+ }
112
+ default: {
113
+ const _exhaustiveCheck = role;
114
+ throw new Error(`Unsupported role: ${_exhaustiveCheck}`);
115
+ }
116
+ }
117
+ }
118
+ return messages;
119
+ }
120
+
121
+ // src/map-mistral-finish-reason.ts
122
+ function mapMistralFinishReason(finishReason) {
123
+ switch (finishReason) {
124
+ case "stop":
125
+ return "stop";
126
+ case "length":
127
+ case "model_length":
128
+ return "length";
129
+ case "tool_calls":
130
+ return "tool-calls";
131
+ default:
132
+ return "other";
133
+ }
134
+ }
135
+
136
+ // src/mistral-error.ts
137
+ var import_provider_utils = require("@ai-sdk/provider-utils");
138
+ var import_zod = require("zod");
139
+ var mistralErrorDataSchema = import_zod.z.object({
140
+ object: import_zod.z.literal("error"),
141
+ message: import_zod.z.string(),
142
+ type: import_zod.z.string(),
143
+ param: import_zod.z.string().nullable(),
144
+ code: import_zod.z.string().nullable()
145
+ });
146
+ var mistralFailedResponseHandler = (0, import_provider_utils.createJsonErrorResponseHandler)({
147
+ errorSchema: mistralErrorDataSchema,
148
+ errorToMessage: (data) => data.message
149
+ });
150
+
151
+ // src/mistral-chat-language-model.ts
152
+ var MistralChatLanguageModel = class {
153
+ constructor(modelId, settings, config) {
154
+ this.specificationVersion = "v1";
155
+ this.defaultObjectGenerationMode = "json";
156
+ this.modelId = modelId;
157
+ this.settings = settings;
158
+ this.config = config;
159
+ }
160
+ get provider() {
161
+ return this.config.provider;
162
+ }
163
+ getArgs({
164
+ mode,
165
+ prompt,
166
+ maxTokens,
167
+ temperature,
168
+ topP,
169
+ frequencyPenalty,
170
+ presencePenalty,
171
+ seed
172
+ }) {
173
+ var _a;
174
+ const type = mode.type;
175
+ const warnings = [];
176
+ if (frequencyPenalty != null) {
177
+ warnings.push({
178
+ type: "unsupported-setting",
179
+ setting: "frequencyPenalty"
180
+ });
181
+ }
182
+ if (presencePenalty != null) {
183
+ warnings.push({
184
+ type: "unsupported-setting",
185
+ setting: "presencePenalty"
186
+ });
187
+ }
188
+ const baseArgs = {
189
+ // model id:
190
+ model: this.modelId,
191
+ // model specific settings:
192
+ safe_prompt: this.settings.safePrompt,
193
+ // standardized settings:
194
+ max_tokens: maxTokens,
195
+ temperature,
196
+ // uses 0..1 scale
197
+ top_p: topP,
198
+ random_seed: seed,
199
+ // messages:
200
+ messages: convertToMistralChatMessages(prompt)
201
+ };
202
+ switch (type) {
203
+ case "regular": {
204
+ const tools = ((_a = mode.tools) == null ? void 0 : _a.length) ? mode.tools : void 0;
205
+ return {
206
+ args: {
207
+ ...baseArgs,
208
+ tools: tools == null ? void 0 : tools.map((tool) => ({
209
+ type: "function",
210
+ function: {
211
+ name: tool.name,
212
+ description: tool.description,
213
+ parameters: tool.parameters
214
+ }
215
+ }))
216
+ },
217
+ warnings
218
+ };
219
+ }
220
+ case "object-json": {
221
+ return {
222
+ args: {
223
+ ...baseArgs,
224
+ response_format: { type: "json_object" }
225
+ },
226
+ warnings
227
+ };
228
+ }
229
+ case "object-tool": {
230
+ return {
231
+ args: {
232
+ ...baseArgs,
233
+ tool_choice: "any",
234
+ tools: [{ type: "function", function: mode.tool }]
235
+ },
236
+ warnings
237
+ };
238
+ }
239
+ case "object-grammar": {
240
+ throw new import_provider2.UnsupportedFunctionalityError({
241
+ functionality: "object-grammar mode"
242
+ });
243
+ }
244
+ default: {
245
+ const _exhaustiveCheck = type;
246
+ throw new Error(`Unsupported type: ${_exhaustiveCheck}`);
247
+ }
248
+ }
249
+ }
250
+ async doGenerate(options) {
251
+ var _a, _b;
252
+ const { args, warnings } = this.getArgs(options);
253
+ const response = await (0, import_provider_utils2.postJsonToApi)({
254
+ url: `${this.config.baseURL}/chat/completions`,
255
+ headers: this.config.headers(),
256
+ body: args,
257
+ failedResponseHandler: mistralFailedResponseHandler,
258
+ successfulResponseHandler: (0, import_provider_utils2.createJsonResponseHandler)(
259
+ mistralChatResponseSchema
260
+ ),
261
+ abortSignal: options.abortSignal
262
+ });
263
+ const { messages: rawPrompt, ...rawSettings } = args;
264
+ const choice = response.choices[0];
265
+ return {
266
+ text: (_a = choice.message.content) != null ? _a : void 0,
267
+ toolCalls: (_b = choice.message.tool_calls) == null ? void 0 : _b.map((toolCall) => ({
268
+ toolCallType: "function",
269
+ toolCallId: this.config.generateId(),
270
+ toolName: toolCall.function.name,
271
+ args: toolCall.function.arguments
272
+ })),
273
+ finishReason: mapMistralFinishReason(choice.finish_reason),
274
+ usage: {
275
+ promptTokens: response.usage.prompt_tokens,
276
+ completionTokens: response.usage.completion_tokens
277
+ },
278
+ rawCall: { rawPrompt, rawSettings },
279
+ warnings
280
+ };
281
+ }
282
+ async doStream(options) {
283
+ const { args, warnings } = this.getArgs(options);
284
+ const response = await (0, import_provider_utils2.postJsonToApi)({
285
+ url: `${this.config.baseURL}/chat/completions`,
286
+ headers: this.config.headers(),
287
+ body: {
288
+ ...args,
289
+ stream: true
290
+ },
291
+ failedResponseHandler: mistralFailedResponseHandler,
292
+ successfulResponseHandler: (0, import_provider_utils2.createEventSourceResponseHandler)(
293
+ mistralChatChunkSchema
294
+ ),
295
+ abortSignal: options.abortSignal
296
+ });
297
+ const { messages: rawPrompt, ...rawSettings } = args;
298
+ let finishReason = "other";
299
+ let usage = {
300
+ promptTokens: Number.NaN,
301
+ completionTokens: Number.NaN
302
+ };
303
+ const generateId2 = this.config.generateId;
304
+ return {
305
+ stream: response.pipeThrough(
306
+ new TransformStream({
307
+ transform(chunk, controller) {
308
+ if (!chunk.success) {
309
+ controller.enqueue({ type: "error", error: chunk.error });
310
+ return;
311
+ }
312
+ const value = chunk.value;
313
+ if (value.usage != null) {
314
+ usage = {
315
+ promptTokens: value.usage.prompt_tokens,
316
+ completionTokens: value.usage.completion_tokens
317
+ };
318
+ }
319
+ const choice = value.choices[0];
320
+ if ((choice == null ? void 0 : choice.finish_reason) != null) {
321
+ finishReason = mapMistralFinishReason(choice.finish_reason);
322
+ }
323
+ if ((choice == null ? void 0 : choice.delta) == null) {
324
+ return;
325
+ }
326
+ const delta = choice.delta;
327
+ if (delta.content != null) {
328
+ controller.enqueue({
329
+ type: "text-delta",
330
+ textDelta: delta.content
331
+ });
332
+ }
333
+ if (delta.tool_calls != null) {
334
+ for (const toolCall of delta.tool_calls) {
335
+ const toolCallId = generateId2();
336
+ controller.enqueue({
337
+ type: "tool-call-delta",
338
+ toolCallType: "function",
339
+ toolCallId,
340
+ toolName: toolCall.function.name,
341
+ argsTextDelta: toolCall.function.arguments
342
+ });
343
+ controller.enqueue({
344
+ type: "tool-call",
345
+ toolCallType: "function",
346
+ toolCallId,
347
+ toolName: toolCall.function.name,
348
+ args: toolCall.function.arguments
349
+ });
350
+ }
351
+ }
352
+ },
353
+ flush(controller) {
354
+ controller.enqueue({ type: "finish", finishReason, usage });
355
+ }
356
+ })
357
+ ),
358
+ rawCall: { rawPrompt, rawSettings },
359
+ warnings
360
+ };
361
+ }
362
+ };
363
+ var mistralChatResponseSchema = import_zod2.z.object({
364
+ choices: import_zod2.z.array(
365
+ import_zod2.z.object({
366
+ message: import_zod2.z.object({
367
+ role: import_zod2.z.literal("assistant"),
368
+ content: import_zod2.z.string().nullable(),
369
+ tool_calls: import_zod2.z.array(
370
+ import_zod2.z.object({
371
+ function: import_zod2.z.object({
372
+ name: import_zod2.z.string(),
373
+ arguments: import_zod2.z.string()
374
+ })
375
+ })
376
+ ).optional().nullable()
377
+ }),
378
+ index: import_zod2.z.number(),
379
+ finish_reason: import_zod2.z.string().optional().nullable()
380
+ })
381
+ ),
382
+ object: import_zod2.z.literal("chat.completion"),
383
+ usage: import_zod2.z.object({
384
+ prompt_tokens: import_zod2.z.number(),
385
+ completion_tokens: import_zod2.z.number()
386
+ })
387
+ });
388
+ var mistralChatChunkSchema = import_zod2.z.object({
389
+ object: import_zod2.z.literal("chat.completion.chunk"),
390
+ choices: import_zod2.z.array(
391
+ import_zod2.z.object({
392
+ delta: import_zod2.z.object({
393
+ role: import_zod2.z.enum(["assistant"]).optional(),
394
+ content: import_zod2.z.string().nullable().optional(),
395
+ tool_calls: import_zod2.z.array(
396
+ import_zod2.z.object({
397
+ function: import_zod2.z.object({ name: import_zod2.z.string(), arguments: import_zod2.z.string() })
398
+ })
399
+ ).optional().nullable()
400
+ }),
401
+ finish_reason: import_zod2.z.string().nullable().optional(),
402
+ index: import_zod2.z.number()
403
+ })
404
+ ),
405
+ usage: import_zod2.z.object({
406
+ prompt_tokens: import_zod2.z.number(),
407
+ completion_tokens: import_zod2.z.number()
408
+ }).optional().nullable()
409
+ });
410
+
411
+ // src/mistral-facade.ts
412
+ var Mistral = class {
413
+ /**
414
+ * Creates a new Mistral provider instance.
415
+ */
416
+ constructor(options = {}) {
417
+ var _a, _b, _c;
418
+ this.baseURL = (_b = (0, import_provider_utils3.withoutTrailingSlash)((_a = options.baseURL) != null ? _a : options.baseUrl)) != null ? _b : "https://api.mistral.ai/v1";
419
+ this.apiKey = options.apiKey;
420
+ this.generateId = (_c = options.generateId) != null ? _c : import_provider_utils3.generateId;
421
+ }
422
+ get baseConfig() {
423
+ return {
424
+ baseURL: this.baseURL,
425
+ headers: () => ({
426
+ Authorization: `Bearer ${(0, import_provider_utils3.loadApiKey)({
427
+ apiKey: this.apiKey,
428
+ environmentVariableName: "MISTRAL_API_KEY",
429
+ description: "Mistral"
430
+ })}`
431
+ })
432
+ };
433
+ }
434
+ chat(modelId, settings = {}) {
435
+ return new MistralChatLanguageModel(modelId, settings, {
436
+ provider: "mistral.chat",
437
+ ...this.baseConfig,
438
+ generateId: this.generateId
439
+ });
440
+ }
441
+ };
442
+
443
+ // src/mistral-provider.ts
444
+ function createMistral(options = {}) {
445
+ const mistral2 = new Mistral(options);
446
+ const provider = function(modelId, settings) {
447
+ if (new.target) {
448
+ throw new Error(
449
+ "The Mistral model function cannot be called with the new keyword."
450
+ );
451
+ }
452
+ return mistral2.chat(modelId, settings);
453
+ };
454
+ provider.chat = mistral2.chat.bind(mistral2);
455
+ return provider;
456
+ }
457
+ var mistral = createMistral();
458
+ // Annotate the CommonJS export names for ESM import in node:
459
+ 0 && (module.exports = {
460
+ Mistral,
461
+ createMistral,
462
+ mistral
463
+ });
464
+ //# sourceMappingURL=index.js.map
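
For orientation, the `doStream` implementation above emits `LanguageModelV1StreamPart` objects: `text-delta` parts, a paired `tool-call-delta` and `tool-call` per tool invocation (Mistral delivers tool calls in one piece), and a final `finish` part carrying usage. A hedged consumer-side sketch, assuming you already hold the stream returned by `doStream` and leaving out the call-option plumbing:

```ts
import type { LanguageModelV1StreamPart } from '@ai-sdk/provider';

// Drain a stream of parts as produced by MistralChatLanguageModel.doStream().
async function collectText(
  stream: ReadableStream<LanguageModelV1StreamPart>,
): Promise<string> {
  let text = '';
  const reader = stream.getReader();
  for (;;) {
    const { done, value: part } = await reader.read();
    if (done) break;
    if (part.type === 'text-delta') {
      text += part.textDelta;
    } else if (part.type === 'tool-call') {
      // complete tool call: toolCallId, toolName, args (JSON-encoded string)
      console.log(part.toolName, part.args);
    } else if (part.type === 'finish') {
      console.log(part.finishReason, part.usage); // { promptTokens, completionTokens }
    } else if (part.type === 'error') {
      throw part.error;
    }
  }
  return text;
}
```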
@@ -0,0 +1 @@
1
+ {"version":3,"sources":["../src/index.ts","../src/mistral-facade.ts","../src/mistral-chat-language-model.ts","../src/convert-to-mistral-chat-messages.ts","../src/map-mistral-finish-reason.ts","../src/mistral-error.ts","../src/mistral-provider.ts"],"sourcesContent":["export * from './mistral-facade';\nexport * from './mistral-provider';\n","import {\n generateId,\n loadApiKey,\n withoutTrailingSlash,\n} from '@ai-sdk/provider-utils';\nimport { MistralChatLanguageModel } from './mistral-chat-language-model';\nimport {\n MistralChatModelId,\n MistralChatSettings,\n} from './mistral-chat-settings';\n\n/**\n * @deprecated Use `createMistral` instead.\n */\nexport class Mistral {\n /**\n * Base URL for the Mistral API calls.\n */\n readonly baseURL: string;\n\n readonly apiKey?: string;\n\n private readonly generateId: () => string;\n\n /**\n * Creates a new Mistral provider instance.\n */\n constructor(\n options: {\n /**\n * Base URL for the Mistral API calls.\n */\n baseURL?: string;\n\n /**\n * @deprecated Use `baseURL` instead.\n */\n baseUrl?: string;\n\n /**\n * API key for authenticating requests.\n */\n apiKey?: string;\n\n generateId?: () => string;\n } = {},\n ) {\n this.baseURL =\n withoutTrailingSlash(options.baseURL ?? options.baseUrl) ??\n 'https://api.mistral.ai/v1';\n\n this.apiKey = options.apiKey;\n this.generateId = options.generateId ?? generateId;\n }\n\n private get baseConfig() {\n return {\n baseURL: this.baseURL,\n headers: () => ({\n Authorization: `Bearer ${loadApiKey({\n apiKey: this.apiKey,\n environmentVariableName: 'MISTRAL_API_KEY',\n description: 'Mistral',\n })}`,\n }),\n };\n }\n\n chat(modelId: MistralChatModelId, settings: MistralChatSettings = {}) {\n return new MistralChatLanguageModel(modelId, settings, {\n provider: 'mistral.chat',\n ...this.baseConfig,\n generateId: this.generateId,\n });\n }\n}\n","import {\n LanguageModelV1,\n LanguageModelV1CallWarning,\n LanguageModelV1FinishReason,\n LanguageModelV1StreamPart,\n UnsupportedFunctionalityError,\n} from '@ai-sdk/provider';\nimport {\n ParseResult,\n createEventSourceResponseHandler,\n createJsonResponseHandler,\n postJsonToApi,\n} from '@ai-sdk/provider-utils';\nimport { z } from 'zod';\nimport { convertToMistralChatMessages } from './convert-to-mistral-chat-messages';\nimport { mapMistralFinishReason } from './map-mistral-finish-reason';\nimport {\n MistralChatModelId,\n MistralChatSettings,\n} from './mistral-chat-settings';\nimport { mistralFailedResponseHandler } from './mistral-error';\n\ntype MistralChatConfig = {\n provider: string;\n baseURL: string;\n headers: () => Record<string, string | undefined>;\n generateId: () => string;\n};\n\nexport class MistralChatLanguageModel implements LanguageModelV1 {\n readonly specificationVersion = 'v1';\n readonly defaultObjectGenerationMode = 'json';\n\n readonly modelId: MistralChatModelId;\n readonly settings: MistralChatSettings;\n\n private readonly config: MistralChatConfig;\n\n constructor(\n modelId: MistralChatModelId,\n settings: MistralChatSettings,\n config: MistralChatConfig,\n ) {\n this.modelId = modelId;\n this.settings = settings;\n this.config = config;\n }\n\n get provider(): string {\n return this.config.provider;\n }\n\n private getArgs({\n mode,\n prompt,\n maxTokens,\n temperature,\n topP,\n frequencyPenalty,\n presencePenalty,\n seed,\n }: Parameters<LanguageModelV1['doGenerate']>[0]) {\n const type = mode.type;\n\n const warnings: LanguageModelV1CallWarning[] = [];\n\n if (frequencyPenalty != null) {\n warnings.push({\n type: 
'unsupported-setting',\n setting: 'frequencyPenalty',\n });\n }\n\n if (presencePenalty != null) {\n warnings.push({\n type: 'unsupported-setting',\n setting: 'presencePenalty',\n });\n }\n\n const baseArgs = {\n // model id:\n model: this.modelId,\n\n // model specific settings:\n safe_prompt: this.settings.safePrompt,\n\n // standardized settings:\n max_tokens: maxTokens,\n temperature, // uses 0..1 scale\n top_p: topP,\n random_seed: seed,\n\n // messages:\n messages: convertToMistralChatMessages(prompt),\n };\n\n switch (type) {\n case 'regular': {\n // when the tools array is empty, change it to undefined to prevent OpenAI errors:\n const tools = mode.tools?.length ? mode.tools : undefined;\n\n return {\n args: {\n ...baseArgs,\n tools: tools?.map(tool => ({\n type: 'function',\n function: {\n name: tool.name,\n description: tool.description,\n parameters: tool.parameters,\n },\n })),\n },\n warnings,\n };\n }\n\n case 'object-json': {\n return {\n args: {\n ...baseArgs,\n response_format: { type: 'json_object' },\n },\n warnings,\n };\n }\n\n case 'object-tool': {\n return {\n args: {\n ...baseArgs,\n tool_choice: 'any',\n tools: [{ type: 'function', function: mode.tool }],\n },\n warnings,\n };\n }\n\n case 'object-grammar': {\n throw new UnsupportedFunctionalityError({\n functionality: 'object-grammar mode',\n });\n }\n\n default: {\n const _exhaustiveCheck: never = type;\n throw new Error(`Unsupported type: ${_exhaustiveCheck}`);\n }\n }\n }\n\n async doGenerate(\n options: Parameters<LanguageModelV1['doGenerate']>[0],\n ): Promise<Awaited<ReturnType<LanguageModelV1['doGenerate']>>> {\n const { args, warnings } = this.getArgs(options);\n\n const response = await postJsonToApi({\n url: `${this.config.baseURL}/chat/completions`,\n headers: this.config.headers(),\n body: args,\n failedResponseHandler: mistralFailedResponseHandler,\n successfulResponseHandler: createJsonResponseHandler(\n mistralChatResponseSchema,\n ),\n abortSignal: options.abortSignal,\n });\n\n const { messages: rawPrompt, ...rawSettings } = args;\n const choice = response.choices[0];\n\n return {\n text: choice.message.content ?? 
undefined,\n toolCalls: choice.message.tool_calls?.map(toolCall => ({\n toolCallType: 'function',\n toolCallId: this.config.generateId(),\n toolName: toolCall.function.name,\n args: toolCall.function.arguments!,\n })),\n finishReason: mapMistralFinishReason(choice.finish_reason),\n usage: {\n promptTokens: response.usage.prompt_tokens,\n completionTokens: response.usage.completion_tokens,\n },\n rawCall: { rawPrompt, rawSettings },\n warnings,\n };\n }\n\n async doStream(\n options: Parameters<LanguageModelV1['doStream']>[0],\n ): Promise<Awaited<ReturnType<LanguageModelV1['doStream']>>> {\n const { args, warnings } = this.getArgs(options);\n\n const response = await postJsonToApi({\n url: `${this.config.baseURL}/chat/completions`,\n headers: this.config.headers(),\n body: {\n ...args,\n stream: true,\n },\n failedResponseHandler: mistralFailedResponseHandler,\n successfulResponseHandler: createEventSourceResponseHandler(\n mistralChatChunkSchema,\n ),\n abortSignal: options.abortSignal,\n });\n\n const { messages: rawPrompt, ...rawSettings } = args;\n\n let finishReason: LanguageModelV1FinishReason = 'other';\n let usage: { promptTokens: number; completionTokens: number } = {\n promptTokens: Number.NaN,\n completionTokens: Number.NaN,\n };\n\n const generateId = this.config.generateId;\n\n return {\n stream: response.pipeThrough(\n new TransformStream<\n ParseResult<z.infer<typeof mistralChatChunkSchema>>,\n LanguageModelV1StreamPart\n >({\n transform(chunk, controller) {\n if (!chunk.success) {\n controller.enqueue({ type: 'error', error: chunk.error });\n return;\n }\n\n const value = chunk.value;\n\n if (value.usage != null) {\n usage = {\n promptTokens: value.usage.prompt_tokens,\n completionTokens: value.usage.completion_tokens,\n };\n }\n\n const choice = value.choices[0];\n\n if (choice?.finish_reason != null) {\n finishReason = mapMistralFinishReason(choice.finish_reason);\n }\n\n if (choice?.delta == null) {\n return;\n }\n\n const delta = choice.delta;\n\n if (delta.content != null) {\n controller.enqueue({\n type: 'text-delta',\n textDelta: delta.content,\n });\n }\n\n if (delta.tool_calls != null) {\n for (const toolCall of delta.tool_calls) {\n // mistral tool calls come in one piece\n\n const toolCallId = generateId(); // delta and tool call must have same id\n\n controller.enqueue({\n type: 'tool-call-delta',\n toolCallType: 'function',\n toolCallId,\n toolName: toolCall.function.name,\n argsTextDelta: toolCall.function.arguments,\n });\n\n controller.enqueue({\n type: 'tool-call',\n toolCallType: 'function',\n toolCallId,\n toolName: toolCall.function.name,\n args: toolCall.function.arguments,\n });\n }\n }\n },\n\n flush(controller) {\n controller.enqueue({ type: 'finish', finishReason, usage });\n },\n }),\n ),\n rawCall: { rawPrompt, rawSettings },\n warnings,\n };\n }\n}\n\n// limited version of the schema, focussed on what is needed for the implementation\n// this approach limits breakages when the API changes and increases efficiency\nconst mistralChatResponseSchema = z.object({\n choices: z.array(\n z.object({\n message: z.object({\n role: z.literal('assistant'),\n content: z.string().nullable(),\n tool_calls: z\n .array(\n z.object({\n function: z.object({\n name: z.string(),\n arguments: z.string(),\n }),\n }),\n )\n .optional()\n .nullable(),\n }),\n index: z.number(),\n finish_reason: z.string().optional().nullable(),\n }),\n ),\n object: z.literal('chat.completion'),\n usage: z.object({\n prompt_tokens: z.number(),\n completion_tokens: z.number(),\n 
}),\n});\n\n// limited version of the schema, focussed on what is needed for the implementation\n// this approach limits breakages when the API changes and increases efficiency\nconst mistralChatChunkSchema = z.object({\n object: z.literal('chat.completion.chunk'),\n choices: z.array(\n z.object({\n delta: z.object({\n role: z.enum(['assistant']).optional(),\n content: z.string().nullable().optional(),\n tool_calls: z\n .array(\n z.object({\n function: z.object({ name: z.string(), arguments: z.string() }),\n }),\n )\n .optional()\n .nullable(),\n }),\n finish_reason: z.string().nullable().optional(),\n index: z.number(),\n }),\n ),\n usage: z\n .object({\n prompt_tokens: z.number(),\n completion_tokens: z.number(),\n })\n .optional()\n .nullable(),\n});\n","import {\n LanguageModelV1Prompt,\n UnsupportedFunctionalityError,\n} from '@ai-sdk/provider';\nimport { MistralChatPrompt } from './mistral-chat-prompt';\n\nexport function convertToMistralChatMessages(\n prompt: LanguageModelV1Prompt,\n): MistralChatPrompt {\n const messages: MistralChatPrompt = [];\n\n for (const { role, content } of prompt) {\n switch (role) {\n case 'system': {\n messages.push({ role: 'system', content });\n break;\n }\n\n case 'user': {\n messages.push({\n role: 'user',\n content: content\n .map(part => {\n switch (part.type) {\n case 'text': {\n return part.text;\n }\n case 'image': {\n throw new UnsupportedFunctionalityError({\n functionality: 'image-part',\n });\n }\n }\n })\n .join(''),\n });\n break;\n }\n\n case 'assistant': {\n let text = '';\n const toolCalls: Array<{\n id: string;\n type: 'function';\n function: { name: string; arguments: string };\n }> = [];\n\n for (const part of content) {\n switch (part.type) {\n case 'text': {\n text += part.text;\n break;\n }\n case 'tool-call': {\n toolCalls.push({\n id: part.toolCallId,\n type: 'function',\n function: {\n name: part.toolName,\n arguments: JSON.stringify(part.args),\n },\n });\n break;\n }\n default: {\n const _exhaustiveCheck: never = part;\n throw new Error(`Unsupported part: ${_exhaustiveCheck}`);\n }\n }\n }\n\n messages.push({\n role: 'assistant',\n content: text,\n tool_calls:\n toolCalls.length > 0\n ? 
toolCalls.map(({ function: { name, arguments: args } }) => ({\n id: 'null',\n type: 'function',\n function: { name, arguments: args },\n }))\n : undefined,\n });\n\n break;\n }\n case 'tool': {\n for (const toolResponse of content) {\n messages.push({\n role: 'tool',\n name: toolResponse.toolName,\n content: JSON.stringify(toolResponse.result),\n });\n }\n break;\n }\n default: {\n const _exhaustiveCheck: never = role;\n throw new Error(`Unsupported role: ${_exhaustiveCheck}`);\n }\n }\n }\n\n return messages;\n}\n","import { LanguageModelV1FinishReason } from '@ai-sdk/provider';\n\nexport function mapMistralFinishReason(\n finishReason: string | null | undefined,\n): LanguageModelV1FinishReason {\n switch (finishReason) {\n case 'stop':\n return 'stop';\n case 'length':\n case 'model_length':\n return 'length';\n case 'tool_calls':\n return 'tool-calls';\n default:\n return 'other';\n }\n}\n","import { createJsonErrorResponseHandler } from '@ai-sdk/provider-utils';\nimport { z } from 'zod';\n\nconst mistralErrorDataSchema = z.object({\n object: z.literal('error'),\n message: z.string(),\n type: z.string(),\n param: z.string().nullable(),\n code: z.string().nullable(),\n});\n\nexport type MistralErrorData = z.infer<typeof mistralErrorDataSchema>;\n\nexport const mistralFailedResponseHandler = createJsonErrorResponseHandler({\n errorSchema: mistralErrorDataSchema,\n errorToMessage: data => data.message,\n});\n","import { MistralChatLanguageModel } from './mistral-chat-language-model';\nimport {\n MistralChatModelId,\n MistralChatSettings,\n} from './mistral-chat-settings';\nimport { Mistral } from './mistral-facade';\n\nexport interface MistralProvider {\n (\n modelId: MistralChatModelId,\n settings?: MistralChatSettings,\n ): MistralChatLanguageModel;\n\n chat(\n modelId: MistralChatModelId,\n settings?: MistralChatSettings,\n ): MistralChatLanguageModel;\n}\n\n/**\n * Create a Mistral AI provider.\n */\nexport function createMistral(\n options: {\n /**\n * Base URL for the Mistral API calls.\n */\n baseURL?: string;\n\n /**\n * @deprecated Use `baseURL` instead.\n */\n baseUrl?: string;\n\n /**\n * API key for authenticating requests.\n */\n apiKey?: string;\n\n generateId?: () => string;\n } = {},\n): MistralProvider {\n const mistral = new Mistral(options);\n\n const provider = function (\n modelId: MistralChatModelId,\n settings?: MistralChatSettings,\n ) {\n if (new.target) {\n throw new Error(\n 'The Mistral model function cannot be called with the new keyword.',\n );\n }\n\n return mistral.chat(modelId, settings);\n };\n\n provider.chat = mistral.chat.bind(mistral);\n\n return provider as MistralProvider;\n}\n\n/**\n * Default Mistral provider instance.\n */\nexport const mistral = 
createMistral();\n"],"mappings":";;;;;;;;;;;;;;;;;;;;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;;;ACAA,IAAAA,yBAIO;;;ACJP,IAAAC,mBAMO;AACP,IAAAC,yBAKO;AACP,IAAAC,cAAkB;;;ACblB,sBAGO;AAGA,SAAS,6BACd,QACmB;AACnB,QAAM,WAA8B,CAAC;AAErC,aAAW,EAAE,MAAM,QAAQ,KAAK,QAAQ;AACtC,YAAQ,MAAM;AAAA,MACZ,KAAK,UAAU;AACb,iBAAS,KAAK,EAAE,MAAM,UAAU,QAAQ,CAAC;AACzC;AAAA,MACF;AAAA,MAEA,KAAK,QAAQ;AACX,iBAAS,KAAK;AAAA,UACZ,MAAM;AAAA,UACN,SAAS,QACN,IAAI,UAAQ;AACX,oBAAQ,KAAK,MAAM;AAAA,cACjB,KAAK,QAAQ;AACX,uBAAO,KAAK;AAAA,cACd;AAAA,cACA,KAAK,SAAS;AACZ,sBAAM,IAAI,8CAA8B;AAAA,kBACtC,eAAe;AAAA,gBACjB,CAAC;AAAA,cACH;AAAA,YACF;AAAA,UACF,CAAC,EACA,KAAK,EAAE;AAAA,QACZ,CAAC;AACD;AAAA,MACF;AAAA,MAEA,KAAK,aAAa;AAChB,YAAI,OAAO;AACX,cAAM,YAID,CAAC;AAEN,mBAAW,QAAQ,SAAS;AAC1B,kBAAQ,KAAK,MAAM;AAAA,YACjB,KAAK,QAAQ;AACX,sBAAQ,KAAK;AACb;AAAA,YACF;AAAA,YACA,KAAK,aAAa;AAChB,wBAAU,KAAK;AAAA,gBACb,IAAI,KAAK;AAAA,gBACT,MAAM;AAAA,gBACN,UAAU;AAAA,kBACR,MAAM,KAAK;AAAA,kBACX,WAAW,KAAK,UAAU,KAAK,IAAI;AAAA,gBACrC;AAAA,cACF,CAAC;AACD;AAAA,YACF;AAAA,YACA,SAAS;AACP,oBAAM,mBAA0B;AAChC,oBAAM,IAAI,MAAM,qBAAqB,gBAAgB,EAAE;AAAA,YACzD;AAAA,UACF;AAAA,QACF;AAEA,iBAAS,KAAK;AAAA,UACZ,MAAM;AAAA,UACN,SAAS;AAAA,UACT,YACE,UAAU,SAAS,IACf,UAAU,IAAI,CAAC,EAAE,UAAU,EAAE,MAAM,WAAW,KAAK,EAAE,OAAO;AAAA,YAC1D,IAAI;AAAA,YACJ,MAAM;AAAA,YACN,UAAU,EAAE,MAAM,WAAW,KAAK;AAAA,UACpC,EAAE,IACF;AAAA,QACR,CAAC;AAED;AAAA,MACF;AAAA,MACA,KAAK,QAAQ;AACX,mBAAW,gBAAgB,SAAS;AAClC,mBAAS,KAAK;AAAA,YACZ,MAAM;AAAA,YACN,MAAM,aAAa;AAAA,YACnB,SAAS,KAAK,UAAU,aAAa,MAAM;AAAA,UAC7C,CAAC;AAAA,QACH;AACA;AAAA,MACF;AAAA,MACA,SAAS;AACP,cAAM,mBAA0B;AAChC,cAAM,IAAI,MAAM,qBAAqB,gBAAgB,EAAE;AAAA,MACzD;AAAA,IACF;AAAA,EACF;AAEA,SAAO;AACT;;;ACtGO,SAAS,uBACd,cAC6B;AAC7B,UAAQ,cAAc;AAAA,IACpB,KAAK;AACH,aAAO;AAAA,IACT,KAAK;AAAA,IACL,KAAK;AACH,aAAO;AAAA,IACT,KAAK;AACH,aAAO;AAAA,IACT;AACE,aAAO;AAAA,EACX;AACF;;;AChBA,4BAA+C;AAC/C,iBAAkB;AAElB,IAAM,yBAAyB,aAAE,OAAO;AAAA,EACtC,QAAQ,aAAE,QAAQ,OAAO;AAAA,EACzB,SAAS,aAAE,OAAO;AAAA,EAClB,MAAM,aAAE,OAAO;AAAA,EACf,OAAO,aAAE,OAAO,EAAE,SAAS;AAAA,EAC3B,MAAM,aAAE,OAAO,EAAE,SAAS;AAC5B,CAAC;AAIM,IAAM,mCAA+B,sDAA+B;AAAA,EACzE,aAAa;AAAA,EACb,gBAAgB,UAAQ,KAAK;AAC/B,CAAC;;;AHaM,IAAM,2BAAN,MAA0D;AAAA,EAS/D,YACE,SACA,UACA,QACA;AAZF,SAAS,uBAAuB;AAChC,SAAS,8BAA8B;AAYrC,SAAK,UAAU;AACf,SAAK,WAAW;AAChB,SAAK,SAAS;AAAA,EAChB;AAAA,EAEA,IAAI,WAAmB;AACrB,WAAO,KAAK,OAAO;AAAA,EACrB;AAAA,EAEQ,QAAQ;AAAA,IACd;AAAA,IACA;AAAA,IACA;AAAA,IACA;AAAA,IACA;AAAA,IACA;AAAA,IACA;AAAA,IACA;AAAA,EACF,GAAiD;AA7DnD;AA8DI,UAAM,OAAO,KAAK;AAElB,UAAM,WAAyC,CAAC;AAEhD,QAAI,oBAAoB,MAAM;AAC5B,eAAS,KAAK;AAAA,QACZ,MAAM;AAAA,QACN,SAAS;AAAA,MACX,CAAC;AAAA,IACH;AAEA,QAAI,mBAAmB,MAAM;AAC3B,eAAS,KAAK;AAAA,QACZ,MAAM;AAAA,QACN,SAAS;AAAA,MACX,CAAC;AAAA,IACH;AAEA,UAAM,WAAW;AAAA;AAAA,MAEf,OAAO,KAAK;AAAA;AAAA,MAGZ,aAAa,KAAK,SAAS;AAAA;AAAA,MAG3B,YAAY;AAAA,MACZ;AAAA;AAAA,MACA,OAAO;AAAA,MACP,aAAa;AAAA;AAAA,MAGb,UAAU,6BAA6B,MAAM;AAAA,IAC/C;AAEA,YAAQ,MAAM;AAAA,MACZ,KAAK,WAAW;AAEd,cAAM,UAAQ,UAAK,UAAL,mBAAY,UAAS,KAAK,QAAQ;AAEhD,eAAO;AAAA,UACL,MAAM;AAAA,YACJ,GAAG;AAAA,YACH,OAAO,+BAAO,IAAI,WAAS;AAAA,cACzB,MAAM;AAAA,cACN,UAAU;AAAA,gBACR,MAAM,KAAK;AAAA,gBACX,aAAa,KAAK;AAAA,gBAClB,YAAY,KAAK;AAAA,cACnB;AAAA,YACF;AAAA,UACF;AAAA,UACA;AAAA,QACF;AAAA,MACF;AAAA,MAEA,KAAK,eAAe;AAClB,eAAO;AAAA,UACL,MAAM;AAAA,YACJ,GAAG;AAAA,YACH,iBAAiB,EAAE,MAAM,cAAc;AAAA,UACzC;AAAA,UACA;AAAA,QACF;AAAA,MACF;AAAA,MAEA,KAAK,eAAe;AAClB,eAAO;AAAA,UACL,MAAM;AAAA,YACJ,GAAG;AAAA,YACH,aAAa;AAAA,YACb,OAAO,CAAC,EAAE,MAAM,YAAY,UAAU,KAAK,KAAK,CAAC;AAAA,UACnD;AAAA,UACA;AAAA,QACF;AAAA,MACF;AAAA,MAEA,KAAK,kBAAkB;AACrB,cAAM,IAAI,+CAA8B;AAAA,UACtC,eAAe;AAAA,QACjB,CAAC;AAAA,MACH;AAAA,MAEA,SAAS;AACP,cAA
M,mBAA0B;AAChC,cAAM,IAAI,MAAM,qBAAqB,gBAAgB,EAAE;AAAA,MACzD;AAAA,IACF;AAAA,EACF;AAAA,EAEA,MAAM,WACJ,SAC6D;AA1JjE;AA2JI,UAAM,EAAE,MAAM,SAAS,IAAI,KAAK,QAAQ,OAAO;AAE/C,UAAM,WAAW,UAAM,sCAAc;AAAA,MACnC,KAAK,GAAG,KAAK,OAAO,OAAO;AAAA,MAC3B,SAAS,KAAK,OAAO,QAAQ;AAAA,MAC7B,MAAM;AAAA,MACN,uBAAuB;AAAA,MACvB,+BAA2B;AAAA,QACzB;AAAA,MACF;AAAA,MACA,aAAa,QAAQ;AAAA,IACvB,CAAC;AAED,UAAM,EAAE,UAAU,WAAW,GAAG,YAAY,IAAI;AAChD,UAAM,SAAS,SAAS,QAAQ,CAAC;AAEjC,WAAO;AAAA,MACL,OAAM,YAAO,QAAQ,YAAf,YAA0B;AAAA,MAChC,YAAW,YAAO,QAAQ,eAAf,mBAA2B,IAAI,eAAa;AAAA,QACrD,cAAc;AAAA,QACd,YAAY,KAAK,OAAO,WAAW;AAAA,QACnC,UAAU,SAAS,SAAS;AAAA,QAC5B,MAAM,SAAS,SAAS;AAAA,MAC1B;AAAA,MACA,cAAc,uBAAuB,OAAO,aAAa;AAAA,MACzD,OAAO;AAAA,QACL,cAAc,SAAS,MAAM;AAAA,QAC7B,kBAAkB,SAAS,MAAM;AAAA,MACnC;AAAA,MACA,SAAS,EAAE,WAAW,YAAY;AAAA,MAClC;AAAA,IACF;AAAA,EACF;AAAA,EAEA,MAAM,SACJ,SAC2D;AAC3D,UAAM,EAAE,MAAM,SAAS,IAAI,KAAK,QAAQ,OAAO;AAE/C,UAAM,WAAW,UAAM,sCAAc;AAAA,MACnC,KAAK,GAAG,KAAK,OAAO,OAAO;AAAA,MAC3B,SAAS,KAAK,OAAO,QAAQ;AAAA,MAC7B,MAAM;AAAA,QACJ,GAAG;AAAA,QACH,QAAQ;AAAA,MACV;AAAA,MACA,uBAAuB;AAAA,MACvB,+BAA2B;AAAA,QACzB;AAAA,MACF;AAAA,MACA,aAAa,QAAQ;AAAA,IACvB,CAAC;AAED,UAAM,EAAE,UAAU,WAAW,GAAG,YAAY,IAAI;AAEhD,QAAI,eAA4C;AAChD,QAAI,QAA4D;AAAA,MAC9D,cAAc,OAAO;AAAA,MACrB,kBAAkB,OAAO;AAAA,IAC3B;AAEA,UAAMC,cAAa,KAAK,OAAO;AAE/B,WAAO;AAAA,MACL,QAAQ,SAAS;AAAA,QACf,IAAI,gBAGF;AAAA,UACA,UAAU,OAAO,YAAY;AAC3B,gBAAI,CAAC,MAAM,SAAS;AAClB,yBAAW,QAAQ,EAAE,MAAM,SAAS,OAAO,MAAM,MAAM,CAAC;AACxD;AAAA,YACF;AAEA,kBAAM,QAAQ,MAAM;AAEpB,gBAAI,MAAM,SAAS,MAAM;AACvB,sBAAQ;AAAA,gBACN,cAAc,MAAM,MAAM;AAAA,gBAC1B,kBAAkB,MAAM,MAAM;AAAA,cAChC;AAAA,YACF;AAEA,kBAAM,SAAS,MAAM,QAAQ,CAAC;AAE9B,iBAAI,iCAAQ,kBAAiB,MAAM;AACjC,6BAAe,uBAAuB,OAAO,aAAa;AAAA,YAC5D;AAEA,iBAAI,iCAAQ,UAAS,MAAM;AACzB;AAAA,YACF;AAEA,kBAAM,QAAQ,OAAO;AAErB,gBAAI,MAAM,WAAW,MAAM;AACzB,yBAAW,QAAQ;AAAA,gBACjB,MAAM;AAAA,gBACN,WAAW,MAAM;AAAA,cACnB,CAAC;AAAA,YACH;AAEA,gBAAI,MAAM,cAAc,MAAM;AAC5B,yBAAW,YAAY,MAAM,YAAY;AAGvC,sBAAM,aAAaA,YAAW;AAE9B,2BAAW,QAAQ;AAAA,kBACjB,MAAM;AAAA,kBACN,cAAc;AAAA,kBACd;AAAA,kBACA,UAAU,SAAS,SAAS;AAAA,kBAC5B,eAAe,SAAS,SAAS;AAAA,gBACnC,CAAC;AAED,2BAAW,QAAQ;AAAA,kBACjB,MAAM;AAAA,kBACN,cAAc;AAAA,kBACd;AAAA,kBACA,UAAU,SAAS,SAAS;AAAA,kBAC5B,MAAM,SAAS,SAAS;AAAA,gBAC1B,CAAC;AAAA,cACH;AAAA,YACF;AAAA,UACF;AAAA,UAEA,MAAM,YAAY;AAChB,uBAAW,QAAQ,EAAE,MAAM,UAAU,cAAc,MAAM,CAAC;AAAA,UAC5D;AAAA,QACF,CAAC;AAAA,MACH;AAAA,MACA,SAAS,EAAE,WAAW,YAAY;AAAA,MAClC;AAAA,IACF;AAAA,EACF;AACF;AAIA,IAAM,4BAA4B,cAAE,OAAO;AAAA,EACzC,SAAS,cAAE;AAAA,IACT,cAAE,OAAO;AAAA,MACP,SAAS,cAAE,OAAO;AAAA,QAChB,MAAM,cAAE,QAAQ,WAAW;AAAA,QAC3B,SAAS,cAAE,OAAO,EAAE,SAAS;AAAA,QAC7B,YAAY,cACT;AAAA,UACC,cAAE,OAAO;AAAA,YACP,UAAU,cAAE,OAAO;AAAA,cACjB,MAAM,cAAE,OAAO;AAAA,cACf,WAAW,cAAE,OAAO;AAAA,YACtB,CAAC;AAAA,UACH,CAAC;AAAA,QACH,EACC,SAAS,EACT,SAAS;AAAA,MACd,CAAC;AAAA,MACD,OAAO,cAAE,OAAO;AAAA,MAChB,eAAe,cAAE,OAAO,EAAE,SAAS,EAAE,SAAS;AAAA,IAChD,CAAC;AAAA,EACH;AAAA,EACA,QAAQ,cAAE,QAAQ,iBAAiB;AAAA,EACnC,OAAO,cAAE,OAAO;AAAA,IACd,eAAe,cAAE,OAAO;AAAA,IACxB,mBAAmB,cAAE,OAAO;AAAA,EAC9B,CAAC;AACH,CAAC;AAID,IAAM,yBAAyB,cAAE,OAAO;AAAA,EACtC,QAAQ,cAAE,QAAQ,uBAAuB;AAAA,EACzC,SAAS,cAAE;AAAA,IACT,cAAE,OAAO;AAAA,MACP,OAAO,cAAE,OAAO;AAAA,QACd,MAAM,cAAE,KAAK,CAAC,WAAW,CAAC,EAAE,SAAS;AAAA,QACrC,SAAS,cAAE,OAAO,EAAE,SAAS,EAAE,SAAS;AAAA,QACxC,YAAY,cACT;AAAA,UACC,cAAE,OAAO;AAAA,YACP,UAAU,cAAE,OAAO,EAAE,MAAM,cAAE,OAAO,GAAG,WAAW,cAAE,OAAO,EAAE,CAAC;AAAA,UAChE,CAAC;AAAA,QACH,EACC,SAAS,EACT,SAAS;AAAA,MACd,CAAC;AAAA,MACD,eAAe,cAAE,OAAO,EAAE,SAAS,EAAE,SAAS;AAAA,MAC9C,OAAO,cAAE,OAAO;AAAA,IAClB,CAAC;AAAA,EACH;AAAA,EACA,OAAO,cACJ,OAAO;AAAA,IACN,eAAe,cAAE,OAAO;AAAA,IACxB,mBAAmB,cAAE,OAAO
;AAAA,EAC9B,CAAC,EACA,SAAS,EACT,SAAS;AACd,CAAC;;;ADpVM,IAAM,UAAN,MAAc;AAAA;AAAA;AAAA;AAAA,EAanB,YACE,UAiBI,CAAC,GACL;AA9CJ;AA+CI,SAAK,WACH,uDAAqB,aAAQ,YAAR,YAAmB,QAAQ,OAAO,MAAvD,YACA;AAEF,SAAK,SAAS,QAAQ;AACtB,SAAK,cAAa,aAAQ,eAAR,YAAsB;AAAA,EAC1C;AAAA,EAEA,IAAY,aAAa;AACvB,WAAO;AAAA,MACL,SAAS,KAAK;AAAA,MACd,SAAS,OAAO;AAAA,QACd,eAAe,cAAU,mCAAW;AAAA,UAClC,QAAQ,KAAK;AAAA,UACb,yBAAyB;AAAA,UACzB,aAAa;AAAA,QACf,CAAC,CAAC;AAAA,MACJ;AAAA,IACF;AAAA,EACF;AAAA,EAEA,KAAK,SAA6B,WAAgC,CAAC,GAAG;AACpE,WAAO,IAAI,yBAAyB,SAAS,UAAU;AAAA,MACrD,UAAU;AAAA,MACV,GAAG,KAAK;AAAA,MACR,YAAY,KAAK;AAAA,IACnB,CAAC;AAAA,EACH;AACF;;;AKrDO,SAAS,cACd,UAiBI,CAAC,GACY;AACjB,QAAMC,WAAU,IAAI,QAAQ,OAAO;AAEnC,QAAM,WAAW,SACf,SACA,UACA;AACA,QAAI,YAAY;AACd,YAAM,IAAI;AAAA,QACR;AAAA,MACF;AAAA,IACF;AAEA,WAAOA,SAAQ,KAAK,SAAS,QAAQ;AAAA,EACvC;AAEA,WAAS,OAAOA,SAAQ,KAAK,KAAKA,QAAO;AAEzC,SAAO;AACT;AAKO,IAAM,UAAU,cAAc;","names":["import_provider_utils","import_provider","import_provider_utils","import_zod","generateId","mistral"]}
package/dist/index.mjs ADDED
@@ -0,0 +1,447 @@
1
+ // src/mistral-facade.ts
2
+ import {
3
+ generateId,
4
+ loadApiKey,
5
+ withoutTrailingSlash
6
+ } from "@ai-sdk/provider-utils";
7
+
8
+ // src/mistral-chat-language-model.ts
9
+ import {
10
+ UnsupportedFunctionalityError as UnsupportedFunctionalityError2
11
+ } from "@ai-sdk/provider";
12
+ import {
13
+ createEventSourceResponseHandler,
14
+ createJsonResponseHandler,
15
+ postJsonToApi
16
+ } from "@ai-sdk/provider-utils";
17
+ import { z as z2 } from "zod";
18
+
19
+ // src/convert-to-mistral-chat-messages.ts
20
+ import {
21
+ UnsupportedFunctionalityError
22
+ } from "@ai-sdk/provider";
23
+ function convertToMistralChatMessages(prompt) {
24
+ const messages = [];
25
+ for (const { role, content } of prompt) {
26
+ switch (role) {
27
+ case "system": {
28
+ messages.push({ role: "system", content });
29
+ break;
30
+ }
31
+ case "user": {
32
+ messages.push({
33
+ role: "user",
34
+ content: content.map((part) => {
35
+ switch (part.type) {
36
+ case "text": {
37
+ return part.text;
38
+ }
39
+ case "image": {
40
+ throw new UnsupportedFunctionalityError({
41
+ functionality: "image-part"
42
+ });
43
+ }
44
+ }
45
+ }).join("")
46
+ });
47
+ break;
48
+ }
49
+ case "assistant": {
50
+ let text = "";
51
+ const toolCalls = [];
52
+ for (const part of content) {
53
+ switch (part.type) {
54
+ case "text": {
55
+ text += part.text;
56
+ break;
57
+ }
58
+ case "tool-call": {
59
+ toolCalls.push({
60
+ id: part.toolCallId,
61
+ type: "function",
62
+ function: {
63
+ name: part.toolName,
64
+ arguments: JSON.stringify(part.args)
65
+ }
66
+ });
67
+ break;
68
+ }
69
+ default: {
70
+ const _exhaustiveCheck = part;
71
+ throw new Error(`Unsupported part: ${_exhaustiveCheck}`);
72
+ }
73
+ }
74
+ }
75
+ messages.push({
76
+ role: "assistant",
77
+ content: text,
78
+ tool_calls: toolCalls.length > 0 ? toolCalls.map(({ function: { name, arguments: args } }) => ({
79
+ id: "null",
80
+ type: "function",
81
+ function: { name, arguments: args }
82
+ })) : void 0
83
+ });
84
+ break;
85
+ }
86
+ case "tool": {
87
+ for (const toolResponse of content) {
88
+ messages.push({
89
+ role: "tool",
90
+ name: toolResponse.toolName,
91
+ content: JSON.stringify(toolResponse.result)
92
+ });
93
+ }
94
+ break;
95
+ }
96
+ default: {
97
+ const _exhaustiveCheck = role;
98
+ throw new Error(`Unsupported role: ${_exhaustiveCheck}`);
99
+ }
100
+ }
101
+ }
102
+ return messages;
103
+ }
104
+
105
+ // src/map-mistral-finish-reason.ts
106
+ function mapMistralFinishReason(finishReason) {
107
+ switch (finishReason) {
108
+ case "stop":
109
+ return "stop";
110
+ case "length":
111
+ case "model_length":
112
+ return "length";
113
+ case "tool_calls":
114
+ return "tool-calls";
115
+ default:
116
+ return "other";
117
+ }
118
+ }
119
+
120
+ // src/mistral-error.ts
121
+ import { createJsonErrorResponseHandler } from "@ai-sdk/provider-utils";
122
+ import { z } from "zod";
123
+ var mistralErrorDataSchema = z.object({
124
+ object: z.literal("error"),
125
+ message: z.string(),
126
+ type: z.string(),
127
+ param: z.string().nullable(),
128
+ code: z.string().nullable()
129
+ });
130
+ var mistralFailedResponseHandler = createJsonErrorResponseHandler({
131
+ errorSchema: mistralErrorDataSchema,
132
+ errorToMessage: (data) => data.message
133
+ });
134
+
135
+ // src/mistral-chat-language-model.ts
136
+ var MistralChatLanguageModel = class {
137
+ constructor(modelId, settings, config) {
138
+ this.specificationVersion = "v1";
139
+ this.defaultObjectGenerationMode = "json";
140
+ this.modelId = modelId;
141
+ this.settings = settings;
142
+ this.config = config;
143
+ }
144
+ get provider() {
145
+ return this.config.provider;
146
+ }
147
+ getArgs({
148
+ mode,
149
+ prompt,
150
+ maxTokens,
151
+ temperature,
152
+ topP,
153
+ frequencyPenalty,
154
+ presencePenalty,
155
+ seed
156
+ }) {
157
+ var _a;
158
+ const type = mode.type;
159
+ const warnings = [];
160
+ if (frequencyPenalty != null) {
161
+ warnings.push({
162
+ type: "unsupported-setting",
163
+ setting: "frequencyPenalty"
164
+ });
165
+ }
166
+ if (presencePenalty != null) {
167
+ warnings.push({
168
+ type: "unsupported-setting",
169
+ setting: "presencePenalty"
170
+ });
171
+ }
172
+ const baseArgs = {
173
+ // model id:
174
+ model: this.modelId,
175
+ // model specific settings:
176
+ safe_prompt: this.settings.safePrompt,
177
+ // standardized settings:
178
+ max_tokens: maxTokens,
179
+ temperature,
180
+ // uses 0..1 scale
181
+ top_p: topP,
182
+ random_seed: seed,
183
+ // messages:
184
+ messages: convertToMistralChatMessages(prompt)
185
+ };
186
+ switch (type) {
187
+ case "regular": {
188
+ const tools = ((_a = mode.tools) == null ? void 0 : _a.length) ? mode.tools : void 0;
189
+ return {
190
+ args: {
191
+ ...baseArgs,
192
+ tools: tools == null ? void 0 : tools.map((tool) => ({
193
+ type: "function",
194
+ function: {
195
+ name: tool.name,
196
+ description: tool.description,
197
+ parameters: tool.parameters
198
+ }
199
+ }))
200
+ },
201
+ warnings
202
+ };
203
+ }
204
+ case "object-json": {
205
+ return {
206
+ args: {
207
+ ...baseArgs,
208
+ response_format: { type: "json_object" }
209
+ },
210
+ warnings
211
+ };
212
+ }
213
+ case "object-tool": {
214
+ return {
215
+ args: {
216
+ ...baseArgs,
217
+ tool_choice: "any",
218
+ tools: [{ type: "function", function: mode.tool }]
219
+ },
220
+ warnings
221
+ };
222
+ }
223
+ case "object-grammar": {
224
+ throw new UnsupportedFunctionalityError2({
225
+ functionality: "object-grammar mode"
226
+ });
227
+ }
228
+ default: {
229
+ const _exhaustiveCheck = type;
230
+ throw new Error(`Unsupported type: ${_exhaustiveCheck}`);
231
+ }
232
+ }
233
+ }
234
+ async doGenerate(options) {
235
+ var _a, _b;
236
+ const { args, warnings } = this.getArgs(options);
237
+ const response = await postJsonToApi({
238
+ url: `${this.config.baseURL}/chat/completions`,
239
+ headers: this.config.headers(),
240
+ body: args,
241
+ failedResponseHandler: mistralFailedResponseHandler,
242
+ successfulResponseHandler: createJsonResponseHandler(
243
+ mistralChatResponseSchema
244
+ ),
245
+ abortSignal: options.abortSignal
246
+ });
247
+ const { messages: rawPrompt, ...rawSettings } = args;
248
+ const choice = response.choices[0];
249
+ return {
250
+ text: (_a = choice.message.content) != null ? _a : void 0,
251
+ toolCalls: (_b = choice.message.tool_calls) == null ? void 0 : _b.map((toolCall) => ({
252
+ toolCallType: "function",
253
+ toolCallId: this.config.generateId(),
254
+ toolName: toolCall.function.name,
255
+ args: toolCall.function.arguments
256
+ })),
257
+ finishReason: mapMistralFinishReason(choice.finish_reason),
258
+ usage: {
259
+ promptTokens: response.usage.prompt_tokens,
260
+ completionTokens: response.usage.completion_tokens
261
+ },
262
+ rawCall: { rawPrompt, rawSettings },
263
+ warnings
264
+ };
265
+ }
266
+ async doStream(options) {
267
+ const { args, warnings } = this.getArgs(options);
268
+ const response = await postJsonToApi({
269
+ url: `${this.config.baseURL}/chat/completions`,
270
+ headers: this.config.headers(),
271
+ body: {
272
+ ...args,
273
+ stream: true
274
+ },
275
+ failedResponseHandler: mistralFailedResponseHandler,
276
+ successfulResponseHandler: createEventSourceResponseHandler(
277
+ mistralChatChunkSchema
278
+ ),
279
+ abortSignal: options.abortSignal
280
+ });
281
+ const { messages: rawPrompt, ...rawSettings } = args;
282
+ let finishReason = "other";
283
+ let usage = {
284
+ promptTokens: Number.NaN,
285
+ completionTokens: Number.NaN
286
+ };
287
+ const generateId2 = this.config.generateId;
288
+ return {
289
+ stream: response.pipeThrough(
290
+ new TransformStream({
291
+ transform(chunk, controller) {
292
+ if (!chunk.success) {
293
+ controller.enqueue({ type: "error", error: chunk.error });
294
+ return;
295
+ }
296
+ const value = chunk.value;
297
+ if (value.usage != null) {
298
+ usage = {
299
+ promptTokens: value.usage.prompt_tokens,
300
+ completionTokens: value.usage.completion_tokens
301
+ };
302
+ }
303
+ const choice = value.choices[0];
304
+ if ((choice == null ? void 0 : choice.finish_reason) != null) {
305
+ finishReason = mapMistralFinishReason(choice.finish_reason);
306
+ }
307
+ if ((choice == null ? void 0 : choice.delta) == null) {
308
+ return;
309
+ }
310
+ const delta = choice.delta;
311
+ if (delta.content != null) {
312
+ controller.enqueue({
313
+ type: "text-delta",
314
+ textDelta: delta.content
315
+ });
316
+ }
317
+ if (delta.tool_calls != null) {
318
+ for (const toolCall of delta.tool_calls) {
319
+ const toolCallId = generateId2();
320
+ controller.enqueue({
321
+ type: "tool-call-delta",
322
+ toolCallType: "function",
323
+ toolCallId,
324
+ toolName: toolCall.function.name,
325
+ argsTextDelta: toolCall.function.arguments
326
+ });
327
+ controller.enqueue({
328
+ type: "tool-call",
329
+ toolCallType: "function",
330
+ toolCallId,
331
+ toolName: toolCall.function.name,
332
+ args: toolCall.function.arguments
333
+ });
334
+ }
335
+ }
336
+ },
337
+ flush(controller) {
338
+ controller.enqueue({ type: "finish", finishReason, usage });
339
+ }
340
+ })
341
+ ),
342
+ rawCall: { rawPrompt, rawSettings },
343
+ warnings
344
+ };
345
+ }
346
+ };
347
+ var mistralChatResponseSchema = z2.object({
348
+ choices: z2.array(
349
+ z2.object({
350
+ message: z2.object({
351
+ role: z2.literal("assistant"),
352
+ content: z2.string().nullable(),
353
+ tool_calls: z2.array(
354
+ z2.object({
355
+ function: z2.object({
356
+ name: z2.string(),
357
+ arguments: z2.string()
358
+ })
359
+ })
360
+ ).optional().nullable()
361
+ }),
362
+ index: z2.number(),
363
+ finish_reason: z2.string().optional().nullable()
364
+ })
365
+ ),
366
+ object: z2.literal("chat.completion"),
367
+ usage: z2.object({
368
+ prompt_tokens: z2.number(),
369
+ completion_tokens: z2.number()
370
+ })
371
+ });
372
+ var mistralChatChunkSchema = z2.object({
373
+ object: z2.literal("chat.completion.chunk"),
374
+ choices: z2.array(
375
+ z2.object({
376
+ delta: z2.object({
377
+ role: z2.enum(["assistant"]).optional(),
378
+ content: z2.string().nullable().optional(),
379
+ tool_calls: z2.array(
380
+ z2.object({
381
+ function: z2.object({ name: z2.string(), arguments: z2.string() })
382
+ })
383
+ ).optional().nullable()
384
+ }),
385
+ finish_reason: z2.string().nullable().optional(),
386
+ index: z2.number()
387
+ })
388
+ ),
389
+ usage: z2.object({
390
+ prompt_tokens: z2.number(),
391
+ completion_tokens: z2.number()
392
+ }).optional().nullable()
393
+ });
394
+
395
+ // src/mistral-facade.ts
396
+ var Mistral = class {
397
+ /**
398
+ * Creates a new Mistral provider instance.
399
+ */
400
+ constructor(options = {}) {
401
+ var _a, _b, _c;
402
+ this.baseURL = (_b = withoutTrailingSlash((_a = options.baseURL) != null ? _a : options.baseUrl)) != null ? _b : "https://api.mistral.ai/v1";
403
+ this.apiKey = options.apiKey;
404
+ this.generateId = (_c = options.generateId) != null ? _c : generateId;
405
+ }
406
+ get baseConfig() {
407
+ return {
408
+ baseURL: this.baseURL,
409
+ headers: () => ({
410
+ Authorization: `Bearer ${loadApiKey({
411
+ apiKey: this.apiKey,
412
+ environmentVariableName: "MISTRAL_API_KEY",
413
+ description: "Mistral"
414
+ })}`
415
+ })
416
+ };
417
+ }
418
+ chat(modelId, settings = {}) {
419
+ return new MistralChatLanguageModel(modelId, settings, {
420
+ provider: "mistral.chat",
421
+ ...this.baseConfig,
422
+ generateId: this.generateId
423
+ });
424
+ }
425
+ };
426
+
427
+ // src/mistral-provider.ts
428
+ function createMistral(options = {}) {
429
+ const mistral2 = new Mistral(options);
430
+ const provider = function(modelId, settings) {
431
+ if (new.target) {
432
+ throw new Error(
433
+ "The Mistral model function cannot be called with the new keyword."
434
+ );
435
+ }
436
+ return mistral2.chat(modelId, settings);
437
+ };
438
+ provider.chat = mistral2.chat.bind(mistral2);
439
+ return provider;
440
+ }
441
+ var mistral = createMistral();
442
+ export {
443
+ Mistral,
444
+ createMistral,
445
+ mistral
446
+ };
447
+ //# sourceMappingURL=index.mjs.map
@@ -0,0 +1 @@
1
+ {"version":3,"sources":["../src/mistral-facade.ts","../src/mistral-chat-language-model.ts","../src/convert-to-mistral-chat-messages.ts","../src/map-mistral-finish-reason.ts","../src/mistral-error.ts","../src/mistral-provider.ts"],"sourcesContent":["import {\n generateId,\n loadApiKey,\n withoutTrailingSlash,\n} from '@ai-sdk/provider-utils';\nimport { MistralChatLanguageModel } from './mistral-chat-language-model';\nimport {\n MistralChatModelId,\n MistralChatSettings,\n} from './mistral-chat-settings';\n\n/**\n * @deprecated Use `createMistral` instead.\n */\nexport class Mistral {\n /**\n * Base URL for the Mistral API calls.\n */\n readonly baseURL: string;\n\n readonly apiKey?: string;\n\n private readonly generateId: () => string;\n\n /**\n * Creates a new Mistral provider instance.\n */\n constructor(\n options: {\n /**\n * Base URL for the Mistral API calls.\n */\n baseURL?: string;\n\n /**\n * @deprecated Use `baseURL` instead.\n */\n baseUrl?: string;\n\n /**\n * API key for authenticating requests.\n */\n apiKey?: string;\n\n generateId?: () => string;\n } = {},\n ) {\n this.baseURL =\n withoutTrailingSlash(options.baseURL ?? options.baseUrl) ??\n 'https://api.mistral.ai/v1';\n\n this.apiKey = options.apiKey;\n this.generateId = options.generateId ?? generateId;\n }\n\n private get baseConfig() {\n return {\n baseURL: this.baseURL,\n headers: () => ({\n Authorization: `Bearer ${loadApiKey({\n apiKey: this.apiKey,\n environmentVariableName: 'MISTRAL_API_KEY',\n description: 'Mistral',\n })}`,\n }),\n };\n }\n\n chat(modelId: MistralChatModelId, settings: MistralChatSettings = {}) {\n return new MistralChatLanguageModel(modelId, settings, {\n provider: 'mistral.chat',\n ...this.baseConfig,\n generateId: this.generateId,\n });\n }\n}\n","import {\n LanguageModelV1,\n LanguageModelV1CallWarning,\n LanguageModelV1FinishReason,\n LanguageModelV1StreamPart,\n UnsupportedFunctionalityError,\n} from '@ai-sdk/provider';\nimport {\n ParseResult,\n createEventSourceResponseHandler,\n createJsonResponseHandler,\n postJsonToApi,\n} from '@ai-sdk/provider-utils';\nimport { z } from 'zod';\nimport { convertToMistralChatMessages } from './convert-to-mistral-chat-messages';\nimport { mapMistralFinishReason } from './map-mistral-finish-reason';\nimport {\n MistralChatModelId,\n MistralChatSettings,\n} from './mistral-chat-settings';\nimport { mistralFailedResponseHandler } from './mistral-error';\n\ntype MistralChatConfig = {\n provider: string;\n baseURL: string;\n headers: () => Record<string, string | undefined>;\n generateId: () => string;\n};\n\nexport class MistralChatLanguageModel implements LanguageModelV1 {\n readonly specificationVersion = 'v1';\n readonly defaultObjectGenerationMode = 'json';\n\n readonly modelId: MistralChatModelId;\n readonly settings: MistralChatSettings;\n\n private readonly config: MistralChatConfig;\n\n constructor(\n modelId: MistralChatModelId,\n settings: MistralChatSettings,\n config: MistralChatConfig,\n ) {\n this.modelId = modelId;\n this.settings = settings;\n this.config = config;\n }\n\n get provider(): string {\n return this.config.provider;\n }\n\n private getArgs({\n mode,\n prompt,\n maxTokens,\n temperature,\n topP,\n frequencyPenalty,\n presencePenalty,\n seed,\n }: Parameters<LanguageModelV1['doGenerate']>[0]) {\n const type = mode.type;\n\n const warnings: LanguageModelV1CallWarning[] = [];\n\n if (frequencyPenalty != null) {\n warnings.push({\n type: 'unsupported-setting',\n setting: 'frequencyPenalty',\n });\n }\n\n if (presencePenalty != 
null) {\n warnings.push({\n type: 'unsupported-setting',\n setting: 'presencePenalty',\n });\n }\n\n const baseArgs = {\n // model id:\n model: this.modelId,\n\n // model specific settings:\n safe_prompt: this.settings.safePrompt,\n\n // standardized settings:\n max_tokens: maxTokens,\n temperature, // uses 0..1 scale\n top_p: topP,\n random_seed: seed,\n\n // messages:\n messages: convertToMistralChatMessages(prompt),\n };\n\n switch (type) {\n case 'regular': {\n // when the tools array is empty, change it to undefined to prevent OpenAI errors:\n const tools = mode.tools?.length ? mode.tools : undefined;\n\n return {\n args: {\n ...baseArgs,\n tools: tools?.map(tool => ({\n type: 'function',\n function: {\n name: tool.name,\n description: tool.description,\n parameters: tool.parameters,\n },\n })),\n },\n warnings,\n };\n }\n\n case 'object-json': {\n return {\n args: {\n ...baseArgs,\n response_format: { type: 'json_object' },\n },\n warnings,\n };\n }\n\n case 'object-tool': {\n return {\n args: {\n ...baseArgs,\n tool_choice: 'any',\n tools: [{ type: 'function', function: mode.tool }],\n },\n warnings,\n };\n }\n\n case 'object-grammar': {\n throw new UnsupportedFunctionalityError({\n functionality: 'object-grammar mode',\n });\n }\n\n default: {\n const _exhaustiveCheck: never = type;\n throw new Error(`Unsupported type: ${_exhaustiveCheck}`);\n }\n }\n }\n\n async doGenerate(\n options: Parameters<LanguageModelV1['doGenerate']>[0],\n ): Promise<Awaited<ReturnType<LanguageModelV1['doGenerate']>>> {\n const { args, warnings } = this.getArgs(options);\n\n const response = await postJsonToApi({\n url: `${this.config.baseURL}/chat/completions`,\n headers: this.config.headers(),\n body: args,\n failedResponseHandler: mistralFailedResponseHandler,\n successfulResponseHandler: createJsonResponseHandler(\n mistralChatResponseSchema,\n ),\n abortSignal: options.abortSignal,\n });\n\n const { messages: rawPrompt, ...rawSettings } = args;\n const choice = response.choices[0];\n\n return {\n text: choice.message.content ?? 
undefined,\n toolCalls: choice.message.tool_calls?.map(toolCall => ({\n toolCallType: 'function',\n toolCallId: this.config.generateId(),\n toolName: toolCall.function.name,\n args: toolCall.function.arguments!,\n })),\n finishReason: mapMistralFinishReason(choice.finish_reason),\n usage: {\n promptTokens: response.usage.prompt_tokens,\n completionTokens: response.usage.completion_tokens,\n },\n rawCall: { rawPrompt, rawSettings },\n warnings,\n };\n }\n\n async doStream(\n options: Parameters<LanguageModelV1['doStream']>[0],\n ): Promise<Awaited<ReturnType<LanguageModelV1['doStream']>>> {\n const { args, warnings } = this.getArgs(options);\n\n const response = await postJsonToApi({\n url: `${this.config.baseURL}/chat/completions`,\n headers: this.config.headers(),\n body: {\n ...args,\n stream: true,\n },\n failedResponseHandler: mistralFailedResponseHandler,\n successfulResponseHandler: createEventSourceResponseHandler(\n mistralChatChunkSchema,\n ),\n abortSignal: options.abortSignal,\n });\n\n const { messages: rawPrompt, ...rawSettings } = args;\n\n let finishReason: LanguageModelV1FinishReason = 'other';\n let usage: { promptTokens: number; completionTokens: number } = {\n promptTokens: Number.NaN,\n completionTokens: Number.NaN,\n };\n\n const generateId = this.config.generateId;\n\n return {\n stream: response.pipeThrough(\n new TransformStream<\n ParseResult<z.infer<typeof mistralChatChunkSchema>>,\n LanguageModelV1StreamPart\n >({\n transform(chunk, controller) {\n if (!chunk.success) {\n controller.enqueue({ type: 'error', error: chunk.error });\n return;\n }\n\n const value = chunk.value;\n\n if (value.usage != null) {\n usage = {\n promptTokens: value.usage.prompt_tokens,\n completionTokens: value.usage.completion_tokens,\n };\n }\n\n const choice = value.choices[0];\n\n if (choice?.finish_reason != null) {\n finishReason = mapMistralFinishReason(choice.finish_reason);\n }\n\n if (choice?.delta == null) {\n return;\n }\n\n const delta = choice.delta;\n\n if (delta.content != null) {\n controller.enqueue({\n type: 'text-delta',\n textDelta: delta.content,\n });\n }\n\n if (delta.tool_calls != null) {\n for (const toolCall of delta.tool_calls) {\n // mistral tool calls come in one piece\n\n const toolCallId = generateId(); // delta and tool call must have same id\n\n controller.enqueue({\n type: 'tool-call-delta',\n toolCallType: 'function',\n toolCallId,\n toolName: toolCall.function.name,\n argsTextDelta: toolCall.function.arguments,\n });\n\n controller.enqueue({\n type: 'tool-call',\n toolCallType: 'function',\n toolCallId,\n toolName: toolCall.function.name,\n args: toolCall.function.arguments,\n });\n }\n }\n },\n\n flush(controller) {\n controller.enqueue({ type: 'finish', finishReason, usage });\n },\n }),\n ),\n rawCall: { rawPrompt, rawSettings },\n warnings,\n };\n }\n}\n\n// limited version of the schema, focussed on what is needed for the implementation\n// this approach limits breakages when the API changes and increases efficiency\nconst mistralChatResponseSchema = z.object({\n choices: z.array(\n z.object({\n message: z.object({\n role: z.literal('assistant'),\n content: z.string().nullable(),\n tool_calls: z\n .array(\n z.object({\n function: z.object({\n name: z.string(),\n arguments: z.string(),\n }),\n }),\n )\n .optional()\n .nullable(),\n }),\n index: z.number(),\n finish_reason: z.string().optional().nullable(),\n }),\n ),\n object: z.literal('chat.completion'),\n usage: z.object({\n prompt_tokens: z.number(),\n completion_tokens: z.number(),\n 
}),\n});\n\n// limited version of the schema, focussed on what is needed for the implementation\n// this approach limits breakages when the API changes and increases efficiency\nconst mistralChatChunkSchema = z.object({\n object: z.literal('chat.completion.chunk'),\n choices: z.array(\n z.object({\n delta: z.object({\n role: z.enum(['assistant']).optional(),\n content: z.string().nullable().optional(),\n tool_calls: z\n .array(\n z.object({\n function: z.object({ name: z.string(), arguments: z.string() }),\n }),\n )\n .optional()\n .nullable(),\n }),\n finish_reason: z.string().nullable().optional(),\n index: z.number(),\n }),\n ),\n usage: z\n .object({\n prompt_tokens: z.number(),\n completion_tokens: z.number(),\n })\n .optional()\n .nullable(),\n});\n","import {\n LanguageModelV1Prompt,\n UnsupportedFunctionalityError,\n} from '@ai-sdk/provider';\nimport { MistralChatPrompt } from './mistral-chat-prompt';\n\nexport function convertToMistralChatMessages(\n prompt: LanguageModelV1Prompt,\n): MistralChatPrompt {\n const messages: MistralChatPrompt = [];\n\n for (const { role, content } of prompt) {\n switch (role) {\n case 'system': {\n messages.push({ role: 'system', content });\n break;\n }\n\n case 'user': {\n messages.push({\n role: 'user',\n content: content\n .map(part => {\n switch (part.type) {\n case 'text': {\n return part.text;\n }\n case 'image': {\n throw new UnsupportedFunctionalityError({\n functionality: 'image-part',\n });\n }\n }\n })\n .join(''),\n });\n break;\n }\n\n case 'assistant': {\n let text = '';\n const toolCalls: Array<{\n id: string;\n type: 'function';\n function: { name: string; arguments: string };\n }> = [];\n\n for (const part of content) {\n switch (part.type) {\n case 'text': {\n text += part.text;\n break;\n }\n case 'tool-call': {\n toolCalls.push({\n id: part.toolCallId,\n type: 'function',\n function: {\n name: part.toolName,\n arguments: JSON.stringify(part.args),\n },\n });\n break;\n }\n default: {\n const _exhaustiveCheck: never = part;\n throw new Error(`Unsupported part: ${_exhaustiveCheck}`);\n }\n }\n }\n\n messages.push({\n role: 'assistant',\n content: text,\n tool_calls:\n toolCalls.length > 0\n ? 
toolCalls.map(({ function: { name, arguments: args } }) => ({\n id: 'null',\n type: 'function',\n function: { name, arguments: args },\n }))\n : undefined,\n });\n\n break;\n }\n case 'tool': {\n for (const toolResponse of content) {\n messages.push({\n role: 'tool',\n name: toolResponse.toolName,\n content: JSON.stringify(toolResponse.result),\n });\n }\n break;\n }\n default: {\n const _exhaustiveCheck: never = role;\n throw new Error(`Unsupported role: ${_exhaustiveCheck}`);\n }\n }\n }\n\n return messages;\n}\n","import { LanguageModelV1FinishReason } from '@ai-sdk/provider';\n\nexport function mapMistralFinishReason(\n finishReason: string | null | undefined,\n): LanguageModelV1FinishReason {\n switch (finishReason) {\n case 'stop':\n return 'stop';\n case 'length':\n case 'model_length':\n return 'length';\n case 'tool_calls':\n return 'tool-calls';\n default:\n return 'other';\n }\n}\n","import { createJsonErrorResponseHandler } from '@ai-sdk/provider-utils';\nimport { z } from 'zod';\n\nconst mistralErrorDataSchema = z.object({\n object: z.literal('error'),\n message: z.string(),\n type: z.string(),\n param: z.string().nullable(),\n code: z.string().nullable(),\n});\n\nexport type MistralErrorData = z.infer<typeof mistralErrorDataSchema>;\n\nexport const mistralFailedResponseHandler = createJsonErrorResponseHandler({\n errorSchema: mistralErrorDataSchema,\n errorToMessage: data => data.message,\n});\n","import { MistralChatLanguageModel } from './mistral-chat-language-model';\nimport {\n MistralChatModelId,\n MistralChatSettings,\n} from './mistral-chat-settings';\nimport { Mistral } from './mistral-facade';\n\nexport interface MistralProvider {\n (\n modelId: MistralChatModelId,\n settings?: MistralChatSettings,\n ): MistralChatLanguageModel;\n\n chat(\n modelId: MistralChatModelId,\n settings?: MistralChatSettings,\n ): MistralChatLanguageModel;\n}\n\n/**\n * Create a Mistral AI provider.\n */\nexport function createMistral(\n options: {\n /**\n * Base URL for the Mistral API calls.\n */\n baseURL?: string;\n\n /**\n * @deprecated Use `baseURL` instead.\n */\n baseUrl?: string;\n\n /**\n * API key for authenticating requests.\n */\n apiKey?: string;\n\n generateId?: () => string;\n } = {},\n): MistralProvider {\n const mistral = new Mistral(options);\n\n const provider = function (\n modelId: MistralChatModelId,\n settings?: MistralChatSettings,\n ) {\n if (new.target) {\n throw new Error(\n 'The Mistral model function cannot be called with the new keyword.',\n );\n }\n\n return mistral.chat(modelId, settings);\n };\n\n provider.chat = mistral.chat.bind(mistral);\n\n return provider as MistralProvider;\n}\n\n/**\n * Default Mistral provider instance.\n */\nexport const mistral = 
createMistral();\n"],"mappings":";AAAA;AAAA,EACE;AAAA,EACA;AAAA,EACA;AAAA,OACK;;;ACJP;AAAA,EAKE,iCAAAA;AAAA,OACK;AACP;AAAA,EAEE;AAAA,EACA;AAAA,EACA;AAAA,OACK;AACP,SAAS,KAAAC,UAAS;;;ACblB;AAAA,EAEE;AAAA,OACK;AAGA,SAAS,6BACd,QACmB;AACnB,QAAM,WAA8B,CAAC;AAErC,aAAW,EAAE,MAAM,QAAQ,KAAK,QAAQ;AACtC,YAAQ,MAAM;AAAA,MACZ,KAAK,UAAU;AACb,iBAAS,KAAK,EAAE,MAAM,UAAU,QAAQ,CAAC;AACzC;AAAA,MACF;AAAA,MAEA,KAAK,QAAQ;AACX,iBAAS,KAAK;AAAA,UACZ,MAAM;AAAA,UACN,SAAS,QACN,IAAI,UAAQ;AACX,oBAAQ,KAAK,MAAM;AAAA,cACjB,KAAK,QAAQ;AACX,uBAAO,KAAK;AAAA,cACd;AAAA,cACA,KAAK,SAAS;AACZ,sBAAM,IAAI,8BAA8B;AAAA,kBACtC,eAAe;AAAA,gBACjB,CAAC;AAAA,cACH;AAAA,YACF;AAAA,UACF,CAAC,EACA,KAAK,EAAE;AAAA,QACZ,CAAC;AACD;AAAA,MACF;AAAA,MAEA,KAAK,aAAa;AAChB,YAAI,OAAO;AACX,cAAM,YAID,CAAC;AAEN,mBAAW,QAAQ,SAAS;AAC1B,kBAAQ,KAAK,MAAM;AAAA,YACjB,KAAK,QAAQ;AACX,sBAAQ,KAAK;AACb;AAAA,YACF;AAAA,YACA,KAAK,aAAa;AAChB,wBAAU,KAAK;AAAA,gBACb,IAAI,KAAK;AAAA,gBACT,MAAM;AAAA,gBACN,UAAU;AAAA,kBACR,MAAM,KAAK;AAAA,kBACX,WAAW,KAAK,UAAU,KAAK,IAAI;AAAA,gBACrC;AAAA,cACF,CAAC;AACD;AAAA,YACF;AAAA,YACA,SAAS;AACP,oBAAM,mBAA0B;AAChC,oBAAM,IAAI,MAAM,qBAAqB,gBAAgB,EAAE;AAAA,YACzD;AAAA,UACF;AAAA,QACF;AAEA,iBAAS,KAAK;AAAA,UACZ,MAAM;AAAA,UACN,SAAS;AAAA,UACT,YACE,UAAU,SAAS,IACf,UAAU,IAAI,CAAC,EAAE,UAAU,EAAE,MAAM,WAAW,KAAK,EAAE,OAAO;AAAA,YAC1D,IAAI;AAAA,YACJ,MAAM;AAAA,YACN,UAAU,EAAE,MAAM,WAAW,KAAK;AAAA,UACpC,EAAE,IACF;AAAA,QACR,CAAC;AAED;AAAA,MACF;AAAA,MACA,KAAK,QAAQ;AACX,mBAAW,gBAAgB,SAAS;AAClC,mBAAS,KAAK;AAAA,YACZ,MAAM;AAAA,YACN,MAAM,aAAa;AAAA,YACnB,SAAS,KAAK,UAAU,aAAa,MAAM;AAAA,UAC7C,CAAC;AAAA,QACH;AACA;AAAA,MACF;AAAA,MACA,SAAS;AACP,cAAM,mBAA0B;AAChC,cAAM,IAAI,MAAM,qBAAqB,gBAAgB,EAAE;AAAA,MACzD;AAAA,IACF;AAAA,EACF;AAEA,SAAO;AACT;;;ACtGO,SAAS,uBACd,cAC6B;AAC7B,UAAQ,cAAc;AAAA,IACpB,KAAK;AACH,aAAO;AAAA,IACT,KAAK;AAAA,IACL,KAAK;AACH,aAAO;AAAA,IACT,KAAK;AACH,aAAO;AAAA,IACT;AACE,aAAO;AAAA,EACX;AACF;;;AChBA,SAAS,sCAAsC;AAC/C,SAAS,SAAS;AAElB,IAAM,yBAAyB,EAAE,OAAO;AAAA,EACtC,QAAQ,EAAE,QAAQ,OAAO;AAAA,EACzB,SAAS,EAAE,OAAO;AAAA,EAClB,MAAM,EAAE,OAAO;AAAA,EACf,OAAO,EAAE,OAAO,EAAE,SAAS;AAAA,EAC3B,MAAM,EAAE,OAAO,EAAE,SAAS;AAC5B,CAAC;AAIM,IAAM,+BAA+B,+BAA+B;AAAA,EACzE,aAAa;AAAA,EACb,gBAAgB,UAAQ,KAAK;AAC/B,CAAC;;;AHaM,IAAM,2BAAN,MAA0D;AAAA,EAS/D,YACE,SACA,UACA,QACA;AAZF,SAAS,uBAAuB;AAChC,SAAS,8BAA8B;AAYrC,SAAK,UAAU;AACf,SAAK,WAAW;AAChB,SAAK,SAAS;AAAA,EAChB;AAAA,EAEA,IAAI,WAAmB;AACrB,WAAO,KAAK,OAAO;AAAA,EACrB;AAAA,EAEQ,QAAQ;AAAA,IACd;AAAA,IACA;AAAA,IACA;AAAA,IACA;AAAA,IACA;AAAA,IACA;AAAA,IACA;AAAA,IACA;AAAA,EACF,GAAiD;AA7DnD;AA8DI,UAAM,OAAO,KAAK;AAElB,UAAM,WAAyC,CAAC;AAEhD,QAAI,oBAAoB,MAAM;AAC5B,eAAS,KAAK;AAAA,QACZ,MAAM;AAAA,QACN,SAAS;AAAA,MACX,CAAC;AAAA,IACH;AAEA,QAAI,mBAAmB,MAAM;AAC3B,eAAS,KAAK;AAAA,QACZ,MAAM;AAAA,QACN,SAAS;AAAA,MACX,CAAC;AAAA,IACH;AAEA,UAAM,WAAW;AAAA;AAAA,MAEf,OAAO,KAAK;AAAA;AAAA,MAGZ,aAAa,KAAK,SAAS;AAAA;AAAA,MAG3B,YAAY;AAAA,MACZ;AAAA;AAAA,MACA,OAAO;AAAA,MACP,aAAa;AAAA;AAAA,MAGb,UAAU,6BAA6B,MAAM;AAAA,IAC/C;AAEA,YAAQ,MAAM;AAAA,MACZ,KAAK,WAAW;AAEd,cAAM,UAAQ,UAAK,UAAL,mBAAY,UAAS,KAAK,QAAQ;AAEhD,eAAO;AAAA,UACL,MAAM;AAAA,YACJ,GAAG;AAAA,YACH,OAAO,+BAAO,IAAI,WAAS;AAAA,cACzB,MAAM;AAAA,cACN,UAAU;AAAA,gBACR,MAAM,KAAK;AAAA,gBACX,aAAa,KAAK;AAAA,gBAClB,YAAY,KAAK;AAAA,cACnB;AAAA,YACF;AAAA,UACF;AAAA,UACA;AAAA,QACF;AAAA,MACF;AAAA,MAEA,KAAK,eAAe;AAClB,eAAO;AAAA,UACL,MAAM;AAAA,YACJ,GAAG;AAAA,YACH,iBAAiB,EAAE,MAAM,cAAc;AAAA,UACzC;AAAA,UACA;AAAA,QACF;AAAA,MACF;AAAA,MAEA,KAAK,eAAe;AAClB,eAAO;AAAA,UACL,MAAM;AAAA,YACJ,GAAG;AAAA,YACH,aAAa;AAAA,YACb,OAAO,CAAC,EAAE,MAAM,YAAY,UAAU,KAAK,KAAK,CAAC;AAAA,UACnD;AAAA,UACA;AAAA,QACF;AAAA,MACF;AAAA,MAEA,KAAK,kBAAkB;AACrB,cAAM,IAAIC,+BAA8B;AAAA,UACtC,eAAe;AAAA,QA
CjB,CAAC;AAAA,MACH;AAAA,MAEA,SAAS;AACP,cAAM,mBAA0B;AAChC,cAAM,IAAI,MAAM,qBAAqB,gBAAgB,EAAE;AAAA,MACzD;AAAA,IACF;AAAA,EACF;AAAA,EAEA,MAAM,WACJ,SAC6D;AA1JjE;AA2JI,UAAM,EAAE,MAAM,SAAS,IAAI,KAAK,QAAQ,OAAO;AAE/C,UAAM,WAAW,MAAM,cAAc;AAAA,MACnC,KAAK,GAAG,KAAK,OAAO,OAAO;AAAA,MAC3B,SAAS,KAAK,OAAO,QAAQ;AAAA,MAC7B,MAAM;AAAA,MACN,uBAAuB;AAAA,MACvB,2BAA2B;AAAA,QACzB;AAAA,MACF;AAAA,MACA,aAAa,QAAQ;AAAA,IACvB,CAAC;AAED,UAAM,EAAE,UAAU,WAAW,GAAG,YAAY,IAAI;AAChD,UAAM,SAAS,SAAS,QAAQ,CAAC;AAEjC,WAAO;AAAA,MACL,OAAM,YAAO,QAAQ,YAAf,YAA0B;AAAA,MAChC,YAAW,YAAO,QAAQ,eAAf,mBAA2B,IAAI,eAAa;AAAA,QACrD,cAAc;AAAA,QACd,YAAY,KAAK,OAAO,WAAW;AAAA,QACnC,UAAU,SAAS,SAAS;AAAA,QAC5B,MAAM,SAAS,SAAS;AAAA,MAC1B;AAAA,MACA,cAAc,uBAAuB,OAAO,aAAa;AAAA,MACzD,OAAO;AAAA,QACL,cAAc,SAAS,MAAM;AAAA,QAC7B,kBAAkB,SAAS,MAAM;AAAA,MACnC;AAAA,MACA,SAAS,EAAE,WAAW,YAAY;AAAA,MAClC;AAAA,IACF;AAAA,EACF;AAAA,EAEA,MAAM,SACJ,SAC2D;AAC3D,UAAM,EAAE,MAAM,SAAS,IAAI,KAAK,QAAQ,OAAO;AAE/C,UAAM,WAAW,MAAM,cAAc;AAAA,MACnC,KAAK,GAAG,KAAK,OAAO,OAAO;AAAA,MAC3B,SAAS,KAAK,OAAO,QAAQ;AAAA,MAC7B,MAAM;AAAA,QACJ,GAAG;AAAA,QACH,QAAQ;AAAA,MACV;AAAA,MACA,uBAAuB;AAAA,MACvB,2BAA2B;AAAA,QACzB;AAAA,MACF;AAAA,MACA,aAAa,QAAQ;AAAA,IACvB,CAAC;AAED,UAAM,EAAE,UAAU,WAAW,GAAG,YAAY,IAAI;AAEhD,QAAI,eAA4C;AAChD,QAAI,QAA4D;AAAA,MAC9D,cAAc,OAAO;AAAA,MACrB,kBAAkB,OAAO;AAAA,IAC3B;AAEA,UAAMC,cAAa,KAAK,OAAO;AAE/B,WAAO;AAAA,MACL,QAAQ,SAAS;AAAA,QACf,IAAI,gBAGF;AAAA,UACA,UAAU,OAAO,YAAY;AAC3B,gBAAI,CAAC,MAAM,SAAS;AAClB,yBAAW,QAAQ,EAAE,MAAM,SAAS,OAAO,MAAM,MAAM,CAAC;AACxD;AAAA,YACF;AAEA,kBAAM,QAAQ,MAAM;AAEpB,gBAAI,MAAM,SAAS,MAAM;AACvB,sBAAQ;AAAA,gBACN,cAAc,MAAM,MAAM;AAAA,gBAC1B,kBAAkB,MAAM,MAAM;AAAA,cAChC;AAAA,YACF;AAEA,kBAAM,SAAS,MAAM,QAAQ,CAAC;AAE9B,iBAAI,iCAAQ,kBAAiB,MAAM;AACjC,6BAAe,uBAAuB,OAAO,aAAa;AAAA,YAC5D;AAEA,iBAAI,iCAAQ,UAAS,MAAM;AACzB;AAAA,YACF;AAEA,kBAAM,QAAQ,OAAO;AAErB,gBAAI,MAAM,WAAW,MAAM;AACzB,yBAAW,QAAQ;AAAA,gBACjB,MAAM;AAAA,gBACN,WAAW,MAAM;AAAA,cACnB,CAAC;AAAA,YACH;AAEA,gBAAI,MAAM,cAAc,MAAM;AAC5B,yBAAW,YAAY,MAAM,YAAY;AAGvC,sBAAM,aAAaA,YAAW;AAE9B,2BAAW,QAAQ;AAAA,kBACjB,MAAM;AAAA,kBACN,cAAc;AAAA,kBACd;AAAA,kBACA,UAAU,SAAS,SAAS;AAAA,kBAC5B,eAAe,SAAS,SAAS;AAAA,gBACnC,CAAC;AAED,2BAAW,QAAQ;AAAA,kBACjB,MAAM;AAAA,kBACN,cAAc;AAAA,kBACd;AAAA,kBACA,UAAU,SAAS,SAAS;AAAA,kBAC5B,MAAM,SAAS,SAAS;AAAA,gBAC1B,CAAC;AAAA,cACH;AAAA,YACF;AAAA,UACF;AAAA,UAEA,MAAM,YAAY;AAChB,uBAAW,QAAQ,EAAE,MAAM,UAAU,cAAc,MAAM,CAAC;AAAA,UAC5D;AAAA,QACF,CAAC;AAAA,MACH;AAAA,MACA,SAAS,EAAE,WAAW,YAAY;AAAA,MAClC;AAAA,IACF;AAAA,EACF;AACF;AAIA,IAAM,4BAA4BC,GAAE,OAAO;AAAA,EACzC,SAASA,GAAE;AAAA,IACTA,GAAE,OAAO;AAAA,MACP,SAASA,GAAE,OAAO;AAAA,QAChB,MAAMA,GAAE,QAAQ,WAAW;AAAA,QAC3B,SAASA,GAAE,OAAO,EAAE,SAAS;AAAA,QAC7B,YAAYA,GACT;AAAA,UACCA,GAAE,OAAO;AAAA,YACP,UAAUA,GAAE,OAAO;AAAA,cACjB,MAAMA,GAAE,OAAO;AAAA,cACf,WAAWA,GAAE,OAAO;AAAA,YACtB,CAAC;AAAA,UACH,CAAC;AAAA,QACH,EACC,SAAS,EACT,SAAS;AAAA,MACd,CAAC;AAAA,MACD,OAAOA,GAAE,OAAO;AAAA,MAChB,eAAeA,GAAE,OAAO,EAAE,SAAS,EAAE,SAAS;AAAA,IAChD,CAAC;AAAA,EACH;AAAA,EACA,QAAQA,GAAE,QAAQ,iBAAiB;AAAA,EACnC,OAAOA,GAAE,OAAO;AAAA,IACd,eAAeA,GAAE,OAAO;AAAA,IACxB,mBAAmBA,GAAE,OAAO;AAAA,EAC9B,CAAC;AACH,CAAC;AAID,IAAM,yBAAyBA,GAAE,OAAO;AAAA,EACtC,QAAQA,GAAE,QAAQ,uBAAuB;AAAA,EACzC,SAASA,GAAE;AAAA,IACTA,GAAE,OAAO;AAAA,MACP,OAAOA,GAAE,OAAO;AAAA,QACd,MAAMA,GAAE,KAAK,CAAC,WAAW,CAAC,EAAE,SAAS;AAAA,QACrC,SAASA,GAAE,OAAO,EAAE,SAAS,EAAE,SAAS;AAAA,QACxC,YAAYA,GACT;AAAA,UACCA,GAAE,OAAO;AAAA,YACP,UAAUA,GAAE,OAAO,EAAE,MAAMA,GAAE,OAAO,GAAG,WAAWA,GAAE,OAAO,EAAE,CAAC;AAAA,UAChE,CAAC;AAAA,QACH,EACC,SAAS,EACT,SAAS;AAAA,MACd,CAAC;AAAA,MACD,eAAeA,GAAE,OAAO,EAAE,SAAS,EAAE,SAAS;AAAA,MAC9C,OAAOA,GAAE,OAAO;AAAA,IAClB,CAAC;AAAA,EACH;AAAA,E
ACA,OAAOA,GACJ,OAAO;AAAA,IACN,eAAeA,GAAE,OAAO;AAAA,IACxB,mBAAmBA,GAAE,OAAO;AAAA,EAC9B,CAAC,EACA,SAAS,EACT,SAAS;AACd,CAAC;;;ADpVM,IAAM,UAAN,MAAc;AAAA;AAAA;AAAA;AAAA,EAanB,YACE,UAiBI,CAAC,GACL;AA9CJ;AA+CI,SAAK,WACH,2BAAqB,aAAQ,YAAR,YAAmB,QAAQ,OAAO,MAAvD,YACA;AAEF,SAAK,SAAS,QAAQ;AACtB,SAAK,cAAa,aAAQ,eAAR,YAAsB;AAAA,EAC1C;AAAA,EAEA,IAAY,aAAa;AACvB,WAAO;AAAA,MACL,SAAS,KAAK;AAAA,MACd,SAAS,OAAO;AAAA,QACd,eAAe,UAAU,WAAW;AAAA,UAClC,QAAQ,KAAK;AAAA,UACb,yBAAyB;AAAA,UACzB,aAAa;AAAA,QACf,CAAC,CAAC;AAAA,MACJ;AAAA,IACF;AAAA,EACF;AAAA,EAEA,KAAK,SAA6B,WAAgC,CAAC,GAAG;AACpE,WAAO,IAAI,yBAAyB,SAAS,UAAU;AAAA,MACrD,UAAU;AAAA,MACV,GAAG,KAAK;AAAA,MACR,YAAY,KAAK;AAAA,IACnB,CAAC;AAAA,EACH;AACF;;;AKrDO,SAAS,cACd,UAiBI,CAAC,GACY;AACjB,QAAMC,WAAU,IAAI,QAAQ,OAAO;AAEnC,QAAM,WAAW,SACf,SACA,UACA;AACA,QAAI,YAAY;AACd,YAAM,IAAI;AAAA,QACR;AAAA,MACF;AAAA,IACF;AAEA,WAAOA,SAAQ,KAAK,SAAS,QAAQ;AAAA,EACvC;AAEA,WAAS,OAAOA,SAAQ,KAAK,KAAKA,QAAO;AAEzC,SAAO;AACT;AAKO,IAAM,UAAU,cAAc;","names":["UnsupportedFunctionalityError","z","UnsupportedFunctionalityError","generateId","z","mistral"]}
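The source map above embeds the new `mistral-provider.ts` source, which defines `createMistral` as a callable provider factory: the returned function creates chat models directly, also exposes a bound `.chat()` method, and throws when invoked with `new`. Below is a minimal usage sketch of that calling surface, not part of the diff; the model id and the `MISTRAL_API_KEY` fallback follow the facade source shown in the map, everything else is illustrative.

```ts
import { createMistral } from '@ai-sdk/mistral';

// Provider factory; when apiKey is omitted, the underlying facade resolves it
// from the MISTRAL_API_KEY environment variable via loadApiKey.
const provider = createMistral({
  // baseURL: 'https://api.mistral.ai/v1', // default when not set
});

// The provider is callable and also exposes a bound .chat() method;
// both paths return a MistralChatLanguageModel.
const model = provider('mistral-small-latest', { safePrompt: true });
const sameModel = provider.chat('mistral-small-latest');

// Guard in createMistral: constructing the provider with `new` throws.
// new (provider as any)('mistral-small-latest'); // -> Error
```

Internally, `createMistral` still instantiates the deprecated `Mistral` facade and forwards both call paths to `facade.chat()`, which keeps the new factory API backwards compatible with the existing class-based implementation.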
package/package.json CHANGED
@@ -1,6 +1,6 @@
1
1
  {
2
2
  "name": "@ai-sdk/mistral",
3
- "version": "0.0.1",
3
+ "version": "0.0.3",
4
4
  "license": "Apache-2.0",
5
5
  "sideEffects": false,
6
6
  "main": "./dist/index.js",