@ai-sdk/mistral 2.0.0-canary.9 → 2.0.1
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/CHANGELOG.md +437 -0
- package/README.md +2 -2
- package/dist/index.d.mts +5 -18
- package/dist/index.d.ts +5 -18
- package/dist/index.js +148 -132
- package/dist/index.js.map +1 -1
- package/dist/index.mjs +92 -76
- package/dist/index.mjs.map +1 -1
- package/package.json +6 -6
package/dist/index.js
CHANGED
@@ -31,7 +31,7 @@ var import_provider_utils4 = require("@ai-sdk/provider-utils");
 
 // src/mistral-chat-language-model.ts
 var import_provider_utils2 = require("@ai-sdk/provider-utils");
-var
+var import_v43 = require("zod/v4");
 
 // src/convert-to-mistral-chat-messages.ts
 var import_provider = require("@ai-sdk/provider");
@@ -91,7 +91,7 @@ function convertToMistralChatMessages(prompt) {
             type: "function",
             function: {
               name: part.toolName,
-              arguments: JSON.stringify(part.
+              arguments: JSON.stringify(part.input)
             }
           });
           break;
@@ -108,11 +108,24 @@ function convertToMistralChatMessages(prompt) {
       }
       case "tool": {
         for (const toolResponse of content) {
+          const output = toolResponse.output;
+          let contentValue;
+          switch (output.type) {
+            case "text":
+            case "error-text":
+              contentValue = output.value;
+              break;
+            case "content":
+            case "json":
+            case "error-json":
+              contentValue = JSON.stringify(output.value);
+              break;
+          }
           messages.push({
             role: "tool",
             name: toolResponse.toolName,
-
-
+            tool_call_id: toolResponse.toolCallId,
+            content: contentValue
           });
         }
         break;
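
The tool-message hunk above reworks how tool results are serialized: each tool response now carries a typed `output` object, and its `value` is either forwarded as-is (the text variants) or JSON-stringified (the structured variants) before it becomes the `content` of the `role: "tool"` message. A minimal TypeScript sketch of that flattening, using assumed type names rather than anything exported by @ai-sdk/mistral:

// Sketch only: this union mirrors the output.type cases handled above;
// it is not an exported type of @ai-sdk/mistral.
type ToolOutput =
  | { type: 'text' | 'error-text'; value: string }
  | { type: 'content' | 'json' | 'error-json'; value: unknown };

function flattenToolOutput(output: ToolOutput) {
  switch (output.type) {
    case 'text':
    case 'error-text':
      return output.value; // plain strings are forwarded unchanged
    case 'content':
    case 'json':
    case 'error-json':
      return JSON.stringify(output.value); // structured results are serialized
  }
}

// Hypothetical example: a JSON tool result becomes the string content of the
// role: "tool" message pushed by convertToMistralChatMessages.
// flattenToolOutput({ type: 'json', value: { tempC: 21 } }) === '{"tempC":21}'
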
@@ -155,27 +168,27 @@ function mapMistralFinishReason(finishReason) {
 }
 
 // src/mistral-chat-options.ts
-var
-var mistralProviderOptions =
+var import_v4 = require("zod/v4");
+var mistralProviderOptions = import_v4.z.object({
   /**
   Whether to inject a safety prompt before all conversations.
 
   Defaults to `false`.
    */
-  safePrompt:
-  documentImageLimit:
-  documentPageLimit:
+  safePrompt: import_v4.z.boolean().optional(),
+  documentImageLimit: import_v4.z.number().optional(),
+  documentPageLimit: import_v4.z.number().optional()
 });
 
 // src/mistral-error.ts
 var import_provider_utils = require("@ai-sdk/provider-utils");
-var
-var mistralErrorDataSchema =
-  object:
-  message:
-  type:
-  param:
-  code:
+var import_v42 = require("zod/v4");
+var mistralErrorDataSchema = import_v42.z.object({
+  object: import_v42.z.literal("error"),
+  message: import_v42.z.string(),
+  type: import_v42.z.string(),
+  param: import_v42.z.string().nullable(),
+  code: import_v42.z.string().nullable()
 });
 var mistralFailedResponseHandler = (0, import_provider_utils.createJsonErrorResponseHandler)({
   errorSchema: mistralErrorDataSchema,
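
The provider-options hunk above switches the schema to the zod/v4 entry point while keeping the same keys (`safePrompt`, `documentImageLimit`, `documentPageLimit`). A hedged sketch of passing these through the AI SDK's `providerOptions` bag; the `generateText` call shape is standard AI SDK usage, but the model id and prompt are placeholders:

import { generateText } from 'ai';
import { mistral } from '@ai-sdk/mistral';

// Sketch only: option names come from the mistralProviderOptions schema above.
const { text } = await generateText({
  model: mistral('mistral-small-latest'),
  prompt: 'Summarize the attached document.',
  providerOptions: {
    mistral: {
      safePrompt: true,       // inject Mistral's safety prompt (defaults to false)
      documentPageLimit: 10,  // cap document pages processed
    },
  },
});
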
@@ -203,7 +216,7 @@ function prepareTools({
       function: {
         name: tool.name,
         description: tool.description,
-        parameters: tool.
+        parameters: tool.inputSchema
       }
     });
   }
@@ -239,18 +252,16 @@ function prepareTools({
 var MistralChatLanguageModel = class {
   constructor(modelId, config) {
     this.specificationVersion = "v2";
+    this.supportedUrls = {
+      "application/pdf": [/^https:\/\/.*$/]
+    };
     this.modelId = modelId;
     this.config = config;
   }
   get provider() {
     return this.config.provider;
   }
-  async
-    return {
-      "application/pdf": [/^https:\/\/.*$/]
-    };
-  }
-  getArgs({
+  async getArgs({
     prompt,
     maxOutputTokens,
     temperature,
@@ -267,7 +278,7 @@ var MistralChatLanguageModel = class {
   }) {
     var _a;
     const warnings = [];
-    const options = (_a = (0, import_provider_utils2.parseProviderOptions)({
+    const options = (_a = await (0, import_provider_utils2.parseProviderOptions)({
       provider: "mistral",
       providerOptions,
       schema: mistralProviderOptions
@@ -339,7 +350,7 @@ var MistralChatLanguageModel = class {
     };
   }
   async doGenerate(options) {
-    const { args: body, warnings } = this.getArgs(options);
+    const { args: body, warnings } = await this.getArgs(options);
     const {
       responseHeaders,
       value: response,
@@ -369,10 +380,9 @@ var MistralChatLanguageModel = class {
       for (const toolCall of choice.message.tool_calls) {
         content.push({
           type: "tool-call",
-          toolCallType: "function",
           toolCallId: toolCall.id,
           toolName: toolCall.function.name,
-
+          input: toolCall.function.arguments
         });
       }
     }
@@ -381,7 +391,8 @@ var MistralChatLanguageModel = class {
       finishReason: mapMistralFinishReason(choice.finish_reason),
       usage: {
         inputTokens: response.usage.prompt_tokens,
-        outputTokens: response.usage.completion_tokens
+        outputTokens: response.usage.completion_tokens,
+        totalTokens: response.usage.total_tokens
       },
       request: { body },
       response: {
@@ -393,7 +404,7 @@ var MistralChatLanguageModel = class {
     };
   }
   async doStream(options) {
-    const { args, warnings } = this.getArgs(options);
+    const { args, warnings } = await this.getArgs(options);
     const body = { ...args, stream: true };
     const { responseHeaders, value: response } = await (0, import_provider_utils2.postJsonToApi)({
       url: `${this.config.baseURL}/chat/completions`,
@@ -409,10 +420,11 @@ var MistralChatLanguageModel = class {
     let finishReason = "unknown";
     const usage = {
       inputTokens: void 0,
-      outputTokens: void 0
+      outputTokens: void 0,
+      totalTokens: void 0
     };
-    let
-    let
+    let isFirstChunk = true;
+    let activeText = false;
     return {
       stream: response.pipeThrough(
         new TransformStream({
@@ -420,13 +432,16 @@ var MistralChatLanguageModel = class {
            controller.enqueue({ type: "stream-start", warnings });
          },
          transform(chunk, controller) {
+            if (options.includeRawChunks) {
+              controller.enqueue({ type: "raw", rawValue: chunk.rawValue });
+            }
            if (!chunk.success) {
              controller.enqueue({ type: "error", error: chunk.error });
              return;
            }
-            chunkNumber++;
            const value = chunk.value;
-            if (
+            if (isFirstChunk) {
+              isFirstChunk = false;
              controller.enqueue({
                type: "response-metadata",
                ...getResponseMetadata(value)
@@ -435,53 +450,62 @@ var MistralChatLanguageModel = class {
            if (value.usage != null) {
              usage.inputTokens = value.usage.prompt_tokens;
              usage.outputTokens = value.usage.completion_tokens;
+              usage.totalTokens = value.usage.total_tokens;
            }
            const choice = value.choices[0];
-            if ((choice == null ? void 0 : choice.finish_reason) != null) {
-              finishReason = mapMistralFinishReason(choice.finish_reason);
-            }
-            if ((choice == null ? void 0 : choice.delta) == null) {
-              return;
-            }
            const delta = choice.delta;
            const textContent = extractTextContent(delta.content);
-            if (
-
-
-
-              trimLeadingSpace = true;
-            }
-            return;
+            if (textContent != null && textContent.length > 0) {
+              if (!activeText) {
+                controller.enqueue({ type: "text-start", id: "0" });
+                activeText = true;
              }
-            }
-            if (textContent != null) {
              controller.enqueue({
-              type: "text",
-
+                type: "text-delta",
+                id: "0",
+                delta: textContent
              });
-              trimLeadingSpace = false;
            }
-            if (delta.tool_calls != null) {
+            if ((delta == null ? void 0 : delta.tool_calls) != null) {
              for (const toolCall of delta.tool_calls) {
+                const toolCallId = toolCall.id;
+                const toolName = toolCall.function.name;
+                const input = toolCall.function.arguments;
+                controller.enqueue({
+                  type: "tool-input-start",
+                  id: toolCallId,
+                  toolName
+                });
                controller.enqueue({
-                type: "tool-
-
-
-
-
+                  type: "tool-input-delta",
+                  id: toolCallId,
+                  delta: input
+                });
+                controller.enqueue({
+                  type: "tool-input-end",
+                  id: toolCallId
                });
                controller.enqueue({
                  type: "tool-call",
-
-
-
-                  args: toolCall.function.arguments
+                  toolCallId,
+                  toolName,
+                  input
                });
              }
            }
+            if (choice.finish_reason != null) {
+              finishReason = mapMistralFinishReason(choice.finish_reason);
+            }
          },
          flush(controller) {
-
+            if (activeText) {
+              controller.enqueue({ type: "text-end", id: "0" });
+            }
+            controller.enqueue({
+              type: "finish",
+              finishReason,
+              usage
+            });
          }
        })
      ),
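
The streaming hunks above replace the single `text` part with explicit lifecycle parts (`text-start` / `text-delta` / `text-end`), wrap each tool call in `tool-input-start` / `tool-input-delta` / `tool-input-end` before the final `tool-call`, and report `totalTokens` in the `finish` part. A rough sketch of consuming such a v2 stream; the part shapes below are trimmed to the fields used here and are assumptions, not the full specification types:

// Assumed, simplified stream part shapes; only the fields this sketch reads.
type StreamPart =
  | { type: 'stream-start'; warnings: unknown[] }
  | { type: 'response-metadata'; id?: string; modelId?: string }
  | { type: 'text-start' | 'text-end'; id: string }
  | { type: 'text-delta'; id: string; delta: string }
  | { type: 'tool-input-start'; id: string; toolName: string }
  | { type: 'tool-input-delta'; id: string; delta: string }
  | { type: 'tool-input-end'; id: string }
  | { type: 'tool-call'; toolCallId: string; toolName: string; input: string }
  | { type: 'finish'; finishReason: string;
      usage: { inputTokens?: number; outputTokens?: number; totalTokens?: number } };

async function readStream(parts: AsyncIterable<StreamPart>): Promise<string> {
  let text = '';
  for await (const part of parts) {
    switch (part.type) {
      case 'text-delta':
        text += part.delta;                        // all text deltas share id "0" above
        break;
      case 'tool-call':
        console.log('tool call', part.toolName, JSON.parse(part.input));
        break;
      case 'finish':
        console.log('usage', part.usage);          // now includes totalTokens
        break;
    }
  }
  return text;
}
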
@@ -515,105 +539,97 @@ function extractTextContent(content) {
   }
   return textContent.length ? textContent.join("") : void 0;
 }
-var mistralContentSchema =
-
-
-
-
-  type:
-  text:
+var mistralContentSchema = import_v43.z.union([
+  import_v43.z.string(),
+  import_v43.z.array(
+    import_v43.z.discriminatedUnion("type", [
+      import_v43.z.object({
+        type: import_v43.z.literal("text"),
+        text: import_v43.z.string()
       }),
-
-      type:
-      image_url:
-
-
-        url:
-        detail:
+      import_v43.z.object({
+        type: import_v43.z.literal("image_url"),
+        image_url: import_v43.z.union([
+          import_v43.z.string(),
+          import_v43.z.object({
+            url: import_v43.z.string(),
+            detail: import_v43.z.string().nullable()
          })
        ])
      }),
-
-      type:
-      reference_ids:
+      import_v43.z.object({
+        type: import_v43.z.literal("reference"),
+        reference_ids: import_v43.z.array(import_v43.z.number())
      })
    ])
  )
 ]).nullish();
-var
-
-
-
-
-
-
-
+var mistralUsageSchema = import_v43.z.object({
+  prompt_tokens: import_v43.z.number(),
+  completion_tokens: import_v43.z.number(),
+  total_tokens: import_v43.z.number()
+});
+var mistralChatResponseSchema = import_v43.z.object({
+  id: import_v43.z.string().nullish(),
+  created: import_v43.z.number().nullish(),
+  model: import_v43.z.string().nullish(),
+  choices: import_v43.z.array(
+    import_v43.z.object({
+      message: import_v43.z.object({
+        role: import_v43.z.literal("assistant"),
        content: mistralContentSchema,
-        tool_calls:
-
-          id:
-          function:
+        tool_calls: import_v43.z.array(
+          import_v43.z.object({
+            id: import_v43.z.string(),
+            function: import_v43.z.object({ name: import_v43.z.string(), arguments: import_v43.z.string() })
          })
        ).nullish()
      }),
-      index:
-      finish_reason:
+      index: import_v43.z.number(),
+      finish_reason: import_v43.z.string().nullish()
    })
  ),
-  object:
-  usage:
-    prompt_tokens: import_zod3.z.number(),
-    completion_tokens: import_zod3.z.number()
-  })
+  object: import_v43.z.literal("chat.completion"),
+  usage: mistralUsageSchema
 });
-var mistralChatChunkSchema =
-  id:
-  created:
-  model:
-  choices:
-
-    delta:
-      role:
+var mistralChatChunkSchema = import_v43.z.object({
+  id: import_v43.z.string().nullish(),
+  created: import_v43.z.number().nullish(),
+  model: import_v43.z.string().nullish(),
+  choices: import_v43.z.array(
+    import_v43.z.object({
+      delta: import_v43.z.object({
+        role: import_v43.z.enum(["assistant"]).optional(),
        content: mistralContentSchema,
-        tool_calls:
-
-          id:
-          function:
+        tool_calls: import_v43.z.array(
+          import_v43.z.object({
+            id: import_v43.z.string(),
+            function: import_v43.z.object({ name: import_v43.z.string(), arguments: import_v43.z.string() })
          })
        ).nullish()
      }),
-      finish_reason:
-      index:
+      finish_reason: import_v43.z.string().nullish(),
+      index: import_v43.z.number()
    })
  ),
-  usage:
-    prompt_tokens: import_zod3.z.number(),
-    completion_tokens: import_zod3.z.number()
-  }).nullish()
+  usage: mistralUsageSchema.nullish()
 });
 
 // src/mistral-embedding-model.ts
 var import_provider3 = require("@ai-sdk/provider");
 var import_provider_utils3 = require("@ai-sdk/provider-utils");
-var
+var import_v44 = require("zod/v4");
 var MistralEmbeddingModel = class {
-  constructor(modelId,
+  constructor(modelId, config) {
     this.specificationVersion = "v2";
+    this.maxEmbeddingsPerCall = 32;
+    this.supportsParallelCalls = false;
    this.modelId = modelId;
-    this.settings = settings;
    this.config = config;
  }
  get provider() {
    return this.config.provider;
  }
-  get maxEmbeddingsPerCall() {
-    var _a;
-    return (_a = this.settings.maxEmbeddingsPerCall) != null ? _a : 32;
-  }
-  get supportsParallelCalls() {
-    var _a;
-    return (_a = this.settings.supportsParallelCalls) != null ? _a : false;
-  }
  async doEmbed({
    values,
    abortSignal,
@@ -653,9 +669,9 @@ var MistralEmbeddingModel = class {
     };
   }
 };
-var MistralTextEmbeddingResponseSchema =
-  data:
-  usage:
+var MistralTextEmbeddingResponseSchema = import_v44.z.object({
+  data: import_v44.z.array(import_v44.z.object({ embedding: import_v44.z.array(import_v44.z.number()) })),
+  usage: import_v44.z.object({ prompt_tokens: import_v44.z.number() }).nullish()
 });
 
 // src/mistral-provider.ts
@@ -676,7 +692,7 @@ function createMistral(options = {}) {
     headers: getHeaders,
     fetch: options.fetch
   });
-  const createEmbeddingModel = (modelId
+  const createEmbeddingModel = (modelId) => new MistralEmbeddingModel(modelId, {
     provider: "mistral.embedding",
     baseURL,
     headers: getHeaders,
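
The embedding-model hunks remove the per-instance settings object: `maxEmbeddingsPerCall` is now fixed at 32, `supportsParallelCalls` at false, and the provider's `createEmbeddingModel` factory takes only a model id. A small illustrative helper (not part of the package) showing how callers might batch inputs against such a fixed limit:

// Illustrative only: split inputs into batches no larger than the model's
// fixed maxEmbeddingsPerCall (32 for MistralEmbeddingModel after this change).
function toBatches<T>(values: T[], maxPerCall: number): T[][] {
  const batches: T[][] = [];
  for (let i = 0; i < values.length; i += maxPerCall) {
    batches.push(values.slice(i, i + maxPerCall));
  }
  return batches;
}

// toBatches(new Array(70).fill('chunk'), 32).map(b => b.length) // => [32, 32, 6]
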