ai 5.0.9 → 5.0.11
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/CHANGELOG.md +17 -0
- package/dist/index.d.mts +56 -30
- package/dist/index.d.ts +56 -30
- package/dist/index.js +383 -317
- package/dist/index.js.map +1 -1
- package/dist/index.mjs +329 -262
- package/dist/index.mjs.map +1 -1
- package/package.json +3 -3
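
The most visible behavioral change in this range is the new `NoOutputGeneratedError` export: when a `streamText` stream finishes without recording any steps, the result's `steps`, `totalUsage`, and `finishReason` promises are now rejected with that error (and those getters now consume the stream). A minimal consumer-side sketch under that assumption — the `model` declaration and prompt below are placeholders, not part of this diff:

```ts
import { streamText, NoOutputGeneratedError } from "ai";

// Assumed: `model` is any AI SDK 5 ("v2" specification) language model instance.
declare const model: Parameters<typeof streamText>[0]["model"];

async function main() {
  const result = streamText({ model, prompt: "Write a haiku about diffs." });
  try {
    // As of this version range, these promises reject with NoOutputGeneratedError
    // when the stream produced no steps (e.g. the underlying provider stream errored).
    console.log(await result.totalUsage);
  } catch (error) {
    if (NoOutputGeneratedError.isInstance(error)) {
      console.error(error.message); // "No output generated. Check the stream for errors."
    } else {
      throw error;
    }
  }
}

main();
```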
package/dist/index.js
CHANGED
@@ -4,8 +4,8 @@ var __getOwnPropDesc = Object.getOwnPropertyDescriptor;
 var __getOwnPropNames = Object.getOwnPropertyNames;
 var __hasOwnProp = Object.prototype.hasOwnProperty;
 var __export = (target, all) => {
-for (var
-__defProp(target,
+for (var name17 in all)
+__defProp(target, name17, { get: all[name17], enumerable: true });
 };
 var __copyProps = (to, from, except, desc) => {
 if (from && typeof from === "object" || typeof from === "function") {
@@ -20,31 +20,32 @@ var __toCommonJS = (mod) => __copyProps(__defProp({}, "__esModule", { value: tru
 // src/index.ts
 var src_exports = {};
 __export(src_exports, {
-AISDKError: () =>
-APICallError: () =>
+AISDKError: () => import_provider17.AISDKError,
+APICallError: () => import_provider17.APICallError,
 AbstractChat: () => AbstractChat,
 DefaultChatTransport: () => DefaultChatTransport,
 DownloadError: () => DownloadError,
-EmptyResponseBodyError: () =>
+EmptyResponseBodyError: () => import_provider17.EmptyResponseBodyError,
 Experimental_Agent: () => Agent,
 HttpChatTransport: () => HttpChatTransport,
 InvalidArgumentError: () => InvalidArgumentError,
 InvalidDataContentError: () => InvalidDataContentError,
 InvalidMessageRoleError: () => InvalidMessageRoleError,
-InvalidPromptError: () =>
-InvalidResponseDataError: () =>
+InvalidPromptError: () => import_provider17.InvalidPromptError,
+InvalidResponseDataError: () => import_provider17.InvalidResponseDataError,
 InvalidStreamPartError: () => InvalidStreamPartError,
 InvalidToolInputError: () => InvalidToolInputError,
-JSONParseError: () =>
+JSONParseError: () => import_provider17.JSONParseError,
 JsonToSseTransformStream: () => JsonToSseTransformStream,
-LoadAPIKeyError: () =>
+LoadAPIKeyError: () => import_provider17.LoadAPIKeyError,
 MCPClientError: () => MCPClientError,
 MessageConversionError: () => MessageConversionError,
-NoContentGeneratedError: () =>
+NoContentGeneratedError: () => import_provider17.NoContentGeneratedError,
 NoImageGeneratedError: () => NoImageGeneratedError,
 NoObjectGeneratedError: () => NoObjectGeneratedError,
+NoOutputGeneratedError: () => NoOutputGeneratedError,
 NoOutputSpecifiedError: () => NoOutputSpecifiedError,
-NoSuchModelError: () =>
+NoSuchModelError: () => import_provider17.NoSuchModelError,
 NoSuchProviderError: () => NoSuchProviderError,
 NoSuchToolError: () => NoSuchToolError,
 Output: () => output_exports,
@@ -52,9 +53,9 @@ __export(src_exports, {
 SerialJobExecutor: () => SerialJobExecutor,
 TextStreamChatTransport: () => TextStreamChatTransport,
 ToolCallRepairError: () => ToolCallRepairError,
-TypeValidationError: () =>
+TypeValidationError: () => import_provider17.TypeValidationError,
 UI_MESSAGE_STREAM_HEADERS: () => UI_MESSAGE_STREAM_HEADERS,
-UnsupportedFunctionalityError: () =>
+UnsupportedFunctionalityError: () => import_provider17.UnsupportedFunctionalityError,
 UnsupportedModelVersionError: () => UnsupportedModelVersionError,
 asSchema: () => import_provider_utils28.asSchema,
 assistantModelMessageSchema: () => assistantModelMessageSchema,
@@ -143,7 +144,7 @@ _a = symbol;
 var import_gateway = require("@ai-sdk/gateway");
 
 // src/error/index.ts
-var
+var import_provider17 = require("@ai-sdk/provider");
 
 // src/error/invalid-argument-error.ts
 var import_provider2 = require("@ai-sdk/provider");
@@ -224,11 +225,11 @@ var symbol5 = Symbol.for(marker5);
|
|
224
225
|
var _a5;
|
225
226
|
var MCPClientError = class extends import_provider5.AISDKError {
|
226
227
|
constructor({
|
227
|
-
name:
|
228
|
+
name: name17 = "MCPClientError",
|
228
229
|
message,
|
229
230
|
cause
|
230
231
|
}) {
|
231
|
-
super({ name:
|
232
|
+
super({ name: name17, message, cause });
|
232
233
|
this[_a5] = true;
|
233
234
|
}
|
234
235
|
static isInstance(error) {
|
@@ -287,22 +288,20 @@ var NoObjectGeneratedError = class extends import_provider7.AISDKError {
 };
 _a7 = symbol7;
 
-// src/error/no-
+// src/error/no-output-generated-error.ts
 var import_provider8 = require("@ai-sdk/provider");
-var name8 = "
+var name8 = "AI_NoOutputGeneratedError";
 var marker8 = `vercel.ai.error.${name8}`;
 var symbol8 = Symbol.for(marker8);
 var _a8;
-var
+var NoOutputGeneratedError = class extends import_provider8.AISDKError {
+// used in isInstance
 constructor({
-
-
-
-
-super({ name: name8, message });
+message = "No output generated.",
+cause
+} = {}) {
+super({ name: name8, message, cause });
 this[_a8] = true;
-this.toolName = toolName;
-this.availableTools = availableTools;
 }
 static isInstance(error) {
 return import_provider8.AISDKError.hasMarker(error, marker8);
@@ -310,21 +309,22 @@ var NoSuchToolError = class extends import_provider8.AISDKError {
 };
 _a8 = symbol8;
 
-// src/error/
+// src/error/no-such-tool-error.ts
 var import_provider9 = require("@ai-sdk/provider");
-var name9 = "
+var name9 = "AI_NoSuchToolError";
 var marker9 = `vercel.ai.error.${name9}`;
 var symbol9 = Symbol.for(marker9);
 var _a9;
-var
+var NoSuchToolError = class extends import_provider9.AISDKError {
 constructor({
-
-
-message = `
+toolName,
+availableTools = void 0,
+message = `Model tried to call unavailable tool '${toolName}'. ${availableTools === void 0 ? "No tools are available." : `Available tools: ${availableTools.join(", ")}.`}`
 }) {
-super({ name: name9, message
+super({ name: name9, message });
 this[_a9] = true;
-this.
+this.toolName = toolName;
+this.availableTools = availableTools;
 }
 static isInstance(error) {
 return import_provider9.AISDKError.hasMarker(error, marker9);
@@ -332,56 +332,57 @@ var ToolCallRepairError = class extends import_provider9.AISDKError {
|
|
332
332
|
};
|
333
333
|
_a9 = symbol9;
|
334
334
|
|
335
|
-
// src/error/
|
335
|
+
// src/error/tool-call-repair-error.ts
|
336
336
|
var import_provider10 = require("@ai-sdk/provider");
|
337
|
-
var
|
338
|
-
constructor(options) {
|
339
|
-
super({
|
340
|
-
name: "AI_UnsupportedModelVersionError",
|
341
|
-
message: `Unsupported model version ${options.version} for provider "${options.provider}" and model "${options.modelId}". AI SDK 5 only supports models that implement specification version "v2".`
|
342
|
-
});
|
343
|
-
this.version = options.version;
|
344
|
-
this.provider = options.provider;
|
345
|
-
this.modelId = options.modelId;
|
346
|
-
}
|
347
|
-
};
|
348
|
-
|
349
|
-
// src/prompt/invalid-data-content-error.ts
|
350
|
-
var import_provider11 = require("@ai-sdk/provider");
|
351
|
-
var name10 = "AI_InvalidDataContentError";
|
337
|
+
var name10 = "AI_ToolCallRepairError";
|
352
338
|
var marker10 = `vercel.ai.error.${name10}`;
|
353
339
|
var symbol10 = Symbol.for(marker10);
|
354
340
|
var _a10;
|
355
|
-
var
|
341
|
+
var ToolCallRepairError = class extends import_provider10.AISDKError {
|
356
342
|
constructor({
|
357
|
-
content,
|
358
343
|
cause,
|
359
|
-
|
344
|
+
originalError,
|
345
|
+
message = `Error repairing tool call: ${(0, import_provider10.getErrorMessage)(cause)}`
|
360
346
|
}) {
|
361
347
|
super({ name: name10, message, cause });
|
362
348
|
this[_a10] = true;
|
363
|
-
this.
|
349
|
+
this.originalError = originalError;
|
364
350
|
}
|
365
351
|
static isInstance(error) {
|
366
|
-
return
|
352
|
+
return import_provider10.AISDKError.hasMarker(error, marker10);
|
367
353
|
}
|
368
354
|
};
|
369
355
|
_a10 = symbol10;
|
370
356
|
|
371
|
-
// src/
|
357
|
+
// src/error/unsupported-model-version-error.ts
|
358
|
+
var import_provider11 = require("@ai-sdk/provider");
|
359
|
+
var UnsupportedModelVersionError = class extends import_provider11.AISDKError {
|
360
|
+
constructor(options) {
|
361
|
+
super({
|
362
|
+
name: "AI_UnsupportedModelVersionError",
|
363
|
+
message: `Unsupported model version ${options.version} for provider "${options.provider}" and model "${options.modelId}". AI SDK 5 only supports models that implement specification version "v2".`
|
364
|
+
});
|
365
|
+
this.version = options.version;
|
366
|
+
this.provider = options.provider;
|
367
|
+
this.modelId = options.modelId;
|
368
|
+
}
|
369
|
+
};
|
370
|
+
|
371
|
+
// src/prompt/invalid-data-content-error.ts
|
372
372
|
var import_provider12 = require("@ai-sdk/provider");
|
373
|
-
var name11 = "
|
373
|
+
var name11 = "AI_InvalidDataContentError";
|
374
374
|
var marker11 = `vercel.ai.error.${name11}`;
|
375
375
|
var symbol11 = Symbol.for(marker11);
|
376
376
|
var _a11;
|
377
|
-
var
|
377
|
+
var InvalidDataContentError = class extends import_provider12.AISDKError {
|
378
378
|
constructor({
|
379
|
-
|
380
|
-
|
379
|
+
content,
|
380
|
+
cause,
|
381
|
+
message = `Invalid data content. Expected a base64 string, Uint8Array, ArrayBuffer, or Buffer, but got ${typeof content}.`
|
381
382
|
}) {
|
382
|
-
super({ name: name11, message });
|
383
|
+
super({ name: name11, message, cause });
|
383
384
|
this[_a11] = true;
|
384
|
-
this.
|
385
|
+
this.content = content;
|
385
386
|
}
|
386
387
|
static isInstance(error) {
|
387
388
|
return import_provider12.AISDKError.hasMarker(error, marker11);
|
@@ -389,20 +390,20 @@ var InvalidMessageRoleError = class extends import_provider12.AISDKError {
|
|
389
390
|
};
|
390
391
|
_a11 = symbol11;
|
391
392
|
|
392
|
-
// src/prompt/message-
|
393
|
+
// src/prompt/invalid-message-role-error.ts
|
393
394
|
var import_provider13 = require("@ai-sdk/provider");
|
394
|
-
var name12 = "
|
395
|
+
var name12 = "AI_InvalidMessageRoleError";
|
395
396
|
var marker12 = `vercel.ai.error.${name12}`;
|
396
397
|
var symbol12 = Symbol.for(marker12);
|
397
398
|
var _a12;
|
398
|
-
var
|
399
|
+
var InvalidMessageRoleError = class extends import_provider13.AISDKError {
|
399
400
|
constructor({
|
400
|
-
|
401
|
-
message
|
401
|
+
role,
|
402
|
+
message = `Invalid message role: '${role}'. Must be one of: "system", "user", "assistant", "tool".`
|
402
403
|
}) {
|
403
404
|
super({ name: name12, message });
|
404
405
|
this[_a12] = true;
|
405
|
-
this.
|
406
|
+
this.role = role;
|
406
407
|
}
|
407
408
|
static isInstance(error) {
|
408
409
|
return import_provider13.AISDKError.hasMarker(error, marker12);
|
@@ -410,13 +411,34 @@ var MessageConversionError = class extends import_provider13.AISDKError {
|
|
410
411
|
};
|
411
412
|
_a12 = symbol12;
|
412
413
|
|
413
|
-
// src/
|
414
|
+
// src/prompt/message-conversion-error.ts
|
414
415
|
var import_provider14 = require("@ai-sdk/provider");
|
415
|
-
var name13 = "
|
416
|
+
var name13 = "AI_MessageConversionError";
|
416
417
|
var marker13 = `vercel.ai.error.${name13}`;
|
417
418
|
var symbol13 = Symbol.for(marker13);
|
418
419
|
var _a13;
|
419
|
-
var
|
420
|
+
var MessageConversionError = class extends import_provider14.AISDKError {
|
421
|
+
constructor({
|
422
|
+
originalMessage,
|
423
|
+
message
|
424
|
+
}) {
|
425
|
+
super({ name: name13, message });
|
426
|
+
this[_a13] = true;
|
427
|
+
this.originalMessage = originalMessage;
|
428
|
+
}
|
429
|
+
static isInstance(error) {
|
430
|
+
return import_provider14.AISDKError.hasMarker(error, marker13);
|
431
|
+
}
|
432
|
+
};
|
433
|
+
_a13 = symbol13;
|
434
|
+
|
435
|
+
// src/util/download-error.ts
|
436
|
+
var import_provider15 = require("@ai-sdk/provider");
|
437
|
+
var name14 = "AI_DownloadError";
|
438
|
+
var marker14 = `vercel.ai.error.${name14}`;
|
439
|
+
var symbol14 = Symbol.for(marker14);
|
440
|
+
var _a14;
|
441
|
+
var DownloadError = class extends import_provider15.AISDKError {
|
420
442
|
constructor({
|
421
443
|
url,
|
422
444
|
statusCode,
|
@@ -424,41 +446,41 @@ var DownloadError = class extends import_provider14.AISDKError {
|
|
424
446
|
cause,
|
425
447
|
message = cause == null ? `Failed to download ${url}: ${statusCode} ${statusText}` : `Failed to download ${url}: ${cause}`
|
426
448
|
}) {
|
427
|
-
super({ name:
|
428
|
-
this[
|
449
|
+
super({ name: name14, message, cause });
|
450
|
+
this[_a14] = true;
|
429
451
|
this.url = url;
|
430
452
|
this.statusCode = statusCode;
|
431
453
|
this.statusText = statusText;
|
432
454
|
}
|
433
455
|
static isInstance(error) {
|
434
|
-
return
|
456
|
+
return import_provider15.AISDKError.hasMarker(error, marker14);
|
435
457
|
}
|
436
458
|
};
|
437
|
-
|
459
|
+
_a14 = symbol14;
|
438
460
|
|
439
461
|
// src/util/retry-error.ts
|
440
|
-
var
|
441
|
-
var
|
442
|
-
var
|
443
|
-
var
|
444
|
-
var
|
445
|
-
var RetryError = class extends
|
462
|
+
var import_provider16 = require("@ai-sdk/provider");
|
463
|
+
var name15 = "AI_RetryError";
|
464
|
+
var marker15 = `vercel.ai.error.${name15}`;
|
465
|
+
var symbol15 = Symbol.for(marker15);
|
466
|
+
var _a15;
|
467
|
+
var RetryError = class extends import_provider16.AISDKError {
|
446
468
|
constructor({
|
447
469
|
message,
|
448
470
|
reason,
|
449
471
|
errors
|
450
472
|
}) {
|
451
|
-
super({ name:
|
452
|
-
this[
|
473
|
+
super({ name: name15, message });
|
474
|
+
this[_a15] = true;
|
453
475
|
this.reason = reason;
|
454
476
|
this.errors = errors;
|
455
477
|
this.lastError = errors[errors.length - 1];
|
456
478
|
}
|
457
479
|
static isInstance(error) {
|
458
|
-
return
|
480
|
+
return import_provider16.AISDKError.hasMarker(error, marker15);
|
459
481
|
}
|
460
482
|
};
|
461
|
-
|
483
|
+
_a15 = symbol15;
|
462
484
|
|
463
485
|
// src/model/resolve-model.ts
|
464
486
|
function resolveLanguageModel(model) {
|
@@ -490,8 +512,8 @@ function resolveEmbeddingModel(model) {
|
|
490
512
|
);
|
491
513
|
}
|
492
514
|
function getGlobalProvider() {
|
493
|
-
var
|
494
|
-
return (
|
515
|
+
var _a17;
|
516
|
+
return (_a17 = globalThis.AI_SDK_DEFAULT_PROVIDER) != null ? _a17 : import_gateway.gateway;
|
495
517
|
}
|
496
518
|
|
497
519
|
// src/prompt/convert-to-language-model-prompt.ts
|
@@ -637,7 +659,7 @@ function detectMediaType({
|
|
637
659
|
|
638
660
|
// src/util/download.ts
|
639
661
|
async function download({ url }) {
|
640
|
-
var
|
662
|
+
var _a17;
|
641
663
|
const urlText = url.toString();
|
642
664
|
try {
|
643
665
|
const response = await fetch(urlText);
|
@@ -650,7 +672,7 @@ async function download({ url }) {
|
|
650
672
|
}
|
651
673
|
return {
|
652
674
|
data: new Uint8Array(await response.arrayBuffer()),
|
653
|
-
mediaType: (
|
675
|
+
mediaType: (_a17 = response.headers.get("content-type")) != null ? _a17 : void 0
|
654
676
|
};
|
655
677
|
} catch (error) {
|
656
678
|
if (DownloadError.isInstance(error)) {
|
@@ -661,7 +683,7 @@ async function download({ url }) {
|
|
661
683
|
}
|
662
684
|
|
663
685
|
// src/prompt/data-content.ts
|
664
|
-
var
|
686
|
+
var import_provider18 = require("@ai-sdk/provider");
|
665
687
|
var import_provider_utils2 = require("@ai-sdk/provider-utils");
|
666
688
|
var import_v4 = require("zod/v4");
|
667
689
|
|
@@ -689,8 +711,8 @@ var dataContentSchema = import_v4.z.union([
|
|
689
711
|
import_v4.z.custom(
|
690
712
|
// Buffer might not be available in some environments such as CloudFlare:
|
691
713
|
(value) => {
|
692
|
-
var
|
693
|
-
return (_b = (
|
714
|
+
var _a17, _b;
|
715
|
+
return (_b = (_a17 = globalThis.Buffer) == null ? void 0 : _a17.isBuffer(value)) != null ? _b : false;
|
694
716
|
},
|
695
717
|
{ message: "Must be a Buffer" }
|
696
718
|
)
|
@@ -713,7 +735,7 @@ function convertToLanguageModelV2DataContent(content) {
|
|
713
735
|
content.toString()
|
714
736
|
);
|
715
737
|
if (dataUrlMediaType == null || base64Content == null) {
|
716
|
-
throw new
|
738
|
+
throw new import_provider18.AISDKError({
|
717
739
|
name: "InvalidDataContentError",
|
718
740
|
message: `Invalid data URL format in content ${content.toString()}`
|
719
741
|
});
|
@@ -888,8 +910,8 @@ async function downloadAssets(messages, downloadImplementation, supportedUrls) {
|
|
888
910
|
).flat().filter(
|
889
911
|
(part) => part.type === "image" || part.type === "file"
|
890
912
|
).map((part) => {
|
891
|
-
var
|
892
|
-
const mediaType = (
|
913
|
+
var _a17;
|
914
|
+
const mediaType = (_a17 = part.mediaType) != null ? _a17 : part.type === "image" ? "image/*" : void 0;
|
893
915
|
let data = part.type === "image" ? part.image : part.data;
|
894
916
|
if (typeof data === "string") {
|
895
917
|
try {
|
@@ -916,7 +938,7 @@ async function downloadAssets(messages, downloadImplementation, supportedUrls) {
|
|
916
938
|
);
|
917
939
|
}
|
918
940
|
function convertPartToLanguageModelPart(part, downloadedAssets) {
|
919
|
-
var
|
941
|
+
var _a17;
|
920
942
|
if (part.type === "text") {
|
921
943
|
return {
|
922
944
|
type: "text",
|
@@ -949,7 +971,7 @@ function convertPartToLanguageModelPart(part, downloadedAssets) {
|
|
949
971
|
switch (type) {
|
950
972
|
case "image": {
|
951
973
|
if (data instanceof Uint8Array || typeof data === "string") {
|
952
|
-
mediaType = (
|
974
|
+
mediaType = (_a17 = detectMediaType({ data, signatures: imageMediaTypeSignatures })) != null ? _a17 : mediaType;
|
953
975
|
}
|
954
976
|
return {
|
955
977
|
type: "file",
|
@@ -1089,10 +1111,10 @@ function prepareToolsAndToolChoice({
|
|
1089
1111
|
};
|
1090
1112
|
}
|
1091
1113
|
const filteredTools = activeTools != null ? Object.entries(tools).filter(
|
1092
|
-
([
|
1114
|
+
([name17]) => activeTools.includes(name17)
|
1093
1115
|
) : Object.entries(tools);
|
1094
1116
|
return {
|
1095
|
-
tools: filteredTools.map(([
|
1117
|
+
tools: filteredTools.map(([name17, tool3]) => {
|
1096
1118
|
const toolType = tool3.type;
|
1097
1119
|
switch (toolType) {
|
1098
1120
|
case void 0:
|
@@ -1100,7 +1122,7 @@ function prepareToolsAndToolChoice({
|
|
1100
1122
|
case "function":
|
1101
1123
|
return {
|
1102
1124
|
type: "function",
|
1103
|
-
name:
|
1125
|
+
name: name17,
|
1104
1126
|
description: tool3.description,
|
1105
1127
|
inputSchema: (0, import_provider_utils4.asSchema)(tool3.inputSchema).jsonSchema,
|
1106
1128
|
providerOptions: tool3.providerOptions
|
@@ -1108,7 +1130,7 @@ function prepareToolsAndToolChoice({
|
|
1108
1130
|
case "provider-defined":
|
1109
1131
|
return {
|
1110
1132
|
type: "provider-defined",
|
1111
|
-
name:
|
1133
|
+
name: name17,
|
1112
1134
|
id: tool3.id,
|
1113
1135
|
args: tool3.args
|
1114
1136
|
};
|
@@ -1123,7 +1145,7 @@ function prepareToolsAndToolChoice({
|
|
1123
1145
|
}
|
1124
1146
|
|
1125
1147
|
// src/prompt/standardize-prompt.ts
|
1126
|
-
var
|
1148
|
+
var import_provider19 = require("@ai-sdk/provider");
|
1127
1149
|
var import_provider_utils5 = require("@ai-sdk/provider-utils");
|
1128
1150
|
var import_v46 = require("zod/v4");
|
1129
1151
|
|
@@ -1279,19 +1301,19 @@ var coreMessageSchema = modelMessageSchema;
|
|
1279
1301
|
// src/prompt/standardize-prompt.ts
|
1280
1302
|
async function standardizePrompt(prompt) {
|
1281
1303
|
if (prompt.prompt == null && prompt.messages == null) {
|
1282
|
-
throw new
|
1304
|
+
throw new import_provider19.InvalidPromptError({
|
1283
1305
|
prompt,
|
1284
1306
|
message: "prompt or messages must be defined"
|
1285
1307
|
});
|
1286
1308
|
}
|
1287
1309
|
if (prompt.prompt != null && prompt.messages != null) {
|
1288
|
-
throw new
|
1310
|
+
throw new import_provider19.InvalidPromptError({
|
1289
1311
|
prompt,
|
1290
1312
|
message: "prompt and messages cannot be defined at the same time"
|
1291
1313
|
});
|
1292
1314
|
}
|
1293
1315
|
if (prompt.system != null && typeof prompt.system !== "string") {
|
1294
|
-
throw new
|
1316
|
+
throw new import_provider19.InvalidPromptError({
|
1295
1317
|
prompt,
|
1296
1318
|
message: "system must be a string"
|
1297
1319
|
});
|
@@ -1304,13 +1326,13 @@ async function standardizePrompt(prompt) {
|
|
1304
1326
|
} else if (prompt.messages != null) {
|
1305
1327
|
messages = prompt.messages;
|
1306
1328
|
} else {
|
1307
|
-
throw new
|
1329
|
+
throw new import_provider19.InvalidPromptError({
|
1308
1330
|
prompt,
|
1309
1331
|
message: "prompt or messages must be defined"
|
1310
1332
|
});
|
1311
1333
|
}
|
1312
1334
|
if (messages.length === 0) {
|
1313
|
-
throw new
|
1335
|
+
throw new import_provider19.InvalidPromptError({
|
1314
1336
|
prompt,
|
1315
1337
|
message: "messages must not be empty"
|
1316
1338
|
});
|
@@ -1320,7 +1342,7 @@ async function standardizePrompt(prompt) {
|
|
1320
1342
|
schema: import_v46.z.array(modelMessageSchema)
|
1321
1343
|
});
|
1322
1344
|
if (!validationResult.success) {
|
1323
|
-
throw new
|
1345
|
+
throw new import_provider19.InvalidPromptError({
|
1324
1346
|
prompt,
|
1325
1347
|
message: "The messages must be a ModelMessage[]. If you have passed a UIMessage[], you can use convertToModelMessages to convert them.",
|
1326
1348
|
cause: validationResult.error
|
@@ -1334,10 +1356,10 @@ async function standardizePrompt(prompt) {
|
|
1334
1356
|
|
1335
1357
|
// src/prompt/wrap-gateway-error.ts
|
1336
1358
|
var import_gateway2 = require("@ai-sdk/gateway");
|
1337
|
-
var
|
1359
|
+
var import_provider20 = require("@ai-sdk/provider");
|
1338
1360
|
function wrapGatewayError(error) {
|
1339
1361
|
if (import_gateway2.GatewayAuthenticationError.isInstance(error) || import_gateway2.GatewayModelNotFoundError.isInstance(error)) {
|
1340
|
-
return new
|
1362
|
+
return new import_provider20.AISDKError({
|
1341
1363
|
name: "GatewayError",
|
1342
1364
|
message: "Vercel AI Gateway access failed. If you want to use AI SDK providers directly, use the providers, e.g. @ai-sdk/openai, or register a different global default provider.",
|
1343
1365
|
cause: error
|
@@ -1368,7 +1390,7 @@ function getBaseTelemetryAttributes({
|
|
1368
1390
|
telemetry,
|
1369
1391
|
headers
|
1370
1392
|
}) {
|
1371
|
-
var
|
1393
|
+
var _a17;
|
1372
1394
|
return {
|
1373
1395
|
"ai.model.provider": model.provider,
|
1374
1396
|
"ai.model.id": model.modelId,
|
@@ -1378,7 +1400,7 @@ function getBaseTelemetryAttributes({
|
|
1378
1400
|
return attributes;
|
1379
1401
|
}, {}),
|
1380
1402
|
// add metadata as attributes:
|
1381
|
-
...Object.entries((
|
1403
|
+
...Object.entries((_a17 = telemetry == null ? void 0 : telemetry.metadata) != null ? _a17 : {}).reduce(
|
1382
1404
|
(attributes, [key, value]) => {
|
1383
1405
|
attributes[`ai.telemetry.metadata.${key}`] = value;
|
1384
1406
|
return attributes;
|
@@ -1403,7 +1425,7 @@ var noopTracer = {
|
|
1403
1425
|
startSpan() {
|
1404
1426
|
return noopSpan;
|
1405
1427
|
},
|
1406
|
-
startActiveSpan(
|
1428
|
+
startActiveSpan(name17, arg1, arg2, arg3) {
|
1407
1429
|
if (typeof arg1 === "function") {
|
1408
1430
|
return arg1(noopSpan);
|
1409
1431
|
}
|
@@ -1473,13 +1495,13 @@ function getTracer({
|
|
1473
1495
|
// src/telemetry/record-span.ts
|
1474
1496
|
var import_api2 = require("@opentelemetry/api");
|
1475
1497
|
function recordSpan({
|
1476
|
-
name:
|
1498
|
+
name: name17,
|
1477
1499
|
tracer,
|
1478
1500
|
attributes,
|
1479
1501
|
fn,
|
1480
1502
|
endWhenDone = true
|
1481
1503
|
}) {
|
1482
|
-
return tracer.startActiveSpan(
|
1504
|
+
return tracer.startActiveSpan(name17, { attributes }, async (span) => {
|
1483
1505
|
try {
|
1484
1506
|
const result = await fn(span);
|
1485
1507
|
if (endWhenDone) {
|
@@ -1583,7 +1605,7 @@ function asArray(value) {
|
|
1583
1605
|
}
|
1584
1606
|
|
1585
1607
|
// src/util/retry-with-exponential-backoff.ts
|
1586
|
-
var
|
1608
|
+
var import_provider21 = require("@ai-sdk/provider");
|
1587
1609
|
var import_provider_utils6 = require("@ai-sdk/provider-utils");
|
1588
1610
|
function getRetryDelayInMs({
|
1589
1611
|
error,
|
@@ -1650,7 +1672,7 @@ async function _retryWithExponentialBackoff(f, {
|
|
1650
1672
|
errors: newErrors
|
1651
1673
|
});
|
1652
1674
|
}
|
1653
|
-
if (error instanceof Error &&
|
1675
|
+
if (error instanceof Error && import_provider21.APICallError.isInstance(error) && error.isRetryable === true && tryNumber <= maxRetries) {
|
1654
1676
|
await (0, import_provider_utils6.delay)(
|
1655
1677
|
getRetryDelayInMs({
|
1656
1678
|
error,
|
@@ -1918,8 +1940,8 @@ function stepCountIs(stepCount) {
|
|
1918
1940
|
}
|
1919
1941
|
function hasToolCall(toolName) {
|
1920
1942
|
return ({ steps }) => {
|
1921
|
-
var
|
1922
|
-
return (_c = (_b = (
|
1943
|
+
var _a17, _b, _c;
|
1944
|
+
return (_c = (_b = (_a17 = steps[steps.length - 1]) == null ? void 0 : _a17.toolCalls) == null ? void 0 : _b.some(
|
1923
1945
|
(toolCall) => toolCall.toolName === toolName
|
1924
1946
|
)) != null ? _c : false;
|
1925
1947
|
};
|
@@ -1932,14 +1954,14 @@ async function isStopConditionMet({
|
|
1932
1954
|
}
|
1933
1955
|
|
1934
1956
|
// src/prompt/create-tool-model-output.ts
|
1935
|
-
var
|
1957
|
+
var import_provider22 = require("@ai-sdk/provider");
|
1936
1958
|
function createToolModelOutput({
|
1937
1959
|
output,
|
1938
1960
|
tool: tool3,
|
1939
1961
|
errorMode
|
1940
1962
|
}) {
|
1941
1963
|
if (errorMode === "text") {
|
1942
|
-
return { type: "error-text", value: (0,
|
1964
|
+
return { type: "error-text", value: (0, import_provider22.getErrorMessage)(output) };
|
1943
1965
|
} else if (errorMode === "json") {
|
1944
1966
|
return { type: "error-json", value: toJSONValue(output) };
|
1945
1967
|
}
|
@@ -2115,7 +2137,7 @@ async function generateText({
|
|
2115
2137
|
}),
|
2116
2138
|
tracer,
|
2117
2139
|
fn: async (span) => {
|
2118
|
-
var
|
2140
|
+
var _a17, _b, _c, _d, _e, _f;
|
2119
2141
|
const callSettings2 = prepareCallSettings(settings);
|
2120
2142
|
let currentModelResponse;
|
2121
2143
|
let clientToolCalls = [];
|
@@ -2135,7 +2157,7 @@ async function generateText({
|
|
2135
2157
|
}));
|
2136
2158
|
const promptMessages = await convertToLanguageModelPrompt({
|
2137
2159
|
prompt: {
|
2138
|
-
system: (
|
2160
|
+
system: (_a17 = prepareStepResult == null ? void 0 : prepareStepResult.system) != null ? _a17 : initialPrompt.system,
|
2139
2161
|
messages: (_b = prepareStepResult == null ? void 0 : prepareStepResult.messages) != null ? _b : stepInputMessages
|
2140
2162
|
},
|
2141
2163
|
supportedUrls: await model.supportedUrls
|
@@ -2150,7 +2172,7 @@ async function generateText({
|
|
2150
2172
|
});
|
2151
2173
|
currentModelResponse = await retry(
|
2152
2174
|
() => {
|
2153
|
-
var
|
2175
|
+
var _a18;
|
2154
2176
|
return recordSpan({
|
2155
2177
|
name: "ai.generateText.doGenerate",
|
2156
2178
|
attributes: selectTelemetryAttributes({
|
@@ -2182,14 +2204,14 @@ async function generateText({
|
|
2182
2204
|
"gen_ai.request.max_tokens": settings.maxOutputTokens,
|
2183
2205
|
"gen_ai.request.presence_penalty": settings.presencePenalty,
|
2184
2206
|
"gen_ai.request.stop_sequences": settings.stopSequences,
|
2185
|
-
"gen_ai.request.temperature": (
|
2207
|
+
"gen_ai.request.temperature": (_a18 = settings.temperature) != null ? _a18 : void 0,
|
2186
2208
|
"gen_ai.request.top_k": settings.topK,
|
2187
2209
|
"gen_ai.request.top_p": settings.topP
|
2188
2210
|
}
|
2189
2211
|
}),
|
2190
2212
|
tracer,
|
2191
2213
|
fn: async (span2) => {
|
2192
|
-
var
|
2214
|
+
var _a19, _b2, _c2, _d2, _e2, _f2, _g, _h;
|
2193
2215
|
const result = await stepModel.doGenerate({
|
2194
2216
|
...callSettings2,
|
2195
2217
|
tools: stepTools,
|
@@ -2201,7 +2223,7 @@ async function generateText({
|
|
2201
2223
|
headers
|
2202
2224
|
});
|
2203
2225
|
const responseData = {
|
2204
|
-
id: (_b2 = (
|
2226
|
+
id: (_b2 = (_a19 = result.response) == null ? void 0 : _a19.id) != null ? _b2 : generateId3(),
|
2205
2227
|
timestamp: (_d2 = (_c2 = result.response) == null ? void 0 : _c2.timestamp) != null ? _d2 : currentDate(),
|
2206
2228
|
modelId: (_f2 = (_e2 = result.response) == null ? void 0 : _e2.modelId) != null ? _f2 : stepModel.modelId,
|
2207
2229
|
headers: (_g = result.response) == null ? void 0 : _g.headers,
|
@@ -2411,12 +2433,22 @@ async function executeTools({
 tracer,
 fn: async (span) => {
 try {
-const
-
-
-
-
+const stream = (0, import_provider_utils9.executeTool)({
+execute: tool3.execute.bind(tool3),
+input,
+options: {
+toolCallId,
+messages,
+abortSignal,
+experimental_context
+}
 });
+let output;
+for await (const part of stream) {
+if (part.type === "final") {
+output = part.output;
+}
+}
 try {
 span.setAttributes(
 selectTelemetryAttributes({
@@ -2611,7 +2643,7 @@ function asContent({
|
|
2611
2643
|
}
|
2612
2644
|
|
2613
2645
|
// src/generate-text/stream-text.ts
|
2614
|
-
var
|
2646
|
+
var import_provider23 = require("@ai-sdk/provider");
|
2615
2647
|
var import_provider_utils13 = require("@ai-sdk/provider-utils");
|
2616
2648
|
|
2617
2649
|
// src/util/prepare-headers.ts
|
@@ -2810,7 +2842,8 @@ var uiMessageChunkSchema = import_v47.z.union([
 toolCallId: import_v47.z.string(),
 output: import_v47.z.unknown(),
 providerExecuted: import_v47.z.boolean().optional(),
-dynamic: import_v47.z.boolean().optional()
+dynamic: import_v47.z.boolean().optional(),
+preliminary: import_v47.z.boolean().optional()
 }),
 import_v47.z.strictObject({
 type: import_v47.z.literal("tool-output-error"),
@@ -3305,7 +3338,7 @@ function processUIMessageStream({
|
|
3305
3338
|
new TransformStream({
|
3306
3339
|
async transform(chunk, controller) {
|
3307
3340
|
await runUpdateMessageJob(async ({ state, write }) => {
|
3308
|
-
var
|
3341
|
+
var _a17, _b, _c, _d;
|
3309
3342
|
function getToolInvocation(toolCallId) {
|
3310
3343
|
const toolInvocations = state.message.parts.filter(isToolUIPart);
|
3311
3344
|
const toolInvocation = toolInvocations.find(
|
@@ -3333,7 +3366,7 @@ function processUIMessageStream({
|
|
3333
3366
|
return toolInvocation;
|
3334
3367
|
}
|
3335
3368
|
function updateToolPart(options) {
|
3336
|
-
var
|
3369
|
+
var _a18;
|
3337
3370
|
const part = state.message.parts.find(
|
3338
3371
|
(part2) => isToolUIPart(part2) && part2.toolCallId === options.toolCallId
|
3339
3372
|
);
|
@@ -3345,7 +3378,8 @@ function processUIMessageStream({
|
|
3345
3378
|
anyPart.output = anyOptions.output;
|
3346
3379
|
anyPart.errorText = anyOptions.errorText;
|
3347
3380
|
anyPart.rawInput = anyOptions.rawInput;
|
3348
|
-
anyPart.
|
3381
|
+
anyPart.preliminary = anyOptions.preliminary;
|
3382
|
+
anyPart.providerExecuted = (_a18 = anyOptions.providerExecuted) != null ? _a18 : part.providerExecuted;
|
3349
3383
|
if (anyOptions.providerMetadata != null && part.state === "input-available") {
|
3350
3384
|
part.callProviderMetadata = anyOptions.providerMetadata;
|
3351
3385
|
}
|
@@ -3359,12 +3393,13 @@ function processUIMessageStream({
|
|
3359
3393
|
rawInput: anyOptions.rawInput,
|
3360
3394
|
errorText: anyOptions.errorText,
|
3361
3395
|
providerExecuted: anyOptions.providerExecuted,
|
3396
|
+
preliminary: anyOptions.preliminary,
|
3362
3397
|
...anyOptions.providerMetadata != null ? { callProviderMetadata: anyOptions.providerMetadata } : {}
|
3363
3398
|
});
|
3364
3399
|
}
|
3365
3400
|
}
|
3366
3401
|
function updateDynamicToolPart(options) {
|
3367
|
-
var
|
3402
|
+
var _a18;
|
3368
3403
|
const part = state.message.parts.find(
|
3369
3404
|
(part2) => part2.type === "dynamic-tool" && part2.toolCallId === options.toolCallId
|
3370
3405
|
);
|
@@ -3376,7 +3411,8 @@ function processUIMessageStream({
|
|
3376
3411
|
anyPart.input = anyOptions.input;
|
3377
3412
|
anyPart.output = anyOptions.output;
|
3378
3413
|
anyPart.errorText = anyOptions.errorText;
|
3379
|
-
anyPart.rawInput = (
|
3414
|
+
anyPart.rawInput = (_a18 = anyOptions.rawInput) != null ? _a18 : anyPart.rawInput;
|
3415
|
+
anyPart.preliminary = anyOptions.preliminary;
|
3380
3416
|
if (anyOptions.providerMetadata != null && part.state === "input-available") {
|
3381
3417
|
part.callProviderMetadata = anyOptions.providerMetadata;
|
3382
3418
|
}
|
@@ -3389,6 +3425,7 @@ function processUIMessageStream({
|
|
3389
3425
|
input: anyOptions.input,
|
3390
3426
|
output: anyOptions.output,
|
3391
3427
|
errorText: anyOptions.errorText,
|
3428
|
+
preliminary: anyOptions.preliminary,
|
3392
3429
|
...anyOptions.providerMetadata != null ? { callProviderMetadata: anyOptions.providerMetadata } : {}
|
3393
3430
|
});
|
3394
3431
|
}
|
@@ -3421,7 +3458,7 @@ function processUIMessageStream({
|
|
3421
3458
|
case "text-delta": {
|
3422
3459
|
const textPart = state.activeTextParts[chunk.id];
|
3423
3460
|
textPart.text += chunk.delta;
|
3424
|
-
textPart.providerMetadata = (
|
3461
|
+
textPart.providerMetadata = (_a17 = chunk.providerMetadata) != null ? _a17 : textPart.providerMetadata;
|
3425
3462
|
write();
|
3426
3463
|
break;
|
3427
3464
|
}
|
@@ -3605,7 +3642,8 @@ function processUIMessageStream({
 toolName: toolInvocation.toolName,
 state: "output-available",
 input: toolInvocation.input,
-output: chunk.output
+output: chunk.output,
+preliminary: chunk.preliminary
 });
 } else {
 const toolInvocation = getToolInvocation(chunk.toolCallId);
@@ -3615,7 +3653,8 @@ function processUIMessageStream({
 state: "output-available",
 input: toolInvocation.input,
 output: chunk.output,
-providerExecuted: chunk.providerExecuted
+providerExecuted: chunk.providerExecuted,
+preliminary: chunk.preliminary
 });
 }
 write();
@@ -3964,17 +4003,17 @@ var DelayedPromise = class {
|
|
3964
4003
|
return this._promise;
|
3965
4004
|
}
|
3966
4005
|
resolve(value) {
|
3967
|
-
var
|
4006
|
+
var _a17;
|
3968
4007
|
this.status = { type: "resolved", value };
|
3969
4008
|
if (this._promise) {
|
3970
|
-
(
|
4009
|
+
(_a17 = this._resolve) == null ? void 0 : _a17.call(this, value);
|
3971
4010
|
}
|
3972
4011
|
}
|
3973
4012
|
reject(error) {
|
3974
|
-
var
|
4013
|
+
var _a17;
|
3975
4014
|
this.status = { type: "rejected", error };
|
3976
4015
|
if (this._promise) {
|
3977
|
-
(
|
4016
|
+
(_a17 = this._reject) == null ? void 0 : _a17.call(this, error);
|
3978
4017
|
}
|
3979
4018
|
}
|
3980
4019
|
};
|
@@ -4005,8 +4044,8 @@ function filterStreamErrors(readable, onError) {
|
|
4005
4044
|
|
4006
4045
|
// src/util/now.ts
|
4007
4046
|
function now() {
|
4008
|
-
var
|
4009
|
-
return (_b = (
|
4047
|
+
var _a17, _b;
|
4048
|
+
return (_b = (_a17 = globalThis == null ? void 0 : globalThis.performance) == null ? void 0 : _a17.now()) != null ? _b : Date.now();
|
4010
4049
|
}
|
4011
4050
|
|
4012
4051
|
// src/generate-text/run-tools-transformation.ts
|
@@ -4135,12 +4174,29 @@ function runToolsTransformation({
 fn: async (span) => {
 let output;
 try {
-
-
-
-
-
+const stream = (0, import_provider_utils12.executeTool)({
+execute: tool3.execute.bind(tool3),
+input: toolCall.input,
+options: {
+toolCallId: toolCall.toolCallId,
+messages,
+abortSignal,
+experimental_context
+}
 });
+for await (const part of stream) {
+toolResultsStreamController.enqueue({
+...toolCall,
+type: "tool-result",
+output: part.output,
+...part.type === "preliminary" && {
+preliminary: true
+}
+});
+if (part.type === "final") {
+output = part.output;
+}
+}
 } catch (error) {
 recordErrorOnSpan(span, error);
 toolResultsStreamController.enqueue({
@@ -4152,11 +4208,6 @@ function runToolsTransformation({
 attemptClose();
 return;
 }
-toolResultsStreamController.enqueue({
-...toolCall,
-type: "tool-result",
-output
-});
 outstandingToolResults.delete(toolExecutionId);
 attemptClose();
 try {
@@ -4427,7 +4478,7 @@ var DefaultStreamTextResult = class {
|
|
4427
4478
|
let activeReasoningContent = {};
|
4428
4479
|
const eventProcessor = new TransformStream({
|
4429
4480
|
async transform(chunk, controller) {
|
4430
|
-
var
|
4481
|
+
var _a17, _b, _c;
|
4431
4482
|
controller.enqueue(chunk);
|
4432
4483
|
const { part } = chunk;
|
4433
4484
|
if (part.type === "text-delta" || part.type === "reasoning-delta" || part.type === "source" || part.type === "tool-call" || part.type === "tool-result" || part.type === "tool-input-start" || part.type === "tool-input-delta" || part.type === "raw") {
|
@@ -4457,7 +4508,7 @@ var DefaultStreamTextResult = class {
|
|
4457
4508
|
return;
|
4458
4509
|
}
|
4459
4510
|
activeText.text += part.text;
|
4460
|
-
activeText.providerMetadata = (
|
4511
|
+
activeText.providerMetadata = (_a17 = part.providerMetadata) != null ? _a17 : activeText.providerMetadata;
|
4461
4512
|
}
|
4462
4513
|
if (part.type === "text-end") {
|
4463
4514
|
delete activeTextContent[part.id];
|
@@ -4509,7 +4560,7 @@ var DefaultStreamTextResult = class {
 if (part.type === "tool-call") {
 recordedContent.push(part);
 }
-if (part.type === "tool-result") {
+if (part.type === "tool-result" && !part.preliminary) {
 recordedContent.push(part);
 }
 if (part.type === "tool-error") {
@@ -4552,6 +4603,12 @@ var DefaultStreamTextResult = class {
 async flush(controller) {
 try {
 if (recordedSteps.length === 0) {
+const error = new NoOutputGeneratedError({
+message: "No output generated. Check the stream for errors."
+});
+self._finishReason.reject(error);
+self._totalUsage.reject(error);
+self._steps.reject(error);
 return;
 }
 const finishReason = recordedFinishReason != null ? recordedFinishReason : "unknown";
@@ -4594,8 +4651,8 @@ var DefaultStreamTextResult = class {
|
|
4594
4651
|
"ai.response.text": { output: () => finalStep.text },
|
4595
4652
|
"ai.response.toolCalls": {
|
4596
4653
|
output: () => {
|
4597
|
-
var
|
4598
|
-
return ((
|
4654
|
+
var _a17;
|
4655
|
+
return ((_a17 = finalStep.toolCalls) == null ? void 0 : _a17.length) ? JSON.stringify(finalStep.toolCalls) : void 0;
|
4599
4656
|
}
|
4600
4657
|
},
|
4601
4658
|
"ai.response.providerMetadata": JSON.stringify(
|
@@ -4682,7 +4739,7 @@ var DefaultStreamTextResult = class {
|
|
4682
4739
|
responseMessages,
|
4683
4740
|
usage
|
4684
4741
|
}) {
|
4685
|
-
var
|
4742
|
+
var _a17, _b, _c, _d, _e;
|
4686
4743
|
const includeRawChunks2 = self.includeRawChunks;
|
4687
4744
|
stepFinish = new DelayedPromise();
|
4688
4745
|
const initialPrompt = await standardizePrompt({
|
@@ -4702,7 +4759,7 @@ var DefaultStreamTextResult = class {
|
|
4702
4759
|
}));
|
4703
4760
|
const promptMessages = await convertToLanguageModelPrompt({
|
4704
4761
|
prompt: {
|
4705
|
-
system: (
|
4762
|
+
system: (_a17 = prepareStepResult == null ? void 0 : prepareStepResult.system) != null ? _a17 : initialPrompt.system,
|
4706
4763
|
messages: (_b = prepareStepResult == null ? void 0 : prepareStepResult.messages) != null ? _b : stepInputMessages
|
4707
4764
|
},
|
4708
4765
|
supportedUrls: await model.supportedUrls
|
@@ -4812,7 +4869,7 @@ var DefaultStreamTextResult = class {
|
|
4812
4869
|
streamWithToolResults.pipeThrough(
|
4813
4870
|
new TransformStream({
|
4814
4871
|
async transform(chunk, controller) {
|
4815
|
-
var
|
4872
|
+
var _a18, _b2, _c2, _d2;
|
4816
4873
|
if (chunk.type === "stream-start") {
|
4817
4874
|
warnings = chunk.warnings;
|
4818
4875
|
return;
|
@@ -4872,7 +4929,9 @@ var DefaultStreamTextResult = class {
 }
 case "tool-result": {
 controller.enqueue(chunk);
-
+if (!chunk.preliminary) {
+stepToolOutputs.push(chunk);
+}
 break;
 }
 case "tool-error": {
@@ -4882,7 +4941,7 @@ var DefaultStreamTextResult = class {
|
|
4882
4941
|
}
|
4883
4942
|
case "response-metadata": {
|
4884
4943
|
stepResponse = {
|
4885
|
-
id: (
|
4944
|
+
id: (_a18 = chunk.id) != null ? _a18 : stepResponse.id,
|
4886
4945
|
timestamp: (_b2 = chunk.timestamp) != null ? _b2 : stepResponse.timestamp,
|
4887
4946
|
modelId: (_c2 = chunk.modelId) != null ? _c2 : stepResponse.modelId
|
4888
4947
|
};
|
@@ -5081,6 +5140,7 @@ var DefaultStreamTextResult = class {
 });
 }
 get steps() {
+this.consumeStream();
 return this._steps.promise;
 }
 get finalStep() {
@@ -5138,9 +5198,11 @@ var DefaultStreamTextResult = class {
 return this.finalStep.then((step) => step.response);
 }
 get totalUsage() {
+this.consumeStream();
 return this._totalUsage.promise;
 }
 get finishReason() {
+this.consumeStream();
 return this._finishReason.promise;
 }
 /**
@@ -5181,14 +5243,14 @@ var DefaultStreamTextResult = class {
|
|
5181
5243
|
);
|
5182
5244
|
}
|
5183
5245
|
async consumeStream(options) {
|
5184
|
-
var
|
5246
|
+
var _a17;
|
5185
5247
|
try {
|
5186
5248
|
await consumeStream({
|
5187
5249
|
stream: this.fullStream,
|
5188
5250
|
onError: options == null ? void 0 : options.onError
|
5189
5251
|
});
|
5190
5252
|
} catch (error) {
|
5191
|
-
(
|
5253
|
+
(_a17 = options == null ? void 0 : options.onError) == null ? void 0 : _a17.call(options, error);
|
5192
5254
|
}
|
5193
5255
|
}
|
5194
5256
|
get experimental_partialOutputStream() {
|
@@ -5216,7 +5278,7 @@ var DefaultStreamTextResult = class {
|
|
5216
5278
|
sendSources = false,
|
5217
5279
|
sendStart = true,
|
5218
5280
|
sendFinish = true,
|
5219
|
-
onError =
|
5281
|
+
onError = import_provider23.getErrorMessage
|
5220
5282
|
} = {}) {
|
5221
5283
|
const responseMessageId = generateMessageId != null ? getResponseUIMessageId({
|
5222
5284
|
originalMessages,
|
@@ -5224,9 +5286,9 @@ var DefaultStreamTextResult = class {
|
|
5224
5286
|
}) : void 0;
|
5225
5287
|
const toolNamesByCallId = {};
|
5226
5288
|
const isDynamic = (toolCallId) => {
|
5227
|
-
var
|
5289
|
+
var _a17, _b;
|
5228
5290
|
const toolName = toolNamesByCallId[toolCallId];
|
5229
|
-
const dynamic = ((_b = (
|
5291
|
+
const dynamic = ((_b = (_a17 = this.tools) == null ? void 0 : _a17[toolName]) == null ? void 0 : _b.type) === "dynamic";
|
5230
5292
|
return dynamic ? true : void 0;
|
5231
5293
|
};
|
5232
5294
|
const baseStream = this.fullStream.pipeThrough(
|
@@ -5371,6 +5433,7 @@ var DefaultStreamTextResult = class {
 toolCallId: part.toolCallId,
 output: part.output,
 ...part.providerExecuted != null ? { providerExecuted: part.providerExecuted } : {},
+...part.preliminary != null ? { preliminary: part.preliminary } : {},
 ...dynamic != null ? { dynamic } : {}
 });
 break;
@@ -5590,7 +5653,7 @@ async function embed({
|
|
5590
5653
|
}),
|
5591
5654
|
tracer,
|
5592
5655
|
fn: async (doEmbedSpan) => {
|
5593
|
-
var
|
5656
|
+
var _a17;
|
5594
5657
|
const modelResponse = await model.doEmbed({
|
5595
5658
|
values: [value],
|
5596
5659
|
abortSignal,
|
@@ -5598,7 +5661,7 @@ async function embed({
|
|
5598
5661
|
providerOptions
|
5599
5662
|
});
|
5600
5663
|
const embedding2 = modelResponse.embeddings[0];
|
5601
|
-
const usage2 = (
|
5664
|
+
const usage2 = (_a17 = modelResponse.usage) != null ? _a17 : { tokens: NaN };
|
5602
5665
|
doEmbedSpan.setAttributes(
|
5603
5666
|
selectTelemetryAttributes({
|
5604
5667
|
telemetry,
|
@@ -5701,7 +5764,7 @@ async function embedMany({
|
|
5701
5764
|
}),
|
5702
5765
|
tracer,
|
5703
5766
|
fn: async (span) => {
|
5704
|
-
var
|
5767
|
+
var _a17;
|
5705
5768
|
const [maxEmbeddingsPerCall, supportsParallelCalls] = await Promise.all([
|
5706
5769
|
model.maxEmbeddingsPerCall,
|
5707
5770
|
model.supportsParallelCalls
|
@@ -5727,7 +5790,7 @@ async function embedMany({
|
|
5727
5790
|
}),
|
5728
5791
|
tracer,
|
5729
5792
|
fn: async (doEmbedSpan) => {
|
5730
|
-
var
|
5793
|
+
var _a18;
|
5731
5794
|
const modelResponse = await model.doEmbed({
|
5732
5795
|
values,
|
5733
5796
|
abortSignal,
|
@@ -5735,7 +5798,7 @@ async function embedMany({
|
|
5735
5798
|
providerOptions
|
5736
5799
|
});
|
5737
5800
|
const embeddings3 = modelResponse.embeddings;
|
5738
|
-
const usage2 = (
|
5801
|
+
const usage2 = (_a18 = modelResponse.usage) != null ? _a18 : { tokens: NaN };
|
5739
5802
|
doEmbedSpan.setAttributes(
|
5740
5803
|
selectTelemetryAttributes({
|
5741
5804
|
telemetry,
|
@@ -5809,7 +5872,7 @@ async function embedMany({
|
|
5809
5872
|
}),
|
5810
5873
|
tracer,
|
5811
5874
|
fn: async (doEmbedSpan) => {
|
5812
|
-
var
|
5875
|
+
var _a18;
|
5813
5876
|
const modelResponse = await model.doEmbed({
|
5814
5877
|
values: chunk,
|
5815
5878
|
abortSignal,
|
@@ -5817,7 +5880,7 @@ async function embedMany({
|
|
5817
5880
|
providerOptions
|
5818
5881
|
});
|
5819
5882
|
const embeddings2 = modelResponse.embeddings;
|
5820
|
-
const usage = (
|
5883
|
+
const usage = (_a18 = modelResponse.usage) != null ? _a18 : { tokens: NaN };
|
5821
5884
|
doEmbedSpan.setAttributes(
|
5822
5885
|
selectTelemetryAttributes({
|
5823
5886
|
telemetry,
|
@@ -5854,7 +5917,7 @@ async function embedMany({
|
|
5854
5917
|
result.providerMetadata
|
5855
5918
|
)) {
|
5856
5919
|
providerMetadata[providerName] = {
|
5857
|
-
...(
|
5920
|
+
...(_a17 = providerMetadata[providerName]) != null ? _a17 : {},
|
5858
5921
|
...metadata
|
5859
5922
|
};
|
5860
5923
|
}
|
@@ -5907,7 +5970,7 @@ async function generateImage({
|
|
5907
5970
|
abortSignal,
|
5908
5971
|
headers
|
5909
5972
|
}) {
|
5910
|
-
var
|
5973
|
+
var _a17, _b;
|
5911
5974
|
if (model.specificationVersion !== "v2") {
|
5912
5975
|
throw new UnsupportedModelVersionError({
|
5913
5976
|
version: model.specificationVersion,
|
@@ -5919,7 +5982,7 @@ async function generateImage({
|
|
5919
5982
|
maxRetries: maxRetriesArg,
|
5920
5983
|
abortSignal
|
5921
5984
|
});
|
5922
|
-
const maxImagesPerCallWithDefault = (
|
5985
|
+
const maxImagesPerCallWithDefault = (_a17 = maxImagesPerCall != null ? maxImagesPerCall : await invokeModelMaxImagesPerCall(model)) != null ? _a17 : 1;
|
5923
5986
|
const callCount = Math.ceil(n / maxImagesPerCallWithDefault);
|
5924
5987
|
const callImageCounts = Array.from({ length: callCount }, (_, i) => {
|
5925
5988
|
if (i < callCount - 1) {
|
@@ -5952,13 +6015,13 @@ async function generateImage({
|
|
5952
6015
|
images.push(
|
5953
6016
|
...result.images.map(
|
5954
6017
|
(image) => {
|
5955
|
-
var
|
6018
|
+
var _a18;
|
5956
6019
|
return new DefaultGeneratedFile({
|
5957
6020
|
data: image,
|
5958
|
-
mediaType: (
|
6021
|
+
mediaType: (_a18 = detectMediaType({
|
5959
6022
|
data: image,
|
5960
6023
|
signatures: imageMediaTypeSignatures
|
5961
|
-
})) != null ?
|
6024
|
+
})) != null ? _a18 : "image/png"
|
5962
6025
|
});
|
5963
6026
|
}
|
5964
6027
|
)
|
@@ -6009,7 +6072,7 @@ async function invokeModelMaxImagesPerCall(model) {
|
|
6009
6072
|
var import_provider_utils16 = require("@ai-sdk/provider-utils");
|
6010
6073
|
|
6011
6074
|
// src/generate-object/output-strategy.ts
|
6012
|
-
var
|
6075
|
+
var import_provider24 = require("@ai-sdk/provider");
|
6013
6076
|
var import_provider_utils14 = require("@ai-sdk/provider-utils");
|
6014
6077
|
var noSchemaOutputStrategy = {
|
6015
6078
|
type: "no-schema",
|
@@ -6030,7 +6093,7 @@ var noSchemaOutputStrategy = {
|
|
6030
6093
|
} : { success: true, value };
|
6031
6094
|
},
|
6032
6095
|
createElementStream() {
|
6033
|
-
throw new
|
6096
|
+
throw new import_provider24.UnsupportedFunctionalityError({
|
6034
6097
|
functionality: "element streams in no-schema mode"
|
6035
6098
|
});
|
6036
6099
|
}
|
@@ -6052,7 +6115,7 @@ var objectOutputStrategy = (schema) => ({
|
|
6052
6115
|
return (0, import_provider_utils14.safeValidateTypes)({ value, schema });
|
6053
6116
|
},
|
6054
6117
|
createElementStream() {
|
6055
|
-
throw new
|
6118
|
+
throw new import_provider24.UnsupportedFunctionalityError({
|
6056
6119
|
functionality: "element streams in object mode"
|
6057
6120
|
});
|
6058
6121
|
}
|
@@ -6079,11 +6142,11 @@ var arrayOutputStrategy = (schema) => {
|
|
6079
6142
|
isFirstDelta,
|
6080
6143
|
isFinalDelta
|
6081
6144
|
}) {
|
6082
|
-
var
|
6083
|
-
if (!(0,
|
6145
|
+
var _a17;
|
6146
|
+
if (!(0, import_provider24.isJSONObject)(value) || !(0, import_provider24.isJSONArray)(value.elements)) {
|
6084
6147
|
return {
|
6085
6148
|
success: false,
|
6086
|
-
error: new
|
6149
|
+
error: new import_provider24.TypeValidationError({
|
6087
6150
|
value,
|
6088
6151
|
cause: "value must be an object that contains an array of elements"
|
6089
6152
|
})
|
@@ -6102,7 +6165,7 @@ var arrayOutputStrategy = (schema) => {
|
|
6102
6165
|
}
|
6103
6166
|
resultArray.push(result.value);
|
6104
6167
|
}
|
6105
|
-
const publishedElementCount = (
|
6168
|
+
const publishedElementCount = (_a17 = latestObject == null ? void 0 : latestObject.length) != null ? _a17 : 0;
|
6106
6169
|
let textDelta = "";
|
6107
6170
|
if (isFirstDelta) {
|
6108
6171
|
textDelta += "[";
|
@@ -6123,10 +6186,10 @@ var arrayOutputStrategy = (schema) => {
|
|
6123
6186
|
};
|
6124
6187
|
},
|
6125
6188
|
async validateFinalResult(value) {
|
6126
|
-
if (!(0,
|
6189
|
+
if (!(0, import_provider24.isJSONObject)(value) || !(0, import_provider24.isJSONArray)(value.elements)) {
|
6127
6190
|
return {
|
6128
6191
|
success: false,
|
6129
|
-
error: new
|
6192
|
+
error: new import_provider24.TypeValidationError({
|
6130
6193
|
value,
|
6131
6194
|
cause: "value must be an object that contains an array of elements"
|
6132
6195
|
})
|
@@ -6189,10 +6252,10 @@ var enumOutputStrategy = (enumValues) => {
|
|
6189
6252
|
additionalProperties: false
|
6190
6253
|
},
|
6191
6254
|
async validateFinalResult(value) {
|
6192
|
-
if (!(0,
|
6255
|
+
if (!(0, import_provider24.isJSONObject)(value) || typeof value.result !== "string") {
|
6193
6256
|
return {
|
6194
6257
|
success: false,
|
6195
|
-
error: new
|
6258
|
+
error: new import_provider24.TypeValidationError({
|
6196
6259
|
value,
|
6197
6260
|
cause: 'value must be an object that contains a string in the "result" property.'
|
6198
6261
|
})
|
@@ -6201,17 +6264,17 @@ var enumOutputStrategy = (enumValues) => {
|
|
6201
6264
|
const result = value.result;
|
6202
6265
|
return enumValues.includes(result) ? { success: true, value: result } : {
|
6203
6266
|
success: false,
|
6204
|
-
error: new
|
6267
|
+
error: new import_provider24.TypeValidationError({
|
6205
6268
|
value,
|
6206
6269
|
cause: "value must be a string in the enum"
|
6207
6270
|
})
|
6208
6271
|
};
|
6209
6272
|
},
|
6210
6273
|
async validatePartialResult({ value, textDelta }) {
|
6211
|
-
if (!(0,
|
6274
|
+
if (!(0, import_provider24.isJSONObject)(value) || typeof value.result !== "string") {
|
6212
6275
|
return {
|
6213
6276
|
success: false,
|
6214
|
-
error: new
|
6277
|
+
error: new import_provider24.TypeValidationError({
|
6215
6278
|
value,
|
6216
6279
|
cause: 'value must be an object that contains a string in the "result" property.'
|
6217
6280
|
})
|
@@ -6224,7 +6287,7 @@ var enumOutputStrategy = (enumValues) => {
|
|
6224
6287
|
if (value.result.length === 0 || possibleEnumValues.length === 0) {
|
6225
6288
|
return {
|
6226
6289
|
success: false,
|
6227
|
-
error: new
|
6290
|
+
error: new import_provider24.TypeValidationError({
|
6228
6291
|
value,
|
6229
6292
|
cause: "value must be a string in the enum"
|
6230
6293
|
})
|
@@ -6239,7 +6302,7 @@ var enumOutputStrategy = (enumValues) => {
|
|
6239
6302
|
};
|
6240
6303
|
},
|
6241
6304
|
createElementStream() {
|
6242
|
-
throw new
|
6305
|
+
throw new import_provider24.UnsupportedFunctionalityError({
|
6243
6306
|
functionality: "element streams in enum mode"
|
6244
6307
|
});
|
6245
6308
|
}
|
@@ -6267,7 +6330,7 @@ function getOutputStrategy({
|
|
6267
6330
|
}
|
6268
6331
|
|
6269
6332
|
// src/generate-object/parse-and-validate-object-result.ts
|
6270
|
-
var
|
6333
|
+
var import_provider25 = require("@ai-sdk/provider");
|
6271
6334
|
var import_provider_utils15 = require("@ai-sdk/provider-utils");
|
6272
6335
|
async function parseAndValidateObjectResult(result, outputStrategy, context) {
|
6273
6336
|
const parseResult = await (0, import_provider_utils15.safeParseJSON)({ text: result });
|
@@ -6305,7 +6368,7 @@ async function parseAndValidateObjectResultWithRepair(result, outputStrategy, re
|
|
6305
6368
|
try {
|
6306
6369
|
return await parseAndValidateObjectResult(result, outputStrategy, context);
|
6307
6370
|
} catch (error) {
|
6308
|
-
if (repairText != null && NoObjectGeneratedError.isInstance(error) && (
|
6371
|
+
if (repairText != null && NoObjectGeneratedError.isInstance(error) && (import_provider25.JSONParseError.isInstance(error.cause) || import_provider25.TypeValidationError.isInstance(error.cause))) {
|
6309
6372
|
const repairedText = await repairText({
|
6310
6373
|
text: result,
|
6311
6374
|
error: error.cause
|
@@ -6516,7 +6579,7 @@ async function generateObject(options) {
}),
tracer,
fn: async (span) => {
- var
+ var _a17;
let result;
let finishReason;
let usage;
@@ -6560,7 +6623,7 @@ async function generateObject(options) {
}),
tracer,
fn: async (span2) => {
- var
+ var _a18, _b, _c, _d, _e, _f, _g, _h;
const result2 = await model.doGenerate({
responseFormat: {
type: "json",
@@ -6575,7 +6638,7 @@ async function generateObject(options) {
headers
});
const responseData = {
- id: (_b = (
+ id: (_b = (_a18 = result2.response) == null ? void 0 : _a18.id) != null ? _b : generateId3(),
timestamp: (_d = (_c = result2.response) == null ? void 0 : _c.timestamp) != null ? _d : currentDate(),
modelId: (_f = (_e = result2.response) == null ? void 0 : _e.modelId) != null ? _f : model.modelId,
headers: (_g = result2.response) == null ? void 0 : _g.headers,
@@ -6623,7 +6686,7 @@ async function generateObject(options) {
usage = generateResult.usage;
warnings = generateResult.warnings;
resultProviderMetadata = generateResult.providerMetadata;
- request = (
+ request = (_a17 = generateResult.request) != null ? _a17 : {};
response = generateResult.responseData;
const object2 = await parseAndValidateObjectResultWithRepair(
result,
@@ -6678,9 +6741,9 @@ var DefaultGenerateObjectResult = class {
this.request = options.request;
}
toJsonResponse(init) {
- var
+ var _a17;
return new Response(JSON.stringify(this.object), {
- status: (
+ status: (_a17 = init == null ? void 0 : init.status) != null ? _a17 : 200,
headers: prepareHeaders(init == null ? void 0 : init.headers, {
"content-type": "application/json; charset=utf-8"
})
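The last hunk above shows `toJsonResponse` on the `generateObject` result defaulting to status 200 and a JSON content type. A sketch of a route handler that relies on that default; the framework-style `POST` export and the `@ai-sdk/openai` provider are assumptions:

```js
import { generateObject } from 'ai';
import { openai } from '@ai-sdk/openai'; // assumed provider package
import { z } from 'zod';

export async function POST(req) {
  const { prompt } = await req.json();

  const result = await generateObject({
    model: openai('gpt-4o-mini'),
    schema: z.object({ summary: z.string() }),
    prompt,
  });

  // Defaults to status 200 with "application/json; charset=utf-8".
  return result.toJsonResponse();
}
```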
@@ -6806,8 +6869,8 @@ function simulateReadableStream({
chunkDelayInMs = 0,
_internal
}) {
- var
- const delay2 = (
+ var _a17;
+ const delay2 = (_a17 = _internal == null ? void 0 : _internal.delay) != null ? _a17 : import_provider_utils17.delay;
let index = 0;
return new ReadableStream({
async pull(controller) {
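`simulateReadableStream` is unchanged here apart from how its internal delay helper is resolved. A small sketch of its documented options; iterating the returned `ReadableStream` with `for await` assumes a Node.js runtime:

```js
import { simulateReadableStream } from 'ai';

const stream = simulateReadableStream({
  chunks: ['Hello', ' ', 'world', '!'],
  initialDelayInMs: 100, // wait before the first chunk
  chunkDelayInMs: 50,    // wait between subsequent chunks
});

for await (const chunk of stream) {
  process.stdout.write(chunk);
}
```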
@@ -7062,7 +7125,7 @@ var DefaultStreamObjectResult = class {
const transformedStream = stream.pipeThrough(new TransformStream(transformer)).pipeThrough(
new TransformStream({
async transform(chunk, controller) {
- var
+ var _a17, _b, _c;
if (typeof chunk === "object" && chunk.type === "stream-start") {
warnings = chunk.warnings;
return;
@@ -7112,7 +7175,7 @@ var DefaultStreamObjectResult = class {
switch (chunk.type) {
case "response-metadata": {
fullResponse = {
- id: (
+ id: (_a17 = chunk.id) != null ? _a17 : fullResponse.id,
timestamp: (_b = chunk.timestamp) != null ? _b : fullResponse.timestamp,
modelId: (_c = chunk.modelId) != null ? _c : fullResponse.modelId
};
@@ -7336,8 +7399,8 @@ var DefaultStreamObjectResult = class {
};

// src/error/no-speech-generated-error.ts
- var
- var NoSpeechGeneratedError = class extends
+ var import_provider26 = require("@ai-sdk/provider");
+ var NoSpeechGeneratedError = class extends import_provider26.AISDKError {
constructor(options) {
super({
name: "AI_NoSpeechGeneratedError",
@@ -7386,7 +7449,7 @@ async function generateSpeech({
abortSignal,
headers
}) {
- var
+ var _a17;
if (model.specificationVersion !== "v2") {
throw new UnsupportedModelVersionError({
version: model.specificationVersion,
@@ -7417,10 +7480,10 @@ async function generateSpeech({
return new DefaultSpeechResult({
audio: new DefaultGeneratedAudioFile({
data: result.audio,
- mediaType: (
+ mediaType: (_a17 = detectMediaType({
data: result.audio,
signatures: audioMediaTypeSignatures
- })) != null ?
+ })) != null ? _a17 : "audio/mp3"
}),
warnings: result.warnings,
responses: [result.response],
@@ -7429,11 +7492,11 @@ async function generateSpeech({
}
var DefaultSpeechResult = class {
constructor(options) {
- var
+ var _a17;
this.audio = options.audio;
this.warnings = options.warnings;
this.responses = options.responses;
- this.providerMetadata = (
+ this.providerMetadata = (_a17 = options.providerMetadata) != null ? _a17 : {};
}
};

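In the speech hunks above, generated audio falls back to the `audio/mp3` media type when signature detection fails. A usage sketch; the export alias `experimental_generateSpeech`, the `@ai-sdk/openai` speech model, and the `uint8Array` accessor are assumptions based on the public docs, not on this diff:

```js
import { experimental_generateSpeech as generateSpeech } from 'ai';
import { openai } from '@ai-sdk/openai'; // assumed provider package
import { writeFileSync } from 'node:fs';

const { audio, warnings } = await generateSpeech({
  model: openai.speech('tts-1'),
  text: 'Version 5.0.11 renumbers internal provider imports.',
});

// audio.mediaType is detected from the bytes and falls back to "audio/mp3".
writeFileSync('out.mp3', audio.uint8Array);
console.log(warnings);
```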
@@ -7515,7 +7578,7 @@ var object = ({

// src/generate-text/smooth-stream.ts
var import_provider_utils20 = require("@ai-sdk/provider-utils");
- var
+ var import_provider27 = require("@ai-sdk/provider");
var CHUNKING_REGEXPS = {
word: /\S+\s+/m,
line: /\n+/m
@@ -7545,7 +7608,7 @@ function smoothStream({
} else {
const chunkingRegex = typeof chunking === "string" ? CHUNKING_REGEXPS[chunking] : chunking;
if (chunkingRegex == null) {
- throw new
+ throw new import_provider27.InvalidArgumentError({
argument: "chunking",
message: `Chunking must be "word" or "line" or a RegExp. Received: ${chunking}`
});
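`smoothStream` accepts "word", "line", or a custom `RegExp` for `chunking`; anything else throws the `InvalidArgumentError` shown above. A sketch with `streamText`, assuming the `@ai-sdk/openai` provider:

```js
import { streamText, smoothStream } from 'ai';
import { openai } from '@ai-sdk/openai'; // assumed provider package

const result = streamText({
  model: openai('gpt-4o-mini'),
  prompt: 'Write three short release notes.',
  // "word" and "line" map to CHUNKING_REGEXPS; a RegExp is also accepted.
  experimental_transform: smoothStream({ chunking: 'line', delayInMs: 20 }),
});

for await (const textPart of result.textStream) {
  process.stdout.write(textPart);
}
```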
@@ -7847,7 +7910,7 @@ var doWrap = ({
modelId,
providerId
}) => {
- var
+ var _a17, _b, _c;
async function doTransform({
params,
type
@@ -7856,7 +7919,7 @@ var doWrap = ({
}
return {
specificationVersion: "v2",
- provider: (
+ provider: (_a17 = providerId != null ? providerId : overrideProvider == null ? void 0 : overrideProvider({ model })) != null ? _a17 : model.provider,
modelId: (_b = modelId != null ? modelId : overrideModelId == null ? void 0 : overrideModelId({ model })) != null ? _b : model.modelId,
supportedUrls: (_c = overrideSupportedUrls == null ? void 0 : overrideSupportedUrls({ model })) != null ? _c : model.supportedUrls,
async doGenerate(params) {
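`doWrap` appears to back `wrapLanguageModel`/`wrapProvider`: explicit `providerId`/`modelId` arguments win, then any override callbacks, then the wrapped model's own values. A middleware sketch, assuming the `@ai-sdk/openai` provider; the logging middleware and the overridden model id are illustrative only:

```js
import { wrapLanguageModel } from 'ai';
import { openai } from '@ai-sdk/openai'; // assumed provider package

const model = wrapLanguageModel({
  model: openai('gpt-4o-mini'),
  modelId: 'gpt-4o-mini-logged', // optional override, resolved as in doWrap above
  middleware: {
    transformParams: async ({ params }) => {
      console.log('outgoing params:', JSON.stringify(params).length, 'bytes');
      return params;
    },
  },
});
```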
@@ -7902,7 +7965,7 @@ function wrapProvider({
}

// src/registry/custom-provider.ts
- var
+ var import_provider28 = require("@ai-sdk/provider");
function customProvider({
languageModels,
textEmbeddingModels,
@@ -7919,7 +7982,7 @@ function customProvider({
if (fallbackProvider) {
return fallbackProvider.languageModel(modelId);
}
- throw new
+ throw new import_provider28.NoSuchModelError({ modelId, modelType: "languageModel" });
},
textEmbeddingModel(modelId) {
if (textEmbeddingModels != null && modelId in textEmbeddingModels) {
@@ -7928,7 +7991,7 @@ function customProvider({
if (fallbackProvider) {
return fallbackProvider.textEmbeddingModel(modelId);
}
- throw new
+ throw new import_provider28.NoSuchModelError({ modelId, modelType: "textEmbeddingModel" });
},
imageModel(modelId) {
if (imageModels != null && modelId in imageModels) {
@@ -7937,7 +8000,7 @@ function customProvider({
if (fallbackProvider == null ? void 0 : fallbackProvider.imageModel) {
return fallbackProvider.imageModel(modelId);
}
- throw new
+ throw new import_provider28.NoSuchModelError({ modelId, modelType: "imageModel" });
},
transcriptionModel(modelId) {
if (transcriptionModels != null && modelId in transcriptionModels) {
@@ -7946,7 +8009,7 @@ function customProvider({
if (fallbackProvider == null ? void 0 : fallbackProvider.transcriptionModel) {
return fallbackProvider.transcriptionModel(modelId);
}
- throw new
+ throw new import_provider28.NoSuchModelError({ modelId, modelType: "transcriptionModel" });
},
speechModel(modelId) {
if (speechModels != null && modelId in speechModels) {
@@ -7955,19 +8018,19 @@ function customProvider({
if (fallbackProvider == null ? void 0 : fallbackProvider.speechModel) {
return fallbackProvider.speechModel(modelId);
}
- throw new
+ throw new import_provider28.NoSuchModelError({ modelId, modelType: "speechModel" });
}
};
}
var experimental_customProvider = customProvider;

// src/registry/no-such-provider-error.ts
- var
- var
- var
- var
- var
- var NoSuchProviderError = class extends
+ var import_provider29 = require("@ai-sdk/provider");
+ var name16 = "AI_NoSuchProviderError";
+ var marker16 = `vercel.ai.error.${name16}`;
+ var symbol16 = Symbol.for(marker16);
+ var _a16;
+ var NoSuchProviderError = class extends import_provider29.NoSuchModelError {
constructor({
modelId,
modelType,
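`customProvider` resolves aliases from the maps above, falls through to `fallbackProvider`, and otherwise throws `NoSuchModelError` for every model type. A sketch, assuming the `@ai-sdk/openai` provider:

```js
import { customProvider } from 'ai';
import { openai } from '@ai-sdk/openai'; // assumed provider package

export const myProvider = customProvider({
  languageModels: {
    fast: openai('gpt-4o-mini'),
    smart: openai('gpt-4o'),
  },
  fallbackProvider: openai, // unknown ids fall through; otherwise NoSuchModelError
});

const model = myProvider.languageModel('fast');
```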
@@ -7975,19 +8038,19 @@ var NoSuchProviderError = class extends import_provider28.NoSuchModelError {
availableProviders,
message = `No such provider: ${providerId} (available providers: ${availableProviders.join()})`
}) {
- super({ errorName:
- this[
+ super({ errorName: name16, modelId, modelType, message });
+ this[_a16] = true;
this.providerId = providerId;
this.availableProviders = availableProviders;
}
static isInstance(error) {
- return
+ return import_provider29.AISDKError.hasMarker(error, marker16);
}
};
-
+ _a16 = symbol16;

// src/registry/provider-registry.ts
- var
+ var import_provider30 = require("@ai-sdk/provider");
function createProviderRegistry(providers, {
separator = ":",
languageModelMiddleware
@@ -8032,7 +8095,7 @@ var DefaultProviderRegistry = class {
splitId(id, modelType) {
const index = id.indexOf(this.separator);
if (index === -1) {
- throw new
+ throw new import_provider30.NoSuchModelError({
modelId: id,
modelType,
message: `Invalid ${modelType} id for registry: ${id} (must be in the format "providerId${this.separator}modelId")`
@@ -8041,14 +8104,14 @@ var DefaultProviderRegistry = class {
return [id.slice(0, index), id.slice(index + this.separator.length)];
}
languageModel(id) {
- var
+ var _a17, _b;
const [providerId, modelId] = this.splitId(id, "languageModel");
- let model = (_b = (
-
+ let model = (_b = (_a17 = this.getProvider(providerId, "languageModel")).languageModel) == null ? void 0 : _b.call(
+ _a17,
modelId
);
if (model == null) {
- throw new
+ throw new import_provider30.NoSuchModelError({ modelId: id, modelType: "languageModel" });
}
if (this.languageModelMiddleware != null) {
model = wrapLanguageModel({
@@ -8059,12 +8122,12 @@ var DefaultProviderRegistry = class {
return model;
}
textEmbeddingModel(id) {
- var
+ var _a17;
const [providerId, modelId] = this.splitId(id, "textEmbeddingModel");
const provider = this.getProvider(providerId, "textEmbeddingModel");
- const model = (
+ const model = (_a17 = provider.textEmbeddingModel) == null ? void 0 : _a17.call(provider, modelId);
if (model == null) {
- throw new
+ throw new import_provider30.NoSuchModelError({
modelId: id,
modelType: "textEmbeddingModel"
});
@@ -8072,22 +8135,22 @@ var DefaultProviderRegistry = class {
return model;
}
imageModel(id) {
- var
+ var _a17;
const [providerId, modelId] = this.splitId(id, "imageModel");
const provider = this.getProvider(providerId, "imageModel");
- const model = (
+ const model = (_a17 = provider.imageModel) == null ? void 0 : _a17.call(provider, modelId);
if (model == null) {
- throw new
+ throw new import_provider30.NoSuchModelError({ modelId: id, modelType: "imageModel" });
}
return model;
}
transcriptionModel(id) {
- var
+ var _a17;
const [providerId, modelId] = this.splitId(id, "transcriptionModel");
const provider = this.getProvider(providerId, "transcriptionModel");
- const model = (
+ const model = (_a17 = provider.transcriptionModel) == null ? void 0 : _a17.call(provider, modelId);
if (model == null) {
- throw new
+ throw new import_provider30.NoSuchModelError({
modelId: id,
modelType: "transcriptionModel"
});
@@ -8095,12 +8158,12 @@ var DefaultProviderRegistry = class {
return model;
}
speechModel(id) {
- var
+ var _a17;
const [providerId, modelId] = this.splitId(id, "speechModel");
const provider = this.getProvider(providerId, "speechModel");
- const model = (
+ const model = (_a17 = provider.speechModel) == null ? void 0 : _a17.call(provider, modelId);
if (model == null) {
- throw new
+ throw new import_provider30.NoSuchModelError({ modelId: id, modelType: "speechModel" });
}
return model;
}
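The registry splits model ids on the configured separator and reports unknown providers or models with `NoSuchProviderError`/`NoSuchModelError`, as in the hunks above. A sketch, assuming the `@ai-sdk/openai` and `@ai-sdk/anthropic` provider packages:

```js
import { createProviderRegistry } from 'ai';
import { openai } from '@ai-sdk/openai';       // assumed provider packages
import { anthropic } from '@ai-sdk/anthropic';

const registry = createProviderRegistry(
  { openai, anthropic },
  { separator: ':' }, // "providerId:modelId", enforced by splitId above
);

const model = registry.languageModel('openai:gpt-4o-mini');
// registry.languageModel('unknown:model') throws NoSuchProviderError
```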
@@ -8267,13 +8330,13 @@ var SseMCPTransport = class {
}
this.abortController = new AbortController();
const establishConnection = async () => {
- var
+ var _a17, _b, _c;
try {
const headers = new Headers(this.headers);
headers.set("Accept", "text/event-stream");
const response = await fetch(this.url.href, {
headers,
- signal: (
+ signal: (_a17 = this.abortController) == null ? void 0 : _a17.signal
});
if (!response.ok || !response.body) {
const error = new MCPClientError({
@@ -8285,7 +8348,7 @@ var SseMCPTransport = class {
const stream = response.body.pipeThrough(new TextDecoderStream()).pipeThrough(new import_provider_utils21.EventSourceParserStream());
const reader = stream.getReader();
const processEvents = async () => {
- var
+ var _a18, _b2, _c2;
try {
while (true) {
const { done, value } = await reader.read();
@@ -8313,7 +8376,7 @@ var SseMCPTransport = class {
const message = JSONRPCMessageSchema.parse(
JSON.parse(data)
);
- (
+ (_a18 = this.onmessage) == null ? void 0 : _a18.call(this, message);
} catch (error) {
const e = new MCPClientError({
message: "MCP SSE Transport Error: Failed to parse message",
@@ -8347,14 +8410,14 @@ var SseMCPTransport = class {
});
}
async close() {
- var
+ var _a17, _b, _c;
this.connected = false;
- (
+ (_a17 = this.sseConnection) == null ? void 0 : _a17.close();
(_b = this.abortController) == null ? void 0 : _b.abort();
(_c = this.onclose) == null ? void 0 : _c.call(this);
}
async send(message) {
- var
+ var _a17, _b, _c;
if (!this.endpoint || !this.connected) {
throw new MCPClientError({
message: "MCP SSE Transport Error: Not connected"
@@ -8367,7 +8430,7 @@ var SseMCPTransport = class {
method: "POST",
headers,
body: JSON.stringify(message),
- signal: (
+ signal: (_a17 = this.abortController) == null ? void 0 : _a17.signal
};
const response = await fetch(this.endpoint, init);
if (!response.ok) {
@@ -8408,7 +8471,7 @@ async function createMCPClient(config) {
var DefaultMCPClient = class {
constructor({
transport: transportConfig,
- name:
+ name: name17 = "ai-sdk-mcp-client",
onUncaughtError
}) {
this.requestMessageId = 0;
@@ -8435,7 +8498,7 @@ var DefaultMCPClient = class {
this.onResponse(message);
};
this.clientInfo = {
- name:
+ name: name17,
version: CLIENT_VERSION
};
}
@@ -8475,10 +8538,10 @@ var DefaultMCPClient = class {
}
}
async close() {
- var
+ var _a17;
if (this.isClosed)
return;
- await ((
+ await ((_a17 = this.transport) == null ? void 0 : _a17.close());
this.onClose();
}
assertCapability(method) {
@@ -8568,13 +8631,13 @@ var DefaultMCPClient = class {
}
}
async callTool({
- name:
+ name: name17,
args,
options
}) {
try {
return this.request({
- request: { method: "tools/call", params: { name:
+ request: { method: "tools/call", params: { name: name17, arguments: args } },
resultSchema: CallToolResultSchema,
options: {
signal: options == null ? void 0 : options.abortSignal
@@ -8598,34 +8661,34 @@ var DefaultMCPClient = class {
async tools({
schemas = "automatic"
} = {}) {
- var
+ var _a17;
const tools = {};
try {
const listToolsResult = await this.listTools();
- for (const { name:
- if (schemas !== "automatic" && !(
+ for (const { name: name17, description, inputSchema } of listToolsResult.tools) {
+ if (schemas !== "automatic" && !(name17 in schemas)) {
continue;
}
const self = this;
const execute = async (args, options) => {
- var
- (
- return self.callTool({ name:
+ var _a18;
+ (_a18 = options == null ? void 0 : options.abortSignal) == null ? void 0 : _a18.throwIfAborted();
+ return self.callTool({ name: name17, args, options });
};
const toolWithExecute = schemas === "automatic" ? (0, import_provider_utils22.dynamicTool)({
description,
inputSchema: (0, import_provider_utils22.jsonSchema)({
...inputSchema,
- properties: (
+ properties: (_a17 = inputSchema.properties) != null ? _a17 : {},
additionalProperties: false
}),
execute
}) : (0, import_provider_utils22.tool)({
description,
- inputSchema: schemas[
+ inputSchema: schemas[name17].inputSchema,
execute
});
- tools[
+ tools[name17] = toolWithExecute;
}
return tools;
} catch (error) {
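`tools()` converts every tool reported by the MCP server into an AI SDK tool whose `execute` forwards to `callTool`. A client sketch; the export alias `experimental_createMCPClient`, the SSE endpoint URL, and the `@ai-sdk/openai` provider are assumptions:

```js
import { experimental_createMCPClient as createMCPClient, generateText } from 'ai';
import { openai } from '@ai-sdk/openai'; // assumed provider package

const mcpClient = await createMCPClient({
  transport: { type: 'sse', url: 'https://example.com/mcp/sse' }, // placeholder URL
});

try {
  const tools = await mcpClient.tools(); // schema discovery via listTools()
  const { text } = await generateText({
    model: openai('gpt-4o-mini'),
    tools,
    prompt: 'Use the available tools to look up the latest release notes.',
  });
  console.log(text);
} finally {
  await mcpClient.close();
}
```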
@@ -8670,8 +8733,8 @@ var DefaultMCPClient = class {
};

// src/error/no-transcript-generated-error.ts
- var
- var NoTranscriptGeneratedError = class extends
+ var import_provider31 = require("@ai-sdk/provider");
+ var NoTranscriptGeneratedError = class extends import_provider31.AISDKError {
constructor(options) {
super({
name: "AI_NoTranscriptGeneratedError",
@@ -8704,16 +8767,16 @@ async function transcribe({
const audioData = audio instanceof URL ? (await download({ url: audio })).data : convertDataContentToUint8Array(audio);
const result = await retry(
() => {
- var
+ var _a17;
return model.doGenerate({
audio: audioData,
abortSignal,
headers,
providerOptions,
- mediaType: (
+ mediaType: (_a17 = detectMediaType({
data: audioData,
signatures: audioMediaTypeSignatures
- })) != null ?
+ })) != null ? _a17 : "audio/wav"
});
}
);
@@ -8732,14 +8795,14 @@ async function transcribe({
}
var DefaultTranscriptionResult = class {
constructor(options) {
- var
+ var _a17;
this.text = options.text;
this.segments = options.segments;
this.language = options.language;
this.durationInSeconds = options.durationInSeconds;
this.warnings = options.warnings;
this.responses = options.responses;
- this.providerMetadata = (
+ this.providerMetadata = (_a17 = options.providerMetadata) != null ? _a17 : {};
}
};

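`transcribe` sniffs the audio media type from the bytes and falls back to `audio/wav`, as in the hunk above. A sketch; the export alias `experimental_transcribe` and the `@ai-sdk/openai` transcription model are assumptions:

```js
import { experimental_transcribe as transcribe } from 'ai';
import { openai } from '@ai-sdk/openai'; // assumed provider package
import { readFileSync } from 'node:fs';

const { text, segments, durationInSeconds } = await transcribe({
  model: openai.transcription('whisper-1'),
  audio: readFileSync('meeting.wav'), // media type detected, "audio/wav" fallback
});

console.log(durationInSeconds, text, segments.length);
```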
@@ -8778,7 +8841,7 @@ async function callCompletionApi({
onError,
fetch: fetch2 = getOriginalFetch()
}) {
- var
+ var _a17;
try {
setLoading(true);
setError(void 0);
@@ -8802,7 +8865,7 @@ async function callCompletionApi({
});
if (!response.ok) {
throw new Error(
- (
+ (_a17 = await response.text()) != null ? _a17 : "Failed to fetch the chat response."
);
}
if (!response.body) {
@@ -8886,12 +8949,12 @@ async function convertFileListToFileUIParts(files) {
}
return Promise.all(
Array.from(files).map(async (file) => {
- const { name:
+ const { name: name17, type } = file;
const dataUrl = await new Promise((resolve2, reject) => {
const reader = new FileReader();
reader.onload = (readerEvent) => {
- var
- resolve2((
+ var _a17;
+ resolve2((_a17 = readerEvent.target) == null ? void 0 : _a17.result);
};
reader.onerror = (error) => reject(error);
reader.readAsDataURL(file);
@@ -8899,7 +8962,7 @@ async function convertFileListToFileUIParts(files) {
return {
type: "file",
mediaType: type,
- filename:
+ filename: name17,
url: dataUrl
};
})
@@ -8933,11 +8996,11 @@ var HttpChatTransport = class {
abortSignal,
...options
}) {
- var
+ var _a17, _b, _c, _d, _e;
const resolvedBody = await (0, import_provider_utils24.resolve)(this.body);
const resolvedHeaders = await (0, import_provider_utils24.resolve)(this.headers);
const resolvedCredentials = await (0, import_provider_utils24.resolve)(this.credentials);
- const preparedRequest = await ((
+ const preparedRequest = await ((_a17 = this.prepareSendMessagesRequest) == null ? void 0 : _a17.call(this, {
api: this.api,
id: options.chatId,
messages: options.messages,
@@ -8981,11 +9044,11 @@ var HttpChatTransport = class {
return this.processResponseStream(response.body);
}
async reconnectToStream(options) {
- var
+ var _a17, _b, _c, _d, _e;
const resolvedBody = await (0, import_provider_utils24.resolve)(this.body);
const resolvedHeaders = await (0, import_provider_utils24.resolve)(this.headers);
const resolvedCredentials = await (0, import_provider_utils24.resolve)(this.credentials);
- const preparedRequest = await ((
+ const preparedRequest = await ((_a17 = this.prepareReconnectToStreamRequest) == null ? void 0 : _a17.call(this, {
api: this.api,
id: options.chatId,
body: { ...resolvedBody, ...options.body },
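`HttpChatTransport.sendMessages` consults an optional `prepareSendMessagesRequest` hook before issuing the request, as shown above. A client-side sketch using the exported `DefaultChatTransport`; the endpoint, the auth header, and the `useChat` integration are assumptions:

```js
import { DefaultChatTransport } from 'ai';

const transport = new DefaultChatTransport({
  api: '/api/chat', // placeholder endpoint
  prepareSendMessagesRequest: ({ id, messages, body }) => ({
    body: { id, messages, ...body },
    headers: { Authorization: 'Bearer <token>' }, // placeholder auth
  }),
});

// e.g. useChat({ transport }) with @ai-sdk/react (assumed consumer).
```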
@@ -9063,11 +9126,11 @@ var AbstractChat = class {
* If a messageId is provided, the message will be replaced.
*/
this.sendMessage = async (message, options) => {
- var
+ var _a17, _b, _c, _d;
if (message == null) {
await this.makeRequest({
trigger: "submit-message",
- messageId: (
+ messageId: (_a17 = this.lastMessage) == null ? void 0 : _a17.id,
...options
});
return;
@@ -9160,7 +9223,7 @@ var AbstractChat = class {
toolCallId,
output
}) => this.jobExecutor.run(async () => {
- var
+ var _a17, _b;
const messages = this.state.messages;
const lastMessage = messages[messages.length - 1];
this.state.replaceMessage(messages.length - 1, {
@@ -9179,7 +9242,7 @@ var AbstractChat = class {
} : part
);
}
- if (this.status !== "streaming" && this.status !== "submitted" && ((
+ if (this.status !== "streaming" && this.status !== "submitted" && ((_a17 = this.sendAutomaticallyWhen) == null ? void 0 : _a17.call(this, { messages: this.state.messages }))) {
this.makeRequest({
trigger: "submit-message",
messageId: (_b = this.lastMessage) == null ? void 0 : _b.id
@@ -9190,10 +9253,10 @@ var AbstractChat = class {
* Abort the current request immediately, keep the generated tokens if any.
*/
this.stop = async () => {
- var
+ var _a17;
if (this.status !== "streaming" && this.status !== "submitted")
return;
- if ((
+ if ((_a17 = this.activeResponse) == null ? void 0 : _a17.abortController) {
this.activeResponse.abortController.abort();
}
};
@@ -9248,7 +9311,7 @@ var AbstractChat = class {
body,
messageId
}) {
- var
+ var _a17, _b, _c;
this.setStatus({ status: "submitted", error: void 0 });
const lastMessage = this.lastMessage;
try {
@@ -9291,9 +9354,9 @@ var AbstractChat = class {
() => job({
state: activeResponse.state,
write: () => {
- var
+ var _a18;
this.setStatus({ status: "streaming" });
- const replaceLastMessage = activeResponse.state.message.id === ((
+ const replaceLastMessage = activeResponse.state.message.id === ((_a18 = this.lastMessage) == null ? void 0 : _a18.id);
if (replaceLastMessage) {
this.state.replaceMessage(
this.state.messages.length - 1,
@@ -9322,7 +9385,7 @@ var AbstractChat = class {
throw error;
}
});
- (
+ (_a17 = this.onFinish) == null ? void 0 : _a17.call(this, { message: activeResponse.state.message });
this.setStatus({ status: "ready" });
} catch (err) {
if (err.name === "AbortError") {
@@ -9386,14 +9449,16 @@ function convertToModelMessages(messages, options) {
case "text":
return {
type: "text",
- text: part.text
+ text: part.text,
+ ...part.providerMetadata != null ? { providerOptions: part.providerMetadata } : {}
};
case "file":
return {
type: "file",
mediaType: part.mediaType,
filename: part.filename,
- data: part.url
+ data: part.url,
+ ...part.providerMetadata != null ? { providerOptions: part.providerMetadata } : {}
};
default:
return part;
@@ -9405,7 +9470,7 @@ function convertToModelMessages(messages, options) {
case "assistant": {
if (message.parts != null) {
let processBlock2 = function() {
- var
+ var _a17, _b;
if (block.length === 0) {
return;
}
@@ -9458,7 +9523,7 @@ function convertToModelMessages(messages, options) {
type: "tool-call",
toolCallId: part.toolCallId,
toolName,
- input: part.state === "output-error" ? (
+ input: part.state === "output-error" ? (_a17 = part.input) != null ? _a17 : part.rawInput : part.input,
providerExecuted: part.providerExecuted,
...part.callProviderMetadata != null ? { providerOptions: part.callProviderMetadata } : {}
});
@@ -9491,7 +9556,7 @@ function convertToModelMessages(messages, options) {
modelMessages.push({
role: "tool",
content: toolParts.map((toolPart) => {
- var
+ var _a18;
switch (toolPart.state) {
case "output-error":
case "output-available": {
@@ -9502,7 +9567,7 @@ function convertToModelMessages(messages, options) {
toolName,
output: createToolModelOutput({
output: toolPart.state === "output-error" ? toolPart.errorText : toolPart.output,
- tool: (
+ tool: (_a18 = options == null ? void 0 : options.tools) == null ? void 0 : _a18[toolName],
errorMode: toolPart.state === "output-error" ? "text" : "none"
})
};
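`convertToModelMessages` now forwards `providerMetadata` on text and file UI parts as `providerOptions` (first hunk of this group). A typical server-side sketch; the framework-style `POST` handler and the `@ai-sdk/openai` provider are assumptions:

```js
import { convertToModelMessages, streamText } from 'ai';
import { openai } from '@ai-sdk/openai'; // assumed provider package

export async function POST(req) {
  const { messages } = await req.json(); // UIMessage[] from the client

  const result = streamText({
    model: openai('gpt-4o-mini'),
    // Per-part providerMetadata is carried over as providerOptions.
    messages: convertToModelMessages(messages),
  });

  return result.toUIMessageStreamResponse();
}
```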
@@ -9692,7 +9757,7 @@ function readUIMessageStream({
onError,
terminateOnError = false
}) {
- var
+ var _a17;
let controller;
let hasErrored = false;
const outputStream = new ReadableStream({
@@ -9701,7 +9766,7 @@ function readUIMessageStream({
}
});
const state = createStreamingUIMessageState({
- messageId: (
+ messageId: (_a17 = message == null ? void 0 : message.id) != null ? _a17 : "",
lastMessage: message
});
const handleError = (error) => {
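`readUIMessageStream` seeds its streaming state with the id of the optional `message` argument and otherwise falls back to an empty id, as above. A consumption sketch; pairing it with `streamText().toUIMessageStream()` and the `@ai-sdk/openai` provider are assumptions:

```js
import { readUIMessageStream, streamText } from 'ai';
import { openai } from '@ai-sdk/openai'; // assumed provider package

const result = streamText({
  model: openai('gpt-4o-mini'),
  prompt: 'Summarize the 5.0.11 changes in one sentence.',
});

// Yields progressively updated UIMessage snapshots.
for await (const uiMessage of readUIMessageStream({ stream: result.toUIMessageStream() })) {
  console.log(uiMessage.parts.length, 'parts so far');
}
```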
@@ -9757,6 +9822,7 @@ function readUIMessageStream({
NoContentGeneratedError,
NoImageGeneratedError,
NoObjectGeneratedError,
+ NoOutputGeneratedError,
NoOutputSpecifiedError,
NoSuchModelError,
NoSuchProviderError,