ai 6.0.34 → 6.0.35
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/CHANGELOG.md +6 -0
- package/dist/index.d.mts +50 -21
- package/dist/index.d.ts +50 -21
- package/dist/index.js +348 -286
- package/dist/index.js.map +1 -1
- package/dist/index.mjs +280 -219
- package/dist/index.mjs.map +1 -1
- package/dist/internal/index.js +1 -1
- package/dist/internal/index.mjs +1 -1
- package/docs/07-reference/05-ai-sdk-errors/ai-ui-message-stream-error.mdx +67 -0
- package/package.json +1 -1
- package/src/error/index.ts +1 -0
- package/src/error/ui-message-stream-error.ts +48 -0
- package/src/ui/process-ui-message-stream.test.ts +242 -0
- package/src/ui/process-ui-message-stream.ts +51 -3
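
The substantive change in this release, visible in the file list and the diff below, is a new `UIMessageStreamError` (error name `AI_UIMessageStreamError`, with `chunkType` and `chunkId` fields) that is exported from the package and thrown by `processUIMessageStream` when a `text-delta`, `text-end`, `reasoning-delta`, `reasoning-end`, or `tool-input-delta` chunk arrives for a part that was never started. A minimal sketch of detecting it on the consumer side, assuming some `consumeMyStream` helper that feeds UI message chunks through the SDK (that helper is hypothetical, not part of this diff):

```ts
import { UIMessageStreamError } from 'ai';

try {
  // Hypothetical: any code path that runs UI message chunks
  // through the SDK's stream processing.
  await consumeMyStream();
} catch (error) {
  if (UIMessageStreamError.isInstance(error)) {
    // chunkType and chunkId identify the offending chunk, e.g. a
    // "text-delta" that arrived before its "text-start".
    console.error(error.chunkType, error.chunkId, error.message);
  } else {
    throw error;
  }
}
```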
package/dist/index.js
CHANGED
|
@@ -4,8 +4,8 @@ var __getOwnPropDesc = Object.getOwnPropertyDescriptor;
|
|
|
4
4
|
var __getOwnPropNames = Object.getOwnPropertyNames;
|
|
5
5
|
var __hasOwnProp = Object.prototype.hasOwnProperty;
|
|
6
6
|
var __export = (target, all) => {
|
|
7
|
-
for (var
|
|
8
|
-
__defProp(target,
|
|
7
|
+
for (var name17 in all)
|
|
8
|
+
__defProp(target, name17, { get: all[name17], enumerable: true });
|
|
9
9
|
};
|
|
10
10
|
var __copyProps = (to, from, except, desc) => {
|
|
11
11
|
if (from && typeof from === "object" || typeof from === "function") {
|
|
@@ -20,47 +20,48 @@ var __toCommonJS = (mod) => __copyProps(__defProp({}, "__esModule", { value: tru
|
|
|
20
20
|
// src/index.ts
|
|
21
21
|
var src_exports = {};
|
|
22
22
|
__export(src_exports, {
|
|
23
|
-
AISDKError: () =>
|
|
24
|
-
APICallError: () =>
|
|
23
|
+
AISDKError: () => import_provider18.AISDKError,
|
|
24
|
+
APICallError: () => import_provider18.APICallError,
|
|
25
25
|
AbstractChat: () => AbstractChat,
|
|
26
26
|
DefaultChatTransport: () => DefaultChatTransport,
|
|
27
27
|
DirectChatTransport: () => DirectChatTransport,
|
|
28
28
|
DownloadError: () => import_provider_utils.DownloadError,
|
|
29
|
-
EmptyResponseBodyError: () =>
|
|
29
|
+
EmptyResponseBodyError: () => import_provider18.EmptyResponseBodyError,
|
|
30
30
|
Experimental_Agent: () => ToolLoopAgent,
|
|
31
31
|
HttpChatTransport: () => HttpChatTransport,
|
|
32
32
|
InvalidArgumentError: () => InvalidArgumentError,
|
|
33
33
|
InvalidDataContentError: () => InvalidDataContentError,
|
|
34
34
|
InvalidMessageRoleError: () => InvalidMessageRoleError,
|
|
35
|
-
InvalidPromptError: () =>
|
|
36
|
-
InvalidResponseDataError: () =>
|
|
35
|
+
InvalidPromptError: () => import_provider18.InvalidPromptError,
|
|
36
|
+
InvalidResponseDataError: () => import_provider18.InvalidResponseDataError,
|
|
37
37
|
InvalidStreamPartError: () => InvalidStreamPartError,
|
|
38
38
|
InvalidToolApprovalError: () => InvalidToolApprovalError,
|
|
39
39
|
InvalidToolInputError: () => InvalidToolInputError,
|
|
40
|
-
JSONParseError: () =>
|
|
40
|
+
JSONParseError: () => import_provider18.JSONParseError,
|
|
41
41
|
JsonToSseTransformStream: () => JsonToSseTransformStream,
|
|
42
|
-
LoadAPIKeyError: () =>
|
|
43
|
-
LoadSettingError: () =>
|
|
42
|
+
LoadAPIKeyError: () => import_provider18.LoadAPIKeyError,
|
|
43
|
+
LoadSettingError: () => import_provider18.LoadSettingError,
|
|
44
44
|
MessageConversionError: () => MessageConversionError,
|
|
45
|
-
NoContentGeneratedError: () =>
|
|
45
|
+
NoContentGeneratedError: () => import_provider18.NoContentGeneratedError,
|
|
46
46
|
NoImageGeneratedError: () => NoImageGeneratedError,
|
|
47
47
|
NoObjectGeneratedError: () => NoObjectGeneratedError,
|
|
48
48
|
NoOutputGeneratedError: () => NoOutputGeneratedError,
|
|
49
49
|
NoSpeechGeneratedError: () => NoSpeechGeneratedError,
|
|
50
|
-
NoSuchModelError: () =>
|
|
50
|
+
NoSuchModelError: () => import_provider18.NoSuchModelError,
|
|
51
51
|
NoSuchProviderError: () => NoSuchProviderError,
|
|
52
52
|
NoSuchToolError: () => NoSuchToolError,
|
|
53
53
|
Output: () => output_exports,
|
|
54
54
|
RetryError: () => RetryError,
|
|
55
55
|
SerialJobExecutor: () => SerialJobExecutor,
|
|
56
56
|
TextStreamChatTransport: () => TextStreamChatTransport,
|
|
57
|
-
TooManyEmbeddingValuesForCallError: () =>
|
|
57
|
+
TooManyEmbeddingValuesForCallError: () => import_provider18.TooManyEmbeddingValuesForCallError,
|
|
58
58
|
ToolCallNotFoundForApprovalError: () => ToolCallNotFoundForApprovalError,
|
|
59
59
|
ToolCallRepairError: () => ToolCallRepairError,
|
|
60
60
|
ToolLoopAgent: () => ToolLoopAgent,
|
|
61
|
-
TypeValidationError: () =>
|
|
61
|
+
TypeValidationError: () => import_provider18.TypeValidationError,
|
|
62
|
+
UIMessageStreamError: () => UIMessageStreamError,
|
|
62
63
|
UI_MESSAGE_STREAM_HEADERS: () => UI_MESSAGE_STREAM_HEADERS,
|
|
63
|
-
UnsupportedFunctionalityError: () =>
|
|
64
|
+
UnsupportedFunctionalityError: () => import_provider18.UnsupportedFunctionalityError,
|
|
64
65
|
UnsupportedModelVersionError: () => UnsupportedModelVersionError,
|
|
65
66
|
addToolInputExamplesMiddleware: () => addToolInputExamplesMiddleware,
|
|
66
67
|
asSchema: () => import_provider_utils38.asSchema,
|
|
@@ -148,7 +149,7 @@ var import_provider_utils38 = require("@ai-sdk/provider-utils");
|
|
|
148
149
|
var import_provider_utils15 = require("@ai-sdk/provider-utils");
|
|
149
150
|
|
|
150
151
|
// src/error/index.ts
|
|
151
|
-
var
|
|
152
|
+
var import_provider18 = require("@ai-sdk/provider");
|
|
152
153
|
|
|
153
154
|
// src/error/invalid-argument-error.ts
|
|
154
155
|
var import_provider = require("@ai-sdk/provider");
|
|
@@ -409,21 +410,22 @@ var UnsupportedModelVersionError = class extends import_provider12.AISDKError {
|
|
|
409
410
|
}
|
|
410
411
|
};
|
|
411
412
|
|
|
412
|
-
// src/
|
|
413
|
+
// src/error/ui-message-stream-error.ts
|
|
413
414
|
var import_provider13 = require("@ai-sdk/provider");
|
|
414
|
-
var name11 = "
|
|
415
|
+
var name11 = "AI_UIMessageStreamError";
|
|
415
416
|
var marker11 = `vercel.ai.error.${name11}`;
|
|
416
417
|
var symbol11 = Symbol.for(marker11);
|
|
417
418
|
var _a11;
|
|
418
|
-
var
|
|
419
|
+
var UIMessageStreamError = class extends import_provider13.AISDKError {
|
|
419
420
|
constructor({
|
|
420
|
-
|
|
421
|
-
|
|
422
|
-
message
|
|
421
|
+
chunkType,
|
|
422
|
+
chunkId,
|
|
423
|
+
message
|
|
423
424
|
}) {
|
|
424
|
-
super({ name: name11, message
|
|
425
|
+
super({ name: name11, message });
|
|
425
426
|
this[_a11] = true;
|
|
426
|
-
this.
|
|
427
|
+
this.chunkType = chunkType;
|
|
428
|
+
this.chunkId = chunkId;
|
|
427
429
|
}
|
|
428
430
|
static isInstance(error) {
|
|
429
431
|
return import_provider13.AISDKError.hasMarker(error, marker11);
|
|
@@ -431,20 +433,21 @@ var InvalidDataContentError = class extends import_provider13.AISDKError {
|
|
|
431
433
|
};
|
|
432
434
|
_a11 = symbol11;
|
|
433
435
|
|
|
434
|
-
// src/prompt/invalid-
|
|
436
|
+
// src/prompt/invalid-data-content-error.ts
|
|
435
437
|
var import_provider14 = require("@ai-sdk/provider");
|
|
436
|
-
var name12 = "
|
|
438
|
+
var name12 = "AI_InvalidDataContentError";
|
|
437
439
|
var marker12 = `vercel.ai.error.${name12}`;
|
|
438
440
|
var symbol12 = Symbol.for(marker12);
|
|
439
441
|
var _a12;
|
|
440
|
-
var
|
|
442
|
+
var InvalidDataContentError = class extends import_provider14.AISDKError {
|
|
441
443
|
constructor({
|
|
442
|
-
|
|
443
|
-
|
|
444
|
+
content,
|
|
445
|
+
cause,
|
|
446
|
+
message = `Invalid data content. Expected a base64 string, Uint8Array, ArrayBuffer, or Buffer, but got ${typeof content}.`
|
|
444
447
|
}) {
|
|
445
|
-
super({ name: name12, message });
|
|
448
|
+
super({ name: name12, message, cause });
|
|
446
449
|
this[_a12] = true;
|
|
447
|
-
this.
|
|
450
|
+
this.content = content;
|
|
448
451
|
}
|
|
449
452
|
static isInstance(error) {
|
|
450
453
|
return import_provider14.AISDKError.hasMarker(error, marker12);
|
|
@@ -452,20 +455,20 @@ var InvalidMessageRoleError = class extends import_provider14.AISDKError {
|
|
|
452
455
|
};
|
|
453
456
|
_a12 = symbol12;
|
|
454
457
|
|
|
455
|
-
// src/prompt/message-
|
|
458
|
+
// src/prompt/invalid-message-role-error.ts
|
|
456
459
|
var import_provider15 = require("@ai-sdk/provider");
|
|
457
|
-
var name13 = "
|
|
460
|
+
var name13 = "AI_InvalidMessageRoleError";
|
|
458
461
|
var marker13 = `vercel.ai.error.${name13}`;
|
|
459
462
|
var symbol13 = Symbol.for(marker13);
|
|
460
463
|
var _a13;
|
|
461
|
-
var
|
|
464
|
+
var InvalidMessageRoleError = class extends import_provider15.AISDKError {
|
|
462
465
|
constructor({
|
|
463
|
-
|
|
464
|
-
message
|
|
466
|
+
role,
|
|
467
|
+
message = `Invalid message role: '${role}'. Must be one of: "system", "user", "assistant", "tool".`
|
|
465
468
|
}) {
|
|
466
469
|
super({ name: name13, message });
|
|
467
470
|
this[_a13] = true;
|
|
468
|
-
this.
|
|
471
|
+
this.role = role;
|
|
469
472
|
}
|
|
470
473
|
static isInstance(error) {
|
|
471
474
|
return import_provider15.AISDKError.hasMarker(error, marker13);
|
|
@@ -473,32 +476,53 @@ var MessageConversionError = class extends import_provider15.AISDKError {
|
|
|
473
476
|
};
|
|
474
477
|
_a13 = symbol13;
|
|
475
478
|
|
|
476
|
-
// src/error
|
|
477
|
-
var import_provider_utils = require("@ai-sdk/provider-utils");
|
|
478
|
-
|
|
479
|
-
// src/util/retry-error.ts
|
|
479
|
+
// src/prompt/message-conversion-error.ts
|
|
480
480
|
var import_provider16 = require("@ai-sdk/provider");
|
|
481
|
-
var name14 = "
|
|
481
|
+
var name14 = "AI_MessageConversionError";
|
|
482
482
|
var marker14 = `vercel.ai.error.${name14}`;
|
|
483
483
|
var symbol14 = Symbol.for(marker14);
|
|
484
484
|
var _a14;
|
|
485
|
-
var
|
|
485
|
+
var MessageConversionError = class extends import_provider16.AISDKError {
|
|
486
|
+
constructor({
|
|
487
|
+
originalMessage,
|
|
488
|
+
message
|
|
489
|
+
}) {
|
|
490
|
+
super({ name: name14, message });
|
|
491
|
+
this[_a14] = true;
|
|
492
|
+
this.originalMessage = originalMessage;
|
|
493
|
+
}
|
|
494
|
+
static isInstance(error) {
|
|
495
|
+
return import_provider16.AISDKError.hasMarker(error, marker14);
|
|
496
|
+
}
|
|
497
|
+
};
|
|
498
|
+
_a14 = symbol14;
|
|
499
|
+
|
|
500
|
+
// src/error/index.ts
|
|
501
|
+
var import_provider_utils = require("@ai-sdk/provider-utils");
|
|
502
|
+
|
|
503
|
+
// src/util/retry-error.ts
|
|
504
|
+
var import_provider17 = require("@ai-sdk/provider");
|
|
505
|
+
var name15 = "AI_RetryError";
|
|
506
|
+
var marker15 = `vercel.ai.error.${name15}`;
|
|
507
|
+
var symbol15 = Symbol.for(marker15);
|
|
508
|
+
var _a15;
|
|
509
|
+
var RetryError = class extends import_provider17.AISDKError {
|
|
486
510
|
constructor({
|
|
487
511
|
message,
|
|
488
512
|
reason,
|
|
489
513
|
errors
|
|
490
514
|
}) {
|
|
491
|
-
super({ name:
|
|
492
|
-
this[
|
|
515
|
+
super({ name: name15, message });
|
|
516
|
+
this[_a15] = true;
|
|
493
517
|
this.reason = reason;
|
|
494
518
|
this.errors = errors;
|
|
495
519
|
this.lastError = errors[errors.length - 1];
|
|
496
520
|
}
|
|
497
521
|
static isInstance(error) {
|
|
498
|
-
return
|
|
522
|
+
return import_provider17.AISDKError.hasMarker(error, marker15);
|
|
499
523
|
}
|
|
500
524
|
};
|
|
501
|
-
|
|
525
|
+
_a15 = symbol15;
|
|
502
526
|
|
|
503
527
|
// src/logger/log-warnings.ts
|
|
504
528
|
function formatWarning({
|
|
@@ -761,7 +785,7 @@ function resolveEmbeddingModel(model) {
|
|
|
761
785
|
return getGlobalProvider().embeddingModel(model);
|
|
762
786
|
}
|
|
763
787
|
function resolveTranscriptionModel(model) {
|
|
764
|
-
var
|
|
788
|
+
var _a17, _b;
|
|
765
789
|
if (typeof model !== "string") {
|
|
766
790
|
if (model.specificationVersion !== "v3" && model.specificationVersion !== "v2") {
|
|
767
791
|
const unsupportedModel = model;
|
|
@@ -773,10 +797,10 @@ function resolveTranscriptionModel(model) {
|
|
|
773
797
|
}
|
|
774
798
|
return asTranscriptionModelV3(model);
|
|
775
799
|
}
|
|
776
|
-
return (_b = (
|
|
800
|
+
return (_b = (_a17 = getGlobalProvider()).transcriptionModel) == null ? void 0 : _b.call(_a17, model);
|
|
777
801
|
}
|
|
778
802
|
function resolveSpeechModel(model) {
|
|
779
|
-
var
|
|
803
|
+
var _a17, _b;
|
|
780
804
|
if (typeof model !== "string") {
|
|
781
805
|
if (model.specificationVersion !== "v3" && model.specificationVersion !== "v2") {
|
|
782
806
|
const unsupportedModel = model;
|
|
@@ -788,7 +812,7 @@ function resolveSpeechModel(model) {
|
|
|
788
812
|
}
|
|
789
813
|
return asSpeechModelV3(model);
|
|
790
814
|
}
|
|
791
|
-
return (_b = (
|
|
815
|
+
return (_b = (_a17 = getGlobalProvider()).speechModel) == null ? void 0 : _b.call(_a17, model);
|
|
792
816
|
}
|
|
793
817
|
function resolveImageModel(model) {
|
|
794
818
|
if (typeof model !== "string") {
|
|
@@ -805,8 +829,8 @@ function resolveImageModel(model) {
|
|
|
805
829
|
return getGlobalProvider().imageModel(model);
|
|
806
830
|
}
|
|
807
831
|
function getGlobalProvider() {
|
|
808
|
-
var
|
|
809
|
-
return (
|
|
832
|
+
var _a17;
|
|
833
|
+
return (_a17 = globalThis.AI_SDK_DEFAULT_PROVIDER) != null ? _a17 : import_gateway.gateway;
|
|
810
834
|
}
|
|
811
835
|
|
|
812
836
|
// src/prompt/call-settings.ts
|
|
@@ -1025,11 +1049,11 @@ var import_provider_utils3 = require("@ai-sdk/provider-utils");
|
|
|
1025
1049
|
var import_provider_utils4 = require("@ai-sdk/provider-utils");
|
|
1026
1050
|
|
|
1027
1051
|
// src/version.ts
|
|
1028
|
-
var VERSION = true ? "6.0.
|
|
1052
|
+
var VERSION = true ? "6.0.35" : "0.0.0-test";
|
|
1029
1053
|
|
|
1030
1054
|
// src/util/download/download.ts
|
|
1031
1055
|
var download = async ({ url }) => {
|
|
1032
|
-
var
|
|
1056
|
+
var _a17;
|
|
1033
1057
|
const urlText = url.toString();
|
|
1034
1058
|
try {
|
|
1035
1059
|
const response = await fetch(urlText, {
|
|
@@ -1048,7 +1072,7 @@ var download = async ({ url }) => {
|
|
|
1048
1072
|
}
|
|
1049
1073
|
return {
|
|
1050
1074
|
data: new Uint8Array(await response.arrayBuffer()),
|
|
1051
|
-
mediaType: (
|
|
1075
|
+
mediaType: (_a17 = response.headers.get("content-type")) != null ? _a17 : void 0
|
|
1052
1076
|
};
|
|
1053
1077
|
} catch (error) {
|
|
1054
1078
|
if (import_provider_utils3.DownloadError.isInstance(error)) {
|
|
@@ -1066,7 +1090,7 @@ var createDefaultDownloadFunction = (download2 = download) => (requestedDownload
|
|
|
1066
1090
|
);
|
|
1067
1091
|
|
|
1068
1092
|
// src/prompt/data-content.ts
|
|
1069
|
-
var
|
|
1093
|
+
var import_provider19 = require("@ai-sdk/provider");
|
|
1070
1094
|
var import_provider_utils5 = require("@ai-sdk/provider-utils");
|
|
1071
1095
|
var import_v4 = require("zod/v4");
|
|
1072
1096
|
|
|
@@ -1094,8 +1118,8 @@ var dataContentSchema = import_v4.z.union([
|
|
|
1094
1118
|
import_v4.z.custom(
|
|
1095
1119
|
// Buffer might not be available in some environments such as CloudFlare:
|
|
1096
1120
|
(value) => {
|
|
1097
|
-
var
|
|
1098
|
-
return (_b = (
|
|
1121
|
+
var _a17, _b;
|
|
1122
|
+
return (_b = (_a17 = globalThis.Buffer) == null ? void 0 : _a17.isBuffer(value)) != null ? _b : false;
|
|
1099
1123
|
},
|
|
1100
1124
|
{ message: "Must be a Buffer" }
|
|
1101
1125
|
)
|
|
@@ -1118,7 +1142,7 @@ function convertToLanguageModelV3DataContent(content) {
|
|
|
1118
1142
|
content.toString()
|
|
1119
1143
|
);
|
|
1120
1144
|
if (dataUrlMediaType == null || base64Content == null) {
|
|
1121
|
-
throw new
|
|
1145
|
+
throw new import_provider19.AISDKError({
|
|
1122
1146
|
name: "InvalidDataContentError",
|
|
1123
1147
|
message: `Invalid data URL format in content ${content.toString()}`
|
|
1124
1148
|
});
|
|
@@ -1335,8 +1359,8 @@ async function downloadAssets(messages, download2, supportedUrls) {
|
|
|
1335
1359
|
).flat().filter(
|
|
1336
1360
|
(part) => part.type === "image" || part.type === "file"
|
|
1337
1361
|
).map((part) => {
|
|
1338
|
-
var
|
|
1339
|
-
const mediaType = (
|
|
1362
|
+
var _a17;
|
|
1363
|
+
const mediaType = (_a17 = part.mediaType) != null ? _a17 : part.type === "image" ? "image/*" : void 0;
|
|
1340
1364
|
let data = part.type === "image" ? part.image : part.data;
|
|
1341
1365
|
if (typeof data === "string") {
|
|
1342
1366
|
try {
|
|
@@ -1366,7 +1390,7 @@ async function downloadAssets(messages, download2, supportedUrls) {
|
|
|
1366
1390
|
);
|
|
1367
1391
|
}
|
|
1368
1392
|
function convertPartToLanguageModelPart(part, downloadedAssets) {
|
|
1369
|
-
var
|
|
1393
|
+
var _a17;
|
|
1370
1394
|
if (part.type === "text") {
|
|
1371
1395
|
return {
|
|
1372
1396
|
type: "text",
|
|
@@ -1399,7 +1423,7 @@ function convertPartToLanguageModelPart(part, downloadedAssets) {
|
|
|
1399
1423
|
switch (type) {
|
|
1400
1424
|
case "image": {
|
|
1401
1425
|
if (data instanceof Uint8Array || typeof data === "string") {
|
|
1402
|
-
mediaType = (
|
|
1426
|
+
mediaType = (_a17 = detectMediaType({ data, signatures: imageMediaTypeSignatures })) != null ? _a17 : mediaType;
|
|
1403
1427
|
}
|
|
1404
1428
|
return {
|
|
1405
1429
|
type: "file",
|
|
@@ -1451,7 +1475,7 @@ function mapToolResultOutput(output) {
|
|
|
1451
1475
|
}
|
|
1452
1476
|
|
|
1453
1477
|
// src/prompt/create-tool-model-output.ts
|
|
1454
|
-
var
|
|
1478
|
+
var import_provider20 = require("@ai-sdk/provider");
|
|
1455
1479
|
async function createToolModelOutput({
|
|
1456
1480
|
toolCallId,
|
|
1457
1481
|
input,
|
|
@@ -1460,7 +1484,7 @@ async function createToolModelOutput({
|
|
|
1460
1484
|
errorMode
|
|
1461
1485
|
}) {
|
|
1462
1486
|
if (errorMode === "text") {
|
|
1463
|
-
return { type: "error-text", value: (0,
|
|
1487
|
+
return { type: "error-text", value: (0, import_provider20.getErrorMessage)(output) };
|
|
1464
1488
|
} else if (errorMode === "json") {
|
|
1465
1489
|
return { type: "error-json", value: toJSONValue(output) };
|
|
1466
1490
|
}
|
|
@@ -1587,10 +1611,10 @@ async function prepareToolsAndToolChoice({
|
|
|
1587
1611
|
};
|
|
1588
1612
|
}
|
|
1589
1613
|
const filteredTools = activeTools != null ? Object.entries(tools).filter(
|
|
1590
|
-
([
|
|
1614
|
+
([name17]) => activeTools.includes(name17)
|
|
1591
1615
|
) : Object.entries(tools);
|
|
1592
1616
|
const languageModelTools = [];
|
|
1593
|
-
for (const [
|
|
1617
|
+
for (const [name17, tool2] of filteredTools) {
|
|
1594
1618
|
const toolType = tool2.type;
|
|
1595
1619
|
switch (toolType) {
|
|
1596
1620
|
case void 0:
|
|
@@ -1598,7 +1622,7 @@ async function prepareToolsAndToolChoice({
|
|
|
1598
1622
|
case "function":
|
|
1599
1623
|
languageModelTools.push({
|
|
1600
1624
|
type: "function",
|
|
1601
|
-
name:
|
|
1625
|
+
name: name17,
|
|
1602
1626
|
description: tool2.description,
|
|
1603
1627
|
inputSchema: await (0, import_provider_utils7.asSchema)(tool2.inputSchema).jsonSchema,
|
|
1604
1628
|
...tool2.inputExamples != null ? { inputExamples: tool2.inputExamples } : {},
|
|
@@ -1609,7 +1633,7 @@ async function prepareToolsAndToolChoice({
|
|
|
1609
1633
|
case "provider":
|
|
1610
1634
|
languageModelTools.push({
|
|
1611
1635
|
type: "provider",
|
|
1612
|
-
name:
|
|
1636
|
+
name: name17,
|
|
1613
1637
|
id: tool2.id,
|
|
1614
1638
|
args: tool2.args
|
|
1615
1639
|
});
|
|
@@ -1627,7 +1651,7 @@ async function prepareToolsAndToolChoice({
|
|
|
1627
1651
|
}
|
|
1628
1652
|
|
|
1629
1653
|
// src/prompt/standardize-prompt.ts
|
|
1630
|
-
var
|
|
1654
|
+
var import_provider21 = require("@ai-sdk/provider");
|
|
1631
1655
|
var import_provider_utils8 = require("@ai-sdk/provider-utils");
|
|
1632
1656
|
var import_v46 = require("zod/v4");
|
|
1633
1657
|
|
|
@@ -1840,13 +1864,13 @@ var modelMessageSchema = import_v45.z.union([
|
|
|
1840
1864
|
// src/prompt/standardize-prompt.ts
|
|
1841
1865
|
async function standardizePrompt(prompt) {
|
|
1842
1866
|
if (prompt.prompt == null && prompt.messages == null) {
|
|
1843
|
-
throw new
|
|
1867
|
+
throw new import_provider21.InvalidPromptError({
|
|
1844
1868
|
prompt,
|
|
1845
1869
|
message: "prompt or messages must be defined"
|
|
1846
1870
|
});
|
|
1847
1871
|
}
|
|
1848
1872
|
if (prompt.prompt != null && prompt.messages != null) {
|
|
1849
|
-
throw new
|
|
1873
|
+
throw new import_provider21.InvalidPromptError({
|
|
1850
1874
|
prompt,
|
|
1851
1875
|
message: "prompt and messages cannot be defined at the same time"
|
|
1852
1876
|
});
|
|
@@ -1854,7 +1878,7 @@ async function standardizePrompt(prompt) {
|
|
|
1854
1878
|
if (prompt.system != null && typeof prompt.system !== "string" && !asArray(prompt.system).every(
|
|
1855
1879
|
(message) => typeof message === "object" && message !== null && "role" in message && message.role === "system"
|
|
1856
1880
|
)) {
|
|
1857
|
-
throw new
|
|
1881
|
+
throw new import_provider21.InvalidPromptError({
|
|
1858
1882
|
prompt,
|
|
1859
1883
|
message: "system must be a string, SystemModelMessage, or array of SystemModelMessage"
|
|
1860
1884
|
});
|
|
@@ -1867,13 +1891,13 @@ async function standardizePrompt(prompt) {
|
|
|
1867
1891
|
} else if (prompt.messages != null) {
|
|
1868
1892
|
messages = prompt.messages;
|
|
1869
1893
|
} else {
|
|
1870
|
-
throw new
|
|
1894
|
+
throw new import_provider21.InvalidPromptError({
|
|
1871
1895
|
prompt,
|
|
1872
1896
|
message: "prompt or messages must be defined"
|
|
1873
1897
|
});
|
|
1874
1898
|
}
|
|
1875
1899
|
if (messages.length === 0) {
|
|
1876
|
-
throw new
|
|
1900
|
+
throw new import_provider21.InvalidPromptError({
|
|
1877
1901
|
prompt,
|
|
1878
1902
|
message: "messages must not be empty"
|
|
1879
1903
|
});
|
|
@@ -1883,7 +1907,7 @@ async function standardizePrompt(prompt) {
|
|
|
1883
1907
|
schema: import_v46.z.array(modelMessageSchema)
|
|
1884
1908
|
});
|
|
1885
1909
|
if (!validationResult.success) {
|
|
1886
|
-
throw new
|
|
1910
|
+
throw new import_provider21.InvalidPromptError({
|
|
1887
1911
|
prompt,
|
|
1888
1912
|
message: "The messages do not match the ModelMessage[] schema.",
|
|
1889
1913
|
cause: validationResult.error
|
|
@@ -1897,14 +1921,14 @@ async function standardizePrompt(prompt) {
|
|
|
1897
1921
|
|
|
1898
1922
|
// src/prompt/wrap-gateway-error.ts
|
|
1899
1923
|
var import_gateway2 = require("@ai-sdk/gateway");
|
|
1900
|
-
var
|
|
1924
|
+
var import_provider22 = require("@ai-sdk/provider");
|
|
1901
1925
|
function wrapGatewayError(error) {
|
|
1902
1926
|
if (!import_gateway2.GatewayAuthenticationError.isInstance(error))
|
|
1903
1927
|
return error;
|
|
1904
1928
|
const isProductionEnv = (process == null ? void 0 : process.env.NODE_ENV) === "production";
|
|
1905
1929
|
const moreInfoURL = "https://ai-sdk.dev/unauthenticated-ai-gateway";
|
|
1906
1930
|
if (isProductionEnv) {
|
|
1907
|
-
return new
|
|
1931
|
+
return new import_provider22.AISDKError({
|
|
1908
1932
|
name: "GatewayError",
|
|
1909
1933
|
message: `Unauthenticated. Configure AI_GATEWAY_API_KEY or use a provider module. Learn more: ${moreInfoURL}`
|
|
1910
1934
|
});
|
|
@@ -1945,7 +1969,7 @@ function getBaseTelemetryAttributes({
|
|
|
1945
1969
|
telemetry,
|
|
1946
1970
|
headers
|
|
1947
1971
|
}) {
|
|
1948
|
-
var
|
|
1972
|
+
var _a17;
|
|
1949
1973
|
return {
|
|
1950
1974
|
"ai.model.provider": model.provider,
|
|
1951
1975
|
"ai.model.id": model.modelId,
|
|
@@ -1964,7 +1988,7 @@ function getBaseTelemetryAttributes({
|
|
|
1964
1988
|
return attributes;
|
|
1965
1989
|
}, {}),
|
|
1966
1990
|
// add metadata as attributes:
|
|
1967
|
-
...Object.entries((
|
|
1991
|
+
...Object.entries((_a17 = telemetry == null ? void 0 : telemetry.metadata) != null ? _a17 : {}).reduce(
|
|
1968
1992
|
(attributes, [key, value]) => {
|
|
1969
1993
|
attributes[`ai.telemetry.metadata.${key}`] = value;
|
|
1970
1994
|
return attributes;
|
|
@@ -1989,7 +2013,7 @@ var noopTracer = {
|
|
|
1989
2013
|
startSpan() {
|
|
1990
2014
|
return noopSpan;
|
|
1991
2015
|
},
|
|
1992
|
-
startActiveSpan(
|
|
2016
|
+
startActiveSpan(name17, arg1, arg2, arg3) {
|
|
1993
2017
|
if (typeof arg1 === "function") {
|
|
1994
2018
|
return arg1(noopSpan);
|
|
1995
2019
|
}
|
|
@@ -2059,14 +2083,14 @@ function getTracer({
|
|
|
2059
2083
|
// src/telemetry/record-span.ts
|
|
2060
2084
|
var import_api2 = require("@opentelemetry/api");
|
|
2061
2085
|
async function recordSpan({
|
|
2062
|
-
name:
|
|
2086
|
+
name: name17,
|
|
2063
2087
|
tracer,
|
|
2064
2088
|
attributes,
|
|
2065
2089
|
fn,
|
|
2066
2090
|
endWhenDone = true
|
|
2067
2091
|
}) {
|
|
2068
2092
|
return tracer.startActiveSpan(
|
|
2069
|
-
|
|
2093
|
+
name17,
|
|
2070
2094
|
{ attributes: await attributes },
|
|
2071
2095
|
async (span) => {
|
|
2072
2096
|
try {
|
|
@@ -2196,12 +2220,12 @@ function createNullLanguageModelUsage() {
|
|
|
2196
2220
|
};
|
|
2197
2221
|
}
|
|
2198
2222
|
function addLanguageModelUsage(usage1, usage2) {
|
|
2199
|
-
var
|
|
2223
|
+
var _a17, _b, _c, _d, _e, _f, _g, _h, _i, _j;
|
|
2200
2224
|
return {
|
|
2201
2225
|
inputTokens: addTokenCounts(usage1.inputTokens, usage2.inputTokens),
|
|
2202
2226
|
inputTokenDetails: {
|
|
2203
2227
|
noCacheTokens: addTokenCounts(
|
|
2204
|
-
(
|
|
2228
|
+
(_a17 = usage1.inputTokenDetails) == null ? void 0 : _a17.noCacheTokens,
|
|
2205
2229
|
(_b = usage2.inputTokenDetails) == null ? void 0 : _b.noCacheTokens
|
|
2206
2230
|
),
|
|
2207
2231
|
cacheReadTokens: addTokenCounts(
|
|
@@ -2280,7 +2304,7 @@ function mergeObjects(base, overrides) {
|
|
|
2280
2304
|
}
|
|
2281
2305
|
|
|
2282
2306
|
// src/util/retry-with-exponential-backoff.ts
|
|
2283
|
-
var
|
|
2307
|
+
var import_provider23 = require("@ai-sdk/provider");
|
|
2284
2308
|
var import_provider_utils9 = require("@ai-sdk/provider-utils");
|
|
2285
2309
|
function getRetryDelayInMs({
|
|
2286
2310
|
error,
|
|
@@ -2347,7 +2371,7 @@ async function _retryWithExponentialBackoff(f, {
|
|
|
2347
2371
|
errors: newErrors
|
|
2348
2372
|
});
|
|
2349
2373
|
}
|
|
2350
|
-
if (error instanceof Error &&
|
|
2374
|
+
if (error instanceof Error && import_provider23.APICallError.isInstance(error) && error.isRetryable === true && tryNumber <= maxRetries) {
|
|
2351
2375
|
await (0, import_provider_utils9.delay)(
|
|
2352
2376
|
getRetryDelayInMs({
|
|
2353
2377
|
error,
|
|
@@ -2654,7 +2678,7 @@ __export(output_exports, {
|
|
|
2654
2678
|
object: () => object,
|
|
2655
2679
|
text: () => text
|
|
2656
2680
|
});
|
|
2657
|
-
var
|
|
2681
|
+
var import_provider24 = require("@ai-sdk/provider");
|
|
2658
2682
|
var import_provider_utils13 = require("@ai-sdk/provider-utils");
|
|
2659
2683
|
|
|
2660
2684
|
// src/util/parse-partial-json.ts
|
|
@@ -3010,7 +3034,7 @@ var text = () => ({
|
|
|
3010
3034
|
});
|
|
3011
3035
|
var object = ({
|
|
3012
3036
|
schema: inputSchema,
|
|
3013
|
-
name:
|
|
3037
|
+
name: name17,
|
|
3014
3038
|
description
|
|
3015
3039
|
}) => {
|
|
3016
3040
|
const schema = (0, import_provider_utils13.asSchema)(inputSchema);
|
|
@@ -3019,7 +3043,7 @@ var object = ({
|
|
|
3019
3043
|
responseFormat: (0, import_provider_utils13.resolve)(schema.jsonSchema).then((jsonSchema2) => ({
|
|
3020
3044
|
type: "json",
|
|
3021
3045
|
schema: jsonSchema2,
|
|
3022
|
-
...
|
|
3046
|
+
...name17 != null && { name: name17 },
|
|
3023
3047
|
...description != null && { description }
|
|
3024
3048
|
})),
|
|
3025
3049
|
async parseCompleteOutput({ text: text2 }, context) {
|
|
@@ -3073,7 +3097,7 @@ var object = ({
|
|
|
3073
3097
|
};
|
|
3074
3098
|
var array = ({
|
|
3075
3099
|
element: inputElementSchema,
|
|
3076
|
-
name:
|
|
3100
|
+
name: name17,
|
|
3077
3101
|
description
|
|
3078
3102
|
}) => {
|
|
3079
3103
|
const elementSchema = (0, import_provider_utils13.asSchema)(inputElementSchema);
|
|
@@ -3093,7 +3117,7 @@ var array = ({
|
|
|
3093
3117
|
required: ["elements"],
|
|
3094
3118
|
additionalProperties: false
|
|
3095
3119
|
},
|
|
3096
|
-
...
|
|
3120
|
+
...name17 != null && { name: name17 },
|
|
3097
3121
|
...description != null && { description }
|
|
3098
3122
|
};
|
|
3099
3123
|
}),
|
|
@@ -3113,7 +3137,7 @@ var array = ({
|
|
|
3113
3137
|
if (outerValue == null || typeof outerValue !== "object" || !("elements" in outerValue) || !Array.isArray(outerValue.elements)) {
|
|
3114
3138
|
throw new NoObjectGeneratedError({
|
|
3115
3139
|
message: "No object generated: response did not match schema.",
|
|
3116
|
-
cause: new
|
|
3140
|
+
cause: new import_provider24.TypeValidationError({
|
|
3117
3141
|
value: outerValue,
|
|
3118
3142
|
cause: "response must be an object with an elements array"
|
|
3119
3143
|
}),
|
|
@@ -3185,7 +3209,7 @@ var array = ({
|
|
|
3185
3209
|
};
|
|
3186
3210
|
var choice = ({
|
|
3187
3211
|
options: choiceOptions,
|
|
3188
|
-
name:
|
|
3212
|
+
name: name17,
|
|
3189
3213
|
description
|
|
3190
3214
|
}) => {
|
|
3191
3215
|
return {
|
|
@@ -3202,7 +3226,7 @@ var choice = ({
|
|
|
3202
3226
|
required: ["result"],
|
|
3203
3227
|
additionalProperties: false
|
|
3204
3228
|
},
|
|
3205
|
-
...
|
|
3229
|
+
...name17 != null && { name: name17 },
|
|
3206
3230
|
...description != null && { description }
|
|
3207
3231
|
}),
|
|
3208
3232
|
async parseCompleteOutput({ text: text2 }, context) {
|
|
@@ -3221,7 +3245,7 @@ var choice = ({
|
|
|
3221
3245
|
if (outerValue == null || typeof outerValue !== "object" || !("result" in outerValue) || typeof outerValue.result !== "string" || !choiceOptions.includes(outerValue.result)) {
|
|
3222
3246
|
throw new NoObjectGeneratedError({
|
|
3223
3247
|
message: "No object generated: response did not match schema.",
|
|
3224
|
-
cause: new
|
|
3248
|
+
cause: new import_provider24.TypeValidationError({
|
|
3225
3249
|
value: outerValue,
|
|
3226
3250
|
cause: "response must be an object that contains a choice value."
|
|
3227
3251
|
}),
|
|
@@ -3263,14 +3287,14 @@ var choice = ({
|
|
|
3263
3287
|
};
|
|
3264
3288
|
};
|
|
3265
3289
|
var json = ({
|
|
3266
|
-
name:
|
|
3290
|
+
name: name17,
|
|
3267
3291
|
description
|
|
3268
3292
|
} = {}) => {
|
|
3269
3293
|
return {
|
|
3270
3294
|
name: "json",
|
|
3271
3295
|
responseFormat: Promise.resolve({
|
|
3272
3296
|
type: "json",
|
|
3273
|
-
...
|
|
3297
|
+
...name17 != null && { name: name17 },
|
|
3274
3298
|
...description != null && { description }
|
|
3275
3299
|
}),
|
|
3276
3300
|
async parseCompleteOutput({ text: text2 }, context) {
|
|
@@ -3315,7 +3339,7 @@ async function parseToolCall({
|
|
|
3315
3339
|
system,
|
|
3316
3340
|
messages
|
|
3317
3341
|
}) {
|
|
3318
|
-
var
|
|
3342
|
+
var _a17;
|
|
3319
3343
|
try {
|
|
3320
3344
|
if (tools == null) {
|
|
3321
3345
|
if (toolCall.providerExecuted && toolCall.dynamic) {
|
|
@@ -3364,7 +3388,7 @@ async function parseToolCall({
|
|
|
3364
3388
|
dynamic: true,
|
|
3365
3389
|
invalid: true,
|
|
3366
3390
|
error,
|
|
3367
|
-
title: (
|
|
3391
|
+
title: (_a17 = tools == null ? void 0 : tools[toolCall.toolName]) == null ? void 0 : _a17.title,
|
|
3368
3392
|
providerExecuted: toolCall.providerExecuted,
|
|
3369
3393
|
providerMetadata: toolCall.providerMetadata
|
|
3370
3394
|
};
|
|
@@ -3503,8 +3527,8 @@ function stepCountIs(stepCount) {
|
|
|
3503
3527
|
}
|
|
3504
3528
|
function hasToolCall(toolName) {
|
|
3505
3529
|
return ({ steps }) => {
|
|
3506
|
-
var
|
|
3507
|
-
return (_c = (_b = (
|
|
3530
|
+
var _a17, _b, _c;
|
|
3531
|
+
return (_c = (_b = (_a17 = steps[steps.length - 1]) == null ? void 0 : _a17.toolCalls) == null ? void 0 : _b.some(
|
|
3508
3532
|
(toolCall) => toolCall.toolName === toolName
|
|
3509
3533
|
)) != null ? _c : false;
|
|
3510
3534
|
};
|
|
@@ -3758,7 +3782,7 @@ async function generateText({
|
|
|
3758
3782
|
}),
|
|
3759
3783
|
tracer,
|
|
3760
3784
|
fn: async (span) => {
|
|
3761
|
-
var
|
|
3785
|
+
var _a17, _b, _c, _d, _e, _f, _g, _h;
|
|
3762
3786
|
const initialMessages = initialPrompt.messages;
|
|
3763
3787
|
const responseMessages = [];
|
|
3764
3788
|
const { approvedToolApprovals, deniedToolApprovals } = collectToolApprovals({ messages: initialMessages });
|
|
@@ -3853,7 +3877,7 @@ async function generateText({
|
|
|
3853
3877
|
experimental_context
|
|
3854
3878
|
}));
|
|
3855
3879
|
const stepModel = resolveLanguageModel(
|
|
3856
|
-
(
|
|
3880
|
+
(_a17 = prepareStepResult == null ? void 0 : prepareStepResult.model) != null ? _a17 : model
|
|
3857
3881
|
);
|
|
3858
3882
|
const promptMessages = await convertToLanguageModelPrompt({
|
|
3859
3883
|
prompt: {
|
|
@@ -3871,7 +3895,7 @@ async function generateText({
|
|
|
3871
3895
|
});
|
|
3872
3896
|
currentModelResponse = await retry(
|
|
3873
3897
|
() => {
|
|
3874
|
-
var
|
|
3898
|
+
var _a18;
|
|
3875
3899
|
return recordSpan({
|
|
3876
3900
|
name: "ai.generateText.doGenerate",
|
|
3877
3901
|
attributes: selectTelemetryAttributes({
|
|
@@ -3903,14 +3927,14 @@ async function generateText({
|
|
|
3903
3927
|
"gen_ai.request.max_tokens": settings.maxOutputTokens,
|
|
3904
3928
|
"gen_ai.request.presence_penalty": settings.presencePenalty,
|
|
3905
3929
|
"gen_ai.request.stop_sequences": settings.stopSequences,
|
|
3906
|
-
"gen_ai.request.temperature": (
|
|
3930
|
+
"gen_ai.request.temperature": (_a18 = settings.temperature) != null ? _a18 : void 0,
|
|
3907
3931
|
"gen_ai.request.top_k": settings.topK,
|
|
3908
3932
|
"gen_ai.request.top_p": settings.topP
|
|
3909
3933
|
}
|
|
3910
3934
|
}),
|
|
3911
3935
|
tracer,
|
|
3912
3936
|
fn: async (span2) => {
|
|
3913
|
-
var
|
|
3937
|
+
var _a19, _b2, _c2, _d2, _e2, _f2, _g2, _h2;
|
|
3914
3938
|
const stepProviderOptions = mergeObjects(
|
|
3915
3939
|
providerOptions,
|
|
3916
3940
|
prepareStepResult == null ? void 0 : prepareStepResult.providerOptions
|
|
@@ -3926,7 +3950,7 @@ async function generateText({
|
|
|
3926
3950
|
headers: headersWithUserAgent
|
|
3927
3951
|
});
|
|
3928
3952
|
const responseData = {
|
|
3929
|
-
id: (_b2 = (
|
|
3953
|
+
id: (_b2 = (_a19 = result.response) == null ? void 0 : _a19.id) != null ? _b2 : generateId2(),
|
|
3930
3954
|
timestamp: (_d2 = (_c2 = result.response) == null ? void 0 : _c2.timestamp) != null ? _d2 : /* @__PURE__ */ new Date(),
|
|
3931
3955
|
modelId: (_f2 = (_e2 = result.response) == null ? void 0 : _e2.modelId) != null ? _f2 : stepModel.modelId,
|
|
3932
3956
|
headers: (_g2 = result.response) == null ? void 0 : _g2.headers,
|
|
@@ -4419,7 +4443,7 @@ function asContent({
|
|
|
4419
4443
|
}
|
|
4420
4444
|
|
|
4421
4445
|
// src/generate-text/stream-text.ts
|
|
4422
|
-
var
|
|
4446
|
+
var import_provider25 = require("@ai-sdk/provider");
|
|
4423
4447
|
var import_provider_utils19 = require("@ai-sdk/provider-utils");
|
|
4424
4448
|
|
|
4425
4449
|
// src/util/prepare-headers.ts
|
|
@@ -4798,21 +4822,23 @@ function processUIMessageStream({
|
|
|
4798
4822
|
new TransformStream({
|
|
4799
4823
|
async transform(chunk, controller) {
|
|
4800
4824
|
await runUpdateMessageJob(async ({ state, write }) => {
|
|
4801
|
-
var
|
|
4825
|
+
var _a17, _b, _c, _d;
|
|
4802
4826
|
function getToolInvocation(toolCallId) {
|
|
4803
4827
|
const toolInvocations = state.message.parts.filter(isToolUIPart);
|
|
4804
4828
|
const toolInvocation = toolInvocations.find(
|
|
4805
4829
|
(invocation) => invocation.toolCallId === toolCallId
|
|
4806
4830
|
);
|
|
4807
4831
|
if (toolInvocation == null) {
|
|
4808
|
-
throw new
|
|
4809
|
-
|
|
4810
|
-
|
|
4832
|
+
throw new UIMessageStreamError({
|
|
4833
|
+
chunkType: "tool-invocation",
|
|
4834
|
+
chunkId: toolCallId,
|
|
4835
|
+
message: `No tool invocation found for tool call ID "${toolCallId}".`
|
|
4836
|
+
});
|
|
4811
4837
|
}
|
|
4812
4838
|
return toolInvocation;
|
|
4813
4839
|
}
|
|
4814
4840
|
function updateToolPart(options) {
|
|
4815
|
-
var
|
|
4841
|
+
var _a18;
|
|
4816
4842
|
const part = state.message.parts.find(
|
|
4817
4843
|
(part2) => isStaticToolUIPart(part2) && part2.toolCallId === options.toolCallId
|
|
4818
4844
|
);
|
|
@@ -4828,7 +4854,7 @@ function processUIMessageStream({
|
|
|
4828
4854
|
if (options.title !== void 0) {
|
|
4829
4855
|
anyPart.title = options.title;
|
|
4830
4856
|
}
|
|
4831
|
-
anyPart.providerExecuted = (
|
|
4857
|
+
anyPart.providerExecuted = (_a18 = anyOptions.providerExecuted) != null ? _a18 : part.providerExecuted;
|
|
4832
4858
|
if (anyOptions.providerMetadata != null && part.state === "input-available") {
|
|
4833
4859
|
part.callProviderMetadata = anyOptions.providerMetadata;
|
|
4834
4860
|
}
|
|
@@ -4849,7 +4875,7 @@ function processUIMessageStream({
|
|
|
4849
4875
|
}
|
|
4850
4876
|
}
|
|
4851
4877
|
function updateDynamicToolPart(options) {
|
|
4852
|
-
var
|
|
4878
|
+
var _a18, _b2;
|
|
4853
4879
|
const part = state.message.parts.find(
|
|
4854
4880
|
(part2) => part2.type === "dynamic-tool" && part2.toolCallId === options.toolCallId
|
|
4855
4881
|
);
|
|
@@ -4861,7 +4887,7 @@ function processUIMessageStream({
|
|
|
4861
4887
|
anyPart.input = anyOptions.input;
|
|
4862
4888
|
anyPart.output = anyOptions.output;
|
|
4863
4889
|
anyPart.errorText = anyOptions.errorText;
|
|
4864
|
-
anyPart.rawInput = (
|
|
4890
|
+
anyPart.rawInput = (_a18 = anyOptions.rawInput) != null ? _a18 : anyPart.rawInput;
|
|
4865
4891
|
anyPart.preliminary = anyOptions.preliminary;
|
|
4866
4892
|
if (options.title !== void 0) {
|
|
4867
4893
|
anyPart.title = options.title;
|
|
@@ -4913,13 +4939,27 @@ function processUIMessageStream({
|
|
|
4913
4939
|
}
|
|
4914
4940
|
case "text-delta": {
|
|
4915
4941
|
const textPart = state.activeTextParts[chunk.id];
|
|
4942
|
+
if (textPart == null) {
|
|
4943
|
+
throw new UIMessageStreamError({
|
|
4944
|
+
chunkType: "text-delta",
|
|
4945
|
+
chunkId: chunk.id,
|
|
4946
|
+
message: `Received text-delta for missing text part with ID "${chunk.id}". Ensure a "text-start" chunk is sent before any "text-delta" chunks.`
|
|
4947
|
+
});
|
|
4948
|
+
}
|
|
4916
4949
|
textPart.text += chunk.delta;
|
|
4917
|
-
textPart.providerMetadata = (
|
|
4950
|
+
textPart.providerMetadata = (_a17 = chunk.providerMetadata) != null ? _a17 : textPart.providerMetadata;
|
|
4918
4951
|
write();
|
|
4919
4952
|
break;
|
|
4920
4953
|
}
|
|
4921
4954
|
case "text-end": {
|
|
4922
4955
|
const textPart = state.activeTextParts[chunk.id];
|
|
4956
|
+
if (textPart == null) {
|
|
4957
|
+
throw new UIMessageStreamError({
|
|
4958
|
+
chunkType: "text-end",
|
|
4959
|
+
chunkId: chunk.id,
|
|
4960
|
+
message: `Received text-end for missing text part with ID "${chunk.id}". Ensure a "text-start" chunk is sent before any "text-end" chunks.`
|
|
4961
|
+
});
|
|
4962
|
+
}
|
|
4923
4963
|
textPart.state = "done";
|
|
4924
4964
|
textPart.providerMetadata = (_b = chunk.providerMetadata) != null ? _b : textPart.providerMetadata;
|
|
4925
4965
|
delete state.activeTextParts[chunk.id];
|
|
@@ -4940,6 +4980,13 @@ function processUIMessageStream({
|
|
|
4940
4980
|
}
|
|
4941
4981
|
case "reasoning-delta": {
|
|
4942
4982
|
const reasoningPart = state.activeReasoningParts[chunk.id];
|
|
4983
|
+
if (reasoningPart == null) {
|
|
4984
|
+
throw new UIMessageStreamError({
|
|
4985
|
+
chunkType: "reasoning-delta",
|
|
4986
|
+
chunkId: chunk.id,
|
|
4987
|
+
message: `Received reasoning-delta for missing reasoning part with ID "${chunk.id}". Ensure a "reasoning-start" chunk is sent before any "reasoning-delta" chunks.`
|
|
4988
|
+
});
|
|
4989
|
+
}
|
|
4943
4990
|
reasoningPart.text += chunk.delta;
|
|
4944
4991
|
reasoningPart.providerMetadata = (_c = chunk.providerMetadata) != null ? _c : reasoningPart.providerMetadata;
|
|
4945
4992
|
write();
|
|
@@ -4947,6 +4994,13 @@ function processUIMessageStream({
|
|
|
4947
4994
|
}
|
|
4948
4995
|
case "reasoning-end": {
|
|
4949
4996
|
const reasoningPart = state.activeReasoningParts[chunk.id];
|
|
4997
|
+
if (reasoningPart == null) {
|
|
4998
|
+
throw new UIMessageStreamError({
|
|
4999
|
+
chunkType: "reasoning-end",
|
|
5000
|
+
chunkId: chunk.id,
|
|
5001
|
+
message: `Received reasoning-end for missing reasoning part with ID "${chunk.id}". Ensure a "reasoning-start" chunk is sent before any "reasoning-end" chunks.`
|
|
5002
|
+
});
|
|
5003
|
+
}
|
|
4950
5004
|
reasoningPart.providerMetadata = (_d = chunk.providerMetadata) != null ? _d : reasoningPart.providerMetadata;
|
|
4951
5005
|
reasoningPart.state = "done";
|
|
4952
5006
|
delete state.activeReasoningParts[chunk.id];
|
|
@@ -5018,6 +5072,13 @@ function processUIMessageStream({
|
|
|
5018
5072
|
}
|
|
5019
5073
|
case "tool-input-delta": {
|
|
5020
5074
|
const partialToolCall = state.partialToolCalls[chunk.toolCallId];
|
|
5075
|
+
if (partialToolCall == null) {
|
|
5076
|
+
throw new UIMessageStreamError({
|
|
5077
|
+
chunkType: "tool-input-delta",
|
|
5078
|
+
chunkId: chunk.toolCallId,
|
|
5079
|
+
message: `Received tool-input-delta for missing tool call with ID "${chunk.toolCallId}". Ensure a "tool-input-start" chunk is sent before any "tool-input-delta" chunks.`
|
|
5080
|
+
});
|
|
5081
|
+
}
|
|
5021
5082
|
partialToolCall.text += chunk.inputTextDelta;
|
|
5022
5083
|
const { value: partialArgs } = await parsePartialJson(
|
|
5023
5084
|
partialToolCall.text
|
|
@@ -5353,13 +5414,13 @@ function createAsyncIterableStream(source) {
|
|
|
5353
5414
|
const reader = this.getReader();
|
|
5354
5415
|
let finished = false;
|
|
5355
5416
|
async function cleanup(cancelStream) {
|
|
5356
|
-
var
|
|
5417
|
+
var _a17;
|
|
5357
5418
|
if (finished)
|
|
5358
5419
|
return;
|
|
5359
5420
|
finished = true;
|
|
5360
5421
|
try {
|
|
5361
5422
|
if (cancelStream) {
|
|
5362
|
-
await ((
|
|
5423
|
+
await ((_a17 = reader.cancel) == null ? void 0 : _a17.call(reader));
|
|
5363
5424
|
}
|
|
5364
5425
|
} finally {
|
|
5365
5426
|
try {
|
|
@@ -5525,8 +5586,8 @@ function createStitchableStream() {
|
|
|
5525
5586
|
|
|
5526
5587
|
// src/util/now.ts
|
|
5527
5588
|
function now() {
|
|
5528
|
-
var
|
|
5529
|
-
return (_b = (
|
|
5589
|
+
var _a17, _b;
|
|
5590
|
+
return (_b = (_a17 = globalThis == null ? void 0 : globalThis.performance) == null ? void 0 : _a17.now()) != null ? _b : Date.now();
|
|
5530
5591
|
}
|
|
5531
5592
|
|
|
5532
5593
|
// src/generate-text/run-tools-transformation.ts
|
|
@@ -5873,7 +5934,7 @@ function createOutputTransformStream(output) {
|
|
|
5873
5934
|
}
|
|
5874
5935
|
return new TransformStream({
|
|
5875
5936
|
async transform(chunk, controller) {
|
|
5876
|
-
var
|
|
5937
|
+
var _a17;
|
|
5877
5938
|
if (chunk.type === "finish-step" && textChunk.length > 0) {
|
|
5878
5939
|
publishTextChunk({ controller });
|
|
5879
5940
|
}
|
|
@@ -5900,7 +5961,7 @@ function createOutputTransformStream(output) {
|
|
|
5900
5961
|
}
|
|
5901
5962
|
text2 += chunk.text;
|
|
5902
5963
|
textChunk += chunk.text;
|
|
5903
|
-
textProviderMetadata = (
|
|
5964
|
+
textProviderMetadata = (_a17 = chunk.providerMetadata) != null ? _a17 : textProviderMetadata;
|
|
5904
5965
|
const result = await output.parsePartialOutput({ text: text2 });
|
|
5905
5966
|
if (result !== void 0) {
|
|
5906
5967
|
const currentJson = JSON.stringify(result.partial);
|
|
@@ -5969,7 +6030,7 @@ var DefaultStreamTextResult = class {
|
|
|
5969
6030
|
let activeReasoningContent = {};
|
|
5970
6031
|
const eventProcessor = new TransformStream({
|
|
5971
6032
|
async transform(chunk, controller) {
|
|
5972
|
-
var
|
|
6033
|
+
var _a17, _b, _c, _d;
|
|
5973
6034
|
controller.enqueue(chunk);
|
|
5974
6035
|
const { part } = chunk;
|
|
5975
6036
|
if (part.type === "text-delta" || part.type === "reasoning-delta" || part.type === "source" || part.type === "tool-call" || part.type === "tool-result" || part.type === "tool-input-start" || part.type === "tool-input-delta" || part.type === "raw") {
|
|
@@ -5999,7 +6060,7 @@ var DefaultStreamTextResult = class {
|
|
|
5999
6060
|
return;
|
|
6000
6061
|
}
|
|
6001
6062
|
activeText.text += part.text;
|
|
6002
|
-
activeText.providerMetadata = (
|
|
6063
|
+
activeText.providerMetadata = (_a17 = part.providerMetadata) != null ? _a17 : activeText.providerMetadata;
|
|
6003
6064
|
}
|
|
6004
6065
|
if (part.type === "text-end") {
|
|
6005
6066
|
const activeText = activeTextContent[part.id];
|
|
@@ -6164,8 +6225,8 @@ var DefaultStreamTextResult = class {
|
|
|
6164
6225
|
"ai.response.text": { output: () => finalStep.text },
|
|
6165
6226
|
"ai.response.toolCalls": {
|
|
6166
6227
|
output: () => {
|
|
6167
|
-
var
|
|
6168
|
-
return ((
|
|
6228
|
+
var _a17;
|
|
6229
|
+
return ((_a17 = finalStep.toolCalls) == null ? void 0 : _a17.length) ? JSON.stringify(finalStep.toolCalls) : void 0;
|
|
6169
6230
|
}
|
|
6170
6231
|
},
|
|
6171
6232
|
"ai.response.providerMetadata": JSON.stringify(
|
|
@@ -6202,7 +6263,7 @@ var DefaultStreamTextResult = class {
|
|
|
6202
6263
|
// The `reason` is usually of type DOMException, but it can also be of any type,
|
|
6203
6264
|
// so we use getErrorMessage for serialization because it is already designed to accept values of the unknown type.
|
|
6204
6265
|
// See: https://developer.mozilla.org/en-US/docs/Web/API/AbortSignal/reason
|
|
6205
|
-
...(abortSignal == null ? void 0 : abortSignal.reason) !== void 0 ? { reason: (0,
|
|
6266
|
+
...(abortSignal == null ? void 0 : abortSignal.reason) !== void 0 ? { reason: (0, import_provider25.getErrorMessage)(abortSignal.reason) } : {}
|
|
6206
6267
|
});
|
|
6207
6268
|
controller.close();
|
|
6208
6269
|
}
|
|
@@ -6387,7 +6448,7 @@ var DefaultStreamTextResult = class {
|
|
|
6387
6448
|
responseMessages,
|
|
6388
6449
|
usage
|
|
6389
6450
|
}) {
|
|
6390
|
-
var
|
|
6451
|
+
var _a17, _b, _c, _d, _e, _f;
|
|
6391
6452
|
const includeRawChunks2 = self.includeRawChunks;
|
|
6392
6453
|
const stepTimeoutId = stepTimeoutMs != null ? setTimeout(() => stepAbortController.abort(), stepTimeoutMs) : void 0;
|
|
6393
6454
|
let chunkTimeoutId = void 0;
|
|
@@ -6423,7 +6484,7 @@ var DefaultStreamTextResult = class {
|
|
|
6423
6484
|
experimental_context
|
|
6424
6485
|
}));
|
|
6425
6486
|
const stepModel = resolveLanguageModel(
|
|
6426
|
-
(
|
|
6487
|
+
(_a17 = prepareStepResult == null ? void 0 : prepareStepResult.model) != null ? _a17 : model
|
|
6427
6488
|
);
|
|
6428
6489
|
const promptMessages = await convertToLanguageModelPrompt({
|
|
6429
6490
|
prompt: {
|
|
@@ -6536,7 +6597,7 @@ var DefaultStreamTextResult = class {
|
|
|
6536
6597
|
streamWithToolResults.pipeThrough(
|
|
6537
6598
|
new TransformStream({
|
|
6538
6599
|
async transform(chunk, controller) {
|
|
6539
|
-
var
|
|
6600
|
+
var _a18, _b2, _c2, _d2, _e2;
|
|
6540
6601
|
resetChunkTimeout();
|
|
6541
6602
|
if (chunk.type === "stream-start") {
|
|
6542
6603
|
warnings = chunk.warnings;
|
|
@@ -6610,7 +6671,7 @@ var DefaultStreamTextResult = class {
|
|
|
6610
6671
|
}
|
|
6611
6672
|
case "response-metadata": {
|
|
6612
6673
|
stepResponse = {
|
|
6613
|
-
id: (
|
|
6674
|
+
id: (_a18 = chunk.id) != null ? _a18 : stepResponse.id,
|
|
6614
6675
|
timestamp: (_b2 = chunk.timestamp) != null ? _b2 : stepResponse.timestamp,
|
|
6615
6676
|
modelId: (_c2 = chunk.modelId) != null ? _c2 : stepResponse.modelId
|
|
6616
6677
|
};
|
|
@@ -6942,14 +7003,14 @@ var DefaultStreamTextResult = class {
|
|
|
6942
7003
|
);
|
|
6943
7004
|
}
|
|
6944
7005
|
async consumeStream(options) {
|
|
6945
|
-
var
|
|
7006
|
+
var _a17;
|
|
6946
7007
|
try {
|
|
6947
7008
|
await consumeStream({
|
|
6948
7009
|
stream: this.fullStream,
|
|
6949
7010
|
onError: options == null ? void 0 : options.onError
|
|
6950
7011
|
});
|
|
6951
7012
|
} catch (error) {
|
|
6952
|
-
(
|
|
7013
|
+
(_a17 = options == null ? void 0 : options.onError) == null ? void 0 : _a17.call(options, error);
|
|
6953
7014
|
}
|
|
6954
7015
|
}
|
|
6955
7016
|
get experimental_partialOutputStream() {
|
|
@@ -6969,10 +7030,10 @@ var DefaultStreamTextResult = class {
|
|
|
6969
7030
|
);
|
|
6970
7031
|
}
|
|
6971
7032
|
get elementStream() {
|
|
6972
|
-
var
|
|
6973
|
-
const transform = (
|
|
7033
|
+
var _a17, _b, _c;
|
|
7034
|
+
const transform = (_a17 = this.outputSpecification) == null ? void 0 : _a17.createElementStreamTransform();
|
|
6974
7035
|
if (transform == null) {
|
|
6975
|
-
throw new
|
|
7036
|
+
throw new import_provider25.UnsupportedFunctionalityError({
|
|
6976
7037
|
functionality: `element streams in ${(_c = (_b = this.outputSpecification) == null ? void 0 : _b.name) != null ? _c : "text"} mode`
|
|
6977
7038
|
});
|
|
6978
7039
|
}
|
|
@@ -6980,8 +7041,8 @@ var DefaultStreamTextResult = class {
|
|
|
6980
7041
|
}
|
|
6981
7042
|
get output() {
|
|
6982
7043
|
return this.finalStep.then((step) => {
|
|
6983
|
-
var
|
|
6984
|
-
const output = (
|
|
7044
|
+
var _a17;
|
|
7045
|
+
const output = (_a17 = this.outputSpecification) != null ? _a17 : text();
|
|
6985
7046
|
return output.parseCompleteOutput(
|
|
6986
7047
|
{ text: step.text },
|
|
6987
7048
|
{
|
|
@@ -7001,15 +7062,15 @@ var DefaultStreamTextResult = class {
|
|
|
7001
7062
|
sendSources = false,
|
|
7002
7063
|
sendStart = true,
|
|
7003
7064
|
sendFinish = true,
|
|
7004
|
-
onError =
|
|
7065
|
+
onError = import_provider25.getErrorMessage
|
|
7005
7066
|
} = {}) {
|
|
7006
7067
|
const responseMessageId = generateMessageId != null ? getResponseUIMessageId({
|
|
7007
7068
|
originalMessages,
|
|
7008
7069
|
responseMessageId: generateMessageId
|
|
7009
7070
|
}) : void 0;
|
|
7010
7071
|
const isDynamic = (part) => {
|
|
7011
|
-
var
|
|
7012
|
-
const tool2 = (
|
|
7072
|
+
var _a17;
|
|
7073
|
+
const tool2 = (_a17 = this.tools) == null ? void 0 : _a17[part.toolName];
|
|
7013
7074
|
if (tool2 == null) {
|
|
7014
7075
|
return part.dynamic;
|
|
7015
7076
|
}
|
|
@@ -7347,10 +7408,10 @@ var ToolLoopAgent = class {
|
|
|
7347
7408
|
return this.settings.tools;
|
|
7348
7409
|
}
|
|
7349
7410
|
async prepareCall(options) {
|
|
7350
|
-
var
|
|
7411
|
+
var _a17, _b, _c, _d;
|
|
7351
7412
|
const baseCallArgs = {
|
|
7352
7413
|
...this.settings,
|
|
7353
|
-
stopWhen: (
|
|
7414
|
+
stopWhen: (_a17 = this.settings.stopWhen) != null ? _a17 : stepCountIs(20),
|
|
7354
7415
|
...options
|
|
7355
7416
|
};
|
|
7356
7417
|
const preparedCallArgs = (_d = await ((_c = (_b = this.settings).prepareCall) == null ? void 0 : _c.call(_b, baseCallArgs))) != null ? _d : baseCallArgs;
|
|
@@ -7486,7 +7547,7 @@ function readUIMessageStream({
|
|
|
7486
7547
|
onError,
|
|
7487
7548
|
terminateOnError = false
|
|
7488
7549
|
}) {
|
|
7489
|
-
var
|
|
7550
|
+
var _a17;
|
|
7490
7551
|
let controller;
|
|
7491
7552
|
let hasErrored = false;
|
|
7492
7553
|
const outputStream = new ReadableStream({
|
|
@@ -7495,7 +7556,7 @@ function readUIMessageStream({
|
|
|
7495
7556
|
}
|
|
7496
7557
|
});
|
|
7497
7558
|
const state = createStreamingUIMessageState({
|
|
7498
|
-
messageId: (
|
|
7559
|
+
messageId: (_a17 = message == null ? void 0 : message.id) != null ? _a17 : "",
|
|
7499
7560
|
lastMessage: message
|
|
7500
7561
|
});
|
|
7501
7562
|
const handleError = (error) => {
|
|
@@ -7562,7 +7623,7 @@ async function convertToModelMessages(messages, options) {
|
|
|
7562
7623
|
modelMessages.push({
|
|
7563
7624
|
role: "user",
|
|
7564
7625
|
content: message.parts.map((part) => {
|
|
7565
|
-
var
|
|
7626
|
+
var _a17;
|
|
7566
7627
|
if (isTextUIPart(part)) {
|
|
7567
7628
|
return {
|
|
7568
7629
|
type: "text",
|
|
@@ -7580,7 +7641,7 @@ async function convertToModelMessages(messages, options) {
|
|
|
7580
7641
|
};
|
|
7581
7642
|
}
|
|
7582
7643
|
if (isDataUIPart(part)) {
|
|
7583
|
-
return (
|
|
7644
|
+
return (_a17 = options == null ? void 0 : options.convertDataPart) == null ? void 0 : _a17.call(
|
|
7584
7645
|
options,
|
|
7585
7646
|
part
|
|
7586
7647
|
);
|
|
@@ -7593,7 +7654,7 @@ async function convertToModelMessages(messages, options) {
|
|
|
7593
7654
|
if (message.parts != null) {
|
|
7594
7655
|
let block = [];
|
|
7595
7656
|
async function processBlock() {
|
|
7596
|
-
var
|
|
7657
|
+
var _a17, _b, _c, _d, _e, _f;
|
|
7597
7658
|
if (block.length === 0) {
|
|
7598
7659
|
return;
|
|
7599
7660
|
}
|
|
@@ -7625,7 +7686,7 @@ async function convertToModelMessages(messages, options) {
|
|
|
7625
7686
|
type: "tool-call",
|
|
7626
7687
|
toolCallId: part.toolCallId,
|
|
7627
7688
|
toolName,
|
|
7628
|
-
input: part.state === "output-error" ? (
|
|
7689
|
+
input: part.state === "output-error" ? (_a17 = part.input) != null ? _a17 : "rawInput" in part ? part.rawInput : void 0 : part.input,
|
|
7629
7690
|
providerExecuted: part.providerExecuted,
|
|
7630
7691
|
...part.callProviderMetadata != null ? { providerOptions: part.callProviderMetadata } : {}
|
|
7631
7692
|
});
|
|
@@ -7671,8 +7732,8 @@ async function convertToModelMessages(messages, options) {
|
|
|
7671
7732
|
});
|
|
7672
7733
|
const toolParts = block.filter(
|
|
7673
7734
|
(part) => {
|
|
7674
|
-
var
|
|
7675
|
-
return isToolUIPart(part) && (part.providerExecuted !== true || ((
|
|
7735
|
+
var _a18;
|
|
7736
|
+
return isToolUIPart(part) && (part.providerExecuted !== true || ((_a18 = part.approval) == null ? void 0 : _a18.approved) != null);
|
|
7676
7737
|
}
|
|
7677
7738
|
);
|
|
7678
7739
|
if (toolParts.length > 0) {
|
|
@@ -7760,7 +7821,7 @@ async function convertToModelMessages(messages, options) {
|
|
|
7760
7821
|
}
|
|
7761
7822
|
|
|
7762
7823
|
// src/ui/validate-ui-messages.ts
|
|
7763
|
-
var
|
|
7824
|
+
var import_provider26 = require("@ai-sdk/provider");
|
|
7764
7825
|
var import_provider_utils22 = require("@ai-sdk/provider-utils");
|
|
7765
7826
|
var import_v48 = require("zod/v4");
|
|
7766
7827
|
var uiMessagesSchema = (0, import_provider_utils22.lazySchema)(
|
|
@@ -8063,7 +8124,7 @@ async function safeValidateUIMessages({
|
|
|
8063
8124
|
if (!dataSchema) {
|
|
8064
8125
|
return {
|
|
8065
8126
|
success: false,
|
|
8066
|
-
error: new
|
|
8127
|
+
error: new import_provider26.TypeValidationError({
|
|
8067
8128
|
value: dataPart.data,
|
|
8068
8129
|
cause: `No data schema found for data part ${dataName}`
|
|
8069
8130
|
})
|
|
@@ -8087,7 +8148,7 @@ async function safeValidateUIMessages({
|
|
|
8087
8148
|
if (!tool2) {
|
|
8088
8149
|
return {
|
|
8089
8150
|
success: false,
|
|
8090
|
-
error: new
|
|
8151
|
+
error: new import_provider26.TypeValidationError({
|
|
8091
8152
|
value: toolPart.input,
|
|
8092
8153
|
cause: `No tool schema found for tool part ${toolName}`
|
|
8093
8154
|
})
|
|
@@ -8258,7 +8319,7 @@ async function embed({
}),
tracer,
fn: async (doEmbedSpan) => {
- var
+ var _a17;
const modelResponse = await model.doEmbed({
values: [value],
abortSignal,
@@ -8266,7 +8327,7 @@ async function embed({
providerOptions
});
const embedding2 = modelResponse.embeddings[0];
- const usage2 = (
+ const usage2 = (_a17 = modelResponse.usage) != null ? _a17 : { tokens: NaN };
doEmbedSpan.setAttributes(
await selectTelemetryAttributes({
telemetry,
@@ -8380,7 +8441,7 @@ async function embedMany({
}),
tracer,
fn: async (span) => {
- var
+ var _a17;
const [maxEmbeddingsPerCall, supportsParallelCalls] = await Promise.all([
model.maxEmbeddingsPerCall,
model.supportsParallelCalls
@@ -8405,7 +8466,7 @@ async function embedMany({
}),
tracer,
fn: async (doEmbedSpan) => {
- var
+ var _a18;
const modelResponse = await model.doEmbed({
values,
abortSignal,
@@ -8413,7 +8474,7 @@ async function embedMany({
providerOptions
});
const embeddings3 = modelResponse.embeddings;
- const usage2 = (
+ const usage2 = (_a18 = modelResponse.usage) != null ? _a18 : { tokens: NaN };
doEmbedSpan.setAttributes(
await selectTelemetryAttributes({
telemetry,
@@ -8494,7 +8555,7 @@ async function embedMany({
}),
tracer,
fn: async (doEmbedSpan) => {
- var
+ var _a18;
const modelResponse = await model.doEmbed({
values: chunk,
abortSignal,
@@ -8502,7 +8563,7 @@ async function embedMany({
providerOptions
});
const embeddings2 = modelResponse.embeddings;
- const usage = (
+ const usage = (_a18 = modelResponse.usage) != null ? _a18 : { tokens: NaN };
doEmbedSpan.setAttributes(
await selectTelemetryAttributes({
telemetry,
@@ -8541,7 +8602,7 @@ async function embedMany({
result.providerMetadata
)) {
providerMetadata[providerName] = {
- ...(
+ ...(_a17 = providerMetadata[providerName]) != null ? _a17 : {},
...metadata
};
}
@@ -8602,7 +8663,7 @@ async function generateImage({
abortSignal,
headers
}) {
- var
+ var _a17, _b;
const model = resolveImageModel(modelArg);
const headersWithUserAgent = (0, import_provider_utils25.withUserAgentSuffix)(
headers != null ? headers : {},
@@ -8612,7 +8673,7 @@ async function generateImage({
maxRetries: maxRetriesArg,
abortSignal
});
- const maxImagesPerCallWithDefault = (
+ const maxImagesPerCallWithDefault = (_a17 = maxImagesPerCall != null ? maxImagesPerCall : await invokeModelMaxImagesPerCall(model)) != null ? _a17 : 1;
const callCount = Math.ceil(n / maxImagesPerCallWithDefault);
const callImageCounts = Array.from({ length: callCount }, (_, i) => {
if (i < callCount - 1) {
@@ -8653,13 +8714,13 @@ async function generateImage({
images.push(
...result.images.map(
(image) => {
- var
+ var _a18;
return new DefaultGeneratedFile({
data: image,
- mediaType: (
+ mediaType: (_a18 = detectMediaType({
data: image,
signatures: imageMediaTypeSignatures
- })) != null ?
+ })) != null ? _a18 : "image/png"
});
}
)
@@ -8784,7 +8845,7 @@ function extractReasoningContent(content) {
}

// src/generate-object/output-strategy.ts
- var
+ var import_provider27 = require("@ai-sdk/provider");
var import_provider_utils26 = require("@ai-sdk/provider-utils");
var noSchemaOutputStrategy = {
type: "no-schema",
@@ -8805,7 +8866,7 @@ var noSchemaOutputStrategy = {
} : { success: true, value };
},
createElementStream() {
- throw new
+ throw new import_provider27.UnsupportedFunctionalityError({
functionality: "element streams in no-schema mode"
});
}
@@ -8827,7 +8888,7 @@ var objectOutputStrategy = (schema) => ({
return (0, import_provider_utils26.safeValidateTypes)({ value, schema });
},
createElementStream() {
- throw new
+ throw new import_provider27.UnsupportedFunctionalityError({
functionality: "element streams in object mode"
});
}
@@ -8856,11 +8917,11 @@ var arrayOutputStrategy = (schema) => {
isFirstDelta,
isFinalDelta
}) {
- var
- if (!(0,
+ var _a17;
+ if (!(0, import_provider27.isJSONObject)(value) || !(0, import_provider27.isJSONArray)(value.elements)) {
return {
success: false,
- error: new
+ error: new import_provider27.TypeValidationError({
value,
cause: "value must be an object that contains an array of elements"
})
@@ -8879,7 +8940,7 @@ var arrayOutputStrategy = (schema) => {
}
resultArray.push(result.value);
}
- const publishedElementCount = (
+ const publishedElementCount = (_a17 = latestObject == null ? void 0 : latestObject.length) != null ? _a17 : 0;
let textDelta = "";
if (isFirstDelta) {
textDelta += "[";
@@ -8900,10 +8961,10 @@ var arrayOutputStrategy = (schema) => {
};
},
async validateFinalResult(value) {
- if (!(0,
+ if (!(0, import_provider27.isJSONObject)(value) || !(0, import_provider27.isJSONArray)(value.elements)) {
return {
success: false,
- error: new
+ error: new import_provider27.TypeValidationError({
value,
cause: "value must be an object that contains an array of elements"
})
@@ -8966,10 +9027,10 @@ var enumOutputStrategy = (enumValues) => {
additionalProperties: false
}),
async validateFinalResult(value) {
- if (!(0,
+ if (!(0, import_provider27.isJSONObject)(value) || typeof value.result !== "string") {
return {
success: false,
- error: new
+ error: new import_provider27.TypeValidationError({
value,
cause: 'value must be an object that contains a string in the "result" property.'
})
@@ -8978,17 +9039,17 @@ var enumOutputStrategy = (enumValues) => {
const result = value.result;
return enumValues.includes(result) ? { success: true, value: result } : {
success: false,
- error: new
+ error: new import_provider27.TypeValidationError({
value,
cause: "value must be a string in the enum"
})
};
},
async validatePartialResult({ value, textDelta }) {
- if (!(0,
+ if (!(0, import_provider27.isJSONObject)(value) || typeof value.result !== "string") {
return {
success: false,
- error: new
+ error: new import_provider27.TypeValidationError({
value,
cause: 'value must be an object that contains a string in the "result" property.'
})
@@ -9001,7 +9062,7 @@ var enumOutputStrategy = (enumValues) => {
if (value.result.length === 0 || possibleEnumValues.length === 0) {
return {
success: false,
- error: new
+ error: new import_provider27.TypeValidationError({
value,
cause: "value must be a string in the enum"
})
@@ -9016,7 +9077,7 @@ var enumOutputStrategy = (enumValues) => {
};
},
createElementStream() {
- throw new
+ throw new import_provider27.UnsupportedFunctionalityError({
functionality: "element streams in enum mode"
});
}
@@ -9044,7 +9105,7 @@ function getOutputStrategy({
}

// src/generate-object/parse-and-validate-object-result.ts
- var
+ var import_provider28 = require("@ai-sdk/provider");
var import_provider_utils27 = require("@ai-sdk/provider-utils");
async function parseAndValidateObjectResult(result, outputStrategy, context) {
const parseResult = await (0, import_provider_utils27.safeParseJSON)({ text: result });
@@ -9082,7 +9143,7 @@ async function parseAndValidateObjectResultWithRepair(result, outputStrategy, re
try {
return await parseAndValidateObjectResult(result, outputStrategy, context);
} catch (error) {
- if (repairText != null && NoObjectGeneratedError.isInstance(error) && (
+ if (repairText != null && NoObjectGeneratedError.isInstance(error) && (import_provider28.JSONParseError.isInstance(error.cause) || import_provider28.TypeValidationError.isInstance(error.cause))) {
const repairedText = await repairText({
text: result,
error: error.cause
@@ -9299,7 +9360,7 @@ async function generateObject(options) {
}),
tracer,
fn: async (span) => {
- var
+ var _a17;
let result;
let finishReason;
let usage;
@@ -9345,7 +9406,7 @@ async function generateObject(options) {
}),
tracer,
fn: async (span2) => {
- var
+ var _a18, _b, _c, _d, _e, _f, _g, _h;
const result2 = await model.doGenerate({
responseFormat: {
type: "json",
@@ -9360,7 +9421,7 @@ async function generateObject(options) {
headers: headersWithUserAgent
});
const responseData = {
- id: (_b = (
+ id: (_b = (_a18 = result2.response) == null ? void 0 : _a18.id) != null ? _b : generateId2(),
timestamp: (_d = (_c = result2.response) == null ? void 0 : _c.timestamp) != null ? _d : currentDate(),
modelId: (_f = (_e = result2.response) == null ? void 0 : _e.modelId) != null ? _f : model.modelId,
headers: (_g = result2.response) == null ? void 0 : _g.headers,
@@ -9416,7 +9477,7 @@ async function generateObject(options) {
usage = asLanguageModelUsage(generateResult.usage);
warnings = generateResult.warnings;
resultProviderMetadata = generateResult.providerMetadata;
- request = (
+ request = (_a17 = generateResult.request) != null ? _a17 : {};
response = generateResult.responseData;
reasoning = generateResult.reasoning;
logWarnings({
@@ -9479,9 +9540,9 @@ var DefaultGenerateObjectResult = class {
this.reasoning = options.reasoning;
}
toJsonResponse(init) {
- var
+ var _a17;
return new Response(JSON.stringify(this.object), {
- status: (
+ status: (_a17 = init == null ? void 0 : init.status) != null ? _a17 : 200,
headers: prepareHeaders(init == null ? void 0 : init.headers, {
"content-type": "application/json; charset=utf-8"
})
@@ -9607,8 +9668,8 @@ function simulateReadableStream({
chunkDelayInMs = 0,
_internal
}) {
- var
- const delay2 = (
+ var _a17;
+ const delay2 = (_a17 = _internal == null ? void 0 : _internal.delay) != null ? _a17 : import_provider_utils29.delay;
let index = 0;
return new ReadableStream({
async pull(controller) {
@@ -9866,7 +9927,7 @@ var DefaultStreamObjectResult = class {
const transformedStream = stream.pipeThrough(new TransformStream(transformer)).pipeThrough(
new TransformStream({
async transform(chunk, controller) {
- var
+ var _a17, _b, _c;
if (typeof chunk === "object" && chunk.type === "stream-start") {
warnings = chunk.warnings;
return;
@@ -9916,7 +9977,7 @@ var DefaultStreamObjectResult = class {
switch (chunk.type) {
case "response-metadata": {
fullResponse = {
- id: (
+ id: (_a17 = chunk.id) != null ? _a17 : fullResponse.id,
timestamp: (_b = chunk.timestamp) != null ? _b : fullResponse.timestamp,
modelId: (_c = chunk.modelId) != null ? _c : fullResponse.modelId
};
@@ -10188,7 +10249,7 @@ async function generateSpeech({
abortSignal,
headers
}) {
- var
+ var _a17;
const resolvedModel = resolveSpeechModel(model);
if (!resolvedModel) {
throw new Error("Model could not be resolved");
@@ -10225,10 +10286,10 @@ async function generateSpeech({
return new DefaultSpeechResult({
audio: new DefaultGeneratedAudioFile({
data: result.audio,
- mediaType: (
+ mediaType: (_a17 = detectMediaType({
data: result.audio,
signatures: audioMediaTypeSignatures
- })) != null ?
+ })) != null ? _a17 : "audio/mp3"
}),
warnings: result.warnings,
responses: [result.response],
@@ -10237,11 +10298,11 @@ async function generateSpeech({
}
var DefaultSpeechResult = class {
constructor(options) {
- var
+ var _a17;
this.audio = options.audio;
this.warnings = options.warnings;
this.responses = options.responses;
- this.providerMetadata = (
+ this.providerMetadata = (_a17 = options.providerMetadata) != null ? _a17 : {};
}
};

@@ -10326,7 +10387,7 @@ function pruneMessages({

// src/generate-text/smooth-stream.ts
var import_provider_utils32 = require("@ai-sdk/provider-utils");
- var
+ var import_provider29 = require("@ai-sdk/provider");
var CHUNKING_REGEXPS = {
word: /\S+\s+/m,
line: /\n+/m
@@ -10365,7 +10426,7 @@ function smoothStream({
} else {
const chunkingRegex = typeof chunking === "string" ? CHUNKING_REGEXPS[chunking] : chunking instanceof RegExp ? chunking : void 0;
if (chunkingRegex == null) {
- throw new
+ throw new import_provider29.InvalidArgumentError({
argument: "chunking",
message: `Chunking must be "word", "line", a RegExp, an Intl.Segmenter, or a ChunkDetector function. Received: ${chunking}`
});
@@ -10451,8 +10512,8 @@ function defaultTransform(text2) {
return text2.replace(/^```(?:json)?\s*\n?/, "").replace(/\n?```\s*$/, "").trim();
}
function extractJsonMiddleware(options) {
- var
- const transform = (
+ var _a17;
+ const transform = (_a17 = options == null ? void 0 : options.transform) != null ? _a17 : defaultTransform;
const hasCustomTransform = (options == null ? void 0 : options.transform) !== void 0;
return {
specificationVersion: "v3",
@@ -10820,13 +10881,13 @@ function addToolInputExamplesMiddleware({
return {
specificationVersion: "v3",
transformParams: async ({ params }) => {
- var
- if (!((
+ var _a17;
+ if (!((_a17 = params.tools) == null ? void 0 : _a17.length)) {
return params;
}
const transformedTools = params.tools.map((tool2) => {
- var
- if (tool2.type !== "function" || !((
+ var _a18;
+ if (tool2.type !== "function" || !((_a18 = tool2.inputExamples) == null ? void 0 : _a18.length)) {
return tool2;
}
const formattedExamples = tool2.inputExamples.map((example, index) => format(example, index)).join("\n");
@@ -10873,7 +10934,7 @@ var doWrap = ({
modelId,
providerId
}) => {
- var
+ var _a17, _b, _c;
async function doTransform({
params,
type
@@ -10882,7 +10943,7 @@ var doWrap = ({
}
return {
specificationVersion: "v3",
- provider: (
+ provider: (_a17 = providerId != null ? providerId : overrideProvider == null ? void 0 : overrideProvider({ model })) != null ? _a17 : model.provider,
modelId: (_b = modelId != null ? modelId : overrideModelId == null ? void 0 : overrideModelId({ model })) != null ? _b : model.modelId,
supportedUrls: (_c = overrideSupportedUrls == null ? void 0 : overrideSupportedUrls({ model })) != null ? _c : model.supportedUrls,
async doGenerate(params) {
@@ -10929,7 +10990,7 @@ var doWrap2 = ({
modelId,
providerId
}) => {
- var
+ var _a17, _b, _c, _d;
async function doTransform({
params
}) {
@@ -10937,7 +10998,7 @@ var doWrap2 = ({
}
return {
specificationVersion: "v3",
- provider: (
+ provider: (_a17 = providerId != null ? providerId : overrideProvider == null ? void 0 : overrideProvider({ model })) != null ? _a17 : model.provider,
modelId: (_b = modelId != null ? modelId : overrideModelId == null ? void 0 : overrideModelId({ model })) != null ? _b : model.modelId,
maxEmbeddingsPerCall: (_c = overrideMaxEmbeddingsPerCall == null ? void 0 : overrideMaxEmbeddingsPerCall({ model })) != null ? _c : model.maxEmbeddingsPerCall,
supportsParallelCalls: (_d = overrideSupportsParallelCalls == null ? void 0 : overrideSupportsParallelCalls({ model })) != null ? _d : model.supportsParallelCalls,
@@ -10976,11 +11037,11 @@ var doWrap3 = ({
modelId,
providerId
}) => {
- var
+ var _a17, _b, _c;
async function doTransform({ params }) {
return transformParams ? await transformParams({ params, model }) : params;
}
- const maxImagesPerCallRaw = (
+ const maxImagesPerCallRaw = (_a17 = overrideMaxImagesPerCall == null ? void 0 : overrideMaxImagesPerCall({ model })) != null ? _a17 : model.maxImagesPerCall;
const maxImagesPerCall = maxImagesPerCallRaw instanceof Function ? maxImagesPerCallRaw.bind(model) : maxImagesPerCallRaw;
return {
specificationVersion: "v3",
@@ -11045,7 +11106,7 @@ function wrapProvider({
}

// src/registry/custom-provider.ts
- var
+ var import_provider30 = require("@ai-sdk/provider");
function customProvider({
languageModels,
embeddingModels,
@@ -11065,7 +11126,7 @@ function customProvider({
if (fallbackProvider) {
return fallbackProvider.languageModel(modelId);
}
- throw new
+ throw new import_provider30.NoSuchModelError({ modelId, modelType: "languageModel" });
},
embeddingModel(modelId) {
if (embeddingModels != null && modelId in embeddingModels) {
@@ -11074,7 +11135,7 @@ function customProvider({
if (fallbackProvider) {
return fallbackProvider.embeddingModel(modelId);
}
- throw new
+ throw new import_provider30.NoSuchModelError({ modelId, modelType: "embeddingModel" });
},
imageModel(modelId) {
if (imageModels != null && modelId in imageModels) {
@@ -11083,7 +11144,7 @@ function customProvider({
if (fallbackProvider == null ? void 0 : fallbackProvider.imageModel) {
return fallbackProvider.imageModel(modelId);
}
- throw new
+ throw new import_provider30.NoSuchModelError({ modelId, modelType: "imageModel" });
},
transcriptionModel(modelId) {
if (transcriptionModels != null && modelId in transcriptionModels) {
@@ -11092,7 +11153,7 @@ function customProvider({
if (fallbackProvider == null ? void 0 : fallbackProvider.transcriptionModel) {
return fallbackProvider.transcriptionModel(modelId);
}
- throw new
+ throw new import_provider30.NoSuchModelError({ modelId, modelType: "transcriptionModel" });
},
speechModel(modelId) {
if (speechModels != null && modelId in speechModels) {
@@ -11101,7 +11162,7 @@ function customProvider({
if (fallbackProvider == null ? void 0 : fallbackProvider.speechModel) {
return fallbackProvider.speechModel(modelId);
}
- throw new
+ throw new import_provider30.NoSuchModelError({ modelId, modelType: "speechModel" });
},
rerankingModel(modelId) {
if (rerankingModels != null && modelId in rerankingModels) {
@@ -11110,19 +11171,19 @@ function customProvider({
if (fallbackProvider == null ? void 0 : fallbackProvider.rerankingModel) {
return fallbackProvider.rerankingModel(modelId);
}
- throw new
+ throw new import_provider30.NoSuchModelError({ modelId, modelType: "rerankingModel" });
}
};
}
var experimental_customProvider = customProvider;

// src/registry/no-such-provider-error.ts
- var
- var
- var
- var
- var
- var NoSuchProviderError = class extends
+ var import_provider31 = require("@ai-sdk/provider");
+ var name16 = "AI_NoSuchProviderError";
+ var marker16 = `vercel.ai.error.${name16}`;
+ var symbol16 = Symbol.for(marker16);
+ var _a16;
+ var NoSuchProviderError = class extends import_provider31.NoSuchModelError {
constructor({
modelId,
modelType,
@@ -11130,19 +11191,19 @@ var NoSuchProviderError = class extends import_provider30.NoSuchModelError {
availableProviders,
message = `No such provider: ${providerId} (available providers: ${availableProviders.join()})`
}) {
- super({ errorName:
- this[
+ super({ errorName: name16, modelId, modelType, message });
+ this[_a16] = true;
this.providerId = providerId;
this.availableProviders = availableProviders;
}
static isInstance(error) {
- return
+ return import_provider31.AISDKError.hasMarker(error, marker16);
}
};
-
+ _a16 = symbol16;

// src/registry/provider-registry.ts
- var
+ var import_provider32 = require("@ai-sdk/provider");
function createProviderRegistry(providers, {
separator = ":",
languageModelMiddleware,
@@ -11191,7 +11252,7 @@ var DefaultProviderRegistry = class {
splitId(id, modelType) {
const index = id.indexOf(this.separator);
if (index === -1) {
- throw new
+ throw new import_provider32.NoSuchModelError({
modelId: id,
modelType,
message: `Invalid ${modelType} id for registry: ${id} (must be in the format "providerId${this.separator}modelId")`
@@ -11200,14 +11261,14 @@ var DefaultProviderRegistry = class {
return [id.slice(0, index), id.slice(index + this.separator.length)];
}
languageModel(id) {
- var
+ var _a17, _b;
const [providerId, modelId] = this.splitId(id, "languageModel");
- let model = (_b = (
-
+ let model = (_b = (_a17 = this.getProvider(providerId, "languageModel")).languageModel) == null ? void 0 : _b.call(
+ _a17,
modelId
);
if (model == null) {
- throw new
+ throw new import_provider32.NoSuchModelError({ modelId: id, modelType: "languageModel" });
}
if (this.languageModelMiddleware != null) {
model = wrapLanguageModel({
@@ -11218,12 +11279,12 @@ var DefaultProviderRegistry = class {
return model;
}
embeddingModel(id) {
- var
+ var _a17;
const [providerId, modelId] = this.splitId(id, "embeddingModel");
const provider = this.getProvider(providerId, "embeddingModel");
- const model = (
+ const model = (_a17 = provider.embeddingModel) == null ? void 0 : _a17.call(provider, modelId);
if (model == null) {
- throw new
+ throw new import_provider32.NoSuchModelError({
modelId: id,
modelType: "embeddingModel"
});
@@ -11231,12 +11292,12 @@ var DefaultProviderRegistry = class {
return model;
}
imageModel(id) {
- var
+ var _a17;
const [providerId, modelId] = this.splitId(id, "imageModel");
const provider = this.getProvider(providerId, "imageModel");
- let model = (
+ let model = (_a17 = provider.imageModel) == null ? void 0 : _a17.call(provider, modelId);
if (model == null) {
- throw new
+ throw new import_provider32.NoSuchModelError({ modelId: id, modelType: "imageModel" });
}
if (this.imageModelMiddleware != null) {
model = wrapImageModel({
@@ -11247,12 +11308,12 @@ var DefaultProviderRegistry = class {
return model;
}
transcriptionModel(id) {
- var
+ var _a17;
const [providerId, modelId] = this.splitId(id, "transcriptionModel");
const provider = this.getProvider(providerId, "transcriptionModel");
- const model = (
+ const model = (_a17 = provider.transcriptionModel) == null ? void 0 : _a17.call(provider, modelId);
if (model == null) {
- throw new
+ throw new import_provider32.NoSuchModelError({
modelId: id,
modelType: "transcriptionModel"
});
@@ -11260,22 +11321,22 @@ var DefaultProviderRegistry = class {
return model;
}
speechModel(id) {
- var
+ var _a17;
const [providerId, modelId] = this.splitId(id, "speechModel");
const provider = this.getProvider(providerId, "speechModel");
- const model = (
+ const model = (_a17 = provider.speechModel) == null ? void 0 : _a17.call(provider, modelId);
if (model == null) {
- throw new
+ throw new import_provider32.NoSuchModelError({ modelId: id, modelType: "speechModel" });
}
return model;
}
rerankingModel(id) {
- var
+ var _a17;
const [providerId, modelId] = this.splitId(id, "rerankingModel");
const provider = this.getProvider(providerId, "rerankingModel");
- const model = (
+ const model = (_a17 = provider.rerankingModel) == null ? void 0 : _a17.call(provider, modelId);
if (model == null) {
- throw new
+ throw new import_provider32.NoSuchModelError({ modelId: id, modelType: "rerankingModel" });
}
return model;
}
@@ -11330,7 +11391,7 @@ async function rerank({
}),
tracer,
fn: async () => {
- var
+ var _a17, _b;
const { ranking, response, providerMetadata, warnings } = await retry(
() => recordSpan({
name: "ai.rerank.doRerank",
@@ -11394,7 +11455,7 @@ async function rerank({
providerMetadata,
response: {
id: response == null ? void 0 : response.id,
- timestamp: (
+ timestamp: (_a17 = response == null ? void 0 : response.timestamp) != null ? _a17 : /* @__PURE__ */ new Date(),
modelId: (_b = response == null ? void 0 : response.modelId) != null ? _b : model.modelId,
headers: response == null ? void 0 : response.headers,
body: response == null ? void 0 : response.body
@@ -11419,8 +11480,8 @@ var DefaultRerankResult = class {
var import_provider_utils33 = require("@ai-sdk/provider-utils");

// src/error/no-transcript-generated-error.ts
- var
- var NoTranscriptGeneratedError = class extends
+ var import_provider33 = require("@ai-sdk/provider");
+ var NoTranscriptGeneratedError = class extends import_provider33.AISDKError {
constructor(options) {
super({
name: "AI_NoTranscriptGeneratedError",
@@ -11454,16 +11515,16 @@ async function transcribe({
const audioData = audio instanceof URL ? (await download({ url: audio })).data : convertDataContentToUint8Array(audio);
const result = await retry(
() => {
- var
+ var _a17;
return resolvedModel.doGenerate({
audio: audioData,
abortSignal,
headers: headersWithUserAgent,
providerOptions,
- mediaType: (
+ mediaType: (_a17 = detectMediaType({
data: audioData,
signatures: audioMediaTypeSignatures
- })) != null ?
+ })) != null ? _a17 : "audio/wav"
});
}
);
@@ -11487,14 +11548,14 @@ async function transcribe({
}
var DefaultTranscriptionResult = class {
constructor(options) {
- var
+ var _a17;
this.text = options.text;
this.segments = options.segments;
this.language = options.language;
this.durationInSeconds = options.durationInSeconds;
this.warnings = options.warnings;
this.responses = options.responses;
- this.providerMetadata = (
+ this.providerMetadata = (_a17 = options.providerMetadata) != null ? _a17 : {};
}
};
@@ -11533,7 +11594,7 @@ async function callCompletionApi({
onError,
fetch: fetch2 = getOriginalFetch()
}) {
- var
+ var _a17;
try {
setLoading(true);
setError(void 0);
@@ -11561,7 +11622,7 @@ async function callCompletionApi({
});
if (!response.ok) {
throw new Error(
- (
+ (_a17 = await response.text()) != null ? _a17 : "Failed to fetch the chat response."
);
}
if (!response.body) {
@@ -11645,12 +11706,12 @@ async function convertFileListToFileUIParts(files) {
}
return Promise.all(
Array.from(files).map(async (file) => {
- const { name:
+ const { name: name17, type } = file;
const dataUrl = await new Promise((resolve3, reject) => {
const reader = new FileReader();
reader.onload = (readerEvent) => {
- var
- resolve3((
+ var _a17;
+ resolve3((_a17 = readerEvent.target) == null ? void 0 : _a17.result);
};
reader.onerror = (error) => reject(error);
reader.readAsDataURL(file);
@@ -11658,7 +11719,7 @@ async function convertFileListToFileUIParts(files) {
return {
type: "file",
mediaType: type,
- filename:
+ filename: name17,
url: dataUrl
};
})
@@ -11692,7 +11753,7 @@ var HttpChatTransport = class {
abortSignal,
...options
}) {
- var
+ var _a17, _b, _c, _d, _e;
const resolvedBody = await (0, import_provider_utils35.resolve)(this.body);
const resolvedHeaders = await (0, import_provider_utils35.resolve)(this.headers);
const resolvedCredentials = await (0, import_provider_utils35.resolve)(this.credentials);
@@ -11700,7 +11761,7 @@ var HttpChatTransport = class {
...(0, import_provider_utils35.normalizeHeaders)(resolvedHeaders),
...(0, import_provider_utils35.normalizeHeaders)(options.headers)
};
- const preparedRequest = await ((
+ const preparedRequest = await ((_a17 = this.prepareSendMessagesRequest) == null ? void 0 : _a17.call(this, {
api: this.api,
id: options.chatId,
messages: options.messages,
@@ -11748,7 +11809,7 @@ var HttpChatTransport = class {
return this.processResponseStream(response.body);
}
async reconnectToStream(options) {
- var
+ var _a17, _b, _c, _d, _e;
const resolvedBody = await (0, import_provider_utils35.resolve)(this.body);
const resolvedHeaders = await (0, import_provider_utils35.resolve)(this.headers);
const resolvedCredentials = await (0, import_provider_utils35.resolve)(this.credentials);
@@ -11756,7 +11817,7 @@ var HttpChatTransport = class {
...(0, import_provider_utils35.normalizeHeaders)(resolvedHeaders),
...(0, import_provider_utils35.normalizeHeaders)(options.headers)
};
- const preparedRequest = await ((
+ const preparedRequest = await ((_a17 = this.prepareReconnectToStreamRequest) == null ? void 0 : _a17.call(this, {
api: this.api,
id: options.chatId,
body: { ...resolvedBody, ...options.body },
@@ -11838,11 +11899,11 @@ var AbstractChat = class {
* If a messageId is provided, the message will be replaced.
*/
this.sendMessage = async (message, options) => {
- var
+ var _a17, _b, _c, _d;
if (message == null) {
await this.makeRequest({
trigger: "submit-message",
- messageId: (
+ messageId: (_a17 = this.lastMessage) == null ? void 0 : _a17.id,
...options
});
return;
@@ -11935,7 +11996,7 @@ var AbstractChat = class {
approved,
reason
}) => this.jobExecutor.run(async () => {
- var
+ var _a17, _b;
const messages = this.state.messages;
const lastMessage = messages[messages.length - 1];
const updatePart = (part) => isToolUIPart(part) && part.state === "approval-requested" && part.approval.id === id ? {
@@ -11950,7 +12011,7 @@ var AbstractChat = class {
if (this.activeResponse) {
this.activeResponse.state.message.parts = this.activeResponse.state.message.parts.map(updatePart);
}
- if (this.status !== "streaming" && this.status !== "submitted" && ((
+ if (this.status !== "streaming" && this.status !== "submitted" && ((_a17 = this.sendAutomaticallyWhen) == null ? void 0 : _a17.call(this, { messages: this.state.messages }))) {
this.makeRequest({
trigger: "submit-message",
messageId: (_b = this.lastMessage) == null ? void 0 : _b.id
@@ -11964,7 +12025,7 @@ var AbstractChat = class {
output,
errorText
}) => this.jobExecutor.run(async () => {
- var
+ var _a17, _b;
const messages = this.state.messages;
const lastMessage = messages[messages.length - 1];
const updatePart = (part) => isToolUIPart(part) && part.toolCallId === toolCallId ? { ...part, state, output, errorText } : part;
@@ -11975,7 +12036,7 @@ var AbstractChat = class {
if (this.activeResponse) {
this.activeResponse.state.message.parts = this.activeResponse.state.message.parts.map(updatePart);
}
- if (this.status !== "streaming" && this.status !== "submitted" && ((
+ if (this.status !== "streaming" && this.status !== "submitted" && ((_a17 = this.sendAutomaticallyWhen) == null ? void 0 : _a17.call(this, { messages: this.state.messages }))) {
this.makeRequest({
trigger: "submit-message",
messageId: (_b = this.lastMessage) == null ? void 0 : _b.id
@@ -11988,10 +12049,10 @@ var AbstractChat = class {
* Abort the current request immediately, keep the generated tokens if any.
*/
this.stop = async () => {
- var
+ var _a17;
if (this.status !== "streaming" && this.status !== "submitted")
return;
- if ((
+ if ((_a17 = this.activeResponse) == null ? void 0 : _a17.abortController) {
this.activeResponse.abortController.abort();
}
};
@@ -12046,7 +12107,7 @@ var AbstractChat = class {
body,
messageId
}) {
- var
+ var _a17, _b, _c, _d;
this.setStatus({ status: "submitted", error: void 0 });
const lastMessage = this.lastMessage;
let isAbort = false;
@@ -12095,9 +12156,9 @@ var AbstractChat = class {
() => job({
state: activeResponse.state,
write: () => {
- var
+ var _a18;
this.setStatus({ status: "streaming" });
- const replaceLastMessage = activeResponse.state.message.id === ((
+ const replaceLastMessage = activeResponse.state.message.id === ((_a18 = this.lastMessage) == null ? void 0 : _a18.id);
if (replaceLastMessage) {
this.state.replaceMessage(
this.state.messages.length - 1,
@@ -12149,7 +12210,7 @@ var AbstractChat = class {
isAbort,
isDisconnect,
isError,
- finishReason: (
+ finishReason: (_a17 = this.activeResponse) == null ? void 0 : _a17.state.finishReason
});
} catch (err) {
console.error(err);
@@ -12327,6 +12388,7 @@ var TextStreamChatTransport = class extends HttpChatTransport {
ToolCallRepairError,
ToolLoopAgent,
TypeValidationError,
+ UIMessageStreamError,
UI_MESSAGE_STREAM_HEADERS,
UnsupportedFunctionalityError,
UnsupportedModelVersionError,