ai 4.1.53 → 4.1.55
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/CHANGELOG.md +16 -0
- package/dist/index.d.mts +833 -27
- package/dist/index.d.ts +833 -27
- package/dist/index.js +920 -168
- package/dist/index.js.map +1 -1
- package/dist/index.mjs +872 -125
- package/dist/index.mjs.map +1 -1
- package/package.json +5 -4
- package/rsc/dist/rsc-server.mjs +1 -0
- package/rsc/dist/rsc-server.mjs.map +1 -1
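Most of the +920/-168 change to dist/index.js below is a new Model Context Protocol (MCP) client: the bundle gains an experimental_createMCPClient export, an MCPClientError error class, and stdio/SSE transports, and many internal identifiers are renamed (name -> name17, _a -> _a17, import_provider20 and up) to make room for the new module. A minimal usage sketch in JavaScript, assuming the documented AI SDK shape for the transport options and the tools() helper (neither is visible in this diff excerpt):

// Hypothetical usage sketch; not part of the package diff.
import { experimental_createMCPClient, generateText } from "ai";
import { openai } from "@ai-sdk/openai";

const mcpClient = await experimental_createMCPClient({
  // SSE transport config mirroring the SSEClientTransport({ url }) seen in the bundle.
  transport: { type: "sse", url: "https://example.com/mcp/sse" },
});

try {
  const tools = await mcpClient.tools(); // assumed helper from the AI SDK docs
  const { text } = await generateText({
    model: openai("gpt-4o-mini"),
    tools,
    prompt: "List the files in the project root.",
  });
  console.log(text);
} finally {
  await mcpClient.close();
}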
package/dist/index.js
CHANGED
@@ -1,11 +1,13 @@
 "use strict";
+var __create = Object.create;
 var __defProp = Object.defineProperty;
 var __getOwnPropDesc = Object.getOwnPropertyDescriptor;
 var __getOwnPropNames = Object.getOwnPropertyNames;
+var __getProtoOf = Object.getPrototypeOf;
 var __hasOwnProp = Object.prototype.hasOwnProperty;
 var __export = (target, all) => {
-for (var
-__defProp(target,
+for (var name17 in all)
+__defProp(target, name17, { get: all[name17], enumerable: true });
 };
 var __copyProps = (to, from, except, desc) => {
 if (from && typeof from === "object" || typeof from === "function") {
@@ -15,33 +17,42 @@ var __copyProps = (to, from, except, desc) => {
 }
 return to;
 };
+var __toESM = (mod, isNodeMode, target) => (target = mod != null ? __create(__getProtoOf(mod)) : {}, __copyProps(
+// If the importer is in node compatibility mode or this is not an ESM
+// file that has been converted to a CommonJS file using a Babel-
+// compatible transform (i.e. "__esModule" has not been set), then set
+// "default" to the CommonJS "module.exports" for node compatibility.
+isNodeMode || !mod || !mod.__esModule ? __defProp(target, "default", { value: mod, enumerable: true }) : target,
+mod
+));
 var __toCommonJS = (mod) => __copyProps(__defProp({}, "__esModule", { value: true }), mod);
 
 // streams/index.ts
 var streams_exports = {};
 __export(streams_exports, {
-AISDKError: () =>
-APICallError: () =>
+AISDKError: () => import_provider20.AISDKError,
+APICallError: () => import_provider20.APICallError,
 AssistantResponse: () => AssistantResponse,
 DownloadError: () => DownloadError,
-EmptyResponseBodyError: () =>
+EmptyResponseBodyError: () => import_provider20.EmptyResponseBodyError,
 InvalidArgumentError: () => InvalidArgumentError,
 InvalidDataContentError: () => InvalidDataContentError,
 InvalidMessageRoleError: () => InvalidMessageRoleError,
-InvalidPromptError: () =>
-InvalidResponseDataError: () =>
+InvalidPromptError: () => import_provider20.InvalidPromptError,
+InvalidResponseDataError: () => import_provider20.InvalidResponseDataError,
 InvalidStreamPartError: () => InvalidStreamPartError,
 InvalidToolArgumentsError: () => InvalidToolArgumentsError,
-JSONParseError: () =>
+JSONParseError: () => import_provider20.JSONParseError,
 LangChainAdapter: () => langchain_adapter_exports,
 LlamaIndexAdapter: () => llamaindex_adapter_exports,
-LoadAPIKeyError: () =>
+LoadAPIKeyError: () => import_provider20.LoadAPIKeyError,
+MCPClientError: () => MCPClientError,
 MessageConversionError: () => MessageConversionError,
-NoContentGeneratedError: () =>
+NoContentGeneratedError: () => import_provider20.NoContentGeneratedError,
 NoImageGeneratedError: () => NoImageGeneratedError,
 NoObjectGeneratedError: () => NoObjectGeneratedError,
 NoOutputSpecifiedError: () => NoOutputSpecifiedError,
-NoSuchModelError: () =>
+NoSuchModelError: () => import_provider20.NoSuchModelError,
 NoSuchProviderError: () => NoSuchProviderError,
 NoSuchToolError: () => NoSuchToolError,
 Output: () => output_exports,
@@ -49,8 +60,8 @@ __export(streams_exports, {
 StreamData: () => StreamData,
 ToolCallRepairError: () => ToolCallRepairError,
 ToolExecutionError: () => ToolExecutionError,
-TypeValidationError: () =>
-UnsupportedFunctionalityError: () =>
+TypeValidationError: () => import_provider20.TypeValidationError,
+UnsupportedFunctionalityError: () => import_provider20.UnsupportedFunctionalityError,
 appendClientMessage: () => appendClientMessage,
 appendResponseMessages: () => appendResponseMessages,
 convertToCoreMessages: () => convertToCoreMessages,
@@ -66,35 +77,36 @@ __export(streams_exports, {
 customProvider: () => customProvider,
 embed: () => embed,
 embedMany: () => embedMany,
+experimental_createMCPClient: () => createMCPClient,
 experimental_createProviderRegistry: () => experimental_createProviderRegistry,
 experimental_customProvider: () => experimental_customProvider,
 experimental_generateImage: () => generateImage,
 experimental_wrapLanguageModel: () => experimental_wrapLanguageModel,
 extractReasoningMiddleware: () => extractReasoningMiddleware,
-formatAssistantStreamPart: () =>
-formatDataStreamPart: () =>
+formatAssistantStreamPart: () => import_ui_utils11.formatAssistantStreamPart,
+formatDataStreamPart: () => import_ui_utils11.formatDataStreamPart,
 generateId: () => import_provider_utils14.generateId,
 generateObject: () => generateObject,
 generateText: () => generateText,
-jsonSchema: () =>
-parseAssistantStreamPart: () =>
-parseDataStreamPart: () =>
+jsonSchema: () => import_ui_utils11.jsonSchema,
+parseAssistantStreamPart: () => import_ui_utils11.parseAssistantStreamPart,
+parseDataStreamPart: () => import_ui_utils11.parseDataStreamPart,
 pipeDataStreamToResponse: () => pipeDataStreamToResponse,
-processDataStream: () =>
-processTextStream: () =>
+processDataStream: () => import_ui_utils11.processDataStream,
+processTextStream: () => import_ui_utils11.processTextStream,
 simulateReadableStream: () => simulateReadableStream,
 smoothStream: () => smoothStream,
 streamObject: () => streamObject,
 streamText: () => streamText,
 tool: () => tool,
 wrapLanguageModel: () => wrapLanguageModel,
-zodSchema: () =>
+zodSchema: () => import_ui_utils11.zodSchema
 });
 module.exports = __toCommonJS(streams_exports);
 
 // core/index.ts
 var import_provider_utils14 = require("@ai-sdk/provider-utils");
-var
+var import_ui_utils11 = require("@ai-sdk/ui-utils");
 
 // core/data-stream/create-data-stream.ts
 var import_ui_utils = require("@ai-sdk/ui-utils");
@@ -433,7 +445,7 @@ function getBaseTelemetryAttributes({
 telemetry,
 headers
 }) {
-var
+var _a17;
 return {
 "ai.model.provider": model.provider,
 "ai.model.id": model.modelId,
@@ -443,7 +455,7 @@ function getBaseTelemetryAttributes({
 return attributes;
 }, {}),
 // add metadata as attributes:
-...Object.entries((
+...Object.entries((_a17 = telemetry == null ? void 0 : telemetry.metadata) != null ? _a17 : {}).reduce(
 (attributes, [key, value]) => {
 attributes[`ai.telemetry.metadata.${key}`] = value;
 return attributes;
@@ -468,7 +480,7 @@ var noopTracer = {
 startSpan() {
 return noopSpan;
 },
-startActiveSpan(
+startActiveSpan(name17, arg1, arg2, arg3) {
 if (typeof arg1 === "function") {
 return arg1(noopSpan);
 }
@@ -538,13 +550,13 @@ function getTracer({
 // core/telemetry/record-span.ts
 var import_api2 = require("@opentelemetry/api");
 function recordSpan({
-name:
+name: name17,
 tracer,
 attributes,
 fn,
 endWhenDone = true
 }) {
-return tracer.startActiveSpan(
+return tracer.startActiveSpan(name17, { attributes }, async (span) => {
 try {
 const result = await fn(span);
 if (endWhenDone) {
@@ -652,14 +664,14 @@ async function embed({
|
|
652
664
|
}),
|
653
665
|
tracer,
|
654
666
|
fn: async (doEmbedSpan) => {
|
655
|
-
var
|
667
|
+
var _a17;
|
656
668
|
const modelResponse = await model.doEmbed({
|
657
669
|
values: [value],
|
658
670
|
abortSignal,
|
659
671
|
headers
|
660
672
|
});
|
661
673
|
const embedding2 = modelResponse.embeddings[0];
|
662
|
-
const usage2 = (
|
674
|
+
const usage2 = (_a17 = modelResponse.usage) != null ? _a17 : { tokens: NaN };
|
663
675
|
doEmbedSpan.setAttributes(
|
664
676
|
selectTelemetryAttributes({
|
665
677
|
telemetry,
|
@@ -769,14 +781,14 @@ async function embedMany({
|
|
769
781
|
}),
|
770
782
|
tracer,
|
771
783
|
fn: async (doEmbedSpan) => {
|
772
|
-
var
|
784
|
+
var _a17;
|
773
785
|
const modelResponse = await model.doEmbed({
|
774
786
|
values,
|
775
787
|
abortSignal,
|
776
788
|
headers
|
777
789
|
});
|
778
790
|
const embeddings3 = modelResponse.embeddings;
|
779
|
-
const usage2 = (
|
791
|
+
const usage2 = (_a17 = modelResponse.usage) != null ? _a17 : { tokens: NaN };
|
780
792
|
doEmbedSpan.setAttributes(
|
781
793
|
selectTelemetryAttributes({
|
782
794
|
telemetry,
|
@@ -828,14 +840,14 @@ async function embedMany({
|
|
828
840
|
}),
|
829
841
|
tracer,
|
830
842
|
fn: async (doEmbedSpan) => {
|
831
|
-
var
|
843
|
+
var _a17;
|
832
844
|
const modelResponse = await model.doEmbed({
|
833
845
|
values: chunk,
|
834
846
|
abortSignal,
|
835
847
|
headers
|
836
848
|
});
|
837
849
|
const embeddings2 = modelResponse.embeddings;
|
838
|
-
const usage2 = (
|
850
|
+
const usage2 = (_a17 = modelResponse.usage) != null ? _a17 : { tokens: NaN };
|
839
851
|
doEmbedSpan.setAttributes(
|
840
852
|
selectTelemetryAttributes({
|
841
853
|
telemetry,
|
@@ -922,9 +934,9 @@ async function generateImage({
|
|
922
934
|
currentDate: () => /* @__PURE__ */ new Date()
|
923
935
|
}
|
924
936
|
}) {
|
925
|
-
var
|
937
|
+
var _a17;
|
926
938
|
const { retry } = prepareRetries({ maxRetries: maxRetriesArg });
|
927
|
-
const maxImagesPerCall = (
|
939
|
+
const maxImagesPerCall = (_a17 = model.maxImagesPerCall) != null ? _a17 : 1;
|
928
940
|
const callCount = Math.ceil(n / maxImagesPerCall);
|
929
941
|
const callImageCounts = Array.from({ length: callCount }, (_, i) => {
|
930
942
|
if (i < callCount - 1) {
|
@@ -1057,7 +1069,7 @@ async function download({
|
|
1057
1069
|
url,
|
1058
1070
|
fetchImplementation = fetch
|
1059
1071
|
}) {
|
1060
|
-
var
|
1072
|
+
var _a17;
|
1061
1073
|
const urlText = url.toString();
|
1062
1074
|
try {
|
1063
1075
|
const response = await fetchImplementation(urlText);
|
@@ -1070,7 +1082,7 @@ async function download({
|
|
1070
1082
|
}
|
1071
1083
|
return {
|
1072
1084
|
data: new Uint8Array(await response.arrayBuffer()),
|
1073
|
-
mimeType: (
|
1085
|
+
mimeType: (_a17 = response.headers.get("content-type")) != null ? _a17 : void 0
|
1074
1086
|
};
|
1075
1087
|
} catch (error) {
|
1076
1088
|
if (DownloadError.isInstance(error)) {
|
@@ -1130,8 +1142,8 @@ var dataContentSchema = import_zod.z.union([
|
|
1130
1142
|
import_zod.z.custom(
|
1131
1143
|
// Buffer might not be available in some environments such as CloudFlare:
|
1132
1144
|
(value) => {
|
1133
|
-
var
|
1134
|
-
return (_b = (
|
1145
|
+
var _a17, _b;
|
1146
|
+
return (_b = (_a17 = globalThis.Buffer) == null ? void 0 : _a17.isBuffer(value)) != null ? _b : false;
|
1135
1147
|
},
|
1136
1148
|
{ message: "Must be a Buffer" }
|
1137
1149
|
)
|
@@ -1231,14 +1243,14 @@ async function convertToLanguageModelPrompt({
|
|
1231
1243
|
];
|
1232
1244
|
}
|
1233
1245
|
function convertToLanguageModelMessage(message, downloadedAssets) {
|
1234
|
-
var
|
1246
|
+
var _a17, _b, _c, _d, _e, _f;
|
1235
1247
|
const role = message.role;
|
1236
1248
|
switch (role) {
|
1237
1249
|
case "system": {
|
1238
1250
|
return {
|
1239
1251
|
role: "system",
|
1240
1252
|
content: message.content,
|
1241
|
-
providerMetadata: (
|
1253
|
+
providerMetadata: (_a17 = message.providerOptions) != null ? _a17 : message.experimental_providerMetadata
|
1242
1254
|
};
|
1243
1255
|
}
|
1244
1256
|
case "user": {
|
@@ -1282,7 +1294,7 @@ function convertToLanguageModelMessage(message, downloadedAssets) {
|
|
1282
1294
|
return {
|
1283
1295
|
role: "tool",
|
1284
1296
|
content: message.content.map((part) => {
|
1285
|
-
var
|
1297
|
+
var _a18;
|
1286
1298
|
return {
|
1287
1299
|
type: "tool-result",
|
1288
1300
|
toolCallId: part.toolCallId,
|
@@ -1290,7 +1302,7 @@ function convertToLanguageModelMessage(message, downloadedAssets) {
|
|
1290
1302
|
result: part.result,
|
1291
1303
|
content: part.experimental_content,
|
1292
1304
|
isError: part.isError,
|
1293
|
-
providerMetadata: (
|
1305
|
+
providerMetadata: (_a18 = part.providerOptions) != null ? _a18 : part.experimental_providerMetadata
|
1294
1306
|
};
|
1295
1307
|
}),
|
1296
1308
|
providerMetadata: (_f = message.providerOptions) != null ? _f : message.experimental_providerMetadata
|
@@ -1326,12 +1338,12 @@ async function downloadAssets(messages, downloadImplementation, modelSupportsIma
|
|
1326
1338
|
);
|
1327
1339
|
}
|
1328
1340
|
function convertPartToLanguageModelPart(part, downloadedAssets) {
|
1329
|
-
var
|
1341
|
+
var _a17, _b, _c, _d;
|
1330
1342
|
if (part.type === "text") {
|
1331
1343
|
return {
|
1332
1344
|
type: "text",
|
1333
1345
|
text: part.text,
|
1334
|
-
providerMetadata: (
|
1346
|
+
providerMetadata: (_a17 = part.providerOptions) != null ? _a17 : part.experimental_providerMetadata
|
1335
1347
|
};
|
1336
1348
|
}
|
1337
1349
|
let mimeType = part.mimeType;
|
@@ -1485,6 +1497,7 @@ function prepareCallSettings({
|
|
1485
1497
|
}
|
1486
1498
|
return {
|
1487
1499
|
maxTokens,
|
1500
|
+
// TODO v5 remove default 0 for temperature
|
1488
1501
|
temperature: temperature != null ? temperature : 0,
|
1489
1502
|
topP,
|
1490
1503
|
topK,
|
@@ -1502,7 +1515,7 @@ var import_zod7 = require("zod");
|
|
1502
1515
|
|
1503
1516
|
// core/prompt/attachments-to-parts.ts
|
1504
1517
|
function attachmentsToParts(attachments) {
|
1505
|
-
var
|
1518
|
+
var _a17, _b, _c;
|
1506
1519
|
const parts = [];
|
1507
1520
|
for (const attachment of attachments) {
|
1508
1521
|
let url;
|
@@ -1514,7 +1527,7 @@ function attachmentsToParts(attachments) {
|
|
1514
1527
|
switch (url.protocol) {
|
1515
1528
|
case "http:":
|
1516
1529
|
case "https:": {
|
1517
|
-
if ((
|
1530
|
+
if ((_a17 = attachment.contentType) == null ? void 0 : _a17.startsWith("image/")) {
|
1518
1531
|
parts.push({ type: "image", image: url });
|
1519
1532
|
} else {
|
1520
1533
|
if (!attachment.contentType) {
|
@@ -1600,8 +1613,8 @@ _a8 = symbol8;
|
|
1600
1613
|
|
1601
1614
|
// core/prompt/convert-to-core-messages.ts
|
1602
1615
|
function convertToCoreMessages(messages, options) {
|
1603
|
-
var
|
1604
|
-
const tools = (
|
1616
|
+
var _a17, _b;
|
1617
|
+
const tools = (_a17 = options == null ? void 0 : options.tools) != null ? _a17 : {};
|
1605
1618
|
const coreMessages = [];
|
1606
1619
|
for (let i = 0; i < messages.length; i++) {
|
1607
1620
|
const message = messages[i];
|
@@ -1746,14 +1759,14 @@ function convertToCoreMessages(messages, options) {
|
|
1746
1759
|
break;
|
1747
1760
|
}
|
1748
1761
|
const maxStep = toolInvocations.reduce((max, toolInvocation) => {
|
1749
|
-
var
|
1750
|
-
return Math.max(max, (
|
1762
|
+
var _a18;
|
1763
|
+
return Math.max(max, (_a18 = toolInvocation.step) != null ? _a18 : 0);
|
1751
1764
|
}, 0);
|
1752
1765
|
for (let i2 = 0; i2 <= maxStep; i2++) {
|
1753
1766
|
const stepInvocations = toolInvocations.filter(
|
1754
1767
|
(toolInvocation) => {
|
1755
|
-
var
|
1756
|
-
return ((
|
1768
|
+
var _a18;
|
1769
|
+
return ((_a18 = toolInvocation.step) != null ? _a18 : 0) === i2;
|
1757
1770
|
}
|
1758
1771
|
);
|
1759
1772
|
if (stepInvocations.length === 0) {
|
@@ -2186,7 +2199,7 @@ var arrayOutputStrategy = (schema) => {
|
|
2186
2199
|
additionalProperties: false
|
2187
2200
|
},
|
2188
2201
|
validatePartialResult({ value, latestObject, isFirstDelta, isFinalDelta }) {
|
2189
|
-
var
|
2202
|
+
var _a17;
|
2190
2203
|
if (!(0, import_provider11.isJSONObject)(value) || !(0, import_provider11.isJSONArray)(value.elements)) {
|
2191
2204
|
return {
|
2192
2205
|
success: false,
|
@@ -2209,7 +2222,7 @@ var arrayOutputStrategy = (schema) => {
|
|
2209
2222
|
}
|
2210
2223
|
resultArray.push(result.value);
|
2211
2224
|
}
|
2212
|
-
const publishedElementCount = (
|
2225
|
+
const publishedElementCount = (_a17 = latestObject == null ? void 0 : latestObject.length) != null ? _a17 : 0;
|
2213
2226
|
let textDelta = "";
|
2214
2227
|
if (isFirstDelta) {
|
2215
2228
|
textDelta += "[";
|
@@ -2547,7 +2560,7 @@ async function generateObject({
|
|
2547
2560
|
}),
|
2548
2561
|
tracer,
|
2549
2562
|
fn: async (span) => {
|
2550
|
-
var
|
2563
|
+
var _a17, _b, _c, _d;
|
2551
2564
|
if (mode === "auto" || mode == null) {
|
2552
2565
|
mode = model.defaultObjectGenerationMode;
|
2553
2566
|
}
|
@@ -2576,7 +2589,7 @@ async function generateObject({
|
|
2576
2589
|
const promptMessages = await convertToLanguageModelPrompt({
|
2577
2590
|
prompt: standardizedPrompt,
|
2578
2591
|
modelSupportsImageUrls: model.supportsImageUrls,
|
2579
|
-
modelSupportsUrl: (
|
2592
|
+
modelSupportsUrl: (_a17 = model.supportsUrl) == null ? void 0 : _a17.bind(model)
|
2580
2593
|
// support 'this' context
|
2581
2594
|
});
|
2582
2595
|
const generateResult = await retry(
|
@@ -2610,7 +2623,7 @@ async function generateObject({
|
|
2610
2623
|
}),
|
2611
2624
|
tracer,
|
2612
2625
|
fn: async (span2) => {
|
2613
|
-
var
|
2626
|
+
var _a18, _b2, _c2, _d2, _e, _f;
|
2614
2627
|
const result2 = await model.doGenerate({
|
2615
2628
|
mode: {
|
2616
2629
|
type: "object-json",
|
@@ -2626,7 +2639,7 @@ async function generateObject({
|
|
2626
2639
|
headers
|
2627
2640
|
});
|
2628
2641
|
const responseData = {
|
2629
|
-
id: (_b2 = (
|
2642
|
+
id: (_b2 = (_a18 = result2.response) == null ? void 0 : _a18.id) != null ? _b2 : generateId3(),
|
2630
2643
|
timestamp: (_d2 = (_c2 = result2.response) == null ? void 0 : _c2.timestamp) != null ? _d2 : currentDate(),
|
2631
2644
|
modelId: (_f = (_e = result2.response) == null ? void 0 : _e.modelId) != null ? _f : model.modelId
|
2632
2645
|
};
|
@@ -2715,7 +2728,7 @@ async function generateObject({
|
|
2715
2728
|
}),
|
2716
2729
|
tracer,
|
2717
2730
|
fn: async (span2) => {
|
2718
|
-
var
|
2731
|
+
var _a18, _b2, _c2, _d2, _e, _f, _g, _h;
|
2719
2732
|
const result2 = await model.doGenerate({
|
2720
2733
|
mode: {
|
2721
2734
|
type: "object-tool",
|
@@ -2733,7 +2746,7 @@ async function generateObject({
|
|
2733
2746
|
abortSignal,
|
2734
2747
|
headers
|
2735
2748
|
});
|
2736
|
-
const objectText = (_b2 = (
|
2749
|
+
const objectText = (_b2 = (_a18 = result2.toolCalls) == null ? void 0 : _a18[0]) == null ? void 0 : _b2.args;
|
2737
2750
|
const responseData = {
|
2738
2751
|
id: (_d2 = (_c2 = result2.response) == null ? void 0 : _c2.id) != null ? _d2 : generateId3(),
|
2739
2752
|
timestamp: (_f = (_e = result2.response) == null ? void 0 : _e.timestamp) != null ? _f : currentDate(),
|
@@ -2881,9 +2894,9 @@ var DefaultGenerateObjectResult = class {
|
|
2881
2894
|
this.logprobs = options.logprobs;
|
2882
2895
|
}
|
2883
2896
|
toJsonResponse(init) {
|
2884
|
-
var
|
2897
|
+
var _a17;
|
2885
2898
|
return new Response(JSON.stringify(this.object), {
|
2886
|
-
status: (
|
2899
|
+
status: (_a17 = init == null ? void 0 : init.status) != null ? _a17 : 200,
|
2887
2900
|
headers: prepareResponseHeaders(init == null ? void 0 : init.headers, {
|
2888
2901
|
contentType: "application/json; charset=utf-8"
|
2889
2902
|
})
|
@@ -2918,17 +2931,17 @@ var DelayedPromise = class {
|
|
2918
2931
|
return this.promise;
|
2919
2932
|
}
|
2920
2933
|
resolve(value) {
|
2921
|
-
var
|
2934
|
+
var _a17;
|
2922
2935
|
this.status = { type: "resolved", value };
|
2923
2936
|
if (this.promise) {
|
2924
|
-
(
|
2937
|
+
(_a17 = this._resolve) == null ? void 0 : _a17.call(this, value);
|
2925
2938
|
}
|
2926
2939
|
}
|
2927
2940
|
reject(error) {
|
2928
|
-
var
|
2941
|
+
var _a17;
|
2929
2942
|
this.status = { type: "rejected", error };
|
2930
2943
|
if (this.promise) {
|
2931
|
-
(
|
2944
|
+
(_a17 = this._reject) == null ? void 0 : _a17.call(this, error);
|
2932
2945
|
}
|
2933
2946
|
}
|
2934
2947
|
};
|
@@ -3032,8 +3045,8 @@ function createStitchableStream() {
|
|
3032
3045
|
|
3033
3046
|
// core/util/now.ts
|
3034
3047
|
function now() {
|
3035
|
-
var
|
3036
|
-
return (_b = (
|
3048
|
+
var _a17, _b;
|
3049
|
+
return (_b = (_a17 = globalThis == null ? void 0 : globalThis.performance) == null ? void 0 : _a17.now()) != null ? _b : Date.now();
|
3037
3050
|
}
|
3038
3051
|
|
3039
3052
|
// core/generate-object/stream-object.ts
|
@@ -3169,7 +3182,7 @@ var DefaultStreamObjectResult = class {
|
|
3169
3182
|
tracer,
|
3170
3183
|
endWhenDone: false,
|
3171
3184
|
fn: async (rootSpan) => {
|
3172
|
-
var
|
3185
|
+
var _a17, _b;
|
3173
3186
|
if (mode === "auto" || mode == null) {
|
3174
3187
|
mode = model.defaultObjectGenerationMode;
|
3175
3188
|
}
|
@@ -3200,7 +3213,7 @@ var DefaultStreamObjectResult = class {
|
|
3200
3213
|
prompt: await convertToLanguageModelPrompt({
|
3201
3214
|
prompt: standardizedPrompt,
|
3202
3215
|
modelSupportsImageUrls: model.supportsImageUrls,
|
3203
|
-
modelSupportsUrl: (
|
3216
|
+
modelSupportsUrl: (_a17 = model.supportsUrl) == null ? void 0 : _a17.bind(model)
|
3204
3217
|
// support 'this' context
|
3205
3218
|
}),
|
3206
3219
|
providerMetadata: providerOptions,
|
@@ -3338,7 +3351,7 @@ var DefaultStreamObjectResult = class {
|
|
3338
3351
|
const transformedStream = stream.pipeThrough(new TransformStream(transformer)).pipeThrough(
|
3339
3352
|
new TransformStream({
|
3340
3353
|
async transform(chunk, controller) {
|
3341
|
-
var
|
3354
|
+
var _a18, _b2, _c;
|
3342
3355
|
if (isFirstChunk) {
|
3343
3356
|
const msToFirstChunk = now2() - startTimestampMs;
|
3344
3357
|
isFirstChunk = false;
|
@@ -3384,7 +3397,7 @@ var DefaultStreamObjectResult = class {
|
|
3384
3397
|
switch (chunk.type) {
|
3385
3398
|
case "response-metadata": {
|
3386
3399
|
response = {
|
3387
|
-
id: (
|
3400
|
+
id: (_a18 = chunk.id) != null ? _a18 : response.id,
|
3388
3401
|
timestamp: (_b2 = chunk.timestamp) != null ? _b2 : response.timestamp,
|
3389
3402
|
modelId: (_c = chunk.modelId) != null ? _c : response.modelId
|
3390
3403
|
};
|
@@ -3597,9 +3610,9 @@ var DefaultStreamObjectResult = class {
|
|
3597
3610
|
});
|
3598
3611
|
}
|
3599
3612
|
toTextStreamResponse(init) {
|
3600
|
-
var
|
3613
|
+
var _a17;
|
3601
3614
|
return new Response(this.textStream.pipeThrough(new TextEncoderStream()), {
|
3602
|
-
status: (
|
3615
|
+
status: (_a17 = init == null ? void 0 : init.status) != null ? _a17 : 200,
|
3603
3616
|
headers: prepareResponseHeaders(init == null ? void 0 : init.headers, {
|
3604
3617
|
contentType: "text/plain; charset=utf-8"
|
3605
3618
|
})
|
@@ -3675,24 +3688,24 @@ function prepareToolsAndToolChoice({
|
|
3675
3688
|
};
|
3676
3689
|
}
|
3677
3690
|
const filteredTools = activeTools != null ? Object.entries(tools).filter(
|
3678
|
-
([
|
3691
|
+
([name17]) => activeTools.includes(name17)
|
3679
3692
|
) : Object.entries(tools);
|
3680
3693
|
return {
|
3681
|
-
tools: filteredTools.map(([
|
3694
|
+
tools: filteredTools.map(([name17, tool2]) => {
|
3682
3695
|
const toolType = tool2.type;
|
3683
3696
|
switch (toolType) {
|
3684
3697
|
case void 0:
|
3685
3698
|
case "function":
|
3686
3699
|
return {
|
3687
3700
|
type: "function",
|
3688
|
-
name:
|
3701
|
+
name: name17,
|
3689
3702
|
description: tool2.description,
|
3690
3703
|
parameters: (0, import_ui_utils4.asSchema)(tool2.parameters).jsonSchema
|
3691
3704
|
};
|
3692
3705
|
case "provider-defined":
|
3693
3706
|
return {
|
3694
3707
|
type: "provider-defined",
|
3695
|
-
name:
|
3708
|
+
name: name17,
|
3696
3709
|
id: tool2.id,
|
3697
3710
|
args: tool2.args
|
3698
3711
|
};
|
@@ -3862,6 +3875,12 @@ async function doParseToolCall({
 };
 }
 
+// core/generate-text/reasoning-detail.ts
+function asReasoningText(reasoning) {
+const reasoningText = reasoning.filter((part) => part.type === "text").map((part) => part.text).join("");
+return reasoningText.length > 0 ? reasoningText : void 0;
+}
+
 // core/generate-text/to-response-messages.ts
 function toResponseMessages({
 text: text2 = "",
@@ -3910,12 +3929,6 @@ function toResponseMessages({
 return responseMessages;
 }
 
-// core/generate-text/reasoning-detail.ts
-function asReasoningText(reasoning) {
-const reasoningText = reasoning.filter((part) => part.type === "text").map((part) => part.text).join("");
-return reasoningText.length > 0 ? reasoningText : void 0;
-}
-
 // core/generate-text/generate-text.ts
 var originalGenerateId3 = (0, import_provider_utils9.createIdGenerator)({
 prefix: "aitxt",
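asReasoningText was only moved above its first caller (toResponseMessages); its behavior is unchanged: it joins the text parts of a reasoning-detail array and returns undefined when nothing is left. A behavior sketch based on the definition above (the part shapes used here are assumptions):

// Sketch, not package code.
asReasoningText([
  { type: "text", text: "Check the schema. " },
  { type: "redacted", data: "..." },   // non-text parts are ignored
  { type: "text", text: "Then call the tool." },
]);
// => "Check the schema. Then call the tool."
asReasoningText([{ type: "redacted", data: "..." }]); // => undefined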
@@ -3951,7 +3964,7 @@ async function generateText({
|
|
3951
3964
|
onStepFinish,
|
3952
3965
|
...settings
|
3953
3966
|
}) {
|
3954
|
-
var
|
3967
|
+
var _a17;
|
3955
3968
|
if (maxSteps < 1) {
|
3956
3969
|
throw new InvalidArgumentError({
|
3957
3970
|
parameter: "maxSteps",
|
@@ -3968,7 +3981,7 @@ async function generateText({
|
|
3968
3981
|
});
|
3969
3982
|
const initialPrompt = standardizePrompt({
|
3970
3983
|
prompt: {
|
3971
|
-
system: (
|
3984
|
+
system: (_a17 = output == null ? void 0 : output.injectIntoSystemPrompt({ system, model })) != null ? _a17 : system,
|
3972
3985
|
prompt,
|
3973
3986
|
messages
|
3974
3987
|
},
|
@@ -3994,7 +4007,7 @@ async function generateText({
|
|
3994
4007
|
}),
|
3995
4008
|
tracer,
|
3996
4009
|
fn: async (span) => {
|
3997
|
-
var
|
4010
|
+
var _a18, _b, _c, _d, _e, _f, _g, _h, _i, _j;
|
3998
4011
|
const mode = {
|
3999
4012
|
type: "regular",
|
4000
4013
|
...prepareToolsAndToolChoice({ tools, toolChoice, activeTools })
|
@@ -4028,7 +4041,7 @@ async function generateText({
|
|
4028
4041
|
messages: stepInputMessages
|
4029
4042
|
},
|
4030
4043
|
modelSupportsImageUrls: model.supportsImageUrls,
|
4031
|
-
modelSupportsUrl: (
|
4044
|
+
modelSupportsUrl: (_a18 = model.supportsUrl) == null ? void 0 : _a18.bind(model)
|
4032
4045
|
// support 'this' context
|
4033
4046
|
});
|
4034
4047
|
currentModelResponse = await retry(
|
@@ -4049,8 +4062,8 @@ async function generateText({
|
|
4049
4062
|
"ai.prompt.tools": {
|
4050
4063
|
// convert the language model level tools:
|
4051
4064
|
input: () => {
|
4052
|
-
var
|
4053
|
-
return (
|
4065
|
+
var _a19;
|
4066
|
+
return (_a19 = mode.tools) == null ? void 0 : _a19.map((tool2) => JSON.stringify(tool2));
|
4054
4067
|
}
|
4055
4068
|
},
|
4056
4069
|
"ai.prompt.toolChoice": {
|
@@ -4070,7 +4083,7 @@ async function generateText({
|
|
4070
4083
|
}),
|
4071
4084
|
tracer,
|
4072
4085
|
fn: async (span2) => {
|
4073
|
-
var
|
4086
|
+
var _a19, _b2, _c2, _d2, _e2, _f2;
|
4074
4087
|
const result = await model.doGenerate({
|
4075
4088
|
mode,
|
4076
4089
|
...callSettings,
|
@@ -4082,7 +4095,7 @@ async function generateText({
|
|
4082
4095
|
headers
|
4083
4096
|
});
|
4084
4097
|
const responseData = {
|
4085
|
-
id: (_b2 = (
|
4098
|
+
id: (_b2 = (_a19 = result.response) == null ? void 0 : _a19.id) != null ? _b2 : generateId3(),
|
4086
4099
|
timestamp: (_d2 = (_c2 = result.response) == null ? void 0 : _c2.timestamp) != null ? _d2 : currentDate(),
|
4087
4100
|
modelId: (_f2 = (_e2 = result.response) == null ? void 0 : _e2.modelId) != null ? _f2 : model.modelId
|
4088
4101
|
};
|
@@ -4378,7 +4391,7 @@ var import_provider_utils10 = require("@ai-sdk/provider-utils");
 var import_ui_utils6 = require("@ai-sdk/ui-utils");
 
 // errors/index.ts
-var
+var import_provider20 = require("@ai-sdk/provider");
 
 // errors/invalid-stream-part-error.ts
 var import_provider18 = require("@ai-sdk/provider");
@@ -4401,6 +4414,27 @@ var InvalidStreamPartError = class extends import_provider18.AISDKError {
 };
 _a14 = symbol14;
 
+// errors/mcp-client-error.ts
+var import_provider19 = require("@ai-sdk/provider");
+var name15 = "AI_MCPClientError";
+var marker15 = `vercel.ai.error.${name15}`;
+var symbol15 = Symbol.for(marker15);
+var _a15;
+var MCPClientError = class extends import_provider19.AISDKError {
+constructor({
+name: name17 = "MCPClientError",
+message,
+cause
+}) {
+super({ name: name17, message, cause });
+this[_a15] = true;
+}
+static isInstance(error) {
+return import_provider19.AISDKError.hasMarker(error, marker15);
+}
+};
+_a15 = symbol15;
+
 // core/generate-text/output.ts
 var text = () => ({
 type: "text",
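MCPClientError follows the same marker convention as the other AI SDK errors in this file (symbol14/_a14 and so on): instances are branded with Symbol.for("vercel.ai.error.AI_MCPClientError"), and isInstance checks that marker instead of using instanceof, so detection survives duplicated copies of the package. A simplified sketch of the pattern (not the package's exact code):

// Simplified sketch of the marker-based error check.
const marker = "vercel.ai.error.AI_MCPClientError";
const symbol = Symbol.for(marker);

class DemoError extends Error {
  constructor(message) {
    super(message);
    this[symbol] = true; // brand the instance with a global symbol
  }
  static isInstance(error) {
    // hasMarker-style check: works even if DemoError came from another bundle copy
    return error != null && typeof error === "object" && error[symbol] === true;
  }
}

DemoError.isInstance(new DemoError("boom")); // => true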
@@ -4479,7 +4513,7 @@ var object = ({
 };
 
 // core/generate-text/smooth-stream.ts
-var
+var import_provider21 = require("@ai-sdk/provider");
 var import_provider_utils11 = require("@ai-sdk/provider-utils");
 var CHUNKING_REGEXPS = {
 word: /\s*\S+\s+/m,
@@ -4492,7 +4526,7 @@ function smoothStream({
 } = {}) {
 const chunkingRegexp = typeof chunking === "string" ? CHUNKING_REGEXPS[chunking] : chunking;
 if (chunkingRegexp == null) {
-throw new
+throw new import_provider21.InvalidArgumentError({
 argument: "chunking",
 message: `Chunking must be "word" or "line" or a RegExp. Received: ${chunking}`
 });
@@ -4527,7 +4561,7 @@ function smoothStream({
 }
 
 // core/generate-text/stream-text.ts
-var
+var import_provider22 = require("@ai-sdk/provider");
 var import_provider_utils12 = require("@ai-sdk/provider-utils");
 var import_ui_utils8 = require("@ai-sdk/ui-utils");
 
|
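smoothStream still accepts chunking: "word" | "line" | RegExp and now throws InvalidArgumentError from @ai-sdk/provider for anything else. A hedged usage sketch (the experimental_transform wiring follows the AI SDK docs rather than this diff):

// Usage sketch; names outside this diff are assumptions.
import { streamText, smoothStream } from "ai";
import { openai } from "@ai-sdk/openai";

const result = streamText({
  model: openai("gpt-4o-mini"),
  prompt: "Write a haiku about diffs.",
  // Re-chunk the output word by word; an unknown chunking value raises InvalidArgumentError.
  experimental_transform: smoothStream({ chunking: "word" }),
});

for await (const textPart of result.textStream) {
  process.stdout.write(textPart);
}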
|
@@ -4981,7 +5015,7 @@ var DefaultStreamTextResult = class {
|
|
4981
5015
|
this.requestPromise = new DelayedPromise();
|
4982
5016
|
this.responsePromise = new DelayedPromise();
|
4983
5017
|
this.stepsPromise = new DelayedPromise();
|
4984
|
-
var
|
5018
|
+
var _a17;
|
4985
5019
|
if (maxSteps < 1) {
|
4986
5020
|
throw new InvalidArgumentError({
|
4987
5021
|
parameter: "maxSteps",
|
@@ -5035,7 +5069,7 @@ var DefaultStreamTextResult = class {
|
|
5035
5069
|
}
|
5036
5070
|
if (part.type === "reasoning-signature") {
|
5037
5071
|
if (activeReasoningText == null) {
|
5038
|
-
throw new
|
5072
|
+
throw new import_provider22.AISDKError({
|
5039
5073
|
name: "InvalidStreamPart",
|
5040
5074
|
message: "reasoning-signature without reasoning"
|
5041
5075
|
});
|
@@ -5127,7 +5161,7 @@ var DefaultStreamTextResult = class {
|
|
5127
5161
|
}
|
5128
5162
|
},
|
5129
5163
|
async flush(controller) {
|
5130
|
-
var
|
5164
|
+
var _a18;
|
5131
5165
|
try {
|
5132
5166
|
if (recordedSteps.length === 0) {
|
5133
5167
|
return;
|
@@ -5164,7 +5198,7 @@ var DefaultStreamTextResult = class {
|
|
5164
5198
|
sources: lastStep.sources,
|
5165
5199
|
toolCalls: lastStep.toolCalls,
|
5166
5200
|
toolResults: lastStep.toolResults,
|
5167
|
-
request: (
|
5201
|
+
request: (_a18 = lastStep.request) != null ? _a18 : {},
|
5168
5202
|
response: lastStep.response,
|
5169
5203
|
warnings: lastStep.warnings,
|
5170
5204
|
providerMetadata: lastStep.providerMetadata,
|
@@ -5179,8 +5213,8 @@ var DefaultStreamTextResult = class {
|
|
5179
5213
|
"ai.response.text": { output: () => recordedFullText },
|
5180
5214
|
"ai.response.toolCalls": {
|
5181
5215
|
output: () => {
|
5182
|
-
var
|
5183
|
-
return ((
|
5216
|
+
var _a19;
|
5217
|
+
return ((_a19 = lastStep.toolCalls) == null ? void 0 : _a19.length) ? JSON.stringify(lastStep.toolCalls) : void 0;
|
5184
5218
|
}
|
5185
5219
|
},
|
5186
5220
|
"ai.usage.promptTokens": usage.promptTokens,
|
@@ -5222,7 +5256,7 @@ var DefaultStreamTextResult = class {
|
|
5222
5256
|
});
|
5223
5257
|
const initialPrompt = standardizePrompt({
|
5224
5258
|
prompt: {
|
5225
|
-
system: (
|
5259
|
+
system: (_a17 = output == null ? void 0 : output.injectIntoSystemPrompt({ system, model })) != null ? _a17 : system,
|
5226
5260
|
prompt,
|
5227
5261
|
messages
|
5228
5262
|
},
|
@@ -5256,7 +5290,7 @@ var DefaultStreamTextResult = class {
|
|
5256
5290
|
hasLeadingWhitespace,
|
5257
5291
|
messageId
|
5258
5292
|
}) {
|
5259
|
-
var
|
5293
|
+
var _a18;
|
5260
5294
|
const promptFormat = responseMessages.length === 0 ? initialPrompt.type : "messages";
|
5261
5295
|
const stepInputMessages = [
|
5262
5296
|
...initialPrompt.messages,
|
@@ -5269,7 +5303,7 @@ var DefaultStreamTextResult = class {
|
|
5269
5303
|
messages: stepInputMessages
|
5270
5304
|
},
|
5271
5305
|
modelSupportsImageUrls: model.supportsImageUrls,
|
5272
|
-
modelSupportsUrl: (
|
5306
|
+
modelSupportsUrl: (_a18 = model.supportsUrl) == null ? void 0 : _a18.bind(model)
|
5273
5307
|
// support 'this' context
|
5274
5308
|
});
|
5275
5309
|
const mode = {
|
@@ -5300,8 +5334,8 @@ var DefaultStreamTextResult = class {
|
|
5300
5334
|
"ai.prompt.tools": {
|
5301
5335
|
// convert the language model level tools:
|
5302
5336
|
input: () => {
|
5303
|
-
var
|
5304
|
-
return (
|
5337
|
+
var _a19;
|
5338
|
+
return (_a19 = mode.tools) == null ? void 0 : _a19.map((tool2) => JSON.stringify(tool2));
|
5305
5339
|
}
|
5306
5340
|
},
|
5307
5341
|
"ai.prompt.toolChoice": {
|
@@ -5388,7 +5422,7 @@ var DefaultStreamTextResult = class {
|
|
5388
5422
|
transformedStream.pipeThrough(
|
5389
5423
|
new TransformStream({
|
5390
5424
|
async transform(chunk, controller) {
|
5391
|
-
var
|
5425
|
+
var _a19, _b, _c;
|
5392
5426
|
if (stepFirstChunk) {
|
5393
5427
|
const msToFirstChunk = now2() - startTimestampMs;
|
5394
5428
|
stepFirstChunk = false;
|
@@ -5479,7 +5513,7 @@ var DefaultStreamTextResult = class {
|
|
5479
5513
|
}
|
5480
5514
|
case "response-metadata": {
|
5481
5515
|
stepResponse = {
|
5482
|
-
id: (
|
5516
|
+
id: (_a19 = chunk.id) != null ? _a19 : stepResponse.id,
|
5483
5517
|
timestamp: (_b = chunk.timestamp) != null ? _b : stepResponse.timestamp,
|
5484
5518
|
modelId: (_c = chunk.modelId) != null ? _c : stepResponse.modelId
|
5485
5519
|
};
|
@@ -6002,9 +6036,9 @@ var DefaultStreamTextResult = class {
|
|
6002
6036
|
);
|
6003
6037
|
}
|
6004
6038
|
toTextStreamResponse(init) {
|
6005
|
-
var
|
6039
|
+
var _a17;
|
6006
6040
|
return new Response(this.textStream.pipeThrough(new TextEncoderStream()), {
|
6007
|
-
status: (
|
6041
|
+
status: (_a17 = init == null ? void 0 : init.status) != null ? _a17 : 200,
|
6008
6042
|
headers: prepareResponseHeaders(init == null ? void 0 : init.headers, {
|
6009
6043
|
contentType: "text/plain; charset=utf-8"
|
6010
6044
|
})
|
@@ -6061,7 +6095,7 @@ function extractReasoningMiddleware({
|
|
6061
6095
|
);
|
6062
6096
|
textWithoutReasoning = beforeMatch + (beforeMatch.length > 0 && afterMatch.length > 0 ? separator : "") + afterMatch;
|
6063
6097
|
}
|
6064
|
-
return { text: textWithoutReasoning, reasoning
|
6098
|
+
return { ...rest, text: textWithoutReasoning, reasoning };
|
6065
6099
|
},
|
6066
6100
|
wrapStream: async ({ doStream }) => {
|
6067
6101
|
const { stream, ...rest } = await doStream();
|
@@ -6139,7 +6173,7 @@ var doWrap = ({
|
|
6139
6173
|
modelId,
|
6140
6174
|
providerId
|
6141
6175
|
}) => {
|
6142
|
-
var
|
6176
|
+
var _a17;
|
6143
6177
|
async function doTransform({
|
6144
6178
|
params,
|
6145
6179
|
type
|
@@ -6152,7 +6186,7 @@ var doWrap = ({
|
|
6152
6186
|
modelId: modelId != null ? modelId : model.modelId,
|
6153
6187
|
defaultObjectGenerationMode: model.defaultObjectGenerationMode,
|
6154
6188
|
supportsImageUrls: model.supportsImageUrls,
|
6155
|
-
supportsUrl: (
|
6189
|
+
supportsUrl: (_a17 = model.supportsUrl) == null ? void 0 : _a17.bind(model),
|
6156
6190
|
supportsStructuredOutputs: model.supportsStructuredOutputs,
|
6157
6191
|
async doGenerate(params) {
|
6158
6192
|
const transformedParams = await doTransform({ params, type: "generate" });
|
@@ -6186,7 +6220,7 @@ function appendResponseMessages({
|
|
6186
6220
|
responseMessages,
|
6187
6221
|
_internal: { currentDate = () => /* @__PURE__ */ new Date() } = {}
|
6188
6222
|
}) {
|
6189
|
-
var
|
6223
|
+
var _a17, _b, _c, _d;
|
6190
6224
|
const clonedMessages = structuredClone(messages);
|
6191
6225
|
for (const message of responseMessages) {
|
6192
6226
|
const role = message.role;
|
@@ -6268,7 +6302,7 @@ function appendResponseMessages({
|
|
6268
6302
|
const maxStep = (0, import_ui_utils9.extractMaxToolInvocationStep)(
|
6269
6303
|
lastMessage.toolInvocations
|
6270
6304
|
);
|
6271
|
-
(
|
6305
|
+
(_a17 = lastMessage.parts) != null ? _a17 : lastMessage.parts = [];
|
6272
6306
|
lastMessage.content = textContent;
|
6273
6307
|
lastMessage.reasoning = reasoningTextContent;
|
6274
6308
|
lastMessage.parts.push(...parts);
|
@@ -6344,7 +6378,7 @@ function appendResponseMessages({
|
|
6344
6378
|
}
|
6345
6379
|
|
6346
6380
|
// core/registry/custom-provider.ts
|
6347
|
-
var
|
6381
|
+
var import_provider23 = require("@ai-sdk/provider");
|
6348
6382
|
function customProvider({
|
6349
6383
|
languageModels,
|
6350
6384
|
textEmbeddingModels,
|
@@ -6359,7 +6393,7 @@ function customProvider({
|
|
6359
6393
|
if (fallbackProvider) {
|
6360
6394
|
return fallbackProvider.languageModel(modelId);
|
6361
6395
|
}
|
6362
|
-
throw new
|
6396
|
+
throw new import_provider23.NoSuchModelError({ modelId, modelType: "languageModel" });
|
6363
6397
|
},
|
6364
6398
|
textEmbeddingModel(modelId) {
|
6365
6399
|
if (textEmbeddingModels != null && modelId in textEmbeddingModels) {
|
@@ -6368,7 +6402,7 @@ function customProvider({
|
|
6368
6402
|
if (fallbackProvider) {
|
6369
6403
|
return fallbackProvider.textEmbeddingModel(modelId);
|
6370
6404
|
}
|
6371
|
-
throw new
|
6405
|
+
throw new import_provider23.NoSuchModelError({ modelId, modelType: "textEmbeddingModel" });
|
6372
6406
|
},
|
6373
6407
|
imageModel(modelId) {
|
6374
6408
|
if (imageModels != null && modelId in imageModels) {
|
@@ -6377,19 +6411,19 @@ function customProvider({
 if (fallbackProvider == null ? void 0 : fallbackProvider.imageModel) {
 return fallbackProvider.imageModel(modelId);
 }
-throw new
+throw new import_provider23.NoSuchModelError({ modelId, modelType: "imageModel" });
 }
 };
 }
 var experimental_customProvider = customProvider;
 
 // core/registry/no-such-provider-error.ts
-var
-var
-var
-var
-var
-var NoSuchProviderError = class extends
+var import_provider24 = require("@ai-sdk/provider");
+var name16 = "AI_NoSuchProviderError";
+var marker16 = `vercel.ai.error.${name16}`;
+var symbol16 = Symbol.for(marker16);
+var _a16;
+var NoSuchProviderError = class extends import_provider24.NoSuchModelError {
 constructor({
 modelId,
 modelType,
@@ -6397,19 +6431,19 @@ var NoSuchProviderError = class extends import_provider23.NoSuchModelError {
 availableProviders,
 message = `No such provider: ${providerId} (available providers: ${availableProviders.join()})`
 }) {
-super({ errorName:
-this[
+super({ errorName: name16, modelId, modelType, message });
+this[_a16] = true;
 this.providerId = providerId;
 this.availableProviders = availableProviders;
 }
 static isInstance(error) {
-return
+return import_provider24.AISDKError.hasMarker(error, marker16);
 }
 };
-
+_a16 = symbol16;
 
 // core/registry/provider-registry.ts
-var
+var import_provider25 = require("@ai-sdk/provider");
 function experimental_createProviderRegistry(providers) {
 const registry = new DefaultProviderRegistry();
 for (const [id, provider] of Object.entries(providers)) {
@@ -6442,7 +6476,7 @@ var DefaultProviderRegistry = class {
 splitId(id, modelType) {
 const index = id.indexOf(":");
 if (index === -1) {
-throw new
+throw new import_provider25.NoSuchModelError({
 modelId: id,
 modelType,
 message: `Invalid ${modelType} id for registry: ${id} (must be in the format "providerId:modelId")`
@@ -6451,21 +6485,21 @@ var DefaultProviderRegistry = class {
 return [id.slice(0, index), id.slice(index + 1)];
 }
 languageModel(id) {
-var
+var _a17, _b;
 const [providerId, modelId] = this.splitId(id, "languageModel");
-const model = (_b = (
+const model = (_b = (_a17 = this.getProvider(providerId)).languageModel) == null ? void 0 : _b.call(_a17, modelId);
 if (model == null) {
-throw new
+throw new import_provider25.NoSuchModelError({ modelId: id, modelType: "languageModel" });
 }
 return model;
 }
 textEmbeddingModel(id) {
-var
+var _a17;
 const [providerId, modelId] = this.splitId(id, "textEmbeddingModel");
 const provider = this.getProvider(providerId);
-const model = (
+const model = (_a17 = provider.textEmbeddingModel) == null ? void 0 : _a17.call(provider, modelId);
 if (model == null) {
-throw new
+throw new import_provider25.NoSuchModelError({
 modelId: id,
 modelType: "textEmbeddingModel"
 });
@@ -6473,12 +6507,12 @@ var DefaultProviderRegistry = class {
 return model;
 }
 imageModel(id) {
-var
+var _a17;
 const [providerId, modelId] = this.splitId(id, "imageModel");
 const provider = this.getProvider(providerId);
-const model = (
+const model = (_a17 = provider.imageModel) == null ? void 0 : _a17.call(provider, modelId);
 if (model == null) {
-throw new
+throw new import_provider25.NoSuchModelError({ modelId: id, modelType: "imageModel" });
 }
 return model;
 }
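The registry changes are limited to optional-call guards (_a17) and the renamed provider import; lookups still use "providerId:modelId" ids, as enforced by splitId above. A small sketch, assuming provider instances as documented for experimental_createProviderRegistry:

// Sketch; the provider packages used here are assumptions.
import { experimental_createProviderRegistry as createProviderRegistry } from "ai";
import { openai } from "@ai-sdk/openai";
import { anthropic } from "@ai-sdk/anthropic";

const registry = createProviderRegistry({ openai, anthropic });

// Ids must be "providerId:modelId"; anything else hits the NoSuchModelError thrown in splitId.
const model = registry.languageModel("openai:gpt-4o-mini");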
@@ -6490,11 +6524,727 @@ var DefaultProviderRegistry = class {
|
|
6490
6524
|
}
|
6491
6525
|
};
|
6492
6526
|
|
6527
|
+
// core/tool/mcp/mcp-client.ts
|
6528
|
+
var import_ui_utils10 = require("@ai-sdk/ui-utils");
|
6529
|
+
|
6493
6530
|
// core/tool/tool.ts
|
6494
6531
|
function tool(tool2) {
|
6495
6532
|
return tool2;
|
6496
6533
|
}
|
6497
6534
|
|
6535
|
+
// core/tool/mcp/types.ts
|
6536
|
+
var import_zod8 = require("zod");
|
6537
|
+
var LATEST_PROTOCOL_VERSION = "2024-11-05";
|
6538
|
+
var SUPPORTED_PROTOCOL_VERSIONS = [
|
6539
|
+
LATEST_PROTOCOL_VERSION,
|
6540
|
+
"2024-10-07"
|
6541
|
+
];
|
6542
|
+
var JSONRPC_VERSION = "2.0";
|
6543
|
+
var ClientOrServerImplementationSchema = import_zod8.z.object({
|
6544
|
+
name: import_zod8.z.string(),
|
6545
|
+
version: import_zod8.z.string()
|
6546
|
+
}).passthrough();
|
6547
|
+
var BaseParamsSchema = import_zod8.z.object({
|
6548
|
+
_meta: import_zod8.z.optional(import_zod8.z.object({}).passthrough())
|
6549
|
+
}).passthrough();
|
6550
|
+
var RequestSchema = import_zod8.z.object({
|
6551
|
+
method: import_zod8.z.string(),
|
6552
|
+
params: import_zod8.z.optional(BaseParamsSchema)
|
6553
|
+
});
|
6554
|
+
var ResultSchema = BaseParamsSchema;
|
6555
|
+
var NotificationSchema = import_zod8.z.object({
|
6556
|
+
method: import_zod8.z.string(),
|
6557
|
+
params: import_zod8.z.optional(BaseParamsSchema)
|
6558
|
+
});
|
6559
|
+
var RequestIdSchema = import_zod8.z.union([import_zod8.z.string(), import_zod8.z.number().int()]);
|
6560
|
+
var JSONRPCRequestSchema = import_zod8.z.object({
|
6561
|
+
jsonrpc: import_zod8.z.literal(JSONRPC_VERSION),
|
6562
|
+
id: RequestIdSchema
|
6563
|
+
}).merge(RequestSchema).strict();
|
6564
|
+
var JSONRPCResponseSchema = import_zod8.z.object({
|
6565
|
+
jsonrpc: import_zod8.z.literal(JSONRPC_VERSION),
|
6566
|
+
id: RequestIdSchema,
|
6567
|
+
result: ResultSchema
|
6568
|
+
}).strict();
|
6569
|
+
var JSONRPCErrorSchema = import_zod8.z.object({
|
6570
|
+
jsonrpc: import_zod8.z.literal(JSONRPC_VERSION),
|
6571
|
+
id: RequestIdSchema,
|
6572
|
+
error: import_zod8.z.object({
|
6573
|
+
code: import_zod8.z.number().int(),
|
6574
|
+
message: import_zod8.z.string(),
|
6575
|
+
data: import_zod8.z.optional(import_zod8.z.unknown())
|
6576
|
+
})
|
6577
|
+
}).strict();
|
6578
|
+
var JSONRPCNotificationSchema = import_zod8.z.object({
|
6579
|
+
jsonrpc: import_zod8.z.literal(JSONRPC_VERSION)
|
6580
|
+
}).merge(NotificationSchema).strict();
|
6581
|
+
var JSONRPCMessageSchema = import_zod8.z.union([
|
6582
|
+
JSONRPCRequestSchema,
|
6583
|
+
JSONRPCNotificationSchema,
|
6584
|
+
JSONRPCResponseSchema,
|
6585
|
+
JSONRPCErrorSchema
|
6586
|
+
]);
|
6587
|
+
var ServerCapabilitiesSchema = import_zod8.z.object({
|
6588
|
+
experimental: import_zod8.z.optional(import_zod8.z.object({}).passthrough()),
|
6589
|
+
logging: import_zod8.z.optional(import_zod8.z.object({}).passthrough()),
|
6590
|
+
prompts: import_zod8.z.optional(
|
6591
|
+
import_zod8.z.object({
|
6592
|
+
listChanged: import_zod8.z.optional(import_zod8.z.boolean())
|
6593
|
+
}).passthrough()
|
6594
|
+
),
|
6595
|
+
resources: import_zod8.z.optional(
|
6596
|
+
import_zod8.z.object({
|
6597
|
+
subscribe: import_zod8.z.optional(import_zod8.z.boolean()),
|
6598
|
+
listChanged: import_zod8.z.optional(import_zod8.z.boolean())
|
6599
|
+
}).passthrough()
|
6600
|
+
),
|
6601
|
+
tools: import_zod8.z.optional(
|
6602
|
+
import_zod8.z.object({
|
6603
|
+
listChanged: import_zod8.z.optional(import_zod8.z.boolean())
|
6604
|
+
}).passthrough()
|
6605
|
+
)
|
6606
|
+
}).passthrough();
|
6607
|
+
var InitializeResultSchema = ResultSchema.extend({
|
6608
|
+
protocolVersion: import_zod8.z.string(),
|
6609
|
+
capabilities: ServerCapabilitiesSchema,
|
6610
|
+
serverInfo: ClientOrServerImplementationSchema,
|
6611
|
+
instructions: import_zod8.z.optional(import_zod8.z.string())
|
6612
|
+
});
|
6613
|
+
var PaginatedResultSchema = ResultSchema.extend({
|
6614
|
+
nextCursor: import_zod8.z.optional(import_zod8.z.string())
|
6615
|
+
});
|
6616
|
+
var ToolSchema = import_zod8.z.object({
|
6617
|
+
name: import_zod8.z.string(),
|
6618
|
+
description: import_zod8.z.optional(import_zod8.z.string()),
|
6619
|
+
inputSchema: import_zod8.z.object({
|
6620
|
+
type: import_zod8.z.literal("object"),
|
6621
|
+
properties: import_zod8.z.optional(import_zod8.z.object({}).passthrough())
|
6622
|
+
}).passthrough()
|
6623
|
+
}).passthrough();
|
6624
|
+
var ListToolsResultSchema = PaginatedResultSchema.extend({
|
6625
|
+
tools: import_zod8.z.array(ToolSchema)
|
6626
|
+
});
|
6627
|
+
var TextContentSchema = import_zod8.z.object({
|
6628
|
+
type: import_zod8.z.literal("text"),
|
6629
|
+
text: import_zod8.z.string()
|
6630
|
+
}).passthrough();
|
6631
|
+
var ImageContentSchema = import_zod8.z.object({
|
6632
|
+
type: import_zod8.z.literal("image"),
|
6633
|
+
data: import_zod8.z.string().base64(),
|
6634
|
+
mimeType: import_zod8.z.string()
|
6635
|
+
}).passthrough();
|
6636
|
+
var ResourceContentsSchema = import_zod8.z.object({
|
6637
|
+
/**
|
6638
|
+
* The URI of this resource.
|
6639
|
+
*/
|
6640
|
+
uri: import_zod8.z.string(),
|
6641
|
+
/**
|
6642
|
+
* The MIME type of this resource, if known.
|
6643
|
+
*/
|
6644
|
+
mimeType: import_zod8.z.optional(import_zod8.z.string())
|
6645
|
+
}).passthrough();
|
6646
|
+
var TextResourceContentsSchema = ResourceContentsSchema.extend({
|
6647
|
+
text: import_zod8.z.string()
|
6648
|
+
});
|
6649
|
+
var BlobResourceContentsSchema = ResourceContentsSchema.extend({
|
6650
|
+
blob: import_zod8.z.string().base64()
|
6651
|
+
});
|
6652
|
+
var EmbeddedResourceSchema = import_zod8.z.object({
|
6653
|
+
type: import_zod8.z.literal("resource"),
|
6654
|
+
resource: import_zod8.z.union([TextResourceContentsSchema, BlobResourceContentsSchema])
|
6655
|
+
}).passthrough();
|
6656
|
+
var CallToolResultSchema = ResultSchema.extend({
|
6657
|
+
content: import_zod8.z.array(
|
6658
|
+
import_zod8.z.union([TextContentSchema, ImageContentSchema, EmbeddedResourceSchema])
|
6659
|
+
),
|
6660
|
+
isError: import_zod8.z.boolean().default(false).optional()
|
6661
|
+
}).or(
|
6662
|
+
ResultSchema.extend({
|
6663
|
+
toolResult: import_zod8.z.unknown()
|
6664
|
+
})
|
6665
|
+
);
|
6666
|
+
|
6667
|
+
// core/tool/mcp/utils.ts
|
6668
|
+
function detectRuntime() {
|
6669
|
+
var _a17, _b;
|
6670
|
+
if (typeof window !== "undefined") {
|
6671
|
+
return "browser";
|
6672
|
+
}
|
6673
|
+
if (((_b = (_a17 = globalThis.process) == null ? void 0 : _a17.release) == null ? void 0 : _b.name) === "node") {
|
6674
|
+
return "node";
|
6675
|
+
}
|
6676
|
+
return null;
|
6677
|
+
}
|
6678
|
+
async function createChildProcess(config, signal) {
|
6679
|
+
var _a17, _b, _c;
|
6680
|
+
const runtime = detectRuntime();
|
6681
|
+
if (runtime !== "node") {
|
6682
|
+
throw new MCPClientError({
|
6683
|
+
message: "Attempted to use child_process module outside of Node.js environment"
|
6684
|
+
});
|
6685
|
+
}
|
6686
|
+
let childProcess;
|
6687
|
+
try {
|
6688
|
+
childProcess = await import("child_process");
|
6689
|
+
} catch (error) {
|
6690
|
+
try {
|
6691
|
+
childProcess = require("child_process");
|
6692
|
+
} catch (innerError) {
|
6693
|
+
throw new MCPClientError({
|
6694
|
+
message: "Failed to load child_process module dynamically",
|
6695
|
+
cause: innerError
|
6696
|
+
});
|
6697
|
+
}
|
6698
|
+
}
|
6699
|
+
const { spawn } = childProcess;
|
6700
|
+
return spawn(config.command, (_a17 = config.args) != null ? _a17 : [], {
|
6701
|
+
env: (_b = config.env) != null ? _b : getDefaultEnvironment(),
|
6702
|
+
stdio: ["pipe", "pipe", (_c = config.stderr) != null ? _c : "inherit"],
|
6703
|
+
shell: false,
|
6704
|
+
signal,
|
6705
|
+
windowsHide: process.platform === "win32" && isElectron(),
|
6706
|
+
cwd: config.cwd
|
6707
|
+
});
|
6708
|
+
}
|
6709
|
+
var DEFAULT_INHERITED_ENV_VARS = process.platform === "win32" ? [
|
6710
|
+
"APPDATA",
|
6711
|
+
"HOMEDRIVE",
|
6712
|
+
"HOMEPATH",
|
6713
|
+
"LOCALAPPDATA",
|
6714
|
+
"PATH",
|
6715
|
+
"PROCESSOR_ARCHITECTURE",
|
6716
|
+
"SYSTEMDRIVE",
|
6717
|
+
"SYSTEMROOT",
|
6718
|
+
"TEMP",
|
6719
|
+
"USERNAME",
|
6720
|
+
"USERPROFILE"
|
6721
|
+
] : ["HOME", "LOGNAME", "PATH", "SHELL", "TERM", "USER"];
|
6722
|
+
function getDefaultEnvironment() {
|
6723
|
+
const env = {};
|
6724
|
+
for (const key of DEFAULT_INHERITED_ENV_VARS) {
|
6725
|
+
const value = process.env[key];
|
6726
|
+
if (value === void 0) {
|
6727
|
+
continue;
|
6728
|
+
}
|
6729
|
+
if (value.startsWith("()")) {
|
6730
|
+
continue;
|
6731
|
+
}
|
6732
|
+
env[key] = value;
|
6733
|
+
}
|
6734
|
+
return env;
|
6735
|
+
}
|
6736
|
+
function isElectron() {
|
6737
|
+
return "type" in process;
|
6738
|
+
}
|
6739
|
+
|
6740
|
+
// core/tool/mcp/mcp-stdio-transport.ts
|
6741
|
+
var StdioClientTransport = class {
|
6742
|
+
constructor(server) {
|
6743
|
+
this.abortController = new AbortController();
|
6744
|
+
this.readBuffer = new ReadBuffer();
|
6745
|
+
this.serverParams = server;
|
6746
|
+
}
|
6747
|
+
async start() {
|
6748
|
+
if (this.process) {
|
6749
|
+
throw new MCPClientError({
|
6750
|
+
message: "StdioClientTransport already started."
|
6751
|
+
});
|
6752
|
+
}
|
6753
|
+
return new Promise(async (resolve, reject) => {
|
6754
|
+
var _a17, _b, _c, _d;
|
6755
|
+
try {
|
6756
|
+
const process2 = await createChildProcess(
|
6757
|
+
this.serverParams,
|
6758
|
+
this.abortController.signal
|
6759
|
+
);
|
6760
|
+
this.process = process2;
|
6761
|
+
this.process.on("error", (error) => {
|
6762
|
+
var _a18, _b2;
|
6763
|
+
if (error.name === "AbortError") {
|
6764
|
+
(_a18 = this.onClose) == null ? void 0 : _a18.call(this);
|
6765
|
+
return;
|
6766
|
+
}
|
6767
|
+
reject(error);
|
6768
|
+
(_b2 = this.onError) == null ? void 0 : _b2.call(this, error);
|
6769
|
+
});
|
6770
|
+
this.process.on("spawn", () => {
|
6771
|
+
resolve();
|
6772
|
+
});
|
6773
|
+
this.process.on("close", (_code) => {
|
6774
|
+
var _a18;
|
6775
|
+
this.process = void 0;
|
6776
|
+
(_a18 = this.onClose) == null ? void 0 : _a18.call(this);
|
6777
|
+
});
|
6778
|
+
(_a17 = this.process.stdin) == null ? void 0 : _a17.on("error", (error) => {
|
6779
|
+
var _a18;
|
6780
|
+
(_a18 = this.onError) == null ? void 0 : _a18.call(this, error);
|
6781
|
+
});
|
6782
|
+
(_b = this.process.stdout) == null ? void 0 : _b.on("data", (chunk) => {
|
6783
|
+
this.readBuffer.append(chunk);
|
6784
|
+
this.processReadBuffer();
|
6785
|
+
});
|
6786
|
+
(_c = this.process.stdout) == null ? void 0 : _c.on("error", (error) => {
|
6787
|
+
var _a18;
|
6788
|
+
(_a18 = this.onError) == null ? void 0 : _a18.call(this, error);
|
6789
|
+
});
|
6790
|
+
} catch (error) {
|
6791
|
+
reject(error);
|
6792
|
+
(_d = this.onError) == null ? void 0 : _d.call(this, error);
|
6793
|
+
}
|
6794
|
+
});
|
6795
|
+
}
|
6796
|
+
processReadBuffer() {
|
6797
|
+
var _a17, _b;
|
6798
|
+
while (true) {
|
6799
|
+
try {
|
6800
|
+
const message = this.readBuffer.readMessage();
|
6801
|
+
if (message === null) {
|
6802
|
+
break;
|
6803
|
+
}
|
6804
|
+
(_a17 = this.onMessage) == null ? void 0 : _a17.call(this, message);
|
6805
|
+
} catch (error) {
|
6806
|
+
(_b = this.onError) == null ? void 0 : _b.call(this, error);
|
6807
|
+
}
|
6808
|
+
}
|
6809
|
+
}
|
6810
|
+
async close() {
|
6811
|
+
this.abortController.abort();
|
6812
|
+
this.process = void 0;
|
6813
|
+
this.readBuffer.clear();
|
6814
|
+
}
|
6815
|
+
send(message) {
|
6816
|
+
return new Promise((resolve) => {
|
6817
|
+
var _a17;
|
6818
|
+
if (!((_a17 = this.process) == null ? void 0 : _a17.stdin)) {
|
6819
|
+
throw new MCPClientError({
|
6820
|
+
message: "StdioClientTransport not connected"
|
6821
|
+
});
|
6822
|
+
}
|
6823
|
+
const json = serializeMessage(message);
|
6824
|
+
if (this.process.stdin.write(json)) {
|
6825
|
+
resolve();
|
6826
|
+
} else {
|
6827
|
+
this.process.stdin.once("drain", resolve);
|
6828
|
+
}
|
6829
|
+
});
|
6830
|
+
}
|
6831
|
+
};
|
6832
|
+
var ReadBuffer = class {
|
6833
|
+
append(chunk) {
|
6834
|
+
this.buffer = this.buffer ? Buffer.concat([this.buffer, chunk]) : chunk;
|
6835
|
+
}
|
6836
|
+
readMessage() {
|
6837
|
+
if (!this.buffer)
|
6838
|
+
return null;
|
6839
|
+
const index = this.buffer.indexOf("\n");
|
6840
|
+
if (index === -1) {
|
6841
|
+
return null;
|
6842
|
+
}
|
6843
|
+
const line = this.buffer.toString("utf8", 0, index);
|
6844
|
+
this.buffer = this.buffer.subarray(index + 1);
|
6845
|
+
return deserializeMessage(line);
|
6846
|
+
}
|
6847
|
+
clear() {
|
6848
|
+
this.buffer = void 0;
|
6849
|
+
}
|
6850
|
+
};
|
6851
|
+
function serializeMessage(message) {
|
6852
|
+
return JSON.stringify(message) + "\n";
|
6853
|
+
}
|
6854
|
+
function deserializeMessage(line) {
|
6855
|
+
return JSONRPCMessageSchema.parse(JSON.parse(line));
|
6856
|
+
}
|
6857
|
+
|
6858
|
+
// core/tool/mcp/mcp-sse-transport.ts
|
6859
|
+
var import_stream = require("eventsource-parser/stream");
|
6860
|
+
var SSEClientTransport = class {
|
6861
|
+
constructor({ url }) {
|
6862
|
+
this.connected = false;
|
6863
|
+
this.url = new URL(url);
|
6864
|
+
}
|
6865
|
+
async start() {
|
6866
|
+
return new Promise((resolve, reject) => {
|
6867
|
+
if (this.connected) {
|
6868
|
+
return resolve();
|
6869
|
+
}
|
6870
|
+
this.abortController = new AbortController();
|
6871
|
+
const establishConnection = async () => {
|
6872
|
+
var _a17, _b, _c;
|
6873
|
+
try {
|
6874
|
+
const response = await fetch(this.url.href, {
|
6875
|
+
headers: {
|
6876
|
+
Accept: "text/event-stream"
|
6877
|
+
},
|
6878
|
+
signal: (_a17 = this.abortController) == null ? void 0 : _a17.signal
|
6879
|
+
});
|
6880
|
+
if (!response.ok || !response.body) {
|
6881
|
+
const error = new MCPClientError({
|
6882
|
+
message: `MCP SSE Transport Error: ${response.status} ${response.statusText}`
|
6883
|
+
});
|
6884
|
+
(_b = this.onError) == null ? void 0 : _b.call(this, error);
|
6885
|
+
return reject(error);
|
6886
|
+
}
|
6887
|
+
const stream = response.body.pipeThrough(new TextDecoderStream()).pipeThrough(new import_stream.EventSourceParserStream());
|
6888
|
+
const reader = stream.getReader();
|
6889
|
+
const processEvents = async () => {
|
6890
|
+
var _a18, _b2, _c2;
|
6891
|
+
try {
|
6892
|
+
while (true) {
|
6893
|
+
const { done, value } = await reader.read();
|
6894
|
+
if (done) {
|
6895
|
+
if (this.connected) {
|
6896
|
+
this.connected = false;
|
6897
|
+
throw new MCPClientError({
|
6898
|
+
message: "MCP SSE Transport Error: Connection closed unexpectedly"
|
6899
|
+
});
|
6900
|
+
}
|
6901
|
+
return;
|
6902
|
+
}
|
6903
|
+
const { event, data } = value;
|
6904
|
+
if (event === "endpoint") {
|
6905
|
+
this.endpoint = new URL(data, this.url);
|
6906
|
+
if (this.endpoint.origin !== this.url.origin) {
|
6907
|
+
throw new MCPClientError({
|
6908
|
+
message: `MCP SSE Transport Error: Endpoint origin does not match connection origin: ${this.endpoint.origin}`
|
6909
|
+
});
|
6910
|
+
}
|
6911
|
+
this.connected = true;
|
6912
|
+
resolve();
|
6913
|
+
} else if (event === "message") {
|
6914
|
+
try {
|
6915
|
+
const message = JSONRPCMessageSchema.parse(
|
6916
|
+
JSON.parse(data)
|
6917
|
+
);
|
6918
|
+
(_a18 = this.onMessage) == null ? void 0 : _a18.call(this, message);
|
6919
|
+
} catch (error) {
|
6920
|
+
const e = new MCPClientError({
|
6921
|
+
message: "MCP SSE Transport Error: Failed to parse message",
|
6922
|
+
cause: error
|
6923
|
+
});
|
6924
|
+
(_b2 = this.onError) == null ? void 0 : _b2.call(this, e);
|
6925
|
+
}
|
6926
|
+
}
|
6927
|
+
}
|
6928
|
+
} catch (error) {
|
6929
|
+
if (error instanceof Error && error.name === "AbortError") {
|
6930
|
+
return;
|
6931
|
+
}
|
6932
|
+
(_c2 = this.onError) == null ? void 0 : _c2.call(this, error);
|
6933
|
+
reject(error);
|
6934
|
+
}
|
6935
|
+
};
|
6936
|
+
this.sseConnection = {
|
6937
|
+
close: () => reader.cancel()
|
6938
|
+
};
|
6939
|
+
processEvents();
|
6940
|
+
} catch (error) {
|
6941
|
+
if (error instanceof Error && error.name === "AbortError") {
|
6942
|
+
return;
|
6943
|
+
}
|
6944
|
+
(_c = this.onError) == null ? void 0 : _c.call(this, error);
|
6945
|
+
reject(error);
|
6946
|
+
}
|
6947
|
+
};
|
6948
|
+
establishConnection();
|
6949
|
+
});
|
6950
|
+
}
|
6951
|
+
async close() {
|
6952
|
+
var _a17, _b, _c;
|
6953
|
+
this.connected = false;
|
6954
|
+
(_a17 = this.sseConnection) == null ? void 0 : _a17.close();
|
6955
|
+
(_b = this.abortController) == null ? void 0 : _b.abort();
|
6956
|
+
(_c = this.onClose) == null ? void 0 : _c.call(this);
|
6957
|
+
}
|
6958
|
+
async send(message) {
|
6959
|
+
var _a17, _b, _c;
|
6960
|
+
if (!this.endpoint || !this.connected) {
|
6961
|
+
throw new MCPClientError({
|
6962
|
+
message: "MCP SSE Transport Error: Not connected"
|
6963
|
+
});
|
6964
|
+
}
|
6965
|
+
try {
|
6966
|
+
const headers = new Headers();
|
6967
|
+
headers.set("Content-Type", "application/json");
|
6968
|
+
const init = {
|
6969
|
+
method: "POST",
|
6970
|
+
headers,
|
6971
|
+
body: JSON.stringify(message),
|
6972
|
+
signal: (_a17 = this.abortController) == null ? void 0 : _a17.signal
|
6973
|
+
};
|
6974
|
+
const response = await fetch(this.endpoint, init);
|
6975
|
+
if (!response.ok) {
|
6976
|
+
const text2 = await response.text().catch(() => null);
|
6977
|
+
const error = new MCPClientError({
|
6978
|
+
message: `MCP SSE Transport Error: POSTing to endpoint (HTTP ${response.status}): ${text2}`
|
6979
|
+
});
|
6980
|
+
(_b = this.onError) == null ? void 0 : _b.call(this, error);
|
6981
|
+
return;
|
6982
|
+
}
|
6983
|
+
} catch (error) {
|
6984
|
+
(_c = this.onError) == null ? void 0 : _c.call(this, error);
|
6985
|
+
return;
|
6986
|
+
}
|
6987
|
+
}
|
6988
|
+
};
|
6989
|
+
|
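The SSE transport above expects two event types from the server: first an "endpoint" event whose data is a URL, resolved against the SSE URL and required to share its origin, to which JSON-RPC requests are then POSTed; after that, "message" events carry JSON-RPC payloads back to the client. A small sketch of the endpoint handshake under those assumptions (the URLs are placeholders):

// Sketch of the handshake performed on the "endpoint" event: resolve the event
// data against the SSE URL and reject cross-origin endpoints, as the transport
// above does. The URLs are placeholders.
const sseUrl = new URL("https://example.com/mcp/sse");
const endpointEventData = "/mcp/messages?sessionId=abc123"; // data of the "endpoint" event

const endpoint = new URL(endpointEventData, sseUrl); // https://example.com/mcp/messages?sessionId=abc123
if (endpoint.origin !== sseUrl.origin) {
  throw new Error(`Endpoint origin does not match connection origin: ${endpoint.origin}`);
}
// Subsequent JSON-RPC requests are POSTed to `endpoint`; responses arrive as
// "message" events on the original SSE stream.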
6990
|
+
// core/tool/mcp/mcp-transport.ts
|
6991
|
+
function createMcpTransport(config) {
|
6992
|
+
return config.type === "stdio" ? new StdioClientTransport(config) : new SSEClientTransport(config);
|
6993
|
+
}
|
6994
|
+
|
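createMcpTransport simply dispatches on config.type. A hedged sketch of the two configuration shapes as consumed here; the SSE shape ({ type, url }) matches the SSEClientTransport constructor above, while the stdio fields (command, args) are assumptions about this build's stdio transport and may differ:

// Hedged sketch of the two transport configs createMcpTransport dispatches on.
import { experimental_createMCPClient } from "ai";

// SSE: server-sent events stream for responses, HTTP POST for requests.
const sseClient = await experimental_createMCPClient({
  transport: { type: "sse", url: "https://example.com/mcp/sse" },
});

// stdio: spawn a local MCP server and exchange newline-delimited JSON-RPC
// over its stdin/stdout (command/args are assumed field names).
const stdioClient = await experimental_createMCPClient({
  transport: { type: "stdio", command: "node", args: ["my-mcp-server.js"] },
});

await sseClient.close();
await stdioClient.close();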
6995
|
+
// core/tool/mcp/mcp-client.ts
|
6996
|
+
var CLIENT_VERSION = "1.0.0";
|
6997
|
+
async function createMCPClient(config) {
|
6998
|
+
const client = new MCPClient(config);
|
6999
|
+
await client.init();
|
7000
|
+
return client;
|
7001
|
+
}
|
7002
|
+
var MCPClient = class {
|
7003
|
+
constructor({
|
7004
|
+
transport: transportConfig,
|
7005
|
+
name: name17 = "ai-sdk-mcp-client",
|
7006
|
+
onUncaughtError
|
7007
|
+
}) {
|
7008
|
+
this.requestMessageId = 0;
|
7009
|
+
this.responseHandlers = /* @__PURE__ */ new Map();
|
7010
|
+
this.serverCapabilities = {};
|
7011
|
+
this.isClosed = true;
|
7012
|
+
this.onUncaughtError = onUncaughtError;
|
7013
|
+
this.transport = createMcpTransport(transportConfig);
|
7014
|
+
this.transport.onClose = () => this.onClose();
|
7015
|
+
this.transport.onError = (error) => this.onError(error);
|
7016
|
+
this.transport.onMessage = (message) => {
|
7017
|
+
if ("method" in message) {
|
7018
|
+
this.onError(
|
7019
|
+
new MCPClientError({
|
7020
|
+
message: "Unsupported message type"
|
7021
|
+
})
|
7022
|
+
);
|
7023
|
+
return;
|
7024
|
+
}
|
7025
|
+
this.onResponse(message);
|
7026
|
+
};
|
7027
|
+
this.clientInfo = {
|
7028
|
+
name: name17,
|
7029
|
+
version: CLIENT_VERSION
|
7030
|
+
};
|
7031
|
+
}
|
7032
|
+
async init() {
|
7033
|
+
try {
|
7034
|
+
await this.transport.start();
|
7035
|
+
this.isClosed = false;
|
7036
|
+
const result = await this.request({
|
7037
|
+
request: {
|
7038
|
+
method: "initialize",
|
7039
|
+
params: {
|
7040
|
+
protocolVersion: LATEST_PROTOCOL_VERSION,
|
7041
|
+
capabilities: {},
|
7042
|
+
clientInfo: this.clientInfo
|
7043
|
+
}
|
7044
|
+
},
|
7045
|
+
resultSchema: InitializeResultSchema
|
7046
|
+
});
|
7047
|
+
if (result === void 0) {
|
7048
|
+
throw new MCPClientError({
|
7049
|
+
message: "Server sent invalid initialize result"
|
7050
|
+
});
|
7051
|
+
}
|
7052
|
+
if (!SUPPORTED_PROTOCOL_VERSIONS.includes(result.protocolVersion)) {
|
7053
|
+
throw new MCPClientError({
|
7054
|
+
message: `Server's protocol version is not supported: ${result.protocolVersion}`
|
7055
|
+
});
|
7056
|
+
}
|
7057
|
+
this.serverCapabilities = result.capabilities;
|
7058
|
+
await this.notification({
|
7059
|
+
method: "notifications/initialized"
|
7060
|
+
});
|
7061
|
+
return this;
|
7062
|
+
} catch (error) {
|
7063
|
+
await this.close();
|
7064
|
+
throw error;
|
7065
|
+
}
|
7066
|
+
}
|
7067
|
+
async close() {
|
7068
|
+
var _a17;
|
7069
|
+
if (this.isClosed)
|
7070
|
+
return;
|
7071
|
+
await ((_a17 = this.transport) == null ? void 0 : _a17.close());
|
7072
|
+
this.onClose();
|
7073
|
+
}
|
7074
|
+
async request({
|
7075
|
+
request,
|
7076
|
+
resultSchema,
|
7077
|
+
options
|
7078
|
+
}) {
|
7079
|
+
return new Promise((resolve, reject) => {
|
7080
|
+
if (this.isClosed) {
|
7081
|
+
return reject(
|
7082
|
+
new MCPClientError({
|
7083
|
+
message: "Attempted to send a request from a closed client"
|
7084
|
+
})
|
7085
|
+
);
|
7086
|
+
}
|
7087
|
+
const signal = options == null ? void 0 : options.signal;
|
7088
|
+
signal == null ? void 0 : signal.throwIfAborted();
|
7089
|
+
const messageId = this.requestMessageId++;
|
7090
|
+
const jsonrpcRequest = {
|
7091
|
+
...request,
|
7092
|
+
jsonrpc: "2.0",
|
7093
|
+
id: messageId
|
7094
|
+
};
|
7095
|
+
const cleanup = () => {
|
7096
|
+
this.responseHandlers.delete(messageId);
|
7097
|
+
};
|
7098
|
+
this.responseHandlers.set(messageId, (response) => {
|
7099
|
+
if (signal == null ? void 0 : signal.aborted) {
|
7100
|
+
return reject(
|
7101
|
+
new MCPClientError({
|
7102
|
+
message: "Request was aborted",
|
7103
|
+
cause: signal.reason
|
7104
|
+
})
|
7105
|
+
);
|
7106
|
+
}
|
7107
|
+
if (response instanceof Error) {
|
7108
|
+
return reject(response);
|
7109
|
+
}
|
7110
|
+
try {
|
7111
|
+
const result = resultSchema.parse(response.result);
|
7112
|
+
resolve(result);
|
7113
|
+
} catch (error) {
|
7114
|
+
const parseError = new MCPClientError({
|
7115
|
+
message: "Failed to parse server initialization result",
|
7116
|
+
cause: error
|
7117
|
+
});
|
7118
|
+
reject(parseError);
|
7119
|
+
}
|
7120
|
+
});
|
7121
|
+
this.transport.send(jsonrpcRequest).catch((error) => {
|
7122
|
+
cleanup();
|
7123
|
+
reject(error);
|
7124
|
+
});
|
7125
|
+
});
|
7126
|
+
}
|
7127
|
+
async listTools({
|
7128
|
+
params,
|
7129
|
+
options
|
7130
|
+
} = {}) {
|
7131
|
+
if (!this.serverCapabilities.tools) {
|
7132
|
+
throw new MCPClientError({
|
7133
|
+
message: `Server does not support tools`
|
7134
|
+
});
|
7135
|
+
}
|
7136
|
+
try {
|
7137
|
+
return this.request({
|
7138
|
+
request: { method: "tools/list", params },
|
7139
|
+
resultSchema: ListToolsResultSchema,
|
7140
|
+
options
|
7141
|
+
});
|
7142
|
+
} catch (error) {
|
7143
|
+
throw error;
|
7144
|
+
}
|
7145
|
+
}
|
7146
|
+
async callTool({
|
7147
|
+
name: name17,
|
7148
|
+
args,
|
7149
|
+
options
|
7150
|
+
}) {
|
7151
|
+
if (!this.serverCapabilities.tools) {
|
7152
|
+
throw new MCPClientError({
|
7153
|
+
message: `Server does not support tools`
|
7154
|
+
});
|
7155
|
+
}
|
7156
|
+
try {
|
7157
|
+
return this.request({
|
7158
|
+
request: { method: "tools/call", params: { name: name17, arguments: args } },
|
7159
|
+
resultSchema: CallToolResultSchema,
|
7160
|
+
options: {
|
7161
|
+
signal: options == null ? void 0 : options.abortSignal
|
7162
|
+
}
|
7163
|
+
});
|
7164
|
+
} catch (error) {
|
7165
|
+
throw error;
|
7166
|
+
}
|
7167
|
+
}
|
7168
|
+
async notification(notification) {
|
7169
|
+
const jsonrpcNotification = {
|
7170
|
+
...notification,
|
7171
|
+
jsonrpc: "2.0"
|
7172
|
+
};
|
7173
|
+
await this.transport.send(jsonrpcNotification);
|
7174
|
+
}
|
7175
|
+
/**
|
7176
|
+
* Returns a set of AI SDK tools from the MCP server
|
7177
|
+
* @returns A record of tool names to their implementations
|
7178
|
+
*/
|
7179
|
+
async tools({
|
7180
|
+
schemas = "automatic"
|
7181
|
+
} = {}) {
|
7182
|
+
const tools = {};
|
7183
|
+
try {
|
7184
|
+
const listToolsResult = await this.listTools();
|
7185
|
+
for (const { name: name17, description, inputSchema } of listToolsResult.tools) {
|
7186
|
+
if (schemas !== "automatic" && !(name17 in schemas)) {
|
7187
|
+
continue;
|
7188
|
+
}
|
7189
|
+
const parameters = schemas === "automatic" ? (0, import_ui_utils10.jsonSchema)(inputSchema) : schemas[name17].parameters;
|
7190
|
+
const self = this;
|
7191
|
+
const toolWithExecute = tool({
|
7192
|
+
description,
|
7193
|
+
parameters,
|
7194
|
+
execute: async (args, options) => {
|
7195
|
+
var _a17;
|
7196
|
+
(_a17 = options == null ? void 0 : options.abortSignal) == null ? void 0 : _a17.throwIfAborted();
|
7197
|
+
return self.callTool({
|
7198
|
+
name: name17,
|
7199
|
+
args,
|
7200
|
+
options
|
7201
|
+
});
|
7202
|
+
}
|
7203
|
+
});
|
7204
|
+
tools[name17] = toolWithExecute;
|
7205
|
+
}
|
7206
|
+
return tools;
|
7207
|
+
} catch (error) {
|
7208
|
+
throw error;
|
7209
|
+
}
|
7210
|
+
}
|
7211
|
+
onClose() {
|
7212
|
+
if (this.isClosed)
|
7213
|
+
return;
|
7214
|
+
this.isClosed = true;
|
7215
|
+
const error = new MCPClientError({
|
7216
|
+
message: "Connection closed"
|
7217
|
+
});
|
7218
|
+
for (const handler of this.responseHandlers.values()) {
|
7219
|
+
handler(error);
|
7220
|
+
}
|
7221
|
+
this.responseHandlers.clear();
|
7222
|
+
}
|
7223
|
+
onError(error) {
|
7224
|
+
if (this.onUncaughtError) {
|
7225
|
+
this.onUncaughtError(error);
|
7226
|
+
}
|
7227
|
+
}
|
7228
|
+
onResponse(response) {
|
7229
|
+
const messageId = Number(response.id);
|
7230
|
+
const handler = this.responseHandlers.get(messageId);
|
7231
|
+
if (handler === void 0) {
|
7232
|
+
throw new MCPClientError({
|
7233
|
+
message: `Protocol error: Received a response for an unknown message ID: ${JSON.stringify(
|
7234
|
+
response
|
7235
|
+
)}`
|
7236
|
+
});
|
7237
|
+
}
|
7238
|
+
this.responseHandlers.delete(messageId);
|
7239
|
+
handler(
|
7240
|
+
"result" in response ? response : new MCPClientError({
|
7241
|
+
message: response.error.message,
|
7242
|
+
cause: response.error
|
7243
|
+
})
|
7244
|
+
);
|
7245
|
+
}
|
7246
|
+
};
|
7247
|
+
|
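MCPClient performs the initialize handshake, checks the negotiated protocol version against SUPPORTED_PROTOCOL_VERSIONS, and exposes the server's tools as AI SDK tools via tools(), where each tool's execute delegates to tools/call. A hedged end-to-end sketch of using it with generateText; the URL, model, and prompt are placeholders:

// Hedged end-to-end sketch: discover MCP tools and hand them to generateText.
import { experimental_createMCPClient, generateText } from "ai";
import { openai } from "@ai-sdk/openai";

const mcpClient = await experimental_createMCPClient({
  transport: { type: "sse", url: "https://example.com/mcp/sse" },
});

try {
  // schemas defaults to "automatic": parameters are derived from each tool's
  // inputSchema via jsonSchema(); pass an explicit map to narrow the set.
  const tools = await mcpClient.tools();

  const { text } = await generateText({
    model: openai("gpt-4o-mini"),
    tools,
    maxSteps: 2, // allow a tool round-trip plus a final answer
    prompt: "Use the available tools to summarize the project.",
  });
  console.log(text);
} finally {
  // close() rejects any in-flight requests and shuts down the transport.
  await mcpClient.close();
}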
6498
7248
|
// core/util/cosine-similarity.ts
|
6499
7249
|
function cosineSimilarity(vector1, vector2, options = {
|
6500
7250
|
throwErrorForEmptyVectors: false
|
@@ -6537,8 +7287,8 @@ function simulateReadableStream({
|
|
6537
7287
|
chunkDelayInMs = 0,
|
6538
7288
|
_internal
|
6539
7289
|
}) {
|
6540
|
-
var
|
6541
|
-
const delay2 = (
|
7290
|
+
var _a17;
|
7291
|
+
const delay2 = (_a17 = _internal == null ? void 0 : _internal.delay) != null ? _a17 : import_provider_utils13.delay;
|
6542
7292
|
let index = 0;
|
6543
7293
|
return new ReadableStream({
|
6544
7294
|
async pull(controller) {
|
@@ -6553,40 +7303,40 @@ function simulateReadableStream({
|
|
6553
7303
|
}
|
6554
7304
|
|
6555
7305
|
// streams/assistant-response.ts
|
6556
|
-
var
|
7306
|
+
var import_ui_utils12 = require("@ai-sdk/ui-utils");
|
6557
7307
|
function AssistantResponse({ threadId, messageId }, process2) {
|
6558
7308
|
const stream = new ReadableStream({
|
6559
7309
|
async start(controller) {
|
6560
|
-
var
|
7310
|
+
var _a17;
|
6561
7311
|
const textEncoder = new TextEncoder();
|
6562
7312
|
const sendMessage = (message) => {
|
6563
7313
|
controller.enqueue(
|
6564
7314
|
textEncoder.encode(
|
6565
|
-
(0,
|
7315
|
+
(0, import_ui_utils12.formatAssistantStreamPart)("assistant_message", message)
|
6566
7316
|
)
|
6567
7317
|
);
|
6568
7318
|
};
|
6569
7319
|
const sendDataMessage = (message) => {
|
6570
7320
|
controller.enqueue(
|
6571
7321
|
textEncoder.encode(
|
6572
|
-
(0,
|
7322
|
+
(0, import_ui_utils12.formatAssistantStreamPart)("data_message", message)
|
6573
7323
|
)
|
6574
7324
|
);
|
6575
7325
|
};
|
6576
7326
|
const sendError = (errorMessage) => {
|
6577
7327
|
controller.enqueue(
|
6578
|
-
textEncoder.encode((0,
|
7328
|
+
textEncoder.encode((0, import_ui_utils12.formatAssistantStreamPart)("error", errorMessage))
|
6579
7329
|
);
|
6580
7330
|
};
|
6581
7331
|
const forwardStream = async (stream2) => {
|
6582
|
-
var
|
7332
|
+
var _a18, _b;
|
6583
7333
|
let result = void 0;
|
6584
7334
|
for await (const value of stream2) {
|
6585
7335
|
switch (value.event) {
|
6586
7336
|
case "thread.message.created": {
|
6587
7337
|
controller.enqueue(
|
6588
7338
|
textEncoder.encode(
|
6589
|
-
(0,
|
7339
|
+
(0, import_ui_utils12.formatAssistantStreamPart)("assistant_message", {
|
6590
7340
|
id: value.data.id,
|
6591
7341
|
role: "assistant",
|
6592
7342
|
content: [{ type: "text", text: { value: "" } }]
|
@@ -6596,11 +7346,11 @@ function AssistantResponse({ threadId, messageId }, process2) {
|
|
6596
7346
|
break;
|
6597
7347
|
}
|
6598
7348
|
case "thread.message.delta": {
|
6599
|
-
const content = (
|
7349
|
+
const content = (_a18 = value.data.delta.content) == null ? void 0 : _a18[0];
|
6600
7350
|
if ((content == null ? void 0 : content.type) === "text" && ((_b = content.text) == null ? void 0 : _b.value) != null) {
|
6601
7351
|
controller.enqueue(
|
6602
7352
|
textEncoder.encode(
|
6603
|
-
(0,
|
7353
|
+
(0, import_ui_utils12.formatAssistantStreamPart)("text", content.text.value)
|
6604
7354
|
)
|
6605
7355
|
);
|
6606
7356
|
}
|
@@ -6617,7 +7367,7 @@ function AssistantResponse({ threadId, messageId }, process2) {
|
|
6617
7367
|
};
|
6618
7368
|
controller.enqueue(
|
6619
7369
|
textEncoder.encode(
|
6620
|
-
(0,
|
7370
|
+
(0, import_ui_utils12.formatAssistantStreamPart)("assistant_control_data", {
|
6621
7371
|
threadId,
|
6622
7372
|
messageId
|
6623
7373
|
})
|
@@ -6630,7 +7380,7 @@ function AssistantResponse({ threadId, messageId }, process2) {
|
|
6630
7380
|
forwardStream
|
6631
7381
|
});
|
6632
7382
|
} catch (error) {
|
6633
|
-
sendError((
|
7383
|
+
sendError((_a17 = error.message) != null ? _a17 : `${error}`);
|
6634
7384
|
} finally {
|
6635
7385
|
controller.close();
|
6636
7386
|
}
|
@@ -6655,7 +7405,7 @@ __export(langchain_adapter_exports, {
|
|
6655
7405
|
toDataStream: () => toDataStream,
|
6656
7406
|
toDataStreamResponse: () => toDataStreamResponse
|
6657
7407
|
});
|
6658
|
-
var
|
7408
|
+
var import_ui_utils13 = require("@ai-sdk/ui-utils");
|
6659
7409
|
|
6660
7410
|
// streams/stream-callbacks.ts
|
6661
7411
|
function createCallbacksTransformer(callbacks = {}) {
|
@@ -6691,7 +7441,7 @@ function toDataStreamInternal(stream, callbacks) {
|
|
6691
7441
|
return stream.pipeThrough(
|
6692
7442
|
new TransformStream({
|
6693
7443
|
transform: async (value, controller) => {
|
6694
|
-
var
|
7444
|
+
var _a17;
|
6695
7445
|
if (typeof value === "string") {
|
6696
7446
|
controller.enqueue(value);
|
6697
7447
|
return;
|
@@ -6699,7 +7449,7 @@ function toDataStreamInternal(stream, callbacks) {
|
|
6699
7449
|
if ("event" in value) {
|
6700
7450
|
if (value.event === "on_chat_model_stream") {
|
6701
7451
|
forwardAIMessageChunk(
|
6702
|
-
(
|
7452
|
+
(_a17 = value.data) == null ? void 0 : _a17.chunk,
|
6703
7453
|
controller
|
6704
7454
|
);
|
6705
7455
|
}
|
@@ -6711,7 +7461,7 @@ function toDataStreamInternal(stream, callbacks) {
|
|
6711
7461
|
).pipeThrough(createCallbacksTransformer(callbacks)).pipeThrough(new TextDecoderStream()).pipeThrough(
|
6712
7462
|
new TransformStream({
|
6713
7463
|
transform: async (chunk, controller) => {
|
6714
|
-
controller.enqueue((0,
|
7464
|
+
controller.enqueue((0, import_ui_utils13.formatDataStreamPart)("text", chunk));
|
6715
7465
|
}
|
6716
7466
|
})
|
6717
7467
|
);
|
@@ -6722,7 +7472,7 @@ function toDataStream(stream, callbacks) {
|
|
6722
7472
|
);
|
6723
7473
|
}
|
6724
7474
|
function toDataStreamResponse(stream, options) {
|
6725
|
-
var
|
7475
|
+
var _a17;
|
6726
7476
|
const dataStream = toDataStreamInternal(
|
6727
7477
|
stream,
|
6728
7478
|
options == null ? void 0 : options.callbacks
|
@@ -6731,7 +7481,7 @@ function toDataStreamResponse(stream, options) {
|
|
6731
7481
|
const init = options == null ? void 0 : options.init;
|
6732
7482
|
const responseStream = data ? mergeStreams(data.stream, dataStream) : dataStream;
|
6733
7483
|
return new Response(responseStream, {
|
6734
|
-
status: (
|
7484
|
+
status: (_a17 = init == null ? void 0 : init.status) != null ? _a17 : 200,
|
6735
7485
|
statusText: init == null ? void 0 : init.statusText,
|
6736
7486
|
headers: prepareResponseHeaders(init == null ? void 0 : init.headers, {
|
6737
7487
|
contentType: "text/plain; charset=utf-8",
|
@@ -6763,7 +7513,7 @@ __export(llamaindex_adapter_exports, {
|
|
6763
7513
|
toDataStreamResponse: () => toDataStreamResponse2
|
6764
7514
|
});
|
6765
7515
|
var import_provider_utils15 = require("@ai-sdk/provider-utils");
|
6766
|
-
var
|
7516
|
+
var import_ui_utils14 = require("@ai-sdk/ui-utils");
|
6767
7517
|
function toDataStreamInternal2(stream, callbacks) {
|
6768
7518
|
const trimStart = trimStartOfStream();
|
6769
7519
|
return (0, import_provider_utils15.convertAsyncIteratorToReadableStream)(stream[Symbol.asyncIterator]()).pipeThrough(
|
@@ -6775,7 +7525,7 @@ function toDataStreamInternal2(stream, callbacks) {
|
|
6775
7525
|
).pipeThrough(createCallbacksTransformer(callbacks)).pipeThrough(new TextDecoderStream()).pipeThrough(
|
6776
7526
|
new TransformStream({
|
6777
7527
|
transform: async (chunk, controller) => {
|
6778
|
-
controller.enqueue((0,
|
7528
|
+
controller.enqueue((0, import_ui_utils14.formatDataStreamPart)("text", chunk));
|
6779
7529
|
}
|
6780
7530
|
})
|
6781
7531
|
);
|
@@ -6786,14 +7536,14 @@ function toDataStream2(stream, callbacks) {
|
|
6786
7536
|
);
|
6787
7537
|
}
|
6788
7538
|
function toDataStreamResponse2(stream, options = {}) {
|
6789
|
-
var
|
7539
|
+
var _a17;
|
6790
7540
|
const { init, data, callbacks } = options;
|
6791
7541
|
const dataStream = toDataStreamInternal2(stream, callbacks).pipeThrough(
|
6792
7542
|
new TextEncoderStream()
|
6793
7543
|
);
|
6794
7544
|
const responseStream = data ? mergeStreams(data.stream, dataStream) : dataStream;
|
6795
7545
|
return new Response(responseStream, {
|
6796
|
-
status: (
|
7546
|
+
status: (_a17 = init == null ? void 0 : init.status) != null ? _a17 : 200,
|
6797
7547
|
statusText: init == null ? void 0 : init.statusText,
|
6798
7548
|
headers: prepareResponseHeaders(init == null ? void 0 : init.headers, {
|
6799
7549
|
contentType: "text/plain; charset=utf-8",
|
@@ -6817,7 +7567,7 @@ function trimStartOfStream() {
|
|
6817
7567
|
}
|
6818
7568
|
|
6819
7569
|
// streams/stream-data.ts
|
6820
|
-
var
|
7570
|
+
var import_ui_utils15 = require("@ai-sdk/ui-utils");
|
6821
7571
|
|
6822
7572
|
// util/constants.ts
|
6823
7573
|
var HANGING_STREAM_WARNING_TIME_MS = 15 * 1e3;
|
@@ -6869,7 +7619,7 @@ var StreamData = class {
|
|
6869
7619
|
throw new Error("Stream controller is not initialized.");
|
6870
7620
|
}
|
6871
7621
|
this.controller.enqueue(
|
6872
|
-
this.encoder.encode((0,
|
7622
|
+
this.encoder.encode((0, import_ui_utils15.formatDataStreamPart)("data", [value]))
|
6873
7623
|
);
|
6874
7624
|
}
|
6875
7625
|
appendMessageAnnotation(value) {
|
@@ -6880,7 +7630,7 @@ var StreamData = class {
|
|
6880
7630
|
throw new Error("Stream controller is not initialized.");
|
6881
7631
|
}
|
6882
7632
|
this.controller.enqueue(
|
6883
|
-
this.encoder.encode((0,
|
7633
|
+
this.encoder.encode((0, import_ui_utils15.formatDataStreamPart)("message_annotations", [value]))
|
6884
7634
|
);
|
6885
7635
|
}
|
6886
7636
|
};
|
@@ -6902,6 +7652,7 @@ var StreamData = class {
|
|
6902
7652
|
LangChainAdapter,
|
6903
7653
|
LlamaIndexAdapter,
|
6904
7654
|
LoadAPIKeyError,
|
7655
|
+
MCPClientError,
|
6905
7656
|
MessageConversionError,
|
6906
7657
|
NoContentGeneratedError,
|
6907
7658
|
NoImageGeneratedError,
|
@@ -6932,6 +7683,7 @@ var StreamData = class {
|
|
6932
7683
|
customProvider,
|
6933
7684
|
embed,
|
6934
7685
|
embedMany,
|
7686
|
+
experimental_createMCPClient,
|
6935
7687
|
experimental_createProviderRegistry,
|
6936
7688
|
experimental_customProvider,
|
6937
7689
|
experimental_generateImage,
|
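The entries added to the export map above are MCPClientError and experimental_createMCPClient. A brief sketch of catching MCP-specific failures (unsupported protocol version, requests after close, transport errors) via the newly exported error class; the URL is a placeholder:

// Hedged sketch: distinguishing MCP-specific failures with MCPClientError.
import { experimental_createMCPClient, MCPClientError } from "ai";

try {
  const client = await experimental_createMCPClient({
    transport: { type: "sse", url: "https://example.com/mcp/sse" },
  });
  await client.close();
} catch (error) {
  if (error instanceof MCPClientError) {
    console.error("MCP client error:", error.message);
  } else {
    throw error;
  }
}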