ai 5.0.0-alpha.13 → 5.0.0-alpha.15
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/CHANGELOG.md +37 -0
- package/dist/index.d.mts +207 -186
- package/dist/index.d.ts +207 -186
- package/dist/index.js +345 -339
- package/dist/index.js.map +1 -1
- package/dist/index.mjs +345 -341
- package/dist/index.mjs.map +1 -1
- package/dist/internal/index.d.mts +33 -20
- package/dist/internal/index.d.ts +33 -20
- package/dist/internal/index.js +14 -7
- package/dist/internal/index.js.map +1 -1
- package/dist/internal/index.mjs +14 -7
- package/dist/internal/index.mjs.map +1 -1
- package/dist/mcp-stdio/index.d.mts +3 -3
- package/dist/mcp-stdio/index.d.ts +3 -3
- package/dist/mcp-stdio/index.js.map +1 -1
- package/dist/mcp-stdio/index.mjs.map +1 -1
- package/package.json +4 -4
package/dist/index.mjs
CHANGED
@@ -74,24 +74,22 @@ var InvalidStreamPartError = class extends AISDKError2 {
 };
 _a2 = symbol2;
 
-// src/error/invalid-tool-
+// src/error/invalid-tool-input-error.ts
 import { AISDKError as AISDKError3, getErrorMessage } from "@ai-sdk/provider";
-var name3 = "
+var name3 = "AI_InvalidToolInputError";
 var marker3 = `vercel.ai.error.${name3}`;
 var symbol3 = Symbol.for(marker3);
 var _a3;
-var
+var InvalidToolInputError = class extends AISDKError3 {
   constructor({
-
+    toolInput,
     toolName,
     cause,
-    message = `Invalid
-      cause
-    )}`
+    message = `Invalid input for tool ${toolName}: ${getErrorMessage(cause)}`
   }) {
     super({ name: name3, message, cause });
     this[_a3] = true;
-    this.
+    this.toolInput = toolInput;
     this.toolName = toolName;
   }
   static isInstance(error) {
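Note: the tool-argument error is now exported as `InvalidToolInputError` and carries the raw `toolInput`. A minimal sketch of branching on the renamed error class (only the exports visible in this diff are assumed):

```ts
import { generateText, InvalidToolInputError, NoSuchToolError } from "ai";

// Sketch: surface the renamed error fields when a tool call fails schema validation.
export async function generateWithToolErrorLogging(
  options: Parameters<typeof generateText>[0]
) {
  try {
    return await generateText(options);
  } catch (error) {
    if (InvalidToolInputError.isInstance(error)) {
      // New field name: the unparsed input the model produced for the tool.
      console.error("invalid input for", error.toolName, error.toolInput);
    } else if (NoSuchToolError.isInstance(error)) {
      console.error("unknown tool:", error.toolName);
    }
    throw error;
  }
}
```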
@@ -242,7 +240,7 @@ var symbol10 = Symbol.for(marker10);
 var _a10;
 var ToolExecutionError = class extends AISDKError10 {
   constructor({
-
+    toolInput,
     toolName,
     toolCallId,
     cause,
@@ -250,7 +248,7 @@ var ToolExecutionError = class extends AISDKError10 {
   }) {
     super({ name: name10, message, cause });
     this[_a10] = true;
-    this.
+    this.toolInput = toolInput;
     this.toolName = toolName;
     this.toolCallId = toolCallId;
   }
@@ -464,25 +462,25 @@ var uiMessageStreamPartSchema = z.union([
     errorText: z.string()
   }),
   z.strictObject({
-    type: z.literal("tool-
+    type: z.literal("tool-input-start"),
     toolCallId: z.string(),
     toolName: z.string()
   }),
   z.strictObject({
-    type: z.literal("tool-
+    type: z.literal("tool-input-delta"),
     toolCallId: z.string(),
-
+    inputTextDelta: z.string()
   }),
   z.strictObject({
-    type: z.literal("tool-
+    type: z.literal("tool-input-available"),
     toolCallId: z.string(),
     toolName: z.string(),
-
+    input: z.unknown()
   }),
   z.strictObject({
-    type: z.literal("tool-
+    type: z.literal("tool-output-available"),
     toolCallId: z.string(),
-
+    output: z.unknown(),
     providerMetadata: z.any().optional()
   }),
   z.strictObject({
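Note: the UI message stream parts for tool calls move from the old `tool-call-*`/`tool-result` naming to `tool-input-*`/`tool-output-available`, with `input`/`output` payload fields. A sketch of the part shapes implied by the schema above (not the full union, field names taken from the diff):

```ts
// Shapes implied by uiMessageStreamPartSchema above (sketch only).
type ToolUIMessageStreamPart =
  | { type: "tool-input-start"; toolCallId: string; toolName: string }
  | { type: "tool-input-delta"; toolCallId: string; inputTextDelta: string }
  | { type: "tool-input-available"; toolCallId: string; toolName: string; input: unknown }
  | { type: "tool-output-available"; toolCallId: string; output: unknown; providerMetadata?: unknown };
```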
@@ -758,103 +756,127 @@ async function convertFileListToFileUIParts(files) {
 }
 
 // src/ui/default-chat-transport.ts
-import {
-
-
+import { parseJsonEventStream as parseJsonEventStream2 } from "@ai-sdk/provider-utils";
+
+// src/ui/http-chat-transport.ts
 var getOriginalFetch2 = () => fetch;
-
-  api,
-  body,
-  credentials,
-  headers,
-  abortSignal,
-  fetch: fetch2 = getOriginalFetch2(),
-  requestType = "generate"
-}) {
-  var _a17;
-  const response = requestType === "resume" ? await fetch2(`${api}?id=${body.id}`, {
-    method: "GET",
-    headers: {
-      "Content-Type": "application/json",
-      ...headers
-    },
-    signal: abortSignal,
-    credentials
-  }) : await fetch2(api, {
-    method: "POST",
-    body: JSON.stringify(body),
-    headers: {
-      "Content-Type": "application/json",
-      ...headers
-    },
-    signal: abortSignal,
-    credentials
-  });
-  if (!response.ok) {
-    throw new Error(
-      (_a17 = await response.text()) != null ? _a17 : "Failed to fetch the chat response."
-    );
-  }
-  if (!response.body) {
-    throw new Error("The response body is empty.");
-  }
-  return parseJsonEventStream2({
-    stream: response.body,
-    schema: uiMessageStreamPartSchema
-  }).pipeThrough(
-    new TransformStream({
-      async transform(part, controller) {
-        if (!part.success) {
-          throw part.error;
-        }
-        controller.enqueue(part.value);
-      }
-    })
-  );
-}
-var DefaultChatTransport = class {
+var HttpChatTransport = class {
   constructor({
     api = "/api/chat",
     credentials,
     headers,
     body,
-    fetch: fetch2,
-
-
+    fetch: fetch2 = getOriginalFetch2(),
+    prepareSendMessagesRequest,
+    prepareReconnectToStreamRequest
+  }) {
     this.api = api;
     this.credentials = credentials;
     this.headers = headers;
     this.body = body;
     this.fetch = fetch2;
-    this.
+    this.prepareSendMessagesRequest = prepareSendMessagesRequest;
+    this.prepareReconnectToStreamRequest = prepareReconnectToStreamRequest;
   }
-
-    chatId,
-    messages,
+  async sendMessages({
     abortSignal,
-
-    headers,
-    body,
-    requestType
+    ...options
   }) {
-    var _a17, _b;
-    const preparedRequest = (_a17 = this.
-
-
-
-
+    var _a17, _b, _c, _d;
+    const preparedRequest = (_a17 = this.prepareSendMessagesRequest) == null ? void 0 : _a17.call(this, {
+      api: this.api,
+      id: options.chatId,
+      messages: options.messages,
+      body: { ...this.body, ...options.body },
+      headers: { ...this.headers, ...options.headers },
       credentials: this.credentials,
-      requestMetadata: metadata
+      requestMetadata: options.metadata,
+      trigger: options.trigger,
+      messageId: options.messageId
+    });
+    const api = (_b = preparedRequest == null ? void 0 : preparedRequest.api) != null ? _b : this.api;
+    const headers = (preparedRequest == null ? void 0 : preparedRequest.headers) !== void 0 ? preparedRequest.headers : { ...this.headers, ...options.headers };
+    const body = (preparedRequest == null ? void 0 : preparedRequest.body) !== void 0 ? preparedRequest.body : {
+      ...this.body,
+      ...options.body,
+      id: options.chatId,
+      messages: options.messages,
+      trigger: options.trigger,
+      messageId: options.messageId
+    };
+    const credentials = (_c = preparedRequest == null ? void 0 : preparedRequest.credentials) != null ? _c : this.credentials;
+    const response = await fetch(api, {
+      method: "POST",
+      headers: {
+        "Content-Type": "application/json",
+        ...headers
+      },
+      body: JSON.stringify(body),
+      credentials,
+      signal: abortSignal
     });
-
+    if (!response.ok) {
+      throw new Error(
+        (_d = await response.text()) != null ? _d : "Failed to fetch the chat response."
+      );
+    }
+    if (!response.body) {
+      throw new Error("The response body is empty.");
+    }
+    return this.processResponseStream(response.body);
+  }
+  async reconnectToStream(options) {
+    var _a17, _b, _c, _d;
+    const preparedRequest = (_a17 = this.prepareReconnectToStreamRequest) == null ? void 0 : _a17.call(this, {
       api: this.api,
-
-
-
-
-
-
+      id: options.chatId,
+      body: { ...this.body, ...options.body },
+      headers: { ...this.headers, ...options.headers },
+      credentials: this.credentials,
+      requestMetadata: options.metadata
+    });
+    const api = (_b = preparedRequest == null ? void 0 : preparedRequest.api) != null ? _b : `${this.api}/${options.chatId}/stream`;
+    const headers = (preparedRequest == null ? void 0 : preparedRequest.headers) !== void 0 ? preparedRequest.headers : { ...this.headers, ...options.headers };
+    const credentials = (_c = preparedRequest == null ? void 0 : preparedRequest.credentials) != null ? _c : this.credentials;
+    const response = await fetch(api, {
+      method: "GET",
+      headers,
+      credentials
     });
+    if (response.status === 204) {
+      return null;
+    }
+    if (!response.ok) {
+      throw new Error(
+        (_d = await response.text()) != null ? _d : "Failed to fetch the chat response."
+      );
+    }
+    if (!response.body) {
+      throw new Error("The response body is empty.");
+    }
+    return this.processResponseStream(response.body);
+  }
+};
+
+// src/ui/default-chat-transport.ts
+var DefaultChatTransport = class extends HttpChatTransport {
+  constructor(options = {}) {
+    super(options);
+  }
+  processResponseStream(stream) {
+    return parseJsonEventStream2({
+      stream,
+      schema: uiMessageStreamPartSchema
+    }).pipeThrough(
+      new TransformStream({
+        async transform(part, controller) {
+          if (!part.success) {
+            throw part.error;
+          }
+          controller.enqueue(part.value);
+        }
+      })
+    );
   }
 };
 
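Note: the chat transports are now built on a shared `HttpChatTransport` base class, with `prepareSendMessagesRequest` / `prepareReconnectToStreamRequest` hooks replacing the old `prepareRequest` option. A sketch of configuring the new transport (hook parameter and return names taken from the constructor and call sites above; the endpoint paths are assumptions):

```ts
import { DefaultChatTransport } from "ai";

// Sketch: customize the POST body/headers per request and the reconnect URL.
const transport = new DefaultChatTransport({
  api: "/api/chat",
  prepareSendMessagesRequest({ id, messages, trigger, messageId, requestMetadata }) {
    return {
      headers: { "x-chat-id": id },
      body: { id, messages, trigger, messageId, metadata: requestMetadata },
    };
  },
  prepareReconnectToStreamRequest({ id }) {
    // When not overridden, reconnects default to GET `${api}/${chatId}/stream`.
    return { api: `/api/chat/${id}/stream` };
  },
});
```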
@@ -1275,15 +1297,15 @@ function processUIMessageStream({
       );
       if (part2 != null) {
         part2.state = options.state;
-        part2.
-        part2.
+        part2.input = options.input;
+        part2.output = options.output;
       } else {
         state.message.parts.push({
           type: `tool-${options.toolName}`,
           toolCallId: options.toolCallId,
           state: options.state,
-
-
+          input: options.input,
+          output: options.output
         });
       }
     }
@@ -1366,7 +1388,7 @@ function processUIMessageStream({
             write();
             break;
           }
-          case "tool-
+          case "tool-input-start": {
             const toolInvocations = state.message.parts.filter(isToolUIPart);
             state.partialToolCalls[part.toolCallId] = {
               text: "",
@@ -1376,33 +1398,33 @@ function processUIMessageStream({
             updateToolInvocationPart({
               toolCallId: part.toolCallId,
               toolName: part.toolName,
-              state: "
-
+              state: "input-streaming",
+              input: void 0
             });
             write();
             break;
           }
-          case "tool-
+          case "tool-input-delta": {
             const partialToolCall = state.partialToolCalls[part.toolCallId];
-            partialToolCall.text += part.
+            partialToolCall.text += part.inputTextDelta;
             const { value: partialArgs } = await parsePartialJson(
               partialToolCall.text
             );
             updateToolInvocationPart({
               toolCallId: part.toolCallId,
               toolName: partialToolCall.toolName,
-              state: "
-
+              state: "input-streaming",
+              input: partialArgs
             });
             write();
             break;
           }
-          case "tool-
+          case "tool-input-available": {
             updateToolInvocationPart({
               toolCallId: part.toolCallId,
               toolName: part.toolName,
-              state: "
-
+              state: "input-available",
+              input: part.input
             });
             write();
             if (onToolCall) {
@@ -1413,16 +1435,16 @@ function processUIMessageStream({
                 updateToolInvocationPart({
                   toolCallId: part.toolCallId,
                   toolName: part.toolName,
-                  state: "
-
-                  result
+                  state: "output-available",
+                  input: part.input,
+                  output: result
                 });
                 write();
               }
             }
             break;
           }
-          case "tool-
+          case "tool-output-available": {
             const toolInvocations = state.message.parts.filter(isToolUIPart);
             if (toolInvocations == null) {
               throw new Error("tool_result must be preceded by a tool_call");
@@ -1441,9 +1463,9 @@ function processUIMessageStream({
             updateToolInvocationPart({
               toolCallId: part.toolCallId,
               toolName,
-              state: "
-
-
+              state: "output-available",
+              input: toolInvocations[toolInvocationIndex].input,
+              output: part.output
             });
             write();
             break;
@@ -1539,7 +1561,7 @@ function isAssistantMessageWithCompletedToolCalls(message) {
     return part.type === "step-start" ? index : lastIndex;
   }, -1);
   const lastStepToolInvocations = message.parts.slice(lastStepStartIndex + 1).filter(isToolUIPart);
-  return lastStepToolInvocations.length > 0 && lastStepToolInvocations.every((part) => part.state === "
+  return lastStepToolInvocations.length > 0 && lastStepToolInvocations.every((part) => part.state === "output-available");
 }
 
 // src/ui/chat.ts
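Note: with these renames, tool UI parts move through the states `input-streaming` → `input-available` → `output-available` and carry `input`/`output` fields. A hedged sketch of reading them from a UI message (the `getWeather` tool name is illustrative; parts use the `tool-${toolName}` type pushed above):

```ts
// Sketch: reading the renamed tool part states and fields from a UI message.
function logToolParts(message: { parts: Array<any> }) {
  for (const part of message.parts) {
    if (part.type !== "tool-getWeather") continue;
    switch (part.state) {
      case "input-streaming": // partial input parsed from inputTextDelta chunks
        console.log("streaming input", part.input);
        break;
      case "input-available":
        console.log("input ready", part.input);
        break;
      case "output-available":
        console.log("tool output", part.output);
        break;
    }
  }
}
```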
@@ -1558,22 +1580,14 @@ var AbstractChat = class {
   }) {
     this.activeResponse = void 0;
     this.jobExecutor = new SerialJobExecutor();
-    this.removeAssistantResponse = () => {
-      const lastMessage = this.state.messages[this.state.messages.length - 1];
-      if (lastMessage == null) {
-        throw new Error("Cannot remove assistant response from empty chat");
-      }
-      if (lastMessage.role !== "assistant") {
-        throw new Error("Last message is not an assistant message");
-      }
-      this.state.popMessage();
-    };
     /**
-     *
+     * Appends or replaces a user message to the chat list. This triggers the API call to fetch
      * the assistant's response.
+     *
+     * If a messageId is provided, the message will be replaced.
      */
     this.sendMessage = async (message, options = {}) => {
-      var _a17, _b;
+      var _a17, _b, _c;
       let uiMessage;
       if ("text" in message || "files" in message) {
         const fileParts = Array.isArray(message.files) ? message.files : await convertFileListToFileUIParts(message.files);
@@ -1586,40 +1600,75 @@ var AbstractChat = class {
       } else {
         uiMessage = message;
       }
-
-
-
-
+      if (message.messageId != null) {
+        const messageIndex = this.state.messages.findIndex(
+          (m) => m.id === message.messageId
+        );
+        if (messageIndex === -1) {
+          throw new Error(`message with id ${message.messageId} not found`);
+        }
+        if (this.state.messages[messageIndex].role !== "user") {
+          throw new Error(
+            `message with id ${message.messageId} is not a user message`
+          );
+        }
+        this.state.messages = this.state.messages.slice(0, messageIndex + 1);
+        this.state.replaceMessage(messageIndex, {
+          ...uiMessage,
+          id: message.messageId,
+          role: (_a17 = uiMessage.role) != null ? _a17 : "user"
+        });
+      } else {
+        this.state.pushMessage({
+          ...uiMessage,
+          id: (_b = uiMessage.id) != null ? _b : this.generateId(),
+          role: (_c = uiMessage.role) != null ? _c : "user"
+        });
+      }
+      await this.makeRequest({
+        trigger: "submit-user-message",
+        messageId: message.messageId,
+        ...options
       });
-      await this.triggerRequest({ requestType: "generate", ...options });
     };
     /**
-     * Regenerate the
+     * Regenerate the assistant message with the provided message id.
+     * If no message id is provided, the last assistant message will be regenerated.
      */
-    this.
-
-
-
-
-
+    this.regenerate = async ({
+      messageId,
+      ...options
+    } = {}) => {
+      const messageIndex = messageId == null ? this.state.messages.length - 1 : this.state.messages.findIndex((message) => message.id === messageId);
+      if (messageIndex === -1) {
+        throw new Error(`message ${messageId} not found`);
       }
-
+      this.state.messages = this.state.messages.slice(
+        0,
+        // if the message is a user message, we need to include it in the request:
+        this.messages[messageIndex].role === "assistant" ? messageIndex : messageIndex + 1
+      );
+      await this.makeRequest({
+        trigger: "regenerate-assistant-message",
+        messageId,
+        ...options
+      });
     };
     /**
-     *
+     * Attempt to resume an ongoing streaming response.
      */
-    this.
-      await this.
+    this.resumeStream = async (options = {}) => {
+      await this.makeRequest({ trigger: "resume-stream", ...options });
     };
     this.addToolResult = async ({
       toolCallId,
-
+      output
     }) => {
       this.jobExecutor.run(async () => {
-
+        updateToolOutput({
          messages: this.state.messages,
          toolCallId,
-
+          output
        });
        this.messages = this.state.messages;
        if (this.status === "submitted" || this.status === "streaming") {
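Note: `sendMessage` can now replace an existing user message when a `messageId` is passed, and `regenerate` / `resumeStream` replace the previous reload/resume methods. A sketch against the chat API shown above (the chat instance itself is assumed, e.g. from a `useChat`-style hook; message ids are illustrative):

```ts
// Sketch: edit-and-resend a user message, then regenerate an assistant reply.
async function editAndRegenerate(chat: {
  sendMessage: (msg: { text: string; messageId?: string }, opts?: object) => Promise<void>;
  regenerate: (opts?: { messageId?: string }) => Promise<void>;
}) {
  // Replaces the existing user message and truncates everything after it:
  await chat.sendMessage({ text: "Updated question", messageId: "msg_user_1" });
  // Regenerate the last assistant message, or a specific one by id:
  await chat.regenerate();
  await chat.regenerate({ messageId: "msg_assistant_2" });
}
```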
@@ -1627,8 +1676,8 @@ var AbstractChat = class {
         }
         const lastMessage = this.lastMessage;
         if (isAssistantMessageWithCompletedToolCalls(lastMessage)) {
-          this.
-
+          this.makeRequest({
+            trigger: "submit-tool-result"
           });
         }
       });
@@ -1687,11 +1736,12 @@ var AbstractChat = class {
   set messages(messages) {
     this.state.messages = messages;
   }
-  async
-
+  async makeRequest({
+    trigger,
     metadata,
     headers,
-    body
+    body,
+    messageId
   }) {
     var _a17, _b;
     this.setStatus({ status: "submitted", error: void 0 });
@@ -1707,15 +1757,30 @@ var AbstractChat = class {
       abortController: new AbortController()
     };
     this.activeResponse = activeResponse;
-
-
-
-
-
-
-
-
-
+    let stream;
+    if (trigger === "resume-stream") {
+      const reconnect = await this.transport.reconnectToStream({
+        chatId: this.id,
+        metadata,
+        headers,
+        body
+      });
+      if (reconnect == null) {
+        return;
+      }
+      stream = reconnect;
+    } else {
+      stream = await this.transport.sendMessages({
+        chatId: this.id,
+        messages: this.state.messages,
+        abortSignal: activeResponse.abortController.signal,
+        metadata,
+        headers,
+        body,
+        trigger,
+        messageId
+      });
+    }
     const runUpdateMessageJob = (job) => (
       // serialize the job execution to avoid race conditions:
       this.jobExecutor.run(
@@ -1770,19 +1835,20 @@ var AbstractChat = class {
       maxSteps: this.maxSteps,
       messages: this.state.messages
     })) {
-      await this.
-        requestType,
+      await this.makeRequest({
         metadata,
         headers,
-        body
+        body,
+        // secondary requests are triggered by automatic tool execution
+        trigger: "submit-tool-result"
       });
     }
   }
 };
-function
+function updateToolOutput({
   messages,
   toolCallId,
-
+  output
 }) {
   const lastMessage = messages[messages.length - 1];
   const toolPart = lastMessage.parts.find(
@@ -1791,8 +1857,8 @@ function updateToolResult({
   if (toolPart == null) {
     return;
   }
-  toolPart.state = "
-  toolPart.
+  toolPart.state = "output-available";
+  toolPart.output = output;
 }
 
 // src/ui/convert-to-model-messages.ts
@@ -1849,17 +1915,17 @@ function convertToModelMessages(messages, options) {
           });
         } else if (isToolUIPart(part)) {
           const toolName = getToolName(part);
-          if (part.state === "
+          if (part.state === "input-streaming") {
             throw new MessageConversionError({
               originalMessage: message,
-              message: `
+              message: `incomplete tool input is not supported: ${part.toolCallId}`
             });
           } else {
             content.push({
               type: "tool-call",
               toolCallId: part.toolCallId,
               toolName,
-
+              input: part.input
             });
           }
         } else {
@@ -1876,26 +1942,26 @@ function convertToModelMessages(messages, options) {
       modelMessages.push({
         role: "tool",
         content: toolParts.map((toolPart) => {
-          if (toolPart.state !== "
+          if (toolPart.state !== "output-available") {
             throw new MessageConversionError({
               originalMessage: message,
               message: "ToolInvocation must have a result: " + JSON.stringify(toolPart)
             });
           }
           const toolName = getToolName(toolPart);
-          const { toolCallId,
+          const { toolCallId, output } = toolPart;
           const tool2 = tools[toolName];
           return (tool2 == null ? void 0 : tool2.experimental_toToolResultContent) != null ? {
             type: "tool-result",
             toolCallId,
             toolName,
-
-            experimental_content: tool2.experimental_toToolResultContent(
+            output: tool2.experimental_toToolResultContent(output),
+            experimental_content: tool2.experimental_toToolResultContent(output)
           } : {
             type: "tool-result",
             toolCallId,
             toolName,
-
+            output
           };
         })
       });
@@ -1951,89 +2017,13 @@ function transformTextToUiMessageStream({
 }
 
 // src/ui/text-stream-chat-transport.ts
-var
-
-
-  body,
-  credentials,
-  headers,
-  abortSignal,
-  fetch: fetch2 = getOriginalFetch3(),
-  requestType = "generate"
-}) {
-  var _a17;
-  const response = requestType === "resume" ? await fetch2(`${api}?chatId=${body.chatId}`, {
-    method: "GET",
-    headers: {
-      "Content-Type": "application/json",
-      ...headers
-    },
-    signal: abortSignal,
-    credentials
-  }) : await fetch2(api, {
-    method: "POST",
-    body: JSON.stringify(body),
-    headers: {
-      "Content-Type": "application/json",
-      ...headers
-    },
-    signal: abortSignal,
-    credentials
-  });
-  if (!response.ok) {
-    throw new Error(
-      (_a17 = await response.text()) != null ? _a17 : "Failed to fetch the chat response."
-    );
-  }
-  if (!response.body) {
-    throw new Error("The response body is empty.");
-  }
-  return transformTextToUiMessageStream({
-    stream: response.body.pipeThrough(new TextDecoderStream())
-  });
-}
-var TextStreamChatTransport = class {
-  constructor({
-    api,
-    credentials,
-    headers,
-    body,
-    fetch: fetch2,
-    prepareRequest
-  }) {
-    this.api = api;
-    this.credentials = credentials;
-    this.headers = headers;
-    this.body = body;
-    this.fetch = fetch2;
-    this.prepareRequest = prepareRequest;
+var TextStreamChatTransport = class extends HttpChatTransport {
+  constructor(options = {}) {
+    super(options);
   }
-
-
-
-    abortSignal,
-    metadata,
-    headers,
-    body,
-    requestType
-  }) {
-    var _a17, _b;
-    const preparedRequest = (_a17 = this.prepareRequest) == null ? void 0 : _a17.call(this, {
-      id: chatId,
-      messages,
-      body: { ...this.body, ...body },
-      headers: { ...this.headers, ...headers },
-      credentials: this.credentials,
-      requestMetadata: metadata
-    });
-    return fetchTextStream({
-      api: this.api,
-      body: (preparedRequest == null ? void 0 : preparedRequest.body) !== void 0 ? preparedRequest.body : { ...this.body, ...body },
-      headers: (preparedRequest == null ? void 0 : preparedRequest.headers) !== void 0 ? preparedRequest.headers : { ...this.headers, ...headers },
-      credentials: (_b = preparedRequest == null ? void 0 : preparedRequest.credentials) != null ? _b : this.credentials,
-      abortSignal,
-      fetch: this.fetch,
-      requestType
+  processResponseStream(stream) {
+    return transformTextToUiMessageStream({
+      stream: stream.pipeThrough(new TextDecoderStream())
     });
   }
 };
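Note: `TextStreamChatTransport` now shares the `HttpChatTransport` constructor options instead of its own `prepareRequest` hook. A minimal sketch (the endpoint path is an assumption):

```ts
import { TextStreamChatTransport } from "ai";

// Sketch: a transport that consumes a plain text stream from a custom endpoint.
const textTransport = new TextStreamChatTransport({
  api: "/api/completion-stream",
  headers: { "x-client": "web" },
});
```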
@@ -2200,7 +2190,7 @@ var JsonToSseTransformStream = class extends TransformStream {
 };
 
 // src/ui-message-stream/ui-message-stream-headers.ts
-var
+var UI_MESSAGE_STREAM_HEADERS = {
   "content-type": "text/event-stream",
   "cache-control": "no-cache",
   connection: "keep-alive",
@@ -2214,16 +2204,20 @@ function createUIMessageStreamResponse({
   status,
   statusText,
   headers,
-  stream
+  stream,
+  consumeSseStream
 }) {
-
-
-
-
-
-
-
-
+  let sseStream = stream.pipeThrough(new JsonToSseTransformStream());
+  if (consumeSseStream) {
+    const [stream1, stream2] = sseStream.tee();
+    sseStream = stream1;
+    consumeSseStream({ stream: stream2 });
+  }
+  return new Response(sseStream.pipeThrough(new TextEncoderStream()), {
+    status,
+    statusText,
+    headers: prepareHeaders(headers, UI_MESSAGE_STREAM_HEADERS)
+  });
 }
 
 // src/ui-message-stream/pipe-ui-message-stream-to-response.ts
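Note: `createUIMessageStreamResponse` (and `pipeUIMessageStreamToResponse` below) gains a `consumeSseStream` callback that receives a teed copy of the SSE stream, e.g. for persisting it server-side. A sketch of wiring it up (the persistence helper is hypothetical, and the stream argument is assumed to come from e.g. `result.toUIMessageStream()`):

```ts
import { createUIMessageStreamResponse } from "ai";

// Sketch: tee the SSE stream so it can be stored while streaming to the client.
export function toPersistedResponse(
  uiMessageStream: ReadableStream<any>,
  saveChatStream: (sse: ReadableStream<string>) => Promise<void> // hypothetical helper
) {
  return createUIMessageStreamResponse({
    stream: uiMessageStream,
    consumeSseStream: ({ stream }) => {
      // Runs on a teed copy; does not block the response sent to the client.
      void saveChatStream(stream);
    },
  });
}
```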
@@ -2232,16 +2226,23 @@ function pipeUIMessageStreamToResponse({
   status,
   statusText,
   headers,
-  stream
+  stream,
+  consumeSseStream
 }) {
+  let sseStream = stream.pipeThrough(new JsonToSseTransformStream());
+  if (consumeSseStream) {
+    const [stream1, stream2] = sseStream.tee();
+    sseStream = stream1;
+    consumeSseStream({ stream: stream2 });
+  }
   writeToServerResponse({
     response,
     status,
     statusText,
     headers: Object.fromEntries(
-      prepareHeaders(headers,
+      prepareHeaders(headers, UI_MESSAGE_STREAM_HEADERS).entries()
     ),
-    stream:
+    stream: sseStream.pipeThrough(new TextEncoderStream())
   });
 }
 
@@ -3443,7 +3444,7 @@ function convertToLanguageModelMessage(message, downloadedAssets) {
         type: "tool-call",
         toolCallId: part.toolCallId,
         toolName: part.toolName,
-
+        input: part.input,
         providerOptions
       };
     }
@@ -3459,7 +3460,7 @@ function convertToLanguageModelMessage(message, downloadedAssets) {
       type: "tool-result",
       toolCallId: part.toolCallId,
       toolName: part.toolName,
-
+      output: part.output,
       content: part.experimental_content,
       isError: part.isError,
       providerOptions: part.providerOptions
@@ -3661,14 +3662,11 @@ function prepareCallSettings({
 
 // core/prompt/resolve-language-model.ts
 import { gateway } from "@ai-sdk/gateway";
-var GLOBAL_DEFAULT_PROVIDER = Symbol(
-  "vercel.ai.global.defaultProvider"
-);
 function resolveLanguageModel(model) {
   if (typeof model !== "string") {
     return model;
   }
-  const globalProvider = globalThis
+  const globalProvider = globalThis.AI_SDK_DEFAULT_PROVIDER;
   return (globalProvider != null ? globalProvider : gateway).languageModel(model);
 }
 
@@ -3746,14 +3744,14 @@ var toolCallPartSchema = z6.object({
   type: z6.literal("tool-call"),
   toolCallId: z6.string(),
   toolName: z6.string(),
-
+  input: z6.unknown(),
   providerOptions: providerMetadataSchema.optional()
 });
 var toolResultPartSchema = z6.object({
   type: z6.literal("tool-result"),
   toolCallId: z6.string(),
   toolName: z6.string(),
-
+  output: z6.unknown(),
   content: toolResultContentSchema.optional(),
   isError: z6.boolean().optional(),
   providerOptions: providerMetadataSchema.optional()
@@ -5359,11 +5357,18 @@ function prepareToolsAndToolChoice({
         type: "function",
         name: name17,
         description: tool2.description,
-
+        inputSchema: asSchema2(tool2.inputSchema).jsonSchema
+      };
+    case "provider-defined-client":
+      return {
+        type: "provider-defined-client",
+        name: name17,
+        id: tool2.id,
+        args: tool2.args
       };
-    case "provider-defined":
+    case "provider-defined-server":
       return {
-        type: "provider-defined",
+        type: "provider-defined-server",
         name: name17,
         id: tool2.id,
         args: tool2.args
@@ -5447,7 +5452,7 @@ async function parseToolCall({
   try {
     return await doParseToolCall({ toolCall, tools });
   } catch (error) {
-    if (repairToolCall == null || !(NoSuchToolError.isInstance(error) ||
+    if (repairToolCall == null || !(NoSuchToolError.isInstance(error) || InvalidToolInputError.isInstance(error))) {
       throw error;
     }
     let repairedToolCall = null;
@@ -5455,9 +5460,9 @@ async function parseToolCall({
     repairedToolCall = await repairToolCall({
       toolCall,
       tools,
-
-        const {
-        return asSchema3(
+      inputSchema: ({ toolName }) => {
+        const { inputSchema } = tools[toolName];
+        return asSchema3(inputSchema).jsonSchema;
       },
       system,
       messages,
@@ -5487,12 +5492,12 @@ async function doParseToolCall({
       availableTools: Object.keys(tools)
     });
   }
-  const schema = asSchema3(tool2.
-  const parseResult = toolCall.
+  const schema = asSchema3(tool2.inputSchema);
+  const parseResult = toolCall.input.trim() === "" ? await safeValidateTypes3({ value: {}, schema }) : await safeParseJSON3({ text: toolCall.input, schema });
   if (parseResult.success === false) {
-    throw new
+    throw new InvalidToolInputError({
       toolName,
-
+      toolInput: toolCall.input,
       cause: parseResult.error
     });
   }
@@ -5500,7 +5505,7 @@ async function doParseToolCall({
     type: "tool-call",
     toolCallId: toolCall.toolCallId,
     toolName,
-
+    input: parseResult.value
   };
 }
 
@@ -5603,15 +5608,15 @@ function toResponseMessages({
       type: "tool-result",
       toolCallId: toolResult.toolCallId,
       toolName: toolResult.toolName,
-
+      output: tool2.experimental_toToolResultContent(toolResult.output),
       experimental_content: tool2.experimental_toToolResultContent(
-        toolResult.
+        toolResult.output
       )
     } : {
       type: "tool-result",
       toolCallId: toolResult.toolCallId,
       toolName: toolResult.toolName,
-
+      output: toolResult.output
     };
   });
   if (toolResultContent.length > 0) {
@@ -5917,11 +5922,11 @@ async function executeTools({
   abortSignal
 }) {
   const toolResults = await Promise.all(
-    toolCalls.map(async ({ toolCallId, toolName,
+    toolCalls.map(async ({ toolCallId, toolName, input }) => {
       const tool2 = tools[toolName];
-      if ((tool2 == null ? void 0 : tool2.
-        await tool2.
-
+      if ((tool2 == null ? void 0 : tool2.onInputAvailable) != null) {
+        await tool2.onInputAvailable({
+          input,
           toolCallId,
           messages,
           abortSignal
@@ -5941,15 +5946,15 @@ async function executeTools({
           }),
           "ai.toolCall.name": toolName,
           "ai.toolCall.id": toolCallId,
-          "ai.toolCall.
-            output: () => JSON.stringify(
+          "ai.toolCall.input": {
+            output: () => JSON.stringify(input)
           }
         }
       }),
       tracer,
       fn: async (span) => {
         try {
-          const result2 = await tool2.execute(
+          const result2 = await tool2.execute(input, {
             toolCallId,
             messages,
             abortSignal
@@ -5972,7 +5977,7 @@ async function executeTools({
           throw new ToolExecutionError({
             toolCallId,
             toolName,
-
+            toolInput: input,
             cause: error
           });
         }
@@ -5982,8 +5987,8 @@ async function executeTools({
         type: "tool-result",
         toolCallId,
         toolName,
-
-        result
+        input,
+        output: result
       };
     })
   );
@@ -6073,7 +6078,7 @@ function asToolCalls(content) {
     toolCallType: toolCall.toolCallType,
     toolCallId: toolCall.toolCallId,
     toolName: toolCall.toolName,
-
+    input: toolCall.input
   }));
 }
 
@@ -6301,7 +6306,7 @@ function runToolsTransformation({
             type: "tool-call-delta",
             toolCallId: chunk.toolCallId,
             toolName: chunk.toolName,
-
+            inputTextDelta: chunk.inputTextDelta
           });
           break;
         }
@@ -6316,9 +6321,9 @@ function runToolsTransformation({
           });
           controller.enqueue(toolCall);
           const tool2 = tools[toolCall.toolName];
-          if (tool2.
-            await tool2.
-
+          if (tool2.onInputAvailable != null) {
+            await tool2.onInputAvailable({
+              input: toolCall.input,
               toolCallId: toolCall.toolCallId,
               messages,
               abortSignal
@@ -6338,22 +6343,22 @@ function runToolsTransformation({
               }),
               "ai.toolCall.name": toolCall.toolName,
               "ai.toolCall.id": toolCall.toolCallId,
-              "ai.toolCall.
-                output: () => JSON.stringify(toolCall.
+              "ai.toolCall.input": {
+                output: () => JSON.stringify(toolCall.input)
               }
             }
          }),
          tracer,
-          fn: async (span) => tool2.execute(toolCall.
+          fn: async (span) => tool2.execute(toolCall.input, {
            toolCallId: toolCall.toolCallId,
            messages,
            abortSignal
          }).then(
-            (
+            (output) => {
              toolResultsStreamController.enqueue({
                ...toolCall,
                type: "tool-result",
-
+                output
              });
              outstandingToolResults.delete(toolExecutionId);
              attemptClose();
@@ -6362,8 +6367,8 @@ function runToolsTransformation({
              selectTelemetryAttributes({
                telemetry,
                attributes: {
-                  "ai.toolCall.
-                    output: () => JSON.stringify(
+                  "ai.toolCall.output": {
+                    output: () => JSON.stringify(output)
                  }
                }
              })
@@ -6377,7 +6382,7 @@ function runToolsTransformation({
            error: new ToolExecutionError({
              toolCallId: toolCall.toolCallId,
              toolName: toolCall.toolName,
-
+              toolInput: toolCall.input,
              cause: error
            })
          });
@@ -7029,8 +7034,8 @@ var DefaultStreamTextResult = class {
          }
          case "tool-call-streaming-start": {
            const tool2 = tools == null ? void 0 : tools[chunk.toolName];
-            if ((tool2 == null ? void 0 : tool2.
-              await tool2.
+            if ((tool2 == null ? void 0 : tool2.onInputStart) != null) {
+              await tool2.onInputStart({
                toolCallId: chunk.toolCallId,
                messages: stepInputMessages,
                abortSignal
@@ -7041,9 +7046,9 @@ var DefaultStreamTextResult = class {
          }
          case "tool-call-delta": {
            const tool2 = tools == null ? void 0 : tools[chunk.toolName];
-            if ((tool2 == null ? void 0 : tool2.
-              await tool2.
-
+            if ((tool2 == null ? void 0 : tool2.onInputDelta) != null) {
+              await tool2.onInputDelta({
+                inputTextDelta: chunk.inputTextDelta,
                toolCallId: chunk.toolCallId,
                messages: stepInputMessages,
                abortSignal
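Note: tool definitions now use `inputSchema` plus the streaming callbacks `onInputStart` / `onInputDelta` / `onInputAvailable`, and `execute` receives the validated `input`. A hedged sketch of a tool using the renamed fields (the weather lookup itself is illustrative):

```ts
import { tool } from "ai";
import { z } from "zod";

// Sketch: callback and field names taken from the call sites in this diff.
const weatherTool = tool({
  description: "Get the weather for a city",
  inputSchema: z.object({ city: z.string() }),
  onInputStart: ({ toolCallId }) => console.log("input streaming started", toolCallId),
  onInputDelta: ({ inputTextDelta }) => console.log("delta", inputTextDelta),
  onInputAvailable: ({ input }) => console.log("validated input", input),
  execute: async ({ city }) => ({ city, temperature: 21 }),
});
```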
@@ -7358,7 +7363,7 @@ var DefaultStreamTextResult = class {
           }
           case "tool-call-streaming-start": {
             controller.enqueue({
-              type: "tool-
+              type: "tool-input-start",
               toolCallId: part.toolCallId,
               toolName: part.toolName
             });
@@ -7366,26 +7371,26 @@ var DefaultStreamTextResult = class {
           }
           case "tool-call-delta": {
             controller.enqueue({
-              type: "tool-
+              type: "tool-input-delta",
               toolCallId: part.toolCallId,
-
+              inputTextDelta: part.inputTextDelta
             });
             break;
           }
           case "tool-call": {
             controller.enqueue({
-              type: "tool-
+              type: "tool-input-available",
               toolCallId: part.toolCallId,
               toolName: part.toolName,
-
+              input: part.input
             });
             break;
           }
           case "tool-result": {
             controller.enqueue({
-              type: "tool-
+              type: "tool-output-available",
               toolCallId: part.toolCallId,
-
+              output: part.output
             });
             break;
           }
@@ -7901,7 +7906,7 @@ function tool(tool2) {
 }
 
 // core/tool/mcp/mcp-sse-transport.ts
-import {
+import { EventSourceParserStream } from "@ai-sdk/provider-utils";
 
 // core/tool/mcp/json-rpc-message.ts
 import { z as z10 } from "zod";
@@ -8072,7 +8077,7 @@ var SseMCPTransport = class {
         (_b = this.onerror) == null ? void 0 : _b.call(this, error);
         return reject(error);
       }
-      const stream = response.body.pipeThrough(new TextDecoderStream()).pipeThrough(
+      const stream = response.body.pipeThrough(new TextDecoderStream()).pipeThrough(new EventSourceParserStream());
       const reader = stream.getReader();
       const processEvents = async () => {
         var _a18, _b2, _c2;
@@ -8396,15 +8401,14 @@ var MCPClient = class {
       if (schemas !== "automatic" && !(name17 in schemas)) {
         continue;
       }
-      const parameters = schemas === "automatic" ? jsonSchema({
-        ...inputSchema,
-        properties: (_a17 = inputSchema.properties) != null ? _a17 : {},
-        additionalProperties: false
-      }) : schemas[name17].parameters;
       const self = this;
       const toolWithExecute = tool({
         description,
-
+        inputSchema: schemas === "automatic" ? jsonSchema({
+          ...inputSchema,
+          properties: (_a17 = inputSchema.properties) != null ? _a17 : {},
+          additionalProperties: false
+        }) : schemas[name17].inputSchema,
         execute: async (args, options) => {
           var _a18;
           (_a18 = options == null ? void 0 : options.abortSignal) == null ? void 0 : _a18.throwIfAborted();
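Note: when passing explicit schemas to the MCP client's `tools()`, the per-tool key is now `inputSchema` (previously `parameters`). A sketch assuming an MCP client created with `experimental_createMCPClient` and a server tool named `search` (both assumed for illustration):

```ts
import { experimental_createMCPClient } from "ai";
import { z } from "zod";

const mcpClient = await experimental_createMCPClient({
  transport: { type: "sse", url: "https://example.com/mcp" },
});

// Explicit schemas now use `inputSchema` per tool.
const tools = await mcpClient.tools({
  schemas: {
    search: { inputSchema: z.object({ query: z.string() }) },
  },
});
```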
@@ -8529,14 +8533,13 @@ export {
   DefaultChatTransport,
   DownloadError,
   EmptyResponseBodyError,
-  GLOBAL_DEFAULT_PROVIDER,
   InvalidArgumentError,
   InvalidDataContentError,
   InvalidMessageRoleError,
   InvalidPromptError,
   InvalidResponseDataError,
   InvalidStreamPartError,
-
+  InvalidToolInputError,
   JSONParseError,
   JsonToSseTransformStream,
   LoadAPIKeyError,
@@ -8556,6 +8559,7 @@ export {
   ToolCallRepairError,
   ToolExecutionError,
   TypeValidationError,
+  UI_MESSAGE_STREAM_HEADERS,
   UnsupportedFunctionalityError,
   asSchema5 as asSchema,
   assistantModelMessageSchema,