ai 5.0.0-alpha.13 → 5.0.0-alpha.15

This diff shows the published contents of the two package versions as they appear in their public registry and is provided for informational purposes only.
package/dist/index.js CHANGED
@@ -26,14 +26,13 @@ __export(src_exports, {
  DefaultChatTransport: () => DefaultChatTransport,
  DownloadError: () => DownloadError,
  EmptyResponseBodyError: () => import_provider16.EmptyResponseBodyError,
- GLOBAL_DEFAULT_PROVIDER: () => GLOBAL_DEFAULT_PROVIDER,
  InvalidArgumentError: () => InvalidArgumentError,
  InvalidDataContentError: () => InvalidDataContentError,
  InvalidMessageRoleError: () => InvalidMessageRoleError,
  InvalidPromptError: () => import_provider16.InvalidPromptError,
  InvalidResponseDataError: () => import_provider16.InvalidResponseDataError,
  InvalidStreamPartError: () => InvalidStreamPartError,
- InvalidToolArgumentsError: () => InvalidToolArgumentsError,
+ InvalidToolInputError: () => InvalidToolInputError,
  JSONParseError: () => import_provider16.JSONParseError,
  JsonToSseTransformStream: () => JsonToSseTransformStream,
  LoadAPIKeyError: () => import_provider16.LoadAPIKeyError,
@@ -53,6 +52,7 @@ __export(src_exports, {
  ToolCallRepairError: () => ToolCallRepairError,
  ToolExecutionError: () => ToolExecutionError,
  TypeValidationError: () => import_provider16.TypeValidationError,
+ UI_MESSAGE_STREAM_HEADERS: () => UI_MESSAGE_STREAM_HEADERS,
  UnsupportedFunctionalityError: () => import_provider16.UnsupportedFunctionalityError,
  asSchema: () => import_provider_utils26.asSchema,
  assistantModelMessageSchema: () => assistantModelMessageSchema,
@@ -160,24 +160,22 @@ var InvalidStreamPartError = class extends import_provider2.AISDKError {
  };
  _a2 = symbol2;
 
- // src/error/invalid-tool-arguments-error.ts
+ // src/error/invalid-tool-input-error.ts
  var import_provider3 = require("@ai-sdk/provider");
- var name3 = "AI_InvalidToolArgumentsError";
+ var name3 = "AI_InvalidToolInputError";
  var marker3 = `vercel.ai.error.${name3}`;
  var symbol3 = Symbol.for(marker3);
  var _a3;
- var InvalidToolArgumentsError = class extends import_provider3.AISDKError {
+ var InvalidToolInputError = class extends import_provider3.AISDKError {
  constructor({
- toolArgs,
+ toolInput,
  toolName,
  cause,
- message = `Invalid arguments for tool ${toolName}: ${(0, import_provider3.getErrorMessage)(
- cause
- )}`
+ message = `Invalid input for tool ${toolName}: ${(0, import_provider3.getErrorMessage)(cause)}`
  }) {
  super({ name: name3, message, cause });
  this[_a3] = true;
- this.toolArgs = toolArgs;
+ this.toolInput = toolInput;
  this.toolName = toolName;
  }
  static isInstance(error) {
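Note: a minimal consumer-side sketch of the rename above (InvalidToolArgumentsError → InvalidToolInputError, toolArgs → toolInput). Only the class name, the static isInstance check, and the toolName/toolInput fields come from this diff; the import path and the TypeScript framing are assumptions.

import { InvalidToolInputError } from "ai";

// Sketch: narrow an unknown error to the renamed class and read the renamed field.
function describeToolError(error: unknown): string | undefined {
  if (InvalidToolInputError.isInstance(error)) {
    // the message now reads "Invalid input for tool <name>: <cause>"
    return `Invalid input for ${error.toolName}: ${JSON.stringify(error.toolInput)}`;
  }
  return undefined;
}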
@@ -328,7 +326,7 @@ var symbol10 = Symbol.for(marker10);
  var _a10;
  var ToolExecutionError = class extends import_provider10.AISDKError {
  constructor({
- toolArgs,
+ toolInput,
  toolName,
  toolCallId,
  cause,
@@ -336,7 +334,7 @@ var ToolExecutionError = class extends import_provider10.AISDKError {
  }) {
  super({ name: name10, message, cause });
  this[_a10] = true;
- this.toolArgs = toolArgs;
+ this.toolInput = toolInput;
  this.toolName = toolName;
  this.toolCallId = toolCallId;
  }
@@ -550,25 +548,25 @@ var uiMessageStreamPartSchema = import_zod.z.union([
  errorText: import_zod.z.string()
  }),
  import_zod.z.strictObject({
- type: import_zod.z.literal("tool-call-streaming-start"),
+ type: import_zod.z.literal("tool-input-start"),
  toolCallId: import_zod.z.string(),
  toolName: import_zod.z.string()
  }),
  import_zod.z.strictObject({
- type: import_zod.z.literal("tool-call-delta"),
+ type: import_zod.z.literal("tool-input-delta"),
  toolCallId: import_zod.z.string(),
- argsTextDelta: import_zod.z.string()
+ inputTextDelta: import_zod.z.string()
  }),
  import_zod.z.strictObject({
- type: import_zod.z.literal("tool-call"),
+ type: import_zod.z.literal("tool-input-available"),
  toolCallId: import_zod.z.string(),
  toolName: import_zod.z.string(),
- args: import_zod.z.unknown()
+ input: import_zod.z.unknown()
  }),
  import_zod.z.strictObject({
- type: import_zod.z.literal("tool-result"),
+ type: import_zod.z.literal("tool-output-available"),
  toolCallId: import_zod.z.string(),
- result: import_zod.z.unknown(),
+ output: import_zod.z.unknown(),
  providerMetadata: import_zod.z.any().optional()
  }),
  import_zod.z.strictObject({
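Note: a type-level sketch of the renamed tool-related stream parts, mirroring the zod union above; the alias name is illustrative, the literals and field names are taken from the schema.

// Assumed alias; shapes copied from uiMessageStreamPartSchema.
type ToolUIMessageStreamPart =
  | { type: "tool-input-start"; toolCallId: string; toolName: string }
  | { type: "tool-input-delta"; toolCallId: string; inputTextDelta: string }
  | { type: "tool-input-available"; toolCallId: string; toolName: string; input: unknown }
  | { type: "tool-output-available"; toolCallId: string; output: unknown; providerMetadata?: unknown };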
@@ -843,100 +841,126 @@ async function convertFileListToFileUIParts(files) {
 
  // src/ui/default-chat-transport.ts
  var import_provider_utils2 = require("@ai-sdk/provider-utils");
+
+ // src/ui/http-chat-transport.ts
  var getOriginalFetch2 = () => fetch;
- async function fetchUIMessageStream({
- api,
- body,
- credentials,
- headers,
- abortSignal,
- fetch: fetch2 = getOriginalFetch2(),
- requestType = "generate"
- }) {
- var _a17;
- const response = requestType === "resume" ? await fetch2(`${api}?id=${body.id}`, {
- method: "GET",
- headers: {
- "Content-Type": "application/json",
- ...headers
- },
- signal: abortSignal,
- credentials
- }) : await fetch2(api, {
- method: "POST",
- body: JSON.stringify(body),
- headers: {
- "Content-Type": "application/json",
- ...headers
- },
- signal: abortSignal,
- credentials
- });
- if (!response.ok) {
- throw new Error(
- (_a17 = await response.text()) != null ? _a17 : "Failed to fetch the chat response."
- );
- }
- if (!response.body) {
- throw new Error("The response body is empty.");
- }
- return (0, import_provider_utils2.parseJsonEventStream)({
- stream: response.body,
- schema: uiMessageStreamPartSchema
- }).pipeThrough(
- new TransformStream({
- async transform(part, controller) {
- if (!part.success) {
- throw part.error;
- }
- controller.enqueue(part.value);
- }
- })
- );
- }
- var DefaultChatTransport = class {
+ var HttpChatTransport = class {
  constructor({
  api = "/api/chat",
  credentials,
  headers,
  body,
- fetch: fetch2,
- prepareRequest
- } = {}) {
+ fetch: fetch2 = getOriginalFetch2(),
+ prepareSendMessagesRequest,
+ prepareReconnectToStreamRequest
+ }) {
  this.api = api;
  this.credentials = credentials;
  this.headers = headers;
  this.body = body;
  this.fetch = fetch2;
- this.prepareRequest = prepareRequest;
+ this.prepareSendMessagesRequest = prepareSendMessagesRequest;
+ this.prepareReconnectToStreamRequest = prepareReconnectToStreamRequest;
  }
- submitMessages({
- chatId,
- messages,
+ async sendMessages({
  abortSignal,
- metadata,
- headers,
- body,
- requestType
+ ...options
  }) {
- var _a17, _b;
- const preparedRequest = (_a17 = this.prepareRequest) == null ? void 0 : _a17.call(this, {
- id: chatId,
- messages,
- body: { ...this.body, ...body },
- headers: { ...this.headers, ...headers },
+ var _a17, _b, _c, _d;
+ const preparedRequest = (_a17 = this.prepareSendMessagesRequest) == null ? void 0 : _a17.call(this, {
+ api: this.api,
+ id: options.chatId,
+ messages: options.messages,
+ body: { ...this.body, ...options.body },
+ headers: { ...this.headers, ...options.headers },
  credentials: this.credentials,
- requestMetadata: metadata
+ requestMetadata: options.metadata,
+ trigger: options.trigger,
+ messageId: options.messageId
+ });
+ const api = (_b = preparedRequest == null ? void 0 : preparedRequest.api) != null ? _b : this.api;
+ const headers = (preparedRequest == null ? void 0 : preparedRequest.headers) !== void 0 ? preparedRequest.headers : { ...this.headers, ...options.headers };
+ const body = (preparedRequest == null ? void 0 : preparedRequest.body) !== void 0 ? preparedRequest.body : {
+ ...this.body,
+ ...options.body,
+ id: options.chatId,
+ messages: options.messages,
+ trigger: options.trigger,
+ messageId: options.messageId
+ };
+ const credentials = (_c = preparedRequest == null ? void 0 : preparedRequest.credentials) != null ? _c : this.credentials;
+ const response = await fetch(api, {
+ method: "POST",
+ headers: {
+ "Content-Type": "application/json",
+ ...headers
+ },
+ body: JSON.stringify(body),
+ credentials,
+ signal: abortSignal
  });
- return fetchUIMessageStream({
+ if (!response.ok) {
+ throw new Error(
+ (_d = await response.text()) != null ? _d : "Failed to fetch the chat response."
+ );
+ }
+ if (!response.body) {
+ throw new Error("The response body is empty.");
+ }
+ return this.processResponseStream(response.body);
+ }
+ async reconnectToStream(options) {
+ var _a17, _b, _c, _d;
+ const preparedRequest = (_a17 = this.prepareReconnectToStreamRequest) == null ? void 0 : _a17.call(this, {
  api: this.api,
- body: (preparedRequest == null ? void 0 : preparedRequest.body) !== void 0 ? preparedRequest.body : { ...this.body, ...body, id: chatId, messages },
- headers: (preparedRequest == null ? void 0 : preparedRequest.headers) !== void 0 ? preparedRequest.headers : { ...this.headers, ...headers },
- credentials: (_b = preparedRequest == null ? void 0 : preparedRequest.credentials) != null ? _b : this.credentials,
- abortSignal,
- fetch: this.fetch,
- requestType
+ id: options.chatId,
+ body: { ...this.body, ...options.body },
+ headers: { ...this.headers, ...options.headers },
+ credentials: this.credentials,
+ requestMetadata: options.metadata
  });
+ const api = (_b = preparedRequest == null ? void 0 : preparedRequest.api) != null ? _b : `${this.api}/${options.chatId}/stream`;
+ const headers = (preparedRequest == null ? void 0 : preparedRequest.headers) !== void 0 ? preparedRequest.headers : { ...this.headers, ...options.headers };
+ const credentials = (_c = preparedRequest == null ? void 0 : preparedRequest.credentials) != null ? _c : this.credentials;
+ const response = await fetch(api, {
+ method: "GET",
+ headers,
+ credentials
+ });
+ if (response.status === 204) {
+ return null;
+ }
+ if (!response.ok) {
+ throw new Error(
+ (_d = await response.text()) != null ? _d : "Failed to fetch the chat response."
+ );
+ }
+ if (!response.body) {
+ throw new Error("The response body is empty.");
+ }
+ return this.processResponseStream(response.body);
+ }
+ };
+
+ // src/ui/default-chat-transport.ts
+ var DefaultChatTransport = class extends HttpChatTransport {
+ constructor(options = {}) {
+ super(options);
+ }
+ processResponseStream(stream) {
+ return (0, import_provider_utils2.parseJsonEventStream)({
+ stream,
+ schema: uiMessageStreamPartSchema
+ }).pipeThrough(
+ new TransformStream({
+ async transform(part, controller) {
+ if (!part.success) {
+ throw part.error;
+ }
+ controller.enqueue(part.value);
+ }
+ })
+ );
  }
  };
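Note: a hedged sketch of configuring the reworked transport. prepareRequest is replaced by prepareSendMessagesRequest (which now also receives api, trigger and messageId), reconnection goes through reconnectToStream (GET, defaulting to `${api}/${chatId}/stream`), and a 204 response means there is no stream to resume. Option names come from the constructor above; the import path is an assumption.

import { DefaultChatTransport } from "ai";

const transport = new DefaultChatTransport({
  api: "/api/chat",
  // replaces the former prepareRequest hook; any field left out of the return value falls back to the defaults above
  prepareSendMessagesRequest: ({ api, id, messages, trigger, messageId, body, headers, credentials }) => ({
    api,
    headers,
    credentials,
    body: { ...body, id, messages, trigger, messageId },
  }),
});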
@@ -1355,15 +1379,15 @@ function processUIMessageStream({
  );
  if (part2 != null) {
  part2.state = options.state;
- part2.args = options.args;
- part2.result = options.result;
+ part2.input = options.input;
+ part2.output = options.output;
  } else {
  state.message.parts.push({
  type: `tool-${options.toolName}`,
  toolCallId: options.toolCallId,
  state: options.state,
- args: options.args,
- result: options.result
+ input: options.input,
+ output: options.output
  });
  }
  }
@@ -1446,7 +1470,7 @@ function processUIMessageStream({
  write();
  break;
  }
- case "tool-call-streaming-start": {
+ case "tool-input-start": {
  const toolInvocations = state.message.parts.filter(isToolUIPart);
  state.partialToolCalls[part.toolCallId] = {
  text: "",
@@ -1456,33 +1480,33 @@ function processUIMessageStream({
  updateToolInvocationPart({
  toolCallId: part.toolCallId,
  toolName: part.toolName,
- state: "partial-call",
- args: void 0
+ state: "input-streaming",
+ input: void 0
  });
  write();
  break;
  }
- case "tool-call-delta": {
+ case "tool-input-delta": {
  const partialToolCall = state.partialToolCalls[part.toolCallId];
- partialToolCall.text += part.argsTextDelta;
+ partialToolCall.text += part.inputTextDelta;
  const { value: partialArgs } = await parsePartialJson(
  partialToolCall.text
  );
  updateToolInvocationPart({
  toolCallId: part.toolCallId,
  toolName: partialToolCall.toolName,
- state: "partial-call",
- args: partialArgs
+ state: "input-streaming",
+ input: partialArgs
  });
  write();
  break;
  }
- case "tool-call": {
+ case "tool-input-available": {
  updateToolInvocationPart({
  toolCallId: part.toolCallId,
  toolName: part.toolName,
- state: "call",
- args: part.args
+ state: "input-available",
+ input: part.input
  });
  write();
  if (onToolCall) {
@@ -1493,16 +1517,16 @@ function processUIMessageStream({
  updateToolInvocationPart({
  toolCallId: part.toolCallId,
  toolName: part.toolName,
- state: "result",
- args: part.args,
- result
+ state: "output-available",
+ input: part.input,
+ output: result
  });
  write();
  }
  }
  break;
  }
- case "tool-result": {
+ case "tool-output-available": {
  const toolInvocations = state.message.parts.filter(isToolUIPart);
  if (toolInvocations == null) {
  throw new Error("tool_result must be preceded by a tool_call");
@@ -1521,9 +1545,9 @@ function processUIMessageStream({
  updateToolInvocationPart({
  toolCallId: part.toolCallId,
  toolName,
- state: "result",
- args: toolInvocations[toolInvocationIndex].args,
- result: part.result
+ state: "output-available",
+ input: toolInvocations[toolInvocationIndex].input,
+ output: part.output
  });
  write();
  break;
@@ -1619,7 +1643,7 @@ function isAssistantMessageWithCompletedToolCalls(message) {
  return part.type === "step-start" ? index : lastIndex;
  }, -1);
  const lastStepToolInvocations = message.parts.slice(lastStepStartIndex + 1).filter(isToolUIPart);
- return lastStepToolInvocations.length > 0 && lastStepToolInvocations.every((part) => part.state === "result");
+ return lastStepToolInvocations.length > 0 && lastStepToolInvocations.every((part) => part.state === "output-available");
  }
 
  // src/ui/chat.ts
@@ -1638,22 +1662,14 @@ var AbstractChat = class {
  }) {
  this.activeResponse = void 0;
  this.jobExecutor = new SerialJobExecutor();
- this.removeAssistantResponse = () => {
- const lastMessage = this.state.messages[this.state.messages.length - 1];
- if (lastMessage == null) {
- throw new Error("Cannot remove assistant response from empty chat");
- }
- if (lastMessage.role !== "assistant") {
- throw new Error("Last message is not an assistant message");
- }
- this.state.popMessage();
- };
  /**
- * Append a user message to the chat list. This triggers the API call to fetch
+ * Appends or replaces a user message to the chat list. This triggers the API call to fetch
  * the assistant's response.
+ *
+ * If a messageId is provided, the message will be replaced.
  */
  this.sendMessage = async (message, options = {}) => {
- var _a17, _b;
+ var _a17, _b, _c;
  let uiMessage;
  if ("text" in message || "files" in message) {
  const fileParts = Array.isArray(message.files) ? message.files : await convertFileListToFileUIParts(message.files);
@@ -1666,40 +1682,75 @@ var AbstractChat = class {
  } else {
  uiMessage = message;
  }
- this.state.pushMessage({
- ...uiMessage,
- id: (_a17 = uiMessage.id) != null ? _a17 : this.generateId(),
- role: (_b = uiMessage.role) != null ? _b : "user"
+ if (message.messageId != null) {
+ const messageIndex = this.state.messages.findIndex(
+ (m) => m.id === message.messageId
+ );
+ if (messageIndex === -1) {
+ throw new Error(`message with id ${message.messageId} not found`);
+ }
+ if (this.state.messages[messageIndex].role !== "user") {
+ throw new Error(
+ `message with id ${message.messageId} is not a user message`
+ );
+ }
+ this.state.messages = this.state.messages.slice(0, messageIndex + 1);
+ this.state.replaceMessage(messageIndex, {
+ ...uiMessage,
+ id: message.messageId,
+ role: (_a17 = uiMessage.role) != null ? _a17 : "user"
+ });
+ } else {
+ this.state.pushMessage({
+ ...uiMessage,
+ id: (_b = uiMessage.id) != null ? _b : this.generateId(),
+ role: (_c = uiMessage.role) != null ? _c : "user"
+ });
+ }
+ await this.makeRequest({
+ trigger: "submit-user-message",
+ messageId: message.messageId,
+ ...options
  });
- await this.triggerRequest({ requestType: "generate", ...options });
  };
  /**
- * Regenerate the last assistant message.
+ * Regenerate the assistant message with the provided message id.
+ * If no message id is provided, the last assistant message will be regenerated.
  */
- this.reload = async (options = {}) => {
- if (this.lastMessage === void 0) {
- return;
+ this.regenerate = async ({
+ messageId,
+ ...options
+ } = {}) => {
+ const messageIndex = messageId == null ? this.state.messages.length - 1 : this.state.messages.findIndex((message) => message.id === messageId);
+ if (messageIndex === -1) {
+ throw new Error(`message ${messageId} not found`);
  }
- if (this.lastMessage.role === "assistant") {
- this.state.popMessage();
- }
- await this.triggerRequest({ requestType: "generate", ...options });
+ this.state.messages = this.state.messages.slice(
+ 0,
+ // if the message is a user message, we need to include it in the request:
+ this.messages[messageIndex].role === "assistant" ? messageIndex : messageIndex + 1
+ );
+ await this.makeRequest({
+ trigger: "regenerate-assistant-message",
+ messageId,
+ ...options
+ });
  };
  /**
- * Resume an ongoing chat generation stream. This does not resume an aborted generation.
+ * Attempt to resume an ongoing streaming response.
  */
- this.experimental_resume = async (options = {}) => {
- await this.triggerRequest({ requestType: "resume", ...options });
+ this.resumeStream = async (options = {}) => {
+ await this.makeRequest({ trigger: "resume-stream", ...options });
  };
  this.addToolResult = async ({
  toolCallId,
- result
+ output
  }) => {
  this.jobExecutor.run(async () => {
- updateToolResult({
+ updateToolOutput({
  messages: this.state.messages,
  toolCallId,
- toolResult: result
+ output
  });
  this.messages = this.state.messages;
  if (this.status === "submitted" || this.status === "streaming") {
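Note: a sketch of the renamed chat methods in this hunk, assuming `chat` is an instance of a concrete AbstractChat subclass; the declared shape below is trimmed to what this diff shows.

declare const chat: {
  sendMessage: (message: { text?: string; messageId?: string }, options?: object) => Promise<void>;
  regenerate: (options?: { messageId?: string }) => Promise<void>;
  resumeStream: (options?: object) => Promise<void>;
  addToolResult: (options: { toolCallId: string; output: unknown }) => Promise<void>;
};

async function example() {
  await chat.sendMessage({ text: "Hello" });
  await chat.sendMessage({ text: "Hello (edited)", messageId: "user-1" }); // replaces that user message, then regenerates
  await chat.regenerate({ messageId: "assistant-1" }); // was reload(); can target a specific assistant message
  await chat.resumeStream(); // was experimental_resume()
  await chat.addToolResult({ toolCallId: "call-1", output: { ok: true } }); // `result` is now `output`
}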
@@ -1707,8 +1758,8 @@ var AbstractChat = class {
  }
  const lastMessage = this.lastMessage;
  if (isAssistantMessageWithCompletedToolCalls(lastMessage)) {
- this.triggerRequest({
- requestType: "generate"
+ this.makeRequest({
+ trigger: "submit-tool-result"
  });
  }
  });
@@ -1767,11 +1818,12 @@ var AbstractChat = class {
  set messages(messages) {
  this.state.messages = messages;
  }
- async triggerRequest({
- requestType,
+ async makeRequest({
+ trigger,
  metadata,
  headers,
- body
+ body,
+ messageId
  }) {
  var _a17, _b;
  this.setStatus({ status: "submitted", error: void 0 });
@@ -1787,15 +1839,30 @@ var AbstractChat = class {
  abortController: new AbortController()
  };
  this.activeResponse = activeResponse;
- const stream = await this.transport.submitMessages({
- chatId: this.id,
- messages: this.state.messages,
- abortSignal: activeResponse.abortController.signal,
- metadata,
- headers,
- body,
- requestType
- });
+ let stream;
+ if (trigger === "resume-stream") {
+ const reconnect = await this.transport.reconnectToStream({
+ chatId: this.id,
+ metadata,
+ headers,
+ body
+ });
+ if (reconnect == null) {
+ return;
+ }
+ stream = reconnect;
+ } else {
+ stream = await this.transport.sendMessages({
+ chatId: this.id,
+ messages: this.state.messages,
+ abortSignal: activeResponse.abortController.signal,
+ metadata,
+ headers,
+ body,
+ trigger,
+ messageId
+ });
+ }
  const runUpdateMessageJob = (job) => (
  // serialize the job execution to avoid race conditions:
  this.jobExecutor.run(
@@ -1850,19 +1917,20 @@ var AbstractChat = class {
  maxSteps: this.maxSteps,
  messages: this.state.messages
  })) {
- await this.triggerRequest({
- requestType,
+ await this.makeRequest({
  metadata,
  headers,
- body
+ body,
+ // secondary requests are triggered by automatic tool execution
+ trigger: "submit-tool-result"
  });
  }
  }
  };
- function updateToolResult({
+ function updateToolOutput({
  messages,
  toolCallId,
- toolResult: result
+ output
  }) {
  const lastMessage = messages[messages.length - 1];
  const toolPart = lastMessage.parts.find(
@@ -1871,8 +1939,8 @@ function updateToolResult({
  if (toolPart == null) {
  return;
  }
- toolPart.state = "result";
- toolPart.result = result;
+ toolPart.state = "output-available";
+ toolPart.output = output;
  }
 
  // src/ui/convert-to-model-messages.ts
@@ -1929,17 +1997,17 @@ function convertToModelMessages(messages, options) {
  });
  } else if (isToolUIPart(part)) {
  const toolName = getToolName(part);
- if (part.state === "partial-call") {
+ if (part.state === "input-streaming") {
  throw new MessageConversionError({
  originalMessage: message,
- message: `Partial tool call is not supported: ${part.toolCallId}`
+ message: `incomplete tool input is not supported: ${part.toolCallId}`
  });
  } else {
  content.push({
  type: "tool-call",
  toolCallId: part.toolCallId,
  toolName,
- args: part.args
+ input: part.input
  });
  }
  } else {
@@ -1956,26 +2024,26 @@ function convertToModelMessages(messages, options) {
  modelMessages.push({
  role: "tool",
  content: toolParts.map((toolPart) => {
- if (toolPart.state !== "result") {
+ if (toolPart.state !== "output-available") {
  throw new MessageConversionError({
  originalMessage: message,
  message: "ToolInvocation must have a result: " + JSON.stringify(toolPart)
  });
  }
  const toolName = getToolName(toolPart);
- const { toolCallId, result } = toolPart;
+ const { toolCallId, output } = toolPart;
  const tool2 = tools[toolName];
  return (tool2 == null ? void 0 : tool2.experimental_toToolResultContent) != null ? {
  type: "tool-result",
  toolCallId,
  toolName,
- result: tool2.experimental_toToolResultContent(result),
- experimental_content: tool2.experimental_toToolResultContent(result)
+ output: tool2.experimental_toToolResultContent(output),
+ experimental_content: tool2.experimental_toToolResultContent(output)
  } : {
  type: "tool-result",
  toolCallId,
  toolName,
- result
+ output
  };
  })
  });
@@ -2031,89 +2099,13 @@ function transformTextToUiMessageStream({
  }
 
  // src/ui/text-stream-chat-transport.ts
- var getOriginalFetch3 = () => fetch;
- async function fetchTextStream({
- api,
- body,
- credentials,
- headers,
- abortSignal,
- fetch: fetch2 = getOriginalFetch3(),
- requestType = "generate"
- }) {
- var _a17;
- const response = requestType === "resume" ? await fetch2(`${api}?chatId=${body.chatId}`, {
- method: "GET",
- headers: {
- "Content-Type": "application/json",
- ...headers
- },
- signal: abortSignal,
- credentials
- }) : await fetch2(api, {
- method: "POST",
- body: JSON.stringify(body),
- headers: {
- "Content-Type": "application/json",
- ...headers
- },
- signal: abortSignal,
- credentials
- });
- if (!response.ok) {
- throw new Error(
- (_a17 = await response.text()) != null ? _a17 : "Failed to fetch the chat response."
- );
- }
- if (!response.body) {
- throw new Error("The response body is empty.");
- }
- return transformTextToUiMessageStream({
- stream: response.body.pipeThrough(new TextDecoderStream())
- });
- }
- var TextStreamChatTransport = class {
- constructor({
- api,
- credentials,
- headers,
- body,
- fetch: fetch2,
- prepareRequest
- }) {
- this.api = api;
- this.credentials = credentials;
- this.headers = headers;
- this.body = body;
- this.fetch = fetch2;
- this.prepareRequest = prepareRequest;
+ var TextStreamChatTransport = class extends HttpChatTransport {
+ constructor(options = {}) {
+ super(options);
  }
- submitMessages({
- chatId,
- messages,
- abortSignal,
- metadata,
- headers,
- body,
- requestType
- }) {
- var _a17, _b;
- const preparedRequest = (_a17 = this.prepareRequest) == null ? void 0 : _a17.call(this, {
- id: chatId,
- messages,
- body: { ...this.body, ...body },
- headers: { ...this.headers, ...headers },
- credentials: this.credentials,
- requestMetadata: metadata
- });
- return fetchTextStream({
- api: this.api,
- body: (preparedRequest == null ? void 0 : preparedRequest.body) !== void 0 ? preparedRequest.body : { ...this.body, ...body },
- headers: (preparedRequest == null ? void 0 : preparedRequest.headers) !== void 0 ? preparedRequest.headers : { ...this.headers, ...headers },
- credentials: (_b = preparedRequest == null ? void 0 : preparedRequest.credentials) != null ? _b : this.credentials,
- abortSignal,
- fetch: this.fetch,
- requestType
+ processResponseStream(stream) {
+ return transformTextToUiMessageStream({
+ stream: stream.pipeThrough(new TextDecoderStream())
  });
  }
  };
@@ -2278,7 +2270,7 @@ var JsonToSseTransformStream = class extends TransformStream {
  };
 
  // src/ui-message-stream/ui-message-stream-headers.ts
- var uiMessageStreamHeaders = {
+ var UI_MESSAGE_STREAM_HEADERS = {
  "content-type": "text/event-stream",
  "cache-control": "no-cache",
  connection: "keep-alive",
@@ -2292,16 +2284,20 @@ function createUIMessageStreamResponse({
  status,
  statusText,
  headers,
- stream
+ stream,
+ consumeSseStream
  }) {
- return new Response(
- stream.pipeThrough(new JsonToSseTransformStream()).pipeThrough(new TextEncoderStream()),
- {
- status,
- statusText,
- headers: prepareHeaders(headers, uiMessageStreamHeaders)
- }
- );
+ let sseStream = stream.pipeThrough(new JsonToSseTransformStream());
+ if (consumeSseStream) {
+ const [stream1, stream2] = sseStream.tee();
+ sseStream = stream1;
+ consumeSseStream({ stream: stream2 });
+ }
+ return new Response(sseStream.pipeThrough(new TextEncoderStream()), {
+ status,
+ statusText,
+ headers: prepareHeaders(headers, UI_MESSAGE_STREAM_HEADERS)
+ });
  }
 
  // src/ui-message-stream/pipe-ui-message-stream-to-response.ts
@@ -2310,16 +2306,23 @@ function pipeUIMessageStreamToResponse({
  status,
  statusText,
  headers,
- stream
+ stream,
+ consumeSseStream
  }) {
+ let sseStream = stream.pipeThrough(new JsonToSseTransformStream());
+ if (consumeSseStream) {
+ const [stream1, stream2] = sseStream.tee();
+ sseStream = stream1;
+ consumeSseStream({ stream: stream2 });
+ }
  writeToServerResponse({
  response,
  status,
  statusText,
  headers: Object.fromEntries(
- prepareHeaders(headers, uiMessageStreamHeaders).entries()
+ prepareHeaders(headers, UI_MESSAGE_STREAM_HEADERS).entries()
  ),
- stream: stream.pipeThrough(new JsonToSseTransformStream()).pipeThrough(new TextEncoderStream())
+ stream: sseStream.pipeThrough(new TextEncoderStream())
  });
  }
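Note: both createUIMessageStreamResponse (previous hunk) and pipeUIMessageStreamToResponse now accept consumeSseStream, which receives a teed copy of the SSE stream while the other branch is sent to the client. A sketch, assuming the helper is exported from the package and that persisting the copy for later resumption is the intended use:

import { createUIMessageStreamResponse } from "ai";

declare const uiMessageStream: ReadableStream; // stream of UI message stream parts
declare function persistSseStream(stream: ReadableStream): Promise<void>;

const response = createUIMessageStreamResponse({
  stream: uiMessageStream,
  consumeSseStream: ({ stream }) => {
    // second branch of sseStream.tee(); e.g. store it so resumeStream()/reconnectToStream() can replay it
    void persistSseStream(stream);
  },
});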
@@ -3509,7 +3512,7 @@ function convertToLanguageModelMessage(message, downloadedAssets) {
  type: "tool-call",
  toolCallId: part.toolCallId,
  toolName: part.toolName,
- args: part.args,
+ input: part.input,
  providerOptions
  };
  }
@@ -3525,7 +3528,7 @@ function convertToLanguageModelMessage(message, downloadedAssets) {
  type: "tool-result",
  toolCallId: part.toolCallId,
  toolName: part.toolName,
- result: part.result,
+ output: part.output,
  content: part.experimental_content,
  isError: part.isError,
  providerOptions: part.providerOptions
@@ -3727,14 +3730,11 @@ function prepareCallSettings({
 
  // core/prompt/resolve-language-model.ts
  var import_gateway = require("@ai-sdk/gateway");
- var GLOBAL_DEFAULT_PROVIDER = Symbol(
- "vercel.ai.global.defaultProvider"
- );
  function resolveLanguageModel(model) {
  if (typeof model !== "string") {
  return model;
  }
- const globalProvider = globalThis[GLOBAL_DEFAULT_PROVIDER];
+ const globalProvider = globalThis.AI_SDK_DEFAULT_PROVIDER;
  return (globalProvider != null ? globalProvider : import_gateway.gateway).languageModel(model);
  }
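Note: the exported GLOBAL_DEFAULT_PROVIDER symbol is gone; resolveLanguageModel now reads globalThis.AI_SDK_DEFAULT_PROVIDER and falls back to the @ai-sdk/gateway provider for string model ids. A sketch of overriding the global default; the provider package shown is an assumption, any provider exposing languageModel(id) should fit.

import { openai } from "@ai-sdk/openai"; // assumed provider package

// String model ids now resolve through this global (falling back to @ai-sdk/gateway)
// instead of the removed globalThis[GLOBAL_DEFAULT_PROVIDER] symbol lookup.
(globalThis as any).AI_SDK_DEFAULT_PROVIDER = openai;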
@@ -3812,14 +3812,14 @@ var toolCallPartSchema = import_zod6.z.object({
  type: import_zod6.z.literal("tool-call"),
  toolCallId: import_zod6.z.string(),
  toolName: import_zod6.z.string(),
- args: import_zod6.z.unknown(),
+ input: import_zod6.z.unknown(),
  providerOptions: providerMetadataSchema.optional()
  });
  var toolResultPartSchema = import_zod6.z.object({
  type: import_zod6.z.literal("tool-result"),
  toolCallId: import_zod6.z.string(),
  toolName: import_zod6.z.string(),
- result: import_zod6.z.unknown(),
+ output: import_zod6.z.unknown(),
  content: toolResultContentSchema.optional(),
  isError: import_zod6.z.boolean().optional(),
  providerOptions: providerMetadataSchema.optional()
@@ -5412,11 +5412,18 @@ function prepareToolsAndToolChoice({
  type: "function",
  name: name17,
  description: tool2.description,
- parameters: (0, import_provider_utils17.asSchema)(tool2.parameters).jsonSchema
+ inputSchema: (0, import_provider_utils17.asSchema)(tool2.inputSchema).jsonSchema
+ };
+ case "provider-defined-client":
+ return {
+ type: "provider-defined-client",
+ name: name17,
+ id: tool2.id,
+ args: tool2.args
  };
- case "provider-defined":
+ case "provider-defined-server":
  return {
- type: "provider-defined",
+ type: "provider-defined-server",
  name: name17,
  id: tool2.id,
  args: tool2.args
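Note: tool definitions rename `parameters` to `inputSchema`, and provider-defined tools split into "provider-defined-client" / "provider-defined-server" variants. A sketch of a function tool under the new field name; the `tool` helper with description/inputSchema/execute mirrors the MCP hunk further below, while the zod schema and the weather example are illustrative.

import { tool } from "ai";
import { z } from "zod";

// `parameters` is now `inputSchema`; execute still receives the parsed input as its first argument.
const getWeather = tool({
  description: "Get the current weather for a city",
  inputSchema: z.object({ city: z.string() }),
  execute: async ({ city }) => ({ city, temperatureC: 21 }),
});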
@@ -5496,7 +5503,7 @@ async function parseToolCall({
  try {
  return await doParseToolCall({ toolCall, tools });
  } catch (error) {
- if (repairToolCall == null || !(NoSuchToolError.isInstance(error) || InvalidToolArgumentsError.isInstance(error))) {
+ if (repairToolCall == null || !(NoSuchToolError.isInstance(error) || InvalidToolInputError.isInstance(error))) {
  throw error;
  }
  let repairedToolCall = null;
@@ -5504,9 +5511,9 @@ async function parseToolCall({
  repairedToolCall = await repairToolCall({
  toolCall,
  tools,
- parameterSchema: ({ toolName }) => {
- const { parameters } = tools[toolName];
- return (0, import_provider_utils18.asSchema)(parameters).jsonSchema;
+ inputSchema: ({ toolName }) => {
+ const { inputSchema } = tools[toolName];
+ return (0, import_provider_utils18.asSchema)(inputSchema).jsonSchema;
  },
  system,
  messages,
@@ -5536,12 +5543,12 @@ async function doParseToolCall({
  availableTools: Object.keys(tools)
  });
  }
- const schema = (0, import_provider_utils18.asSchema)(tool2.parameters);
- const parseResult = toolCall.args.trim() === "" ? await (0, import_provider_utils18.safeValidateTypes)({ value: {}, schema }) : await (0, import_provider_utils18.safeParseJSON)({ text: toolCall.args, schema });
+ const schema = (0, import_provider_utils18.asSchema)(tool2.inputSchema);
+ const parseResult = toolCall.input.trim() === "" ? await (0, import_provider_utils18.safeValidateTypes)({ value: {}, schema }) : await (0, import_provider_utils18.safeParseJSON)({ text: toolCall.input, schema });
  if (parseResult.success === false) {
- throw new InvalidToolArgumentsError({
+ throw new InvalidToolInputError({
  toolName,
- toolArgs: toolCall.args,
+ toolInput: toolCall.input,
  cause: parseResult.error
  });
  }
@@ -5549,7 +5556,7 @@ async function doParseToolCall({
  type: "tool-call",
  toolCallId: toolCall.toolCallId,
  toolName,
- args: parseResult == null ? void 0 : parseResult.value
+ input: parseResult.value
  };
  }
@@ -5652,15 +5659,15 @@ function toResponseMessages({
  type: "tool-result",
  toolCallId: toolResult.toolCallId,
  toolName: toolResult.toolName,
- result: tool2.experimental_toToolResultContent(toolResult.result),
+ output: tool2.experimental_toToolResultContent(toolResult.output),
  experimental_content: tool2.experimental_toToolResultContent(
- toolResult.result
+ toolResult.output
  )
  } : {
  type: "tool-result",
  toolCallId: toolResult.toolCallId,
  toolName: toolResult.toolName,
- result: toolResult.result
+ output: toolResult.output
  };
  });
  if (toolResultContent.length > 0) {
@@ -5966,11 +5973,11 @@ async function executeTools({
  abortSignal
  }) {
  const toolResults = await Promise.all(
- toolCalls.map(async ({ toolCallId, toolName, args }) => {
+ toolCalls.map(async ({ toolCallId, toolName, input }) => {
  const tool2 = tools[toolName];
- if ((tool2 == null ? void 0 : tool2.onArgsAvailable) != null) {
- await tool2.onArgsAvailable({
- args,
+ if ((tool2 == null ? void 0 : tool2.onInputAvailable) != null) {
+ await tool2.onInputAvailable({
+ input,
  toolCallId,
  messages,
  abortSignal
@@ -5990,15 +5997,15 @@ async function executeTools({
  }),
  "ai.toolCall.name": toolName,
  "ai.toolCall.id": toolCallId,
- "ai.toolCall.args": {
- output: () => JSON.stringify(args)
+ "ai.toolCall.input": {
+ output: () => JSON.stringify(input)
  }
  }
  }),
  tracer,
  fn: async (span) => {
  try {
- const result2 = await tool2.execute(args, {
+ const result2 = await tool2.execute(input, {
  toolCallId,
  messages,
  abortSignal
@@ -6021,7 +6028,7 @@ async function executeTools({
  throw new ToolExecutionError({
  toolCallId,
  toolName,
- toolArgs: args,
+ toolInput: input,
  cause: error
  });
  }
@@ -6031,8 +6038,8 @@ async function executeTools({
  type: "tool-result",
  toolCallId,
  toolName,
- args,
- result
+ input,
+ output: result
  };
  })
  );
@@ -6122,7 +6129,7 @@ function asToolCalls(content) {
  toolCallType: toolCall.toolCallType,
  toolCallId: toolCall.toolCallId,
  toolName: toolCall.toolName,
- args: toolCall.args
+ input: toolCall.input
  }));
  }
@@ -6346,7 +6353,7 @@ function runToolsTransformation({
  type: "tool-call-delta",
  toolCallId: chunk.toolCallId,
  toolName: chunk.toolName,
- argsTextDelta: chunk.argsTextDelta
+ inputTextDelta: chunk.inputTextDelta
  });
  break;
  }
@@ -6361,9 +6368,9 @@ function runToolsTransformation({
  });
  controller.enqueue(toolCall);
  const tool2 = tools[toolCall.toolName];
- if (tool2.onArgsAvailable != null) {
- await tool2.onArgsAvailable({
- args: toolCall.args,
+ if (tool2.onInputAvailable != null) {
+ await tool2.onInputAvailable({
+ input: toolCall.input,
  toolCallId: toolCall.toolCallId,
  messages,
  abortSignal
@@ -6383,22 +6390,22 @@ function runToolsTransformation({
  }),
  "ai.toolCall.name": toolCall.toolName,
  "ai.toolCall.id": toolCall.toolCallId,
- "ai.toolCall.args": {
- output: () => JSON.stringify(toolCall.args)
+ "ai.toolCall.input": {
+ output: () => JSON.stringify(toolCall.input)
  }
  }
  }),
  tracer,
- fn: async (span) => tool2.execute(toolCall.args, {
+ fn: async (span) => tool2.execute(toolCall.input, {
  toolCallId: toolCall.toolCallId,
  messages,
  abortSignal
  }).then(
- (result) => {
+ (output) => {
  toolResultsStreamController.enqueue({
  ...toolCall,
  type: "tool-result",
- result
+ output
  });
  outstandingToolResults.delete(toolExecutionId);
  attemptClose();
@@ -6407,8 +6414,8 @@ function runToolsTransformation({
  selectTelemetryAttributes({
  telemetry,
  attributes: {
- "ai.toolCall.result": {
- output: () => JSON.stringify(result)
+ "ai.toolCall.output": {
+ output: () => JSON.stringify(output)
  }
  }
  })
@@ -6422,7 +6429,7 @@ function runToolsTransformation({
  error: new ToolExecutionError({
  toolCallId: toolCall.toolCallId,
  toolName: toolCall.toolName,
- toolArgs: toolCall.args,
+ toolInput: toolCall.input,
  cause: error
  })
  });
@@ -7074,8 +7081,8 @@ var DefaultStreamTextResult = class {
  }
  case "tool-call-streaming-start": {
  const tool2 = tools == null ? void 0 : tools[chunk.toolName];
- if ((tool2 == null ? void 0 : tool2.onArgsStreamingStart) != null) {
- await tool2.onArgsStreamingStart({
+ if ((tool2 == null ? void 0 : tool2.onInputStart) != null) {
+ await tool2.onInputStart({
  toolCallId: chunk.toolCallId,
  messages: stepInputMessages,
  abortSignal
@@ -7086,9 +7093,9 @@ var DefaultStreamTextResult = class {
  }
  case "tool-call-delta": {
  const tool2 = tools == null ? void 0 : tools[chunk.toolName];
- if ((tool2 == null ? void 0 : tool2.onArgsStreamingDelta) != null) {
- await tool2.onArgsStreamingDelta({
- argsTextDelta: chunk.argsTextDelta,
+ if ((tool2 == null ? void 0 : tool2.onInputDelta) != null) {
+ await tool2.onInputDelta({
+ inputTextDelta: chunk.inputTextDelta,
  toolCallId: chunk.toolCallId,
  messages: stepInputMessages,
  abortSignal
@@ -7403,7 +7410,7 @@ var DefaultStreamTextResult = class {
  }
  case "tool-call-streaming-start": {
  controller.enqueue({
- type: "tool-call-streaming-start",
+ type: "tool-input-start",
  toolCallId: part.toolCallId,
  toolName: part.toolName
  });
@@ -7411,26 +7418,26 @@ var DefaultStreamTextResult = class {
  }
  case "tool-call-delta": {
  controller.enqueue({
- type: "tool-call-delta",
+ type: "tool-input-delta",
  toolCallId: part.toolCallId,
- argsTextDelta: part.argsTextDelta
+ inputTextDelta: part.inputTextDelta
  });
  break;
  }
  case "tool-call": {
  controller.enqueue({
- type: "tool-call",
+ type: "tool-input-available",
  toolCallId: part.toolCallId,
  toolName: part.toolName,
- args: part.args
+ input: part.input
  });
  break;
  }
  case "tool-result": {
  controller.enqueue({
- type: "tool-result",
+ type: "tool-output-available",
  toolCallId: part.toolCallId,
- result: part.result
+ output: part.output
  });
  break;
  }
@@ -8113,7 +8120,7 @@ var SseMCPTransport = class {
  (_b = this.onerror) == null ? void 0 : _b.call(this, error);
  return reject(error);
  }
- const stream = response.body.pipeThrough(new TextDecoderStream()).pipeThrough((0, import_provider_utils24.createEventSourceParserStream)());
+ const stream = response.body.pipeThrough(new TextDecoderStream()).pipeThrough(new import_provider_utils24.EventSourceParserStream());
  const reader = stream.getReader();
  const processEvents = async () => {
  var _a18, _b2, _c2;
@@ -8437,15 +8444,14 @@ var MCPClient = class {
  if (schemas !== "automatic" && !(name17 in schemas)) {
  continue;
  }
- const parameters = schemas === "automatic" ? (0, import_provider_utils25.jsonSchema)({
- ...inputSchema,
- properties: (_a17 = inputSchema.properties) != null ? _a17 : {},
- additionalProperties: false
- }) : schemas[name17].parameters;
  const self = this;
  const toolWithExecute = tool({
  description,
- parameters,
+ inputSchema: schemas === "automatic" ? (0, import_provider_utils25.jsonSchema)({
+ ...inputSchema,
+ properties: (_a17 = inputSchema.properties) != null ? _a17 : {},
+ additionalProperties: false
+ }) : schemas[name17].inputSchema,
  execute: async (args, options) => {
  var _a18;
  (_a18 = options == null ? void 0 : options.abortSignal) == null ? void 0 : _a18.throwIfAborted();
@@ -8571,14 +8577,13 @@ var DefaultTranscriptionResult = class {
  DefaultChatTransport,
  DownloadError,
  EmptyResponseBodyError,
- GLOBAL_DEFAULT_PROVIDER,
  InvalidArgumentError,
  InvalidDataContentError,
  InvalidMessageRoleError,
  InvalidPromptError,
  InvalidResponseDataError,
  InvalidStreamPartError,
- InvalidToolArgumentsError,
+ InvalidToolInputError,
  JSONParseError,
  JsonToSseTransformStream,
  LoadAPIKeyError,
@@ -8598,6 +8603,7 @@ var DefaultTranscriptionResult = class {
  ToolCallRepairError,
  ToolExecutionError,
  TypeValidationError,
+ UI_MESSAGE_STREAM_HEADERS,
  UnsupportedFunctionalityError,
  asSchema,
  assistantModelMessageSchema,