ai 5.0.0-alpha.11 → 5.0.0-alpha.13

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
package/dist/index.mjs CHANGED
@@ -455,42 +455,45 @@ import { parseJsonEventStream } from "@ai-sdk/provider-utils";
  // src/ui-message-stream/ui-message-stream-parts.ts
  import { z } from "zod";
  var uiMessageStreamPartSchema = z.union([
- z.object({
+ z.strictObject({
  type: z.literal("text"),
  text: z.string()
  }),
- z.object({
+ z.strictObject({
  type: z.literal("error"),
  errorText: z.string()
  }),
- z.object({
+ z.strictObject({
  type: z.literal("tool-call-streaming-start"),
  toolCallId: z.string(),
  toolName: z.string()
  }),
- z.object({
+ z.strictObject({
  type: z.literal("tool-call-delta"),
  toolCallId: z.string(),
  argsTextDelta: z.string()
  }),
- z.object({
+ z.strictObject({
  type: z.literal("tool-call"),
  toolCallId: z.string(),
  toolName: z.string(),
  args: z.unknown()
  }),
- z.object({
+ z.strictObject({
  type: z.literal("tool-result"),
  toolCallId: z.string(),
  result: z.unknown(),
  providerMetadata: z.any().optional()
  }),
- z.object({
+ z.strictObject({
  type: z.literal("reasoning"),
  text: z.string(),
  providerMetadata: z.record(z.any()).optional()
  }),
- z.object({
+ z.strictObject({
+ type: z.literal("reasoning-part-finish")
+ }),
+ z.strictObject({
  type: z.literal("source-url"),
  sourceId: z.string(),
  url: z.string(),
@@ -498,7 +501,7 @@ var uiMessageStreamPartSchema = z.union([
  providerMetadata: z.any().optional()
  // Use z.any() for generic metadata
  }),
- z.object({
+ z.strictObject({
  type: z.literal("source-document"),
  sourceId: z.string(),
  mediaType: z.string(),
@@ -507,39 +510,34 @@ var uiMessageStreamPartSchema = z.union([
  providerMetadata: z.any().optional()
  // Use z.any() for generic metadata
  }),
- z.object({
+ z.strictObject({
  type: z.literal("file"),
  url: z.string(),
  mediaType: z.string()
  }),
- z.object({
+ z.strictObject({
  type: z.string().startsWith("data-"),
  id: z.string().optional(),
  data: z.unknown()
  }),
- z.object({
- type: z.literal("metadata"),
- value: z.object({ metadata: z.unknown() })
+ z.strictObject({
+ type: z.literal("start-step")
  }),
- z.object({
- type: z.literal("start-step"),
- metadata: z.unknown().optional()
+ z.strictObject({
+ type: z.literal("finish-step")
  }),
- z.object({
- type: z.literal("finish-step"),
- metadata: z.unknown().optional()
- }),
- z.object({
+ z.strictObject({
  type: z.literal("start"),
  messageId: z.string().optional(),
- metadata: z.unknown().optional()
+ messageMetadata: z.unknown().optional()
  }),
- z.object({
+ z.strictObject({
  type: z.literal("finish"),
- metadata: z.unknown().optional()
+ messageMetadata: z.unknown().optional()
  }),
- z.object({
- type: z.literal("reasoning-part-finish")
+ z.strictObject({
+ type: z.literal("message-metadata"),
+ messageMetadata: z.unknown()
  })
  ]);
  function isDataUIMessageStreamPart(part) {
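Note on the hunks above: every stream-part schema moved from `z.object` to `z.strictObject`, so unknown keys in an incoming UI message stream part now fail validation instead of being silently stripped, and the old `metadata` / `start-step` / `finish-step` metadata fields give way to dedicated `messageMetadata` fields plus a new `message-metadata` part. A minimal stand-alone sketch of the `strictObject` difference, assuming zod; the schemas below are illustrative and not package exports:

```ts
import { z } from "zod";

// Hypothetical schemas mirroring the "text" part above; not exported by the package.
const loosePart = z.object({ type: z.literal("text"), text: z.string() });
const strictPart = z.strictObject({ type: z.literal("text"), text: z.string() });

const chunk = { type: "text", text: "hi", extra: 123 };

console.log(loosePart.safeParse(chunk).success);  // true: "extra" is silently stripped
console.log(strictPart.safeParse(chunk).success); // false: unrecognized keys are rejected
```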
@@ -729,6 +727,137 @@ var SerialJobExecutor = class {
  }
  };

+ // src/ui/convert-file-list-to-file-ui-parts.ts
+ async function convertFileListToFileUIParts(files) {
+ if (files == null) {
+ return [];
+ }
+ if (!globalThis.FileList || !(files instanceof globalThis.FileList)) {
+ throw new Error("FileList is not supported in the current environment");
+ }
+ return Promise.all(
+ Array.from(files).map(async (file) => {
+ const { name: name17, type } = file;
+ const dataUrl = await new Promise((resolve, reject) => {
+ const reader = new FileReader();
+ reader.onload = (readerEvent) => {
+ var _a17;
+ resolve((_a17 = readerEvent.target) == null ? void 0 : _a17.result);
+ };
+ reader.onerror = (error) => reject(error);
+ reader.readAsDataURL(file);
+ });
+ return {
+ type: "file",
+ mediaType: type,
+ filename: name17,
+ url: dataUrl
+ };
+ })
+ );
+ }
+
+ // src/ui/default-chat-transport.ts
+ import {
+ parseJsonEventStream as parseJsonEventStream2
+ } from "@ai-sdk/provider-utils";
+ var getOriginalFetch2 = () => fetch;
+ async function fetchUIMessageStream({
+ api,
+ body,
+ credentials,
+ headers,
+ abortSignal,
+ fetch: fetch2 = getOriginalFetch2(),
+ requestType = "generate"
+ }) {
+ var _a17;
+ const response = requestType === "resume" ? await fetch2(`${api}?id=${body.id}`, {
+ method: "GET",
+ headers: {
+ "Content-Type": "application/json",
+ ...headers
+ },
+ signal: abortSignal,
+ credentials
+ }) : await fetch2(api, {
+ method: "POST",
+ body: JSON.stringify(body),
+ headers: {
+ "Content-Type": "application/json",
+ ...headers
+ },
+ signal: abortSignal,
+ credentials
+ });
+ if (!response.ok) {
+ throw new Error(
+ (_a17 = await response.text()) != null ? _a17 : "Failed to fetch the chat response."
+ );
+ }
+ if (!response.body) {
+ throw new Error("The response body is empty.");
+ }
+ return parseJsonEventStream2({
+ stream: response.body,
+ schema: uiMessageStreamPartSchema
+ }).pipeThrough(
+ new TransformStream({
+ async transform(part, controller) {
+ if (!part.success) {
+ throw part.error;
+ }
+ controller.enqueue(part.value);
+ }
+ })
+ );
+ }
+ var DefaultChatTransport = class {
+ constructor({
+ api = "/api/chat",
+ credentials,
+ headers,
+ body,
+ fetch: fetch2,
+ prepareRequest
+ } = {}) {
+ this.api = api;
+ this.credentials = credentials;
+ this.headers = headers;
+ this.body = body;
+ this.fetch = fetch2;
+ this.prepareRequest = prepareRequest;
+ }
+ submitMessages({
+ chatId,
+ messages,
+ abortSignal,
+ metadata,
+ headers,
+ body,
+ requestType
+ }) {
+ var _a17, _b;
+ const preparedRequest = (_a17 = this.prepareRequest) == null ? void 0 : _a17.call(this, {
+ id: chatId,
+ messages,
+ body: { ...this.body, ...body },
+ headers: { ...this.headers, ...headers },
+ credentials: this.credentials,
+ requestMetadata: metadata
+ });
+ return fetchUIMessageStream({
+ api: this.api,
+ body: (preparedRequest == null ? void 0 : preparedRequest.body) !== void 0 ? preparedRequest.body : { ...this.body, ...body, id: chatId, messages },
+ headers: (preparedRequest == null ? void 0 : preparedRequest.headers) !== void 0 ? preparedRequest.headers : { ...this.headers, ...headers },
+ credentials: (_b = preparedRequest == null ? void 0 : preparedRequest.credentials) != null ? _b : this.credentials,
+ abortSignal,
+ fetch: this.fetch,
+ requestType
+ });
+ }
+ };
+
  // src/ui/process-ui-message-stream.ts
  import {
  validateTypes
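The `convertFileListToFileUIParts` and `DefaultChatTransport` blocks added here are moved up from later in the bundle (their old locations show up as deletions further down); the logic itself is unchanged. A rough usage sketch of the transport, assuming it is exported from the package root in this alpha and that the option names match the constructor fields visible above (`userId` and the custom header are invented illustration values):

```ts
import { DefaultChatTransport } from "ai";

const transport = new DefaultChatTransport({
  api: "/api/chat",
  headers: { "x-example-tenant": "demo" },
  prepareRequest: ({ id, messages, body, headers, credentials }) => ({
    // Returning a body here replaces the default `{ ...body, id, messages }` payload.
    body: { id, messages, userId: "user-123" },
    headers,
    credentials,
  }),
});
```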
@@ -1104,27 +1233,26 @@ async function parsePartialJson(jsonText) {
  return { value: void 0, state: "failed-parse" };
  }

- // src/ui/get-tool-invocations.ts
- function getToolInvocations(message) {
- return message.parts.filter(
- (part) => part.type === "tool-invocation"
- ).map((part) => part.toolInvocation);
+ // src/ui/ui-messages.ts
+ function isToolUIPart(part) {
+ return part.type.startsWith("tool-");
+ }
+ function getToolName(part) {
+ return part.type.split("-")[1];
  }

  // src/ui/process-ui-message-stream.ts
  function createStreamingUIMessageState({
  lastMessage,
- newMessageId = ""
- } = {}) {
- const isContinuation = (lastMessage == null ? void 0 : lastMessage.role) === "assistant";
- const message = isContinuation ? lastMessage : {
- id: newMessageId,
- metadata: {},
- role: "assistant",
- parts: []
- };
+ messageId
+ }) {
  return {
- message,
+ message: (lastMessage == null ? void 0 : lastMessage.role) === "assistant" ? lastMessage : {
+ id: messageId,
+ metadata: void 0,
+ role: "assistant",
+ parts: []
+ },
  activeTextPart: void 0,
  activeReasoningPart: void 0,
  partialToolCalls: {}
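The helpers above replace the old `tool-invocation` wrapper part: tool calls are now flat message parts whose `type` is `tool-${toolName}`, with `toolCallId`, `state`, `args`, and `result` directly on the part, and `isToolUIPart` / `getToolName` (both added to the package exports at the bottom of this diff) are the way to detect and unwrap them. A small sketch, with the part shape inferred from the stream-processing code below:

```ts
import { getToolName, isToolUIPart } from "ai";

// Shape inferred from this diff; the cast is used because the concrete
// UIMessagePart type of this alpha is not reproduced here.
const part = {
  type: "tool-getWeather",
  toolCallId: "call_1",
  state: "result",
  args: { city: "Berlin" },
  result: { temperature: 21 },
} as any;

if (isToolUIPart(part)) {
  console.log(getToolName(part)); // "getWeather"
  console.log(part.state);        // "partial-call" | "call" | "result"
}
```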
@@ -1141,16 +1269,21 @@ function processUIMessageStream({
  new TransformStream({
  async transform(part, controller) {
  await runUpdateMessageJob(async ({ state, write }) => {
- function updateToolInvocationPart(toolCallId, invocation) {
+ function updateToolInvocationPart(options) {
  const part2 = state.message.parts.find(
- (part3) => isToolInvocationUIPart(part3) && part3.toolInvocation.toolCallId === toolCallId
+ (part3) => isToolUIPart(part3) && part3.toolCallId === options.toolCallId
  );
  if (part2 != null) {
- part2.toolInvocation = invocation;
+ part2.state = options.state;
+ part2.args = options.args;
+ part2.result = options.result;
  } else {
  state.message.parts.push({
- type: "tool-invocation",
- toolInvocation: invocation
+ type: `tool-${options.toolName}`,
+ toolCallId: options.toolCallId,
+ state: options.state,
+ args: options.args,
+ result: options.result
  });
  }
  }
@@ -1234,16 +1367,16 @@ function processUIMessageStream({
  break;
  }
  case "tool-call-streaming-start": {
- const toolInvocations = getToolInvocations(state.message);
+ const toolInvocations = state.message.parts.filter(isToolUIPart);
  state.partialToolCalls[part.toolCallId] = {
  text: "",
  toolName: part.toolName,
  index: toolInvocations.length
  };
- updateToolInvocationPart(part.toolCallId, {
- state: "partial-call",
+ updateToolInvocationPart({
  toolCallId: part.toolCallId,
  toolName: part.toolName,
+ state: "partial-call",
  args: void 0
  });
  write();
@@ -1255,20 +1388,20 @@ function processUIMessageStream({
  const { value: partialArgs } = await parsePartialJson(
  partialToolCall.text
  );
- updateToolInvocationPart(part.toolCallId, {
- state: "partial-call",
+ updateToolInvocationPart({
  toolCallId: part.toolCallId,
  toolName: partialToolCall.toolName,
+ state: "partial-call",
  args: partialArgs
  });
  write();
  break;
  }
  case "tool-call": {
- updateToolInvocationPart(part.toolCallId, {
- state: "call",
+ updateToolInvocationPart({
  toolCallId: part.toolCallId,
  toolName: part.toolName,
+ state: "call",
  args: part.args
  });
  write();
@@ -1277,10 +1410,10 @@ function processUIMessageStream({
  toolCall: part
  });
  if (result != null) {
- updateToolInvocationPart(part.toolCallId, {
- state: "result",
+ updateToolInvocationPart({
  toolCallId: part.toolCallId,
  toolName: part.toolName,
+ state: "result",
  args: part.args,
  result
  });
@@ -1290,7 +1423,7 @@ function processUIMessageStream({
  break;
  }
  case "tool-result": {
- const toolInvocations = getToolInvocations(state.message);
+ const toolInvocations = state.message.parts.filter(isToolUIPart);
  if (toolInvocations == null) {
  throw new Error("tool_result must be preceded by a tool_call");
  }
@@ -1302,9 +1435,14 @@ function processUIMessageStream({
  "tool_result must be preceded by a tool_call with the same toolCallId"
  );
  }
- updateToolInvocationPart(part.toolCallId, {
- ...toolInvocations[toolInvocationIndex],
+ const toolName = getToolName(
+ toolInvocations[toolInvocationIndex]
+ );
+ updateToolInvocationPart({
+ toolCallId: part.toolCallId,
+ toolName,
  state: "result",
+ args: toolInvocations[toolInvocationIndex].args,
  result: part.result
  });
  write();
@@ -1312,39 +1450,33 @@ function processUIMessageStream({
  }
  case "start-step": {
  state.message.parts.push({ type: "step-start" });
- await updateMessageMetadata(part.metadata);
- write();
  break;
  }
  case "finish-step": {
  state.activeTextPart = void 0;
  state.activeReasoningPart = void 0;
- await updateMessageMetadata(part.metadata);
- if (part.metadata != null) {
- write();
- }
  break;
  }
  case "start": {
  if (part.messageId != null) {
  state.message.id = part.messageId;
  }
- await updateMessageMetadata(part.metadata);
- if (part.messageId != null || part.metadata != null) {
+ await updateMessageMetadata(part.messageMetadata);
+ if (part.messageId != null || part.messageMetadata != null) {
  write();
  }
  break;
  }
  case "finish": {
- await updateMessageMetadata(part.metadata);
- if (part.metadata != null) {
+ await updateMessageMetadata(part.messageMetadata);
+ if (part.messageMetadata != null) {
  write();
  }
  break;
  }
- case "metadata": {
- await updateMessageMetadata(part.metadata);
- if (part.metadata != null) {
+ case "message-metadata": {
+ await updateMessageMetadata(part.messageMetadata);
+ if (part.messageMetadata != null) {
  write();
  }
  break;
@@ -1372,9 +1504,6 @@ function processUIMessageStream({
  })
  );
  }
- function isToolInvocationUIPart(part) {
- return part.type === "tool-invocation";
- }
  function isObject(value) {
  return typeof value === "object" && value !== null;
  }
@@ -1409,139 +1538,8 @@ function isAssistantMessageWithCompletedToolCalls(message) {
  const lastStepStartIndex = message.parts.reduce((lastIndex, part, index) => {
  return part.type === "step-start" ? index : lastIndex;
  }, -1);
- const lastStepToolInvocations = message.parts.slice(lastStepStartIndex + 1).filter((part) => part.type === "tool-invocation");
- return lastStepToolInvocations.length > 0 && lastStepToolInvocations.every((part) => "result" in part.toolInvocation);
- }
-
- // src/ui/default-chat-transport.ts
- import {
- parseJsonEventStream as parseJsonEventStream2
- } from "@ai-sdk/provider-utils";
- var getOriginalFetch2 = () => fetch;
- async function fetchUIMessageStream({
- api,
- body,
- credentials,
- headers,
- abortSignal,
- fetch: fetch2 = getOriginalFetch2(),
- requestType = "generate"
- }) {
- var _a17;
- const response = requestType === "resume" ? await fetch2(`${api}?id=${body.id}`, {
- method: "GET",
- headers: {
- "Content-Type": "application/json",
- ...headers
- },
- signal: abortSignal,
- credentials
- }) : await fetch2(api, {
- method: "POST",
- body: JSON.stringify(body),
- headers: {
- "Content-Type": "application/json",
- ...headers
- },
- signal: abortSignal,
- credentials
- });
- if (!response.ok) {
- throw new Error(
- (_a17 = await response.text()) != null ? _a17 : "Failed to fetch the chat response."
- );
- }
- if (!response.body) {
- throw new Error("The response body is empty.");
- }
- return parseJsonEventStream2({
- stream: response.body,
- schema: uiMessageStreamPartSchema
- }).pipeThrough(
- new TransformStream({
- async transform(part, controller) {
- if (!part.success) {
- throw part.error;
- }
- controller.enqueue(part.value);
- }
- })
- );
- }
- var DefaultChatTransport = class {
- constructor({
- api = "/api/chat",
- credentials,
- headers,
- body,
- fetch: fetch2,
- prepareRequest
- } = {}) {
- this.api = api;
- this.credentials = credentials;
- this.headers = headers;
- this.body = body;
- this.fetch = fetch2;
- this.prepareRequest = prepareRequest;
- }
- submitMessages({
- chatId,
- messages,
- abortSignal,
- metadata,
- headers,
- body,
- requestType
- }) {
- var _a17, _b;
- const preparedRequest = (_a17 = this.prepareRequest) == null ? void 0 : _a17.call(this, {
- id: chatId,
- messages,
- body: { ...this.body, ...body },
- headers: { ...this.headers, ...headers },
- credentials: this.credentials,
- requestMetadata: metadata
- });
- return fetchUIMessageStream({
- api: this.api,
- body: (preparedRequest == null ? void 0 : preparedRequest.body) !== void 0 ? preparedRequest.body : { ...this.body, ...body, id: chatId, messages },
- headers: (preparedRequest == null ? void 0 : preparedRequest.headers) !== void 0 ? preparedRequest.headers : { ...this.headers, ...headers },
- credentials: (_b = preparedRequest == null ? void 0 : preparedRequest.credentials) != null ? _b : this.credentials,
- abortSignal,
- fetch: this.fetch,
- requestType
- });
- }
- };
-
- // src/ui/convert-file-list-to-file-ui-parts.ts
- async function convertFileListToFileUIParts(files) {
- if (files == null) {
- return [];
- }
- if (!globalThis.FileList || !(files instanceof globalThis.FileList)) {
- throw new Error("FileList is not supported in the current environment");
- }
- return Promise.all(
- Array.from(files).map(async (file) => {
- const { name: name17, type } = file;
- const dataUrl = await new Promise((resolve, reject) => {
- const reader = new FileReader();
- reader.onload = (readerEvent) => {
- var _a17;
- resolve((_a17 = readerEvent.target) == null ? void 0 : _a17.result);
- };
- reader.onerror = (error) => reject(error);
- reader.readAsDataURL(file);
- });
- return {
- type: "file",
- mediaType: type,
- filename: name17,
- url: dataUrl
- };
- })
- );
+ const lastStepToolInvocations = message.parts.slice(lastStepStartIndex + 1).filter(isToolUIPart);
+ return lastStepToolInvocations.length > 0 && lastStepToolInvocations.every((part) => part.state === "result");
  }

  // src/ui/chat.ts
@@ -1558,7 +1556,6 @@ var AbstractChat = class {
  onToolCall,
  onFinish
  }) {
- this.subscribers = /* @__PURE__ */ new Set();
  this.activeResponse = void 0;
  this.jobExecutor = new SerialJobExecutor();
  this.removeAssistantResponse = () => {
@@ -1570,7 +1567,6 @@ var AbstractChat = class {
  throw new Error("Last message is not an assistant message");
  }
  this.state.popMessage();
- this.emit({ type: "messages-changed" });
  };
  /**
  * Append a user message to the chat list. This triggers the API call to fetch
@@ -1595,7 +1591,6 @@ var AbstractChat = class {
  id: (_a17 = uiMessage.id) != null ? _a17 : this.generateId(),
  role: (_b = uiMessage.role) != null ? _b : "user"
  });
- this.emit({ type: "messages-changed" });
  await this.triggerRequest({ requestType: "generate", ...options });
  };
  /**
@@ -1607,7 +1602,6 @@ var AbstractChat = class {
  }
  if (this.lastMessage.role === "assistant") {
  this.state.popMessage();
- this.emit({ type: "messages-changed" });
  }
  await this.triggerRequest({ requestType: "generate", ...options });
  };
@@ -1622,7 +1616,7 @@ var AbstractChat = class {
  result
  }) => {
  this.jobExecutor.run(async () => {
- updateToolCallResult({
+ updateToolResult({
  messages: this.state.messages,
  toolCallId,
  toolResult: result
@@ -1648,7 +1642,6 @@ var AbstractChat = class {
  return;
  if ((_a17 = this.activeResponse) == null ? void 0 : _a17.abortController) {
  this.activeResponse.abortController.abort();
- this.activeResponse.abortController = void 0;
  }
  };
  this.id = id;
@@ -1681,7 +1674,6 @@ var AbstractChat = class {
  return;
  this.state.status = status;
  this.state.error = error;
- this.emit({ type: "status-changed" });
  }
  get error() {
  return this.state.error;
@@ -1692,18 +1684,8 @@ var AbstractChat = class {
  get lastMessage() {
  return this.state.messages[this.state.messages.length - 1];
  }
- subscribe(subscriber) {
- this.subscribers.add(subscriber);
- return () => this.subscribers.delete(subscriber);
- }
  set messages(messages) {
  this.state.messages = messages;
- this.emit({ type: "messages-changed" });
- }
- emit(event) {
- for (const subscriber of this.subscribers) {
- subscriber.onChange(event);
- }
  }
  async triggerRequest({
  requestType,
@@ -1720,7 +1702,7 @@ var AbstractChat = class {
  const activeResponse = {
  state: createStreamingUIMessageState({
  lastMessage: this.state.snapshot(lastMessage),
- newMessageId: this.generateId()
+ messageId: this.generateId()
  }),
  abortController: new AbortController()
  };
@@ -1751,9 +1733,6 @@ var AbstractChat = class {
  } else {
  this.state.pushMessage(activeResponse.state.message);
  }
- this.emit({
- type: "messages-changed"
- });
  }
  })
  )
@@ -1800,23 +1779,20 @@ var AbstractChat = class {
  }
  }
  };
- function updateToolCallResult({
+ function updateToolResult({
  messages,
  toolCallId,
  toolResult: result
  }) {
  const lastMessage = messages[messages.length - 1];
- const invocationPart = lastMessage.parts.find(
- (part) => part.type === "tool-invocation" && part.toolInvocation.toolCallId === toolCallId
+ const toolPart = lastMessage.parts.find(
+ (part) => isToolUIPart(part) && part.toolCallId === toolCallId
  );
- if (invocationPart == null) {
+ if (toolPart == null) {
  return;
  }
- invocationPart.toolInvocation = {
- ...invocationPart.toolInvocation,
- state: "result",
- result
- };
+ toolPart.state = "result";
+ toolPart.result = result;
  }

  // src/ui/convert-to-model-messages.ts
@@ -1857,75 +1833,71 @@ function convertToModelMessages(messages, options) {
  }
  const content = [];
  for (const part of block) {
- switch (part.type) {
- case "text": {
- content.push(part);
- break;
- }
- case "file": {
- content.push({
- type: "file",
- mediaType: part.mediaType,
- data: part.url
- });
- break;
- }
- case "reasoning": {
- content.push({
- type: "reasoning",
- text: part.text,
- providerOptions: part.providerMetadata
+ if (part.type === "text") {
+ content.push(part);
+ } else if (part.type === "file") {
+ content.push({
+ type: "file",
+ mediaType: part.mediaType,
+ data: part.url
+ });
+ } else if (part.type === "reasoning") {
+ content.push({
+ type: "reasoning",
+ text: part.text,
+ providerOptions: part.providerMetadata
+ });
+ } else if (isToolUIPart(part)) {
+ const toolName = getToolName(part);
+ if (part.state === "partial-call") {
+ throw new MessageConversionError({
+ originalMessage: message,
+ message: `Partial tool call is not supported: ${part.toolCallId}`
  });
- break;
- }
- case "tool-invocation":
+ } else {
  content.push({
  type: "tool-call",
- toolCallId: part.toolInvocation.toolCallId,
- toolName: part.toolInvocation.toolName,
- args: part.toolInvocation.args
+ toolCallId: part.toolCallId,
+ toolName,
+ args: part.args
  });
- break;
- default: {
- const _exhaustiveCheck = part;
- throw new Error(`Unsupported part: ${_exhaustiveCheck}`);
  }
+ } else {
+ const _exhaustiveCheck = part;
+ throw new Error(`Unsupported part: ${_exhaustiveCheck}`);
  }
  }
  modelMessages.push({
  role: "assistant",
  content
  });
- const stepInvocations = block.filter(
- (part) => part.type === "tool-invocation"
- ).map((part) => part.toolInvocation);
- if (stepInvocations.length > 0) {
+ const toolParts = block.filter(isToolUIPart);
+ if (toolParts.length > 0) {
  modelMessages.push({
  role: "tool",
- content: stepInvocations.map(
- (toolInvocation) => {
- if (!("result" in toolInvocation)) {
- throw new MessageConversionError({
- originalMessage: message,
- message: "ToolInvocation must have a result: " + JSON.stringify(toolInvocation)
- });
- }
- const { toolCallId, toolName, result } = toolInvocation;
- const tool2 = tools[toolName];
- return (tool2 == null ? void 0 : tool2.experimental_toToolResultContent) != null ? {
- type: "tool-result",
- toolCallId,
- toolName,
- result: tool2.experimental_toToolResultContent(result),
- experimental_content: tool2.experimental_toToolResultContent(result)
- } : {
- type: "tool-result",
- toolCallId,
- toolName,
- result
- };
+ content: toolParts.map((toolPart) => {
+ if (toolPart.state !== "result") {
+ throw new MessageConversionError({
+ originalMessage: message,
+ message: "ToolInvocation must have a result: " + JSON.stringify(toolPart)
+ });
  }
- )
+ const toolName = getToolName(toolPart);
+ const { toolCallId, result } = toolPart;
+ const tool2 = tools[toolName];
+ return (tool2 == null ? void 0 : tool2.experimental_toToolResultContent) != null ? {
+ type: "tool-result",
+ toolCallId,
+ toolName,
+ result: tool2.experimental_toToolResultContent(result),
+ experimental_content: tool2.experimental_toToolResultContent(result)
+ } : {
+ type: "tool-result",
+ toolCallId,
+ toolName,
+ result
+ };
+ })
  });
  }
  block = [];
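`convertToModelMessages` now consumes the flat `tool-${toolName}` parts instead of `tool-invocation` wrappers. A sketch of the conversion for a completed tool call; the empty `tools` map simply exercises the plain `tool-result` branch above, and the message literal is cast because the alpha's UIMessage type is not reproduced here:

```ts
import { convertToModelMessages } from "ai";

const modelMessages = convertToModelMessages(
  [
    {
      id: "msg_1",
      role: "assistant",
      parts: [
        { type: "step-start" },
        {
          type: "tool-getWeather",
          toolCallId: "call_1",
          state: "result",
          args: { city: "Berlin" },
          result: { temperature: 21 },
        },
      ],
    } as any,
  ],
  { tools: {} },
);
// Expected result: an assistant message containing a "tool-call" content part,
// followed by a "tool" message carrying the matching "tool-result".
```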
@@ -1933,18 +1905,10 @@ function convertToModelMessages(messages, options) {
  var processBlock = processBlock2;
  let block = [];
  for (const part of message.parts) {
- switch (part.type) {
- case "text":
- case "reasoning":
- case "file":
- case "tool-invocation": {
- block.push(part);
- break;
- }
- case "step-start": {
- processBlock2();
- break;
- }
+ if (part.type === "text" || part.type === "reasoning" || part.type === "file" || isToolUIPart(part)) {
+ block.push(part);
+ } else if (part.type === "step-start") {
+ processBlock2();
  }
  }
  processBlock2();
@@ -2074,9 +2038,14 @@ var TextStreamChatTransport = class {
  }
  };

+ // src/ui-message-stream/create-ui-message-stream.ts
+ import {
+ generateId as generateIdFunc2
+ } from "@ai-sdk/provider-utils";
+
  // src/ui-message-stream/handle-ui-message-stream-finish.ts
  function handleUIMessageStreamFinish({
- newMessageId,
+ messageId,
  originalMessages = [],
  onFinish,
  stream
@@ -2084,19 +2053,30 @@ function handleUIMessageStreamFinish({
  if (onFinish == null) {
  return stream;
  }
- const lastMessage = originalMessages[originalMessages.length - 1];
- const isContinuation = (lastMessage == null ? void 0 : lastMessage.role) === "assistant";
- const messageId = isContinuation ? lastMessage.id : newMessageId;
+ const lastMessage = originalMessages == null ? void 0 : originalMessages[originalMessages.length - 1];
  const state = createStreamingUIMessageState({
- lastMessage: structuredClone(lastMessage),
- newMessageId: messageId
+ lastMessage: lastMessage ? structuredClone(lastMessage) : void 0,
+ messageId
+ // will be overridden by the stream
  });
  const runUpdateMessageJob = async (job) => {
  await job({ state, write: () => {
  } });
  };
  return processUIMessageStream({
- stream,
+ stream: stream.pipeThrough(
+ new TransformStream({
+ transform(chunk, controller) {
+ if (chunk.type === "start") {
+ const startChunk = chunk;
+ if (startChunk.messageId == null) {
+ startChunk.messageId = messageId;
+ }
+ }
+ controller.enqueue(chunk);
+ }
+ })
+ ),
  runUpdateMessageJob
  }).pipeThrough(
  new TransformStream({
@@ -2104,12 +2084,12 @@ function handleUIMessageStreamFinish({
  controller.enqueue(chunk);
  },
  flush() {
- const isContinuation2 = state.message.id === (lastMessage == null ? void 0 : lastMessage.id);
+ const isContinuation = state.message.id === (lastMessage == null ? void 0 : lastMessage.id);
  onFinish({
- isContinuation: isContinuation2,
+ isContinuation,
  responseMessage: state.message,
  messages: [
- ...isContinuation2 ? originalMessages.slice(0, -1) : originalMessages,
+ ...isContinuation ? originalMessages.slice(0, -1) : originalMessages,
  state.message
  ]
  });
@@ -2124,7 +2104,8 @@ function createUIMessageStream({
  onError = () => "An error occurred.",
  // mask error messages for safety by default
  originalMessages,
- onFinish
+ onFinish,
+ generateId: generateId3 = generateIdFunc2
  }) {
  let controller;
  const ongoingStreamPromises = [];
@@ -2156,7 +2137,10 @@ function createUIMessageStream({
  safeEnqueue(value);
  }
  })().catch((error) => {
- safeEnqueue({ type: "error", errorText: onError(error) });
+ safeEnqueue({
+ type: "error",
+ errorText: onError(error)
+ });
  })
  );
  },
@@ -2166,12 +2150,18 @@ function createUIMessageStream({
  if (result) {
  ongoingStreamPromises.push(
  result.catch((error) => {
- safeEnqueue({ type: "error", errorText: onError(error) });
+ safeEnqueue({
+ type: "error",
+ errorText: onError(error)
+ });
  })
  );
  }
  } catch (error) {
- safeEnqueue({ type: "error", errorText: onError(error) });
+ safeEnqueue({
+ type: "error",
+ errorText: onError(error)
+ });
  }
  const waitForStreams = new Promise(async (resolve) => {
  while (ongoingStreamPromises.length > 0) {
@@ -2187,22 +2177,12 @@ function createUIMessageStream({
  });
  return handleUIMessageStreamFinish({
  stream,
- newMessageId: "",
+ messageId: generateId3(),
  originalMessages,
  onFinish
  });
  }

- // src/ui-message-stream/ui-message-stream-headers.ts
- var uiMessageStreamHeaders = {
- "content-type": "text/event-stream",
- "cache-control": "no-cache",
- connection: "keep-alive",
- "x-vercel-ai-ui-message-stream": "v1",
- "x-accel-buffering": "no"
- // disable nginx buffering
- };
-
  // src/ui-message-stream/json-to-sse-transform-stream.ts
  var JsonToSseTransformStream = class extends TransformStream {
  constructor() {
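`createUIMessageStream` now accepts a `generateId` option and passes a freshly generated id to `handleUIMessageStreamFinish`, which stamps it onto any `start` part that arrives without a `messageId`. A stand-alone sketch of that defaulting step, mirroring the `TransformStream` added above (this is an illustration, not a package API):

```ts
// Minimal reimplementation of the id-defaulting transform for illustration.
type StartPart = { type: "start"; messageId?: string };
type StreamPart = StartPart | { type: string };

function withDefaultMessageId(messageId: string): TransformStream<StreamPart, StreamPart> {
  return new TransformStream({
    transform(chunk, controller) {
      if (chunk.type === "start" && (chunk as StartPart).messageId == null) {
        (chunk as StartPart).messageId = messageId; // fall back to the generated id
      }
      controller.enqueue(chunk);
    },
  });
}
```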
@@ -2219,6 +2199,16 @@ var JsonToSseTransformStream = class extends TransformStream {
  }
  };

+ // src/ui-message-stream/ui-message-stream-headers.ts
+ var uiMessageStreamHeaders = {
+ "content-type": "text/event-stream",
+ "cache-control": "no-cache",
+ connection: "keep-alive",
+ "x-vercel-ai-ui-message-stream": "v1",
+ "x-accel-buffering": "no"
+ // disable nginx buffering
+ };
+
  // src/ui-message-stream/create-ui-message-stream-response.ts
  function createUIMessageStreamResponse({
  status,
@@ -4885,7 +4875,8 @@ var DefaultStreamObjectResult = class {
  }),
  providerOptions,
  abortSignal,
- headers
+ headers,
+ includeRawChunks: false
  };
  const transformer = {
  transform: (chunk, controller) => {
@@ -6243,7 +6234,6 @@ import { generateId } from "@ai-sdk/provider-utils";
  function runToolsTransformation({
  tools,
  generatorStream,
- toolCallStreaming,
  tracer,
  telemetry,
  system,
@@ -6284,6 +6274,10 @@ function runToolsTransformation({
  controller.enqueue(chunk);
  break;
  }
+ case "raw": {
+ controller.enqueue(chunk);
+ break;
+ }
  case "file": {
  controller.enqueue({
  type: "file",
@@ -6295,22 +6289,20 @@ function runToolsTransformation({
  break;
  }
  case "tool-call-delta": {
- if (toolCallStreaming) {
- if (!activeToolCalls[chunk.toolCallId]) {
- controller.enqueue({
- type: "tool-call-streaming-start",
- toolCallId: chunk.toolCallId,
- toolName: chunk.toolName
- });
- activeToolCalls[chunk.toolCallId] = true;
- }
+ if (!activeToolCalls[chunk.toolCallId]) {
  controller.enqueue({
- type: "tool-call-delta",
+ type: "tool-call-streaming-start",
  toolCallId: chunk.toolCallId,
- toolName: chunk.toolName,
- argsTextDelta: chunk.argsTextDelta
+ toolName: chunk.toolName
  });
+ activeToolCalls[chunk.toolCallId] = true;
  }
+ controller.enqueue({
+ type: "tool-call-delta",
+ toolCallId: chunk.toolCallId,
+ toolName: chunk.toolName,
+ argsTextDelta: chunk.argsTextDelta
+ });
  break;
  }
  case "tool-call": {
@@ -6441,6 +6433,18 @@ function runToolsTransformation({
  });
  }

+ // src/ui-message-stream/get-response-ui-message-id.ts
+ function getResponseUIMessageId({
+ originalMessages,
+ responseMessageId
+ }) {
+ if (originalMessages == null) {
+ return void 0;
+ }
+ const lastMessage = originalMessages[originalMessages.length - 1];
+ return (lastMessage == null ? void 0 : lastMessage.role) === "assistant" ? lastMessage.id : typeof responseMessageId === "function" ? responseMessageId() : responseMessageId;
+ }
+
  // core/generate-text/stream-text.ts
  var originalGenerateId4 = createIdGenerator4({
  prefix: "aitxt",
@@ -6461,12 +6465,11 @@ function streamText({
  experimental_telemetry: telemetry,
  prepareStep,
  providerOptions,
- experimental_toolCallStreaming = false,
- toolCallStreaming = experimental_toolCallStreaming,
  experimental_activeTools,
  activeTools = experimental_activeTools,
  experimental_repairToolCall: repairToolCall,
  experimental_transform: transform,
+ includeRawChunks = false,
  onChunk,
  onError = ({ error }) => {
  console.error(error);
@@ -6492,7 +6495,6 @@ function streamText({
  messages,
  tools,
  toolChoice,
- toolCallStreaming,
  transforms: asArray(transform),
  activeTools,
  repairToolCall,
@@ -6500,6 +6502,7 @@ function streamText({
  output,
  providerOptions,
  prepareStep,
+ includeRawChunks,
  onChunk,
  onError,
  onFinish,
@@ -6570,7 +6573,6 @@ var DefaultStreamTextResult = class {
  messages,
  tools,
  toolChoice,
- toolCallStreaming,
  transforms,
  activeTools,
  repairToolCall,
@@ -6578,6 +6580,7 @@ var DefaultStreamTextResult = class {
  output,
  providerOptions,
  prepareStep,
+ includeRawChunks,
  now: now2,
  currentDate,
  generateId: generateId3,
@@ -6590,6 +6593,7 @@ var DefaultStreamTextResult = class {
  this._finishReason = new DelayedPromise();
  this._steps = new DelayedPromise();
  this.output = output;
+ this.includeRawChunks = includeRawChunks;
  this.generateId = generateId3;
  let stepFinish;
  let activeReasoningPart = void 0;
@@ -6605,7 +6609,7 @@ var DefaultStreamTextResult = class {
  async transform(chunk, controller) {
  controller.enqueue(chunk);
  const { part } = chunk;
- if (part.type === "text" || part.type === "reasoning" || part.type === "source" || part.type === "tool-call" || part.type === "tool-result" || part.type === "tool-call-streaming-start" || part.type === "tool-call-delta") {
+ if (part.type === "text" || part.type === "reasoning" || part.type === "source" || part.type === "tool-call" || part.type === "tool-result" || part.type === "tool-call-streaming-start" || part.type === "tool-call-delta" || part.type === "raw") {
  await (onChunk == null ? void 0 : onChunk({ chunk: part }));
  }
  if (part.type === "error") {
@@ -6797,6 +6801,7 @@ var DefaultStreamTextResult = class {
  usage
  }) {
  var _a17, _b, _c, _d;
+ const includeRawChunks2 = self.includeRawChunks;
  stepFinish = new DelayedPromise();
  const initialPrompt = await standardizePrompt({
  system,
@@ -6883,7 +6888,8 @@ var DefaultStreamTextResult = class {
  prompt: promptMessages,
  providerOptions,
  abortSignal,
- headers
+ headers,
+ includeRawChunks: includeRawChunks2
  })
  };
  }
@@ -6892,7 +6898,6 @@ var DefaultStreamTextResult = class {
  const streamWithToolResults = runToolsTransformation({
  tools,
  generatorStream: stream2,
- toolCallStreaming,
  tracer,
  telemetry,
  system,
@@ -7052,6 +7057,12 @@ var DefaultStreamTextResult = class {
  stepFinishReason = "error";
  break;
  }
+ case "raw": {
+ if (includeRawChunks2) {
+ controller.enqueue(chunk);
+ }
+ break;
+ }
  default: {
  const exhaustiveCheck = chunkType;
  throw new Error(`Unknown chunk type: ${exhaustiveCheck}`);
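Taken together, the stream-text hunks above remove the `toolCallStreaming` / `experimental_toolCallStreaming` flags (tool-call streaming-start and delta parts are now always emitted) and add an `includeRawChunks` option that forwards provider-native `raw` chunks to `onChunk` and the full stream. A usage sketch, assuming an `@ai-sdk/openai` provider of a matching alpha version and an illustrative model id:

```ts
import { openai } from "@ai-sdk/openai"; // assumed provider package/version
import { streamText } from "ai";

const result = streamText({
  model: openai("gpt-4o"),
  prompt: "What is the weather in Berlin?",
  includeRawChunks: true, // new option in this release
  onChunk: ({ chunk }) => {
    if (chunk.type === "raw") {
      console.log("raw provider chunk:", chunk);
    }
  },
});

for await (const textPart of result.textStream) {
  process.stdout.write(textPart);
}
```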
@@ -7272,23 +7283,24 @@ var DefaultStreamTextResult = class {
  );
  }
  toUIMessageStream({
- newMessageId,
- originalMessages = [],
+ originalMessages,
  onFinish,
  messageMetadata,
- sendReasoning = false,
+ sendReasoning = true,
  sendSources = false,
  sendStart = true,
  sendFinish = true,
  onError = () => "An error occurred."
  // mask error messages for safety by default
  } = {}) {
- const lastMessage = originalMessages[originalMessages.length - 1];
- const isContinuation = (lastMessage == null ? void 0 : lastMessage.role) === "assistant";
- const messageId = isContinuation ? lastMessage.id : newMessageId;
+ const responseMessageId = getResponseUIMessageId({
+ originalMessages,
+ responseMessageId: this.generateId
+ });
  const baseStream = this.fullStream.pipeThrough(
  new TransformStream({
  transform: async (part, controller) => {
+ const messageMetadataValue = messageMetadata == null ? void 0 : messageMetadata({ part });
  const partType = part.type;
  switch (partType) {
  case "text": {
@@ -7385,59 +7397,57 @@ var DefaultStreamTextResult = class {
  break;
  }
  case "start-step": {
- const metadata = messageMetadata == null ? void 0 : messageMetadata({ part });
- controller.enqueue({
- type: "start-step",
- metadata
- });
+ controller.enqueue({ type: "start-step" });
  break;
  }
  case "finish-step": {
- const metadata = messageMetadata == null ? void 0 : messageMetadata({ part });
- controller.enqueue({
- type: "finish-step",
- metadata
- });
+ controller.enqueue({ type: "finish-step" });
  break;
  }
  case "start": {
  if (sendStart) {
- const metadata = messageMetadata == null ? void 0 : messageMetadata({ part });
  controller.enqueue({
  type: "start",
- messageId,
- metadata
+ messageId: responseMessageId,
+ messageMetadata: messageMetadataValue
  });
  }
  break;
  }
  case "finish": {
  if (sendFinish) {
- const metadata = messageMetadata == null ? void 0 : messageMetadata({ part });
  controller.enqueue({
  type: "finish",
- metadata
+ messageMetadata: messageMetadataValue
  });
  }
  break;
  }
+ case "raw": {
+ break;
+ }
  default: {
  const exhaustiveCheck = partType;
  throw new Error(`Unknown chunk type: ${exhaustiveCheck}`);
  }
  }
+ if (messageMetadataValue != null && partType !== "start" && partType !== "finish") {
+ controller.enqueue({
+ type: "message-metadata",
+ messageMetadata: messageMetadataValue
+ });
+ }
  }
  })
  );
  return handleUIMessageStreamFinish({
  stream: baseStream,
- newMessageId: messageId != null ? messageId : this.generateId(),
+ messageId: responseMessageId != null ? responseMessageId : this.generateId(),
  originalMessages,
  onFinish
  });
  }
  pipeUIMessageStreamToResponse(response, {
- newMessageId,
  originalMessages,
  onFinish,
  messageMetadata,
@@ -7451,7 +7461,6 @@ var DefaultStreamTextResult = class {
  pipeUIMessageStreamToResponse({
  response,
  stream: this.toUIMessageStream({
- newMessageId,
  originalMessages,
  onFinish,
  messageMetadata,
@@ -7472,7 +7481,6 @@ var DefaultStreamTextResult = class {
  });
  }
  toUIMessageStreamResponse({
- newMessageId,
  originalMessages,
  onFinish,
  messageMetadata,
@@ -7485,7 +7493,6 @@ var DefaultStreamTextResult = class {
  } = {}) {
  return createUIMessageStreamResponse({
  stream: this.toUIMessageStream({
- newMessageId,
  originalMessages,
  onFinish,
  messageMetadata,
@@ -8582,9 +8589,10 @@ export {
  generateObject,
  generateText,
  getTextFromDataUrl,
- getToolInvocations,
+ getToolName,
  hasToolCall,
  isDeepEqualData,
+ isToolUIPart,
  jsonSchema2 as jsonSchema,
  modelMessageSchema,
  parsePartialJson,
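The `toUIMessageStream` / `toUIMessageStreamResponse` / `pipeUIMessageStreamToResponse` hunks drop the `newMessageId` option (the response message id now comes from `getResponseUIMessageId` / `generateId`), flip the `sendReasoning` default to `true`, and carry metadata via `messageMetadata` fields and `message-metadata` parts. A route-handler sketch under those assumptions; the Next.js-style handler, the model id, and the `createdAt` metadata field are all illustrative:

```ts
import { openai } from "@ai-sdk/openai"; // assumed provider package/version
import { convertToModelMessages, streamText } from "ai";

export async function POST(req: Request) {
  const { messages } = await req.json();

  const result = streamText({
    model: openai("gpt-4o"),
    messages: convertToModelMessages(messages),
  });

  return result.toUIMessageStreamResponse({
    originalMessages: messages,
    // newMessageId was removed; the id is derived via getResponseUIMessageId/generateId.
    // sendReasoning now defaults to true, so it no longer needs to be set explicitly.
    messageMetadata: ({ part }) =>
      part.type === "finish" ? { createdAt: Date.now() } : undefined,
  });
}
```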