ai 5.0.0-canary.23 → 5.0.0-canary.24

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
package/dist/index.js CHANGED
@@ -88,9 +88,11 @@ __export(src_exports, {
  generateText: () => generateText,
  getTextFromDataUrl: () => getTextFromDataUrl,
  getToolInvocations: () => getToolInvocations,
+ hasToolCall: () => hasToolCall,
  isAssistantMessageWithCompletedToolCalls: () => isAssistantMessageWithCompletedToolCalls,
  isDeepEqualData: () => isDeepEqualData,
  jsonSchema: () => import_provider_utils26.jsonSchema,
+ maxSteps: () => maxSteps,
  modelMessageSchema: () => modelMessageSchema,
  parsePartialJson: () => parsePartialJson,
  pipeTextStreamToResponse: () => pipeTextStreamToResponse,
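The two new export map entries surface the stop-condition helpers that appear later in this diff. A minimal consumer-side sketch (the tool name is invented for illustration):

```ts
// Importing the helpers newly exported by this canary release.
import { hasToolCall, maxSteps } from "ai";

// Both return predicates over { steps }; their implementations are in the
// stop-condition hunk further down.
const stopAfterFiveSteps = maxSteps(5);
const stopOnAnswerTool = hasToolCall("answer"); // "answer" is a placeholder tool name
```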
@@ -552,102 +554,84 @@ var import_provider_utils3 = require("@ai-sdk/provider-utils");
 
  // src/ui-message-stream/ui-message-stream-parts.ts
  var import_zod = require("zod");
- var toolCallSchema = import_zod.z.object({
- toolCallId: import_zod.z.string(),
- toolName: import_zod.z.string(),
- args: import_zod.z.unknown()
- });
- var toolResultValueSchema = import_zod.z.object({
- toolCallId: import_zod.z.string(),
- result: import_zod.z.unknown(),
- providerMetadata: import_zod.z.any().optional()
- });
- var sourceSchema = import_zod.z.object({
- type: import_zod.z.literal("source"),
- sourceType: import_zod.z.literal("url"),
- id: import_zod.z.string(),
- url: import_zod.z.string(),
- title: import_zod.z.string().optional(),
- providerMetadata: import_zod.z.any().optional()
- // Use z.any() for generic metadata
- });
- var uiMessageStreamPartSchema = import_zod.z.discriminatedUnion("type", [
+ var uiMessageStreamPartSchema = import_zod.z.union([
  import_zod.z.object({
  type: import_zod.z.literal("text"),
- value: import_zod.z.string()
+ text: import_zod.z.string()
  }),
  import_zod.z.object({
  type: import_zod.z.literal("error"),
- value: import_zod.z.string()
+ errorText: import_zod.z.string()
  }),
  import_zod.z.object({
- type: import_zod.z.literal("tool-call"),
- value: toolCallSchema
+ type: import_zod.z.literal("tool-call-streaming-start"),
+ toolCallId: import_zod.z.string(),
+ toolName: import_zod.z.string()
  }),
  import_zod.z.object({
- type: import_zod.z.literal("tool-result"),
- value: toolResultValueSchema
+ type: import_zod.z.literal("tool-call-delta"),
+ toolCallId: import_zod.z.string(),
+ argsTextDelta: import_zod.z.string()
  }),
  import_zod.z.object({
- type: import_zod.z.literal("tool-call-streaming-start"),
- value: import_zod.z.object({ toolCallId: import_zod.z.string(), toolName: import_zod.z.string() })
+ type: import_zod.z.literal("tool-call"),
+ toolCallId: import_zod.z.string(),
+ toolName: import_zod.z.string(),
+ args: import_zod.z.unknown()
  }),
  import_zod.z.object({
- type: import_zod.z.literal("tool-call-delta"),
- value: import_zod.z.object({ toolCallId: import_zod.z.string(), argsTextDelta: import_zod.z.string() })
+ type: import_zod.z.literal("tool-result"),
+ toolCallId: import_zod.z.string(),
+ result: import_zod.z.unknown(),
+ providerMetadata: import_zod.z.any().optional()
  }),
  import_zod.z.object({
  type: import_zod.z.literal("reasoning"),
- value: import_zod.z.object({
- text: import_zod.z.string(),
- providerMetadata: import_zod.z.record(import_zod.z.any()).optional()
- })
+ text: import_zod.z.string(),
+ providerMetadata: import_zod.z.record(import_zod.z.any()).optional()
  }),
  import_zod.z.object({
  type: import_zod.z.literal("source"),
- value: sourceSchema
+ sourceType: import_zod.z.literal("url"),
+ id: import_zod.z.string(),
+ url: import_zod.z.string(),
+ title: import_zod.z.string().optional(),
+ providerMetadata: import_zod.z.any().optional()
+ // Use z.any() for generic metadata
  }),
  import_zod.z.object({
  type: import_zod.z.literal("file"),
- value: import_zod.z.object({
- url: import_zod.z.string(),
- mediaType: import_zod.z.string()
- })
+ url: import_zod.z.string(),
+ mediaType: import_zod.z.string()
+ }),
+ import_zod.z.object({
+ type: import_zod.z.string().startsWith("data-"),
+ id: import_zod.z.string().optional(),
+ data: import_zod.z.unknown()
  }),
  import_zod.z.object({
  type: import_zod.z.literal("metadata"),
- value: import_zod.z.object({
- metadata: import_zod.z.unknown()
- })
+ value: import_zod.z.object({ metadata: import_zod.z.unknown() })
  }),
  import_zod.z.object({
  type: import_zod.z.literal("start-step"),
- value: import_zod.z.object({
- metadata: import_zod.z.unknown()
- })
+ metadata: import_zod.z.unknown().optional()
  }),
  import_zod.z.object({
  type: import_zod.z.literal("finish-step"),
- value: import_zod.z.object({
- metadata: import_zod.z.unknown()
- })
+ metadata: import_zod.z.unknown().optional()
  }),
  import_zod.z.object({
  type: import_zod.z.literal("start"),
- value: import_zod.z.object({
- messageId: import_zod.z.string().optional(),
- metadata: import_zod.z.unknown()
- })
+ messageId: import_zod.z.string().optional(),
+ metadata: import_zod.z.unknown().optional()
  }),
  import_zod.z.object({
  type: import_zod.z.literal("finish"),
- value: import_zod.z.object({
- metadata: import_zod.z.unknown()
- })
+ metadata: import_zod.z.unknown().optional()
  }),
  import_zod.z.object({
- type: import_zod.z.literal("reasoning-part-finish"),
- value: import_zod.z.null()
+ type: import_zod.z.literal("reasoning-part-finish")
  })
  ]);
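The rewritten schema drops the `{ type, value }` envelope: payload fields now sit directly on each stream part, `text` and `errorText` replace the generic `value` string, and a catch-all `data-*` part type is accepted. A rough before/after sketch of the wire shapes (field names follow the schema above; the concrete values are invented):

```ts
// canary.23 shape: payload nested under `value`
const oldTextPart = { type: "text", value: "Hello" };
const oldErrorPart = { type: "error", value: "boom" };

// canary.24 shape: flattened fields
const newTextPart = { type: "text", text: "Hello" };
const newErrorPart = { type: "error", errorText: "boom" };
const newToolCallPart = {
  type: "tool-call",
  toolCallId: "call_1",   // invented id
  toolName: "getWeather", // invented tool name
  args: { city: "Berlin" },
};
```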
 
@@ -1088,14 +1072,14 @@ function processUIMessageStream({
  }) {
  return stream.pipeThrough(
  new TransformStream({
- async transform(chunk, controller) {
+ async transform(part, controller) {
  await runUpdateMessageJob(async ({ state, write }) => {
  function updateToolInvocationPart(toolCallId, invocation) {
- const part = state.message.parts.find(
- (part2) => part2.type === "tool-invocation" && part2.toolInvocation.toolCallId === toolCallId
+ const part2 = state.message.parts.find(
+ (part3) => part3.type === "tool-invocation" && part3.toolInvocation.toolCallId === toolCallId
  );
- if (part != null) {
- part.toolInvocation = invocation;
+ if (part2 != null) {
+ part2.toolInvocation = invocation;
  } else {
  state.message.parts.push({
  type: "tool-invocation",
@@ -1115,17 +1099,16 @@ function processUIMessageStream({
  state.message.metadata = mergedMetadata;
  }
  }
- const { type, value } = chunk;
- switch (type) {
+ switch (part.type) {
  case "text": {
  if (state.activeTextPart == null) {
  state.activeTextPart = {
  type: "text",
- text: value
+ text: part.text
  };
  state.message.parts.push(state.activeTextPart);
  } else {
- state.activeTextPart.text += value;
+ state.activeTextPart.text += part.text;
  }
  write();
  break;
@@ -1134,13 +1117,13 @@ function processUIMessageStream({
  if (state.activeReasoningPart == null) {
  state.activeReasoningPart = {
  type: "reasoning",
- text: value.text,
- providerMetadata: value.providerMetadata
+ text: part.text,
+ providerMetadata: part.providerMetadata
  };
  state.message.parts.push(state.activeReasoningPart);
  } else {
- state.activeReasoningPart.text += value.text;
- state.activeReasoningPart.providerMetadata = value.providerMetadata;
+ state.activeReasoningPart.text += part.text;
+ state.activeReasoningPart.providerMetadata = part.providerMetadata;
  }
  write();
  break;
@@ -1154,8 +1137,8 @@ function processUIMessageStream({
  case "file": {
  state.message.parts.push({
  type: "file",
- mediaType: value.mediaType,
- url: value.url
+ mediaType: part.mediaType,
+ url: part.url
  });
  write();
  break;
@@ -1163,39 +1146,45 @@ function processUIMessageStream({
  case "source": {
  state.message.parts.push({
  type: "source",
- source: value
+ source: {
+ sourceType: "url",
+ id: part.id,
+ url: part.url,
+ title: part.title,
+ providerMetadata: part.providerMetadata
+ }
  });
  write();
  break;
  }
  case "tool-call-streaming-start": {
  const toolInvocations = getToolInvocations(state.message);
- state.partialToolCalls[value.toolCallId] = {
+ state.partialToolCalls[part.toolCallId] = {
  text: "",
  step: state.step,
- toolName: value.toolName,
+ toolName: part.toolName,
  index: toolInvocations.length
  };
- updateToolInvocationPart(value.toolCallId, {
+ updateToolInvocationPart(part.toolCallId, {
  state: "partial-call",
  step: state.step,
- toolCallId: value.toolCallId,
- toolName: value.toolName,
+ toolCallId: part.toolCallId,
+ toolName: part.toolName,
  args: void 0
  });
  write();
  break;
  }
  case "tool-call-delta": {
- const partialToolCall = state.partialToolCalls[value.toolCallId];
- partialToolCall.text += value.argsTextDelta;
+ const partialToolCall = state.partialToolCalls[part.toolCallId];
+ partialToolCall.text += part.argsTextDelta;
  const { value: partialArgs } = await parsePartialJson(
  partialToolCall.text
  );
- updateToolInvocationPart(value.toolCallId, {
+ updateToolInvocationPart(part.toolCallId, {
  state: "partial-call",
  step: partialToolCall.step,
- toolCallId: value.toolCallId,
+ toolCallId: part.toolCallId,
  toolName: partialToolCall.toolName,
  args: partialArgs
  });
@@ -1203,22 +1192,25 @@ function processUIMessageStream({
  break;
  }
  case "tool-call": {
- const call = { args: value.args, ...value };
- updateToolInvocationPart(value.toolCallId, {
+ updateToolInvocationPart(part.toolCallId, {
  state: "call",
  step: state.step,
- ...call
+ toolCallId: part.toolCallId,
+ toolName: part.toolName,
+ args: part.args
  });
  write();
  if (onToolCall) {
  const result = await onToolCall({
- toolCall: call
+ toolCall: part
  });
  if (result != null) {
- updateToolInvocationPart(value.toolCallId, {
+ updateToolInvocationPart(part.toolCallId, {
  state: "result",
  step: state.step,
- ...call,
+ toolCallId: part.toolCallId,
+ toolName: part.toolName,
+ args: part.args,
  result
  });
  write();
@@ -1232,25 +1224,24 @@ function processUIMessageStream({
  throw new Error("tool_result must be preceded by a tool_call");
  }
  const toolInvocationIndex = toolInvocations.findIndex(
- (invocation) => invocation.toolCallId === value.toolCallId
+ (invocation) => invocation.toolCallId === part.toolCallId
  );
  if (toolInvocationIndex === -1) {
  throw new Error(
  "tool_result must be preceded by a tool_call with the same toolCallId"
  );
  }
- const result = { result: value.result, ...value };
- updateToolInvocationPart(value.toolCallId, {
+ updateToolInvocationPart(part.toolCallId, {
  ...toolInvocations[toolInvocationIndex],
  state: "result",
- ...result
+ result: part.result
  });
  write();
  break;
  }
  case "start-step": {
  state.message.parts.push({ type: "step-start" });
- await updateMessageMetadata(value.metadata);
+ await updateMessageMetadata(part.metadata);
  write();
  break;
  }
@@ -1258,45 +1249,61 @@ function processUIMessageStream({
  state.step += 1;
  state.activeTextPart = void 0;
  state.activeReasoningPart = void 0;
- await updateMessageMetadata(value.metadata);
- if (value.metadata != null) {
+ await updateMessageMetadata(part.metadata);
+ if (part.metadata != null) {
  write();
  }
  break;
  }
  case "start": {
- if (value.messageId != null) {
- state.message.id = value.messageId;
+ if (part.messageId != null) {
+ state.message.id = part.messageId;
  }
- await updateMessageMetadata(value.metadata);
- if (value.messageId != null || value.metadata != null) {
+ await updateMessageMetadata(part.metadata);
+ if (part.messageId != null || part.metadata != null) {
  write();
  }
  break;
  }
  case "finish": {
- await updateMessageMetadata(value.metadata);
- if (value.metadata != null) {
+ await updateMessageMetadata(part.metadata);
+ if (part.metadata != null) {
  write();
  }
  break;
  }
  case "metadata": {
- await updateMessageMetadata(value.metadata);
- if (value.metadata != null) {
+ await updateMessageMetadata(part.metadata);
+ if (part.metadata != null) {
  write();
  }
  break;
  }
  case "error": {
- throw new Error(value);
+ throw new Error(part.errorText);
  }
  default: {
- const _exhaustiveCheck = type;
- throw new Error(`Unhandled stream part: ${_exhaustiveCheck}`);
+ if (part.type.startsWith("data-")) {
+ const existingPart = part.id != null ? state.message.parts.find(
+ (partArg) => part.type === partArg.type && part.id === partArg.id
+ ) : void 0;
+ if (existingPart != null) {
+ existingPart.value = mergeObjects(
+ existingPart.data,
+ part.data
+ );
+ } else {
+ state.message.parts.push({
+ type: part.type,
+ id: part.id,
+ value: part.data
+ });
+ }
+ write();
+ }
  }
  }
- controller.enqueue(chunk);
+ controller.enqueue(part);
  });
  }
  })
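The new `default` branch folds custom `data-*` parts into the message: a part that shares a `type` and `id` with an existing one is merged via `mergeObjects`, otherwise a fresh part is pushed. A sketch of the stream-side shapes this expects (the `data-weather` and `data-note` names and payloads are invented):

```ts
// First chunk creates the part on the message...
const firstChunk = { type: "data-weather", id: "w1", data: { city: "Berlin" } };
// ...a later chunk with the same type and id is merged into it.
const updateChunk = { type: "data-weather", id: "w1", data: { temperature: 21 } };
// A chunk without an id always appends a new part instead of merging.
const anonymousChunk = { type: "data-note", data: "no id, never merged" };
```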
@@ -1310,15 +1317,15 @@ function transformTextToUiMessageStream({
  return stream.pipeThrough(
  new TransformStream({
  start(controller) {
- controller.enqueue({ type: "start", value: {} });
- controller.enqueue({ type: "start-step", value: {} });
+ controller.enqueue({ type: "start" });
+ controller.enqueue({ type: "start-step" });
  },
  async transform(part, controller) {
- controller.enqueue({ type: "text", value: part });
+ controller.enqueue({ type: "text", text: part });
  },
  async flush(controller) {
- controller.enqueue({ type: "finish-step", value: {} });
- controller.enqueue({ type: "finish", value: {} });
+ controller.enqueue({ type: "finish-step" });
+ controller.enqueue({ type: "finish" });
  }
  })
  );
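With the `value` wrapper removed, the lifecycle parts emitted for a plain text stream are bare objects. A hedged sketch of the sequence this transform would produce for a two-chunk stream (chunk contents invented):

```ts
// Approximate output order of transformTextToUiMessageStream for chunks "Hel", "lo":
const emitted = [
  { type: "start" },
  { type: "start-step" },
  { type: "text", text: "Hel" },
  { type: "text", text: "lo" },
  { type: "finish-step" },
  { type: "finish" },
];
```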
@@ -1538,12 +1545,12 @@ async function callCompletionApi({
  if (!part.success) {
  throw part.error;
  }
- const { type, value } = part.value;
- if (type === "text") {
- result += value;
+ const streamPart = part.value;
+ if (streamPart.type === "text") {
+ result += streamPart.text;
  setCompletion(result);
- } else if (type === "error") {
- throw new Error(value);
+ } else if (streamPart.type === "error") {
+ throw new Error(streamPart.errorText);
  }
  }
  })
@@ -1619,18 +1626,18 @@ var SerialJobExecutor = class {
  function shouldResubmitMessages({
  originalMaxToolInvocationStep,
  originalMessageCount,
- maxSteps,
+ maxSteps: maxSteps2,
  messages
  }) {
  var _a17;
  const lastMessage = messages[messages.length - 1];
  return (
  // check if the feature is enabled:
- maxSteps > 1 && // ensure there is a last message:
+ maxSteps2 > 1 && // ensure there is a last message:
  lastMessage != null && // ensure we actually have new steps (to prevent infinite loops in case of errors):
  (messages.length > originalMessageCount || extractMaxToolInvocationStep(getToolInvocations(lastMessage)) !== originalMaxToolInvocationStep) && // check that next step is possible:
  isAssistantMessageWithCompletedToolCalls(lastMessage) && // limit the number of automatic steps:
- ((_a17 = extractMaxToolInvocationStep(getToolInvocations(lastMessage))) != null ? _a17 : 0) < maxSteps
+ ((_a17 = extractMaxToolInvocationStep(getToolInvocations(lastMessage))) != null ? _a17 : 0) < maxSteps2
  );
  }
  function isAssistantMessageWithCompletedToolCalls(message) {
@@ -1671,7 +1678,7 @@ var ChatStore = class {
  generateId: generateId3,
  messageMetadataSchema,
  transport,
- maxSteps = 1
+ maxSteps: maxSteps2 = 1
  }) {
  this.chats = new Map(
  Object.entries(chats).map(([id, state]) => [
@@ -1685,7 +1692,7 @@ var ChatStore = class {
  }
  ])
  );
- this.maxSteps = maxSteps;
+ this.maxSteps = maxSteps2;
  this.transport = transport;
  this.subscribers = /* @__PURE__ */ new Set();
  this.generateId = generateId3 != null ? generateId3 : import_provider_utils5.generateId;
@@ -1743,14 +1750,6 @@ var ChatStore = class {
  this.getChat(id).messages = [...messages];
  this.emit({ type: "chat-messages-changed", chatId: id });
  }
- appendMessage({
- id,
- message
- }) {
- const chat = this.getChat(id);
- chat.messages = [...chat.messages, { ...message }];
- this.emit({ type: "chat-messages-changed", chatId: id });
- }
  removeAssistantResponse(id) {
  const chat = this.getChat(id);
  const lastMessage = chat.messages[chat.messages.length - 1];
@@ -1893,6 +1892,7 @@ var ChatStore = class {
  }) {
  const self = this;
  const chat = this.getChat(chatId);
+ this.setMessages({ id: chatId, messages: chatMessages });
  this.setStatus({ id: chatId, status: "submitted", error: void 0 });
  const messageCount = chatMessages.length;
  const maxStep = extractMaxToolInvocationStep(
@@ -2235,7 +2235,7 @@ function defaultChatStore({
  prepareRequestBody,
  generateId: generateId3 = import_provider_utils6.generateId,
  messageMetadataSchema,
- maxSteps = 1,
+ maxSteps: maxSteps2 = 1,
  chats
  }) {
  return new ChatStore({
@@ -2250,7 +2250,7 @@ function defaultChatStore({
  }),
  generateId: generateId3,
  messageMetadataSchema,
- maxSteps,
+ maxSteps: maxSteps2,
  chats
  });
  }
@@ -2290,7 +2290,7 @@ function createUIMessageStream({
  safeEnqueue(value);
  }
  })().catch((error) => {
- safeEnqueue({ type: "error", value: onError(error) });
+ safeEnqueue({ type: "error", errorText: onError(error) });
  })
  );
  },
@@ -2299,12 +2299,12 @@ function createUIMessageStream({
  if (result) {
  ongoingStreamPromises.push(
  result.catch((error) => {
- safeEnqueue({ type: "error", value: onError(error) });
+ safeEnqueue({ type: "error", errorText: onError(error) });
  })
  );
  }
  } catch (error) {
- safeEnqueue({ type: "error", value: onError(error) });
+ safeEnqueue({ type: "error", errorText: onError(error) });
  }
  const waitForStreams = new Promise(async (resolve) => {
  while (ongoingStreamPromises.length > 0) {
@@ -3948,7 +3948,7 @@ async function standardizePrompt(prompt) {
  if (!validationResult.success) {
  throw new import_provider19.InvalidPromptError({
  prompt,
- message: "messages must be an array of ModelMessage",
+ message: "The messages must be a ModelMessage[]. If you have passed a UIMessage[], you can use convertToModelMessages to convert them.",
  cause: validationResult.error
  });
  }
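The reworded error points callers at `convertToModelMessages`. A minimal sketch of the conversion it suggests (`convertToModelMessages` is named in the message itself; the surrounding handler shape and placeholder declarations are assumptions, not taken from this diff):

```ts
import { convertToModelMessages, generateText } from "ai";

declare const model: any;        // a configured language model (placeholder)
declare const uiMessages: any[]; // UIMessage[] received from the client (placeholder)

// Convert UI messages to ModelMessage[] before handing them to generateText/streamText.
const result = await generateText({
  model,
  messages: convertToModelMessages(uiMessages),
});
```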
@@ -5581,6 +5581,19 @@ var DefaultStepResult = class {
  }
  };
 
+ // core/generate-text/stop-condition.ts
+ function maxSteps(maxSteps2) {
+ return ({ steps }) => steps.length >= maxSteps2;
+ }
+ function hasToolCall(toolName) {
+ return ({ steps }) => {
+ var _a17, _b, _c;
+ return (_c = (_b = (_a17 = steps[steps.length - 1]) == null ? void 0 : _a17.toolCalls) == null ? void 0 : _b.some(
+ (toolCall) => toolCall.toolName === toolName
+ )) != null ? _c : false;
+ };
+ }
+
  // core/generate-text/to-response-messages.ts
  function toResponseMessages({
  content: inputContent,
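The two helpers are plain predicates over the accumulated steps, and `generateText` below now defaults its new `continueUntil` option to `maxSteps(1)` instead of taking a numeric `maxSteps`. A hedged usage sketch based only on the signatures visible in this diff (this is a canary build, so the option name may still change; the model, tools, and tool name are placeholders):

```ts
import { generateText, hasToolCall, maxSteps } from "ai";

declare const model: any; // a configured language model (placeholder)
declare const tools: any; // tool definitions, including an "answer" tool (placeholder)

async function run() {
  // The loop stops once the predicate returns true for the steps so far:
  // maxSteps(5) stops after five steps; hasToolCall("answer") stops once the
  // last step contains a call to the "answer" tool.
  return generateText({
    model,
    tools,
    prompt: "…",
    continueUntil: hasToolCall("answer"),
  });
}
```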
@@ -5654,7 +5667,7 @@ async function generateText({
  maxRetries: maxRetriesArg,
  abortSignal,
  headers,
- maxSteps = 1,
+ continueUntil = maxSteps(1),
  experimental_output: output,
  experimental_telemetry: telemetry,
  providerOptions,
@@ -5668,13 +5681,6 @@ async function generateText({
  onStepFinish,
  ...settings
  }) {
- if (maxSteps < 1) {
- throw new InvalidArgumentError({
- parameter: "maxSteps",
- value: maxSteps,
- message: "maxSteps must be at least 1"
- });
- }
  const { maxRetries, retry } = prepareRetries({ maxRetries: maxRetriesArg });
  const callSettings = prepareCallSettings(settings);
  const baseTelemetryAttributes = getBaseTelemetryAttributes({
@@ -5705,8 +5711,7 @@ async function generateText({
  // specific settings that only make sense on the outer level:
  "ai.prompt": {
  input: () => JSON.stringify({ system, prompt, messages })
- },
- "ai.settings.maxSteps": maxSteps
+ }
  }
  }),
  tracer,
@@ -5716,7 +5721,6 @@ async function generateText({
  let currentModelResponse;
  let currentToolCalls = [];
  let currentToolResults = [];
- let stepCount = 0;
  const responseMessages = [];
  const steps = [];
  do {
@@ -5727,8 +5731,7 @@ async function generateText({
  const prepareStepResult = await (prepareStep == null ? void 0 : prepareStep({
  model,
  steps,
- maxSteps,
- stepNumber: stepCount
+ stepNumber: steps.length
  }));
  const promptMessages = await convertToLanguageModelPrompt({
  prompt: {
@@ -5883,9 +5886,12 @@ async function generateText({
  });
  steps.push(currentStepResult);
  await (onStepFinish == null ? void 0 : onStepFinish(currentStepResult));
- } while (++stepCount < maxSteps && // there are tool calls:
- currentToolCalls.length > 0 && // all current tool calls have results:
- currentToolResults.length === currentToolCalls.length);
+ } while (
+ // there are tool calls:
+ currentToolCalls.length > 0 && // all current tool calls have results:
+ currentToolResults.length === currentToolCalls.length && // continue until the stop condition is met:
+ !await continueUntil({ steps })
+ );
  span.setAttributes(
  selectTelemetryAttributes({
  telemetry,
@@ -6445,7 +6451,7 @@ function streamText({
  maxRetries,
  abortSignal,
  headers,
- maxSteps = 1,
+ maxSteps: maxSteps2 = 1,
  experimental_output: output,
  experimental_telemetry: telemetry,
  providerOptions,
@@ -6481,7 +6487,7 @@ function streamText({
  transforms: asArray(transform),
  activeTools,
  repairToolCall,
- maxSteps,
+ maxSteps: maxSteps2,
  output,
  providerOptions,
  onChunk,
@@ -6558,7 +6564,7 @@ var DefaultStreamTextResult = class {
  transforms,
  activeTools,
  repairToolCall,
- maxSteps,
+ maxSteps: maxSteps2,
  output,
  providerOptions,
  now: now2,
@@ -6572,10 +6578,10 @@ var DefaultStreamTextResult = class {
  this.totalUsagePromise = new DelayedPromise();
  this.finishReasonPromise = new DelayedPromise();
  this.stepsPromise = new DelayedPromise();
- if (maxSteps < 1) {
+ if (maxSteps2 < 1) {
  throw new InvalidArgumentError({
  parameter: "maxSteps",
- value: maxSteps,
+ value: maxSteps2,
  message: "maxSteps must be at least 1"
  });
  }
@@ -6732,6 +6738,13 @@ var DefaultStreamTextResult = class {
  this.addStream = stitchableStream.addStream;
  this.closeStream = stitchableStream.close;
  let stream = stitchableStream.stream;
+ stream = stream.pipeThrough(
+ new TransformStream({
+ start(controller) {
+ controller.enqueue({ type: "start" });
+ }
+ })
+ );
  for (const transform of transforms) {
  stream = stream.pipeThrough(
  transform({
@@ -6766,7 +6779,7 @@ var DefaultStreamTextResult = class {
  "ai.prompt": {
  input: () => JSON.stringify({ system, prompt, messages })
  },
- "ai.settings.maxSteps": maxSteps
+ "ai.settings.maxSteps": maxSteps2
  }
  }),
  tracer,
@@ -6904,7 +6917,6 @@ var DefaultStreamTextResult = class {
  var _a17, _b, _c, _d;
  if (chunk.type === "stream-start") {
  warnings = chunk.warnings;
- controller.enqueue({ type: "start" });
  return;
  }
  if (stepFirstChunk) {
@@ -7054,7 +7066,7 @@ var DefaultStreamTextResult = class {
  }
  });
  const combinedUsage = addLanguageModelUsage(usage, stepUsage);
- if (currentStep + 1 < maxSteps && // there are tool calls:
+ if (currentStep + 1 < maxSteps2 && // there are tool calls:
  stepToolCalls.length > 0 && // all current tool calls have results:
  stepToolResults.length === stepToolCalls.length) {
  responseMessages.push(
@@ -7239,125 +7251,122 @@ var DefaultStreamTextResult = class {
  const partType = part.type;
  switch (partType) {
  case "text": {
- controller.enqueue({ type: "text", value: part.text });
+ controller.enqueue({
+ type: "text",
+ text: part.text
+ });
  break;
  }
  case "reasoning": {
  if (sendReasoning) {
- controller.enqueue({ type: "reasoning", value: part });
+ controller.enqueue({
+ type: "reasoning",
+ text: part.text,
+ providerMetadata: part.providerMetadata
+ });
  }
  break;
  }
  case "reasoning-part-finish": {
  if (sendReasoning) {
- controller.enqueue({
- type: "reasoning-part-finish",
- value: null
- });
+ controller.enqueue({ type: "reasoning-part-finish" });
  }
  break;
  }
  case "file": {
  controller.enqueue({
  type: "file",
- value: {
- mediaType: part.file.mediaType,
- url: `data:${part.file.mediaType};base64,${part.file.base64}`
- }
+ mediaType: part.file.mediaType,
+ url: `data:${part.file.mediaType};base64,${part.file.base64}`
  });
  break;
  }
  case "source": {
  if (sendSources) {
- controller.enqueue({ type: "source", value: part });
+ controller.enqueue({
+ type: "source",
+ sourceType: part.sourceType,
+ id: part.id,
+ url: part.url,
+ title: part.title,
+ providerMetadata: part.providerMetadata
+ });
  }
  break;
  }
  case "tool-call-streaming-start": {
  controller.enqueue({
  type: "tool-call-streaming-start",
- value: {
- toolCallId: part.toolCallId,
- toolName: part.toolName
- }
+ toolCallId: part.toolCallId,
+ toolName: part.toolName
  });
  break;
  }
  case "tool-call-delta": {
  controller.enqueue({
  type: "tool-call-delta",
- value: {
- toolCallId: part.toolCallId,
- argsTextDelta: part.argsTextDelta
- }
+ toolCallId: part.toolCallId,
+ argsTextDelta: part.argsTextDelta
  });
  break;
  }
  case "tool-call": {
  controller.enqueue({
  type: "tool-call",
- value: {
- toolCallId: part.toolCallId,
- toolName: part.toolName,
- args: part.args
- }
+ toolCallId: part.toolCallId,
+ toolName: part.toolName,
+ args: part.args
  });
  break;
  }
  case "tool-result": {
  controller.enqueue({
  type: "tool-result",
- value: {
- toolCallId: part.toolCallId,
- result: part.result
- }
+ toolCallId: part.toolCallId,
+ result: part.result
  });
  break;
  }
  case "error": {
  controller.enqueue({
  type: "error",
- value: onError(part.error)
+ errorText: onError(part.error)
  });
  break;
  }
  case "start-step": {
+ const metadata = messageMetadata == null ? void 0 : messageMetadata({ part });
  controller.enqueue({
  type: "start-step",
- value: {
- metadata: messageMetadata == null ? void 0 : messageMetadata({ part })
- }
+ metadata
  });
  break;
  }
  case "finish-step": {
+ const metadata = messageMetadata == null ? void 0 : messageMetadata({ part });
  controller.enqueue({
  type: "finish-step",
- value: {
- metadata: messageMetadata == null ? void 0 : messageMetadata({ part })
- }
+ metadata
  });
  break;
  }
  case "start": {
  if (experimental_sendStart) {
+ const metadata = messageMetadata == null ? void 0 : messageMetadata({ part });
  controller.enqueue({
  type: "start",
- value: {
- messageId,
- metadata: messageMetadata == null ? void 0 : messageMetadata({ part })
- }
+ messageId,
+ metadata
  });
  }
  break;
  }
  case "finish": {
  if (experimental_sendFinish) {
+ const metadata = messageMetadata == null ? void 0 : messageMetadata({ part });
  controller.enqueue({
  type: "finish",
- value: {
- metadata: messageMetadata == null ? void 0 : messageMetadata({ part })
- }
+ metadata
  });
  }
  break;
@@ -8548,9 +8557,11 @@ var DefaultTranscriptionResult = class {
  generateText,
  getTextFromDataUrl,
  getToolInvocations,
+ hasToolCall,
  isAssistantMessageWithCompletedToolCalls,
  isDeepEqualData,
  jsonSchema,
+ maxSteps,
  modelMessageSchema,
  parsePartialJson,
  pipeTextStreamToResponse,