ai 5.0.0-alpha.2 → 5.0.0-alpha.3

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
package/dist/index.js CHANGED
@@ -47,6 +47,7 @@ __export(src_exports, {
   NoSuchToolError: () => NoSuchToolError,
   Output: () => output_exports,
   RetryError: () => RetryError,
+  TextStreamChatTransport: () => TextStreamChatTransport,
   ToolCallRepairError: () => ToolCallRepairError,
   ToolExecutionError: () => ToolExecutionError,
   TypeValidationError: () => import_provider16.TypeValidationError,
@@ -591,9 +592,8 @@ var uiMessageStreamPartSchema = import_zod.z.union([
     providerMetadata: import_zod.z.record(import_zod.z.any()).optional()
   }),
   import_zod.z.object({
-    type: import_zod.z.literal("source"),
-    sourceType: import_zod.z.literal("url"),
-    id: import_zod.z.string(),
+    type: import_zod.z.literal("source-url"),
+    sourceId: import_zod.z.string(),
     url: import_zod.z.string(),
     title: import_zod.z.string().optional(),
     providerMetadata: import_zod.z.any().optional()
@@ -1147,16 +1147,13 @@ function processUIMessageStream({
         write();
         break;
       }
-      case "source": {
+      case "source-url": {
        state.message.parts.push({
-          type: "source",
-          source: {
-            sourceType: "url",
-            id: part.id,
-            url: part.url,
-            title: part.title,
-            providerMetadata: part.providerMetadata
-          }
+          type: "source-url",
+          sourceId: part.sourceId,
+          url: part.url,
+          title: part.title,
+          providerMetadata: part.providerMetadata
        });
        write();
        break;
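
The two hunks above rename the streamed source part: the nested `source` object with `sourceType: "url"` and an `id` becomes a flat `source-url` part with a top-level `sourceId`. A minimal before/after sketch of the part shape, inferred from the schema and `processUIMessageStream` changes above; the id, url, and title values are placeholders:

```ts
// alpha.2: nested source object inside a "source" part
const oldPart = {
  type: "source",
  source: {
    sourceType: "url",
    id: "source-1",
    url: "https://example.com/article",
    title: "Example article",
  },
};

// alpha.3: flattened "source-url" part
const newPart = {
  type: "source-url",
  sourceId: "source-1",
  url: "https://example.com/article",
  title: "Example article",
};
```
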
@@ -1339,7 +1336,6 @@ var getOriginalFetch = () => fetch;
 async function fetchUIMessageStream({
   api,
   body,
-  streamProtocol = "ui-message",
   credentials,
   headers,
   abortController,
@@ -1373,9 +1369,7 @@ async function fetchUIMessageStream({
   if (!response.body) {
     throw new Error("The response body is empty.");
   }
-  return streamProtocol === "text" ? transformTextToUiMessageStream({
-    stream: response.body.pipeThrough(new TextDecoderStream())
-  }) : (0, import_provider_utils3.parseJsonEventStream)({
+  return (0, import_provider_utils3.parseJsonEventStream)({
     stream: response.body,
     schema: uiMessageStreamPartSchema
   }).pipeThrough(
@@ -1389,6 +1383,46 @@ async function fetchUIMessageStream({
     })
   );
 }
+async function fetchTextStream({
+  api,
+  body,
+  credentials,
+  headers,
+  abortController,
+  fetch: fetch2 = getOriginalFetch(),
+  requestType = "generate"
+}) {
+  var _a17, _b, _c;
+  const response = requestType === "resume" ? await fetch2(`${api}?chatId=${body.chatId}`, {
+    method: "GET",
+    headers: {
+      "Content-Type": "application/json",
+      ...headers
+    },
+    signal: (_a17 = abortController == null ? void 0 : abortController()) == null ? void 0 : _a17.signal,
+    credentials
+  }) : await fetch2(api, {
+    method: "POST",
+    body: JSON.stringify(body),
+    headers: {
+      "Content-Type": "application/json",
+      ...headers
+    },
+    signal: (_b = abortController == null ? void 0 : abortController()) == null ? void 0 : _b.signal,
+    credentials
+  });
+  if (!response.ok) {
+    throw new Error(
+      (_c = await response.text()) != null ? _c : "Failed to fetch the chat response."
+    );
+  }
+  if (!response.body) {
+    throw new Error("The response body is empty.");
+  }
+  return transformTextToUiMessageStream({
+    stream: response.body.pipeThrough(new TextDecoderStream())
+  });
+}
 async function consumeUIMessageStream({
   stream,
   onUpdate,
@@ -1439,10 +1473,17 @@ async function callChatApi({
   requestType = "generate",
   messageMetadataSchema
 }) {
-  const stream = await fetchUIMessageStream({
+  const stream = streamProtocol === "text" ? await fetchTextStream({
+    api,
+    body,
+    credentials,
+    headers,
+    abortController,
+    fetch: fetch2,
+    requestType
+  }) : await fetchUIMessageStream({
     api,
     body,
-    streamProtocol,
     credentials,
     headers,
     abortController,
@@ -1994,7 +2035,6 @@ var DefaultChatTransport = class {
     credentials,
     headers,
     body,
-    streamProtocol,
     fetch: fetch2,
     prepareRequestBody
   }) {
@@ -2002,7 +2042,6 @@ var DefaultChatTransport = class {
     this.credentials = credentials;
     this.headers = headers;
     this.body = body;
-    this.streamProtocol = streamProtocol;
     this.fetch = fetch2;
     this.prepareRequestBody = prepareRequestBody;
   }
@@ -2032,7 +2071,55 @@ var DefaultChatTransport = class {
         ...this.body,
         ...body
       },
-      streamProtocol: this.streamProtocol,
+      credentials: this.credentials,
+      abortController: () => abortController,
+      fetch: this.fetch,
+      requestType
+    });
+  }
+};
+var TextStreamChatTransport = class {
+  constructor({
+    api,
+    credentials,
+    headers,
+    body,
+    fetch: fetch2,
+    prepareRequestBody
+  }) {
+    this.api = api;
+    this.credentials = credentials;
+    this.headers = headers;
+    this.body = body;
+    this.fetch = fetch2;
+    this.prepareRequestBody = prepareRequestBody;
+  }
+  submitMessages({
+    chatId,
+    messages,
+    abortController,
+    body,
+    headers,
+    requestType
+  }) {
+    var _a17, _b;
+    return fetchTextStream({
+      api: this.api,
+      headers: {
+        ...this.headers,
+        ...headers
+      },
+      body: (_b = (_a17 = this.prepareRequestBody) == null ? void 0 : _a17.call(this, {
+        chatId,
+        messages,
+        ...this.body,
+        ...body
+      })) != null ? _b : {
+        chatId,
+        messages,
+        ...this.body,
+        ...body
+      },
       credentials: this.credentials,
       abortController: () => abortController,
       fetch: this.fetch,
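
With `streamProtocol` removed from `DefaultChatTransport`, plain-text streaming now lives in the new `TextStreamChatTransport`, whose `submitMessages` delegates to the `fetchTextStream` helper added above. A minimal construction sketch using only the options visible in this diff (`api`, `credentials`, `headers`, `body`, `fetch`, `prepareRequestBody`); the endpoint, the header value, and how the transport is handed to a chat store are assumptions, not shown here:

```ts
import { TextStreamChatTransport } from "ai";

const transport = new TextStreamChatTransport({
  api: "/api/chat", // placeholder endpoint that streams plain text
  headers: { Authorization: "Bearer <token>" },
  credentials: "include",
  // Optional hook to shape the POST body before it is sent.
  prepareRequestBody: ({ chatId, messages }) => ({ chatId, messages }),
});
```

Because `submitMessages` returns the result of `transformTextToUiMessageStream`, the server response is consumed as raw text rather than as the JSON event stream that `DefaultChatTransport` parses.
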
@@ -2232,7 +2319,6 @@ var import_provider_utils6 = require("@ai-sdk/provider-utils");
 function defaultChatStore({
   api,
   fetch: fetch2,
-  streamProtocol = "ui-message",
   credentials,
   headers,
   body,
@@ -2247,7 +2333,6 @@ function defaultChatStore({
     transport: new DefaultChatTransport({
       api,
       fetch: fetch2,
-      streamProtocol,
       credentials,
       headers,
       body,
@@ -4745,11 +4830,11 @@ var DelayedPromise = class {
     this._resolve = void 0;
     this._reject = void 0;
   }
-  get value() {
-    if (this.promise) {
-      return this.promise;
+  get promise() {
+    if (this._promise) {
+      return this._promise;
     }
-    this.promise = new Promise((resolve, reject) => {
+    this._promise = new Promise((resolve, reject) => {
       if (this.status.type === "resolved") {
         resolve(this.status.value);
       } else if (this.status.type === "rejected") {
@@ -4758,19 +4843,19 @@ var DelayedPromise = class {
       this._resolve = resolve;
       this._reject = reject;
     });
-    return this.promise;
+    return this._promise;
   }
   resolve(value) {
     var _a17;
     this.status = { type: "resolved", value };
-    if (this.promise) {
+    if (this._promise) {
       (_a17 = this._resolve) == null ? void 0 : _a17.call(this, value);
     }
   }
   reject(error) {
     var _a17;
     this.status = { type: "rejected", error };
-    if (this.promise) {
+    if (this._promise) {
       (_a17 = this._reject) == null ? void 0 : _a17.call(this, error);
     }
   }
@@ -4865,12 +4950,12 @@ var DefaultStreamObjectResult = class {
     currentDate,
     now: now2
   }) {
-    this.objectPromise = new DelayedPromise();
-    this.usagePromise = new DelayedPromise();
-    this.providerMetadataPromise = new DelayedPromise();
-    this.warningsPromise = new DelayedPromise();
-    this.requestPromise = new DelayedPromise();
-    this.responsePromise = new DelayedPromise();
+    this._object = new DelayedPromise();
+    this._usage = new DelayedPromise();
+    this._providerMetadata = new DelayedPromise();
+    this._warnings = new DelayedPromise();
+    this._request = new DelayedPromise();
+    this._response = new DelayedPromise();
     const { maxRetries, retry } = prepareRetries({
       maxRetries: maxRetriesArg
     });
@@ -4989,7 +5074,7 @@ var DefaultStreamObjectResult = class {
         })
       })
     );
-    self.requestPromise.resolve(request != null ? request : {});
+    self._request.resolve(request != null ? request : {});
     let warnings;
     let usage = {
       inputTokens: void 0,
@@ -5082,9 +5167,9 @@ var DefaultStreamObjectResult = class {
           usage,
          response: fullResponse
        });
-        self.usagePromise.resolve(usage);
-        self.providerMetadataPromise.resolve(providerMetadata);
-        self.responsePromise.resolve({
+        self._usage.resolve(usage);
+        self._providerMetadata.resolve(providerMetadata);
+        self._response.resolve({
          ...fullResponse,
          headers: response == null ? void 0 : response.headers
        });
@@ -5098,7 +5183,7 @@ var DefaultStreamObjectResult = class {
        );
        if (validationResult.success) {
          object2 = validationResult.value;
-          self.objectPromise.resolve(object2);
+          self._object.resolve(object2);
        } else {
          error = new NoObjectGeneratedError({
            message: "No object generated: response did not match schema.",
@@ -5108,7 +5193,7 @@ var DefaultStreamObjectResult = class {
            usage,
            finishReason
          });
-          self.objectPromise.reject(error);
+          self._object.reject(error);
        }
        break;
      }
@@ -5203,22 +5288,22 @@ var DefaultStreamObjectResult = class {
     this.outputStrategy = outputStrategy;
   }
   get object() {
-    return this.objectPromise.value;
+    return this._object.promise;
   }
   get usage() {
-    return this.usagePromise.value;
+    return this._usage.promise;
   }
   get providerMetadata() {
-    return this.providerMetadataPromise.value;
+    return this._providerMetadata.promise;
   }
   get warnings() {
-    return this.warningsPromise.value;
+    return this._warnings.promise;
   }
   get request() {
-    return this.requestPromise.value;
+    return this._request.promise;
   }
   get response() {
-    return this.responsePromise.value;
+    return this._response.promise;
   }
   get partialObjectStream() {
     return createAsyncIterableStream(
@@ -6466,7 +6551,7 @@ function streamText({
   maxRetries,
   abortSignal,
   headers,
-  maxSteps: maxSteps2 = 1,
+  continueUntil = maxSteps(1),
   experimental_output: output,
   experimental_telemetry: telemetry,
   providerOptions,
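
`streamText` replaces the numeric `maxSteps` option (and its `InvalidArgumentError` validation, removed further down) with a `continueUntil` stop condition that defaults to `maxSteps(1)`. Judging from the loop condition later in this diff, it is awaited with `{ steps }` after each step, and another step runs only while it stays falsy. A hedged sketch; whether the `maxSteps` helper is exported, and the exact option typing, are not shown in this diff:

```ts
import { streamText } from "ai";

declare const model: any; // placeholder: any provider language model instance

const result = streamText({
  model,
  prompt: "Look up the weather, then summarise it.",
  // Stop once three steps have been recorded (roughly the old `maxSteps: 3`).
  continueUntil: async ({ steps }: { steps: unknown[] }) => steps.length >= 3,
});
```
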
@@ -6502,7 +6587,7 @@ function streamText({
     transforms: asArray(transform),
     activeTools,
     repairToolCall,
-    maxSteps: maxSteps2,
+    continueUntil,
     output,
     providerOptions,
     onChunk,
@@ -6579,7 +6664,7 @@ var DefaultStreamTextResult = class {
     transforms,
     activeTools,
     repairToolCall,
-    maxSteps: maxSteps2,
+    continueUntil,
     output,
     providerOptions,
     now: now2,
@@ -6590,18 +6675,12 @@ var DefaultStreamTextResult = class {
     onFinish,
     onStepFinish
   }) {
-    this.totalUsagePromise = new DelayedPromise();
-    this.finishReasonPromise = new DelayedPromise();
-    this.stepsPromise = new DelayedPromise();
-    if (maxSteps2 < 1) {
-      throw new InvalidArgumentError({
-        parameter: "maxSteps",
-        value: maxSteps2,
-        message: "maxSteps must be at least 1"
-      });
-    }
+    this._totalUsage = new DelayedPromise();
+    this._finishReason = new DelayedPromise();
+    this._steps = new DelayedPromise();
     this.output = output;
     this.generateId = generateId3;
+    let stepFinish;
     let activeReasoningPart = void 0;
     let recordedContent = [];
     const recordedResponseMessages = [];
@@ -6683,6 +6762,7 @@ var DefaultStreamTextResult = class {
         recordedContent = [];
         activeReasoningPart = void 0;
         recordedResponseMessages.push(...stepMessages);
+        stepFinish.resolve();
       }
       if (part.type === "finish") {
         recordedTotalUsage = part.totalUsage;
@@ -6700,9 +6780,9 @@ var DefaultStreamTextResult = class {
           outputTokens: void 0,
           totalTokens: void 0
         };
-        self.finishReasonPromise.resolve(finishReason);
-        self.totalUsagePromise.resolve(totalUsage);
-        self.stepsPromise.resolve(recordedSteps);
+        self._finishReason.resolve(finishReason);
+        self._totalUsage.resolve(totalUsage);
+        self._steps.resolve(recordedSteps);
         const finalStep = recordedSteps[recordedSteps.length - 1];
         await (onFinish == null ? void 0 : onFinish({
           finishReason,
@@ -6793,8 +6873,7 @@ var DefaultStreamTextResult = class {
           // specific settings that only make sense on the outer level:
           "ai.prompt": {
             input: () => JSON.stringify({ system, prompt, messages })
-          },
-          "ai.settings.maxSteps": maxSteps2
+          }
         }
       }),
       tracer,
@@ -6806,6 +6885,7 @@ var DefaultStreamTextResult = class {
       responseMessages,
       usage
     }) {
+      stepFinish = new DelayedPromise();
      const initialPrompt = await standardizePrompt({
        system,
        prompt,
@@ -6887,7 +6967,7 @@ var DefaultStreamTextResult = class {
        }
      })
    );
-      const transformedStream = runToolsTransformation({
+      const streamWithToolResults = runToolsTransformation({
        tools,
        generatorStream: stream2,
        toolCallStreaming,
@@ -6926,7 +7006,7 @@ var DefaultStreamTextResult = class {
         stepText += chunk.text;
       }
       self.addStream(
-        transformedStream.pipeThrough(
+        streamWithToolResults.pipeThrough(
           new TransformStream({
             async transform(chunk, controller) {
               var _a17, _b, _c, _d;
@@ -7081,9 +7161,9 @@ var DefaultStreamTextResult = class {
         }
       });
       const combinedUsage = addLanguageModelUsage(usage, stepUsage);
-      if (currentStep + 1 < maxSteps2 && // there are tool calls:
-      stepToolCalls.length > 0 && // all current tool calls have results:
-      stepToolResults.length === stepToolCalls.length) {
+      await stepFinish.promise;
+      if (stepToolCalls.length > 0 && // all current tool calls have results:
+      stepToolResults.length === stepToolCalls.length && !await continueUntil({ steps: recordedSteps })) {
        responseMessages.push(
          ...toResponseMessages({
            content: stepContent,
@@ -7131,7 +7211,7 @@ var DefaultStreamTextResult = class {
     });
   }
   get steps() {
-    return this.stepsPromise.value;
+    return this._steps.promise;
   }
   get finalStep() {
     return this.steps.then((steps) => steps[steps.length - 1]);
@@ -7176,10 +7256,10 @@ var DefaultStreamTextResult = class {
     return this.finalStep.then((step) => step.response);
   }
   get totalUsage() {
-    return this.totalUsagePromise.value;
+    return this._totalUsage.promise;
   }
   get finishReason() {
-    return this.finishReasonPromise.value;
+    return this._finishReason.promise;
   }
   /**
   Split out a new stream from the original stream.
@@ -7299,9 +7379,8 @@ var DefaultStreamTextResult = class {
         case "source": {
           if (sendSources) {
             controller.enqueue({
-              type: "source",
-              sourceType: part.sourceType,
-              id: part.id,
+              type: "source-url",
+              sourceId: part.id,
               url: part.url,
               title: part.title,
               providerMetadata: part.providerMetadata
@@ -8531,6 +8610,7 @@ var DefaultTranscriptionResult = class {
   NoSuchToolError,
   Output,
   RetryError,
+  TextStreamChatTransport,
   ToolCallRepairError,
   ToolExecutionError,
   TypeValidationError,