ai 5.0.0-beta.23 → 5.0.0-beta.25

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
package/CHANGELOG.md CHANGED
@@ -1,5 +1,26 @@
  # ai
 
+ ## 5.0.0-beta.25
+
+ ### Patch Changes
+
+ - Updated dependencies [8bd3624]
+ - Updated dependencies [e001ea1]
+   - @ai-sdk/gateway@1.0.0-beta.11
+
+ ## 5.0.0-beta.24
+
+ ### Patch Changes
+
+ - add5ac1: feat (ai): make streamText toUIMessageStream async iterable
+ - ff1c81a: feat (ai): add streamText onAbort callback
+ - e4c8647: feat (ui): allow asynchronous onFinish in createUIMessageStream
+ - 383cbfa: feat (ai): add isAborted to onFinish callback for ui message streams
+ - Updated dependencies [57edfcb]
+ - Updated dependencies [383cbfa]
+   - @ai-sdk/provider-utils@3.0.0-beta.5
+   - @ai-sdk/gateway@1.0.0-beta.10
+
  ## 5.0.0-beta.23
 
  ### Patch Changes
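The `ff1c81a` entry above adds an `onAbort` callback to `streamText`. A minimal sketch of how it might be used, assuming an arbitrary provider package (`@ai-sdk/openai` and the model id are illustrative, not part of this diff) and a caller-owned `AbortController`:

```ts
import { streamText } from 'ai';
import { openai } from '@ai-sdk/openai';

const controller = new AbortController();

const result = streamText({
  model: openai('gpt-4o'), // illustrative model choice
  prompt: 'Write a long story about a lighthouse.',
  abortSignal: controller.signal,
  // New in 5.0.0-beta.24: invoked when the run is aborted via the abort signal,
  // with the steps recorded so far (see the onAbort wiring in the dist diff below).
  onAbort: ({ steps }) => {
    console.log(`aborted after ${steps.length} completed step(s)`);
  },
});

// Abort mid-stream, e.g. when the user cancels the request.
setTimeout(() => controller.abort(), 1000);

// With this change an aborted run is expected to end the stream rather than throw,
// so plain consumption like this should complete.
for await (const textPart of result.textStream) {
  process.stdout.write(textPart);
}
```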
package/dist/bin/ai.js CHANGED
@@ -336,6 +336,9 @@ var uiMessageChunkSchema = import_v43.z.union([
  type: import_v43.z.literal("finish"),
  messageMetadata: import_v43.z.unknown().optional()
  }),
+ import_v43.z.strictObject({
+ type: import_v43.z.literal("abort")
+ }),
  import_v43.z.strictObject({
  type: import_v43.z.literal("message-metadata"),
  messageMetadata: import_v43.z.unknown()
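The hunk above extends the UI message chunk union with a new `abort` chunk type. For illustration only, a standalone zod sketch mirroring the added union member (the bundled schema itself is internal to the package):

```ts
import { z } from 'zod';

// The abort chunk carries nothing but its type tag; strictObject rejects extra keys.
const abortChunkSchema = z.strictObject({
  type: z.literal('abort'),
});

abortChunkSchema.parse({ type: 'abort' }); // ok
console.log(abortChunkSchema.safeParse({ type: 'abort', extra: 1 }).success); // false
```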
@@ -1079,6 +1082,7 @@ function handleUIMessageStreamFinish({
  } else {
  messageId = lastMessage.id;
  }
+ let isAborted = false;
  const idInjectedStream = stream.pipeThrough(
  new TransformStream({
  transform(chunk, controller) {
@@ -1088,6 +1092,9 @@ function handleUIMessageStreamFinish({
  startChunk.messageId = messageId;
  }
  }
+ if (chunk.type === "abort") {
+ isAborted = true;
+ }
  controller.enqueue(chunk);
  }
  })
@@ -1113,9 +1120,10 @@ function handleUIMessageStreamFinish({
  transform(chunk, controller) {
  controller.enqueue(chunk);
  },
- flush() {
+ async flush() {
  const isContinuation = state.message.id === (lastMessage == null ? void 0 : lastMessage.id);
- onFinish({
+ await onFinish({
+ isAborted,
  isContinuation,
  responseMessage: state.message,
  messages: [
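These `handleUIMessageStreamFinish` hunks record whether an `abort` chunk passed through, await the now possibly asynchronous `onFinish`, and hand it the new `isAborted` flag. A sketch of what that enables at the call site, assuming `result` is a `streamText` result and `saveChat` is a hypothetical persistence helper:

```ts
const uiStream = result.toUIMessageStream({
  // onFinish may now be async; the stream's flush awaits it before settling.
  onFinish: async ({ isAborted, isContinuation, responseMessage, messages }) => {
    if (isAborted) {
      // The run was aborted mid-stream; skip persisting the partial response.
      return;
    }
    await saveChat({ messages }); // hypothetical helper
  },
});
```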
@@ -1214,6 +1222,13 @@ function createStitchableStream() {
  let controller = null;
  let isClosed = false;
  let waitForNewStream = createResolvablePromise();
+ const terminate = () => {
+ isClosed = true;
+ waitForNewStream.resolve();
+ innerStreamReaders.forEach((reader) => reader.cancel());
+ innerStreamReaders = [];
+ controller == null ? void 0 : controller.close();
+ };
  const processPull = async () => {
  if (isClosed && innerStreamReaders.length === 0) {
  controller == null ? void 0 : controller.close();
@@ -1239,9 +1254,7 @@ function createStitchableStream() {
  } catch (error) {
  controller == null ? void 0 : controller.error(error);
  innerStreamReaders.shift();
- if (isClosed && innerStreamReaders.length === 0) {
- controller == null ? void 0 : controller.close();
- }
+ terminate();
  }
  };
  return {
@@ -1280,13 +1293,7 @@ function createStitchableStream() {
  * Immediately close the outer stream. This will cancel all inner streams
  * and close the outer stream.
  */
- terminate: () => {
- isClosed = true;
- waitForNewStream.resolve();
- innerStreamReaders.forEach((reader) => reader.cancel());
- innerStreamReaders = [];
- controller == null ? void 0 : controller.close();
- }
+ terminate
  };
  }
 
@@ -2646,6 +2653,30 @@ function addTokenCounts(tokenCount1, tokenCount2) {
  return tokenCount1 == null && tokenCount2 == null ? void 0 : (tokenCount1 != null ? tokenCount1 : 0) + (tokenCount2 != null ? tokenCount2 : 0);
  }
 
+ // src/util/filter-stream-errors.ts
+ function filterStreamErrors(readable, onError) {
+ return new ReadableStream({
+ async start(controller) {
+ const reader = readable.getReader();
+ try {
+ while (true) {
+ const { done, value } = await reader.read();
+ if (done) {
+ controller.close();
+ break;
+ }
+ controller.enqueue(value);
+ }
+ } catch (error) {
+ await onError({ error, controller });
+ }
+ },
+ cancel(reason) {
+ return readable.cancel(reason);
+ }
+ });
+ }
+
  // src/generate-text/run-tools-transformation.ts
  var import_provider_utils11 = require("@ai-sdk/provider-utils");
 
@@ -3165,6 +3196,7 @@ function streamText({
  console.error(error);
  },
  onFinish,
+ onAbort,
  onStepFinish,
  _internal: {
  now: now2 = now,
@@ -3196,6 +3228,7 @@ function streamText({
  onChunk,
  onError,
  onFinish,
+ onAbort,
  onStepFinish,
  now: now2,
  currentDate,
@@ -3294,6 +3327,7 @@ var DefaultStreamTextResult = class {
  onChunk,
  onError,
  onFinish,
+ onAbort,
  onStepFinish
  }) {
  this._totalUsage = new DelayedPromise();
@@ -3301,7 +3335,6 @@ var DefaultStreamTextResult = class {
  this._steps = new DelayedPromise();
  this.output = output;
  this.includeRawChunks = includeRawChunks;
- this.generateId = generateId2;
  let stepFinish;
  let recordedContent = [];
  const recordedResponseMessages = [];
@@ -3504,6 +3537,15 @@ var DefaultStreamTextResult = class {
  this.addStream = stitchableStream.addStream;
  this.closeStream = stitchableStream.close;
  let stream = stitchableStream.stream;
+ stream = filterStreamErrors(stream, ({ error, controller }) => {
+ if ((0, import_provider_utils12.isAbortError)(error) && (abortSignal == null ? void 0 : abortSignal.aborted)) {
+ onAbort == null ? void 0 : onAbort({ steps: recordedSteps });
+ controller.enqueue({ type: "abort" });
+ controller.close();
+ } else {
+ controller.error(error);
+ }
+ });
  stream = stream.pipeThrough(
  new TransformStream({
  start(controller) {
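This hunk wires the `onAbort` callback and the new `abort` part into the stream pipeline via the `filterStreamErrors` helper added earlier in this diff: abort errors are swallowed and replaced by an `{ type: "abort" }` chunk, while everything else still errors the stream. A typed, standalone sketch of the same pattern (names and types here are illustrative, not the package's public API):

```ts
type StreamErrorHandler<T> = (args: {
  error: unknown;
  controller: ReadableStreamDefaultController<T>;
}) => void | PromiseLike<void>;

// Forward chunks until the source errors, then let the handler decide
// whether to substitute a chunk and close, or propagate the error.
function filterStreamErrors<T>(
  readable: ReadableStream<T>,
  onError: StreamErrorHandler<T>,
): ReadableStream<T> {
  return new ReadableStream<T>({
    async start(controller) {
      const reader = readable.getReader();
      try {
        while (true) {
          const { done, value } = await reader.read();
          if (done) {
            controller.close();
            break;
          }
          controller.enqueue(value);
        }
      } catch (error) {
        await onError({ error, controller });
      }
    },
    cancel(reason) {
      return readable.cancel(reason);
    },
  });
}

// A source that emits one chunk and then fails with an abort-like error.
const source = new ReadableStream<{ type: string }>({
  start(controller) {
    controller.enqueue({ type: 'text-delta' });
    controller.error(Object.assign(new Error('aborted'), { name: 'AbortError' }));
  },
});

// Swallow the abort and surface it as a regular chunk instead of a stream error.
const filtered = filterStreamErrors(source, ({ error, controller }) => {
  if (error instanceof Error && error.name === 'AbortError') {
    controller.enqueue({ type: 'abort' });
    controller.close();
  } else {
    controller.error(error);
  }
});
```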
@@ -4246,6 +4288,10 @@ var DefaultStreamTextResult = class {
  }
  break;
  }
+ case "abort": {
+ controller.enqueue(part);
+ break;
+ }
  case "tool-input-end": {
  break;
  }
@@ -4266,13 +4312,15 @@ var DefaultStreamTextResult = class {
  }
  })
  );
- return handleUIMessageStreamFinish({
- stream: baseStream,
- messageId: responseMessageId != null ? responseMessageId : generateMessageId == null ? void 0 : generateMessageId(),
- originalMessages,
- onFinish,
- onError
- });
+ return createAsyncIterableStream(
+ handleUIMessageStreamFinish({
+ stream: baseStream,
+ messageId: responseMessageId != null ? responseMessageId : generateMessageId == null ? void 0 : generateMessageId(),
+ originalMessages,
+ onFinish,
+ onError
+ })
+ );
  }
  pipeUIMessageStreamToResponse(response, {
  originalMessages,
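The final hunk wraps the UI message stream in `createAsyncIterableStream`, which is what makes the result of `toUIMessageStream()` directly iterable (the `add5ac1` changelog entry). A minimal consumption sketch, with the provider import and model id again illustrative:

```ts
import { streamText } from 'ai';
import { openai } from '@ai-sdk/openai';

const result = streamText({
  model: openai('gpt-4o'), // illustrative
  prompt: 'Say hello.',
});

// Previously the stream had to be read via getReader()/pipeTo();
// as of beta.24 it can be iterated with for await ... of.
for await (const chunk of result.toUIMessageStream()) {
  console.log(chunk.type); // e.g. "start", "text-delta", "finish", or the new "abort"
}
```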