ai 3.3.6 → 3.3.7

package/dist/index.mjs CHANGED
@@ -22,7 +22,7 @@ import { getErrorMessage, isAbortError } from "@ai-sdk/provider-utils";
 
 // util/delay.ts
 async function delay(delayInMs) {
-  return new Promise((resolve) => setTimeout(resolve, delayInMs));
+  return delayInMs === void 0 ? Promise.resolve() : new Promise((resolve) => setTimeout(resolve, delayInMs));
 }
 
 // util/retry-error.ts
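
Review note: the only behavioral change in this hunk is that delay() now resolves immediately when called without an argument instead of scheduling a zero-millisecond timer. A minimal standalone sketch of the new behavior (illustrative only, not part of the package):

    // Sketch of the changed helper; mirrors the + line above.
    async function delay(delayInMs) {
      return delayInMs === void 0
        ? Promise.resolve() // no timer scheduled
        : new Promise((resolve) => setTimeout(resolve, delayInMs));
    }

    await delay();    // resolves on the next microtask, no setTimeout involved
    await delay(100); // resolves after ~100 ms, as before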
@@ -2776,6 +2776,7 @@ async function streamText({
   headers,
   experimental_telemetry: telemetry,
   experimental_toolCallStreaming: toolCallStreaming = false,
+  onChunk,
   onFinish,
   ...settings
 }) {
@@ -2867,6 +2868,7 @@ async function streamText({
     }),
     warnings,
     rawResponse,
+    onChunk,
     onFinish,
     rootSpan,
     doStreamSpan,
@@ -2880,6 +2882,7 @@ var DefaultStreamTextResult = class {
     stream,
     warnings,
     rawResponse,
+    onChunk,
     onFinish,
     rootSpan,
     doStreamSpan,
@@ -2887,7 +2890,6 @@ var DefaultStreamTextResult = class {
   }) {
     this.warnings = warnings;
     this.rawResponse = rawResponse;
-    this.onFinish = onFinish;
     const { resolve: resolveUsage, promise: usagePromise } = createResolvablePromise();
     this.usage = usagePromise;
     const { resolve: resolveFinishReason, promise: finishReasonPromise } = createResolvablePromise();
@@ -2904,25 +2906,30 @@ var DefaultStreamTextResult = class {
     const toolCalls = [];
     const toolResults = [];
     let firstChunk = true;
-    const self = this;
     this.originalStream = stream.pipeThrough(
       new TransformStream({
         async transform(chunk, controller) {
-          controller.enqueue(chunk);
           if (firstChunk) {
             firstChunk = false;
             doStreamSpan.addEvent("ai.stream.firstChunk");
           }
+          if (chunk.type === "text-delta" && chunk.textDelta.length === 0) {
+            return;
+          }
+          controller.enqueue(chunk);
           const chunkType = chunk.type;
           switch (chunkType) {
             case "text-delta":
               text += chunk.textDelta;
+              await (onChunk == null ? void 0 : onChunk({ chunk }));
               break;
             case "tool-call":
               toolCalls.push(chunk);
+              await (onChunk == null ? void 0 : onChunk({ chunk }));
               break;
             case "tool-result":
               toolResults.push(chunk);
+              await (onChunk == null ? void 0 : onChunk({ chunk }));
               break;
             case "finish":
               usage = chunk.usage;
@@ -2933,7 +2940,10 @@ var DefaultStreamTextResult = class {
               resolveToolCalls(toolCalls);
               break;
             case "tool-call-streaming-start":
-            case "tool-call-delta":
+            case "tool-call-delta": {
+              await (onChunk == null ? void 0 : onChunk({ chunk }));
+              break;
+            }
             case "error":
               break;
             default: {
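
Review note: the hunks above thread a new onChunk callback through streamText and invoke it for text-delta, tool-call, tool-result, tool-call-streaming-start and tool-call-delta parts. A hedged usage sketch; the provider import and model id are assumptions, not part of this diff:

    import { streamText } from "ai";
    import { openai } from "@ai-sdk/openai"; // assumed provider package

    const result = await streamText({
      model: openai("gpt-4o-mini"), // assumed model id
      prompt: "Write a haiku about code reviews.",
      // New in 3.3.7: called for each forwarded stream part listed above.
      onChunk({ chunk }) {
        console.log("received chunk of type", chunk.type);
      },
    });

    // Consuming the result stream is what drives the transform that awaits onChunk.
    for await (const textDelta of result.textStream) {
      process.stdout.write(textDelta);
    }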
@@ -2944,7 +2954,6 @@ var DefaultStreamTextResult = class {
         },
         // invoke onFinish callback and resolve toolResults promise when the stream is about to close:
         async flush(controller) {
-          var _a12;
           try {
             const finalUsage = usage != null ? usage : {
               promptTokens: NaN,
@@ -2983,7 +2992,7 @@ var DefaultStreamTextResult = class {
               })
             );
             resolveToolResults(toolResults);
-            await ((_a12 = self.onFinish) == null ? void 0 : _a12.call(self, {
+            await (onFinish == null ? void 0 : onFinish({
               finishReason: finalFinishReason,
               usage: finalUsage,
               text,
@@ -3022,9 +3031,7 @@ var DefaultStreamTextResult = class {
     return createAsyncIterableStream(this.teeStream(), {
       transform(chunk, controller) {
         if (chunk.type === "text-delta") {
-          if (chunk.textDelta.length > 0) {
-            controller.enqueue(chunk.textDelta);
-          }
+          controller.enqueue(chunk.textDelta);
         } else if (chunk.type === "error") {
           controller.error(chunk.error);
         }
@@ -3034,13 +3041,7 @@ var DefaultStreamTextResult = class {
   get fullStream() {
     return createAsyncIterableStream(this.teeStream(), {
       transform(chunk, controller) {
-        if (chunk.type === "text-delta") {
-          if (chunk.textDelta.length > 0) {
-            controller.enqueue(chunk);
-          }
-        } else {
-          controller.enqueue(chunk);
-        }
+        controller.enqueue(chunk);
       }
     });
   }
@@ -3695,7 +3696,11 @@ function readableFromAsyncIterable(iterable) {
 
 // streams/stream-data.ts
 import { formatStreamPart as formatStreamPart2 } from "@ai-sdk/ui-utils";
-var STREAM_DATA_WARNING_TIME_MS = 15 * 1e3;
+
+// util/constants.ts
+var HANGING_STREAM_WARNING_TIME_MS = 15 * 1e3;
+
+// streams/stream-data.ts
 var StreamData2 = class {
   constructor() {
     this.encoder = new TextEncoder();
@@ -3711,7 +3716,7 @@ var StreamData2 = class {
           console.warn(
             "The data stream is hanging. Did you forget to close it with `data.close()`?"
           );
-        }, STREAM_DATA_WARNING_TIME_MS);
+        }, HANGING_STREAM_WARNING_TIME_MS);
       }
     },
     pull: (controller) => {
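
Review note: the last two hunks move the warning timeout into util/constants.ts and rename STREAM_DATA_WARNING_TIME_MS to HANGING_STREAM_WARNING_TIME_MS; the 15-second hanging-stream warning itself is unchanged. A brief sketch of the pattern the warning guards against (the surrounding response handling is assumed, not part of this diff):

    import { StreamData } from "ai";

    const data = new StreamData();
    data.append({ status: "started" }); // any JSON-serializable value

    // ... pipe the response to the client ...

    // Closing the StreamData is what prevents the hanging-stream warning above.
    await data.close();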