ai 3.3.6 → 3.3.7

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
package/dist/index.d.mts CHANGED
@@ -1420,13 +1420,14 @@ If set and supported by the model, calls will generate deterministic results.
 @param abortSignal - An optional abort signal that can be used to cancel the call.
 @param headers - Additional HTTP headers to be sent with the request. Only applicable for HTTP-based providers.
 
+@param onChunk - Callback that is called for each chunk of the stream. The stream processing will pause until the callback promise is resolved.
 @param onFinish - Callback that is called when the LLM response and all request tool executions
 (for tools that have an `execute` function) are finished.
 
 @return
 A result object for accessing different stream types and additional information.
  */
-declare function streamText<TOOLS extends Record<string, CoreTool>>({ model, tools, toolChoice, system, prompt, messages, maxRetries, abortSignal, headers, experimental_telemetry: telemetry, experimental_toolCallStreaming: toolCallStreaming, onFinish, ...settings }: CallSettings & Prompt & {
+declare function streamText<TOOLS extends Record<string, CoreTool>>({ model, tools, toolChoice, system, prompt, messages, maxRetries, abortSignal, headers, experimental_telemetry: telemetry, experimental_toolCallStreaming: toolCallStreaming, onChunk, onFinish, ...settings }: CallSettings & Prompt & {
     /**
 The language model to use.
      */
@@ -1448,6 +1449,14 @@ Enable streaming of tool call deltas as they are generated. Disabled by default.
      */
     experimental_toolCallStreaming?: boolean;
     /**
+Callback that is called for each chunk of the stream. The stream processing will pause until the callback promise is resolved.
+     */
+    onChunk?: (event: {
+        chunk: Extract<TextStreamPart<TOOLS>, {
+            type: 'text-delta' | 'tool-call' | 'tool-call-streaming-start' | 'tool-call-delta' | 'tool-result';
+        }>;
+    }) => Promise<void> | void;
+    /**
 Callback that is called when the LLM response and all request tool executions
 (for tools that have an `execute` function) are finished.
      */
@@ -1489,7 +1498,6 @@ Callback that is called when the LLM response and all request tool executions
 }): Promise<DefaultStreamTextResult<TOOLS>>;
 declare class DefaultStreamTextResult<TOOLS extends Record<string, CoreTool>> implements StreamTextResult<TOOLS> {
     private originalStream;
-    private onFinish?;
     readonly warnings: StreamTextResult<TOOLS>['warnings'];
     readonly usage: StreamTextResult<TOOLS>['usage'];
     readonly finishReason: StreamTextResult<TOOLS>['finishReason'];
@@ -1497,11 +1505,12 @@ declare class DefaultStreamTextResult<TOOLS extends Record<string, CoreTool>> im
     readonly toolCalls: StreamTextResult<TOOLS>['toolCalls'];
     readonly toolResults: StreamTextResult<TOOLS>['toolResults'];
     readonly rawResponse: StreamTextResult<TOOLS>['rawResponse'];
-    constructor({ stream, warnings, rawResponse, onFinish, rootSpan, doStreamSpan, telemetry, }: {
+    constructor({ stream, warnings, rawResponse, onChunk, onFinish, rootSpan, doStreamSpan, telemetry, }: {
         stream: ReadableStream<TextStreamPart<TOOLS>>;
         warnings: StreamTextResult<TOOLS>['warnings'];
         rawResponse: StreamTextResult<TOOLS>['rawResponse'];
-        onFinish?: Parameters<typeof streamText>[0]['onFinish'];
+        onChunk: Parameters<typeof streamText>[0]['onChunk'];
+        onFinish: Parameters<typeof streamText>[0]['onFinish'];
         rootSpan: Span;
         doStreamSpan: Span;
         telemetry: TelemetrySettings | undefined;
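
The typings above add an optional `onChunk` callback to `streamText`. It only ever receives the five chunk variants listed in the `Extract<...>` type, and since the internal transform awaits its return value, a slow callback applies backpressure to the whole stream. A minimal usage sketch (the `@ai-sdk/openai` provider, model id, and prompt are illustrative assumptions, not part of this diff):

```ts
import { streamText } from 'ai';
import { openai } from '@ai-sdk/openai'; // assumed provider package

const result = await streamText({
  model: openai('gpt-4o'), // illustrative model id
  prompt: 'Write a haiku about streams.',
  // called once per text-delta / tool-call / tool-result chunk;
  // stream processing pauses until a returned promise resolves
  onChunk({ chunk }) {
    if (chunk.type === 'text-delta') {
      process.stdout.write(chunk.textDelta);
    }
  },
});
```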
package/dist/index.d.ts CHANGED
@@ -1420,13 +1420,14 @@ If set and supported by the model, calls will generate deterministic results.
 @param abortSignal - An optional abort signal that can be used to cancel the call.
 @param headers - Additional HTTP headers to be sent with the request. Only applicable for HTTP-based providers.
 
+@param onChunk - Callback that is called for each chunk of the stream. The stream processing will pause until the callback promise is resolved.
 @param onFinish - Callback that is called when the LLM response and all request tool executions
 (for tools that have an `execute` function) are finished.
 
 @return
 A result object for accessing different stream types and additional information.
  */
-declare function streamText<TOOLS extends Record<string, CoreTool>>({ model, tools, toolChoice, system, prompt, messages, maxRetries, abortSignal, headers, experimental_telemetry: telemetry, experimental_toolCallStreaming: toolCallStreaming, onFinish, ...settings }: CallSettings & Prompt & {
+declare function streamText<TOOLS extends Record<string, CoreTool>>({ model, tools, toolChoice, system, prompt, messages, maxRetries, abortSignal, headers, experimental_telemetry: telemetry, experimental_toolCallStreaming: toolCallStreaming, onChunk, onFinish, ...settings }: CallSettings & Prompt & {
     /**
 The language model to use.
      */
@@ -1448,6 +1449,14 @@ Enable streaming of tool call deltas as they are generated. Disabled by default.
      */
     experimental_toolCallStreaming?: boolean;
     /**
+Callback that is called for each chunk of the stream. The stream processing will pause until the callback promise is resolved.
+     */
+    onChunk?: (event: {
+        chunk: Extract<TextStreamPart<TOOLS>, {
+            type: 'text-delta' | 'tool-call' | 'tool-call-streaming-start' | 'tool-call-delta' | 'tool-result';
+        }>;
+    }) => Promise<void> | void;
+    /**
 Callback that is called when the LLM response and all request tool executions
 (for tools that have an `execute` function) are finished.
      */
@@ -1489,7 +1498,6 @@ Callback that is called when the LLM response and all request tool executions
 }): Promise<DefaultStreamTextResult<TOOLS>>;
 declare class DefaultStreamTextResult<TOOLS extends Record<string, CoreTool>> implements StreamTextResult<TOOLS> {
     private originalStream;
-    private onFinish?;
     readonly warnings: StreamTextResult<TOOLS>['warnings'];
     readonly usage: StreamTextResult<TOOLS>['usage'];
     readonly finishReason: StreamTextResult<TOOLS>['finishReason'];
@@ -1497,11 +1505,12 @@ declare class DefaultStreamTextResult<TOOLS extends Record<string, CoreTool>> im
     readonly toolCalls: StreamTextResult<TOOLS>['toolCalls'];
     readonly toolResults: StreamTextResult<TOOLS>['toolResults'];
     readonly rawResponse: StreamTextResult<TOOLS>['rawResponse'];
-    constructor({ stream, warnings, rawResponse, onFinish, rootSpan, doStreamSpan, telemetry, }: {
+    constructor({ stream, warnings, rawResponse, onChunk, onFinish, rootSpan, doStreamSpan, telemetry, }: {
         stream: ReadableStream<TextStreamPart<TOOLS>>;
         warnings: StreamTextResult<TOOLS>['warnings'];
         rawResponse: StreamTextResult<TOOLS>['rawResponse'];
-        onFinish?: Parameters<typeof streamText>[0]['onFinish'];
+        onChunk: Parameters<typeof streamText>[0]['onChunk'];
+        onFinish: Parameters<typeof streamText>[0]['onFinish'];
         rootSpan: Span;
         doStreamSpan: Span;
         telemetry: TelemetrySettings | undefined;
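
The CommonJS typings mirror the ESM ones above. The `Extract<TextStreamPart<TOOLS>, ...>` event type is a discriminated union, so a `switch` on `chunk.type` narrows each branch to its own fields. A sketch of a callback exercising that narrowing (provider, model id, prompt, and logging are illustrative; tools are omitted for brevity, so the tool branches would fire only when `tools` are supplied):

```ts
import { streamText } from 'ai';
import { openai } from '@ai-sdk/openai'; // assumed provider package

await streamText({
  model: openai('gpt-4o'), // illustrative model id
  prompt: 'What is the weather in Berlin?',
  async onChunk({ chunk }) {
    switch (chunk.type) {
      case 'text-delta':
        process.stdout.write(chunk.textDelta); // narrowed: textDelta exists here
        break;
      case 'tool-call':
        console.log('tool call:', chunk.toolName);
        break;
      case 'tool-result':
        console.log('tool result:', chunk.result);
        break;
      case 'tool-call-streaming-start':
      case 'tool-call-delta':
        // only emitted when experimental_toolCallStreaming is enabled
        break;
    }
  },
});
```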
package/dist/index.js CHANGED
@@ -106,7 +106,7 @@ var import_provider_utils = require("@ai-sdk/provider-utils");
 
 // util/delay.ts
 async function delay(delayInMs) {
-  return new Promise((resolve) => setTimeout(resolve, delayInMs));
+  return delayInMs === void 0 ? Promise.resolve() : new Promise((resolve) => setTimeout(resolve, delayInMs));
 }
 
 // util/retry-error.ts
@@ -2853,6 +2853,7 @@ async function streamText({
   headers,
   experimental_telemetry: telemetry,
   experimental_toolCallStreaming: toolCallStreaming = false,
+  onChunk,
   onFinish,
   ...settings
 }) {
@@ -2944,6 +2945,7 @@ async function streamText({
     }),
     warnings,
     rawResponse,
+    onChunk,
     onFinish,
     rootSpan,
     doStreamSpan,
@@ -2957,6 +2959,7 @@ var DefaultStreamTextResult = class {
     stream,
     warnings,
     rawResponse,
+    onChunk,
     onFinish,
     rootSpan,
     doStreamSpan,
@@ -2964,7 +2967,6 @@ var DefaultStreamTextResult = class {
   }) {
     this.warnings = warnings;
     this.rawResponse = rawResponse;
-    this.onFinish = onFinish;
    const { resolve: resolveUsage, promise: usagePromise } = createResolvablePromise();
     this.usage = usagePromise;
     const { resolve: resolveFinishReason, promise: finishReasonPromise } = createResolvablePromise();
@@ -2981,25 +2983,30 @@ var DefaultStreamTextResult = class {
     const toolCalls = [];
     const toolResults = [];
     let firstChunk = true;
-    const self = this;
     this.originalStream = stream.pipeThrough(
       new TransformStream({
         async transform(chunk, controller) {
-          controller.enqueue(chunk);
           if (firstChunk) {
             firstChunk = false;
             doStreamSpan.addEvent("ai.stream.firstChunk");
           }
+          if (chunk.type === "text-delta" && chunk.textDelta.length === 0) {
+            return;
+          }
+          controller.enqueue(chunk);
           const chunkType = chunk.type;
           switch (chunkType) {
             case "text-delta":
               text += chunk.textDelta;
+              await (onChunk == null ? void 0 : onChunk({ chunk }));
               break;
             case "tool-call":
               toolCalls.push(chunk);
+              await (onChunk == null ? void 0 : onChunk({ chunk }));
               break;
             case "tool-result":
               toolResults.push(chunk);
+              await (onChunk == null ? void 0 : onChunk({ chunk }));
               break;
             case "finish":
               usage = chunk.usage;
@@ -3010,7 +3017,10 @@ var DefaultStreamTextResult = class {
               resolveToolCalls(toolCalls);
               break;
             case "tool-call-streaming-start":
-            case "tool-call-delta":
+            case "tool-call-delta": {
+              await (onChunk == null ? void 0 : onChunk({ chunk }));
+              break;
+            }
             case "error":
               break;
             default: {
@@ -3021,7 +3031,6 @@ var DefaultStreamTextResult = class {
         },
         // invoke onFinish callback and resolve toolResults promise when the stream is about to close:
         async flush(controller) {
-          var _a12;
           try {
             const finalUsage = usage != null ? usage : {
               promptTokens: NaN,
@@ -3060,7 +3069,7 @@ var DefaultStreamTextResult = class {
               })
             );
             resolveToolResults(toolResults);
-            await ((_a12 = self.onFinish) == null ? void 0 : _a12.call(self, {
+            await (onFinish == null ? void 0 : onFinish({
              finishReason: finalFinishReason,
              usage: finalUsage,
              text,
@@ -3099,9 +3108,7 @@ var DefaultStreamTextResult = class {
     return createAsyncIterableStream(this.teeStream(), {
       transform(chunk, controller) {
         if (chunk.type === "text-delta") {
-          if (chunk.textDelta.length > 0) {
-            controller.enqueue(chunk.textDelta);
-          }
+          controller.enqueue(chunk.textDelta);
         } else if (chunk.type === "error") {
           controller.error(chunk.error);
         }
@@ -3111,13 +3118,7 @@ var DefaultStreamTextResult = class {
   get fullStream() {
     return createAsyncIterableStream(this.teeStream(), {
       transform(chunk, controller) {
-        if (chunk.type === "text-delta") {
-          if (chunk.textDelta.length > 0) {
-            controller.enqueue(chunk);
-          }
-        } else {
-          controller.enqueue(chunk);
-        }
+        controller.enqueue(chunk);
       }
     });
   }
@@ -3760,7 +3761,11 @@ function readableFromAsyncIterable(iterable) {
 
 // streams/stream-data.ts
 var import_ui_utils7 = require("@ai-sdk/ui-utils");
-var STREAM_DATA_WARNING_TIME_MS = 15 * 1e3;
+
+// util/constants.ts
+var HANGING_STREAM_WARNING_TIME_MS = 15 * 1e3;
+
+// streams/stream-data.ts
 var StreamData2 = class {
   constructor() {
     this.encoder = new TextEncoder();
@@ -3776,7 +3781,7 @@ var StreamData2 = class {
           console.warn(
             "The data stream is hanging. Did you forget to close it with `data.close()`?"
           );
-        }, STREAM_DATA_WARNING_TIME_MS);
+        }, HANGING_STREAM_WARNING_TIME_MS);
         }
       },
       pull: (controller) => {
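
Because the `transform` above awaits `onChunk` before handling the next part, an async callback throttles the stream for every consumer; note also that empty text deltas are now dropped centrally in this transform rather than separately in `textStream` and `fullStream`. A small sketch of that backpressure behavior (the delay length, provider, model id, and prompt are illustrative):

```ts
import { streamText } from 'ai';
import { openai } from '@ai-sdk/openai'; // assumed provider package

const sleep = (ms: number) => new Promise<void>((resolve) => setTimeout(resolve, ms));

const result = await streamText({
  model: openai('gpt-4o'), // illustrative model id
  prompt: 'Count from one to ten.',
  // the stream pauses on every chunk until this promise resolves,
  // so the reader below sees at most ~10 chunks per second
  async onChunk() {
    await sleep(100);
  },
});

for await (const delta of result.textStream) {
  process.stdout.write(delta);
}
```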