ai 6.0.12 → 6.0.13

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
package/CHANGELOG.md CHANGED
@@ -1,5 +1,11 @@
 # ai
 
+## 6.0.13
+
+### Patch Changes
+
+- e2c445d: feat(ai): smoothStream reasoning support
+
 ## 6.0.12
 
 ### Patch Changes
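The single change in this release is `smoothStream` learning to smooth reasoning output as well as text. A minimal usage sketch, assuming the `experimental_transform` hook on `streamText` (its name in recent AI SDK releases; verify against the 6.x types) and an illustrative reasoning-capable model:

```typescript
import { smoothStream, streamText } from 'ai';
import { openai } from '@ai-sdk/openai';

const result = streamText({
  model: openai('o3-mini'), // illustrative; any reasoning-capable model
  prompt: 'Explain how rainbows form.',
  // Option name as used by recent AI SDK releases; check the 6.x streamText types.
  experimental_transform: smoothStream({ delayInMs: 10, chunking: 'word' }),
});

// As of 6.0.13, reasoning-delta parts are smoothed (word-chunked and delayed)
// just like text-delta parts instead of being passed through untouched.
for await (const part of result.fullStream) {
  if (part.type === 'reasoning-delta' || part.type === 'text-delta') {
    process.stdout.write(part.text);
  }
}
```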
package/dist/index.d.mts CHANGED
@@ -56,7 +56,7 @@ declare global {
  *
  * You can register model IDs in two ways:
  *
- * 1. Register baesd on Model IDs from a provider package:
+ * 1. Register based on Model IDs from a provider package:
  * @example
  * ```typescript
  * import { openai } from '@ai-sdk/openai';
@@ -1448,7 +1448,7 @@ declare function pruneMessages({ messages, reasoning, toolCalls, emptyMessages,
  */
 type ChunkDetector = (buffer: string) => string | undefined | null;
 /**
- * Smooths text streaming output.
+ * Smooths text and reasoning streaming output.
 *
 * @param delayInMs - The delay in milliseconds between each chunk. Defaults to 10ms. Can be set to `null` to skip the delay.
 * @param chunking - Controls how the text is chunked for streaming. Use "word" to stream word by word (default), "line" to stream line by line, or provide a custom RegExp pattern for custom chunking.
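The `delayInMs` and `chunking` options documented above now apply to the reasoning path as well. A configuration sketch for those options; the sentence RegExp and the callback are illustrative values, not part of the package:

```typescript
import { smoothStream } from 'ai';

// Defaults per the doc comment: word-by-word chunking, 10 ms between chunks.
const wordSmoothing = smoothStream();

// Line-by-line chunking with the inter-chunk delay disabled.
const lineSmoothing = smoothStream({ chunking: 'line', delayInMs: null });

// Custom chunking via a RegExp, e.g. roughly one sentence at a time.
const sentenceSmoothing = smoothStream({ chunking: /[^.!?]*[.!?]\s+/ });

// Or a ChunkDetector callback (see the type above): return the next chunk to
// emit from the front of the buffer, or null/undefined to wait for more input.
const fixedSizeSmoothing = smoothStream({
  chunking: (buffer) => (buffer.length >= 20 ? buffer.slice(0, 20) : null),
});
```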
package/dist/index.d.ts CHANGED
@@ -56,7 +56,7 @@ declare global {
  *
  * You can register model IDs in two ways:
  *
- * 1. Register baesd on Model IDs from a provider package:
+ * 1. Register based on Model IDs from a provider package:
  * @example
  * ```typescript
  * import { openai } from '@ai-sdk/openai';
@@ -1448,7 +1448,7 @@ declare function pruneMessages({ messages, reasoning, toolCalls, emptyMessages,
  */
 type ChunkDetector = (buffer: string) => string | undefined | null;
 /**
- * Smooths text streaming output.
+ * Smooths text and reasoning streaming output.
 *
 * @param delayInMs - The delay in milliseconds between each chunk. Defaults to 10ms. Can be set to `null` to skip the delay.
 * @param chunking - Controls how the text is chunked for streaming. Use "word" to stream word by word (default), "line" to stream line by line, or provide a custom RegExp pattern for custom chunking.
package/dist/index.js CHANGED
@@ -1001,7 +1001,7 @@ var import_provider_utils3 = require("@ai-sdk/provider-utils");
 var import_provider_utils4 = require("@ai-sdk/provider-utils");
 
 // src/version.ts
-var VERSION = true ? "6.0.12" : "0.0.0-test";
+var VERSION = true ? "6.0.13" : "0.0.0-test";
 
 // src/util/download/download.ts
 var download = async ({ url }) => {
@@ -10202,25 +10202,29 @@ function smoothStream({
   return () => {
     let buffer = "";
     let id = "";
+    let type = void 0;
+    function flushBuffer(controller) {
+      if (buffer.length > 0 && type !== void 0) {
+        controller.enqueue({ type, text: buffer, id });
+        buffer = "";
+      }
+    }
     return new TransformStream({
       async transform(chunk, controller) {
-        if (chunk.type !== "text-delta") {
-          if (buffer.length > 0) {
-            controller.enqueue({ type: "text-delta", text: buffer, id });
-            buffer = "";
-          }
+        if (chunk.type !== "text-delta" && chunk.type !== "reasoning-delta") {
+          flushBuffer(controller);
           controller.enqueue(chunk);
           return;
         }
-        if (chunk.id !== id && buffer.length > 0) {
-          controller.enqueue({ type: "text-delta", text: buffer, id });
-          buffer = "";
+        if ((chunk.type !== type || chunk.id !== id) && buffer.length > 0) {
+          flushBuffer(controller);
         }
        buffer += chunk.text;
        id = chunk.id;
+       type = chunk.type;
        let match;
        while ((match = detectChunk(buffer)) != null) {
-         controller.enqueue({ type: "text-delta", text: match, id });
+         controller.enqueue({ type, text: match, id });
          buffer = buffer.slice(match.length);
          await delay2(delayInMs);
        }
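Behaviorally, the rewrite means the internal buffer is now flushed not only when a non-delta chunk arrives or the part `id` changes, but also when the delta `type` switches between `text-delta` and `reasoning-delta`, and buffered text is re-emitted with its original type. A hedged sketch that drives the returned TransformStream directly (the factory argument and the trimmed chunk shapes are assumptions for the demo; in practice `streamText` wires this up internally):

```typescript
import { smoothStream } from 'ai';

// The compiled code above shows the factory ignores its argument at runtime,
// so the cast only silences the internal option type.
const transform = smoothStream({ delayInMs: null })({} as any);

const writer = transform.writable.getWriter();

// Trimmed chunk shapes: real stream parts carry more fields, but the
// transform only reads `type`, `id`, and `text`.
void writer.write({ type: 'reasoning-delta', id: 'r1', text: 'thinking it through ' } as any);
void writer.write({ type: 'text-delta', id: 't1', text: 'Hello world ' } as any);
void writer.close();

// Writes are not awaited above so the demo cannot dead-lock on backpressure;
// we simply drain the readable side and print what comes out.
const reader = transform.readable.getReader();
for (;;) {
  const { value, done } = await reader.read();
  if (done) break;
  const part = value as { type: string; text: string }; // narrowed for the demo
  console.log(part.type, JSON.stringify(part.text));
}

// 6.0.12: the reasoning delta passed through as a single untouched chunk.
// 6.0.13: both streams come out word by word:
//   reasoning-delta "thinking " / "it " / "through ", then text-delta "Hello " / "world ".
```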