ai 3.2.36 → 3.2.38

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
package/README.md CHANGED
@@ -97,7 +97,7 @@ export async function POST(req: Request) {
97
97
  messages,
98
98
  });
99
99
 
100
- return result.toAIStreamResponse();
100
+ return result.toDataStreamResponse();
101
101
  }
102
102
  ```
103
103
 
package/dist/index.d.mts CHANGED
@@ -34,7 +34,7 @@ Represents the number of tokens used in a prompt and completion.
34
34
  */
35
35
  type CompletionTokenUsage$1 = {
36
36
  /**
37
- The number of tokens used in the prompt
37
+ The number of tokens used in the prompt.
38
38
  */
39
39
  promptTokens: number;
40
40
  /**
@@ -1281,7 +1281,9 @@ interface StreamTextResult<TOOLS extends Record<string, CoreTool>> {
1281
1281
  @param callbacks
1282
1282
  Stream callbacks that will be called when the stream emits events.
1283
1283
 
1284
- @returns an `AIStream` object.
1284
+ @returns A data stream.
1285
+
1286
+ @deprecated Use `toDataStreamResponse` instead.
1285
1287
  */
1286
1288
  toAIStream(callbacks?: AIStreamCallbacksAndOptions): ReadableStream<Uint8Array>;
1287
1289
  /**
@@ -1291,12 +1293,26 @@ interface StreamTextResult<TOOLS extends Record<string, CoreTool>> {
1291
1293
 
1292
1294
  @param response A Node.js response-like object (ServerResponse).
1293
1295
  @param init Optional headers and status code.
1296
+
1297
+ @deprecated Use `pipeDataStreamToResponse` instead.
1294
1298
  */
1295
1299
  pipeAIStreamToResponse(response: ServerResponse$1, init?: {
1296
1300
  headers?: Record<string, string>;
1297
1301
  status?: number;
1298
1302
  }): void;
1299
1303
  /**
1304
+ Writes data stream output to a Node.js response-like object.
1305
+ It sets a `Content-Type` header to `text/plain; charset=utf-8` and
1306
+ writes each data stream part as a separate chunk.
1307
+
1308
+ @param response A Node.js response-like object (ServerResponse).
1309
+ @param init Optional headers and status code.
1310
+ */
1311
+ pipeDataStreamToResponse(response: ServerResponse$1, init?: {
1312
+ headers?: Record<string, string>;
1313
+ status?: number;
1314
+ }): void;
1315
+ /**
1300
1316
  Writes text delta output to a Node.js response-like object.
1301
1317
  It sets a `Content-Type` header to `text/plain; charset=utf-8` and
1302
1318
  writes each text delta as a separate chunk.
@@ -1316,12 +1332,27 @@ interface StreamTextResult<TOOLS extends Record<string, CoreTool>> {
1316
1332
  You can also pass in a ResponseInit directly (deprecated).
1317
1333
 
1318
1334
  @return A response object.
1335
+
1336
+ @deprecated Use `toDataStreamResponse` instead.
1319
1337
  */
1320
1338
  toAIStreamResponse(options?: ResponseInit | {
1321
1339
  init?: ResponseInit;
1322
1340
  data?: StreamData;
1323
1341
  }): Response;
1324
1342
  /**
1343
+ Converts the result to a streamed response object with a stream data part stream.
1344
+ It can be used with the `useChat` and `useCompletion` hooks.
1345
+
1346
+ @param options An object with an init property (ResponseInit) and a data property.
1347
+ You can also pass in a ResponseInit directly (deprecated).
1348
+
1349
+ @return A response object.
1350
+ */
1351
+ toDataStreamResponse(options?: ResponseInit | {
1352
+ init?: ResponseInit;
1353
+ data?: StreamData;
1354
+ }): Response;
1355
+ /**
1325
1356
  Creates a simple text stream response.
1326
1357
  Each text delta is encoded as UTF-8 and sent as a separate chunk.
1327
1358
  Non-text-delta events are ignored.
@@ -1498,6 +1529,10 @@ declare class DefaultStreamTextResult<TOOLS extends Record<string, CoreTool>> im
1498
1529
  headers?: Record<string, string>;
1499
1530
  status?: number;
1500
1531
  }): void;
1532
+ pipeDataStreamToResponse(response: ServerResponse$1, init?: {
1533
+ headers?: Record<string, string>;
1534
+ status?: number;
1535
+ }): void;
1501
1536
  pipeTextStreamToResponse(response: ServerResponse$1, init?: {
1502
1537
  headers?: Record<string, string>;
1503
1538
  status?: number;
@@ -1506,6 +1541,10 @@ declare class DefaultStreamTextResult<TOOLS extends Record<string, CoreTool>> im
1506
1541
  init?: ResponseInit;
1507
1542
  data?: StreamData;
1508
1543
  }): Response;
1544
+ toDataStreamResponse(options?: ResponseInit | {
1545
+ init?: ResponseInit;
1546
+ data?: StreamData;
1547
+ }): Response;
1509
1548
  toTextStreamResponse(init?: ResponseInit): Response;
1510
1549
  }
1511
1550
  /**
@@ -2215,13 +2254,25 @@ Converts LangChain output streams to AIStream.
2215
2254
  The following streams are supported:
2216
2255
  - `LangChainAIMessageChunk` streams (LangChain `model.stream` output)
2217
2256
  - `string` streams (LangChain `StringOutputParser` output)
2257
+
2258
+ @deprecated Use `toDataStream` instead.
2218
2259
  */
2219
2260
  declare function toAIStream(stream: ReadableStream<LangChainStreamEvent> | ReadableStream<LangChainAIMessageChunk> | ReadableStream<string>, callbacks?: AIStreamCallbacksAndOptions): ReadableStream<any>;
2261
+ /**
2262
+ Converts LangChain output streams to a data stream.
2263
+
2264
+ The following streams are supported:
2265
+ - `LangChainAIMessageChunk` streams (LangChain `model.stream` output)
2266
+ - `string` streams (LangChain `StringOutputParser` output)
2267
+ */
2268
+ declare function toDataStream(stream: ReadableStream<LangChainStreamEvent> | ReadableStream<LangChainAIMessageChunk> | ReadableStream<string>, callbacks?: AIStreamCallbacksAndOptions): ReadableStream<any>;
2220
2269
 
2221
2270
  declare const langchainAdapter_toAIStream: typeof toAIStream;
2271
+ declare const langchainAdapter_toDataStream: typeof toDataStream;
2222
2272
  declare namespace langchainAdapter {
2223
2273
  export {
2224
2274
  langchainAdapter_toAIStream as toAIStream,
2275
+ langchainAdapter_toDataStream as toDataStream,
2225
2276
  };
2226
2277
  }
2227
2278
 
@@ -2353,6 +2404,8 @@ declare function streamToResponse(res: ReadableStream, response: ServerResponse$
2353
2404
 
2354
2405
  /**
2355
2406
  * A utility class for streaming text responses.
2407
+ *
2408
+ * @deprecated Use `streamText.toDataStreamResponse()` instead.
2356
2409
  */
2357
2410
  declare class StreamingTextResponse extends Response {
2358
2411
  constructor(res: ReadableStream, init?: ResponseInit, data?: StreamData);
package/dist/index.d.ts CHANGED
@@ -34,7 +34,7 @@ Represents the number of tokens used in a prompt and completion.
34
34
  */
35
35
  type CompletionTokenUsage$1 = {
36
36
  /**
37
- The number of tokens used in the prompt
37
+ The number of tokens used in the prompt.
38
38
  */
39
39
  promptTokens: number;
40
40
  /**
@@ -1281,7 +1281,9 @@ interface StreamTextResult<TOOLS extends Record<string, CoreTool>> {
1281
1281
  @param callbacks
1282
1282
  Stream callbacks that will be called when the stream emits events.
1283
1283
 
1284
- @returns an `AIStream` object.
1284
+ @returns A data stream.
1285
+
1286
+ @deprecated Use `toDataStreamResponse` instead.
1285
1287
  */
1286
1288
  toAIStream(callbacks?: AIStreamCallbacksAndOptions): ReadableStream<Uint8Array>;
1287
1289
  /**
@@ -1291,12 +1293,26 @@ interface StreamTextResult<TOOLS extends Record<string, CoreTool>> {
1291
1293
 
1292
1294
  @param response A Node.js response-like object (ServerResponse).
1293
1295
  @param init Optional headers and status code.
1296
+
1297
+ @deprecated Use `pipeDataStreamToResponse` instead.
1294
1298
  */
1295
1299
  pipeAIStreamToResponse(response: ServerResponse$1, init?: {
1296
1300
  headers?: Record<string, string>;
1297
1301
  status?: number;
1298
1302
  }): void;
1299
1303
  /**
1304
+ Writes data stream output to a Node.js response-like object.
1305
+ It sets a `Content-Type` header to `text/plain; charset=utf-8` and
1306
+ writes each data stream part as a separate chunk.
1307
+
1308
+ @param response A Node.js response-like object (ServerResponse).
1309
+ @param init Optional headers and status code.
1310
+ */
1311
+ pipeDataStreamToResponse(response: ServerResponse$1, init?: {
1312
+ headers?: Record<string, string>;
1313
+ status?: number;
1314
+ }): void;
1315
+ /**
1300
1316
  Writes text delta output to a Node.js response-like object.
1301
1317
  It sets a `Content-Type` header to `text/plain; charset=utf-8` and
1302
1318
  writes each text delta as a separate chunk.
@@ -1316,12 +1332,27 @@ interface StreamTextResult<TOOLS extends Record<string, CoreTool>> {
1316
1332
  You can also pass in a ResponseInit directly (deprecated).
1317
1333
 
1318
1334
  @return A response object.
1335
+
1336
+ @deprecated Use `toDataStreamResponse` instead.
1319
1337
  */
1320
1338
  toAIStreamResponse(options?: ResponseInit | {
1321
1339
  init?: ResponseInit;
1322
1340
  data?: StreamData;
1323
1341
  }): Response;
1324
1342
  /**
1343
+ Converts the result to a streamed response object with a stream data part stream.
1344
+ It can be used with the `useChat` and `useCompletion` hooks.
1345
+
1346
+ @param options An object with an init property (ResponseInit) and a data property.
1347
+ You can also pass in a ResponseInit directly (deprecated).
1348
+
1349
+ @return A response object.
1350
+ */
1351
+ toDataStreamResponse(options?: ResponseInit | {
1352
+ init?: ResponseInit;
1353
+ data?: StreamData;
1354
+ }): Response;
1355
+ /**
1325
1356
  Creates a simple text stream response.
1326
1357
  Each text delta is encoded as UTF-8 and sent as a separate chunk.
1327
1358
  Non-text-delta events are ignored.
@@ -1498,6 +1529,10 @@ declare class DefaultStreamTextResult<TOOLS extends Record<string, CoreTool>> im
1498
1529
  headers?: Record<string, string>;
1499
1530
  status?: number;
1500
1531
  }): void;
1532
+ pipeDataStreamToResponse(response: ServerResponse$1, init?: {
1533
+ headers?: Record<string, string>;
1534
+ status?: number;
1535
+ }): void;
1501
1536
  pipeTextStreamToResponse(response: ServerResponse$1, init?: {
1502
1537
  headers?: Record<string, string>;
1503
1538
  status?: number;
@@ -1506,6 +1541,10 @@ declare class DefaultStreamTextResult<TOOLS extends Record<string, CoreTool>> im
1506
1541
  init?: ResponseInit;
1507
1542
  data?: StreamData;
1508
1543
  }): Response;
1544
+ toDataStreamResponse(options?: ResponseInit | {
1545
+ init?: ResponseInit;
1546
+ data?: StreamData;
1547
+ }): Response;
1509
1548
  toTextStreamResponse(init?: ResponseInit): Response;
1510
1549
  }
1511
1550
  /**
@@ -2215,13 +2254,25 @@ Converts LangChain output streams to AIStream.
2215
2254
  The following streams are supported:
2216
2255
  - `LangChainAIMessageChunk` streams (LangChain `model.stream` output)
2217
2256
  - `string` streams (LangChain `StringOutputParser` output)
2257
+
2258
+ @deprecated Use `toDataStream` instead.
2218
2259
  */
2219
2260
  declare function toAIStream(stream: ReadableStream<LangChainStreamEvent> | ReadableStream<LangChainAIMessageChunk> | ReadableStream<string>, callbacks?: AIStreamCallbacksAndOptions): ReadableStream<any>;
2261
+ /**
2262
+ Converts LangChain output streams to a data stream.
2263
+
2264
+ The following streams are supported:
2265
+ - `LangChainAIMessageChunk` streams (LangChain `model.stream` output)
2266
+ - `string` streams (LangChain `StringOutputParser` output)
2267
+ */
2268
+ declare function toDataStream(stream: ReadableStream<LangChainStreamEvent> | ReadableStream<LangChainAIMessageChunk> | ReadableStream<string>, callbacks?: AIStreamCallbacksAndOptions): ReadableStream<any>;
2220
2269
 
2221
2270
  declare const langchainAdapter_toAIStream: typeof toAIStream;
2271
+ declare const langchainAdapter_toDataStream: typeof toDataStream;
2222
2272
  declare namespace langchainAdapter {
2223
2273
  export {
2224
2274
  langchainAdapter_toAIStream as toAIStream,
2275
+ langchainAdapter_toDataStream as toDataStream,
2225
2276
  };
2226
2277
  }
2227
2278
 
@@ -2353,6 +2404,8 @@ declare function streamToResponse(res: ReadableStream, response: ServerResponse$
2353
2404
 
2354
2405
  /**
2355
2406
  * A utility class for streaming text responses.
2407
+ *
2408
+ * @deprecated Use `streamText.toDataStreamResponse()` instead.
2356
2409
  */
2357
2410
  declare class StreamingTextResponse extends Response {
2358
2411
  constructor(res: ReadableStream, init?: ResponseInit, data?: StreamData);
package/dist/index.js CHANGED
@@ -978,12 +978,18 @@ function calculateCompletionTokenUsage(usage) {
978
978
  }
979
979
 
980
980
  // core/util/prepare-response-headers.ts
981
- function prepareResponseHeaders(init, { contentType }) {
981
+ function prepareResponseHeaders(init, {
982
+ contentType,
983
+ dataStreamVersion
984
+ }) {
982
985
  var _a;
983
986
  const headers = new Headers((_a = init == null ? void 0 : init.headers) != null ? _a : {});
984
987
  if (!headers.has("Content-Type")) {
985
988
  headers.set("Content-Type", contentType);
986
989
  }
990
+ if (dataStreamVersion !== void 0) {
991
+ headers.set("X-Vercel-AI-Data-Stream", dataStreamVersion);
992
+ }
987
993
  return headers;
988
994
  }
989
995
 
@@ -1076,7 +1082,6 @@ async function generateObject({
1076
1082
  },
1077
1083
  tracer,
1078
1084
  fn: async (span) => {
1079
- var _a2, _b;
1080
1085
  const retry = retryWithExponentialBackoff({ maxRetries });
1081
1086
  if (mode === "auto" || mode == null) {
1082
1087
  mode = model.defaultObjectGenerationMode;
@@ -1121,20 +1126,20 @@ async function generateObject({
1121
1126
  abortSignal,
1122
1127
  headers
1123
1128
  });
1129
+ if (result2.text === void 0) {
1130
+ throw new import_provider6.NoObjectGeneratedError();
1131
+ }
1124
1132
  span2.setAttributes({
1125
1133
  "ai.finishReason": result2.finishReason,
1126
1134
  "ai.usage.promptTokens": result2.usage.promptTokens,
1127
1135
  "ai.usage.completionTokens": result2.usage.completionTokens,
1128
- "ai.result.text": result2.text
1136
+ "ai.result.object": result2.text
1129
1137
  });
1130
- return result2;
1138
+ return { ...result2, objectText: result2.text };
1131
1139
  }
1132
1140
  })
1133
1141
  );
1134
- if (generateResult.text === void 0) {
1135
- throw new import_provider6.NoObjectGeneratedError();
1136
- }
1137
- result = generateResult.text;
1142
+ result = generateResult.objectText;
1138
1143
  finishReason = generateResult.finishReason;
1139
1144
  usage = generateResult.usage;
1140
1145
  warnings = generateResult.warnings;
@@ -1164,6 +1169,7 @@ async function generateObject({
1164
1169
  },
1165
1170
  tracer,
1166
1171
  fn: async (span2) => {
1172
+ var _a2, _b;
1167
1173
  const result2 = await model.doGenerate({
1168
1174
  mode: {
1169
1175
  type: "object-tool",
@@ -1180,22 +1186,21 @@ async function generateObject({
1180
1186
  abortSignal,
1181
1187
  headers
1182
1188
  });
1189
+ const objectText = (_b = (_a2 = result2.toolCalls) == null ? void 0 : _a2[0]) == null ? void 0 : _b.args;
1190
+ if (objectText === void 0) {
1191
+ throw new import_provider6.NoObjectGeneratedError();
1192
+ }
1183
1193
  span2.setAttributes({
1184
1194
  "ai.finishReason": result2.finishReason,
1185
1195
  "ai.usage.promptTokens": result2.usage.promptTokens,
1186
1196
  "ai.usage.completionTokens": result2.usage.completionTokens,
1187
- "ai.result.text": result2.text,
1188
- "ai.result.toolCalls": JSON.stringify(result2.toolCalls)
1197
+ "ai.result.object": objectText
1189
1198
  });
1190
- return result2;
1199
+ return { ...result2, objectText };
1191
1200
  }
1192
1201
  })
1193
1202
  );
1194
- const functionArgs = (_b = (_a2 = generateResult.toolCalls) == null ? void 0 : _a2[0]) == null ? void 0 : _b.args;
1195
- if (functionArgs === void 0) {
1196
- throw new import_provider6.NoObjectGeneratedError();
1197
- }
1198
- result = functionArgs;
1203
+ result = generateResult.objectText;
1199
1204
  finishReason = generateResult.finishReason;
1200
1205
  usage = generateResult.usage;
1201
1206
  warnings = generateResult.warnings;
@@ -2568,6 +2573,15 @@ var DefaultStreamTextResult = class {
2568
2573
  );
2569
2574
  break;
2570
2575
  case "finish":
2576
+ controller.enqueue(
2577
+ (0, import_ui_utils6.formatStreamPart)("finish_message", {
2578
+ finishReason: chunk.finishReason,
2579
+ usage: {
2580
+ promptTokens: chunk.usage.promptTokens,
2581
+ completionTokens: chunk.usage.completionTokens
2582
+ }
2583
+ })
2584
+ );
2571
2585
  break;
2572
2586
  default: {
2573
2587
  const exhaustiveCheck = chunkType;
@@ -2579,6 +2593,9 @@ var DefaultStreamTextResult = class {
2579
2593
  return this.fullStream.pipeThrough(callbackTransformer).pipeThrough(streamPartsTransformer).pipeThrough(new TextEncoderStream());
2580
2594
  }
2581
2595
  pipeAIStreamToResponse(response, init) {
2596
+ return this.pipeDataStreamToResponse(response, init);
2597
+ }
2598
+ pipeDataStreamToResponse(response, init) {
2582
2599
  var _a;
2583
2600
  response.writeHead((_a = init == null ? void 0 : init.status) != null ? _a : 200, {
2584
2601
  "Content-Type": "text/plain; charset=utf-8",
@@ -2625,6 +2642,9 @@ var DefaultStreamTextResult = class {
2625
2642
  read();
2626
2643
  }
2627
2644
  toAIStreamResponse(options) {
2645
+ return this.toDataStreamResponse(options);
2646
+ }
2647
+ toDataStreamResponse(options) {
2628
2648
  var _a;
2629
2649
  const init = options == null ? void 0 : "init" in options ? options.init : {
2630
2650
  headers: "headers" in options ? options.headers : void 0,
@@ -2637,7 +2657,8 @@ var DefaultStreamTextResult = class {
2637
2657
  status: (_a = init == null ? void 0 : init.status) != null ? _a : 200,
2638
2658
  statusText: init == null ? void 0 : init.statusText,
2639
2659
  headers: prepareResponseHeaders(init, {
2640
- contentType: "text/plain; charset=utf-8"
2660
+ contentType: "text/plain; charset=utf-8",
2661
+ dataStreamVersion: "v1"
2641
2662
  })
2642
2663
  });
2643
2664
  }
@@ -3454,9 +3475,13 @@ function InkeepStream(res, callbacks) {
3454
3475
  // streams/langchain-adapter.ts
3455
3476
  var langchain_adapter_exports = {};
3456
3477
  __export(langchain_adapter_exports, {
3457
- toAIStream: () => toAIStream
3478
+ toAIStream: () => toAIStream,
3479
+ toDataStream: () => toDataStream
3458
3480
  });
3459
3481
  function toAIStream(stream, callbacks) {
3482
+ return toDataStream(stream, callbacks);
3483
+ }
3484
+ function toDataStream(stream, callbacks) {
3460
3485
  return stream.pipeThrough(
3461
3486
  new TransformStream({
3462
3487
  transform: async (value, controller) => {