ai 4.0.8 → 4.0.10

This diff shows the content of publicly released versions of the package as published to a supported registry. It is provided for informational purposes only and reflects the changes between those package versions as they appear in the public registry.
package/dist/index.mjs CHANGED
@@ -7,7 +7,7 @@ var __export = (target, all) => {
 // streams/index.ts
 import {
   formatAssistantStreamPart as formatAssistantStreamPart2,
-  formatDataStreamPart as formatDataStreamPart3,
+  formatDataStreamPart as formatDataStreamPart6,
   parseAssistantStreamPart,
   parseDataStreamPart,
   processDataStream,
@@ -18,6 +18,174 @@ import { generateId as generateId2 } from "@ai-sdk/provider-utils";
 // core/index.ts
 import { jsonSchema } from "@ai-sdk/ui-utils";
 
+// core/data-stream/create-data-stream.ts
+import { formatDataStreamPart } from "@ai-sdk/ui-utils";
+function createDataStream({
+  execute,
+  onError = () => "An error occurred."
+  // mask error messages for safety by default
+}) {
+  let controller;
+  const ongoingStreamPromises = [];
+  const stream = new ReadableStream({
+    start(controllerArg) {
+      controller = controllerArg;
+    }
+  });
+  try {
+    const result = execute({
+      writeData(data) {
+        controller.enqueue(formatDataStreamPart("data", [data]));
+      },
+      writeMessageAnnotation(annotation) {
+        controller.enqueue(
+          formatDataStreamPart("message_annotations", [annotation])
+        );
+      },
+      merge(streamArg) {
+        ongoingStreamPromises.push(
+          (async () => {
+            const reader = streamArg.getReader();
+            while (true) {
+              const { done, value } = await reader.read();
+              if (done)
+                break;
+              controller.enqueue(value);
+            }
+          })().catch((error) => {
+            controller.enqueue(formatDataStreamPart("error", onError(error)));
+          })
+        );
+      },
+      onError
+    });
+    if (result) {
+      ongoingStreamPromises.push(
+        result.catch((error) => {
+          controller.enqueue(formatDataStreamPart("error", onError(error)));
+        })
+      );
+    }
+  } catch (error) {
+    controller.enqueue(formatDataStreamPart("error", onError(error)));
+  }
+  const waitForStreams = new Promise(async (resolve) => {
+    while (ongoingStreamPromises.length > 0) {
+      await ongoingStreamPromises.shift();
+    }
+    resolve();
+  });
+  waitForStreams.finally(() => {
+    controller.close();
+  });
+  return stream;
+}
+
+// core/util/prepare-response-headers.ts
+function prepareResponseHeaders(headers, {
+  contentType,
+  dataStreamVersion
+}) {
+  const responseHeaders = new Headers(headers != null ? headers : {});
+  if (!responseHeaders.has("Content-Type")) {
+    responseHeaders.set("Content-Type", contentType);
+  }
+  if (dataStreamVersion !== void 0) {
+    responseHeaders.set("X-Vercel-AI-Data-Stream", dataStreamVersion);
+  }
+  return responseHeaders;
+}
+
+// core/data-stream/create-data-stream-response.ts
+function createDataStreamResponse({
+  status,
+  statusText,
+  headers,
+  execute,
+  onError
+}) {
+  return new Response(
+    createDataStream({ execute, onError }).pipeThrough(new TextEncoderStream()),
+    {
+      status,
+      statusText,
+      headers: prepareResponseHeaders(headers, {
+        contentType: "text/plain; charset=utf-8",
+        dataStreamVersion: "v1"
+      })
+    }
+  );
+}
+
+// core/util/prepare-outgoing-http-headers.ts
+function prepareOutgoingHttpHeaders(headers, {
+  contentType,
+  dataStreamVersion
+}) {
+  const outgoingHeaders = {};
+  if (headers != null) {
+    for (const [key, value] of Object.entries(headers)) {
+      outgoingHeaders[key] = value;
+    }
+  }
+  if (outgoingHeaders["Content-Type"] == null) {
+    outgoingHeaders["Content-Type"] = contentType;
+  }
+  if (dataStreamVersion !== void 0) {
+    outgoingHeaders["X-Vercel-AI-Data-Stream"] = dataStreamVersion;
+  }
+  return outgoingHeaders;
+}
+
+// core/util/write-to-server-response.ts
+function writeToServerResponse({
+  response,
+  status,
+  statusText,
+  headers,
+  stream
+}) {
+  response.writeHead(status != null ? status : 200, statusText, headers);
+  const reader = stream.getReader();
+  const read = async () => {
+    try {
+      while (true) {
+        const { done, value } = await reader.read();
+        if (done)
+          break;
+        response.write(value);
+      }
+    } catch (error) {
+      throw error;
+    } finally {
+      response.end();
+    }
+  };
+  read();
+}
+
+// core/data-stream/pipe-data-stream-to-response.ts
+function pipeDataStreamToResponse(response, {
+  status,
+  statusText,
+  headers,
+  execute,
+  onError
+}) {
+  writeToServerResponse({
+    response,
+    status,
+    statusText,
+    headers: prepareOutgoingHttpHeaders(headers, {
+      contentType: "text/plain; charset=utf-8",
+      dataStreamVersion: "v1"
+    }),
+    stream: createDataStream({ execute, onError }).pipeThrough(
+      new TextEncoderStream()
+    )
+  });
+}
+
 // errors/invalid-argument-error.ts
 import { AISDKError } from "@ai-sdk/provider";
 var name = "AI_InvalidArgumentError";
@@ -1512,21 +1680,6 @@ function calculateLanguageModelUsage({
   };
 }
 
-// core/util/prepare-response-headers.ts
-function prepareResponseHeaders(headers, {
-  contentType,
-  dataStreamVersion
-}) {
-  const responseHeaders = new Headers(headers != null ? headers : {});
-  if (!responseHeaders.has("Content-Type")) {
-    responseHeaders.set("Content-Type", contentType);
-  }
-  if (dataStreamVersion !== void 0) {
-    responseHeaders.set("X-Vercel-AI-Data-Stream", dataStreamVersion);
-  }
-  return responseHeaders;
-}
-
 // core/generate-object/inject-json-instruction.ts
 var DEFAULT_SCHEMA_PREFIX = "JSON schema:";
 var DEFAULT_SCHEMA_SUFFIX = "You MUST answer with a JSON object that matches the JSON schema above.";
@@ -2430,53 +2583,6 @@ function now() {
   return (_b = (_a11 = globalThis == null ? void 0 : globalThis.performance) == null ? void 0 : _a11.now()) != null ? _b : Date.now();
 }
 
-// core/util/prepare-outgoing-http-headers.ts
-function prepareOutgoingHttpHeaders(headers, {
-  contentType,
-  dataStreamVersion
-}) {
-  const outgoingHeaders = {};
-  if (headers != null) {
-    for (const [key, value] of Object.entries(headers)) {
-      outgoingHeaders[key] = value;
-    }
-  }
-  if (outgoingHeaders["Content-Type"] == null) {
-    outgoingHeaders["Content-Type"] = contentType;
-  }
-  if (dataStreamVersion !== void 0) {
-    outgoingHeaders["X-Vercel-AI-Data-Stream"] = dataStreamVersion;
-  }
-  return outgoingHeaders;
-}
-
-// core/util/write-to-server-response.ts
-function writeToServerResponse({
-  response,
-  status,
-  statusText,
-  headers,
-  stream
-}) {
-  response.writeHead(status != null ? status : 200, statusText, headers);
-  const reader = stream.getReader();
-  const read = async () => {
-    try {
-      while (true) {
-        const { done, value } = await reader.read();
-        if (done)
-          break;
-        response.write(value);
-      }
-    } catch (error) {
-      throw error;
-    } finally {
-      response.end();
-    }
-  };
-  read();
-}
-
 // core/generate-object/stream-object.ts
 var originalGenerateId2 = createIdGenerator2({ prefix: "aiobj", size: 24 });
 function streamObject({
@@ -3605,7 +3711,7 @@ var DefaultGenerateTextResult = class {
 
 // core/generate-text/stream-text.ts
 import { createIdGenerator as createIdGenerator4 } from "@ai-sdk/provider-utils";
-import { formatDataStreamPart } from "@ai-sdk/ui-utils";
+import { formatDataStreamPart as formatDataStreamPart2 } from "@ai-sdk/ui-utils";
 
 // core/util/merge-streams.ts
 function mergeStreams(stream1, stream2) {
@@ -4544,12 +4650,12 @@ var DefaultStreamTextResult = class {
         const chunkType = chunk.type;
         switch (chunkType) {
           case "text-delta": {
-            controller.enqueue(formatDataStreamPart("text", chunk.textDelta));
+            controller.enqueue(formatDataStreamPart2("text", chunk.textDelta));
             break;
           }
           case "tool-call-streaming-start": {
             controller.enqueue(
-              formatDataStreamPart("tool_call_streaming_start", {
+              formatDataStreamPart2("tool_call_streaming_start", {
                 toolCallId: chunk.toolCallId,
                 toolName: chunk.toolName
               })
@@ -4558,7 +4664,7 @@ var DefaultStreamTextResult = class {
           }
           case "tool-call-delta": {
             controller.enqueue(
-              formatDataStreamPart("tool_call_delta", {
+              formatDataStreamPart2("tool_call_delta", {
                 toolCallId: chunk.toolCallId,
                 argsTextDelta: chunk.argsTextDelta
               })
@@ -4567,7 +4673,7 @@ var DefaultStreamTextResult = class {
           }
           case "tool-call": {
             controller.enqueue(
-              formatDataStreamPart("tool_call", {
+              formatDataStreamPart2("tool_call", {
                 toolCallId: chunk.toolCallId,
                 toolName: chunk.toolName,
                 args: chunk.args
@@ -4577,7 +4683,7 @@ var DefaultStreamTextResult = class {
           }
           case "tool-result": {
             controller.enqueue(
-              formatDataStreamPart("tool_result", {
+              formatDataStreamPart2("tool_result", {
                 toolCallId: chunk.toolCallId,
                 result: chunk.result
               })
@@ -4586,13 +4692,13 @@ var DefaultStreamTextResult = class {
           }
           case "error": {
             controller.enqueue(
-              formatDataStreamPart("error", getErrorMessage3(chunk.error))
+              formatDataStreamPart2("error", getErrorMessage3(chunk.error))
             );
             break;
           }
           case "step-finish": {
             controller.enqueue(
-              formatDataStreamPart("finish_step", {
+              formatDataStreamPart2("finish_step", {
                 finishReason: chunk.finishReason,
                 usage: sendUsage ? {
                   promptTokens: chunk.usage.promptTokens,
@@ -4605,7 +4711,7 @@ var DefaultStreamTextResult = class {
           }
           case "finish": {
             controller.enqueue(
-              formatDataStreamPart("finish_message", {
+              formatDataStreamPart2("finish_message", {
                 finishReason: chunk.finishReason,
                 usage: sendUsage ? {
                   promptTokens: chunk.usage.promptTokens,
@@ -4622,7 +4728,7 @@ var DefaultStreamTextResult = class {
         }
       }
     });
-    return this.fullStream.pipeThrough(callbackTransformer).pipeThrough(streamPartsTransformer).pipeThrough(new TextEncoderStream());
+    return this.fullStream.pipeThrough(callbackTransformer).pipeThrough(streamPartsTransformer);
   }
   pipeDataStreamToResponse(response, {
     status,
@@ -4654,13 +4760,21 @@ var DefaultStreamTextResult = class {
       stream: this.textStream.pipeThrough(new TextEncoderStream())
     });
   }
+  // TODO breaking change 5.0: remove pipeThrough(new TextEncoderStream())
   toDataStream(options) {
     const stream = this.toDataStreamInternal({
       getErrorMessage: options == null ? void 0 : options.getErrorMessage,
       sendUsage: options == null ? void 0 : options.sendUsage
-    });
+    }).pipeThrough(new TextEncoderStream());
     return (options == null ? void 0 : options.data) ? mergeStreams(options == null ? void 0 : options.data.stream, stream) : stream;
   }
+  mergeIntoDataStream(writer) {
+    writer.merge(
+      this.toDataStreamInternal({
+        getErrorMessage: writer.onError
+      })
+    );
+  }
   toDataStreamResponse({
     headers,
     status,
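
The new `mergeIntoDataStream(writer)` method on stream-text results forwards the result's data stream into a data-stream writer via `writer.merge(...)`, reusing the writer's `onError` for error masking. A hedged sketch combining it with `createDataStreamResponse` from earlier in this diff; the provider import, model id, and request handling are placeholder assumptions:

```js
// Illustrative sketch: mergeIntoDataStream and createDataStreamResponse come from
// this diff; the provider/model and request handling are placeholder assumptions.
import { createDataStreamResponse, streamText } from "ai";
import { openai } from "@ai-sdk/openai";

export async function POST(request) {
  const { prompt } = await request.json();
  return createDataStreamResponse({
    execute: (writer) => {
      writer.writeData({ status: "started" });
      const result = streamText({
        model: openai("gpt-4o-mini"),
        prompt,
      });
      // Merge the text/tool-call stream parts into the same data stream.
      result.mergeIntoDataStream(writer);
    },
  });
}
```
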
@@ -5014,9 +5128,11 @@ function AssistantResponse({ threadId, messageId }, process2) {
 // streams/langchain-adapter.ts
 var langchain_adapter_exports = {};
 __export(langchain_adapter_exports, {
+  mergeIntoDataStream: () => mergeIntoDataStream,
   toDataStream: () => toDataStream,
   toDataStreamResponse: () => toDataStreamResponse
 });
+import { formatDataStreamPart as formatDataStreamPart3 } from "@ai-sdk/ui-utils";
 
 // streams/stream-callbacks.ts
 function createCallbacksTransformer(callbacks = {}) {
@@ -5047,87 +5163,8 @@ function createCallbacksTransformer(callbacks = {}) {
   });
 }
 
-// streams/stream-data.ts
-import { formatDataStreamPart as formatDataStreamPart2 } from "@ai-sdk/ui-utils";
-
-// util/constants.ts
-var HANGING_STREAM_WARNING_TIME_MS = 15 * 1e3;
-
-// streams/stream-data.ts
-var StreamData = class {
-  constructor() {
-    this.encoder = new TextEncoder();
-    this.controller = null;
-    this.isClosed = false;
-    this.warningTimeout = null;
-    const self = this;
-    this.stream = new ReadableStream({
-      start: async (controller) => {
-        self.controller = controller;
-        if (process.env.NODE_ENV === "development") {
-          self.warningTimeout = setTimeout(() => {
-            console.warn(
-              "The data stream is hanging. Did you forget to close it with `data.close()`?"
-            );
-          }, HANGING_STREAM_WARNING_TIME_MS);
-        }
-      },
-      pull: (controller) => {
-      },
-      cancel: (reason) => {
-        this.isClosed = true;
-      }
-    });
-  }
-  async close() {
-    if (this.isClosed) {
-      throw new Error("Data Stream has already been closed.");
-    }
-    if (!this.controller) {
-      throw new Error("Stream controller is not initialized.");
-    }
-    this.controller.close();
-    this.isClosed = true;
-    if (this.warningTimeout) {
-      clearTimeout(this.warningTimeout);
-    }
-  }
-  append(value) {
-    if (this.isClosed) {
-      throw new Error("Data Stream has already been closed.");
-    }
-    if (!this.controller) {
-      throw new Error("Stream controller is not initialized.");
-    }
-    this.controller.enqueue(
-      this.encoder.encode(formatDataStreamPart2("data", [value]))
-    );
-  }
-  appendMessageAnnotation(value) {
-    if (this.isClosed) {
-      throw new Error("Data Stream has already been closed.");
-    }
-    if (!this.controller) {
-      throw new Error("Stream controller is not initialized.");
-    }
-    this.controller.enqueue(
-      this.encoder.encode(formatDataStreamPart2("message_annotations", [value]))
-    );
-  }
-};
-function createStreamDataTransformer() {
-  const encoder = new TextEncoder();
-  const decoder = new TextDecoder();
-  return new TransformStream({
-    transform: async (chunk, controller) => {
-      const message = decoder.decode(chunk);
-      controller.enqueue(encoder.encode(formatDataStreamPart2("text", message)));
-    }
-  });
-}
-
 // streams/langchain-adapter.ts
-function toDataStream(stream, callbacks) {
+function toDataStreamInternal(stream, callbacks) {
   return stream.pipeThrough(
     new TransformStream({
       transform: async (value, controller) => {
@@ -5148,11 +5185,25 @@ function toDataStream(stream, callbacks) {
         forwardAIMessageChunk(value, controller);
       }
     })
-  ).pipeThrough(createCallbacksTransformer(callbacks)).pipeThrough(createStreamDataTransformer());
+  ).pipeThrough(createCallbacksTransformer(callbacks)).pipeThrough(new TextDecoderStream()).pipeThrough(
+    new TransformStream({
+      transform: async (chunk, controller) => {
+        controller.enqueue(formatDataStreamPart3("text", chunk));
+      }
+    })
+  );
+}
+function toDataStream(stream, callbacks) {
+  return toDataStreamInternal(stream, callbacks).pipeThrough(
+    new TextEncoderStream()
+  );
 }
 function toDataStreamResponse(stream, options) {
   var _a11;
-  const dataStream = toDataStream(stream, options == null ? void 0 : options.callbacks);
+  const dataStream = toDataStreamInternal(
+    stream,
+    options == null ? void 0 : options.callbacks
+  ).pipeThrough(new TextEncoderStream());
   const data = options == null ? void 0 : options.data;
   const init = options == null ? void 0 : options.init;
   const responseStream = data ? mergeStreams(data.stream, dataStream) : dataStream;
@@ -5165,6 +5216,9 @@ function toDataStreamResponse(stream, options) {
     })
   });
 }
+function mergeIntoDataStream(stream, options) {
+  options.dataStream.merge(toDataStreamInternal(stream, options.callbacks));
+}
 function forwardAIMessageChunk(chunk, controller) {
   if (typeof chunk.content === "string") {
     controller.enqueue(chunk.content);
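
The LangChain adapter gains the same capability: the new `mergeIntoDataStream(stream, options)` pipes a LangChain stream into an existing data-stream writer via `options.dataStream.merge(...)`. A sketch assuming the adapter namespace is still exported as `LangChainAdapter` (unchanged in this diff) and using a placeholder LangChain model:

```js
// Illustrative sketch: mergeIntoDataStream and its { dataStream } option come from
// this diff; the LangChain model setup is a placeholder assumption.
import { createDataStreamResponse, LangChainAdapter } from "ai";
import { ChatOpenAI } from "@langchain/openai";

export async function POST(request) {
  const { prompt } = await request.json();
  return createDataStreamResponse({
    execute: async (writer) => {
      const model = new ChatOpenAI({ model: "gpt-4o-mini" });
      const stream = await model.stream(prompt);
      // Forward the LangChain chunks into the shared data stream.
      LangChainAdapter.mergeIntoDataStream(stream, { dataStream: writer });
    },
  });
}
```
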
@@ -5181,11 +5235,13 @@ function forwardAIMessageChunk(chunk, controller) {
 // streams/llamaindex-adapter.ts
 var llamaindex_adapter_exports = {};
 __export(llamaindex_adapter_exports, {
+  mergeIntoDataStream: () => mergeIntoDataStream2,
   toDataStream: () => toDataStream2,
   toDataStreamResponse: () => toDataStreamResponse2
 });
 import { convertAsyncIteratorToReadableStream } from "@ai-sdk/provider-utils";
-function toDataStream2(stream, callbacks) {
+import { formatDataStreamPart as formatDataStreamPart4 } from "@ai-sdk/ui-utils";
+function toDataStreamInternal2(stream, callbacks) {
   const trimStart = trimStartOfStream();
   return convertAsyncIteratorToReadableStream(stream[Symbol.asyncIterator]()).pipeThrough(
     new TransformStream({
@@ -5193,12 +5249,25 @@ function toDataStream2(stream, callbacks) {
         controller.enqueue(trimStart(message.delta));
       }
     })
-  ).pipeThrough(createCallbacksTransformer(callbacks)).pipeThrough(createStreamDataTransformer());
+  ).pipeThrough(createCallbacksTransformer(callbacks)).pipeThrough(new TextDecoderStream()).pipeThrough(
+    new TransformStream({
+      transform: async (chunk, controller) => {
+        controller.enqueue(formatDataStreamPart4("text", chunk));
+      }
+    })
+  );
+}
+function toDataStream2(stream, callbacks) {
+  return toDataStreamInternal2(stream, callbacks).pipeThrough(
+    new TextEncoderStream()
+  );
 }
 function toDataStreamResponse2(stream, options = {}) {
   var _a11;
   const { init, data, callbacks } = options;
-  const dataStream = toDataStream2(stream, callbacks);
+  const dataStream = toDataStreamInternal2(stream, callbacks).pipeThrough(
+    new TextEncoderStream()
+  );
   const responseStream = data ? mergeStreams(data.stream, dataStream) : dataStream;
   return new Response(responseStream, {
     status: (_a11 = init == null ? void 0 : init.status) != null ? _a11 : 200,
@@ -5220,6 +5292,75 @@ function trimStartOfStream() {
     return text2;
   };
 }
+
+// streams/stream-data.ts
+import { formatDataStreamPart as formatDataStreamPart5 } from "@ai-sdk/ui-utils";
+
+// util/constants.ts
+var HANGING_STREAM_WARNING_TIME_MS = 15 * 1e3;
+
+// streams/stream-data.ts
+var StreamData = class {
+  constructor() {
+    this.encoder = new TextEncoder();
+    this.controller = null;
+    this.isClosed = false;
+    this.warningTimeout = null;
+    const self = this;
+    this.stream = new ReadableStream({
+      start: async (controller) => {
+        self.controller = controller;
+        if (process.env.NODE_ENV === "development") {
+          self.warningTimeout = setTimeout(() => {
+            console.warn(
+              "The data stream is hanging. Did you forget to close it with `data.close()`?"
+            );
+          }, HANGING_STREAM_WARNING_TIME_MS);
+        }
+      },
+      pull: (controller) => {
+      },
+      cancel: (reason) => {
+        this.isClosed = true;
+      }
+    });
+  }
+  async close() {
+    if (this.isClosed) {
+      throw new Error("Data Stream has already been closed.");
+    }
+    if (!this.controller) {
+      throw new Error("Stream controller is not initialized.");
+    }
+    this.controller.close();
+    this.isClosed = true;
+    if (this.warningTimeout) {
+      clearTimeout(this.warningTimeout);
+    }
+  }
+  append(value) {
+    if (this.isClosed) {
+      throw new Error("Data Stream has already been closed.");
+    }
+    if (!this.controller) {
+      throw new Error("Stream controller is not initialized.");
+    }
+    this.controller.enqueue(
+      this.encoder.encode(formatDataStreamPart5("data", [value]))
+    );
+  }
+  appendMessageAnnotation(value) {
+    if (this.isClosed) {
+      throw new Error("Data Stream has already been closed.");
+    }
+    if (!this.controller) {
+      throw new Error("Stream controller is not initialized.");
+    }
+    this.controller.enqueue(
+      this.encoder.encode(formatDataStreamPart5("message_annotations", [value]))
+    );
+  }
+};
 export {
   AISDKError10 as AISDKError,
   APICallError2 as APICallError,
@@ -5249,20 +5390,22 @@ export {
   UnsupportedFunctionalityError2 as UnsupportedFunctionalityError,
   convertToCoreMessages,
   cosineSimilarity,
-  createStreamDataTransformer,
+  createDataStream,
+  createDataStreamResponse,
   embed,
   embedMany,
   experimental_createProviderRegistry,
   experimental_customProvider,
   experimental_wrapLanguageModel,
   formatAssistantStreamPart2 as formatAssistantStreamPart,
-  formatDataStreamPart3 as formatDataStreamPart,
+  formatDataStreamPart6 as formatDataStreamPart,
   generateId2 as generateId,
   generateObject,
   generateText,
   jsonSchema,
   parseAssistantStreamPart,
   parseDataStreamPart,
+  pipeDataStreamToResponse,
   processDataStream,
   processTextStream,
   streamObject,