ai 4.0.8 → 4.0.10

This diff shows the changes between publicly released versions of this package as they appear in their public registry. It is provided for informational purposes only.
package/CHANGELOG.md CHANGED
@@ -1,5 +1,17 @@
  # ai
 
+ ## 4.0.10
+
+ ### Patch Changes
+
+ - 913872d: fix (ai/core): track promise from async createDataStream.execute
+
+ ## 4.0.9
+
+ ### Patch Changes
+
+ - fda9695: feat (ai/core): reworked data stream management
+
  ## 4.0.8
 
  ### Patch Changes
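The 4.0.9 entry introduces the data stream API declared in the type definitions below (`createDataStream`, `createDataStreamResponse`, `pipeDataStreamToResponse`, `DataStreamWriter`), and the 4.0.10 entry makes the returned stream track the promise of an async `execute` callback. A minimal sketch of such an async `execute`, based only on the declared signatures; the awaited delay stands in for real async work:

```ts
import { createDataStream } from 'ai';

// Minimal sketch based on the createDataStream declaration below; the delay
// is a placeholder for real async work.
const delay = (ms: number) =>
  new Promise<void>((resolve) => setTimeout(resolve, ms));

const stream = createDataStream({
  // Per the 4.0.10 changelog entry above, the returned stream tracks this
  // promise, so the async work is awaited before the stream completes and
  // rejections surface through onError.
  execute: async (writer) => {
    await delay(100);
    writer.writeData({ status: 'ready' });
  },
  onError: (error) =>
    error instanceof Error ? error.message : String(error),
});
```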
package/dist/index.d.mts CHANGED
@@ -1,12 +1,48 @@
- import { ToolInvocation, Attachment, Schema, DeepPartial, JSONValue as JSONValue$1, AssistantMessage, DataMessage } from '@ai-sdk/ui-utils';
+ import { DataStreamString, ToolInvocation, Attachment, Schema, DeepPartial, JSONValue as JSONValue$1, AssistantMessage, DataMessage } from '@ai-sdk/ui-utils';
  export { AssistantMessage, AssistantStatus, Attachment, ChatRequest, ChatRequestOptions, CreateMessage, DataMessage, DataStreamPart, DeepPartial, IdGenerator, JSONValue, Message, RequestOptions, Schema, ToolInvocation, UseAssistantOptions, formatAssistantStreamPart, formatDataStreamPart, jsonSchema, parseAssistantStreamPart, parseDataStreamPart, processDataStream, processTextStream } from '@ai-sdk/ui-utils';
  export { ToolCall as CoreToolCall, ToolResult as CoreToolResult, generateId } from '@ai-sdk/provider-utils';
- import { AttributeValue, Tracer } from '@opentelemetry/api';
- import { EmbeddingModelV1, EmbeddingModelV1Embedding, LanguageModelV1, LanguageModelV1FinishReason, LanguageModelV1LogProbs, LanguageModelV1CallWarning, LanguageModelV1ProviderMetadata, JSONValue, LanguageModelV1CallOptions, NoSuchModelError, AISDKError } from '@ai-sdk/provider';
+ import { JSONValue, EmbeddingModelV1, EmbeddingModelV1Embedding, LanguageModelV1, LanguageModelV1FinishReason, LanguageModelV1LogProbs, LanguageModelV1CallWarning, LanguageModelV1ProviderMetadata, LanguageModelV1CallOptions, NoSuchModelError, AISDKError } from '@ai-sdk/provider';
  export { AISDKError, APICallError, EmptyResponseBodyError, InvalidPromptError, InvalidResponseDataError, JSONParseError, LanguageModelV1, LanguageModelV1CallOptions, LanguageModelV1Prompt, LanguageModelV1StreamPart, LoadAPIKeyError, NoContentGeneratedError, NoSuchModelError, TypeValidationError, UnsupportedFunctionalityError } from '@ai-sdk/provider';
+ import { ServerResponse } from 'node:http';
+ import { AttributeValue, Tracer } from '@opentelemetry/api';
  import { z } from 'zod';
- import { ServerResponse } from 'http';
- import { ServerResponse as ServerResponse$1 } from 'node:http';
+ import { ServerResponse as ServerResponse$1 } from 'http';
+
+ interface DataStreamWriter {
+ /**
+ * Appends a data part to the stream.
+ */
+ writeData(value: JSONValue): void;
+ /**
+ * Appends a message annotation to the stream.
+ */
+ writeMessageAnnotation(value: JSONValue): void;
+ /**
+ * Merges the contents of another stream to this stream.
+ */
+ merge(stream: ReadableStream<DataStreamString>): void;
+ /**
+ * Error handler that is used by the data stream writer.
+ * This is intended for forwarding when merging streams
+ * to prevent duplicated error masking.
+ */
+ onError: ((error: unknown) => string) | undefined;
+ }
+
+ declare function createDataStream({ execute, onError, }: {
+ execute: (dataStream: DataStreamWriter) => Promise<void> | void;
+ onError?: (error: unknown) => string;
+ }): ReadableStream<DataStreamString>;
+
+ declare function createDataStreamResponse({ status, statusText, headers, execute, onError, }: ResponseInit & {
+ execute: (dataStream: DataStreamWriter) => Promise<void> | void;
+ onError?: (error: unknown) => string;
+ }): Response;
+
+ declare function pipeDataStreamToResponse(response: ServerResponse, { status, statusText, headers, execute, onError, }: ResponseInit & {
+ execute: (writer: DataStreamWriter) => Promise<void> | void;
+ onError?: (error: unknown) => string;
+ }): void;
 
  /**
  * Telemetry configuration.
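The hunk above adds the `DataStreamWriter` interface and the three entry points that replace the `StreamData` class deprecated further down in this diff. A sketch of a handler built on `createDataStreamResponse` as declared above; the `POST`/`Request`/`Response` handler framing is an assumption, not something this diff prescribes:

```ts
import { createDataStreamResponse } from 'ai';

// Sketch of a request handler using createDataStreamResponse as declared
// above; the POST/Request/Response framing is an assumed setup.
export function POST(_req: Request): Response {
  return createDataStreamResponse({
    status: 200,
    execute: (dataStream) => {
      // Write custom JSON data parts and message annotations to the stream.
      dataStream.writeData({ step: 'started' });
      dataStream.writeMessageAnnotation({ startedAt: Date.now() });
    },
    // Converts errors to the string that is sent to the client.
    onError: (error) =>
      error instanceof Error ? error.message : 'Unknown error',
  });
}
```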
@@ -943,7 +979,7 @@ interface StreamObjectResult<PARTIAL, RESULT, ELEMENT_STREAM> {
  @param response A Node.js response-like object (ServerResponse).
  @param init Optional headers, status code, and status text.
  */
- pipeTextStreamToResponse(response: ServerResponse, init?: ResponseInit): void;
+ pipeTextStreamToResponse(response: ServerResponse$1, init?: ResponseInit): void;
  /**
  Creates a simple text stream response.
  The response has a `Content-Type` header set to `text/plain; charset=utf-8`.
@@ -1605,6 +1641,8 @@ changing the tool call and result types in the result.
 
  /**
  * A stream wrapper to send custom JSON-encoded data back to the client.
+ *
+ * @deprecated Please use `createDataStream`, `createDataStreamResponse`, and `pipeDataStreamToResponse` instead.
  */
  declare class StreamData {
  private encoder;
@@ -1617,11 +1655,6 @@ declare class StreamData {
  append(value: JSONValue$1): void;
  appendMessageAnnotation(value: JSONValue$1): void;
  }
- /**
- * A TransformStream for LLMs that do not have their own transform stream handlers managing encoding (e.g. OpenAIStream has one for function call handling).
- * This assumes every chunk is a 'text' chunk.
- */
- declare function createStreamDataTransformer(): TransformStream<any, any>;
 
  /**
  A result object for accessing different stream types and additional information.
@@ -1719,6 +1752,12 @@ interface StreamTextResult<TOOLS extends Record<string, CoreTool>> {
  getErrorMessage?: (error: unknown) => string;
  sendUsage?: boolean;
  }): ReadableStream<Uint8Array>;
+ /**
+ * Merges the result as a data stream into another data stream.
+ *
+ * @param dataStream A data stream writer.
+ */
+ mergeIntoDataStream(dataStream: DataStreamWriter): void;
  /**
  Writes data stream output to a Node.js response-like object.
 
@@ -1730,7 +1769,7 @@ interface StreamTextResult<TOOLS extends Record<string, CoreTool>> {
  @param options.getErrorMessage An optional function that converts an error to an error message.
  @param options.sendUsage Whether to send the usage information to the client. Defaults to true.
  */
- pipeDataStreamToResponse(response: ServerResponse$1, options?: ResponseInit & {
+ pipeDataStreamToResponse(response: ServerResponse, options?: ResponseInit & {
  data?: StreamData;
  getErrorMessage?: (error: unknown) => string;
  sendUsage?: boolean;
@@ -1743,7 +1782,7 @@ interface StreamTextResult<TOOLS extends Record<string, CoreTool>> {
  @param response A Node.js response-like object (ServerResponse).
  @param init Optional headers, status code, and status text.
  */
- pipeTextStreamToResponse(response: ServerResponse$1, init?: ResponseInit): void;
+ pipeTextStreamToResponse(response: ServerResponse, init?: ResponseInit): void;
  /**
  Converts the result to a streamed response object with a stream data part stream.
  It can be used with the `useChat` and `useCompletion` hooks.
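`streamText` results gain a `mergeIntoDataStream` method in the hunks above, which forwards the result into a `DataStreamWriter` instead of producing its own response. A sketch combining it with `createDataStreamResponse`; the provider import and model id are illustrative assumptions:

```ts
import { createDataStreamResponse, streamText } from 'ai';
// Assumed provider package and model id, used only for illustration.
import { openai } from '@ai-sdk/openai';

export function handleChat(): Response {
  return createDataStreamResponse({
    execute: (dataStream) => {
      dataStream.writeData({ status: 'generating' });

      const result = streamText({
        model: openai('gpt-4o-mini'),
        prompt: 'Write a one-line greeting.',
      });

      // Forward the streamed text and metadata into the surrounding data
      // stream (the mergeIntoDataStream method added in this diff).
      result.mergeIntoDataStream(dataStream);
    },
  });
}
```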
@@ -2255,15 +2294,20 @@ The following streams are supported:
  - `LangChainAIMessageChunk` streams (LangChain `model.stream` output)
  - `string` streams (LangChain `StringOutputParser` output)
  */
- declare function toDataStream$1(stream: ReadableStream<LangChainStreamEvent> | ReadableStream<LangChainAIMessageChunk> | ReadableStream<string>, callbacks?: StreamCallbacks): ReadableStream<any>;
+ declare function toDataStream$1(stream: ReadableStream<LangChainStreamEvent> | ReadableStream<LangChainAIMessageChunk> | ReadableStream<string>, callbacks?: StreamCallbacks): ReadableStream<Uint8Array>;
  declare function toDataStreamResponse$1(stream: ReadableStream<LangChainStreamEvent> | ReadableStream<LangChainAIMessageChunk> | ReadableStream<string>, options?: {
  init?: ResponseInit;
  data?: StreamData;
  callbacks?: StreamCallbacks;
  }): Response;
+ declare function mergeIntoDataStream$1(stream: ReadableStream<LangChainStreamEvent> | ReadableStream<LangChainAIMessageChunk> | ReadableStream<string>, options: {
+ dataStream: DataStreamWriter;
+ callbacks?: StreamCallbacks;
+ }): void;
 
  declare namespace langchainAdapter {
  export {
+ mergeIntoDataStream$1 as mergeIntoDataStream,
  toDataStream$1 as toDataStream,
  toDataStreamResponse$1 as toDataStreamResponse,
  };
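The LangChain adapter (and the LlamaIndex adapter in the next hunk) gains a matching `mergeIntoDataStream` export. A sketch using a plain `ReadableStream<string>`, one of the input types the declaration above accepts, in place of actual LangChain output:

```ts
import { createDataStreamResponse, LangChainAdapter } from 'ai';

export function handleLangChain(): Response {
  return createDataStreamResponse({
    execute: (dataStream) => {
      // A plain string stream stands in for LangChain StringOutputParser
      // output, which the mergeIntoDataStream signature above accepts.
      const textStream = new ReadableStream<string>({
        start(controller) {
          controller.enqueue('Hello from the adapter');
          controller.close();
        },
      });

      LangChainAdapter.mergeIntoDataStream(textStream, { dataStream });
    },
  });
}
```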
@@ -2272,20 +2316,26 @@ declare namespace langchainAdapter {
  type EngineResponse = {
  delta: string;
  };
- declare function toDataStream(stream: AsyncIterable<EngineResponse>, callbacks?: StreamCallbacks): ReadableStream<any>;
+ declare function toDataStream(stream: AsyncIterable<EngineResponse>, callbacks?: StreamCallbacks): ReadableStream<Uint8Array>;
  declare function toDataStreamResponse(stream: AsyncIterable<EngineResponse>, options?: {
  init?: ResponseInit;
  data?: StreamData;
  callbacks?: StreamCallbacks;
  }): Response;
+ declare function mergeIntoDataStream(stream: AsyncIterable<EngineResponse>, options: {
+ dataStream: DataStreamWriter;
+ callbacks?: StreamCallbacks;
+ }): void;
 
+ declare const llamaindexAdapter_mergeIntoDataStream: typeof mergeIntoDataStream;
  declare const llamaindexAdapter_toDataStream: typeof toDataStream;
  declare const llamaindexAdapter_toDataStreamResponse: typeof toDataStreamResponse;
  declare namespace llamaindexAdapter {
  export {
+ llamaindexAdapter_mergeIntoDataStream as mergeIntoDataStream,
  llamaindexAdapter_toDataStream as toDataStream,
  llamaindexAdapter_toDataStreamResponse as toDataStreamResponse,
  };
  }
 
- export { AssistantContent, AssistantResponse, CallWarning, CoreAssistantMessage, CoreMessage, CoreSystemMessage, CoreTool, ToolCallUnion as CoreToolCallUnion, CoreToolChoice, CoreToolMessage, ToolResultUnion as CoreToolResultUnion, CoreUserMessage, DataContent, DownloadError, EmbedManyResult, EmbedResult, Embedding, EmbeddingModel, EmbeddingModelUsage, Experimental_LanguageModelV1Middleware, FilePart, FinishReason, GenerateObjectResult, GenerateTextResult, ImagePart, InvalidArgumentError, InvalidDataContentError, InvalidMessageRoleError, InvalidToolArgumentsError, langchainAdapter as LangChainAdapter, LanguageModel, LanguageModelRequestMetadata, LanguageModelResponseMetadata, LanguageModelUsage, llamaindexAdapter as LlamaIndexAdapter, LogProbs, MessageConversionError, NoObjectGeneratedError, NoSuchProviderError, NoSuchToolError, ObjectStreamPart, output as Output, Provider, ProviderMetadata, RetryError, StepResult, StreamData, StreamObjectResult, StreamTextResult, TextPart, TextStreamPart, ToolCallPart, ToolContent, ToolExecutionOptions, ToolResultPart, UserContent, convertToCoreMessages, cosineSimilarity, createStreamDataTransformer, embed, embedMany, experimental_createProviderRegistry, experimental_customProvider, experimental_wrapLanguageModel, generateObject, generateText, streamObject, streamText, tool };
+ export { AssistantContent, AssistantResponse, CallWarning, CoreAssistantMessage, CoreMessage, CoreSystemMessage, CoreTool, ToolCallUnion as CoreToolCallUnion, CoreToolChoice, CoreToolMessage, ToolResultUnion as CoreToolResultUnion, CoreUserMessage, DataContent, DataStreamWriter, DownloadError, EmbedManyResult, EmbedResult, Embedding, EmbeddingModel, EmbeddingModelUsage, Experimental_LanguageModelV1Middleware, FilePart, FinishReason, GenerateObjectResult, GenerateTextResult, ImagePart, InvalidArgumentError, InvalidDataContentError, InvalidMessageRoleError, InvalidToolArgumentsError, langchainAdapter as LangChainAdapter, LanguageModel, LanguageModelRequestMetadata, LanguageModelResponseMetadata, LanguageModelUsage, llamaindexAdapter as LlamaIndexAdapter, LogProbs, MessageConversionError, NoObjectGeneratedError, NoSuchProviderError, NoSuchToolError, ObjectStreamPart, output as Output, Provider, ProviderMetadata, RetryError, StepResult, StreamData, StreamObjectResult, StreamTextResult, TextPart, TextStreamPart, ToolCallPart, ToolContent, ToolExecutionOptions, ToolResultPart, UserContent, convertToCoreMessages, cosineSimilarity, createDataStream, createDataStreamResponse, embed, embedMany, experimental_createProviderRegistry, experimental_customProvider, experimental_wrapLanguageModel, generateObject, generateText, pipeDataStreamToResponse, streamObject, streamText, tool };
package/dist/index.d.ts CHANGED
@@ -1,12 +1,48 @@
1
- import { ToolInvocation, Attachment, Schema, DeepPartial, JSONValue as JSONValue$1, AssistantMessage, DataMessage } from '@ai-sdk/ui-utils';
1
+ import { DataStreamString, ToolInvocation, Attachment, Schema, DeepPartial, JSONValue as JSONValue$1, AssistantMessage, DataMessage } from '@ai-sdk/ui-utils';
2
2
  export { AssistantMessage, AssistantStatus, Attachment, ChatRequest, ChatRequestOptions, CreateMessage, DataMessage, DataStreamPart, DeepPartial, IdGenerator, JSONValue, Message, RequestOptions, Schema, ToolInvocation, UseAssistantOptions, formatAssistantStreamPart, formatDataStreamPart, jsonSchema, parseAssistantStreamPart, parseDataStreamPart, processDataStream, processTextStream } from '@ai-sdk/ui-utils';
3
3
  export { ToolCall as CoreToolCall, ToolResult as CoreToolResult, generateId } from '@ai-sdk/provider-utils';
4
- import { AttributeValue, Tracer } from '@opentelemetry/api';
5
- import { EmbeddingModelV1, EmbeddingModelV1Embedding, LanguageModelV1, LanguageModelV1FinishReason, LanguageModelV1LogProbs, LanguageModelV1CallWarning, LanguageModelV1ProviderMetadata, JSONValue, LanguageModelV1CallOptions, NoSuchModelError, AISDKError } from '@ai-sdk/provider';
4
+ import { JSONValue, EmbeddingModelV1, EmbeddingModelV1Embedding, LanguageModelV1, LanguageModelV1FinishReason, LanguageModelV1LogProbs, LanguageModelV1CallWarning, LanguageModelV1ProviderMetadata, LanguageModelV1CallOptions, NoSuchModelError, AISDKError } from '@ai-sdk/provider';
6
5
  export { AISDKError, APICallError, EmptyResponseBodyError, InvalidPromptError, InvalidResponseDataError, JSONParseError, LanguageModelV1, LanguageModelV1CallOptions, LanguageModelV1Prompt, LanguageModelV1StreamPart, LoadAPIKeyError, NoContentGeneratedError, NoSuchModelError, TypeValidationError, UnsupportedFunctionalityError } from '@ai-sdk/provider';
6
+ import { ServerResponse } from 'node:http';
7
+ import { AttributeValue, Tracer } from '@opentelemetry/api';
7
8
  import { z } from 'zod';
8
- import { ServerResponse } from 'http';
9
- import { ServerResponse as ServerResponse$1 } from 'node:http';
9
+ import { ServerResponse as ServerResponse$1 } from 'http';
10
+
11
+ interface DataStreamWriter {
12
+ /**
13
+ * Appends a data part to the stream.
14
+ */
15
+ writeData(value: JSONValue): void;
16
+ /**
17
+ * Appends a message annotation to the stream.
18
+ */
19
+ writeMessageAnnotation(value: JSONValue): void;
20
+ /**
21
+ * Merges the contents of another stream to this stream.
22
+ */
23
+ merge(stream: ReadableStream<DataStreamString>): void;
24
+ /**
25
+ * Error handler that is used by the data stream writer.
26
+ * This is intended for forwarding when merging streams
27
+ * to prevent duplicated error masking.
28
+ */
29
+ onError: ((error: unknown) => string) | undefined;
30
+ }
31
+
32
+ declare function createDataStream({ execute, onError, }: {
33
+ execute: (dataStream: DataStreamWriter) => Promise<void> | void;
34
+ onError?: (error: unknown) => string;
35
+ }): ReadableStream<DataStreamString>;
36
+
37
+ declare function createDataStreamResponse({ status, statusText, headers, execute, onError, }: ResponseInit & {
38
+ execute: (dataStream: DataStreamWriter) => Promise<void> | void;
39
+ onError?: (error: unknown) => string;
40
+ }): Response;
41
+
42
+ declare function pipeDataStreamToResponse(response: ServerResponse, { status, statusText, headers, execute, onError, }: ResponseInit & {
43
+ execute: (writer: DataStreamWriter) => Promise<void> | void;
44
+ onError?: (error: unknown) => string;
45
+ }): void;
10
46
 
11
47
  /**
12
48
  * Telemetry configuration.
@@ -943,7 +979,7 @@ interface StreamObjectResult<PARTIAL, RESULT, ELEMENT_STREAM> {
943
979
  @param response A Node.js response-like object (ServerResponse).
944
980
  @param init Optional headers, status code, and status text.
945
981
  */
946
- pipeTextStreamToResponse(response: ServerResponse, init?: ResponseInit): void;
982
+ pipeTextStreamToResponse(response: ServerResponse$1, init?: ResponseInit): void;
947
983
  /**
948
984
  Creates a simple text stream response.
949
985
  The response has a `Content-Type` header set to `text/plain; charset=utf-8`.
@@ -1605,6 +1641,8 @@ changing the tool call and result types in the result.
1605
1641
 
1606
1642
  /**
1607
1643
  * A stream wrapper to send custom JSON-encoded data back to the client.
1644
+ *
1645
+ * @deprecated Please use `createDataStream`, `createDataStreamResponse`, and `pipeDataStreamToResponse` instead.
1608
1646
  */
1609
1647
  declare class StreamData {
1610
1648
  private encoder;
@@ -1617,11 +1655,6 @@ declare class StreamData {
1617
1655
  append(value: JSONValue$1): void;
1618
1656
  appendMessageAnnotation(value: JSONValue$1): void;
1619
1657
  }
1620
- /**
1621
- * A TransformStream for LLMs that do not have their own transform stream handlers managing encoding (e.g. OpenAIStream has one for function call handling).
1622
- * This assumes every chunk is a 'text' chunk.
1623
- */
1624
- declare function createStreamDataTransformer(): TransformStream<any, any>;
1625
1658
 
1626
1659
  /**
1627
1660
  A result object for accessing different stream types and additional information.
@@ -1719,6 +1752,12 @@ interface StreamTextResult<TOOLS extends Record<string, CoreTool>> {
1719
1752
  getErrorMessage?: (error: unknown) => string;
1720
1753
  sendUsage?: boolean;
1721
1754
  }): ReadableStream<Uint8Array>;
1755
+ /**
1756
+ * Merges the result as a data stream into another data stream.
1757
+ *
1758
+ * @param dataStream A data stream writer.
1759
+ */
1760
+ mergeIntoDataStream(dataStream: DataStreamWriter): void;
1722
1761
  /**
1723
1762
  Writes data stream output to a Node.js response-like object.
1724
1763
 
@@ -1730,7 +1769,7 @@ interface StreamTextResult<TOOLS extends Record<string, CoreTool>> {
1730
1769
  @param options.getErrorMessage An optional function that converts an error to an error message.
1731
1770
  @param options.sendUsage Whether to send the usage information to the client. Defaults to true.
1732
1771
  */
1733
- pipeDataStreamToResponse(response: ServerResponse$1, options?: ResponseInit & {
1772
+ pipeDataStreamToResponse(response: ServerResponse, options?: ResponseInit & {
1734
1773
  data?: StreamData;
1735
1774
  getErrorMessage?: (error: unknown) => string;
1736
1775
  sendUsage?: boolean;
@@ -1743,7 +1782,7 @@ interface StreamTextResult<TOOLS extends Record<string, CoreTool>> {
1743
1782
  @param response A Node.js response-like object (ServerResponse).
1744
1783
  @param init Optional headers, status code, and status text.
1745
1784
  */
1746
- pipeTextStreamToResponse(response: ServerResponse$1, init?: ResponseInit): void;
1785
+ pipeTextStreamToResponse(response: ServerResponse, init?: ResponseInit): void;
1747
1786
  /**
1748
1787
  Converts the result to a streamed response object with a stream data part stream.
1749
1788
  It can be used with the `useChat` and `useCompletion` hooks.
@@ -2255,15 +2294,20 @@ The following streams are supported:
2255
2294
  - `LangChainAIMessageChunk` streams (LangChain `model.stream` output)
2256
2295
  - `string` streams (LangChain `StringOutputParser` output)
2257
2296
  */
2258
- declare function toDataStream$1(stream: ReadableStream<LangChainStreamEvent> | ReadableStream<LangChainAIMessageChunk> | ReadableStream<string>, callbacks?: StreamCallbacks): ReadableStream<any>;
2297
+ declare function toDataStream$1(stream: ReadableStream<LangChainStreamEvent> | ReadableStream<LangChainAIMessageChunk> | ReadableStream<string>, callbacks?: StreamCallbacks): ReadableStream<Uint8Array>;
2259
2298
  declare function toDataStreamResponse$1(stream: ReadableStream<LangChainStreamEvent> | ReadableStream<LangChainAIMessageChunk> | ReadableStream<string>, options?: {
2260
2299
  init?: ResponseInit;
2261
2300
  data?: StreamData;
2262
2301
  callbacks?: StreamCallbacks;
2263
2302
  }): Response;
2303
+ declare function mergeIntoDataStream$1(stream: ReadableStream<LangChainStreamEvent> | ReadableStream<LangChainAIMessageChunk> | ReadableStream<string>, options: {
2304
+ dataStream: DataStreamWriter;
2305
+ callbacks?: StreamCallbacks;
2306
+ }): void;
2264
2307
 
2265
2308
  declare namespace langchainAdapter {
2266
2309
  export {
2310
+ mergeIntoDataStream$1 as mergeIntoDataStream,
2267
2311
  toDataStream$1 as toDataStream,
2268
2312
  toDataStreamResponse$1 as toDataStreamResponse,
2269
2313
  };
@@ -2272,20 +2316,26 @@ declare namespace langchainAdapter {
2272
2316
  type EngineResponse = {
2273
2317
  delta: string;
2274
2318
  };
2275
- declare function toDataStream(stream: AsyncIterable<EngineResponse>, callbacks?: StreamCallbacks): ReadableStream<any>;
2319
+ declare function toDataStream(stream: AsyncIterable<EngineResponse>, callbacks?: StreamCallbacks): ReadableStream<Uint8Array>;
2276
2320
  declare function toDataStreamResponse(stream: AsyncIterable<EngineResponse>, options?: {
2277
2321
  init?: ResponseInit;
2278
2322
  data?: StreamData;
2279
2323
  callbacks?: StreamCallbacks;
2280
2324
  }): Response;
2325
+ declare function mergeIntoDataStream(stream: AsyncIterable<EngineResponse>, options: {
2326
+ dataStream: DataStreamWriter;
2327
+ callbacks?: StreamCallbacks;
2328
+ }): void;
2281
2329
 
2330
+ declare const llamaindexAdapter_mergeIntoDataStream: typeof mergeIntoDataStream;
2282
2331
  declare const llamaindexAdapter_toDataStream: typeof toDataStream;
2283
2332
  declare const llamaindexAdapter_toDataStreamResponse: typeof toDataStreamResponse;
2284
2333
  declare namespace llamaindexAdapter {
2285
2334
  export {
2335
+ llamaindexAdapter_mergeIntoDataStream as mergeIntoDataStream,
2286
2336
  llamaindexAdapter_toDataStream as toDataStream,
2287
2337
  llamaindexAdapter_toDataStreamResponse as toDataStreamResponse,
2288
2338
  };
2289
2339
  }
2290
2340
 
2291
- export { AssistantContent, AssistantResponse, CallWarning, CoreAssistantMessage, CoreMessage, CoreSystemMessage, CoreTool, ToolCallUnion as CoreToolCallUnion, CoreToolChoice, CoreToolMessage, ToolResultUnion as CoreToolResultUnion, CoreUserMessage, DataContent, DownloadError, EmbedManyResult, EmbedResult, Embedding, EmbeddingModel, EmbeddingModelUsage, Experimental_LanguageModelV1Middleware, FilePart, FinishReason, GenerateObjectResult, GenerateTextResult, ImagePart, InvalidArgumentError, InvalidDataContentError, InvalidMessageRoleError, InvalidToolArgumentsError, langchainAdapter as LangChainAdapter, LanguageModel, LanguageModelRequestMetadata, LanguageModelResponseMetadata, LanguageModelUsage, llamaindexAdapter as LlamaIndexAdapter, LogProbs, MessageConversionError, NoObjectGeneratedError, NoSuchProviderError, NoSuchToolError, ObjectStreamPart, output as Output, Provider, ProviderMetadata, RetryError, StepResult, StreamData, StreamObjectResult, StreamTextResult, TextPart, TextStreamPart, ToolCallPart, ToolContent, ToolExecutionOptions, ToolResultPart, UserContent, convertToCoreMessages, cosineSimilarity, createStreamDataTransformer, embed, embedMany, experimental_createProviderRegistry, experimental_customProvider, experimental_wrapLanguageModel, generateObject, generateText, streamObject, streamText, tool };
2341
+ export { AssistantContent, AssistantResponse, CallWarning, CoreAssistantMessage, CoreMessage, CoreSystemMessage, CoreTool, ToolCallUnion as CoreToolCallUnion, CoreToolChoice, CoreToolMessage, ToolResultUnion as CoreToolResultUnion, CoreUserMessage, DataContent, DataStreamWriter, DownloadError, EmbedManyResult, EmbedResult, Embedding, EmbeddingModel, EmbeddingModelUsage, Experimental_LanguageModelV1Middleware, FilePart, FinishReason, GenerateObjectResult, GenerateTextResult, ImagePart, InvalidArgumentError, InvalidDataContentError, InvalidMessageRoleError, InvalidToolArgumentsError, langchainAdapter as LangChainAdapter, LanguageModel, LanguageModelRequestMetadata, LanguageModelResponseMetadata, LanguageModelUsage, llamaindexAdapter as LlamaIndexAdapter, LogProbs, MessageConversionError, NoObjectGeneratedError, NoSuchProviderError, NoSuchToolError, ObjectStreamPart, output as Output, Provider, ProviderMetadata, RetryError, StepResult, StreamData, StreamObjectResult, StreamTextResult, TextPart, TextStreamPart, ToolCallPart, ToolContent, ToolExecutionOptions, ToolResultPart, UserContent, convertToCoreMessages, cosineSimilarity, createDataStream, createDataStreamResponse, embed, embedMany, experimental_createProviderRegistry, experimental_customProvider, experimental_wrapLanguageModel, generateObject, generateText, pipeDataStreamToResponse, streamObject, streamText, tool };