ai 3.0.24 → 3.0.26

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
package/dist/index.d.mts CHANGED
@@ -1,9 +1,8 @@
  import { LanguageModelV1, LanguageModelV1FinishReason, LanguageModelV1CallWarning } from '@ai-sdk/provider';
  import { z } from 'zod';
+ import { ServerResponse } from 'node:http';
  import { AssistantStream } from 'openai/lib/AssistantStream';
  import { Run } from 'openai/resources/beta/threads/runs/runs';
- import { ChatCompletionResponseChunk } from '@mistralai/mistralai';
- import { ServerResponse } from 'node:http';

  type TokenUsage = {
  promptTokens: number;
@@ -684,6 +683,18 @@ declare class StreamTextResult<TOOLS extends Record<string, ExperimentalTool>> {
  */
  toAIStream(callbacks?: AIStreamCallbacksAndOptions): ReadableStream<any>;
  /**
+ Writes stream data output to a Node.js response-like object.
+ It sets a `Content-Type` header to `text/plain; charset=utf-8` and
+ writes each text delta as a separate chunk.
+
+ @param response A Node.js response-like object (ServerResponse).
+ @param init Optional headers and status code.
+ */
+ pipeAIStreamToResponse(response: ServerResponse, init?: {
+ headers?: Record<string, string>;
+ status?: number;
+ }): void;
+ /**
  Creates a simple text stream response.
  Each text delta is encoded as UTF-8 and sent as a separate chunk.
  Non-text-delta events are ignored.
@@ -691,7 +702,7 @@ declare class StreamTextResult<TOOLS extends Record<string, ExperimentalTool>> {
  toTextStreamResponse(init?: ResponseInit): Response;
  }

- interface FunctionCall {
+ interface FunctionCall$1 {
  /**
  * The arguments to call the function with, as generated by the model in JSON
  * format. Note that the model does not always generate valid JSON, and may
@@ -779,7 +790,7 @@ interface Message$1 {
  * contains the function call name and arguments. Otherwise, the field should
  * not be set. (Deprecated and replaced by tool_calls.)
  */
- function_call?: string | FunctionCall;
+ function_call?: string | FunctionCall$1;
  data?: JSONValue;
  /**
  * If the assistant role makes a tool call, the `tool_calls` field contains
@@ -798,12 +809,12 @@ type ChatRequest = {
  messages: Message$1[];
  options?: RequestOptions;
  functions?: Array<Function>;
- function_call?: FunctionCall;
+ function_call?: FunctionCall$1;
  data?: Record<string, string>;
  tools?: Array<Tool>;
  tool_choice?: ToolChoice;
  };
- type FunctionCallHandler = (chatMessages: Message$1[], functionCall: FunctionCall) => Promise<ChatRequest | void>;
+ type FunctionCallHandler = (chatMessages: Message$1[], functionCall: FunctionCall$1) => Promise<ChatRequest | void>;
  type ToolCallHandler = (chatMessages: Message$1[], toolCalls: ToolCall[]) => Promise<ChatRequest | void>;
  type RequestOptions = {
  headers?: Record<string, string> | Headers;
@@ -812,7 +823,7 @@ type RequestOptions = {
  type ChatRequestOptions = {
  options?: RequestOptions;
  functions?: Array<Function>;
- function_call?: FunctionCall;
+ function_call?: FunctionCall$1;
  tools?: Array<Tool>;
  tool_choice?: ToolChoice;
  data?: Record<string, string>;
@@ -985,7 +996,7 @@ interface StreamPart<CODE extends string, NAME extends string, TYPE> {
  }
  declare const textStreamPart: StreamPart<'0', 'text', string>;
  declare const functionCallStreamPart: StreamPart<'1', 'function_call', {
- function_call: FunctionCall;
+ function_call: FunctionCall$1;
  }>;
  declare const dataStreamPart: StreamPart<'2', 'data', Array<JSONValue>>;
  declare const errorStreamPart: StreamPart<'3', 'error', string>;
@@ -1210,7 +1221,7 @@ interface ChoiceDelta {
  * The name and arguments of a function that should be called, as generated by the
  * model.
  */
- function_call?: FunctionCall;
+ function_call?: FunctionCall$1;
  /**
  * The role of the author of this message.
  */
@@ -1645,6 +1656,31 @@ declare function LangChainStream(callbacks?: AIStreamCallbacksAndOptions): {
  };
  };

+ interface ChatCompletionResponseChunk {
+ id: string;
+ object: 'chat.completion.chunk';
+ created: number;
+ model: string;
+ choices: ChatCompletionResponseChunkChoice[];
+ }
+ interface ChatCompletionResponseChunkChoice {
+ index: number;
+ delta: {
+ role?: string;
+ content?: string;
+ tool_calls?: ToolCalls[];
+ };
+ finish_reason: string;
+ }
+ interface FunctionCall {
+ name: string;
+ arguments: string;
+ }
+ interface ToolCalls {
+ id: 'null';
+ type: 'function';
+ function: FunctionCall;
+ }
  declare function MistralStream(response: AsyncGenerator<ChatCompletionResponseChunk, void, unknown>, callbacks?: AIStreamCallbacksAndOptions): ReadableStream;

  interface Prediction {
@@ -1767,4 +1803,4 @@ declare function streamToResponse(res: ReadableStream, response: ServerResponse,
  status?: number;
  }): void;

- export { AIStream, AIStreamCallbacksAndOptions, AIStreamParser, AIStreamParserOptions, AWSBedrockAnthropicMessagesStream, AWSBedrockAnthropicStream, AWSBedrockCohereStream, AWSBedrockLlama2Stream, AWSBedrockStream, AnthropicStream, AssistantContent, AssistantMessage, AssistantResponse, ChatRequest, ChatRequestOptions, CohereStream, CompletionUsage, CreateMessage, DataContent, DataMessage, DeepPartial, ErrorStreamPart, ExperimentalAssistantMessage, ExperimentalMessage, ExperimentalTool, ExperimentalToolMessage, ExperimentalUserMessage, Function, FunctionCall, FunctionCallHandler, FunctionCallPayload, GenerateObjectResult, GenerateTextResult, GoogleGenerativeAIStream, HuggingFaceStream, IdGenerator, ImagePart, InkeepAIStreamCallbacksAndOptions, InkeepChatResultCallbacks, InkeepOnFinalMetadata, InkeepStream, JSONValue, LangChainStream, Message$1 as Message, MistralStream, OpenAIStream, OpenAIStreamCallbacks, ReactResponseRow, ReplicateStream, RequestOptions, StreamData, StreamObjectResult, StreamPart, StreamString, StreamTextResult, StreamingTextResponse, TextPart$1 as TextPart, TextStreamPart, Tool, ToolCall, ToolCallHandler, ToolCallPart, ToolCallPayload, ToolChoice, ToolContent, ToolResultPart, UseChatOptions, UseCompletionOptions, UserContent, convertDataContentToBase64String, convertDataContentToUint8Array, createCallbacksTransformer, createChunkDecoder, createEventStreamTransformer, createStreamDataTransformer, experimental_AssistantResponse, experimental_StreamData, experimental_StreamingReactResponse, experimental_generateObject, experimental_generateText, experimental_streamObject, experimental_streamText, formatStreamPart, generateId, isStreamStringEqualToType, generateId as nanoid, parseStreamPart, readDataStream, readableFromAsyncIterable, streamToResponse, tool, trimStartOfStreamHelper };
+ export { AIStream, AIStreamCallbacksAndOptions, AIStreamParser, AIStreamParserOptions, AWSBedrockAnthropicMessagesStream, AWSBedrockAnthropicStream, AWSBedrockCohereStream, AWSBedrockLlama2Stream, AWSBedrockStream, AnthropicStream, AssistantContent, AssistantMessage, AssistantResponse, ChatRequest, ChatRequestOptions, CohereStream, CompletionUsage, CreateMessage, DataContent, DataMessage, DeepPartial, ErrorStreamPart, ExperimentalAssistantMessage, ExperimentalMessage, ExperimentalTool, ExperimentalToolMessage, ExperimentalUserMessage, Function, FunctionCall$1 as FunctionCall, FunctionCallHandler, FunctionCallPayload, GenerateObjectResult, GenerateTextResult, GoogleGenerativeAIStream, HuggingFaceStream, IdGenerator, ImagePart, InkeepAIStreamCallbacksAndOptions, InkeepChatResultCallbacks, InkeepOnFinalMetadata, InkeepStream, JSONValue, LangChainStream, Message$1 as Message, MistralStream, OpenAIStream, OpenAIStreamCallbacks, ReactResponseRow, ReplicateStream, RequestOptions, StreamData, StreamObjectResult, StreamPart, StreamString, StreamTextResult, StreamingTextResponse, TextPart$1 as TextPart, TextStreamPart, Tool, ToolCall, ToolCallHandler, ToolCallPart, ToolCallPayload, ToolChoice, ToolContent, ToolResultPart, UseChatOptions, UseCompletionOptions, UserContent, convertDataContentToBase64String, convertDataContentToUint8Array, createCallbacksTransformer, createChunkDecoder, createEventStreamTransformer, createStreamDataTransformer, experimental_AssistantResponse, experimental_StreamData, experimental_StreamingReactResponse, experimental_generateObject, experimental_generateText, experimental_streamObject, experimental_streamText, formatStreamPart, generateId, isStreamStringEqualToType, generateId as nanoid, parseStreamPart, readDataStream, readableFromAsyncIterable, streamToResponse, tool, trimStartOfStreamHelper };
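
Note: the hunks above add a `pipeAIStreamToResponse` method to `StreamTextResult` for plain Node.js servers, where the handler receives a `ServerResponse` rather than returning a web `Response`. A minimal sketch of how it might be called from a `node:http` handler follows; the `serve` wrapper, the prompt text, and the idea of injecting an arbitrary `LanguageModelV1` implementation are illustrative assumptions, not part of this release.

import { createServer } from 'node:http';
import { experimental_streamText } from 'ai';
import type { LanguageModelV1 } from '@ai-sdk/provider';

// Assumed setup: `model` is any LanguageModelV1 implementation supplied by
// the caller (for example from an @ai-sdk provider package).
export function serve(model: LanguageModelV1) {
  return createServer(async (_req, res) => {
    const result = await experimental_streamText({
      model,
      prompt: 'Invent a new holiday and describe its traditions.',
    });

    // Added in this version range: write the AI stream protocol directly to
    // the Node.js ServerResponse instead of building a web Response first.
    result.pipeAIStreamToResponse(res, { status: 200 });
  });
}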
package/dist/index.d.ts CHANGED
@@ -1,9 +1,8 @@
  import { LanguageModelV1, LanguageModelV1FinishReason, LanguageModelV1CallWarning } from '@ai-sdk/provider';
  import { z } from 'zod';
+ import { ServerResponse } from 'node:http';
  import { AssistantStream } from 'openai/lib/AssistantStream';
  import { Run } from 'openai/resources/beta/threads/runs/runs';
- import { ChatCompletionResponseChunk } from '@mistralai/mistralai';
- import { ServerResponse } from 'node:http';

  type TokenUsage = {
  promptTokens: number;
@@ -684,6 +683,18 @@ declare class StreamTextResult<TOOLS extends Record<string, ExperimentalTool>> {
  */
  toAIStream(callbacks?: AIStreamCallbacksAndOptions): ReadableStream<any>;
  /**
+ Writes stream data output to a Node.js response-like object.
+ It sets a `Content-Type` header to `text/plain; charset=utf-8` and
+ writes each text delta as a separate chunk.
+
+ @param response A Node.js response-like object (ServerResponse).
+ @param init Optional headers and status code.
+ */
+ pipeAIStreamToResponse(response: ServerResponse, init?: {
+ headers?: Record<string, string>;
+ status?: number;
+ }): void;
+ /**
  Creates a simple text stream response.
  Each text delta is encoded as UTF-8 and sent as a separate chunk.
  Non-text-delta events are ignored.
@@ -691,7 +702,7 @@ declare class StreamTextResult<TOOLS extends Record<string, ExperimentalTool>> {
  toTextStreamResponse(init?: ResponseInit): Response;
  }

- interface FunctionCall {
+ interface FunctionCall$1 {
  /**
  * The arguments to call the function with, as generated by the model in JSON
  * format. Note that the model does not always generate valid JSON, and may
@@ -779,7 +790,7 @@ interface Message$1 {
  * contains the function call name and arguments. Otherwise, the field should
  * not be set. (Deprecated and replaced by tool_calls.)
  */
- function_call?: string | FunctionCall;
+ function_call?: string | FunctionCall$1;
  data?: JSONValue;
  /**
  * If the assistant role makes a tool call, the `tool_calls` field contains
@@ -798,12 +809,12 @@ type ChatRequest = {
  messages: Message$1[];
  options?: RequestOptions;
  functions?: Array<Function>;
- function_call?: FunctionCall;
+ function_call?: FunctionCall$1;
  data?: Record<string, string>;
  tools?: Array<Tool>;
  tool_choice?: ToolChoice;
  };
- type FunctionCallHandler = (chatMessages: Message$1[], functionCall: FunctionCall) => Promise<ChatRequest | void>;
+ type FunctionCallHandler = (chatMessages: Message$1[], functionCall: FunctionCall$1) => Promise<ChatRequest | void>;
  type ToolCallHandler = (chatMessages: Message$1[], toolCalls: ToolCall[]) => Promise<ChatRequest | void>;
  type RequestOptions = {
  headers?: Record<string, string> | Headers;
@@ -812,7 +823,7 @@ type RequestOptions = {
  type ChatRequestOptions = {
  options?: RequestOptions;
  functions?: Array<Function>;
- function_call?: FunctionCall;
+ function_call?: FunctionCall$1;
  tools?: Array<Tool>;
  tool_choice?: ToolChoice;
  data?: Record<string, string>;
@@ -985,7 +996,7 @@ interface StreamPart<CODE extends string, NAME extends string, TYPE> {
  }
  declare const textStreamPart: StreamPart<'0', 'text', string>;
  declare const functionCallStreamPart: StreamPart<'1', 'function_call', {
- function_call: FunctionCall;
+ function_call: FunctionCall$1;
  }>;
  declare const dataStreamPart: StreamPart<'2', 'data', Array<JSONValue>>;
  declare const errorStreamPart: StreamPart<'3', 'error', string>;
@@ -1210,7 +1221,7 @@ interface ChoiceDelta {
  * The name and arguments of a function that should be called, as generated by the
  * model.
  */
- function_call?: FunctionCall;
+ function_call?: FunctionCall$1;
  /**
  * The role of the author of this message.
  */
@@ -1645,6 +1656,31 @@ declare function LangChainStream(callbacks?: AIStreamCallbacksAndOptions): {
  };
  };

+ interface ChatCompletionResponseChunk {
+ id: string;
+ object: 'chat.completion.chunk';
+ created: number;
+ model: string;
+ choices: ChatCompletionResponseChunkChoice[];
+ }
+ interface ChatCompletionResponseChunkChoice {
+ index: number;
+ delta: {
+ role?: string;
+ content?: string;
+ tool_calls?: ToolCalls[];
+ };
+ finish_reason: string;
+ }
+ interface FunctionCall {
+ name: string;
+ arguments: string;
+ }
+ interface ToolCalls {
+ id: 'null';
+ type: 'function';
+ function: FunctionCall;
+ }
  declare function MistralStream(response: AsyncGenerator<ChatCompletionResponseChunk, void, unknown>, callbacks?: AIStreamCallbacksAndOptions): ReadableStream;

  interface Prediction {
@@ -1767,4 +1803,4 @@ declare function streamToResponse(res: ReadableStream, response: ServerResponse,
  status?: number;
  }): void;

- export { AIStream, AIStreamCallbacksAndOptions, AIStreamParser, AIStreamParserOptions, AWSBedrockAnthropicMessagesStream, AWSBedrockAnthropicStream, AWSBedrockCohereStream, AWSBedrockLlama2Stream, AWSBedrockStream, AnthropicStream, AssistantContent, AssistantMessage, AssistantResponse, ChatRequest, ChatRequestOptions, CohereStream, CompletionUsage, CreateMessage, DataContent, DataMessage, DeepPartial, ErrorStreamPart, ExperimentalAssistantMessage, ExperimentalMessage, ExperimentalTool, ExperimentalToolMessage, ExperimentalUserMessage, Function, FunctionCall, FunctionCallHandler, FunctionCallPayload, GenerateObjectResult, GenerateTextResult, GoogleGenerativeAIStream, HuggingFaceStream, IdGenerator, ImagePart, InkeepAIStreamCallbacksAndOptions, InkeepChatResultCallbacks, InkeepOnFinalMetadata, InkeepStream, JSONValue, LangChainStream, Message$1 as Message, MistralStream, OpenAIStream, OpenAIStreamCallbacks, ReactResponseRow, ReplicateStream, RequestOptions, StreamData, StreamObjectResult, StreamPart, StreamString, StreamTextResult, StreamingTextResponse, TextPart$1 as TextPart, TextStreamPart, Tool, ToolCall, ToolCallHandler, ToolCallPart, ToolCallPayload, ToolChoice, ToolContent, ToolResultPart, UseChatOptions, UseCompletionOptions, UserContent, convertDataContentToBase64String, convertDataContentToUint8Array, createCallbacksTransformer, createChunkDecoder, createEventStreamTransformer, createStreamDataTransformer, experimental_AssistantResponse, experimental_StreamData, experimental_StreamingReactResponse, experimental_generateObject, experimental_generateText, experimental_streamObject, experimental_streamText, formatStreamPart, generateId, isStreamStringEqualToType, generateId as nanoid, parseStreamPart, readDataStream, readableFromAsyncIterable, streamToResponse, tool, trimStartOfStreamHelper };
+ export { AIStream, AIStreamCallbacksAndOptions, AIStreamParser, AIStreamParserOptions, AWSBedrockAnthropicMessagesStream, AWSBedrockAnthropicStream, AWSBedrockCohereStream, AWSBedrockLlama2Stream, AWSBedrockStream, AnthropicStream, AssistantContent, AssistantMessage, AssistantResponse, ChatRequest, ChatRequestOptions, CohereStream, CompletionUsage, CreateMessage, DataContent, DataMessage, DeepPartial, ErrorStreamPart, ExperimentalAssistantMessage, ExperimentalMessage, ExperimentalTool, ExperimentalToolMessage, ExperimentalUserMessage, Function, FunctionCall$1 as FunctionCall, FunctionCallHandler, FunctionCallPayload, GenerateObjectResult, GenerateTextResult, GoogleGenerativeAIStream, HuggingFaceStream, IdGenerator, ImagePart, InkeepAIStreamCallbacksAndOptions, InkeepChatResultCallbacks, InkeepOnFinalMetadata, InkeepStream, JSONValue, LangChainStream, Message$1 as Message, MistralStream, OpenAIStream, OpenAIStreamCallbacks, ReactResponseRow, ReplicateStream, RequestOptions, StreamData, StreamObjectResult, StreamPart, StreamString, StreamTextResult, StreamingTextResponse, TextPart$1 as TextPart, TextStreamPart, Tool, ToolCall, ToolCallHandler, ToolCallPart, ToolCallPayload, ToolChoice, ToolContent, ToolResultPart, UseChatOptions, UseCompletionOptions, UserContent, convertDataContentToBase64String, convertDataContentToUint8Array, createCallbacksTransformer, createChunkDecoder, createEventStreamTransformer, createStreamDataTransformer, experimental_AssistantResponse, experimental_StreamData, experimental_StreamingReactResponse, experimental_generateObject, experimental_generateText, experimental_streamObject, experimental_streamText, formatStreamPart, generateId, isStreamStringEqualToType, generateId as nanoid, parseStreamPart, readDataStream, readableFromAsyncIterable, streamToResponse, tool, trimStartOfStreamHelper };
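
Note: as the hunks above show, the declaration files no longer import `ChatCompletionResponseChunk` from `@mistralai/mistralai`; the chunk shape is declared inline, so `MistralStream` can be typed without the Mistral SDK installed. A rough sketch of what that means for callers follows; since the inlined type is not exported, only structural matching is available, and `fakeMistralChunks`, the `mistral-small` model string, and `toResponse` are made up for illustration.

import { MistralStream, StreamingTextResponse } from 'ai';

// Hypothetical chunk source: any async generator whose items structurally
// match the inlined ChatCompletionResponseChunk shape type-checks, whether
// it comes from the Mistral SDK client or a test double like this one.
async function* fakeMistralChunks() {
  yield {
    id: 'chunk-1',
    object: 'chat.completion.chunk' as const,
    created: Date.now(),
    model: 'mistral-small',
    choices: [
      {
        index: 0,
        delta: { role: 'assistant', content: 'Hello' },
        finish_reason: 'stop',
      },
    ],
  };
}

export function toResponse(): Response {
  // MistralStream turns the chunk generator into an AI stream ReadableStream.
  const stream = MistralStream(fakeMistralChunks());
  return new StreamingTextResponse(stream);
}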
package/dist/index.js CHANGED
@@ -1492,11 +1492,43 @@ var StreamTextResult = class {
  return this.textStream.pipeThrough(createCallbacksTransformer(callbacks)).pipeThrough(createStreamDataTransformer());
  }
  /**
+ Writes stream data output to a Node.js response-like object.
+ It sets a `Content-Type` header to `text/plain; charset=utf-8` and
+ writes each text delta as a separate chunk.
+
+ @param response A Node.js response-like object (ServerResponse).
+ @param init Optional headers and status code.
+ */
+ pipeAIStreamToResponse(response, init) {
+ var _a;
+ response.writeHead((_a = init == null ? void 0 : init.status) != null ? _a : 200, {
+ "Content-Type": "text/plain; charset=utf-8",
+ ...init == null ? void 0 : init.headers
+ });
+ const reader = this.textStream.pipeThrough(createCallbacksTransformer(void 0)).pipeThrough(createStreamDataTransformer()).getReader();
+ const read = async () => {
+ try {
+ while (true) {
+ const { done, value } = await reader.read();
+ if (done)
+ break;
+ response.write(value);
+ }
+ } catch (error) {
+ throw error;
+ } finally {
+ response.end();
+ }
+ };
+ read();
+ }
+ /**
  Creates a simple text stream response.
  Each text delta is encoded as UTF-8 and sent as a separate chunk.
  Non-text-delta events are ignored.
  */
  toTextStreamResponse(init) {
+ var _a;
  const encoder = new TextEncoder();
  return new Response(
  this.textStream.pipeThrough(
@@ -1507,8 +1539,7 @@ var StreamTextResult = class {
  })
  ),
  {
- ...init,
- status: 200,
+ status: (_a = init == null ? void 0 : init.status) != null ? _a : 200,
  headers: {
  "Content-Type": "text/plain; charset=utf-8",
  ...init == null ? void 0 : init.headers
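
Note: one behavioral change in the last hunk is that `toTextStreamResponse` previously spread `init` and then hard-coded `status: 200`, so a caller-supplied status was silently discarded; after this change `init.status` wins and 200 is only the fallback. A hedged sketch of the difference follows; the 201 status and the `createdResponse` helper are arbitrary examples, not part of the package.

import { experimental_streamText } from 'ai';
import type { LanguageModelV1 } from '@ai-sdk/provider';

// Illustrative only: with this fix the 201 below reaches the client;
// in 3.0.24 the response status was always forced back to 200.
export async function createdResponse(model: LanguageModelV1): Promise<Response> {
  const result = await experimental_streamText({ model, prompt: 'Say hi.' });
  return result.toTextStreamResponse({ status: 201 });
}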